language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/SubtasksTimesInfo.java
|
{
"start": 3627,
"end": 5876
}
|
class ____ {
public static final String FIELD_NAME_SUBTASK = "subtask";
public static final String FIELD_NAME_ENDPOINT = "endpoint";
public static final String FIELD_NAME_DURATION = "duration";
public static final String FIELD_NAME_TIMESTAMPS = "timestamps";
@JsonProperty(FIELD_NAME_SUBTASK)
private final int subtask;
@JsonProperty(FIELD_NAME_ENDPOINT)
private final String endpoint;
@JsonProperty(FIELD_NAME_DURATION)
private final long duration;
@JsonProperty(FIELD_NAME_TIMESTAMPS)
private final Map<ExecutionState, Long> timestamps;
public SubtaskTimeInfo(
@JsonProperty(FIELD_NAME_SUBTASK) int subtask,
@JsonProperty(FIELD_NAME_ENDPOINT) String endpoint,
@JsonProperty(FIELD_NAME_DURATION) long duration,
@JsonProperty(FIELD_NAME_TIMESTAMPS) Map<ExecutionState, Long> timestamps) {
this.subtask = subtask;
this.endpoint = checkNotNull(endpoint);
this.duration = duration;
this.timestamps = checkNotNull(timestamps);
}
@JsonIgnore
public int getSubtask() {
return subtask;
}
@JsonIgnore
public String getEndpoint() {
return endpoint;
}
@JsonIgnore
public long getDuration() {
return duration;
}
@JsonIgnore
public Map<ExecutionState, Long> getTimestamps() {
return timestamps;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (null == o || this.getClass() != o.getClass()) {
return false;
}
SubtaskTimeInfo that = (SubtaskTimeInfo) o;
return subtask == that.subtask
&& Objects.equals(endpoint, that.endpoint)
&& duration == that.duration
&& Objects.equals(timestamps, that.timestamps);
}
@Override
public int hashCode() {
return Objects.hash(subtask, endpoint, duration, timestamps);
}
}
}
|
SubtaskTimeInfo
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/context/annotation/configuration/spr9031/Spr9031Tests.java
|
{
"start": 1460,
"end": 2229
}
|
class ____ {
/**
* Use of @Import to register LowLevelConfig results in ASM-based annotation
* processing.
*/
@Test
void withAsmAnnotationProcessing() {
AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext(HighLevelConfig.class);
assertThat(ctx.getBean(LowLevelConfig.class).scanned).isNotNull();
ctx.close();
}
/**
* Direct registration of LowLevelConfig results in reflection-based annotation
* processing.
*/
@Test
void withoutAsmAnnotationProcessing() {
AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext(LowLevelConfig.class);
assertThat(ctx.getBean(LowLevelConfig.class).scanned).isNotNull();
ctx.close();
}
@Configuration
@Import(LowLevelConfig.class)
static
|
Spr9031Tests
|
java
|
spring-projects__spring-framework
|
spring-webmvc/src/test/java/org/springframework/web/servlet/tags/UrlTagTests.java
|
{
"start": 1206,
"end": 14622
}
|
class ____ extends AbstractTagTests {
private UrlTag tag;
private MockPageContext context;
@BeforeEach
void setup() {
context = createPageContext();
tag = new UrlTag();
tag.setPageContext(context);
}
@Test
void paramSupport() {
assertThat(tag).isInstanceOf(ParamAware.class);
}
@Test
void doStartTag() throws JspException {
int action = tag.doStartTag();
assertThat(action).isEqualTo(Tag.EVAL_BODY_INCLUDE);
}
@Test
void doEndTag() throws JspException {
tag.setValue("url/path");
tag.doStartTag();
int action = tag.doEndTag();
assertThat(action).isEqualTo(Tag.EVAL_PAGE);
}
@Test
void varDefaultScope() throws JspException {
tag.setValue("url/path");
tag.setVar("var");
tag.doStartTag();
tag.doEndTag();
assertThat(context.getAttribute("var", PageContext.PAGE_SCOPE)).isEqualTo("url/path");
}
@Test
void varExplicitScope() throws JspException {
tag.setValue("url/path");
tag.setVar("var");
tag.setScope("request");
tag.doStartTag();
tag.doEndTag();
assertThat(context.getAttribute("var", PageContext.REQUEST_SCOPE)).isEqualTo("url/path");
}
@Test
void setHtmlEscapeDefault() throws JspException {
tag.setValue("url/path");
tag.setVar("var");
tag.doStartTag();
Param param = new Param();
param.setName("n me");
param.setValue("v&l=e");
tag.addParam(param);
param = new Param();
param.setName("name");
param.setValue("value2");
tag.addParam(param);
tag.doEndTag();
assertThat(context.getAttribute("var")).isEqualTo("url/path?n%20me=v%26l%3De&name=value2");
}
@Test
void setHtmlEscapeFalse() throws JspException {
tag.setValue("url/path");
tag.setVar("var");
tag.setHtmlEscape(false);
tag.doStartTag();
Param param = new Param();
param.setName("n me");
param.setValue("v&l=e");
tag.addParam(param);
param = new Param();
param.setName("name");
param.setValue("value2");
tag.addParam(param);
tag.doEndTag();
assertThat(context.getAttribute("var")).isEqualTo("url/path?n%20me=v%26l%3De&name=value2");
}
@Test
void setHtmlEscapeTrue() throws JspException {
tag.setValue("url/path");
tag.setVar("var");
tag.setHtmlEscape(true);
tag.doStartTag();
Param param = new Param();
param.setName("n me");
param.setValue("v&l=e");
tag.addParam(param);
param = new Param();
param.setName("name");
param.setValue("value2");
tag.addParam(param);
tag.doEndTag();
assertThat(context.getAttribute("var")).isEqualTo("url/path?n%20me=v%26l%3De&name=value2");
}
@Test
void setJavaScriptEscapeTrue() throws JspException {
tag.setValue("url/path");
tag.setVar("var");
tag.setJavaScriptEscape(true);
tag.doStartTag();
Param param = new Param();
param.setName("n me");
param.setValue("v&l=e");
tag.addParam(param);
param = new Param();
param.setName("name");
param.setValue("value2");
tag.addParam(param);
tag.doEndTag();
assertThat(context.getAttribute("var")).isEqualTo("url\\/path?n%20me=v%26l%3De&name=value2");
}
@Test
void setHtmlAndJavaScriptEscapeTrue() throws JspException {
tag.setValue("url/path");
tag.setVar("var");
tag.setHtmlEscape(true);
tag.setJavaScriptEscape(true);
tag.doStartTag();
Param param = new Param();
param.setName("n me");
param.setValue("v&l=e");
tag.addParam(param);
param = new Param();
param.setName("name");
param.setValue("value2");
tag.addParam(param);
tag.doEndTag();
assertThat(context.getAttribute("var")).isEqualTo("url\\/path?n%20me=v%26l%3De&name=value2");
}
@Test
void createQueryStringNoParams() throws JspException {
List<Param> params = new ArrayList<>();
Set<String> usedParams = new HashSet<>();
String queryString = tag.createQueryString(params, usedParams, true);
assertThat(queryString).isEmpty();
}
@Test
void createQueryStringOneParam() throws JspException {
List<Param> params = new ArrayList<>();
Set<String> usedParams = new HashSet<>();
Param param = new Param();
param.setName("name");
param.setValue("value");
params.add(param);
String queryString = tag.createQueryString(params, usedParams, true);
assertThat(queryString).isEqualTo("?name=value");
}
@Test
void createQueryStringOneParamForExistingQueryString() throws JspException {
List<Param> params = new ArrayList<>();
Set<String> usedParams = new HashSet<>();
Param param = new Param();
param.setName("name");
param.setValue("value");
params.add(param);
String queryString = tag.createQueryString(params, usedParams, false);
assertThat(queryString).isEqualTo("&name=value");
}
@Test
void createQueryStringOneParamEmptyValue() throws JspException {
List<Param> params = new ArrayList<>();
Set<String> usedParams = new HashSet<>();
Param param = new Param();
param.setName("name");
param.setValue("");
params.add(param);
String queryString = tag.createQueryString(params, usedParams, true);
assertThat(queryString).isEqualTo("?name=");
}
@Test
void createQueryStringOneParamNullValue() throws JspException {
List<Param> params = new ArrayList<>();
Set<String> usedParams = new HashSet<>();
Param param = new Param();
param.setName("name");
param.setValue(null);
params.add(param);
String queryString = tag.createQueryString(params, usedParams, true);
assertThat(queryString).isEqualTo("?name");
}
@Test
void createQueryStringOneParamAlreadyUsed() throws JspException {
List<Param> params = new ArrayList<>();
Set<String> usedParams = new HashSet<>();
Param param = new Param();
param.setName("name");
param.setValue("value");
params.add(param);
usedParams.add("name");
String queryString = tag.createQueryString(params, usedParams, true);
assertThat(queryString).isEmpty();
}
@Test
void createQueryStringTwoParams() throws JspException {
List<Param> params = new ArrayList<>();
Set<String> usedParams = new HashSet<>();
Param param = new Param();
param.setName("name");
param.setValue("value");
params.add(param);
param = new Param();
param.setName("name");
param.setValue("value2");
params.add(param);
String queryString = tag.createQueryString(params, usedParams, true);
assertThat(queryString).isEqualTo("?name=value&name=value2");
}
@Test
void createQueryStringUrlEncoding() throws JspException {
List<Param> params = new ArrayList<>();
Set<String> usedParams = new HashSet<>();
Param param = new Param();
param.setName("n me");
param.setValue("v&l=e");
params.add(param);
param = new Param();
param.setName("name");
param.setValue("value2");
params.add(param);
String queryString = tag.createQueryString(params, usedParams, true);
assertThat(queryString).isEqualTo("?n%20me=v%26l%3De&name=value2");
}
@Test
void createQueryStringParamNullName() throws JspException {
List<Param> params = new ArrayList<>();
Set<String> usedParams = new HashSet<>();
Param param = new Param();
param.setName(null);
param.setValue("value");
params.add(param);
String queryString = tag.createQueryString(params, usedParams, true);
assertThat(queryString).isEmpty();
}
@Test
void createQueryStringParamEmptyName() throws JspException {
List<Param> params = new ArrayList<>();
Set<String> usedParams = new HashSet<>();
Param param = new Param();
param.setName("");
param.setValue("value");
params.add(param);
String queryString = tag.createQueryString(params, usedParams, true);
assertThat(queryString).isEmpty();
}
@Test
void replaceUriTemplateParamsNoParams() throws JspException {
List<Param> params = new ArrayList<>();
Set<String> usedParams = new HashSet<>();
String uri = tag.replaceUriTemplateParams("url/path", params, usedParams);
assertThat(uri).isEqualTo("url/path");
assertThat(usedParams).isEmpty();
}
@Test
void replaceUriTemplateParamsTemplateWithoutParamMatch() throws JspException {
List<Param> params = new ArrayList<>();
Set<String> usedParams = new HashSet<>();
String uri = tag.replaceUriTemplateParams("url/{path}", params, usedParams);
assertThat(uri).isEqualTo("url/{path}");
assertThat(usedParams).isEmpty();
}
@Test
void replaceUriTemplateParamsTemplateWithParamMatch() throws JspException {
List<Param> params = new ArrayList<>();
Set<String> usedParams = new HashSet<>();
Param param = new Param();
param.setName("name");
param.setValue("value");
params.add(param);
String uri = tag.replaceUriTemplateParams("url/{name}", params, usedParams);
assertThat(uri).isEqualTo("url/value");
assertThat(usedParams).hasSize(1);
assertThat(usedParams).contains("name");
}
@Test
void replaceUriTemplateParamsTemplateWithParamMatchNamePreEncoding() throws JspException {
List<Param> params = new ArrayList<>();
Set<String> usedParams = new HashSet<>();
Param param = new Param();
param.setName("n me");
param.setValue("value");
params.add(param);
String uri = tag.replaceUriTemplateParams("url/{n me}", params, usedParams);
assertThat(uri).isEqualTo("url/value");
assertThat(usedParams).hasSize(1);
assertThat(usedParams).contains("n me");
}
@Test
void replaceUriTemplateParamsTemplateWithParamMatchValueEncoded() throws JspException {
List<Param> params = new ArrayList<>();
Set<String> usedParams = new HashSet<>();
Param param = new Param();
param.setName("name");
param.setValue("v lue");
params.add(param);
String uri = tag.replaceUriTemplateParams("url/{name}", params,
usedParams);
assertThat(uri).isEqualTo("url/v%20lue");
assertThat(usedParams).hasSize(1);
assertThat(usedParams).contains("name");
}
@Test // SPR-11401
public void replaceUriTemplateParamsTemplateWithPathSegment() throws JspException {
List<Param> params = new ArrayList<>();
Set<String> usedParams = new HashSet<>();
Param param = new Param();
param.setName("name");
param.setValue("my/Id");
params.add(param);
String uri = tag.replaceUriTemplateParams("url/{/name}", params, usedParams);
assertThat(uri).isEqualTo("url/my%2FId");
assertThat(usedParams).hasSize(1);
assertThat(usedParams).contains("name");
}
@Test
void replaceUriTemplateParamsTemplateWithPath() throws JspException {
List<Param> params = new ArrayList<>();
Set<String> usedParams = new HashSet<>();
Param param = new Param();
param.setName("name");
param.setValue("my/Id");
params.add(param);
String uri = tag.replaceUriTemplateParams("url/{name}", params, usedParams);
assertThat(uri).isEqualTo("url/my/Id");
assertThat(usedParams).hasSize(1);
assertThat(usedParams).contains("name");
}
@Test
void createUrlRemoteServer() throws JspException {
tag.setValue("https://www.springframework.org/");
tag.doStartTag();
String uri = tag.createUrl();
assertThat(uri).isEqualTo("https://www.springframework.org/");
}
@Test
void createUrlRelative() throws JspException {
tag.setValue("url/path");
tag.doStartTag();
String uri = tag.createUrl();
assertThat(uri).isEqualTo("url/path");
}
@Test
void createUrlLocalContext() throws JspException {
((MockHttpServletRequest) context.getRequest()).setContextPath("/app-context");
tag.setValue("/url/path");
tag.doStartTag();
String uri = tag.createUrl();
assertThat(uri).isEqualTo("/app-context/url/path");
}
@Test
void createUrlRemoteContext() throws JspException {
((MockHttpServletRequest) context.getRequest()).setContextPath("/app-context");
tag.setValue("/url/path");
tag.setContext("some-other-context");
tag.doStartTag();
String uri = tag.createUrl();
assertThat(uri).isEqualTo("/some-other-context/url/path");
}
@Test
void createUrlRemoteContextWithSlash() throws JspException {
((MockHttpServletRequest) context.getRequest()).setContextPath("/app-context");
tag.setValue("/url/path");
tag.setContext("/some-other-context");
tag.doStartTag();
String uri = tag.createUrl();
assertThat(uri).isEqualTo("/some-other-context/url/path");
}
@Test
void createUrlRemoteContextSingleSlash() throws JspException {
((MockHttpServletRequest) context.getRequest()).setContextPath("/app-context");
tag.setValue("/url/path");
tag.setContext("/");
tag.doStartTag();
String uri = tag.createUrl();
assertThat(uri).isEqualTo("/url/path");
}
@Test
void createUrlWithParams() throws JspException {
tag.setValue("url/path");
tag.doStartTag();
Param param = new Param();
param.setName("name");
param.setValue("value");
tag.addParam(param);
param = new Param();
param.setName("n me");
param.setValue("v lue");
tag.addParam(param);
String uri = tag.createUrl();
assertThat(uri).isEqualTo("url/path?name=value&n%20me=v%20lue");
}
@Test
void createUrlWithTemplateParams() throws JspException {
tag.setValue("url/{name}");
tag.doStartTag();
Param param = new Param();
param.setName("name");
param.setValue("value");
tag.addParam(param);
param = new Param();
param.setName("n me");
param.setValue("v lue");
tag.addParam(param);
String uri = tag.createUrl();
assertThat(uri).isEqualTo("url/value?n%20me=v%20lue");
}
@Test
void createUrlWithParamAndExistingQueryString() throws JspException {
tag.setValue("url/path?foo=bar");
tag.doStartTag();
Param param = new Param();
param.setName("name");
param.setValue("value");
tag.addParam(param);
String uri = tag.createUrl();
assertThat(uri).isEqualTo("url/path?foo=bar&name=value");
}
}
|
UrlTagTests
|
java
|
apache__dubbo
|
dubbo-config/dubbo-config-api/src/test/java/org/apache/dubbo/config/bootstrap/builders/AbstractMethodBuilderTest.java
|
{
"start": 5938,
"end": 6334
}
|
class ____ extends AbstractMethodBuilder<MethodConfig, MethodBuilder> {
public MethodConfig build() {
MethodConfig parameterConfig = new MethodConfig();
super.build(parameterConfig);
return parameterConfig;
}
@Override
protected MethodBuilder getThis() {
return this;
}
}
private static
|
MethodBuilder
|
java
|
quarkusio__quarkus
|
core/deployment/src/test/java/io/quarkus/deployment/util/JandexUtilTest.java
|
{
"start": 8335,
"end": 8429
}
|
class ____ implements SingleWithBound<List<String>> {
}
public static
|
SingleWithBoundImpl
|
java
|
apache__kafka
|
server-common/src/main/java/org/apache/kafka/config/BrokerReconfigurable.java
|
{
"start": 921,
"end": 1035
}
|
interface ____ Kafka broker configs that support dynamic reconfiguration.
* <p>
* Components that implement this
|
for
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/index/mapper/BlockLoader.java
|
{
"start": 8272,
"end": 8942
}
|
class ____ be immutable and thread safe. Instances of
* {@link ColumnAtATimeReader} and {@link RowStrideReader} are all mutable and can only
* be accessed by one thread at a time but <strong>may</strong> be passed between threads.
* See implementations {@link Reader#canReuse} for how that's handled. "Normal" java objects
* don't need to do anything special to be kicked from thread to thread - the transfer itself
* establishes a {@code happens-before} relationship that makes everything you need visible.
* But Lucene's readers aren't "normal" java objects and sometimes need to be rebuilt if we
* shift threads.
* </p>
*/
public
|
must
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/threadsafety/ThreadSafeCheckerTest.java
|
{
"start": 25654,
"end": 25942
}
|
interface ____ {
int f();
}
""")
.addSourceLines(
"threadsafety/Test.java",
"""
package threadsafety;
import com.google.errorprone.annotations.ThreadSafe;
@ThreadSafe
|
Super
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/nestedsourceproperties/source/Song.java
|
{
"start": 274,
"end": 832
}
|
class ____ {
private Artist artist;
private String title;
private List<Integer> positions;
public Artist getArtist() {
return artist;
}
public void setArtist(Artist artist) {
this.artist = artist;
}
public String getTitle() {
return title;
}
public void setTitle(String title) {
this.title = title;
}
public List<Integer> getPositions() {
return positions;
}
public void setPositions(List<Integer> positions) {
this.positions = positions;
}
}
|
Song
|
java
|
quarkusio__quarkus
|
independent-projects/tools/registry-client/src/main/java/io/quarkus/registry/client/maven/MavenPlatformExtensionsResolver.java
|
{
"start": 708,
"end": 5295
}
|
class ____ implements RegistryPlatformExtensionsResolver {
private final MavenRegistryArtifactResolver artifactResolver;
private final MessageWriter log;
public MavenPlatformExtensionsResolver(MavenRegistryArtifactResolver artifactResolver,
MessageWriter log) {
this.artifactResolver = Objects.requireNonNull(artifactResolver);
this.log = Objects.requireNonNull(log);
}
@Override
public ExtensionCatalog.Mutable resolvePlatformExtensions(ArtifactCoords platformCoords)
throws RegistryResolutionException {
final String version;
if (platformCoords.getVersion() == null) {
version = resolveLatestBomVersion(platformCoords, "[0-alpha,)");
} else if (isVersionRange(platformCoords.getVersion())) {
version = resolveLatestBomVersion(platformCoords, platformCoords.getVersion());
} else {
version = platformCoords.getVersion();
}
final String groupId = platformCoords.getGroupId();
final String artifactId = PlatformArtifacts.ensureCatalogArtifactId(platformCoords.getArtifactId());
final String classifier = version;
final Artifact catalogArtifact = new DefaultArtifact(groupId, artifactId, classifier, "json", version);
log.debug("Resolving platform extension catalog %s", catalogArtifact);
final Path jsonPath;
try {
jsonPath = artifactResolver.resolve(catalogArtifact);
} catch (Exception e) {
RemoteRepository repo = null;
Throwable t = e;
while (t != null) {
if (t instanceof ArtifactNotFoundException) {
repo = ((ArtifactNotFoundException) t).getRepository();
break;
}
t = t.getCause();
}
final StringBuilder buf = new StringBuilder();
buf.append("Failed to resolve extension catalog of ")
.append(PlatformArtifacts.ensureBomArtifact(platformCoords).toCompactCoords());
if (repo != null) {
buf.append(" from Maven repository ").append(repo.getId()).append(" (").append(repo.getUrl()).append(")");
final List<RemoteRepository> mirrored = repo.getMirroredRepositories();
if (!mirrored.isEmpty()) {
buf.append(" which is a mirror of ");
buf.append(mirrored.get(0).getId()).append(" (").append(mirrored.get(0).getUrl()).append(")");
for (int i = 1; i < mirrored.size(); ++i) {
buf.append(", ").append(mirrored.get(i).getId()).append(" (").append(mirrored.get(i).getUrl())
.append(")");
}
buf.append(". The mirror may be out of sync.");
}
}
throw new RegistryResolutionException(buf.toString(), e);
}
try {
return ExtensionCatalog.mutableFromFile(jsonPath);
} catch (IOException e) {
throw new RegistryResolutionException("Failed to parse Quarkus extension catalog " + jsonPath, e);
}
}
private String resolveLatestBomVersion(ArtifactCoords bom, String versionRange)
throws RegistryResolutionException {
final Artifact bomArtifact = new DefaultArtifact(bom.getGroupId(),
PlatformArtifacts.ensureBomArtifactId(bom.getArtifactId()),
"", "pom", bom.getVersion());
log.debug("Resolving the latest version of %s:%s:%s:%s in the range %s", bom.getGroupId(), bom.getArtifactId(),
bom.getClassifier(), bom.getType(), versionRange);
try {
return artifactResolver.getLatestVersionFromRange(bomArtifact, versionRange);
} catch (Exception e) {
throw new RegistryResolutionException("Failed to resolve the latest version of " + bomArtifact.getGroupId()
+ ":" + bom.getArtifactId() + ":" + bom.getClassifier() + ":" + bom.getType() + ":" + versionRange, e);
}
}
private static boolean isVersionRange(String versionStr) {
if (versionStr == null || versionStr.isEmpty()) {
return false;
}
char c = versionStr.charAt(0);
if (c == '[' || c == '(') {
return true;
}
c = versionStr.charAt(versionStr.length() - 1);
if (c == ']' || c == ')') {
return true;
}
return versionStr.indexOf(',') >= 0;
}
}
|
MavenPlatformExtensionsResolver
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/legacy/files/StaticFileServerHandler.java
|
{
"start": 5467,
"end": 17955
}
|
class ____<T extends RestfulGateway> extends LeaderRetrievalHandler<T> {
/** Timezone in which this server answers its "if-modified" requests. */
private static final TimeZone GMT_TIMEZONE = TimeZone.getTimeZone("GMT");
/** Date format for HTTP. */
public static final String HTTP_DATE_FORMAT = "EEE, dd MMM yyyy HH:mm:ss zzz";
/** Be default, we allow files to be cached for 5 minutes. */
private static final int HTTP_CACHE_SECONDS = 300;
// ------------------------------------------------------------------------
/** The path in which the static documents are. */
private final File rootPath;
public StaticFileServerHandler(
GatewayRetriever<? extends T> retriever, Duration timeout, File rootPath)
throws IOException {
super(retriever, timeout, Collections.emptyMap());
this.rootPath = checkNotNull(rootPath).getCanonicalFile();
}
// ------------------------------------------------------------------------
// Responses to requests
// ------------------------------------------------------------------------
@Override
protected void respondAsLeader(
ChannelHandlerContext channelHandlerContext, RoutedRequest routedRequest, T gateway)
throws Exception {
final HttpRequest request = routedRequest.getRequest();
final String requestPath;
// make sure we request the "index.html" in case there is a directory request
if (routedRequest.getPath().endsWith("/")) {
requestPath = routedRequest.getPath() + "index.html";
} else {
requestPath = routedRequest.getPath();
}
try {
respondToRequest(channelHandlerContext, request, requestPath);
} catch (RestHandlerException rhe) {
HandlerUtils.sendErrorResponse(
channelHandlerContext,
routedRequest.getRequest(),
new ErrorResponseBody(rhe.getMessage()),
rhe.getHttpResponseStatus(),
responseHeaders);
}
}
/** Response when running with leading JobManager. */
private void respondToRequest(
ChannelHandlerContext ctx, HttpRequest request, String requestPath)
throws IOException, ParseException, URISyntaxException, RestHandlerException {
// convert to absolute path
final File file = new File(rootPath, requestPath);
if (!file.exists()) {
// file does not exist. Try to load it with the classloader
ClassLoader cl = StaticFileServerHandler.class.getClassLoader();
try (InputStream resourceStream = cl.getResourceAsStream("web" + requestPath)) {
boolean success = false;
try {
if (resourceStream != null) {
URL root = cl.getResource("web");
URL requested = cl.getResource("web" + requestPath);
if (root != null && requested != null) {
URI rootURI = new URI(root.getPath()).normalize();
URI requestedURI = new URI(requested.getPath()).normalize();
// Check that we don't load anything from outside of the
// expected scope.
if (!rootURI.relativize(requestedURI).equals(requestedURI)) {
logger.debug(
"Loading missing file from classloader: {}", requestPath);
// ensure that directory to file exists.
file.getParentFile().mkdirs();
Files.copy(resourceStream, file.toPath());
success = true;
}
}
}
} catch (Throwable t) {
logger.error("error while responding", t);
} finally {
if (!success) {
logger.debug(
"Unable to load requested file {} from classloader", requestPath);
throw new NotFoundException(
String.format("Unable to load requested file %s.", requestPath));
}
}
}
}
checkFileValidity(file, rootPath, logger);
// cache validation
final String ifModifiedSince = request.headers().get(IF_MODIFIED_SINCE);
if (ifModifiedSince != null && !ifModifiedSince.isEmpty()) {
SimpleDateFormat dateFormatter = new SimpleDateFormat(HTTP_DATE_FORMAT, Locale.US);
Date ifModifiedSinceDate = dateFormatter.parse(ifModifiedSince);
// Only compare up to the second because the datetime format we send to the client
// does not have milliseconds
long ifModifiedSinceDateSeconds = ifModifiedSinceDate.getTime() / 1000;
long fileLastModifiedSeconds = file.lastModified() / 1000;
if (ifModifiedSinceDateSeconds == fileLastModifiedSeconds) {
if (logger.isDebugEnabled()) {
logger.debug(
"Responding 'NOT MODIFIED' for file '" + file.getAbsolutePath() + '\'');
}
sendNotModified(ctx);
return;
}
}
if (logger.isDebugEnabled()) {
logger.debug("Responding with file '" + file.getAbsolutePath() + '\'');
}
// Don't need to close this manually. Netty's DefaultFileRegion will take care of it.
final RandomAccessFile raf;
try {
raf = new RandomAccessFile(file, "r");
} catch (FileNotFoundException e) {
if (logger.isDebugEnabled()) {
logger.debug("Could not find file {}.", file.getAbsolutePath());
}
throw new NotFoundException("File not found.");
}
try {
long fileLength = raf.length();
HttpResponse response = new DefaultHttpResponse(HTTP_1_1, OK);
setContentTypeHeader(response, file);
setDateAndCacheHeaders(response, file);
if (HttpUtil.isKeepAlive(request)) {
response.headers().set(CONNECTION, HttpHeaderValues.KEEP_ALIVE);
}
HttpUtil.setContentLength(response, fileLength);
// write the initial line and the header.
ctx.write(response);
// write the content.
ChannelFuture lastContentFuture;
if (ctx.pipeline().get(SslHandler.class) == null) {
ctx.write(
new DefaultFileRegion(raf.getChannel(), 0, fileLength),
ctx.newProgressivePromise());
lastContentFuture = ctx.writeAndFlush(LastHttpContent.EMPTY_LAST_CONTENT);
} else {
lastContentFuture =
ctx.writeAndFlush(
new HttpChunkedInput(new ChunkedFile(raf, 0, fileLength, 8192)),
ctx.newProgressivePromise());
// HttpChunkedInput will write the end marker (LastHttpContent) for us.
}
// close the connection, if no keep-alive is needed
if (!HttpUtil.isKeepAlive(request)) {
lastContentFuture.addListener(ChannelFutureListener.CLOSE);
}
} catch (Exception e) {
raf.close();
logger.error("Failed to serve file.", e);
throw new RestHandlerException("Internal server error.", INTERNAL_SERVER_ERROR);
}
}
@Override
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
if (ctx.channel().isActive()) {
logger.error("Caught exception", cause);
HandlerUtils.sendErrorResponse(
ctx,
false,
new ErrorResponseBody("Internal server error."),
INTERNAL_SERVER_ERROR,
Collections.emptyMap());
}
}
// ------------------------------------------------------------------------
// Utilities to encode headers and responses
// ------------------------------------------------------------------------
/**
* Send the "304 Not Modified" response. This response can be used when the file timestamp is
* the same as what the browser is sending up.
*
* @param ctx The channel context to write the response to.
*/
public static void sendNotModified(ChannelHandlerContext ctx) {
FullHttpResponse response = new DefaultFullHttpResponse(HTTP_1_1, NOT_MODIFIED);
setDateHeader(response);
// close the connection as soon as the error message is sent.
ctx.writeAndFlush(response).addListener(ChannelFutureListener.CLOSE);
}
/**
* Sets the "date" header for the HTTP response.
*
* @param response HTTP response
*/
public static void setDateHeader(FullHttpResponse response) {
SimpleDateFormat dateFormatter = new SimpleDateFormat(HTTP_DATE_FORMAT, Locale.US);
dateFormatter.setTimeZone(GMT_TIMEZONE);
Calendar time = new GregorianCalendar();
response.headers().set(DATE, dateFormatter.format(time.getTime()));
}
/**
* Sets the "date" and "cache" headers for the HTTP Response.
*
* @param response The HTTP response object.
* @param fileToCache File to extract the modification timestamp from.
*/
public static void setDateAndCacheHeaders(HttpResponse response, File fileToCache) {
SimpleDateFormat dateFormatter = new SimpleDateFormat(HTTP_DATE_FORMAT, Locale.US);
dateFormatter.setTimeZone(GMT_TIMEZONE);
// date header
Calendar time = new GregorianCalendar();
response.headers().set(DATE, dateFormatter.format(time.getTime()));
// cache headers
time.add(Calendar.SECOND, HTTP_CACHE_SECONDS);
response.headers().set(EXPIRES, dateFormatter.format(time.getTime()));
response.headers().set(CACHE_CONTROL, "private, max-age=" + HTTP_CACHE_SECONDS);
response.headers()
.set(LAST_MODIFIED, dateFormatter.format(new Date(fileToCache.lastModified())));
}
/**
* Sets the content type header for the HTTP Response.
*
* @param response HTTP response
* @param file file to extract content type
*/
public static void setContentTypeHeader(HttpResponse response, File file) {
String mimeType = MimeTypes.getMimeTypeForFileName(file.getName());
String mimeFinal = mimeType != null ? mimeType : MimeTypes.getDefaultMimeType();
response.headers().set(CONTENT_TYPE, mimeFinal);
}
/**
* Checks various conditions for file access. If all checks pass this method returns, and
* processing of the request may continue. If any check fails this method throws a {@link
* RestHandlerException}, and further processing of the request must be limited to sending an
* error response.
*/
public static void checkFileValidity(File file, File rootPath, Logger logger)
throws IOException, RestHandlerException {
// this check must be done first to prevent probing for arbitrary files
if (!file.getCanonicalFile().toPath().startsWith(rootPath.toPath())) {
if (logger.isDebugEnabled()) {
logger.debug(
"Requested path {} points outside the root directory.",
file.getAbsolutePath());
}
throw new RestHandlerException("Forbidden.", FORBIDDEN);
}
if (!file.exists() || file.isHidden()) {
if (logger.isDebugEnabled()) {
logger.debug("Requested path {} cannot be found.", file.getAbsolutePath());
}
throw new RestHandlerException("File not found.", NOT_FOUND);
}
if (file.isDirectory() || !file.isFile()) {
if (logger.isDebugEnabled()) {
logger.debug("Requested path {} does not point to a file.", file.getAbsolutePath());
}
throw new RestHandlerException("File not found.", METHOD_NOT_ALLOWED);
}
}
}
|
StaticFileServerHandler
|
java
|
spring-projects__spring-boot
|
core/spring-boot-test/src/test/java/org/springframework/boot/test/context/SpringBootTestGroovyConventionConfigurationTests.java
|
{
"start": 1034,
"end": 1211
}
|
class ____ {
@Autowired
private String foo;
@Test
void groovyConfigLoaded() {
assertThat(this.foo).isEqualTo("World");
}
}
|
SpringBootTestGroovyConventionConfigurationTests
|
java
|
spring-projects__spring-boot
|
module/spring-boot-r2dbc/src/main/java/org/springframework/boot/r2dbc/health/ConnectionFactoryHealthIndicator.java
|
{
"start": 1471,
"end": 4359
}
|
class ____ extends AbstractReactiveHealthIndicator {
private final ConnectionFactory connectionFactory;
private final @Nullable String validationQuery;
/**
* Create a new {@link ConnectionFactoryHealthIndicator} using the specified
* {@link ConnectionFactory} and no validation query.
* @param connectionFactory the connection factory
* @see Connection#validate(ValidationDepth)
*/
public ConnectionFactoryHealthIndicator(ConnectionFactory connectionFactory) {
this(connectionFactory, null);
}
/**
* Create a new {@link ConnectionFactoryHealthIndicator} using the specified
* {@link ConnectionFactory} and validation query.
* @param connectionFactory the connection factory
* @param validationQuery the validation query, can be {@code null} to use connection
* validation
*/
public ConnectionFactoryHealthIndicator(ConnectionFactory connectionFactory, @Nullable String validationQuery) {
Assert.notNull(connectionFactory, "'connectionFactory' must not be null");
this.connectionFactory = connectionFactory;
this.validationQuery = validationQuery;
}
@Override
protected final Mono<Health> doHealthCheck(Health.Builder builder) {
return validate(builder).defaultIfEmpty(builder.build())
.onErrorResume(Exception.class, (ex) -> Mono.just(builder.down(ex).build()));
}
private Mono<Health> validate(Health.Builder builder) {
builder.withDetail("database", this.connectionFactory.getMetadata().getName());
return (StringUtils.hasText(this.validationQuery)) ? validateWithQuery(builder)
: validateWithConnectionValidation(builder);
}
private Mono<Health> validateWithQuery(Health.Builder builder) {
Assert.state(this.validationQuery != null, "'validationQuery' must not be null");
builder.withDetail("validationQuery", this.validationQuery);
Mono<Object> connectionValidation = Mono.usingWhen(this.connectionFactory.create(),
(conn) -> Flux.from(conn.createStatement(this.validationQuery).execute())
.flatMap((it) -> it.map(this::extractResult))
.next(),
Connection::close, (o, throwable) -> o.close(), Connection::close);
return connectionValidation.map((result) -> builder.up().withDetail("result", result).build());
}
private Mono<Health> validateWithConnectionValidation(Health.Builder builder) {
builder.withDetail("validationQuery", "validate(REMOTE)");
Mono<Boolean> connectionValidation = Mono.usingWhen(this.connectionFactory.create(),
(connection) -> Mono.from(connection.validate(ValidationDepth.REMOTE)), Connection::close,
(connection, ex) -> connection.close(), Connection::close);
return connectionValidation.map((valid) -> builder.status((valid) ? Status.UP : Status.DOWN).build());
}
private @Nullable Object extractResult(Row row, RowMetadata metadata) {
return row.get(metadata.getColumnMetadatas().iterator().next().getName());
}
}
|
ConnectionFactoryHealthIndicator
|
java
|
apache__spark
|
sql/catalyst/src/main/java/org/apache/spark/sql/vectorized/ArrowColumnVector.java
|
{
"start": 17381,
"end": 17510
}
|
class ____ extends ArrowVectorAccessor {
NullAccessor(NullVector vector) {
super(vector);
}
}
static
|
NullAccessor
|
java
|
ReactiveX__RxJava
|
src/test/java/io/reactivex/rxjava3/internal/jdk8/ObservableMapOptionalTest.java
|
{
"start": 1090,
"end": 11552
}
|
class ____ extends RxJavaTest {
static final Function<? super Integer, Optional<? extends Integer>> MODULO = v -> v % 2 == 0 ? Optional.of(v) : Optional.<Integer>empty();
@Test
public void allPresent() {
Observable.range(1, 5)
.mapOptional(Optional::of)
.test()
.assertResult(1, 2, 3, 4, 5);
}
@Test
public void allEmpty() {
Observable.range(1, 5)
.mapOptional(v -> Optional.<Integer>empty())
.test()
.assertResult();
}
@Test
public void mixed() {
Observable.range(1, 10)
.mapOptional(MODULO)
.test()
.assertResult(2, 4, 6, 8, 10);
}
@Test
public void mapperChash() {
BehaviorSubject<Integer> source = BehaviorSubject.createDefault(1);
source
.mapOptional(v -> { throw new TestException(); })
.test()
.assertFailure(TestException.class);
assertFalse(source.hasObservers());
}
@Test
public void mapperNull() {
BehaviorSubject<Integer> source = BehaviorSubject.createDefault(1);
source
.mapOptional(v -> null)
.test()
.assertFailure(NullPointerException.class);
assertFalse(source.hasObservers());
}
@Test
public void crashDropsOnNexts() {
Observable<Integer> source = new Observable<Integer>() {
@Override
protected void subscribeActual(Observer<? super Integer> observer) {
observer.onSubscribe(Disposable.empty());
observer.onNext(1);
observer.onNext(2);
}
};
source
.mapOptional(v -> { throw new TestException(); })
.test()
.assertFailure(TestException.class);
}
@Test
public void syncFusedAll() {
Observable.range(1, 5)
.mapOptional(Optional::of)
.to(TestHelper.testConsumer(false, QueueFuseable.SYNC))
.assertFuseable()
.assertFusionMode(QueueFuseable.SYNC)
.assertResult(1, 2, 3, 4, 5);
}
@Test
public void asyncFusedAll() {
UnicastSubject<Integer> us = UnicastSubject.create();
TestHelper.emit(us, 1, 2, 3, 4, 5);
us
.mapOptional(Optional::of)
.to(TestHelper.testConsumer(false, QueueFuseable.ASYNC))
.assertFuseable()
.assertFusionMode(QueueFuseable.ASYNC)
.assertResult(1, 2, 3, 4, 5);
}
@Test
public void boundaryFusedAll() {
UnicastSubject<Integer> us = UnicastSubject.create();
TestHelper.emit(us, 1, 2, 3, 4, 5);
us
.mapOptional(Optional::of)
.to(TestHelper.testConsumer(false, QueueFuseable.ASYNC | QueueFuseable.BOUNDARY))
.assertFuseable()
.assertFusionMode(QueueFuseable.NONE)
.assertResult(1, 2, 3, 4, 5);
}
@Test
public void syncFusedNone() {
Observable.range(1, 5)
.mapOptional(v -> Optional.empty())
.to(TestHelper.testConsumer(false, QueueFuseable.SYNC))
.assertFuseable()
.assertFusionMode(QueueFuseable.SYNC)
.assertResult();
}
@Test
public void asyncFusedNone() {
UnicastSubject<Integer> us = UnicastSubject.create();
TestHelper.emit(us, 1, 2, 3, 4, 5);
us
.mapOptional(v -> Optional.empty())
.to(TestHelper.testConsumer(false, QueueFuseable.ASYNC))
.assertFuseable()
.assertFusionMode(QueueFuseable.ASYNC)
.assertResult();
}
@Test
public void boundaryFusedNone() {
UnicastSubject<Integer> us = UnicastSubject.create();
TestHelper.emit(us, 1, 2, 3, 4, 5);
us
.mapOptional(v -> Optional.empty())
.to(TestHelper.testConsumer(false, QueueFuseable.ASYNC | QueueFuseable.BOUNDARY))
.assertFuseable()
.assertFusionMode(QueueFuseable.NONE)
.assertResult();
}
@Test
public void syncFusedMixed() {
Observable.range(1, 10)
.mapOptional(MODULO)
.to(TestHelper.testConsumer(false, QueueFuseable.SYNC))
.assertFuseable()
.assertFusionMode(QueueFuseable.SYNC)
.assertResult(2, 4, 6, 8, 10);
}
@Test
public void asyncFusedMixed() {
UnicastSubject<Integer> us = UnicastSubject.create();
TestHelper.emit(us, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10);
us
.mapOptional(MODULO)
.to(TestHelper.testConsumer(false, QueueFuseable.ASYNC))
.assertFuseable()
.assertFusionMode(QueueFuseable.ASYNC)
.assertResult(2, 4, 6, 8, 10);
}
@Test
public void boundaryFusedMixed() {
UnicastSubject<Integer> us = UnicastSubject.create();
TestHelper.emit(us, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10);
us
.mapOptional(MODULO)
.to(TestHelper.testConsumer(false, QueueFuseable.ASYNC | QueueFuseable.BOUNDARY))
.assertFuseable()
.assertFusionMode(QueueFuseable.NONE)
.assertResult(2, 4, 6, 8, 10);
}
@Test
public void allPresentConditional() {
Observable.range(1, 5)
.mapOptional(Optional::of)
.filter(v -> true)
.test()
.assertResult(1, 2, 3, 4, 5);
}
@Test
public void allEmptyConditional() {
Observable.range(1, 5)
.mapOptional(v -> Optional.<Integer>empty())
.filter(v -> true)
.test()
.assertResult();
}
@Test
public void mixedConditional() {
Observable.range(1, 10)
.mapOptional(MODULO)
.filter(v -> true)
.test()
.assertResult(2, 4, 6, 8, 10);
}
@Test
public void mapperChashConditional() {
BehaviorSubject<Integer> source = BehaviorSubject.createDefault(1);
source
.mapOptional(v -> { throw new TestException(); })
.filter(v -> true)
.test()
.assertFailure(TestException.class);
assertFalse(source.hasObservers());
}
@Test
public void mapperNullConditional() {
BehaviorSubject<Integer> source = BehaviorSubject.createDefault(1);
source
.mapOptional(v -> null)
.filter(v -> true)
.test()
.assertFailure(NullPointerException.class);
assertFalse(source.hasObservers());
}
@Test
public void crashDropsOnNextsConditional() {
Observable<Integer> source = new Observable<Integer>() {
@Override
protected void subscribeActual(Observer<? super Integer> observer) {
observer.onSubscribe(Disposable.empty());
observer.onNext(1);
observer.onNext(2);
}
};
source
.mapOptional(v -> { throw new TestException(); })
.filter(v -> true)
.test()
.assertFailure(TestException.class);
}
@Test
public void syncFusedAllConditional() {
Observable.range(1, 5)
.mapOptional(Optional::of)
.filter(v -> true)
.to(TestHelper.testConsumer(false, QueueFuseable.SYNC))
.assertFuseable()
.assertFusionMode(QueueFuseable.SYNC)
.assertResult(1, 2, 3, 4, 5);
}
@Test
public void asyncFusedAllConditional() {
UnicastSubject<Integer> us = UnicastSubject.create();
TestHelper.emit(us, 1, 2, 3, 4, 5);
us
.mapOptional(Optional::of)
.filter(v -> true)
.to(TestHelper.testConsumer(false, QueueFuseable.ASYNC))
.assertFuseable()
.assertFusionMode(QueueFuseable.ASYNC)
.assertResult(1, 2, 3, 4, 5);
}
@Test
public void boundaryFusedAllConditiona() {
UnicastSubject<Integer> us = UnicastSubject.create();
TestHelper.emit(us, 1, 2, 3, 4, 5);
us
.mapOptional(Optional::of)
.filter(v -> true)
.to(TestHelper.testConsumer(false, QueueFuseable.ASYNC | QueueFuseable.BOUNDARY))
.assertFuseable()
.assertFusionMode(QueueFuseable.NONE)
.assertResult(1, 2, 3, 4, 5);
}
@Test
public void syncFusedNoneConditional() {
Observable.range(1, 5)
.mapOptional(v -> Optional.empty())
.filter(v -> true)
.to(TestHelper.testConsumer(false, QueueFuseable.SYNC))
.assertFuseable()
.assertFusionMode(QueueFuseable.SYNC)
.assertResult();
}
@Test
public void asyncFusedNoneConditional() {
UnicastSubject<Integer> us = UnicastSubject.create();
TestHelper.emit(us, 1, 2, 3, 4, 5);
us
.mapOptional(v -> Optional.empty())
.filter(v -> true)
.to(TestHelper.testConsumer(false, QueueFuseable.ASYNC))
.assertFuseable()
.assertFusionMode(QueueFuseable.ASYNC)
.assertResult();
}
@Test
public void boundaryFusedNoneConditional() {
UnicastSubject<Integer> us = UnicastSubject.create();
TestHelper.emit(us, 1, 2, 3, 4, 5);
us
.mapOptional(v -> Optional.empty())
.filter(v -> true)
.to(TestHelper.testConsumer(false, QueueFuseable.ASYNC | QueueFuseable.BOUNDARY))
.assertFuseable()
.assertFusionMode(QueueFuseable.NONE)
.assertResult();
}
@Test
public void syncFusedMixedConditional() {
Observable.range(1, 10)
.mapOptional(MODULO)
.filter(v -> true)
.to(TestHelper.testConsumer(false, QueueFuseable.SYNC))
.assertFuseable()
.assertFusionMode(QueueFuseable.SYNC)
.assertResult(2, 4, 6, 8, 10);
}
@Test
public void asyncFusedMixedConditional() {
UnicastSubject<Integer> us = UnicastSubject.create();
TestHelper.emit(us, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10);
us
.mapOptional(MODULO)
.filter(v -> true)
.to(TestHelper.testConsumer(false, QueueFuseable.ASYNC))
.assertFuseable()
.assertFusionMode(QueueFuseable.ASYNC)
.assertResult(2, 4, 6, 8, 10);
}
@Test
public void boundaryFusedMixedConditional() {
UnicastSubject<Integer> us = UnicastSubject.create();
TestHelper.emit(us, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10);
us
.mapOptional(MODULO)
.filter(v -> true)
.to(TestHelper.testConsumer(false, QueueFuseable.ASYNC | QueueFuseable.BOUNDARY))
.assertFuseable()
.assertFusionMode(QueueFuseable.NONE)
.assertResult(2, 4, 6, 8, 10);
}
}
|
ObservableMapOptionalTest
|
java
|
alibaba__nacos
|
config/src/test/java/com/alibaba/nacos/config/server/utils/YamlParserUtilTest.java
|
{
"start": 1100,
"end": 3331
}
|
class ____ {
private static final String CONFIG_METADATA_STRING =
"metadata:\n" + "- dataId: testData1\n" + " group: testGroup1\n" + " type: text\n" + "- appName: testAppName\n"
+ " dataId: testData2\n" + " desc: test desc\n" + " group: testGroup2\n" + " type: yaml\n";
private ConfigMetadata.ConfigExportItem item1;
private ConfigMetadata.ConfigExportItem item2;
@BeforeEach
void setUp() {
item1 = new ConfigMetadata.ConfigExportItem();
item1.setDataId("testData1");
item1.setGroup("testGroup1");
item1.setType("text");
item2 = new ConfigMetadata.ConfigExportItem();
item2.setDataId("testData2");
item2.setGroup("testGroup2");
item2.setType("yaml");
item2.setAppName("testAppName");
item2.setDesc("test desc");
}
@Test
void testDumpObject() {
ConfigMetadata configMetadata = new ConfigMetadata();
List<ConfigMetadata.ConfigExportItem> configMetadataItems = new ArrayList<>();
configMetadataItems.add(item1);
configMetadataItems.add(item2);
configMetadata.setMetadata(configMetadataItems);
String parseString = YamlParserUtil.dumpObject(configMetadata);
assertEquals(CONFIG_METADATA_STRING, parseString);
}
@Test
void testLoadObject() {
ConfigMetadata configMetadata = YamlParserUtil.loadObject(CONFIG_METADATA_STRING, ConfigMetadata.class);
assertNotNull(configMetadata);
List<ConfigMetadata.ConfigExportItem> metadataList = configMetadata.getMetadata();
assertNotNull(metadataList);
assertEquals(2, metadataList.size());
ConfigMetadata.ConfigExportItem configExportItem1 = metadataList.get(0);
ConfigMetadata.ConfigExportItem configExportItem2 = metadataList.get(1);
assertEquals(configExportItem1, item1);
assertEquals(configExportItem2, item2);
}
@Test
void testNotSupportType() {
assertThrows(ConstructorException.class, () -> {
YamlParserUtil.loadObject("name: test", YamlTest.class);
});
}
private static
|
YamlParserUtilTest
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/boot/models/annotations/internal/MutabilityAnnotation.java
|
{
"start": 468,
"end": 1604
}
|
class ____ implements Mutability {
private java.lang.Class<? extends org.hibernate.type.descriptor.java.MutabilityPlan<?>> value;
/**
* Used in creating dynamic annotation instances (e.g. from XML)
*/
public MutabilityAnnotation(ModelsContext modelContext) {
}
/**
* Used in creating annotation instances from JDK variant
*/
public MutabilityAnnotation(Mutability annotation, ModelsContext modelContext) {
this.value = annotation.value();
}
/**
* Used in creating annotation instances from Jandex variant
*/
public MutabilityAnnotation(Map<String, Object> attributeValues, ModelsContext modelContext) {
this.value = (Class<? extends org.hibernate.type.descriptor.java.MutabilityPlan<?>>) attributeValues
.get( "value" );
}
@Override
public Class<? extends Annotation> annotationType() {
return Mutability.class;
}
@Override
public java.lang.Class<? extends org.hibernate.type.descriptor.java.MutabilityPlan<?>> value() {
return value;
}
public void value(java.lang.Class<? extends org.hibernate.type.descriptor.java.MutabilityPlan<?>> value) {
this.value = value;
}
}
|
MutabilityAnnotation
|
java
|
apache__rocketmq
|
client/src/main/java/org/apache/rocketmq/client/producer/SendStatus.java
|
{
"start": 855,
"end": 962
}
|
enum ____ {
SEND_OK,
FLUSH_DISK_TIMEOUT,
FLUSH_SLAVE_TIMEOUT,
SLAVE_NOT_AVAILABLE,
}
|
SendStatus
|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/bvt/sql/mysql/createTable/MySqlCreateTableTest110_drds.java
|
{
"start": 325,
"end": 1095
}
|
class ____ extends MysqlTest {
public void test_0() throws Exception {
String sql = "create table a (id int(10)) dbpartition by hash(id) dbpartitionS 2 tbpartition by hash(id) tbpartitions 2 ";
List<SQLStatement> statementList = SQLUtils.parseStatements(sql, JdbcConstants.MYSQL);
SQLCreateTableStatement stmt = (SQLCreateTableStatement) statementList.get(0);
assertEquals(1, statementList.size());
assertEquals(1, stmt.getTableElementList().size());
assertEquals("CREATE TABLE a (\n" +
"\tid int(10)\n" +
")\n" +
"DBPARTITION BY hash(id) DBPARTITIONS 2\n" +
"TBPARTITION BY hash(id) TBPARTITIONS 2", stmt.toString());
}
}
|
MySqlCreateTableTest110_drds
|
java
|
netty__netty
|
codec-base/src/main/java/io/netty/handler/codec/DelimiterBasedFrameDecoder.java
|
{
"start": 2125,
"end": 13174
}
|
class ____ extends ByteToMessageDecoder {
private final ByteBuf[] delimiters;
private final int maxFrameLength;
private final boolean stripDelimiter;
private final boolean failFast;
private boolean discardingTooLongFrame;
private int tooLongFrameLength;
/** Set only when decoding with "\n" and "\r\n" as the delimiter. */
private final LineBasedFrameDecoder lineBasedDecoder;
/**
* Creates a new instance.
*
* @param maxFrameLength the maximum length of the decoded frame.
* A {@link TooLongFrameException} is thrown if
* the length of the frame exceeds this value.
* @param delimiter the delimiter
*/
public DelimiterBasedFrameDecoder(int maxFrameLength, ByteBuf delimiter) {
this(maxFrameLength, true, delimiter);
}
/**
* Creates a new instance.
*
* @param maxFrameLength the maximum length of the decoded frame.
* A {@link TooLongFrameException} is thrown if
* the length of the frame exceeds this value.
* @param stripDelimiter whether the decoded frame should strip out the
* delimiter or not
* @param delimiter the delimiter
*/
public DelimiterBasedFrameDecoder(
int maxFrameLength, boolean stripDelimiter, ByteBuf delimiter) {
this(maxFrameLength, stripDelimiter, true, delimiter);
}
/**
* Creates a new instance.
*
* @param maxFrameLength the maximum length of the decoded frame.
* A {@link TooLongFrameException} is thrown if
* the length of the frame exceeds this value.
* @param stripDelimiter whether the decoded frame should strip out the
* delimiter or not
* @param failFast If <tt>true</tt>, a {@link TooLongFrameException} is
* thrown as soon as the decoder notices the length of the
* frame will exceed <tt>maxFrameLength</tt> regardless of
* whether the entire frame has been read.
* If <tt>false</tt>, a {@link TooLongFrameException} is
* thrown after the entire frame that exceeds
* <tt>maxFrameLength</tt> has been read.
* @param delimiter the delimiter
*/
public DelimiterBasedFrameDecoder(
int maxFrameLength, boolean stripDelimiter, boolean failFast,
ByteBuf delimiter) {
this(maxFrameLength, stripDelimiter, failFast, new ByteBuf[] {
delimiter.slice(delimiter.readerIndex(), delimiter.readableBytes())});
}
/**
* Creates a new instance.
*
* @param maxFrameLength the maximum length of the decoded frame.
* A {@link TooLongFrameException} is thrown if
* the length of the frame exceeds this value.
* @param delimiters the delimiters
*/
public DelimiterBasedFrameDecoder(int maxFrameLength, ByteBuf... delimiters) {
this(maxFrameLength, true, delimiters);
}
/**
* Creates a new instance.
*
* @param maxFrameLength the maximum length of the decoded frame.
* A {@link TooLongFrameException} is thrown if
* the length of the frame exceeds this value.
* @param stripDelimiter whether the decoded frame should strip out the
* delimiter or not
* @param delimiters the delimiters
*/
public DelimiterBasedFrameDecoder(
int maxFrameLength, boolean stripDelimiter, ByteBuf... delimiters) {
this(maxFrameLength, stripDelimiter, true, delimiters);
}
/**
* Creates a new instance.
*
* @param maxFrameLength the maximum length of the decoded frame.
* A {@link TooLongFrameException} is thrown if
* the length of the frame exceeds this value.
* @param stripDelimiter whether the decoded frame should strip out the
* delimiter or not
* @param failFast If <tt>true</tt>, a {@link TooLongFrameException} is
* thrown as soon as the decoder notices the length of the
* frame will exceed <tt>maxFrameLength</tt> regardless of
* whether the entire frame has been read.
* If <tt>false</tt>, a {@link TooLongFrameException} is
* thrown after the entire frame that exceeds
* <tt>maxFrameLength</tt> has been read.
* @param delimiters the delimiters
*/
public DelimiterBasedFrameDecoder(
int maxFrameLength, boolean stripDelimiter, boolean failFast, ByteBuf... delimiters) {
validateMaxFrameLength(maxFrameLength);
ObjectUtil.checkNonEmpty(delimiters, "delimiters");
if (isLineBased(delimiters) && !isSubclass()) {
lineBasedDecoder = new LineBasedFrameDecoder(maxFrameLength, stripDelimiter, failFast);
this.delimiters = null;
} else {
this.delimiters = new ByteBuf[delimiters.length];
for (int i = 0; i < delimiters.length; i ++) {
ByteBuf d = delimiters[i];
validateDelimiter(d);
this.delimiters[i] = d.slice(d.readerIndex(), d.readableBytes());
}
lineBasedDecoder = null;
}
this.maxFrameLength = maxFrameLength;
this.stripDelimiter = stripDelimiter;
this.failFast = failFast;
}
/** Returns true if the delimiters are "\n" and "\r\n". */
private static boolean isLineBased(final ByteBuf[] delimiters) {
if (delimiters.length != 2) {
return false;
}
ByteBuf a = delimiters[0];
ByteBuf b = delimiters[1];
if (a.capacity() < b.capacity()) {
a = delimiters[1];
b = delimiters[0];
}
return a.capacity() == 2 && b.capacity() == 1
&& a.getByte(0) == '\r' && a.getByte(1) == '\n'
&& b.getByte(0) == '\n';
}
/**
* Return {@code true} if the current instance is a subclass of DelimiterBasedFrameDecoder
*/
private boolean isSubclass() {
return getClass() != DelimiterBasedFrameDecoder.class;
}
@Override
protected final void decode(ChannelHandlerContext ctx, ByteBuf in, List<Object> out) throws Exception {
Object decoded = decode(ctx, in);
if (decoded != null) {
out.add(decoded);
}
}
/**
* Create a frame out of the {@link ByteBuf} and return it.
*
* @param ctx the {@link ChannelHandlerContext} which this {@link ByteToMessageDecoder} belongs to
* @param buffer the {@link ByteBuf} from which to read data
* @return frame the {@link ByteBuf} which represent the frame or {@code null} if no frame could
* be created.
*/
protected Object decode(ChannelHandlerContext ctx, ByteBuf buffer) throws Exception {
if (lineBasedDecoder != null) {
return lineBasedDecoder.decode(ctx, buffer);
}
// Try all delimiters and choose the delimiter which yields the shortest frame.
int minFrameLength = Integer.MAX_VALUE;
ByteBuf minDelim = null;
for (ByteBuf delim: delimiters) {
int frameLength = indexOf(buffer, delim);
if (frameLength >= 0 && frameLength < minFrameLength) {
minFrameLength = frameLength;
minDelim = delim;
}
}
if (minDelim != null) {
int minDelimLength = minDelim.capacity();
ByteBuf frame;
if (discardingTooLongFrame) {
// We've just finished discarding a very large frame.
// Go back to the initial state.
discardingTooLongFrame = false;
buffer.skipBytes(minFrameLength + minDelimLength);
int tooLongFrameLength = this.tooLongFrameLength;
this.tooLongFrameLength = 0;
if (!failFast) {
fail(tooLongFrameLength);
}
return null;
}
if (minFrameLength > maxFrameLength) {
// Discard read frame.
buffer.skipBytes(minFrameLength + minDelimLength);
fail(minFrameLength);
return null;
}
if (stripDelimiter) {
frame = buffer.readRetainedSlice(minFrameLength);
buffer.skipBytes(minDelimLength);
} else {
frame = buffer.readRetainedSlice(minFrameLength + minDelimLength);
}
return frame;
} else {
if (!discardingTooLongFrame) {
if (buffer.readableBytes() > maxFrameLength) {
// Discard the content of the buffer until a delimiter is found.
tooLongFrameLength = buffer.readableBytes();
buffer.skipBytes(buffer.readableBytes());
discardingTooLongFrame = true;
if (failFast) {
fail(tooLongFrameLength);
}
}
} else {
// Still discarding the buffer since a delimiter is not found.
tooLongFrameLength += buffer.readableBytes();
buffer.skipBytes(buffer.readableBytes());
}
return null;
}
}
private void fail(long frameLength) {
if (frameLength > 0) {
throw new TooLongFrameException(
"frame length exceeds " + maxFrameLength +
": " + frameLength + " - discarded");
} else {
throw new TooLongFrameException(
"frame length exceeds " + maxFrameLength +
" - discarding");
}
}
/**
* Returns the number of bytes between the readerIndex of the haystack and
* the first needle found in the haystack. -1 is returned if no needle is
* found in the haystack.
*/
private static int indexOf(ByteBuf haystack, ByteBuf needle) {
int index = ByteBufUtil.indexOf(needle, haystack);
if (index == -1) {
return -1;
}
return index - haystack.readerIndex();
}
private static void validateDelimiter(ByteBuf delimiter) {
ObjectUtil.checkNotNull(delimiter, "delimiter");
if (!delimiter.isReadable()) {
throw new IllegalArgumentException("empty delimiter");
}
}
private static void validateMaxFrameLength(int maxFrameLength) {
checkPositive(maxFrameLength, "maxFrameLength");
}
}
|
DelimiterBasedFrameDecoder
|
java
|
apache__kafka
|
server-common/src/main/java/org/apache/kafka/timeline/Revertable.java
|
{
"start": 970,
"end": 1314
}
|
interface ____ {
/**
* Revert to the target epoch.
*
* @param targetEpoch The epoch to revert to.
* @param delta The delta associated with this epoch for this object.
*/
void executeRevert(long targetEpoch, Delta delta);
/**
* Reverts to the initial value.
*/
void reset();
}
|
Revertable
|
java
|
junit-team__junit5
|
junit-platform-engine/src/testFixtures/java/org/junit/platform/fakes/FaultyTestEngines.java
|
{
"start": 793,
"end": 2593
}
|
class ____ {
public static TestEngineStub createEngineThatCannotResolveAnything(String engineId) {
return new TestEngineStub(engineId) {
@Override
public TestDescriptor discover(EngineDiscoveryRequest discoveryRequest, UniqueId uniqueId) {
discoveryRequest.getSelectorsByType(DiscoverySelector.class) //
.forEach(selector -> discoveryRequest.getDiscoveryListener().selectorProcessed(uniqueId,
selector, SelectorResolutionResult.unresolved()));
return new EngineDescriptor(uniqueId, "Some Engine");
}
@Override
public void execute(ExecutionRequest request) {
var listener = request.getEngineExecutionListener();
var rootTestDescriptor = request.getRootTestDescriptor();
listener.executionStarted(rootTestDescriptor);
listener.executionFinished(rootTestDescriptor, TestExecutionResult.successful());
}
};
}
public static TestEngineStub createEngineThatFailsToResolveAnything(String engineId, Throwable rootCause) {
return new TestEngineStub(engineId) {
@Override
public TestDescriptor discover(EngineDiscoveryRequest discoveryRequest, UniqueId uniqueId) {
discoveryRequest.getSelectorsByType(DiscoverySelector.class) //
.forEach(selector -> discoveryRequest.getDiscoveryListener().selectorProcessed(uniqueId,
selector, SelectorResolutionResult.failed(rootCause)));
return new EngineDescriptor(uniqueId, "Some Engine");
}
@Override
public void execute(ExecutionRequest request) {
var listener = request.getEngineExecutionListener();
var rootTestDescriptor = request.getRootTestDescriptor();
listener.executionStarted(rootTestDescriptor);
listener.executionFinished(rootTestDescriptor, TestExecutionResult.successful());
}
};
}
private FaultyTestEngines() {
}
}
|
FaultyTestEngines
|
java
|
elastic__elasticsearch
|
x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/SecurityRestFilter.java
|
{
"start": 1284,
"end": 5998
}
|
class ____ implements RestInterceptor {
private static final Logger logger = LogManager.getLogger(SecurityRestFilter.class);
private final SecondaryAuthenticator secondaryAuthenticator;
private final AuditTrailService auditTrailService;
private final boolean enabled;
private final ThreadContext threadContext;
private final OperatorPrivileges.OperatorPrivilegesService operatorPrivilegesService;
public SecurityRestFilter(
boolean enabled,
ThreadContext threadContext,
SecondaryAuthenticator secondaryAuthenticator,
AuditTrailService auditTrailService,
OperatorPrivileges.OperatorPrivilegesService operatorPrivilegesService
) {
this.enabled = enabled;
this.threadContext = threadContext;
this.secondaryAuthenticator = secondaryAuthenticator;
this.auditTrailService = auditTrailService;
// can be null if security is not enabled
this.operatorPrivilegesService = operatorPrivilegesService == null
? OperatorPrivileges.NOOP_OPERATOR_PRIVILEGES_SERVICE
: operatorPrivilegesService;
}
@Override
public void intercept(RestRequest request, RestChannel channel, RestHandler targetHandler, ActionListener<Boolean> listener)
throws Exception {
// requests with the OPTIONS method should be handled elsewhere, and not by calling {@code RestHandler#handleRequest}
// authn is bypassed for HTTP requests with the OPTIONS method, so this sanity check prevents dispatching unauthenticated requests
if (request.method() == Method.OPTIONS) {
handleException(
request,
new ElasticsearchSecurityException("Cannot dispatch OPTIONS request, as they are not authenticated"),
listener
);
return;
}
if (enabled == false) {
listener.onResponse(Boolean.TRUE);
return;
}
// RestRequest might have stream content, in some cases we need to aggregate request content, for example audit logging.
final Consumer<RestRequest> aggregationCallback = (aggregatedRestRequest) -> {
final RestRequest wrappedRequest = maybeWrapRestRequest(aggregatedRestRequest, targetHandler);
auditTrailService.get().authenticationSuccess(wrappedRequest);
secondaryAuthenticator.authenticateAndAttachToContext(wrappedRequest, ActionListener.wrap(secondaryAuthentication -> {
if (secondaryAuthentication != null) {
logger.trace(
"Found secondary authentication {} in REST request [{}]",
secondaryAuthentication,
aggregatedRestRequest.uri()
);
}
WorkflowService.resolveWorkflowAndStoreInThreadContext(targetHandler, threadContext);
doHandleRequest(aggregatedRestRequest, channel, targetHandler, listener);
}, e -> handleException(aggregatedRestRequest, e, listener)));
};
if (request.isStreamedContent() && auditTrailService.includeRequestBody()) {
aggregate(request, aggregationCallback::accept);
} else {
aggregationCallback.accept(request);
}
}
private void doHandleRequest(RestRequest request, RestChannel channel, RestHandler targetHandler, ActionListener<Boolean> listener) {
threadContext.sanitizeHeaders();
// operator privileges can short circuit to return a non-successful response
if (operatorPrivilegesService.checkRest(targetHandler, request, channel, threadContext)) {
listener.onResponse(Boolean.TRUE);
} else {
// The service sends its own response if it returns `false`.
// That's kind of ugly, and it would be better if we throw an exception and let the rest controller serialize it as normal
listener.onResponse(Boolean.FALSE);
}
}
protected void handleException(RestRequest request, Exception e, ActionListener<?> listener) {
logger.debug(() -> format("failed for REST request [%s]", request.uri()), e);
threadContext.sanitizeHeaders();
listener.onFailure(e);
}
// for testing
OperatorPrivileges.OperatorPrivilegesService getOperatorPrivilegesService() {
return operatorPrivilegesService;
}
private RestRequest maybeWrapRestRequest(RestRequest restRequest, RestHandler targetHandler) {
if (targetHandler instanceof RestRequestFilter rrf) {
return rrf.getFilteredRequest(restRequest);
}
return restRequest;
}
}
|
SecurityRestFilter
|
java
|
google__guava
|
android/guava-tests/benchmark/com/google/common/hash/HashFunctionBenchmark.java
|
{
"start": 1160,
"end": 2410
}
|
class ____ {
// Use a statically configured random instance for all of the benchmarks
private static final Random random = new Random(42);
@Param({"10", "1000", "100000", "1000000"})
private int size;
@Param HashFunctionEnum hashFunctionEnum;
private byte[] testBytes;
@BeforeExperiment
void setUp() {
testBytes = new byte[size];
random.nextBytes(testBytes);
}
@Benchmark
int hasher(int reps) {
HashFunction hashFunction = hashFunctionEnum.getHashFunction();
int result = 37;
for (int i = 0; i < reps; i++) {
result ^= hashFunction.newHasher().putBytes(testBytes).hash().asBytes()[0];
}
return result;
}
@Benchmark
int hashFunction(int reps) {
HashFunction hashFunction = hashFunctionEnum.getHashFunction();
int result = 37;
for (int i = 0; i < reps; i++) {
result ^= hashFunction.hashBytes(testBytes).asBytes()[0];
}
return result;
}
@Benchmark
int hashFunctionWithOffset(int reps) {
HashFunction hashFunction = hashFunctionEnum.getHashFunction();
int result = 37;
for (int i = 0; i < reps; i++) {
result ^= hashFunction.hashBytes(testBytes, 1, testBytes.length - 1).asBytes()[0];
}
return result;
}
}
|
HashFunctionBenchmark
|
java
|
apache__camel
|
components/camel-jgroups-raft/src/main/java/org/apache/camel/component/jgroups/raft/utils/NopStateMachine.java
|
{
"start": 1141,
"end": 1932
}
|
class ____ implements StateMachine {
private static final transient Logger LOG = LoggerFactory.getLogger(NopStateMachine.class);
@Override
public byte[] apply(byte[] data, int offset, int length, boolean serialize_response) throws Exception {
LOG.trace("Called StateMachine.apply(byte[] {}, int {}, int {}) on {}", data, offset, length, this);
return new byte[0];
}
@Override
public void readContentFrom(DataInput dataInput) throws Exception {
LOG.trace("Called StateMachine.readContentFrom(DataInput {}) on {}", dataInput, this);
}
@Override
public void writeContentTo(DataOutput dataOutput) throws Exception {
LOG.trace("Called StateMachine.readContentFrom(DataOutput {}) on {}", dataOutput, this);
}
}
|
NopStateMachine
|
java
|
google__dagger
|
javatests/dagger/internal/codegen/MembersInjectionTest.java
|
{
"start": 42375,
"end": 42654
}
|
class ____ {",
" @Inject Foo() {}",
"}");
Source supertype =
CompilerTests.javaSource(
"other.Supertype",
"package other;",
"",
"import javax.inject.Inject;",
"",
"public
|
Foo
|
java
|
apache__maven
|
impl/maven-core/src/main/java/org/apache/maven/lifecycle/MissingProjectException.java
|
{
"start": 968,
"end": 1222
}
|
class ____ extends Exception {
/**
* Creates a new exception.
*
* @param message The detail message, may be {@code null}.
*/
public MissingProjectException(String message) {
super(message);
}
}
|
MissingProjectException
|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/bvt/filter/wall/TenantSelectTest2.java
|
{
"start": 971,
"end": 2399
}
|
class ____ extends TestCase {
private String sql = "SELECT ID, NAME FROM orders WHERE FID = ? OR FID = ?";
private String expect_sql = "SELECT ID, NAME, tenant" +
"\nFROM orders" +
"\nWHERE FID = ?" +
"\n\tOR FID = ?";
private WallConfig config = new WallConfig();
private WallConfig config_callback = new WallConfig();
protected void setUp() throws Exception {
config.setTenantTablePattern("*");
config.setTenantColumn("tenant");
config_callback.setTenantCallBack(new TenantTestCallBack());
}
public void testMySql() throws Exception {
WallProvider.setTenantValue(123);
MySqlWallProvider provider = new MySqlWallProvider(config);
WallCheckResult checkResult = provider.check(sql);
assertEquals(0, checkResult.getViolations().size());
String resultSql = SQLUtils.toSQLString(checkResult.getStatementList(), JdbcConstants.MYSQL);
assertEquals(expect_sql, resultSql);
}
public void testMySql2() throws Exception {
MySqlWallProvider provider = new MySqlWallProvider(config_callback);
WallCheckResult checkResult = provider.check(sql);
assertEquals(0, checkResult.getViolations().size());
String resultSql = SQLUtils.toSQLString(checkResult.getStatementList(), JdbcConstants.MYSQL);
assertEquals(expect_sql, resultSql);
}
}
|
TenantSelectTest2
|
java
|
elastic__elasticsearch
|
x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/response/HuggingFaceRerankResponseEntityTests.java
|
{
"start": 1081,
"end": 6097
}
|
class ____ extends ESTestCase {
private static final String MISSED_FIELD_INDEX = "index";
private static final String MISSED_FIELD_SCORE = "score";
private static final String RESPONSE_JSON_TWO_DOCS = """
[
{
"index": 4,
"score": -0.22222222222222222,
"text": "ranked second"
},
{
"index": 1,
"score": 1.11111111111111111,
"text": "ranked first"
}
]
""";
private static final List<RankedDocsResultsTests.RerankExpectation> EXPECTED_TWO_DOCS = List.of(
new RankedDocsResultsTests.RerankExpectation(Map.of("index", 1, "relevance_score", 1.11111111111111111F, "text", "ranked first")),
new RankedDocsResultsTests.RerankExpectation(Map.of("index", 4, "relevance_score", -0.22222222222222222F, "text", "ranked second"))
);
private static final String RESPONSE_JSON_FIVE_DOCS = """
[
{
"index": 1,
"score": 1.11111111111111111,
"text": "ranked first"
},
{
"index": 3,
"score": -0.33333333333333333,
"text": "ranked third"
},
{
"index": 0,
"score": -0.55555555555555555,
"text": "ranked fifth"
},
{
"index": 2,
"score": -0.44444444444444444,
"text": "ranked fourth"
},
{
"index": 4,
"score": -0.22222222222222222,
"text": "ranked second"
}
]
""";
private static final List<RankedDocsResultsTests.RerankExpectation> EXPECTED_FIVE_DOCS = List.of(
new RankedDocsResultsTests.RerankExpectation(Map.of("index", 1, "relevance_score", 1.11111111111111111F, "text", "ranked first")),
new RankedDocsResultsTests.RerankExpectation(Map.of("index", 4, "relevance_score", -0.22222222222222222F, "text", "ranked second")),
new RankedDocsResultsTests.RerankExpectation(Map.of("index", 3, "relevance_score", -0.33333333333333333F, "text", "ranked third")),
new RankedDocsResultsTests.RerankExpectation(Map.of("index", 2, "relevance_score", -0.44444444444444444F, "text", "ranked fourth")),
new RankedDocsResultsTests.RerankExpectation(Map.of("index", 0, "relevance_score", -0.55555555555555555F, "text", "ranked fifth"))
);
private static final HuggingFaceRerankRequest REQUEST_MOCK = mock(HuggingFaceRerankRequest.class);
public void testFromResponse_CreatesRankedDocsResults_TopNNull_FiveDocs_NoLimit() throws IOException {
assertTopNLimit(null, RESPONSE_JSON_FIVE_DOCS, EXPECTED_FIVE_DOCS);
}
public void testFromResponse_CreatesRankedDocsResults_TopN5_TwoDocs_NoLimit() throws IOException {
assertTopNLimit(5, RESPONSE_JSON_TWO_DOCS, EXPECTED_TWO_DOCS);
}
public void testFromResponse_CreatesRankedDocsResults_TopN2_FiveDocs_Limits() throws IOException {
assertTopNLimit(2, RESPONSE_JSON_FIVE_DOCS, EXPECTED_TWO_DOCS);
}
public void testFails_CreateRankedDocsResults_IndexFieldNull() {
String responseJson = """
[
{
"score": 1.11111111111111111,
"text": "ranked first"
}
]
""";
assertMissingFieldThrowsIllegalArgumentException(responseJson, MISSED_FIELD_INDEX);
}
public void testFails_CreateRankedDocsResults_ScoreFieldNull() {
String responseJson = """
[
{
"index": 1,
"text": "ranked first"
}
]
""";
assertMissingFieldThrowsIllegalArgumentException(responseJson, MISSED_FIELD_SCORE);
}
private void assertMissingFieldThrowsIllegalArgumentException(String responseJson, String missingField) {
when(REQUEST_MOCK.getTopN()).thenReturn(1);
var thrownException = expectThrows(
IllegalArgumentException.class,
() -> HuggingFaceRerankResponseEntity.fromResponse(
REQUEST_MOCK,
new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8))
)
);
assertThat(thrownException.getMessage(), is("Required [" + missingField + "]"));
}
private void assertTopNLimit(Integer topN, String responseJson, List<RankedDocsResultsTests.RerankExpectation> expectation)
throws IOException {
when(REQUEST_MOCK.getTopN()).thenReturn(topN);
RankedDocsResults parsedResults = HuggingFaceRerankResponseEntity.fromResponse(
REQUEST_MOCK,
new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8))
);
assertThat(parsedResults.asMap(), is(buildExpectationRerank(expectation)));
}
}
|
HuggingFaceRerankResponseEntityTests
|
java
|
apache__camel
|
core/camel-support/src/main/java/org/apache/camel/support/processor/idempotent/MemoryIdempotentRepository.java
|
{
"start": 3397,
"end": 5695
}
|
class ____ a memory leak.
*
* @param cache the cache
*/
public static IdempotentRepository memoryIdempotentRepository(Map<String, Object> cache) {
return new MemoryIdempotentRepository(cache);
}
@Override
@ManagedOperation(description = "Adds the key to the store")
public boolean add(String key) {
cacheAndStoreLock.lock();
try {
if (cache.containsKey(key)) {
return false;
} else {
cache.put(key, key);
return true;
}
} finally {
cacheAndStoreLock.unlock();
}
}
@Override
@ManagedOperation(description = "Does the store contain the given key")
public boolean contains(String key) {
cacheAndStoreLock.lock();
try {
return cache.containsKey(key);
} finally {
cacheAndStoreLock.unlock();
}
}
@Override
@ManagedOperation(description = "Remove the key from the store")
public boolean remove(String key) {
cacheAndStoreLock.lock();
try {
return cache.remove(key) != null;
} finally {
cacheAndStoreLock.unlock();
}
}
@Override
public boolean confirm(String key) {
// noop
return true;
}
@Override
@ManagedOperation(description = "Clear the store")
public void clear() {
cacheAndStoreLock.lock();
try {
cache.clear();
} finally {
cacheAndStoreLock.unlock();
}
}
public Map<String, Object> getCache() {
return cache;
}
@ManagedAttribute(description = "The current cache size")
public int getCacheSize() {
return cache.size();
}
@ManagedAttribute(description = "The maximum cache size")
public int getMaxCacheSize() {
return cacheSize;
}
public void setCacheSize(int cacheSize) {
this.cacheSize = cacheSize;
}
@Override
protected void doStart() throws Exception {
if (cache == null) {
cache = LRUCacheFactory.newLRUCache(cacheSize <= 0 ? MAX_CACHE_SIZE : cacheSize);
}
}
@Override
protected void doStop() throws Exception {
cache.clear();
}
}
|
being
|
java
|
spring-projects__spring-boot
|
core/spring-boot-test/src/main/java/org/springframework/boot/test/json/AbstractJsonMarshalTester.java
|
{
"start": 1774,
"end": 2477
}
|
class ____ {
*
* private AbstractJsonTester<ExampleObject> json = //...
*
* @Test
* public void testWriteJson() {
* ExampleObject object = //...
* assertThat(json.write(object)).isEqualToJson("expected.json");
* assertThat(json.read("expected.json")).isEqualTo(object);
* }
*
* }
* </pre> For a complete list of supported assertions see {@link JsonContentAssert} and
* {@link ObjectContentAssert}.
* <p>
* To use this library JSONAssert must be on the test classpath.
*
* @param <T> the type under test
* @author Phillip Webb
* @since 1.4.0
* @see JsonContentAssert
* @see ObjectContentAssert
*/
public abstract
|
ExampleObjectJsonTests
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionSSEKMSUserDefinedKey.java
|
{
"start": 1492,
"end": 2256
}
|
class ____
extends AbstractTestS3AEncryption {
@Override
protected Configuration createConfiguration() {
// get the KMS key for this test.
Configuration c = new Configuration();
String kmsKey = S3AUtils.getS3EncryptionKey(getTestBucketName(c), c);
// skip the test if SSE-KMS or KMS key not set.
if (StringUtils.isBlank(kmsKey) || !SSE_KMS.getMethod()
.equals(c.get(S3_ENCRYPTION_ALGORITHM))) {
skip(S3_ENCRYPTION_KEY + " is not set for " +
SSE_KMS.getMethod());
}
Configuration conf = super.createConfiguration();
conf.set(S3_ENCRYPTION_KEY, kmsKey);
return conf;
}
@Override
protected S3AEncryptionMethods getSSEAlgorithm() {
return SSE_KMS;
}
}
|
ITestS3AEncryptionSSEKMSUserDefinedKey
|
java
|
apache__flink
|
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/ConstructedToConstructedCastRule.java
|
{
"start": 1064,
"end": 1237
}
|
interface ____ a default implementation for {@link #canFail(LogicalType, LogicalType)}
* for constructed type casts, e.g. ARRAY to ARRAY (but not ARRAY to STRING).
*/
|
provides
|
java
|
quarkusio__quarkus
|
integration-tests/reactive-messaging-kafka/src/main/java/io/quarkus/it/kafka/KafkaEndpoint.java
|
{
"start": 540,
"end": 1853
}
|
class ____ {
@Inject
KafkaReceivers receivers;
@Inject
@RedisClientName("my-redis")
RedisDataSource rds;
@GET
@Path("/people")
@Produces(MediaType.APPLICATION_JSON)
public List<Person> getPeople() {
return receivers.getPeople();
}
@GET
@Path("/people-state/{key}")
@Produces(MediaType.APPLICATION_JSON)
public ProcessingState<KafkaReceivers.PeopleState> getPeopleState(@PathParam("key") String key) {
return (ProcessingState<KafkaReceivers.PeopleState>) rds.value(ProcessingState.class).get(key);
}
@GET
@Path("/fruits")
@Produces(MediaType.APPLICATION_JSON)
public List<Fruit> getFruits() {
return receivers.getFruits();
}
@GET
@Path("/pets")
@Produces(MediaType.APPLICATION_JSON)
public List<Pet> getPets() {
return receivers.getPets().stream().map(Record::key).collect(Collectors.toList());
}
@GET
@Path("/data-with-metadata")
@Produces(MediaType.APPLICATION_JSON)
public Map<String, String> getDataWithMetadata() {
return receivers.getDataWithMetadata();
}
@GET
@Path("/data-for-keyed")
@Produces(MediaType.APPLICATION_JSON)
public List<String> getDataForKeyed() {
return receivers.getDataForKeyed();
}
}
|
KafkaEndpoint
|
java
|
google__dagger
|
javatests/artifacts/hilt-android/simple/app/src/sharedTest/java/dagger/hilt/android/simple/BaseTestApplication.java
|
{
"start": 1249,
"end": 1334
}
|
interface ____ {
Foo lazyFoo();
}
@Singleton
public static final
|
FooEntryPoint
|
java
|
apache__flink
|
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/rules/physical/common/CommonPhysicalMatchRule.java
|
{
"start": 2258,
"end": 6824
}
|
class ____ extends ConverterRule {
public CommonPhysicalMatchRule(
Class<? extends RelNode> clazz, RelTrait in, RelTrait out, String descriptionPrefix) {
super(Config.INSTANCE.as(Config.class).withConversion(clazz, in, out, descriptionPrefix));
}
@Override
public boolean matches(RelOptRuleCall call) {
FlinkLogicalMatch logicalMatch = call.rel(0);
validateAggregations(logicalMatch.getMeasures().values());
validateAggregations(logicalMatch.getPatternDefinitions().values());
// This check might be obsolete once CALCITE-2747 is resolved
validateAmbiguousColumns(logicalMatch);
return true;
}
public RelNode convert(RelNode rel, FlinkConvention convention) {
FlinkLogicalMatch logicalMatch = (FlinkLogicalMatch) rel;
RelTraitSet traitSet = rel.getTraitSet().replace(convention);
ImmutableBitSet partitionKeys = logicalMatch.getPartitionKeys();
FlinkRelDistribution requiredDistribution =
partitionKeys.isEmpty()
? FlinkRelDistribution.SINGLETON()
: FlinkRelDistribution.hash(logicalMatch.getPartitionKeys().asList(), true);
RelTraitSet requiredTraitSet =
rel.getCluster()
.getPlanner()
.emptyTraitSet()
.replace(requiredDistribution)
.replace(convention);
RelNode convertInput = RelOptRule.convert(logicalMatch.getInput(), requiredTraitSet);
try {
Class.forName(
"org.apache.flink.cep.pattern.Pattern",
false,
ShortcutUtils.unwrapContext(rel).getClassLoader());
} catch (ClassNotFoundException e) {
throw new TableException(
"MATCH RECOGNIZE clause requires flink-cep dependency to be present on the classpath.",
e);
}
return convertToPhysicalMatch(
rel.getCluster(),
traitSet,
convertInput,
new MatchRecognize(
logicalMatch.getPattern(),
logicalMatch.getPatternDefinitions(),
logicalMatch.getMeasures(),
logicalMatch.getAfter(),
logicalMatch.getSubsets(),
logicalMatch.isAllRows(),
logicalMatch.getPartitionKeys(),
logicalMatch.getOrderKeys(),
logicalMatch.getInterval()),
logicalMatch.getRowType());
}
protected abstract RelNode convertToPhysicalMatch(
RelOptCluster cluster,
RelTraitSet traitSet,
RelNode convertInput,
MatchRecognize matchRecognize,
RelDataType rowType);
private void validateAggregations(Iterable<RexNode> expr) {
AggregationsValidator validator = new AggregationsValidator();
expr.forEach(e -> e.accept(validator));
}
private void validateAmbiguousColumns(FlinkLogicalMatch logicalMatch) {
if (logicalMatch.isAllRows()) {
throw new TableException("All rows per match mode is not supported yet.");
} else {
validateAmbiguousColumnsOnRowPerMatch(
logicalMatch.getPartitionKeys(),
logicalMatch.getMeasures().keySet(),
logicalMatch.getInput().getRowType(),
logicalMatch.getRowType());
}
}
private void validateAmbiguousColumnsOnRowPerMatch(
ImmutableBitSet partitionKeys,
Set<String> measuresNames,
RelDataType inputSchema,
RelDataType expectedSchema) {
int actualSize = partitionKeys.toArray().length + measuresNames.size();
int expectedSize = expectedSchema.getFieldCount();
if (actualSize != expectedSize) {
// try to find ambiguous column
String ambiguousColumns =
Arrays.stream(partitionKeys.toArray())
.mapToObj(k -> inputSchema.getFieldList().get(k).getName())
.filter(measuresNames::contains)
.collect(Collectors.joining(", ", "{", "}"));
throw new ValidationException(
String.format("Columns ambiguously defined: %s", ambiguousColumns));
}
}
private static
|
CommonPhysicalMatchRule
|
java
|
spring-projects__spring-boot
|
module/spring-boot-webmvc/src/test/java/org/springframework/boot/webmvc/autoconfigure/WebMvcAutoConfigurationTests.java
|
{
"start": 67916,
"end": 68385
}
|
class ____ {
@Bean
ApiVersionResolver apiVersionResolver() {
return (request) -> "latest";
}
@Bean
ApiVersionDeprecationHandler apiVersionDeprecationHandler(ApiVersionParser<?> apiVersionParser) {
return new StandardApiVersionDeprecationHandler(apiVersionParser);
}
@Bean
ApiVersionParser<String> apiVersionParser() {
return (version) -> String.valueOf(version);
}
}
@Configuration(proxyBeanMethods = false)
static
|
ApiVersionConfiguration
|
java
|
apache__kafka
|
connect/api/src/main/java/org/apache/kafka/connect/header/Headers.java
|
{
"start": 1286,
"end": 11649
}
|
interface ____ extends Iterable<Header> {
/**
* Get the number of headers in this object.
*
* @return the number of headers; never negative
*/
int size();
/**
* Determine whether this object has no headers.
*
* @return true if there are no headers, or false if there is at least one header
*/
boolean isEmpty();
/**
* Get the collection of {@link Header} objects whose {@link Header#key() keys} all match the specified key.
*
* @param key the key; may not be null
* @return the iterator over headers with the specified key; may be null if there are no headers with the specified key
*/
Iterator<Header> allWithName(String key);
/**
* Return the last {@link Header} with the specified key.
*
* @param key the key for the header; may not be null
* @return the last Header, or null if there are no headers with the specified key
*/
Header lastWithName(String key);
/**
* Add the given {@link Header} to this collection.
*
* @param header the header; may not be null
* @return this object to facilitate chaining multiple methods; never null
*/
Headers add(Header header);
/**
* Add to this collection a {@link Header} with the given key and value.
*
* @param key the header's key; may not be null
* @param schemaAndValue the {@link SchemaAndValue} for the header; may be null
* @return this object to facilitate chaining multiple methods; never null
*/
Headers add(String key, SchemaAndValue schemaAndValue);
/**
* Add to this collection a {@link Header} with the given key and value.
*
* @param key the header's key; may not be null
* @param value the header's value; may be null
* @param schema the schema for the header's value; may not be null if the value is not null
* @return this object to facilitate chaining multiple methods; never null
*/
Headers add(String key, Object value, Schema schema);
/**
* Add to this collection a {@link Header} with the given key and value.
*
* @param key the header's key; may not be null
* @param value the header's value; may be null
* @return this object to facilitate chaining multiple methods; never null
*/
Headers addString(String key, String value);
/**
* Add to this collection a {@link Header} with the given key and value.
*
* @param key the header's key; may not be null
* @param value the header's value; may be null
* @return this object to facilitate chaining multiple methods; never null
*/
Headers addBoolean(String key, boolean value);
/**
* Add to this collection a {@link Header} with the given key and value.
*
* @param key the header's key; may not be null
* @param value the header's value; may be null
* @return this object to facilitate chaining multiple methods; never null
*/
Headers addByte(String key, byte value);
/**
* Add to this collection a {@link Header} with the given key and value.
*
* @param key the header's key; may not be null
* @param value the header's value; may be null
* @return this object to facilitate chaining multiple methods; never null
*/
Headers addShort(String key, short value);
/**
* Add to this collection a {@link Header} with the given key and value.
*
* @param key the header's key; may not be null
* @param value the header's value; may be null
* @return this object to facilitate chaining multiple methods; never null
*/
Headers addInt(String key, int value);
/**
* Add to this collection a {@link Header} with the given key and value.
*
* @param key the header's key; may not be null
* @param value the header's value; may be null
* @return this object to facilitate chaining multiple methods; never null
*/
Headers addLong(String key, long value);
/**
* Add to this collection a {@link Header} with the given key and value.
*
* @param key the header's key; may not be null
* @param value the header's value; may be null
* @return this object to facilitate chaining multiple methods; never null
*/
Headers addFloat(String key, float value);
/**
* Add to this collection a {@link Header} with the given key and value.
*
* @param key the header's key; may not be null
* @param value the header's value; may be null
* @return this object to facilitate chaining multiple methods; never null
*/
Headers addDouble(String key, double value);
/**
* Add to this collection a {@link Header} with the given key and value.
*
* @param key the header's key; may not be null
* @param value the header's value; may be null
* @return this object to facilitate chaining multiple methods; never null
*/
Headers addBytes(String key, byte[] value);
/**
* Add to this collection a {@link Header} with the given key and value.
*
* @param key the header's key; may not be null
* @param value the header's value; may be null
* @param schema the schema describing the list value; may not be null
* @return this object to facilitate chaining multiple methods; never null
* @throws DataException if the header's value is invalid
*/
Headers addList(String key, List<?> value, Schema schema);
/**
* Add to this collection a {@link Header} with the given key and value.
*
* @param key the header's key; may not be null
* @param value the header's value; may be null
* @param schema the schema describing the map value; may not be null
* @return this object to facilitate chaining multiple methods; never null
* @throws DataException if the header's value is invalid
*/
Headers addMap(String key, Map<?, ?> value, Schema schema);
/**
* Add to this collection a {@link Header} with the given key and value.
*
* @param key the header's key; may not be null
* @param value the header's value; may be null
* @return this object to facilitate chaining multiple methods; never null
* @throws DataException if the header's value is invalid
*/
Headers addStruct(String key, Struct value);
/**
* Add to this collection a {@link Header} with the given key and {@link org.apache.kafka.connect.data.Decimal} value.
*
* @param key the header's key; may not be null
* @param value the header's {@link org.apache.kafka.connect.data.Decimal} value; may be null
* @return this object to facilitate chaining multiple methods; never null
*/
Headers addDecimal(String key, BigDecimal value);
/**
* Add to this collection a {@link Header} with the given key and {@link org.apache.kafka.connect.data.Date} value.
*
* @param key the header's key; may not be null
* @param value the header's {@link org.apache.kafka.connect.data.Date} value; may be null
* @return this object to facilitate chaining multiple methods; never null
*/
Headers addDate(String key, java.util.Date value);
/**
* Add to this collection a {@link Header} with the given key and {@link org.apache.kafka.connect.data.Time} value.
*
* @param key the header's key; may not be null
* @param value the header's {@link org.apache.kafka.connect.data.Time} value; may be null
* @return this object to facilitate chaining multiple methods; never null
*/
Headers addTime(String key, java.util.Date value);
/**
* Add to this collection a {@link Header} with the given key and {@link org.apache.kafka.connect.data.Timestamp} value.
*
* @param key the header's key; may not be null
* @param value the header's {@link org.apache.kafka.connect.data.Timestamp} value; may be null
* @return this object to facilitate chaining multiple methods; never null
*/
Headers addTimestamp(String key, java.util.Date value);
/**
* Removes all {@link Header} objects whose {@link Header#key() key} matches the specified key.
*
* @param key the key; may not be null
* @return this object to facilitate chaining multiple methods; never null
*/
Headers remove(String key);
/**
* Removes all but the latest {@link Header} objects whose {@link Header#key() key} matches the specified key.
*
* @param key the key; may not be null
* @return this object to facilitate chaining multiple methods; never null
*/
Headers retainLatest(String key);
/**
* Removes all but the last {@link Header} object with each key.
*
* @return this object to facilitate chaining multiple methods; never null
*/
Headers retainLatest();
/**
* Removes all headers from this object.
*
* @return this object to facilitate chaining multiple methods; never null
*/
Headers clear();
/**
* Create a copy of this {@link Headers} object. The new copy will contain all of the same {@link Header} objects as this object.
* @return the copy; never null
*/
Headers duplicate();
/**
* Get all {@link Header}s, apply the transform to each and store the result in place of the original.
*
* @param transform the transform to apply; may not be null
* @return this object to facilitate chaining multiple methods; never null
* @throws DataException if the header's value is invalid
*/
Headers apply(HeaderTransform transform);
/**
* Get all {@link Header}s with the given key, apply the transform to each and store the result in place of the original.
*
* @param key the header's key; may not be null
* @param transform the transform to apply; may not be null
* @return this object to facilitate chaining multiple methods; never null
* @throws DataException if the header's value is invalid
*/
Headers apply(String key, HeaderTransform transform);
/**
* A function to transform the supplied {@link Header}. Implementations will likely need to use {@link Header#with(Schema, Object)}
* to create the new instance.
*/
|
Headers
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/api/offsetdatetime/OffsetDateTimeAssert_isAfter_Test.java
|
{
"start": 1536,
"end": 3977
}
|
class ____ extends AbstractOffsetDateTimeAssertBaseTest {
@Override
protected OffsetDateTimeAssert invoke_api_method() {
return assertions.isAfter(REFERENCE)
.isAfter(BEFORE.toString());
}
@Override
protected void verify_internal_effects() {
verify(comparables).assertIsAfter(getInfo(assertions), getActual(assertions), REFERENCE);
verify(comparables).assertIsAfter(getInfo(assertions), getActual(assertions), BEFORE);
}
@Test
void should_pass_if_actual_is_after_offsetDateTime_parameter_with_different_offset() {
assertThat(AFTER_WITH_DIFFERENT_OFFSET).isAfter(REFERENCE);
}
@Test
void should_fail_if_actual_is_equal_to_offsetDateTime_parameter_with_different_offset() {
// WHEN
var assertionError = expectAssertionError(() -> assertThat(REFERENCE_WITH_DIFFERENT_OFFSET).isAfter(REFERENCE));
// THEN
then(assertionError).hasMessage(shouldBeAfter(REFERENCE_WITH_DIFFERENT_OFFSET, REFERENCE, COMPARISON_STRATEGY).create());
}
@Test
void should_fail_if_actual_is_before_offsetDateTime_parameter_with_different_offset() {
// WHEN
var assertionError = expectAssertionError(() -> assertThat(BEFORE_WITH_DIFFERENT_OFFSET).isAfter(REFERENCE));
// THEN
then(assertionError).hasMessage(shouldBeAfter(BEFORE_WITH_DIFFERENT_OFFSET, REFERENCE, COMPARISON_STRATEGY).create());
}
@Test
void should_fail_if_offsetDateTime_parameter_is_null() {
// GIVEN
OffsetDateTime otherOffsetDateTime = null;
// WHEN
ThrowingCallable code = () -> assertThat(now()).isAfter(otherOffsetDateTime);
// THEN
thenIllegalArgumentException().isThrownBy(code)
.withMessage("The OffsetDateTime to compare actual with should not be null");
}
@Test
void should_fail_if_offsetDateTime_as_string_parameter_is_null() {
// GIVEN
String otherOffsetDateTimeAsString = null;
// WHEN
ThrowingCallable code = () -> assertThat(now()).isAfter(otherOffsetDateTimeAsString);
// THEN
thenIllegalArgumentException().isThrownBy(code)
.withMessage("The String representing the OffsetDateTime to compare actual with should not be null");
}
@Test
void should_fail_if_given_string_parameter_cant_be_parsed() {
assertThatThrownBy(() -> assertions.isAfter("not a OffsetDateTime")).isInstanceOf(DateTimeParseException.class);
}
}
|
OffsetDateTimeAssert_isAfter_Test
|
java
|
apache__dubbo
|
dubbo-common/src/main/java/org/apache/dubbo/common/constants/CommonConstants.java
|
{
"start": 10851,
"end": 12065
}
|
class ____ of the ServiceNameMapping {@link Properties} file
*
* @since 2.7.8
*/
String DEFAULT_SERVICE_NAME_MAPPING_PROPERTIES_PATH = "META-INF/dubbo/service-name-mapping.properties";
String ENABLE_NATIVE_JAVA_GENERIC_SERIALIZE = "dubbo.security.serialize.generic.native-java-enable";
String SERIALIZE_BLOCKED_LIST_FILE_PATH = "security/serialize.blockedlist";
String SERIALIZE_ALLOW_LIST_FILE_PATH = "security/serialize.allowlist";
String SERIALIZE_CHECK_STATUS_KEY = "dubbo.application.serialize-check-status";
String QOS_LIVE_PROBE_EXTENSION = "dubbo.application.liveness-probe";
String QOS_READY_PROBE_EXTENSION = "dubbo.application.readiness-probe";
String QOS_STARTUP_PROBE_EXTENSION = "dubbo.application.startup-probe";
String REGISTRY_DELAY_NOTIFICATION_KEY = "delay-notification";
String CACHE_CLEAR_TASK_INTERVAL = "dubbo.application.url.cache.task.interval";
String CACHE_CLEAR_WAITING_THRESHOLD = "dubbo.application.url.cache.clear.waiting";
String CLUSTER_INTERCEPTOR_COMPATIBLE_KEY = "dubbo.application.cluster.interceptor.compatible";
String UTF8ENCODE = "UTF-8";
/**
* Pseudo URL prefix for loading from the
|
path
|
java
|
apache__flink
|
flink-core/src/main/java/org/apache/flink/core/memory/ManagedMemoryUseCase.java
|
{
"start": 1373,
"end": 1425
}
|
enum ____ {
SLOT,
OPERATOR
}
}
|
Scope
|
java
|
spring-projects__spring-boot
|
module/spring-boot-actuator/src/test/java/org/springframework/boot/actuate/endpoint/web/annotation/ControllerEndpointDiscovererTests.java
|
{
"start": 7567,
"end": 7665
}
|
class ____ {
}
@ControllerEndpoint(id = "testcontroller")
static
|
WithRegularEndpointConfiguration
|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/bvt/sql/schemaStat/SchemaStatTest2.java
|
{
"start": 416,
"end": 1181
}
|
class ____ extends TestCase {
public void test_schemaStat() throws Exception {
String sql = "select "
+ " create_time_dd as 来电日期"
+ " from alisec_app.adl_tb_wing_rubbish_laidian_new_reason_realname_fdt "
+ " order by 来电日期 desc limit 30;";
DbType dbType = JdbcConstants.MYSQL;
SQLStatementParser parser = SQLParserUtils.createSQLStatementParser(sql, dbType);
SQLStatement stmt = parser.parseStatementList().get(0);
SchemaStatVisitor statVisitor = SQLUtils.createSchemaStatVisitor(dbType);
stmt.accept(statVisitor);
System.out.println(statVisitor.getColumns());
assertEquals(1, statVisitor.getColumns().size());
}
}
|
SchemaStatTest2
|
java
|
grpc__grpc-java
|
api/src/jmh/java/io/grpc/ReadBenchmark.java
|
{
"start": 1158,
"end": 3068
}
|
class ____ {
List<Context.Key<Object>> keys = new ArrayList<>();
List<Context> contexts = new ArrayList<>();
@Setup
public void setup() {
for (int i = 0; i < 8; i++) {
keys.add(Context.key("Key" + i));
}
contexts.add(Context.ROOT.withValue(keys.get(0), new Object()));
contexts.add(Context.ROOT.withValues(keys.get(0), new Object(), keys.get(1), new Object()));
contexts.add(
Context.ROOT.withValues(
keys.get(0), new Object(), keys.get(1), new Object(), keys.get(2), new Object()));
contexts.add(
Context.ROOT.withValues(
keys.get(0),
new Object(),
keys.get(1),
new Object(),
keys.get(2),
new Object(),
keys.get(3),
new Object()));
contexts.add(contexts.get(0).withValue(keys.get(1), new Object()));
contexts.add(
contexts.get(1).withValues(keys.get(2), new Object(), keys.get(3), new Object()));
contexts.add(
contexts
.get(2)
.withValues(
keys.get(3), new Object(), keys.get(4), new Object(), keys.get(5), new Object()));
contexts.add(
contexts
.get(3)
.withValues(
keys.get(4),
new Object(),
keys.get(5),
new Object(),
keys.get(6),
new Object(),
keys.get(7),
new Object()));
}
}
/** Perform the read operation. */
@Benchmark
@BenchmarkMode(Mode.SampleTime)
@OutputTimeUnit(TimeUnit.NANOSECONDS)
public void testContextLookup(ContextState state, Blackhole bh) {
for (Context.Key<?> key : state.keys) {
for (Context ctx : state.contexts) {
bh.consume(key.get(ctx));
}
}
}
}
|
ContextState
|
java
|
quarkusio__quarkus
|
core/deployment/src/test/java/io/quarkus/deployment/util/OuterRaw.java
|
{
"start": 2051,
"end": 2344
}
|
class ____<Y extends CharSequence> {
public <T extends Number & Comparable<T>, U extends Comparable<U>, V extends Exception> T iii(
List<?> arg, Y arg2, X arg3, DoubleInner<Y> self) throws V {
return null;
}
}
}
}
|
DoubleInner
|
java
|
elastic__elasticsearch
|
server/src/internalClusterTest/java/org/elasticsearch/search/fieldcaps/CCSFieldCapabilitiesIT.java
|
{
"start": 1978,
"end": 21826
}
|
class ____ extends AbstractMultiClustersTestCase {
@Override
protected List<String> remoteClusterAlias() {
return List.of("remote_cluster");
}
@Override
protected boolean reuseClusters() {
return false;
}
public void testFailuresFromRemote() throws IOException {
Settings indexSettings = Settings.builder().put("index.number_of_replicas", 0).build();
final Client localClient = client(LOCAL_CLUSTER);
final Client remoteClient = client("remote_cluster");
String localIndex = "local_test";
assertAcked(localClient.admin().indices().prepareCreate(localIndex).setSettings(indexSettings));
localClient.prepareIndex(localIndex).setId("1").setSource("foo", "bar").get();
localClient.admin().indices().prepareRefresh(localIndex).get();
String remoteErrorIndex = "remote_test_error";
assertAcked(remoteClient.admin().indices().prepareCreate(remoteErrorIndex).setSettings(indexSettings));
remoteClient.prepareIndex(remoteErrorIndex).setId("2").setSource("foo", "bar").get();
remoteClient.admin().indices().prepareRefresh(remoteErrorIndex).get();
// regular field_caps across clusters
FieldCapabilitiesResponse response = client().prepareFieldCaps("*", "remote_cluster:*").setFields("*").get();
assertThat(Arrays.asList(response.getIndices()), containsInAnyOrder(localIndex, "remote_cluster:" + remoteErrorIndex));
// Closed shards will result to index error because shards must be in readable state
FieldCapabilitiesIT.closeShards(cluster("remote_cluster"), remoteErrorIndex);
response = client().prepareFieldCaps("*", "remote_cluster:*").setFields("*").get();
assertThat(response.getIndices()[0], equalTo(localIndex));
assertThat(response.getFailedIndicesCount(), equalTo(1));
FieldCapabilitiesFailure failure = response.getFailures()
.stream()
.filter(f -> Arrays.asList(f.getIndices()).contains("remote_cluster:*"))
.findFirst()
.get();
Exception ex = failure.getException();
assertEquals(RemoteTransportException.class, ex.getClass());
Throwable cause = ExceptionsHelper.unwrapCause(ex);
assertEquals(IllegalIndexShardStateException.class, cause.getClass());
assertEquals(
"CurrentState[CLOSED] operations only allowed when shard state is one of [POST_RECOVERY, STARTED]",
cause.getMessage()
);
// if we only query the remote we should get back an exception only
ex = expectThrows(IllegalIndexShardStateException.class, client().prepareFieldCaps("remote_cluster:*").setFields("*"));
assertEquals("CurrentState[CLOSED] operations only allowed when shard state is one of [POST_RECOVERY, STARTED]", ex.getMessage());
// add an index that doesn't fail to the remote
assertAcked(remoteClient.admin().indices().prepareCreate("okay_remote_index"));
remoteClient.prepareIndex("okay_remote_index").setId("2").setSource("foo", "bar").get();
remoteClient.admin().indices().prepareRefresh("okay_remote_index").get();
response = client().prepareFieldCaps("*", "remote_cluster:*").setFields("*").get();
assertThat(Arrays.asList(response.getIndices()), containsInAnyOrder(localIndex, "remote_cluster:okay_remote_index"));
assertThat(response.getFailedIndicesCount(), equalTo(1));
failure = response.getFailures()
.stream()
.filter(f -> Arrays.asList(f.getIndices()).contains("remote_cluster:" + remoteErrorIndex))
.findFirst()
.get();
ex = failure.getException();
assertEquals(IllegalIndexShardStateException.class, ex.getClass());
assertEquals("CurrentState[CLOSED] operations only allowed when shard state is one of [POST_RECOVERY, STARTED]", ex.getMessage());
}
public void testFailedToConnectToRemoteCluster() throws Exception {
String localIndex = "local_index";
assertAcked(client(LOCAL_CLUSTER).admin().indices().prepareCreate(localIndex));
client(LOCAL_CLUSTER).prepareIndex(localIndex).setId("1").setSource("foo", "bar").get();
client(LOCAL_CLUSTER).admin().indices().prepareRefresh(localIndex).get();
cluster("remote_cluster").close();
FieldCapabilitiesResponse response = client().prepareFieldCaps("*", "remote_cluster:*").setFields("*").get();
assertThat(response.getIndices(), arrayContaining(localIndex));
List<FieldCapabilitiesFailure> failures = response.getFailures();
assertThat(failures, hasSize(1));
assertThat(failures.get(0).getIndices(), arrayContaining("remote_cluster:*"));
}
private void populateIndices(String localIndex, String remoteIndex, String remoteClusterAlias, boolean invertLocalRemoteMappings) {
final Client localClient = client(LOCAL_CLUSTER);
final Client remoteClient = client(remoteClusterAlias);
String[] localMappings = new String[] { "timestamp", "type=date", "field1", "type=keyword", "field3", "type=keyword" };
String[] remoteMappings = new String[] { "timestamp", "type=date", "field2", "type=long", "field3", "type=long" };
assertAcked(
localClient.admin().indices().prepareCreate(localIndex).setMapping(invertLocalRemoteMappings ? remoteMappings : localMappings)
);
assertAcked(
remoteClient.admin().indices().prepareCreate(remoteIndex).setMapping(invertLocalRemoteMappings ? localMappings : remoteMappings)
);
}
public void testIncludeIndices() {
String localIndex = "index-local";
String remoteIndex = "index-remote";
String remoteClusterAlias = "remote_cluster";
populateIndices(localIndex, remoteIndex, remoteClusterAlias, false);
remoteIndex = String.join(":", remoteClusterAlias, remoteIndex);
FieldCapabilitiesResponse response = client().prepareFieldCaps(localIndex, remoteIndex)
.setFields("*")
.setIncludeIndices(true)
.get();
assertThat(response.getIndices(), arrayContainingInAnyOrder(localIndex, remoteIndex));
assertThat(response.getField("timestamp"), aMapWithSize(1));
assertThat(response.getField("timestamp"), hasKey("date"));
assertThat(response.getField("timestamp").get("date").indices(), arrayContainingInAnyOrder(localIndex, remoteIndex));
assertThat(response.getField("field1"), aMapWithSize(1));
assertThat(response.getField("field1"), hasKey("keyword"));
assertThat(response.getField("field1").get("keyword").indices(), arrayContaining(localIndex));
assertThat(response.getField("field2"), aMapWithSize(1));
assertThat(response.getField("field2"), hasKey("long"));
assertThat(response.getField("field2").get("long").indices(), arrayContaining(remoteIndex));
assertThat(response.getField("field3"), aMapWithSize(2));
assertThat(response.getField("field3"), hasKey("long"));
assertThat(response.getField("field3"), hasKey("keyword"));
// mapping conflict, therefore indices is always present for `field3`
assertThat(response.getField("field3").get("long").indices(), arrayContaining(remoteIndex));
assertThat(response.getField("field3").get("keyword").indices(), arrayContaining(localIndex));
}
public void testRandomIncludeIndices() {
String localIndex = "index-local";
String remoteIndex = "index-remote";
String remoteClusterAlias = "remote_cluster";
populateIndices(localIndex, remoteIndex, remoteClusterAlias, false);
remoteIndex = String.join(":", remoteClusterAlias, remoteIndex);
boolean shouldAlwaysIncludeIndices = randomBoolean();
FieldCapabilitiesResponse response = client().prepareFieldCaps(localIndex, remoteIndex)
.setFields("*")
.setIncludeIndices(shouldAlwaysIncludeIndices)
.get();
assertThat(response.getIndices(), arrayContainingInAnyOrder(localIndex, remoteIndex));
assertThat(response.getField("timestamp"), aMapWithSize(1));
assertThat(response.getField("timestamp"), hasKey("date"));
if (shouldAlwaysIncludeIndices) {
assertThat(response.getField("timestamp").get("date").indices(), arrayContainingInAnyOrder(localIndex, remoteIndex));
} else {
assertNull(response.getField("timestamp").get("date").indices());
}
assertThat(response.getField("field1"), aMapWithSize(1));
assertThat(response.getField("field1"), hasKey("keyword"));
if (shouldAlwaysIncludeIndices) {
assertThat(response.getField("field1").get("keyword").indices(), arrayContaining(localIndex));
} else {
assertNull(response.getField("field1").get("keyword").indices());
}
assertThat(response.getField("field2"), aMapWithSize(1));
assertThat(response.getField("field2"), hasKey("long"));
if (shouldAlwaysIncludeIndices) {
assertThat(response.getField("field2").get("long").indices(), arrayContaining(remoteIndex));
} else {
assertNull(response.getField("field2").get("long").indices());
}
assertThat(response.getField("field3"), aMapWithSize(2));
assertThat(response.getField("field3"), hasKey("long"));
assertThat(response.getField("field3"), hasKey("keyword"));
// mapping conflict, therefore indices is always present for `field3`
assertThat(response.getField("field3").get("long").indices(), arrayContaining(remoteIndex));
assertThat(response.getField("field3").get("keyword").indices(), arrayContaining(localIndex));
}
public void testIncludeIndicesSwapped() {
// exact same setup as testIncludeIndices but with mappings swapped between local and remote index
String localIndex = "index-local";
String remoteIndex = "index-remote";
String remoteClusterAlias = "remote_cluster";
populateIndices(localIndex, remoteIndex, remoteClusterAlias, true);
remoteIndex = String.join(":", remoteClusterAlias, remoteIndex);
FieldCapabilitiesResponse response = client().prepareFieldCaps(localIndex, remoteIndex)
.setFields("*")
.setIncludeIndices(true)
.get();
assertThat(response.getIndices(), arrayContainingInAnyOrder(localIndex, remoteIndex));
assertThat(response.getField("timestamp"), aMapWithSize(1));
assertThat(response.getField("timestamp"), hasKey("date"));
assertThat(response.getField("timestamp").get("date").indices(), arrayContainingInAnyOrder(localIndex, remoteIndex));
assertThat(response.getField("field1"), aMapWithSize(1));
assertThat(response.getField("field1"), hasKey("keyword"));
assertThat(response.getField("field1").get("keyword").indices(), arrayContaining(remoteIndex));
assertThat(response.getField("field2"), aMapWithSize(1));
assertThat(response.getField("field2"), hasKey("long"));
assertThat(response.getField("field2").get("long").indices(), arrayContaining(localIndex));
assertThat(response.getField("field3"), aMapWithSize(2));
assertThat(response.getField("field3"), hasKey("long"));
assertThat(response.getField("field3"), hasKey("keyword"));
assertThat(response.getField("field3").get("long").indices(), arrayContaining(localIndex));
assertThat(response.getField("field3").get("keyword").indices(), arrayContaining(remoteIndex));
}
public void testReturnAllLocal() {
assertAcked(
client().admin()
.indices()
.prepareCreate("index")
.setMapping("@timestamp", "type=date", "field1", "type=keyword", "field2", "type=long")
);
for (var pattern : List.of("fake-remote*:index", "fake-remote*:*")) {
{
// returnLocalAll = true by default
var response = client().prepareFieldCaps(pattern).setFields("*").get();
assertThat(response.getIndices(), arrayContaining("index"));
assertThat(response.get().keySet(), hasItems("@timestamp", "field1", "field2"));
}
{
// user can opt out by explicitly setting returnLocalAll=false
var response = client().prepareFieldCaps(pattern).setFields("*").setReturnLocalAll(false).get();
assertThat(response.getIndices(), emptyArray());
assertThat(response.get().keySet(), not(hasItems("@timestamp", "field1", "field2")));
}
}
}
public void testResolvedToMatchingEverywhere() {
String localIndex = "index-local";
String remoteIndex = "index-remote";
String remoteClusterAlias = "remote_cluster";
populateIndices(localIndex, remoteIndex, remoteClusterAlias, false);
String remoteIndexWithCluster = String.join(":", remoteClusterAlias, remoteIndex);
FieldCapabilitiesResponse response = client().prepareFieldCaps(localIndex, remoteIndexWithCluster)
.setFields("*")
.setIncludeResolvedTo(true)
.get();
assertThat(response.getIndices(), arrayContainingInAnyOrder(localIndex, remoteIndexWithCluster));
ResolvedIndexExpressions local = response.getResolvedLocally();
assertThat(local, notNullValue());
assertThat(local.expressions(), hasSize(1));
assertEquals(
local.expressions().get(0).localExpressions().localIndexResolutionResult(),
ResolvedIndexExpression.LocalIndexResolutionResult.SUCCESS
);
List<String> localIndicesList = local.getLocalIndicesList();
assertThat(localIndicesList, hasSize(1));
assertThat(localIndicesList, containsInAnyOrder(localIndex));
Map<String, ResolvedIndexExpressions> remote = response.getResolvedRemotely();
assertThat(remote, notNullValue());
assertThat(remote, aMapWithSize(1));
assertThat(remote.keySet(), contains(remoteClusterAlias));
ResolvedIndexExpressions remoteResponse = remote.get(remoteClusterAlias);
List<String> remoteIndicesList = remoteResponse.getLocalIndicesList();
assertThat(remoteIndicesList, hasSize(1));
assertEquals(
remoteResponse.expressions().get(0).localExpressions().localIndexResolutionResult(),
ResolvedIndexExpression.LocalIndexResolutionResult.SUCCESS
);
assertThat(remoteIndicesList, containsInAnyOrder(remoteIndex));
}
public void testResolvedToMatchingLocallyOnly() {
String localIndex = "index-local";
String remoteIndex = "index-remote";
String remoteClusterAlias = "remote_cluster";
String nonExistentIndex = "non-existent-index";
populateIndices(localIndex, remoteIndex, remoteClusterAlias, false);
String remoteIndexWithCluster = String.join(":", remoteClusterAlias, nonExistentIndex);
FieldCapabilitiesResponse response = client().prepareFieldCaps(localIndex, remoteIndexWithCluster)
.setFields("*")
.setIncludeResolvedTo(true)
.get();
assertThat(response.getIndices(), arrayContainingInAnyOrder(localIndex));
ResolvedIndexExpressions local = response.getResolvedLocally();
assertThat(local, notNullValue());
assertThat(local.expressions(), hasSize(1));
assertEquals(
local.expressions().get(0).localExpressions().localIndexResolutionResult(),
ResolvedIndexExpression.LocalIndexResolutionResult.SUCCESS
);
List<String> localIndicesList = local.getLocalIndicesList();
assertThat(localIndicesList, hasSize(1));
assertThat(localIndicesList, containsInAnyOrder(localIndex));
Map<String, ResolvedIndexExpressions> remote = response.getResolvedRemotely();
assertThat(remote, notNullValue());
assertThat(remote, aMapWithSize(1));
assertThat(remote.keySet(), contains(remoteClusterAlias));
ResolvedIndexExpressions remoteResponse = remote.get(remoteClusterAlias);
List<String> remoteIndicesList = remoteResponse.getLocalIndicesList();
assertThat(remoteIndicesList, hasSize(0));
List<ResolvedIndexExpression> remoteResolvedExpressions = remoteResponse.expressions();
assertEquals(1, remoteResolvedExpressions.size());
assertEquals(
remoteResolvedExpressions.get(0).localExpressions().localIndexResolutionResult(),
ResolvedIndexExpression.LocalIndexResolutionResult.CONCRETE_RESOURCE_NOT_VISIBLE
);
assertEquals(0, remoteIndicesList.size());
}
public void testResolvedToMatchingRemotelyOnly() {
String localIndex = "index-local";
String remoteIndex = "index-remote";
String remoteClusterAlias = "remote_cluster";
String nonExistentIndex = "non-existent-index";
populateIndices(localIndex, remoteIndex, remoteClusterAlias, false);
String remoteIndexWithCluster = String.join(":", remoteClusterAlias, remoteIndex);
boolean ignoreUnavailable = true;
IndicesOptions options = IndicesOptions.fromOptions(ignoreUnavailable, true, true, false, true, true, false, false);
FieldCapabilitiesResponse response = client().prepareFieldCaps(nonExistentIndex, remoteIndexWithCluster)
.setFields("*")
.setIncludeResolvedTo(true)
.setIndicesOptions(options) // without ignore unavaliable would throw error
.get();
assertThat(response.getIndices(), arrayContainingInAnyOrder(remoteIndexWithCluster));
ResolvedIndexExpressions local = response.getResolvedLocally();
assertThat(local, notNullValue());
assertThat(local.expressions(), hasSize(1));
assertEquals(
local.expressions().get(0).localExpressions().localIndexResolutionResult(),
ResolvedIndexExpression.LocalIndexResolutionResult.CONCRETE_RESOURCE_NOT_VISIBLE
);
List<String> localIndicesList = local.getLocalIndicesList();
assertThat(localIndicesList, hasSize(0));
Map<String, ResolvedIndexExpressions> remote = response.getResolvedRemotely();
assertThat(remote, notNullValue());
assertThat(remote, aMapWithSize(1));
assertThat(remote.keySet(), contains(remoteClusterAlias));
ResolvedIndexExpressions remoteResponse = remote.get(remoteClusterAlias);
List<String> remoteIndicesList = remoteResponse.getLocalIndicesList();
assertThat(remoteIndicesList, hasSize(1));
assertThat(remoteIndicesList, containsInAnyOrder(remoteIndex));
List<ResolvedIndexExpression> remoteResolvedExpressions = remoteResponse.expressions();
assertEquals(1, remoteResolvedExpressions.size());
ResolvedIndexExpression remoteExpression = remoteResolvedExpressions.get(0);
assertEquals(
remoteExpression.localExpressions().localIndexResolutionResult(),
ResolvedIndexExpression.LocalIndexResolutionResult.SUCCESS
);
assertEquals(1, remoteExpression.localExpressions().indices().size());
assertEquals(remoteIndex, remoteResolvedExpressions.get(0).original());
}
public void testIncludesMinTransportVersion() {
if (randomBoolean()) {
assertAcked(client().admin().indices().prepareCreate("index"));
}
var response = client().prepareFieldCaps("_all").setFields("*").get();
assertThat(response.minTransportVersion(), equalTo(TransportVersion.current()));
}
}
|
CCSFieldCapabilitiesIT
|
java
|
apache__dubbo
|
dubbo-metrics/dubbo-metrics-default/src/main/java/org/apache/dubbo/metrics/collector/sample/ErrorCodeMetricsListenRegister.java
|
{
"start": 1178,
"end": 1730
}
|
class ____ implements LogListener {
private final ErrorCodeSampler errorCodeSampler;
public ErrorCodeMetricsListenRegister(ErrorCodeSampler errorCodeSampler) {
FailsafeErrorTypeAwareLogger.registerGlobalListen(this);
this.errorCodeSampler = errorCodeSampler;
this.errorCodeSampler.addMetricName(MetricsKey.ERROR_CODE_COUNT.getName());
}
@Override
public void onMessage(String code, String msg) {
errorCodeSampler.inc(code, MetricsKey.ERROR_CODE_COUNT.getName());
}
}
|
ErrorCodeMetricsListenRegister
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/mapping/joinformula/ChildEntity.java
|
{
"start": 264,
"end": 397
}
|
class ____ {
@Id
private Long id;
@Column(name = "PARENT_ID")
private Long parentId;
@Column
private String name;
}
|
ChildEntity
|
java
|
FasterXML__jackson-core
|
src/test/java/tools/jackson/core/unittest/jsonptr/PointerFromContextTest.java
|
{
"start": 463,
"end": 13341
}
|
class ____ extends JacksonCoreTestBase
{
/*
/**********************************************************
/* Test methods, basic
/**********************************************************
*/
private final JsonFactory JSON_F = new JsonFactory();
private final JsonPointer EMPTY_PTR = JsonPointer.empty();
public void testViaParser() throws Exception
{
final String SIMPLE = a2q("{'a':123,'array':[1,2,[3],5,{'obInArray':4}],"
+"'ob':{'first':[false,true],'second':{'sub':37}},'b':true}");
JsonParser p = JSON_F.createParser(ObjectReadContext.empty(), SIMPLE);
// by default should just get "empty"
assertSame(EMPTY_PTR, p.streamReadContext().pathAsPointer());
// let's just traverse, then:
assertToken(JsonToken.START_OBJECT, p.nextToken());
assertSame(EMPTY_PTR, p.streamReadContext().pathAsPointer());
assertToken(JsonToken.PROPERTY_NAME, p.nextToken()); // a
assertEquals("/a", p.streamReadContext().pathAsPointer().toString());
assertToken(JsonToken.VALUE_NUMBER_INT, p.nextToken());
assertEquals("/a", p.streamReadContext().pathAsPointer().toString());
assertToken(JsonToken.PROPERTY_NAME, p.nextToken()); // array
assertEquals("/array", p.streamReadContext().pathAsPointer().toString());
assertToken(JsonToken.START_ARRAY, p.nextToken());
assertEquals("/array", p.streamReadContext().pathAsPointer().toString());
assertToken(JsonToken.VALUE_NUMBER_INT, p.nextToken()); // 1
assertEquals("/array/0", p.streamReadContext().pathAsPointer().toString());
assertToken(JsonToken.VALUE_NUMBER_INT, p.nextToken()); // 2
assertEquals("/array/1", p.streamReadContext().pathAsPointer().toString());
assertToken(JsonToken.START_ARRAY, p.nextToken());
assertEquals("/array/2", p.streamReadContext().pathAsPointer().toString());
assertToken(JsonToken.VALUE_NUMBER_INT, p.nextToken()); // 3
assertEquals("/array/2/0", p.streamReadContext().pathAsPointer().toString());
assertToken(JsonToken.END_ARRAY, p.nextToken());
assertEquals("/array/2", p.streamReadContext().pathAsPointer().toString());
assertToken(JsonToken.VALUE_NUMBER_INT, p.nextToken()); // 5
assertEquals("/array/3", p.streamReadContext().pathAsPointer().toString());
assertToken(JsonToken.START_OBJECT, p.nextToken());
assertEquals("/array/4", p.streamReadContext().pathAsPointer().toString());
assertToken(JsonToken.PROPERTY_NAME, p.nextToken()); // obInArray
assertEquals("/array/4/obInArray", p.streamReadContext().pathAsPointer().toString());
assertToken(JsonToken.VALUE_NUMBER_INT, p.nextToken()); // 4
assertEquals("/array/4/obInArray", p.streamReadContext().pathAsPointer().toString());
assertToken(JsonToken.END_OBJECT, p.nextToken());
assertEquals("/array/4", p.streamReadContext().pathAsPointer().toString());
assertToken(JsonToken.END_ARRAY, p.nextToken()); // /array
assertEquals("/array", p.streamReadContext().pathAsPointer().toString());
assertToken(JsonToken.PROPERTY_NAME, p.nextToken()); // ob
assertEquals("/ob", p.streamReadContext().pathAsPointer().toString());
assertToken(JsonToken.START_OBJECT, p.nextToken());
assertEquals("/ob", p.streamReadContext().pathAsPointer().toString());
assertToken(JsonToken.PROPERTY_NAME, p.nextToken()); // first
assertEquals("/ob/first", p.streamReadContext().pathAsPointer().toString());
assertToken(JsonToken.START_ARRAY, p.nextToken());
assertEquals("/ob/first", p.streamReadContext().pathAsPointer().toString());
assertToken(JsonToken.VALUE_FALSE, p.nextToken());
assertEquals("/ob/first/0", p.streamReadContext().pathAsPointer().toString());
assertToken(JsonToken.VALUE_TRUE, p.nextToken());
assertEquals("/ob/first/1", p.streamReadContext().pathAsPointer().toString());
assertToken(JsonToken.END_ARRAY, p.nextToken());
assertEquals("/ob/first", p.streamReadContext().pathAsPointer().toString());
assertToken(JsonToken.PROPERTY_NAME, p.nextToken()); // second
assertEquals("/ob/second", p.streamReadContext().pathAsPointer().toString());
assertToken(JsonToken.START_OBJECT, p.nextToken());
assertEquals("/ob/second", p.streamReadContext().pathAsPointer().toString());
assertToken(JsonToken.PROPERTY_NAME, p.nextToken()); // sub
assertEquals("/ob/second/sub", p.streamReadContext().pathAsPointer().toString());
assertToken(JsonToken.VALUE_NUMBER_INT, p.nextToken()); // 37
assertEquals("/ob/second/sub", p.streamReadContext().pathAsPointer().toString());
assertToken(JsonToken.END_OBJECT, p.nextToken());
assertEquals("/ob/second", p.streamReadContext().pathAsPointer().toString());
assertToken(JsonToken.END_OBJECT, p.nextToken()); // /ob
assertEquals("/ob", p.streamReadContext().pathAsPointer().toString());
assertToken(JsonToken.PROPERTY_NAME, p.nextToken()); // b
assertEquals("/b", p.streamReadContext().pathAsPointer().toString());
assertToken(JsonToken.VALUE_TRUE, p.nextToken());
assertEquals("/b", p.streamReadContext().pathAsPointer().toString());
assertToken(JsonToken.END_OBJECT, p.nextToken());
assertSame(EMPTY_PTR, p.streamReadContext().pathAsPointer());
assertNull(p.nextToken());
p.close();
}
public void testViaGenerator() throws Exception
{
StringWriter w = new StringWriter();
JsonGenerator g = JSON_F.createGenerator(ObjectWriteContext.empty(), w);
assertSame(EMPTY_PTR, g.streamWriteContext().pathAsPointer());
g.writeStartArray();
// no path yet
assertSame(EMPTY_PTR, g.streamWriteContext().pathAsPointer());
g.writeBoolean(true);
assertEquals("/0", g.streamWriteContext().pathAsPointer().toString());
g.writeStartObject();
assertEquals("/1", g.streamWriteContext().pathAsPointer().toString());
g.writeName("x");
assertEquals("/1/x", g.streamWriteContext().pathAsPointer().toString());
g.writeString("foo");
assertEquals("/1/x", g.streamWriteContext().pathAsPointer().toString());
g.writeName("stats");
assertEquals("/1/stats", g.streamWriteContext().pathAsPointer().toString());
g.writeStartObject();
assertEquals("/1/stats", g.streamWriteContext().pathAsPointer().toString());
g.writeName("rate");
assertEquals("/1/stats/rate", g.streamWriteContext().pathAsPointer().toString());
g.writeNumber(13);
assertEquals("/1/stats/rate", g.streamWriteContext().pathAsPointer().toString());
g.writeEndObject();
assertEquals("/1/stats", g.streamWriteContext().pathAsPointer().toString());
g.writeEndObject();
assertEquals("/1", g.streamWriteContext().pathAsPointer().toString());
g.writeEndArray();
assertSame(EMPTY_PTR, g.streamWriteContext().pathAsPointer());
g.close();
w.close();
}
/*
/**********************************************************
/* Test methods, root-offset
/**********************************************************
*/
public void testParserWithRoot() throws Exception
{
final String JSON = a2q("{'a':1,'b':3}\n"
+"{'a':5,'c':[1,2]}\n[1,2]\n");
JsonParser p = JSON_F.createParser(ObjectReadContext.empty(), JSON);
// before pointing to anything, we have no path to point to
assertSame(EMPTY_PTR, p.streamReadContext().pathAsPointer(true));
// but immediately after advancing we do
assertToken(JsonToken.START_OBJECT, p.nextToken());
assertEquals("/0", p.streamReadContext().pathAsPointer(true).toString());
assertToken(JsonToken.PROPERTY_NAME, p.nextToken()); // a
assertEquals("/0/a", p.streamReadContext().pathAsPointer(true).toString());
assertToken(JsonToken.VALUE_NUMBER_INT, p.nextToken()); // a:1
assertEquals("/0/a", p.streamReadContext().pathAsPointer(true).toString());
assertToken(JsonToken.PROPERTY_NAME, p.nextToken()); // b
assertEquals("/0/b", p.streamReadContext().pathAsPointer(true).toString());
assertToken(JsonToken.VALUE_NUMBER_INT, p.nextToken()); // a:1
assertEquals("/0/b", p.streamReadContext().pathAsPointer(true).toString());
assertToken(JsonToken.END_OBJECT, p.nextToken());
assertEquals("/0", p.streamReadContext().pathAsPointer(true).toString());
assertToken(JsonToken.START_OBJECT, p.nextToken());
assertEquals("/1", p.streamReadContext().pathAsPointer(true).toString());
assertToken(JsonToken.PROPERTY_NAME, p.nextToken()); // a
assertEquals("/1/a", p.streamReadContext().pathAsPointer(true).toString());
assertToken(JsonToken.VALUE_NUMBER_INT, p.nextToken()); // a:1
assertEquals("/1/a", p.streamReadContext().pathAsPointer(true).toString());
assertToken(JsonToken.PROPERTY_NAME, p.nextToken()); // c
assertEquals("/1/c", p.streamReadContext().pathAsPointer(true).toString());
assertToken(JsonToken.START_ARRAY, p.nextToken());
assertEquals("/1/c", p.streamReadContext().pathAsPointer(true).toString());
assertToken(JsonToken.VALUE_NUMBER_INT, p.nextToken());
assertEquals("/1/c/0", p.streamReadContext().pathAsPointer(true).toString());
assertToken(JsonToken.VALUE_NUMBER_INT, p.nextToken());
assertEquals("/1/c/1", p.streamReadContext().pathAsPointer(true).toString());
assertToken(JsonToken.END_ARRAY, p.nextToken());
assertEquals("/1/c", p.streamReadContext().pathAsPointer(true).toString());
assertToken(JsonToken.END_OBJECT, p.nextToken());
assertEquals("/1", p.streamReadContext().pathAsPointer(true).toString());
assertToken(JsonToken.START_ARRAY, p.nextToken());
assertEquals("/2", p.streamReadContext().pathAsPointer(true).toString());
assertToken(JsonToken.VALUE_NUMBER_INT, p.nextToken());
assertEquals("/2/0", p.streamReadContext().pathAsPointer(true).toString());
assertToken(JsonToken.VALUE_NUMBER_INT, p.nextToken());
assertEquals("/2/1", p.streamReadContext().pathAsPointer(true).toString());
assertToken(JsonToken.END_ARRAY, p.nextToken());
assertEquals("/2", p.streamReadContext().pathAsPointer(true).toString());
assertNull(p.nextToken());
// 21-Mar-2017, tatu: This is not entirely satisfactory: ideally should get
// EMPTY here as well. But context doesn't really get reset at the end
// and it's not 100% clear what is the best path forward. So, for now...
// just verify current sub-optimal behavior
assertEquals("/2", p.streamReadContext().pathAsPointer(true).toString());
p.close();
}
public void testGeneratorWithRoot() throws Exception
{
StringWriter w = new StringWriter();
JsonGenerator g = JSON_F.createGenerator(ObjectWriteContext.empty(), w);
assertSame(EMPTY_PTR, g.streamWriteContext().pathAsPointer(true));
g.writeStartArray();
assertEquals("/0", g.streamWriteContext().pathAsPointer(true).toString());
g.writeBoolean(true);
assertEquals("/0/0", g.streamWriteContext().pathAsPointer(true).toString());
g.writeStartObject();
assertEquals("/0/1", g.streamWriteContext().pathAsPointer(true).toString());
g.writeName("x");
assertEquals("/0/1/x", g.streamWriteContext().pathAsPointer(true).toString());
g.writeString("foo");
assertEquals("/0/1/x", g.streamWriteContext().pathAsPointer(true).toString());
g.writeEndObject();
assertEquals("/0/1", g.streamWriteContext().pathAsPointer(true).toString());
g.writeEndArray();
assertEquals("/0", g.streamWriteContext().pathAsPointer(true).toString());
g.writeBoolean(true);
assertEquals("/1", g.streamWriteContext().pathAsPointer(true).toString());
g.writeStartArray();
assertEquals("/2", g.streamWriteContext().pathAsPointer(true).toString());
g.writeString("foo");
assertEquals("/2/0", g.streamWriteContext().pathAsPointer(true).toString());
g.writeString("bar");
assertEquals("/2/1", g.streamWriteContext().pathAsPointer(true).toString());
g.writeEndArray();
assertEquals("/2", g.streamWriteContext().pathAsPointer(true).toString());
// as earlier, not optimal result, but verify it's stable:
assertEquals("/2", g.streamWriteContext().pathAsPointer(true).toString());
g.close();
}
}
|
PointerFromContextTest
|
java
|
spring-projects__spring-boot
|
module/spring-boot-security-oauth2-resource-server/src/main/java/org/springframework/boot/security/oauth2/server/resource/autoconfigure/reactive/JwkSetUriReactiveJwtDecoderBuilderCustomizer.java
|
{
"start": 908,
"end": 1225
}
|
interface ____ the customization of the
* {@link JwkSetUriReactiveJwtDecoderBuilder} used to create the auto-configured
* {@link ReactiveJwtDecoder} for a JWK set URI that has been configured directly or
* obtained through an issuer URI.
*
* @author Andy Wilkinson
* @since 4.0.0
*/
@FunctionalInterface
public
|
for
|
java
|
resilience4j__resilience4j
|
resilience4j-core/src/main/java/io/github/resilience4j/core/ResultUtils.java
|
{
"start": 777,
"end": 2148
}
|
class ____ {
private ResultUtils() {
}
@SuppressWarnings("unchecked")
public static <T> boolean isSuccessfulAndReturned(
Either<? extends Throwable, ?> callsResult,
Class<T> expectedClass,
Function<T, Boolean> returnedChecker) {
if (callsResult.isLeft()) {
return false;
}
Object result = callsResult.get();
if (result == null) {
return false;
}
if (!expectedClass.isAssignableFrom(result.getClass())) {
return false;
}
return returnedChecker.apply((T) result);
}
public static <T extends Throwable> boolean isFailedAndThrown(
Either<? extends Throwable, ?> callsResult,
Class<T> expectedClass) {
return isFailedAndThrown(callsResult, expectedClass, thrown -> true);
}
@SuppressWarnings("unchecked")
public static <T extends Throwable> boolean isFailedAndThrown(
Either<? extends Throwable, ?> callsResult,
Class<T> expectedClass,
Function<T, Boolean> thrownChecker) {
if (callsResult.isRight()) {
return false;
}
Throwable thrown = callsResult.getLeft();
if (!expectedClass.isAssignableFrom(thrown.getClass())) {
return false;
}
return thrownChecker.apply((T) thrown);
}
}
|
ResultUtils
|
java
|
apache__camel
|
components/camel-smpp/src/main/java/org/apache/camel/component/smpp/SmppSmCommand.java
|
{
"start": 1148,
"end": 2515
}
|
class ____ extends AbstractSmppCommand {
// FIXME: these constants should be defined somewhere in jSMPP:
public static final int SMPP_NEG_RESPONSE_MSG_TOO_LONG = 1;
protected Charset ascii = StandardCharsets.US_ASCII;
protected Charset latin1 = StandardCharsets.ISO_8859_1;
protected Charset defaultCharset;
private final Logger logger = LoggerFactory.getLogger(SmppSmCommand.class);
public SmppSmCommand(SMPPSession session, SmppConfiguration config) {
super(session, config);
defaultCharset = Charset.forName(config.getEncoding());
}
protected byte[][] splitBody(Message message) throws SmppException {
byte[] shortMessage = getShortMessage(message);
SmppSplitter splitter = createSplitter(message);
byte[][] segments = splitter.split(shortMessage);
if (segments.length > 1) {
// Message body is split into multiple parts,
// check if this is permitted
SmppSplittingPolicy policy = getSplittingPolicy(message);
switch (policy) {
case ALLOW:
return segments;
case TRUNCATE:
return new byte[][] { java.util.Arrays.copyOfRange(shortMessage, 0, segments[0].length) };
case REJECT:
// FIXME - JSMPP needs to have an
|
SmppSmCommand
|
java
|
apache__camel
|
components/camel-mllp/src/test/java/org/apache/camel/test/junit/rule/mllp/MllpServerResource.java
|
{
"start": 21798,
"end": 31801
}
|
class ____ extends Thread {
final long bindTimeout = 30000;
final long bindRetryDelay = 1000;
Logger log = LoggerFactory.getLogger(this.getClass());
ServerSocket serverSocket;
List<ClientSocketThread> clientSocketThreads = new LinkedList<>();
String listenHost;
int listenPort;
int backlog = 5;
int acceptTimeout = 5000;
boolean raiseExceptionOnAcceptTimeout;
AcceptSocketThread() throws IOException {
bind();
}
AcceptSocketThread(int listenPort) throws IOException {
this.listenPort = listenPort;
bind();
}
AcceptSocketThread(int listenPort, int backlog) throws IOException {
this.listenPort = listenPort;
this.backlog = backlog;
bind();
}
AcceptSocketThread(String listenHost, int listenPort, int backlog) throws IOException {
this.listenHost = listenHost;
this.listenPort = listenPort;
this.backlog = backlog;
bind();
}
/**
* Open the TCP Listener
*
* @throws IOException
*/
private void bind() throws IOException {
this.setDaemon(true);
serverSocket = new ServerSocket();
// Set TCP Parameters
serverSocket.setSoTimeout(acceptTimeout);
serverSocket.setReuseAddress(true);
InetSocketAddress listenAddress;
if (null != this.listenHost) {
listenAddress = new InetSocketAddress(this.listenHost, this.listenPort);
} else {
listenAddress = new InetSocketAddress(this.listenPort);
}
long startTicks = System.currentTimeMillis();
while (!serverSocket.isBound()) {
try {
serverSocket.bind(listenAddress, backlog);
} catch (BindException bindEx) {
if (System.currentTimeMillis() < startTicks + bindTimeout) {
log.warn("Unable to bind to {} - retrying in {} milliseconds", listenAddress, bindRetryDelay);
try {
Thread.sleep(bindRetryDelay);
} catch (InterruptedException interruptedEx) {
log.error("Wait for bind retry was interrupted - rethrowing BindException");
throw bindEx;
}
}
}
}
if (0 >= this.listenPort) {
this.listenPort = serverSocket.getLocalPort();
}
log.info("Opened TCP Listener on port {}", serverSocket.getLocalPort());
}
void checkClientConnections() {
if (clientSocketThreads != null) {
for (ClientSocketThread clientSocketThread : clientSocketThreads) {
clientSocketThread.checkConnection();
}
}
}
void closeClientConnections() {
if (clientSocketThreads != null) {
for (ClientSocketThread clientSocketThread : clientSocketThreads) {
clientSocketThread.closeConnection();
}
}
}
void resetClientConnections() {
if (clientSocketThreads != null) {
for (ClientSocketThread clientSocketThread : clientSocketThreads) {
clientSocketThread.resetConnection();
}
}
}
/**
* Accept TCP connections and create ClientSocketThreads for them
*/
@Override
public void run() {
log.info("Accepting connections on port {}", serverSocket.getLocalPort());
this.setName("MllpServerResource$AcceptSocketThread - " + serverSocket.getLocalSocketAddress().toString());
while (!isInterrupted() && serverSocket.isBound() && !serverSocket.isClosed()) {
Socket clientSocket = null;
try {
clientSocket = serverSocket.accept();
} catch (SocketTimeoutException timeoutEx) {
if (raiseExceptionOnAcceptTimeout) {
throw new MllpJUnitResourceTimeoutException("Timeout Accepting client connection", timeoutEx);
}
log.warn("Timeout waiting for client connection");
} catch (SocketException socketEx) {
log.debug("SocketException encountered accepting client connection - ignoring", socketEx);
if (null == clientSocket) {
continue;
} else if (!clientSocket.isClosed()) {
try {
clientSocket.setSoLinger(true, 0);
} catch (SocketException soLingerEx) {
log.warn(
"Ignoring SocketException encountered when setting SO_LINGER in preparation of resetting client Socket",
soLingerEx);
}
try {
clientSocket.close();
} catch (IOException ioEx) {
log.warn("Ignoring IOException encountered when resetting client Socket", ioEx);
}
continue;
} else {
throw new MllpJUnitResourceException(
"Unexpected SocketException encountered accepting client connection", socketEx);
}
} catch (Exception ex) {
throw new MllpJUnitResourceException("Unexpected exception encountered accepting client connection", ex);
}
if (null != clientSocket) {
try {
clientSocket.setKeepAlive(true);
clientSocket.setTcpNoDelay(false);
clientSocket.setSoLinger(false, -1);
clientSocket.setSoTimeout(5000);
ClientSocketThread clientSocketThread = new ClientSocketThread(clientSocket);
clientSocketThread.setDaemon(true);
clientSocketThread.start();
clientSocketThreads.add(clientSocketThread);
} catch (Exception unexpectedEx) {
log.warn("Unexpected exception encountered configuring client socket");
try {
clientSocket.close();
} catch (IOException ingoreEx) {
log.warn("Exceptiong encountered closing client socket after attempting to accept connection",
ingoreEx);
}
throw new MllpJUnitResourceException(
"Unexpected exception encountered configuring client socket", unexpectedEx);
}
}
}
log.info("No longer accepting connections - closing TCP Listener on port {}", serverSocket.getLocalPort());
try {
serverSocket.close();
} catch (IOException e) {
log.warn("I/O exception closing the server socket: {}", e.getMessage(), e);
}
log.info("Closed TCP Listener on port {}", serverSocket.getLocalPort());
}
public void shutdown() {
this.interrupt();
}
public String getListenHost() {
return listenHost;
}
public int getListenPort() {
return listenPort;
}
public int getBacklog() {
return backlog;
}
public int getAcceptTimeout() {
return acceptTimeout;
}
/**
* Enable/disable a timeout while waiting for a TCP connection, in milliseconds. With this option set to a
* non-zero timeout, the AcceptSocketThread will block for only this amount of time while waiting for a tcp
* connection. If the timeout expires and raiseExceptionOnAcceptTimeout is set to true, a
* MllpJUnitResourceTimeoutException is raised. Otherwise, the AcceptSocketThread will continue to poll for new
* TCP connections.
*
* @param acceptTimeout the timeout in milliseconds - zero is interpreted as an infinite timeout
*/
public void setAcceptTimeout(int acceptTimeout) {
this.acceptTimeout = acceptTimeout;
}
public boolean isRaiseExceptionOnAcceptTimeout() {
return raiseExceptionOnAcceptTimeout;
}
/**
* Enable/Disable the generation of MllpJUnitResourceTimeoutException if the ServerSocket.accept() call raises a
* SocketTimeoutException.
*
* @param raiseExceptionOnAcceptTimeout true enables exceptions on an accept timeout
*/
public void setRaiseExceptionOnAcceptTimeout(boolean raiseExceptionOnAcceptTimeout) {
this.raiseExceptionOnAcceptTimeout = raiseExceptionOnAcceptTimeout;
}
public void close() {
}
@Override
public void interrupt() {
for (ClientSocketThread clientSocketThread : clientSocketThreads) {
clientSocketThread.interrupt();
}
if (serverSocket != null && serverSocket.isBound() && !serverSocket.isClosed()) {
try {
serverSocket.close();
} catch (Exception ex) {
log.warn("Exception encountered closing server socket on interrupt", ex);
}
}
super.interrupt();
}
}
/**
* Nested
|
AcceptSocketThread
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/internal/ComparatorBasedComparisonStrategy_iterableRemove_Test.java
|
{
"start": 1040,
"end": 2532
}
|
class ____ extends AbstractTest_ComparatorBasedComparisonStrategy {
@Test
void should_remove_value_from_collections_since_it_matches_one_collections_element_according_to_given_comparator() {
List<String> hobbits = newArrayList("Merry", "Frodo", null, "Merry", "Sam");
assertThat(caseInsensitiveComparisonStrategy.iterableContains(hobbits, "SAM")).isTrue();
caseInsensitiveComparisonStrategy.iterableRemoves(hobbits, "Sam");
assertThat(caseInsensitiveComparisonStrategy.iterableContains(hobbits, "SAM")).isFalse();
caseInsensitiveComparisonStrategy.iterableRemoves(hobbits, null);
assertThat(caseInsensitiveComparisonStrategy.iterableContains(hobbits, null)).isFalse();
}
@Test
void should_not_remove_value_from_collections_since_it_does_not_match_any_collections_elements_according_to_given_comparator() {
List<String> hobbits = newArrayList("Merry", "Frodo", null, "Merry", "Sam");
assertThat(caseInsensitiveComparisonStrategy.iterableContains(hobbits, "SAM")).isTrue();
caseInsensitiveComparisonStrategy.iterableRemoves(hobbits, "SAM ");
assertThat(caseInsensitiveComparisonStrategy.iterableContains(hobbits, "SAM")).isTrue();
}
@Test
void should_not_fail_if_collections_is_empty_or_null() {
List<String> hobbits = newArrayList();
caseInsensitiveComparisonStrategy.iterableRemoves(hobbits, "SAM");
caseInsensitiveComparisonStrategy.iterableRemoves(null, "SAM ");
}
}
|
ComparatorBasedComparisonStrategy_iterableRemove_Test
|
java
|
elastic__elasticsearch
|
test/fixtures/geoip-fixture/src/main/java/fixture/geoip/GeoIpHttpFixture.java
|
{
"start": 1056,
"end": 5469
}
|
class ____ extends ExternalResource {
private final Path source;
private final Path target;
private final boolean enabled;
private HttpServer server;
public GeoIpHttpFixture(boolean enabled) {
this.enabled = enabled;
try {
this.source = Files.createTempDirectory("source");
this.target = Files.createTempDirectory("target");
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
public String getAddress() {
return "http://" + server.getAddress().getHostString() + ":" + server.getAddress().getPort() + "/";
}
@Override
protected void before() throws Throwable {
if (enabled) {
copyFiles();
String data = new String(
GeoIpHttpFixture.class.getResourceAsStream("/geoip-fixture/data.json").readAllBytes(),
StandardCharsets.UTF_8
);
this.server = HttpServer.create(new InetSocketAddress(InetAddress.getLoopbackAddress(), 0), 0);
this.server.createContext("/", exchange -> {
String query = exchange.getRequestURI().getQuery();
if (query == null || query.contains("elastic_geoip_service_tos=agree") == false) {
exchange.sendResponseHeaders(400, 0);
exchange.getResponseBody().close();
return;
}
exchange.sendResponseHeaders(200, data.length());
try (BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(exchange.getResponseBody()))) {
writer.write(data);
}
});
this.server.createContext("/db", exchange -> {
exchange.sendResponseHeaders(200, 0);
String dbName = exchange.getRequestURI().getPath().replaceAll(".*/db", "");
try (
OutputStream outputStream = exchange.getResponseBody();
InputStream db = GeoIpHttpFixture.class.getResourceAsStream("/geoip-fixture" + dbName)
) {
db.transferTo(outputStream);
}
});
this.server.createContext("/cli", exchange -> {
String fileName = exchange.getRequestURI().getPath().replaceAll(".*/cli/", "");
Path targetPath = target.resolve(fileName);
if (Files.isRegularFile(targetPath)) {
try (OutputStream outputStream = exchange.getResponseBody(); InputStream db = Files.newInputStream(targetPath)) {
exchange.sendResponseHeaders(200, 0);
db.transferTo(outputStream);
} catch (Exception e) {
exchange.sendResponseHeaders(500, 0);
exchange.getResponseBody().close();
}
} else {
exchange.sendResponseHeaders(404, 0);
exchange.getResponseBody().close();
}
});
server.start();
}
}
@Override
protected void after() {
if (enabled) {
server.stop(0);
}
}
private void copyFiles() throws Exception {
Files.copy(
GeoIpHttpFixture.class.getResourceAsStream("/geoip-fixture/GeoLite2-ASN.tgz"),
source.resolve("GeoLite2-ASN.tgz"),
StandardCopyOption.REPLACE_EXISTING
);
Files.copy(
GeoIpHttpFixture.class.getResourceAsStream("/geoip-fixture/GeoLite2-City.mmdb"),
source.resolve("GeoLite2-City.mmdb"),
StandardCopyOption.REPLACE_EXISTING
);
Files.copy(
GeoIpHttpFixture.class.getResourceAsStream("/geoip-fixture/GeoLite2-Country.mmdb"),
source.resolve("GeoLite2-Country.mmdb"),
StandardCopyOption.REPLACE_EXISTING
);
Files.copy(
GeoIpHttpFixture.class.getResourceAsStream("/geoip-fixture/MyCustomGeoLite2-City.mmdb"),
source.resolve("MyCustomGeoLite2-City.mmdb"),
StandardCopyOption.REPLACE_EXISTING
);
new GeoIpCli().main(
new String[] { "-s", source.toAbsolutePath().toString(), "-t", target.toAbsolutePath().toString() },
Terminal.DEFAULT,
null
);
}
}
|
GeoIpHttpFixture
|
java
|
spring-projects__spring-framework
|
spring-test/src/test/java/org/springframework/test/context/bean/override/mockito/MockitoBeanSuperAndSubtypeIntegrationTests.java
|
{
"start": 1554,
"end": 1715
}
|
class ____ {
// The declaration order of the following fields is intentional, and prior
// to fixing gh-34025 this test
|
MockitoBeanSuperAndSubtypeIntegrationTests
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/AbstractITCommitProtocol.java
|
{
"start": 29689,
"end": 30598
}
|
class ____;" +
" expect the final file has correct storage class.");
Configuration conf = getConfiguration();
skipIfStorageClassTestsDisabled(conf);
conf.set(STORAGE_CLASS, STORAGE_CLASS_INTELLIGENT_TIERING);
JobData jobData = startJob(false);
JobContext jContext = jobData.jContext;
TaskAttemptContext tContext = jobData.tContext;
AbstractS3ACommitter committer = jobData.committer;
validateTaskAttemptWorkingDirectory(committer, tContext);
// write output
writeTextOutput(tContext);
// commit task
dumpMultipartUploads();
commitTask(committer, tContext);
// commit job
assertMultipartUploadsPending(outDir);
commitJob(committer, jContext);
// validate output
validateContent(outDir, shouldExpectSuccessMarker(),
committer.getUUID());
assertNoMultipartUploadsPending(outDir);
// validate storage
|
configuration
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/indices/recovery/RecoveriesCollection.java
|
{
"start": 11591,
"end": 13275
}
|
class ____ extends AbstractRunnable {
private final long recoveryId;
private final TimeValue checkInterval;
private volatile long lastSeenAccessTime;
private RecoveryMonitor(long recoveryId, long lastSeenAccessTime, TimeValue checkInterval) {
this.recoveryId = recoveryId;
this.checkInterval = checkInterval;
this.lastSeenAccessTime = lastSeenAccessTime;
}
@Override
public void onFailure(Exception e) {
logger.error(() -> "unexpected error while monitoring recovery [" + recoveryId + "]", e);
}
@Override
protected void doRun() throws Exception {
RecoveryTarget status = onGoingRecoveries.get(recoveryId);
if (status == null) {
logger.trace("[monitor] no status found for [{}], shutting down", recoveryId);
return;
}
long accessTime = status.lastAccessTime();
if (accessTime == lastSeenAccessTime) {
String message = "no activity after [" + checkInterval + "]";
failRecovery(
recoveryId,
new RecoveryFailedException(status.state(), message, new ElasticsearchTimeoutException(message)),
true // to be safe, we don't know what go stuck
);
return;
}
lastSeenAccessTime = accessTime;
logger.trace("[monitor] rescheduling check for [{}]. last access time is [{}]", recoveryId, lastSeenAccessTime);
threadPool.schedule(this, checkInterval, threadPool.generic());
}
}
}
|
RecoveryMonitor
|
java
|
mockito__mockito
|
mockito-core/src/test/java/org/mockitousage/junitrule/RuleTestWithParameterConstructorTest.java
|
{
"start": 437,
"end": 909
}
|
class ____ {
@Rule public MockitoRule mockitoJUnitRule = MockitoJUnit.rule();
@Mock private Injected injected;
@InjectMocks private InjectInto injectInto;
@Test
public void testInjectMocks() throws Exception {
assertNotNull("Mock created", injected);
assertNotNull("Object created", injectInto);
assertEquals("A injected into B", injected, injectInto.getInjected());
}
public static
|
RuleTestWithParameterConstructorTest
|
java
|
apache__logging-log4j2
|
log4j-jakarta-web/src/test/java/org/apache/logging/log4j/web/Log4jShutdownOnContextDestroyedListenerTest.java
|
{
"start": 1314,
"end": 2857
}
|
class ____ {
@Mock(lenient = true)
private ServletContextEvent event;
@Mock(lenient = true)
private ServletContext servletContext;
@Mock
private Log4jWebLifeCycle initializer;
private Log4jShutdownOnContextDestroyedListener listener;
void setUp(final boolean mockInitializer) {
this.listener = new Log4jShutdownOnContextDestroyedListener();
given(event.getServletContext()).willReturn(servletContext);
if (mockInitializer) {
given(servletContext.getAttribute(Log4jWebSupport.SUPPORT_ATTRIBUTE))
.willReturn(initializer);
}
}
@Test
void testInitAndDestroy() {
setUp(true);
this.listener.contextInitialized(this.event);
then(initializer).should(never()).start();
then(initializer).should(never()).setLoggerContext();
this.listener.contextDestroyed(this.event);
then(initializer).should().clearLoggerContext();
then(initializer).should().stop();
}
@Test
void testDestroy() {
setUp(true);
this.listener.contextDestroyed(this.event);
then(initializer).should(never()).clearLoggerContext();
then(initializer).should(never()).stop();
}
@Test
void whenNoInitializerInContextTheContextInitializedShouldThrowAnException() {
setUp(false);
assertThrows(IllegalStateException.class, () -> {
this.listener.contextInitialized(this.event);
});
}
}
|
Log4jShutdownOnContextDestroyedListenerTest
|
java
|
quarkusio__quarkus
|
extensions/resteasy-reactive/rest/deployment/src/test/java/io/quarkus/resteasy/reactive/server/test/providers/FileTestCase.java
|
{
"start": 710,
"end": 7237
}
|
class ____ {
private static final String FILE = "src/test/resources/lorem.txt";
@TestHTTPResource
URI uri;
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(FileResource.class, WithWriterInterceptor.class, WriterInterceptor.class));
@Test
public void testFiles() throws Exception {
String content = Files.readString(Path.of(FILE));
String contentLength = String.valueOf(content.length());
RestAssured.get("/providers/file/file")
.then()
.statusCode(200)
.header(HttpHeaders.CONTENT_LENGTH, contentLength)
.body(Matchers.equalTo(content));
RestAssured.given().header("Range", "bytes=0-9").get("/providers/file/file")
.then()
.statusCode(206)
.header(HttpHeaders.CONTENT_LENGTH, "10")
.header("Content-Range", "bytes 0-9/" + contentLength)
.body(Matchers.equalTo(content.substring(0, 10)));
RestAssured.given().header("Range", "bytes=10-19").get("/providers/file/file")
.then()
.statusCode(206)
.header(HttpHeaders.CONTENT_LENGTH, "10")
.header("Content-Range", "bytes 10-19/" + contentLength)
.body(Matchers.equalTo(content.substring(10, 20)));
RestAssured.given().header("Range", "bytes=10-").get("/providers/file/file")
.then()
.statusCode(206)
.header(HttpHeaders.CONTENT_LENGTH, String.valueOf(content.length() - 10))
.header("Content-Range", "bytes 10-" + (content.length() - 1) + "/" + contentLength)
.body(Matchers.equalTo(content.substring(10)));
RestAssured.given().header("Range", "bytes=-10").get("/providers/file/file")
.then()
.statusCode(206)
.header(HttpHeaders.CONTENT_LENGTH, "10")
.header("Content-Range",
"bytes " + (content.length() - 10) + "-" + (content.length() - 1) + "/" + contentLength)
.body(Matchers.equalTo(content.substring((content.length() - 10))));
RestAssured.given().header("Range", "bytes=" + (content.length() + 1) + "-").get("/providers/file/file")
.then()
.statusCode(200)
.header(HttpHeaders.CONTENT_LENGTH, contentLength)
.body(Matchers.equalTo(content));
RestAssured.given().header("Range", "bytes=0-1, 3-4").get("/providers/file/file")
.then()
.statusCode(200)
.header(HttpHeaders.CONTENT_LENGTH, contentLength)
.body(Matchers.equalTo(content));
RestAssured.get("/providers/file/file-partial")
.then()
.statusCode(200)
.header(HttpHeaders.CONTENT_LENGTH, "10")
.body(Matchers.equalTo(content.substring(20, 30)));
RestAssured.get("/providers/file/path")
.then()
.statusCode(200)
.header(HttpHeaders.CONTENT_LENGTH, contentLength)
.body(Matchers.equalTo(content));
RestAssured.get("/providers/file/path-partial")
.then()
.statusCode(200)
.header(HttpHeaders.CONTENT_LENGTH, "10")
.body(Matchers.equalTo(content.substring(20, 30)));
RestAssured.get("/providers/file/async-file")
.then()
.header(HttpHeaders.CONTENT_LENGTH, Matchers.nullValue())
.statusCode(200)
.body(Matchers.equalTo(content));
RestAssured.get("/providers/file/rest-response-async-file")
.then()
.header("foo", "bar")
.statusCode(200)
.body(Matchers.equalTo(content));
RestAssured.get("/providers/file/mutiny-async-file")
.then()
.header(HttpHeaders.CONTENT_LENGTH, Matchers.nullValue())
.statusCode(200)
.body(Matchers.equalTo(content));
RestAssured.get("/providers/file/async-file-partial")
.then()
.statusCode(200)
.header(HttpHeaders.CONTENT_LENGTH, "10")
.body(Matchers.equalTo(content.substring(20, 30)));
}
@Test
public void testChecks() throws IOException {
// creation-time checks
Path path = Paths.get(FILE);
// works
new PathPart(path, 10, 10);
new PathPart(path, 0, Files.size(path));
// fails
try {
new PathPart(path, -1, 10);
Assertions.fail();
} catch (IllegalArgumentException x) {
}
try {
new PathPart(path, 0, -1);
Assertions.fail();
} catch (IllegalArgumentException x) {
}
try {
new PathPart(path, 0, 1000);
Assertions.fail();
} catch (IllegalArgumentException x) {
}
try {
new PathPart(path, 250, 250);
Assertions.fail();
} catch (IllegalArgumentException x) {
}
File file = new File(FILE);
// works
new FilePart(file, 10, 10);
new FilePart(file, 0, file.length());
// fails
try {
new FilePart(file, -1, 10);
Assertions.fail();
} catch (IllegalArgumentException x) {
}
try {
new FilePart(file, 0, -1);
Assertions.fail();
} catch (IllegalArgumentException x) {
}
try {
new FilePart(file, 0, 1000);
Assertions.fail();
} catch (IllegalArgumentException x) {
}
try {
new FilePart(file, 250, 250);
Assertions.fail();
} catch (IllegalArgumentException x) {
}
}
@EnabledIfSystemProperty(named = "test-resteasy-reactive-large-files", matches = "true")
@Test
public void testWithLargeFile() {
RestAssured.given()
.get("/providers/file/large-path-rest-response")
.then()
.statusCode(200)
.contentType("application/octet-stream")
.header(HttpHeaders.CONTENT_DISPOSITION, "large-file")
.header(HttpHeaders.CONTENT_LENGTH, "1073741824");
}
}
|
FileTestCase
|
java
|
spring-projects__spring-framework
|
spring-core/src/main/java/org/springframework/util/ConcurrentLruCache.java
|
{
"start": 7685,
"end": 8318
}
|
class ____ implements Runnable {
final Node<K, V> node;
AddTask(Node<K, V> node) {
this.node = node;
}
@Override
public void run() {
currentSize.lazySet(currentSize.get() + 1);
if (this.node.get().isActive()) {
evictionQueue.add(this.node);
evictEntries();
}
}
private void evictEntries() {
while (currentSize.get() > capacity) {
final Node<K, V> node = evictionQueue.poll();
if (node == null) {
return;
}
cache.remove(node.key, node);
markAsRemoved(node);
}
}
}
/**
* Write operation recorded when an entry is removed to the cache.
*/
private final
|
AddTask
|
java
|
apache__kafka
|
streams/src/test/java/org/apache/kafka/streams/state/StreamsMetadataTest.java
|
{
"start": 1323,
"end": 5716
}
|
class ____ {
private static final HostInfo HOST_INFO = new HostInfo("local", 12);
public static final Set<String> STATE_STORE_NAMES = Set.of("store1", "store2");
private static final TopicPartition TP_0 = new TopicPartition("t", 0);
private static final TopicPartition TP_1 = new TopicPartition("t", 1);
public static final Set<TopicPartition> TOPIC_PARTITIONS = Set.of(TP_0, TP_1);
public static final Set<String> STAND_BY_STORE_NAMES = Set.of("store2");
public static final Set<TopicPartition> STANDBY_TOPIC_PARTITIONS = Set.of(TP_1);
private StreamsMetadata streamsMetadata;
@BeforeEach
public void setUp() {
streamsMetadata = new StreamsMetadataImpl(
HOST_INFO,
STATE_STORE_NAMES,
TOPIC_PARTITIONS,
STAND_BY_STORE_NAMES,
STANDBY_TOPIC_PARTITIONS
);
}
@Test
public void shouldNotAllowModificationOfInternalStateViaGetters() {
assertThat(isUnmodifiable(streamsMetadata.stateStoreNames()), is(true));
assertThat(isUnmodifiable(streamsMetadata.topicPartitions()), is(true));
assertThat(isUnmodifiable(streamsMetadata.standbyTopicPartitions()), is(true));
assertThat(isUnmodifiable(streamsMetadata.standbyStateStoreNames()), is(true));
}
@Test
public void shouldBeEqualsIfSameObject() {
final StreamsMetadata same = new StreamsMetadataImpl(
HOST_INFO,
STATE_STORE_NAMES,
TOPIC_PARTITIONS,
STAND_BY_STORE_NAMES,
STANDBY_TOPIC_PARTITIONS);
assertThat(streamsMetadata, equalTo(same));
assertThat(streamsMetadata.hashCode(), equalTo(same.hashCode()));
}
@Test
public void shouldNotBeEqualIfDifferInHostInfo() {
final StreamsMetadata differHostInfo = new StreamsMetadataImpl(
new HostInfo("different", 122),
STATE_STORE_NAMES,
TOPIC_PARTITIONS,
STAND_BY_STORE_NAMES,
STANDBY_TOPIC_PARTITIONS);
assertThat(streamsMetadata, not(equalTo(differHostInfo)));
assertThat(streamsMetadata.hashCode(), not(equalTo(differHostInfo.hashCode())));
}
@Test
public void shouldNotBeEqualIfDifferStateStoreNames() {
final StreamsMetadata differStateStoreNames = new StreamsMetadataImpl(
HOST_INFO,
Set.of("store1"),
TOPIC_PARTITIONS,
STAND_BY_STORE_NAMES,
STANDBY_TOPIC_PARTITIONS);
assertThat(streamsMetadata, not(equalTo(differStateStoreNames)));
assertThat(streamsMetadata.hashCode(), not(equalTo(differStateStoreNames.hashCode())));
}
@Test
public void shouldNotBeEqualIfDifferInTopicPartitions() {
final StreamsMetadata differTopicPartitions = new StreamsMetadataImpl(
HOST_INFO,
STATE_STORE_NAMES,
Set.of(TP_0),
STAND_BY_STORE_NAMES,
STANDBY_TOPIC_PARTITIONS);
assertThat(streamsMetadata, not(equalTo(differTopicPartitions)));
assertThat(streamsMetadata.hashCode(), not(equalTo(differTopicPartitions.hashCode())));
}
@Test
public void shouldNotBeEqualIfDifferInStandByStores() {
final StreamsMetadata differStandByStores = new StreamsMetadataImpl(
HOST_INFO,
STATE_STORE_NAMES,
TOPIC_PARTITIONS,
Set.of("store1"),
STANDBY_TOPIC_PARTITIONS);
assertThat(streamsMetadata, not(equalTo(differStandByStores)));
assertThat(streamsMetadata.hashCode(), not(equalTo(differStandByStores.hashCode())));
}
@Test
public void shouldNotBeEqualIfDifferInStandByTopicPartitions() {
final StreamsMetadata differStandByTopicPartitions = new StreamsMetadataImpl(
HOST_INFO,
STATE_STORE_NAMES,
TOPIC_PARTITIONS,
STAND_BY_STORE_NAMES,
Set.of(TP_0));
assertThat(streamsMetadata, not(equalTo(differStandByTopicPartitions)));
assertThat(streamsMetadata.hashCode(), not(equalTo(differStandByTopicPartitions.hashCode())));
}
private static boolean isUnmodifiable(final Collection<?> collection) {
try {
collection.clear();
return false;
} catch (final UnsupportedOperationException e) {
return true;
}
}
}
|
StreamsMetadataTest
|
java
|
apache__kafka
|
streams/src/main/java/org/apache/kafka/streams/kstream/internals/KStreamSlidingWindowAggregate.java
|
{
"start": 2172,
"end": 3900
}
|
class ____<KIn, VIn, VAgg> implements KStreamAggProcessorSupplier<KIn, VIn, Windowed<KIn>, VAgg> {
private static final Logger log = LoggerFactory.getLogger(KStreamSlidingWindowAggregate.class);
private final String storeName;
private final StoreFactory storeFactory;
private final SlidingWindows windows;
private final Initializer<VAgg> initializer;
private final Aggregator<? super KIn, ? super VIn, VAgg> aggregator;
private final EmitStrategy emitStrategy;
private boolean sendOldValues = false;
public KStreamSlidingWindowAggregate(final SlidingWindows windows,
final StoreFactory storeFactory,
final EmitStrategy emitStrategy,
final Initializer<VAgg> initializer,
final Aggregator<? super KIn, ? super VIn, VAgg> aggregator) {
this.windows = windows;
this.storeName = storeFactory.storeName();
this.storeFactory = storeFactory;
this.initializer = initializer;
this.aggregator = aggregator;
this.emitStrategy = emitStrategy;
}
@Override
public Set<StoreBuilder<?>> stores() {
return Collections.singleton(new FactoryWrappingStoreBuilder<>(storeFactory));
}
@Override
public Processor<KIn, VIn, Windowed<KIn>, Change<VAgg>> get() {
return new KStreamSlidingWindowAggregateProcessor(storeName, emitStrategy, sendOldValues);
}
public SlidingWindows windows() {
return windows;
}
@Override
public void enableSendingOldValues() {
sendOldValues = true;
}
private
|
KStreamSlidingWindowAggregate
|
java
|
alibaba__nacos
|
config/src/main/java/com/alibaba/nacos/config/server/service/repository/extrnal/ExternalHistoryConfigInfoPersistServiceImpl.java
|
{
"start": 3104,
"end": 14266
}
|
class ____ implements HistoryConfigInfoPersistService {
private DataSourceService dataSourceService;
protected JdbcTemplate jt;
protected TransactionTemplate tjt;
private MapperManager mapperManager;
public ExternalHistoryConfigInfoPersistServiceImpl() {
this.dataSourceService = DynamicDataSource.getInstance().getDataSource();
this.jt = dataSourceService.getJdbcTemplate();
this.tjt = dataSourceService.getTransactionTemplate();
Boolean isDataSourceLogEnable = EnvUtil.getProperty(CommonConstant.NACOS_PLUGIN_DATASOURCE_LOG, Boolean.class,
false);
this.mapperManager = MapperManager.instance(isDataSourceLogEnable);
}
@Override
public <E> PaginationHelper<E> createPaginationHelper() {
return new ExternalStoragePaginationHelperImpl<>(jt);
}
@Override
public void insertConfigHistoryAtomic(long id, ConfigInfo configInfo, String srcIp, String srcUser,
final Timestamp time, String ops, String publishType, String grayName, String extInfo) {
String appNameTmp = StringUtils.defaultEmptyIfBlank(configInfo.getAppName());
String tenantTmp = StringUtils.defaultEmptyIfBlank(configInfo.getTenant());
final String md5Tmp = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE);
String encryptedDataKey = StringUtils.defaultEmptyIfBlank(configInfo.getEncryptedDataKey());
String publishTypeTmp = StringUtils.defaultEmptyIfBlank(publishType);
String grayNameTemp = StringUtils.defaultEmptyIfBlank(grayName);
try {
HistoryConfigInfoMapper historyConfigInfoMapper = mapperManager.findMapper(
dataSourceService.getDataSourceType(), TableConstant.HIS_CONFIG_INFO);
jt.update(historyConfigInfoMapper.insert(
Arrays.asList("id", "data_id", "group_id", "tenant_id", "app_name", "content", "md5", "src_ip",
"src_user", "gmt_modified", "op_type", "publish_type", "gray_name", "ext_info",
"encrypted_data_key")), id, configInfo.getDataId(), configInfo.getGroup(), tenantTmp,
appNameTmp, configInfo.getContent(), md5Tmp, srcIp, srcUser, time, ops, publishTypeTmp,
grayNameTemp, extInfo, encryptedDataKey);
} catch (DataAccessException e) {
LogUtil.FATAL_LOG.error("[db-error] " + e, e);
throw e;
}
}
@Override
public void removeConfigHistory(final Timestamp startTime, final int limitSize) {
HistoryConfigInfoMapper historyConfigInfoMapper = mapperManager.findMapper(
dataSourceService.getDataSourceType(), TableConstant.HIS_CONFIG_INFO);
MapperContext context = new MapperContext();
context.putWhereParameter(FieldConstant.START_TIME, startTime);
context.putWhereParameter(FieldConstant.LIMIT_SIZE, limitSize);
MapperResult mapperResult = historyConfigInfoMapper.removeConfigHistory(context);
PaginationHelper<Object> paginationHelper = createPaginationHelper();
paginationHelper.updateLimit(mapperResult.getSql(), mapperResult.getParamList().toArray());
}
    /**
     * Finds history entries of deleted configs, keyset-paged by the last seen history id.
     *
     * @param startTime   time bound passed to the mapper's START_TIME predicate
     * @param startId     last max id from the previous page (LAST_MAX_ID), for pagination
     * @param pageSize    maximum number of rows to return
     * @param publishType publish type filter
     * @return lightweight state wrappers built from the matching history rows
     */
    @Override
    public List<ConfigInfoStateWrapper> findDeletedConfig(final Timestamp startTime, long startId, int pageSize,
            String publishType) {
        try {
            HistoryConfigInfoMapper historyConfigInfoMapper = mapperManager.findMapper(
                    dataSourceService.getDataSourceType(), TableConstant.HIS_CONFIG_INFO);
            MapperContext context = new MapperContext();
            context.putWhereParameter(FieldConstant.START_TIME, startTime);
            context.putWhereParameter(FieldConstant.PAGE_SIZE, pageSize);
            context.putWhereParameter(FieldConstant.LAST_MAX_ID, startId);
            context.putWhereParameter(FieldConstant.PUBLISH_TYPE, publishType);
            MapperResult mapperResult = historyConfigInfoMapper.findDeletedConfig(context);
            // NOTE(review): jt.query(String, Object[], RowMapper) is deprecated since Spring 5.3 —
            // consider the varargs overload; verify the Spring version in use.
            List<ConfigHistoryInfo> configHistoryInfos = jt.query(mapperResult.getSql(),
                    mapperResult.getParamList().toArray(), HISTORY_DETAIL_ROW_MAPPER);
            // Project full history rows down to the state-wrapper view used by callers.
            List<ConfigInfoStateWrapper> configInfoStateWrappers = new ArrayList<>();
            for (ConfigHistoryInfo configHistoryInfo : configHistoryInfos) {
                ConfigInfoStateWrapper configInfoStateWrapper = new ConfigInfoStateWrapper();
                configInfoStateWrapper.setId(configHistoryInfo.getId());
                configInfoStateWrapper.setDataId(configHistoryInfo.getDataId());
                configInfoStateWrapper.setGroup(configHistoryInfo.getGroup());
                configInfoStateWrapper.setTenant(configHistoryInfo.getTenant());
                configInfoStateWrapper.setMd5(configHistoryInfo.getMd5());
                configInfoStateWrapper.setLastModified(configHistoryInfo.getLastModifiedTime().getTime());
                configInfoStateWrapper.setGrayName(configHistoryInfo.getGrayName());
                configInfoStateWrappers.add(configInfoStateWrapper);
            }
            return configInfoStateWrappers;
        } catch (DataAccessException e) {
            LogUtil.FATAL_LOG.error("[db-error] " + e, e);
            throw e;
        }
    }
    /**
     * Lists the change history of one config, paged.
     *
     * @param dataId   config data id
     * @param group    config group
     * @param tenant   tenant; blank is normalized to the empty string
     * @param pageNo   1-based page number
     * @param pageSize rows per page
     * @return a page of history entries for the given config coordinates
     */
    @Override
    public Page<ConfigHistoryInfo> findConfigHistory(String dataId, String group, String tenant, int pageNo,
            int pageSize) {
        PaginationHelper<ConfigHistoryInfo> helper = createPaginationHelper();
        String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant;
        // MapperContext carries the computed row offset and the page size for the fetch SQL.
        MapperContext context = new MapperContext((pageNo - 1) * pageSize, pageSize);
        context.putWhereParameter(FieldConstant.DATA_ID, dataId);
        context.putWhereParameter(FieldConstant.GROUP_ID, group);
        context.putWhereParameter(FieldConstant.TENANT_ID, tenantTmp);
        HistoryConfigInfoMapper historyConfigInfoMapper = mapperManager.findMapper(
                dataSourceService.getDataSourceType(), TableConstant.HIS_CONFIG_INFO);
        String sqlCountRows = historyConfigInfoMapper.count(Arrays.asList("data_id", "group_id", "tenant_id"));
        MapperResult sqlFetchRows = historyConfigInfoMapper.pageFindConfigHistoryFetchRows(context);
        Page<ConfigHistoryInfo> page;
        try {
            page = helper.fetchPage(sqlCountRows, sqlFetchRows.getSql(), sqlFetchRows.getParamList().toArray(), pageNo,
                    pageSize, HISTORY_LIST_ROW_MAPPER);
        } catch (DataAccessException e) {
            LogUtil.FATAL_LOG.error("[list-config-history] error, dataId:{}, group:{}", new Object[] {dataId, group},
                    e);
            throw e;
        }
        return page;
    }
@Override
public ConfigHistoryInfo detailConfigHistory(Long nid) {
HistoryConfigInfoMapper historyConfigInfoMapper = mapperManager.findMapper(
dataSourceService.getDataSourceType(), TableConstant.HIS_CONFIG_INFO);
String sqlFetchRows = historyConfigInfoMapper.select(
Arrays.asList("nid", "data_id", "group_id", "tenant_id", "app_name", "content", "md5", "src_user",
"src_ip", "op_type", "gmt_create", "gmt_modified", "publish_type", "gray_name", "ext_info",
"encrypted_data_key"), Collections.singletonList("nid"));
try {
ConfigHistoryInfo historyInfo = jt.queryForObject(sqlFetchRows, new Object[] {nid},
HISTORY_DETAIL_ROW_MAPPER);
return historyInfo;
} catch (EmptyResultDataAccessException emptyResultDataAccessException) {
return null;
} catch (DataAccessException e) {
LogUtil.FATAL_LOG.error("[detail-config-history] error, nid:{}", new Object[] {nid}, e);
throw e;
}
}
    /**
     * Loads the previous history entry relative to the given id, as defined by the mapper's
     * {@code detailPreviousConfigHistory} SQL.
     *
     * @param id reference history id
     * @return the previous history entry, or {@code null} when none exists
     */
    @Override
    public ConfigHistoryInfo detailPreviousConfigHistory(Long id) {
        HistoryConfigInfoMapper historyConfigInfoMapper = mapperManager.findMapper(
                dataSourceService.getDataSourceType(), TableConstant.HIS_CONFIG_INFO);
        MapperContext context = new MapperContext();
        context.putWhereParameter(FieldConstant.ID, id);
        MapperResult sqlFetchRows = historyConfigInfoMapper.detailPreviousConfigHistory(context);
        try {
            ConfigHistoryInfo historyInfo = jt.queryForObject(sqlFetchRows.getSql(),
                    sqlFetchRows.getParamList().toArray(), HISTORY_DETAIL_ROW_MAPPER);
            return historyInfo;
        } catch (EmptyResultDataAccessException emptyResultDataAccessException) {
            // No previous entry: null means "not found" to callers.
            return null;
        } catch (DataAccessException e) {
            LogUtil.FATAL_LOG.error("[detail-previous-config-history] error, id:{}", new Object[] {id}, e);
            throw e;
        }
    }
    /**
     * Counts history entries matching the mapper's start-time predicate.
     *
     * @param startTime time bound bound to the START_TIME parameter
     * @return the row count; never null
     * @throws IllegalArgumentException if the count query unexpectedly returns no value
     */
    @Override
    public int findConfigHistoryCountByTime(final Timestamp startTime) {
        HistoryConfigInfoMapper historyConfigInfoMapper = mapperManager.findMapper(
                dataSourceService.getDataSourceType(), TableConstant.HIS_CONFIG_INFO);
        MapperContext context = new MapperContext();
        context.putWhereParameter(FieldConstant.START_TIME, startTime);
        MapperResult mapperResult = historyConfigInfoMapper.findConfigHistoryCountByTime(context);
        Integer result = jt.queryForObject(mapperResult.getSql(), mapperResult.getParamList().toArray(), Integer.class);
        if (result == null) {
            // A COUNT query should always yield a value; treat a null result as a programming error.
            throw new IllegalArgumentException("findConfigHistoryCountByTime error");
        }
        return result;
    }
    /**
     * Fetches the next history entry after {@code startNid} for the given config coordinates,
     * per the mapper's {@code getNextHistoryInfo} SQL.
     *
     * @return the next history entry, or {@code null} when none exists
     */
    @Override
    public ConfigHistoryInfo getNextHistoryInfo(String dataId, String group, String tenant, String publishType,
            String grayName, long startNid) {
        HistoryConfigInfoMapper historyConfigInfoMapper = mapperManager.findMapper(
                dataSourceService.getDataSourceType(), TableConstant.HIS_CONFIG_INFO);
        MapperContext context = new MapperContext();
        context.putWhereParameter(FieldConstant.DATA_ID, dataId);
        context.putWhereParameter(FieldConstant.GROUP_ID, group);
        context.putWhereParameter(FieldConstant.TENANT_ID, tenant);
        context.putWhereParameter(FieldConstant.PUBLISH_TYPE, publishType);
        context.putWhereParameter(FieldConstant.NID, startNid);
        context.putWhereParameter(FieldConstant.GRAY_NAME, grayName);
        MapperResult sqlFetchRows = historyConfigInfoMapper.getNextHistoryInfo(context);
        try {
            ConfigHistoryInfo historyInfo = jt.queryForObject(sqlFetchRows.getSql(),
                    sqlFetchRows.getParamList().toArray(), HISTORY_DETAIL_ROW_MAPPER);
            return historyInfo;
        } catch (EmptyResultDataAccessException emptyResultDataAccessException) {
            // No later entry: null means "not found" to callers.
            return null;
        } catch (DataAccessException e) {
            LogUtil.FATAL_LOG.error("[db-error] " + e, e);
            throw e;
        }
    }
}
|
ExternalHistoryConfigInfoPersistServiceImpl
|
java
|
apache__camel
|
core/camel-main/src/main/java/org/apache/camel/main/MetricsConfigurationProperties.java
|
{
"start": 1054,
"end": 12432
}
|
class ____ implements BootstrapCloseable {

    private MainConfigurationProperties parent;

    private boolean enabled;
    @Metadata(defaultValue = "default", enums = "default,legacy")
    private String namingStrategy;
    @Metadata(defaultValue = "true")
    private boolean enableRoutePolicy = true;
    @Metadata(defaultValue = "all", enums = "all,route,context")
    private String routePolicyLevel = "all";
    private boolean enableMessageHistory;
    @Metadata(defaultValue = "true")
    private boolean enableExchangeEventNotifier = true;
    @Metadata(defaultValue = "true")
    private boolean baseEndpointURIExchangeEventNotifier = true;
    @Metadata(defaultValue = "true")
    private boolean enableRouteEventNotifier = true;
    @Metadata(defaultValue = "false")
    private boolean enableInstrumentedThreadPoolFactory;
    @Metadata(defaultValue = "true")
    private boolean clearOnReload = true;
    @Metadata(defaultValue = "false")
    private boolean skipCamelInfo = false;
    @Metadata(defaultValue = "0.0.4", enums = "0.0.4,1.0.0")
    private String textFormatVersion = "0.0.4";
    @Metadata
    private String binders;
    @Metadata(defaultValue = "/observe/metrics")
    private String path = "/observe/metrics";

    public MetricsConfigurationProperties(MainConfigurationProperties parent) {
        this.parent = parent;
    }

    /**
     * Ends this configuration block and returns to the parent configuration.
     */
    public MainConfigurationProperties end() {
        return parent;
    }

    public boolean isEnabled() {
        return enabled;
    }

    /**
     * To enable Micrometer metrics.
     */
    public void setEnabled(boolean enabled) {
        this.enabled = enabled;
    }

    public String getNamingStrategy() {
        return namingStrategy;
    }

    /**
     * Controls the name style to use for metrics.
     *
     * Default = uses micrometer naming convention. Legacy = uses the classic naming style (camelCase)
     */
    public void setNamingStrategy(String namingStrategy) {
        this.namingStrategy = namingStrategy;
    }

    public boolean isEnableRoutePolicy() {
        return enableRoutePolicy;
    }

    /**
     * Set whether to enable the MicrometerRoutePolicyFactory for capturing metrics on route processing times.
     */
    public void setEnableRoutePolicy(boolean enableRoutePolicy) {
        this.enableRoutePolicy = enableRoutePolicy;
    }

    public String getRoutePolicyLevel() {
        return routePolicyLevel;
    }

    /**
     * Sets the level of information to capture. all = both context and routes.
     */
    public void setRoutePolicyLevel(String routePolicyLevel) {
        this.routePolicyLevel = routePolicyLevel;
    }

    public boolean isEnableMessageHistory() {
        return enableMessageHistory;
    }

    /**
     * Set whether to enable the MicrometerMessageHistoryFactory for capturing metrics on individual route node
     * processing times.
     *
     * Depending on the number of configured route nodes, there is the potential to create a large volume of metrics.
     * Therefore, this option is disabled by default.
     */
    public void setEnableMessageHistory(boolean enableMessageHistory) {
        this.enableMessageHistory = enableMessageHistory;
    }

    public boolean isEnableExchangeEventNotifier() {
        return enableExchangeEventNotifier;
    }

    /**
     * Set whether to enable the MicrometerExchangeEventNotifier for capturing metrics on exchange processing times.
     */
    public void setEnableExchangeEventNotifier(boolean enableExchangeEventNotifier) {
        this.enableExchangeEventNotifier = enableExchangeEventNotifier;
    }

    public boolean isBaseEndpointURIExchangeEventNotifier() {
        return baseEndpointURIExchangeEventNotifier;
    }

    /**
     * Whether to use static or dynamic values for Endpoint Name tags in captured metrics.
     *
     * By default, static values are used.
     *
     * When using dynamic tags, then a dynamic to (toD) can compute many different endpoint URIs that, can lead to many
     * tags as the URI is dynamic, so use this with care if setting this option to false.
     */
    public void setBaseEndpointURIExchangeEventNotifier(boolean baseEndpointURIExchangeEventNotifier) {
        this.baseEndpointURIExchangeEventNotifier = baseEndpointURIExchangeEventNotifier;
    }

    public boolean isEnableRouteEventNotifier() {
        return enableRouteEventNotifier;
    }

    /**
     * Set whether to enable the MicrometerRouteEventNotifier for capturing metrics on the total number of routes and
     * total number of routes running.
     */
    public void setEnableRouteEventNotifier(boolean enableRouteEventNotifier) {
        this.enableRouteEventNotifier = enableRouteEventNotifier;
    }

    public boolean isEnableInstrumentedThreadPoolFactory() {
        return enableInstrumentedThreadPoolFactory;
    }

    /**
     * Set whether to gather performance information about Camel Thread Pools by injecting an
     * InstrumentedThreadPoolFactory.
     */
    public void setEnableInstrumentedThreadPoolFactory(boolean enableInstrumentedThreadPoolFactory) {
        this.enableInstrumentedThreadPoolFactory = enableInstrumentedThreadPoolFactory;
    }

    public boolean isClearOnReload() {
        return clearOnReload;
    }

    /**
     * Clear the captured metrics data when Camel is reloading routes such as when using Camel JBang.
     */
    public void setClearOnReload(boolean clearOnReload) {
        this.clearOnReload = clearOnReload;
    }

    public boolean isSkipCamelInfo() {
        return skipCamelInfo;
    }

    /**
     * Skip the evaluation of "app.info" metric which contains runtime provider information (default, `false`).
     */
    public void setSkipCamelInfo(boolean skipCamelInfo) {
        this.skipCamelInfo = skipCamelInfo;
    }

    public String getTextFormatVersion() {
        return textFormatVersion;
    }

    /**
     * The text-format version to use with Prometheus scraping.
     *
     * 0.0.4 = text/plain; version=0.0.4; charset=utf-8 1.0.0 = application/openmetrics-text; version=1.0.0;
     * charset=utf-8
     */
    public void setTextFormatVersion(String textFormatVersion) {
        this.textFormatVersion = textFormatVersion;
    }

    public String getBinders() {
        return binders;
    }

    /**
     * Additional Micrometer binders to include such as jvm-memory, processor, jvm-thread, and so forth. Multiple
     * binders can be separated by comma.
     *
     * The following binders currently is available from Micrometer: class-loader, commons-object-pool2,
     * file-descriptor, hystrix-metrics-binder, jvm-compilation, jvm-gc, jvm-heap-pressure, jvm-info, jvm-memory,
     * jvm-thread, log4j2, logback, processor, uptime
     */
    public void setBinders(String binders) {
        this.binders = binders;
    }

    public String getPath() {
        return path;
    }

    /**
     * The path endpoint used to expose the metrics.
     */
    public void setPath(String path) {
        this.path = path;
    }

    @Override
    public void close() {
        // Release the back-reference so the parent configuration can be garbage collected.
        parent = null;
    }

    /**
     * Set whether to enable the MicrometerRoutePolicyFactory for capturing metrics on route processing times.
     */
    public MetricsConfigurationProperties withEnableRoutePolicy(boolean enableRoutePolicy) {
        this.enableRoutePolicy = enableRoutePolicy;
        return this;
    }

    /**
     * Sets the level of information to capture. all = both context and routes.
     */
    public MetricsConfigurationProperties withRoutePolicyLevel(String routePolicyLevel) {
        this.routePolicyLevel = routePolicyLevel;
        return this;
    }

    /**
     * To enable Micrometer metrics.
     */
    public MetricsConfigurationProperties withEnabled(boolean enabled) {
        this.enabled = enabled;
        return this;
    }

    /**
     * Controls the name style to use for metrics.
     *
     * Default = uses micrometer naming convention. Legacy = uses the classic naming style (camelCase)
     */
    public MetricsConfigurationProperties withNamingStrategy(String namingStrategy) {
        this.namingStrategy = namingStrategy;
        return this;
    }

    /**
     * Set whether to enable the MicrometerMessageHistoryFactory for capturing metrics on individual route node
     * processing times.
     *
     * Depending on the number of configured route nodes, there is the potential to create a large volume of metrics.
     * Therefore, this option is disabled by default.
     */
    public MetricsConfigurationProperties withEnableMessageHistory(boolean enableMessageHistory) {
        this.enableMessageHistory = enableMessageHistory;
        return this;
    }

    /**
     * Set whether to enable the MicrometerExchangeEventNotifier for capturing metrics on exchange processing times.
     */
    public MetricsConfigurationProperties withEnableExchangeEventNotifier(boolean enableExchangeEventNotifier) {
        this.enableExchangeEventNotifier = enableExchangeEventNotifier;
        return this;
    }

    /**
     * Set whether to enable the MicrometerRouteEventNotifier for capturing metrics on the total number of routes and
     * total number of routes running.
     *
     * @deprecated the method name contains a typo; use {@link #withEnableRouteEventNotifier(boolean)} instead.
     */
    @Deprecated
    public MetricsConfigurationProperties witheEnableRouteEventNotifier(boolean enableRouteEventNotifier) {
        // Kept for backward compatibility with existing callers of the misspelled name.
        return withEnableRouteEventNotifier(enableRouteEventNotifier);
    }

    /**
     * Set whether to enable the MicrometerRouteEventNotifier for capturing metrics on the total number of routes and
     * total number of routes running.
     */
    public MetricsConfigurationProperties withEnableRouteEventNotifier(boolean enableRouteEventNotifier) {
        this.enableRouteEventNotifier = enableRouteEventNotifier;
        return this;
    }

    /**
     * Set whether to gather performance information about Camel Thread Pools by injecting an
     * InstrumentedThreadPoolFactory.
     */
    public MetricsConfigurationProperties withEnableInstrumentedThreadPoolFactory(boolean enableInstrumentedThreadPoolFactory) {
        this.enableInstrumentedThreadPoolFactory = enableInstrumentedThreadPoolFactory;
        return this;
    }

    /**
     * Clear the captured metrics data when Camel is reloading routes such as when using Camel JBang.
     */
    public MetricsConfigurationProperties withClearOnReload(boolean clearOnReload) {
        this.clearOnReload = clearOnReload;
        return this;
    }

    /**
     * Skip the evaluation of "app.info" metric which contains runtime provider information (default, `false`).
     */
    public MetricsConfigurationProperties withSkipCamelInfo(boolean skipCamelInfo) {
        this.skipCamelInfo = skipCamelInfo;
        return this;
    }

    /**
     * The text-format version to use with Prometheus scraping.
     *
     * 0.0.4 = text/plain; version=0.0.4; charset=utf-8 1.0.0 = application/openmetrics-text; version=1.0.0;
     * charset=utf-8
     */
    public MetricsConfigurationProperties withTextFormatVersion(String textFormatVersion) {
        this.textFormatVersion = textFormatVersion;
        return this;
    }

    /**
     * Additional Micrometer binders to include such as jvm-memory, processor, jvm-thread, and so forth. Multiple
     * binders can be separated by comma.
     *
     * The following binders currently is available from Micrometer: class-loader, commons-object-pool2,
     * file-descriptor, hystrix-metrics-binder, jvm-compilation, jvm-gc, jvm-heap-pressure, jvm-info, jvm-memory,
     * jvm-thread, log4j2, logback, processor, uptime
     */
    public MetricsConfigurationProperties withBinders(String binders) {
        this.binders = binders;
        return this;
    }
}
|
MetricsConfigurationProperties
|
java
|
google__dagger
|
java/dagger/testing/compile/CompilerProcessors.java
|
{
"start": 1575,
"end": 2479
}
|
/** A javac annotation processor that runs the given XProcessing steps. */
class ____ extends JavacBasicAnnotationProcessor {
  private final ImmutableCollection<XProcessingStep> processingSteps;

  JavacProcessor(ImmutableCollection<XProcessingStep> processingSteps) {
    // Apply the shared test processing-environment configuration.
    super(options -> CompilerTests.PROCESSING_ENV_CONFIG);
    this.processingSteps = processingSteps;
  }

  @Override
  public void initialize(XProcessingEnv env) {}

  @Override
  public SourceVersion getSupportedSourceVersion() {
    // Support whatever source version the running compiler provides.
    return SourceVersion.latestSupported();
  }

  @Override
  public ImmutableSet<String> getSupportedOptions() {
    return ImmutableSet.of();
  }

  @Override
  public ImmutableCollection<XProcessingStep> processingSteps() {
    return processingSteps;
  }

  @Override
  public void postRound(XProcessingEnv env, XRoundEnv roundEnv) {}
}
/** A KSP processor that runs the given processing steps. */
static final
|
JavacProcessor
|
java
|
apache__maven
|
its/core-it-suite/src/test/java/org/apache/maven/it/MavenITmng3899ExtensionInheritanceTest.java
|
{
"start": 1134,
"end": 2676
}
|
class ____ extends AbstractMavenIntegrationTestCase {

    /**
     * Verifies that build extensions declared across the inheritance chain are properly merged:
     * after running the child build, the effective model must contain exactly three extensions
     * with the expected artifactId/version pairs in order.
     *
     * @throws Exception in case of failure
     */
    @Test
    public void testitMNG3899() throws Exception {
        File testDir = extractResources("/mng-3899");

        Verifier verifier = newVerifier(new File(testDir, "sub").getAbsolutePath());
        verifier.setAutoclean(false);
        verifier.deleteDirectory("target");
        verifier.deleteArtifacts("org.apache.maven.its.mng3899");
        verifier.filterFile("settings-template.xml", "settings.xml");
        verifier.addCliArgument("--settings");
        verifier.addCliArgument("settings.xml");
        verifier.addCliArgument("validate");
        verifier.execute();
        verifier.verifyErrorFreeLog();

        Properties props = verifier.loadProperties("target/extension.properties");
        assertEquals("3", props.getProperty("project.build.extensions"));

        // Expected (artifactId, version) pairs, indexed by extension position.
        String[][] expected = {{"b", "0.1"}, {"a", "0.2"}, {"c", "0.1"}};
        for (int i = 0; i < expected.length; i++) {
            assertEquals(expected[i][0], props.getProperty("project.build.extensions." + i + ".artifactId"));
            assertEquals(expected[i][1], props.getProperty("project.build.extensions." + i + ".version"));
        }
    }
}
|
MavenITmng3899ExtensionInheritanceTest
|
java
|
spring-cloud__spring-cloud-gateway
|
spring-cloud-gateway-server-webflux/src/main/java/org/springframework/cloud/gateway/filter/factory/JsonToGrpcGatewayFilterFactory.java
|
{
"start": 5447,
"end": 11952
}
|
/**
 * Response decorator that bridges a JSON HTTP exchange to a gRPC call: the JSON request body is
 * parsed into a protobuf {@link DynamicMessage}, sent to the downstream gRPC service resolved from
 * the route URI, and the gRPC response is serialized back to JSON on the HTTP response.
 */
class ____ extends ServerHttpResponseDecorator {
    private final ServerWebExchange exchange;
    // Input message type of the target gRPC method, resolved from the configured proto descriptor.
    private final Descriptors.Descriptor descriptor;
    private final ObjectReader objectReader;
    private final ClientCall<DynamicMessage, DynamicMessage> clientCall;
    // Reused as an empty JSON object when the incoming request has no body.
    private final ObjectNode objectNode;
    GRPCResponseDecorator(ServerWebExchange exchange, Config config) {
        super(exchange.getResponse());
        this.exchange = exchange;
        try {
            Descriptors.MethodDescriptor methodDescriptor = getMethodDescriptor(config);
            Descriptors.ServiceDescriptor serviceDescriptor = methodDescriptor.getService();
            Descriptors.Descriptor outputType = methodDescriptor.getOutputType();
            this.descriptor = methodDescriptor.getInputType();
            clientCall = createClientCallForType(config, serviceDescriptor, outputType);
            // FAIL_ON_EMPTY_BEANS disabled so empty protobuf-derived objects serialize cleanly.
            ObjectMapper objectMapper = JsonMapper.builder()
                .configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false)
                .build();
            objectReader = objectMapper.readerFor(JsonNode.class);
            objectNode = objectMapper.createObjectNode();
        }
        catch (IOException | Descriptors.DescriptorValidationException e) {
            throw new RuntimeException(e);
        }
    }
    @Override
    public Mono<Void> writeWith(Publisher<? extends DataBuffer> body) {
        // The gRPC response is rendered as JSON regardless of the downstream content type.
        exchange.getResponse().getHeaders().set("Content-Type", "application/json");
        return getDelegate().writeWith(deserializeJSONRequest().map(callGRPCServer())
            .map(serialiseGRPCResponse())
            .map(wrapGRPCResponse())
            .cast(DataBuffer.class)
            .last());
    }
    // Builds a dynamic-message client call for the configured service/method over a new channel.
    private ClientCall<DynamicMessage, DynamicMessage> createClientCallForType(Config config,
            Descriptors.ServiceDescriptor serviceDescriptor, Descriptors.Descriptor outputType) {
        MethodDescriptor.Marshaller<DynamicMessage> marshaller = ProtoUtils
            .marshaller(DynamicMessage.newBuilder(outputType).build());
        MethodDescriptor<DynamicMessage, DynamicMessage> methodDescriptor = MethodDescriptor
            .<DynamicMessage, DynamicMessage>newBuilder()
            .setType(MethodDescriptor.MethodType.UNKNOWN)
            .setFullMethodName(
                    MethodDescriptor.generateFullMethodName(serviceDescriptor.getFullName(), config.getMethod()))
            .setRequestMarshaller(marshaller)
            .setResponseMarshaller(marshaller)
            .build();
        Channel channel = createChannel();
        return channel.newCall(methodDescriptor, CallOptions.DEFAULT);
    }
    // Resolves the configured service/method from the proto descriptor file on the classpath/filesystem.
    private Descriptors.MethodDescriptor getMethodDescriptor(Config config)
            throws IOException, Descriptors.DescriptorValidationException {
        Objects.requireNonNull(config.getProtoDescriptor(), "Proto Descriptor must not be null");
        Resource descriptorFile = resourceLoader.getResource(config.getProtoDescriptor());
        DescriptorProtos.FileDescriptorSet fileDescriptorSet = DescriptorProtos.FileDescriptorSet
            .parseFrom(descriptorFile.getInputStream());
        // NOTE(review): only the first file in the descriptor set is searched for the service —
        // confirm descriptors are generated with the target file first.
        DescriptorProtos.FileDescriptorProto fileProto = fileDescriptorSet.getFile(0);
        Descriptors.FileDescriptor fileDescriptor = Descriptors.FileDescriptor.buildFrom(fileProto,
                dependencies(fileDescriptorSet, fileProto.getDependencyList()));
        Descriptors.ServiceDescriptor serviceDescriptor = fileDescriptor.findServiceByName(config.getService());
        if (serviceDescriptor == null) {
            throw new NoSuchElementException("No Service found");
        }
        List<Descriptors.MethodDescriptor> methods = serviceDescriptor.getMethods();
        return methods.stream()
            .filter(method -> method.getName().equals(config.getMethod()))
            .findFirst()
            .orElseThrow(() -> new NoSuchElementException("No Method found"));
    }
    // Recursively builds the FileDescriptor dependency graph declared by the descriptor set.
    private FileDescriptor[] dependencies(FileDescriptorSet input, ProtocolStringList list) {
        FileDescriptor[] deps = new FileDescriptor[list.size()];
        for (int i = 0; i < list.size(); i++) {
            String name = list.get(i);
            FileDescriptorProto file = findFileByName(input, name);
            if (file == null) {
                throw new IllegalStateException("Missing dependency: " + name);
            }
            try {
                deps[i] = FileDescriptor.buildFrom(file, dependencies(input, file.getDependencyList()));
            }
            catch (DescriptorValidationException e) {
                throw new IllegalStateException("Invalid descriptor: " + file.getName(), e);
            }
        }
        return deps;
    }
    // Linear scan of the descriptor set for a file by name; null when absent.
    private @Nullable FileDescriptorProto findFileByName(FileDescriptorSet input, String name) {
        for (FileDescriptorProto file : input.getFileList()) {
            if (file.getName().equals(name)) {
                return file;
            }
        }
        return null;
    }
    // Derives the gRPC target host/port from the matched route's URI.
    private ManagedChannel createChannel() {
        Route route = (Route) exchange.getAttributes().get(ServerWebExchangeUtils.GATEWAY_ROUTE_ATTR);
        URI requestURI = Objects.requireNonNull(route, "Route not found in exchange attributes").getUri();
        return createChannelChannel(requestURI.getHost(), requestURI.getPort());
    }
    // Merges the JSON request into a DynamicMessage and performs a blocking unary call.
    private Function<JsonNode, DynamicMessage> callGRPCServer() {
        return jsonRequest -> {
            try {
                DynamicMessage.Builder builder = DynamicMessage.newBuilder(descriptor);
                JsonFormat.parser().merge(jsonRequest.toString(), builder);
                return ClientCalls.blockingUnaryCall(clientCall, builder.build());
            }
            catch (IOException e) {
                throw new RuntimeException(e);
            }
        };
    }
    // Prints the gRPC response to compact JSON and re-parses it into a JsonNode.
    private Function<DynamicMessage, Object> serialiseGRPCResponse() {
        return gRPCResponse -> {
            try {
                return objectReader
                    .readValue(JsonFormat.printer().omittingInsignificantWhitespace().print(gRPCResponse));
            }
            catch (IOException e) {
                throw new RuntimeException(e);
            }
        };
    }
    // Decodes each request body buffer to JSON; an empty buffer becomes an empty JSON object.
    private Flux<JsonNode> deserializeJSONRequest() {
        return exchange.getRequest().getBody().mapNotNull(dataBufferBody -> {
            if (dataBufferBody.capacity() == 0) {
                return objectNode;
            }
            ResolvableType targetType = ResolvableType.forType(JsonNode.class);
            return new JacksonJsonDecoder().decode(dataBufferBody, targetType, null, null);
        }).cast(JsonNode.class);
    }
    // Wraps the serialized JSON response into a Netty data buffer for the HTTP response.
    private Function<Object, DataBuffer> wrapGRPCResponse() {
        return jsonResponse -> new NettyDataBufferFactory(new PooledByteBufAllocator())
            .wrap(Objects.requireNonNull(new ObjectMapper().writeValueAsBytes(jsonResponse)));
    }
    // We are creating this on every call, should optimize?
    private ManagedChannel createChannelChannel(String host, int port) {
        NettyChannelBuilder nettyChannelBuilder = NettyChannelBuilder.forAddress(host, port);
        try {
            return grpcSslConfigurer.configureSsl(nettyChannelBuilder);
        }
        catch (SSLException e) {
            throw new RuntimeException(e);
        }
    }
}
}
|
GRPCResponseDecorator
|
java
|
apache__kafka
|
clients/src/test/java/org/apache/kafka/common/security/oauthbearer/internals/OAuthBearerSaslClientTest.java
|
{
"start": 1813,
"end": 2206
}
|
class ____ {
private static final Map<String, String> TEST_PROPERTIES = new LinkedHashMap<>() {
{
put("One", "1");
put("Two", "2");
put("Three", "3");
}
};
private SaslExtensions testExtensions = new SaslExtensions(TEST_PROPERTIES);
private final String errorMessage = "Error as expected!";
public
|
OAuthBearerSaslClientTest
|
java
|
google__guava
|
android/guava/src/com/google/common/io/BaseEncoding.java
|
{
"start": 39261,
"end": 42668
}
|
class ____ extends BaseEncoding {
  private final BaseEncoding delegate;
  private final String separator;
  private final int afterEveryChars;

  SeparatedBaseEncoding(BaseEncoding delegate, String separator, int afterEveryChars) {
    this.delegate = checkNotNull(delegate);
    this.separator = checkNotNull(separator);
    this.afterEveryChars = afterEveryChars;
    checkArgument(
        afterEveryChars > 0, "Cannot add a separator after every %s chars", afterEveryChars);
  }

  /**
   * Returns {@code chars} with every character that occurs in {@code separator} removed. Shared by
   * {@link #canDecode} and {@link #decodeTo}, which must agree on what they strip.
   */
  private CharSequence stripSeparators(CharSequence chars) {
    StringBuilder stripped = new StringBuilder(chars.length());
    for (int i = 0; i < chars.length(); i++) {
      char c = chars.charAt(i);
      if (separator.indexOf(c) < 0) {
        stripped.append(c);
      }
    }
    return stripped;
  }

  @Override
  CharSequence trimTrailingPadding(CharSequence chars) {
    return delegate.trimTrailingPadding(chars);
  }

  @Override
  int maxEncodedSize(int bytes) {
    // Unseparated size plus one separator after every afterEveryChars encoded chars.
    int unseparatedSize = delegate.maxEncodedSize(bytes);
    return unseparatedSize
        + separator.length() * divide(max(0, unseparatedSize - 1), afterEveryChars, FLOOR);
  }

  @J2ktIncompatible
  @GwtIncompatible // Writer,OutputStream
  @Override
  public OutputStream encodingStream(Writer output) {
    return delegate.encodingStream(separatingWriter(output, separator, afterEveryChars));
  }

  @Override
  void encodeTo(Appendable target, byte[] bytes, int off, int len) throws IOException {
    delegate.encodeTo(separatingAppendable(target, separator, afterEveryChars), bytes, off, len);
  }

  @Override
  int maxDecodedSize(int chars) {
    return delegate.maxDecodedSize(chars);
  }

  @Override
  public boolean canDecode(CharSequence chars) {
    // Separators are ignored when decoding, so strip them before delegating.
    return delegate.canDecode(stripSeparators(chars));
  }

  @Override
  int decodeTo(byte[] target, CharSequence chars) throws DecodingException {
    return delegate.decodeTo(target, stripSeparators(chars));
  }

  @Override
  @J2ktIncompatible
  @GwtIncompatible // Reader,InputStream
  public InputStream decodingStream(Reader reader) {
    return delegate.decodingStream(ignoringReader(reader, separator));
  }

  @Override
  public BaseEncoding omitPadding() {
    return delegate.omitPadding().withSeparator(separator, afterEveryChars);
  }

  @Override
  public BaseEncoding withPadChar(char padChar) {
    return delegate.withPadChar(padChar).withSeparator(separator, afterEveryChars);
  }

  @Override
  public BaseEncoding withSeparator(String separator, int afterEveryChars) {
    throw new UnsupportedOperationException("Already have a separator");
  }

  @Override
  public BaseEncoding upperCase() {
    return delegate.upperCase().withSeparator(separator, afterEveryChars);
  }

  @Override
  public BaseEncoding lowerCase() {
    return delegate.lowerCase().withSeparator(separator, afterEveryChars);
  }

  @Override
  public BaseEncoding ignoreCase() {
    return delegate.ignoreCase().withSeparator(separator, afterEveryChars);
  }

  @Override
  public String toString() {
    return delegate + ".withSeparator(\"" + separator + "\", " + afterEveryChars + ")";
  }
}
}
|
SeparatedBaseEncoding
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/ValuesBooleanAggregator.java
|
{
"start": 1004,
"end": 2629
}
|
class ____ {
    /** Creates the non-grouping state used to collect which boolean values were seen. */
    public static SingleState initSingle() {
        return new SingleState();
    }
    /** Records in the single state that the value {@code v} was observed. */
    public static void combine(SingleState state, boolean v) {
        if (v) {
            state.seenTrue = true;
        } else {
            state.seenFalse = true;
        }
    }
    /** Folds the intermediate values at position 0 of {@code values} into the single state. */
    public static void combineIntermediate(SingleState state, BooleanBlock values) {
        int start = values.getFirstValueIndex(0);
        int end = start + values.getValueCount(0);
        for (int i = start; i < end; i++) {
            combine(state, values.getBoolean(i));
        }
    }
    /** Renders the single state as an output block using the driver's block factory. */
    public static Block evaluateFinal(SingleState state, DriverContext driverContext) {
        return state.toBlock(driverContext.blockFactory());
    }
    /** Creates the per-group state, backed by {@code bigArrays}. */
    public static GroupingState initGrouping(BigArrays bigArrays) {
        return new GroupingState(bigArrays);
    }
    /**
     * Records value {@code v} for {@code groupId}. Each group owns two bits in the bitset:
     * bit 2*groupId marks "false seen" and bit 2*groupId+1 marks "true seen".
     */
    public static void combine(GroupingState state, int groupId, boolean v) {
        long index = ((long) groupId) << 1 | (v ? 1 : 0);
        state.values.set(index);
    }
    /** Folds the intermediate values at {@code valuesPosition} into the state for {@code groupId}. */
    public static void combineIntermediate(GroupingState state, int groupId, BooleanBlock values, int valuesPosition) {
        int start = values.getFirstValueIndex(valuesPosition);
        int end = start + values.getValueCount(valuesPosition);
        for (int i = start; i < end; i++) {
            combine(state, groupId, values.getBoolean(i));
        }
    }
    /** Renders the grouping state for the selected group ids as an output block. */
    public static Block evaluateFinal(GroupingState state, IntVector selected, GroupingAggregatorEvaluationContext ctx) {
        return state.toBlock(ctx.blockFactory(), selected);
    }
public static
|
ValuesBooleanAggregator
|
java
|
grpc__grpc-java
|
examples/src/main/java/io/grpc/examples/errorhandling/ErrorHandlingClient.java
|
{
"start": 1862,
"end": 7317
}
|
class ____ {
  /** Entry point: runs all error-handling examples against an in-process failing server. */
  public static void main(String [] args) throws Exception {
    new ErrorHandlingClient().run();
  }
private ManagedChannel channel;
  /**
   * Starts a local Greeter server whose sayHello always fails with INTERNAL, then exercises each
   * client call style (blocking, future, callback, async) to demonstrate error handling, and
   * finally shuts everything down.
   */
  void run() throws Exception {
    // Port 0 means that the operating system will pick an available port to use.
    Server server = Grpc.newServerBuilderForPort(0, InsecureServerCredentials.create())
        .addService(new GreeterGrpc.GreeterImplBase() {
          @Override
          public void sayHello(HelloRequest request, StreamObserver<HelloReply> responseObserver) {
            // The server will always fail, and we'll see this failure on client-side. The exception is
            // not sent to the client, only the status code (i.e., INTERNAL) and description.
            responseObserver.onError(Status.INTERNAL
                .withDescription("Eggplant Xerxes Crybaby Overbite Narwhal").asRuntimeException());
          }
        }).build().start();
    channel = Grpc.newChannelBuilderForAddress(
        "localhost", server.getPort(), InsecureChannelCredentials.create()).build();

    blockingCall();
    futureCallDirect();
    futureCallCallback();
    asyncCall();
    advancedAsyncCall();

    channel.shutdown();
    server.shutdown();
    channel.awaitTermination(1, TimeUnit.SECONDS);
    server.awaitTermination();
  }
  /** Demonstrates error handling with the blocking stub: the failure surfaces as a thrown exception. */
  void blockingCall() {
    GreeterBlockingStub stub = GreeterGrpc.newBlockingStub(channel);
    try {
      stub.sayHello(HelloRequest.newBuilder().setName("Bart").build());
    } catch (Exception e) {
      Status status = Status.fromThrowable(e);
      Verify.verify(status.getCode() == Status.Code.INTERNAL);
      Verify.verify(status.getDescription().contains("Eggplant"));
      // Cause is not transmitted over the wire.
    }
  }
void futureCallDirect() {
GreeterFutureStub stub = GreeterGrpc.newFutureStub(channel);
ListenableFuture<HelloReply> response =
stub.sayHello(HelloRequest.newBuilder().setName("Lisa").build());
try {
response.get();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new RuntimeException(e);
} catch (ExecutionException e) {
Status status = Status.fromThrowable(e.getCause());
Verify.verify(status.getCode() == Status.Code.INTERNAL);
Verify.verify(status.getDescription().contains("Xerxes"));
// Cause is not transmitted over the wire.
}
}
void futureCallCallback() {
GreeterFutureStub stub = GreeterGrpc.newFutureStub(channel);
ListenableFuture<HelloReply> response =
stub.sayHello(HelloRequest.newBuilder().setName("Maggie").build());
final CountDownLatch latch = new CountDownLatch(1);
Futures.addCallback(
response,
new FutureCallback<HelloReply>() {
@Override
public void onSuccess(@Nullable HelloReply result) {
// Won't be called, since the server in this example always fails.
}
@Override
public void onFailure(Throwable t) {
Status status = Status.fromThrowable(t);
Verify.verify(status.getCode() == Status.Code.INTERNAL);
Verify.verify(status.getDescription().contains("Crybaby"));
// Cause is not transmitted over the wire..
latch.countDown();
}
},
directExecutor());
if (!Uninterruptibles.awaitUninterruptibly(latch, 1, TimeUnit.SECONDS)) {
throw new RuntimeException("timeout!");
}
}
void asyncCall() {
GreeterStub stub = GreeterGrpc.newStub(channel);
HelloRequest request = HelloRequest.newBuilder().setName("Homer").build();
final CountDownLatch latch = new CountDownLatch(1);
StreamObserver<HelloReply> responseObserver = new StreamObserver<HelloReply>() {
@Override
public void onNext(HelloReply value) {
// Won't be called.
}
@Override
public void onError(Throwable t) {
Status status = Status.fromThrowable(t);
Verify.verify(status.getCode() == Status.Code.INTERNAL);
Verify.verify(status.getDescription().contains("Overbite"));
// Cause is not transmitted over the wire..
latch.countDown();
}
@Override
public void onCompleted() {
// Won't be called, since the server in this example always fails.
}
};
stub.sayHello(request, responseObserver);
if (!Uninterruptibles.awaitUninterruptibly(latch, 1, TimeUnit.SECONDS)) {
throw new RuntimeException("timeout!");
}
}
/**
* This is more advanced and does not make use of the stub. You should not normally need to do
* this, but here is how you would.
*/
void advancedAsyncCall() {
ClientCall<HelloRequest, HelloReply> call =
channel.newCall(GreeterGrpc.getSayHelloMethod(), CallOptions.DEFAULT);
final CountDownLatch latch = new CountDownLatch(1);
call.start(new ClientCall.Listener<HelloReply>() {
@Override
public void onClose(Status status, Metadata trailers) {
Verify.verify(status.getCode() == Status.Code.INTERNAL);
Verify.verify(status.getDescription().contains("Narwhal"));
// Cause is not transmitted over the wire.
latch.countDown();
}
}, new Metadata());
call.sendMessage(HelloRequest.newBuilder().setName("Marge").build());
call.halfClose();
if (!Uninterruptibles.awaitUninterruptibly(latch, 1, TimeUnit.SECONDS)) {
throw new RuntimeException("timeout!");
}
}
}
|
ErrorHandlingClient
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/annotatewith/CustomAnnotationWithParams.java
|
{
"start": 633,
"end": 1584
}
|
/**
 * Annotation-shaped interface declaring one parameter per supported value
 * kind: a mandatory String, class- and enum-typed values, every primitive
 * with a default, and array variants of each — presumably exercising
 * MapStruct's {@code @AnnotateWith} parameter handling (per the test package).
 */
interface ____ {
    // Required: the only member without a default value.
    String stringParam();
    // Class-typed member defaulting to this very type.
    Class<? extends Annotation> genericTypedClass() default CustomAnnotationWithParams.class;
    AnnotateWithEnum enumParam() default AnnotateWithEnum.EXISTING;
    // One member per primitive type, each with a zero-ish default.
    byte byteParam() default 0x00;
    char charParam() default 'a';
    double doubleParam() default 0.0;
    float floatParam() default 0.0f;
    int intParam() default 0;
    long longParam() default 0L;
    short shortParam() default 0;
    boolean booleanParam() default false;
    // Array variants, all defaulting to empty arrays.
    short[] shortArray() default {};
    byte[] byteArray() default {};
    int[] intArray() default {};
    long[] longArray() default {};
    float[] floatArray() default {};
    double[] doubleArray() default {};
    char[] charArray() default {};
    boolean[] booleanArray() default {};
    String[] stringArray() default {};
    Class<?>[] classArray() default {};
    AnnotateWithEnum[] enumArray() default {};
}
|
CustomAnnotationWithParams
|
java
|
apache__logging-log4j2
|
log4j-core/src/main/java/org/apache/logging/log4j/core/net/TcpSocketManager.java
|
{
"start": 19262,
"end": 23353
}
|
/**
 * {@link ManagerFactory} that creates {@link TcpSocketManager} instances,
 * resolving the configured host, opening the TCP socket, and falling back to a
 * null output stream (for later reconnection) when the initial connect fails.
 *
 * @param <M> the concrete manager type to create
 * @param <T> the factory configuration data type
 */
class ____<M extends TcpSocketManager, T extends FactoryData>
        implements ManagerFactory<M, T> {

    /** Host resolution strategy; volatile so tests can swap it via setHostResolver(). */
    static volatile HostResolver RESOLVER = HostResolver.INSTANCE;

    /**
     * Creates a manager for the given configuration.
     *
     * @param name the manager name
     * @param data connection configuration (host, port, timeouts, layout, ...)
     * @return the manager, or {@code null} when the host cannot be resolved, or
     *     when connecting failed and reconnection is disabled
     */
    @SuppressWarnings("resource")
    @Override
    public M createManager(final String name, final T data) {
        InetAddress inetAddress;
        OutputStream os;
        try {
            inetAddress = InetAddress.getByName(data.host);
        } catch (final UnknownHostException ex) {
            LOGGER.error("Could not find address of {}: {}", data.host, ex, ex);
            return null;
        }
        Socket socket = null;
        try {
            // LOG4J2-1042
            socket = createSocket(data);
            os = socket.getOutputStream();
            return createManager(name, os, socket, inetAddress, data);
        } catch (final IOException ex) {
            LOGGER.error("TcpSocketManager ({}) caught exception and will continue:", name, ex);
            // Fall back to a null sink; the manager may reconnect later.
            os = NullOutputStream.getInstance();
        }
        if (data.reconnectDelayMillis == 0) {
            // Reconnection disabled: nothing will ever be written, so give up now.
            Closer.closeSilently(socket);
            return null;
        }
        return createManager(name, os, null, inetAddress, data);
    }

    /** Instantiates the manager; overridable by subclass factories for other manager types. */
    @SuppressWarnings("unchecked")
    M createManager(
            final String name,
            final OutputStream os,
            final Socket socket,
            final InetAddress inetAddress,
            final T data) {
        return (M) new TcpSocketManager(
                name,
                os,
                socket,
                inetAddress,
                data.host,
                data.port,
                data.connectTimeoutMillis,
                data.reconnectDelayMillis,
                data.immediateFail,
                data.layout,
                data.bufferSize,
                data.socketOptions);
    }

    /**
     * Opens a socket to the first resolved address that accepts a connection.
     *
     * @throws IOException if every resolved address fails; the last failure is
     *     attached as the cause and all attempted addresses are listed in the message
     */
    Socket createSocket(final T data) throws IOException {
        final List<InetSocketAddress> socketAddresses = RESOLVER.resolveHost(data.host, data.port);
        IOException ioe = null;
        for (InetSocketAddress socketAddress : socketAddresses) {
            try {
                return TcpSocketManager.createSocket(socketAddress, data.socketOptions, data.connectTimeoutMillis);
            } catch (IOException ex) {
                ioe = ex;
            }
        }
        throw new IOException(errorMessage(data, socketAddresses), ioe);
    }

    /** Builds the failure message listing the host, port and every attempted address. */
    protected String errorMessage(final T data, final List<InetSocketAddress> socketAddresses) {
        final StringBuilder sb = new StringBuilder("Unable to create socket for ");
        sb.append(data.host).append(" at port ").append(data.port);
        if (socketAddresses.size() == 1) {
            if (!socketAddresses.get(0).getAddress().getHostAddress().equals(data.host)) {
                sb.append(" using ip address ")
                        .append(socketAddresses.get(0).getAddress().getHostAddress());
                sb.append(" and port ").append(socketAddresses.get(0).getPort());
            }
        } else {
            sb.append(" using ip addresses and ports ");
            for (int i = 0; i < socketAddresses.size(); ++i) {
                // BUGFIX: append every address. Previously the appends were
                // nested inside the (i > 0) check, so the first resolved
                // address was silently missing from the error message.
                if (i > 0) {
                    sb.append(", ");
                }
                sb.append(socketAddresses.get(i).getAddress().getHostAddress());
                sb.append(":").append(socketAddresses.get(i).getPort());
            }
        }
        return sb.toString();
    }
}
/**
* This method is only for unit testing. It is not Thread-safe.
* @param resolver the HostResolver.
*/
public static void setHostResolver(final HostResolver resolver) {
TcpSocketManagerFactory.RESOLVER = resolver;
}
public static
|
TcpSocketManagerFactory
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/StringFormatWithLiteralTest.java
|
{
"start": 10211,
"end": 10561
}
|
class ____ {
String test() {
return "hello \\n world";
}
}
""")
.doTest();
}
@Test
public void refactoringStringFormatWithLineBreakOnLiteral() {
refactoringHelper
.addInputLines(
"ExampleClass.java",
"""
public
|
ExampleClass
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/MultivalueTestCaseSupplier.java
|
{
"start": 919,
"end": 12687
}
|
/**
 * Builds {@code TypedDataSupplier}s producing multivalued (1..1000 element)
 * blocks of ints, longs and doubles in every {@code Block.MvOrdering}, for
 * exercising ESQL multivalue function test cases.
 */
class ____ {
    // Bounds on how many values a generated multivalue contains.
    private static final int MIN_VALUES = 1;
    private static final int MAX_VALUES = 1000;
    // Static utility holder; never instantiated.
    private MultivalueTestCaseSupplier() {}
    /**
     * Multivalued INTEGER cases: all-zero (when allowed), the min/max extremes,
     * random positive, random negative, and random mixed values within
     * [min, max] — one set per multivalue ordering.
     */
    public static List<TypedDataSupplier> intCases(int min, int max, boolean includeZero) {
        List<TypedDataSupplier> cases = new ArrayList<>();
        for (Block.MvOrdering ordering : Block.MvOrdering.values()) {
            // All-zero values, only when zero lies inside [min, max] and is allowed.
            if (0 <= max && 0 >= min && includeZero) {
                cases.add(
                    new TypedDataSupplier(
                        "<0 mv " + ordering + " ints>",
                        () -> putInOrder(randomList(MIN_VALUES, MAX_VALUES, () -> 0), ordering),
                        DataType.INTEGER
                    )
                );
            }
            // The upper extreme, repeated.
            if (max != 0) {
                cases.add(
                    new TypedDataSupplier(
                        "<" + max + " mv " + ordering + " ints>",
                        () -> putInOrder(randomList(MIN_VALUES, MAX_VALUES, () -> max), ordering),
                        DataType.INTEGER
                    )
                );
            }
            // The lower extreme, repeated (skipped when identical to max).
            if (min != 0 && min != max) {
                cases.add(
                    new TypedDataSupplier(
                        "<" + min + " mv " + ordering + " ints>",
                        () -> putInOrder(randomList(MIN_VALUES, MAX_VALUES, () -> min), ordering),
                        DataType.INTEGER
                    )
                );
            }
            // Random strictly-positive values inside the requested range.
            int lower = Math.max(min, 1);
            int upper = Math.min(max, Integer.MAX_VALUE);
            if (lower < upper) {
                cases.add(
                    new TypedDataSupplier(
                        "<positive mv " + ordering + " ints>",
                        () -> putInOrder(randomList(MIN_VALUES, MAX_VALUES, () -> ESTestCase.randomIntBetween(lower, upper)), ordering),
                        DataType.INTEGER
                    )
                );
            }
            // Random strictly-negative values inside the requested range.
            int lower1 = Math.max(min, Integer.MIN_VALUE);
            int upper1 = Math.min(max, -1);
            if (lower1 < upper1) {
                cases.add(
                    new TypedDataSupplier(
                        "<negative mv " + ordering + " ints>",
                        () -> putInOrder(randomList(MIN_VALUES, MAX_VALUES, () -> ESTestCase.randomIntBetween(lower1, upper1)), ordering),
                        DataType.INTEGER
                    )
                );
            }
            // Mixed-sign random values; zero is excluded unless includeZero.
            if (min < 0 && max > 0) {
                cases.add(
                    new TypedDataSupplier("<random mv " + ordering + " ints>", () -> putInOrder(randomList(MIN_VALUES, MAX_VALUES, () -> {
                        if (includeZero) {
                            return ESTestCase.randomIntBetween(min, max);
                        }
                        return randomBoolean() ? ESTestCase.randomIntBetween(min, -1) : ESTestCase.randomIntBetween(1, max);
                    }), ordering), DataType.INTEGER)
                );
            }
        }
        return cases;
    }
    /**
     * Multivalued LONG cases; mirrors {@link #intCases} with long bounds.
     */
    public static List<TypedDataSupplier> longCases(long min, long max, boolean includeZero) {
        List<TypedDataSupplier> cases = new ArrayList<>();
        for (Block.MvOrdering ordering : Block.MvOrdering.values()) {
            // All-zero values, only when zero lies inside [min, max] and is allowed.
            if (0 <= max && 0 >= min && includeZero) {
                cases.add(
                    new TypedDataSupplier(
                        "<0 mv " + ordering + " longs>",
                        () -> putInOrder(randomList(MIN_VALUES, MAX_VALUES, () -> 0L), ordering),
                        DataType.LONG
                    )
                );
            }
            // The upper extreme, repeated.
            if (max != 0) {
                cases.add(
                    new TypedDataSupplier(
                        "<" + max + " mv " + ordering + " longs>",
                        () -> putInOrder(randomList(MIN_VALUES, MAX_VALUES, () -> max), ordering),
                        DataType.LONG
                    )
                );
            }
            // The lower extreme, repeated (skipped when identical to max).
            if (min != 0 && min != max) {
                cases.add(
                    new TypedDataSupplier(
                        "<" + min + " mv " + ordering + " longs>",
                        () -> putInOrder(randomList(MIN_VALUES, MAX_VALUES, () -> min), ordering),
                        DataType.LONG
                    )
                );
            }
            // Random strictly-positive values inside the requested range.
            long lower = Math.max(min, 1);
            long upper = Math.min(max, Long.MAX_VALUE);
            if (lower < upper) {
                cases.add(
                    new TypedDataSupplier(
                        "<positive mv " + ordering + " longs>",
                        () -> putInOrder(randomList(MIN_VALUES, MAX_VALUES, () -> ESTestCase.randomLongBetween(lower, upper)), ordering),
                        DataType.LONG
                    )
                );
            }
            // Random strictly-negative values inside the requested range.
            long lower1 = Math.max(min, Long.MIN_VALUE);
            long upper1 = Math.min(max, -1);
            if (lower1 < upper1) {
                cases.add(
                    new TypedDataSupplier(
                        "<negative mv " + ordering + " longs>",
                        () -> putInOrder(randomList(MIN_VALUES, MAX_VALUES, () -> ESTestCase.randomLongBetween(lower1, upper1)), ordering),
                        DataType.LONG
                    )
                );
            }
            // Mixed-sign random values; zero is excluded unless includeZero.
            if (min < 0 && max > 0) {
                cases.add(
                    new TypedDataSupplier("<random mv " + ordering + " longs>", () -> putInOrder(randomList(MIN_VALUES, MAX_VALUES, () -> {
                        if (includeZero) {
                            return ESTestCase.randomLongBetween(min, max);
                        }
                        return randomBoolean() ? ESTestCase.randomLongBetween(min, -1) : ESTestCase.randomLongBetween(1, max);
                    }), ordering), DataType.LONG)
                );
            }
        }
        return cases;
    }
    /**
     * Multivalued DOUBLE cases; like the int/long variants but additionally
     * covers negative zero and splits magnitudes into small (|x| <= 1) and
     * big (|x| >= 1) positive/negative bands.
     */
    public static List<TypedDataSupplier> doubleCases(double min, double max, boolean includeZero) {
        List<TypedDataSupplier> cases = new ArrayList<>();
        for (Block.MvOrdering ordering : Block.MvOrdering.values()) {
            // Both +0d and -0d, only when zero lies inside [min, max] and is allowed.
            if (0d <= max && 0d >= min && includeZero) {
                cases.add(
                    new TypedDataSupplier(
                        "<0 mv " + ordering + " doubles>",
                        () -> putInOrder(randomList(MIN_VALUES, MAX_VALUES, () -> 0d), ordering),
                        DataType.DOUBLE
                    )
                );
                cases.add(
                    new TypedDataSupplier(
                        "<-0 mv " + ordering + " doubles>",
                        () -> putInOrder(randomList(MIN_VALUES, MAX_VALUES, () -> -0d), ordering),
                        DataType.DOUBLE
                    )
                );
            }
            // The upper extreme, repeated.
            if (max != 0d) {
                cases.add(
                    new TypedDataSupplier(
                        "<" + max + " mv " + ordering + " doubles>",
                        () -> putInOrder(randomList(MIN_VALUES, MAX_VALUES, () -> max), ordering),
                        DataType.DOUBLE
                    )
                );
            }
            // The lower extreme, repeated (skipped when identical to max).
            if (min != 0d && min != max) {
                cases.add(
                    new TypedDataSupplier(
                        "<" + min + " mv " + ordering + " doubles>",
                        () -> putInOrder(randomList(MIN_VALUES, MAX_VALUES, () -> min), ordering),
                        DataType.DOUBLE
                    )
                );
            }
            // Small positive magnitudes in (0, 1], clipped to [min, max].
            double lower1 = Math.max(min, 0d);
            double upper1 = Math.min(max, 1d);
            if (lower1 < upper1) {
                cases.add(
                    new TypedDataSupplier(
                        "<small positive mv " + ordering + " doubles>",
                        () -> putInOrder(
                            randomList(MIN_VALUES, MAX_VALUES, () -> ESTestCase.randomDoubleBetween(lower1, upper1, true)),
                            ordering
                        ),
                        DataType.DOUBLE
                    )
                );
            }
            // Small negative magnitudes in [-1, 0), clipped to [min, max].
            double lower2 = Math.max(min, -1d);
            double upper2 = Math.min(max, 0d);
            if (lower2 < upper2) {
                cases.add(
                    new TypedDataSupplier(
                        "<small negative mv " + ordering + " doubles>",
                        () -> putInOrder(
                            randomList(MIN_VALUES, MAX_VALUES, () -> ESTestCase.randomDoubleBetween(lower2, upper2, true)),
                            ordering
                        ),
                        DataType.DOUBLE
                    )
                );
            }
            // Big positive magnitudes in [1, MAX_VALUE], clipped to [min, max].
            double lower3 = Math.max(min, 1d);
            double upper3 = Math.min(max, Double.MAX_VALUE);
            if (lower3 < upper3) {
                cases.add(
                    new TypedDataSupplier(
                        "<big positive mv " + ordering + " doubles>",
                        () -> putInOrder(
                            randomList(MIN_VALUES, MAX_VALUES, () -> ESTestCase.randomDoubleBetween(lower3, upper3, true)),
                            ordering
                        ),
                        DataType.DOUBLE
                    )
                );
            }
            // Big negative magnitudes in [-MAX_VALUE, -1], clipped to [min, max].
            double lower4 = Math.max(min, -Double.MAX_VALUE);
            double upper4 = Math.min(max, -1d);
            if (lower4 < upper4) {
                cases.add(
                    new TypedDataSupplier(
                        "<big negative mv " + ordering + " doubles>",
                        () -> putInOrder(
                            randomList(MIN_VALUES, MAX_VALUES, () -> ESTestCase.randomDoubleBetween(lower4, upper4, true)),
                            ordering
                        ),
                        DataType.DOUBLE
                    )
                );
            }
            // Mixed-sign random values; zero is excluded unless includeZero.
            if (min < 0 && max > 0) {
                cases.add(
                    new TypedDataSupplier(
                        "<random mv " + ordering + " doubles>",
                        () -> putInOrder(randomList(MIN_VALUES, MAX_VALUES, () -> {
                            if (includeZero) {
                                return ESTestCase.randomDoubleBetween(min, max, true);
                            }
                            return randomBoolean()
                                ? ESTestCase.randomDoubleBetween(min, -1, true)
                                : ESTestCase.randomDoubleBetween(1, max, true);
                        }), ordering),
                        DataType.DOUBLE
                    )
                );
            }
        }
        return cases;
    }
    /**
     * Rearranges (in place) the generated values to honor the requested
     * multivalue ordering: leave as-is, deduplicate, and/or sort ascending.
     *
     * @return the same list instance, possibly deduplicated and/or sorted
     */
    private static <T extends Comparable<T>> List<T> putInOrder(List<T> mvData, Block.MvOrdering ordering) {
        switch (ordering) {
            case UNORDERED -> {
            }
            case DEDUPLICATED_UNORDERD -> {
                // LinkedHashSet preserves first-seen order while removing duplicates.
                var dedup = new LinkedHashSet<>(mvData);
                mvData.clear();
                mvData.addAll(dedup);
            }
            case DEDUPLICATED_AND_SORTED_ASCENDING -> {
                var dedup = new HashSet<>(mvData);
                mvData.clear();
                mvData.addAll(dedup);
                Collections.sort(mvData);
            }
            case SORTED_ASCENDING -> {
                Collections.sort(mvData);
            }
            default -> throw new UnsupportedOperationException("unsupported ordering [" + ordering + "]");
        }
        return mvData;
    }
}
|
MultivalueTestCaseSupplier
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/type/descriptor/java/spi/DynamicModelJavaType.java
|
{
"start": 461,
"end": 1146
}
|
/**
 * {@link JavaType} descriptor for dynamic-model ("map mode") entity data.
 * Only {@link #getJavaTypeClass()} is functional; every conversion operation
 * is deliberately unsupported for this descriptor.
 */
class ____ implements JavaType<Map<?,?>> {
	@Override
	public JdbcType getRecommendedJdbcType(JdbcTypeIndicators context) {
		// No sensible JDBC mapping exists for a dynamic map model.
		throw new UnsupportedOperationException();
	}
	@Override
	public Map<?,?> fromString(CharSequence string) {
		throw new UnsupportedOperationException();
	}
	@Override
	public <X> X unwrap(Map<?,?> value, Class<X> type, WrapperOptions options) {
		throw new UnsupportedOperationException();
	}
	@Override
	public <X> Map<?,?> wrap(X value, WrapperOptions options) {
		throw new UnsupportedOperationException();
	}
	@Override
	public Class<Map<?,?>> getJavaTypeClass() {
		// Raw-type cast is the only way to produce Class<Map<?,?>> from Map.class.
		//noinspection unchecked,rawtypes
		return (Class) Map.class;
	}
}
|
DynamicModelJavaType
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/jpa/metadata/Fridge.java
|
{
"start": 369,
"end": 1031
}
|
/**
 * Simple JPA entity used by metadata tests. Annotations sit on the getters,
 * i.e. property access: {@code temperature} is a primitive and thus implicitly
 * required, while {@code brand} and {@code height} are explicitly non-nullable.
 */
class ____ {
	private Long id;
	private String brand;
	private int temperature;
	private Integer height;
	// dimensions
	@Id
	@GeneratedValue
	public Long getId() {
		return id;
	}
	public void setId(Long id) {
		this.id = id;
	}
	// Non-optional at the JPA level (optional = false), distinct from @Column(nullable).
	@Basic(optional = false)
	public String getBrand() {
		return brand;
	}
	public void setBrand(String brand) {
		this.brand = brand;
	}
	public int getTemperature() {
		return temperature;
	}
	public void setTemperature(int temperature) {
		this.temperature = temperature;
	}
	// Nullable in Java but mapped to a NOT NULL column.
	@Column(nullable = false)
	public Integer getHeight() {
		return height;
	}
	public void setHeight(Integer height) {
		this.height = height;
	}
}
|
Fridge
|
java
|
micronaut-projects__micronaut-core
|
inject-java/src/test/groovy/io/micronaut/inject/constructor/simpleinjection/B.java
|
{
"start": 698,
"end": 831
}
|
/** Test bean whose sole collaborator {@code A} is supplied by constructor injection. */
class ____ {
    /** Collaborator captured at construction time. */
    private A dependency;

    @Inject
    B(A a) {
        this.dependency = a;
    }

    /** Returns the injected {@code A} instance. */
    public A getA() {
        return dependency;
    }
}
|
B
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/store/protocol/impl/pb/RouterHeartbeatRequestPBImpl.java
|
{
"start": 1851,
"end": 3131
}
|
/**
 * Protobuf-backed implementation of {@code RouterHeartbeatRequest}; every
 * operation delegates to a {@code FederationProtocolPBTranslator} that holds
 * the underlying {@code RouterHeartbeatRequestProto}.
 */
class ____ extends RouterHeartbeatRequest
    implements PBRecord {

  /** Translator bridging this record and its protobuf representation. */
  private FederationProtocolPBTranslator<RouterHeartbeatRequestProto, Builder,
      RouterHeartbeatRequestProtoOrBuilder> translator =
          new FederationProtocolPBTranslator<RouterHeartbeatRequestProto,
              Builder, RouterHeartbeatRequestProtoOrBuilder>(
                  RouterHeartbeatRequestProto.class);

  public RouterHeartbeatRequestPBImpl() {
  }

  @Override
  public RouterHeartbeatRequestProto getProto() {
    return translator.build();
  }

  @Override
  public void setProto(Message proto) {
    translator.setProto(proto);
  }

  @Override
  public void readInstance(String base64String) throws IOException {
    translator.readInstance(base64String);
  }

  @Override
  public RouterState getRouter() throws IOException {
    // Wrap the embedded router proto in its PB-backed record implementation.
    return new RouterStatePBImpl(translator.getProtoOrBuilder().getRouter());
  }

  @Override
  public void setRouter(RouterState routerState) {
    // Only PB-backed router states carry a proto we can embed; others are ignored.
    if (!(routerState instanceof RouterStatePBImpl)) {
      return;
    }
    final RouterStatePBImpl pbState = (RouterStatePBImpl) routerState;
    translator.getBuilder().setRouter(pbState.getProto());
  }
}
|
RouterHeartbeatRequestPBImpl
|
java
|
apache__flink
|
flink-state-backends/flink-statebackend-forst/src/main/java/org/apache/flink/state/forst/sync/AbstractForStStateKeysIterator.java
|
{
"start": 1316,
"end": 2688
}
|
/**
 * Base class for iterators over the keys of a ForSt-backed state, wrapping a
 * raw {@code ForStIteratorWrapper} and deserializing composite key bytes back
 * into user keys. Not thread-safe: the shared deserialization view is reused.
 *
 * @param <K> the user key type
 */
class ____<K> implements AutoCloseable {

    /** Underlying ForSt iterator positioned over this state's key range. */
    @Nonnull protected final ForStIteratorWrapper iterator;

    /** Name of the state being iterated (available to subclasses, e.g. for errors). */
    @Nonnull protected final String state;

    /** Serializer used to turn raw key bytes back into user keys. */
    @Nonnull protected final TypeSerializer<K> keySerializer;

    /** Whether key boundaries are ambiguous and require length-prefixed reads. */
    protected final boolean ambiguousKeyPossible;

    /** Number of leading bytes encoding the key group, skipped when reading keys. */
    protected final int keyGroupPrefixBytes;

    /** Reusable deserialization view; makes this class non-thread-safe. */
    protected final DataInputDeserializer byteArrayDataInputView;

    public AbstractForStStateKeysIterator(
            @Nonnull ForStIteratorWrapper iterator,
            @Nonnull String state,
            @Nonnull TypeSerializer<K> keySerializer,
            int keyGroupPrefixBytes,
            boolean ambiguousKeyPossible) {
        this.iterator = iterator;
        this.state = state;
        this.keySerializer = keySerializer;
        this.keyGroupPrefixBytes = keyGroupPrefixBytes;
        this.ambiguousKeyPossible = ambiguousKeyPossible;
        this.byteArrayDataInputView = new DataInputDeserializer();
    }

    /**
     * Deserializes a user key from composite key bytes, skipping the key-group prefix.
     *
     * @param keyBytes raw composite key bytes produced by the iterator
     * @param readView the view to read from (callers typically pass
     *     {@link #byteArrayDataInputView})
     * @return the deserialized user key
     * @throws IOException if deserialization fails
     */
    protected K deserializeKey(byte[] keyBytes, DataInputDeserializer readView) throws IOException {
        readView.setBuffer(keyBytes, keyGroupPrefixBytes, keyBytes.length - keyGroupPrefixBytes);
        // FIX: read from the same view that was just positioned. Previously this
        // passed the byteArrayDataInputView field, which silently broke any
        // caller supplying a view other than the shared field.
        return CompositeKeySerializationUtils.readKey(
                keySerializer, readView, ambiguousKeyPossible);
    }

    @Override
    public void close() {
        iterator.close();
    }
}
|
AbstractForStStateKeysIterator
|
java
|
mockito__mockito
|
mockito-core/src/main/java/org/mockito/internal/progress/MockingProgress.java
|
{
"start": 483,
"end": 1627
}
|
/**
 * Tracks the in-flight state of the mocking framework: ongoing stubbings,
 * pending verification modes, argument matchers and registered listeners.
 * Implementation details (e.g. whether state is thread-local) are not visible
 * here — consult the implementing class.
 */
interface ____ {
    /** Registers a stubbing that has started but has not yet been given an answer. */
    void reportOngoingStubbing(OngoingStubbing<?> ongoingStubbing);
    /** Hands back the previously reported ongoing stubbing ("pull" semantics). */
    OngoingStubbing<?> pullOngoingStubbing();
    /** Returns the currently registered verification listeners. */
    Set<VerificationListener> verificationListeners();
    /** Marks the beginning of a verification with the given mode. */
    void verificationStarted(VerificationMode verificationMode);
    /** Hands back the pending verification mode ("pull" semantics). */
    VerificationMode pullVerificationMode();
    /** Marks the beginning of a stubbing operation. */
    void stubbingStarted();
    /** Marks the completion of a stubbing operation. */
    void stubbingCompleted();
    /** Fails fast if the framework is in an inconsistent state (misuse detection). */
    void validateState();
    /** Clears accumulated mocking state. */
    void reset();
    /**
     * Removes ongoing stubbing so that in case the framework is misused
     * state validation errors are more accurate
     */
    void resetOngoingStubbing();
    /** Returns the storage for argument matchers recorded before an invocation. */
    ArgumentMatcherStorage getArgumentMatcherStorage();
    /** Notifies listeners that a mock object was created with the given settings. */
    void mockingStarted(Object mock, MockCreationSettings settings);
    /** Notifies listeners that a static/class mock was created with the given settings. */
    void mockingStarted(Class<?> mock, MockCreationSettings settings);
    /** Registers a framework listener. */
    void addListener(MockitoListener listener);
    /** Unregisters a previously added framework listener. */
    void removeListener(MockitoListener listener);
    /** Installs the strategy used by {@link #maybeVerifyLazily(VerificationMode)}. */
    void setVerificationStrategy(VerificationStrategy strategy);
    /** Possibly wraps the given mode per the installed verification strategy. */
    VerificationMode maybeVerifyLazily(VerificationMode mode);
    /**
     * Removes all listeners added via {@link #addListener(MockitoListener)}.
     */
    void clearListeners();
}
|
MockingProgress
|
java
|
apache__flink
|
flink-filesystems/flink-s3-fs-hadoop/src/main/java/org/apache/flink/fs/s3hadoop/token/S3HadoopDelegationTokenProvider.java
|
{
"start": 1053,
"end": 1219
}
|
/**
 * Delegation token provider for the Hadoop S3 filesystem. All token-obtaining
 * logic lives in {@link AbstractS3DelegationTokenProvider}; this subclass only
 * supplies the service name under which the provider is registered.
 */
class ____ extends AbstractS3DelegationTokenProvider {
    @Override
    public String serviceName() {
        return "s3-hadoop";
    }
}
|
S3HadoopDelegationTokenProvider
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/support/PropertyBindingSupportPropertiesTest.java
|
{
"start": 1252,
"end": 3663
}
|
class ____ extends ContextTestSupport {
@Test
public void testProperties() {
Bar bar = new Bar();
PropertyBindingSupport.build()
.withCamelContext(context)
.withReflection(true)
.withTarget(bar)
.withProperty("works[acme]", "company1")
.withProperty("works[burger]", "company2")
.bind();
assertEquals("company1", bar.getWorks().getProperty("acme"));
assertEquals("company2", bar.getWorks().getProperty("burger"));
}
@Test
public void testPropertiesWithConfigurer() {
Bar bar = new Bar();
PropertyBindingSupport.build()
.withCamelContext(context)
.withReflection(false)
.withConfigurer(new BarConfigurer())
.withTarget(bar)
.withProperty("works[acme]", "company1")
.withProperty("works[burger]", "company2")
.bind();
assertEquals("company1", bar.getWorks().getProperty("acme"));
assertEquals("company2", bar.getWorks().getProperty("burger"));
}
@Test
public void testPropertiesMap() {
BarWithMap bar = new BarWithMap();
PropertyBindingSupport.build()
.withCamelContext(context)
.withReflection(true)
.withTarget(bar)
.withProperty("works[acme].name", "company1")
.withProperty("works[burger].name", "company2")
.bind();
assertEquals("company1", bar.getWorks().get("acme").getProperty("name"));
assertEquals("company2", bar.getWorks().get("burger").getProperty("name"));
}
@Test
public void testPropertiesMapWithConfigurer() {
BarWithMap bar = new BarWithMap();
PropertyBindingSupport.build()
.withCamelContext(context)
.withReflection(false)
.withConfigurer(new BarWithMapConfigurer())
.withTarget(bar)
.withProperty("works[acme].name", "company1")
.withProperty("works[burger].name", "company2")
.bind();
assertEquals("company1", bar.getWorks().get("acme").getProperty("name"));
assertEquals("company2", bar.getWorks().get("burger").getProperty("name"));
}
public static
|
PropertyBindingSupportPropertiesTest
|
java
|
apache__camel
|
dsl/camel-yaml-dsl/camel-yaml-dsl-deserializers/src/generated/java/org/apache/camel/dsl/yaml/deserializers/ModelDeserializers.java
|
{
"start": 406976,
"end": 408762
}
|
/**
 * YAML deserializer for {@code GlobalOptionDefinition}: creates an empty
 * definition and populates its {@code key} and {@code value} properties from
 * the YAML node tree.
 */
class ____ extends YamlDeserializerBase<GlobalOptionDefinition> {
    public GlobalOptionDefinitionDeserializer() {
        super(GlobalOptionDefinition.class);
    }

    /** Creates an empty definition to be populated via {@link #setProperty}. */
    @Override
    protected GlobalOptionDefinition newInstance() {
        return new GlobalOptionDefinition();
    }

    /**
     * Applies one YAML property to the target definition.
     *
     * @return {@code true} if the property was recognized and set
     */
    @Override
    protected boolean setProperty(GlobalOptionDefinition target, String propertyKey,
            String propertyName, Node node) {
        // Normalize dash-style keys (e.g. "my-key") to camelCase before matching.
        propertyKey = org.apache.camel.util.StringHelper.dashToCamelCase(propertyKey);
        switch (propertyKey) {
            case "key":
                target.setKey(asText(node));
                break;
            case "value":
                target.setValue(asText(node));
                break;
            default:
                return false;
        }
        return true;
    }
}
@YamlType(
nodes = "globalOptions",
types = org.apache.camel.model.GlobalOptionsDefinition.class,
order = org.apache.camel.dsl.yaml.common.YamlDeserializerResolver.ORDER_LOWEST - 1,
displayName = "Global Options",
description = "Models a series of string key/value pairs for configuring some global options on a Camel context such as max debug log length.",
deprecated = false,
properties = @YamlProperty(name = "globalOption", type = "array:org.apache.camel.model.GlobalOptionDefinition", description = "A series of global options as key value pairs", displayName = "Global Option")
)
public static
|
GlobalOptionDefinitionDeserializer
|
java
|
apache__flink
|
flink-datastream/src/main/java/org/apache/flink/datastream/impl/extension/window/context/DefaultTwoInputWindowContext.java
|
{
"start": 2378,
"end": 5908
}
|
/**
 * Default {@code TwoInputWindowContext}: exposes the current window's time
 * bounds, per-window user state (via a {@code WindowStateStore}), and the two
 * per-input buffers of records received by the window.
 */
class ____<K, IN1, IN2, W extends Window>
        implements TwoInputWindowContext<IN1, IN2> {
    /**
     * The current processing window. An instance should be set every time before accessing
     * window-related attributes, data, and state.
     */
    @Nullable private W window;
    /** Use to retrieve state associated with windows. */
    private final WindowStateStore<K, W> windowStateStore;
    /** The state utilized for storing window received input data from first input. */
    private final AppendingState<IN1, StateIterator<IN1>, Iterable<IN1>> leftWindowState;
    /** The state utilized for storing window received input data from second input. */
    private final AppendingState<IN2, StateIterator<IN2>, Iterable<IN2>> rightWindowState;
    public DefaultTwoInputWindowContext(
            W window,
            AppendingState<IN1, StateIterator<IN1>, Iterable<IN1>> leftWindowState,
            AppendingState<IN2, StateIterator<IN2>, Iterable<IN2>> rightWindowState,
            WindowProcessFunction windowProcessFunction,
            AbstractAsyncStateStreamOperator<?> operator,
            TypeSerializer<W> windowSerializer,
            boolean isMergingWindow) {
        this.window = window;
        this.leftWindowState = leftWindowState;
        this.rightWindowState = rightWindowState;
        this.windowStateStore =
                new WindowStateStore<>(
                        windowProcessFunction, operator, windowSerializer, isMergingWindow);
    }
    // Switches this context to a new window; must precede any state/data access.
    public void setWindow(W window) {
        this.window = window;
    }
    // Window start timestamp, or -1 for windows without time bounds (non-TimeWindow).
    @Override
    public long getStartTime() {
        if (window instanceof TimeWindow) {
            return ((TimeWindow) window).getStart();
        }
        return -1;
    }
    // Window end timestamp, or -1 for windows without time bounds (non-TimeWindow).
    @Override
    public long getEndTime() {
        if (window instanceof TimeWindow) {
            return ((TimeWindow) window).getEnd();
        }
        return -1;
    }
    // The getWindowState overloads below all delegate to the WindowStateStore,
    // scoped to the currently set window — one overload per state flavor.
    @Override
    public <T> Optional<ListState<T>> getWindowState(ListStateDeclaration<T> stateDeclaration)
            throws Exception {
        return windowStateStore.getWindowState(stateDeclaration, window);
    }
    @Override
    public <KEY, V> Optional<MapState<KEY, V>> getWindowState(
            MapStateDeclaration<KEY, V> stateDeclaration) throws Exception {
        return windowStateStore.getWindowState(stateDeclaration, window);
    }
    @Override
    public <T> Optional<ValueState<T>> getWindowState(ValueStateDeclaration<T> stateDeclaration)
            throws Exception {
        return windowStateStore.getWindowState(stateDeclaration, window);
    }
    @Override
    public <T> Optional<ReducingState<T>> getWindowState(
            ReducingStateDeclaration<T> stateDeclaration) throws Exception {
        return windowStateStore.getWindowState(stateDeclaration, window);
    }
    @Override
    public <T, ACC, OUT> Optional<AggregatingState<T, OUT>> getWindowState(
            AggregatingStateDeclaration<T, ACC, OUT> stateDeclaration) throws Exception {
        return windowStateStore.getWindowState(stateDeclaration, window);
    }
    // Buffers a record from the first input into this window's state.
    @Override
    public void putRecord1(IN1 record) {
        leftWindowState.add(record);
    }
    // All first-input records buffered for this window so far.
    @Override
    public Iterable<IN1> getAllRecords1() {
        return leftWindowState.get();
    }
    // Buffers a record from the second input into this window's state.
    @Override
    public void putRecord2(IN2 record) {
        rightWindowState.add(record);
    }
    // All second-input records buffered for this window so far.
    @Override
    public Iterable<IN2> getAllRecords2() {
        return rightWindowState.get();
    }
}
|
DefaultTwoInputWindowContext
|
java
|
google__gson
|
gson/src/main/java/com/google/gson/TypeAdapter.java
|
{
"start": 11318,
"end": 11920
}
|
/**
 * Null-tolerant wrapper around the enclosing {@code TypeAdapter}: writes Java
 * nulls as JSON nulls and reads JSON nulls as Java nulls, delegating every
 * other value to the wrapped adapter.
 */
class ____ extends TypeAdapter<T> {
    @Override
    public void write(JsonWriter out, T value) throws IOException {
      // Emit a JSON null for null values; otherwise defer to the wrapped adapter.
      if (value == null) {
        out.nullValue();
        return;
      }
      TypeAdapter.this.write(out, value);
    }

    @Override
    public T read(JsonReader reader) throws IOException {
      // Delegate non-null tokens; consume a JSON null and map it to Java null.
      if (reader.peek() != JsonToken.NULL) {
        return TypeAdapter.this.read(reader);
      }
      reader.nextNull();
      return null;
    }

    @Override
    public String toString() {
      return "NullSafeTypeAdapter[" + TypeAdapter.this + "]";
    }
  }
}
|
NullSafeTypeAdapter
|
java
|
apache__logging-log4j2
|
log4j-api-test/src/test/java/org/apache/logging/log4j/status/StatusConsoleListenerTest.java
|
{
"start": 1358,
"end": 4900
}
|
/**
 * Tests for {@code StatusConsoleListener}: formatted-status usage, honoring
 * the configured stream, closing non-system streams, and resetting level and
 * stream to their initial values on {@code close()}.
 */
class ____ {
    // Message factory that formats parameters without holding references.
    public static final MessageFactory MESSAGE_FACTORY = ParameterizedNoReferenceMessageFactory.INSTANCE;
    @Test
    void StatusData_getFormattedStatus_should_be_used() {
        // Create the listener.
        final PrintStream stream = mock(PrintStream.class);
        final StatusConsoleListener listener = new StatusConsoleListener(Level.ALL, stream);
        // Log a message.
        final Message message = mock(Message.class);
        final StatusData statusData = spy(new StatusData(null, Level.TRACE, message, null, null));
        listener.log(statusData);
        // Verify the call.
        verify(statusData).getFormattedStatus();
    }
    @Test
    void stream_should_be_honored() throws Exception {
        // Create the listener.
        final ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
        final String encoding = "UTF-8";
        final PrintStream printStream = new PrintStream(outputStream, false, encoding);
        final StatusConsoleListener listener = new StatusConsoleListener(Level.WARN, printStream);
        // log a message that is expected to be logged.
        final RuntimeException expectedThrowable = new RuntimeException("expectedThrowable");
        // Shrink the stack trace to a single synthetic frame to keep output small.
        expectedThrowable.setStackTrace(new StackTraceElement[] {
            new StackTraceElement("expectedThrowableClass", "expectedThrowableMethod", "expectedThrowableFile", 1)
        });
        final Message expectedMessage = MESSAGE_FACTORY.newMessage("expectedMessage");
        listener.log(new StatusData(
                null, // since ignored by `SimpleLogger`
                Level.WARN,
                expectedMessage,
                expectedThrowable,
                null)); // as set by `StatusLogger` itself
        // Collect the output.
        printStream.flush();
        final String output = outputStream.toString(encoding);
        // Verify the output.
        assertThat(output).contains(expectedThrowable.getMessage()).contains(expectedMessage.getFormattedMessage());
    }
    @Test
    void non_system_streams_should_be_closed() {
        final PrintStream stream = mock(PrintStream.class);
        final StatusConsoleListener listener = new StatusConsoleListener(Level.WARN, stream);
        listener.close();
        verify(stream).close();
    }
    @Test
    void close_should_reset_to_initials() {
        // Create the listener
        final PrintStream initialStream = mock(PrintStream.class);
        final Level initialLevel = Level.TRACE;
        final StatusConsoleListener listener = new StatusConsoleListener(initialLevel, initialStream);
        // Verify the initial state
        assertThat(listener.getStatusLevel()).isEqualTo(initialLevel);
        assertThat(listener).hasFieldOrPropertyWithValue("stream", initialStream);
        // Update the state
        final PrintStream newStream = mock(PrintStream.class);
        listener.setStream(newStream);
        final Level newLevel = Level.DEBUG;
        listener.setLevel(newLevel);
        // Verify the update (replacing the stream closes the old one)
        verify(initialStream).close();
        assertThat(listener.getStatusLevel()).isEqualTo(newLevel);
        assertThat(listener).hasFieldOrPropertyWithValue("stream", newStream);
        // Close the listener
        listener.close();
        // Verify the reset
        verify(newStream).close();
        assertThat(listener.getStatusLevel()).isEqualTo(initialLevel);
        assertThat(listener).hasFieldOrPropertyWithValue("stream", initialStream);
    }
}
|
StatusConsoleListenerTest
|
java
|
apache__maven
|
its/core-it-suite/src/test/java/org/apache/maven/it/MavenITmng2972OverridePluginDependencyTest.java
|
{
"start": 1255,
"end": 4740
}
|
class ____ extends AbstractMavenIntegrationTestCase {
/**
* Verify that a project-level plugin dependency replaces the original dependency from the plugin POM.
*
* @throws Exception in case of failure
*/
@Test
public void testitLifecycleInvocation() throws Exception {
File testDir = extractResources("/mng-2972/test1");
Verifier verifier = newVerifier(testDir.getAbsolutePath());
verifier.setAutoclean(false);
verifier.deleteDirectory("target");
verifier.deleteArtifact("org.apache.maven.its.plugins.class-loader", "dep-b", "0.2-mng-2972", "jar");
verifier.filterFile("settings-template.xml", "settings.xml");
verifier.addCliArgument("--settings");
verifier.addCliArgument("settings.xml");
verifier.addCliArgument("validate");
verifier.execute();
verifier.verifyErrorFreeLog();
Properties pclProps = verifier.loadProperties("target/pcl.properties");
verify(pclProps);
}
/**
* Verify that a project-level plugin dependency replaces the original dependency from the plugin POM.
* Apart from testing direct CLI invocation this time, this test also employs a slightly different version for the
* overriding dependency. The original bug is caused by usage of a HashSet but whenever the random order of its
* elements happens to match the correct ordering, the test cannot detect the bad implementation. The obvious way
* to increase the test coverage is re-running the test with different dependency versions, each time producing
* another hash code for the dependency artifact and thereby changing its position in the HashSet's element order.
* The two versions 0.2-mng-2972 and 9.9-MNG-2972 we use here have at least once proven (on Sun JDK 1.6.0_07) to
* successfully break the correctness of the random ordering.
*
* @throws Exception in case of failure
*/
@Test
public void testitCommandLineInvocation() throws Exception {
File testDir = extractResources("/mng-2972/test2");
Verifier verifier = newVerifier(testDir.getAbsolutePath());
verifier.setAutoclean(false);
verifier.deleteDirectory("target");
verifier.deleteArtifact("org.apache.maven.its.plugins.class-loader", "dep-b", "9.9-MNG-2972", "jar");
verifier.filterFile("settings-template.xml", "settings.xml");
verifier.addCliArgument("--settings");
verifier.addCliArgument("settings.xml");
verifier.addCliArgument("org.apache.maven.its.plugins:maven-it-plugin-class-loader:2.1-SNAPSHOT:load");
verifier.execute();
verifier.verifyErrorFreeLog();
Properties pclProps = verifier.loadProperties("target/pcl.properties");
verify(pclProps);
}
private void verify(Properties pclProps) throws Exception {
assertNotNull(pclProps.getProperty("org.apache.maven.its.mng2972.MNG2972"));
assertNull(pclProps.getProperty("org.apache.maven.plugin.coreit.ClassA"));
assertNull(pclProps.getProperty("org.apache.maven.plugin.coreit.ClassB"));
assertEquals("1", pclProps.getProperty("org/apache/maven/its/mng2972/mng-2972.properties.count"));
assertEquals("0", pclProps.getProperty("org/apache/maven/plugin/coreit/a.properties.count"));
assertEquals("0", pclProps.getProperty("org/apache/maven/plugin/coreit/b.properties.count"));
}
}
|
MavenITmng2972OverridePluginDependencyTest
|
java
|
junit-team__junit5
|
platform-tooling-support-tests/src/test/java/platform/tooling/support/tests/ManagedResource.java
|
{
"start": 1639,
"end": 1679
}
|
enum ____ {
GLOBAL, PER_CONTEXT
}
|
Scope
|
java
|
apache__dubbo
|
dubbo-plugin/dubbo-rest-openapi/src/main/java/org/apache/dubbo/rpc/protocol/tri/rest/openapi/model/Header.java
|
{
"start": 1045,
"end": 5113
}
|
enum ____ {
SIMPLE("simple");
private final String value;
Style(String value) {
this.value = value;
}
@Override
public String toString() {
return value;
}
}
private String description;
private Boolean required;
private Boolean deprecated;
private Boolean allowEmptyValue;
private final Style style = Style.SIMPLE;
private Boolean explode;
private Schema schema;
private Object example;
private Map<String, Example> examples;
private Map<String, MediaType> contents;
public String getDescription() {
return description;
}
public Header setDescription(String description) {
this.description = description;
return this;
}
public Boolean getRequired() {
return required;
}
public Header setRequired(Boolean required) {
this.required = required;
return this;
}
public Boolean getDeprecated() {
return deprecated;
}
public Header setDeprecated(Boolean deprecated) {
this.deprecated = deprecated;
return this;
}
public Boolean getAllowEmptyValue() {
return allowEmptyValue;
}
public Header setAllowEmptyValue(Boolean allowEmptyValue) {
this.allowEmptyValue = allowEmptyValue;
return this;
}
public Style getStyle() {
return style;
}
public Boolean getExplode() {
return explode;
}
public Header setExplode(Boolean explode) {
this.explode = explode;
return this;
}
public Schema getSchema() {
return schema;
}
public Header setSchema(Schema schema) {
this.schema = schema;
return this;
}
public Object getExample() {
return example;
}
public Header setExample(Object example) {
this.example = example;
return this;
}
public Map<String, Example> getExamples() {
return examples;
}
public Header setExamples(Map<String, Example> examples) {
this.examples = examples;
return this;
}
public Header addExample(String name, Example example) {
if (examples == null) {
examples = new LinkedHashMap<>();
}
examples.put(name, example);
return this;
}
public Header removeExample(String name) {
if (examples != null) {
examples.remove(name);
}
return this;
}
public Map<String, MediaType> getContents() {
return contents;
}
public MediaType getContent(String name) {
return contents == null ? null : contents.get(name);
}
public Header setContents(Map<String, MediaType> contents) {
this.contents = contents;
return this;
}
public Header addContent(String name, MediaType content) {
if (contents == null) {
contents = new LinkedHashMap<>();
}
contents.put(name, content);
return this;
}
public Header removeContent(String name) {
if (contents != null) {
contents.remove(name);
}
return this;
}
@Override
public Header clone() {
Header clone = super.clone();
clone.schema = clone(schema);
clone.examples = clone(examples);
clone.contents = clone(contents);
return clone;
}
@Override
public Map<String, Object> writeTo(Map<String, Object> node, Context context) {
write(node, "description", description);
write(node, "required", required);
write(node, "deprecated", deprecated);
write(node, "allowEmptyValue", allowEmptyValue);
write(node, "style", style);
write(node, "explode", explode);
write(node, "schema", schema, context);
write(node, "example", example);
write(node, "examples", examples, context);
write(node, "content", contents, context);
writeExtensions(node);
return node;
}
}
|
Style
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/UngroupedOverloadsTest.java
|
{
"start": 14487,
"end": 14743
}
|
class ____ {
public void foo() {}
public void foo(int x) {}
public void foo(int x, int y) {}
public void foo(int x, int y, int z) {}
private static
|
UngroupedOverloadsRefactoringMultiple
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestValueIterReset.java
|
{
"start": 1972,
"end": 2603
}
|
class ____ {
private static final int NUM_MAPS = 1;
private static final int NUM_TESTS = 4;
private static final int NUM_VALUES = 40;
private static Path TEST_ROOT_DIR =
new Path(System.getProperty("test.build.data","/tmp"));
private static Configuration conf = new Configuration();
private static FileSystem localFs;
static {
try {
localFs = FileSystem.getLocal(conf);
} catch (IOException io) {
throw new RuntimeException("problem getting local fs", io);
}
}
private static final Logger LOG =
LoggerFactory.getLogger(TestValueIterReset.class);
public static
|
TestValueIterReset
|
java
|
google__guice
|
core/src/com/google/inject/spi/InjectionListener.java
|
{
"start": 896,
"end": 1127
}
|
interface ____<I> {
/**
* Invoked by Guice after it injects the fields and methods of instance.
*
* @param injectee instance that Guice injected dependencies into
*/
void afterInjection(I injectee);
}
|
InjectionListener
|
java
|
apache__camel
|
core/camel-core-model/src/main/java/org/apache/camel/model/dataformat/CBORDataFormat.java
|
{
"start": 1463,
"end": 7601
}
|
class ____ extends DataFormatDefinition {
@XmlTransient
private Class<?> collectionType;
@XmlTransient
private Class<?> unmarshalType;
@XmlAttribute
@Metadata(label = "advanced")
private String objectMapper;
@XmlAttribute
@Metadata(defaultValue = "true", javaType = "java.lang.Boolean")
private String useDefaultObjectMapper;
@XmlAttribute(name = "unmarshalType")
private String unmarshalTypeName;
@XmlAttribute(name = "collectionType")
@Metadata(label = "advanced")
private String collectionTypeName;
@XmlAttribute
@Metadata(defaultValue = "false", javaType = "java.lang.Boolean")
private String useList;
@XmlAttribute
@Metadata(defaultValue = "false", javaType = "java.lang.Boolean")
private String allowUnmarshallType;
@XmlAttribute
@Metadata(defaultValue = "false", javaType = "java.lang.Boolean")
private String prettyPrint;
@XmlAttribute
@Metadata(label = "advanced", defaultValue = "false", javaType = "java.lang.Boolean")
private String allowJmsType;
@XmlAttribute
private String enableFeatures;
@XmlAttribute
private String disableFeatures;
public CBORDataFormat() {
super("cbor");
}
protected CBORDataFormat(CBORDataFormat source) {
super(source);
this.collectionType = source.collectionType;
this.unmarshalType = source.unmarshalType;
this.objectMapper = source.objectMapper;
this.useDefaultObjectMapper = source.useDefaultObjectMapper;
this.unmarshalTypeName = source.unmarshalTypeName;
this.collectionTypeName = source.collectionTypeName;
this.useList = source.useList;
this.allowUnmarshallType = source.allowUnmarshallType;
this.prettyPrint = source.prettyPrint;
this.allowJmsType = source.allowJmsType;
this.enableFeatures = source.enableFeatures;
this.disableFeatures = source.disableFeatures;
}
private CBORDataFormat(Builder builder) {
this();
this.collectionType = builder.collectionType;
this.unmarshalType = builder.unmarshalType;
this.objectMapper = builder.objectMapper;
this.useDefaultObjectMapper = builder.useDefaultObjectMapper;
this.unmarshalTypeName = builder.unmarshalTypeName;
this.collectionTypeName = builder.collectionTypeName;
this.useList = builder.useList;
this.allowUnmarshallType = builder.allowUnmarshallType;
this.prettyPrint = builder.prettyPrint;
this.allowJmsType = builder.allowJmsType;
this.enableFeatures = builder.enableFeatures;
this.disableFeatures = builder.disableFeatures;
}
@Override
public CBORDataFormat copyDefinition() {
return new CBORDataFormat(this);
}
public String getObjectMapper() {
return objectMapper;
}
/**
* Lookup and use the existing CBOR ObjectMapper with the given id when using Jackson.
*/
public void setObjectMapper(String objectMapper) {
this.objectMapper = objectMapper;
}
public String getUseDefaultObjectMapper() {
return useDefaultObjectMapper;
}
/**
* Whether to lookup and use default Jackson CBOR ObjectMapper from the registry.
*/
public void setUseDefaultObjectMapper(String useDefaultObjectMapper) {
this.useDefaultObjectMapper = useDefaultObjectMapper;
}
public String getUnmarshalTypeName() {
return unmarshalTypeName;
}
/**
* Class name of the java type to use when unmarshalling
*/
public void setUnmarshalTypeName(String unmarshalTypeName) {
this.unmarshalTypeName = unmarshalTypeName;
}
public Class<?> getUnmarshalType() {
return unmarshalType;
}
public String getPrettyPrint() {
return prettyPrint;
}
/**
* To enable pretty printing output nicely formatted.
* <p/>
* Is by default false.
*/
public void setPrettyPrint(String prettyPrint) {
this.prettyPrint = prettyPrint;
}
public String getAllowJmsType() {
return allowJmsType;
}
/**
* Used for JMS users to allow the JMSType header from the JMS spec to specify a FQN classname to use to unmarshal
* to.
*/
public void setAllowJmsType(String allowJmsType) {
this.allowJmsType = allowJmsType;
}
/**
* Class of the java type to use when unmarshalling
*/
public void setUnmarshalType(Class<?> unmarshalType) {
this.unmarshalType = unmarshalType;
}
public String getCollectionTypeName() {
return collectionTypeName;
}
/**
* Refers to a custom collection type to lookup in the registry to use. This option should rarely be used, but
* allows to use different collection types than java.util.Collection based as default.
*/
public void setCollectionTypeName(String collectionTypeName) {
this.collectionTypeName = collectionTypeName;
}
public Class<?> getCollectionType() {
return collectionType;
}
public void setCollectionType(Class<?> collectionType) {
this.collectionType = collectionType;
}
public String getUseList() {
return useList;
}
/**
* To unmarshal to a List of Map or a List of Pojo.
*/
public void setUseList(String useList) {
this.useList = useList;
}
public String getAllowUnmarshallType() {
return allowUnmarshallType;
}
/**
* If enabled then Jackson CBOR is allowed to attempt to use the CamelCBORUnmarshalType header during the
* unmarshalling.
* <p/>
* This should only be enabled when desired to be used.
*/
public void setAllowUnmarshallType(String allowUnmarshallType) {
this.allowUnmarshallType = allowUnmarshallType;
}
public String getEnableFeatures() {
return enableFeatures;
}
/**
* Set of features to enable on the Jackson <tt>com.fasterxml.jackson.databind.ObjectMapper</tt>.
* <p/>
* The features should be a name that matches a
|
CBORDataFormat
|
java
|
processing__processing4
|
app/src/processing/app/ui/Editor.java
|
{
"start": 14047,
"end": 35551
}
|
class ____ extends TransferHandler {
public boolean canImport(TransferHandler.TransferSupport support) {
return !sketch.isReadOnly();
}
public boolean importData(TransferHandler.TransferSupport support) {
int successful = 0;
if (!canImport(support)) {
return false;
}
try {
Transferable transferable = support.getTransferable();
DataFlavor uriListFlavor =
new DataFlavor("text/uri-list;class=java.lang.String");
if (transferable.isDataFlavorSupported(DataFlavor.javaFileListFlavor)) {
List<?> list = (List<?>)
transferable.getTransferData(DataFlavor.javaFileListFlavor);
for (Object o : list) {
File file = (File) o;
if (sketch.addFile(file)) {
successful++;
}
}
} else if (transferable.isDataFlavorSupported(uriListFlavor)) {
// Some platforms (Mac OS X and Linux, when this began) preferred
// this method of moving files.
String data = (String)transferable.getTransferData(uriListFlavor);
String[] pieces = PApplet.splitTokens(data, "\r\n");
for (String piece : pieces) {
if (piece.startsWith("#")) continue;
String path = null;
if (piece.startsWith("file:///")) {
path = piece.substring(7);
} else if (piece.startsWith("file:/")) {
path = piece.substring(5);
}
if (path != null) {
if (sketch.addFile(new File(path))) {
successful++;
}
} else {
System.err.println("Path not found for: " + data);
}
}
}
} catch (Exception e) {
Messages.showWarning("Drag & Drop Problem",
"An error occurred while trying to add files to the sketch.", e);
return false;
}
statusNotice(Language.pluralize("editor.status.drag_and_drop.files_added", successful));
return true;
}
}
public Base getBase() {
return base;
}
public Mode getMode() {
return mode;
}
public void repaintHeader() {
header.repaint();
}
public void rebuildHeader() {
header.rebuild();
}
public void rebuildModePopup() {
modePopup = new JMenu();
ButtonGroup modeGroup = new ButtonGroup();
for (final Mode m : base.getModeList()) {
JRadioButtonMenuItem item = new JRadioButtonMenuItem(m.getTitle());
item.addActionListener(e -> {
if (!base.changeMode(m)) {
// Returns false if unable to change the mode in this window
// (which will open a new window with the new Mode), in which case
// re-select the menu item b/c Java changes it automatically.
reselectMode();
}
});
modePopup.add(item);
modeGroup.add(item);
if (mode == m) {
item.setSelected(true);
}
}
modePopup.addSeparator();
JMenuItem manageModes = new JMenuItem(Language.text("toolbar.manage_modes"));
manageModes.addActionListener(e -> ContributionManager.openModes());
modePopup.add(manageModes);
Toolkit.setMenuMnemsInside(modePopup);
}
// Re-select the old checkbox, because it was automatically
// updated by Java, even though the Mode could not be changed.
// https://github.com/processing/processing/issues/2615
private void reselectMode() {
for (Component c : getModePopup().getComponents()) {
if (c instanceof JRadioButtonMenuItem) {
if (((JRadioButtonMenuItem) c).getText().equals(mode.getTitle())) {
((JRadioButtonMenuItem)c).setSelected(true);
break;
}
}
}
}
public JPopupMenu getModePopup() {
return modePopup.getPopupMenu();
}
public EditorConsole getConsole() {
return console;
}
public EditorHeader createHeader() {
return new EditorHeader(this);
}
abstract public EditorToolbar createToolbar();
@SuppressWarnings("unused")
public EditorToolbar getToolbar() {
return toolbar;
}
/**
* Rebuild the Toolbar after turning debug on/off.
*/
public void rebuildToolbar() {
toolbar.rebuild();
toolbar.revalidate(); // necessary to handle subcomponents
}
abstract public Formatter createFormatter();
protected void setDividerLocation(int pos) {
splitPane.setDividerLocation(pos);
}
protected int getDividerLocation() {
return splitPane.getDividerLocation();
}
// . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . .
/**
* Read and apply new values from the preferences, either because
* the app is just starting up, or the user just finished messing
* with things in the Preferences window.
*/
public void applyPreferences() {
// Even though this is only updating the theme (colors, icons),
// subclasses use this to apply other preferences.
// For instance, Java Mode applies changes to error checking.
updateTheme();
}
public void updateTheme() {
header.updateTheme();
toolbar.updateTheme();
textarea.updateTheme();
errorColumn.updateTheme();
status.updateTheme();
console.updateTheme();
footer.updateTheme();
// Not all Modes will have an error table (that's why it's addErrorTable()
// and not createErrorTable() and called by default).
// https://github.com/jdf/processing.py/issues/382#issuecomment-892269678
if (errorTable != null) {
errorTable.updateTheme();
}
var color = Theme.getColor("toolbar.gradient.top");
spacer.setBackground(color);
toolTipFont = Toolkit.getSansFont(Toolkit.zoom(9), Font.PLAIN);
toolTipTextColor = Theme.get("errors.selection.fgcolor");
toolTipWarningColor = Theme.get("errors.selection.warning.bgcolor");
toolTipErrorColor = Theme.get("errors.selection.error.bgcolor");
UIManager.put("RootPane.background", color);
UIManager.put("TitlePane.embeddedForeground", Theme.getColor("editor.fgcolor"));
getRootPane().updateUI();
UIManager.put("RootPane.background", null);
JPopupMenu popup = modePopup.getPopupMenu();
// Cannot use instanceof because com.formdev.flatlaf.ui.FlatPopupMenuBorder
// is a subclass of EmptyBorder, so just override each time. Cannot set
// null because that will reset the border to the default, not remove it.
// The top/bottom in FlatLaf is 6px, but feels too large.
popup.setBorder(new EmptyBorder(3, 0, 3, 0));
popup.setBackground(Theme.getColor("mode.popup.enabled.bgcolor"));
for (Component comp : modePopup.getMenuComponents()) {
if (comp instanceof JMenuItem item) {
if (item.getUI() instanceof PdeMenuItemUI) {
((PdeMenuItemUI) item.getUI()).updateTheme();
} else {
item.setUI(new PdeMenuItemUI("mode.popup"));
}
} else if (comp instanceof JPopupMenu.Separator) {
comp.setForeground(Theme.getColor("mode.popup.disabled.fgcolor"));
}
}
repaint(); // for good measure
}
// . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . .
protected void buildMenuBar() {
JMenuBar menubar = new JMenuBar();
fileMenu = buildFileMenu();
menubar.add(fileMenu);
menubar.add(buildEditMenu());
menubar.add(buildSketchMenu());
// For 3.0a4 move mode menu to the left of the Tool menu
JMenu modeMenu = buildModeMenu();
if (modeMenu != null) {
menubar.add(modeMenu);
}
toolsMenu = new JMenu(Language.text("menu.tools"));
base.populateToolsMenu(toolsMenu);
menubar.add(toolsMenu);
JMenu helpMenu = buildHelpMenu();
if (Platform.isMacOS()) {
// There's a bug on macOS since at least 2016 that leaves the
// Help menu disabled after a modal dialog has been shown.
// In 2018, it was closed by Oracle with a claim that it couldn't
// be reproduced: https://bugs.openjdk.org/browse/JDK-8196655
// The workaround is to add a space to the end of the menu name,
// which disables whatever macOS behavior is causing the problem.
// https://github.com/processing/processing4/issues/638
helpMenu.setText(helpMenu.getText() + " ");
}
menubar.add(helpMenu);
updateDevelopMenu(menubar);
Toolkit.setMenuMnemonics(menubar);
setJMenuBar(menubar);
}
abstract public JMenu buildFileMenu();
protected JMenu buildFileMenu(JMenuItem[] exportItems) {
JMenuItem item;
JMenu fileMenu = new JMenu(Language.text("menu.file"));
item = Toolkit.newJMenuItem(Language.text("menu.file.new"), 'N');
item.addActionListener(e -> base.handleNew());
fileMenu.add(item);
item = Toolkit.newJMenuItem(Language.text("menu.file.open"), 'O');
item.addActionListener(e -> base.handleOpenPrompt());
fileMenu.add(item);
item = Toolkit.newJMenuItemShift(Language.text("menu.file.sketchbook"), 'K');
item.addActionListener(e -> base.showSketchbookFrame());
fileMenu.add(item);
item = Toolkit.newJMenuItemShift(Language.text("menu.file.examples"), 'O');
item.addActionListener(e -> mode.showExamplesFrame());
fileMenu.add(item);
item = Toolkit.newJMenuItem(Language.text("menu.file.close"), 'W');
item.addActionListener(e -> base.handleClose(Editor.this, false));
fileMenu.add(item);
item = Toolkit.newJMenuItem(Language.text("menu.file.save"), 'S');
item.addActionListener(e -> handleSave(false));
fileMenu.add(item);
item = Toolkit.newJMenuItemShift(Language.text("menu.file.save_as"), 'S');
item.addActionListener(e -> handleSaveAs());
fileMenu.add(item);
if (exportItems != null) {
for (JMenuItem ei : exportItems) {
fileMenu.add(ei);
}
}
fileMenu.addSeparator();
item = Toolkit.newJMenuItemShift(Language.text("menu.file.page_setup"), 'P');
item.addActionListener(e -> handlePageSetup());
fileMenu.add(item);
item = Toolkit.newJMenuItem(Language.text("menu.file.print"), 'P');
item.addActionListener(e -> handlePrint());
fileMenu.add(item);
/*
fileMenu.addSeparator();
item = new JMenuItem("Restart");
item.addActionListener(e -> base.handleRestart());
fileMenu.add(item);
*/
// macOS already has its own preferences and quit menu.
// That's right! Think different, b*tches!
if (!Platform.isMacOS()) {
fileMenu.addSeparator();
item = Toolkit.newJMenuItem(Language.text("menu.file.preferences"), ',');
item.addActionListener(e -> base.handlePrefs());
fileMenu.add(item);
fileMenu.addSeparator();
item = Toolkit.newJMenuItem(Language.text("menu.file.quit"), 'Q');
item.addActionListener(e -> base.handleQuit());
fileMenu.add(item);
}
return fileMenu;
}
protected JMenu buildEditMenu() {
JMenu menu = new JMenu(Language.text("menu.edit"));
JMenuItem item;
undoItem = Toolkit.newJMenuItem(undoAction = new UndoAction(), 'Z');
menu.add(undoItem);
redoItem = new JMenuItem(redoAction = new RedoAction());
redoItem.setAccelerator(Toolkit.getKeyStrokeExt("menu.edit.redo"));
menu.add(redoItem);
menu.addSeparator();
item = Toolkit.newJMenuItem(cutAction = new CutAction(), 'X');
editMenuUpdatable.add(cutAction);
menu.add(item);
item = Toolkit.newJMenuItem(copyAction = new CopyAction(), 'C');
editMenuUpdatable.add(copyAction);
menu.add(item);
item = Toolkit.newJMenuItemShift(copyAsHtmlAction = new CopyAsHtmlAction(), 'C');
editMenuUpdatable.add(copyAsHtmlAction);
menu.add(item);
item = Toolkit.newJMenuItem(pasteAction = new PasteAction(), 'V');
editMenuUpdatable.add(pasteAction);
menu.add(item);
item = Toolkit.newJMenuItem(Language.text("menu.edit.select_all"), 'A');
item.addActionListener(e -> textarea.selectAll());
menu.add(item);
menu.addSeparator();
item = Toolkit.newJMenuItem(Language.text("menu.edit.auto_format"), 'T');
item.addActionListener(e -> handleAutoFormat());
menu.add(item);
item = Toolkit.newJMenuItemExt("menu.edit.comment_uncomment");
item.addActionListener(e -> handleCommentUncomment());
menu.add(item);
item = Toolkit.newJMenuItemExt("menu.edit.increase_indent");
item.addActionListener(e -> handleIndentOutdent(true));
menu.add(item);
item = Toolkit.newJMenuItemExt("menu.edit.decrease_indent");
item.addActionListener(e -> handleIndentOutdent(false));
menu.add(item);
item = Toolkit.newJMenuItemExt("menu.edit.increase_font");
item.addActionListener(e -> {
modifyFontSize(true);
});
menu.add(item);
item = Toolkit.newJMenuItemExt("menu.edit.decrease_font");
item.addActionListener(e -> {
modifyFontSize(false);
});
menu.add(item);
menu.addSeparator();
item = Toolkit.newJMenuItem(Language.text("menu.edit.find"), 'F');
item.addActionListener(e -> {
if (find == null) {
find = new FindReplace(Editor.this);
}
// https://github.com/processing/processing/issues/3457
String selection = getSelectedText();
if (selection != null && selection.length() != 0 &&
!selection.contains("\n")) {
find.setFindText(selection);
}
find.setVisible(true);
});
menu.add(item);
item = Toolkit.newJMenuItem(findNextAction = new FindNextAction(), 'G');
editMenuUpdatable.add(findNextAction);
menu.add(item);
item = Toolkit.newJMenuItemShift(findPreviousAction = new FindPreviousAction(), 'G');
editMenuUpdatable.add(findPreviousAction);
menu.add(item);
UpdatableAction action;
item = Toolkit.newJMenuItem(action = new SelectionForFindAction(), 'E');
editMenuUpdatable.add(action);
menu.add(item);
// Update copy/cut state on selection/de-selection
menu.addMenuListener(new MenuListener() {
// UndoAction and RedoAction do this for themselves.
@Override
public void menuCanceled(MenuEvent e) {
for (UpdatableAction a : editMenuUpdatable) {
a.setEnabled(true);
}
}
@Override
public void menuDeselected(MenuEvent e) {
for (UpdatableAction a : editMenuUpdatable) {
a.setEnabled(true);
}
}
@Override
public void menuSelected(MenuEvent e) {
for (UpdatableAction a : editMenuUpdatable) {
a.updateState();
}
}
});
return menu;
}
protected void modifyFontSize(boolean increase){
var fontSize = Preferences.getInteger("editor.font.size");
fontSize += increase ? 1 : -1;
fontSize = Math.max(5, Math.min(72, fontSize));
Preferences.setInteger("editor.font.size", fontSize);
for (Editor editor : base.getEditors()) {
editor.applyPreferences();
}
Preferences.save();
}
abstract public JMenu buildSketchMenu();
protected JMenu buildSketchMenu(JMenuItem[] runItems) {
JMenuItem item;
sketchMenu = new JMenu(Language.text("menu.sketch"));
for (JMenuItem mi : runItems) {
sketchMenu.add(mi);
}
sketchMenu.addSeparator();
sketchMenu.add(mode.getImportMenu());
item = Toolkit.newJMenuItem(Language.text("menu.sketch.show_sketch_folder"), 'K');
item.addActionListener(e -> {
if (sketch.isUntitled() || sketch.isReadOnly()) {
// Too weird to show the sketch folder when it's buried somewhere in an
// OS-specific temp directory. TODO a better, and localized, message.
Messages.showMessage("Save First", "Please first save the sketch.");
} else {
Platform.openFolder(sketch.getFolder());
}
});
sketchMenu.add(item);
item.setEnabled(Platform.openFolderAvailable());
item = new JMenuItem(Language.text("menu.sketch.add_file"));
item.addActionListener(e -> {
if (sketch.isUntitled() || sketch.isReadOnly()) {
// Technically, this sketch either doesn't exist (it's untitled and
// lives in a temp folder) or it shouldn't be overwritten/modified
// (it's an example). Just ask the user to save. TODO same as above.
Messages.showMessage("Save First", "Please first save the sketch.");
} else {
sketch.handleAddFile();
}
});
sketchMenu.add(item);
//noinspection ConstantConditions
if (runItems != null && runItems.length != 0) {
sketchMenu.addSeparator();
}
sketchMenu.addMenuListener(new MenuListener() {
// Menu Listener that populates the menu only when the menu is opened
final Map<Sketch, JMenuItem> itemMap = new HashMap<>();
@Override
public void menuSelected(MenuEvent event) {
Set<JMenuItem> unseen = new HashSet<>(itemMap.values());
for (final Editor editor : base.getEditors()) {
Sketch sketch = editor.getSketch();
JMenuItem item = itemMap.get(sketch);
if (item != null) {
unseen.remove(item);
} else { // it's a new item
item = new JCheckBoxMenuItem();
sketchMenu.add(item);
itemMap.put(sketch, item);
}
// set selected if the current sketch, deselect if not
item.setSelected(sketch.equals(getSketch()));
// name may have changed while Sketch object stayed the same
String name = sketch.getName();
if (!editor.getMode().equals(base.getDefaultMode())) {
name += " (" + editor.getMode().getTitle() + ")";
}
item.setText(name);
// Action listener to bring the appropriate sketch in front
item.addActionListener(e -> {
editor.setState(Frame.NORMAL);
editor.setVisible(true);
editor.toFront();
});
// Disabling for now, might be problematic [fry 200117]
//Toolkit.setMenuMnemsInside(sketchMenu);
}
for (JMenuItem item : unseen) {
sketchMenu.remove(item);
Sketch s = findSketch(item);
if (s != null) {
itemMap.remove(s);
}
}
}
Sketch findSketch(JMenuItem item) {
for (Map.Entry<Sketch, JMenuItem> e : itemMap.entrySet()) {
if (item == e.getValue()) {
return e.getKey();
}
}
return null;
}
@Override
public void menuDeselected(MenuEvent event) { }
@Override
public void menuCanceled(MenuEvent event) { }
});
return sketchMenu;
}
abstract public void handleImportLibrary(String name);
public void librariesChanged() { }
public void codeFolderChanged() { }
public void sketchChanged() { }
// . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . .
public JMenu getToolMenu() {
return toolsMenu;
}
/**
* Clears the Tool menu and runs the gc so that contributions can be updated
* without classes still being in use.
*/
public void clearToolMenu() {
toolsMenu.removeAll();
System.gc();
}
/**
* Updates update count in the UI. Called on EDT.
* @param count number of available updates
*/
public void setUpdatesAvailable(int count) {
footer.setUpdateCount(count);
}
/**
* Override this if you want a special menu for a Mode.
* You only get one menu, use it wisely!
* Note that this is called from the Editor constructor,
* so your Editor object may not be completely initialized yet.
*/
public JMenu buildModeMenu() {
return null;
}
abstract public JMenu buildHelpMenu();
public void buildDevelopMenu(){
developMenu = new JMenu(Language.text("menu.develop"));
var updateTrigger = new JMenuItem(Language.text("menu.develop.check_for_updates"));
updateTrigger.addActionListener(e -> {
Preferences.unset("update.last");
new UpdateCheck(base);
});
developMenu.add(updateTrigger);
}
public void updateDevelopMenu(){
updateDevelopMenu(null);
}
void updateDevelopMenu(JMenuBar menu){
if(menu == null){
menu = getJMenuBar();
}
if(developMenu == null){
buildDevelopMenu();
}
if(Base.DEBUG){
menu.add(developMenu);
}else{
menu.remove(developMenu);
}
}
public void showReference(String filename) {
File file = new File(mode.getReferenceFolder(), filename);
showReferenceFile(file);
}
/**
* Given the .html file, displays it in the default browser.
*/
public void showReferenceFile(File file) {
try {
file = file.getCanonicalFile();
} catch (IOException e) {
e.printStackTrace();
}
// Prepend with file:// and also encode spaces & other characters
Platform.openURL(file.toURI().toString());
}
static public void showChanges() {
// https://github.com/processing/processing/issues/1558
if (!Base.isCommandLine()) {
Platform.openURL("https://github.com/processing/processing4/wiki/Changes-in-4.0");
}
}
static public int getProblemEditorLineStop(Problem problem, int lineStart, int lineStop) {
int stopOffset = problem.getStopOffset();
if (stopOffset == -1) {
stopOffset = lineStop - lineStart;
}
return stopOffset;
}
// . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . .
/**
* Subclass if you want to have setEnabled(canDo()); called when your menu
* is opened.
*/
static abstract
|
FileDropHandler
|
java
|
google__dagger
|
javatests/artifacts/dagger-ksp/java-app/src/main/java/app/AssistedInjectClasses.java
|
{
"start": 948,
"end": 993
}
|
class ____ {
@Component
|
AssistedInjectClasses
|
java
|
spring-projects__spring-boot
|
build-plugin/spring-boot-gradle-plugin/src/main/java/org/springframework/boot/gradle/tasks/bundling/CacheSpec.java
|
{
"start": 2601,
"end": 2768
}
|
class ____ {
/**
* Returns the source of the cache.
* @return the cache source
*/
@Input
public abstract Property<String> getSource();
}
}
|
BindCacheSpec
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/QuartzEndpointBuilderFactory.java
|
{
"start": 11126,
"end": 26309
}
|
interface ____
extends
EndpointConsumerBuilder {
default QuartzEndpointBuilder basic() {
return (QuartzEndpointBuilder) this;
}
/**
* Allows for bridging the consumer to the Camel routing Error Handler,
* which mean any exceptions (if possible) occurred while the Camel
* consumer is trying to pickup incoming messages, or the likes, will
* now be processed as a message and handled by the routing Error
* Handler. Important: This is only possible if the 3rd party component
* allows Camel to be alerted if an exception was thrown. Some
* components handle this internally only, and therefore
* bridgeErrorHandler is not possible. In other situations we may
* improve the Camel component to hook into the 3rd party component and
* make this possible for future releases. By default the consumer will
* use the org.apache.camel.spi.ExceptionHandler to deal with
* exceptions, that will be logged at WARN or ERROR level and ignored.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer (advanced)
*
* @param bridgeErrorHandler the value to set
* @return the dsl builder
*/
default AdvancedQuartzEndpointBuilder bridgeErrorHandler(boolean bridgeErrorHandler) {
doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
return this;
}
/**
* Allows for bridging the consumer to the Camel routing Error Handler,
* which mean any exceptions (if possible) occurred while the Camel
* consumer is trying to pickup incoming messages, or the likes, will
* now be processed as a message and handled by the routing Error
* Handler. Important: This is only possible if the 3rd party component
* allows Camel to be alerted if an exception was thrown. Some
* components handle this internally only, and therefore
* bridgeErrorHandler is not possible. In other situations we may
* improve the Camel component to hook into the 3rd party component and
* make this possible for future releases. By default the consumer will
* use the org.apache.camel.spi.ExceptionHandler to deal with
* exceptions, that will be logged at WARN or ERROR level and ignored.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: consumer (advanced)
*
* @param bridgeErrorHandler the value to set
* @return the dsl builder
*/
default AdvancedQuartzEndpointBuilder bridgeErrorHandler(String bridgeErrorHandler) {
doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
return this;
}
/**
* To let the consumer use a custom ExceptionHandler. Notice if the
* option bridgeErrorHandler is enabled then this option is not in use.
* By default the consumer will deal with exceptions, that will be
* logged at WARN or ERROR level and ignored.
*
* The option is a: <code>org.apache.camel.spi.ExceptionHandler</code>
* type.
*
* Group: consumer (advanced)
*
* @param exceptionHandler the value to set
* @return the dsl builder
*/
default AdvancedQuartzEndpointBuilder exceptionHandler(org.apache.camel.spi.ExceptionHandler exceptionHandler) {
doSetProperty("exceptionHandler", exceptionHandler);
return this;
}
/**
* To let the consumer use a custom ExceptionHandler. Notice if the
* option bridgeErrorHandler is enabled then this option is not in use.
* By default the consumer will deal with exceptions, that will be
* logged at WARN or ERROR level and ignored.
*
* The option will be converted to a
* <code>org.apache.camel.spi.ExceptionHandler</code> type.
*
* Group: consumer (advanced)
*
* @param exceptionHandler the value to set
* @return the dsl builder
*/
default AdvancedQuartzEndpointBuilder exceptionHandler(String exceptionHandler) {
doSetProperty("exceptionHandler", exceptionHandler);
return this;
}
/**
* Sets the exchange pattern when the consumer creates an exchange.
*
* The option is a: <code>org.apache.camel.ExchangePattern</code> type.
*
* Group: consumer (advanced)
*
* @param exchangePattern the value to set
* @return the dsl builder
*/
default AdvancedQuartzEndpointBuilder exchangePattern(org.apache.camel.ExchangePattern exchangePattern) {
doSetProperty("exchangePattern", exchangePattern);
return this;
}
/**
* Sets the exchange pattern when the consumer creates an exchange.
*
* The option will be converted to a
* <code>org.apache.camel.ExchangePattern</code> type.
*
* Group: consumer (advanced)
*
* @param exchangePattern the value to set
* @return the dsl builder
*/
default AdvancedQuartzEndpointBuilder exchangePattern(String exchangePattern) {
doSetProperty("exchangePattern", exchangePattern);
return this;
}
/**
* Specifies a custom calendar to avoid specific range of date.
*
* The option is a: <code>org.quartz.Calendar</code> type.
*
* Group: advanced
*
* @param customCalendar the value to set
* @return the dsl builder
*/
default AdvancedQuartzEndpointBuilder customCalendar(org.quartz.Calendar customCalendar) {
doSetProperty("customCalendar", customCalendar);
return this;
}
/**
* Specifies a custom calendar to avoid specific range of date.
*
* The option will be converted to a <code>org.quartz.Calendar</code>
* type.
*
* Group: advanced
*
* @param customCalendar the value to set
* @return the dsl builder
*/
default AdvancedQuartzEndpointBuilder customCalendar(String customCalendar) {
doSetProperty("customCalendar", customCalendar);
return this;
}
/**
* Whether to ignore quartz cannot schedule a trigger because the
* trigger will never fire in the future. This can happen when using a
* cron trigger that are configured to only run in the past. By default,
* Quartz will fail to schedule the trigger and therefore fail to start
* the Camel route. You can set this to true which then logs a WARN and
* then ignore the problem, meaning that the route will never fire in
* the future.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: advanced
*
* @param ignoreExpiredNextFireTime the value to set
* @return the dsl builder
*/
default AdvancedQuartzEndpointBuilder ignoreExpiredNextFireTime(boolean ignoreExpiredNextFireTime) {
doSetProperty("ignoreExpiredNextFireTime", ignoreExpiredNextFireTime);
return this;
}
/**
* Whether to ignore quartz cannot schedule a trigger because the
* trigger will never fire in the future. This can happen when using a
* cron trigger that are configured to only run in the past. By default,
* Quartz will fail to schedule the trigger and therefore fail to start
* the Camel route. You can set this to true which then logs a WARN and
* then ignore the problem, meaning that the route will never fire in
* the future.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: advanced
*
* @param ignoreExpiredNextFireTime the value to set
* @return the dsl builder
*/
default AdvancedQuartzEndpointBuilder ignoreExpiredNextFireTime(String ignoreExpiredNextFireTime) {
doSetProperty("ignoreExpiredNextFireTime", ignoreExpiredNextFireTime);
return this;
}
/**
* To configure additional options on the job. This is a multi-value
* option with prefix: job.
*
* The option is a: <code>java.util.Map<java.lang.String,
* java.lang.Object></code> type.
* The option is multivalued, and you can use the jobParameters(String,
* Object) method to add a value (call the method multiple times to set
* more values).
*
* Group: advanced
*
* @param key the option key
* @param value the option value
* @return the dsl builder
*/
default AdvancedQuartzEndpointBuilder jobParameters(String key, Object value) {
doSetMultiValueProperty("jobParameters", "job." + key, value);
return this;
}
/**
* To configure additional options on the job. This is a multi-value
* option with prefix: job.
*
* The option is a: <code>java.util.Map<java.lang.String,
* java.lang.Object></code> type.
* The option is multivalued, and you can use the jobParameters(String,
* Object) method to add a value (call the method multiple times to set
* more values).
*
* Group: advanced
*
* @param values the values
* @return the dsl builder
*/
default AdvancedQuartzEndpointBuilder jobParameters(Map values) {
doSetMultiValueProperties("jobParameters", "job.", values);
return this;
}
/**
* Whether the job name should be prefixed with endpoint id.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: advanced
*
* @param prefixJobNameWithEndpointId the value to set
* @return the dsl builder
*/
default AdvancedQuartzEndpointBuilder prefixJobNameWithEndpointId(boolean prefixJobNameWithEndpointId) {
doSetProperty("prefixJobNameWithEndpointId", prefixJobNameWithEndpointId);
return this;
}
/**
* Whether the job name should be prefixed with endpoint id.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: advanced
*
* @param prefixJobNameWithEndpointId the value to set
* @return the dsl builder
*/
default AdvancedQuartzEndpointBuilder prefixJobNameWithEndpointId(String prefixJobNameWithEndpointId) {
doSetProperty("prefixJobNameWithEndpointId", prefixJobNameWithEndpointId);
return this;
}
/**
* To configure additional options on the trigger. The parameter
* timeZone is supported if the cron option is present. Otherwise the
* parameters repeatInterval and repeatCount are supported. Note: When
* using repeatInterval values of 1000 or less, the first few events
* after starting the camel context may be fired more rapidly than
* expected. . This is a multi-value option with prefix: trigger.
*
* The option is a: <code>java.util.Map<java.lang.String,
* java.lang.Object></code> type.
* The option is multivalued, and you can use the
* triggerParameters(String, Object) method to add a value (call the
* method multiple times to set more values).
*
* Group: advanced
*
* @param key the option key
* @param value the option value
* @return the dsl builder
*/
default AdvancedQuartzEndpointBuilder triggerParameters(String key, Object value) {
doSetMultiValueProperty("triggerParameters", "trigger." + key, value);
return this;
}
/**
* To configure additional options on the trigger. The parameter
* timeZone is supported if the cron option is present. Otherwise the
* parameters repeatInterval and repeatCount are supported. Note: When
* using repeatInterval values of 1000 or less, the first few events
* after starting the camel context may be fired more rapidly than
* expected. . This is a multi-value option with prefix: trigger.
*
* The option is a: <code>java.util.Map<java.lang.String,
* java.lang.Object></code> type.
* The option is multivalued, and you can use the
* triggerParameters(String, Object) method to add a value (call the
* method multiple times to set more values).
*
* Group: advanced
*
* @param values the values
* @return the dsl builder
*/
default AdvancedQuartzEndpointBuilder triggerParameters(Map values) {
doSetMultiValueProperties("triggerParameters", "trigger.", values);
return this;
}
/**
* If it is true, JobDataMap uses the CamelContext name directly to
* reference the CamelContext, if it is false, JobDataMap uses use the
* CamelContext management name which could be changed during the deploy
* time.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: advanced
*
* @param usingFixedCamelContextName the value to set
* @return the dsl builder
*/
default AdvancedQuartzEndpointBuilder usingFixedCamelContextName(boolean usingFixedCamelContextName) {
doSetProperty("usingFixedCamelContextName", usingFixedCamelContextName);
return this;
}
/**
* If it is true, JobDataMap uses the CamelContext name directly to
* reference the CamelContext, if it is false, JobDataMap uses use the
* CamelContext management name which could be changed during the deploy
* time.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: advanced
*
* @param usingFixedCamelContextName the value to set
* @return the dsl builder
*/
default AdvancedQuartzEndpointBuilder usingFixedCamelContextName(String usingFixedCamelContextName) {
doSetProperty("usingFixedCamelContextName", usingFixedCamelContextName);
return this;
}
}
public
|
AdvancedQuartzEndpointBuilder
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.