language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
google__dagger
|
javatests/dagger/internal/codegen/AssistedFactoryTest.java
|
{
"start": 1045,
"end": 1737
}
|
class ____ {
@Parameters(name = "{0}")
public static ImmutableCollection<Object[]> parameters() {
return CompilerMode.TEST_PARAMETERS;
}
@Rule public GoldenFileRule goldenFileRule = new GoldenFileRule();
private final CompilerMode compilerMode;
public AssistedFactoryTest(CompilerMode compilerMode) {
this.compilerMode = compilerMode;
}
@Test
public void testAssistedFactory() throws Exception {
Source foo =
CompilerTests.javaSource(
"test.Foo",
"package test;",
"",
"import dagger.assisted.Assisted;",
"import dagger.assisted.AssistedInject;",
"",
"
|
AssistedFactoryTest
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/UpdateDataFrameAnalyticsActionRequestTests.java
|
{
"start": 1168,
"end": 3377
}
|
class ____ extends AbstractXContentSerializingTestCase<Request> {
@Override
protected Request createTestInstance() {
return new Request(DataFrameAnalyticsConfigUpdateTests.randomUpdate(randomValidId()));
}
@Override
protected Request mutateInstance(Request instance) {
return null;// TODO implement https://github.com/elastic/elasticsearch/issues/25929
}
@Override
protected Request doParseInstance(XContentParser parser) {
return Request.parseRequest(null, parser);
}
@Override
protected Writeable.Reader<Request> instanceReader() {
return Request::new;
}
public void testParseRequest() throws IOException {
try (XContentParser parser = createParser(JsonXContent.jsonXContent, "{}")) {
Request request = Request.parseRequest("id-from-param", parser);
assertThat(request.getUpdate(), is(equalTo(new DataFrameAnalyticsConfigUpdate.Builder("id-from-param").build())));
}
try (XContentParser parser = createParser(JsonXContent.jsonXContent, "{\"id\": \"id-from-body\"}")) {
Request request = Request.parseRequest(null, parser);
assertThat(request.getUpdate(), is(equalTo(new DataFrameAnalyticsConfigUpdate.Builder("id-from-body").build())));
}
try (XContentParser parser = createParser(JsonXContent.jsonXContent, "{\"id\": \"same-id\"}")) {
Request request = Request.parseRequest("same-id", parser);
assertThat(request.getUpdate(), is(equalTo(new DataFrameAnalyticsConfigUpdate.Builder("same-id").build())));
}
try (XContentParser parser = createParser(JsonXContent.jsonXContent, "{\"id\": \"id-from-body\"}")) {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> Request.parseRequest("id-from-param", parser));
assertThat(e.getMessage(), startsWith("Inconsistent id"));
}
}
public void testDefaultTimeout() {
AcknowledgedRequest<Request> requestAcknowledgedRequest = createTestInstance();
assertThat(requestAcknowledgedRequest.ackTimeout(), is(notNullValue()));
}
}
|
UpdateDataFrameAnalyticsActionRequestTests
|
java
|
junit-team__junit5
|
jupiter-tests/src/test/java/org/junit/jupiter/engine/NestedWithSeparateInheritanceTests.java
|
{
"start": 1415,
"end": 1768
}
|
class ____ {
@BeforeAll
static void setup() {
NestedWithSeparateInheritanceTests.lifecycleInvokingClassNames = new ArrayList<>();
}
@BeforeEach
public void beforeEach() {
String invokingClass = this.getClass().getSimpleName();
requireNonNull(NestedWithSeparateInheritanceTests.lifecycleInvokingClassNames).add(invokingClass);
}
}
|
SuperClass1
|
java
|
elastic__elasticsearch
|
x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/config/OperatorTests.java
|
{
"start": 654,
"end": 4535
}
|
class ____ extends ESTestCase {
public void testFromString() {
assertEquals(Operator.fromString("gt"), Operator.GT);
assertEquals(Operator.fromString("gte"), Operator.GTE);
assertEquals(Operator.fromString("lte"), Operator.LTE);
assertEquals(Operator.fromString("lt"), Operator.LT);
assertEquals(Operator.fromString("Gt"), Operator.GT);
assertEquals(Operator.fromString("GTE"), Operator.GTE);
}
public void testToString() {
assertEquals("gt", Operator.GT.toString());
assertEquals("gte", Operator.GTE.toString());
assertEquals("lte", Operator.LTE.toString());
assertEquals("lt", Operator.LT.toString());
}
public void testTest() {
assertTrue(Operator.GT.test(1.0, 0.0));
assertFalse(Operator.GT.test(0.0, 1.0));
assertTrue(Operator.GTE.test(1.0, 0.0));
assertTrue(Operator.GTE.test(1.0, 1.0));
assertFalse(Operator.GTE.test(0.0, 1.0));
assertTrue(Operator.LT.test(0.0, 1.0));
assertFalse(Operator.LT.test(0.0, 0.0));
assertTrue(Operator.LTE.test(0.0, 1.0));
assertTrue(Operator.LTE.test(1.0, 1.0));
assertFalse(Operator.LTE.test(1.0, 0.0));
}
public void testWriteTo() throws Exception {
try (BytesStreamOutput out = new BytesStreamOutput()) {
Operator.GT.writeTo(out);
try (StreamInput in = out.bytes().streamInput()) {
assertThat(in.readVInt(), equalTo(0));
}
}
try (BytesStreamOutput out = new BytesStreamOutput()) {
Operator.GTE.writeTo(out);
try (StreamInput in = out.bytes().streamInput()) {
assertThat(in.readVInt(), equalTo(1));
}
}
try (BytesStreamOutput out = new BytesStreamOutput()) {
Operator.LT.writeTo(out);
try (StreamInput in = out.bytes().streamInput()) {
assertThat(in.readVInt(), equalTo(2));
}
}
try (BytesStreamOutput out = new BytesStreamOutput()) {
Operator.LTE.writeTo(out);
try (StreamInput in = out.bytes().streamInput()) {
assertThat(in.readVInt(), equalTo(3));
}
}
}
public void testReadFrom() throws Exception {
try (BytesStreamOutput out = new BytesStreamOutput()) {
out.writeVInt(0);
try (StreamInput in = out.bytes().streamInput()) {
assertThat(Operator.readFromStream(in), equalTo(Operator.GT));
}
}
try (BytesStreamOutput out = new BytesStreamOutput()) {
out.writeVInt(1);
try (StreamInput in = out.bytes().streamInput()) {
assertThat(Operator.readFromStream(in), equalTo(Operator.GTE));
}
}
try (BytesStreamOutput out = new BytesStreamOutput()) {
out.writeVInt(2);
try (StreamInput in = out.bytes().streamInput()) {
assertThat(Operator.readFromStream(in), equalTo(Operator.LT));
}
}
try (BytesStreamOutput out = new BytesStreamOutput()) {
out.writeVInt(3);
try (StreamInput in = out.bytes().streamInput()) {
assertThat(Operator.readFromStream(in), equalTo(Operator.LTE));
}
}
}
public void testInvalidReadFrom() throws Exception {
try (BytesStreamOutput out = new BytesStreamOutput()) {
out.writeVInt(randomIntBetween(7, Integer.MAX_VALUE));
try (StreamInput in = out.bytes().streamInput()) {
Operator.readFromStream(in);
fail("Expected IOException");
} catch (IOException e) {
assertThat(e.getMessage(), containsString("Unknown Operator ordinal ["));
}
}
}
}
|
OperatorTests
|
java
|
apache__camel
|
dsl/camel-jbang/camel-jbang-core/src/main/java/org/apache/camel/dsl/jbang/core/commands/ExportCamelMain.java
|
{
"start": 7109,
"end": 21178
}
|
class
____(srcJavaDir, srcPackageName, mainClassname);
// copy local lib JARs
copyLocalLibDependencies(deps);
// copy local lib JARs
if (groovyPrecompiled) {
copyGroovyPrecompiled(srcResourcesDir);
}
// copy agent JARs and remove as dependency
copyAgentDependencies(deps);
deps.removeIf(d -> d.startsWith("agent:"));
if ("maven".equals(buildTool)) {
createMavenPom(settings, profile,
buildDir.resolve("pom.xml"), deps, srcPackageName);
if (mavenWrapper) {
copyMavenWrapper();
}
}
copyDockerFiles(BUILD_DIR);
String appJar = Paths.get("target", ids[1] + "-" + ids[2] + ".jar").toString();
copyReadme(BUILD_DIR, appJar);
if (cleanExportDir || !exportDir.equals(".")) {
// cleaning current dir can be a bit dangerous so only clean if explicit enabled
// otherwise always clean export-dir to avoid stale data
CommandHelper.cleanExportDir(exportDir);
}
// copy to export dir and remove work dir
PathUtils.copyDirectory(buildDir, Path.of(exportDir));
PathUtils.deleteDirectory(buildDir);
return 0;
}
private void createMavenPom(Path settings, Path profile, Path pom, Set<String> deps, String packageName) throws Exception {
String[] ids = gav.split(":");
InputStream is = ExportCamelMain.class.getClassLoader().getResourceAsStream("templates/" + pomTemplateName);
String context = IOHelper.loadText(is);
IOHelper.close(is);
CamelCatalog catalog = new DefaultCamelCatalog();
if (ObjectHelper.isEmpty(camelVersion)) {
camelVersion = catalog.getLoadedVersion();
}
if (ObjectHelper.isEmpty(camelVersion)) {
camelVersion = VersionHelper.extractCamelVersion();
}
context = context.replaceAll("\\{\\{ \\.GroupId }}", ids[0]);
context = context.replaceAll("\\{\\{ \\.ArtifactId }}", ids[1]);
context = context.replaceAll("\\{\\{ \\.Version }}", ids[2]);
context = context.replaceAll("\\{\\{ \\.JavaVersion }}", javaVersion);
context = context.replaceAll("\\{\\{ \\.CamelVersion }}", camelVersion);
if (packageName != null) {
context = context.replaceAll("\\{\\{ \\.MainClassname }}", packageName + "." + mainClassname);
} else {
context = context.replaceAll("\\{\\{ \\.MainClassname }}", mainClassname);
}
context = context.replaceFirst("\\{\\{ \\.ProjectBuildOutputTimestamp }}", this.getBuildMavenProjectDate());
Properties prop = new CamelCaseOrderedProperties();
RuntimeUtil.loadProperties(prop, settings);
String repos = getMavenRepositories(settings, prop, camelVersion);
context = replaceBuildProperties(context);
if (repos == null || repos.isEmpty()) {
context = context.replaceFirst("\\{\\{ \\.MavenRepositories }}", "");
} else {
String s = mavenRepositoriesAsPomXml(repos);
context = context.replaceFirst("\\{\\{ \\.MavenRepositories }}", s);
}
List<MavenGav> gavs = new ArrayList<>();
for (String dep : deps) {
MavenGav gav = parseMavenGav(dep);
String gid = gav.getGroupId();
if ("org.apache.camel".equals(gid)) {
// uses BOM so version should not be included
gav.setVersion(null);
}
gavs.add(gav);
}
// sort artifacts
gavs.sort(mavenGavComparator());
StringBuilder sb = new StringBuilder();
for (MavenGav gav : gavs) {
sb.append(" <dependency>\n");
sb.append(" <groupId>").append(gav.getGroupId()).append("</groupId>\n");
sb.append(" <artifactId>").append(gav.getArtifactId()).append("</artifactId>\n");
if (gav.getVersion() != null) {
sb.append(" <version>").append(gav.getVersion()).append("</version>\n");
}
if (gav.getScope() != null) {
sb.append(" <scope>").append(gav.getScope()).append("</scope>\n");
}
// special for lib JARs
if ("lib".equals(gav.getPackaging())) {
sb.append(" <scope>system</scope>\n");
sb.append(" <systemPath>\\$\\{project.basedir}/lib/").append(gav.getArtifactId()).append("-")
.append(gav.getVersion()).append(".jar</systemPath>\n");
}
if ("camel-kamelets-utils".equals(gav.getArtifactId())) {
// special for camel-kamelets-utils
sb.append(" <exclusions>\n");
sb.append(" <exclusion>\n");
sb.append(" <groupId>org.apache.camel</groupId>\n");
sb.append(" <artifactId>*</artifactId>\n");
sb.append(" </exclusion>\n");
sb.append(" </exclusions>\n");
}
sb.append(" </dependency>\n");
}
context = context.replaceFirst("\\{\\{ \\.CamelDependencies }}", sb.toString());
// include docker/kubernetes with jib/jkube
context = enrichMavenPomJib(context, settings, profile);
Files.writeString(pom, context);
}
protected String enrichMavenPomJib(String context, Path settings, Path profile) throws Exception {
StringBuilder sb1 = new StringBuilder();
StringBuilder sb2 = new StringBuilder();
// is kubernetes included?
Properties prop = new CamelCaseOrderedProperties();
if (Files.exists(profile)) {
RuntimeUtil.loadProperties(prop, profile);
}
// include additional build properties
boolean jib = prop.stringPropertyNames().stream().anyMatch(s -> s.startsWith("jib."));
boolean jkube = prop.stringPropertyNames().stream().anyMatch(s -> s.startsWith("jkube."));
// jib is used for docker and kubernetes, jkube is only used for kubernetes
if (jib || jkube) {
// include all jib/jkube/label properties
String fromImage = null;
for (String key : prop.stringPropertyNames()) {
String value = prop.getProperty(key);
if ("jib.from.image".equals(key)) {
fromImage = value;
}
boolean accept = key.startsWith("jkube.") || key.startsWith("jib.") || key.startsWith("label.");
if (accept) {
sb1.append(String.format(" <%s>%s</%s>%n", key, value, key));
}
}
// from image is mandatory so use a default image if none provided
if (fromImage == null) {
fromImage = "mirror.gcr.io/library/eclipse-temurin:" + javaVersion + "-jre";
sb1.append(String.format(" <%s>%s</%s>%n", "jib.from.image", fromImage, "jib.from.image"));
}
InputStream is = ExportCamelMain.class.getClassLoader().getResourceAsStream("templates/main-docker-pom.tmpl");
String context2 = IOHelper.loadText(is);
IOHelper.close(is);
context2 = context2.replaceFirst("\\{\\{ \\.JibMavenPluginVersion }}",
jibMavenPluginVersion(settings, prop));
// image from/to auth
String auth = "";
if (prop.stringPropertyNames().stream().anyMatch(s -> s.startsWith("jib.from.auth."))) {
is = ExportCamelMain.class.getClassLoader().getResourceAsStream("templates/main-docker-from-auth-pom.tmpl");
auth = IOHelper.loadText(is);
IOHelper.close(is);
}
context2 = context2.replace("{{ .JibFromImageAuth }}", auth);
auth = "";
if (prop.stringPropertyNames().stream().anyMatch(s -> s.startsWith("jib.to.auth."))) {
is = ExportCamelMain.class.getClassLoader().getResourceAsStream("templates/main-docker-to-auth-pom.tmpl");
auth = IOHelper.loadText(is);
IOHelper.close(is);
}
context2 = context2.replace("{{ .JibToImageAuth }}", auth);
// http port setting
int port = httpServerPort(settings);
if (port == -1) {
port = 8080;
}
context2 = context2.replaceFirst("\\{\\{ \\.Port }}", String.valueOf(port));
sb2.append(context2);
// jkube is only used for kubernetes
if (jkube) {
is = ExportCamelMain.class.getClassLoader().getResourceAsStream("templates/main-jkube-pom.tmpl");
String context3 = IOHelper.loadText(is);
IOHelper.close(is);
context3 = context3.replaceFirst("\\{\\{ \\.JkubeMavenPluginVersion }}",
jkubeMavenPluginVersion(settings, prop));
sb2.append(context3);
}
}
// remove empty lines
String s1 = sb1.toString().replaceAll("(\\r?\\n){2,}", "\n");
String s2 = sb2.toString().replaceAll("(\\r?\\n){2,}", "\n");
context = context.replace("{{ .CamelKubernetesProperties }}", s1);
context = context.replace("{{ .CamelKubernetesPlugins }}", s2);
return context;
}
@Override
protected Set<String> resolveDependencies(Path settings, Path profile) throws Exception {
Set<String> answer = super.resolveDependencies(settings, profile);
if (profile != null && Files.exists(profile)) {
Properties prop = new CamelCaseOrderedProperties();
RuntimeUtil.loadProperties(prop, profile);
// if metrics is defined then include camel-micrometer-prometheus for camel-main runtime
if (prop.getProperty("camel.metrics.enabled") != null
|| prop.getProperty("camel.management.metricsEnabled") != null
|| prop.getProperty("camel.server.metricsEnabled") != null) {
answer.add("mvn:org.apache.camel:camel-micrometer-prometheus");
}
}
// remove out of the box dependencies
answer.removeIf(s -> s.contains("camel-core"));
answer.removeIf(s -> s.contains("camel-main"));
answer.removeIf(s -> s.contains("camel-health"));
boolean main = answer.stream().anyMatch(s -> s.contains("mvn:org.apache.camel:camel-platform-http-main"));
if (hasOpenapi(answer) && !main) {
// include http server if using openapi
answer.add("mvn:org.apache.camel:camel-platform-http-main");
}
// if platform-http is included then we need to switch to use camel-platform-http-main as implementation
if (!main && answer.stream().anyMatch(s -> s.contains("camel-platform-http"))) {
answer.add("mvn:org.apache.camel:camel-platform-http-main");
main = true;
}
if (main) {
answer.removeIf(s -> s.contains("org.apache.camel:camel-platform-http:"));
answer.removeIf(s -> s.contains("org.apache.camel:camel-platform-http-vertx:"));
}
return answer;
}
private void createMainClassSource(Path srcJavaDir, String packageName, String mainClassname) throws Exception {
InputStream is = ExportCamelMain.class.getClassLoader().getResourceAsStream("templates/main.tmpl");
String context = IOHelper.loadText(is);
IOHelper.close(is);
if (packageName != null) {
context = context.replaceFirst("\\{\\{ \\.PackageName }}", "package " + packageName + ";");
} else {
context = context.replaceFirst("\\{\\{ \\.PackageName }}", "");
}
context = context.replaceAll("\\{\\{ \\.MainClassname }}", mainClassname);
Path outputFile = srcJavaDir.resolve(mainClassname + ".java");
Files.writeString(outputFile, context);
}
@Override
protected void copySourceFiles(
Path settings, Path profile, Path srcJavaDirRoot, Path srcJavaDir, Path srcResourcesDir, Path srcCamelResourcesDir,
Path srcKameletsResourcesDir, String packageName)
throws Exception {
super.copySourceFiles(settings, profile, srcJavaDirRoot, srcJavaDir, srcResourcesDir, srcCamelResourcesDir,
srcKameletsResourcesDir, packageName);
// log4j configuration
InputStream is = ExportCamelMain.class.getResourceAsStream("/log4j2-main.properties");
ExportHelper.safeCopy(is, srcResourcesDir.resolve("log4j2.properties"));
is = ExportCamelMain.class.getResourceAsStream("/log4j2.component.properties");
ExportHelper.safeCopy(is, srcResourcesDir.resolve("log4j2.component.properties"));
// assembly for runner jar
is = ExportCamelMain.class.getResourceAsStream("/assembly/runner.xml");
ExportHelper.safeCopy(is, srcResourcesDir.resolve("assembly/runner.xml"));
}
protected void copyGroovyPrecompiled(Path srcResourcesDir) throws Exception {
// are there any pre-compiled groovy code
File gc = new File(GROOVY_COMPILE_DIR);
if (gc.exists() && gc.isDirectory()) {
File[] files = gc.listFiles();
if (files != null) {
Path targetDir = srcResourcesDir.resolve("camel-groovy-compiled");
for (File file : files) {
// skip anonymous scripts
if (file.getName().endsWith(".class") && !file.getName().startsWith("Script_")) {
Files.createDirectories(targetDir);
Path out = targetDir.resolve(file.getName());
ExportHelper.safeCopy(file.toPath(), out, true);
}
}
}
}
}
}
|
createMainClassSource
|
java
|
spring-projects__spring-framework
|
spring-core/src/test/java/org/springframework/core/annotation/MergedAnnotationsTests.java
|
{
"start": 108073,
"end": 108157
}
|
interface ____ {
}
@MetaMetaAliasedTransactional
static
|
MetaMetaAliasedTransactional
|
java
|
alibaba__nacos
|
api/src/main/java/com/alibaba/nacos/api/naming/pojo/Cluster.java
|
{
"start": 908,
"end": 1122
}
|
class ____ be serialized to json, and there are some variables and method can't use Camel naming rule for
* compatibility
*
* @author nkorange
*/
@SuppressWarnings("checkstyle:abbreviationaswordinname")
public
|
will
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/integration/MlRestTestStateCleaner.java
|
{
"start": 815,
"end": 2664
}
|
class ____ {
private final Logger logger;
private final RestClient adminClient;
public MlRestTestStateCleaner(Logger logger, RestClient adminClient) {
this.logger = logger;
this.adminClient = adminClient;
}
public void resetFeatures() throws IOException {
deletePipelinesWithInferenceProcessors();
// This resets all features, not just ML, but they should have been getting reset between tests anyway so it shouldn't matter
performPostFeaturesReset(adminClient);
}
@SuppressWarnings("unchecked")
private void deletePipelinesWithInferenceProcessors() throws IOException {
final Response pipelinesResponse = adminClient.performRequest(new Request("GET", "/_ingest/pipeline"));
final Map<String, Object> pipelines = ESRestTestCase.entityAsMap(pipelinesResponse);
var pipelinesWithInferenceProcessors = new HashSet<String>();
for (var entry : pipelines.entrySet()) {
var pipelineDef = (Map<String, Object>) entry.getValue(); // each top level object is a separate pipeline
var processors = (List<Map<String, Object>>) pipelineDef.get("processors");
for (var processor : processors) {
assertThat(processor.entrySet(), hasSize(1));
if ("inference".equals(processor.keySet().iterator().next())) {
pipelinesWithInferenceProcessors.add(entry.getKey());
}
}
}
for (String pipelineId : pipelinesWithInferenceProcessors) {
try {
adminClient.performRequest(new Request("DELETE", "/_ingest/pipeline/" + pipelineId));
} catch (Exception ex) {
logger.warn(() -> "failed to delete pipeline [" + pipelineId + "]", ex);
}
}
}
}
|
MlRestTestStateCleaner
|
java
|
spring-projects__spring-framework
|
spring-aop/src/main/java/org/springframework/aop/target/AbstractPoolingTargetSource.java
|
{
"start": 1335,
"end": 2021
}
|
class ____ independent of concrete pooling technology;
* see the subclass {@link CommonsPool2TargetSource} for a concrete example.
*
* <p>Subclasses must implement the {@link #getTarget} and
* {@link #releaseTarget} methods based on their chosen object pool.
* The {@link #newPrototypeInstance()} method inherited from
* {@link AbstractPrototypeBasedTargetSource} can be used to create objects
* in order to put them into the pool.
*
* <p>Subclasses must also implement some monitoring methods from the
* {@link PoolingConfig} interface. The {@link #getPoolingConfigMixin()} method
* makes these stats available on proxied objects through an IntroductionAdvisor.
*
* <p>This
|
is
|
java
|
spring-projects__spring-framework
|
spring-test/src/test/java/org/springframework/test/annotation/ProfileValueUtilsTests.java
|
{
"start": 10342,
"end": 10568
}
|
class ____ {
public void nonAnnotatedMethod() {
}
@MetaEnabled
public void enabledAnnotatedMethod() {
}
@MetaDisabled
public void disabledAnnotatedMethod() {
}
}
public static
|
MetaDisabledAnnotatedSingleValue
|
java
|
google__dagger
|
dagger-compiler/main/java/dagger/internal/codegen/writing/ComponentImplementation.java
|
{
"start": 8289,
"end": 8369
}
|
class ____ a component provision. */
COMPONENT_PROVISION_FACTORY,
/** A
|
for
|
java
|
spring-projects__spring-boot
|
module/spring-boot-elasticsearch/src/main/java/org/springframework/boot/elasticsearch/autoconfigure/ElasticsearchClientConfigurations.java
|
{
"start": 2035,
"end": 2256
}
|
class ____ {
@Import({ JacksonJsonpMapperConfiguration.class, Jackson2JsonpMapperConfiguration.class,
JsonbJsonpMapperConfiguration.class, SimpleJsonpMapperConfiguration.class })
static
|
ElasticsearchClientConfigurations
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/jpa/metagen/mappedsuperclass/idclass/ProductAttribute.java
|
{
"start": 389,
"end": 895
}
|
class ____ extends AbstractAttribute implements Serializable {
private String owner;
public ProductAttribute(String key, String value, String product) {
this.key = key;
this.value = value;
this.owner = product;
}
public ProductAttribute() {
super();
}
@Id
@Column(name = "owner")
public String getOwner() {
return owner;
}
public void setOwner(String owner) {
this.owner = owner;
}
@Id
@Column(name = "attribute_key")
public String getKey() {
return key;
}
}
|
ProductAttribute
|
java
|
apache__camel
|
components/camel-sql/src/test/java/org/apache/camel/component/sql/SqlProducerAlwaysPopulateStatementFalseTest.java
|
{
"start": 1636,
"end": 3869
}
|
class ____ extends CamelTestSupport {
private EmbeddedDatabase db;
@BindToRegistry("myStrategy")
private SqlPrepareStatementStrategy strategy;
private volatile boolean invoked;
@Override
public void doPreSetup() throws Exception {
db = new EmbeddedDatabaseBuilder()
.setName(getClass().getSimpleName())
.setType(EmbeddedDatabaseType.H2)
.addScript("sql/createAndPopulateDatabase.sql").build();
strategy = new DefaultSqlPrepareStatementStrategy() {
@Override
public void populateStatement(PreparedStatement ps, Iterator<?> iterator, int expectedParams) throws SQLException {
invoked = true;
super.populateStatement(ps, iterator, expectedParams);
}
};
}
@Override
public void doPostTearDown() throws Exception {
if (db != null) {
db.shutdown();
}
}
@Test
public void testAlwaysPopulateFalse() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMessageCount(1);
template.sendBody("direct:start", null);
mock.assertIsSatisfied();
List<?> received = assertIsInstanceOf(List.class, mock.getReceivedExchanges().get(0).getIn().getBody());
assertEquals(2, received.size());
Map<?, ?> row = assertIsInstanceOf(Map.class, received.get(0));
assertEquals("Camel", row.get("PROJECT"));
row = assertIsInstanceOf(Map.class, received.get(1));
assertEquals("AMQ", row.get("PROJECT"));
assertFalse(invoked, "Should not populate");
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
getContext().getComponent("sql", SqlComponent.class).setDataSource(db);
from("direct:start")
.to("sql:select * from projects where license = 'ASF' order by id?alwaysPopulateStatement=false&prepareStatementStrategy=#myStrategy&initialDelay=0&delay=50")
.to("mock:result");
}
};
}
}
|
SqlProducerAlwaysPopulateStatementFalseTest
|
java
|
apache__flink
|
flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/logical/StructuredType.java
|
{
"start": 8742,
"end": 9392
}
|
enum ____ {
EQUALS(true, false),
FULL(true, true),
NONE(false, false);
private final boolean equality;
private final boolean comparison;
StructuredComparison(boolean equality, boolean comparison) {
this.equality = equality;
this.comparison = comparison;
}
public boolean isEquality() {
return equality;
}
public boolean isComparison() {
return comparison;
}
}
/** A builder for a {@link StructuredType}. Intended for future extensibility. */
@PublicEvolving
public static final
|
StructuredComparison
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/ser/CustomSerializersTest.java
|
{
"start": 3603,
"end": 4019
}
|
class ____ extends StdScalarSerializer<String>
{
public UCStringSerializer() { super(String.class); }
@Override
public void serialize(String value, JsonGenerator gen,
SerializationContext provider) {
gen.writeString(value.toUpperCase());
}
}
// IMPORTANT: must associate serializer via property annotations
protected static
|
UCStringSerializer
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/MlInfoAction.java
|
{
"start": 1462,
"end": 2614
}
|
class ____ extends ActionResponse implements ToXContentObject {
private final Map<String, Object> info;
public Response(Map<String, Object> info) {
this.info = info;
}
public Response() {
this.info = Collections.emptyMap();
}
public Map<String, Object> getInfo() {
return info;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeGenericMap(info);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.map(info);
return builder;
}
@Override
public int hashCode() {
return Objects.hash(info);
}
@Override
public boolean equals(Object obj) {
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
Response other = (Response) obj;
return Objects.equals(info, other.info);
}
}
}
|
Response
|
java
|
reactor__reactor-core
|
reactor-core/src/test/java/reactor/util/function/TupleTests.java
|
{
"start": 913,
"end": 13637
}
|
class ____ {
@Test
public void tupleProvidesTypeSafeMethods() {
Tuple3<String, Long, Integer> t3 = Tuples.of("string", 1L, 10);
assertThat(t3.getT1()).as("first value is a string").isInstanceOf(String.class);
assertThat(t3.getT2()).as("second value is a long").isInstanceOf(Long.class);
assertThat(t3.getT3()).as("third value is an int").isInstanceOf(Integer.class);
}
@Test
public void tupleProvidesTupleTypeHierarchy() {
Tuple3<String, Long, Integer> t3 = Tuples.of("string", 1L, 10);
assertThat(t3).as("Tuple3 is also a Tuple2").isInstanceOf(Tuple2.class);
}
@Test
public void tupleEquals() {
Tuple3<String, Long, Integer> t3a = Tuples.of("string", 1L, 10);
Tuple3<String, Long, Integer> t3b = Tuples.of("string", 1L, 10);
assertThat(t3a).as("Tuples of same length and values are equal.").isEqualTo(t3b);
}
@Test
public void tupleNotEquals() {
Tuple2<String, String> t2a = Tuples.of("ALPHA", "BRAVO");
Tuple2<String, String> t2b = Tuples.of("ALPHA", "CHARLIE");
assertThat(t2a).as("Tuples of same length and values are not equal.").isNotEqualTo(t2b);
}
@Test
public void tuplesOfDifferentLengthAreNotEqual() {
Tuple3<String, Long, Integer> t3 = Tuples.of("string", 1L, 10);
Tuple2<String, Long> t2 = Tuples.of("string", 1L);
assertThat(t3).as("Tuples of different length are not equal.").isNotEqualTo(t2);
assertThat(t2).as("Tuples of different length are not equal.").isNotEqualTo(t3);
}
@Test
public void fromArrayRejects0() {
assertThatExceptionOfType(IllegalArgumentException.class)
.isThrownBy(() -> Tuples.fromArray(new Object[0]))
.withMessageStartingWith("null or too small array, need between 2 and 8 values");
}
@Test
public void fromArrayRejects1() {
assertThatExceptionOfType(IllegalArgumentException.class)
.isThrownBy(() -> Tuples.fromArray(new Object[1]))
.withMessageStartingWith("null or too small array, need between 2 and 8 values");
}
@Test
public void fromArrayRejectsNull() {
assertThatExceptionOfType(IllegalArgumentException.class)
.isThrownBy(() -> Tuples.fromArray(null))
.withMessageStartingWith("null or too small array, need between 2 and 8 values");
}
@Test
@SuppressWarnings("unchecked")
public void tuple2CreatedFromArray2() {
Integer[] source = new Integer[] { 1, 2 };
Tuple2 expected = Tuples.of(1, 2);
Tuple2 actual = Tuples.fromArray(source);
assertThat(actual)
.isExactlyInstanceOf(Tuple2.class)
.isEqualTo(expected);
}
@Test
@SuppressWarnings("unchecked")
public void tuple3CreatedFromArray3() {
Integer[] source = new Integer[]{1, 2, 3};
Tuple2 expected = Tuples.of(1, 2, 3);
Tuple2 actual = Tuples.fromArray(source);
assertThat(actual).isExactlyInstanceOf(Tuple3.class)
.isEqualTo(expected);
}
@Test
@SuppressWarnings("unchecked")
public void tuple4CreatedFromArray4() {
Integer[] source = new Integer[] { 1, 2, 3, 4 };
Tuple2 expected = Tuples.of(1, 2, 3, 4);
Tuple2 actual = Tuples.fromArray(source);
assertThat(actual)
.isExactlyInstanceOf(Tuple4.class)
.isEqualTo(expected);
}
@Test
@SuppressWarnings("unchecked")
public void tuple5CreatedFromArray5() {
Integer[] source = new Integer[] { 1, 2, 3, 4, 5 };
Tuple2 expected = Tuples.of(1, 2, 3, 4, 5);
Tuple2 actual = Tuples.fromArray(source);
assertThat(actual)
.isExactlyInstanceOf(Tuple5.class)
.isEqualTo(expected);
}
@Test
@SuppressWarnings("unchecked")
public void tuple6CreatedFromArray6() {
Integer[] source = new Integer[] { 1, 2, 3, 4, 5, 6 };
Tuple2 expected = Tuples.of(1, 2, 3, 4, 5, 6);
Tuple2 actual = Tuples.fromArray(source);
assertThat(actual)
.isExactlyInstanceOf(Tuple6.class)
.isEqualTo(expected);
}
@Test
@SuppressWarnings("unchecked")
public void tuple7CreatedFromArray7() {
Integer[] source = new Integer[] { 1, 2, 3, 4, 5, 6, 7 };
Tuple2 expected = Tuples.of(1, 2, 3, 4, 5, 6, 7);
Tuple2 actual = Tuples.fromArray(source);
assertThat(actual)
.isExactlyInstanceOf(Tuple7.class)
.isEqualTo(expected);
}
@Test
@SuppressWarnings("unchecked")
public void tuple8CreatedFromArray8() {
Integer[] source = new Integer[] { 1, 2, 3, 4, 5, 6, 7, 8 };
Tuple2 expected = Tuples.of(1, 2, 3, 4, 5, 6, 7, 8);
Tuple2 actual = Tuples.fromArray(source);
assertThat(actual)
.isExactlyInstanceOf(Tuple8.class)
.isEqualTo(expected);
}
@Test
@SuppressWarnings("unchecked")
public void fromArrayRejects9() {
Integer[] source = new Integer[] { 1, 2, 3, 4, 5, 6, 7, 8, 9 };
assertThatExceptionOfType(IllegalArgumentException.class)
.isThrownBy(() -> Tuples.fromArray(source))
.withMessage("too many arguments (9), need between 2 and 8 values");
}
@Test
@SuppressWarnings("unchecked")
public void fnAny() {
Integer[] source = new Integer[] { 1, 2, 3, 4, 5, 6, 7, 8 };
Tuple2<Object, Object> tuple = Tuples.fnAny().apply(source);
assertThat(tuple.getT1()).isEqualTo(1);
assertThat(tuple.getT2()).isEqualTo(2);
assertThat(tuple)
.isInstanceOf(Tuple8.class)
.containsExactly(1, 2, 3, 4, 5, 6, 7, 8);
}
@Test
public void fnAnyDelegate() {
Integer[] source = new Integer[] { 1, 2, 3, 4, 5, 6, 7, 8 };
Function<Tuple2, Tuple2<Object, Object>> invert = t2 -> new Tuple2<>(t2.getT2(), t2.getT1());
Tuple2<Object, Object> tuple = Tuples.fnAny(invert).apply(source);
assertThat(tuple.getT1()).isEqualTo(2);
assertThat(tuple.getT2()).isEqualTo(1);
assertThat(tuple)
.isExactlyInstanceOf(Tuple2.class)
.containsExactly(2, 1);
}
@Test
public void fn2() {
Integer[] source = new Integer[] { 1, 2, 3, 4, 5, 6, 7, 8 };
Tuple2<Object, Object> tuple = Tuples.fn2().apply(source);
assertThat(tuple.getT1()).isEqualTo(1);
assertThat(tuple.getT2()).isEqualTo(2);
assertThat(tuple)
.isInstanceOf(Tuple8.class)
.containsExactly(1, 2, 3, 4, 5, 6, 7, 8);
}
@Test
public void fn3() {
Integer[] source = new Integer[] { 1, 2, 3, 4, 5, 6, 7, 8 };
Tuple3<Object, Object, Object> tuple = Tuples.fn3().apply(source);
assertThat(tuple.getT1()).isEqualTo(1);
assertThat(tuple.getT2()).isEqualTo(2);
assertThat(tuple.getT3()).isEqualTo(3);
assertThat(tuple)
.isInstanceOf(Tuple8.class)
.containsExactly(1, 2, 3, 4, 5, 6, 7, 8);
}
@Test
public void fn3Delegate() {
Integer[] source = new Integer[] { 1, 2, 3, 4, 5, 6, 7, 8 };
Function<Tuple3<Integer, Integer, Integer>, Tuple3> invert = t3 -> new Tuple3<>(t3.getT3(), t3.getT2(), t3.getT1());
Tuple3 tuple = Tuples.fn3(invert).apply(source);
assertThat(tuple.getT1()).isEqualTo(3);
assertThat(tuple.getT2()).isEqualTo(2);
assertThat(tuple.getT3()).isEqualTo(1);
assertThat((Object) tuple).isExactlyInstanceOf(Tuple3.class);
}
@Test
public void fn4() {
Integer[] source = new Integer[] { 1, 2, 3, 4, 5, 6, 7, 8 };
Tuple4<Object, Object, Object, Object> tuple = Tuples.fn4().apply(source);
assertThat(tuple.getT1()).isEqualTo(1);
assertThat(tuple.getT2()).isEqualTo(2);
assertThat(tuple.getT3()).isEqualTo(3);
assertThat(tuple.getT4()).isEqualTo(4);
assertThat(tuple)
.isInstanceOf(Tuple8.class)
.containsExactly(1, 2, 3, 4, 5, 6, 7, 8);
}
@Test
public void fn4Delegate() {
Integer[] source = new Integer[] { 1, 2, 3, 4, 5, 6, 7, 8 };
Function<Tuple4<Integer, Integer, Integer, Integer>, Tuple4> invert =
t4 -> new Tuple4<>(t4.getT4(), t4.getT3(), t4.getT2(), t4.getT1());
Tuple4 tuple = Tuples.fn4(invert).apply(source);
assertThat(tuple.getT1()).isEqualTo(4);
assertThat(tuple.getT2()).isEqualTo(3);
assertThat(tuple.getT3()).isEqualTo(2);
assertThat(tuple.getT4()).isEqualTo(1);
assertThat((Object) tuple).isExactlyInstanceOf(Tuple4.class);
}
@Test
public void fn5() {
Integer[] source = new Integer[] { 1, 2, 3, 4, 5, 6, 7, 8 };
Tuple5<Object, Object, Object, Object, Object> tuple = Tuples.fn5().apply(source);
assertThat(tuple.getT1()).isEqualTo(1);
assertThat(tuple.getT2()).isEqualTo(2);
assertThat(tuple.getT3()).isEqualTo(3);
assertThat(tuple.getT4()).isEqualTo(4);
assertThat(tuple.getT5()).isEqualTo(5);
assertThat(tuple)
.isInstanceOf(Tuple8.class)
.containsExactly(1, 2, 3, 4, 5, 6, 7, 8);
}
@Test
public void fn5Delegate() {
Integer[] source = new Integer[] { 1, 2, 3, 4, 5, 6, 7, 8 };
Function<Tuple5<Integer, Integer, Integer, Integer, Integer>, Tuple5> invert =
t5 -> new Tuple5<>(t5.getT5(), t5.getT4(), t5.getT3(), t5.getT2(), t5.getT1());
Tuple5 tuple = Tuples.fn5(invert).apply(source);
assertThat(tuple.getT1()).isEqualTo(5);
assertThat(tuple.getT2()).isEqualTo(4);
assertThat(tuple.getT3()).isEqualTo(3);
assertThat(tuple.getT4()).isEqualTo(2);
assertThat(tuple.getT5()).isEqualTo(1);
assertThat((Object) tuple).isExactlyInstanceOf(Tuple5.class);
}
@Test
public void fn6() {
Integer[] source = new Integer[] { 1, 2, 3, 4, 5, 6, 7, 8 };
Tuple6<Object, Object, Object, Object, Object, Object> tuple = Tuples.fn6().apply(source);
assertThat(tuple.getT1()).isEqualTo(1);
assertThat(tuple.getT2()).isEqualTo(2);
assertThat(tuple.getT3()).isEqualTo(3);
assertThat(tuple.getT4()).isEqualTo(4);
assertThat(tuple.getT5()).isEqualTo(5);
assertThat(tuple.getT6()).isEqualTo(6);
assertThat(tuple)
.isInstanceOf(Tuple8.class)
.containsExactly(1, 2, 3, 4, 5, 6, 7, 8);
}
@Test
public void fn6Delegate() {
Integer[] source = new Integer[] { 1, 2, 3, 4, 5, 6, 7, 8 };
Function<Tuple6<Integer, Integer, Integer, Integer, Integer, Integer>, Tuple6> invert =
t6 -> new Tuple6<>(t6.getT6(), t6.getT5(), t6.getT4(), t6.getT3(), t6.getT2(), t6.getT1());
Tuple6 tuple = Tuples.fn6(invert).apply(source);
assertThat(tuple.getT1()).isEqualTo(6);
assertThat(tuple.getT2()).isEqualTo(5);
assertThat(tuple.getT3()).isEqualTo(4);
assertThat(tuple.getT4()).isEqualTo(3);
assertThat(tuple.getT5()).isEqualTo(2);
assertThat(tuple.getT6()).isEqualTo(1);
assertThat((Object) tuple).isExactlyInstanceOf(Tuple6.class);
}
@Test
public void fn7() {
Integer[] source = new Integer[] { 1, 2, 3, 4, 5, 6, 7, 8 };
Tuple7<Object, Object, Object, Object, Object, Object, Object> tuple = Tuples.fn7().apply(source);
assertThat(tuple.getT1()).isEqualTo(1);
assertThat(tuple.getT2()).isEqualTo(2);
assertThat(tuple.getT3()).isEqualTo(3);
assertThat(tuple.getT4()).isEqualTo(4);
assertThat(tuple.getT5()).isEqualTo(5);
assertThat(tuple.getT6()).isEqualTo(6);
assertThat(tuple.getT7()).isEqualTo(7);
assertThat(tuple)
.isInstanceOf(Tuple8.class)
.containsExactly(1, 2, 3, 4, 5, 6, 7, 8);
}
@Test
public void fn7Delegate() {
Integer[] source = new Integer[] { 1, 2, 3, 4, 5, 6, 7, 8 };
Function<Tuple7<Integer, Integer, Integer, Integer, Integer, Integer, Integer>, Tuple7> invert =
t7 -> new Tuple7<>(t7.getT7(), t7.getT6(), t7.getT5(), t7.getT4(), t7.getT3(), t7.getT2(), t7.getT1());
Tuple7 tuple = Tuples.fn7(invert).apply(source);
assertThat(tuple.getT1()).isEqualTo(7);
assertThat(tuple.getT2()).isEqualTo(6);
assertThat(tuple.getT3()).isEqualTo(5);
assertThat(tuple.getT4()).isEqualTo(4);
assertThat(tuple.getT5()).isEqualTo(3);
assertThat(tuple.getT6()).isEqualTo(2);
assertThat(tuple.getT7()).isEqualTo(1);
assertThat((Object) tuple).isExactlyInstanceOf(Tuple7.class);
}
@Test
public void fn8() {
Integer[] source = new Integer[] { 1, 2, 3, 4, 5, 6, 7, 8 };
Tuple8 tuple = Tuples.fn8().apply(source);
assertThat(tuple.getT1()).isEqualTo(1);
assertThat(tuple.getT2()).isEqualTo(2);
assertThat(tuple.getT3()).isEqualTo(3);
assertThat(tuple.getT4()).isEqualTo(4);
assertThat(tuple.getT5()).isEqualTo(5);
assertThat(tuple.getT6()).isEqualTo(6);
assertThat(tuple.getT7()).isEqualTo(7);
assertThat(tuple.getT8()).isEqualTo(8);
}
@Test
public void fn8Delegate() {
Integer[] source = new Integer[] { 1, 2, 3, 4, 5, 6, 7, 8 };
Function<Tuple8<Integer, Integer, Integer, Integer, Integer, Integer, Integer, Integer>, Integer> sum =
t8 -> t8.getT8() + t8.getT7() + t8.getT6() + t8.getT5() + t8.getT4() + t8.getT3() + t8.getT2() + t8.getT1();
Integer result = Tuples.fn8(sum).apply(source);
assertThat(result).isEqualTo(36);
}
@Test
public void tupleStringRepresentationFull() {
assertThat(Tuples.tupleStringRepresentation(1, 2, 3, 4, 5)
.toString())
.isEqualTo("1,2,3,4,5");
}
@Test
public void tupleStringRepresentationSparse() {
assertThat(Tuples.tupleStringRepresentation(null, 2, null, 4, 5, null)
.toString())
.isEqualTo(",2,,4,5,");
}
}
|
TupleTests
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/Parameters.java
|
{
"start": 1155,
"end": 1555
}
|
class ____ {
private Map<String, List<Param<?>>> params;
/**
* Constructor that receives the request parsed parameters.
*
* @param params the request parsed parameters.
*/
public Parameters(Map<String, List<Param<?>>> params) {
this.params = params;
}
/**
* Returns the value of a request parsed parameter.
*
* @param name parameter name.
* @param klass
|
Parameters
|
java
|
apache__kafka
|
trogdor/src/main/java/org/apache/kafka/trogdor/workload/ConnectionStressWorker.java
|
{
"start": 9998,
"end": 10699
}
|
class ____ implements Runnable {
@Override
public void run() {
try {
long lastTimeMs = Time.SYSTEM.milliseconds();
JsonNode node;
synchronized (ConnectionStressWorker.this) {
node = JsonUtil.JSON_SERDE.valueToTree(
new StatusData(totalConnections, totalFailedConnections,
(totalConnections * 1000.0) / (lastTimeMs - startTimeMs)));
}
status.update(node);
} catch (Exception e) {
WorkerUtils.abort(log, "StatusUpdater", e, doneFuture);
}
}
}
public static
|
StatusUpdater
|
java
|
spring-projects__spring-framework
|
spring-test/src/test/java/org/springframework/test/web/servlet/samples/client/standalone/resultmatches/XpathAssertionTests.java
|
{
"start": 7585,
"end": 8178
}
|
class ____ {
@XmlElementWrapper(name = "composers")
@XmlElement(name = "composer")
private List<Person> composers;
@XmlElementWrapper(name = "performers")
@XmlElement(name = "performer")
private List<Person> performers;
public PeopleWrapper() {
}
public PeopleWrapper(List<Person> composers, List<Person> performers) {
this.composers = composers;
this.performers = performers;
}
public List<Person> getComposers() {
return this.composers;
}
public List<Person> getPerformers() {
return this.performers;
}
}
@Controller
public static
|
PeopleWrapper
|
java
|
apache__flink
|
flink-core/src/main/java/org/apache/flink/api/common/typeutils/base/StringSerializer.java
|
{
"start": 1291,
"end": 2952
}
|
class ____ extends TypeSerializerSingleton<String> {
private static final long serialVersionUID = 1L;
/** Sharable instance of the StringSerializer. */
public static final StringSerializer INSTANCE = new StringSerializer();
private static final String EMPTY = "";
@Override
public boolean isImmutableType() {
return true;
}
@Override
public String createInstance() {
return EMPTY;
}
@Override
public String copy(String from) {
return from;
}
@Override
public String copy(String from, String reuse) {
return from;
}
@Override
public int getLength() {
return -1;
}
@Override
public void serialize(String record, DataOutputView target) throws IOException {
StringValue.writeString(record, target);
}
@Override
public String deserialize(DataInputView source) throws IOException {
return StringValue.readString(source);
}
@Override
public String deserialize(String record, DataInputView source) throws IOException {
return deserialize(source);
}
@Override
public void copy(DataInputView source, DataOutputView target) throws IOException {
StringValue.copyString(source, target);
}
@Override
public TypeSerializerSnapshot<String> snapshotConfiguration() {
return new StringSerializerSnapshot();
}
// ------------------------------------------------------------------------
/** Serializer configuration snapshot for compatibility and format evolution. */
@SuppressWarnings("WeakerAccess")
public static final
|
StringSerializer
|
java
|
resilience4j__resilience4j
|
resilience4j-core/src/test/java/io/github/resilience4j/core/TestContextPropagators.java
|
{
"start": 886,
"end": 1984
}
|
class ____ implements ContextPropagator<String> {
private ThreadLocal<String> threadLocal;
public TestThreadLocalContextPropagator(ThreadLocal<String> threadLocal) {
this.threadLocal = threadLocal;
}
@Override
public Supplier<Optional<String>> retrieve() {
return () -> (Optional<String>) Optional.ofNullable(threadLocal.get());
}
@Override
public Consumer<Optional<String>> copy() {
return (t) -> t.ifPresent(e -> {
if (threadLocal.get() != null) {
threadLocal.set(null);
threadLocal.remove();
}
threadLocal.set(e);
});
}
@Override
public Consumer<Optional<String>> clear() {
return (t) -> {
if (threadLocal.get() != null) {
threadLocal.set(null);
threadLocal.remove();
}
};
}
}
@SuppressWarnings("unchecked")
public static
|
TestThreadLocalContextPropagator
|
java
|
netty__netty
|
codec-http2/src/test/java/io/netty/handler/codec/http2/Http2FrameCodecTest.java
|
{
"start": 16426,
"end": 39240
}
|
class ____ extends AbstractReferenceCounted implements Http2Frame {
@Override
public String name() {
return "UNKNOWN";
}
@Override
protected void deallocate() {
}
@Override
public ReferenceCounted touch(Object hint) {
return this;
}
}
UnknownHttp2Frame frame = new UnknownHttp2Frame();
assertEquals(1, frame.refCnt());
ChannelFuture f = channel.write(frame);
f.await();
assertTrue(f.isDone());
assertFalse(f.isSuccess());
assertInstanceOf(UnsupportedMessageTypeException.class, f.cause());
assertEquals(0, frame.refCnt());
}
@Test
public void unknownFrameTypeOnConnectionStream() throws Exception {
// handle the case where unknown frames are sent before a stream is created,
// for example: HTTP/2 GREASE testing
ByteBuf debugData = bb("debug");
frameInboundWriter.writeInboundFrame((byte) 0xb, 0, new Http2Flags(), debugData);
channel.flush();
assertEquals(0, debugData.refCnt());
assertTrue(channel.isActive());
}
@Test
public void unknownFrameOnMissingStream() throws Exception {
ByteBuf debugData = bb("debug");
frameInboundWriter.writeInboundFrame((byte) 0xb, 101, new Http2Flags(), debugData);
channel.flush();
assertEquals(0, debugData.refCnt());
assertTrue(channel.isActive());
}
@Test
public void unknownFrameOnMissingStreamFirstPacket() throws Exception {
setUp(Http2FrameCodecBuilder.forClient(), new Http2Settings(), false);
// SETTINGS and UNKNOWN must come in on the same packet to trigger the bug
channel.pipeline().addFirst("combine", new ChannelInboundHandlerAdapter() {
CompositeByteBuf accumulate;
@Override
public void channelRead(ChannelHandlerContext ctx, Object msg) {
if (accumulate == null) {
accumulate = ctx.alloc().compositeBuffer();
}
accumulate.addComponent(true, (ByteBuf) msg);
}
@Override
public void handlerRemoved(ChannelHandlerContext ctx) {
if (accumulate != null) {
ctx.fireChannelRead(accumulate);
ctx.fireChannelReadComplete();
}
}
});
frameInboundWriter.writeInboundSettings(new Http2Settings());
ByteBuf debugData = bb("debug");
frameInboundWriter.writeInboundFrame((byte) 0xb, 101, new Http2Flags(), debugData);
channel.pipeline().remove("combine");
channel.flushInbound();
assertEquals(0, debugData.refCnt());
assertTrue(channel.isActive());
}
@Test
public void goAwayLastStreamIdOverflowed() throws Exception {
frameInboundWriter.writeInboundHeaders(5, request, 31, false);
Http2Stream stream = frameCodec.connection().stream(5);
assertNotNull(stream);
assertEquals(State.OPEN, stream.state());
ByteBuf debugData = bb("debug");
Http2GoAwayFrame goAwayFrame = new DefaultHttp2GoAwayFrame(NO_ERROR.code(),
debugData.retainedDuplicate());
goAwayFrame.setExtraStreamIds(Integer.MAX_VALUE);
channel.writeOutbound(goAwayFrame);
// When the last stream id computation overflows, the last stream id should just be set to 2^31 - 1.
verify(frameWriter).writeGoAway(eqFrameCodecCtx(), eq(Integer.MAX_VALUE),
eq(NO_ERROR.code()), eq(debugData), anyChannelPromise());
debugData.release();
assertEquals(State.OPEN, stream.state());
assertTrue(channel.isActive());
}
@Test
public void streamErrorShouldFireExceptionForInbound() throws Exception {
frameInboundWriter.writeInboundHeaders(3, request, 31, false);
Http2Stream stream = frameCodec.connection().stream(3);
assertNotNull(stream);
StreamException streamEx = new StreamException(3, Http2Error.INTERNAL_ERROR, "foo");
channel.pipeline().fireExceptionCaught(streamEx);
Http2FrameStreamEvent event = inboundHandler.readInboundMessageOrUserEvent();
assertEquals(Http2FrameStreamEvent.Type.State, event.type());
assertEquals(State.OPEN, event.stream().state());
Http2HeadersFrame headersFrame = inboundHandler.readInboundMessageOrUserEvent();
assertNotNull(headersFrame);
Http2FrameStreamException e = assertThrows(Http2FrameStreamException.class, new Executable() {
@Override
public void execute() throws Throwable {
inboundHandler.checkException();
}
});
assertEquals(streamEx, e.getCause());
assertNull(inboundHandler.readInboundMessageOrUserEvent());
}
@Test
public void streamErrorShouldNotFireExceptionForOutbound() throws Exception {
frameInboundWriter.writeInboundHeaders(3, request, 31, false);
Http2Stream stream = frameCodec.connection().stream(3);
assertNotNull(stream);
StreamException streamEx = new StreamException(3, Http2Error.INTERNAL_ERROR, "foo");
frameCodec.onError(frameCodec.ctx, true, streamEx);
Http2FrameStreamEvent event = inboundHandler.readInboundMessageOrUserEvent();
assertEquals(Http2FrameStreamEvent.Type.State, event.type());
assertEquals(State.OPEN, event.stream().state());
Http2HeadersFrame headersFrame = inboundHandler.readInboundMessageOrUserEvent();
assertNotNull(headersFrame);
// No exception expected
inboundHandler.checkException();
assertNull(inboundHandler.readInboundMessageOrUserEvent());
}
@Test
public void windowUpdateFrameDecrementsConsumedBytes() throws Exception {
frameInboundWriter.writeInboundHeaders(3, request, 31, false);
Http2Connection connection = frameCodec.connection();
Http2Stream stream = connection.stream(3);
assertNotNull(stream);
ByteBuf data = Unpooled.buffer(100).writeZero(100);
frameInboundWriter.writeInboundData(3, data, 0, false);
Http2HeadersFrame inboundHeaders = inboundHandler.readInbound();
assertNotNull(inboundHeaders);
assertNotNull(inboundHeaders.stream());
Http2FrameStream stream2 = inboundHeaders.stream();
int before = connection.local().flowController().unconsumedBytes(stream);
ChannelFuture f = channel.write(new DefaultHttp2WindowUpdateFrame(100).stream(stream2));
int after = connection.local().flowController().unconsumedBytes(stream);
assertEquals(100, before - after);
assertTrue(f.isSuccess());
}
@Test
public void windowUpdateMayFail() throws Exception {
frameInboundWriter.writeInboundHeaders(3, request, 31, false);
Http2Connection connection = frameCodec.connection();
Http2Stream stream = connection.stream(3);
assertNotNull(stream);
Http2HeadersFrame inboundHeaders = inboundHandler.readInbound();
assertNotNull(inboundHeaders);
Http2FrameStream stream2 = inboundHeaders.stream();
// Fails, cause trying to return too many bytes to the flow controller
ChannelFuture f = channel.write(new DefaultHttp2WindowUpdateFrame(100).stream(stream2));
assertTrue(f.isDone());
assertFalse(f.isSuccess());
assertInstanceOf(Http2Exception.class, f.cause());
}
@Test
public void inboundWindowUpdateShouldBeForwarded() throws Exception {
frameInboundWriter.writeInboundHeaders(3, request, 31, false);
frameInboundWriter.writeInboundWindowUpdate(3, 100);
// Connection-level window update
frameInboundWriter.writeInboundWindowUpdate(0, 100);
Http2HeadersFrame headersFrame = inboundHandler.readInbound();
assertNotNull(headersFrame);
Http2WindowUpdateFrame windowUpdateFrame = inboundHandler.readInbound();
assertNotNull(windowUpdateFrame);
assertEquals(3, windowUpdateFrame.stream().id());
assertEquals(100, windowUpdateFrame.windowSizeIncrement());
// Window update for the connection should not be forwarded.
assertNull(inboundHandler.readInbound());
}
@Test
public void streamZeroWindowUpdateIncrementsConnectionWindow() throws Http2Exception {
Http2Connection connection = frameCodec.connection();
Http2LocalFlowController localFlow = connection.local().flowController();
int initialWindowSizeBefore = localFlow.initialWindowSize();
Http2Stream connectionStream = connection.connectionStream();
int connectionWindowSizeBefore = localFlow.windowSize(connectionStream);
// We only replenish the flow control window after the amount consumed drops below the following threshold.
// We make the threshold very "high" so that window updates will be sent when the delta is relatively small.
((DefaultHttp2LocalFlowController) localFlow).windowUpdateRatio(connectionStream, .999f);
int windowUpdate = 1024;
channel.write(new DefaultHttp2WindowUpdateFrame(windowUpdate));
// The initial window size is only changed by Http2Settings, so it shouldn't change.
assertEquals(initialWindowSizeBefore, localFlow.initialWindowSize());
// The connection window should be increased by the delta amount.
assertEquals(connectionWindowSizeBefore + windowUpdate, localFlow.windowSize(connectionStream));
}
@Test
public void windowUpdateDoesNotOverflowConnectionWindow() {
Http2Connection connection = frameCodec.connection();
Http2LocalFlowController localFlow = connection.local().flowController();
int initialWindowSizeBefore = localFlow.initialWindowSize();
channel.write(new DefaultHttp2WindowUpdateFrame(Integer.MAX_VALUE));
// The initial window size is only changed by Http2Settings, so it shouldn't change.
assertEquals(initialWindowSizeBefore, localFlow.initialWindowSize());
// The connection window should be increased by the delta amount.
assertEquals(Integer.MAX_VALUE, localFlow.windowSize(connection.connectionStream()));
}
@Test
public void writeUnknownFrame() {
final Http2FrameStream stream = frameCodec.newStream();
ByteBuf buffer = Unpooled.buffer().writeByte(1);
DefaultHttp2UnknownFrame unknownFrame = new DefaultHttp2UnknownFrame(
(byte) 20, new Http2Flags().ack(true), buffer);
unknownFrame.stream(stream);
channel.write(unknownFrame);
verify(frameWriter).writeFrame(eqFrameCodecCtx(), eq(unknownFrame.frameType()),
eq(unknownFrame.stream().id()), eq(unknownFrame.flags()), eq(buffer), any(ChannelPromise.class));
}
@Test
public void sendSettingsFrame() {
Http2Settings settings = new Http2Settings();
channel.write(new DefaultHttp2SettingsFrame(settings));
verify(frameWriter).writeSettings(eqFrameCodecCtx(), same(settings), any(ChannelPromise.class));
}
@Test
@Timeout(value = 5000, unit = TimeUnit.MILLISECONDS)
public void newOutboundStream() {
final Http2FrameStream stream = frameCodec.newStream();
assertNotNull(stream);
assertFalse(isStreamIdValid(stream.id()));
final Promise<Void> listenerExecuted = new DefaultPromise<Void>(GlobalEventExecutor.INSTANCE);
channel.writeAndFlush(new DefaultHttp2HeadersFrame(new DefaultHttp2Headers(), false).stream(stream))
.addListener(new ChannelFutureListener() {
@Override
public void operationComplete(ChannelFuture future) throws Exception {
assertTrue(future.isSuccess());
assertTrue(isStreamIdValid(stream.id()));
listenerExecuted.setSuccess(null);
}
}
);
ByteBuf data = Unpooled.buffer().writeZero(100);
ChannelFuture f = channel.writeAndFlush(new DefaultHttp2DataFrame(data).stream(stream));
assertTrue(f.isSuccess());
listenerExecuted.syncUninterruptibly();
assertTrue(listenerExecuted.isSuccess());
}
@Test
public void newOutboundStreamsShouldBeBuffered() throws Exception {
setUp(Http2FrameCodecBuilder.forServer().encoderEnforceMaxConcurrentStreams(true),
new Http2Settings().maxConcurrentStreams(1));
Http2FrameStream stream1 = frameCodec.newStream();
Http2FrameStream stream2 = frameCodec.newStream();
ChannelPromise promise1 = channel.newPromise();
ChannelPromise promise2 = channel.newPromise();
channel.writeAndFlush(new DefaultHttp2HeadersFrame(new DefaultHttp2Headers()).stream(stream1), promise1);
channel.writeAndFlush(new DefaultHttp2HeadersFrame(new DefaultHttp2Headers()).stream(stream2), promise2);
assertTrue(isStreamIdValid(stream1.id()));
channel.runPendingTasks();
assertTrue(isStreamIdValid(stream2.id()));
assertTrue(promise1.syncUninterruptibly().isSuccess());
assertFalse(promise2.isDone());
// Increase concurrent streams limit to 2
frameInboundWriter.writeInboundSettings(new Http2Settings().maxConcurrentStreams(2));
channel.flush();
assertTrue(promise2.syncUninterruptibly().isSuccess());
}
@Test
public void multipleNewOutboundStreamsShouldBeBuffered() throws Exception {
// We use a limit of 1 and then increase it step by step.
setUp(Http2FrameCodecBuilder.forServer().encoderEnforceMaxConcurrentStreams(true),
new Http2Settings().maxConcurrentStreams(1));
Http2FrameStream stream1 = frameCodec.newStream();
Http2FrameStream stream2 = frameCodec.newStream();
Http2FrameStream stream3 = frameCodec.newStream();
ChannelPromise promise1 = channel.newPromise();
ChannelPromise promise2 = channel.newPromise();
ChannelPromise promise3 = channel.newPromise();
channel.writeAndFlush(new DefaultHttp2HeadersFrame(new DefaultHttp2Headers()).stream(stream1), promise1);
channel.writeAndFlush(new DefaultHttp2HeadersFrame(new DefaultHttp2Headers()).stream(stream2), promise2);
channel.writeAndFlush(new DefaultHttp2HeadersFrame(new DefaultHttp2Headers()).stream(stream3), promise3);
assertTrue(isStreamIdValid(stream1.id()));
channel.runPendingTasks();
assertTrue(isStreamIdValid(stream2.id()));
assertTrue(promise1.syncUninterruptibly().isSuccess());
assertFalse(promise2.isDone());
assertFalse(promise3.isDone());
// Increase concurrent streams limit to 2
frameInboundWriter.writeInboundSettings(new Http2Settings().maxConcurrentStreams(2));
channel.flush();
// As we increased the limit to 2 we should have also succeed the second frame.
assertTrue(promise2.syncUninterruptibly().isSuccess());
assertFalse(promise3.isDone());
frameInboundWriter.writeInboundSettings(new Http2Settings().maxConcurrentStreams(3));
channel.flush();
// With the max streams of 3 all streams should be succeed now.
assertTrue(promise3.syncUninterruptibly().isSuccess());
assertFalse(channel.finishAndReleaseAll());
}
@Test
public void doNotLeakOnFailedInitializationForChannels() throws Exception {
setUp(Http2FrameCodecBuilder.forServer(), new Http2Settings().maxConcurrentStreams(2));
Http2FrameStream stream1 = frameCodec.newStream();
Http2FrameStream stream2 = frameCodec.newStream();
ChannelPromise stream1HeaderPromise = channel.newPromise();
ChannelPromise stream2HeaderPromise = channel.newPromise();
channel.writeAndFlush(new DefaultHttp2HeadersFrame(new DefaultHttp2Headers()).stream(stream1),
stream1HeaderPromise);
channel.runPendingTasks();
frameInboundWriter.writeInboundGoAway(stream1.id(), 0L, Unpooled.EMPTY_BUFFER);
channel.writeAndFlush(new DefaultHttp2HeadersFrame(new DefaultHttp2Headers()).stream(stream2),
stream2HeaderPromise);
channel.runPendingTasks();
assertTrue(stream1HeaderPromise.syncUninterruptibly().isSuccess());
assertTrue(stream2HeaderPromise.isDone());
assertEquals(0, frameCodec.numInitializingStreams());
assertFalse(channel.finishAndReleaseAll());
}
@Test
public void streamIdentifiersExhausted() throws Http2Exception {
int maxServerStreamId = Integer.MAX_VALUE - 1;
assertNotNull(frameCodec.connection().local().createStream(maxServerStreamId, false));
Http2FrameStream stream = frameCodec.newStream();
assertNotNull(stream);
ChannelPromise writePromise = channel.newPromise();
channel.writeAndFlush(new DefaultHttp2HeadersFrame(new DefaultHttp2Headers()).stream(stream), writePromise);
Http2GoAwayFrame goAwayFrame = inboundHandler.readInbound();
assertNotNull(goAwayFrame);
assertEquals(NO_ERROR.code(), goAwayFrame.errorCode());
assertEquals(Integer.MAX_VALUE, goAwayFrame.lastStreamId());
goAwayFrame.release();
assertInstanceOf(Http2NoMoreStreamIdsException.class, writePromise.cause());
}
@Test
public void receivePing() throws Http2Exception {
frameInboundWriter.writeInboundPing(false, 12345L);
Http2PingFrame pingFrame = inboundHandler.readInbound();
assertNotNull(pingFrame);
assertEquals(12345, pingFrame.content());
assertFalse(pingFrame.ack());
}
@Test
public void sendPing() {
channel.writeAndFlush(new DefaultHttp2PingFrame(12345));
verify(frameWriter).writePing(eqFrameCodecCtx(), eq(false), eq(12345L), anyChannelPromise());
}
@Test
public void receiveSettings() throws Http2Exception {
Http2Settings settings = new Http2Settings().maxConcurrentStreams(1);
frameInboundWriter.writeInboundSettings(settings);
Http2SettingsFrame settingsFrame = inboundHandler.readInbound();
assertNotNull(settingsFrame);
assertEquals(settings, settingsFrame.settings());
}
@Test
public void sendSettings() {
Http2Settings settings = new Http2Settings().maxConcurrentStreams(1);
channel.writeAndFlush(new DefaultHttp2SettingsFrame(settings));
verify(frameWriter).writeSettings(eqFrameCodecCtx(), eq(settings), anyChannelPromise());
}
@Test
public void iterateActiveStreams() throws Exception {
setUp(Http2FrameCodecBuilder.forServer().encoderEnforceMaxConcurrentStreams(true),
new Http2Settings().maxConcurrentStreams(1));
frameInboundWriter.writeInboundHeaders(3, request, 0, false);
Http2HeadersFrame headersFrame = inboundHandler.readInbound();
assertNotNull(headersFrame);
Http2FrameStream activeInbond = headersFrame.stream();
Http2FrameStream activeOutbound = frameCodec.newStream();
channel.writeAndFlush(new DefaultHttp2HeadersFrame(new DefaultHttp2Headers()).stream(activeOutbound));
Http2FrameStream bufferedOutbound = frameCodec.newStream();
channel.writeAndFlush(new DefaultHttp2HeadersFrame(new DefaultHttp2Headers()).stream(bufferedOutbound));
@SuppressWarnings("unused")
Http2FrameStream idleStream = frameCodec.newStream();
final Set<Http2FrameStream> activeStreams = new HashSet<Http2FrameStream>();
frameCodec.forEachActiveStream(new Http2FrameStreamVisitor() {
@Override
public boolean visit(Http2FrameStream stream) {
activeStreams.add(stream);
return true;
}
});
assertEquals(2, activeStreams.size());
Set<Http2FrameStream> expectedStreams = new HashSet<Http2FrameStream>();
expectedStreams.add(activeInbond);
expectedStreams.add(activeOutbound);
assertEquals(expectedStreams, activeStreams);
}
@Test
public void autoAckPingTrue() throws Exception {
setUp(Http2FrameCodecBuilder.forServer().autoAckPingFrame(true), new Http2Settings());
frameInboundWriter.writeInboundPing(false, 8);
Http2PingFrame frame = inboundHandler.readInbound();
assertFalse(frame.ack());
assertEquals(8, frame.content());
verify(frameWriter).writePing(eqFrameCodecCtx(), eq(true), eq(8L), anyChannelPromise());
}
@Test
public void autoAckPingFalse() throws Exception {
setUp(Http2FrameCodecBuilder.forServer().autoAckPingFrame(false), new Http2Settings());
frameInboundWriter.writeInboundPing(false, 8);
verify(frameWriter, never()).writePing(eqFrameCodecCtx(), eq(true), eq(8L), anyChannelPromise());
Http2PingFrame frame = inboundHandler.readInbound();
assertFalse(frame.ack());
assertEquals(8, frame.content());
// Now ack the frame manually.
channel.writeAndFlush(new DefaultHttp2PingFrame(8, true));
verify(frameWriter).writePing(eqFrameCodecCtx(), eq(true), eq(8L), anyChannelPromise());
}
@Test
public void streamShouldBeOpenInListener() {
final Http2FrameStream stream2 = frameCodec.newStream();
assertEquals(State.IDLE, stream2.state());
final AtomicBoolean listenerExecuted = new AtomicBoolean();
channel.writeAndFlush(new DefaultHttp2HeadersFrame(new DefaultHttp2Headers()).stream(stream2))
.addListener(new ChannelFutureListener() {
@Override
public void operationComplete(ChannelFuture future) throws Exception {
assertTrue(future.isSuccess());
assertEquals(State.OPEN, stream2.state());
listenerExecuted.set(true);
}
});
assertTrue(listenerExecuted.get());
}
@Test
public void writeHeadersVoidPromise() {
final Http2FrameStream stream = frameCodec.newStream();
channel.writeAndFlush(new DefaultHttp2HeadersFrame(new DefaultHttp2Headers()).stream(stream),
channel.voidPromise());
}
@Test
public void upgradeEventNoRefCntError() throws Exception {
frameInboundWriter.writeInboundHeaders(Http2CodecUtil.HTTP_UPGRADE_STREAM_ID, request, 31, false);
// Using reflect as the constructor is package-private and the
|
UnknownHttp2Frame
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/env/BuildVersion.java
|
{
"start": 5984,
"end": 6717
}
|
class ____ implements BuildExtension {
@Override
public Build getCurrentBuild() {
return Build.current();
}
@Override
public BuildVersion currentBuildVersion() {
return DefaultBuildVersion.CURRENT;
}
@Override
public BuildVersion fromVersionId(int versionId) {
return new DefaultBuildVersion(versionId);
}
@Override
public BuildVersion fromString(String version) {
return new DefaultBuildVersion(version);
}
@Override
public BuildVersion fromStream(StreamInput in) throws IOException {
return new DefaultBuildVersion(in);
}
}
}
|
DefaultBuildExtension
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/internal/floats/Floats_assertIsCloseTo_Test.java
|
{
"start": 1667,
"end": 9936
}
|
class ____ extends FloatsBaseTest {
private static final Float ZERO = 0f;
private static final Float ONE = 1f;
private static final Float TWO = 2f;
private static final Float TEN = 10f;
// success
@ParameterizedTest
@CsvSource({
"1.0f, 1.0f, 0.0f",
"1.0f, 2.0f, 1.0f",
"1.0f, 0.0f, 1.0f",
"1.0f, 1.0f, 10",
"0.375f, 0.125f, 0.25f",
"1.0f, 1.1, 0.1",
"NaN, NaN, 1.0f" })
void should_pass_if_difference_is_less_than_or_equal_to_given_precision(float actual, float expected, float precision) {
floats.assertIsCloseTo(INFO, actual, expected, within(precision));
floats.assertIsCloseTo(INFO, expected, actual, within(precision));
}
@ParameterizedTest
@CsvSource({
"1.0f, 2.0f, 10",
"0.375, 0.125, 0.2500001",
"1.1, 1.0f, 0.1000001",
"NaN, NaN, 1.0f" })
void should_pass_if_difference_is_less_than_given_offset(float actual, float expected, float precision) {
floats.assertIsCloseTo(INFO, actual, expected, byLessThan(precision));
}
@Test
void should_pass_if_actual_and_expected_are_POSITIVE_INFINITY() {
floats.assertIsCloseTo(INFO, POSITIVE_INFINITY, POSITIVE_INFINITY, within(ONE));
floats.assertIsCloseTo(INFO, POSITIVE_INFINITY, POSITIVE_INFINITY, byLessThan(ONE));
}
@Test
void should_pass_if_actual_and_expected_are_NEGATIVE_INFINITY() {
floats.assertIsCloseTo(INFO, NEGATIVE_INFINITY, NEGATIVE_INFINITY, within(ONE));
floats.assertIsCloseTo(INFO, NEGATIVE_INFINITY, NEGATIVE_INFINITY, byLessThan(ONE));
}
// error or failure
@ParameterizedTest
@CsvSource({
"1.0f, 1.1f, 0.1f",
"1.0f, 2.0f, 1.0f",
"0.375f, 0.125f, 0.25f" })
void should_fail_if_difference_is_greater_than_or_equal_to_given_precision(float expected, float actual, float precision) {
// WHEN
expectAssertionError(() -> floats.assertIsCloseTo(INFO, actual, expected, byLessThan(precision)));
// THEN
verify(failures).failure(INFO, shouldBeEqual(actual, expected, byLessThan(precision), absDiff(expected, actual)));
}
@ParameterizedTest
@CsvSource({
"1.0f, 1.1f, 0.0999999f",
"0.375f, 0.125f, 0.2499999f" })
void should_fail_if_difference_is_greater_than_given_precision(float expected, float actual, float precision) {
// WHEN
expectAssertionError(() -> floats.assertIsCloseTo(INFO, actual, expected, within(precision)));
// THEN
verify(failures).failure(INFO, shouldBeEqual(actual, expected, within(precision), absDiff(expected, actual)));
}
@Test
void should_fail_if_actual_is_null() {
// GIVEN
Float actual = null;
// WHEN
var assertionError = expectAssertionError(() -> floats.assertIsCloseTo(INFO, actual, ONE, within(ONE)));
// THEN
then(assertionError).hasMessage(actualIsNull());
}
@Test
void should_throw_error_if_expected_value_is_null() {
assertThatNullPointerException().isThrownBy(() -> floats.assertIsCloseTo(INFO, 6.0f, null, offset(1.0f)))
.withMessage("The given number should not be null");
}
@Test
void should_throw_error_if_offset_is_null() {
assertThatNullPointerException().isThrownBy(() -> floats.assertIsCloseTo(INFO, ONE, ZERO, null))
.withMessage("The given offset should not be null");
}
@Test
void should_fail_if_actual_is_not_close_enough_to_expected_value() {
// WHEN
expectAssertionError(() -> floats.assertIsCloseTo(INFO, ONE, TEN, within(ONE)));
// THEN
verify(failures).failure(INFO, shouldBeEqual(ONE, TEN, within(ONE), TEN - ONE));
}
@Test
void should_fail_if_actual_is_not_close_enough_to_expected_value_with_a_strict_offset() {
// WHEN
expectAssertionError(() -> floats.assertIsCloseTo(INFO, ONE, TEN, byLessThan(ONE)));
// THEN
verify(failures).failure(INFO, shouldBeEqual(ONE, TEN, byLessThan(ONE), TEN - ONE));
}
@Test
void should_fail_if_difference_is_equal_to_the_given_strict_offset() {
// WHEN
expectAssertionError(() -> floats.assertIsCloseTo(INFO, TWO, ONE, byLessThan(ONE)));
// THEN
verify(failures).failure(INFO, shouldBeEqual(TWO, ONE, byLessThan(ONE), TWO - ONE));
}
@Test
void should_fail_if_actual_is_NaN_and_expected_is_not() {
// WHEN
expectAssertionError(() -> floats.assertIsCloseTo(INFO, NaN, ONE, within(ONE)));
// THEN
verify(failures).failure(INFO, shouldBeEqual(NaN, ONE, within(ONE), absDiff(NaN, ONE)));
}
@Test
void should_fail_if_actual_is_POSITIVE_INFINITY_and_expected_is_not() {
// WHEN
expectAssertionError(() -> floats.assertIsCloseTo(INFO, POSITIVE_INFINITY, ONE, within(ONE)));
// THEN
verify(failures).failure(INFO, shouldBeEqual(POSITIVE_INFINITY, ONE, within(ONE), absDiff(POSITIVE_INFINITY, ONE)));
}
@Test
void should_fail_if_actual_is_NEGATIVE_INFINITY_and_expected_is_not() {
// WHEN
expectAssertionError(() -> floats.assertIsCloseTo(INFO, NEGATIVE_INFINITY, ONE, within(ONE)));
// THEN
verify(failures).failure(INFO, shouldBeEqual(NEGATIVE_INFINITY, ONE, within(ONE), absDiff(NEGATIVE_INFINITY, ONE)));
}
@Test
void should_fail_if_actual_is_POSITIVE_INFINITY_and_expected_is_NEGATIVE_INFINITY() {
// WHEN
expectAssertionError(() -> floats.assertIsCloseTo(INFO, POSITIVE_INFINITY, NEGATIVE_INFINITY, within(ONE)));
// THEN
verify(failures).failure(INFO, shouldBeEqual(POSITIVE_INFINITY, NEGATIVE_INFINITY, within(ONE),
absDiff(POSITIVE_INFINITY, NEGATIVE_INFINITY)));
}
@Test
void should_fail_if_actual_is_NEGATIVE_INFINITY_and_expected_is_POSITIVE_INFINITY() {
// WHEN
expectAssertionError(() -> floats.assertIsCloseTo(INFO, NEGATIVE_INFINITY, POSITIVE_INFINITY, within(ONE)));
// THEN
verify(failures).failure(INFO, shouldBeEqual(NEGATIVE_INFINITY, POSITIVE_INFINITY, within(ONE),
absDiff(NEGATIVE_INFINITY, POSITIVE_INFINITY)));
}
// with comparison strategy
@Test
void should_pass_if_difference_is_less_than_given_offset_whatever_custom_comparison_strategy_is() {
floatsWithAbsValueComparisonStrategy.assertIsCloseTo(INFO, ONE, ONE, within(ONE));
floatsWithAbsValueComparisonStrategy.assertIsCloseTo(INFO, ONE, TWO, within(TEN));
floatsWithAbsValueComparisonStrategy.assertIsCloseTo(INFO, ONE, TWO, byLessThan(TEN));
}
@Test
void should_pass_if_difference_is_equal_to_given_offset_whatever_custom_comparison_strategy_is() {
floatsWithAbsValueComparisonStrategy.assertIsCloseTo(INFO, ONE, ONE, within(ZERO));
floatsWithAbsValueComparisonStrategy.assertIsCloseTo(INFO, ONE, ZERO, within(ONE));
floatsWithAbsValueComparisonStrategy.assertIsCloseTo(INFO, ONE, TWO, within(ONE));
}
@Test
void should_throw_error_if_offset_is_null_whatever_custom_comparison_strategy_is() {
// GIVEN
ThrowingCallable code = () -> floatsWithAbsValueComparisonStrategy.assertIsCloseTo(INFO, 8f, 8f, null);
// WHEN/THEN
assertThatNullPointerException().isThrownBy(code)
.withMessage(offsetIsNull());
}
@Test
void should_fail_if_actual_is_not_close_enough_to_expected_value_whatever_custom_comparison_strategy_is() {
// WHEN
expectAssertionError(() -> floatsWithAbsValueComparisonStrategy.assertIsCloseTo(INFO, 6f, 8f, offset(ONE)));
// THEN
verify(failures).failure(INFO, shouldBeEqual(6f, 8f, offset(ONE), TWO));
}
@Test
void should_fail_if_actual_is_not_strictly_close_enough_to_expected_value_whatever_custom_comparison_strategy_is() {
// WHEN
expectAssertionError(() -> floatsWithAbsValueComparisonStrategy.assertIsCloseTo(INFO, 6f, 8f, byLessThan(ONE)));
// THEN
verify(failures).failure(INFO, shouldBeEqual(6f, 8f, byLessThan(ONE), TWO));
}
@Test
void should_throw_error_if_expected_value_is_null_whatever_custom_comparison_strategy_is() {
// GIVEN
ThrowingCallable code = () -> floatsWithAbsValueComparisonStrategy.assertIsCloseTo(INFO, 6f, null, offset(ONE));
// WHEN/THEN
assertThatNullPointerException().isThrownBy(code)
.withMessage("The given number should not be null");
}
}
|
Floats_assertIsCloseTo_Test
|
java
|
elastic__elasticsearch
|
libs/dissect/src/main/java/org/elasticsearch/dissect/DissectMatch.java
|
{
"start": 821,
"end": 4386
}
|
class ____ {
private final String appendSeparator;
private final Map<String, String> results;
private final Map<String, String> simpleResults;
private final Map<String, ReferenceResult> referenceResults;
private final Map<String, AppendResult> appendResults;
private int implicitAppendOrder = -1000;
private final int maxMatches;
private final int maxResults;
private final int appendCount;
private final int referenceCount;
private final int simpleCount;
private int matches = 0;
DissectMatch(String appendSeparator, int maxMatches, int maxResults, int appendCount, int referenceCount) {
if (maxMatches <= 0 || maxResults <= 0) {
throw new IllegalArgumentException("Expected results are zero, can not construct DissectMatch");// should never happen
}
this.maxMatches = maxMatches;
this.maxResults = maxResults;
this.appendCount = appendCount;
this.referenceCount = referenceCount;
this.appendSeparator = appendSeparator;
results = new HashMap<>(maxResults);
this.simpleCount = maxMatches - referenceCount - appendCount;
simpleResults = simpleCount <= 0 ? null : new HashMap<>(simpleCount);
referenceResults = referenceCount <= 0 ? null : new HashMap<>(referenceCount);
appendResults = appendCount <= 0 ? null : new HashMap<>(appendCount);
}
/**
* Add the key/value that was found as result of the parsing
* @param key the {@link DissectKey}
* @param value the discovered value for the key
*/
void add(DissectKey key, String value) {
matches++;
if (key.skip()) {
return;
}
switch (key.getModifier()) {
case NONE -> simpleResults.put(key.getName(), value);
case APPEND -> appendResults.computeIfAbsent(key.getName(), k -> new AppendResult(appendSeparator))
.addValue(value, implicitAppendOrder++);
case APPEND_WITH_ORDER -> appendResults.computeIfAbsent(key.getName(), k -> new AppendResult(appendSeparator))
.addValue(value, key.getAppendPosition());
case FIELD_NAME -> referenceResults.computeIfAbsent(key.getName(), k -> new ReferenceResult()).setKey(value);
case FIELD_VALUE -> referenceResults.computeIfAbsent(key.getName(), k -> new ReferenceResult()).setValue(value);
}
}
boolean fullyMatched() {
return matches == maxMatches;
}
/**
* Checks if results are valid.
* @param resultsToCheck the results to check
* @return true if all dissect keys have been matched and the results are of the expected size.
*/
boolean isValid(Map<String, String> resultsToCheck) {
return fullyMatched() && resultsToCheck.size() == maxResults;
}
/**
* Gets all the current matches. Pass the results of this to isValid to determine if a fully successful match has occurred.
*
* @return the map of the results.
*/
Map<String, String> getResults() {
results.clear();
if (simpleCount > 0) {
results.putAll(simpleResults);
}
if (referenceCount > 0) {
referenceResults.forEach((k, v) -> results.put(v.getKey(), v.getValue()));
}
if (appendCount > 0) {
appendResults.forEach((k, v) -> results.put(k, v.getAppendResult()));
}
return results;
}
/**
* a result that will need to be part of an append operation.
*/
private final
|
DissectMatch
|
java
|
netty__netty
|
common/src/main/java/io/netty/util/internal/ThrowableUtil.java
|
{
"start": 800,
"end": 2699
}
|
class ____ {
private ThrowableUtil() { }
/**
* Set the {@link StackTraceElement} for the given {@link Throwable}, using the {@link Class} and method name.
*/
public static <T extends Throwable> T unknownStackTrace(T cause, Class<?> clazz, String method) {
cause.setStackTrace(new StackTraceElement[] { new StackTraceElement(clazz.getName(), method, null, -1)});
return cause;
}
/**
* Gets the stack trace from a Throwable as a String.
*
* @param cause the {@link Throwable} to be examined
* @return the stack trace as generated by {@link Throwable#printStackTrace(java.io.PrintWriter)} method.
*/
@Deprecated
public static String stackTraceToString(Throwable cause) {
ByteArrayOutputStream out = new ByteArrayOutputStream();
PrintStream pout = new PrintStream(out);
cause.printStackTrace(pout);
pout.flush();
try {
return out.toString();
} finally {
try {
out.close();
} catch (IOException ignore) {
// ignore as should never happen
}
}
}
@Deprecated
public static boolean haveSuppressed() {
return true;
}
public static void addSuppressed(Throwable target, Throwable suppressed) {
if (suppressed != null) {
target.addSuppressed(suppressed);
}
}
public static void addSuppressedAndClear(Throwable target, List<Throwable> suppressed) {
addSuppressed(target, suppressed);
suppressed.clear();
}
public static void addSuppressed(Throwable target, List<Throwable> suppressed) {
for (Throwable t : suppressed) {
addSuppressed(target, t);
}
}
public static Throwable[] getSuppressed(Throwable source) {
return source.getSuppressed();
}
}
|
ThrowableUtil
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/ops/Node.java
|
{
"start": 239,
"end": 1701
}
|
class ____ {
private String name;
private String description;
private Date created;
private Node parent;
private Set children = new HashSet();
private Set cascadingChildren = new HashSet();
public Node() {
}
public Node(String name) {
this.name = name;
created = generateCurrentDate();
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public Date getCreated() {
return created;
}
public void setCreated(Date created) {
this.created = created;
}
public Node getParent() {
return parent;
}
public void setParent(Node parent) {
this.parent = parent;
}
public Set getChildren() {
return children;
}
public void setChildren(Set children) {
this.children = children;
}
public Node addChild(Node child) {
children.add( child );
child.setParent( this );
return this;
}
public Set getCascadingChildren() {
return cascadingChildren;
}
public void setCascadingChildren(Set cascadingChildren) {
this.cascadingChildren = cascadingChildren;
}
private Date generateCurrentDate() {
// Note : done as java.sql.Date mainly to work around issue with
// MySQL and its lack of milli-second precision on its DATETIME
// and TIMESTAMP datatypes.
return new Date( new java.util.Date().getTime() );
}
}
|
Node
|
java
|
spring-projects__spring-security
|
config/src/test/java/org/springframework/security/config/web/server/OAuth2LoginTests.java
|
{
"start": 35108,
"end": 36869
}
|
class ____ {
ReactiveOAuth2AccessTokenResponseClient<OAuth2AuthorizationCodeGrantRequest> tokenResponseClient = mock(
ReactiveOAuth2AccessTokenResponseClient.class);
ReactiveOAuth2UserService<OAuth2UserRequest, OAuth2User> reactiveOAuth2UserService = mock(
DefaultReactiveOAuth2UserService.class);
ServerAuthenticationConverter authenticationConverter = mock(ServerAuthenticationConverter.class);
ServerSecurityContextRepository securityContextRepository = mock(ServerSecurityContextRepository.class);
@Bean
SecurityWebFilterChain springSecurity(ServerHttpSecurity http) {
http.authorizeExchange((authorize) -> authorize.anyExchange().authenticated())
.oauth2Login((c) -> c.authenticationConverter(this.authenticationConverter)
.securityContextRepository(this.securityContextRepository));
return http.build();
}
@Bean
ReactiveOAuth2UserService<OAuth2UserRequest, OAuth2User> customOAuth2UserService() {
return this.reactiveOAuth2UserService;
}
@Bean
ReactiveJwtDecoderFactory<ClientRegistration> jwtDecoderFactory() {
return (clientRegistration) -> (token) -> {
Map<String, Object> claims = new HashMap<>();
claims.put(IdTokenClaimNames.SUB, "subject");
claims.put(IdTokenClaimNames.ISS, "http://localhost/issuer");
claims.put(IdTokenClaimNames.AUD, Collections.singletonList("client"));
claims.put(IdTokenClaimNames.AZP, "client");
return Mono.just(TestJwts.jwt().claims((c) -> c.putAll(claims)).build());
};
}
@Bean
ReactiveOAuth2AccessTokenResponseClient<OAuth2AuthorizationCodeGrantRequest> requestReactiveOAuth2AccessTokenResponseClient() {
return this.tokenResponseClient;
}
}
@Configuration
@EnableWebFluxSecurity
static
|
OAuth2LoginWithOauth2UserService
|
java
|
spring-projects__spring-security
|
oauth2/oauth2-authorization-server/src/main/java/org/springframework/security/oauth2/server/authorization/authentication/OAuth2DeviceVerificationAuthenticationProvider.java
|
{
"start": 3388,
"end": 11574
}
|
class ____ implements AuthenticationProvider {
static final OAuth2TokenType USER_CODE_TOKEN_TYPE = new OAuth2TokenType(OAuth2ParameterNames.USER_CODE);
private static final StringKeyGenerator DEFAULT_STATE_GENERATOR = new Base64StringKeyGenerator(
Base64.getUrlEncoder());
private final Log logger = LogFactory.getLog(getClass());
private final RegisteredClientRepository registeredClientRepository;
private final OAuth2AuthorizationService authorizationService;
private final OAuth2AuthorizationConsentService authorizationConsentService;
private Predicate<OAuth2DeviceVerificationAuthenticationContext> authorizationConsentRequired = OAuth2DeviceVerificationAuthenticationProvider::isAuthorizationConsentRequired;
/**
* Constructs an {@code OAuth2DeviceVerificationAuthenticationProvider} using the
* provided parameters.
* @param registeredClientRepository the repository of registered clients
* @param authorizationService the authorization service
* @param authorizationConsentService the authorization consent service
*/
public OAuth2DeviceVerificationAuthenticationProvider(RegisteredClientRepository registeredClientRepository,
OAuth2AuthorizationService authorizationService,
OAuth2AuthorizationConsentService authorizationConsentService) {
Assert.notNull(registeredClientRepository, "registeredClientRepository cannot be null");
Assert.notNull(authorizationService, "authorizationService cannot be null");
Assert.notNull(authorizationConsentService, "authorizationConsentService cannot be null");
this.registeredClientRepository = registeredClientRepository;
this.authorizationService = authorizationService;
this.authorizationConsentService = authorizationConsentService;
}
@Override
public Authentication authenticate(Authentication authentication) throws AuthenticationException {
OAuth2DeviceVerificationAuthenticationToken deviceVerificationAuthentication = (OAuth2DeviceVerificationAuthenticationToken) authentication;
OAuth2Authorization authorization = this.authorizationService
.findByToken(deviceVerificationAuthentication.getUserCode(), USER_CODE_TOKEN_TYPE);
if (authorization == null) {
throw new OAuth2AuthenticationException(OAuth2ErrorCodes.INVALID_GRANT);
}
if (this.logger.isTraceEnabled()) {
this.logger.trace("Retrieved authorization with user code");
}
OAuth2Authorization.Token<OAuth2UserCode> userCode = authorization.getToken(OAuth2UserCode.class);
if (!userCode.isActive()) {
if (!userCode.isInvalidated()) {
authorization = OAuth2Authorization.from(authorization).invalidate(userCode.getToken()).build();
this.authorizationService.save(authorization);
if (this.logger.isWarnEnabled()) {
this.logger.warn(LogMessage.format("Invalidated user code used by registered client '%s'",
authorization.getRegisteredClientId()));
}
}
throw new OAuth2AuthenticationException(OAuth2ErrorCodes.INVALID_GRANT);
}
Authentication principal = (Authentication) deviceVerificationAuthentication.getPrincipal();
if (!isPrincipalAuthenticated(principal)) {
if (this.logger.isTraceEnabled()) {
this.logger.trace("Did not authenticate device verification request since principal not authenticated");
}
// Return the device verification request as-is where isAuthenticated() is
// false
return deviceVerificationAuthentication;
}
RegisteredClient registeredClient = this.registeredClientRepository
.findById(authorization.getRegisteredClientId());
if (this.logger.isTraceEnabled()) {
this.logger.trace("Retrieved registered client");
}
Set<String> requestedScopes = authorization.getAttribute(OAuth2ParameterNames.SCOPE);
OAuth2DeviceVerificationAuthenticationContext.Builder authenticationContextBuilder = OAuth2DeviceVerificationAuthenticationContext
.with(deviceVerificationAuthentication)
.registeredClient(registeredClient)
.authorization(authorization);
OAuth2AuthorizationConsent currentAuthorizationConsent = this.authorizationConsentService
.findById(registeredClient.getId(), principal.getName());
if (currentAuthorizationConsent != null) {
authenticationContextBuilder.authorizationConsent(currentAuthorizationConsent);
}
if (this.authorizationConsentRequired.test(authenticationContextBuilder.build())) {
String state = DEFAULT_STATE_GENERATOR.generateKey();
authorization = OAuth2Authorization.from(authorization)
.principalName(principal.getName())
.attribute(Principal.class.getName(), principal)
.attribute(OAuth2ParameterNames.STATE, state)
.build();
if (this.logger.isTraceEnabled()) {
this.logger.trace("Generated device authorization consent state");
}
this.authorizationService.save(authorization);
if (this.logger.isTraceEnabled()) {
this.logger.trace("Saved authorization");
}
Set<String> currentAuthorizedScopes = (currentAuthorizationConsent != null)
? currentAuthorizationConsent.getScopes() : null;
AuthorizationServerSettings authorizationServerSettings = AuthorizationServerContextHolder.getContext()
.getAuthorizationServerSettings();
String deviceVerificationUri = authorizationServerSettings.getDeviceVerificationEndpoint();
return new OAuth2DeviceAuthorizationConsentAuthenticationToken(deviceVerificationUri,
registeredClient.getClientId(), principal, deviceVerificationAuthentication.getUserCode(), state,
requestedScopes, currentAuthorizedScopes);
}
// @formatter:off
authorization = OAuth2Authorization.from(authorization)
.principalName(principal.getName())
.authorizedScopes(requestedScopes)
.invalidate(userCode.getToken())
.attribute(Principal.class.getName(), principal)
.attributes((attributes) -> attributes.remove(OAuth2ParameterNames.SCOPE))
.build();
// @formatter:on
this.authorizationService.save(authorization);
if (this.logger.isTraceEnabled()) {
this.logger.trace("Saved authorization with authorized scopes");
// This log is kept separate for consistency with other providers
this.logger.trace("Authenticated device verification request");
}
return new OAuth2DeviceVerificationAuthenticationToken(principal,
deviceVerificationAuthentication.getUserCode(), registeredClient.getClientId());
}
@Override
public boolean supports(Class<?> authentication) {
return OAuth2DeviceVerificationAuthenticationToken.class.isAssignableFrom(authentication);
}
/**
* Sets the {@code Predicate} used to determine if authorization consent is required.
*
* <p>
* The {@link OAuth2DeviceVerificationAuthenticationContext} gives the predicate
* access to the {@link OAuth2DeviceVerificationAuthenticationToken}, as well as, the
* following context attributes:
* <ul>
* <li>The {@link RegisteredClient} associated with the device authorization
* request.</li>
* <li>The {@link OAuth2Authorization} containing the device authorization request
* parameters.</li>
* <li>The {@link OAuth2AuthorizationConsent} previously granted to the
* {@link RegisteredClient}, or {@code null} if not available.</li>
* </ul>
* </p>
* @param authorizationConsentRequired the {@code Predicate} used to determine if
* authorization consent is required
*/
public void setAuthorizationConsentRequired(
Predicate<OAuth2DeviceVerificationAuthenticationContext> authorizationConsentRequired) {
Assert.notNull(authorizationConsentRequired, "authorizationConsentRequired cannot be null");
this.authorizationConsentRequired = authorizationConsentRequired;
}
private static boolean isAuthorizationConsentRequired(
OAuth2DeviceVerificationAuthenticationContext authenticationContext) {
if (authenticationContext.getAuthorizationConsent() != null && authenticationContext.getAuthorizationConsent()
.getScopes()
.containsAll(authenticationContext.getRequestedScopes())) {
return false;
}
return true;
}
private static boolean isPrincipalAuthenticated(Authentication principal) {
return principal != null && !AnonymousAuthenticationToken.class.isAssignableFrom(principal.getClass())
&& principal.isAuthenticated();
}
}
|
OAuth2DeviceVerificationAuthenticationProvider
|
java
|
elastic__elasticsearch
|
x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/sagemaker/schema/SageMakerStoredTaskSchema.java
|
{
"start": 644,
"end": 2334
}
|
interface ____ extends TaskSettings {
SageMakerStoredTaskSchema NO_OP = new SageMakerStoredTaskSchema() {
private static final TransportVersion ML_INFERENCE_SAGEMAKER = TransportVersion.fromName("ml_inference_sagemaker");
@Override
public boolean isEmpty() {
return true;
}
@Override
public SageMakerStoredTaskSchema updatedTaskSettings(Map<String, Object> newSettings) {
return this;
}
private static final String NAME = "noop_sagemaker_task_schema";
@Override
public String getWriteableName() {
return NAME;
}
@Override
public TransportVersion getMinimalSupportedVersion() {
assert false : "should never be called when supportsVersion is used";
return ML_INFERENCE_SAGEMAKER;
}
@Override
public boolean supportsVersion(TransportVersion version) {
return version.supports(ML_INFERENCE_SAGEMAKER);
}
@Override
public void writeTo(StreamOutput out) {}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) {
return builder;
}
};
/**
* These extra service settings serialize flatly alongside the overall SageMaker ServiceSettings.
*/
@Override
default boolean isFragment() {
return true;
}
@Override
SageMakerStoredTaskSchema updatedTaskSettings(Map<String, Object> newSettings);
default SageMakerStoredTaskSchema override(Map<String, Object> newSettings) {
return updatedTaskSettings(newSettings);
}
}
|
SageMakerStoredTaskSchema
|
java
|
google__dagger
|
javatests/dagger/functional/producers/subcomponent/UsesProducerModuleSubcomponents.java
|
{
"start": 2084,
"end": 2228
}
|
class ____ {
@Produces
@IntoSet
static String produceStringInChild() {
return "from child";
}
}
@Qualifier
@
|
ChildModule
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/threadsafety/ImmutableCheckerTest.java
|
{
"start": 42910,
"end": 43196
}
|
interface ____ {
int f();
}
""")
.addSourceLines(
"threadsafety/Test.java",
"""
package threadsafety;
import com.google.errorprone.annotations.Immutable;
@Immutable
|
Super
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobHistoryParserFactory.java
|
{
"start": 958,
"end": 1060
}
|
class ____ attempts to
* determine the version of job history and return a proper parser.
*/
public
|
that
|
java
|
apache__kafka
|
storage/src/main/java/org/apache/kafka/server/log/remote/storage/RemoteLogManager.java
|
{
"start": 124232,
"end": 124875
}
|
class ____ {
private final long retentionMs;
private final long cleanupUntilMs;
public RetentionTimeData(long retentionMs, long cleanupUntilMs) {
if (retentionMs < 0)
throw new IllegalArgumentException("retentionMs should be non negative, but it is " + retentionMs);
if (cleanupUntilMs < 0)
throw new IllegalArgumentException("cleanupUntilMs should be non negative, but it is " + cleanupUntilMs);
this.retentionMs = retentionMs;
this.cleanupUntilMs = cleanupUntilMs;
}
}
// Visible for testing
static
|
RetentionTimeData
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/OAIPMHEndpointBuilderFactory.java
|
{
"start": 1565,
"end": 24992
}
|
interface ____
extends
EndpointConsumerBuilder {
default AdvancedOAIPMHEndpointConsumerBuilder advanced() {
return (AdvancedOAIPMHEndpointConsumerBuilder) this;
}
/**
* Specifies a lower bound for datestamp-based selective harvesting. UTC
* DateTime value.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param from the value to set
* @return the dsl builder
*/
default OAIPMHEndpointConsumerBuilder from(String from) {
doSetProperty("from", from);
return this;
}
/**
* Identifier of the requested resources. Applicable only with certain
* verbs.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param identifier the value to set
* @return the dsl builder
*/
default OAIPMHEndpointConsumerBuilder identifier(String identifier) {
doSetProperty("identifier", identifier);
return this;
}
/**
* Specifies the metadataPrefix of the format that should be included in
* the metadata part of the returned records.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: oai_dc
* Group: common
*
* @param metadataPrefix the value to set
* @return the dsl builder
*/
default OAIPMHEndpointConsumerBuilder metadataPrefix(String metadataPrefix) {
doSetProperty("metadataPrefix", metadataPrefix);
return this;
}
/**
* Specifies membership as a criteria for set-based selective
* harvesting.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param set the value to set
* @return the dsl builder
*/
default OAIPMHEndpointConsumerBuilder set(String set) {
doSetProperty("set", set);
return this;
}
/**
* Specifies an upper bound for datestamp-based selective harvesting.
* UTC DateTime value.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param until the value to set
* @return the dsl builder
*/
default OAIPMHEndpointConsumerBuilder until(String until) {
doSetProperty("until", until);
return this;
}
/**
* Request name supported by OAI-PMh protocol.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: ListRecords
* Group: common
*
* @param verb the value to set
* @return the dsl builder
*/
default OAIPMHEndpointConsumerBuilder verb(String verb) {
doSetProperty("verb", verb);
return this;
}
/**
* If the polling consumer did not poll any files, you can enable this
* option to send an empty message (no body) instead.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer
*
* @param sendEmptyMessageWhenIdle the value to set
* @return the dsl builder
*/
default OAIPMHEndpointConsumerBuilder sendEmptyMessageWhenIdle(boolean sendEmptyMessageWhenIdle) {
doSetProperty("sendEmptyMessageWhenIdle", sendEmptyMessageWhenIdle);
return this;
}
/**
* If the polling consumer did not poll any files, you can enable this
* option to send an empty message (no body) instead.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: consumer
*
* @param sendEmptyMessageWhenIdle the value to set
* @return the dsl builder
*/
default OAIPMHEndpointConsumerBuilder sendEmptyMessageWhenIdle(String sendEmptyMessageWhenIdle) {
doSetProperty("sendEmptyMessageWhenIdle", sendEmptyMessageWhenIdle);
return this;
}
/**
* The number of subsequent error polls (failed due some error) that
* should happen before the backoffMultipler should kick-in.
*
* The option is a: <code>int</code> type.
*
* Group: scheduler
*
* @param backoffErrorThreshold the value to set
* @return the dsl builder
*/
default OAIPMHEndpointConsumerBuilder backoffErrorThreshold(int backoffErrorThreshold) {
doSetProperty("backoffErrorThreshold", backoffErrorThreshold);
return this;
}
/**
* The number of subsequent error polls (failed due some error) that
* should happen before the backoffMultipler should kick-in.
*
* The option will be converted to a <code>int</code> type.
*
* Group: scheduler
*
* @param backoffErrorThreshold the value to set
* @return the dsl builder
*/
default OAIPMHEndpointConsumerBuilder backoffErrorThreshold(String backoffErrorThreshold) {
doSetProperty("backoffErrorThreshold", backoffErrorThreshold);
return this;
}
/**
* The number of subsequent idle polls that should happen before the
* backoffMultipler should kick-in.
*
* The option is a: <code>int</code> type.
*
* Group: scheduler
*
* @param backoffIdleThreshold the value to set
* @return the dsl builder
*/
default OAIPMHEndpointConsumerBuilder backoffIdleThreshold(int backoffIdleThreshold) {
doSetProperty("backoffIdleThreshold", backoffIdleThreshold);
return this;
}
/**
* The number of subsequent idle polls that should happen before the
* backoffMultipler should kick-in.
*
* The option will be converted to a <code>int</code> type.
*
* Group: scheduler
*
* @param backoffIdleThreshold the value to set
* @return the dsl builder
*/
default OAIPMHEndpointConsumerBuilder backoffIdleThreshold(String backoffIdleThreshold) {
doSetProperty("backoffIdleThreshold", backoffIdleThreshold);
return this;
}
/**
* To let the scheduled polling consumer backoff if there has been a
* number of subsequent idles/errors in a row. The multiplier is then
* the number of polls that will be skipped before the next actual
* attempt is happening again. When this option is in use then
* backoffIdleThreshold and/or backoffErrorThreshold must also be
* configured.
*
* The option is a: <code>int</code> type.
*
* Group: scheduler
*
* @param backoffMultiplier the value to set
* @return the dsl builder
*/
default OAIPMHEndpointConsumerBuilder backoffMultiplier(int backoffMultiplier) {
doSetProperty("backoffMultiplier", backoffMultiplier);
return this;
}
/**
* To let the scheduled polling consumer backoff if there has been a
* number of subsequent idles/errors in a row. The multiplier is then
* the number of polls that will be skipped before the next actual
* attempt is happening again. When this option is in use then
* backoffIdleThreshold and/or backoffErrorThreshold must also be
* configured.
*
* The option will be converted to a <code>int</code> type.
*
* Group: scheduler
*
* @param backoffMultiplier the value to set
* @return the dsl builder
*/
default OAIPMHEndpointConsumerBuilder backoffMultiplier(String backoffMultiplier) {
doSetProperty("backoffMultiplier", backoffMultiplier);
return this;
}
/**
* Milliseconds before the next poll.
*
* The option is a: <code>long</code> type.
*
* Default: 500
* Group: scheduler
*
* @param delay the value to set
* @return the dsl builder
*/
default OAIPMHEndpointConsumerBuilder delay(long delay) {
doSetProperty("delay", delay);
return this;
}
/**
* Milliseconds before the next poll.
*
* The option will be converted to a <code>long</code> type.
*
* Default: 500
* Group: scheduler
*
* @param delay the value to set
* @return the dsl builder
*/
default OAIPMHEndpointConsumerBuilder delay(String delay) {
doSetProperty("delay", delay);
return this;
}
/**
* If greedy is enabled, then the ScheduledPollConsumer will run
* immediately again, if the previous run polled 1 or more messages.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: scheduler
*
* @param greedy the value to set
* @return the dsl builder
*/
default OAIPMHEndpointConsumerBuilder greedy(boolean greedy) {
doSetProperty("greedy", greedy);
return this;
}
/**
* If greedy is enabled, then the ScheduledPollConsumer will run
* immediately again, if the previous run polled 1 or more messages.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: scheduler
*
* @param greedy the value to set
* @return the dsl builder
*/
default OAIPMHEndpointConsumerBuilder greedy(String greedy) {
doSetProperty("greedy", greedy);
return this;
}
/**
* Milliseconds before the first poll starts.
*
* The option is a: <code>long</code> type.
*
* Default: 1000
* Group: scheduler
*
* @param initialDelay the value to set
* @return the dsl builder
*/
default OAIPMHEndpointConsumerBuilder initialDelay(long initialDelay) {
doSetProperty("initialDelay", initialDelay);
return this;
}
/**
* Milliseconds before the first poll starts.
*
* The option will be converted to a <code>long</code> type.
*
* Default: 1000
* Group: scheduler
*
* @param initialDelay the value to set
* @return the dsl builder
*/
default OAIPMHEndpointConsumerBuilder initialDelay(String initialDelay) {
doSetProperty("initialDelay", initialDelay);
return this;
}
/**
* Specifies a maximum limit of number of fires. So if you set it to 1,
* the scheduler will only fire once. If you set it to 5, it will only
* fire five times. A value of zero or negative means fire forever.
*
* The option is a: <code>long</code> type.
*
* Default: 0
* Group: scheduler
*
* @param repeatCount the value to set
* @return the dsl builder
*/
default OAIPMHEndpointConsumerBuilder repeatCount(long repeatCount) {
doSetProperty("repeatCount", repeatCount);
return this;
}
/**
* Specifies a maximum limit of number of fires. So if you set it to 1,
* the scheduler will only fire once. If you set it to 5, it will only
* fire five times. A value of zero or negative means fire forever.
*
* The option will be converted to a <code>long</code> type.
*
* Default: 0
* Group: scheduler
*
* @param repeatCount the value to set
* @return the dsl builder
*/
default OAIPMHEndpointConsumerBuilder repeatCount(String repeatCount) {
doSetProperty("repeatCount", repeatCount);
return this;
}
/**
* The consumer logs a start/complete log line when it polls. This
* option allows you to configure the logging level for that.
*
* The option is a: <code>org.apache.camel.LoggingLevel</code> type.
*
* Default: TRACE
* Group: scheduler
*
* @param runLoggingLevel the value to set
* @return the dsl builder
*/
default OAIPMHEndpointConsumerBuilder runLoggingLevel(org.apache.camel.LoggingLevel runLoggingLevel) {
doSetProperty("runLoggingLevel", runLoggingLevel);
return this;
}
/**
* The consumer logs a start/complete log line when it polls. This
* option allows you to configure the logging level for that.
*
* The option will be converted to a
* <code>org.apache.camel.LoggingLevel</code> type.
*
* Default: TRACE
* Group: scheduler
*
* @param runLoggingLevel the value to set
* @return the dsl builder
*/
default OAIPMHEndpointConsumerBuilder runLoggingLevel(String runLoggingLevel) {
doSetProperty("runLoggingLevel", runLoggingLevel);
return this;
}
/**
* Allows for configuring a custom/shared thread pool to use for the
* consumer. By default each consumer has its own single threaded thread
* pool.
*
* The option is a:
* <code>java.util.concurrent.ScheduledExecutorService</code> type.
*
* Group: scheduler
*
* @param scheduledExecutorService the value to set
* @return the dsl builder
*/
default OAIPMHEndpointConsumerBuilder scheduledExecutorService(ScheduledExecutorService scheduledExecutorService) {
doSetProperty("scheduledExecutorService", scheduledExecutorService);
return this;
}
/**
* Allows for configuring a custom/shared thread pool to use for the
* consumer. By default each consumer has its own single threaded thread
* pool.
*
* The option will be converted to a
* <code>java.util.concurrent.ScheduledExecutorService</code> type.
*
* Group: scheduler
*
* @param scheduledExecutorService the value to set
* @return the dsl builder
*/
default OAIPMHEndpointConsumerBuilder scheduledExecutorService(String scheduledExecutorService) {
doSetProperty("scheduledExecutorService", scheduledExecutorService);
return this;
}
/**
* To use a cron scheduler from either camel-spring or camel-quartz
* component. Use value spring or quartz for built in scheduler.
*
* The option is a: <code>java.lang.Object</code> type.
*
* Default: none
* Group: scheduler
*
* @param scheduler the value to set
* @return the dsl builder
*/
default OAIPMHEndpointConsumerBuilder scheduler(Object scheduler) {
doSetProperty("scheduler", scheduler);
return this;
}
/**
* To use a cron scheduler from either camel-spring or camel-quartz
* component. Use value spring or quartz for built in scheduler.
*
* The option will be converted to a <code>java.lang.Object</code> type.
*
* Default: none
* Group: scheduler
*
* @param scheduler the value to set
* @return the dsl builder
*/
default OAIPMHEndpointConsumerBuilder scheduler(String scheduler) {
doSetProperty("scheduler", scheduler);
return this;
}
/**
* To configure additional properties when using a custom scheduler or
* any of the Quartz, Spring based scheduler. This is a multi-value
* option with prefix: scheduler.
*
* The option is a: <code>java.util.Map<java.lang.String,
* java.lang.Object></code> type.
* The option is multivalued, and you can use the
* schedulerProperties(String, Object) method to add a value (call the
* method multiple times to set more values).
*
* Group: scheduler
*
* @param key the option key
* @param value the option value
* @return the dsl builder
*/
default OAIPMHEndpointConsumerBuilder schedulerProperties(String key, Object value) {
doSetMultiValueProperty("schedulerProperties", "scheduler." + key, value);
return this;
}
/**
* To configure additional properties when using a custom scheduler or
* any of the Quartz, Spring based scheduler. This is a multi-value
* option with prefix: scheduler.
*
* The option is a: <code>java.util.Map<java.lang.String,
* java.lang.Object></code> type.
* The option is multivalued, and you can use the
* schedulerProperties(String, Object) method to add a value (call the
* method multiple times to set more values).
*
* Group: scheduler
*
* @param values the values
* @return the dsl builder
*/
default OAIPMHEndpointConsumerBuilder schedulerProperties(Map values) {
doSetMultiValueProperties("schedulerProperties", "scheduler.", values);
return this;
}
/**
* Whether the scheduler should be auto started.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: scheduler
*
* @param startScheduler the value to set
* @return the dsl builder
*/
default OAIPMHEndpointConsumerBuilder startScheduler(boolean startScheduler) {
doSetProperty("startScheduler", startScheduler);
return this;
}
/**
* Whether the scheduler should be auto started.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: true
* Group: scheduler
*
* @param startScheduler the value to set
* @return the dsl builder
*/
default OAIPMHEndpointConsumerBuilder startScheduler(String startScheduler) {
doSetProperty("startScheduler", startScheduler);
return this;
}
/**
* Time unit for initialDelay and delay options.
*
* The option is a: <code>java.util.concurrent.TimeUnit</code> type.
*
* Default: MILLISECONDS
* Group: scheduler
*
* @param timeUnit the value to set
* @return the dsl builder
*/
default OAIPMHEndpointConsumerBuilder timeUnit(TimeUnit timeUnit) {
doSetProperty("timeUnit", timeUnit);
return this;
}
/**
* Time unit for initialDelay and delay options.
*
* The option will be converted to a
* <code>java.util.concurrent.TimeUnit</code> type.
*
* Default: MILLISECONDS
* Group: scheduler
*
* @param timeUnit the value to set
* @return the dsl builder
*/
default OAIPMHEndpointConsumerBuilder timeUnit(String timeUnit) {
doSetProperty("timeUnit", timeUnit);
return this;
}
/**
* Controls if fixed delay or fixed rate is used. See
* ScheduledExecutorService in JDK for details.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: scheduler
*
* @param useFixedDelay the value to set
* @return the dsl builder
*/
default OAIPMHEndpointConsumerBuilder useFixedDelay(boolean useFixedDelay) {
doSetProperty("useFixedDelay", useFixedDelay);
return this;
}
/**
* Controls if fixed delay or fixed rate is used. See
* ScheduledExecutorService in JDK for details.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: true
* Group: scheduler
*
* @param useFixedDelay the value to set
* @return the dsl builder
*/
default OAIPMHEndpointConsumerBuilder useFixedDelay(String useFixedDelay) {
doSetProperty("useFixedDelay", useFixedDelay);
return this;
}
/**
* Ignore SSL certificate warnings.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: security
*
* @param ignoreSSLWarnings the value to set
* @return the dsl builder
*/
default OAIPMHEndpointConsumerBuilder ignoreSSLWarnings(boolean ignoreSSLWarnings) {
doSetProperty("ignoreSSLWarnings", ignoreSSLWarnings);
return this;
}
/**
* Ignore SSL certificate warnings.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: security
*
* @param ignoreSSLWarnings the value to set
* @return the dsl builder
*/
default OAIPMHEndpointConsumerBuilder ignoreSSLWarnings(String ignoreSSLWarnings) {
doSetProperty("ignoreSSLWarnings", ignoreSSLWarnings);
return this;
}
/**
* Causes the defined url to make an https request.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: security
*
* @param ssl the value to set
* @return the dsl builder
*/
default OAIPMHEndpointConsumerBuilder ssl(boolean ssl) {
doSetProperty("ssl", ssl);
return this;
}
/**
* Causes the defined url to make an https request.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: security
*
* @param ssl the value to set
* @return the dsl builder
*/
default OAIPMHEndpointConsumerBuilder ssl(String ssl) {
doSetProperty("ssl", ssl);
return this;
}
}
/**
* Advanced builder for endpoint consumers for the OAI-PMH component.
*/
public
|
OAIPMHEndpointConsumerBuilder
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/state/heap/StateTable.java
|
{
"start": 2076,
"end": 2332
}
|
class ____ state tables. Accesses to state are typically scoped by the currently active key,
* as provided through the {@link InternalKeyContext}.
*
* @param <K> type of key
* @param <N> type of namespace
* @param <S> type of state
*/
public abstract
|
for
|
java
|
quarkusio__quarkus
|
integration-tests/main/src/main/java/io/quarkus/it/rest/NonAnnotatedImplementation.java
|
{
"start": 157,
"end": 186
}
|
interface ____";
}
}
|
endpoint
|
java
|
spring-projects__spring-boot
|
module/spring-boot-amqp/src/main/java/org/springframework/boot/amqp/autoconfigure/metrics/RabbitConnectionFactoryMetricsPostProcessor.java
|
{
"start": 1513,
"end": 3410
}
|
class ____ implements BeanPostProcessor, Ordered {
private static final String CONNECTION_FACTORY_SUFFIX = "connectionFactory";
private final ApplicationContext context;
private volatile @Nullable MeterRegistry meterRegistry;
RabbitConnectionFactoryMetricsPostProcessor(ApplicationContext context) {
this.context = context;
}
@Override
public Object postProcessAfterInitialization(Object bean, String beanName) {
if (bean instanceof AbstractConnectionFactory connectionFactory) {
bindConnectionFactoryToRegistry(getMeterRegistry(), beanName, connectionFactory);
}
return bean;
}
private void bindConnectionFactoryToRegistry(MeterRegistry registry, String beanName,
AbstractConnectionFactory connectionFactory) {
ConnectionFactory rabbitConnectionFactory = connectionFactory.getRabbitConnectionFactory();
String connectionFactoryName = getConnectionFactoryName(beanName);
new RabbitMetrics(rabbitConnectionFactory, Tags.of("name", connectionFactoryName)).bindTo(registry);
}
/**
* Get the name of a ConnectionFactory based on its {@code beanName}.
* @param beanName the name of the connection factory bean
* @return a name for the given connection factory
*/
private String getConnectionFactoryName(String beanName) {
if (beanName.length() > CONNECTION_FACTORY_SUFFIX.length()
&& StringUtils.endsWithIgnoreCase(beanName, CONNECTION_FACTORY_SUFFIX)) {
return beanName.substring(0, beanName.length() - CONNECTION_FACTORY_SUFFIX.length());
}
return beanName;
}
private MeterRegistry getMeterRegistry() {
MeterRegistry meterRegistry = this.meterRegistry;
if (meterRegistry == null) {
meterRegistry = this.context.getBean(MeterRegistry.class);
this.meterRegistry = meterRegistry;
}
return meterRegistry;
}
@Override
public int getOrder() {
return Ordered.HIGHEST_PRECEDENCE;
}
}
|
RabbitConnectionFactoryMetricsPostProcessor
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DoubleBounds.java
|
{
"start": 1242,
"end": 1486
}
|
class ____ similar to {@link LongBounds} used in date histograms, but is using longs to store data. LongBounds and DoubleBounds are
* not used interchangeably and therefore don't share any common interfaces except for serialization.
*/
public
|
is
|
java
|
spring-projects__spring-boot
|
module/spring-boot-jackson2/src/main/java/org/springframework/boot/jackson2/autoconfigure/Jackson2AutoConfiguration.java
|
{
"start": 14362,
"end": 15442
}
|
class ____ implements RuntimeHintsRegistrar {
@Override
public void registerHints(RuntimeHints hints, @Nullable ClassLoader classLoader) {
if (ClassUtils.isPresent("com.fasterxml.jackson.databind.PropertyNamingStrategy", classLoader)) {
registerPropertyNamingStrategyHints(hints.reflection());
}
}
/**
* Register hints for the {@code configurePropertyNamingStrategyField} method to
* use.
* @param hints reflection hints
*/
private void registerPropertyNamingStrategyHints(ReflectionHints hints) {
registerPropertyNamingStrategyHints(hints, PropertyNamingStrategies.class);
}
private void registerPropertyNamingStrategyHints(ReflectionHints hints, Class<?> type) {
Stream.of(type.getDeclaredFields())
.filter(this::isPropertyNamingStrategyField)
.forEach(hints::registerField);
}
private boolean isPropertyNamingStrategyField(Field candidate) {
return ReflectionUtils.isPublicStaticFinal(candidate)
&& candidate.getType().isAssignableFrom(PropertyNamingStrategy.class);
}
}
}
|
Jackson2AutoConfigurationRuntimeHints
|
java
|
micronaut-projects__micronaut-core
|
inject-java/src/test/java/io/micronaut/inject/qualifiers/stereotype/StereotypeQualifierTest.java
|
{
"start": 310,
"end": 2552
}
|
class ____ {
@Test
void testByStereotypeQualifier() {
try (ApplicationContext context = ApplicationContext.run(Map.of("spec.name", "StereotypeQualifierSpec"))) {
Qualifier<Object> qualifier = Qualifiers.byQualifiers(Qualifiers.byStereotype(MyStereotype.class));
var definitions = context.getBeanDefinitions(qualifier);
assertEquals(1, definitions.size());
var beanType = definitions.iterator().next().getBeanType();
assertEquals(MyBean.class, beanType);
}
}
@Test
void testByNamedStereotypeQualifier() {
try (ApplicationContext context = ApplicationContext.run(Map.of("spec.name", "StereotypeQualifierSpec"))) {
Qualifier<Object> qualifier = Qualifiers.byQualifiers(Qualifiers.byStereotype(MyStereotype.class.getName()));
var definitions = context.getBeanDefinitions(qualifier);
assertEquals(1, definitions.size());
var beanType = definitions.iterator().next().getBeanType();
assertEquals(MyBean.class, beanType);
}
}
@Test
void testByRepeatableStereotypeQualifier() {
try (ApplicationContext context = ApplicationContext.run(Map.of("spec.name", "StereotypeQualifierSpec"))) {
Qualifier<Object> qualifier = Qualifiers.byQualifiers(Qualifiers.byStereotype(MyRepeatableStereotype.class));
var definitions = context.getBeanDefinitions(qualifier);
assertEquals(1, definitions.size());
var beanType = definitions.iterator().next().getBeanType();
assertEquals(MyBean2.class, beanType);
}
}
@Test
void testByRepeatableNamedStereotypeQualifier() {
try (ApplicationContext context = ApplicationContext.run(Map.of("spec.name", "StereotypeQualifierSpec"))) {
Qualifier<Object> qualifier = Qualifiers.byQualifiers(Qualifiers.byStereotype(MyRepeatableStereotype.class.getName()));
var definitions = context.getBeanDefinitions(qualifier);
assertEquals(1, definitions.size());
var beanType = definitions.iterator().next().getBeanType();
assertEquals(MyBean2.class, beanType);
}
}
}
|
StereotypeQualifierTest
|
java
|
apache__flink
|
flink-filesystems/flink-oss-fs-hadoop/src/test/java/org/apache/flink/fs/osshadoop/writer/OSSRecoverableFsDataOutputStreamTest.java
|
{
"start": 1655,
"end": 8825
}
|
class ____ {
private static Path basePath;
private static final String TEST_DATA_DIR = "tests-" + UUID.randomUUID();
private FileSystem fs;
private static final String TEST_OBJECT_NAME_PREFIX = "TEST-OBJECT-";
private Path objectPath;
private RecoverableWriter writer;
private RecoverableFsDataOutputStream fsDataOutputStream;
@TempDir public static File temporaryFolder;
@BeforeEach
void before() throws IOException {
OSSTestCredentials.assumeCredentialsAvailable();
final Configuration conf = new Configuration();
conf.setString("fs.oss.endpoint", OSSTestCredentials.getOSSEndpoint());
conf.setString("fs.oss.accessKeyId", OSSTestCredentials.getOSSAccessKey());
conf.setString("fs.oss.accessKeySecret", OSSTestCredentials.getOSSSecretKey());
FileSystem.initialize(conf);
basePath = new Path(OSSTestCredentials.getTestBucketUri() + TEST_DATA_DIR);
fs = basePath.getFileSystem();
writer = fs.createRecoverableWriter();
objectPath = new Path(basePath + "/" + TEST_OBJECT_NAME_PREFIX + UUID.randomUUID());
fsDataOutputStream = writer.open(objectPath);
}
@Test
void testRegularDataWritten() throws IOException {
final byte[] part = OSSTestUtils.bytesOf("hello world", 1024 * 1024);
fsDataOutputStream.write(part);
RecoverableFsDataOutputStream.Committer committer = fsDataOutputStream.closeForCommit();
committer.commit();
OSSTestUtils.objectContentEquals(fs, objectPath, part);
}
@Test
void testNoDataWritten() throws IOException {
RecoverableFsDataOutputStream.Committer committer = fsDataOutputStream.closeForCommit();
committer.commit();
// will not create empty object
assertThat(fs.exists(objectPath)).isFalse();
}
@Test
void testCloseForCommitOnClosedStreamShouldFail() throws IOException {
fsDataOutputStream.closeForCommit().commit();
assertThatThrownBy(() -> fsDataOutputStream.closeForCommit().commit())
.isInstanceOf(IOException.class);
}
@Test
void testCloseWithoutCommit() throws IOException {
final byte[] part = OSSTestUtils.bytesOf("hello world", 1024 * 1024);
fsDataOutputStream.write(part);
fsDataOutputStream.close();
// close without commit will not upload current part
assertThat(fs.exists(objectPath)).isFalse();
}
@Test
void testWriteLargeFile() throws IOException {
List<byte[]> buffers = OSSTestUtils.generateRandomBuffer(50 * 1024 * 1024, 10 * 104 * 1024);
for (byte[] buffer : buffers) {
fsDataOutputStream.write(buffer);
}
RecoverableFsDataOutputStream.Committer committer = fsDataOutputStream.closeForCommit();
committer.commit();
OSSTestUtils.objectContentEquals(fs, objectPath, buffers);
}
@Test
void testConcatWrites() throws IOException {
fsDataOutputStream.write(OSSTestUtils.bytesOf("hello", 5));
fsDataOutputStream.write(OSSTestUtils.bytesOf(" ", 1));
fsDataOutputStream.write(OSSTestUtils.bytesOf("world", 5));
RecoverableFsDataOutputStream.Committer committer = fsDataOutputStream.closeForCommit();
committer.commit();
OSSTestUtils.objectContentEquals(fs, objectPath, OSSTestUtils.bytesOf("hello world", 11));
}
@Test
void testRegularRecovery() throws IOException {
final byte[] part = OSSTestUtils.bytesOf("hello world", 1024 * 1024);
fsDataOutputStream.write(part);
RecoverableWriter.ResumeRecoverable recoverable = fsDataOutputStream.persist();
fsDataOutputStream = writer.recover(recoverable);
RecoverableFsDataOutputStream.Committer committer = fsDataOutputStream.closeForCommit();
committer.commit();
OSSTestUtils.objectContentEquals(fs, objectPath, part);
}
@Test
void testContinuousPersistWithoutWrites() throws IOException {
fsDataOutputStream.write(OSSTestUtils.bytesOf("hello", 5));
fsDataOutputStream.persist();
fsDataOutputStream.persist();
fsDataOutputStream.persist();
fsDataOutputStream.persist();
fsDataOutputStream.write(OSSTestUtils.bytesOf(" ", 1));
fsDataOutputStream.write(OSSTestUtils.bytesOf("world", 5));
RecoverableFsDataOutputStream.Committer committer = fsDataOutputStream.closeForCommit();
committer.commit();
OSSTestUtils.objectContentEquals(fs, objectPath, OSSTestUtils.bytesOf("hello world", 11));
}
@Test
void testWriteSmallDataAndPersist() throws IOException {
fsDataOutputStream.write(OSSTestUtils.bytesOf("h", 1));
fsDataOutputStream.persist();
fsDataOutputStream.write(OSSTestUtils.bytesOf("e", 1));
fsDataOutputStream.persist();
fsDataOutputStream.write(OSSTestUtils.bytesOf("l", 1));
fsDataOutputStream.persist();
fsDataOutputStream.write(OSSTestUtils.bytesOf("l", 1));
fsDataOutputStream.persist();
fsDataOutputStream.write(OSSTestUtils.bytesOf("o", 1));
fsDataOutputStream.persist();
fsDataOutputStream.write(OSSTestUtils.bytesOf(" ", 1));
fsDataOutputStream.write(OSSTestUtils.bytesOf("world", 5));
fsDataOutputStream.persist();
RecoverableFsDataOutputStream.Committer committer = fsDataOutputStream.closeForCommit();
committer.commit();
OSSTestUtils.objectContentEquals(fs, objectPath, OSSTestUtils.bytesOf("hello world", 11));
}
@Test
void testWriteBigDataAndPersist() throws IOException {
List<byte[]> buffers = OSSTestUtils.generateRandomBuffer(50 * 1024 * 1024, 10 * 104 * 1024);
for (byte[] buffer : buffers) {
fsDataOutputStream.write(buffer);
fsDataOutputStream.persist();
}
RecoverableFsDataOutputStream.Committer committer = fsDataOutputStream.closeForCommit();
committer.commit();
OSSTestUtils.objectContentEquals(fs, objectPath, buffers);
}
@Test
void testDataWrittenAfterRecovery() throws IOException {
final byte[] part = OSSTestUtils.bytesOf("hello world", 1024 * 1024);
fsDataOutputStream.write(part);
RecoverableWriter.ResumeRecoverable recoverable = fsDataOutputStream.persist();
fsDataOutputStream = writer.recover(recoverable);
List<byte[]> buffers = OSSTestUtils.generateRandomBuffer(50 * 1024 * 1024, 10 * 104 * 1024);
for (byte[] buffer : buffers) {
fsDataOutputStream.write(buffer);
}
RecoverableFsDataOutputStream.Committer committer = fsDataOutputStream.closeForCommit();
committer.commit();
buffers.add(0, part);
OSSTestUtils.objectContentEquals(fs, objectPath, buffers);
}
@AfterEach
void after() throws IOException {
try {
if (fs != null) {
fs.delete(basePath, true);
}
} finally {
FileSystem.initialize(new Configuration());
}
}
}
|
OSSRecoverableFsDataOutputStreamTest
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/operators/BatchTask.java
|
{
"start": 70179,
"end": 70778
}
|
class ____ a subclass, if the check is required
if (superClass != null && !superClass.isAssignableFrom(stub.getClass())) {
throw new RuntimeException(
"The class '"
+ stub.getClass().getName()
+ "' is not a subclass of '"
+ superClass.getName()
+ "' as is required.");
}
return stub;
} catch (ClassCastException ccex) {
throw new RuntimeException(
"The UDF
|
is
|
java
|
spring-projects__spring-framework
|
spring-core/src/main/java/org/springframework/core/io/support/SpringFactoriesLoader.java
|
{
"start": 12375,
"end": 12811
}
|
class ____.
* @return a {@link SpringFactoriesLoader} instance
* @since 6.0
* @see #forDefaultResourceLocation(ClassLoader)
*/
public static SpringFactoriesLoader forDefaultResourceLocation() {
return forDefaultResourceLocation(null);
}
/**
* Create a {@link SpringFactoriesLoader} instance that will load and
* instantiate the factory implementations from
* {@value #FACTORIES_RESOURCE_LOCATION}, using the given
|
loader
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/AclException.java
|
{
"start": 1024,
"end": 1487
}
|
class ____ extends IOException {
private static final long serialVersionUID = 1L;
/**
* Creates a new AclException.
*
* @param message String message
*/
public AclException(String message) {
super(message);
}
/**
* Creates a new AclException.
*
* @param message String message
* @param cause The cause of the exception
*/
public AclException(String message, Throwable cause) {
super(message, cause);
}
}
|
AclException
|
java
|
redisson__redisson
|
redisson/src/main/java/org/redisson/RedissonBaseLock.java
|
{
"start": 1626,
"end": 1748
}
|
class ____ implementing distributed locks
*
* @author Danila Varatyntsev
* @author Nikita Koksharov
*/
public abstract
|
for
|
java
|
apache__kafka
|
clients/src/test/java/org/apache/kafka/common/message/SimpleArraysMessageTest.java
|
{
"start": 1097,
"end": 2669
}
|
class ____ {
@Test
public void testArrayBoundsChecking() {
// SimpleArraysMessageData takes 2 arrays
final ByteBuffer buf = ByteBuffer.wrap(new byte[] {
(byte) 0x7f, // Set size of first array to 126 which is larger than the size of this buffer
(byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00
});
final SimpleArraysMessageData out = new SimpleArraysMessageData();
ByteBufferAccessor accessor = new ByteBufferAccessor(buf);
assertEquals("Tried to allocate a collection of size 126, but there are only 7 bytes remaining.",
assertThrows(RuntimeException.class, () -> out.read(accessor, (short) 2)).getMessage());
}
@Test
public void testArrayBoundsCheckingOtherArray() {
// SimpleArraysMessageData takes 2 arrays
final ByteBuffer buf = ByteBuffer.wrap(new byte[] {
(byte) 0x01, // Set size of first array to 0
(byte) 0x7e, // Set size of second array to 125 which is larger than the size of this buffer
(byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00
});
final SimpleArraysMessageData out = new SimpleArraysMessageData();
ByteBufferAccessor accessor = new ByteBufferAccessor(buf);
assertEquals("Tried to allocate a collection of size 125, but there are only 6 bytes remaining.",
assertThrows(RuntimeException.class, () -> out.read(accessor, (short) 2)).getMessage());
}
}
|
SimpleArraysMessageTest
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/parser/creator/JSONCreatorTest_double.java
|
{
"start": 298,
"end": 1153
}
|
class ____ extends TestCase {
public void test_create() throws Exception {
Entity entity = new Entity(123.45D, "菜姐");
String text = JSON.toJSONString(entity);
Entity entity2 = JSON.parseObject(text, Entity.class);
Assert.assertTrue(entity.getId() == entity2.getId());
Assert.assertEquals(entity.getName(), entity2.getName());
}
public void test_create_2() throws Exception {
Entity entity = new Entity(123.45D, "菜姐");
String text = JSON.toJSONString(entity);
ParserConfig config = new ParserConfig();
Entity entity2 = JSON.parseObject(text, Entity.class, config, 0);
Assert.assertTrue(entity.getId() == entity2.getId());
Assert.assertEquals(entity.getName(), entity2.getName());
}
public static
|
JSONCreatorTest_double
|
java
|
google__guava
|
android/guava-tests/test/com/google/common/collect/HashBasedTableRowMapTest.java
|
{
"start": 835,
"end": 1071
}
|
class ____ extends RowMapTests {
public HashBasedTableRowMapTest() {
super(false, true, true, true);
}
@Override
Table<String, Integer, Character> makeTable() {
return HashBasedTable.create();
}
}
|
HashBasedTableRowMapTest
|
java
|
resilience4j__resilience4j
|
resilience4j-feign/src/main/java/io/github/resilience4j/feign/FeignDecorators.java
|
{
"start": 8449,
"end": 10209
}
|
interface ____ when
* calling {@link Resilience4jFeign.Builder#target(Class, String)}.
* @param filter the filter must return <code>true</code> for the fallback to be
* called.
* @return the builder
*/
public Builder withFallbackFactory(Function<Exception, ?> fallbackFactory,
Predicate<Exception> filter) {
decorators.add(new FallbackDecorator<>(new FallbackFactory<>(fallbackFactory), filter));
return this;
}
/**
* Adds a {@link Bulkhead} to the decorator chain.
*
* @param bulkhead a fully configured {@link Bulkhead}.
* @return the builder
*/
public Builder withBulkhead(Bulkhead bulkhead) {
addFeignDecorator(fn -> Bulkhead.decorateCheckedFunction(bulkhead, fn));
return this;
}
private void addFeignDecorator(UnaryOperator<CheckedFunction<Object[], Object>> decorator) {
decorators
.add((fn, m, mh, t) -> {
// prevent default methods from being decorated
// as they do not participate in actual web requests
if (m.isDefault()) {
return fn;
} else {
return decorator.apply(fn);
}
});
}
/**
* Builds the decorator chain. This can then be used to setup an instance of {@link
* Resilience4jFeign}.
*
* @return the decorators.
*/
public FeignDecorators build() {
return new FeignDecorators(decorators);
}
}
}
|
specified
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/id/enhanced/SequenceNamingStrategyTest.java
|
{
"start": 7831,
"end": 8046
}
|
class ____ {
@Id
@GeneratedValue(generator = "table_generator")
@SequenceGenerator(name = "table_generator")
private Long id;
private String name;
}
@Entity(name = "TestEntity4")
public static
|
TestEntity3
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/CheckReturnValueTest.java
|
{
"start": 17039,
"end": 17634
}
|
class ____ {
void m() {
lib.Lib.Inner.InnerMost.f();
}
}
""")
.doTest();
}
@Test
public void packageWithCanIgnoreAnnotation() {
compilationHelper
.addSourceLines(
"package-info.java",
"""
@com.google.errorprone.annotations.CheckReturnValue
package lib;
""")
.addSourceLines(
"lib/Lib.java",
"""
package lib;
@com.google.errorprone.annotations.CanIgnoreReturnValue
public
|
Test
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AUtils.java
|
{
"start": 24323,
"end": 24876
}
|
class ____ implement one of the following means of construction, which are
* attempted in order:
*
* <ol>
* <li>a public constructor accepting java.net.URI and
* org.apache.hadoop.conf.Configuration</li>
* <li>a public constructor accepting
* org.apache.hadoop.conf.Configuration</li>
* <li>a public static method named as per methodName, that accepts no
* arguments and returns an instance of
* specified type, or</li>
* <li>a public default constructor.</li>
* </ol>
*
* @param className name of
|
must
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/scheduler/ExecutionSlotSharingGroup.java
|
{
"start": 1506,
"end": 2875
}
|
class ____ implements WeightLoadable {
private final Set<ExecutionVertexID> executionVertexIds;
@Nonnull private final SlotSharingGroup slotSharingGroup;
public ExecutionSlotSharingGroup(@Nonnull SlotSharingGroup slotSharingGroup) {
this.slotSharingGroup = Preconditions.checkNotNull(slotSharingGroup);
this.executionVertexIds = new HashSet<>();
}
public void addVertex(final ExecutionVertexID executionVertexId) {
executionVertexIds.add(executionVertexId);
}
@Nonnull
public SlotSharingGroup getSlotSharingGroup() {
return slotSharingGroup;
}
@Nonnull
ResourceProfile getResourceProfile() {
return slotSharingGroup.getResourceProfile();
}
public Set<ExecutionVertexID> getExecutionVertexIds() {
return Collections.unmodifiableSet(executionVertexIds);
}
@Override
public String toString() {
return "ExecutionSlotSharingGroup{"
+ "executionVertexIds="
+ executionVertexIds
+ ", slotSharingGroup="
+ slotSharingGroup
+ ", loadingWeight="
+ getLoading()
+ '}';
}
@Nonnull
@Override
public LoadingWeight getLoading() {
return new DefaultLoadingWeight(executionVertexIds.size());
}
}
|
ExecutionSlotSharingGroup
|
java
|
spring-projects__spring-boot
|
core/spring-boot/src/test/java/org/springframework/boot/logging/logback/LogbackConfigurationTests.java
|
{
"start": 1687,
"end": 5646
}
|
class ____ {
@Test
@WithResource(name = "custom-console-log-pattern.xml", content = """
<configuration>
<property name="CONSOLE_LOG_PATTERN" value="foo" />
<include resource="org/springframework/boot/logging/logback/base.xml" />
</configuration>
""")
void consolePatternCanBeOverridden(@ResourcesRoot File resourcesRoot) throws JoranException {
JoranConfigurator configurator = new JoranConfigurator();
LoggerContext context = new LoggerContext();
configurator.setContext(context);
configurator.doConfigure(new File(resourcesRoot, "custom-console-log-pattern.xml"));
Appender<ILoggingEvent> appender = context.getLogger("ROOT").getAppender("CONSOLE");
assertThat(appender).isInstanceOf(ConsoleAppender.class);
Encoder<?> encoder = ((ConsoleAppender<?>) appender).getEncoder();
assertThat(encoder).isInstanceOf(PatternLayoutEncoder.class);
assertThat(((PatternLayoutEncoder) encoder).getPattern()).isEqualTo("foo");
}
@Test
@WithResource(name = "custom-file-log-pattern.xml", content = """
<configuration>
<property name="FILE_LOG_PATTERN" value="bar" />
<include resource="org/springframework/boot/logging/logback/base.xml" />
</configuration>
""")
void filePatternCanBeOverridden(@ResourcesRoot File resourcesRoot) throws JoranException {
JoranConfigurator configurator = new JoranConfigurator();
LoggerContext context = new LoggerContext();
configurator.setContext(context);
configurator.doConfigure(new File(resourcesRoot, "custom-file-log-pattern.xml"));
Appender<ILoggingEvent> appender = context.getLogger("ROOT").getAppender("FILE");
assertThat(appender).isInstanceOf(FileAppender.class);
Encoder<?> encoder = ((FileAppender<?>) appender).getEncoder();
assertThat(encoder).isInstanceOf(PatternLayoutEncoder.class);
assertThat(((PatternLayoutEncoder) encoder).getPattern()).isEqualTo("bar");
}
@Test
@WithResource(name = "custom-file-log-pattern.xml", content = """
<configuration>
<property name="FILE_LOG_PATTERN" value="bar" />
<include resource="org/springframework/boot/logging/logback/base.xml" />
</configuration>
""")
void defaultRollingFileNamePattern(@ResourcesRoot File resourcesRoot) throws JoranException {
JoranConfigurator configurator = new JoranConfigurator();
LoggerContext context = new LoggerContext();
configurator.setContext(context);
configurator.doConfigure(new File(resourcesRoot, "custom-file-log-pattern.xml"));
Appender<ILoggingEvent> appender = context.getLogger("ROOT").getAppender("FILE");
assertThat(appender).isInstanceOf(RollingFileAppender.class);
RollingPolicy rollingPolicy = ((RollingFileAppender<?>) appender).getRollingPolicy();
String fileNamePattern = ((SizeAndTimeBasedRollingPolicy<?>) rollingPolicy).getFileNamePattern();
assertThat(fileNamePattern).endsWith("spring.log.%d{yyyy-MM-dd}.%i.gz");
}
@Test
@WithResource(name = "custom-file-log-pattern-with-fileNamePattern.xml", content = """
<configuration>
<property name="LOGBACK_ROLLINGPOLICY_FILE_NAME_PATTERN" value="my.log.%d{yyyyMMdd}.%i.gz"/>
<include resource="org/springframework/boot/logging/logback/base.xml" />
</configuration>
""")
void customRollingFileNamePattern(@ResourcesRoot File resourcesRoot) throws JoranException {
JoranConfigurator configurator = new JoranConfigurator();
LoggerContext context = new LoggerContext();
configurator.setContext(context);
configurator.doConfigure(new File(resourcesRoot, "custom-file-log-pattern-with-fileNamePattern.xml"));
Appender<ILoggingEvent> appender = context.getLogger("ROOT").getAppender("FILE");
assertThat(appender).isInstanceOf(RollingFileAppender.class);
RollingPolicy rollingPolicy = ((RollingFileAppender<?>) appender).getRollingPolicy();
String fileNamePattern = ((SizeAndTimeBasedRollingPolicy<?>) rollingPolicy).getFileNamePattern();
assertThat(fileNamePattern).endsWith("my.log.%d{yyyyMMdd}.%i.gz");
}
}
|
LogbackConfigurationTests
|
java
|
quarkusio__quarkus
|
independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/records/NormalScopedRecordProducerTest.java
|
{
"start": 1079,
"end": 1254
}
|
class ____ {
@Produces
@ApplicationScoped
MyRecord produce() {
return new MyRecord();
}
}
record MyRecord() {
}
}
|
Producer
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/GlobalBalancingWeightsFactory.java
|
{
"start": 1110,
"end": 2897
}
|
class ____ implements BalancingWeights {
private final WeightFunction weightFunction;
private final boolean diskUsageIgnored;
GlobalBalancingWeights() {
final float diskUsageBalanceFactor = balancerSettings.getDiskUsageBalanceFactor();
this.weightFunction = new WeightFunction(
balancerSettings.getShardBalanceFactor(),
balancerSettings.getIndexBalanceFactor(),
balancerSettings.getWriteLoadBalanceFactor(),
diskUsageBalanceFactor
);
this.diskUsageIgnored = diskUsageBalanceFactor == 0;
}
@Override
public WeightFunction weightFunctionForShard(ShardRouting shard) {
return weightFunction;
}
@Override
public WeightFunction weightFunctionForNode(RoutingNode node) {
return weightFunction;
}
@Override
public NodeSorters createNodeSorters(BalancedShardsAllocator.ModelNode[] modelNodes, BalancedShardsAllocator.Balancer balancer) {
return new GlobalNodeSorters(new BalancedShardsAllocator.NodeSorter(modelNodes, weightFunction, balancer));
}
@Override
public boolean diskUsageIgnored() {
return diskUsageIgnored;
}
private record GlobalNodeSorters(BalancedShardsAllocator.NodeSorter nodeSorter) implements NodeSorters {
@Override
public BalancedShardsAllocator.NodeSorter sorterForShard(ShardRouting shard) {
return nodeSorter;
}
@Override
public Iterator<BalancedShardsAllocator.NodeSorter> iterator() {
return Iterators.single(nodeSorter);
}
}
}
}
|
GlobalBalancingWeights
|
java
|
quarkusio__quarkus
|
extensions/qute/deployment/src/test/java/io/quarkus/qute/deployment/engineconfigurations/section/WrongTargetNestedTypeTest.java
|
{
"start": 1378,
"end": 1585
}
|
class ____ implements SectionHelperFactory<IfSectionHelper> {
@Override
public IfSectionHelper initialize(SectionInitContext context) {
return null;
}
}
}
|
CustomSection
|
java
|
quarkusio__quarkus
|
extensions/quartz/deployment/src/test/java/io/quarkus/quartz/test/ConfigureInstanceIdTest.java
|
{
"start": 425,
"end": 1033
}
|
class ____ {
@Inject
Scheduler quartzScheduler;
@RegisterExtension
static final QuarkusUnitTest test = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(Jobs.class)
.addAsResource(new StringAsset(
"quarkus.quartz.instance-id=myInstanceId"),
"application.properties"));
@Test
public void testSchedulerStarted() throws SchedulerException {
assertEquals("myInstanceId", quartzScheduler.getSchedulerInstanceId());
}
static
|
ConfigureInstanceIdTest
|
java
|
spring-projects__spring-framework
|
spring-core/src/test/java/org/springframework/core/ConstantsTests.java
|
{
"start": 7452,
"end": 7528
}
|
class ____ {
}
@SuppressWarnings("unused")
private static final
|
NoConstants
|
java
|
FasterXML__jackson-core
|
src/test/java/tools/jackson/core/unittest/JsonpCharacterEscapesTest.java
|
{
"start": 334,
"end": 885
}
|
class ____
extends JacksonCoreTestBase
{
@Test
void getEscapeSequenceOne() {
JsonpCharacterEscapes jsonpCharacterEscapes = JsonpCharacterEscapes.instance();
assertEquals(new SerializedString("\\u2028"),jsonpCharacterEscapes.getEscapeSequence(0x2028));
}
@Test
void getEscapeSequenceTwo() {
JsonpCharacterEscapes jsonpCharacterEscapes = JsonpCharacterEscapes.instance();
assertEquals(new SerializedString("\\u2029"),jsonpCharacterEscapes.getEscapeSequence(0x2029));
}
}
|
JsonpCharacterEscapesTest
|
java
|
spring-projects__spring-framework
|
spring-context-support/src/test/java/org/springframework/cache/jcache/config/JCacheJavaConfigTests.java
|
{
"start": 5910,
"end": 6567
}
|
class ____ implements JCacheConfigurer {
@Override
@Bean
public CacheManager cacheManager() {
return new NoOpCacheManager();
}
@Override
@Bean
public KeyGenerator keyGenerator() {
return new SimpleKeyGenerator();
}
@Override
@Bean
public CacheErrorHandler errorHandler() {
return new SimpleCacheErrorHandler();
}
@Override
@Bean
public CacheResolver cacheResolver() {
return new SimpleCacheResolver(cacheManager());
}
@Override
@Bean
public CacheResolver exceptionCacheResolver() {
return new SimpleCacheResolver(cacheManager());
}
}
@Configuration
@EnableCaching
public static
|
FullCachingConfig
|
java
|
junit-team__junit5
|
documentation/src/test/java/example/DisplayNameGeneratorDemo.java
|
{
"start": 1529,
"end": 1964
}
|
class ____ {
@Test
void if_it_is_divisible_by_4_but_not_by_100() {
}
@ParameterizedTest(name = "Year {0} is a leap year.")
@ValueSource(ints = { 2016, 2020, 2048 })
void if_it_is_one_of_the_following_years(int year) {
}
}
// end::user_guide_indicative_sentences[]
@Nested
// tag::user_guide_custom_sentence_fragments[]
@SentenceFragment("A year is a leap year")
@IndicativeSentencesGeneration
|
A_year_is_a_leap_year
|
java
|
apache__camel
|
core/camel-core-model/src/main/java/org/apache/camel/model/language/XMLTokenizerExpression.java
|
{
"start": 1375,
"end": 3266
}
|
class ____ extends NamespaceAwareExpression {
@XmlAttribute
@Metadata(defaultValue = "i", enums = "i,w,u,t")
private String mode;
@XmlAttribute
@Metadata(javaType = "java.lang.Integer")
private String group;
public XMLTokenizerExpression() {
}
protected XMLTokenizerExpression(XMLTokenizerExpression source) {
super(source);
this.mode = source.mode;
this.group = source.group;
}
public XMLTokenizerExpression(String expression) {
super(expression);
}
public XMLTokenizerExpression(Expression expression) {
setExpressionValue(expression);
}
private XMLTokenizerExpression(Builder builder) {
super(builder);
this.mode = builder.mode;
this.group = builder.group;
}
@Override
public XMLTokenizerExpression copyDefinition() {
return new XMLTokenizerExpression(this);
}
@Override
public String getLanguage() {
return "xtokenize";
}
public String getMode() {
return mode;
}
/**
* The extraction mode. The available extraction modes are:
* <ul>
* <li>i - injecting the contextual namespace bindings into the extracted token (default)</li>
* <li>w - wrapping the extracted token in its ancestor context</li>
* <li>u - unwrapping the extracted token to its child content</li>
* <li>t - extracting the text content of the specified element</li>
* </ul>
*/
public void setMode(String mode) {
this.mode = mode;
}
public String getGroup() {
return group;
}
/**
* To group N parts together
*/
public void setGroup(String group) {
this.group = group;
}
/**
* {@code Builder} is a specific builder for {@link XMLTokenizerExpression}.
*/
@XmlTransient
public static
|
XMLTokenizerExpression
|
java
|
apache__maven
|
its/core-it-suite/src/test/java/org/apache/maven/it/MavenITmng2690MojoLoadingErrorsTest.java
|
{
"start": 1541,
"end": 2248
}
|
class ____ extends AbstractMavenIntegrationTestCase {
@Test
public void testNoClassDefFromMojoLoad() throws IOException, VerificationException {
File testDir = extractResources("/mng-2690/noclassdef-mojo");
Verifier verifier = newVerifier(testDir.getAbsolutePath());
verifier.setAutoclean(false);
verifier.addCliArgument("validate");
VerificationException exception =
assertThrows(VerificationException.class, verifier::execute, "should throw an error during execution.");
List<String> lines = verifier.loadFile(new File(testDir, "log.txt"), false);
int msg = indexOf(lines, "(?i).*required
|
MavenITmng2690MojoLoadingErrorsTest
|
java
|
spring-projects__spring-framework
|
spring-test/src/main/java/org/springframework/test/web/servlet/client/assertj/DefaultRestTestClientResponse.java
|
{
"start": 877,
"end": 1414
}
|
class ____ implements RestTestClientResponse {
private final ExchangeResult exchangeResult;
DefaultRestTestClientResponse(ExchangeResult exchangeResult) {
this.exchangeResult = exchangeResult;
}
@Override
public ExchangeResult getExchangeResult() {
return this.exchangeResult;
}
/**
* Use AssertJ's {@link org.assertj.core.api.Assertions#assertThat assertThat} instead.
*/
@Override
public RestTestClientResponseAssert assertThat() {
return new RestTestClientResponseAssert(this);
}
}
|
DefaultRestTestClientResponse
|
java
|
spring-projects__spring-boot
|
core/spring-boot/src/test/java/org/springframework/boot/context/properties/bind/MapBinderTests.java
|
{
"start": 32106,
"end": 32345
}
|
class ____ {
private final CustomMap customMap;
CustomMapWithoutDefaultCtor(CustomMap customMap) {
this.customMap = customMap;
}
CustomMap getCustomMap() {
return this.customMap;
}
static final
|
CustomMapWithoutDefaultCtor
|
java
|
spring-projects__spring-framework
|
spring-webflux/src/test/java/org/springframework/web/reactive/result/method/annotation/RequestMappingMessageConversionIntegrationTests.java
|
{
"start": 22968,
"end": 24972
}
|
class ____ {
@PostMapping("/person")
Person transformPerson(@RequestBody Person person) {
return new Person(person.getName().toUpperCase());
}
@PostMapping("/completable-future")
CompletableFuture<Person> transformCompletableFuture(@RequestBody CompletableFuture<Person> future) {
return future.thenApply(person -> new Person(person.getName().toUpperCase()));
}
@PostMapping("/mono")
Mono<Person> transformMono(@RequestBody Mono<Person> personFuture) {
return personFuture.map(person -> new Person(person.getName().toUpperCase()));
}
@PostMapping("/single")
Single<Person> transformSingle(@RequestBody Single<Person> personFuture) {
return personFuture.map(person -> new Person(person.getName().toUpperCase()));
}
@PostMapping("/maybe")
Maybe<Person> transformMaybe(@RequestBody Maybe<Person> personFuture) {
return personFuture.map(person -> new Person(person.getName().toUpperCase()));
}
@PostMapping("/publisher")
Publisher<Person> transformPublisher(@RequestBody Publisher<Person> persons) {
return Flux.from(persons).map(person -> new Person(person.getName().toUpperCase()));
}
@PostMapping("/flux")
Flux<Person> transformFlux(@RequestBody Flux<Person> persons) {
return persons.map(person -> new Person(person.getName().toUpperCase()));
}
@PostMapping("/flux-delayed")
Flux<Person> transformDelayed(@RequestBody Flux<Person> persons) {
return transformFlux(persons).delayElements(Duration.ofMillis(10));
}
@PostMapping("/observable")
Observable<Person> transformObservable(@RequestBody Observable<Person> persons) {
return persons.map(person -> new Person(person.getName().toUpperCase()));
}
@PostMapping("/flowable")
Flowable<Person> transformFlowable(@RequestBody Flowable<Person> persons) {
return persons.map(person -> new Person(person.getName().toUpperCase()));
}
}
@RestController
@RequestMapping("/person-create")
@SuppressWarnings("unused")
private static
|
PersonTransformationController
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/LastLongByTimestampAggregatorFunctionSupplier.java
|
{
"start": 658,
"end": 1710
}
|
class ____ implements AggregatorFunctionSupplier {
public LastLongByTimestampAggregatorFunctionSupplier() {
}
@Override
public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
return LastLongByTimestampAggregatorFunction.intermediateStateDesc();
}
@Override
public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
return LastLongByTimestampGroupingAggregatorFunction.intermediateStateDesc();
}
@Override
public LastLongByTimestampAggregatorFunction aggregator(DriverContext driverContext,
List<Integer> channels) {
return LastLongByTimestampAggregatorFunction.create(driverContext, channels);
}
@Override
public LastLongByTimestampGroupingAggregatorFunction groupingAggregator(
DriverContext driverContext, List<Integer> channels) {
return LastLongByTimestampGroupingAggregatorFunction.create(channels, driverContext);
}
@Override
public String describe() {
return LastLongByTimestampAggregator.describe();
}
}
|
LastLongByTimestampAggregatorFunctionSupplier
|
java
|
grpc__grpc-java
|
cronet/src/test/java/io/grpc/cronet/CronetClientStreamTest.java
|
{
"start": 3434,
"end": 33625
}
|
class ____ implements Runnable {
private final StreamBuilderFactory factory;
private CronetClientStream stream;
SetStreamFactoryRunnable(StreamBuilderFactory factory) {
this.factory = factory;
}
void setStream(CronetClientStream stream) {
this.stream = stream;
}
@Override
@SuppressWarnings("GuardedBy")
public void run() {
assertNotNull(stream);
stream.transportState().start(factory);
}
}
@Before
public void setUp() {
SetStreamFactoryRunnable callback = new SetStreamFactoryRunnable(factory);
clientStream =
new CronetClientStream(
"https://www.google.com:443",
"cronet",
executor,
metadata,
transport,
callback,
lock,
100,
false /* alwaysUsePut */,
method,
StatsTraceContext.NOOP,
CallOptions.DEFAULT,
transportTracer,
false,
false);
callback.setStream(clientStream);
when(factory.newBidirectionalStreamBuilder(
any(String.class), any(BidirectionalStream.Callback.class), any(Executor.class)))
.thenReturn(builder);
when(builder.build()).thenReturn(cronetStream);
clientStream.start(clientListener);
}
@Test
public void startStream() {
verify(factory)
.newBidirectionalStreamBuilder(
eq("https://www.google.com:443"),
isA(BidirectionalStream.Callback.class),
eq(executor));
verify(builder).build();
// At least content type and trailer headers are set.
verify(builder, atLeast(2)).addHeader(isA(String.class), isA(String.class));
// addRequestAnnotation should only be called when we explicitly add the CRONET_ANNOTATION_KEY
// to CallOptions.
verify(builder, times(0)).addRequestAnnotation(isA(Object.class));
verify(builder, times(0)).setHttpMethod(any(String.class));
verify(cronetStream).start();
}
@Test
public void write() {
ArgumentCaptor<BidirectionalStream.Callback> callbackCaptor =
ArgumentCaptor.forClass(BidirectionalStream.Callback.class);
verify(factory)
.newBidirectionalStreamBuilder(
isA(String.class), callbackCaptor.capture(), isA(Executor.class));
BidirectionalStream.Callback callback = callbackCaptor.getValue();
// Create 5 frames to send.
CronetWritableBufferAllocator allocator = new CronetWritableBufferAllocator();
String[] requests = new String[5];
WritableBuffer[] buffers = new WritableBuffer[5];
for (int i = 0; i < 5; ++i) {
requests[i] = "request" + i;
buffers[i] = allocator.allocate(requests[i].length());
buffers[i].write(requests[i].getBytes(StandardCharsets.UTF_8), 0, requests[i].length());
// The 3rd and 5th writeFrame calls have flush=true.
clientStream.abstractClientStreamSink().writeFrame(buffers[i], false, i == 2 || i == 4, 1);
}
// BidirectionalStream.write is not called because stream is not ready yet.
verify(cronetStream, times(0)).write(isA(ByteBuffer.class), isA(Boolean.class));
// Stream is ready.
callback.onStreamReady(cronetStream);
// 5 writes are called.
verify(cronetStream, times(5)).write(isA(ByteBuffer.class), eq(false));
ByteBuffer fakeBuffer = ByteBuffer.allocateDirect(8);
((Buffer) fakeBuffer).position(8);
verify(cronetStream, times(2)).flush();
// 5 onWriteCompleted callbacks for previous writes.
callback.onWriteCompleted(cronetStream, null, fakeBuffer, false);
callback.onWriteCompleted(cronetStream, null, fakeBuffer, false);
callback.onWriteCompleted(cronetStream, null, fakeBuffer, false);
callback.onWriteCompleted(cronetStream, null, fakeBuffer, false);
callback.onWriteCompleted(cronetStream, null, fakeBuffer, false);
// All pending data has been sent. onWriteCompleted callback will not trigger any additional
// write call.
verify(cronetStream, times(5)).write(isA(ByteBuffer.class), eq(false));
// Send end of stream. write will be immediately called since stream is ready.
clientStream.abstractClientStreamSink().writeFrame(null, true, true, 1);
verify(cronetStream, times(1)).write(isA(ByteBuffer.class), eq(true));
verify(cronetStream, times(3)).flush();
}
private static List<Map.Entry<String, String>> responseHeader(String status) {
Map<String, String> headers = new HashMap<>();
headers.put(":status", status);
headers.put("content-type", "application/grpc");
headers.put("test-key", "test-value");
return new ArrayList<>(headers.entrySet());
}
private static List<Map.Entry<String, String>> trailers(int status) {
Map<String, String> trailers = new HashMap<>();
trailers.put("grpc-status", String.valueOf(status));
trailers.put("content-type", "application/grpc");
trailers.put("test-trailer-key", "test-trailer-value");
return new ArrayList<>(trailers.entrySet());
}
private static ByteBuffer createMessageFrame(byte[] bytes) {
ByteBuffer buffer = ByteBuffer.allocate(1 + 4 + bytes.length);
buffer.put((byte) 0 /* UNCOMPRESSED */);
buffer.putInt(bytes.length);
buffer.put(bytes);
return buffer;
}
@Test
public void read() {
ArgumentCaptor<BidirectionalStream.Callback> callbackCaptor =
ArgumentCaptor.forClass(BidirectionalStream.Callback.class);
verify(factory)
.newBidirectionalStreamBuilder(
isA(String.class), callbackCaptor.capture(), isA(Executor.class));
BidirectionalStream.Callback callback = callbackCaptor.getValue();
// Read is not called until we receive the response header.
verify(cronetStream, times(0)).read(isA(ByteBuffer.class));
UrlResponseInfo info =
new UrlResponseInfoImpl(
new ArrayList<>(), 200, "", responseHeader("200"), false, "", "", 0);
callback.onResponseHeadersReceived(cronetStream, info);
verify(cronetStream, times(1)).read(isA(ByteBuffer.class));
ArgumentCaptor<Metadata> metadataCaptor = ArgumentCaptor.forClass(Metadata.class);
verify(clientListener).headersRead(metadataCaptor.capture());
// Verify recevied headers.
Metadata metadata = metadataCaptor.getValue();
assertEquals(
"application/grpc",
metadata.get(Metadata.Key.of("content-type", Metadata.ASCII_STRING_MARSHALLER)));
assertEquals(
"test-value", metadata.get(Metadata.Key.of("test-key", Metadata.ASCII_STRING_MARSHALLER)));
callback.onReadCompleted(
cronetStream,
info,
createMessageFrame("response1".getBytes(StandardCharsets.UTF_8)),
false);
// Haven't request any message, so no callback is called here.
verify(clientListener, times(0)).messagesAvailable(isA(MessageProducer.class));
verify(cronetStream, times(1)).read(isA(ByteBuffer.class));
// Request one message
clientStream.request(1);
verify(clientListener, times(1)).messagesAvailable(isA(MessageProducer.class));
verify(cronetStream, times(2)).read(isA(ByteBuffer.class));
// BidirectionalStream.read will not be called again after receiving endOfStream(empty buffer).
clientStream.request(1);
callback.onReadCompleted(cronetStream, info, ByteBuffer.allocate(0), true);
verify(clientListener, times(1)).messagesAvailable(isA(MessageProducer.class));
verify(cronetStream, times(2)).read(isA(ByteBuffer.class));
}
@Test
public void streamSucceeded() {
ArgumentCaptor<BidirectionalStream.Callback> callbackCaptor =
ArgumentCaptor.forClass(BidirectionalStream.Callback.class);
verify(factory)
.newBidirectionalStreamBuilder(
isA(String.class), callbackCaptor.capture(), isA(Executor.class));
BidirectionalStream.Callback callback = callbackCaptor.getValue();
callback.onStreamReady(cronetStream);
verify(cronetStream, times(0)).write(isA(ByteBuffer.class), isA(Boolean.class));
// Send the first data frame.
CronetWritableBufferAllocator allocator = new CronetWritableBufferAllocator();
String request = "request";
WritableBuffer writableBuffer = allocator.allocate(request.length());
writableBuffer.write(request.getBytes(StandardCharsets.UTF_8), 0, request.length());
clientStream.abstractClientStreamSink().writeFrame(writableBuffer, false, true, 1);
ArgumentCaptor<ByteBuffer> bufferCaptor = ArgumentCaptor.forClass(ByteBuffer.class);
verify(cronetStream, times(1)).write(bufferCaptor.capture(), isA(Boolean.class));
ByteBuffer buffer = bufferCaptor.getValue();
((Buffer) buffer).position(request.length());
verify(cronetStream, times(1)).flush();
// Receive response header
clientStream.request(2);
UrlResponseInfo info =
new UrlResponseInfoImpl(
new ArrayList<>(), 200, "", responseHeader("200"), false, "", "", 0);
callback.onResponseHeadersReceived(cronetStream, info);
verify(cronetStream, times(1)).read(isA(ByteBuffer.class));
// Receive one message
callback.onReadCompleted(
cronetStream,
info,
createMessageFrame("response".getBytes(StandardCharsets.UTF_8)),
false);
verify(clientListener, times(1)).messagesAvailable(isA(MessageProducer.class));
verify(cronetStream, times(2)).read(isA(ByteBuffer.class));
// Send endOfStream
callback.onWriteCompleted(cronetStream, null, buffer, false);
clientStream.abstractClientStreamSink().writeFrame(null, true, true, 1);
verify(cronetStream, times(2)).write(isA(ByteBuffer.class), isA(Boolean.class));
verify(cronetStream, times(2)).flush();
// Receive trailer
((CronetClientStream.BidirectionalStreamCallback) callback).processTrailers(trailers(0));
callback.onSucceeded(cronetStream, info);
// Verify trailer
ArgumentCaptor<Metadata> trailerCaptor = ArgumentCaptor.forClass(Metadata.class);
ArgumentCaptor<Status> statusCaptor = ArgumentCaptor.forClass(Status.class);
verify(clientListener)
.closed(statusCaptor.capture(), isA(RpcProgress.class), trailerCaptor.capture());
// Verify recevied headers.
Metadata trailers = trailerCaptor.getValue();
Status status = statusCaptor.getValue();
assertEquals(
"test-trailer-value",
trailers.get(Metadata.Key.of("test-trailer-key", Metadata.ASCII_STRING_MARSHALLER)));
assertEquals(
"application/grpc",
trailers.get(Metadata.Key.of("content-type", Metadata.ASCII_STRING_MARSHALLER)));
assertTrue(status.isOk());
}
@Test
public void streamSucceededWithGrpcError() {
ArgumentCaptor<BidirectionalStream.Callback> callbackCaptor =
ArgumentCaptor.forClass(BidirectionalStream.Callback.class);
verify(factory)
.newBidirectionalStreamBuilder(
isA(String.class), callbackCaptor.capture(), isA(Executor.class));
BidirectionalStream.Callback callback = callbackCaptor.getValue();
callback.onStreamReady(cronetStream);
verify(cronetStream, times(0)).write(isA(ByteBuffer.class), isA(Boolean.class));
clientStream.abstractClientStreamSink().writeFrame(null, true, true, 1);
verify(cronetStream, times(1)).write(isA(ByteBuffer.class), isA(Boolean.class));
verify(cronetStream, times(1)).flush();
// Receive response header
clientStream.request(2);
UrlResponseInfo info =
new UrlResponseInfoImpl(
new ArrayList<>(), 200, "", responseHeader("200"), false, "", "", 0);
callback.onResponseHeadersReceived(cronetStream, info);
verify(cronetStream, times(1)).read(isA(ByteBuffer.class));
// Receive trailer
callback.onReadCompleted(cronetStream, null, ByteBuffer.allocate(0), true);
((CronetClientStream.BidirectionalStreamCallback) callback)
.processTrailers(trailers(Status.PERMISSION_DENIED.getCode().value()));
callback.onSucceeded(cronetStream, info);
ArgumentCaptor<Status> statusCaptor = ArgumentCaptor.forClass(Status.class);
verify(clientListener)
.closed(statusCaptor.capture(), isA(RpcProgress.class), isA(Metadata.class));
// Verify error status.
Status status = statusCaptor.getValue();
assertFalse(status.isOk());
assertEquals(Status.PERMISSION_DENIED.getCode(), status.getCode());
}
@Test
public void streamFailed() {
ArgumentCaptor<BidirectionalStream.Callback> callbackCaptor =
ArgumentCaptor.forClass(BidirectionalStream.Callback.class);
verify(factory)
.newBidirectionalStreamBuilder(
isA(String.class), callbackCaptor.capture(), isA(Executor.class));
BidirectionalStream.Callback callback = callbackCaptor.getValue();
// Nothing happens and stream fails
CronetException exception = mock(CronetException.class);
callback.onFailed(cronetStream, null, exception);
verify(transport).finishStream(eq(clientStream), isA(Status.class));
// finishStream calls transportReportStatus.
clientStream.transportState().transportReportStatus(Status.UNAVAILABLE, false, new Metadata());
ArgumentCaptor<Status> statusCaptor = ArgumentCaptor.forClass(Status.class);
verify(clientListener)
.closed(statusCaptor.capture(), isA(RpcProgress.class), isA(Metadata.class));
Status status = statusCaptor.getValue();
assertEquals(Status.UNAVAILABLE.getCode(), status.getCode());
}
@Test
public void streamFailedAfterResponseHeaderReceived() {
ArgumentCaptor<BidirectionalStream.Callback> callbackCaptor =
ArgumentCaptor.forClass(BidirectionalStream.Callback.class);
verify(factory)
.newBidirectionalStreamBuilder(
isA(String.class), callbackCaptor.capture(), isA(Executor.class));
BidirectionalStream.Callback callback = callbackCaptor.getValue();
// Receive response header
UrlResponseInfo info =
new UrlResponseInfoImpl(
new ArrayList<>(), 200, "", responseHeader("200"), false, "", "", 0);
callback.onResponseHeadersReceived(cronetStream, info);
CronetException exception = mock(CronetException.class);
callback.onFailed(cronetStream, info, exception);
verify(transport).finishStream(eq(clientStream), isA(Status.class));
// finishStream calls transportReportStatus.
clientStream.transportState().transportReportStatus(Status.UNAVAILABLE, false, new Metadata());
ArgumentCaptor<Status> statusCaptor = ArgumentCaptor.forClass(Status.class);
verify(clientListener)
.closed(statusCaptor.capture(), isA(RpcProgress.class), isA(Metadata.class));
Status status = statusCaptor.getValue();
assertEquals(Status.UNAVAILABLE.getCode(), status.getCode());
}
@Test
public void streamFailedAfterTrailerReceived() {
ArgumentCaptor<BidirectionalStream.Callback> callbackCaptor =
ArgumentCaptor.forClass(BidirectionalStream.Callback.class);
verify(factory)
.newBidirectionalStreamBuilder(
isA(String.class), callbackCaptor.capture(), isA(Executor.class));
BidirectionalStream.Callback callback = callbackCaptor.getValue();
// Receive response header
UrlResponseInfo info =
new UrlResponseInfoImpl(
new ArrayList<>(), 200, "", responseHeader("200"), false, "", "", 0);
callback.onResponseHeadersReceived(cronetStream, info);
// Report trailer but not endOfStream.
((CronetClientStream.BidirectionalStreamCallback) callback).processTrailers(trailers(0));
CronetException exception = mock(CronetException.class);
callback.onFailed(cronetStream, info, exception);
verify(transport).finishStream(eq(clientStream), isA(Status.class));
// finishStream calls transportReportStatus.
clientStream.transportState().transportReportStatus(Status.UNAVAILABLE, false, new Metadata());
ArgumentCaptor<Status> statusCaptor = ArgumentCaptor.forClass(Status.class);
verify(clientListener)
.closed(statusCaptor.capture(), isA(RpcProgress.class), isA(Metadata.class));
Status status = statusCaptor.getValue();
// Stream has already finished so OK status should be reported.
assertEquals(Status.UNAVAILABLE.getCode(), status.getCode());
}
@Test
public void streamFailedAfterTrailerAndEndOfStreamReceived() {
ArgumentCaptor<BidirectionalStream.Callback> callbackCaptor =
ArgumentCaptor.forClass(BidirectionalStream.Callback.class);
verify(factory)
.newBidirectionalStreamBuilder(
isA(String.class), callbackCaptor.capture(), isA(Executor.class));
BidirectionalStream.Callback callback = callbackCaptor.getValue();
// Receive response header
UrlResponseInfo info =
new UrlResponseInfoImpl(
new ArrayList<>(), 200, "", responseHeader("200"), false, "", "", 0);
callback.onResponseHeadersReceived(cronetStream, info);
// Report trailer and endOfStream
callback.onReadCompleted(cronetStream, null, ByteBuffer.allocate(0), true);
((CronetClientStream.BidirectionalStreamCallback) callback).processTrailers(trailers(0));
CronetException exception = mock(CronetException.class);
callback.onFailed(cronetStream, info, exception);
verify(transport).finishStream(eq(clientStream), isA(Status.class));
// finishStream calls transportReportStatus.
clientStream.transportState().transportReportStatus(Status.UNAVAILABLE, false, new Metadata());
ArgumentCaptor<Status> statusCaptor = ArgumentCaptor.forClass(Status.class);
verify(clientListener)
.closed(statusCaptor.capture(), isA(RpcProgress.class), isA(Metadata.class));
Status status = statusCaptor.getValue();
// Stream has already finished so OK status should be reported.
assertEquals(Status.OK.getCode(), status.getCode());
}
@Test
public void cancelStream() {
ArgumentCaptor<BidirectionalStream.Callback> callbackCaptor =
ArgumentCaptor.forClass(BidirectionalStream.Callback.class);
verify(factory)
.newBidirectionalStreamBuilder(
isA(String.class), callbackCaptor.capture(), isA(Executor.class));
BidirectionalStream.Callback callback = callbackCaptor.getValue();
// Cancel the stream
clientStream.cancel(Status.DEADLINE_EXCEEDED);
verify(transport, times(0)).finishStream(eq(clientStream), isA(Status.class));
callback.onCanceled(cronetStream, null);
ArgumentCaptor<Status> statusCaptor = ArgumentCaptor.forClass(Status.class);
verify(transport, times(1)).finishStream(eq(clientStream), statusCaptor.capture());
Status status = statusCaptor.getValue();
assertEquals(Status.DEADLINE_EXCEEDED.getCode(), status.getCode());
}
@Test
public void reportTrailersWhenTrailersReceivedBeforeReadClosed() {
ArgumentCaptor<BidirectionalStream.Callback> callbackCaptor =
ArgumentCaptor.forClass(BidirectionalStream.Callback.class);
verify(factory)
.newBidirectionalStreamBuilder(
isA(String.class), callbackCaptor.capture(), isA(Executor.class));
BidirectionalStream.Callback callback = callbackCaptor.getValue();
callback.onStreamReady(cronetStream);
UrlResponseInfo info =
new UrlResponseInfoImpl(
new ArrayList<>(), 200, "", responseHeader("200"), false, "", "", 0);
callback.onResponseHeadersReceived(cronetStream, info);
// Receive trailer first
((CronetClientStream.BidirectionalStreamCallback) callback)
.processTrailers(trailers(Status.UNAUTHENTICATED.getCode().value()));
verify(clientListener, times(0))
.closed(isA(Status.class), isA(RpcProgress.class), isA(Metadata.class));
// Receive cronet's endOfStream
callback.onReadCompleted(cronetStream, null, ByteBuffer.allocate(0), true);
ArgumentCaptor<Status> statusCaptor = ArgumentCaptor.forClass(Status.class);
verify(clientListener, times(1))
.closed(statusCaptor.capture(), isA(RpcProgress.class), isA(Metadata.class));
Status status = statusCaptor.getValue();
assertEquals(Status.UNAUTHENTICATED.getCode(), status.getCode());
}
@Test
public void reportTrailersWhenTrailersReceivedAfterReadClosed() {
ArgumentCaptor<BidirectionalStream.Callback> callbackCaptor =
ArgumentCaptor.forClass(BidirectionalStream.Callback.class);
verify(factory)
.newBidirectionalStreamBuilder(
isA(String.class), callbackCaptor.capture(), isA(Executor.class));
BidirectionalStream.Callback callback = callbackCaptor.getValue();
callback.onStreamReady(cronetStream);
UrlResponseInfo info =
new UrlResponseInfoImpl(
new ArrayList<>(), 200, "", responseHeader("200"), false, "", "", 0);
callback.onResponseHeadersReceived(cronetStream, info);
// Receive cronet's endOfStream
callback.onReadCompleted(cronetStream, null, ByteBuffer.allocate(0), true);
verify(clientListener, times(0))
.closed(isA(Status.class), isA(RpcProgress.class), isA(Metadata.class));
// Receive trailer
((CronetClientStream.BidirectionalStreamCallback) callback)
.processTrailers(trailers(Status.UNAUTHENTICATED.getCode().value()));
ArgumentCaptor<Status> statusCaptor = ArgumentCaptor.forClass(Status.class);
verify(clientListener, times(1))
.closed(statusCaptor.capture(), isA(RpcProgress.class), isA(Metadata.class));
Status status = statusCaptor.getValue();
assertEquals(Status.UNAUTHENTICATED.getCode(), status.getCode());
}
@SuppressWarnings("deprecation")
@Test
public void addCronetRequestAnnotation_deprecated() {
Object annotation = new Object();
SetStreamFactoryRunnable callback = new SetStreamFactoryRunnable(factory);
CronetClientStream stream =
new CronetClientStream(
"https://www.google.com:443",
"cronet",
executor,
metadata,
transport,
callback,
lock,
100,
false /* alwaysUsePut */,
method,
StatsTraceContext.NOOP,
CallOptions.DEFAULT.withOption(CronetClientStream.CRONET_ANNOTATION_KEY, annotation),
transportTracer,
false,
false);
callback.setStream(stream);
when(factory.newBidirectionalStreamBuilder(
any(String.class), any(BidirectionalStream.Callback.class), any(Executor.class)))
.thenReturn(builder);
stream.start(clientListener);
// addRequestAnnotation should be called since we add the option CRONET_ANNOTATION_KEY above.
verify(builder).addRequestAnnotation(annotation);
}
@Test
public void withAnnotation() {
Object annotation1 = new Object();
Object annotation2 = new Object();
CallOptions callOptions = CronetClientStream.withAnnotation(CallOptions.DEFAULT, annotation1);
callOptions = CronetClientStream.withAnnotation(callOptions, annotation2);
SetStreamFactoryRunnable callback = new SetStreamFactoryRunnable(factory);
CronetClientStream stream =
new CronetClientStream(
"https://www.google.com:443",
"cronet",
executor,
metadata,
transport,
callback,
lock,
100,
false /* alwaysUsePut */,
method,
StatsTraceContext.NOOP,
callOptions,
transportTracer,
false,
false);
callback.setStream(stream);
when(factory.newBidirectionalStreamBuilder(
any(String.class), any(BidirectionalStream.Callback.class), any(Executor.class)))
.thenReturn(builder);
stream.start(clientListener);
verify(builder).addRequestAnnotation(annotation1);
verify(builder).addRequestAnnotation(annotation2);
}
@Test
public void getUnaryRequest() {
StreamBuilderFactory getFactory = mock(StreamBuilderFactory.class);
MethodDescriptor<?, ?> getMethod =
MethodDescriptor.<Void, Void>newBuilder()
.setType(MethodDescriptor.MethodType.UNARY)
.setFullMethodName("/service/method")
.setIdempotent(true)
.setSafe(true)
.setRequestMarshaller(marshaller)
.setResponseMarshaller(marshaller)
.build();
SetStreamFactoryRunnable callback = new SetStreamFactoryRunnable(getFactory);
CronetClientStream stream =
new CronetClientStream(
"https://www.google.com/service/method",
"cronet",
executor,
metadata,
transport,
callback,
lock,
100,
false /* alwaysUsePut */,
getMethod,
StatsTraceContext.NOOP,
CallOptions.DEFAULT,
transportTracer,
true,
false);
callback.setStream(stream);
BidirectionalStream.Builder getBuilder =
mock(BidirectionalStream.Builder.class);
when(getFactory.newBidirectionalStreamBuilder(
any(String.class), any(BidirectionalStream.Callback.class), any(Executor.class)))
.thenReturn(getBuilder);
when(getBuilder.build()).thenReturn(cronetStream);
stream.start(clientListener);
// We will not create BidirectionalStream until we have the full request.
verify(getFactory, times(0))
.newBidirectionalStreamBuilder(
isA(String.class), isA(BidirectionalStream.Callback.class), isA(Executor.class));
byte[] msg = "request".getBytes(StandardCharsets.UTF_8);
stream.writeMessage(new ByteArrayInputStream(msg));
// We still haven't built the stream or sent anything.
verify(cronetStream, times(0)).write(isA(ByteBuffer.class), isA(Boolean.class));
verify(getFactory, times(0))
.newBidirectionalStreamBuilder(
isA(String.class), isA(BidirectionalStream.Callback.class), isA(Executor.class));
// halfClose will trigger sending.
stream.halfClose();
// Stream should be built with request payload in the header.
ArgumentCaptor<String> urlCaptor = ArgumentCaptor.forClass(String.class);
verify(getFactory)
.newBidirectionalStreamBuilder(
urlCaptor.capture(), isA(BidirectionalStream.Callback.class), isA(Executor.class));
verify(getBuilder).setHttpMethod("GET");
assertEquals(
"https://www.google.com/service/method?" + BaseEncoding.base64().encode(msg),
urlCaptor.getValue());
}
@Test
public void idempotentMethod_usesHttpPut() {
SetStreamFactoryRunnable callback = new SetStreamFactoryRunnable(factory);
MethodDescriptor<?, ?> idempotentMethod = method.toBuilder().setIdempotent(true).build();
CronetClientStream stream =
new CronetClientStream(
"https://www.google.com:443",
"cronet",
executor,
metadata,
transport,
callback,
lock,
100,
false /* alwaysUsePut */,
idempotentMethod,
StatsTraceContext.NOOP,
CallOptions.DEFAULT,
transportTracer,
true,
true);
callback.setStream(stream);
BidirectionalStream.Builder builder =
mock(BidirectionalStream.Builder.class);
when(factory.newBidirectionalStreamBuilder(
any(String.class), any(BidirectionalStream.Callback.class), any(Executor.class)))
.thenReturn(builder);
when(builder.build()).thenReturn(cronetStream);
stream.start(clientListener);
verify(builder).setHttpMethod("PUT");
}
@Test
public void alwaysUsePutOption_usesHttpPut() {
SetStreamFactoryRunnable callback = new SetStreamFactoryRunnable(factory);
CronetClientStream stream =
new CronetClientStream(
"https://www.google.com:443",
"cronet",
executor,
metadata,
transport,
callback,
lock,
100,
true /* alwaysUsePut */,
method,
StatsTraceContext.NOOP,
CallOptions.DEFAULT,
transportTracer,
true,
true);
callback.setStream(stream);
BidirectionalStream.Builder builder =
mock(BidirectionalStream.Builder.class);
when(factory.newBidirectionalStreamBuilder(
any(String.class), any(BidirectionalStream.Callback.class), any(Executor.class)))
.thenReturn(builder);
when(builder.build()).thenReturn(cronetStream);
stream.start(clientListener);
verify(builder).setHttpMethod("PUT");
}
@Test
public void reservedHeadersStripped() {
String userAgent = "cronet";
Metadata headers = new Metadata();
Metadata.Key<String> userKey = Metadata.Key.of("user-key", Metadata.ASCII_STRING_MARSHALLER);
headers.put(GrpcUtil.CONTENT_TYPE_KEY, "to-be-removed");
headers.put(GrpcUtil.USER_AGENT_KEY, "to-be-removed");
headers.put(GrpcUtil.TE_HEADER, "to-be-removed");
headers.put(userKey, "user-value");
SetStreamFactoryRunnable callback = new SetStreamFactoryRunnable(factory);
CronetClientStream stream =
new CronetClientStream(
"https://www.google.com:443",
userAgent,
executor,
headers,
transport,
callback,
lock,
100,
false /* alwaysUsePut */,
method,
StatsTraceContext.NOOP,
CallOptions.DEFAULT,
transportTracer,
false,
false);
callback.setStream(stream);
BidirectionalStream.Builder builder =
mock(BidirectionalStream.Builder.class);
when(factory.newBidirectionalStreamBuilder(
any(String.class), any(BidirectionalStream.Callback.class), any(Executor.class)))
.thenReturn(builder);
when(builder.build()).thenReturn(cronetStream);
stream.start(clientListener);
verify(builder, times(4)).addHeader(any(String.class), any(String.class));
verify(builder).addHeader(GrpcUtil.USER_AGENT_KEY.name(), userAgent);
verify(builder).addHeader(GrpcUtil.CONTENT_TYPE_KEY.name(), GrpcUtil.CONTENT_TYPE_GRPC);
verify(builder).addHeader("te", GrpcUtil.TE_TRAILERS);
verify(builder).addHeader(userKey.name(), "user-value");
}
}
|
SetStreamFactoryRunnable
|
java
|
ReactiveX__RxJava
|
src/test/java/io/reactivex/rxjava3/internal/operators/completable/CompletableResumeNextTest.java
|
{
"start": 977,
"end": 2890
}
|
class ____ extends RxJavaTest {
@Test
public void resumeNextError() {
Completable.error(new TestException())
.onErrorResumeNext(Functions.justFunction(Completable.error(new TestException("second"))))
.to(TestHelper.<Object>testConsumer())
.assertFailureAndMessage(TestException.class, "second");
}
@Test
public void disposeInMain() {
TestHelper.checkDisposedCompletable(new Function<Completable, CompletableSource>() {
@Override
public CompletableSource apply(Completable c) throws Exception {
return c.onErrorResumeNext(Functions.justFunction(Completable.complete()));
}
});
}
@Test
public void disposeInResume() {
TestHelper.checkDisposedCompletable(new Function<Completable, CompletableSource>() {
@Override
public CompletableSource apply(Completable c) throws Exception {
return Completable.error(new TestException()).onErrorResumeNext(Functions.justFunction(c));
}
});
}
@Test
public void disposed() {
TestHelper.checkDisposed(
Completable.error(new TestException())
.onErrorResumeNext(Functions.justFunction(Completable.never()))
);
}
@Test
public void resumeWithNoError() throws Throwable {
Action action = mock(Action.class);
Completable.complete()
.onErrorResumeWith(Completable.fromAction(action))
.test()
.assertResult();
verify(action, never()).run();
}
@Test
public void resumeWithError() throws Throwable {
Action action = mock(Action.class);
Completable.error(new TestException())
.onErrorResumeWith(Completable.fromAction(action))
.test()
.assertResult();
verify(action).run();
}
}
|
CompletableResumeNextTest
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/path/JSONPath_field_access_multi.java
|
{
"start": 166,
"end": 1015
}
|
class ____ extends TestCase {
public void test_list_map() throws Exception {
Entity entity = new Entity(123, "wenshao");
JSONPath path = new JSONPath("$['id','name']");
List<Object> result = (List<Object>) path.eval(entity);
Assert.assertSame(entity.getId(), result.get(0));
Assert.assertSame(entity.getName(), result.get(1));
}
public void test_list_map2() throws Exception {
Entity entity = new Entity(123, "wenshao");
JSONPath path = new JSONPath("$.entity['id','name']");
Root root = new Root();
root.setEntity(entity);
List<Object> result = (List<Object>) path.eval(root);
Assert.assertSame(entity.getId(), result.get(0));
Assert.assertSame(entity.getName(), result.get(1));
}
public static
|
JSONPath_field_access_multi
|
java
|
apache__kafka
|
streams/src/test/java/org/apache/kafka/streams/tests/RelationalSmokeTest.java
|
{
"start": 20619,
"end": 21570
}
|
class ____ extends Comment {
private final String articlePrefix;
private AugmentedComment(final int key,
final long timestamp,
final String text,
final int articleId,
final String articlePrefix) {
super(key, timestamp, text, articleId);
this.articlePrefix = articlePrefix;
}
public String getArticlePrefix() {
return articlePrefix;
}
@Override
public String toString() {
return "AugmentedComment{" +
"key=" + super.key +
", timestamp=" + getTimestamp() +
", text='" + getText() + '\'' +
", articleId=" + getArticleId() +
", articlePrefix='" + articlePrefix + '\'' +
'}';
}
public static
|
AugmentedComment
|
java
|
quarkusio__quarkus
|
integration-tests/logging-min-level-set/src/test/java/io/quarkus/it/logging/minlevel/set/NativeLoggingMinLevelBelowChildIT.java
|
{
"start": 130,
"end": 263
}
|
class ____ extends LoggingMinLevelBelowChildTest {
// Execute the same tests but in native mode.
}
|
NativeLoggingMinLevelBelowChildIT
|
java
|
apache__camel
|
components/camel-bindy/src/test/java/org/apache/camel/dataformat/bindy/csv/BindySimpleCsvAutospanLineTest.java
|
{
"start": 1194,
"end": 3057
}
|
class ____ extends CamelTestSupport {
@Test
public void testUnmarshalNoNeedToSpanLine() throws Exception {
final MockEndpoint mock = getMockEndpoint("mock:unmarshal");
mock.expectedMessageCount(1);
template.sendBody("direct:unmarshal", "1,hei,kommentar");
MockEndpoint.assertIsSatisfied(context);
//final List<Map<?, SpanLastRecord>> rows = CastUtils.cast(mock.getReceivedExchanges().get(0).getIn().getBody(List.class));
//final SpanLastRecord order = rows.get(0).get(SpanLastRecord.class.getName());
final SpanLastRecord order = mock.getReceivedExchanges().get(0).getIn().getBody(SpanLastRecord.class);
assertEquals(1, order.getRecordId());
assertEquals("hei", order.getName());
assertEquals("kommentar", order.getComment());
}
@Test
public void testUnmarshalSpanningLine() throws Exception {
final MockEndpoint mock = getMockEndpoint("mock:unmarshal");
mock.expectedMessageCount(1);
template.sendBody("direct:unmarshal", "1,hei,kommentar,test,noe,hei");
MockEndpoint.assertIsSatisfied(context);
final SpanLastRecord order = mock.getReceivedExchanges().get(0).getIn().getBody(SpanLastRecord.class);
assertEquals(1, order.getRecordId());
assertEquals("hei", order.getName());
assertEquals("kommentar,test,noe,hei", order.getComment());
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
final BindyCsvDataFormat bindy = new BindyCsvDataFormat(SpanLastRecord.class);
from("direct:unmarshal")
.unmarshal(bindy)
.to("mock:unmarshal");
}
};
}
}
|
BindySimpleCsvAutospanLineTest
|
java
|
ReactiveX__RxJava
|
src/test/java/io/reactivex/rxjava3/internal/operators/flowable/FlowableScanTest.java
|
{
"start": 1367,
"end": 22109
}
|
class ____ extends RxJavaTest {
@Test
public void scanIntegersWithInitialValue() {
Subscriber<String> subscriber = TestHelper.mockSubscriber();
Flowable<Integer> flowable = Flowable.just(1, 2, 3);
Flowable<String> m = flowable.scan("", new BiFunction<String, Integer, String>() {
@Override
public String apply(String s, Integer n) {
return s + n.toString();
}
});
m.subscribe(subscriber);
verify(subscriber, never()).onError(any(Throwable.class));
verify(subscriber, times(1)).onNext("");
verify(subscriber, times(1)).onNext("1");
verify(subscriber, times(1)).onNext("12");
verify(subscriber, times(1)).onNext("123");
verify(subscriber, times(4)).onNext(anyString());
verify(subscriber, times(1)).onComplete();
verify(subscriber, never()).onError(any(Throwable.class));
}
@Test
public void scanIntegersWithoutInitialValue() {
Subscriber<Integer> subscriber = TestHelper.mockSubscriber();
Flowable<Integer> flowable = Flowable.just(1, 2, 3);
Flowable<Integer> m = flowable.scan(new BiFunction<Integer, Integer, Integer>() {
@Override
public Integer apply(Integer t1, Integer t2) {
return t1 + t2;
}
});
m.subscribe(subscriber);
verify(subscriber, never()).onError(any(Throwable.class));
verify(subscriber, never()).onNext(0);
verify(subscriber, times(1)).onNext(1);
verify(subscriber, times(1)).onNext(3);
verify(subscriber, times(1)).onNext(6);
verify(subscriber, times(3)).onNext(anyInt());
verify(subscriber, times(1)).onComplete();
verify(subscriber, never()).onError(any(Throwable.class));
}
@Test
public void scanIntegersWithoutInitialValueAndOnlyOneValue() {
Subscriber<Integer> subscriber = TestHelper.mockSubscriber();
Flowable<Integer> flowable = Flowable.just(1);
Flowable<Integer> m = flowable.scan(new BiFunction<Integer, Integer, Integer>() {
@Override
public Integer apply(Integer t1, Integer t2) {
return t1 + t2;
}
});
m.subscribe(subscriber);
verify(subscriber, never()).onError(any(Throwable.class));
verify(subscriber, never()).onNext(0);
verify(subscriber, times(1)).onNext(1);
verify(subscriber, times(1)).onNext(anyInt());
verify(subscriber, times(1)).onComplete();
verify(subscriber, never()).onError(any(Throwable.class));
}
@Test
public void shouldNotEmitUntilAfterSubscription() {
TestSubscriber<Integer> ts = new TestSubscriber<>();
Flowable.range(1, 100).scan(0, new BiFunction<Integer, Integer, Integer>() {
@Override
public Integer apply(Integer t1, Integer t2) {
return t1 + t2;
}
}).filter(new Predicate<Integer>() {
@Override
public boolean test(Integer t1) {
// this will cause request(1) when 0 is emitted
return t1 > 0;
}
}).subscribe(ts);
assertEquals(100, ts.values().size());
}
@Test
public void backpressureWithInitialValue() {
final AtomicInteger count = new AtomicInteger();
Flowable.range(1, 100)
.scan(0, new BiFunction<Integer, Integer, Integer>() {
@Override
public Integer apply(Integer t1, Integer t2) {
return t1 + t2;
}
})
.subscribe(new DefaultSubscriber<Integer>() {
@Override
public void onStart() {
request(10);
}
@Override
public void onComplete() {
}
@Override
public void onError(Throwable e) {
Assert.fail(e.getMessage());
e.printStackTrace();
}
@Override
public void onNext(Integer t) {
count.incrementAndGet();
}
});
// we only expect to receive 10 since we request(10)
assertEquals(10, count.get());
}
@Test
public void backpressureWithoutInitialValue() {
final AtomicInteger count = new AtomicInteger();
Flowable.range(1, 100)
.scan(new BiFunction<Integer, Integer, Integer>() {
@Override
public Integer apply(Integer t1, Integer t2) {
return t1 + t2;
}
})
.subscribe(new DefaultSubscriber<Integer>() {
@Override
public void onStart() {
request(10);
}
@Override
public void onComplete() {
}
@Override
public void onError(Throwable e) {
Assert.fail(e.getMessage());
e.printStackTrace();
}
@Override
public void onNext(Integer t) {
count.incrementAndGet();
}
});
// we only expect to receive 10 since we request(10)
assertEquals(10, count.get());
}
@Test
public void noBackpressureWithInitialValue() {
final AtomicInteger count = new AtomicInteger();
Flowable.range(1, 100)
.scan(0, new BiFunction<Integer, Integer, Integer>() {
@Override
public Integer apply(Integer t1, Integer t2) {
return t1 + t2;
}
})
.subscribe(new DefaultSubscriber<Integer>() {
@Override
public void onComplete() {
}
@Override
public void onError(Throwable e) {
Assert.fail(e.getMessage());
e.printStackTrace();
}
@Override
public void onNext(Integer t) {
count.incrementAndGet();
}
});
// we only expect to receive 101 as we'll receive all 100 + the initial value
assertEquals(101, count.get());
}
/**
* This uses the public API collect which uses scan under the covers.
*/
@Test
public void seedFactory() {
Single<List<Integer>> o = Flowable.range(1, 10)
.collect(new Supplier<List<Integer>>() {
@Override
public List<Integer> get() {
return new ArrayList<>();
}
}, new BiConsumer<List<Integer>, Integer>() {
@Override
public void accept(List<Integer> list, Integer t2) {
list.add(t2);
}
});
assertEquals(Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10), o.blockingGet());
assertEquals(Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10), o.blockingGet());
}
/**
* This uses the public API collect which uses scan under the covers.
*/
@Test
public void seedFactoryFlowable() {
Flowable<List<Integer>> f = Flowable.range(1, 10)
.collect(new Supplier<List<Integer>>() {
@Override
public List<Integer> get() {
return new ArrayList<>();
}
}, new BiConsumer<List<Integer>, Integer>() {
@Override
public void accept(List<Integer> list, Integer t2) {
list.add(t2);
}
}).toFlowable().takeLast(1);
assertEquals(Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10), f.blockingSingle());
assertEquals(Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10), f.blockingSingle());
}
@Test
public void scanWithRequestOne() {
Flowable<Integer> f = Flowable.just(1, 2).scan(0, new BiFunction<Integer, Integer, Integer>() {
@Override
public Integer apply(Integer t1, Integer t2) {
return t1 + t2;
}
}).take(1);
TestSubscriberEx<Integer> subscriber = new TestSubscriberEx<>();
f.subscribe(subscriber);
subscriber.assertValue(0);
subscriber.assertTerminated();
subscriber.assertNoErrors();
}
@Test
public void scanShouldNotRequestZero() {
final AtomicReference<Subscription> producer = new AtomicReference<>();
Flowable<Integer> f = Flowable.unsafeCreate(new Publisher<Integer>() {
@Override
public void subscribe(final Subscriber<? super Integer> subscriber) {
Subscription p = spy(new Subscription() {
private AtomicBoolean requested = new AtomicBoolean(false);
@Override
public void request(long n) {
if (requested.compareAndSet(false, true)) {
subscriber.onNext(1);
subscriber.onComplete();
}
}
@Override
public void cancel() {
}
});
producer.set(p);
subscriber.onSubscribe(p);
}
}).scan(100, new BiFunction<Integer, Integer, Integer>() {
@Override
public Integer apply(Integer t1, Integer t2) {
return t1 + t2;
}
});
f.subscribe(new TestSubscriber<Integer>(1L) {
@Override
public void onNext(Integer integer) {
request(1);
}
});
verify(producer.get(), never()).request(0);
verify(producer.get(), times(1)).request(Flowable.bufferSize() - 1);
}
@Test
public void dispose() {
TestHelper.checkDisposed(PublishProcessor.create().scan(new BiFunction<Object, Object, Object>() {
@Override
public Object apply(Object a, Object b) throws Exception {
return a;
}
}));
TestHelper.checkDisposed(PublishProcessor.<Integer>create().scan(0, new BiFunction<Integer, Integer, Integer>() {
@Override
public Integer apply(Integer a, Integer b) throws Exception {
return a + b;
}
}));
}
@Test
public void doubleOnSubscribe() {
TestHelper.checkDoubleOnSubscribeFlowable(new Function<Flowable<Object>, Flowable<Object>>() {
@Override
public Flowable<Object> apply(Flowable<Object> f) throws Exception {
return f.scan(new BiFunction<Object, Object, Object>() {
@Override
public Object apply(Object a, Object b) throws Exception {
return a;
}
});
}
});
TestHelper.checkDoubleOnSubscribeFlowable(new Function<Flowable<Object>, Flowable<Object>>() {
@Override
public Flowable<Object> apply(Flowable<Object> f) throws Exception {
return f.scan(0, new BiFunction<Object, Object, Object>() {
@Override
public Object apply(Object a, Object b) throws Exception {
return a;
}
});
}
});
}
@Test
public void error() {
Flowable.error(new TestException())
.scan(new BiFunction<Object, Object, Object>() {
@Override
public Object apply(Object a, Object b) throws Exception {
return a;
}
})
.test()
.assertFailure(TestException.class);
}
@Test
public void neverSource() {
Flowable.<Integer>never()
.scan(0, new BiFunction<Integer, Integer, Integer>() {
@Override
public Integer apply(Integer a, Integer b) throws Exception {
return a + b;
}
})
.test()
.assertValue(0)
.assertNoErrors()
.assertNotComplete();
}
@Test
public void unsubscribeScan() {
FlowableEventStream.getEventStream("HTTP-ClusterB", 20)
.scan(new HashMap<>(), new BiFunction<HashMap<String, String>, Event, HashMap<String, String>>() {
@Override
public HashMap<String, String> apply(HashMap<String, String> accum, Event perInstanceEvent) {
accum.put("instance", perInstanceEvent.instanceId);
return accum;
}
})
.take(10)
.blockingForEach(new Consumer<HashMap<String, String>>() {
@Override
public void accept(HashMap<String, String> v) {
System.out.println(v);
}
});
}
@Test
public void scanWithSeedDoesNotEmitErrorTwiceIfScanFunctionThrows() {
final List<Throwable> list = new CopyOnWriteArrayList<>();
Consumer<Throwable> errorConsumer = new Consumer<Throwable>() {
@Override
public void accept(Throwable t) throws Exception {
list.add(t);
}};
try {
RxJavaPlugins.setErrorHandler(errorConsumer);
final RuntimeException e = new RuntimeException();
final RuntimeException e2 = new RuntimeException();
Burst.items(1).error(e2)
.scan(0, throwingBiFunction(e))
.test()
.assertValues(0)
.assertError(e);
assertEquals("" + list, 1, list.size());
assertTrue("" + list, list.get(0) instanceof UndeliverableException);
assertEquals(e2, list.get(0).getCause());
} finally {
RxJavaPlugins.reset();
}
}
@Test
public void scanWithSeedDoesNotEmitTerminalEventTwiceIfScanFunctionThrows() {
final RuntimeException e = new RuntimeException();
Burst.item(1).create()
.scan(0, throwingBiFunction(e))
.test()
.assertValue(0)
.assertError(e);
}
@Test
public void scanWithSeedDoesNotProcessOnNextAfterTerminalEventIfScanFunctionThrows() {
final RuntimeException e = new RuntimeException();
final AtomicInteger count = new AtomicInteger();
Burst.items(1, 2).create().scan(0, new BiFunction<Integer, Integer, Integer>() {
@Override
public Integer apply(Integer n1, Integer n2) throws Exception {
count.incrementAndGet();
throw e;
}})
.test()
.assertValues(0)
.assertError(e);
assertEquals(1, count.get());
}
@Test
public void scanWithSeedCompletesNormally() {
Flowable.just(1, 2, 3).scan(0, SUM)
.test()
.assertValues(0, 1, 3, 6)
.assertComplete();
}
@Test
public void scanWithSeedWhenScanSeedProviderThrows() {
final RuntimeException e = new RuntimeException();
Flowable.just(1, 2, 3).scanWith(throwingSupplier(e),
SUM)
.test()
.assertError(e)
.assertNoValues();
}
@Test
public void scanNoSeed() {
Flowable.just(1, 2, 3)
.scan(SUM)
.test()
.assertValues(1, 3, 6)
.assertComplete();
}
@Test
public void scanNoSeedDoesNotEmitErrorTwiceIfScanFunctionThrows() {
final List<Throwable> list = new CopyOnWriteArrayList<>();
Consumer<Throwable> errorConsumer = new Consumer<Throwable>() {
@Override
public void accept(Throwable t) throws Exception {
list.add(t);
}};
try {
RxJavaPlugins.setErrorHandler(errorConsumer);
final RuntimeException e = new RuntimeException();
final RuntimeException e2 = new RuntimeException();
Burst.items(1, 2).error(e2)
.scan(throwingBiFunction(e))
.test()
.assertValue(1)
.assertError(e);
assertEquals("" + list, 1, list.size());
assertTrue("" + list, list.get(0) instanceof UndeliverableException);
assertEquals(e2, list.get(0).getCause());
} finally {
RxJavaPlugins.reset();
}
}
@Test
public void scanNoSeedDoesNotEmitTerminalEventTwiceIfScanFunctionThrows() {
final RuntimeException e = new RuntimeException();
Burst.items(1, 2).create()
.scan(throwingBiFunction(e))
.test()
.assertValue(1)
.assertError(e);
}
@Test
public void scanNoSeedDoesNotProcessOnNextAfterTerminalEventIfScanFunctionThrows() {
final RuntimeException e = new RuntimeException();
final AtomicInteger count = new AtomicInteger();
Burst.items(1, 2, 3).create().scan(new BiFunction<Integer, Integer, Integer>() {
@Override
public Integer apply(Integer n1, Integer n2) throws Exception {
count.incrementAndGet();
throw e;
}})
.test()
.assertValue(1)
.assertError(e);
assertEquals(1, count.get());
}
private static BiFunction<Integer, Integer, Integer> throwingBiFunction(final RuntimeException e) {
return new BiFunction<Integer, Integer, Integer>() {
@Override
public Integer apply(Integer n1, Integer n2) throws Exception {
throw e;
}
};
}
private static final BiFunction<Integer, Integer, Integer> SUM = new BiFunction<Integer, Integer, Integer>() {
@Override
public Integer apply(Integer t1, Integer t2) throws Exception {
return t1 + t2;
}
};
private static Supplier<Integer> throwingSupplier(final RuntimeException e) {
return new Supplier<Integer>() {
@Override
public Integer get() throws Exception {
throw e;
}
};
}
@Test
public void scanEmptyBackpressured() {
Flowable.<Integer>empty()
.scan(0, SUM)
.test(1)
.assertResult(0);
}
@Test
public void scanErrorBackpressured() {
Flowable.<Integer>error(new TestException())
.scan(0, SUM)
.test(0)
.assertFailure(TestException.class);
}
@Test
public void scanTake() {
TestSubscriber<Integer> ts = new TestSubscriber<Integer>() {
@Override
public void onNext(Integer t) {
super.onNext(t);
onComplete();
cancel();
}
};
Flowable.range(1, 10)
.scan(0, SUM)
.subscribe(ts)
;
ts.assertResult(0);
}
@Test
public void scanLong() {
int n = 2 * Flowable.bufferSize();
for (int b = 1; b <= n; b *= 2) {
List<Integer> list = Flowable.range(1, n)
.scan(0, new BiFunction<Integer, Integer, Integer>() {
@Override
public Integer apply(Integer a, Integer b) throws Exception {
return b;
}
})
.rebatchRequests(b)
.toList()
.blockingGet();
for (int i = 0; i <= n; i++) {
assertEquals(i, list.get(i).intValue());
}
}
}
@Test
public void badRequest() {
TestHelper.assertBadRequestReported(Flowable.<Integer>never().scanWith(() -> 1, (a, b) -> a + b));
}
@Test
public void drainMoreWork() {
PublishProcessor<Integer> pp = PublishProcessor.create();
TestSubscriber<Integer> ts = pp.scanWith(() -> 0, (a, b) -> a + b)
.doOnNext(v -> {
if (v == 1) {
pp.onNext(2);
pp.onComplete();
}
})
.test();
pp.onNext(1);
ts.assertResult(0, 1, 3);
}
}
|
FlowableScanTest
|
java
|
apache__camel
|
components/camel-schematron/src/generated/java/org/apache/camel/component/schematron/SchematronEndpointUriFactory.java
|
{
"start": 520,
"end": 2210
}
|
class ____ extends org.apache.camel.support.component.EndpointUriFactorySupport implements EndpointUriFactory {
private static final String BASE = ":path";
private static final Set<String> PROPERTY_NAMES;
private static final Set<String> SECRET_PROPERTY_NAMES;
private static final Map<String, String> MULTI_VALUE_PREFIXES;
static {
Set<String> props = new HashSet<>(5);
props.add("abort");
props.add("lazyStartProducer");
props.add("path");
props.add("rules");
props.add("uriResolver");
PROPERTY_NAMES = Collections.unmodifiableSet(props);
SECRET_PROPERTY_NAMES = Collections.emptySet();
MULTI_VALUE_PREFIXES = Collections.emptyMap();
}
@Override
public boolean isEnabled(String scheme) {
return "schematron".equals(scheme);
}
@Override
public String buildUri(String scheme, Map<String, Object> properties, boolean encode) throws URISyntaxException {
String syntax = scheme + BASE;
String uri = syntax;
Map<String, Object> copy = new HashMap<>(properties);
uri = buildPathParameter(syntax, uri, "path", null, true, copy);
uri = buildQueryParameters(uri, copy, encode);
return uri;
}
@Override
public Set<String> propertyNames() {
return PROPERTY_NAMES;
}
@Override
public Set<String> secretPropertyNames() {
return SECRET_PROPERTY_NAMES;
}
@Override
public Map<String, String> multiValuePrefixes() {
return MULTI_VALUE_PREFIXES;
}
@Override
public boolean isLenientProperties() {
return false;
}
}
|
SchematronEndpointUriFactory
|
java
|
spring-projects__spring-boot
|
loader/spring-boot-loader-tools/src/main/java/org/springframework/boot/loader/tools/DefaultLayoutFactory.java
|
{
"start": 806,
"end": 952
}
|
class ____ implements LayoutFactory {
@Override
public Layout getLayout(File source) {
return Layouts.forFile(source);
}
}
|
DefaultLayoutFactory
|
java
|
mockito__mockito
|
mockito-core/src/test/java/org/mockito/internal/creation/bytebuddy/InlineDelegateByteBuddyMockMakerTest.java
|
{
"start": 26107,
"end": 26316
}
|
class ____ {
public void callWrapped(ExceptionThrowingClass exceptionThrowingClass) throws IOException {
exceptionThrowingClass.throwException();
}
}
public static
|
WrapperClass
|
java
|
spring-projects__spring-security
|
test/src/test/java/org/springframework/security/test/context/support/WithSecurityContextTestExcecutionListenerTests.java
|
{
"start": 2316,
"end": 7261
}
|
class ____ {
private ConfigurableApplicationContext context;
@Mock
private TestContext testContext;
private WithSecurityContextTestExecutionListener listener;
@BeforeEach
public void setup() {
this.listener = new WithSecurityContextTestExecutionListener();
this.context = new AnnotationConfigApplicationContext(Config.class);
}
@AfterEach
public void cleanup() {
TestSecurityContextHolder.clearContext();
if (this.context != null) {
this.context.close();
}
}
@Test
@SuppressWarnings({ "rawtypes", "unchecked" })
public void beforeTestMethodNullSecurityContextNoError() throws Exception {
Class testClass = FakeTest.class;
given(this.testContext.getTestClass()).willReturn(testClass);
given(this.testContext.getTestMethod()).willReturn(ReflectionUtils.findMethod(testClass, "testNoAnnotation"));
this.listener.beforeTestMethod(this.testContext);
}
@Test
@SuppressWarnings({ "rawtypes", "unchecked" })
public void beforeTestMethodNoApplicationContext() throws Exception {
Class testClass = FakeTest.class;
given(this.testContext.getApplicationContext()).willThrow(new IllegalStateException());
given(this.testContext.getTestMethod()).willReturn(ReflectionUtils.findMethod(testClass, "testWithMockUser"));
this.listener.beforeTestMethod(this.testContext);
assertThat(TestSecurityContextHolder.getContext().getAuthentication().getName()).isEqualTo("user");
}
@Test
@SuppressWarnings({ "rawtypes", "unchecked" })
public void beforeTestMethodInnerClass() throws Exception {
Class testClass = OuterClass.InnerClass.class;
Method testNoAnnotation = ReflectionUtils.findMethod(testClass, "testNoAnnotation");
given(this.testContext.getTestClass()).willReturn(testClass);
given(this.testContext.getTestMethod()).willReturn(testNoAnnotation);
given(this.testContext.getApplicationContext()).willThrow(new IllegalStateException(""));
this.listener.beforeTestMethod(this.testContext);
assertThat(TestSecurityContextHolder.getContext().getAuthentication().getName()).isEqualTo("user");
}
@Test
@SuppressWarnings({ "rawtypes", "unchecked" })
public void beforeTestMethodInnerInnerClass() throws Exception {
Class testClass = OuterClass.InnerClass.InnerInnerClass.class;
Method testNoAnnotation = ReflectionUtils.findMethod(testClass, "testNoAnnotation");
given(this.testContext.getTestClass()).willReturn(testClass);
given(this.testContext.getTestMethod()).willReturn(testNoAnnotation);
given(this.testContext.getApplicationContext()).willThrow(new IllegalStateException(""));
this.listener.beforeTestMethod(this.testContext);
assertThat(TestSecurityContextHolder.getContext().getAuthentication().getName()).isEqualTo("user");
}
@Test
@SuppressWarnings({ "rawtypes", "unchecked" })
public void beforeTestMethodInnerClassWhenOverride() throws Exception {
Class testClass = OverrideOuterClass.InnerClass.class;
Method testNoAnnotation = ReflectionUtils.findMethod(testClass, "testNoAnnotation");
given(this.testContext.getTestClass()).willReturn(testClass);
given(this.testContext.getTestMethod()).willReturn(testNoAnnotation);
this.listener.beforeTestMethod(this.testContext);
assertThat(TestSecurityContextHolder.getContext().getAuthentication()).isNull();
}
// gh-3962
@Test
public void withSecurityContextAfterSqlScripts() {
SqlScriptsTestExecutionListener sql = new SqlScriptsTestExecutionListener();
WithSecurityContextTestExecutionListener security = new WithSecurityContextTestExecutionListener();
List<TestExecutionListener> listeners = Arrays.asList(security, sql);
AnnotationAwareOrderComparator.sort(listeners);
assertThat(listeners).containsExactly(sql, security);
}
// SEC-2709
@Test
public void orderOverridden() {
AbstractTestExecutionListener otherListener = new AbstractTestExecutionListener() {
};
List<TestExecutionListener> listeners = new ArrayList<>();
listeners.add(otherListener);
listeners.add(this.listener);
AnnotationAwareOrderComparator.sort(listeners);
assertThat(listeners).containsSequence(this.listener, otherListener);
}
@Test
// gh-3837
public void handlesGenericAnnotation() throws Exception {
Method method = ReflectionUtils.findMethod(WithSecurityContextTestExcecutionListenerTests.class,
"handlesGenericAnnotationTestMethod");
TestContext testContext = mock(TestContext.class);
given(testContext.getTestMethod()).willReturn(method);
given(testContext.getApplicationContext()).willThrow(new IllegalStateException(""));
this.listener.beforeTestMethod(testContext);
assertThat(SecurityContextHolder.getContext().getAuthentication().getPrincipal())
.isInstanceOf(WithSuperClassWithSecurityContext.class);
}
@WithSuperClassWithSecurityContext
public void handlesGenericAnnotationTestMethod() {
}
@Retention(RetentionPolicy.RUNTIME)
@WithSecurityContext(factory = SuperClassWithSecurityContextFactory.class)
@
|
WithSecurityContextTestExcecutionListenerTests
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/settings/UpdateSecuritySettingsActionTests.java
|
{
"start": 1441,
"end": 10107
}
|
class ____ extends ESTestCase {
static final Map<String, Supplier<String>> ALLOWED_SETTING_GENERATORS = Map.of(
IndexMetadata.SETTING_NUMBER_OF_REPLICAS,
() -> randomAlphaOfLength(5), // no additional validation
IndexMetadata.SETTING_AUTO_EXPAND_REPLICAS,
() -> randomAlphaOfLength(5), // no additional validation
DataTier.TIER_PREFERENCE,
() -> randomFrom(DataTier.DATA_CONTENT, DataTier.DATA_HOT, DataTier.DATA_WARM, DataTier.DATA_COLD)
);
public void testValidateSettingsEmpty() {
var req = new UpdateSecuritySettingsAction.Request(
TEST_REQUEST_TIMEOUT,
TEST_REQUEST_TIMEOUT,
Collections.emptyMap(),
Collections.emptyMap(),
Collections.emptyMap()
);
var ex = req.validate();
assertThat(ex, notNullValue());
assertThat(ex.getMessage(), containsString("No settings given to update"));
assertThat(ex.validationErrors(), hasSize(1));
}
public void testAllowedSettingsOk() {
Map<String, Object> allAllowedSettingsMap = new HashMap<>();
for (String allowedSetting : ALLOWED_SETTING_VALIDATORS.keySet()) {
String settingValue = ALLOWED_SETTING_GENERATORS.get(allowedSetting).get();
Map<String, Object> allowedSettingMap = Map.of(allowedSetting, settingValue);
allAllowedSettingsMap.put(allowedSetting, settingValue);
var req = new UpdateSecuritySettingsAction.Request(
TEST_REQUEST_TIMEOUT,
TEST_REQUEST_TIMEOUT,
allowedSettingMap,
Collections.emptyMap(),
Collections.emptyMap()
);
assertThat(req.validate(), nullValue());
req = new UpdateSecuritySettingsAction.Request(
TEST_REQUEST_TIMEOUT,
TEST_REQUEST_TIMEOUT,
Collections.emptyMap(),
allowedSettingMap,
Collections.emptyMap()
);
assertThat(req.validate(), nullValue());
req = new UpdateSecuritySettingsAction.Request(
TEST_REQUEST_TIMEOUT,
TEST_REQUEST_TIMEOUT,
Collections.emptyMap(),
Collections.emptyMap(),
allowedSettingMap
);
assertThat(req.validate(), nullValue());
}
var req = new UpdateSecuritySettingsAction.Request(
TEST_REQUEST_TIMEOUT,
TEST_REQUEST_TIMEOUT,
allAllowedSettingsMap,
allAllowedSettingsMap,
allAllowedSettingsMap
);
assertThat(req.validate(), nullValue());
}
public void testDisallowedSettingsFailsValidation() {
String disallowedSetting = "index."
+ randomValueOtherThanMany((value) -> ALLOWED_SETTING_VALIDATORS.containsKey("index." + value), () -> randomAlphaOfLength(5));
Map<String, Object> disallowedSettingMap = Map.of(disallowedSetting, randomAlphaOfLength(5));
String validSetting = randomFrom(ALLOWED_SETTING_VALIDATORS.keySet());
Map<String, Object> validOrEmptySettingMap = randomFrom(
Collections.emptyMap(),
Map.of(validSetting, ALLOWED_SETTING_GENERATORS.get(validSetting).get())
);
{
var req = new UpdateSecuritySettingsAction.Request(
TEST_REQUEST_TIMEOUT,
TEST_REQUEST_TIMEOUT,
validOrEmptySettingMap,
disallowedSettingMap,
validOrEmptySettingMap
);
List<String> errors = req.validate().validationErrors();
assertThat(errors, hasSize(1));
for (String errorMsg : errors) {
assertThat(
errorMsg,
matchesRegex(
"illegal setting for index \\["
+ Pattern.quote(TOKENS_INDEX_NAME)
+ "\\]: \\["
+ disallowedSetting
+ "\\], this setting may not be configured. Only the following settings may be configured for that index.*"
)
);
}
}
{
var req = new UpdateSecuritySettingsAction.Request(
TEST_REQUEST_TIMEOUT,
TEST_REQUEST_TIMEOUT,
disallowedSettingMap,
validOrEmptySettingMap,
disallowedSettingMap
);
List<String> errors = req.validate().validationErrors();
assertThat(errors, hasSize(2));
for (String errorMsg : errors) {
assertThat(
errorMsg,
matchesRegex(
"illegal setting for index \\[("
+ Pattern.quote(MAIN_INDEX_NAME)
+ "|"
+ Pattern.quote(PROFILES_INDEX_NAME)
+ ")\\]: \\["
+ disallowedSetting
+ "\\], this setting may not be configured. Only the following settings may be configured for that index.*"
)
);
}
}
{
var req = new UpdateSecuritySettingsAction.Request(
TEST_REQUEST_TIMEOUT,
TEST_REQUEST_TIMEOUT,
disallowedSettingMap,
disallowedSettingMap,
disallowedSettingMap
);
List<String> errors = req.validate().validationErrors();
assertThat(errors, hasSize(3));
for (String errorMsg : errors) {
assertThat(
errorMsg,
matchesRegex(
"illegal setting for index \\[("
+ Pattern.quote(MAIN_INDEX_NAME)
+ "|"
+ Pattern.quote(TOKENS_INDEX_NAME)
+ "|"
+ Pattern.quote(PROFILES_INDEX_NAME)
+ ")\\]: \\["
+ disallowedSetting
+ "\\], this setting may not be configured. Only the following settings may be configured for that index.*"
)
);
}
}
}
public void testSettingValuesAreValidated() {
Map<String, Object> forbiddenSettingsMap = Map.of(DataTier.TIER_PREFERENCE, DataTier.DATA_FROZEN);
String badTier = randomAlphaOfLength(5);
Map<String, Object> badSettingsMap = Map.of(DataTier.TIER_PREFERENCE, badTier);
Map<String, Object> allowedSettingMap = Map.of(
DataTier.TIER_PREFERENCE,
randomFrom(DataTier.DATA_HOT, DataTier.DATA_WARM, DataTier.DATA_CONTENT, DataTier.DATA_COLD)
);
{
var req = new UpdateSecuritySettingsAction.Request(
TEST_REQUEST_TIMEOUT,
TEST_REQUEST_TIMEOUT,
allowedSettingMap,
Collections.emptyMap(),
Collections.emptyMap()
);
assertThat(req.validate(), nullValue());
}
{
var req = new UpdateSecuritySettingsAction.Request(
TEST_REQUEST_TIMEOUT,
TEST_REQUEST_TIMEOUT,
forbiddenSettingsMap,
Collections.emptyMap(),
Collections.emptyMap()
);
ActionRequestValidationException exception = req.validate();
assertThat(exception, notNullValue());
assertThat(exception.validationErrors(), hasSize(1));
assertThat(
exception.validationErrors().get(0),
containsString("disallowed data tiers [" + DataTier.DATA_FROZEN + "] found, allowed tiers are ")
);
}
{
var req = new UpdateSecuritySettingsAction.Request(
TEST_REQUEST_TIMEOUT,
TEST_REQUEST_TIMEOUT,
badSettingsMap,
Collections.emptyMap(),
Collections.emptyMap()
);
var exception = req.validate();
assertThat(exception, notNullValue());
assertThat(exception.validationErrors(), hasSize(1));
assertThat(
exception.validationErrors().get(0),
containsString("disallowed data tiers [" + badTier + "] found, allowed tiers are ")
);
}
}
}
|
UpdateSecuritySettingsActionTests
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/error/NoElementsShouldSatisfy_create_Test.java
|
{
"start": 1060,
"end": 2432
}
|
class ____ {
@Test
void should_create_error_message() {
// GIVEN
ErrorMessageFactory factory = noElementsShouldSatisfy(list("Luke", "Leia", "Yoda"), list("Luke", "Leia"));
// WHEN
String message = factory.create(new TextDescription("Test"), STANDARD_REPRESENTATION);
// THEN
then(message).isEqualTo(format("[Test] %n" +
"Expecting no elements of:%n" +
" [\"Luke\", \"Leia\", \"Yoda\"]%n" +
"to satisfy the given assertions requirements but these elements did:%n" +
" [\"Luke\", \"Leia\"]"));
}
@Test
void should_create_error_message_percent() {
// GIVEN
ErrorMessageFactory factory = noElementsShouldSatisfy(list("Luke", "Leia%s", "Yoda"), list("Luke", "Leia%s"));
// WHEN
String message = factory.create(new TextDescription("Test"), STANDARD_REPRESENTATION);
// THEN
then(message).isEqualTo(format("[Test] %n" +
"Expecting no elements of:%n" +
" [\"Luke\", \"Leia%%s\", \"Yoda\"]%n" +
"to satisfy the given assertions requirements but these elements did:%n" +
" [\"Luke\", \"Leia%%s\"]"));
}
}
|
NoElementsShouldSatisfy_create_Test
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/event/spi/FlushEvent.java
|
{
"start": 141,
"end": 254
}
|
class ____ stateful session flush.
*
* @author Steve Ebersole
*
* @see org.hibernate.Session#flush
*/
public
|
for
|
java
|
spring-cloud__spring-cloud-gateway
|
spring-cloud-gateway-server-webflux/src/main/java/org/springframework/cloud/gateway/filter/headers/RemoveHopByHopHeadersFilter.java
|
{
"start": 1207,
"end": 2978
}
|
class ____ implements HttpHeadersFilter, Ordered {
/**
* Headers to remove as the result of applying the filter.
*/
public static final Set<String> HEADERS_REMOVED_ON_REQUEST = new HashSet<>(
Arrays.asList("connection", "keep-alive", "transfer-encoding", "te", "trailer", "proxy-authorization",
"proxy-authenticate", "x-application-context", "upgrade"
// these two are not listed in
// https://tools.ietf.org/html/draft-ietf-httpbis-p1-messaging-14#section-7.1.3
// "proxy-connection",
// "content-length",
));
private int order = Ordered.LOWEST_PRECEDENCE - 1;
private Set<String> headers = HEADERS_REMOVED_ON_REQUEST;
public Set<String> getHeaders() {
return headers;
}
public void setHeaders(Set<String> headers) {
Objects.requireNonNull(headers, "headers may not be null");
this.headers = headers.stream().map(String::toLowerCase).collect(Collectors.toSet());
}
@Override
public int getOrder() {
return order;
}
public void setOrder(int order) {
this.order = order;
}
@Override
public HttpHeaders filter(HttpHeaders originalHeaders, ServerWebExchange exchange) {
HttpHeaders filtered = new HttpHeaders();
List<String> connectionOptions = originalHeaders.getConnection().stream().map(String::toLowerCase).toList();
Set<String> headersToRemove = new HashSet<>(headers);
headersToRemove.addAll(connectionOptions);
for (Map.Entry<String, List<String>> entry : originalHeaders.headerSet()) {
if (!headersToRemove.contains(entry.getKey().toLowerCase(Locale.ROOT))) {
filtered.addAll(entry.getKey(), entry.getValue());
}
}
return filtered;
}
@Override
public boolean supports(Type type) {
return type.equals(Type.REQUEST) || type.equals(Type.RESPONSE);
}
}
|
RemoveHopByHopHeadersFilter
|
java
|
quarkusio__quarkus
|
extensions/grpc/deployment/src/test/java/io/quarkus/grpc/client/tls/TlsWithJKSTrustStoreWithHttpServerAndTlsRegistryTest.java
|
{
"start": 866,
"end": 2393
}
|
class ____ {
private static final String configuration = """
quarkus.tls.my-client.trust-store.jks.path=target/certs/grpc-client-truststore.jks
quarkus.tls.my-client.trust-store.jks.password=password
quarkus.grpc.clients.hello.plain-text=false
quarkus.grpc.clients.hello.tls-configuration-name=my-client
quarkus.grpc.clients.hello.use-quarkus-grpc-client=true
quarkus.grpc.server.use-separate-server=false
quarkus.grpc.server.plain-text=false # Force the client to use TLS for the tests
quarkus.tls.key-store.jks.path=target/certs/grpc-keystore.jks
quarkus.tls.key-store.jks.password=password
quarkus.http.insecure-requests=disabled
""";
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest().setArchiveProducer(
() -> ShrinkWrap.create(JavaArchive.class)
.addPackage(HelloWorldTlsEndpoint.class.getPackage())
.addPackage(GreeterGrpc.class.getPackage())
.add(new StringAsset(configuration), "application.properties"));
@GrpcClient("hello")
GreeterGrpc.GreeterBlockingStub blockingHelloService;
@Test
void testClientTlsConfiguration() {
HelloReply reply = blockingHelloService.sayHello(HelloRequest.newBuilder().setName("neo").build());
assertThat(reply.getMessage()).isEqualTo("Hello neo");
}
}
|
TlsWithJKSTrustStoreWithHttpServerAndTlsRegistryTest
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/date/DateFormatPriorityTest.java
|
{
"start": 5670,
"end": 5883
}
|
class ____ {
private Date date;
public Date getDate() {
return date;
}
public void setDate(Date date) {
this.date = date;
}
}
public static
|
VO
|
java
|
elastic__elasticsearch
|
modules/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/MeteredStorage.java
|
{
"start": 8258,
"end": 10049
}
|
class ____ implements ReadChannel {
private final GcsRepositoryStatsCollector statsCollector;
private final ReadChannel readChannel;
private final OperationPurpose purpose;
MeteredReadChannel(OperationPurpose purpose, GcsRepositoryStatsCollector statsCollector, ReadChannel readChannel) {
this.statsCollector = statsCollector;
this.readChannel = readChannel;
this.purpose = purpose;
}
@Override
public void close() {
statsCollector.collectRunnable(purpose, GET, readChannel::close);
}
@Override
public void seek(long position) throws IOException {
statsCollector.collectIORunnable(purpose, GET, () -> readChannel.seek(position));
}
@Override
public void setChunkSize(int chunkSize) {
readChannel.setChunkSize(chunkSize);
}
@Override
public RestorableState<ReadChannel> capture() {
return () -> new MeteredReadChannel(purpose, statsCollector, readChannel.capture().restore());
}
@Override
public ReadChannel limit(long limit) {
readChannel.limit(limit);
return this;
}
@Override
public long limit() {
return readChannel.limit();
}
@Override
public int read(ByteBuffer dst) throws IOException {
return statsCollector.collectIOSupplier(purpose, GET, () -> readChannel.read(dst));
}
@Override
public boolean isOpen() {
return readChannel.isOpen();
}
}
/**
* A delegating paginated blob list. Each list operation is at most one storage operation, or none.
*/
public static
|
MeteredReadChannel
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/util/DoubleComparatorTest.java
|
{
"start": 847,
"end": 2984
}
|
class ____ {
private static DoubleComparator comparator = new DoubleComparator(0.01d);
public static boolean nearlyEqual(Double a, Double b) {
return comparator.compare(a, b) == 0;
}
public static boolean nearlyEqual(Double a, Double b, double epsilon) {
return new DoubleComparator(epsilon).compare(a, b) == 0;
}
@ParameterizedTest
@CsvSource({
"1.0, 1.0",
"1.001, 1.0",
"0.01, 0.0",
"1.0, 1.001",
"0.001, 0.0",
"0.0, 0.001",
"-1.001, -1.0",
"-1.0, -1.001",
","
})
void should_be_equal_if_difference_is_less_than_or_equal_to_epsilon(Double actual, Double other) {
assertThat(nearlyEqual(actual, other)).as("comparing %f to %f with epsilon %f", actual, other,
comparator.getEpsilon())
.isTrue();
}
@ParameterizedTest
@CsvSource({
"1.0, 2.0",
"1.010001, 1.0",
"1.0, 1.010001",
"0.0, 0.010001",
"-1.010001, -1.0",
"-1.0, -1.010001",
", 1.0",
"1.0,"
})
void should_not_be_equal_if_difference_is_more_than_epsilon(Double actual, Double other) {
assertThat(nearlyEqual(actual, other)).as("comparing %f to %f with epsilon %f", actual, other,
comparator.getEpsilon())
.isFalse();
}
@ParameterizedTest
@CsvSource({
"Infinity, Infinity",
"-Infinity, -Infinity"
})
void should_be_equal_if_both_values_are_infinity_of_same_sign(Double actual, Double other) {
assertThat(nearlyEqual(actual, other)).as("comparing %f to %f", actual, other).isTrue();
}
@ParameterizedTest
@CsvSource({
"Infinity, -Infinity",
"-Infinity, Infinity"
})
void should_not_be_equal_if_both_values_are_infinity_of_opposite_signs(Double actual, Double other) {
assertThat(nearlyEqual(actual, other)).as("comparing %f to %f", actual, other).isFalse();
}
@Test
void should_not_be_equal_if_not_a_number() {
assertThat(nearlyEqual(Double.NaN, Double.NaN)).isFalse();
}
}
|
DoubleComparatorTest
|
java
|
elastic__elasticsearch
|
x-pack/plugin/gpu/src/test/java/org/elasticsearch/xpack/gpu/GPUDenseVectorFieldMapperTests.java
|
{
"start": 987,
"end": 3696
}
|
class ____ extends DenseVectorFieldMapperTests {
@BeforeClass
public static void setup() {
assumeTrue("cuvs not supported", GPUSupport.isSupported());
}
@Override
protected Collection<Plugin> getPlugins() {
var plugin = new GPUPlugin() {
@Override
protected boolean isGpuIndexingFeatureAllowed() {
return true;
}
};
return Collections.singletonList(plugin);
}
@Override
public void testKnnVectorsFormat() throws IOException {
// TODO improve test with custom parameters
KnnVectorsFormat knnVectorsFormat = getKnnVectorsFormat("hnsw");
String expectedStr = "Lucene99HnswVectorsFormat(name=Lucene99HnswVectorsFormat, "
+ "maxConn=12, beamWidth=22, flatVectorFormat=Lucene99FlatVectorsFormat)";
assertEquals(expectedStr, knnVectorsFormat.toString());
}
@Override
public void testKnnQuantizedHNSWVectorsFormat() throws IOException {
// TOD improve the test with custom parameters
KnnVectorsFormat knnVectorsFormat = getKnnVectorsFormat("int8_hnsw");
String expectedStr = "Lucene99HnswVectorsFormat(name=Lucene99HnswVectorsFormat, "
+ "maxConn=12, beamWidth=22, flatVectorFormat=ES814ScalarQuantizedVectorsFormat";
assertTrue(knnVectorsFormat.toString().startsWith(expectedStr));
}
private KnnVectorsFormat getKnnVectorsFormat(String indexOptionsType) throws IOException {
final int dims = randomIntBetween(128, 4096);
MapperService mapperService = createMapperService(fieldMapping(b -> {
b.field("type", "dense_vector");
b.field("dims", dims);
b.field("index", true);
b.field("similarity", "dot_product");
b.startObject("index_options");
b.field("type", indexOptionsType);
b.endObject();
}));
CodecService codecService = new CodecService(mapperService, BigArrays.NON_RECYCLING_INSTANCE);
Codec codec = codecService.codec("default");
if (CodecService.ZSTD_STORED_FIELDS_FEATURE_FLAG) {
assertThat(codec, instanceOf(PerFieldMapperCodec.class));
return ((PerFieldMapperCodec) codec).getKnnVectorsFormatForField("field");
} else {
if (codec instanceof CodecService.DeduplicateFieldInfosCodec deduplicateFieldInfosCodec) {
codec = deduplicateFieldInfosCodec.delegate();
}
assertThat(codec, instanceOf(LegacyPerFieldMapperCodec.class));
return ((LegacyPerFieldMapperCodec) codec).getKnnVectorsFormatForField("field");
}
}
}
|
GPUDenseVectorFieldMapperTests
|
java
|
elastic__elasticsearch
|
x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/GroupMappingIT.java
|
{
"start": 476,
"end": 559
}
|
class ____ provide appropriate group mappings via configGroupMappings()
*/
public
|
will
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/ObjectArrayFieldTest.java
|
{
"start": 1113,
"end": 1330
}
|
class ____ {
private Object[] value;
public Object[] getValue() {
return value;
}
public void setValue(Object[] value) {
this.value = value;
}
}
}
|
V0
|
java
|
elastic__elasticsearch
|
x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/recovery/SearchableSnapshotsRecoveryStateTests.java
|
{
"start": 932,
"end": 6639
}
|
class ____ extends ESTestCase {
public void testStageDoesNotTransitionToDoneUntilPreWarmingHasFinished() {
SearchableSnapshotRecoveryState recoveryState = createRecoveryState();
recoveryState.setStage(RecoveryState.Stage.INIT)
.setStage(RecoveryState.Stage.INDEX)
.setStage(RecoveryState.Stage.VERIFY_INDEX)
.setStage(RecoveryState.Stage.TRANSLOG);
recoveryState.getIndex().setFileDetailsComplete();
recoveryState.setStage(RecoveryState.Stage.FINALIZE).setStage(RecoveryState.Stage.DONE);
assertThat(recoveryState.getStage(), equalTo(RecoveryState.Stage.FINALIZE));
}
public void testsetStageThrowsAnExceptionOnInvalidTransitions() {
SearchableSnapshotRecoveryState recoveryState = createRecoveryState();
expectThrows(AssertionError.class, () -> recoveryState.setStage(RecoveryState.Stage.DONE));
}
public void testStageTransitionsToDoneOncePreWarmingHasFinished() {
SearchableSnapshotRecoveryState recoveryState = createRecoveryState();
assertThat(recoveryState.getStage(), equalTo(RecoveryState.Stage.INIT));
recoveryState.setPreWarmComplete();
assertThat(recoveryState.getStage(), equalTo(RecoveryState.Stage.INIT));
recoveryState.setStage(RecoveryState.Stage.INDEX).setStage(RecoveryState.Stage.VERIFY_INDEX).setStage(RecoveryState.Stage.TRANSLOG);
recoveryState.getIndex().setFileDetailsComplete();
recoveryState.setStage(RecoveryState.Stage.FINALIZE).setStage(RecoveryState.Stage.DONE);
assertThat(recoveryState.getStage(), equalTo(RecoveryState.Stage.DONE));
}
public void testStageTransitionsToDoneOncePreWarmingFinishesOnShardStartedStage() {
SearchableSnapshotRecoveryState recoveryState = createRecoveryState();
recoveryState.setStage(RecoveryState.Stage.INDEX).setStage(RecoveryState.Stage.VERIFY_INDEX).setStage(RecoveryState.Stage.TRANSLOG);
recoveryState.getIndex().setFileDetailsComplete();
recoveryState.setStage(RecoveryState.Stage.FINALIZE);
recoveryState.setPreWarmComplete();
recoveryState.setStage(RecoveryState.Stage.DONE);
assertThat(recoveryState.getStage(), equalTo(RecoveryState.Stage.DONE));
assertThat(recoveryState.getTimer().stopTime(), greaterThan(0L));
}
public void testStageTransitionsToDoneOncePreWarmingFinishesOnHoldShardStartedStage() {
    SearchableSnapshotRecoveryState state = createRecoveryState();
    state.setStage(RecoveryState.Stage.INDEX);
    state.setStage(RecoveryState.Stage.VERIFY_INDEX);
    state.setStage(RecoveryState.Stage.TRANSLOG);
    state.getIndex().setFileDetailsComplete();
    state.setStage(RecoveryState.Stage.FINALIZE);
    state.setStage(RecoveryState.Stage.DONE);
    // DONE was requested before pre-warming ended; finishing pre-warming releases the hold.
    state.setPreWarmComplete();
    assertThat(state.getStage(), equalTo(RecoveryState.Stage.DONE));
    assertThat(state.getTimer().stopTime(), greaterThan(0L));
}
public void testIndexTimerIsStartedDuringConstruction() {
    // The index phase timer starts ticking as soon as the recovery state is created.
    assertThat(createRecoveryState().getIndex().startTime(), not(equalTo(0L)));
}
public void testIndexTimerMethodsAreBypassed() {
    SearchableSnapshotRecoveryState state = createRecoveryState();
    RecoveryState.Index indexTimer = state.getIndex();
    long startedAt = indexTimer.startTime();
    assertThat(startedAt, not(equalTo(0L)));
    // reset() and start() are no-ops here: the original start time survives both calls.
    indexTimer.reset();
    assertThat(indexTimer.startTime(), equalTo(startedAt));
    indexTimer.start();
    assertThat(indexTimer.startTime(), equalTo(startedAt));
    // stop() is also bypassed: the timer stays running until pre-warming finishes.
    assertThat(indexTimer.stopTime(), equalTo(0L));
    indexTimer.stop();
    assertThat(indexTimer.stopTime(), equalTo(0L));
}
public void testIndexTimerIsStoppedOncePreWarmingFinishes() {
    SearchableSnapshotRecoveryState state = createRecoveryState();
    // Still running before pre-warming completes ...
    assertThat(state.getIndex().stopTime(), equalTo(0L));
    state.setPreWarmComplete();
    // ... and stopped right after.
    assertThat(state.getIndex().stopTime(), greaterThan(0L));
}
public void testFilesAreIgnored() {
    SearchableSnapshotRecoveryState state = createRecoveryState();
    String fileName = "non_pre_warmed_file";
    state.ignoreFile(fileName);
    // File details registered for an ignored file are silently dropped.
    state.getIndex().addFileDetail(fileName, 100, false);
    assertThat(state.getIndex().getFileDetails(fileName), is(nullValue()));
}
public void testResetAfterRemoteTranslogIsSetResetsFlag() {
    SearchableSnapshotRecoveryState state = createRecoveryState();
    state.getIndex().setFileDetailsComplete();
    state.setStage(RecoveryState.Stage.INDEX);
    state.setStage(RecoveryState.Stage.VERIFY_INDEX);
    state.setRemoteTranslogStage();
    assertThat(state.getStage(), equalTo(RecoveryState.Stage.FINALIZE));
    assertThat(state.isRemoteTranslogSet(), equalTo(true));
    // Resetting the recovery back to INIT must clear the remote-translog flag.
    state.setStage(RecoveryState.Stage.INIT);
    assertThat(state.isRemoteTranslogSet(), equalTo(false));
}
/** Builds a recovery state for a freshly initializing primary shard assigned to a local node. */
private SearchableSnapshotRecoveryState createRecoveryState() {
    final ShardRouting routing = TestShardRouting.newShardRouting(
        randomAlphaOfLength(10),
        0,
        randomAlphaOfLength(10),
        true,
        ShardRoutingState.INITIALIZING
    );
    final DiscoveryNode node = DiscoveryNodeUtils.create("local");
    return new SearchableSnapshotRecoveryState(routing, node, null);
}
}
|
SearchableSnapshotsRecoveryStateTests
|
java
|
spring-projects__spring-framework
|
spring-beans/src/test/java/org/springframework/beans/BeanWrapperAutoGrowingTests.java
|
{
"start": 8173,
"end": 11289
}
|
// Plain JavaBean fixture. Its accessors are read reflectively via property paths
// (e.g. "nested.prop", "list[0]", "map[key].nested"), so the exact getter/setter
// signatures below are load-bearing — do not restructure them.
class ____ {
private String prop;
private Bean nested;
// NOTE(review): presumably exercises the "no default constructor" failure path — confirm against the tests.
private NestedNoDefaultConstructor nestedNoConstructor;
private Bean[] array;
private Bean[][] multiArray;
private Bean[][][] threeDimensionalArray;
private List<Bean[][][]> threeDimensionalArrayList;
private List<Bean> list;
private List<List<Bean>> nestedList;
private List<List<List<Bean>>> nestedNestedList;
// Raw List on purpose: element type cannot be inferred from generics.
private List listNotParameterized;
private Map<String, Bean> map;
private Map<String, Map<String, Bean>> nestedMap;
private Map<String, Map<String, Map<String, Bean>>> nestedNestedMap;
public String getProp() {
return prop;
}
public void setProp(String prop) {
this.prop = prop;
}
public Bean getNested() {
return nested;
}
public void setNested(Bean nested) {
this.nested = nested;
}
public Bean[] getArray() {
return array;
}
public void setArray(Bean[] array) {
this.array = array;
}
public Bean[][] getMultiArray() {
return multiArray;
}
public void setMultiArray(Bean[][] multiArray) {
this.multiArray = multiArray;
}
public Bean[][][] getThreeDimensionalArray() {
return threeDimensionalArray;
}
public void setThreeDimensionalArray(Bean[][][] threeDimensionalArray) {
this.threeDimensionalArray = threeDimensionalArray;
}
public List<Bean[][][]> getThreeDimensionalArrayList() {
return threeDimensionalArrayList;
}
public void setThreeDimensionalArrayList(List<Bean[][][]> threeDimensionalArrayList) {
this.threeDimensionalArrayList = threeDimensionalArrayList;
}
public List<Bean> getList() {
return list;
}
public void setList(List<Bean> list) {
this.list = list;
}
public List<List<Bean>> getNestedList() {
return nestedList;
}
public void setNestedList(List<List<Bean>> nestedList) {
this.nestedList = nestedList;
}
public List<List<List<Bean>>> getNestedNestedList() {
return nestedNestedList;
}
public void setNestedNestedList(List<List<List<Bean>>> nestedNestedList) {
this.nestedNestedList = nestedNestedList;
}
public NestedNoDefaultConstructor getNestedNoConstructor() {
return nestedNoConstructor;
}
public void setNestedNoConstructor(NestedNoDefaultConstructor nestedNoConstructor) {
this.nestedNoConstructor = nestedNoConstructor;
}
public List getListNotParameterized() {
return listNotParameterized;
}
public void setListNotParameterized(List listNotParameterized) {
this.listNotParameterized = listNotParameterized;
}
public Map<String, Bean> getMap() {
return map;
}
public void setMap(Map<String, Bean> map) {
this.map = map;
}
public Map<String, Map<String, Bean>> getNestedMap() {
return nestedMap;
}
public void setNestedMap(Map<String, Map<String, Bean>> nestedMap) {
this.nestedMap = nestedMap;
}
public Map<String, Map<String, Map<String, Bean>>> getNestedNestedMap() {
return nestedNestedMap;
}
public void setNestedNestedMap(Map<String, Map<String, Map<String, Bean>>> nestedNestedMap) {
this.nestedNestedMap = nestedNestedMap;
}
}
public static
|
Bean
|
java
|
google__dagger
|
dagger-runtime/main/java/dagger/MapKey.java
|
{
"start": 2911,
"end": 3441
}
|
class ____ {
* {@literal @}Inject
* SomeInjectedType({@literal Map<MyMapKey, Integer>} map) {
* assert map.get(new MyMapKeyImpl("foo", MyEnum.BAR)) == 2;
* }
* }
* </code></pre>
*
* <p>(Note that there must be a class {@code MyMapKeyImpl} that implements {@code MyMapKey} in
* order to call {@link Map#get(Object)} on the provided map.)
*
* @see <a href="https://dagger.dev/multibindings#map-multibindings">Map multibinding</a>
*/
@Documented
@Target(ANNOTATION_TYPE)
@Retention(RUNTIME)
public @
|
SomeInjectedType
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/IndexOfCharTest.java
|
{
"start": 1592,
"end": 1729
}
|
class ____ {
{
"".indexOf('$', 0);
}
}
""")
.doTest();
}
}
|
Test
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/util/constraint/PlacementConstraintParser.java
|
{
"start": 20314,
"end": 20428
}
|
class ____ encapsulate source tags and allocations in the
* placement specification.
*/
public static final
|
to
|
java
|
netty__netty
|
handler/src/main/java/io/netty/handler/traffic/TrafficCounter.java
|
{
"start": 1569,
"end": 4500
}
|
class ____ {
private static final InternalLogger logger = InternalLoggerFactory.getInstance(TrafficCounter.class);
/**
* @return the time in ms using nanoTime, so not real EPOCH time but elapsed time in ms.
*/
public static long milliSecondFromNano() {
return System.nanoTime() / 1000000;
}
/**
* Current written bytes
*/
private final AtomicLong currentWrittenBytes = new AtomicLong();
/**
* Current read bytes
*/
private final AtomicLong currentReadBytes = new AtomicLong();
/**
* Last writing time during current check interval
*/
private long writingTime;
/**
* Last reading delay during current check interval
*/
private long readingTime;
/**
* Long life written bytes
*/
private final AtomicLong cumulativeWrittenBytes = new AtomicLong();
/**
* Long life read bytes
*/
private final AtomicLong cumulativeReadBytes = new AtomicLong();
/**
* Last Time where cumulative bytes where reset to zero: this time is a real EPOC time (informative only)
*/
private long lastCumulativeTime;
/**
* Last writing bandwidth
*/
private long lastWriteThroughput;
/**
* Last reading bandwidth
*/
private long lastReadThroughput;
/**
* Last Time Check taken
*/
final AtomicLong lastTime = new AtomicLong();
/**
* Last written bytes number during last check interval
*/
private volatile long lastWrittenBytes;
/**
* Last read bytes number during last check interval
*/
private volatile long lastReadBytes;
/**
* Last future writing time during last check interval
*/
private volatile long lastWritingTime;
/**
* Last reading time during last check interval
*/
private volatile long lastReadingTime;
/**
* Real written bytes
*/
private final AtomicLong realWrittenBytes = new AtomicLong();
/**
* Real writing bandwidth
*/
private long realWriteThroughput;
/**
* Delay between two captures
*/
final AtomicLong checkInterval = new AtomicLong(
AbstractTrafficShapingHandler.DEFAULT_CHECK_INTERVAL);
// default 1 s
/**
* Name of this Monitor
*/
final String name;
/**
* The associated TrafficShapingHandler
*/
final AbstractTrafficShapingHandler trafficShapingHandler;
/**
* Executor that will run the monitor
*/
final ScheduledExecutorService executor;
/**
* Monitor created once in start()
*/
Runnable monitor;
/**
* used in stop() to cancel the timer
*/
volatile ScheduledFuture<?> scheduledFuture;
/**
* Is Monitor active
*/
volatile boolean monitorActive;
/**
* Class to implement monitoring at fix delay
*
*/
private final
|
TrafficCounter
|
java
|
apache__camel
|
tooling/maven/camel-package-maven-plugin/src/main/java/org/apache/camel/maven/packaging/PackageLegalMojo.java
|
{
"start": 2098,
"end": 3767
}
|
class ____ one of the threads it generated failed.
* @throws MojoFailureException something bad happened...
*/
@Override
public void execute() throws MojoExecutionException, MojoFailureException {
// Default to the project's build output directory when no directory was configured.
if (legalOutDir == null) {
legalOutDir = new File(project.getBuild().getOutputDirectory());
}
processLegal(legalOutDir.toPath());
}
/**
 * Ensures the build output contains Apache Camel LICENSE and NOTICE files, copying the
 * bundled defaults when the project does not ship its own copies under
 * {@code src/main/resources/META-INF}.
 *
 * @param legalOutDir directory the legal files are written into
 * @throws MojoExecutionException if a default legal file cannot be read or written
 */
public void processLegal(Path legalOutDir) throws MojoExecutionException {
    // Only take care about camel legal stuff
    if (!"org.apache.camel".equals(project.getGroupId())) {
        return;
    }
    // LICENSE and NOTICE follow the exact same "copy default unless overridden" rule.
    copyDefaultLegalFile(legalOutDir, "src/main/resources/META-INF/LICENSE.txt", "/camel-LICENSE.txt", "META-INF/LICENSE.txt");
    copyDefaultLegalFile(legalOutDir, "src/main/resources/META-INF/NOTICE.txt", "/camel-NOTICE.txt", "META-INF/NOTICE.txt");
}

/**
 * Copies the bundled classpath resource to {@code targetPath} under {@code legalOutDir},
 * unless the project provides its own file at {@code projectFile}.
 */
private void copyDefaultLegalFile(Path legalOutDir, String projectFile, String classpathResource, String targetPath)
        throws MojoExecutionException {
    if (new File(projectFile).exists()) {
        return; // the project ships its own copy; leave it alone
    }
    try (InputStream is = getClass().getResourceAsStream(classpathResource)) {
        String content = IOUtils.toString(is, StandardCharsets.UTF_8);
        updateResource(legalOutDir, targetPath, content);
    } catch (IOException e) {
        throw new MojoExecutionException("Failed to write legal files. Reason: " + e, e);
    }
}
}
|
or
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.