language stringclasses 1
value | repo stringclasses 60
values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/resource/PlacementConstraint.java | {
"start": 23452,
"end": 26030
} | enum ____ {
MILLISECONDS, OPPORTUNITIES
}
private AbstractConstraint constraint;
private long schedulingDelay;
private DelayUnit delayUnit;
public TimedPlacementConstraint(AbstractConstraint constraint,
long schedulingDelay, DelayUnit delayUnit) {
this.constraint = constraint;
this.schedulingDelay = schedulingDelay;
this.delayUnit = delayUnit;
}
public TimedPlacementConstraint(AbstractConstraint constraint,
long schedulingDelay) {
this(constraint, schedulingDelay, DelayUnit.MILLISECONDS);
}
public TimedPlacementConstraint(AbstractConstraint constraint) {
this(constraint, Long.MAX_VALUE, DelayUnit.MILLISECONDS);
}
/**
* Get the constraint that has to be satisfied within the time window.
*
* @return the constraint to be satisfied
*/
public AbstractConstraint getConstraint() {
return constraint;
}
/**
* Sets the constraint that has to be satisfied within the time window.
*
* @param constraint the constraint to be satisfied
*/
public void setConstraint(AbstractConstraint constraint) {
this.constraint = constraint;
}
/**
* Get the scheduling delay value that determines the time window within
* which the constraint has to be satisfied.
*
* @return the value of the scheduling delay
*/
public long getSchedulingDelay() {
return schedulingDelay;
}
/**
* The unit of the scheduling delay.
*
* @return the unit of the delay
*/
public DelayUnit getDelayUnit() {
return delayUnit;
}
@Override
public <T> T accept(Visitor<T> visitor) {
return visitor.visit(this);
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
TimedPlacementConstraint that = (TimedPlacementConstraint) o;
if (schedulingDelay != that.schedulingDelay) {
return false;
}
if (constraint != null ? !constraint.equals(that.constraint) :
that.constraint != null) {
return false;
}
return delayUnit == that.delayUnit;
}
@Override
public int hashCode() {
int result = constraint != null ? constraint.hashCode() : 0;
result = 31 * result + (int) (schedulingDelay ^ (schedulingDelay >>> 32));
result = 31 * result + (delayUnit != null ? delayUnit.hashCode() : 0);
return result;
}
}
}
| DelayUnit |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/beanmanager/BeanManagerTest.java | {
"start": 15447,
"end": 15670
} | class ____ implements Converter<String> {
@Override
public String convert(String value) {
return value.toUpperCase();
}
}
@Decorator
@Priority(10)
static | ToUpperCaseConverter |
java | apache__camel | core/camel-yaml-io/src/main/java/org/apache/camel/yaml/io/YamlWriter.java | {
"start": 2453,
"end": 19496
} | class ____ extends ServiceSupport implements CamelContextAware {
private CamelContext camelContext;
private final Writer writer;
private final DefaultRuntimeCamelCatalog catalog;
private final ModelJSonSchemaResolver resolver;
private final List<EipModel> roots = new ArrayList<>();
private boolean routesIsRoot;
private final ArrayDeque<EipModel> models = new ArrayDeque<>();
private String expression;
private boolean uriAsParameters;
public YamlWriter(Writer writer) {
this.writer = writer;
this.resolver = new ModelJSonSchemaResolver();
this.catalog = new DefaultRuntimeCamelCatalog();
this.catalog.setJSonSchemaResolver(this.resolver);
this.catalog.setCaching(false); // turn cache off as we store state per node
this.catalog.start();
}
@Override
public CamelContext getCamelContext() {
return camelContext;
}
@Override
public void setCamelContext(CamelContext camelContext) {
this.camelContext = camelContext;
}
@Override
protected void doStart() throws Exception {
if (camelContext != null) {
this.resolver.setCamelContext(camelContext);
this.resolver.setClassLoader(camelContext.getApplicationContextClassLoader());
}
}
private EipModel lookupEipModel(String name) {
// namespace is using the property model
if ("namespace".equals(name)) {
name = "property";
}
return catalog.eipModel(name);
}
public void setUriAsParameters(boolean uriAsParameters) {
this.uriAsParameters = uriAsParameters;
}
public void startElement(String name) throws IOException {
if ("routes".equals(name) || "dataFormats".equals(name)) {
// special for routes or dataFormats
routesIsRoot = true;
return;
}
EipModel model = lookupEipModel(name);
if (model == null) {
// not an EIP model or namespace
return;
}
EipModel parent = models.isEmpty() ? null : models.peek();
model.getMetadata().put("_parent", parent);
models.push(model);
if (parent == null) {
// its a root element
roots.add(model);
}
}
public void startExpressionElement(String name) throws IOException {
// currently building an expression
this.expression = name;
}
public void endExpressionElement(String name) throws IOException {
// expression complete, back to normal mode
this.expression = null;
}
public void endElement(String name) throws IOException {
if ("routes".equals(name) || "dataFormats".equals(name)) {
// we are done
writer.write(toYaml());
return;
}
EipModel model = lookupEipModel(name);
if (model == null) {
// not an EIP model
return;
}
// special for namespace
if ("namespace".equals(name)) {
EipModel last = models.isEmpty() ? null : models.peek();
if (!models.isEmpty()) {
models.pop();
}
EipModel parent = models.isEmpty() ? null : models.peek();
if (parent != null) {
Map<String, String> map = (Map<String, String>) parent.getMetadata().get("namespace");
if (map == null) {
map = new LinkedHashMap<>();
parent.getMetadata().put("namespace", map);
}
String key = (String) last.getMetadata().get("key");
String value = (String) last.getMetadata().get("value");
// skip xsi namespace
if (key != null && !"xsi".equals(key) && value != null) {
map.put(key, value);
}
}
return;
}
EipModel last = models.isEmpty() ? null : models.peek();
if (last != null && isLanguage(last)) {
if (!models.isEmpty()) {
models.pop();
}
// okay we ended a language which we need to set on a parent EIP
EipModel parent = models.isEmpty() ? null : models.peek();
if (parent != null) {
String key = expressionName(parent, expression);
if (key != null) {
parent.getMetadata().put(key, last);
}
}
return;
}
if (last != null) {
if (!models.isEmpty()) {
models.pop();
}
// is this input/output on the parent
EipModel parent = models.isEmpty() ? null : models.peek();
if (parent != null) {
if ("from".equals(name) && parent.isInput()) {
// only set input once
parent.getMetadata().put("_input", last);
} else if ("dataFormats".equals(parent.getName())) {
// special for dataFormats
List<EipModel> list = (List<EipModel>) parent.getMetadata().get("_output");
if (list == null) {
list = new ArrayList<>();
parent.getMetadata().put("_output", list);
}
list.add(last);
} else if ("choice".equals(parent.getName())) {
// special for choice/doCatch/doFinally
setMetadata(parent, name, last);
} else if (parent.isOutput()) {
List<EipModel> list = (List<EipModel>) parent.getMetadata().get("_output");
if (list == null) {
list = new ArrayList<>();
parent.getMetadata().put("_output", list);
}
list.add(last);
} else if ("marshal".equals(parent.getName()) || "unmarshal".equals(parent.getName())) {
parent.getMetadata().put("_dataFormatType", last);
}
}
}
if (models.isEmpty() && !routesIsRoot) {
// we are done
writer.write(toYaml());
}
}
public void writeText(String name, String text) throws IOException {
EipModel last = models.isEmpty() ? null : models.peek();
if (last != null) {
// special as writeText can be used for list of string values
setMetadata(last, name, text);
}
}
public void writeValue(String value) throws IOException {
EipModel last = models.isEmpty() ? null : models.peek();
if (last != null) {
String key = valueName(last);
if (key != null) {
last.getMetadata().put(key, value);
}
}
}
public void addAttribute(String name, Object value) throws IOException {
EipModel last = models.isEmpty() ? null : models.peek();
if (last != null) {
// uri should be expanded into more human-readable with parameters
if (uriAsParameters && "uri".equals(name) && value != null) {
try {
String base = StringHelper.before(value.toString(), ":");
if (base != null) {
Map parameters = catalog.endpointProperties(value.toString());
if (!parameters.isEmpty()) {
prepareParameters(parameters);
last.getMetadata().put("uri", base);
last.getMetadata().put("parameters", parameters);
return;
}
}
} catch (Exception e) {
// ignore will attempt without catalog
}
try {
String base = URISupport.stripQuery(value.toString());
String query = URISupport.extractQuery(value.toString());
if (base != null && query != null) {
Map parameters = URISupport.parseQuery(query);
if (!parameters.isEmpty()) {
prepareParameters(parameters);
last.getMetadata().put("uri", base);
last.getMetadata().put("parameters", parameters);
return;
}
}
} catch (Exception e) {
// ignore
}
}
last.getMetadata().put(name, value);
}
}
private static void prepareParameters(Map<String, Object> parameters) {
// convert "true" / "false" to boolean values
parameters.forEach((k, v) -> {
if ("true".equals(v) || "false".equals(v)) {
Object s = Boolean.valueOf(v.toString());
parameters.replace(k, s);
}
});
}
private EipNode asExpressionNode(EipModel model, String name) {
EipNode node = new EipNode(name, null, false, true);
doAsNode(model, node);
return node;
}
private EipNode asNode(EipModel model) {
EipNode node = new EipNode(model.getName(), null, false, false);
doAsNode(model, node);
return node;
}
private void doAsNode(EipModel model, EipNode node) {
for (Map.Entry<String, Object> entry : model.getMetadata().entrySet()) {
String key = entry.getKey();
if ("_input".equals(key)) {
EipModel m = (EipModel) entry.getValue();
node.setInput(asNode(m));
} else if ("_output".equals(key)) {
List<EipModel> list = (List) entry.getValue();
for (EipModel m : list) {
node.addOutput(asNode(m));
}
} else if ("choice".equals(node.getName()) && "otherwise".equals(key)) {
EipModel other = (EipModel) entry.getValue();
node.addOutput(asNode(other));
} else if ("choice".equals(node.getName()) && "when".equals(key)) {
Object v = entry.getValue();
if (v instanceof List) {
// can be a list in choice
List<EipModel> list = (List) v;
for (EipModel m : list) {
node.addOutput(asNode(m));
}
} else {
node.addOutput(asNode((EipModel) v));
}
} else if (("marshal".equals(node.getName()) || "unmarshal".equals(node.getName()))
&& "_dataFormatType".equals(key)) {
EipModel other = (EipModel) entry.getValue();
node.addOutput(asNode(other));
} else {
boolean skip = key.startsWith("_") || key.equals("customId");
if (skip) {
continue;
}
String exp = null;
if (!isLanguage(model)) {
// special for expressions that are a property where we need to use expression name as key
exp = expressionName(model, key);
}
Object v = entry.getValue();
if (v instanceof EipModel m) {
if (exp == null || "expression".equals(exp)) {
v = asExpressionNode(m, m.getName());
} else {
v = asExpressionNode(m, exp);
}
}
if (exp != null && v instanceof EipNode eipNode) {
node.addExpression(eipNode);
} else {
node.addProperty(key, v);
if ("expression".equals(key)) {
node.addProperty("language", model.getName());
}
}
}
}
}
public String toYaml() {
try {
// model to json
JsonArray arr = transformToJson(roots);
// load into jackson
JsonNode jsonNodeTree = new ObjectMapper().readTree(arr.toJson());
// map to yaml via jackson
YAMLMapper mapper = new YAMLMapper();
mapper.disable(YAMLGenerator.Feature.WRITE_DOC_START_MARKER);
mapper.enable(YAMLGenerator.Feature.MINIMIZE_QUOTES);
mapper.enable(YAMLGenerator.Feature.INDENT_ARRAYS_WITH_INDICATOR);
String jsonAsYaml = mapper.writeValueAsString(jsonNodeTree);
// strip leading yaml indent of 2 spaces (because INDENT_ARRAYS_WITH_INDICATOR is enabled)
StringJoiner sj = new StringJoiner("\n");
for (String line : jsonAsYaml.split("\n")) {
if (line.startsWith(" ")) {
line = line.substring(2);
}
sj.add(line);
}
sj.add(""); // end with empty line
jsonAsYaml = sj.toString();
return jsonAsYaml;
} catch (Exception e) {
throw new RuntimeException(e);
}
}
private JsonArray transformToJson(List<EipModel> models) {
JsonArray arr = new JsonArray();
for (EipModel model : models) {
JsonObject jo = asJSonNode(model);
arr.add(jo);
}
return arr;
}
private JsonObject asJSonNode(EipModel model) {
JsonObject answer = new JsonObject();
JsonObject jo = new JsonObject();
answer.put(model.getName(), jo);
for (Map.Entry<String, Object> entry : model.getMetadata().entrySet()) {
String key = entry.getKey();
boolean skip = key.equals("customId");
if (skip) {
continue;
}
Object value = entry.getValue();
if (value != null) {
if (value instanceof Collection<?>) {
Collection<?> col = (Collection<?>) value;
List<Object> list = new ArrayList<>();
for (Object v : col) {
Object r = v;
if (r instanceof EipModel eipModel) {
EipNode en = asNode(eipModel);
value = en.asJsonObject();
JsonObject wrap = new JsonObject();
wrap.put(en.getName(), value);
r = wrap;
}
list.add(r);
}
if ("_output".equals(key)) {
key = "steps";
}
// special with "from" where outputs needs to be embedded
if (jo.containsKey("from")) {
jo = jo.getMap("from");
}
jo.put(key, list);
} else {
if (value instanceof EipModel eipModel) {
EipNode r = asNode(eipModel);
value = r.asJsonObject();
jo.put(r.getName(), value);
} else {
jo.put(key, value);
}
}
}
}
return answer;
}
@SuppressWarnings("unchecked")
private static void setMetadata(EipModel model, String name, Object value) {
// special for choice
boolean array = isArray(model, name);
if (array) {
List<Object> list = (List<Object>) model.getMetadata().get(name);
if (list == null) {
list = new ArrayList<>();
model.getMetadata().put(name, list);
}
list.add(value);
} else {
model.getMetadata().put(name, value);
}
}
private static String valueName(EipModel model) {
return model.getOptions().stream()
.filter(o -> "value".equals(o.getKind()))
.map(BaseOptionModel::getName)
.findFirst().orElse(null);
}
private static String expressionName(EipModel model, String name) {
return model.getOptions().stream()
.filter(o -> "expression".equals(o.getKind()))
.map(BaseOptionModel::getName)
.filter(oName -> name == null || oName.equalsIgnoreCase(name))
.findFirst().orElse(null);
}
private static boolean isArray(EipModel model, String name) {
return model.getOptions().stream()
.filter(o -> o.getName().equalsIgnoreCase(name))
.map(o -> "array".equals(o.getType()))
.findFirst().orElse(false);
}
private static boolean isLanguage(EipModel model) {
return model.getJavaType().startsWith("org.apache.camel.model.language");
}
}
| YamlWriter |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/ObjectsHashCodePrimitiveTest.java | {
"start": 3333,
"end": 3680
} | class ____ {
void f() {
int x = 3;
int y = Integer.hashCode(x);
}
}
""")
.doTest();
}
@Test
public void hashCodeLong() {
helper
.addInputLines(
"Test.java",
"""
import java.util.Objects;
| Test |
java | elastic__elasticsearch | qa/packaging/src/test/java/org/elasticsearch/packaging/test/DebMetadataTests.java | {
"start": 1120,
"end": 3703
} | class ____ extends PackagingTestCase {
private final LintianResultParser lintianParser = new LintianResultParser();
private static final List<String> IGNORED_TAGS = List.of(
// Override syntax changes between lintian versions in a non-backwards compatible way, so we have to tolerate these.
// Tag mismatched-override is a non-erasable tag which cannot be ignored with overrides, so we handle it here.
"mismatched-override",
// systemd-service-file-outside-lib has been incorrect and removed in the newer version on Lintian
"systemd-service-file-outside-lib"
);
@BeforeClass
public static void filterDistros() {
assumeTrue("only deb", distribution.packaging == Distribution.Packaging.DEB);
}
public void test05CheckLintian() {
String extraArgs = "";
final String helpText = sh.run("lintian --help").stdout();
if (helpText.contains("--fail-on-warnings")) {
extraArgs = "--fail-on-warnings";
} else if (helpText.contains("--fail-on error")) {
extraArgs = "--fail-on error,warning";
}
Shell.Result result = sh.runIgnoreExitCode(
String.format(Locale.ROOT, "lintian %s %s", extraArgs, getDistributionFile(distribution()))
);
Result lintianResult = lintianParser.parse(result.stdout());
// Unfortunately Lintian overrides syntax changes between Lintian versions in a non-backwards compatible
// way, so we have to manage some exclusions outside the overrides file.
if (lintianResult.isSuccess() == false) {
List<Issue> importantIssues = lintianResult.issues()
.stream()
.filter(issue -> IGNORED_TAGS.contains(issue.tag()) == false)
.toList();
if (importantIssues.isEmpty() == false) {
fail(
"Issues for DEB package found by Lintian:\n"
+ importantIssues.stream().map(Record::toString).collect(Collectors.joining("\n"))
);
}
}
}
public void test06Dependencies() {
final Shell sh = new Shell();
final Shell.Result result = sh.run("dpkg -I " + getDistributionFile(distribution()));
TestCase.assertTrue(Pattern.compile("(?m)^ Depends:.*bash.*").matcher(result.stdout()).find());
String oppositePackageName = "elasticsearch-oss";
TestCase.assertTrue(Pattern.compile("(?m)^ Conflicts: " + oppositePackageName + "$").matcher(result.stdout()).find());
}
}
| DebMetadataTests |
java | google__dagger | javatests/dagger/internal/codegen/MapKeyProcessorTest.java | {
"start": 3222,
"end": 3334
} | interface ____ {",
"@MapKey(unwrapValue = false)",
"@Retention(RUNTIME)",
"public @ | Container |
java | spring-projects__spring-boot | buildSrc/src/test/java/org/springframework/boot/build/bom/bomr/InteractiveUpgradeResolverTests.java | {
"start": 1337,
"end": 3043
} | class ____ {
@Test
void resolveUpgradeUpdateVersionNumberInLibrary() {
UserInputHandler userInputHandler = mock(UserInputHandler.class);
LibraryUpdateResolver libaryUpdateResolver = mock(LibraryUpdateResolver.class);
InteractiveUpgradeResolver upgradeResolver = new InteractiveUpgradeResolver(userInputHandler,
libaryUpdateResolver);
List<Library> libraries = new ArrayList<>();
DependencyVersion version = DependencyVersion.parse("1.0.0");
LibraryVersion libraryVersion = new LibraryVersion(version);
Library library = new Library("test", null, libraryVersion, null, null, null, false, null, null, null, null);
libraries.add(library);
List<Library> librariesToUpgrade = new ArrayList<>();
librariesToUpgrade.add(library);
List<LibraryWithVersionOptions> updates = new ArrayList<>();
DependencyVersion updateVersion = DependencyVersion.parse("1.0.1");
VersionOption versionOption = new VersionOption(updateVersion);
updates.add(new LibraryWithVersionOptions(library, List.of(versionOption)));
given(libaryUpdateResolver.findLibraryUpdates(any(), any())).willReturn(updates);
Provider<Object> providerOfVersionOption = providerOf(versionOption);
given(userInputHandler.askUser(any())).willReturn(providerOfVersionOption);
List<Upgrade> upgrades = upgradeResolver.resolveUpgrades(librariesToUpgrade, libraries);
assertThat(upgrades.get(0).to().getVersion().getVersion()).isEqualTo(updateVersion);
}
@SuppressWarnings({ "unchecked", "rawtypes" })
private <T> Provider<T> providerOf(VersionOption versionOption) {
Provider provider = mock(Provider.class);
given(provider.get()).willReturn(versionOption);
return provider;
}
}
| InteractiveUpgradeResolverTests |
java | assertj__assertj-core | assertj-core/src/main/java/org/assertj/core/error/ShouldBeEqualWithinOffset.java | {
"start": 886,
"end": 2348
} | class ____ extends BasicErrorMessageFactory {
/**
* Creates a new <code>{@link ShouldBeEqualWithinOffset}</code>.
* @param <T> guarantees that the values used in this factory have the same type.
* @param actual the actual value in the failed assertion.
* @param expected the expected value in the failed assertion.
* @param offset the given positive offset.
* @param difference the effective difference between actual and expected.
* @return the created {@code ErrorMessageFactory}.
*/
public static <T extends Number> ErrorMessageFactory shouldBeEqual(T actual, T expected, Offset<T> offset,
T difference) {
return new ShouldBeEqualWithinOffset(actual, expected, offset, difference);
}
private <T extends Number> ShouldBeEqualWithinOffset(Number actual, Number expected, Offset<T> offset,
Number difference) {
super("%n" +
"Expecting actual:%n" +
" %s%n" +
"to be close to:%n" +
" %s%n" +
"by less than %s but difference was %s.%n" +
"(a difference of exactly %s being considered " + validOrNot(offset) + ")",
actual, expected, offset.value, difference, offset.value);
}
private static <T extends Number> String validOrNot(Offset<T> offset) {
return offset.strict ? "invalid" : "valid";
}
}
| ShouldBeEqualWithinOffset |
java | hibernate__hibernate-orm | hibernate-jcache/src/test/java/org/hibernate/orm/test/jcache/domain/VersionedItem.java | {
"start": 148,
"end": 328
} | class ____ extends Item {
private Long version;
public Long getVersion() {
return version;
}
public void setVersion(Long version) {
this.version = version;
}
}
| VersionedItem |
java | grpc__grpc-java | s2a/src/test/java/io/grpc/s2a/internal/handshaker/S2APrivateKeyMethodTest.java | {
"start": 1872,
"end": 11963
} | class ____ {
@Rule public final Expect expect = Expect.create();
private static final byte[] DATA_TO_SIGN = "random bytes for signing.".getBytes(UTF_8);
private S2AStub stub;
private FakeWriter writer;
private S2APrivateKeyMethod keyMethod;
private static PublicKey extractPublicKeyFromPem(String pem) throws Exception {
X509Certificate cert =
(X509Certificate)
CertificateFactory.getInstance("X.509")
.generateCertificate(new ByteArrayInputStream(pem.getBytes(UTF_8)));
return cert.getPublicKey();
}
private static boolean verifySignature(
byte[] dataToSign, byte[] signature, String signatureAlgorithm) throws Exception {
Signature sig = Signature.getInstance(signatureAlgorithm);
InputStream leafCert =
S2APrivateKeyMethodTest.class.getClassLoader().getResourceAsStream("leaf_cert_ec.pem");
sig.initVerify(extractPublicKeyFromPem(FakeWriter.convertInputStreamToString(
leafCert)));
leafCert.close();
sig.update(dataToSign);
return sig.verify(signature);
}
@Before
public void setUp() {
// This is line is to ensure that JNI correctly links the necessary objects. Without this, we
// get `java.lang.UnsatisfiedLinkError` on
// `io.netty.internal.tcnative.NativeStaticallyReferencedJniMethods.sslSignRsaPkcsSha1()`
GrpcSslContexts.configure(SslContextBuilder.forClient());
writer = new FakeWriter();
stub = S2AStub.newInstanceForTesting(writer);
writer.setReader(stub.getReader());
keyMethod = S2APrivateKeyMethod.create(stub, /* localIdentity= */ Optional.empty());
}
@Test
public void signatureAlgorithmConversion_success() {
expect
.that(
S2APrivateKeyMethod.convertOpenSslSignAlgToS2ASignAlg(
OpenSslPrivateKeyMethod.SSL_SIGN_RSA_PKCS1_SHA256))
.isEqualTo(SignatureAlgorithm.S2A_SSL_SIGN_RSA_PKCS1_SHA256);
expect
.that(
S2APrivateKeyMethod.convertOpenSslSignAlgToS2ASignAlg(
OpenSslPrivateKeyMethod.SSL_SIGN_RSA_PKCS1_SHA384))
.isEqualTo(SignatureAlgorithm.S2A_SSL_SIGN_RSA_PKCS1_SHA384);
expect
.that(
S2APrivateKeyMethod.convertOpenSslSignAlgToS2ASignAlg(
OpenSslPrivateKeyMethod.SSL_SIGN_RSA_PKCS1_SHA512))
.isEqualTo(SignatureAlgorithm.S2A_SSL_SIGN_RSA_PKCS1_SHA512);
expect
.that(
S2APrivateKeyMethod.convertOpenSslSignAlgToS2ASignAlg(
OpenSslPrivateKeyMethod.SSL_SIGN_ECDSA_SECP256R1_SHA256))
.isEqualTo(SignatureAlgorithm.S2A_SSL_SIGN_ECDSA_SECP256R1_SHA256);
expect
.that(
S2APrivateKeyMethod.convertOpenSslSignAlgToS2ASignAlg(
OpenSslPrivateKeyMethod.SSL_SIGN_ECDSA_SECP384R1_SHA384))
.isEqualTo(SignatureAlgorithm.S2A_SSL_SIGN_ECDSA_SECP384R1_SHA384);
expect
.that(
S2APrivateKeyMethod.convertOpenSslSignAlgToS2ASignAlg(
OpenSslPrivateKeyMethod.SSL_SIGN_ECDSA_SECP521R1_SHA512))
.isEqualTo(SignatureAlgorithm.S2A_SSL_SIGN_ECDSA_SECP521R1_SHA512);
expect
.that(
S2APrivateKeyMethod.convertOpenSslSignAlgToS2ASignAlg(
OpenSslPrivateKeyMethod.SSL_SIGN_RSA_PSS_RSAE_SHA256))
.isEqualTo(SignatureAlgorithm.S2A_SSL_SIGN_RSA_PSS_RSAE_SHA256);
expect
.that(
S2APrivateKeyMethod.convertOpenSslSignAlgToS2ASignAlg(
OpenSslPrivateKeyMethod.SSL_SIGN_RSA_PSS_RSAE_SHA384))
.isEqualTo(SignatureAlgorithm.S2A_SSL_SIGN_RSA_PSS_RSAE_SHA384);
expect
.that(
S2APrivateKeyMethod.convertOpenSslSignAlgToS2ASignAlg(
OpenSslPrivateKeyMethod.SSL_SIGN_RSA_PSS_RSAE_SHA512))
.isEqualTo(SignatureAlgorithm.S2A_SSL_SIGN_RSA_PSS_RSAE_SHA512);
}
@Test
public void signatureAlgorithmConversion_unsupportedOperation() {
UnsupportedOperationException e =
assertThrows(
UnsupportedOperationException.class,
() -> S2APrivateKeyMethod.convertOpenSslSignAlgToS2ASignAlg(-1));
assertThat(e).hasMessageThat().contains("Signature Algorithm -1 is not supported.");
}
@Test
public void createOnNullStub_returnsNullPointerException() {
assertThrows(
NullPointerException.class,
() -> S2APrivateKeyMethod.create(/* stub= */ null, /* localIdentity= */ Optional.empty()));
}
@Test
public void decrypt_unsupportedOperation() {
UnsupportedOperationException e =
assertThrows(
UnsupportedOperationException.class,
() -> keyMethod.decrypt(/* engine= */ null, DATA_TO_SIGN));
assertThat(e).hasMessageThat().contains("decrypt is not supported.");
}
@Test
public void fakelocalIdentity_signWithSha256_success() throws Exception {
S2AIdentity fakeIdentity = S2AIdentity.fromSpiffeId("fake-spiffe-id");
S2AStub mockStub = mock(S2AStub.class);
OpenSslPrivateKeyMethod keyMethodWithFakeIdentity =
S2APrivateKeyMethod.create(mockStub, Optional.of(fakeIdentity));
SessionReq req =
SessionReq.newBuilder()
.setLocalIdentity(fakeIdentity.getIdentity())
.setOffloadPrivateKeyOperationReq(
OffloadPrivateKeyOperationReq.newBuilder()
.setOperation(OffloadPrivateKeyOperationReq.PrivateKeyOperation.SIGN)
.setSignatureAlgorithm(SignatureAlgorithm.S2A_SSL_SIGN_ECDSA_SECP256R1_SHA256)
.setRawBytes(ByteString.copyFrom(DATA_TO_SIGN)))
.build();
byte[] expectedOutbytes = "fake out bytes".getBytes(UTF_8);
when(mockStub.send(req))
.thenReturn(
SessionResp.newBuilder()
.setOffloadPrivateKeyOperationResp(
OffloadPrivateKeyOperationResp.newBuilder()
.setOutBytes(ByteString.copyFrom(expectedOutbytes)))
.build());
byte[] signature =
keyMethodWithFakeIdentity.sign(
/* engine= */ null,
OpenSslPrivateKeyMethod.SSL_SIGN_ECDSA_SECP256R1_SHA256,
DATA_TO_SIGN);
verify(mockStub).send(req);
assertThat(signature).isEqualTo(expectedOutbytes);
}
@Test
public void signWithSha256_success() throws Exception {
writer.initializePrivateKey().setBehavior(FakeWriter.Behavior.OK_STATUS);
byte[] signature =
keyMethod.sign(
/* engine= */ null,
OpenSslPrivateKeyMethod.SSL_SIGN_ECDSA_SECP256R1_SHA256,
DATA_TO_SIGN);
assertThat(signature).isNotEmpty();
assertThat(verifySignature(DATA_TO_SIGN, signature, "SHA256withECDSA")).isTrue();
}
@Test
public void signWithSha384_success() throws Exception {
writer.initializePrivateKey().setBehavior(FakeWriter.Behavior.OK_STATUS);
byte[] signature =
keyMethod.sign(
/* engine= */ null,
OpenSslPrivateKeyMethod.SSL_SIGN_ECDSA_SECP384R1_SHA384,
DATA_TO_SIGN);
assertThat(signature).isNotEmpty();
assertThat(verifySignature(DATA_TO_SIGN, signature, "SHA384withECDSA")).isTrue();
}
@Test
public void signWithSha512_success() throws Exception {
writer.initializePrivateKey().setBehavior(FakeWriter.Behavior.OK_STATUS);
byte[] signature =
keyMethod.sign(
/* engine= */ null,
OpenSslPrivateKeyMethod.SSL_SIGN_ECDSA_SECP521R1_SHA512,
DATA_TO_SIGN);
assertThat(signature).isNotEmpty();
assertThat(verifySignature(DATA_TO_SIGN, signature, "SHA512withECDSA")).isTrue();
}
@Test
public void sign_noKeyAvailable() throws Exception {
writer.resetPrivateKey().setBehavior(FakeWriter.Behavior.OK_STATUS);
S2AConnectionException e =
assertThrows(
S2AConnectionException.class,
() ->
keyMethod.sign(
/* engine= */ null,
OpenSslPrivateKeyMethod.SSL_SIGN_ECDSA_SECP256R1_SHA256,
DATA_TO_SIGN));
assertThat(e)
.hasMessageThat()
.contains(
"Error occurred in response from S2A, error code: 255, error message: \"No Private Key"
+ " available.\".");
}
@Test
public void sign_algorithmNotSupported() throws Exception {
writer.initializePrivateKey().setBehavior(FakeWriter.Behavior.OK_STATUS);
S2AConnectionException e =
assertThrows(
S2AConnectionException.class,
() ->
keyMethod.sign(
/* engine= */ null,
OpenSslPrivateKeyMethod.SSL_SIGN_RSA_PKCS1_SHA256,
DATA_TO_SIGN));
assertThat(e)
.hasMessageThat()
.contains(
"Error occurred in response from S2A, error code: 255, error message: \"Only ECDSA key"
+ " algorithms are supported.\".");
}
@Test
public void sign_getsErrorResponse() throws Exception {
writer.initializePrivateKey().setBehavior(FakeWriter.Behavior.ERROR_STATUS);
S2AConnectionException e =
assertThrows(
S2AConnectionException.class,
() ->
keyMethod.sign(
/* engine= */ null,
OpenSslPrivateKeyMethod.SSL_SIGN_ECDSA_SECP256R1_SHA256,
DATA_TO_SIGN));
assertThat(e)
.hasMessageThat()
.contains(
"Error occurred in response from S2A, error code: 1, error message: \"Intended ERROR"
+ " Status from FakeWriter.\".");
}
@Test
public void sign_getsEmptyResponse() throws Exception {
writer.initializePrivateKey().setBehavior(FakeWriter.Behavior.EMPTY_RESPONSE);
S2AConnectionException e =
assertThrows(
S2AConnectionException.class,
() ->
keyMethod.sign(
/* engine= */ null,
OpenSslPrivateKeyMethod.SSL_SIGN_ECDSA_SECP256R1_SHA256,
DATA_TO_SIGN));
assertThat(e).hasMessageThat().contains("No valid response received from S2A.");
}
} | S2APrivateKeyMethodTest |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/support/AbstractContextConfigurationUtilsTests.java | {
"start": 7308,
"end": 7361
} | class ____ {
}
@WebAppConfiguration
static | ClassesFoo |
java | elastic__elasticsearch | modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupUtility.java | {
"start": 1950,
"end": 2150
} | class ____ where '$' tokens represent inner classes excluding
* def and array types </li>
*
* <li> - javaClass (Class) - a java | name |
java | quarkusio__quarkus | independent-projects/tools/devtools-common/src/main/java/io/quarkus/cli/plugin/PluginManager.java | {
"start": 377,
"end": 15836
} | class ____ {
private static PluginManager INSTANCE;
private final MessageWriter output;
private final PluginMangerState state;
private final PluginManagerSettings settings;
private final PluginManagerUtil util;
public synchronized static PluginManager get() {
if (INSTANCE == null) {
throw new IllegalStateException("No instance of PluginManager found");
}
return INSTANCE;
}
public synchronized static PluginManager create(PluginManagerSettings settings, MessageWriter output,
Optional<Path> userHome, Optional<Path> currentDir, Supplier<QuarkusProject> quarkusProject) {
if (INSTANCE == null) {
INSTANCE = new PluginManager(settings, output, userHome, currentDir, quarkusProject);
}
return INSTANCE;
}
PluginManager(PluginManagerSettings settings, MessageWriter output, Optional<Path> userHome,
Optional<Path> currentDir, Supplier<QuarkusProject> quarkusProject) {
this.settings = settings;
this.output = output;
this.util = PluginManagerUtil.getUtil(settings);
this.state = new PluginMangerState(settings, output, userHome, currentDir, quarkusProject);
}
/**
* Adds the {@link Plugin} with the specified name or location to the installed plugins.
* Plugins that have been detected as installable may be added by name.
* Remote plugins, that are not detected can be added by the location (e.g. url or maven coordinates).
*
* @param nameOrLocation The name or location of the plugin.
* @return the pugin that was added wrapped in {@link Optional}, or empty if no plugin was added.
*/
public Optional<Plugin> addPlugin(String nameOrLocation) {
return addPlugin(nameOrLocation, false, Optional.empty());
}
/**
* Adds the {@link Plugin} with the specified name or location to the installed plugins.
* Plugins that have been detected as installable may be added by name.
* Remote plugins, that are not detected can be added by the location (e.g. url or maven coordinates).
*
* @param nameOrLocation The name or location of the plugin.
* @param userCatalog Flag to only use the user catalog.
* @param description An optional description to add to the plugin.
* @return The pugin that was added wrapped in {@link Optional}, or empty if no plugin was added.
*/
public Optional<Plugin> addPlugin(String nameOrLocation, boolean userCatalog, Optional<String> description) {
PluginCatalogService pluginCatalogService = state.getPluginCatalogService();
String name = util.getName(nameOrLocation);
Optional<String> location = Optional.empty();
if (PluginUtil.isRemoteLocation(nameOrLocation)) {
location = Optional.of(nameOrLocation);
} else if (PluginUtil.isLocalFile(nameOrLocation)) {
Optional<Path> projectRelative = state.getProjectRoot()
.filter(r -> !userCatalog) // If users catalog selected ignore project relative paths.
.filter(r -> PluginUtil.isProjectFile(r, nameOrLocation)) // check if its project file
.map(r -> r.relativize(Path.of(nameOrLocation).toAbsolutePath()));
location = projectRelative
.or(() -> Optional.of(nameOrLocation).map(Path::of).map(Path::toAbsolutePath))
.map(Path::toString);
}
if (!location.isEmpty()) {
Plugin plugin = new Plugin(name, PluginUtil.getType(nameOrLocation), location, description, Optional.empty(),
userCatalog || state.getProjectCatalog().isEmpty());
PluginCatalog updatedCatalog = state.pluginCatalog(userCatalog).addPlugin(plugin);
pluginCatalogService.writeCatalog(updatedCatalog);
state.invalidateInstalledPlugins();
return Optional.of(plugin);
}
Map<String, Plugin> installablePlugins = state.installablePlugins();
Optional<Plugin> plugin = Optional.ofNullable(installablePlugins.get(name)).map(Plugin::inUserCatalog);
return plugin.map(p -> {
Plugin withDescription = p.withDescription(description);
PluginCatalog updatedCatalog = state.pluginCatalog(userCatalog).addPlugin(withDescription);
pluginCatalogService.writeCatalog(updatedCatalog);
state.invalidateInstalledPlugins();
return withDescription;
});
}
/**
* Adds the {@link Plugin} with the specified name or location to the installed plugins.
* Plugins that have been detected as installable may be added by name.
* Remote plugins, that are not detected can be added by the location (e.g. url or maven coordinates).
*
* @param plugin The plugin.
* @return The pugin that was added wrapped in {@link Optional}, or empty if no plugin was added.
*/
public Optional<Plugin> addPlugin(Plugin plugin) {
return addPlugin(plugin, false);
}
/**
* Adds the {@link Plugin} with the specified name or location to the installed plugins.
* Plugins that have been detected as installable may be added by name.
* Remote plugins, that are not detected can be added by the location (e.g. url or maven coordinates).
*
* @param plugin The plugin.
* @param userCatalog Flag to only use the user catalog.
* @return The pugin that was added wrapped in {@link Optional}, or empty if no plugin was added.
*/
public Optional<Plugin> addPlugin(Plugin plugin, boolean userCatalog) {
PluginCatalogService pluginCatalogService = state.getPluginCatalogService();
PluginCatalog updatedCatalog = state.pluginCatalog(userCatalog).addPlugin(plugin);
pluginCatalogService.writeCatalog(updatedCatalog);
state.invalidateInstalledPlugins();
return Optional.of(plugin);
}
/**
* Removes a {@link Plugin} by name.
* The catalog from which the plugin will be removed is selected
* based on where the plugin is found. If plugin is found in both catalogs
* the project catalog is prefered.
*
* @param name The name of the plugin to remove.
* @return The removed plugin wrapped in Optional, empty if no plugin was removed.
*/
public Optional<Plugin> removePlugin(String name) {
return removePlugin(name, false);
}
/**
* Removes a {@link Plugin} by name.
* The catalog from which the plugin will be removed is selected
* based on where the plugin is found. If plugin is found in both catalogs
* the project catalog is prefered.
*
* @param name The name of the plugin to remove.
* @param userCatalog Flag to only use the user catalog.
* @return The removed plugin wrapped in Optional, empty if no plugin was removed.
*/
public Optional<Plugin> removePlugin(String name, boolean userCatalog) {
PluginCatalogService pluginCatalogService = state.getPluginCatalogService();
Plugin plugin = state.getInstalledPluigns().get(name);
if (plugin == null) {
return Optional.empty();
} else if (userCatalog) {
Optional<Plugin> userPlugin = state.getUserCatalog().map(PluginCatalog::getPlugins).map(p -> p.get(name));
return userPlugin.map(p -> {
pluginCatalogService.writeCatalog(
state.getUserCatalog().orElseThrow(() -> new IllegalStateException("User catalog should be available"))
.removePlugin(p));
state.invalidateInstalledPlugins();
return p;
});
}
if (plugin.isInUserCatalog()) {
pluginCatalogService.writeCatalog(state.getUserCatalog()
.orElseThrow(() -> new IllegalStateException("User catalog should be available")).removePlugin(plugin));
} else {
pluginCatalogService.writeCatalog(state.getProjectCatalog()
.orElseThrow(() -> new IllegalStateException("Project catalog should be available")).removePlugin(plugin));
}
state.invalidateInstalledPlugins();
return Optional.of(plugin);
}
/**
* Removes a {@link Plugin} by name.
* The catalog from which the plugin will be removed is selected
* based on where the plugin is found. If plugin is found in both catalogs
* the project catalog is prefered.
*
* @param plugin The plugin to remove
* @return The removed plugin wrapped in Optional, empty if no plugin was removed.
*/
public Optional<Plugin> removePlugin(Plugin plugin) {
return removePlugin(plugin, false);
}
/**
* Removes a {@link Plugin} by name.
* The catalog from which the plugin will be removed is selected
* based on where the plugin is found. If plugin is found in both catalogs
* the project catalog is prefered.
*
* @param plugin The plugin to remove
* @param userCatalog Flag to only use the user catalog.
* @return The removed plugin wrapped in Optional, empty if no plugin was removed.
*/
public Optional<Plugin> removePlugin(Plugin plugin, boolean userCatalog) {
return removePlugin(plugin.getName(), userCatalog);
}
/**
* Check that the installed plugins are still available in the environment.
*
* @return true if any catalog was changed.
*/
public boolean reconcile() {
//We are using `|` instead of `||` cause we always want both branches to be executed
if (state.getUserCatalog().map(c -> reconcile(c)).orElse(false)
| state.getProjectCatalog().map(c -> reconcile(c)).orElse(false)) {
// Refresh the list of installed plugins
state.invalidate();
return true;
} else {
return false;
}
}
/**
* Check that the installed plugins are still available in the environment.
*
* @param catalog The {@PluginCatalog} to use
* @return true if catalog was modified
*/
private boolean reconcile(PluginCatalog catalog) {
Path location = catalog.getCatalogLocation()
.orElseThrow(() -> new IllegalArgumentException("Unknown plugin catalog location."));
List<PluginType> installedTypes = catalog.getPlugins().entrySet().stream().map(Map.Entry::getValue).map(Plugin::getType)
.collect(Collectors.toList());
//Let's only fetch installable plugins of the corresponding types.
//This will help us avoid uneeded calls to things like jbang if no jbang plugins are installed
Map<String, Plugin> installablePlugins = state.installablePlugins(installedTypes).entrySet().stream()
.filter(e -> installedTypes.contains(e.getValue().getType()))
.collect(Collectors.toMap(e -> e.getKey(), e -> e.getValue()));
Map<String, Plugin> unreachable = catalog.getPlugins().entrySet().stream()
.filter(i -> !installablePlugins.containsKey(i.getKey()))
.filter(i -> PluginUtil.shouldRemove(i.getValue()))
.collect(Collectors.toMap(m -> m.getKey(), m -> m.getValue()));
if (unreachable.isEmpty()) {
return false;
}
Path backupLocation = location.getParent().resolve("quarkus-cli-catalog.json.bkp");
output.warn(
"The following plugins were found in the catalog: [%s] but are no longer available: %s.\n"
+ "The unavailable plugins will be purged. A backup of the catalog will be saved at: [%s].",
location,
unreachable.entrySet().stream().map(Map.Entry::getKey).collect(Collectors.joining(", ", "[", "]")),
backupLocation);
PluginCatalogService pluginCatalogService = state.getPluginCatalogService();
pluginCatalogService.writeCatalog(catalog.withCatalogLocation(Optional.of(backupLocation)));
for (String u : unreachable.keySet()) {
catalog = catalog.removePlugin(u);
}
pluginCatalogService.writeCatalog(catalog);
// here we are just touching the catalog, no need to invalidate
return true;
}
/**
* Remove unavailable plugins, add extension plugins if available.
*
* @return true if changes any catalog was modified.
*/
public boolean sync() {
if (state.isSynced()) {
return false;
}
try {
boolean catalogModified = reconcile();
Map<String, Plugin> installedPlugins = getInstalledPlugins();
Map<String, Plugin> extensionPlugins = state.getExtensionPlugins();
Map<String, Plugin> pluginsToInstall = extensionPlugins.entrySet().stream()
.filter(e -> !installedPlugins.containsKey(e.getKey()))
.collect(Collectors.toMap(e -> e.getKey(), e -> e.getValue()));
catalogModified = catalogModified || !pluginsToInstall.isEmpty();
pluginsToInstall.forEach((name, plugin) -> {
addPlugin(plugin);
});
state.invalidate();
if (!catalogModified) {
PluginCatalogService pluginCatalogService = state.getPluginCatalogService();
PluginCatalog catalog = state.pluginCatalog(false);
pluginCatalogService.writeCatalog(catalog);
// here we are just touching the catalog, no need to invalidate
}
return catalogModified;
} finally {
state.synced();
}
}
/**
* Optionally sync if needed.
* Sync happens weekly or when project files are updated.
*/
public boolean syncIfNeeded() {
if (!settings.isInteractiveMode()) {
//syncing may require user interaction, so just return false
return false;
}
// Check if there project catalog file is missing
boolean createdMissingProjectCatalog = state.getPluginCatalogService().findProjectCatalogPath(state.getProjectRoot())
.map(Path::toFile)
.filter(Predicate.not(File::exists))
.map(File::toPath)
.map(p -> {
output.debug("Project plugin catalog has not been initialized. Initializing.");
state.getPluginCatalogService().writeCatalog(new PluginCatalog().withCatalogLocation(p));
return true;
}).orElse(false);
if (createdMissingProjectCatalog) {
return sync();
}
PluginCatalog catalog = state.getCombinedCatalog();
if (PluginUtil.shouldSync(state.getProjectRoot(), catalog)) {
output.debug("Plugin catalog last updated on: " + catalog.getLastUpdate() + ". Syncing.");
return sync();
}
return false;
}
public Map<String, Plugin> getInstalledPlugins(boolean userCatalog) {
return userCatalog ? state.userPlugins() : state.getInstalledPluigns();
}
public Map<String, Plugin> getInstalledPlugins() {
return getInstalledPlugins(false);
}
public Map<String, Plugin> getInstallablePlugins() {
return state.getInstallablePlugins();
}
public PluginManagerUtil getUtil() {
return util;
}
}
| PluginManager |
java | spring-projects__spring-security | saml2/saml2-service-provider/src/opensaml5Test/java/org/springframework/security/saml2/provider/service/registration/OpenSaml5AssertingPartyMetadataRepositoryTests.java | {
"start": 2843,
"end": 16202
} | class ____ {
private static MetadataDispatcher dispatcher = new MetadataDispatcher()
.addResponse("/entity.xml", readFile("test-metadata.xml"))
.addResponse("/entities.xml", readFile("test-entitiesdescriptor.xml"));
private static MockWebServer web = new MockWebServer();
private static String readFile(String fileName) {
try {
ClassPathResource resource = new ClassPathResource(fileName);
try (BufferedReader reader = new BufferedReader(new InputStreamReader(resource.getInputStream()))) {
return reader.lines().collect(Collectors.joining());
}
}
catch (IOException ex) {
throw new UncheckedIOException(ex);
}
}
@BeforeAll
public static void start() throws Exception {
web.setDispatcher(dispatcher);
web.start();
}
@AfterAll
public static void shutdown() throws Exception {
web.shutdown();
}
@Test
public void withMetadataUrlLocationWhenResolvableThenFindByEntityIdReturns() throws Exception {
AssertingPartyMetadataRepository parties = OpenSaml5AssertingPartyMetadataRepository
.withTrustedMetadataLocation(web.url("/entity.xml").toString())
.build();
AssertingPartyMetadata party = parties.findByEntityId("https://idp.example.com/idp/shibboleth");
assertThat(party.getEntityId()).isEqualTo("https://idp.example.com/idp/shibboleth");
assertThat(party.getSingleSignOnServiceLocation())
.isEqualTo("https://idp.example.com/idp/profile/SAML2/POST/SSO");
assertThat(party.getSingleSignOnServiceBinding()).isEqualTo(Saml2MessageBinding.POST);
assertThat(party.getVerificationX509Credentials()).hasSize(1);
assertThat(party.getEncryptionX509Credentials()).hasSize(1);
}
@Test
public void withMetadataUrlLocationnWhenResolvableThenIteratorReturns() throws Exception {
List<AssertingPartyMetadata> parties = new ArrayList<>();
OpenSaml5AssertingPartyMetadataRepository.withTrustedMetadataLocation(web.url("/entities.xml").toString())
.build()
.iterator()
.forEachRemaining(parties::add);
assertThat(parties).hasSize(2);
assertThat(parties).extracting(AssertingPartyMetadata::getEntityId)
.contains("https://ap.example.org/idp/shibboleth", "https://idp.example.com/idp/shibboleth");
}
@Test
public void withMetadataUrlLocationWhenUnresolvableThenThrowsSaml2Exception() throws Exception {
try (MockWebServer server = new MockWebServer()) {
String url = server.url("/").toString();
server.shutdown();
assertThatExceptionOfType(Saml2Exception.class)
.isThrownBy(() -> OpenSaml5AssertingPartyMetadataRepository.withTrustedMetadataLocation(url).build());
}
}
@Test
public void withMetadataUrlLocationWhenMalformedResponseThenSaml2Exception() throws Exception {
dispatcher.addResponse("/malformed", "malformed");
String url = web.url("/malformed").toString();
assertThatExceptionOfType(Saml2Exception.class)
.isThrownBy(() -> OpenSaml5AssertingPartyMetadataRepository.withTrustedMetadataLocation(url).build());
}
@Test
public void fromMetadataFileLocationWhenResolvableThenFindByEntityIdReturns() {
File file = new File("src/test/resources/test-metadata.xml");
AssertingPartyMetadata party = OpenSaml5AssertingPartyMetadataRepository
.withTrustedMetadataLocation("file:" + file.getAbsolutePath())
.build()
.findByEntityId("https://idp.example.com/idp/shibboleth");
assertThat(party.getEntityId()).isEqualTo("https://idp.example.com/idp/shibboleth");
assertThat(party.getSingleSignOnServiceLocation())
.isEqualTo("https://idp.example.com/idp/profile/SAML2/POST/SSO");
assertThat(party.getSingleSignOnServiceBinding()).isEqualTo(Saml2MessageBinding.POST);
assertThat(party.getVerificationX509Credentials()).hasSize(1);
assertThat(party.getEncryptionX509Credentials()).hasSize(1);
}
@Test
public void fromMetadataFileLocationWhenResolvableThenIteratorReturns() {
File file = new File("src/test/resources/test-entitiesdescriptor.xml");
Collection<AssertingPartyMetadata> parties = new ArrayList<>();
OpenSaml5AssertingPartyMetadataRepository.withTrustedMetadataLocation("file:" + file.getAbsolutePath())
.build()
.iterator()
.forEachRemaining(parties::add);
assertThat(parties).hasSize(2);
assertThat(parties).extracting(AssertingPartyMetadata::getEntityId)
.contains("https://idp.example.com/idp/shibboleth", "https://ap.example.org/idp/shibboleth");
}
@Test
public void withMetadataFileLocationWhenNotFoundThenSaml2Exception() {
assertThatExceptionOfType(Saml2Exception.class).isThrownBy(
() -> OpenSaml5AssertingPartyMetadataRepository.withTrustedMetadataLocation("file:path").build());
}
@Test
public void fromMetadataClasspathLocationWhenResolvableThenFindByEntityIdReturns() {
AssertingPartyMetadata party = OpenSaml5AssertingPartyMetadataRepository
.withTrustedMetadataLocation("classpath:test-entitiesdescriptor.xml")
.build()
.findByEntityId("https://ap.example.org/idp/shibboleth");
assertThat(party.getEntityId()).isEqualTo("https://ap.example.org/idp/shibboleth");
assertThat(party.getSingleSignOnServiceLocation())
.isEqualTo("https://ap.example.org/idp/profile/SAML2/POST/SSO");
assertThat(party.getSingleSignOnServiceBinding()).isEqualTo(Saml2MessageBinding.POST);
assertThat(party.getVerificationX509Credentials()).hasSize(1);
assertThat(party.getEncryptionX509Credentials()).hasSize(1);
}
@Test
public void fromMetadataClasspathLocationWhenResolvableThenIteratorReturns() {
Collection<AssertingPartyMetadata> parties = new ArrayList<>();
OpenSaml5AssertingPartyMetadataRepository.withTrustedMetadataLocation("classpath:test-entitiesdescriptor.xml")
.build()
.iterator()
.forEachRemaining(parties::add);
assertThat(parties).hasSize(2);
assertThat(parties).extracting(AssertingPartyMetadata::getEntityId)
.contains("https://idp.example.com/idp/shibboleth", "https://ap.example.org/idp/shibboleth");
}
@Test
public void withMetadataClasspathLocationWhenNotFoundThenSaml2Exception() {
assertThatExceptionOfType(Saml2Exception.class).isThrownBy(
() -> OpenSaml5AssertingPartyMetadataRepository.withTrustedMetadataLocation("classpath:path").build());
}
@Test
public void withTrustedMetadataLocationWhenMatchingCredentialsThenVerifiesSignature() throws IOException {
RelyingPartyRegistration registration = TestRelyingPartyRegistrations.full().build();
EntityDescriptor descriptor = TestOpenSamlObjects.entityDescriptor(registration);
TestOpenSamlObjects.signed(descriptor, TestSaml2X509Credentials.assertingPartySigningCredential(),
descriptor.getEntityID());
String serialized = serialize(descriptor);
Credential credential = TestOpenSamlObjects
.getSigningCredential(TestSaml2X509Credentials.relyingPartyVerifyingCredential(), descriptor.getEntityID());
String endpoint = "/" + UUID.randomUUID().toString();
dispatcher.addResponse(endpoint, serialized);
AssertingPartyMetadataRepository parties = OpenSaml5AssertingPartyMetadataRepository
.withTrustedMetadataLocation(web.url(endpoint).toString())
.verificationCredentials((c) -> c.add(credential))
.build();
assertThat(parties.findByEntityId(registration.getAssertingPartyMetadata().getEntityId())).isNotNull();
}
@Test
public void withTrustedMetadataLocationWhenMismatchingCredentialsThenSaml2Exception() throws IOException {
RelyingPartyRegistration registration = TestRelyingPartyRegistrations.full().build();
EntityDescriptor descriptor = TestOpenSamlObjects.entityDescriptor(registration);
TestOpenSamlObjects.signed(descriptor, TestSaml2X509Credentials.relyingPartySigningCredential(),
descriptor.getEntityID());
String serialized = serialize(descriptor);
Credential credential = TestOpenSamlObjects
.getSigningCredential(TestSaml2X509Credentials.relyingPartyVerifyingCredential(), descriptor.getEntityID());
String endpoint = "/" + UUID.randomUUID().toString();
dispatcher.addResponse(endpoint, serialized);
assertThatExceptionOfType(Saml2Exception.class).isThrownBy(() -> OpenSaml5AssertingPartyMetadataRepository
.withTrustedMetadataLocation(web.url(endpoint).toString())
.verificationCredentials((c) -> c.add(credential))
.build());
}
@Test
public void withTrustedMetadataLocationWhenNoCredentialsThenSkipsVerifySignature() throws IOException {
RelyingPartyRegistration registration = TestRelyingPartyRegistrations.full().build();
EntityDescriptor descriptor = TestOpenSamlObjects.entityDescriptor(registration);
TestOpenSamlObjects.signed(descriptor, TestSaml2X509Credentials.assertingPartySigningCredential(),
descriptor.getEntityID());
String serialized = serialize(descriptor);
String endpoint = "/" + UUID.randomUUID().toString();
dispatcher.addResponse(endpoint, serialized);
AssertingPartyMetadataRepository parties = OpenSaml5AssertingPartyMetadataRepository
.withTrustedMetadataLocation(web.url(endpoint).toString())
.build();
assertThat(parties.findByEntityId(registration.getAssertingPartyMetadata().getEntityId())).isNotNull();
}
@Test
public void withTrustedMetadataLocationWhenCustomResourceLoaderThenUses() {
ResourceLoader resourceLoader = mock(ResourceLoader.class);
given(resourceLoader.getResource(any())).willReturn(new ClassPathResource("test-metadata.xml"));
AssertingPartyMetadata party = OpenSaml5AssertingPartyMetadataRepository
.withTrustedMetadataLocation("classpath:wrong")
.resourceLoader(resourceLoader)
.build()
.iterator()
.next();
assertThat(party.getEntityId()).isEqualTo("https://idp.example.com/idp/shibboleth");
assertThat(party.getSingleSignOnServiceLocation())
.isEqualTo("https://idp.example.com/idp/profile/SAML2/POST/SSO");
assertThat(party.getSingleSignOnServiceBinding()).isEqualTo(Saml2MessageBinding.POST);
assertThat(party.getVerificationX509Credentials()).hasSize(1);
assertThat(party.getEncryptionX509Credentials()).hasSize(1);
verify(resourceLoader).getResource(any());
}
@Test
public void constructorWhenNoIndexAndNoIteratorThenException() {
MetadataResolver resolver = mock(MetadataResolver.class);
assertThatExceptionOfType(IllegalArgumentException.class)
.isThrownBy(() -> new OpenSaml5AssertingPartyMetadataRepository(resolver));
}
@Test
public void constructorWhenIterableResolverThenUses() {
RelyingPartyRegistration registration = TestRelyingPartyRegistrations.full().build();
EntityDescriptor descriptor = TestOpenSamlObjects.entityDescriptor(registration);
MetadataResolver resolver = mock(MetadataResolver.class,
withSettings().extraInterfaces(IterableMetadataSource.class));
given(((IterableMetadataSource) resolver).iterator()).willReturn(List.of(descriptor).iterator());
AssertingPartyMetadataRepository parties = new OpenSaml5AssertingPartyMetadataRepository(resolver);
parties.iterator()
.forEachRemaining((p) -> assertThat(p.getEntityId())
.isEqualTo(registration.getAssertingPartyMetadata().getEntityId()));
verify(((IterableMetadataSource) resolver)).iterator();
}
@Test
public void constructorWhenIndexedResolverThenUses() throws Exception {
FilesystemMetadataResolver resolver = new FilesystemMetadataResolver(
new ClassPathResource("test-metadata.xml").getFile());
resolver.setIndexes(Set.of(new RoleMetadataIndex()));
resolver.setId("id");
resolver.setParserPool(XMLObjectProviderRegistrySupport.getParserPool());
resolver.initialize();
MetadataResolver spied = spy(resolver);
AssertingPartyMetadataRepository parties = new OpenSaml5AssertingPartyMetadataRepository(spied);
parties.iterator()
.forEachRemaining((p) -> assertThat(p.getEntityId()).isEqualTo("https://idp.example.com/idp/shibboleth"));
verify(spied).resolve(any());
}
@Test
public void withMetadataLocationWhenNoCredentialsThenException() {
assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(
() -> OpenSaml5AssertingPartyMetadataRepository.withMetadataLocation("classpath:test-metadata.xml")
.build());
}
@Test
public void withMetadataLocationWhenMatchingCredentialsThenVerifiesSignature() throws IOException {
RelyingPartyRegistration registration = TestRelyingPartyRegistrations.full().build();
EntityDescriptor descriptor = TestOpenSamlObjects.entityDescriptor(registration);
TestOpenSamlObjects.signed(descriptor, TestSaml2X509Credentials.assertingPartySigningCredential(),
descriptor.getEntityID());
String serialized = serialize(descriptor);
Credential credential = TestOpenSamlObjects
.getSigningCredential(TestSaml2X509Credentials.relyingPartyVerifyingCredential(), descriptor.getEntityID());
String endpoint = "/" + UUID.randomUUID().toString();
dispatcher.addResponse(endpoint, serialized);
AssertingPartyMetadataRepository parties = OpenSaml5AssertingPartyMetadataRepository
.withMetadataLocation(web.url(endpoint).toString())
.verificationCredentials((c) -> c.add(credential))
.build();
assertThat(parties.findByEntityId(registration.getAssertingPartyMetadata().getEntityId())).isNotNull();
}
private static String serialize(XMLObject object) {
try {
Marshaller marshaller = XMLObjectProviderRegistrySupport.getMarshallerFactory().getMarshaller(object);
Element element = marshaller.marshall(object);
return SerializeSupport.nodeToString(element);
}
catch (MarshallingException ex) {
throw new Saml2Exception(ex);
}
}
private static final | OpenSaml5AssertingPartyMetadataRepositoryTests |
java | quarkusio__quarkus | extensions/resteasy-classic/resteasy/deployment/src/test/java/io/quarkus/resteasy/test/multipart/MultipartResourceTest.java | {
"start": 369,
"end": 1020
} | class ____ {
@RegisterExtension
static QuarkusUnitTest test = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(MultipartResource.class));
@Test
public void testHelloEndpoint() {
Map<String, String> map = new HashMap<>();
map.put("test", "value");
given()
.formParams(map)
.header("Expect", "100-continue")
.contentType(ContentType.URLENC)
.when().post("/multipart/")
.then()
.statusCode(200)
.body(is("[test:value]"));
}
}
| MultipartResourceTest |
java | hibernate__hibernate-orm | hibernate-spatial/src/main/java/org/hibernate/spatial/dialect/oracle/SDOMethodDescriptor.java | {
"start": 516,
"end": 1471
} | class ____ extends OracleSpatialFunction {
public SDOMethodDescriptor(
String name,
boolean useParenthesesWhenNoArgs,
ArgumentsValidator argValidator,
FunctionReturnTypeResolver returnTypeResolver) {
super( name, useParenthesesWhenNoArgs, argValidator, returnTypeResolver );
}
public SDOMethodDescriptor(
String name,
ArgumentsValidator argValidator,
FunctionReturnTypeResolver returnTypeResolver) {
this( name, true, argValidator, returnTypeResolver );
}
@Override
public void render(
SqlAppender sqlAppender,
List<? extends SqlAstNode> sqlAstArguments,
ReturnableType<?> returnType,
SqlAstTranslator<?> walker) {
sqlAstArguments.get(0).accept( walker );
sqlAppender.appendSql( "." );
sqlAppender.appendSql( getName() );
//First argument is target of the method invocation
if (this.alwaysIncludesParentheses() || sqlAstArguments.size() > 1) {
sqlAppender.append( "()" );
}
}
}
| SDOMethodDescriptor |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/CanonicalDurationTest.java | {
"start": 3037,
"end": 3700
} | class ____ {
static final int CONST = 86400;
{
Duration.standardDays(1);
org.joda.time.Duration.standardDays(1);
Duration.standardSeconds(CONST);
Duration zero = Duration.ZERO;
Duration.standardDays(1);
}
}
""")
.doTest(TEXT_MATCH);
}
@Test
public void refactoringJavaTimeStaticImport() {
helper
.addInputLines(
"in/A.java",
"""
package a;
import static java.time.Duration.ofSeconds;
import java.time.Duration;
public | A |
java | apache__kafka | streams/src/main/java/org/apache/kafka/streams/kstream/internals/foreignkeyjoin/ForeignTableJoinProcessorSupplier.java | {
"start": 2114,
"end": 3463
} | class ____<KLeft, KRight, VRight>
implements ProcessorSupplier<KRight, Change<VRight>, KLeft, SubscriptionResponseWrapper<VRight>> {
private static final Logger LOG = LoggerFactory.getLogger(ForeignTableJoinProcessorSupplier.class);
private final StoreFactory subscriptionStoreFactory;
private final CombinedKeySchema<KRight, KLeft> keySchema;
private boolean useVersionedSemantics = false;
public ForeignTableJoinProcessorSupplier(final StoreFactory subscriptionStoreFactory,
final CombinedKeySchema<KRight, KLeft> keySchema) {
this.subscriptionStoreFactory = subscriptionStoreFactory;
this.keySchema = keySchema;
}
@Override
public Set<StoreBuilder<?>> stores() {
return Collections.singleton(new FactoryWrappingStoreBuilder<>(subscriptionStoreFactory));
}
@Override
public Processor<KRight, Change<VRight>, KLeft, SubscriptionResponseWrapper<VRight>> get() {
return new KTableKTableJoinProcessor();
}
public void setUseVersionedSemantics(final boolean useVersionedSemantics) {
this.useVersionedSemantics = useVersionedSemantics;
}
// VisibleForTesting
public boolean isUseVersionedSemantics() {
return useVersionedSemantics;
}
private final | ForeignTableJoinProcessorSupplier |
java | spring-projects__spring-security | oauth2/oauth2-authorization-server/src/test/java/org/springframework/security/oauth2/server/authorization/authentication/OAuth2AuthorizationCodeRequestAuthenticationTokenTests.java | {
"start": 1480,
"end": 6256
} | class ____ {
private static final String AUTHORIZATION_URI = "https://provider.com/oauth2/authorize";
private static final RegisteredClient REGISTERED_CLIENT = TestRegisteredClients.registeredClient().build();
private static final TestingAuthenticationToken PRINCIPAL = new TestingAuthenticationToken("principalName",
"password");
private static final OAuth2AuthorizationCode AUTHORIZATION_CODE = new OAuth2AuthorizationCode("code", Instant.now(),
Instant.now().plus(5, ChronoUnit.MINUTES));
@Test
public void constructorWhenAuthorizationUriNotProvidedThenThrowIllegalArgumentException() {
assertThatExceptionOfType(IllegalArgumentException.class)
.isThrownBy(() -> new OAuth2AuthorizationCodeRequestAuthenticationToken(null,
REGISTERED_CLIENT.getClientId(), PRINCIPAL, null, null, (Set<String>) null, null))
.withMessage("authorizationUri cannot be empty");
}
@Test
public void constructorWhenClientIdNotProvidedThenThrowIllegalArgumentException() {
assertThatExceptionOfType(IllegalArgumentException.class)
.isThrownBy(() -> new OAuth2AuthorizationCodeRequestAuthenticationToken(AUTHORIZATION_URI, null, PRINCIPAL,
null, null, (Set<String>) null, null))
.withMessage("clientId cannot be empty");
}
@Test
public void constructorWhenPrincipalNotProvidedThenThrowIllegalArgumentException() {
assertThatExceptionOfType(IllegalArgumentException.class)
.isThrownBy(() -> new OAuth2AuthorizationCodeRequestAuthenticationToken(AUTHORIZATION_URI,
REGISTERED_CLIENT.getClientId(), null, null, null, (Set<String>) null, null))
.withMessage("principal cannot be null");
}
@Test
public void constructorWhenAuthorizationCodeNotProvidedThenThrowIllegalArgumentException() {
assertThatExceptionOfType(IllegalArgumentException.class)
.isThrownBy(() -> new OAuth2AuthorizationCodeRequestAuthenticationToken(AUTHORIZATION_URI,
REGISTERED_CLIENT.getClientId(), PRINCIPAL, null, null, null, (Set<String>) null))
.withMessage("authorizationCode cannot be null");
}
@Test
public void constructorWhenAuthorizationRequestThenValuesAreSet() {
String clientId = REGISTERED_CLIENT.getClientId();
String redirectUri = REGISTERED_CLIENT.getRedirectUris().iterator().next();
String state = "state";
Set<String> requestedScopes = REGISTERED_CLIENT.getScopes();
Map<String, Object> additionalParameters = Collections.singletonMap("param1", "value1");
OAuth2AuthorizationCodeRequestAuthenticationToken authentication = new OAuth2AuthorizationCodeRequestAuthenticationToken(
AUTHORIZATION_URI, clientId, PRINCIPAL, redirectUri, state, requestedScopes, additionalParameters);
assertThat(authentication.getPrincipal()).isEqualTo(PRINCIPAL);
assertThat(authentication.getCredentials()).isEqualTo("");
assertThat(authentication.getAuthorities()).isEmpty();
assertThat(authentication.getAuthorizationUri()).isEqualTo(AUTHORIZATION_URI);
assertThat(authentication.getClientId()).isEqualTo(clientId);
assertThat(authentication.getRedirectUri()).isEqualTo(redirectUri);
assertThat(authentication.getState()).isEqualTo(state);
assertThat(authentication.getScopes()).containsExactlyInAnyOrderElementsOf(requestedScopes);
assertThat(authentication.getAdditionalParameters()).containsExactlyInAnyOrderEntriesOf(additionalParameters);
assertThat(authentication.getAuthorizationCode()).isNull();
assertThat(authentication.isAuthenticated()).isFalse();
}
@Test
public void constructorWhenAuthorizationResponseThenValuesAreSet() {
String clientId = REGISTERED_CLIENT.getClientId();
String redirectUri = REGISTERED_CLIENT.getRedirectUris().iterator().next();
String state = "state";
Set<String> authorizedScopes = REGISTERED_CLIENT.getScopes();
OAuth2AuthorizationCodeRequestAuthenticationToken authentication = new OAuth2AuthorizationCodeRequestAuthenticationToken(
AUTHORIZATION_URI, clientId, PRINCIPAL, AUTHORIZATION_CODE, redirectUri, state, authorizedScopes);
assertThat(authentication.getPrincipal()).isEqualTo(PRINCIPAL);
assertThat(authentication.getCredentials()).isEqualTo("");
assertThat(authentication.getAuthorities()).isEmpty();
assertThat(authentication.getAuthorizationUri()).isEqualTo(AUTHORIZATION_URI);
assertThat(authentication.getClientId()).isEqualTo(clientId);
assertThat(authentication.getRedirectUri()).isEqualTo(redirectUri);
assertThat(authentication.getState()).isEqualTo(state);
assertThat(authentication.getScopes()).containsExactlyInAnyOrderElementsOf(authorizedScopes);
assertThat(authentication.getAdditionalParameters()).isEmpty();
assertThat(authentication.getAuthorizationCode()).isEqualTo(AUTHORIZATION_CODE);
assertThat(authentication.isAuthenticated()).isTrue();
}
}
| OAuth2AuthorizationCodeRequestAuthenticationTokenTests |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/jsontype/SealedTypesWithJsonTypeInfoSimpleClassName4061Test.java | {
"start": 4159,
"end": 8709
} | class ____ has contains dollar sign
@Test
public void testMinimalInnerClass() throws Exception {
String jsonStr =
a2q("{'@c':'.SealedTypesWithJsonTypeInfoSimpleClassName4061Test$MinimalInnerSub4061A'}");
// ser
assertEquals(jsonStr, MAPPER.writeValueAsString(new MinimalInnerSub4061A()));
// deser <- breaks!
MinimalInnerSuper4061 bean = MAPPER.readValue(jsonStr, MinimalInnerSuper4061.class);
assertInstanceOf(MinimalInnerSuper4061.class, bean);
assertNotNull(bean);
}
// Basic : non-inner class, without dollar sign
@Test
public void testBasicClass() throws Exception {
String jsonStr = a2q("{'@type':'BasicSub4061A'}");
// ser
assertEquals(jsonStr, MAPPER.writeValueAsString(new BasicSub4061A()));
// deser
BasicSuper4061 bean = MAPPER.readValue(jsonStr, BasicSuper4061.class);
assertInstanceOf(BasicSuper4061.class, bean);
assertInstanceOf(BasicSub4061A.class, bean);
}
// Mixed SimpleClassName : parent as inner, subtype as basic
@Test
public void testMixedClass() throws Exception {
String jsonStr = a2q("{'@type':'MixedSub4061AForSealedClasses'}");
// ser
assertEquals(jsonStr, MAPPER.writeValueAsString(new MixedSub4061AForSealedClasses()));
// deser
MixedSuper4061 bean = MAPPER.readValue(jsonStr, MixedSuper4061.class);
assertInstanceOf(MixedSuper4061.class, bean);
assertInstanceOf(MixedSub4061AForSealedClasses.class, bean);
}
// Mixed MinimalClass : parent as inner, subtype as basic
@Test
public void testMixedMinimalClass() throws Exception {
String jsonStr = a2q("{'@c':'.MixedMinimalSub4061AForSealedClasses'}");
// ser
assertEquals(jsonStr, MAPPER.writeValueAsString(new MixedMinimalSub4061AForSealedClasses()));
// deser
MixedMinimalSuper4061 bean = MAPPER.readValue(jsonStr, MixedMinimalSuper4061.class);
assertInstanceOf(MixedMinimalSuper4061.class, bean);
assertInstanceOf(MixedMinimalSub4061AForSealedClasses.class, bean);
}
@Test
public void testPolymorphicNewObject() throws Exception {
String jsonStr = "{\"child\": { \"@type\": \"MergeChildA\", \"name\": \"I'm child A\" }}";
Root root = MAPPER.readValue(jsonStr, Root.class);
assertTrue(root.child instanceof MergeChildA);
assertEquals("I'm child A", ((MergeChildA) root.child).name);
}
// case insenstive type name
@Test
public void testPolymorphicNewObjectCaseInsensitive() throws Exception {
String jsonStr = "{\"child\": { \"@type\": \"mergechilda\", \"name\": \"I'm child A\" }}";
ObjectMapper mapper =
jsonMapperBuilder().enable(MapperFeature.ACCEPT_CASE_INSENSITIVE_VALUES).build();
Root root = mapper.readValue(jsonStr, Root.class);
assertTrue(root.child instanceof MergeChildA);
assertEquals("I'm child A", ((MergeChildA) root.child).name);
}
@Test
public void testPolymorphicNewObjectUnknownTypeId() throws Exception {
try {
MAPPER.readValue("{\"child\": { \"@type\": \"UnknownChildA\", \"name\": \"I'm child A\" }}",
Root.class);
} catch (InvalidTypeIdException e) {
verifyException(e, "Could not resolve type id 'UnknownChildA' as a subtype of");
}
}
@Test
public void testAliasWithPolymorphic() throws Exception {
String jsonStr = a2q("{'value': ['ab', {'nm' : 'Bob', 'A' : 17} ] }");
PolyWrapperForAlias value = MAPPER.readValue(jsonStr, PolyWrapperForAlias.class);
assertNotNull(value.value);
AliasBean bean = (AliasBean) value.value;
assertEquals("Bob", bean.name);
assertEquals(17, bean._a);
}
@Test
public void testGetMechanism() {
final DeserializationConfig config = MAPPER.deserializationConfig();
JavaType javaType = config.constructType(InnerSub4061B.class);
List<NamedType> namedTypes = new ArrayList<>();
namedTypes.add(new NamedType(InnerSub4061A.class));
namedTypes.add(new NamedType(InnerSub4061B.class));
SimpleNameIdResolver idResolver =
SimpleNameIdResolver.construct(config, javaType, namedTypes, false, true);
assertEquals(JsonTypeInfo.Id.SIMPLE_NAME, idResolver.getMechanism());
}
@Test
public void testDuplicateNameLastOneWins() throws Exception {
String jsonStr = a2q("{'@type':'DuplicateSubClassForSealedClasses'}");
// deser
DuplicateSuperClass bean = MAPPER.readValue(jsonStr, DuplicateSuperClass.class);
assertInstanceOf(tools.jackson.databind.jsontype.DuplicateSubClassForSealedClasses.class, bean);
}
}
@JsonTypeInfo(use = JsonTypeInfo.Id.SIMPLE_NAME)
sealed | that |
java | FasterXML__jackson-core | src/test/java/tools/jackson/core/unittest/json/async/AsyncLocationTest.java | {
"start": 424,
"end": 1904
} | class ____ extends AsyncTestBase
{
private final JsonFactory DEFAULT_F = new JsonFactory();
// for [core#531]
@Test
public void testLocationOffsets() throws Exception
{
JsonParser parser = DEFAULT_F.createNonBlockingByteArrayParser(ObjectReadContext.empty());
ByteArrayFeeder feeder = (ByteArrayFeeder) parser.nonBlockingInputFeeder();
byte[] input = utf8Bytes("[[[");
feeder.feedInput(input, 2, 3);
assertEquals(JsonToken.START_ARRAY, parser.nextToken());
assertEquals(1, parser.currentLocation().getByteOffset());
assertEquals(1, parser.currentTokenLocation().getByteOffset());
assertEquals(1, parser.currentLocation().getLineNr());
assertEquals(1, parser.currentTokenLocation().getLineNr());
assertEquals(2, parser.currentLocation().getColumnNr());
assertEquals(1, parser.currentTokenLocation().getColumnNr());
feeder.feedInput(input, 0, 1);
assertEquals(JsonToken.START_ARRAY, parser.nextToken());
assertEquals(2, parser.currentLocation().getByteOffset());
assertEquals(2, parser.currentTokenLocation().getByteOffset());
assertEquals(1, parser.currentLocation().getLineNr());
assertEquals(1, parser.currentTokenLocation().getLineNr());
assertEquals(3, parser.currentLocation().getColumnNr());
assertEquals(2, parser.currentTokenLocation().getColumnNr());
parser.close();
}
}
| AsyncLocationTest |
java | apache__camel | components/camel-coap/src/test/java/org/apache/camel/coap/CoAPRestComponentTLSTest.java | {
"start": 1938,
"end": 5236
} | class ____ extends CoAPRestComponentTestBase {
@Override
protected String getProtocol() {
return "coaps";
}
@Override
protected void decorateClient(CoapClient client) throws GeneralSecurityException, IOException {
DtlsConnectorConfig.Builder builder = new DtlsConnectorConfig.Builder(Configuration.getStandard());
builder.set(DtlsConfig.DTLS_ROLE, DtlsRole.CLIENT_ONLY);
KeyStoreParameters truststoreParameters = new KeyStoreParameters();
truststoreParameters.setCamelContext(context);
truststoreParameters.setResource("truststore.jks");
truststoreParameters.setPassword("storepass");
KeyStore trustStore = truststoreParameters.createKeyStore();
X509Certificate[] certs
= new X509Certificate[] { (X509Certificate) trustStore.getCertificate(trustStore.aliases().nextElement()) };
NewAdvancedCertificateVerifier trust = StaticNewAdvancedCertificateVerifier
.builder()
.setTrustedCertificates(certs)
.build();
builder.setAdvancedCertificateVerifier(trust);
CoapEndpoint.Builder coapBuilder = new CoapEndpoint.Builder();
coapBuilder.setConnector(new DTLSConnector(builder.build()));
client.setEndpoint(coapBuilder.build());
}
@Override
protected void decorateRestConfiguration(RestConfigurationDefinition restConfig) {
KeyStoreParameters keystoreParameters = new KeyStoreParameters();
keystoreParameters.setCamelContext(context);
keystoreParameters.setResource("service.jks");
keystoreParameters.setPassword("security");
SSLContextParameters serviceSSLContextParameters = new SSLContextParameters();
serviceSSLContextParameters.setCamelContext(context);
KeyManagersParameters serviceSSLKeyManagers = new KeyManagersParameters();
serviceSSLKeyManagers.setCamelContext(context);
serviceSSLKeyManagers.setKeyPassword("security");
serviceSSLKeyManagers.setKeyStore(keystoreParameters);
serviceSSLContextParameters.setKeyManagers(serviceSSLKeyManagers);
KeyStoreParameters truststoreParameters = new KeyStoreParameters();
truststoreParameters.setCamelContext(context);
truststoreParameters.setResource("truststore.jks");
truststoreParameters.setPassword("storepass");
SSLContextParameters clientSSLContextParameters = new SSLContextParameters();
clientSSLContextParameters.setCamelContext(context);
TrustManagersParameters clientSSLTrustManagers = new TrustManagersParameters();
clientSSLTrustManagers.setCamelContext(context);
clientSSLTrustManagers.setKeyStore(truststoreParameters);
clientSSLContextParameters.setTrustManagers(clientSSLTrustManagers);
context.getRegistry().bind("serviceSSLContextParameters", serviceSSLContextParameters);
context.getRegistry().bind("clientSSLContextParameters", clientSSLContextParameters);
restConfig.endpointProperty("sslContextParameters", "#serviceSSLContextParameters");
}
@Override
protected String getClientURI() {
return super.getClientURI() + "?sslContextParameters=#clientSSLContextParameters";
}
}
| CoAPRestComponentTLSTest |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/odps/OdpsIfTest.java | {
"start": 121,
"end": 379
} | class ____ extends TestCase {
public void test_if() throws Exception {
String sql = "select sum(if(a > 0, 1, 0)) from t1";
assertEquals("SELECT sum(IF(a > 0, 1, 0))"
+ "\nFROM t1", SQLUtils.formatOdps(sql));
}
}
| OdpsIfTest |
java | apache__kafka | clients/src/main/java/org/apache/kafka/clients/consumer/internals/StreamsRebalanceListenerInvoker.java | {
"start": 1411,
"end": 4222
} | class ____ {
private final Logger log;
private final StreamsRebalanceData streamsRebalanceData;
private Optional<StreamsRebalanceListener> listener;
StreamsRebalanceListenerInvoker(LogContext logContext, StreamsRebalanceData streamsRebalanceData) {
this.log = logContext.logger(getClass());
this.listener = Optional.empty();
this.streamsRebalanceData = streamsRebalanceData;
}
public void setRebalanceListener(StreamsRebalanceListener streamsRebalanceListener) {
Objects.requireNonNull(streamsRebalanceListener, "StreamsRebalanceListener cannot be null");
this.listener = Optional.of(streamsRebalanceListener);
}
public Exception invokeAllTasksRevoked() {
if (listener.isEmpty()) {
return null;
}
return invokeTasksRevoked(streamsRebalanceData.reconciledAssignment().activeTasks());
}
public Exception invokeTasksAssigned(final StreamsRebalanceData.Assignment assignment) {
if (listener.isEmpty()) {
return null;
}
log.info("Invoking tasks assigned callback for new assignment: {}", assignment);
try {
listener.get().onTasksAssigned(assignment);
} catch (WakeupException | InterruptException e) {
throw e;
} catch (Exception e) {
log.error(
"Streams rebalance listener failed on invocation of onTasksAssigned for tasks {}",
assignment,
e
);
return e;
}
return null;
}
public Exception invokeTasksRevoked(final Set<StreamsRebalanceData.TaskId> tasks) {
if (listener.isEmpty()) {
return null;
}
log.info("Invoking task revoked callback for revoked active tasks {}", tasks);
try {
listener.get().onTasksRevoked(tasks);
} catch (WakeupException | InterruptException e) {
throw e;
} catch (Exception e) {
log.error(
"Streams rebalance listener failed on invocation of onTasksRevoked for tasks {}",
tasks,
e
);
return e;
}
return null;
}
public Exception invokeAllTasksLost() {
if (listener.isEmpty()) {
return null;
}
log.info("Invoking tasks lost callback for all tasks");
try {
listener.get().onAllTasksLost();
} catch (WakeupException | InterruptException e) {
throw e;
} catch (Exception e) {
log.error(
"Streams rebalance listener failed on invocation of onTasksLost.",
e
);
return e;
}
return null;
}
}
| StreamsRebalanceListenerInvoker |
java | resilience4j__resilience4j | resilience4j-rxjava2/src/main/java/io/github/resilience4j/circuitbreaker/operator/ObserverCircuitBreaker.java | {
"start": 1709,
"end": 2707
} | class ____ extends AbstractObserver<T> {
private final long start;
CircuitBreakerObserver(Observer<? super T> downstreamObserver) {
super(downstreamObserver);
this.start = circuitBreaker.getCurrentTimestamp();
}
@Override
protected void hookOnError(Throwable e) {
circuitBreaker.onError(circuitBreaker.getCurrentTimestamp() - start, circuitBreaker.getTimestampUnit(), e);
}
@Override
protected void hookOnComplete() {
circuitBreaker.onSuccess(circuitBreaker.getCurrentTimestamp() - start, circuitBreaker.getTimestampUnit());
}
@Override
protected void hookOnCancel() {
if (eventWasEmitted.get()) {
circuitBreaker.onSuccess(circuitBreaker.getCurrentTimestamp() - start, circuitBreaker.getTimestampUnit());
} else {
circuitBreaker.releasePermission();
}
}
}
}
| CircuitBreakerObserver |
java | apache__camel | components/camel-ignite/src/main/java/org/apache/camel/component/ignite/cache/IgniteCacheOperation.java | {
"start": 911,
"end": 1033
} | enum ____ {
GET,
PUT,
REMOVE,
SIZE,
REBALANCE,
QUERY,
CLEAR,
REPLACE,
}
| IgniteCacheOperation |
java | google__error-prone | core/src/test/java/com/google/errorprone/fixes/SuggestedFixesTest.java | {
"start": 73237,
"end": 74187
} | class ____ unchanged.
" java.util.List list;",
"}")
.doTest();
}
private static Description addSuppressWarningsIfCompilationSucceeds(
ClassTree tree,
VisitorState state,
boolean onlyInSameCompilationUnit,
Function<? super Fix, Description> toDescriptionFn) {
return Optional.of(SuggestedFix.prefixWith(tree, "@SuppressWarnings(\"foobar\") "))
.filter(
fix ->
SuggestedFixes.compilesWithFix(
fix,
state,
ImmutableList.of("-Xlint:unchecked,rawtypes", "-Werror"),
onlyInSameCompilationUnit))
.map(toDescriptionFn)
.orElse(NO_MATCH);
}
/** Test checker that casts return expressions to int. */
@BugPattern(
name = "AddSuppressWarningsIfCompilationSucceedsInAllCompilationUnits",
summary = "",
severity = ERROR)
public static final | is |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Expression.java | {
"start": 1075,
"end": 3765
} | interface ____ {
/**
* Set the options for this expression, called once before processing any
* items.
* @param options options.
* @throws IOException raised on errors performing I/O.
*/
public void setOptions(FindOptions options) throws IOException;
/**
* Prepares the expression for execution, called once after setting options
* and before processing any options.
* @throws IOException raised on errors performing I/O.
*/
public void prepare() throws IOException;
/**
* Apply the expression to the specified item, called once for each item.
*
* @param item {@link PathData} item to be processed
* @param depth distance of the item from the command line argument
* @return {@link Result} of applying the expression to the item
* @throws IOException raised on errors performing I/O.
*/
public Result apply(PathData item, int depth) throws IOException;
/**
* Finishes the expression, called once after processing all items.
*
* @throws IOException raised on errors performing I/O.
*/
public void finish() throws IOException;
/**
* Returns brief usage instructions for this expression. Multiple items should
* be returned if there are multiple ways to use this expression.
*
* @return array of usage instructions
*/
public String[] getUsage();
/**
* Returns a description of the expression for use in help. Multiple lines
* should be returned array items. Lines should be formated to 60 characters
* or less.
*
* @return array of description lines
*/
public String[] getHelp();
/**
* Indicates whether this expression performs an action, i.e. provides output
* back to the user.
* @return if is action true, not false.
*/
public boolean isAction();
/**
* Identifies the expression as an operator rather than a primary.
* @return if is operator true, not false.
*/
public boolean isOperator();
/**
* Returns the precedence of this expression
* (only applicable to operators).
*
* @return precedence.
*/
public int getPrecedence();
/**
* Adds children to this expression. Children are popped from the head of the
* deque.
*
* @param expressions
* deque of expressions from which to take the children
*/
public void addChildren(Deque<Expression> expressions);
/**
* Adds arguments to this expression. Arguments are popped from the head of
* the deque and added to the front of the child list, ie last child added is
* the first evaluated.
* @param args deque of arguments from which to take expression arguments
*/
public void addArguments(Deque<String> args);
}
| Expression |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/action/admin/cluster/node/usage/NodesUsageRequest.java | {
"start": 601,
"end": 2033
} | class ____ extends BaseNodesRequest {
private boolean restActions;
private boolean aggregations;
/**
* Get usage from nodes based on the nodes ids specified. If none are
* passed, usage for all nodes will be returned.
*/
public NodesUsageRequest(String... nodesIds) {
super(nodesIds);
}
/**
* Sets all the request flags.
*/
public NodesUsageRequest all() {
this.restActions = true;
this.aggregations = true;
return this;
}
/**
* Clears all the request flags.
*/
public NodesUsageRequest clear() {
this.restActions = false;
return this;
}
/**
* Should the node rest actions usage statistics be returned.
*/
public boolean restActions() {
return this.restActions;
}
/**
* Should the node rest actions usage statistics be returned.
*/
public NodesUsageRequest restActions(boolean restActions) {
this.restActions = restActions;
return this;
}
/**
* Should the node rest actions usage statistics be returned.
*/
public boolean aggregations() {
return this.aggregations;
}
/**
* Should the node rest actions usage statistics be returned.
*/
public NodesUsageRequest aggregations(boolean aggregations) {
this.aggregations = aggregations;
return this;
}
}
| NodesUsageRequest |
java | quarkusio__quarkus | extensions/micrometer/deployment/src/main/java/io/quarkus/micrometer/deployment/binder/StorkBinderProcessor.java | {
"start": 331,
"end": 679
} | class ____ {
static final String OBSERVABLE_CLIENT = "io.smallrye.stork.api.Service";
static final String METRICS_BEAN_CLASS = "io.quarkus.micrometer.runtime.binder.stork.StorkObservationCollectorBean";
static final Class<?> OBSERVABLE_CLIENT_CLASS = MicrometerRecorder.getClassForName(OBSERVABLE_CLIENT);
static | StorkBinderProcessor |
java | spring-projects__spring-framework | spring-web/src/main/java/org/springframework/http/converter/HttpMessageConverter.java | {
"start": 1338,
"end": 1680
} | class ____ test for readability
* @param mediaType the media type to read (can be {@code null} if not specified);
* typically the value of a {@code Content-Type} header.
* @return {@code true} if readable; {@code false} otherwise
*/
boolean canRead(Class<?> clazz, @Nullable MediaType mediaType);
/**
* Indicates whether the given | to |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/buildextension/beans/SyntheticInjectionPointInstanceTest.java | {
"start": 2315,
"end": 2510
} | class ____ {
static final String STR = "I'm still here!";
@Override
public String toString() {
return STR;
}
}
public static | SomethingRemovable |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/constraint/TestBatchedRequestsIterators.java | {
"start": 1387,
"end": 3268
} | class ____ {
@Test
public void testSerialIterator() throws Exception {
List<SchedulingRequest> schedulingRequestList =
Arrays.asList(schedulingRequest(1, 1, 1, 512, "foo"),
schedulingRequest(1, 2, 1, 512, "foo"),
schedulingRequest(1, 3, 1, 512, "foo"),
schedulingRequest(1, 4, 1, 512, "foo"));
BatchedRequests batchedRequests = new BatchedRequests(
BatchedRequests.IteratorType.SERIAL, null, schedulingRequestList, 1);
Iterator<SchedulingRequest> requestIterator = batchedRequests.iterator();
long prevAllocId = 0;
while (requestIterator.hasNext()) {
SchedulingRequest request = requestIterator.next();
assertTrue(request.getAllocationRequestId() > prevAllocId);
prevAllocId = request.getAllocationRequestId();
}
}
@Test
public void testPopularTagsIterator() throws Exception {
List<SchedulingRequest> schedulingRequestList =
Arrays.asList(schedulingRequest(1, 1, 1, 512, "pri", "foo"),
schedulingRequest(1, 2, 1, 512, "bar"),
schedulingRequest(1, 3, 1, 512, "foo", "pri"),
schedulingRequest(1, 4, 1, 512, "test"),
schedulingRequest(1, 5, 1, 512, "pri", "bar"));
BatchedRequests batchedRequests =
new BatchedRequests(BatchedRequests.IteratorType.POPULAR_TAGS, null,
schedulingRequestList, 1);
Iterator<SchedulingRequest> requestIterator = batchedRequests.iterator();
long recCcount = 0;
while (requestIterator.hasNext()) {
SchedulingRequest request = requestIterator.next();
if (recCcount < 3) {
assertTrue(request.getAllocationTags().contains("pri"));
} else {
assertTrue(request.getAllocationTags().contains("bar")
|| request.getAllocationTags().contains("test"));
}
recCcount++;
}
}
} | TestBatchedRequestsIterators |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/stateless/events/EntityA.java | {
"start": 542,
"end": 747
} | class ____ {
@Id
@GeneratedValue(strategy = GenerationType.AUTO)
@Column(name = "ID")
Integer id;
@OneToMany
@JoinColumn(name = "ENTITY_A")
Collection<EntityB> children = new ArrayList<>();
}
| EntityA |
java | elastic__elasticsearch | x-pack/test/idp-fixture/src/main/java/org/elasticsearch/test/fixtures/idp/OidcProviderTestContainer.java | {
"start": 568,
"end": 2473
} | class ____ extends DockerEnvironmentAwareTestContainer {
private static final int PORT = 8080;
private static final int SSL_PORT = 8443;
/**
* for packer caching only
* */
protected OidcProviderTestContainer() {
this(Network.newNetwork());
}
public OidcProviderTestContainer(Network network) {
super(
new ImageFromDockerfile("es-oidc-provider-fixture").withFileFromClasspath("oidc/setup.sh", "/oidc/setup.sh")
.withFileFromClasspath("oidc/testnode.jks", "/oidc/testnode.jks")
// we cannot make use of docker file builder
// as it does not support multi-stage builds
.withFileFromClasspath("Dockerfile", "oidc/Dockerfile")
);
withNetworkAliases("oidc-provider");
withNetwork(network);
addExposedPorts(PORT, SSL_PORT);
}
@Override
public void start() {
super.start();
copyFileToContainer(
Transferable.of(
"op.issuer=http://127.0.0.1:"
+ getMappedPort(PORT)
+ "/c2id\n"
+ "op.authz.endpoint=http://127.0.0.1:"
+ getMappedPort(PORT)
+ "/c2id-login/\n"
+ "op.reg.apiAccessTokenSHA256=d1c4fa70d9ee708d13cfa01daa0e060a05a2075a53c5cc1ad79e460e96ab5363\n"
+ "op.authz.alwaysPromptForConsent=true\n"
+ "op.authz.alwaysPromptForAuth=true"
),
"config/c2id/override.properties"
);
}
public String getC2OPUrl() {
return "http://127.0.0.1:" + getMappedPort(PORT);
}
public String getC2IssuerUrl() {
return getC2OPUrl() + "/c2id";
}
public String getC2IDSslUrl() {
return "https://127.0.0.1:" + getMappedPort(SSL_PORT) + "/c2id";
}
}
| OidcProviderTestContainer |
java | elastic__elasticsearch | modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhaseTests.java | {
"start": 1553,
"end": 6937
} | class ____ extends ESTestCase {
public void testHitsExecutionNeeded() {
PercolateQuery percolateQuery = new PercolateQuery(
"_name",
ctx -> null,
Collections.singletonList(new BytesArray("{}")),
new MatchAllDocsQuery(),
Mockito.mock(IndexSearcher.class),
null,
new MatchAllDocsQuery()
);
PercolatorHighlightSubFetchPhase subFetchPhase = new PercolatorHighlightSubFetchPhase(emptyMap());
FetchContext fetchContext = mock(FetchContext.class);
Mockito.when(fetchContext.highlight()).thenReturn(new SearchHighlightContext(Collections.emptyList()));
Mockito.when(fetchContext.query()).thenReturn(new MatchAllDocsQuery());
assertNull(subFetchPhase.getProcessor(fetchContext));
Mockito.when(fetchContext.query()).thenReturn(percolateQuery);
assertNotNull(subFetchPhase.getProcessor(fetchContext));
}
public void testLocatePercolatorQuery() {
PercolateQuery percolateQuery = new PercolateQuery(
"_name",
ctx -> null,
Collections.singletonList(new BytesArray("{}")),
new MatchAllDocsQuery(),
Mockito.mock(IndexSearcher.class),
null,
new MatchAllDocsQuery()
);
assertThat(PercolatorHighlightSubFetchPhase.locatePercolatorQuery(new MatchAllDocsQuery()).size(), equalTo(0));
BooleanQuery.Builder bq = new BooleanQuery.Builder();
bq.add(new MatchAllDocsQuery(), BooleanClause.Occur.FILTER);
assertThat(PercolatorHighlightSubFetchPhase.locatePercolatorQuery(bq.build()).size(), equalTo(0));
bq.add(percolateQuery, BooleanClause.Occur.FILTER);
assertThat(PercolatorHighlightSubFetchPhase.locatePercolatorQuery(bq.build()).size(), equalTo(1));
assertThat(PercolatorHighlightSubFetchPhase.locatePercolatorQuery(bq.build()).get(0), sameInstance(percolateQuery));
ConstantScoreQuery constantScoreQuery = new ConstantScoreQuery(new MatchAllDocsQuery());
assertThat(PercolatorHighlightSubFetchPhase.locatePercolatorQuery(constantScoreQuery).size(), equalTo(0));
constantScoreQuery = new ConstantScoreQuery(percolateQuery);
assertThat(PercolatorHighlightSubFetchPhase.locatePercolatorQuery(constantScoreQuery).size(), equalTo(1));
assertThat(PercolatorHighlightSubFetchPhase.locatePercolatorQuery(constantScoreQuery).get(0), sameInstance(percolateQuery));
BoostQuery boostQuery = new BoostQuery(new MatchAllDocsQuery(), 1f);
assertThat(PercolatorHighlightSubFetchPhase.locatePercolatorQuery(boostQuery).size(), equalTo(0));
boostQuery = new BoostQuery(percolateQuery, 1f);
assertThat(PercolatorHighlightSubFetchPhase.locatePercolatorQuery(boostQuery).size(), equalTo(1));
assertThat(PercolatorHighlightSubFetchPhase.locatePercolatorQuery(boostQuery).get(0), sameInstance(percolateQuery));
FunctionScoreQuery functionScoreQuery = new FunctionScoreQuery(new MatchAllDocsQuery(), new RandomScoreFunction(0, 0, null));
assertThat(PercolatorHighlightSubFetchPhase.locatePercolatorQuery(functionScoreQuery).size(), equalTo(0));
functionScoreQuery = new FunctionScoreQuery(percolateQuery, new RandomScoreFunction(0, 0, null));
assertThat(PercolatorHighlightSubFetchPhase.locatePercolatorQuery(functionScoreQuery).size(), equalTo(1));
assertThat(PercolatorHighlightSubFetchPhase.locatePercolatorQuery(functionScoreQuery).get(0), sameInstance(percolateQuery));
DisjunctionMaxQuery disjunctionMaxQuery = new DisjunctionMaxQuery(Collections.singleton(new MatchAllDocsQuery()), 1f);
assertThat(PercolatorHighlightSubFetchPhase.locatePercolatorQuery(disjunctionMaxQuery).size(), equalTo(0));
disjunctionMaxQuery = new DisjunctionMaxQuery(Arrays.asList(percolateQuery, new MatchAllDocsQuery()), 1f);
assertThat(PercolatorHighlightSubFetchPhase.locatePercolatorQuery(disjunctionMaxQuery).size(), equalTo(1));
assertThat(PercolatorHighlightSubFetchPhase.locatePercolatorQuery(disjunctionMaxQuery).get(0), sameInstance(percolateQuery));
PercolateQuery percolateQuery2 = new PercolateQuery(
"_name",
ctx -> null,
Collections.singletonList(new BytesArray("{}")),
new MatchAllDocsQuery(),
Mockito.mock(IndexSearcher.class),
null,
new MatchAllDocsQuery()
);
bq = new BooleanQuery.Builder();
bq.add(new MatchAllDocsQuery(), BooleanClause.Occur.FILTER);
assertThat(PercolatorHighlightSubFetchPhase.locatePercolatorQuery(bq.build()).size(), equalTo(0));
bq.add(percolateQuery, BooleanClause.Occur.FILTER);
bq.add(percolateQuery2, BooleanClause.Occur.FILTER);
assertThat(PercolatorHighlightSubFetchPhase.locatePercolatorQuery(bq.build()).size(), equalTo(2));
assertThat(
PercolatorHighlightSubFetchPhase.locatePercolatorQuery(bq.build()),
containsInAnyOrder(sameInstance(percolateQuery), sameInstance(percolateQuery2))
);
assertNotNull(PercolatorHighlightSubFetchPhase.locatePercolatorQuery(null));
assertThat(PercolatorHighlightSubFetchPhase.locatePercolatorQuery(null).size(), equalTo(0));
}
}
| PercolatorHighlightSubFetchPhaseTests |
java | spring-projects__spring-framework | spring-core/src/test/java/org/springframework/core/annotation/MergedAnnotationsTests.java | {
"start": 57595,
"end": 105416
} | interface ____ the following,
// though perhaps counterintuitive, must be false:
assertThat(MergedAnnotations.from(SubInheritedAnnotationInterface.class,
SearchStrategy.INHERITED_ANNOTATIONS).get(
Transactional.class).getAggregateIndex()).isEqualTo(-1);
assertThat(MergedAnnotations.from(InheritedAnnotationClass.class,
SearchStrategy.INHERITED_ANNOTATIONS).get(
Transactional.class).getAggregateIndex()).isEqualTo(0);
assertThat(MergedAnnotations.from(SubInheritedAnnotationClass.class,
SearchStrategy.INHERITED_ANNOTATIONS).get(
Transactional.class).getAggregateIndex()).isEqualTo(1);
// non-inherited class-level annotation; note: @Order is not inherited
assertThat(MergedAnnotations.from(NonInheritedAnnotationInterface.class,
SearchStrategy.INHERITED_ANNOTATIONS).get(
Order.class).getAggregateIndex()).isEqualTo(0);
assertThat(MergedAnnotations.from(SubNonInheritedAnnotationInterface.class,
SearchStrategy.INHERITED_ANNOTATIONS).get(
Order.class).getAggregateIndex()).isEqualTo(-1);
assertThat(MergedAnnotations.from(NonInheritedAnnotationClass.class,
SearchStrategy.INHERITED_ANNOTATIONS).get(
Order.class).getAggregateIndex()).isEqualTo(0);
assertThat(MergedAnnotations.from(SubNonInheritedAnnotationClass.class,
SearchStrategy.INHERITED_ANNOTATIONS).get(
Order.class).getAggregateIndex()).isEqualTo(-1);
}
@Test
void getDirectWithoutAttributeAliases() {
MergedAnnotation<?> annotation = MergedAnnotations.from(WebController.class).get(Component.class);
assertThat(annotation.getString("value")).isEqualTo("webController");
}
@Test
void getDirectWithNestedAnnotations() {
MergedAnnotation<?> annotation = MergedAnnotations.from(ComponentScanClass.class).get(ComponentScan.class);
MergedAnnotation<Filter>[] filters = annotation.getAnnotationArray("excludeFilters", Filter.class);
assertThat(Arrays.stream(filters).map(
filter -> filter.getString("pattern"))).containsExactly("*Foo", "*Bar");
}
@Test
void getDirectWithAttributeAliases1() throws Exception {
Method method = WebController.class.getMethod("handleMappedWithValueAttribute");
MergedAnnotation<?> annotation = MergedAnnotations.from(method).get(RequestMapping.class);
assertThat(annotation.getString("name")).isEqualTo("foo");
assertThat(annotation.getStringArray("value")).containsExactly("/test");
assertThat(annotation.getStringArray("path")).containsExactly("/test");
}
@Test
void getDirectWithAttributeAliases2() throws Exception {
Method method = WebController.class.getMethod("handleMappedWithPathAttribute");
MergedAnnotation<?> annotation = MergedAnnotations.from(method).get(RequestMapping.class);
assertThat(annotation.getString("name")).isEqualTo("bar");
assertThat(annotation.getStringArray("value")).containsExactly("/test");
assertThat(annotation.getStringArray("path")).containsExactly("/test");
}
@Test
void getDirectWithAttributeAliasesWithDifferentValues() throws Exception {
	Method method = WebController.class.getMethod("handleMappedWithDifferentPathAndValueAttributes");
	// 'path' and 'value' are aliases; declaring different values for each is a configuration error.
	assertThatExceptionOfType(AnnotationConfigurationException.class)
		.isThrownBy(() -> MergedAnnotations.from(method).get(RequestMapping.class))
		.withMessageContaining("attribute 'path' and its alias 'value'")
		.withMessageContaining("values of [{/test}] and [{/enigma}]");
}
@Test
void getValueFromAnnotation() throws Exception {
	// @Order is located somewhere in the method's type hierarchy.
	Method method = TransactionalStringGeneric.class.getMethod("something", Object.class);
	MergedAnnotation<?> order = MergedAnnotations.from(method, SearchStrategy.TYPE_HIERARCHY).get(Order.class);
	assertThat(order.getInt("value")).isEqualTo(1);
}
@Test
void getValueFromNonPublicAnnotation() {
	// A package-private annotation type is still readable through MergedAnnotation.
	Annotation[] declared = NonPublicAnnotatedClass.class.getDeclaredAnnotations();
	assertThat(declared).hasSize(1);
	MergedAnnotation<Annotation> merged = MergedAnnotation.from(declared[0]);
	assertThat(merged.getType().getSimpleName()).isEqualTo("NonPublicAnnotation");
	assertThat(merged.synthesize().annotationType().getSimpleName()).isEqualTo("NonPublicAnnotation");
	assertThat(merged.getInt("value")).isEqualTo(42);
}
@Test
void getDefaultValueFromAnnotation() throws Exception {
	Method method = TransactionalStringGeneric.class.getMethod("something", Object.class);
	MergedAnnotation<Order> annotation =
			MergedAnnotations.from(method, SearchStrategy.TYPE_HIERARCHY).get(Order.class);
	// getDefaultValue returns the attribute's declared default, not its actual value.
	assertThat(annotation.getDefaultValue("value")).contains(Ordered.LOWEST_PRECEDENCE);
}
@Test
void getDefaultValueFromNonPublicAnnotation() {
	// Default values are accessible even for package-private annotation types.
	Annotation[] declared = NonPublicAnnotatedClass.class.getDeclaredAnnotations();
	assertThat(declared).hasSize(1);
	MergedAnnotation<?> merged = MergedAnnotation.from(declared[0]);
	assertThat(merged.getType().getName())
			.isEqualTo("org.springframework.core.annotation.subpackage.NonPublicAnnotation");
	assertThat(merged.getDefaultValue("value")).contains(-1);
}
@Test
void getDefaultValueFromAnnotationType() {
	// MergedAnnotation.of(type) materializes an annotation purely from its defaults.
	MergedAnnotation<?> order = MergedAnnotation.of(Order.class);
	assertThat(order.getDefaultValue("value")).contains(Ordered.LOWEST_PRECEDENCE);
}
@Test
void streamRepeatableDeclaredOnMethod() throws Exception {
	// Collect the 'value' of every @MyRepeatable reachable from the method's type hierarchy.
	Method method = InterfaceWithRepeated.class.getMethod("foo");
	Stream<String> values = MergedAnnotations.from(method, SearchStrategy.TYPE_HIERARCHY)
			.stream(MyRepeatable.class)
			.map(annotation -> annotation.getString("value"));
	assertThat(values).containsExactly("A", "B", "C", "meta1");
}
@Test
@SuppressWarnings("deprecation")
void streamRepeatableDeclaredOnClassWithAttributeAliases() {
	// Without an explicit container registration the repeatable is not found at all.
	assertThat(MergedAnnotations.from(HierarchyClass.class).stream(TestConfiguration.class)).isEmpty();
	RepeatableContainers containers = RepeatableContainers.explicitRepeatable(TestConfiguration.class, Hierarchy.class);
	MergedAnnotations annotations = MergedAnnotations.from(HierarchyClass.class,
			SearchStrategy.DIRECT, containers, AnnotationFilter.NONE);
	// 'location' and its alias 'value' must both expose the same resolved values.
	assertThat(annotations.stream(TestConfiguration.class)
			.map(annotation -> annotation.getString("location")))
			.containsExactly("A", "B");
	assertThat(annotations.stream(TestConfiguration.class)
			.map(annotation -> annotation.getString("value")))
			.containsExactly("A", "B");
}
@Test
void streamRepeatableDeclaredOnClass() {
	// Plain Java reflection misses the meta-declared "meta1"; Spring's search finds it.
	testRepeatables(SearchStrategy.SUPERCLASS, MyRepeatableClass.class,
			new String[] { "A", "B", "C" },
			new String[] { "A", "B", "C", "meta1" });
}
@Test
void streamRepeatableDeclaredOnSuperclass() {
	// Superclass declarations are visible with the SUPERCLASS search strategy.
	testRepeatables(SearchStrategy.SUPERCLASS, SubMyRepeatableClass.class,
			new String[] { "A", "B", "C" },
			new String[] { "A", "B", "C", "meta1" });
}
@Test
void streamRepeatableDeclaredOnClassAndSuperclass() {
	// Expected values come from the local declarations (X, Y, Z), not the superclass.
	testRepeatables(SearchStrategy.SUPERCLASS, SubMyRepeatableWithAdditionalLocalDeclarationsClass.class,
			new String[] { "X", "Y", "Z" },
			new String[] { "X", "Y", "Z", "meta2" });
}
@Test
void streamRepeatableDeclaredOnMultipleSuperclasses() {
	// Same expectations as the single-superclass case, one level deeper in the hierarchy.
	testRepeatables(SearchStrategy.SUPERCLASS, SubSubMyRepeatableWithAdditionalLocalDeclarationsClass.class,
			new String[] { "X", "Y", "Z" },
			new String[] { "X", "Y", "Z", "meta2" });
}
@Test
void streamDirectRepeatablesDeclaredOnClass() {
	// DIRECT search still sees repeatables declared locally on the class itself.
	testRepeatables(SearchStrategy.DIRECT, MyRepeatableClass.class,
			new String[] { "A", "B", "C" },
			new String[] { "A", "B", "C", "meta1" });
}
@Test
void streamDirectRepeatablesDeclaredOnSuperclass() {
	// DIRECT search ignores superclass declarations, so nothing is found.
	testRepeatables(SearchStrategy.DIRECT, SubMyRepeatableClass.class, new String[0], new String[0]);
}
// Verifies repeatable-annotation lookup via three mechanisms: plain Java reflection,
// Spring with an explicitly registered container type, and Spring's standard
// @Repeatable container detection. Java and Spring may legitimately differ.
private void testRepeatables(SearchStrategy searchStrategy, Class<?> element,
		String[] expectedValuesJava, String[] expectedValuesSpring) {
	testJavaRepeatables(searchStrategy, element, expectedValuesJava);
	testExplicitRepeatables(searchStrategy, element, expectedValuesSpring);
	testStandardRepeatables(searchStrategy, element, expectedValuesSpring);
}
// Baseline: what plain Java reflection reports for the same element.
private void testJavaRepeatables(SearchStrategy searchStrategy, Class<?> element, String[] expected) {
	MyRepeatable[] annotations;
	if (searchStrategy == SearchStrategy.DIRECT) {
		annotations = element.getDeclaredAnnotationsByType(MyRepeatable.class);
	}
	else {
		annotations = element.getAnnotationsByType(MyRepeatable.class);
	}
	assertThat(annotations).extracting(MyRepeatable::value).containsExactly(expected);
}
// Spring lookup with an explicitly registered repeatable container type.
private void testExplicitRepeatables(SearchStrategy searchStrategy, Class<?> element, String[] expected) {
	RepeatableContainers containers =
			RepeatableContainers.explicitRepeatable(MyRepeatable.class, MyRepeatableContainer.class);
	Stream<String> values = MergedAnnotations.from(element, searchStrategy, containers)
			.stream(MyRepeatable.class)
			// Only keep the first run of aggregate indexes (nearest declarations).
			.filter(MergedAnnotationPredicates.firstRunOf(MergedAnnotation::getAggregateIndex))
			.map(annotation -> annotation.getString("value"));
	assertThat(values).containsExactly(expected);
}
// Spring lookup relying on standard @Repeatable container detection.
private void testStandardRepeatables(SearchStrategy searchStrategy, Class<?> element, String[] expected) {
	MergedAnnotations annotations = MergedAnnotations.from(element, searchStrategy);
	Stream<String> values = annotations.stream(MyRepeatable.class)
			// Only keep the first run of aggregate indexes (nearest declarations).
			.filter(MergedAnnotationPredicates.firstRunOf(MergedAnnotation::getAggregateIndex))
			.map(annotation -> annotation.getString("value"));
	assertThat(values).containsExactly(expected);
}
@Test
void synthesizeWithoutAttributeAliases() {
	// With no aliases to resolve, synthesis still yields an equal annotation.
	Component component = WebController.class.getAnnotation(Component.class);
	assertThat(component).isNotNull();
	Component synthesized = MergedAnnotation.from(component).synthesize();
	assertThat(synthesized).isNotNull();
	assertThat(synthesized).isEqualTo(component);
	assertThat(synthesized.value()).isEqualTo("webController");
}
/**
 * Synthesizing an annotation must reuse the existing JDK proxy class of the
 * original annotation rather than generating a new proxy class.
 * @since 6.0
 */
@Test
void synthesizedAnnotationShouldReuseJdkProxyClass() throws Exception {
	Method method = WebController.class.getMethod("handleMappedWithValueAttribute");
	RequestMapping jdkRequestMapping = method.getAnnotation(RequestMapping.class);
	assertThat(jdkRequestMapping).isNotNull();
	// The plain JDK view leaves the alias unresolved: 'path' keeps its empty default.
	assertThat(jdkRequestMapping.value()).containsExactly("/test");
	assertThat(jdkRequestMapping.path()).containsExactly("");
	RequestMapping synthesizedRequestMapping = MergedAnnotation.from(jdkRequestMapping).synthesize();
	assertSynthesized(synthesizedRequestMapping);
	// Synthesis resolves the alias: both attributes now report the declared value.
	assertThat(synthesizedRequestMapping.value()).containsExactly("/test");
	assertThat(synthesizedRequestMapping.path()).containsExactly("/test");
	// Both instances must share the same proxy Class object.
	assertThat(jdkRequestMapping.getClass()).isSameAs(synthesizedRequestMapping.getClass());
}
@Test
void synthesizeAlreadySynthesized() throws Exception {
	Method method = WebController.class.getMethod("handleMappedWithValueAttribute");
	RequestMapping webMapping = method.getAnnotation(RequestMapping.class);
	assertThat(webMapping).isNotNull();
	RequestMapping synthesizedWebMapping = MergedAnnotation.from(webMapping).synthesize();
	RequestMapping synthesizedAgainWebMapping = MergedAnnotation.from(synthesizedWebMapping).synthesize();
	assertSynthesized(synthesizedWebMapping);
	assertSynthesized(synthesizedAgainWebMapping);
	// Re-synthesizing must return the identical instance, not merely an equal one.
	assertThat(synthesizedWebMapping).isEqualTo(synthesizedAgainWebMapping);
	assertThat(synthesizedWebMapping).isSameAs(synthesizedAgainWebMapping);
	assertThat(synthesizedWebMapping.name()).isEqualTo("foo");
	// 'path' and its alias 'value' both resolve to the declared value.
	assertThat(synthesizedWebMapping.path()).containsExactly("/test");
	assertThat(synthesizedWebMapping.value()).containsExactly("/test");
}
@Test
void synthesizeShouldNotSynthesizeNonsynthesizableAnnotations() throws Exception {
	// Annotations that have nothing to resolve must be returned as-is (same instance).
	Method method = getClass().getDeclaredMethod("getId");
	Id id = method.getAnnotation(Id.class);
	assertThat(id).isNotNull();
	Id synthesizedId = MergedAnnotation.from(id).synthesize();
	assertThat(id).isEqualTo(synthesizedId);
	// It doesn't make sense to synthesize @Id since it declares zero attributes.
	assertNotSynthesized(synthesizedId);
	assertThat(id).isSameAs(synthesizedId);
	GeneratedValue generatedValue = method.getAnnotation(GeneratedValue.class);
	assertThat(generatedValue).isNotNull();
	GeneratedValue synthesizedGeneratedValue = MergedAnnotation.from(generatedValue).synthesize();
	assertThat(generatedValue).isEqualTo(synthesizedGeneratedValue);
	// It doesn't make sense to synthesize @GeneratedValue since it declares zero attributes with aliases.
	assertNotSynthesized(synthesizedGeneratedValue);
	assertThat(generatedValue).isSameAs(synthesizedGeneratedValue);
}
@Test // gh-28716
void synthesizeWhenUsingMergedAnnotationsFromApi() {
	// Directly declared @RootAnnotation needs no synthesis; meta-annotated usages do.
	assertRootAnnotationOnField("directlyAnnotated", false, false);
	assertRootAnnotationOnField("metaAnnotated", true, true);
	assertRootAnnotationOnField("metaMetaAnnotated", true, true);
}

/**
 * Asserts the synthesized {@code @RootAnnotation} found on the named field of
 * {@code DomainType}.
 * @param fieldName field of {@code DomainType} to inspect
 * @param expectedFlag expected value of {@code RootAnnotation.flag()}
 * @param expectSynthesized whether the result should be a synthesized proxy
 */
private void assertRootAnnotationOnField(String fieldName, boolean expectedFlag, boolean expectSynthesized) {
	Field field = ReflectionUtils.findField(DomainType.class, fieldName);
	RootAnnotation rootAnnotation = MergedAnnotations.from(field).get(RootAnnotation.class).synthesize();
	assertThat(rootAnnotation.flag()).isEqualTo(expectedFlag);
	if (expectSynthesized) {
		assertSynthesized(rootAnnotation);
	}
	else {
		assertNotSynthesized(rootAnnotation);
	}
}
@Test // gh-28704
void synthesizeShouldNotSynthesizeNonsynthesizableAnnotationsWhenUsingMergedAnnotationsFromApi() {
	// Neither security annotation requires synthesis, so plain instances come back.
	MergedAnnotations mergedAnnotations = MergedAnnotations.from(SecurityConfig.class);
	assertNotSynthesized(mergedAnnotations.get(EnableWebSecurity.class).synthesize());
	assertNotSynthesized(mergedAnnotations.get(EnableGlobalAuthentication.class).synthesize());
}
/**
 * If an attempt is made to synthesize an annotation from an annotation instance
 * that has already been synthesized, the original synthesized annotation should
 * ideally be returned as-is without creating a new proxy instance with the same
 * values.
 */
@Test
void synthesizeShouldNotResynthesizeAlreadySynthesizedAnnotations() throws Exception {
	Method method = WebController.class.getMethod("handleMappedWithValueAttribute");
	RequestMapping webMapping = method.getAnnotation(RequestMapping.class);
	assertThat(webMapping).isNotNull();
	MergedAnnotation<RequestMapping> mergedAnnotation1 = MergedAnnotation.from(webMapping);
	RequestMapping synthesizedWebMapping1 = mergedAnnotation1.synthesize();
	RequestMapping synthesizedWebMapping2 = MergedAnnotation.from(webMapping).synthesize();
	assertSynthesized(synthesizedWebMapping1);
	assertSynthesized(synthesizedWebMapping2);
	assertThat(synthesizedWebMapping1).isEqualTo(synthesizedWebMapping2);
	// Synthesizing an annotation from a different MergedAnnotation results in a different synthesized annotation instance.
	assertThat(synthesizedWebMapping1).isNotSameAs(synthesizedWebMapping2);
	// Synthesizing an annotation from the same MergedAnnotation results in the same synthesized annotation instance.
	assertThat(synthesizedWebMapping1).isSameAs(mergedAnnotation1.synthesize());
	RequestMapping synthesizedAgainWebMapping = MergedAnnotation.from(synthesizedWebMapping1).synthesize();
	assertThat(synthesizedWebMapping1).isEqualTo(synthesizedAgainWebMapping);
	// Synthesizing an already synthesized annotation results in the original synthesized annotation instance.
	assertThat(synthesizedWebMapping1).isSameAs(synthesizedAgainWebMapping);
}
@Test
void synthesizeWhenAliasForIsMissingAttributeDeclaration() {
	// An @AliasFor with no attribute/value declaration implicitly aliases itself — rejected.
	AliasForWithMissingAttributeDeclaration annotation =
			AliasForWithMissingAttributeDeclarationClass.class.getAnnotation(
					AliasForWithMissingAttributeDeclaration.class);
	assertThatExceptionOfType(AnnotationConfigurationException.class)
		.isThrownBy(() -> MergedAnnotation.from(annotation))
		.withMessageStartingWith("@AliasFor declaration on attribute 'foo' in annotation")
		.withMessageContaining(AliasForWithMissingAttributeDeclaration.class.getName())
		.withMessageContaining("points to itself");
}
@Test
void synthesizeWhenAliasForHasDuplicateAttributeDeclaration() {
	// Declaring both 'attribute' and 'value' inside a single @AliasFor is rejected.
	AliasForWithDuplicateAttributeDeclaration annotation =
			AliasForWithDuplicateAttributeDeclarationClass.class.getAnnotation(
					AliasForWithDuplicateAttributeDeclaration.class);
	assertThatExceptionOfType(AnnotationConfigurationException.class)
		.isThrownBy(() -> MergedAnnotation.from(annotation))
		.withMessageStartingWith("In @AliasFor declared on attribute 'foo' in annotation")
		.withMessageContaining(AliasForWithDuplicateAttributeDeclaration.class.getName())
		.withMessageContaining("attribute 'attribute' and its alias 'value' are present with values of 'baz' and 'bar'");
}
@Test
void synthesizeWhenAttributeAliasForNonexistentAttribute() {
	// Aliasing an attribute that does not exist on the annotation is rejected.
	AliasForNonexistentAttribute annotation = AliasForNonexistentAttributeClass.class.getAnnotation(
			AliasForNonexistentAttribute.class);
	assertThatExceptionOfType(AnnotationConfigurationException.class)
		.isThrownBy(() -> MergedAnnotation.from(annotation))
		.withMessageStartingWith("@AliasFor declaration on attribute 'foo' in annotation")
		.withMessageContaining(AliasForNonexistentAttribute.class.getName())
		.withMessageContaining("declares an alias for 'bar' which is not present");
}
@Test
void synthesizeWhenAttributeAliasWithMirroredAliasForWrongAttribute() {
	// The mirrored @AliasFor points at a nonexistent attribute ('quux') — rejected.
	AliasForWithMirroredAliasForWrongAttribute annotation =
			AliasForWithMirroredAliasForWrongAttributeClass.class.getAnnotation(
					AliasForWithMirroredAliasForWrongAttribute.class);
	assertThatExceptionOfType(AnnotationConfigurationException.class)
		.isThrownBy(() -> MergedAnnotation.from(annotation))
		.withMessage("@AliasFor declaration on attribute 'bar' in annotation [" +
				AliasForWithMirroredAliasForWrongAttribute.class.getName() +
				"] declares an alias for 'quux' which is not present.");
}
@Test
void synthesizeWhenAttributeAliasForAttributeOfDifferentType() {
	// Aliased attributes must share the same return type.
	AliasForAttributeOfDifferentType annotation = AliasForAttributeOfDifferentTypeClass.class.getAnnotation(
			AliasForAttributeOfDifferentType.class);
	assertThatExceptionOfType(AnnotationConfigurationException.class)
		.isThrownBy(() -> MergedAnnotation.from(annotation))
		.withMessageStartingWith("Misconfigured aliases")
		.withMessageContaining(AliasForAttributeOfDifferentType.class.getName())
		.withMessageContaining("attribute 'foo'")
		.withMessageContaining("attribute 'bar'")
		.withMessageContaining("same return type");
}
@Test
void synthesizeWhenAttributeAliasForWithMissingDefaultValues() {
	// Aliased attributes must both declare default values.
	AliasForWithMissingDefaultValues annotation = AliasForWithMissingDefaultValuesClass.class.getAnnotation(
			AliasForWithMissingDefaultValues.class);
	assertThatExceptionOfType(AnnotationConfigurationException.class)
		.isThrownBy(() -> MergedAnnotation.from(annotation))
		.withMessageStartingWith("Misconfigured aliases")
		.withMessageContaining(AliasForWithMissingDefaultValues.class.getName())
		.withMessageContaining("attribute 'foo' in annotation")
		.withMessageContaining("attribute 'bar' in annotation")
		.withMessageContaining("default values");
}
@Test
void synthesizeWhenAttributeAliasForAttributeWithDifferentDefaultValue() {
	// Aliased attributes must declare the same default value.
	AliasForAttributeWithDifferentDefaultValue annotation =
			AliasForAttributeWithDifferentDefaultValueClass.class.getAnnotation(
					AliasForAttributeWithDifferentDefaultValue.class);
	assertThatExceptionOfType(AnnotationConfigurationException.class)
		.isThrownBy(() -> MergedAnnotation.from(annotation))
		.withMessageStartingWith("Misconfigured aliases")
		.withMessageContaining(AliasForAttributeWithDifferentDefaultValue.class.getName())
		.withMessageContaining("attribute 'foo' in annotation")
		.withMessageContaining("attribute 'bar' in annotation")
		.withMessageContaining("same default value");
}
@Test
void synthesizeWhenAttributeAliasForMetaAnnotationThatIsNotMetaPresent() {
	// An alias may only target an attribute of a meta-present annotation.
	AliasedComposedTestConfigurationNotMetaPresent annotation =
			AliasedComposedTestConfigurationNotMetaPresentClass.class.getAnnotation(
					AliasedComposedTestConfigurationNotMetaPresent.class);
	assertThatExceptionOfType(AnnotationConfigurationException.class)
		.isThrownBy(() -> MergedAnnotation.from(annotation))
		.withMessageStartingWith("@AliasFor declaration on attribute 'xmlConfigFile' in annotation")
		.withMessageContaining(AliasedComposedTestConfigurationNotMetaPresent.class.getName())
		.withMessageContaining("declares an alias for attribute 'location' in annotation")
		.withMessageContaining(TestConfiguration.class.getName())
		.withMessageContaining("not meta-present");
}
@Test
void synthesizeWithImplicitAliases() {
	// Each annotated class declares a different member of the implicit alias group;
	// synthesis must propagate that value to every other alias.
	testSynthesisWithImplicitAliases(ValueImplicitAliasesTestConfigurationClass.class, "value");
	testSynthesisWithImplicitAliases(Location1ImplicitAliasesTestConfigurationClass.class, "location1");
	testSynthesisWithImplicitAliases(XmlImplicitAliasesTestConfigurationClass.class, "xmlFile");
	testSynthesisWithImplicitAliases(GroovyImplicitAliasesSimpleTestConfigurationClass.class, "groovyScript");
}
// Whichever implicit alias was declared on the class, all aliased attributes
// must resolve to the same value after synthesis.
private void testSynthesisWithImplicitAliases(Class<?> clazz, String expected) {
	ImplicitAliasesTestConfiguration config = clazz.getAnnotation(ImplicitAliasesTestConfiguration.class);
	assertThat(config).isNotNull();
	ImplicitAliasesTestConfiguration synthesized = MergedAnnotation.from(config).synthesize();
	assertSynthesized(synthesized);
	assertThat(synthesized.value()).isEqualTo(expected);
	assertThat(synthesized.location1()).isEqualTo(expected);
	assertThat(synthesized.xmlFile()).isEqualTo(expected);
	assertThat(synthesized.groovyScript()).isEqualTo(expected);
}
@Test
void synthesizeWithImplicitAliasesWithImpliedAliasNamesOmitted() {
	// The @AliasFor declarations omit explicit alias names; the attribute name is implied.
	testSynthesisWithImplicitAliasesWithImpliedAliasNamesOmitted(
			ValueImplicitAliasesWithImpliedAliasNamesOmittedTestConfigurationClass.class,
			"value");
	testSynthesisWithImplicitAliasesWithImpliedAliasNamesOmitted(
			LocationsImplicitAliasesWithImpliedAliasNamesOmittedTestConfigurationClass.class,
			"location");
	testSynthesisWithImplicitAliasesWithImpliedAliasNamesOmitted(
			XmlFilesImplicitAliasesWithImpliedAliasNamesOmittedTestConfigurationClass.class,
			"xmlFile");
}
// All members of the implied-name alias group must resolve to the declared value.
private void testSynthesisWithImplicitAliasesWithImpliedAliasNamesOmitted(Class<?> clazz, String expected) {
	ImplicitAliasesWithImpliedAliasNamesOmittedTestConfiguration config = clazz.getAnnotation(
			ImplicitAliasesWithImpliedAliasNamesOmittedTestConfiguration.class);
	assertThat(config).isNotNull();
	ImplicitAliasesWithImpliedAliasNamesOmittedTestConfiguration synthesized =
			MergedAnnotation.from(config).synthesize();
	assertSynthesized(synthesized);
	assertThat(synthesized.value()).isEqualTo(expected);
	assertThat(synthesized.location()).isEqualTo(expected);
	assertThat(synthesized.xmlFile()).isEqualTo(expected);
}
@Test
void synthesizeWithImplicitAliasesForAliasPair() {
	// Implicit aliases targeting an existing alias pair: both attributes share the value.
	ImplicitAliasesForAliasPairTestConfiguration config =
			ImplicitAliasesForAliasPairTestConfigurationClass.class.getAnnotation(
					ImplicitAliasesForAliasPairTestConfiguration.class);
	ImplicitAliasesForAliasPairTestConfiguration synthesized = MergedAnnotation.from(config).synthesize();
	assertSynthesized(synthesized);
	assertThat(synthesized.xmlFile()).isEqualTo("test.xml");
	assertThat(synthesized.groovyScript()).isEqualTo("test.xml");
}
@Test
void synthesizeWithTransitiveImplicitAliases() {
	// Aliases established transitively through intermediate annotations still resolve.
	TransitiveImplicitAliasesTestConfiguration config =
			TransitiveImplicitAliasesTestConfigurationClass.class.getAnnotation(
					TransitiveImplicitAliasesTestConfiguration.class);
	TransitiveImplicitAliasesTestConfiguration synthesized = MergedAnnotation.from(config).synthesize();
	assertSynthesized(synthesized);
	assertThat(synthesized.xml()).isEqualTo("test.xml");
	assertThat(synthesized.groovy()).isEqualTo("test.xml");
}
@Test
void synthesizeWithTransitiveImplicitAliasesForAliasPair() {
	// Transitive implicit aliases that end in an alias pair resolve like direct ones.
	TransitiveImplicitAliasesForAliasPairTestConfiguration config =
			TransitiveImplicitAliasesForAliasPairTestConfigurationClass.class.getAnnotation(
					TransitiveImplicitAliasesForAliasPairTestConfiguration.class);
	TransitiveImplicitAliasesForAliasPairTestConfiguration synthesized = MergedAnnotation.from(config).synthesize();
	assertSynthesized(synthesized);
	assertThat(synthesized.xml()).isEqualTo("test.xml");
	assertThat(synthesized.groovy()).isEqualTo("test.xml");
}
@Test
void synthesizeWithImplicitAliasesWithMissingDefaultValues() {
	// Implicit aliases are rejected when the aliased attributes lack default values.
	Class<?> clazz = ImplicitAliasesWithMissingDefaultValuesTestConfigurationClass.class;
	Class<ImplicitAliasesWithMissingDefaultValuesTestConfiguration> annotationType =
			ImplicitAliasesWithMissingDefaultValuesTestConfiguration.class;
	ImplicitAliasesWithMissingDefaultValuesTestConfiguration config = clazz.getAnnotation(annotationType);
	assertThatExceptionOfType(AnnotationConfigurationException.class)
		.isThrownBy(() -> MergedAnnotation.from(clazz, config))
		.withMessageStartingWith("Misconfigured aliases:")
		.withMessageContaining("attribute 'location1' in annotation [" + annotationType.getName() + "]")
		.withMessageContaining("attribute 'location2' in annotation [" + annotationType.getName() + "]")
		.withMessageContaining("default values");
}
@Test
void synthesizeWithImplicitAliasesWithDifferentDefaultValues() {
	// Implicit aliases are rejected when the aliased attributes declare different defaults.
	Class<?> clazz = ImplicitAliasesWithDifferentDefaultValuesTestConfigurationClass.class;
	Class<ImplicitAliasesWithDifferentDefaultValuesTestConfiguration> annotationType =
			ImplicitAliasesWithDifferentDefaultValuesTestConfiguration.class;
	ImplicitAliasesWithDifferentDefaultValuesTestConfiguration config = clazz.getAnnotation(annotationType);
	assertThatExceptionOfType(AnnotationConfigurationException.class)
		.isThrownBy(() -> MergedAnnotation.from(clazz, config))
		.withMessageStartingWith("Misconfigured aliases:")
		.withMessageContaining("attribute 'location1' in annotation [" + annotationType.getName() + "]")
		.withMessageContaining("attribute 'location2' in annotation [" + annotationType.getName() + "]")
		.withMessageContaining("same default value");
}
@Test
void synthesizeWithImplicitAliasesWithDuplicateValues() {
	// Supplying conflicting values for two mirrored implicit aliases is rejected.
	Class<?> clazz = ImplicitAliasesWithDuplicateValuesTestConfigurationClass.class;
	Class<ImplicitAliasesWithDuplicateValuesTestConfiguration> annotationType =
			ImplicitAliasesWithDuplicateValuesTestConfiguration.class;
	ImplicitAliasesWithDuplicateValuesTestConfiguration config = clazz.getAnnotation(annotationType);
	assertThatExceptionOfType(AnnotationConfigurationException.class)
		.isThrownBy(() -> MergedAnnotation.from(clazz, config))
		.withMessageStartingWith("Different @AliasFor mirror values for annotation")
		.withMessageContaining(annotationType.getName())
		.withMessageContaining("declared on class")
		.withMessageContaining(clazz.getName())
		.withMessageContaining("are declared with values of");
}
@Test
void synthesizeFromMapWithoutAttributeAliases() {
	// A map-backed annotation synthesizes to a proxy that exposes the map values.
	Component component = WebController.class.getAnnotation(Component.class);
	assertThat(component).isNotNull();
	MergedAnnotation<Component> mapBacked =
			MergedAnnotation.of(Component.class, Collections.singletonMap("value", "webController"));
	Component synthesized = mapBacked.synthesize();
	assertSynthesized(synthesized);
	assertThat(synthesized.value()).isEqualTo("webController");
}
@Test
@SuppressWarnings("unchecked")
void synthesizeFromMapWithNestedMap() {
	ComponentScanSingleFilter componentScan = ComponentScanSingleFilterClass.class.getAnnotation(
			ComponentScanSingleFilter.class);
	assertThat(componentScan).isNotNull();
	assertThat(componentScan.value().pattern()).isEqualTo("*Foo");
	// ANNOTATION_TO_MAP turns the nested @Filter into a mutable nested map.
	Map<String, Object> map = MergedAnnotation.from(componentScan).asMap(
			annotation -> new LinkedHashMap<>(), Adapt.ANNOTATION_TO_MAP);
	Map<String, Object> filterMap = (Map<String, Object>) map.get("value");
	assertThat(filterMap.get("pattern")).isEqualTo("*Foo");
	// Mutations to the nested map (including an unknown key) flow into re-synthesis.
	filterMap.put("pattern", "newFoo");
	filterMap.put("enigma", 42);
	MergedAnnotation<ComponentScanSingleFilter> annotation = MergedAnnotation.of(
			ComponentScanSingleFilter.class, map);
	ComponentScanSingleFilter synthesizedComponentScan = annotation.synthesize();
	assertSynthesized(synthesizedComponentScan);
	assertThat(synthesizedComponentScan.value().pattern()).isEqualTo("newFoo");
}
@Test
@SuppressWarnings("unchecked")
void synthesizeFromMapWithNestedArrayOfMaps() {
	ComponentScan componentScan = ComponentScanClass.class.getAnnotation(ComponentScan.class);
	assertThat(componentScan).isNotNull();
	// ANNOTATION_TO_MAP turns the nested @Filter array into an array of mutable maps.
	Map<String, Object> map = MergedAnnotation.from(componentScan).asMap(
			annotation -> new LinkedHashMap<>(), Adapt.ANNOTATION_TO_MAP);
	Map<String, Object>[] filters = (Map[]) map.get("excludeFilters");
	List<String> patterns = Arrays.stream(filters).map(
			m -> (String) m.get("pattern")).toList();
	assertThat(patterns).containsExactly("*Foo", "*Bar");
	// Mutations to each nested map (including an unknown key) flow into re-synthesis.
	filters[0].put("pattern", "newFoo");
	filters[0].put("enigma", 42);
	filters[1].put("pattern", "newBar");
	filters[1].put("enigma", 42);
	MergedAnnotation<ComponentScan> annotation = MergedAnnotation.of(ComponentScan.class, map);
	ComponentScan synthesizedComponentScan = annotation.synthesize();
	assertSynthesized(synthesizedComponentScan);
	assertThat(Arrays.stream(synthesizedComponentScan.excludeFilters()).map(Filter::pattern))
		.containsExactly("newFoo", "newBar");
}
@Test
void synthesizeFromDefaultsWithoutAttributeAliases() {
	// With no attributes supplied, every accessor reports its declared default.
	AnnotationWithDefaults synthesized = MergedAnnotation.of(AnnotationWithDefaults.class).synthesize();
	assertThat(synthesized.text()).isEqualTo("enigma");
	assertThat(synthesized.predicate()).isTrue();
	assertThat(synthesized.characters()).containsExactly('a', 'b', 'c');
}
@Test
void synthesizeFromDefaultsWithAttributeAliases() {
	// Aliased attributes with matching (empty) defaults synthesize without error.
	TestConfiguration synthesized = MergedAnnotation.of(TestConfiguration.class).synthesize();
	assertThat(synthesized.value()).isEmpty();
	assertThat(synthesized.location()).isEmpty();
}
@Test
void synthesizeWhenAttributeAliasesWithDifferentValues() {
	// Conflicting values for the 'value'/'location' alias pair fail fast in from().
	assertThatExceptionOfType(AnnotationConfigurationException.class)
		.isThrownBy(() -> MergedAnnotation.from(TestConfigurationMismatch.class.getAnnotation(TestConfiguration.class)));
}
@Test
void synthesizeFromMapWithMinimalAttributesWithAttributeAliases() {
	// Supplying only 'location' must also populate its alias 'value'.
	MergedAnnotation<TestConfiguration> annotation = MergedAnnotation.of(
			TestConfiguration.class, Collections.singletonMap("location", "test.xml"));
	TestConfiguration synthesized = annotation.synthesize();
	assertThat(synthesized.value()).isEqualTo("test.xml");
	assertThat(synthesized.location()).isEqualTo("test.xml");
}
@Test
void synthesizeFromMapWithAttributeAliasesThatOverrideArraysWithSingleElements() {
	// Either alias ('value' or 'path') may carry the single-element override.
	synthesizeFromMapWithAttributeAliasesThatOverrideArraysWithSingleElements(
			Collections.singletonMap("value", "/foo"));
	synthesizeFromMapWithAttributeAliasesThatOverrideArraysWithSingleElements(
			Collections.singletonMap("path", "/foo"));
}
// A single value in the map populates both members of the alias pair.
private void synthesizeFromMapWithAttributeAliasesThatOverrideArraysWithSingleElements(Map<String, Object> map) {
	MergedAnnotation<GetMapping> annotation = MergedAnnotation.of(GetMapping.class, map);
	GetMapping synthesized = annotation.synthesize();
	assertThat(synthesized.value()).isEqualTo("/foo");
	assertThat(synthesized.path()).isEqualTo("/foo");
}
@Test
void synthesizeFromMapWithImplicitAttributeAliases() {
	// Any single member of the implicit alias group can seed all the others.
	for (String attribute : new String[] {
			"value", "location1", "location2", "location3", "xmlFile", "groovyScript" }) {
		testSynthesisFromMapWithImplicitAliases(attribute);
	}
}
// Populates a single implicit-alias attribute in the map; synthesis must propagate
// the value to every other attribute in the implicit alias group.
private void testSynthesisFromMapWithImplicitAliases(String attributeNameAndValue) {
	Map<String, Object> map = Collections.singletonMap(attributeNameAndValue, attributeNameAndValue);
	MergedAnnotation<ImplicitAliasesTestConfiguration> annotation = MergedAnnotation.of(
			ImplicitAliasesTestConfiguration.class, map);
	ImplicitAliasesTestConfiguration synthesized = annotation.synthesize();
	assertThat(synthesized.value()).isEqualTo(attributeNameAndValue);
	assertThat(synthesized.location1()).isEqualTo(attributeNameAndValue);
	assertThat(synthesized.location2()).isEqualTo(attributeNameAndValue);
	// Bug fix: location2() was previously asserted twice and location3() never
	// verified, even though the caller exercises the "location3" attribute.
	assertThat(synthesized.location3()).isEqualTo(attributeNameAndValue);
	assertThat(synthesized.xmlFile()).isEqualTo(attributeNameAndValue);
	assertThat(synthesized.groovyScript()).isEqualTo(attributeNameAndValue);
}
@Test
void synthesizeFromMapWithMissingAttributeValue() {
	// No 'text' entry at all: accessing text() must fail on the synthesized proxy.
	testMissingTextAttribute(Map.of());
}
@Test
void synthesizeFromMapWithNullAttributeValue() {
	// A present-but-null 'text' entry behaves the same as a missing one.
	Map<String, Object> map = Collections.singletonMap("text", null);
	assertThat(map).containsKey("text");
	testMissingTextAttribute(map);
}
// Accessing text() on AnnotationWithoutDefaults (no default declared) must throw
// when the attribute map supplies no usable value.
private void testMissingTextAttribute(Map<String, Object> attributes) {
	assertThatExceptionOfType(NoSuchElementException.class)
		.isThrownBy(() -> MergedAnnotation.of(AnnotationWithoutDefaults.class, attributes).synthesize().text())
		.withMessage("No value found for attribute named 'text' in merged annotation " +
				AnnotationWithoutDefaults.class.getCanonicalName());
}
@Test
void synthesizeFromMapWithAttributeOfIncorrectType() {
	// Type mismatches are detected lazily, when the attribute is accessed.
	Map<String, Object> map = Collections.singletonMap("value", 42L);
	MergedAnnotation<Component> annotation = MergedAnnotation.of(Component.class, map);
	assertThatIllegalStateException()
		.isThrownBy(() -> annotation.synthesize().value())
		.withMessage("Attribute 'value' in annotation " +
				"org.springframework.core.testfixture.stereotype.Component should be " +
				"compatible with java.lang.String but a java.lang.Long value was returned");
}
@Test
void synthesizeFromAnnotationAttributesWithoutAttributeAliases() {
	// Round-trip: annotation -> attribute map -> synthesized annotation.
	Component component = WebController.class.getAnnotation(Component.class);
	assertThat(component).isNotNull();
	Map<String, Object> attributes = MergedAnnotation.from(component).asMap();
	Component roundTripped = MergedAnnotation.of(Component.class, attributes).synthesize();
	assertSynthesized(roundTripped);
	assertThat(roundTripped).isEqualTo(component);
}
@Test
void toStringForSynthesizedAnnotations() throws Exception {
	Method methodWithPath = WebController.class.getMethod("handleMappedWithPathAttribute");
	RequestMapping webMappingWithAliases = methodWithPath.getAnnotation(RequestMapping.class);
	assertThat(webMappingWithAliases).isNotNull();
	Method methodWithPathAndValue = WebController.class.getMethod("handleMappedWithSamePathAndValueAttributes");
	RequestMapping webMappingWithPathAndValue = methodWithPathAndValue.getAnnotation(RequestMapping.class);
	// Bug fix: previously asserted methodWithPathAndValue (a Method returned by
	// getMethod(), which can never be null) instead of the annotation itself.
	assertThat(webMappingWithPathAndValue).isNotNull();
	RequestMapping synthesizedWebMapping1 = MergedAnnotation.from(webMappingWithAliases).synthesize();
	RequestMapping synthesizedWebMapping2 = MergedAnnotation.from(webMappingWithPathAndValue).synthesize();
	// The synthesized toString() differs from the JDK's for the aliased declaration.
	assertThat(webMappingWithAliases.toString()).isNotEqualTo(synthesizedWebMapping1.toString());
	// The unsynthesized annotation for handleMappedWithSamePathAndValueAttributes()
	// should produce almost the same toString() results as synthesized annotations for
	// handleMappedWithPathAttribute() on Java 9 or higher; however, due to multiple changes
	// in the JDK's toString() implementation for annotations in JDK 9, 14, and 19,
	// we do not test the JDK implementation.
	// assertToStringForWebMappingWithPathAndValue(webMappingWithPathAndValue);
	assertToStringForWebMappingWithPathAndValue(synthesizedWebMapping1);
	assertToStringForWebMappingWithPathAndValue(synthesizedWebMapping2);
}
private void assertToStringForWebMappingWithPathAndValue(RequestMapping webMapping) {
assertThat(webMapping.toString())
.startsWith("@org.springframework.core.annotation.MergedAnnotationsTests.RequestMapping(")
.contains(
// Strings
"value={\"/test\"}", "path={\"/test\"}", "name=\"bar\"",
// Characters
"ch='X'", "chars={'X'}",
// Enums
"method={GET, POST}",
// Classes
"clazz=org.springframework.core.annotation.MergedAnnotationsTests.RequestMethod.class",
"classes={int[][].class, org.springframework.core.annotation.MergedAnnotationsTests.RequestMethod[].class}",
// Bytes
"byteValue=(byte) 0xFF", "bytes={(byte) 0xFF}",
// Shorts
"shortValue=9876", "shorts={9876}",
// Longs
"longValue=42L", "longs={42L}",
// Floats
"floatValue=3.14f", "floats={3.14f}",
// Doubles
"doubleValue=99.999d", "doubles={99.999d}"
)
.endsWith(")");
}
@Test
void equalsForSynthesizedAnnotations() throws Exception {
Method methodWithPath = WebController.class.getMethod("handleMappedWithPathAttribute");
RequestMapping webMappingWithAliases = methodWithPath.getAnnotation(RequestMapping.class);
assertThat(webMappingWithAliases).isNotNull();
Method methodWithPathAndValue = WebController.class.getMethod("handleMappedWithSamePathAndValueAttributes");
RequestMapping webMappingWithPathAndValue = methodWithPathAndValue.getAnnotation(RequestMapping.class);
assertThat(webMappingWithPathAndValue).isNotNull();
RequestMapping synthesizedWebMapping1 = MergedAnnotation.from(webMappingWithAliases).synthesize();
RequestMapping synthesizedWebMapping2 = MergedAnnotation.from(webMappingWithPathAndValue).synthesize();
// Equality amongst standard annotations
assertThat(webMappingWithAliases).isEqualTo(webMappingWithAliases);
assertThat(webMappingWithPathAndValue).isEqualTo(webMappingWithPathAndValue);
// Inequality amongst standard annotations
assertThat(webMappingWithAliases).isNotEqualTo(webMappingWithPathAndValue);
assertThat(webMappingWithPathAndValue).isNotEqualTo(webMappingWithAliases);
// Equality amongst synthesized annotations
assertThat(synthesizedWebMapping1).isEqualTo(synthesizedWebMapping1);
assertThat(synthesizedWebMapping2).isEqualTo(synthesizedWebMapping2);
assertThat(synthesizedWebMapping1).isEqualTo(synthesizedWebMapping2);
assertThat(synthesizedWebMapping2).isEqualTo(synthesizedWebMapping1);
// Equality between standard and synthesized annotations
assertThat(synthesizedWebMapping1).isEqualTo(webMappingWithPathAndValue);
assertThat(webMappingWithPathAndValue).isEqualTo(synthesizedWebMapping1);
// Inequality between standard and synthesized annotations
assertThat(synthesizedWebMapping1).isNotEqualTo(webMappingWithAliases);
assertThat(webMappingWithAliases).isNotEqualTo(synthesizedWebMapping1);
}
@Test
void hashCodeForSynthesizedAnnotations() throws Exception {
Method methodWithPath = WebController.class.getMethod("handleMappedWithPathAttribute");
RequestMapping webMappingWithAliases = methodWithPath.getAnnotation(RequestMapping.class);
assertThat(webMappingWithAliases).isNotNull();
Method methodWithPathAndValue = WebController.class.getMethod("handleMappedWithSamePathAndValueAttributes");
RequestMapping webMappingWithPathAndValue = methodWithPathAndValue.getAnnotation(RequestMapping.class);
assertThat(webMappingWithPathAndValue).isNotNull();
RequestMapping synthesizedWebMapping1 = MergedAnnotation.from(webMappingWithAliases).synthesize();
assertThat(synthesizedWebMapping1).isNotNull();
RequestMapping synthesizedWebMapping2 = MergedAnnotation.from(webMappingWithPathAndValue).synthesize();
assertThat(synthesizedWebMapping2).isNotNull();
// Equality amongst standard annotations
assertThat(webMappingWithAliases.hashCode()).isEqualTo(webMappingWithAliases.hashCode());
assertThat(webMappingWithPathAndValue.hashCode()).isEqualTo(webMappingWithPathAndValue.hashCode());
// Inequality amongst standard annotations
assertThat(webMappingWithAliases.hashCode()).isNotEqualTo(webMappingWithPathAndValue.hashCode());
assertThat(webMappingWithPathAndValue.hashCode()).isNotEqualTo(webMappingWithAliases.hashCode());
// Equality amongst synthesized annotations
assertThat(synthesizedWebMapping1.hashCode()).isEqualTo(synthesizedWebMapping1.hashCode());
assertThat(synthesizedWebMapping2.hashCode()).isEqualTo(synthesizedWebMapping2.hashCode());
assertThat(synthesizedWebMapping1.hashCode()).isEqualTo(synthesizedWebMapping2.hashCode());
assertThat(synthesizedWebMapping2.hashCode()).isEqualTo(synthesizedWebMapping1.hashCode());
// Equality between standard and synthesized annotations
assertThat(synthesizedWebMapping1.hashCode()).isEqualTo(webMappingWithPathAndValue.hashCode());
assertThat(webMappingWithPathAndValue.hashCode()).isEqualTo(synthesizedWebMapping1.hashCode());
// Inequality between standard and synthesized annotations
assertThat(synthesizedWebMapping1.hashCode()).isNotEqualTo(webMappingWithAliases.hashCode());
assertThat(webMappingWithAliases.hashCode()).isNotEqualTo(synthesizedWebMapping1.hashCode());
}
/**
* Fully reflection-based test that verifies support for synthesizing
* annotations across packages with non-public visibility of user types
* (for example, a non-public annotation that uses {@code @AliasFor}).
*/
@Test
@SuppressWarnings("unchecked")
void synthesizeNonPublicWithAttributeAliasesFromDifferentPackage() throws Exception {
Class<?> type = ClassUtils.forName(
"org.springframework.core.annotation.subpackage.NonPublicAliasedAnnotatedClass",
null);
Class<? extends Annotation> annotationType = (Class<? extends Annotation>) ClassUtils.forName(
"org.springframework.core.annotation.subpackage.NonPublicAliasedAnnotation",
null);
Annotation annotation = type.getAnnotation(annotationType);
assertThat(annotation).isNotNull();
MergedAnnotation<Annotation> mergedAnnotation = MergedAnnotation.from(annotation);
Annotation synthesizedAnnotation = mergedAnnotation.synthesize();
assertSynthesized(synthesizedAnnotation);
assertThat(mergedAnnotation.getString("name")).isEqualTo("test");
assertThat(mergedAnnotation.getString("path")).isEqualTo("/test");
assertThat(mergedAnnotation.getString("value")).isEqualTo("/test");
}
@Test
void synthesizeWithArrayOfAnnotations() {
Hierarchy hierarchy = HierarchyClass.class.getAnnotation(Hierarchy.class);
assertThat(hierarchy).isNotNull();
Hierarchy synthesizedHierarchy = MergedAnnotation.from(hierarchy).synthesize();
assertSynthesized(synthesizedHierarchy);
TestConfiguration[] configs = synthesizedHierarchy.value();
assertThat(configs).isNotNull();
assertThat(configs).allMatch(AnnotationUtils::isSynthesizedAnnotation);
assertThat(configs).extracting(TestConfiguration::value).containsExactly("A", "B");
assertThat(configs).extracting(TestConfiguration::location).containsExactly("A", "B");
TestConfiguration contextConfig = TestConfigurationClass.class.getAnnotation(TestConfiguration.class);
assertThat(contextConfig).isNotNull();
// Alter array returned from synthesized annotation
configs[0] = contextConfig;
assertThat(configs).extracting(TestConfiguration::value).containsExactly("simple.xml", "B");
// Re-retrieve the array from the synthesized annotation
configs = synthesizedHierarchy.value();
assertThat(configs).extracting(TestConfiguration::value).containsExactly("A", "B");
}
@Test
void synthesizeWithArrayOfChars() {
CharsContainer charsContainer = GroupOfCharsClass.class.getAnnotation(CharsContainer.class);
assertThat(charsContainer).isNotNull();
CharsContainer synthesizedCharsContainer = MergedAnnotation.from(charsContainer).synthesize();
assertSynthesized(synthesizedCharsContainer);
char[] chars = synthesizedCharsContainer.chars();
assertThat(chars).containsExactly('x', 'y', 'z');
// Alter array returned from synthesized annotation
chars[0] = '?';
// Re-retrieve the array from the synthesized annotation
chars = synthesizedCharsContainer.chars();
assertThat(chars).containsExactly('x', 'y', 'z');
}
@Test
void getValueWhenHasDefaultOverride() {
MergedAnnotation<?> annotation =
MergedAnnotations.from(DefaultOverrideClass.class).get(DefaultOverrideRoot.class);
// Convention-based annotation attribute overrides are no longer supported as of
// Spring Framework 7.0. Otherwise, we would expect "metameta".
assertThat(annotation.getString("text")).isEqualTo("root");
}
@Test // gh-22654
void getValueWhenHasDefaultOverrideWithImplicitAlias() {
MergedAnnotation<?> annotation1 = MergedAnnotations.from(DefaultOverrideImplicitAliasMetaClass1.class)
.get(DefaultOverrideRoot.class);
assertThat(annotation1.getString("text")).isEqualTo("alias-meta-1");
MergedAnnotation<?> annotation2 = MergedAnnotations.from(DefaultOverrideImplicitAliasMetaClass2.class)
.get(DefaultOverrideRoot.class);
assertThat(annotation2.getString("text")).isEqualTo("alias-meta-2");
}
@Test // gh-22654
void getValueWhenHasDefaultOverrideWithExplicitAlias() {
MergedAnnotation<?> annotation = MergedAnnotations.from(DefaultOverrideExplicitAliasRootMetaMetaClass.class)
.get(DefaultOverrideExplicitAliasRoot.class);
assertThat(annotation.getString("text")).isEqualTo("meta");
assertThat(annotation.getString("value")).isEqualTo("meta");
}
@Test // gh-22703
void getValueWhenThreeDeepMetaWithValue() {
MergedAnnotation<?> annotation = MergedAnnotations.from(ValueAttributeMetaMetaClass.class)
.get(ValueAttribute.class);
assertThat(annotation.getStringArray(MergedAnnotation.VALUE)).containsExactly("FromValueAttributeMeta");
}
@Test
void asAnnotationAttributesReturnsPopulatedAnnotationAttributes() {
MergedAnnotation<?> annotation = MergedAnnotations.from(SpringApplicationConfigurationClass.class)
.get(SpringApplicationConfiguration.class);
AnnotationAttributes attributes = annotation.asAnnotationAttributes(Adapt.CLASS_TO_STRING);
assertThat(attributes).containsEntry("classes", new String[] {Number.class.getName()});
assertThat(attributes.annotationType()).isEqualTo(SpringApplicationConfiguration.class);
}
// @formatter:off
@Target({ElementType.TYPE, ElementType.METHOD})
@Retention(RetentionPolicy.RUNTIME)
@Inherited
@ | hierarchies |
java | resilience4j__resilience4j | resilience4j-spring/src/main/java/io/github/resilience4j/spelresolver/SpelResolver.java | {
"start": 684,
"end": 789
} | interface ____ {
String resolve(Method method, Object[] arguments, String spelExpression);
}
| SpelResolver |
java | hibernate__hibernate-orm | tooling/metamodel-generator/src/test/java/org/hibernate/processor/test/inheritance/basic/Customer.java | {
"start": 238,
"end": 270
} | class ____ extends User {
}
| Customer |
java | spring-cloud__spring-cloud-gateway | spring-cloud-gateway-server-webmvc/src/main/java/org/springframework/cloud/gateway/server/mvc/handler/GatewayStreamingServerResponse.java | {
"start": 1800,
"end": 3383
} | class ____ extends AbstractGatewayServerResponse {
private final Consumer<StreamBuilder> streamConsumer;
private final @Nullable Duration timeout;
private GatewayStreamingServerResponse(HttpStatusCode statusCode, HttpHeaders headers,
MultiValueMap<String, Cookie> cookies, Consumer<StreamBuilder> streamConsumer, @Nullable Duration timeout) {
super(statusCode, headers, cookies);
this.streamConsumer = streamConsumer;
this.timeout = timeout;
}
static ServerResponse create(HttpStatusCode statusCode, HttpHeaders headers, MultiValueMap<String, Cookie> cookies,
Consumer<StreamBuilder> streamConsumer, @Nullable Duration timeout) {
Objects.requireNonNull(statusCode, "statusCode must not be null");
Objects.requireNonNull(headers, "headers must not be null");
Objects.requireNonNull(cookies, "cookies must not be null");
Objects.requireNonNull(streamConsumer, "streamConsumer must not be null");
return new GatewayStreamingServerResponse(statusCode, headers, cookies, streamConsumer, timeout);
}
@Override
protected @Nullable ModelAndView writeToInternal(HttpServletRequest request, HttpServletResponse response,
Context context) throws Exception {
DeferredResult<?> result;
if (this.timeout != null) {
result = new DeferredResult<>(this.timeout.toMillis());
}
else {
result = new DeferredResult<>();
}
GatewayAsyncServerResponse.writeAsync(request, response, result);
this.streamConsumer.accept(new DefaultStreamBuilder(response, context, result, this.headers()));
return null;
}
private static | GatewayStreamingServerResponse |
java | alibaba__nacos | naming/src/main/java/com/alibaba/nacos/naming/controllers/OperatorController.java | {
"start": 2683,
"end": 9147
} | class ____ {
private final SwitchManager switchManager;
private final ServerStatusManager serverStatusManager;
private final SwitchDomain switchDomain;
private final DistroMapper distroMapper;
private final ClientManager clientManager;
public OperatorController(SwitchManager switchManager, ServerStatusManager serverStatusManager,
SwitchDomain switchDomain, DistroMapper distroMapper,
ClientManager clientManager) {
this.switchManager = switchManager;
this.serverStatusManager = serverStatusManager;
this.switchDomain = switchDomain;
this.distroMapper = distroMapper;
this.clientManager = clientManager;
}
/**
* Get push metric status.
*
* @param detail whether return detail information
* @param reset whether reset metric information after return information
* @return push metric status
*/
@RequestMapping("/push/state")
@Compatibility(apiType = ApiType.ADMIN_API)
public ObjectNode pushState(@RequestParam(required = false) boolean detail,
@RequestParam(required = false) boolean reset) {
ObjectNode result = JacksonUtils.createEmptyJsonNode();
int failedPushCount = MetricsMonitor.getFailedPushMonitor().get();
int totalPushCount = MetricsMonitor.getTotalPushMonitor().get();
result.put("succeed", totalPushCount - failedPushCount);
result.put("total", totalPushCount);
if (totalPushCount > 0) {
result.put("ratio", ((float) totalPushCount - failedPushCount) / totalPushCount);
} else {
result.put("ratio", 0);
}
if (detail) {
ObjectNode detailNode = JacksonUtils.createEmptyJsonNode();
detailNode.put("avgPushCost", MetricsMonitor.getAvgPushCostMonitor().get());
detailNode.put("maxPushCost", MetricsMonitor.getMaxPushCostMonitor().get());
result.replace("detail", detailNode);
}
if (reset) {
MetricsMonitor.resetPush();
}
result.put("reset", reset);
return result;
}
/**
* Get switch information.
*
* @param request no used
* @return switchDomain
*/
@GetMapping("/switches")
@Compatibility(apiType = ApiType.ADMIN_API, alternatives = "GET ${contextPath:nacos}/v3/admin/ns/ops/switches")
public SwitchDomain switches(HttpServletRequest request) {
return switchDomain;
}
/**
* Update switch information.
*
* @param debug whether debug
* @param entry item entry of switch, {@link SwitchEntry}
* @param value switch value
* @return 'ok' if success
* @throws Exception exception
*/
@Secured(resource = "naming/switches", action = ActionTypes.WRITE)
@PutMapping("/switches")
@Compatibility(apiType = ApiType.ADMIN_API, alternatives = "PUT ${contextPath:nacos}/v3/admin/ns/ops/switches")
public String updateSwitch(@RequestParam(required = false) boolean debug, @RequestParam String entry,
@RequestParam String value) throws Exception {
switchManager.update(entry, value, debug);
return "ok";
}
/**
* Get metrics information.
*
* @param request request
* @return metrics information
*/
@GetMapping("/metrics")
@Compatibility(apiType = ApiType.OPEN_API, alternatives = "GET ${contextPath:nacos}/v3/admin/ns/ops/metrics")
public ObjectNode metrics(HttpServletRequest request) {
boolean onlyStatus = Boolean.parseBoolean(WebUtils.optional(request, "onlyStatus", "true"));
ObjectNode result = JacksonUtils.createEmptyJsonNode();
result.put("status", serverStatusManager.getServerStatus().name());
if (onlyStatus) {
return result;
}
Collection<String> allClientId = clientManager.allClientId();
int connectionBasedClient = 0;
int ephemeralIpPortClient = 0;
int persistentIpPortClient = 0;
int responsibleClientCount = 0;
int responsibleIpCount = 0;
for (String clientId : allClientId) {
if (clientId.contains(IpPortBasedClient.ID_DELIMITER)) {
if (clientId.endsWith(ClientConstants.PERSISTENT_SUFFIX)) {
persistentIpPortClient += 1;
} else {
ephemeralIpPortClient += 1;
}
} else {
connectionBasedClient += 1;
}
Client client = clientManager.getClient(clientId);
if (clientManager.isResponsibleClient(client)) {
responsibleClientCount += 1;
responsibleIpCount += client.getAllPublishedService().size();
}
}
result.put("serviceCount", MetricsMonitor.getDomCountMonitor().get());
result.put("instanceCount", MetricsMonitor.getIpCountMonitor().get());
result.put("subscribeCount", MetricsMonitor.getSubscriberCount().get());
result.put("responsibleInstanceCount", responsibleIpCount);
result.put("clientCount", allClientId.size());
result.put("connectionBasedClientCount", connectionBasedClient);
result.put("ephemeralIpPortClientCount", ephemeralIpPortClient);
result.put("persistentIpPortClientCount", persistentIpPortClient);
result.put("responsibleClientCount", responsibleClientCount);
result.put("cpu", EnvUtil.getCpu());
result.put("load", EnvUtil.getLoad());
result.put("mem", EnvUtil.getMem());
return result;
}
@GetMapping("/distro/client")
@Compatibility(apiType = ApiType.ADMIN_API, alternatives = "GET ${contextPath:nacos}/v3/admin/ns/client/distro")
public ObjectNode getResponsibleServer4Client(@RequestParam String ip, @RequestParam String port) {
ObjectNode result = JacksonUtils.createEmptyJsonNode();
String tag = ip + InternetAddressUtil.IP_PORT_SPLITER + port;
result.put("responsibleServer", distroMapper.mapSrv(tag));
return result;
}
@PutMapping("/log")
@Compatibility(apiType = ApiType.ADMIN_API, alternatives = "PUT ${contextPath:nacos}/v3/admin/ns/ops/log")
public String setLogLevel(@RequestParam String logName, @RequestParam String logLevel) {
Loggers.setLogLevel(logName, logLevel);
return "ok";
}
}
| OperatorController |
java | elastic__elasticsearch | modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RetryingInputStream.java | {
"start": 8457,
"end": 12547
} | class ____ extends SingleAttemptInputStream {
private final ResponseInputStream<GetObjectResponse> responseStream;
private final long start;
private final long end;
private final long lastOffset;
private long offset = 0;
private boolean closed;
private boolean eof;
private boolean aborted;
private S3SingleAttemptInputStream(ResponseInputStream<GetObjectResponse> responseStream, long start, long end) {
this.responseStream = responseStream;
this.start = start;
this.end = end;
lastOffset = getStreamLength(responseStream.response(), start, end);
}
@Override
public int read() throws IOException {
ensureOpen();
int result = responseStream.read();
if (result == -1) {
eof = true;
} else {
offset++;
}
return result;
}
@Override
public int read(byte[] b, int off, int len) throws IOException {
ensureOpen();
final int bytesRead = responseStream.read(b, off, len);
if (bytesRead == -1) {
eof = true;
} else {
offset += bytesRead;
}
return bytesRead;
}
private void ensureOpen() {
if (closed) {
final var message = "using " + getClass().getSimpleName() + " after close";
assert false : message;
throw new IllegalStateException(message);
}
}
@Override
public void close() throws IOException {
maybeAbort(responseStream);
try {
responseStream.close();
} finally {
closed = true;
}
}
/**
* Abort the {@link ResponseInputStream} if it wasn't read completely at the time this method is called,
* suppressing all thrown exceptions.
*/
private void maybeAbort(ResponseInputStream<?> stream) {
if (isEof()) {
return;
}
try {
if (offset < lastOffset) {
stream.abort();
aborted = true;
}
} catch (Exception e) {
logger.warn("Failed to abort stream before closing", e);
}
}
@Override
public long skip(long n) throws IOException {
// This could be optimized on a failure by re-opening stream directly to the preferred location. However, it is rarely called,
// so for now we will rely on the default implementation which just discards bytes by reading.
return super.skip(n);
}
@Override
public void reset() {
throw new UnsupportedOperationException("S3InputStream does not support seeking");
}
// exposed for testing
private boolean isEof() {
return eof || offset == lastOffset;
}
// exposed for testing
private boolean isAborted() {
// just expose whether abort() was called, we cannot tell if the stream is really aborted
return aborted;
}
// exposed for testing
private long tryGetStreamLength(GetObjectResponse response) {
return S3RetryingInputStream.tryGetStreamLength(response, start, end);
}
@Override
protected long getFirstOffset() {
return start;
}
}
// exposed for testing
boolean isEof() {
return ((S3SingleAttemptInputStream) currentStream).isEof();
}
// exposed for testing
boolean isAborted() {
return ((S3SingleAttemptInputStream) currentStream).isAborted();
}
// exposed for testing
long tryGetStreamLength(GetObjectResponse getObjectResponse) {
return ((S3SingleAttemptInputStream) currentStream).tryGetStreamLength(getObjectResponse);
}
}
| S3SingleAttemptInputStream |
java | quarkusio__quarkus | extensions/micrometer-opentelemetry/runtime/src/main/java/io/quarkus/micrometer/opentelemetry/runtime/MicrometerOtelBridgeRecorder.java | {
"start": 602,
"end": 1660
} | class ____ {
public Function<SyntheticCreationalContext<MeterRegistry>, MeterRegistry> createBridge() {
return new Function<>() {
@Override
public MeterRegistry apply(SyntheticCreationalContext<MeterRegistry> context) {
Instance<OpenTelemetry> openTelemetry = context.getInjectedReference(new TypeLiteral<>() {
});
if (openTelemetry.isUnsatisfied()) {
throw new IllegalStateException("OpenTelemetry instance not found");
}
MeterRegistry meterRegistry = OpenTelemetryMeterRegistry.builder(openTelemetry.get())
.setPrometheusMode(false)
.setMicrometerHistogramGaugesEnabled(true)
.setBaseTimeUnit(TimeUnit.MILLISECONDS)
.setClock(Clock.SYSTEM)
.build();
Metrics.addRegistry(meterRegistry);
return meterRegistry;
}
};
}
}
| MicrometerOtelBridgeRecorder |
java | apache__dubbo | dubbo-rpc/dubbo-rpc-triple/src/test/java/org/apache/dubbo/rpc/protocol/tri/rest/service/User.java | {
"start": 1019,
"end": 3996
} | class ____ {
private Long id;
private String name;
private Group group;
private long[] ids;
private List<Integer> scores;
private List<Tag> tags;
private Tag[] tagsA;
private List<Tag> tagsB = new ArrayList<>();
private Tag[] tagsC = new Tag[] {new Tag("a", "b")};
private List<Map<String, Group>> groupMaps;
private Map<String, String> features;
private Map<String, Tag> tagMap;
private Map<String, Tag> tagMapA = new HashMap<>();
private Map<Integer, Tag> tagMapB;
private Map<String, List<Group>> groupsMap;
public User() {
tagsB.add(new Tag("a", "b"));
tagMapA.put("a", new Tag("a", "b"));
}
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public Group getGroup() {
return group;
}
public void setGroup(Group group) {
this.group = group;
}
public long[] getIds() {
return ids;
}
public void setIds(long[] ids) {
this.ids = ids;
}
public List<Integer> getScores() {
return scores;
}
public void setScores(List<Integer> scores) {
this.scores = scores;
}
public List<Tag> getTags() {
return tags;
}
public void setTags(List<Tag> tags) {
this.tags = tags;
}
public Tag[] getTagsA() {
return tagsA;
}
public void setTagsA(Tag[] tagsA) {
this.tagsA = tagsA;
}
public List<Tag> getTagsB() {
return tagsB;
}
public void setTagsB(List<Tag> tagsB) {
this.tagsB = tagsB;
}
public Tag[] getTagsC() {
return tagsC;
}
public void setTagsC(Tag[] tagsC) {
this.tagsC = tagsC;
}
public List<Map<String, Group>> getGroupMaps() {
return groupMaps;
}
public void setGroupMaps(List<Map<String, Group>> groupMaps) {
this.groupMaps = groupMaps;
}
public Map<String, String> getFeatures() {
return features;
}
public void setFeatures(Map<String, String> features) {
this.features = features;
}
public Map<String, Tag> getTagMap() {
return tagMap;
}
public void setTagMap(Map<String, Tag> tagMap) {
this.tagMap = tagMap;
}
public Map<String, Tag> getTagMapA() {
return tagMapA;
}
public void setTagMapA(Map<String, Tag> tagMapA) {
this.tagMapA = tagMapA;
}
public Map<Integer, Tag> getTagMapB() {
return tagMapB;
}
public void setTagMapB(Map<Integer, Tag> tagMapB) {
this.tagMapB = tagMapB;
}
public Map<String, List<Group>> getGroupsMap() {
return groupsMap;
}
public void setGroupsMap(Map<String, List<Group>> groupsMap) {
this.groupsMap = groupsMap;
}
public static | User |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest/deployment/src/main/java/io/quarkus/resteasy/reactive/server/deployment/QuarkusServerEndpointIndexer.java | {
"start": 4982,
"end": 13057
} | class ____ extends AbstractBuilder<Builder> {
private final Capabilities capabilities;
private BuildProducer<GeneratedClassBuildItem> generatedClassBuildItemBuildProducer;
private ResteasyReactiveRecorder resteasyReactiveRecorder;
private DefaultProducesHandler defaultProducesHandler = DefaultProducesHandler.Noop.INSTANCE;
public Predicate<String> applicationClassPredicate;
public Builder(Capabilities capabilities) {
this.capabilities = capabilities;
}
@Override
public QuarkusServerEndpointIndexer build() {
return new QuarkusServerEndpointIndexer(this);
}
public Builder setGeneratedClassBuildItemBuildProducer(
BuildProducer<GeneratedClassBuildItem> generatedClassBuildItemBuildProducer) {
this.generatedClassBuildItemBuildProducer = generatedClassBuildItemBuildProducer;
return this;
}
public Builder setApplicationClassPredicate(Predicate<String> applicationClassPredicate) {
this.applicationClassPredicate = applicationClassPredicate;
return this;
}
public Builder setResteasyReactiveRecorder(ResteasyReactiveRecorder resteasyReactiveRecorder) {
this.resteasyReactiveRecorder = resteasyReactiveRecorder;
return this;
}
public Builder setDefaultProducesHandler(DefaultProducesHandler defaultProducesHandler) {
this.defaultProducesHandler = defaultProducesHandler;
return this;
}
}
@Override
protected void handleAdditionalMethodProcessing(ServerResourceMethod method, ClassInfo currentClassInfo,
MethodInfo info, AnnotationStore annotationStore) {
super.handleAdditionalMethodProcessing(method, currentClassInfo, info, annotationStore);
if (!capabilities.isCapabilityWithPrefixMissing("io.quarkus.resteasy.reactive.json")) {
return;
}
warnAboutMissingJsonProviderIfNeeded(method, info, jsonDefaultProducersHandler, currentDefaultProducesContext);
}
@Override
public boolean additionalRegisterClassForReflectionCheck(ResourceMethodCallbackEntry entry) {
return checkBodyParameterMessageBodyReader(entry) || checkReturnTypeMessageBodyWriter(entry);
}
/**
* Check whether the Resource Method has a body parameter for which there exists a matching
* {@link jakarta.ws.rs.ext.MessageBodyReader}
* that is not a {@link org.jboss.resteasy.reactive.server.spi.ServerMessageBodyReader}.
* In this case the Resource Class needs to be registered for reflection because the
* {@link jakarta.ws.rs.ext.MessageBodyReader#isReadable(Class, java.lang.reflect.Type, Annotation[], MediaType)}
* method expects to be passed the method annotations.
*/
private boolean checkBodyParameterMessageBodyReader(ResourceMethodCallbackEntry entry) {
MethodParameter[] parameters = entry.getResourceMethod().getParameters();
if (parameters.length == 0) {
return false;
}
MethodParameter bodyParameter = null;
for (MethodParameter parameter : parameters) {
if (parameter.parameterType == ParameterType.BODY) {
bodyParameter = parameter;
break;
}
}
if (bodyParameter == null) {
return false;
}
String parameterClassName = bodyParameter.getDeclaredType();
List<ScannedSerializer> readers = getSerializerScanningResult().getReaders();
for (ScannedSerializer reader : readers) {
if (isSubclassOf(parameterClassName, reader.getHandledClassName()) && !isServerMessageBodyReader(
reader.getClassInfo())) {
return true;
}
}
return false;
}
/**
* Check whether the Resource Method has a return type for which there exists a matching
* {@link jakarta.ws.rs.ext.MessageBodyWriter}
* that is not a {@link org.jboss.resteasy.reactive.server.spi.ServerMessageBodyWriter}.
* In this case the Resource Class needs to be registered for reflection because the
* {@link jakarta.ws.rs.ext.MessageBodyWriter#isWriteable(Class, java.lang.reflect.Type, Annotation[], MediaType)}
* method expects to be passed the method annotations.
*/
private boolean checkReturnTypeMessageBodyWriter(ResourceMethodCallbackEntry entry) {
Type returnType = entry.getMethodInfo().returnType();
String returnTypeName;
switch (returnType.kind()) {
case CLASS:
returnTypeName = returnType.asClassType().name().toString();
break;
case PARAMETERIZED_TYPE:
returnTypeName = returnType.asParameterizedType().name().toString();
break;
default:
returnTypeName = null;
}
if (returnTypeName == null) {
return false;
}
List<ScannedSerializer> writers = getSerializerScanningResult().getWriters();
for (ScannedSerializer writer : writers) {
if (isSubclassOf(returnTypeName, writer.getHandledClassName())
&& !isServerMessageBodyWriter(writer.getClassInfo())) {
return true;
}
}
return false;
}
private boolean isSubclassOf(String className, String parentName) {
if (className.equals(parentName)) {
return true;
}
ClassInfo classByName = index.getClassByName(className);
if ((classByName == null) || (classByName.superName() == null)) {
return false;
}
try {
return JandexUtil.isSubclassOf(index, classByName,
DotName.createSimple(parentName));
} catch (BuildException e) {
return false;
}
}
private boolean isServerMessageBodyReader(ClassInfo classInfo) {
return index.getAllKnownImplementors(SERVER_MESSAGE_BODY_READER).contains(classInfo);
}
private boolean isServerMessageBodyWriter(ClassInfo classInfo) {
return index.getAllKnownImplementors(SERVER_MESSAGE_BODY_WRITER).contains(classInfo);
}
@Override
protected void logMissingJsonWarning(MethodInfo info) {
LOGGER.warnf("Quarkus detected the use of JSON in JAX-RS method '" + info.declaringClass().name() + "#"
+ info.name()
+ "' but no JSON extension has been added. Consider adding 'quarkus-rest-jackson' (recommended) or 'quarkus-rest-jsonb'.");
}
@Override
protected void warnAboutMissUsedBodyParameter(DotName httpMethod, MethodInfo methodInfo) {
// This indexer also picks up REST client methods as well as there is no bulletproof way of distinguishing the two.
// That is why we check for client specific annotations here
if (methodInfo.hasAnnotation(REST_CLIENT_NOT_BODY_ANNOTATION)) {
return;
}
super.warnAboutMissUsedBodyParameter(httpMethod, methodInfo);
}
/**
* At this point we know exactly which resources will require field injection and therefore are required to be
* {@link RequestScoped}.
* We can't change anything CDI related at this point (because it would create build cycles), so all we can do
* is fail the build if the resource has not already been handled automatically (by the best effort approach performed
* elsewhere)
* or it's not manually set to be {@link RequestScoped}.
*/
@Override
protected void verifyClassThatRequiresFieldInjection(ClassInfo classInfo) {
if (!alreadyHandledRequestScopedResources.contains(classInfo.name())) {
BuiltinScope scope = BuiltinScope.from(classInfo);
if (BuiltinScope.REQUEST != scope) {
throw new DeploymentException(
"Resource classes that use field injection for REST parameters can only be @RequestScoped. Offending | Builder |
java | micronaut-projects__micronaut-core | http-netty/src/main/java/io/micronaut/http/netty/body/NettyBodyAdapter.java | {
"start": 1140,
"end": 1938
} | class ____ extends AbstractBodyAdapter {
private final EventLoopFlow eventLoopFlow;
NettyBodyAdapter(EventLoop eventLoop, Publisher<ReadBuffer> source, @Nullable Runnable onDiscard) {
super(source, onDiscard);
this.eventLoopFlow = new EventLoopFlow(eventLoop);
}
@Override
public void onNext(ReadBuffer bytes) {
if (eventLoopFlow.executeNow(() -> super.onNext(bytes))) {
super.onNext(bytes);
}
}
@Override
public void onError(Throwable t) {
if (eventLoopFlow.executeNow(() -> super.onError(t))) {
super.onError(t);
}
}
@Override
public void onComplete() {
if (eventLoopFlow.executeNow(super::onComplete)) {
super.onComplete();
}
}
}
| NettyBodyAdapter |
java | spring-projects__spring-data-jpa | spring-data-jpa/src/test/java/org/springframework/data/jpa/repository/query/NamedQueryUnitTests.java | {
"start": 2070,
"end": 4669
} | class ____ {
private static final JpaQueryConfiguration CONFIG = new JpaQueryConfiguration(QueryRewriterProvider.simple(),
QueryEnhancerSelector.DEFAULT_SELECTOR, ValueExpressionDelegate.create(), EscapeCharacter.DEFAULT);
@Mock RepositoryMetadata metadata;
@Mock QueryExtractor extractor;
@Mock EntityManager em;
@Mock EntityManagerFactory emf;
@Mock Metamodel metamodel;
private ProjectionFactory projectionFactory = new SpelAwareProxyProjectionFactory();
private Method method;
@BeforeEach
@SuppressWarnings({ "unchecked", "rawtypes" })
void setUp() throws SecurityException, NoSuchMethodException {
method = SampleRepository.class.getMethod("foo", Pageable.class);
when(metadata.getDomainType()).thenReturn((Class) String.class);
when(metadata.getDomainTypeInformation()).thenReturn((TypeInformation) TypeInformation.of(String.class));
when(metadata.getReturnedDomainClass(method)).thenReturn((Class) String.class);
when(metadata.getReturnType(any(Method.class)))
.thenAnswer(invocation -> TypeInformation.fromReturnTypeOf(invocation.getArgument(0)));
when(em.getMetamodel()).thenReturn(metamodel);
when(em.getEntityManagerFactory()).thenReturn(emf);
when(em.getDelegate()).thenReturn(em);
when(emf.createEntityManager()).thenReturn(em);
}
@Test
void rejectsPersistenceProviderIfIncapableOfExtractingQueriesAndPagebleBeingUsed() {
when(extractor.canExtractQuery()).thenReturn(false);
JpaQueryMethod queryMethod = new JpaQueryMethod(method, metadata, projectionFactory, extractor);
when(em.createNamedQuery(queryMethod.getNamedCountQueryName())).thenThrow(new IllegalArgumentException());
assertThatExceptionOfType(QueryCreationException.class)
.isThrownBy(() -> NamedQuery.lookupFrom(queryMethod, em, CONFIG));
}
@Test // DATAJPA-142
@SuppressWarnings("unchecked")
void doesNotRejectPersistenceProviderIfNamedCountQueryIsAvailable() {
when(extractor.canExtractQuery()).thenReturn(false);
JpaQueryMethod queryMethod = new JpaQueryMethod(method, metadata, projectionFactory, extractor);
TypedQuery<Long> countQuery = mock(TypedQuery.class);
when(em.createNamedQuery(eq(queryMethod.getNamedCountQueryName()), eq(Long.class))).thenReturn(countQuery);
NamedQuery query = (NamedQuery) NamedQuery.lookupFrom(queryMethod, em, CONFIG);
query.doCreateCountQuery(new JpaParametersParameterAccessor(queryMethod.getParameters(), new Object[1]));
verify(em, times(1)).createNamedQuery(queryMethod.getNamedCountQueryName(), Long.class);
verify(em, never()).createQuery(any(String.class), eq(Long.class));
}
| NamedQueryUnitTests |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/annotation/issue50/Issue50Tests.java | {
"start": 2152,
"end": 4166
} | class ____ {
@Autowired
private AuthenticationManager authenticationManager;
@Autowired
private UserRepository userRepo;
@BeforeEach
public void setup() {
SecurityContextHolder.getContext()
.setAuthentication(new TestingAuthenticationToken("test", null, "ROLE_ADMIN"));
}
@AfterEach
public void cleanup() {
SecurityContextHolder.clearContext();
}
@Test
// https://github.com/spring-projects/spring-security-javaconfig/issues/50
public void loadWhenGlobalMethodSecurityConfigurationThenAuthenticationManagerLazy() {
// no exception
}
@Test
public void authenticateWhenMissingUserThenUsernameNotFoundException() {
assertThatExceptionOfType(UsernameNotFoundException.class).isThrownBy(() -> this.authenticationManager
.authenticate(UsernamePasswordAuthenticationToken.unauthenticated("test", "password")));
}
@Test
public void authenticateWhenInvalidPasswordThenBadCredentialsException() {
this.userRepo.save(User.withUsernameAndPassword("test", "password"));
assertThatExceptionOfType(BadCredentialsException.class).isThrownBy(() -> this.authenticationManager
.authenticate(UsernamePasswordAuthenticationToken.unauthenticated("test", "invalid")));
}
@Test
public void authenticateWhenValidUserThenAuthenticates() {
this.userRepo.save(User.withUsernameAndPassword("test", "password"));
Authentication result = this.authenticationManager
.authenticate(UsernamePasswordAuthenticationToken.unauthenticated("test", "password"));
assertThat(result.getName()).isEqualTo("test");
}
@Test
public void globalMethodSecurityIsEnabledWhenNotAllowedThenAccessDenied() {
SecurityContextHolder.getContext().setAuthentication(new TestingAuthenticationToken("test", null, "ROLE_USER"));
this.userRepo.save(User.withUsernameAndPassword("denied", "password"));
assertThatExceptionOfType(AccessDeniedException.class).isThrownBy(() -> this.authenticationManager
.authenticate(UsernamePasswordAuthenticationToken.unauthenticated("test", "password")));
}
}
| Issue50Tests |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/DefaultMetricsSystem.java | {
"start": 1629,
"end": 4633
} | enum ____ {
INSTANCE; // the singleton
private AtomicReference<MetricsSystem> impl =
new AtomicReference<MetricsSystem>(new MetricsSystemImpl());
@VisibleForTesting
volatile boolean miniClusterMode = false;
transient final UniqueNames mBeanNames = new UniqueNames();
transient final UniqueNames sourceNames = new UniqueNames();
/**
* Convenience method to initialize the metrics system
* @param prefix for the metrics system configuration
* @return the metrics system instance
*/
public static MetricsSystem initialize(String prefix) {
return INSTANCE.init(prefix);
}
MetricsSystem init(String prefix) {
return impl.get().init(prefix);
}
/**
* @return the metrics system object
*/
public static MetricsSystem instance() {
return INSTANCE.getImpl();
}
/**
* Shutdown the metrics system
*/
public static void shutdown() {
INSTANCE.shutdownInstance();
}
void shutdownInstance() {
boolean last = impl.get().shutdown();
if (last) synchronized(this) {
mBeanNames.map.clear();
sourceNames.map.clear();
}
}
@InterfaceAudience.Private
public static MetricsSystem setInstance(MetricsSystem ms) {
return INSTANCE.setImpl(ms);
}
MetricsSystem setImpl(MetricsSystem ms) {
return impl.getAndSet(ms);
}
MetricsSystem getImpl() { return impl.get(); }
@VisibleForTesting
public static void setMiniClusterMode(boolean choice) {
INSTANCE.miniClusterMode = choice;
}
@VisibleForTesting
public static boolean inMiniClusterMode() {
return INSTANCE.miniClusterMode;
}
@InterfaceAudience.Private
public static ObjectName newMBeanName(String name) {
return INSTANCE.newObjectName(name);
}
@InterfaceAudience.Private
public static void removeMBeanName(ObjectName name) {
INSTANCE.removeObjectName(name.toString());
}
@InterfaceAudience.Private
public static void removeSourceName(String name) {
INSTANCE.removeSource(name);
}
@InterfaceAudience.Private
public static String sourceName(String name, boolean dupOK) {
return INSTANCE.newSourceName(name, dupOK);
}
synchronized ObjectName newObjectName(String name) {
try {
if (mBeanNames.map.containsKey(name) && !miniClusterMode) {
throw new MetricsException(name +" already exists!");
}
return new ObjectName(mBeanNames.uniqueName(name));
} catch (Exception e) {
throw new MetricsException(e);
}
}
synchronized void removeObjectName(String name) {
mBeanNames.map.remove(name);
}
synchronized void removeSource(String name) {
sourceNames.map.remove(name);
}
synchronized String newSourceName(String name, boolean dupOK) {
if (sourceNames.map.containsKey(name)) {
if (dupOK) {
return name;
} else if (!miniClusterMode) {
throw new MetricsException("Metrics source "+ name +" already exists!");
}
}
return sourceNames.uniqueName(name);
}
}
| DefaultMetricsSystem |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/internal/SessionFactoryObserverChain.java | {
"start": 296,
"end": 1437
} | class ____ implements SessionFactoryObserver {
private List<SessionFactoryObserver> observers;
void addObserver(SessionFactoryObserver observer) {
if ( observers == null ) {
observers = new ArrayList<>();
}
observers.add( observer );
}
@Override
public void sessionFactoryCreated(SessionFactory factory) {
if ( observers == null ) {
return;
}
for ( SessionFactoryObserver observer : observers ) {
observer.sessionFactoryCreated( factory );
}
}
@Override
public void sessionFactoryClosing(SessionFactory factory) {
if ( observers == null ) {
return;
}
//notify in reverse order of create notification
int size = observers.size();
for (int index = size - 1 ; index >= 0 ; index--) {
observers.get( index ).sessionFactoryClosing( factory );
}
}
@Override
public void sessionFactoryClosed(SessionFactory factory) {
if ( observers == null ) {
return;
}
//notify in reverse order of create notification
int size = observers.size();
for (int index = size - 1 ; index >= 0 ; index--) {
observers.get( index ).sessionFactoryClosed( factory );
}
}
}
| SessionFactoryObserverChain |
java | elastic__elasticsearch | x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanhTests.java | {
"start": 807,
"end": 1583
} | class ____ extends AbstractScalarFunctionTestCase {
public TanhTests(@Name("TestCase") Supplier<TestCaseSupplier.TestCase> testCaseSupplier) {
this.testCase = testCaseSupplier.get();
}
@ParametersFactory
public static Iterable<Object[]> parameters() {
List<TestCaseSupplier> suppliers = TestCaseSupplier.forUnaryCastingToDouble(
"TanhEvaluator",
"val",
Math::tanh,
Double.NEGATIVE_INFINITY,
Double.POSITIVE_INFINITY,
List.of()
);
return parameterSuppliersFromTypedDataWithDefaultChecks(true, suppliers);
}
@Override
protected Expression build(Source source, List<Expression> args) {
return new Tanh(source, args.get(0));
}
}
| TanhTests |
java | spring-projects__spring-boot | core/spring-boot-autoconfigure/src/test/java/org/springframework/boot/autoconfigure/condition/ConditionalOnSingleCandidateTests.java | {
"start": 9099,
"end": 9250
} | class ____ {
@Bean
@Primary
String bravo() {
return "bravo";
}
}
@Configuration(proxyBeanMethods = false)
static | BravoPrimaryConfiguration |
java | quarkusio__quarkus | extensions/mongodb-client/deployment/src/test/java/io/quarkus/mongodb/MongoMetricsTest.java | {
"start": 513,
"end": 2187
} | class ____ extends MongoTestBase {
@Inject
MongoClient client;
@Inject
MeterRegistry meterRegistry;
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.withApplicationRoot(jar -> jar.addClasses(MongoTestBase.class))
.withConfigurationResource("application-metrics-mongo.properties");
@AfterEach
void cleanup() {
if (client != null) {
client.close();
}
}
@Test
void testMetricsInitialization() {
// Just need to execute something so that a connection is opened
client.listDatabaseNames().first();
assertThat(getMetric("mongodb.driver.pool.size")).isOne();
assertThat(getMetric("mongodb.driver.commands")).isOne();
assertThat(getMetric("mongodb.driver.pool.checkedout")).isZero();
client.close();
assertThat(getMetric("mongodb.driver.pool.size")).isNull();
assertThat(getMetric("mongodb.driver.pool.checkedout")).isNull();
// doing this here instead of in another method in order to avoid messing with the initialization stats
assertThat(Arc.container().instance(MongoClient.class).get()).isNotNull();
assertThat(Arc.container().instance(ReactiveMongoClient.class).get()).isNull();
}
private Double getMetric(String metricName) {
Meter metric = meterRegistry.getMeters()
.stream()
.filter(mtr -> mtr.getId().getName().contains(metricName))
.findFirst()
.orElse(null);
return metric == null ? null : metric.measure().iterator().next().getValue();
}
}
| MongoMetricsTest |
java | spring-projects__spring-boot | module/spring-boot-health/src/test/java/org/springframework/boot/health/registry/DefaultReactiveHealthContributorRegistryTests.java | {
"start": 1282,
"end": 2107
} | class ____
extends AbstractHealthContributorRegistryTests<ReactiveHealthContributor, ReactiveHealthContributors.Entry> {
@Override
protected AbstractRegistry<ReactiveHealthContributor, Entry> createRegistry(
Collection<? extends HealthContributorNameValidator> nameValidators,
@Nullable Consumer<BiConsumer<String, ReactiveHealthContributor>> initialRegistrations) {
return new DefaultReactiveHealthContributorRegistry(nameValidators, initialRegistrations);
}
@Override
protected ReactiveHealthContributor mockHealthIndicator() {
return mock(ReactiveHealthIndicator.class);
}
@Override
protected String name(Entry entry) {
return entry.name();
}
@Override
protected ReactiveHealthContributor contributor(Entry entry) {
return entry.contributor();
}
}
| DefaultReactiveHealthContributorRegistryTests |
java | spring-projects__spring-security | acl/src/main/java/org/springframework/security/acls/model/SidRetrievalStrategy.java | {
"start": 775,
"end": 928
} | interface ____ provides an ability to determine the {@link Sid} instances
* applicable for an {@link Authentication}.
*
* @author Ben Alex
*/
public | that |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/internal/dates/Dates_assertIsInSameSecondAs_Test.java | {
"start": 1584,
"end": 4935
} | class ____ extends DatesBaseTest {
@Override
protected void initActualDate() {
actual = parseDatetime("2011-01-01T03:15:05");
}
@Test
void should_fail_if_actual_is_not_in_same_second_as_given_date() {
AssertionInfo info = someInfo();
Date other = parseDatetime("2011-01-01T03:15:02");
Throwable error = catchThrowable(() -> dates.assertIsInSameSecondAs(info, actual, other));
assertThat(error).isInstanceOf(AssertionError.class);
verify(failures).failure(info, shouldBeInSameSecond(actual, other));
}
@Test
void should_fail_if_actual_is_null() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> dates.assertIsInSameSecondAs(someInfo(), null, new Date()))
.withMessage(actualIsNull());
}
@Test
void should_throw_error_if_given_date_is_null() {
assertThatNullPointerException().isThrownBy(() -> dates.assertIsInSameSecondAs(someInfo(), actual, null))
.withMessage(dateToCompareActualWithIsNull());
}
@Test
void should_pass_if_actual_is_in_same_second_as_given_date() {
Date other = parseDatetime("2011-01-01T03:15:05");
dates.assertIsInSameSecondAs(someInfo(), actual, other);
dates.assertIsInSameSecondAs(someInfo(), actual, new Date(other.getTime() + 999));
}
@Test
void should_fail_if_actual_is_not_in_same_second_as_given_date_whatever_custom_comparison_strategy_is() {
AssertionInfo info = someInfo();
Date other = parseDatetime("2011-01-01T03:15:02");
Throwable error = catchThrowable(() -> datesWithCustomComparisonStrategy.assertIsInSameSecondAs(info, actual, other));
assertThat(error).isInstanceOf(AssertionError.class);
verify(failures).failure(info, shouldBeInSameSecond(actual, other));
}
@Test
void should_fail_if_actual_is_null_whatever_custom_comparison_strategy_is() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> datesWithCustomComparisonStrategy.assertIsInSameSecondAs(someInfo(),
null,
new Date()))
.withMessage(actualIsNull());
}
@Test
void should_throw_error_if_given_date_is_null_whatever_custom_comparison_strategy_is() {
assertThatNullPointerException().isThrownBy(() -> datesWithCustomComparisonStrategy.assertIsInSameSecondAs(someInfo(),
actual,
null))
.withMessage(dateToCompareActualWithIsNull());
}
@Test
void should_pass_if_actual_is_in_same_second_as_given_date_whatever_custom_comparison_strategy_is() {
Date other = parseDatetime("2011-01-01T03:15:05");
datesWithCustomComparisonStrategy.assertIsInSameSecondAs(someInfo(), actual, other);
datesWithCustomComparisonStrategy.assertIsInSameSecondAs(someInfo(), actual, new Date(other.getTime() + 999));
}
}
| Dates_assertIsInSameSecondAs_Test |
java | elastic__elasticsearch | x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/TransportExecuteEnrichPolicyAction.java | {
"start": 1498,
"end": 3045
} | class ____ extends TransportMasterNodeAction<
ExecuteEnrichPolicyAction.Request,
ExecuteEnrichPolicyAction.Response> {
private final EnrichPolicyExecutor executor;
private final ProjectResolver projectResolver;
@Inject
public TransportExecuteEnrichPolicyAction(
TransportService transportService,
ClusterService clusterService,
ThreadPool threadPool,
ActionFilters actionFilters,
EnrichPolicyExecutor enrichPolicyExecutor,
ProjectResolver projectResolver
) {
super(
ExecuteEnrichPolicyAction.NAME,
transportService,
clusterService,
threadPool,
actionFilters,
ExecuteEnrichPolicyAction.Request::new,
ExecuteEnrichPolicyAction.Response::new,
EsExecutors.DIRECT_EXECUTOR_SERVICE
);
this.executor = enrichPolicyExecutor;
this.projectResolver = projectResolver;
}
@Override
protected void masterOperation(
Task task,
ExecuteEnrichPolicyAction.Request request,
ClusterState state,
ActionListener<ExecuteEnrichPolicyAction.Response> listener
) {
executor.coordinatePolicyExecution(request, listener);
}
@Override
protected ClusterBlockException checkBlock(ExecuteEnrichPolicyAction.Request request, ClusterState state) {
return state.blocks().globalBlockedException(projectResolver.getProjectId(), ClusterBlockLevel.METADATA_READ);
}
}
| TransportExecuteEnrichPolicyAction |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/core/ResolvableType.java | {
"start": 28631,
"end": 30676
} | class ____ use if resolution fails
* @return an array of resolved generic parameters
* @see #getGenerics()
* @see #resolve()
*/
public Class<?>[] resolveGenerics(Class<?> fallback) {
ResolvableType[] generics = getGenerics();
Class<?>[] resolvedGenerics = new Class<?>[generics.length];
for (int i = 0; i < generics.length; i++) {
resolvedGenerics[i] = generics[i].resolve(fallback);
}
return resolvedGenerics;
}
/**
* Convenience method that will {@link #getGeneric(int...) get} and
* {@link #resolve() resolve} a specific generic parameter.
* @param indexes the indexes that refer to the generic parameter
* (can be omitted to return the first generic)
* @return a resolved {@link Class} or {@code null}
* @see #getGeneric(int...)
* @see #resolve()
*/
public @Nullable Class<?> resolveGeneric(int... indexes) {
return getGeneric(indexes).resolve();
}
/**
* Resolve this type to a {@link java.lang.Class}, returning {@code null}
* if the type cannot be resolved. This method will consider bounds of
* {@link TypeVariable TypeVariables} and {@link WildcardType WildcardTypes} if
* direct resolution fails; however, bounds of {@code Object.class} will be ignored.
* <p>If this method returns a non-null {@code Class} and {@link #hasGenerics()}
* returns {@code false}, the given type effectively wraps a plain {@code Class},
* allowing for plain {@code Class} processing if desirable.
* @return the resolved {@link Class}, or {@code null} if not resolvable
* @see #resolve(Class)
* @see #resolveGeneric(int...)
* @see #resolveGenerics()
*/
public @Nullable Class<?> resolve() {
return this.resolved;
}
/**
* Resolve this type to a {@link java.lang.Class}, returning the specified
* {@code fallback} if the type cannot be resolved. This method will consider bounds
* of {@link TypeVariable TypeVariables} and {@link WildcardType WildcardTypes} if
* direct resolution fails; however, bounds of {@code Object.class} will be ignored.
* @param fallback the fallback | to |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/resources/OverwriteParam.java | {
"start": 889,
"end": 1561
} | class ____ extends BooleanParam {
/** Parameter name. */
public static final String NAME = "overwrite";
/** Default parameter value. */
public static final String DEFAULT = FALSE;
private static final Domain DOMAIN = new Domain(NAME);
/**
* Constructor.
* @param value the parameter value.
*/
public OverwriteParam(final Boolean value) {
super(DOMAIN, value);
}
/**
* Constructor.
* @param str a string representation of the parameter value.
*/
public OverwriteParam(final String str) {
super(DOMAIN, DOMAIN.parse(str == null ? DEFAULT : str));
}
@Override
public String getName() {
return NAME;
}
}
| OverwriteParam |
java | spring-projects__spring-data-jpa | spring-data-jpa/src/test/java/org/springframework/data/jpa/repository/AotUserRepositoryTests.java | {
"start": 2702,
"end": 4442
} | class ____ {
@PersistenceContext EntityManager entityManager;
@Autowired ApplicationContext applicationContext;
@Bean
public EvaluationContextExtension sampleEvaluationContextExtension() {
return new SampleEvaluationContextExtension();
}
@Bean
static AotFragmentTestConfigurationSupport aot() {
return new AotFragmentTestConfigurationSupport(UserRepository.class, SampleConfig.class, false,
UserRepositoryImpl.class);
}
@Bean
public UserRepository userRepository(BeanFactory beanFactory) throws Exception {
ExtensionAwareEvaluationContextProvider evaluationContextProvider = new ExtensionAwareEvaluationContextProvider(
applicationContext);
JpaRepositoryFactoryBean<UserRepository, User, Integer> factory = new JpaRepositoryFactoryBean<>(
UserRepository.class);
factory.setEntityManager(entityManager);
factory.setBeanFactory(applicationContext);
factory
.setCustomImplementation(new UserRepositoryImpl(new DefaultJpaContext(Collections.singleton(entityManager))));
factory.setRepositoryFragments(RepositoryComposition.RepositoryFragments.just(beanFactory.getBean("fragment")));
factory.setNamedQueries(namedQueries());
factory.setEvaluationContextProvider(evaluationContextProvider);
factory.afterPropertiesSet();
return factory.getObject();
}
@Bean
public GreetingsFrom greetingsFrom() {
return new GreetingsFrom();
}
private NamedQueries namedQueries() throws IOException {
PropertiesFactoryBean factory = new PropertiesFactoryBean();
factory.setLocation(new ClassPathResource("META-INF/jpa-named-queries.properties"));
factory.afterPropertiesSet();
return new PropertiesBasedNamedQueries(factory.getObject());
}
}
}
| Config |
java | apache__kafka | streams/integration-tests/src/test/java/org/apache/kafka/streams/integration/KafkaStreamsTelemetryIntegrationTest.java | {
"start": 4597,
"end": 30471
} | class ____ {
private String appId;
private String inputTopicTwoPartitions;
private String outputTopicTwoPartitions;
private String inputTopicOnePartition;
private String outputTopicOnePartition;
private String globalStoreTopic;
private Uuid globalStoreConsumerInstanceId;
private Properties streamsApplicationProperties = new Properties();
private Properties streamsSecondApplicationProperties = new Properties();
private KeyValueIterator<String, String> globalStoreIterator;
private static EmbeddedKafkaCluster cluster;
private static final List<TestingMetricsInterceptor> INTERCEPTING_CONSUMERS = new ArrayList<>();
private static final List<TestingMetricsInterceptingAdminClient> INTERCEPTING_ADMIN_CLIENTS = new ArrayList<>();
private static final int NUM_BROKERS = 3;
private static final int FIRST_INSTANCE_CLIENT = 0;
private static final int SECOND_INSTANCE_CLIENT = 1;
private static final Logger LOG = LoggerFactory.getLogger(KafkaStreamsTelemetryIntegrationTest.class);
static Stream<Arguments> recordingLevelParameters() {
return Stream.of(
Arguments.of("INFO", "classic"),
Arguments.of("DEBUG", "classic"),
Arguments.of("TRACE", "classic"),
Arguments.of("INFO", "streams"),
Arguments.of("DEBUG", "streams"),
Arguments.of("TRACE", "streams")
);
}
@BeforeAll
public static void startCluster() throws IOException {
final Properties properties = new Properties();
properties.put("metric.reporters", TelemetryPluginWithExporter.class.getName());
cluster = new EmbeddedKafkaCluster(NUM_BROKERS, properties);
cluster.start();
}
@BeforeEach
public void setUp(final TestInfo testInfo) throws InterruptedException {
appId = safeUniqueTestName(testInfo);
inputTopicTwoPartitions = appId + "-input-two";
outputTopicTwoPartitions = appId + "-output-two";
inputTopicOnePartition = appId + "-input-one";
outputTopicOnePartition = appId + "-output-one";
globalStoreTopic = appId + "-global-store";
cluster.createTopic(inputTopicTwoPartitions, 2, 1);
cluster.createTopic(outputTopicTwoPartitions, 2, 1);
cluster.createTopic(inputTopicOnePartition, 1, 1);
cluster.createTopic(outputTopicOnePartition, 1, 1);
cluster.createTopic(globalStoreTopic, 2, 1);
}
@AfterAll
public static void closeCluster() {
cluster.stop();
}
@AfterEach
public void tearDown() throws Exception {
INTERCEPTING_CONSUMERS.clear();
INTERCEPTING_ADMIN_CLIENTS.clear();
IntegrationTestUtils.purgeLocalStreamsState(streamsApplicationProperties);
if (!streamsSecondApplicationProperties.isEmpty()) {
IntegrationTestUtils.purgeLocalStreamsState(streamsSecondApplicationProperties);
}
if (globalStoreIterator != null) {
globalStoreIterator.close();
}
}
@ParameterizedTest
@MethodSource("recordingLevelParameters")
public void shouldPushGlobalThreadMetricsToBroker(final String recordingLevel, final String groupProtocol) throws Exception {
streamsApplicationProperties = props(groupProtocol);
streamsApplicationProperties.put(StreamsConfig.METRICS_RECORDING_LEVEL_CONFIG, recordingLevel);
final Topology topology = simpleTopology(true);
subscribeForStreamsMetrics();
try (final KafkaStreams streams = new KafkaStreams(topology, streamsApplicationProperties)) {
IntegrationTestUtils.startApplicationAndWaitUntilRunning(streams);
final ClientInstanceIds clientInstanceIds = streams.clientInstanceIds(Duration.ofSeconds(60));
for (final Map.Entry<String, Uuid> instanceId : clientInstanceIds.consumerInstanceIds().entrySet()) {
final String instanceIdKey = instanceId.getKey();
if (instanceIdKey.endsWith("GlobalStreamThread-global-consumer")) {
globalStoreConsumerInstanceId = instanceId.getValue();
}
}
assertNotNull(globalStoreConsumerInstanceId);
LOG.info("Global consumer instance id {}", globalStoreConsumerInstanceId);
TestUtils.waitForCondition(
() -> !TelemetryPluginWithExporter.SUBSCRIBED_METRICS.getOrDefault(globalStoreConsumerInstanceId, Collections.emptyList()).isEmpty(),
30_000,
"Never received subscribed metrics"
);
final List<String> expectedGlobalMetrics = streams.metrics().values().stream().map(Metric::metricName)
.filter(metricName -> metricName.tags().containsKey("thread-id") &&
metricName.tags().get("thread-id").endsWith("-GlobalStreamThread")).map(mn -> {
final String name = mn.name().replace('-', '.');
final String group = mn.group().replace("-metrics", "").replace('-', '.');
return "org.apache.kafka." + group + "." + name;
}).filter(name -> !name.equals("org.apache.kafka.stream.thread.state"))// telemetry reporter filters out string metrics
.sorted().toList();
final List<String> actualGlobalMetrics = new ArrayList<>(TelemetryPluginWithExporter.SUBSCRIBED_METRICS.get(globalStoreConsumerInstanceId));
assertEquals(expectedGlobalMetrics, actualGlobalMetrics);
}
}
@ParameterizedTest
@MethodSource("recordingLevelParameters")
public void shouldPushMetricsToBroker(final String recordingLevel, final String groupProtocol) throws Exception {
// End-to-end test validating metrics pushed to broker
streamsApplicationProperties = props(groupProtocol);
streamsApplicationProperties.put(StreamsConfig.METRICS_RECORDING_LEVEL_CONFIG, recordingLevel);
final Topology topology = simpleTopology(false);
subscribeForStreamsMetrics();
try (final KafkaStreams streams = new KafkaStreams(topology, streamsApplicationProperties)) {
IntegrationTestUtils.startApplicationAndWaitUntilRunning(streams);
final ClientInstanceIds clientInstanceIds = streams.clientInstanceIds(Duration.ofSeconds(60));
final Uuid adminInstanceId = clientInstanceIds.adminInstanceId();
final Uuid mainConsumerInstanceId = clientInstanceIds.consumerInstanceIds().entrySet().stream()
.filter(entry -> !entry.getKey().endsWith("-restore-consumer")
&& !entry.getKey().endsWith("GlobalStreamThread-global-consumer"))
.map(Map.Entry::getValue)
.findFirst().orElseThrow();
assertNotNull(adminInstanceId);
assertNotNull(mainConsumerInstanceId);
LOG.info("Main consumer instance id {}", mainConsumerInstanceId);
final String expectedProcessId = streams.metrics().values().stream()
.filter(metric -> metric.metricName().tags().containsKey("process-id"))
.map(metric -> metric.metricName().tags().get("process-id"))
.findFirst().orElseThrow();
TestUtils.waitForCondition(
() -> !TelemetryPluginWithExporter.SUBSCRIBED_METRICS.getOrDefault(mainConsumerInstanceId, Collections.emptyList()).isEmpty(),
30_000,
"Never received subscribed metrics"
);
final List<String> expectedMetrics = streams.metrics().values().stream().map(Metric::metricName)
.filter(metricName -> metricName.tags().containsKey("thread-id")).map(mn -> {
final String name = mn.name().replace('-', '.');
final String group = mn.group().replace("-metrics", "").replace('-', '.');
return "org.apache.kafka." + group + "." + name;
}).filter(name -> !name.equals("org.apache.kafka.stream.thread.state"))// telemetry reporter filters out string metrics
.sorted().toList();
final List<String> actualMetrics = new ArrayList<>(TelemetryPluginWithExporter.SUBSCRIBED_METRICS.get(mainConsumerInstanceId));
assertEquals(expectedMetrics, actualMetrics);
TestUtils.waitForCondition(
() -> !TelemetryPluginWithExporter.SUBSCRIBED_METRICS.getOrDefault(adminInstanceId, Collections.emptyList()).isEmpty(),
30_000,
"Never received subscribed metrics"
);
final List<String> actualInstanceMetrics = TelemetryPluginWithExporter.SUBSCRIBED_METRICS.get(adminInstanceId);
final List<String> expectedInstanceMetrics = Arrays.asList(
"org.apache.kafka.stream.alive.stream.threads",
"org.apache.kafka.stream.client.state",
"org.apache.kafka.stream.failed.stream.threads",
"org.apache.kafka.stream.recording.level");
assertEquals(expectedInstanceMetrics, actualInstanceMetrics);
TestUtils.waitForCondition(() -> TelemetryPluginWithExporter.processId != null,
30_000,
"Never received the process id");
assertEquals(expectedProcessId, TelemetryPluginWithExporter.processId);
}
}
@ParameterizedTest
@MethodSource("topologyComplexityAndRebalanceProtocol")
public void shouldPassMetrics(final String topologyType, final String groupProtocol) throws Exception {
// Streams metrics should get passed to Admin and Consumer
streamsApplicationProperties = props(groupProtocol);
final Topology topology = topologyType.equals("simple") ? simpleTopology(false) : complexTopology();
try (final KafkaStreams streams = new KafkaStreams(topology, streamsApplicationProperties)) {
IntegrationTestUtils.startApplicationAndWaitUntilRunning(streams);
final List<MetricName> streamsThreadMetrics = streams.metrics().values().stream().map(Metric::metricName)
.filter(metricName -> metricName.tags().containsKey("thread-id")).toList();
final List<MetricName> streamsClientMetrics = streams.metrics().values().stream().map(Metric::metricName)
.filter(metricName -> metricName.group().equals("stream-metrics")).toList();
final List<MetricName> consumerPassedStreamThreadMetricNames = INTERCEPTING_CONSUMERS.get(FIRST_INSTANCE_CLIENT).passedMetrics().stream().map(KafkaMetric::metricName).toList();
final List<MetricName> adminPassedStreamClientMetricNames = INTERCEPTING_ADMIN_CLIENTS.get(FIRST_INSTANCE_CLIENT).passedMetrics.stream().map(KafkaMetric::metricName).toList();
assertEquals(streamsThreadMetrics.size(), consumerPassedStreamThreadMetricNames.size());
consumerPassedStreamThreadMetricNames.forEach(metricName -> assertTrue(streamsThreadMetrics.contains(metricName), "Streams metrics doesn't contain " + metricName));
assertEquals(streamsClientMetrics.size(), adminPassedStreamClientMetricNames.size());
adminPassedStreamClientMetricNames.forEach(metricName -> assertTrue(streamsClientMetrics.contains(metricName), "Client metrics doesn't contain " + metricName));
}
}
@Test
public void shouldPassCorrectMetricsDynamicInstances() throws Exception {
// Correct streams metrics should get passed with dynamic membership
streamsApplicationProperties = props("classic");
streamsApplicationProperties.put(StreamsConfig.STATE_DIR_CONFIG, TestUtils.tempDirectory(appId).getPath() + "-ks1");
streamsApplicationProperties.put(StreamsConfig.CLIENT_ID_CONFIG, appId + "-ks1");
streamsSecondApplicationProperties = props("classic");
streamsSecondApplicationProperties.put(StreamsConfig.STATE_DIR_CONFIG, TestUtils.tempDirectory(appId).getPath() + "-ks2");
streamsSecondApplicationProperties.put(StreamsConfig.CLIENT_ID_CONFIG, appId + "-ks2");
final Topology topology = complexTopology();
try (final KafkaStreams streamsOne = new KafkaStreams(topology, streamsApplicationProperties)) {
IntegrationTestUtils.startApplicationAndWaitUntilRunning(streamsOne);
final List<MetricName> streamsTaskMetricNames = streamsOne.metrics().values().stream().map(Metric::metricName)
.filter(metricName -> metricName.tags().containsKey("task-id")).toList();
final List<MetricName> consumerPassedStreamTaskMetricNames = INTERCEPTING_CONSUMERS.get(FIRST_INSTANCE_CLIENT).passedMetrics().stream().map(KafkaMetric::metricName)
.filter(metricName -> metricName.tags().containsKey("task-id")).toList();
/*
With only one instance, Kafka Streams should register task metrics for all tasks 0_0, 0_1, 1_0, 1_1
*/
final List<String> streamTaskIds = getTaskIdsAsStrings(streamsOne);
final long consumerPassedTaskMetricCount = consumerPassedStreamTaskMetricNames.stream().filter(metricName -> streamTaskIds.contains(metricName.tags().get("task-id"))).count();
assertEquals(streamsTaskMetricNames.size(), consumerPassedStreamTaskMetricNames.size());
assertEquals(consumerPassedTaskMetricCount, streamsTaskMetricNames.size());
try (final KafkaStreams streamsTwo = new KafkaStreams(topology, streamsSecondApplicationProperties)) {
streamsTwo.start();
/*
Now with 2 instances, the tasks will get split amongst both Kafka Streams applications
*/
final List<String> streamOneTaskIds = new ArrayList<>();
final List<String> streamTwoTasksIds = new ArrayList<>();
waitForCondition(() -> {
streamOneTaskIds.clear();
streamTwoTasksIds.clear();
streamOneTaskIds.addAll(getTaskIdsAsStrings(streamsOne));
streamTwoTasksIds.addAll(getTaskIdsAsStrings(streamsTwo));
return streamOneTaskIds.size() == 2 && streamTwoTasksIds.size() == 2;
},
"Task assignment did not complete."
);
final List<MetricName> streamsOneTaskMetrics = streamsOne.metrics().values().stream().map(Metric::metricName)
.filter(metricName -> metricName.tags().containsKey("task-id")).toList();
final List<MetricName> streamsOneStateMetrics = streamsOne.metrics().values().stream().map(Metric::metricName)
.filter(metricName -> metricName.group().equals("stream-state-metrics")).toList();
final List<MetricName> consumerOnePassedTaskMetrics = INTERCEPTING_CONSUMERS.get(FIRST_INSTANCE_CLIENT)
.passedMetrics().stream().map(KafkaMetric::metricName).filter(metricName -> metricName.tags().containsKey("task-id")).toList();
final List<MetricName> consumerOnePassedStateMetrics = INTERCEPTING_CONSUMERS.get(FIRST_INSTANCE_CLIENT)
.passedMetrics().stream().map(KafkaMetric::metricName).filter(metricName -> metricName.group().equals("stream-state-metrics")).toList();
final List<MetricName> streamsTwoTaskMetrics = streamsTwo.metrics().values().stream().map(Metric::metricName)
.filter(metricName -> metricName.tags().containsKey("task-id")).toList();
final List<MetricName> streamsTwoStateMetrics = streamsTwo.metrics().values().stream().map(Metric::metricName)
.filter(metricName -> metricName.group().equals("stream-state-metrics")).toList();
final List<MetricName> consumerTwoPassedTaskMetrics = INTERCEPTING_CONSUMERS.get(SECOND_INSTANCE_CLIENT)
.passedMetrics().stream().map(KafkaMetric::metricName).filter(metricName -> metricName.tags().containsKey("task-id")).toList();
final List<MetricName> consumerTwoPassedStateMetrics = INTERCEPTING_CONSUMERS.get(SECOND_INSTANCE_CLIENT)
.passedMetrics().stream().map(KafkaMetric::metricName).filter(metricName -> metricName.group().equals("stream-state-metrics")).toList();
/*
Confirm pre-existing KafkaStreams instance one only passes metrics for its tasks and has no metrics for previous tasks
*/
final long consumerOneStreamOneTaskCount = consumerOnePassedTaskMetrics.stream().filter(metricName -> streamOneTaskIds.contains(metricName.tags().get("task-id"))).count();
final long consumerOneStateMetricCount = consumerOnePassedStateMetrics.stream().filter(metricName -> streamOneTaskIds.contains(metricName.tags().get("task-id"))).count();
final long consumerOneTaskTwoMetricCount = consumerOnePassedTaskMetrics.stream().filter(metricName -> streamTwoTasksIds.contains(metricName.tags().get("task-id"))).count();
final long consumerOneStateTwoMetricCount = consumerOnePassedStateMetrics.stream().filter(metricName -> streamTwoTasksIds.contains(metricName.tags().get("task-id"))).count();
/*
Confirm new KafkaStreams instance only passes metrics for the newly assigned tasks
*/
final long consumerTwoStreamTwoTaskCount = consumerTwoPassedTaskMetrics.stream().filter(metricName -> streamTwoTasksIds.contains(metricName.tags().get("task-id"))).count();
final long consumerTwoStateMetricCount = consumerTwoPassedStateMetrics.stream().filter(metricName -> streamTwoTasksIds.contains(metricName.tags().get("task-id"))).count();
final long consumerTwoTaskOneMetricCount = consumerTwoPassedTaskMetrics.stream().filter(metricName -> streamOneTaskIds.contains(metricName.tags().get("task-id"))).count();
final long consumerTwoStateMetricOneCount = consumerTwoPassedStateMetrics.stream().filter(metricName -> streamOneTaskIds.contains(metricName.tags().get("task-id"))).count();
assertEquals(streamsOneTaskMetrics.size(), consumerOneStreamOneTaskCount);
assertEquals(streamsOneStateMetrics.size(), consumerOneStateMetricCount);
assertEquals(0, consumerOneTaskTwoMetricCount);
assertEquals(0, consumerOneStateTwoMetricCount);
assertEquals(streamsTwoTaskMetrics.size(), consumerTwoStreamTwoTaskCount);
assertEquals(streamsTwoStateMetrics.size(), consumerTwoStateMetricCount);
assertEquals(0, consumerTwoTaskOneMetricCount);
assertEquals(0, consumerTwoStateMetricOneCount);
}
}
}
@ParameterizedTest
@ValueSource(strings = {"classic", "streams"})
public void passedMetricsShouldNotLeakIntoClientMetrics(final String groupProtocol) throws Exception {
// Streams metrics should not be visible in client metrics
streamsApplicationProperties = props(groupProtocol);
final Topology topology = complexTopology();
try (final KafkaStreams streams = new KafkaStreams(topology, streamsApplicationProperties)) {
IntegrationTestUtils.startApplicationAndWaitUntilRunning(streams);
final List<MetricName> streamsThreadMetrics = streams.metrics().values().stream().map(Metric::metricName)
.filter(metricName -> metricName.tags().containsKey("thread-id")).toList();
final List<MetricName> streamsClientMetrics = streams.metrics().values().stream().map(Metric::metricName)
.filter(metricName -> metricName.group().equals("stream-metrics")).toList();
final Map<MetricName, ? extends Metric> embeddedConsumerMetrics = INTERCEPTING_CONSUMERS.get(FIRST_INSTANCE_CLIENT).metrics();
final Map<MetricName, ? extends Metric> embeddedAdminMetrics = INTERCEPTING_ADMIN_CLIENTS.get(FIRST_INSTANCE_CLIENT).metrics();
streamsThreadMetrics.forEach(metricName -> assertFalse(embeddedConsumerMetrics.containsKey(metricName), "Stream thread metric found in client metrics" + metricName));
streamsClientMetrics.forEach(metricName -> assertFalse(embeddedAdminMetrics.containsKey(metricName), "Stream client metric found in client metrics" + metricName));
}
}
private void subscribeForStreamsMetrics() throws Exception {
final Properties clientProps = new Properties();
clientProps.put("bootstrap.servers", cluster.bootstrapServers());
try (final ClientMetricsCommand.ClientMetricsService clientMetricsService = new ClientMetricsCommand.ClientMetricsService(clientProps)) {
final String[] metricsSubscriptionParameters = new String[]{"--bootstrap-server", cluster.bootstrapServers(), "--metrics", "org.apache.kafka.stream", "--alter", "--name", "streams-task-metrics-subscription", "--interval", "1000"};
final ClientMetricsCommand.ClientMetricsCommandOptions commandOptions = new ClientMetricsCommand.ClientMetricsCommandOptions(metricsSubscriptionParameters);
clientMetricsService.alterClientMetrics(commandOptions);
}
}
private List<String> getTaskIdsAsStrings(final KafkaStreams streams) {
return streams.metadataForLocalThreads().stream()
.flatMap(threadMeta -> threadMeta.activeTasks().stream()
.map(taskMeta -> taskMeta.taskId().toString()))
.toList();
}
private static Stream<Arguments> topologyComplexityAndRebalanceProtocol() {
return Stream.of(
Arguments.of("simple", "classic"),
Arguments.of("complex", "classic"),
Arguments.of("simple", "streams")
);
}
private Properties props(final String groupProtocol) {
return props(mkObjectProperties(mkMap(
mkEntry(StreamsConfig.GROUP_PROTOCOL_CONFIG, groupProtocol)
)));
}
private Properties props(final Properties extraProperties) {
final Properties streamsConfiguration = new Properties();
streamsConfiguration.put(StreamsConfig.APPLICATION_ID_CONFIG, appId);
streamsConfiguration.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, cluster.bootstrapServers());
streamsConfiguration.put(StreamsConfig.STATESTORE_CACHE_MAX_BYTES_CONFIG, 0);
streamsConfiguration.put(StreamsConfig.STATE_DIR_CONFIG, TestUtils.tempDirectory(appId).getPath());
streamsConfiguration.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.StringSerde.class);
streamsConfiguration.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.StringSerde.class);
streamsConfiguration.put(StreamsConfig.DEFAULT_CLIENT_SUPPLIER_CONFIG, TestClientSupplier.class);
streamsConfiguration.put(StreamsConfig.InternalConfig.INTERNAL_CONSUMER_WRAPPER, TestConsumerWrapper.class);
streamsConfiguration.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
streamsConfiguration.putAll(extraProperties);
return streamsConfiguration;
}
private Topology complexTopology() {
final StreamsBuilder builder = new StreamsBuilder();
builder.stream(inputTopicTwoPartitions, Consumed.with(Serdes.String(), Serdes.String()))
.flatMapValues(value -> Arrays.asList(value.toLowerCase(Locale.getDefault()).split("\\W+")))
.groupBy((key, value) -> value)
.count()
.toStream().to(outputTopicTwoPartitions, Produced.with(Serdes.String(), Serdes.Long()));
return builder.build();
}
private void addGlobalStore(final StreamsBuilder builder) {
builder.addGlobalStore(
Stores.keyValueStoreBuilder(
Stores.inMemoryKeyValueStore("iq-test-store"),
Serdes.String(),
Serdes.String()
),
globalStoreTopic,
Consumed.with(Serdes.String(), Serdes.String()),
() -> new Processor<>() {
// The store iterator is intentionally not closed here as it needs
// to be open during the test, so the Streams app will emit the
// org.apache.kafka.stream.state.oldest.iterator.open.since.ms metric
// that is expected. So the globalStoreIterator is a global variable
// (pun not intended), so it can be closed in the tearDown method.
@SuppressWarnings("unchecked")
@Override
public void init(final ProcessorContext<Void, Void> context) {
globalStoreIterator = ((KeyValueStore<String, String>) context.getStateStore("iq-test-store")).all();
}
@Override
public void process(final Record<String, String> record) {
// no-op
}
});
}
private Topology simpleTopology(final boolean includeGlobalStore) {
final StreamsBuilder builder = new StreamsBuilder();
if (includeGlobalStore) {
addGlobalStore(builder);
}
builder.stream(inputTopicOnePartition, Consumed.with(Serdes.String(), Serdes.String()))
.flatMapValues(value -> Arrays.asList(value.toLowerCase(Locale.getDefault()).split("\\W+")))
.to(outputTopicOnePartition, Produced.with(Serdes.String(), Serdes.String()));
return builder.build();
}
public static | KafkaStreamsTelemetryIntegrationTest |
java | apache__camel | components/camel-quartz/src/test/java/org/apache/camel/routepolicy/quartz/SpringCronScheduledRoutePolicyTest.java | {
"start": 1042,
"end": 1415
} | class ____ extends SpringScheduledRoutePolicyTest {
public void setUp() {
setApplicationContext(newAppContext("CronPolicies.xml"));
setTestType(TestType.CRON);
}
private AbstractXmlApplicationContext newAppContext(String config) {
return CamelSpringTestSupport.newAppContext(config, getClass());
}
}
| SpringCronScheduledRoutePolicyTest |
java | elastic__elasticsearch | x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/action/ClearSearchableSnapshotsCacheRequest.java | {
"start": 530,
"end": 1000
} | class ____ extends BroadcastRequest<ClearSearchableSnapshotsCacheRequest> {
public ClearSearchableSnapshotsCacheRequest(StreamInput in) throws IOException {
super(in);
}
public ClearSearchableSnapshotsCacheRequest(String... indices) {
super(indices);
}
protected ClearSearchableSnapshotsCacheRequest(String[] indices, IndicesOptions indicesOptions) {
super(indices, indicesOptions);
}
}
| ClearSearchableSnapshotsCacheRequest |
java | apache__dubbo | dubbo-metrics/dubbo-metrics-api/src/main/java/org/apache/dubbo/metrics/event/MetricsInitEvent.java | {
"start": 1368,
"end": 2382
} | class ____ extends TimeCounterEvent {
private static final TypeWrapper METRIC_EVENT = new TypeWrapper(MetricsLevel.SERVICE, METRIC_REQUESTS);
public MetricsInitEvent(ApplicationModel source, TypeWrapper typeWrapper) {
super(source, typeWrapper);
}
public static MetricsInitEvent toMetricsInitEvent(
ApplicationModel applicationModel, Invocation invocation, boolean serviceLevel) {
MethodMetric methodMetric = new MethodMetric(applicationModel, invocation, serviceLevel);
MetricsInitEvent initEvent = new MetricsInitEvent(applicationModel, METRIC_EVENT);
initEvent.putAttachment(MetricsConstants.INVOCATION, invocation);
initEvent.putAttachment(MetricsConstants.METHOD_METRICS, methodMetric);
initEvent.putAttachment(ATTACHMENT_KEY_SERVICE, MetricsSupport.getInterfaceName(invocation));
initEvent.putAttachment(MetricsConstants.INVOCATION_SIDE, MetricsSupport.getSide(invocation));
return initEvent;
}
}
| MetricsInitEvent |
java | apache__camel | components/camel-aws/camel-aws2-eks/src/generated/java/org/apache/camel/component/aws2/eks/EKS2EndpointUriFactory.java | {
"start": 518,
"end": 2944
} | class ____ extends org.apache.camel.support.component.EndpointUriFactorySupport implements EndpointUriFactory {
private static final String BASE = ":label";
private static final Set<String> PROPERTY_NAMES;
private static final Set<String> SECRET_PROPERTY_NAMES;
private static final Map<String, String> MULTI_VALUE_PREFIXES;
static {
Set<String> props = new HashSet<>(19);
props.add("accessKey");
props.add("eksClient");
props.add("label");
props.add("lazyStartProducer");
props.add("operation");
props.add("overrideEndpoint");
props.add("pojoRequest");
props.add("profileCredentialsName");
props.add("proxyHost");
props.add("proxyPort");
props.add("proxyProtocol");
props.add("region");
props.add("secretKey");
props.add("sessionToken");
props.add("trustAllCertificates");
props.add("uriEndpointOverride");
props.add("useDefaultCredentialsProvider");
props.add("useProfileCredentialsProvider");
props.add("useSessionCredentials");
PROPERTY_NAMES = Collections.unmodifiableSet(props);
Set<String> secretProps = new HashSet<>(3);
secretProps.add("accessKey");
secretProps.add("secretKey");
secretProps.add("sessionToken");
SECRET_PROPERTY_NAMES = Collections.unmodifiableSet(secretProps);
MULTI_VALUE_PREFIXES = Collections.emptyMap();
}
@Override
public boolean isEnabled(String scheme) {
return "aws2-eks".equals(scheme);
}
@Override
public String buildUri(String scheme, Map<String, Object> properties, boolean encode) throws URISyntaxException {
String syntax = scheme + BASE;
String uri = syntax;
Map<String, Object> copy = new HashMap<>(properties);
uri = buildPathParameter(syntax, uri, "label", null, true, copy);
uri = buildQueryParameters(uri, copy, encode);
return uri;
}
@Override
public Set<String> propertyNames() {
return PROPERTY_NAMES;
}
@Override
public Set<String> secretPropertyNames() {
return SECRET_PROPERTY_NAMES;
}
@Override
public Map<String, String> multiValuePrefixes() {
return MULTI_VALUE_PREFIXES;
}
@Override
public boolean isLenientProperties() {
return false;
}
}
| EKS2EndpointUriFactory |
java | quarkusio__quarkus | extensions/reactive-oracle-client/deployment/src/test/java/io/quarkus/reactive/oracle/client/MultipleDataSourcesAndOraclePoolCreatorsTest.java | {
"start": 2055,
"end": 2627
} | class ____ {
@Inject
@ReactiveDataSource("hibernate")
Pool oracleClient;
public CompletionStage<Void> verify() {
CompletableFuture<Void> cf = new CompletableFuture<>();
oracleClient.query("SELECT 1 FROM DUAL").execute(ar -> {
if (ar.failed()) {
cf.completeExceptionally(ar.cause());
} else {
cf.complete(null);
}
});
return cf;
}
}
@Singleton
public static | BeanUsingHibernateDataSource |
java | apache__camel | core/camel-core-reifier/src/main/java/org/apache/camel/reifier/ConvertBodyReifier.java | {
"start": 1167,
"end": 2363
} | class ____ extends ProcessorReifier<ConvertBodyDefinition> {
public ConvertBodyReifier(Route route, ProcessorDefinition<?> definition) {
super(route, ConvertBodyDefinition.class.cast(definition));
}
@Override
public Processor createProcessor() throws Exception {
Class<?> typeClass = parse(Class.class, or(definition.getTypeClass(), parseString(definition.getType())));
String charset = validateCharset(parseString(definition.getCharset()));
boolean mandatory = true;
if (definition.getMandatory() != null) {
mandatory = parseBoolean(definition.getMandatory(), true);
}
ConvertBodyProcessor answer = new ConvertBodyProcessor(typeClass, charset, mandatory);
answer.setDisabled(isDisabled(camelContext, definition));
return answer;
}
public static String validateCharset(String charset) throws UnsupportedCharsetException {
if (charset != null) {
if (Charset.isSupported(charset)) {
return Charset.forName(charset).name();
}
throw new UnsupportedCharsetException(charset);
}
return null;
}
}
| ConvertBodyReifier |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/DoNotCallSuggesterTest.java | {
"start": 7470,
"end": 7865
} | class ____ extends com.google.inject.AbstractModule {
public final String extractString() {
throw new RuntimeException();
}
}
""")
.doTest();
}
@Test
public void finalClass_publicMethod_methodReturnsException() {
testHelper
.addSourceLines(
"Test.java",
"""
final | Test |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/UsersManager.java | {
"start": 2149,
"end": 4270
} | class ____ implements AbstractUsersManager {
private static final Logger LOG =
LoggerFactory.getLogger(UsersManager.class);
/*
* Member declaration for UsersManager class.
*/
private final AbstractLeafQueue lQueue;
private final RMNodeLabelsManager labelManager;
private final ResourceCalculator resourceCalculator;
private Map<String, User> users = new ConcurrentHashMap<>();
private ResourceUsage totalResUsageForActiveUsers = new ResourceUsage();
private ResourceUsage totalResUsageForNonActiveUsers = new ResourceUsage();
private Set<String> activeUsersSet = new HashSet<String>();
private Set<String> nonActiveUsersSet = new HashSet<String>();
// Summation of consumed ratios for all users in queue
private UsageRatios qUsageRatios;
// To detect whether there is a change in user count for every user-limit
// calculation.
private long latestVersionOfUsersState = 0;
private Map<String, Map<SchedulingMode, Long>> localVersionOfActiveUsersState =
new HashMap<String, Map<SchedulingMode, Long>>();
private Map<String, Map<SchedulingMode, Long>> localVersionOfAllUsersState =
new HashMap<String, Map<SchedulingMode, Long>>();
private volatile float userLimit;
private volatile float userLimitFactor;
private WriteLock writeLock;
private ReadLock readLock;
private final QueueMetrics metrics;
private AtomicInteger activeUsers = new AtomicInteger(0);
private AtomicInteger activeUsersWithOnlyPendingApps = new AtomicInteger(0);
private Map<String, Set<ApplicationId>> usersApplications =
new HashMap<String, Set<ApplicationId>>();
// Pre-computed list of user-limits.
@VisibleForTesting
Map<String, Map<SchedulingMode, Resource>> preComputedActiveUserLimit =
new HashMap<>();
@VisibleForTesting
Map<String, Map<SchedulingMode, Resource>> preComputedAllUserLimit =
new HashMap<>();
private float activeUsersTimesWeights = 0.0f;
private float allUsersTimesWeights = 0.0f;
/**
* UsageRatios will store the total used resources ratio across all users of
* the queue.
*/
static private | UsersManager |
java | spring-projects__spring-framework | spring-webmvc/src/main/java/org/springframework/web/servlet/mvc/condition/ProducesRequestCondition.java | {
"start": 2022,
"end": 12733
} | class ____ extends AbstractRequestCondition<ProducesRequestCondition> {
private static final ContentNegotiationManager DEFAULT_CONTENT_NEGOTIATION_MANAGER =
new ContentNegotiationManager();
private static final ProducesRequestCondition EMPTY_CONDITION = new ProducesRequestCondition();
private static final List<ProduceMediaTypeExpression> MEDIA_TYPE_ALL_LIST =
Collections.singletonList(new ProduceMediaTypeExpression(MediaType.ALL_VALUE));
private static final String MEDIA_TYPES_ATTRIBUTE = ProducesRequestCondition.class.getName() + ".MEDIA_TYPES";
private final List<ProduceMediaTypeExpression> expressions;
private final ContentNegotiationManager contentNegotiationManager;
/**
* Creates a new instance from "produces" expressions. If 0 expressions
* are provided in total, this condition will match to any request.
* @param produces expressions with syntax defined by {@link RequestMapping#produces()}
*/
public ProducesRequestCondition(String... produces) {
this(produces, null, null);
}
/**
* Creates a new instance with "produces" and "header" expressions. "Header"
* expressions where the header name is not 'Accept' or have no header value
* defined are ignored. If 0 expressions are provided in total, this condition
* will match to any request.
* @param produces expressions with syntax defined by {@link RequestMapping#produces()}
* @param headers expressions with syntax defined by {@link RequestMapping#headers()}
*/
public ProducesRequestCondition(String @Nullable [] produces, String @Nullable [] headers) {
this(produces, headers, null);
}
/**
* Same as {@link #ProducesRequestCondition(String[], String[])} but also
* accepting a {@link ContentNegotiationManager}.
* @param produces expressions with syntax defined by {@link RequestMapping#produces()}
* @param headers expressions with syntax defined by {@link RequestMapping#headers()}
* @param manager used to determine requested media types
*/
public ProducesRequestCondition(String @Nullable [] produces, String @Nullable [] headers,
@Nullable ContentNegotiationManager manager) {
this.expressions = parseExpressions(produces, headers);
if (this.expressions.size() > 1) {
Collections.sort(this.expressions);
}
this.contentNegotiationManager = (manager != null ? manager : DEFAULT_CONTENT_NEGOTIATION_MANAGER);
}
private List<ProduceMediaTypeExpression> parseExpressions(String @Nullable [] produces, String @Nullable [] headers) {
Set<ProduceMediaTypeExpression> result = null;
if (!ObjectUtils.isEmpty(headers)) {
for (String header : headers) {
HeaderExpression expr = new HeaderExpression(header);
if ("Accept".equalsIgnoreCase(expr.name) && expr.value != null) {
for (MediaType mediaType : MediaType.parseMediaTypes(expr.value)) {
result = (result != null ? result : new LinkedHashSet<>());
result.add(new ProduceMediaTypeExpression(mediaType, expr.isNegated));
}
}
}
}
if (!ObjectUtils.isEmpty(produces)) {
for (String produce : produces) {
result = (result != null ? result : new LinkedHashSet<>());
result.add(new ProduceMediaTypeExpression(produce));
}
}
return (result != null ? new ArrayList<>(result) : Collections.emptyList());
}
/**
* Private constructor for internal use to create matching conditions.
* Note the expressions List is neither sorted nor deep copied.
*/
private ProducesRequestCondition(List<ProduceMediaTypeExpression> expressions, ProducesRequestCondition other) {
this.expressions = expressions;
this.contentNegotiationManager = other.contentNegotiationManager;
}
/**
* Return the contained "produces" expressions.
*/
public Set<MediaTypeExpression> getExpressions() {
return new LinkedHashSet<>(this.expressions);
}
/**
* Return the contained producible media types excluding negated expressions.
*/
public Set<MediaType> getProducibleMediaTypes() {
Set<MediaType> result = new LinkedHashSet<>();
for (ProduceMediaTypeExpression expression : this.expressions) {
if (!expression.isNegated()) {
result.add(expression.getMediaType());
}
}
return result;
}
/**
* Whether the condition has any media type expressions.
*/
@Override
public boolean isEmpty() {
return this.expressions.isEmpty();
}
@Override
protected List<ProduceMediaTypeExpression> getContent() {
return this.expressions;
}
@Override
protected String getToStringInfix() {
return " || ";
}
/**
* Returns the "other" instance if it has any expressions; returns "this"
* instance otherwise. Practically that means a method-level "produces"
* overrides a type-level "produces" condition.
*/
@Override
public ProducesRequestCondition combine(ProducesRequestCondition other) {
return (!other.expressions.isEmpty() ? other : this);
}
/**
* Checks if any of the contained media type expressions match the given
* request 'Content-Type' header and returns an instance that is guaranteed
* to contain matching expressions only. The match is performed via
* {@link MediaType#isCompatibleWith(MediaType)}.
* @param request the current request
* @return the same instance if there are no expressions;
* or a new condition with matching expressions;
* or {@code null} if no expressions match.
*/
@Override
public @Nullable ProducesRequestCondition getMatchingCondition(HttpServletRequest request) {
if (CorsUtils.isPreFlightRequest(request)) {
return EMPTY_CONDITION;
}
if (isEmpty()) {
return this;
}
List<MediaType> acceptedMediaTypes;
try {
acceptedMediaTypes = getAcceptedMediaTypes(request);
}
catch (HttpMediaTypeException ex) {
return null;
}
List<ProduceMediaTypeExpression> result = getMatchingExpressions(acceptedMediaTypes);
if (!CollectionUtils.isEmpty(result)) {
return new ProducesRequestCondition(result, this);
}
else if (MediaType.ALL.isPresentIn(acceptedMediaTypes)) {
return EMPTY_CONDITION;
}
else {
return null;
}
}
private @Nullable List<ProduceMediaTypeExpression> getMatchingExpressions(List<MediaType> acceptedMediaTypes) {
List<ProduceMediaTypeExpression> result = null;
for (ProduceMediaTypeExpression expression : this.expressions) {
if (expression.match(acceptedMediaTypes)) {
result = result != null ? result : new ArrayList<>();
result.add(expression);
}
}
return result;
}
/**
* Compares this and another "produces" condition as follows:
* <ol>
* <li>Sort 'Accept' header media types by quality value via
* {@link org.springframework.util.MimeTypeUtils#sortBySpecificity(List)}
* and iterate the list.
* <li>Get the first index of matching media types in each "produces"
* condition first matching with {@link MediaType#equals(Object)} and
* then with {@link MediaType#includes(MediaType)}.
* <li>If a lower index is found, the condition at that index wins.
* <li>If both indexes are equal, the media types at the index are
* compared further with {@link MediaType#isMoreSpecific(MimeType)}.
* </ol>
* <p>It is assumed that both instances have been obtained via
* {@link #getMatchingCondition(HttpServletRequest)} and each instance
* contains the matching producible media type expression only or
* is otherwise empty.
*/
@Override
public int compareTo(ProducesRequestCondition other, HttpServletRequest request) {
if (this.expressions.isEmpty() && other.expressions.isEmpty()) {
return 0;
}
try {
List<MediaType> acceptedMediaTypes = getAcceptedMediaTypes(request);
for (MediaType acceptedMediaType : acceptedMediaTypes) {
int thisIndex = this.indexOfEqualMediaType(acceptedMediaType);
int otherIndex = other.indexOfEqualMediaType(acceptedMediaType);
int result = compareMatchingMediaTypes(this, thisIndex, other, otherIndex);
if (result != 0) {
return result;
}
thisIndex = this.indexOfIncludedMediaType(acceptedMediaType);
otherIndex = other.indexOfIncludedMediaType(acceptedMediaType);
result = compareMatchingMediaTypes(this, thisIndex, other, otherIndex);
if (result != 0) {
return result;
}
}
return 0;
}
catch (HttpMediaTypeNotAcceptableException ex) {
// should never happen
throw new IllegalStateException("Cannot compare without having any requested media types", ex);
}
}
@SuppressWarnings("unchecked")
private List<MediaType> getAcceptedMediaTypes(HttpServletRequest request)
throws HttpMediaTypeNotAcceptableException {
List<MediaType> result = (List<MediaType>) request.getAttribute(MEDIA_TYPES_ATTRIBUTE);
if (result == null) {
result = this.contentNegotiationManager.resolveMediaTypes(new ServletWebRequest(request));
request.setAttribute(MEDIA_TYPES_ATTRIBUTE, result);
}
return result;
}
private int indexOfEqualMediaType(MediaType mediaType) {
for (int i = 0; i < getExpressionsToCompare().size(); i++) {
MediaType currentMediaType = getExpressionsToCompare().get(i).getMediaType();
if (mediaType.getType().equalsIgnoreCase(currentMediaType.getType()) &&
mediaType.getSubtype().equalsIgnoreCase(currentMediaType.getSubtype())) {
return i;
}
}
return -1;
}
private int indexOfIncludedMediaType(MediaType mediaType) {
for (int i = 0; i < getExpressionsToCompare().size(); i++) {
if (mediaType.includes(getExpressionsToCompare().get(i).getMediaType())) {
return i;
}
}
return -1;
}
private int compareMatchingMediaTypes(ProducesRequestCondition condition1, int index1,
ProducesRequestCondition condition2, int index2) {
int result = 0;
if (index1 != index2) {
result = index2 - index1;
}
else if (index1 != -1) {
ProduceMediaTypeExpression expr1 = condition1.getExpressionsToCompare().get(index1);
ProduceMediaTypeExpression expr2 = condition2.getExpressionsToCompare().get(index2);
result = expr1.compareTo(expr2);
result = (result != 0) ? result : expr1.getMediaType().compareTo(expr2.getMediaType());
}
return result;
}
/**
* Return the contained "produces" expressions or if that's empty, a list
* with a {@value MediaType#ALL_VALUE} expression.
*/
private List<ProduceMediaTypeExpression> getExpressionsToCompare() {
return (this.expressions.isEmpty() ? MEDIA_TYPE_ALL_LIST : this.expressions);
}
/**
* Use this to clear {@link #MEDIA_TYPES_ATTRIBUTE} that contains the parsed,
* requested media types.
* @param request the current request
* @since 5.2
*/
public static void clearMediaTypesAttribute(HttpServletRequest request) {
request.removeAttribute(MEDIA_TYPES_ATTRIBUTE);
}
/**
* Parses and matches a single media type expression to a request's 'Accept' header.
*/
static | ProducesRequestCondition |
java | spring-projects__spring-boot | module/spring-boot-session-data-redis/src/main/java/org/springframework/boot/session/data/redis/autoconfigure/SessionDataRedisAutoConfiguration.java | {
"start": 8662,
"end": 9645
} | class ____ {
@Bean
ReactiveSessionRepositoryCustomizer<ReactiveRedisSessionRepository> springBootSessionRepositoryCustomizer(
SessionProperties sessionProperties, SessionDataRedisProperties sessionDataRedisProperties,
ServerProperties serverProperties) {
return (sessionRepository) -> {
PropertyMapper map = PropertyMapper.get();
map.from(sessionProperties
.determineTimeout(() -> serverProperties.getReactive().getSession().getTimeout()))
.to(sessionRepository::setDefaultMaxInactiveInterval);
map.from(sessionDataRedisProperties::getNamespace).to(sessionRepository::setRedisKeyNamespace);
map.from(sessionDataRedisProperties::getSaveMode).to(sessionRepository::setSaveMode);
};
}
}
@Configuration(proxyBeanMethods = false)
@ConditionalOnProperty(name = "spring.session.data.redis.repository-type", havingValue = "indexed")
@Import(RedisIndexedWebSessionConfiguration.class)
static | DefaultRedisSessionConfiguration |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/cache/annotation/AnnotationCacheOperationSourceTests.java | {
"start": 1434,
"end": 11066
} | class ____ {
private final AnnotationCacheOperationSource source = new AnnotationCacheOperationSource();
@Test
void singularAnnotation() {
Collection<CacheOperation> ops = getOps(AnnotatedClass.class, "singular", 1);
assertThat(ops).singleElement().satisfies(cacheOperation(CacheableOperation.class, "test"));
}
@Test
void multipleAnnotation() {
Collection<CacheOperation> ops = getOps(AnnotatedClass.class, "multiple", 2);
assertThat(ops).satisfiesExactly(cacheOperation(CacheableOperation.class),
cacheOperation(CacheEvictOperation.class));
}
@Test
void caching() {
Collection<CacheOperation> ops = getOps(AnnotatedClass.class, "caching", 2);
assertThat(ops).satisfiesExactly(cacheOperation(CacheableOperation.class),
cacheOperation(CacheEvictOperation.class));
}
@Test
void emptyCaching() {
getOps(AnnotatedClass.class, "emptyCaching", 0);
}
@Test
void singularStereotype() {
Collection<CacheOperation> ops = getOps(AnnotatedClass.class, "singleStereotype", 1);
assertThat(ops).satisfiesExactly(cacheOperation(CacheEvictOperation.class));
}
@Test
void multipleStereotypes() {
Collection<CacheOperation> ops = getOps(AnnotatedClass.class, "multipleStereotype", 3);
assertThat(ops).satisfiesExactly(cacheOperation(CacheableOperation.class),
cacheOperation(CacheEvictOperation.class, "foo"),
cacheOperation(CacheEvictOperation.class, "bar")
);
}
@Test
void singleComposedAnnotation() {
Collection<CacheOperation> ops = getOps(AnnotatedClass.class, "singleComposed", 2);
assertThat(ops).satisfiesExactly(
zero -> {
assertThat(zero).satisfies(cacheOperation(CacheOperation.class, "directly declared"));
assertThat(zero.getKey()).isEmpty();
},
first -> {
assertThat(first).satisfies(cacheOperation(CacheOperation.class, "composedCache"));
assertThat(first.getKey()).isEqualTo("composedKey");
}
);
}
@Test
void multipleComposedAnnotations() {
Collection<CacheOperation> ops = getOps(AnnotatedClass.class, "multipleComposed", 4);
assertThat(ops).satisfiesExactly(
zero -> {
assertThat(zero).satisfies(cacheOperation(CacheOperation.class, "directly declared"));
assertThat(zero.getKey()).isEmpty();
},
first -> {
assertThat(first).satisfies(cacheOperation(CacheOperation.class, "composedCache"));
assertThat(first.getKey()).isEqualTo("composedKey");
},
two -> {
assertThat(two).satisfies(cacheOperation(CacheOperation.class, "foo"));
assertThat(two.getKey()).isEmpty();
},
three -> {
assertThat(three).satisfies(cacheOperation(CacheEvictOperation.class, "composedCacheEvict"));
assertThat(three.getKey()).isEqualTo("composedEvictionKey");
}
);
}
@Test
void customKeyGenerator() {
Collection<CacheOperation> ops = getOps(AnnotatedClass.class, "customKeyGenerator", 1);
assertThat(ops).singleElement().satisfies(cacheOperation ->
assertThat(cacheOperation.getKeyGenerator()).isEqualTo("custom"));
}
@Test
void customKeyGeneratorInherited() {
Collection<CacheOperation> ops = getOps(AnnotatedClass.class, "customKeyGeneratorInherited", 1);
assertThat(ops).singleElement().satisfies(cacheOperation ->
assertThat(cacheOperation.getKeyGenerator()).isEqualTo("custom"));
}
@Test
void keyAndKeyGeneratorCannotBeSetTogether() {
assertThatIllegalStateException().isThrownBy(() ->
getOps(AnnotatedClass.class, "invalidKeyAndKeyGeneratorSet"));
}
@Test
void customCacheManager() {
Collection<CacheOperation> ops = getOps(AnnotatedClass.class, "customCacheManager", 1);
assertThat(ops).singleElement().satisfies(cacheOperation ->
assertThat(cacheOperation.getCacheManager()).isEqualTo("custom"));
}
@Test
void customCacheManagerInherited() {
Collection<CacheOperation> ops = getOps(AnnotatedClass.class, "customCacheManagerInherited", 1);
assertThat(ops).singleElement().satisfies(cacheOperation ->
assertThat(cacheOperation.getCacheManager()).isEqualTo("custom"));
}
@Test
void customCacheResolver() {
Collection<CacheOperation> ops = getOps(AnnotatedClass.class, "customCacheResolver", 1);
assertThat(ops).singleElement().satisfies(cacheOperation ->
assertThat(cacheOperation.getCacheResolver()).isEqualTo("custom"));
}
@Test
void customCacheResolverInherited() {
Collection<CacheOperation> ops = getOps(AnnotatedClass.class, "customCacheResolverInherited", 1);
assertThat(ops).singleElement().satisfies(cacheOperation ->
assertThat(cacheOperation.getCacheResolver()).isEqualTo("custom"));
}
@Test
void cacheResolverAndCacheManagerCannotBeSetTogether() {
assertThatIllegalStateException().isThrownBy(() ->
getOps(AnnotatedClass.class, "invalidCacheResolverAndCacheManagerSet"));
}
@Test
void fullClassLevelWithCustomCacheName() {
Collection<CacheOperation> ops = getOps(AnnotatedClassWithFullDefault.class, "methodLevelCacheName", 1);
assertThat(ops).singleElement().satisfies(hasSharedConfig(
"classKeyGenerator", "", "classCacheResolver", "custom"));
}
@Test
void fullClassLevelWithCustomKeyManager() {
Collection<CacheOperation> ops = getOps(AnnotatedClassWithFullDefault.class, "methodLevelKeyGenerator", 1);
assertThat(ops).singleElement().satisfies(hasSharedConfig(
"custom", "", "classCacheResolver" , "classCacheName"));
}
@Test
void fullClassLevelWithCustomCacheManager() {
Collection<CacheOperation> ops = getOps(AnnotatedClassWithFullDefault.class, "methodLevelCacheManager", 1);
assertThat(ops).singleElement().satisfies(hasSharedConfig(
"classKeyGenerator", "custom", "", "classCacheName"));
}
@Test
void fullClassLevelWithCustomCacheResolver() {
Collection<CacheOperation> ops = getOps(AnnotatedClassWithFullDefault.class, "methodLevelCacheResolver", 1);
assertThat(ops).singleElement().satisfies(hasSharedConfig(
"classKeyGenerator", "", "custom" , "classCacheName"));
}
@Test
void validateNoCacheIsValid() {
// Valid as a CacheResolver might return the cache names to use with other info
Collection<CacheOperation> ops = getOps(AnnotatedClass.class, "noCacheNameSpecified");
assertThat(ops).singleElement().satisfies(cacheOperation ->
assertThat(cacheOperation.getCacheNames()).isEmpty());
}
@Test
void customClassLevelWithCustomCacheName() {
Collection<CacheOperation> ops = getOps(AnnotatedClassWithCustomDefault.class, "methodLevelCacheName", 1);
assertThat(ops).singleElement().satisfies(hasSharedConfig(
"classKeyGenerator", "", "classCacheResolver", "custom"));
}
@Test
void severalCacheConfigUseClosest() {
Collection<CacheOperation> ops = getOps(MultipleCacheConfig.class, "multipleCacheConfig");
assertThat(ops).singleElement().satisfies(hasSharedConfig("", "", "", "myCache"));
}
@Test
void cacheConfigFromInterface() {
Collection<CacheOperation> ops = getOps(InterfaceCacheConfig.class, "interfaceCacheConfig");
assertThat(ops).singleElement().satisfies(hasSharedConfig("", "", "", "myCache"));
}
@Test
void cacheAnnotationOverride() {
Collection<CacheOperation> ops = getOps(InterfaceCacheConfig.class, "interfaceCacheableOverride");
assertThat(ops).singleElement().satisfies(cacheOperation(CacheableOperation.class));
}
@Test
void partialClassLevelWithCustomCacheManager() {
Collection<CacheOperation> ops = getOps(AnnotatedClassWithSomeDefault.class, "methodLevelCacheManager", 1);
assertThat(ops).singleElement().satisfies(hasSharedConfig(
"classKeyGenerator", "custom", "", "classCacheName"));
}
@Test
void partialClassLevelWithCustomCacheResolver() {
Collection<CacheOperation> ops = getOps(AnnotatedClassWithSomeDefault.class, "methodLevelCacheResolver", 1);
assertThat(ops).singleElement().satisfies(hasSharedConfig(
"classKeyGenerator", "", "custom", "classCacheName"));
}
@Test
void partialClassLevelWithNoCustomization() {
Collection<CacheOperation> ops = getOps(AnnotatedClassWithSomeDefault.class, "noCustomization", 1);
assertThat(ops).singleElement().satisfies(hasSharedConfig(
"classKeyGenerator", "classCacheManager", "", "classCacheName"));
}
private Consumer<CacheOperation> cacheOperation(Class<? extends CacheOperation> type, String... cacheNames) {
return candidate -> {
assertThat(candidate).isInstanceOf(type);
assertThat(candidate.getCacheNames()).containsExactly(cacheNames);
};
}
private Consumer<CacheOperation> cacheOperation(Class<? extends CacheOperation> type) {
return candidate -> assertThat(candidate).isInstanceOf(type);
}
private Collection<CacheOperation> getOps(Class<?> target, String name, int expectedNumberOfOperations) {
Collection<CacheOperation> result = getOps(target, name);
assertThat(result).as("Wrong number of operation(s) for '" + name + "'").hasSize(expectedNumberOfOperations);
return result;
}
private Collection<CacheOperation> getOps(Class<?> target, String name) {
try {
Method method = target.getMethod(name);
return this.source.getCacheOperations(method, target);
}
catch (NoSuchMethodException ex) {
throw new IllegalStateException(ex);
}
}
private Consumer<CacheOperation> hasSharedConfig(String keyGenerator, String cacheManager,
String cacheResolver, String... cacheNames) {
return actual -> {
assertThat(actual.getKeyGenerator()).isEqualTo(keyGenerator);
assertThat(actual.getCacheManager()).isEqualTo(cacheManager);
assertThat(actual.getCacheResolver()).isEqualTo(cacheResolver);
assertThat(actual.getCacheNames()).hasSameSizeAs(cacheNames);
assertThat(actual.getCacheNames()).containsExactly(cacheNames);
};
}
private static | AnnotationCacheOperationSourceTests |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestReuseRpcConnections.java | {
"start": 1405,
"end": 1503
} | class ____ tests behaviors of reusing RPC connections for various
* retry policies.
*/
public | mainly |
java | apache__spark | launcher/src/test/java/org/apache/spark/launcher/SparkSubmitCommandBuilderSuite.java | {
"start": 1263,
"end": 12264
} | class ____ extends BaseSuite {
private static File dummyPropsFile;
private static File connectPropsFile;
private static File driverMemPropsFile;
private static SparkSubmitOptionParser parser;
@BeforeAll
public static void setUp() throws Exception {
dummyPropsFile = File.createTempFile("spark", "properties");
connectPropsFile = File.createTempFile("spark", "properties");
Files.writeString(connectPropsFile.toPath(), "spark.remote=sc://connect-server:15002");
driverMemPropsFile = File.createTempFile("spark", "properties");
Files.writeString(driverMemPropsFile.toPath(),
"spark.driver.memory=4g\nspark.driver.memoryOverhead=768m");
parser = new SparkSubmitOptionParser();
}
@AfterAll
public static void cleanUp() throws Exception {
dummyPropsFile.delete();
connectPropsFile.delete();
driverMemPropsFile.delete();
}
@Test
public void testGetEffectiveConfig() throws Exception {
doTestGetEffectiveConfig(null, true, true);
doTestGetEffectiveConfig(null, true, false);
doTestGetEffectiveConfig(null, false, true);
doTestGetEffectiveConfig(null, false, false);
doTestGetEffectiveConfig(driverMemPropsFile, true, true);
doTestGetEffectiveConfig(driverMemPropsFile, true, false);
doTestGetEffectiveConfig(driverMemPropsFile, false, true);
doTestGetEffectiveConfig(driverMemPropsFile, false, false);
}
private void doTestGetEffectiveConfig(
File propertiesFile, boolean loadSparkDefaults, boolean confDriverMemory) throws Exception {
SparkSubmitCommandBuilder launcher =
newCommandBuilder(Collections.emptyList());
launcher.loadSparkDefaults = loadSparkDefaults;
launcher.conf.put("spark.foo", "bar");
launcher.childEnv.put("SPARK_CONF_DIR", System.getProperty("spark.test.home")
+ "/launcher/src/test/resources");
if (propertiesFile != null) {
launcher.setPropertiesFile(propertiesFile.getAbsolutePath());
}
if (confDriverMemory) {
launcher.conf.put(SparkLauncher.DRIVER_MEMORY, "2g");
}
Map<String, String> effectiveConfig = launcher.getEffectiveConfig();
assertEquals("bar", effectiveConfig.get("spark.foo"));
if (confDriverMemory) {
assertEquals("2g", effectiveConfig.get(SparkLauncher.DRIVER_MEMORY));
} else if (propertiesFile != null) {
try (FileReader reader = new FileReader(propertiesFile, StandardCharsets.UTF_8)) {
Properties props = new Properties();
props.load(reader);
if (props.containsKey(SparkLauncher.DRIVER_MEMORY)) {
assertEquals(props.getProperty(SparkLauncher.DRIVER_MEMORY),
effectiveConfig.get(SparkLauncher.DRIVER_MEMORY));
}
}
} else {
assertEquals("1g", effectiveConfig.get(SparkLauncher.DRIVER_MEMORY));
}
if (propertiesFile != null) {
try (FileReader reader = new FileReader(propertiesFile, StandardCharsets.UTF_8)) {
Properties props = new Properties();
props.load(reader);
if (props.containsKey("spark.driver.memoryOverhead")) {
assertEquals(props.getProperty("spark.driver.memoryOverhead"),
effectiveConfig.get("spark.driver.memoryOverhead"));
}
}
if (loadSparkDefaults) {
assertEquals("/driver", effectiveConfig.get(SparkLauncher.DRIVER_EXTRA_CLASSPATH));
} else {
assertFalse(effectiveConfig.containsKey(SparkLauncher.DRIVER_EXTRA_CLASSPATH));
}
} else {
assertEquals("/driver", effectiveConfig.get(SparkLauncher.DRIVER_EXTRA_CLASSPATH));
}
}
@Test
public void testDriverCmdBuilder() throws Exception {
testCmdBuilder(true, null);
testCmdBuilder(true, dummyPropsFile);
testCmdBuilder(true, connectPropsFile);
}
@Test
public void testClusterCmdBuilder() throws Exception {
testCmdBuilder(false, null);
testCmdBuilder(false, dummyPropsFile);
testCmdBuilder(false, connectPropsFile);
}
@Test
public void testCliHelpAndNoArg() throws Exception {
List<String> helpArgs = Arrays.asList(parser.HELP);
Map<String, String> env = new HashMap<>();
List<String> cmd = buildCommand(helpArgs, env);
assertTrue(cmd.contains(parser.HELP), "--help should be contained in the final cmd.");
List<String> sparkEmptyArgs = Collections.emptyList();
cmd = buildCommand(sparkEmptyArgs, env);
assertTrue(
cmd.contains("org.apache.spark.deploy.SparkSubmit"),
"org.apache.spark.deploy.SparkSubmit should be contained in the final cmd of empty input.");
}
@Test
public void testCheckJavaOptionsThrowException() throws Exception {
Map<String, String> env = new HashMap<>();
List<String> sparkSubmitArgs = Arrays.asList(
parser.MASTER,
"local",
parser.DRIVER_CLASS_PATH,
"/driverCp",
parser.DRIVER_JAVA_OPTIONS,
"-Xmx64g -Dprop=Other -Dprop1=\"-Xmx -Xmx\" -Dprop2=\"-Xmx '-Xmx\" " +
"-Dprop3='-Xmx -Xmx' -Dprop4='-Xmx \"-Xmx'",
SparkLauncher.NO_RESOURCE);
assertThrows(IllegalArgumentException.class, () -> buildCommand(sparkSubmitArgs, env));
}
@Test
public void testCheckJavaOptions() throws Exception {
Map<String, String> env = new HashMap<>();
List<String> sparkSubmitArgs = Arrays.asList(
parser.MASTER,
"local",
parser.DRIVER_CLASS_PATH,
"/driverCp",
parser.DRIVER_JAVA_OPTIONS,
"-Dprop=-Xmx -Dprop1=\"-Xmx -Xmx\" -Dprop2=\"-Xmx '-Xmx\" " +
"-Dprop3='-Xmx -Xmx' -Dprop4='-Xmx \"-Xmx'",
SparkLauncher.NO_RESOURCE);
buildCommand(sparkSubmitArgs, env);
}
@Test
public void testCliKillAndStatus() throws Exception {
List<String> params = Arrays.asList("driver-20160531171222-0000");
testCLIOpts(null, parser.STATUS, params);
testCLIOpts(null, parser.KILL_SUBMISSION, params);
testCLIOpts(SparkSubmitCommandBuilder.RUN_EXAMPLE, parser.STATUS, params);
testCLIOpts(SparkSubmitCommandBuilder.RUN_EXAMPLE, parser.KILL_SUBMISSION, params);
}
@Test
public void testCliParser() throws Exception {
List<String> sparkSubmitArgs = Arrays.asList(
parser.MASTER,
"local",
parser.DRIVER_MEMORY,
"42g",
parser.DRIVER_CLASS_PATH,
"/driverCp",
parser.DRIVER_JAVA_OPTIONS,
"extraJavaOpt",
parser.CONF,
"spark.randomOption=foo",
parser.CONF,
SparkLauncher.DRIVER_EXTRA_LIBRARY_PATH + "=/driverLibPath",
SparkLauncher.NO_RESOURCE);
Map<String, String> env = new HashMap<>();
List<String> cmd = buildCommand(sparkSubmitArgs, env);
assertTrue(findInStringList(env.get(CommandBuilderUtils.getLibPathEnvName()),
File.pathSeparator, "/driverLibPath"));
assertTrue(findInStringList(findArgValue(cmd, "-cp"), File.pathSeparator, "/driverCp"));
assertTrue(cmd.contains("-Xmx42g"), "Driver -Xmx should be configured.");
assertTrue(
Collections.indexOfSubList(cmd, Arrays.asList(parser.CONF, "spark.randomOption=foo")) > 0,
"Command should contain user-defined conf.");
}
@Test
public void testShellCliParser() throws Exception {
List<String> sparkSubmitArgs = Arrays.asList(
parser.CLASS,
"org.apache.spark.repl.Main",
parser.MASTER,
"foo",
"--app-arg",
"bar",
"--app-switch",
parser.FILES,
"baz",
parser.NAME,
"appName");
List<String> args = newCommandBuilder(sparkSubmitArgs).buildSparkSubmitArgs();
List<String> expected = Arrays.asList("spark-shell", "--app-arg", "bar", "--app-switch");
assertEquals(expected, args.subList(args.size() - expected.size(), args.size()));
}
@Test
public void testAlternateSyntaxParsing() throws Exception {
List<String> sparkSubmitArgs = Arrays.asList(
parser.CLASS + "=org.my.Class",
parser.MASTER + "=foo",
parser.DEPLOY_MODE + "=bar",
SparkLauncher.NO_RESOURCE);
List<String> cmd = newCommandBuilder(sparkSubmitArgs).buildSparkSubmitArgs();
assertEquals("org.my.Class", findArgValue(cmd, parser.CLASS));
assertEquals("foo", findArgValue(cmd, parser.MASTER));
assertEquals("bar", findArgValue(cmd, parser.DEPLOY_MODE));
}
@Test
public void testPySparkLauncher() throws Exception {
List<String> sparkSubmitArgs = Arrays.asList(
SparkSubmitCommandBuilder.PYSPARK_SHELL,
"--master=foo",
"--deploy-mode=bar");
Map<String, String> env = new HashMap<>();
List<String> cmd = buildCommand(sparkSubmitArgs, env);
assertTrue(Arrays.asList("python", "python2", "python3").contains(cmd.get(cmd.size() - 1)));
assertEquals(
String.format("\"%s\" \"foo\" \"%s\" \"bar\" \"%s\"",
parser.MASTER, parser.DEPLOY_MODE, SparkSubmitCommandBuilder.PYSPARK_SHELL_RESOURCE),
env.get("PYSPARK_SUBMIT_ARGS"));
}
@Test
public void testPySparkFallback() throws Exception {
List<String> sparkSubmitArgs = Arrays.asList(
"--master=foo",
"--deploy-mode=bar",
"script.py",
"arg1");
Map<String, String> env = new HashMap<>();
List<String> cmd = buildCommand(sparkSubmitArgs, env);
assertEquals("foo", findArgValue(cmd, "--master"));
assertEquals("bar", findArgValue(cmd, "--deploy-mode"));
assertEquals("script.py", cmd.get(cmd.size() - 2));
assertEquals("arg1", cmd.get(cmd.size() - 1));
}
@Test
public void testSparkRShell() throws Exception {
List<String> sparkSubmitArgs = Arrays.asList(
SparkSubmitCommandBuilder.SPARKR_SHELL,
"--master=foo",
"--deploy-mode=bar",
"--conf", "spark.r.shell.command=/usr/bin/R");
Map<String, String> env = new HashMap<>();
List<String> cmd = buildCommand(sparkSubmitArgs, env);
assertEquals("/usr/bin/R", cmd.get(cmd.size() - 1));
assertEquals(
String.format(
"\"%s\" \"foo\" \"%s\" \"bar\" \"--conf\" \"spark.r.shell.command=/usr/bin/R\" \"%s\"",
parser.MASTER, parser.DEPLOY_MODE, SparkSubmitCommandBuilder.SPARKR_SHELL_RESOURCE),
env.get("SPARKR_SUBMIT_ARGS"));
}
@Test
public void testExamplesRunnerNoArg() {
List<String> sparkSubmitArgs = Arrays.asList(SparkSubmitCommandBuilder.RUN_EXAMPLE);
Map<String, String> env = new HashMap<>();
assertThrows(IllegalArgumentException.class, () -> buildCommand(sparkSubmitArgs, env));
}
@Test
public void testExamplesRunnerNoMainClass() throws Exception {
testCLIOpts(SparkSubmitCommandBuilder.RUN_EXAMPLE, parser.HELP, null);
testCLIOpts(SparkSubmitCommandBuilder.RUN_EXAMPLE, parser.USAGE_ERROR, null);
testCLIOpts(SparkSubmitCommandBuilder.RUN_EXAMPLE, parser.VERSION, null);
}
@Test
public void testExamplesRunnerWithMasterNoMainClass() {
List<String> sparkSubmitArgs = Arrays.asList(
SparkSubmitCommandBuilder.RUN_EXAMPLE,
parser.MASTER + "=foo"
);
Map<String, String> env = new HashMap<>();
assertThrows(IllegalArgumentException.class,
() -> buildCommand(sparkSubmitArgs, env), "Missing example | SparkSubmitCommandBuilderSuite |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/cluster/topology/TopologyComparators.java | {
"start": 9185,
"end": 10475
} | enum ____ {
/**
* Sort by latency.
*/
BY_LATENCY {
@Override
void sort(Partitions partitions) {
partitions.getPartitions().sort(TopologyComparators.LatencyComparator.INSTANCE);
}
},
/**
* Do not sort.
*/
NONE {
@Override
void sort(Partitions partitions) {
}
},
/**
* Randomize nodes.
*/
RANDOMIZE {
@Override
void sort(Partitions partitions) {
Collections.shuffle(partitions.getPartitions());
}
};
abstract void sort(Partitions partitions);
/**
* @return determine {@link SortAction} and fall back to {@link SortAction#BY_LATENCY} if sort action cannot be
* resolved.
*/
static SortAction getSortAction() {
String sortAction = System.getProperty("io.lettuce.core.topology.sort", BY_LATENCY.name());
for (SortAction action : values()) {
if (sortAction.equalsIgnoreCase(action.name())) {
return action;
}
}
return BY_LATENCY;
}
}
}
| SortAction |
java | square__moshi | moshi/src/test/java/com/squareup/moshi/TypesTest.java | {
"start": 16170,
"end": 16242
} | interface ____ {}
@Target(FIELD)
@Retention(RUNTIME)
@ | FieldAnnotation |
java | apache__camel | components/camel-stitch/src/test/java/org/apache/camel/component/stitch/operations/StitchProducerOperationsTest.java | {
"start": 2139,
"end": 13546
} | class ____ extends CamelTestSupport {
@Test
void testIfCreateIfStitchMessagesSet() {
final StitchConfiguration configuration = new StitchConfiguration();
configuration.setTableName("test_table");
configuration.setStitchSchema(StitchSchema.builder().addKeyword("field_1", "integer").build());
configuration.setKeyNames("field_1,field_2");
final StitchMessage message = StitchMessage.builder()
.withData("field_1", "data")
.withSequence(0)
.build();
final Exchange exchange = new DefaultExchange(context);
exchange.getMessage().setBody(message);
final StitchProducerOperations operations = new StitchProducerOperations(new TestClient(), configuration);
assertEquals("{\"table_name\":\"test_table\",\"schema\":{\"field_1\":\"integer\"},\"messages\":[{\"action\":\"upsert\","
+ "\"sequence\":0,\"data\":{\"field_1\":\"data\"}}],\"key_names\":[\"field_1\",\"field_2\"]}",
JsonUtils.convertMapToJson(operations.createStitchRequestBody(exchange.getMessage()).toMap()));
final StitchMessage message1 = StitchMessage.builder()
.withData("field_1", "test_2")
.withSequence(0)
.build();
exchange.getMessage().setHeader(StitchConstants.SCHEMA,
StitchSchema.builder().addKeyword("field_1", "integer").addKeyword("field_2", "string").build());
exchange.getMessage().setHeader(StitchConstants.TABLE_NAME, "test_table_2");
exchange.getMessage().setHeader(StitchConstants.KEY_NAMES, "field_1,field_2");
exchange.getMessage().setBody(message1);
assertEquals("{\"table_name\":\"test_table_2\",\"schema\":{\"field_1\":\"integer\",\"field_2\":\"string\"},"
+ "\"messages\":[{\"action\":\"upsert\",\"sequence\":0,\"data\":{\"field_1\":\"test_2\"}}],\"key_names\":[\"field_1\",\"field_2\"]}",
JsonUtils.convertMapToJson(operations.createStitchRequestBody(exchange.getMessage()).toMap()));
}
@Test
void testIfCreateIfStitchRequestBodySet() {
final StitchConfiguration configuration = new StitchConfiguration();
configuration.setTableName("table_2");
final StitchMessage message1 = StitchMessage.builder()
.withData("field_1", "test_2")
.withSequence(0)
.build();
final StitchRequestBody requestBody = StitchRequestBody.builder()
.addMessage(message1)
.withSchema(StitchSchema.builder().addKeyword("field_1", "integer").build())
.withTableName("table_1")
.withKeyNames(Collections.singleton("field_1"))
.build();
final Exchange exchange = new DefaultExchange(context);
exchange.getMessage().setBody(requestBody);
final StitchProducerOperations operations = new StitchProducerOperations(new TestClient(), configuration);
assertEquals("{\"table_name\":\"table_2\",\"schema\":{\"field_1\":\"integer\"},\"messages\":"
+ "[{\"action\":\"upsert\",\"sequence\":0,\"data\":{\"field_1\":\"test_2\"}}],\"key_names\":[\"field_1\"]}",
JsonUtils.convertMapToJson(operations.createStitchRequestBody(exchange.getMessage()).toMap()));
}
@Test
void testIfCreateIfMapSet() {
final StitchConfiguration configuration = new StitchConfiguration();
final Map<String, Object> properties = new LinkedHashMap<>();
properties.put("id", Collections.singletonMap("type", "integer"));
properties.put("name", Collections.singletonMap("type", "string"));
properties.put("age", Collections.singletonMap("type", "integer"));
properties.put("has_magic", Collections.singletonMap("type", "boolean"));
final Map<String, Object> message = new LinkedHashMap<>();
message.put(StitchMessage.DATA, Collections.singletonMap("id", 2));
message.put(StitchMessage.SEQUENCE, 1L);
final Map<String, Object> data = new LinkedHashMap<>();
data.put(StitchRequestBody.TABLE_NAME, "my_table");
data.put(StitchRequestBody.SCHEMA, Collections.singletonMap("properties", properties));
data.put(StitchRequestBody.MESSAGES,
Collections.singletonList(message));
data.put(StitchRequestBody.KEY_NAMES, Collections.singletonList("test_key"));
final Exchange exchange = new DefaultExchange(context);
exchange.getMessage().setBody(data);
final StitchProducerOperations operations = new StitchProducerOperations(new TestClient(), configuration);
final String createdJson
= JsonUtils.convertMapToJson(operations.createStitchRequestBody(exchange.getMessage()).toMap());
assertEquals("{\"table_name\":\"my_table\",\"schema\":{\"properties\":{\"id\":{\"type\":\"integer\"},"
+ "\"name\":{\"type\":\"string\"},\"age\":{\"type\":\"integer\"},\"has_magic\""
+ ":{\"type\":\"boolean\"}}},\"messages\":[{\"action\":\"upsert\",\"sequence\":1,"
+ "\"data\":{\"id\":2}}],\"key_names\":[\"test_key\"]}",
createdJson);
}
@Test
void testIfCreateFromIterable() {
final StitchConfiguration configuration = new StitchConfiguration();
configuration.setTableName("table_1");
configuration.setStitchSchema(StitchSchema.builder().addKeyword("field_1", "string").build());
configuration.setKeyNames("field_1");
final StitchMessage stitchMessage1 = StitchMessage.builder()
.withData("field_1", "stitchMessage1")
.withSequence(1)
.build();
final StitchMessage stitchMessage2 = StitchMessage.builder()
.withData("field_1", "stitchMessage2-1")
.withData("field_2", "stitchMessage2-2")
.withSequence(2)
.build();
final StitchRequestBody stitchMessage2RequestBody = StitchRequestBody.builder()
.addMessage(stitchMessage2)
.withSchema(StitchSchema.builder().addKeyword("field_1", "integer").build())
.withTableName("table_1")
.withKeyNames(Collections.singleton("field_1"))
.build();
final Map<String, Object> stitchMessage3 = new LinkedHashMap<>();
stitchMessage3.put(StitchMessage.DATA, Collections.singletonMap("field_1", "stitchMessage3"));
stitchMessage3.put(StitchMessage.SEQUENCE, 3L);
final StitchMessage stitchMessage4 = StitchMessage.builder()
.withData("field_1", "stitchMessage4")
.withSequence(4)
.build();
final Exchange stitchMessage4Exchange = new DefaultExchange(context);
stitchMessage4Exchange.getMessage().setBody(stitchMessage4);
final StitchMessage stitchMessage5 = StitchMessage.builder()
.withData("field_1", "stitchMessage5")
.withSequence(5)
.build();
final Message stitchMessage5Message = new DefaultExchange(context).getMessage();
stitchMessage5Message.setBody(stitchMessage5);
final List<Object> inputMessages = new LinkedList<>();
inputMessages.add(stitchMessage1);
inputMessages.add(stitchMessage2RequestBody);
inputMessages.add(stitchMessage3);
inputMessages.add(stitchMessage4Exchange);
inputMessages.add(stitchMessage5Message);
final StitchProducerOperations operations = new StitchProducerOperations(new TestClient(), configuration);
final Exchange exchange = new DefaultExchange(context);
exchange.getMessage().setBody(inputMessages);
final String createdJson
= JsonUtils.convertMapToJson(operations.createStitchRequestBody(exchange.getMessage()).toMap());
assertEquals(
"{\"table_name\":\"table_1\",\"schema\":{\"field_1\":\"string\"},\"messages\":[{\"action\":\"upsert\",\"sequence\":1,\"data\":{\"field_1\":\"stitchMessage1\"}},"
+ "{\"action\":\"upsert\",\"sequence\":2,\"data\":{\"field_1\":\"stitchMessage2-1\",\"field_2\":\"stitchMessage2-2\"}},{\"action\":\"upsert\",\"sequence\":3,\"data\":{\"field_1\":"
+ "\"stitchMessage3\"}},{\"action\":\"upsert\",\"sequence\":4,\"data\":{\"field_1\":\"stitchMessage4\"}},{\"action\":\"upsert\",\"sequence\":5,\"data\":{\"field_1\":\"stitchMessage5\"}}],"
+ "\"key_names\":[\"field_1\"]}",
createdJson);
}
@Test
void testNormalSend() {
final StitchConfiguration configuration = new StitchConfiguration();
configuration.setTableName("table_1");
configuration.setStitchSchema(StitchSchema.builder().addKeyword("field_1", "string").build());
configuration.setKeyNames("field_1");
final StitchMessage message = StitchMessage.builder()
.withData("field_1", "data")
.withSequence(0)
.build();
final Exchange exchange = new DefaultExchange(context);
exchange.getMessage().setBody(message);
final StitchProducerOperations operations = new StitchProducerOperations(new TestClient(), configuration);
final AtomicBoolean done = new AtomicBoolean(false);
operations.sendEvents(exchange.getMessage(), response -> {
assertEquals(200, response.getHttpStatusCode());
assertEquals("OK", response.getStatus());
assertEquals("All good!", response.getMessage());
assertEquals(Collections.singletonMap("header-1", "test"), response.getHeaders());
done.set(true);
}, doneSync -> {
});
Awaitility
.await()
.atMost(1, TimeUnit.SECONDS)
.pollInterval(10, TimeUnit.MILLISECONDS)
.untilTrue(done);
}
@Test
void testErrorHandle() {
final StitchConfiguration configuration = new StitchConfiguration();
configuration.setTableName("table_1");
configuration.setStitchSchema(StitchSchema.builder().addKeyword("field_1", "string").build());
configuration.setKeyNames("field_1");
final StitchMessage message = StitchMessage.builder()
.withData("field_1", "data")
.withSequence(0)
.build();
final Exchange exchange = new DefaultExchange(context);
exchange.getMessage().setBody(message);
final StitchProducerOperations operations = new StitchProducerOperations(new TestErrorClient(), configuration);
operations.sendEvents(exchange.getMessage(), response -> {
}, doneSync -> {
});
assertNotNull(exchange.getException());
assertTrue(exchange.getException() instanceof StitchException);
assertNotNull(((StitchException) exchange.getException()).getResponse());
assertEquals(400, ((StitchException) exchange.getException()).getResponse().getHttpStatusCode());
assertEquals("Error", ((StitchException) exchange.getException()).getResponse().getStatus());
assertEquals("Not good!", ((StitchException) exchange.getException()).getResponse().getMessage());
}
static | StitchProducerOperationsTest |
java | apache__camel | core/camel-core-processor/src/main/java/org/apache/camel/processor/TotalRequestsThrottler.java | {
"start": 15692,
"end": 17344
} | class ____ implements Delayed {
private volatile long scheduledTime;
ThrottlePermit(final long delayMs) {
setDelayMs(delayMs);
}
public void setDelayMs(final long delayMs) {
this.scheduledTime = System.currentTimeMillis() + delayMs;
}
@Override
public long getDelay(final TimeUnit unit) {
return unit.convert(scheduledTime - System.currentTimeMillis(), TimeUnit.MILLISECONDS);
}
@Override
public int compareTo(final Delayed o) {
return Long.compare(getDelay(TimeUnit.MILLISECONDS), o.getDelay(TimeUnit.MILLISECONDS));
}
}
@Override
public String getMode() {
return "TotalRequests";
}
/**
* Gets the current maximum request per period value. If it is grouped throttling applied with correlationExpression
* than the max per period within the group will return
*/
@Override
public int getCurrentMaximumRequests() {
return states.values().stream().mapToInt(ThrottlingState::getThrottleRate).max().orElse(0);
}
/**
* Sets the time period during which the maximum number of requests apply
*/
public void setTimePeriodMillis(final long timePeriodMillis) {
this.timePeriodMillis = timePeriodMillis;
}
public long getTimePeriodMillis() {
return timePeriodMillis;
}
@Override
public String getTraceLabel() {
return "throttle[" + this.getMaximumRequestsExpression() + " per: " + timePeriodMillis + "]";
}
@Override
public String toString() {
return id;
}
}
| ThrottlePermit |
java | redisson__redisson | redisson/src/main/java/org/redisson/api/RPatternTopicReactive.java | {
"start": 939,
"end": 2195
} | interface ____ {
/**
* Get topic channel patterns
*
* @return list of topic names
*/
List<String> getPatternNames();
/**
* Subscribes to this topic.
* <code>MessageListener.onMessage</code> is called when any message
* is published on this topic.
*
* @param <T> type of message
* @param type - type of message
* @param listener - message listener
* @return local JVM unique listener id
* @see org.redisson.api.listener.MessageListener
*/
<T> Mono<Integer> addListener(Class<T> type, PatternMessageListener<T> listener);
/**
* Subscribes to status changes of this topic
*
* @param listener - message listener
* @return local JVM unique listener id
* @see org.redisson.api.listener.StatusListener
*/
Mono<Integer> addListener(PatternStatusListener listener);
/**
* Removes the listener by <code>id</code> for listening this topic
*
* @param listenerId - message listener id
*/
Mono<Void> removeListener(int listenerId);
/**
* Returns active topic list of this pattern
* @return all actives channel of this pattern
*/
Mono<List<String>> getActiveTopics();
}
| RPatternTopicReactive |
java | google__guice | extensions/testlib/test/com/google/inject/testing/fieldbinder/BoundFieldModuleTest.java | {
"start": 7766,
"end": 14582
} | interface ____ {}
public void testBindingWithJakartaQualifier() {
final Integer testValue1 = 1024, testValue2 = 2048;
Object instance =
new Object() {
@Bind private Integer anInt = testValue1;
@Bind @SomeJakartaQualifier private Integer anotherInt = testValue2;
};
BoundFieldModule module = BoundFieldModule.of(instance);
Injector injector = Guice.createInjector(module);
assertEquals(testValue1, injector.getInstance(Integer.class));
assertEquals(
testValue2, injector.getInstance(Key.get(Integer.class, SomeJakartaQualifier.class)));
}
public void testCanReuseBindingAnnotationsWithDifferentValues() {
final Integer testValue1 = 1024, testValue2 = 2048;
final String name1 = "foo", name2 = "bar";
Object instance =
new Object() {
@Bind
@Named(name1)
private Integer anInt = testValue1;
@Bind
@Named(name2)
private Integer anotherInt = testValue2;
};
BoundFieldModule module = BoundFieldModule.of(instance);
Injector injector = Guice.createInjector(module);
assertEquals(testValue1, injector.getInstance(Key.get(Integer.class, Names.named(name1))));
assertEquals(testValue2, injector.getInstance(Key.get(Integer.class, Names.named(name2))));
}
public void testBindingWithValuedBindingAnnotation() {
final Integer testValue1 = 1024, testValue2 = 2048;
final String name = "foo";
Object instance =
new Object() {
@Bind private Integer anInt = testValue1;
@Bind
@Named(name)
private Integer anotherInt = testValue2;
};
BoundFieldModule module = BoundFieldModule.of(instance);
Injector injector = Guice.createInjector(module);
assertEquals(testValue1, injector.getInstance(Integer.class));
assertEquals(testValue2, injector.getInstance(Key.get(Integer.class, Names.named(name))));
}
public void testBindingWithGenerics() {
final List<Integer> testIntList = Arrays.asList(new Integer[] {1, 2, 3});
final List<Boolean> testBoolList = Arrays.asList(new Boolean[] {true, true, false});
Object instance =
new Object() {
@Bind private List<Integer> anIntList = testIntList;
@Bind private List<Boolean> aBoolList = testBoolList;
};
BoundFieldModule module = BoundFieldModule.of(instance);
Injector injector = Guice.createInjector(module);
assertEquals(testIntList, injector.getInstance(new Key<List<Integer>>() {}));
assertEquals(testBoolList, injector.getInstance(new Key<List<Boolean>>() {}));
}
public void testBoundValueDoesntChange() {
Integer testValue = 1024;
FieldBindableClass instance = new FieldBindableClass(testValue);
BoundFieldModule module = BoundFieldModule.of(instance);
Injector injector = Guice.createInjector(module);
assertEquals(testValue, injector.getInstance(Integer.class));
instance.anInt++;
assertEquals(testValue, injector.getInstance(Integer.class));
}
public void testIncompatibleBindingType() {
final Integer testInt = 1024;
Object instance =
new Object() {
@Bind(to = String.class)
private Integer anInt = testInt;
};
BoundFieldModule module = BoundFieldModule.of(instance);
try {
Guice.createInjector(module);
fail();
} catch (CreationException e) {
assertContains(
e.getMessage(),
"Requested binding type \"java.lang.String\" is not assignable from field binding type "
+ "\"java.lang.Integer\"");
}
}
public void testIncompatiblePrimitiveBindingType() {
Object instance =
new Object() {
@Bind(to = Long.class)
int value = 1;
};
BoundFieldModule module = BoundFieldModule.of(instance);
try {
Guice.createInjector(module);
fail();
} catch (CreationException e) {
assertContains(
e.getMessage(),
"Requested binding type \"java.lang.Long\" is not assignable from field binding type "
+ "\"java.lang.Integer\"");
}
}
public void testFailureOnMultipleBindingAnnotations() {
final Integer testInt = 1024;
Object instance =
new Object() {
@Bind
@Named("a")
@SomeBindingAnnotation
private Integer anInt = testInt;
};
BoundFieldModule module = BoundFieldModule.of(instance);
try {
Guice.createInjector(module);
fail();
} catch (CreationException e) {
assertContains(e.getMessage(), "More than one annotation is specified for this binding.");
}
}
public void testBindingSuperTypeAndBindingAnnotation() {
final Integer testValue = 1024;
Object instance =
new Object() {
@Bind(to = Number.class)
@Named("foo")
private Integer anInt = testValue;
};
BoundFieldModule module = BoundFieldModule.of(instance);
Injector injector = Guice.createInjector(module);
assertEquals(testValue, injector.getInstance(Key.get(Number.class, Names.named("foo"))));
}
public void testBindingProvider() {
final Integer testValue = 1024;
Object instance =
new Object() {
@Bind
private Provider<Integer> anInt =
new Provider<Integer>() {
@Override
public Integer get() {
return testValue;
}
};
};
BoundFieldModule module = BoundFieldModule.of(instance);
Injector injector = Guice.createInjector(module);
assertEquals(testValue, injector.getInstance(Integer.class));
}
public void testBindingJakartaProvider() {
final Integer testValue = 1024;
Object instance =
new Object() {
@Bind
private jakarta.inject.Provider<Integer> anInt =
new jakarta.inject.Provider<Integer>() {
@Override
public Integer get() {
return testValue;
}
};
};
BoundFieldModule module = BoundFieldModule.of(instance);
Injector injector = Guice.createInjector(module);
assertEquals(testValue, injector.getInstance(Integer.class));
}
public void testBindingNonNullableNullField() {
Object instance =
new Object() {
@Bind private Integer anInt = null;
};
BoundFieldModule module = BoundFieldModule.of(instance);
try {
Guice.createInjector(module);
fail();
} catch (CreationException e) {
assertContains(
e.getMessage(),
"Binding to null values is only allowed for fields that are annotated @Nullable.");
}
}
@Retention(RUNTIME)
private @ | SomeJakartaQualifier |
java | apache__flink | flink-core/src/test/java/org/apache/flink/api/common/typeutils/base/BasicTypeSerializerUpgradeTestSpecifications.java | {
"start": 32838,
"end": 33852
} | class ____
implements TypeSerializerUpgradeTestBase.UpgradeVerifier<Time> {
@Override
public TypeSerializer<Time> createUpgradedSerializer() {
return SqlTimeSerializer.INSTANCE;
}
@Override
public Condition<Time> testDataCondition() {
return new Condition<>(
value -> value.equals(new Time(1580382960L)), "value is 1580382960L");
}
@Override
public Condition<TypeSerializerSchemaCompatibility<Time>> schemaCompatibilityCondition(
FlinkVersion version) {
return TypeSerializerConditions.isCompatibleAsIs();
}
}
// ----------------------------------------------------------------------------------------------
// Specification for "sql-timestamp-serializer"
// ----------------------------------------------------------------------------------------------
/** SqlTimestampSerializerSetup. */
public static final | SqlTimeSerializerVerifier |
java | dropwizard__dropwizard | dropwizard-jersey/src/test/java/io/dropwizard/jersey/guava/OptionalFormParamResourceTest.java | {
"start": 3839,
"end": 4519
} | class ____ {
@POST
@Path("/message")
public String getMessage(@FormParam("message") Optional<String> message) {
return message.or("Default Message");
}
@POST
@Path("/my-message")
public String getMyMessage(@FormParam("mymessage") Optional<MyMessage> myMessage) {
return myMessage.or(new MyMessage("My Default Message")).getMessage();
}
@POST
@Path("/uuid")
public String getUUID(@FormParam("uuid") Optional<UUIDParam> uuid) {
return uuid.or(new UUIDParam("d5672fa8-326b-40f6-bf71-d9dacf44bcdc")).get().toString();
}
}
}
| OptionalFormParamResource |
java | apache__maven | impl/maven-core/src/main/java/org/apache/maven/execution/BuildResumptionData.java | {
"start": 985,
"end": 1522
} | class ____ {
/**
* The list of projects that remain to be built.
*/
private final List<String> remainingProjects;
public BuildResumptionData(final List<String> remainingProjects) {
this.remainingProjects = remainingProjects;
}
/**
* Returns the projects that still need to be built when resuming.
* @return A list containing the group and artifact id of the projects.
*/
public List<String> getRemainingProjects() {
return this.remainingProjects;
}
}
| BuildResumptionData |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RestrictedIndices.java | {
"start": 743,
"end": 1322
} | class ____ {
private final Automaton automaton;
private final Predicate<String> predicate;
public RestrictedIndices(IndexNameExpressionResolver resolver) {
this(resolver.getSystemNameAutomaton());
}
public RestrictedIndices(Automaton automaton) {
this.automaton = automaton;
this.predicate = Automatons.predicate(automaton);
}
public boolean isRestricted(String indexOrAliasName) {
return predicate.test(indexOrAliasName);
}
public Automaton getAutomaton() {
return automaton;
}
}
| RestrictedIndices |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/streaming/runtime/tasks/SourceOperatorStreamTask.java | {
"start": 2918,
"end": 13093
} | class ____<T> extends StreamTask<T, SourceOperator<T, ?>> {
private AsyncDataOutputToOutput<T> output;
/**
* Contains information about all checkpoints where RPC from checkpoint coordinator arrives
* before the source reader triggers it. (Common case)
*/
private SortedMap<Long, UntriggeredCheckpoint> untriggeredCheckpoints = new TreeMap<>();
/**
* Contains the checkpoints that are triggered by the source but the RPC from checkpoint
* coordinator has yet to arrive. This may happen if the barrier is inserted as an event into
* the data plane by the source coordinator and the (distributed) source reader reads that event
* before receiving Flink's checkpoint RPC. (Rare case)
*/
private SortedSet<Long> triggeredCheckpoints = new TreeSet<>();
/**
* Blocks input until the RPC call has been received that corresponds to the triggered
* checkpoint. This future must only be accessed and completed in the mailbox thread.
*/
private CompletableFuture<Void> waitForRPC = FutureUtils.completedVoidFuture();
/** Only set for externally induced sources. See also {@link #isExternallyInducedSource()}. */
private StreamTaskExternallyInducedSourceInput<T> externallyInducedSourceInput;
public SourceOperatorStreamTask(Environment env) throws Exception {
super(env);
}
@Override
public void init() throws Exception {
final SourceOperator<T, ?> sourceOperator = this.mainOperator;
// reader initialization, which cannot happen in the constructor due to the
// lazy metric group initialization. We do this here now, rather than
// later (in open()) so that we can access the reader when setting up the
// input processors
sourceOperator.initReader();
final SourceReader<T, ?> sourceReader = sourceOperator.getSourceReader();
final StreamTaskInput<T> input;
// TODO: should the input be constructed inside the `OperatorChain` class?
if (operatorChain.isTaskDeployedAsFinished()) {
input = new StreamTaskFinishedOnRestoreSourceInput<>(sourceOperator, 0, 0);
} else if (sourceReader instanceof ExternallyInducedSourceReader) {
externallyInducedSourceInput =
new StreamTaskExternallyInducedSourceInput<>(
sourceOperator,
this::triggerCheckpointForExternallyInducedSource,
0,
0);
input = externallyInducedSourceInput;
} else {
input = new StreamTaskSourceInput<>(sourceOperator, 0, 0);
}
// The SourceOperatorStreamTask doesn't have any inputs, so there is no need for
// a WatermarkGauge on the input.
output =
new AsyncDataOutputToOutput<T>(
operatorChain.getMainOperatorOutput(),
sourceOperator.getSourceMetricGroup(),
null);
inputProcessor = new StreamOneInputProcessor<>(input, output, operatorChain);
getEnvironment()
.getMetricGroup()
.getIOMetricGroup()
.gauge(
MetricNames.CHECKPOINT_START_DELAY_TIME,
this::getAsyncCheckpointStartDelayNanos);
}
@Override
public CompletableFuture<Boolean> triggerCheckpointAsync(
CheckpointMetaData checkpointMetaData, CheckpointOptions checkpointOptions) {
if (!isExternallyInducedSource()) {
return triggerCheckpointNowAsync(checkpointMetaData, checkpointOptions);
}
CompletableFuture<Boolean> triggerFuture = new CompletableFuture<>();
// immediately move RPC to mailbox so we don't need to synchronize fields
mainMailboxExecutor.execute(
() ->
triggerCheckpointOnExternallyInducedSource(
checkpointMetaData, checkpointOptions, triggerFuture),
"SourceOperatorStreamTask#triggerCheckpointAsync(%s, %s)",
checkpointMetaData,
checkpointOptions);
return triggerFuture;
}
private boolean isExternallyInducedSource() {
return externallyInducedSourceInput != null;
}
private void triggerCheckpointOnExternallyInducedSource(
CheckpointMetaData checkpointMetaData,
CheckpointOptions checkpointOptions,
CompletableFuture<Boolean> triggerFuture) {
assert (mailboxProcessor.isMailboxThread());
if (!triggeredCheckpoints.remove(checkpointMetaData.getCheckpointId())) {
// common case: RPC is received before source reader triggers checkpoint
// store metadata and options for later
untriggeredCheckpoints.put(
checkpointMetaData.getCheckpointId(),
new UntriggeredCheckpoint(checkpointMetaData, checkpointOptions));
triggerFuture.complete(isRunning());
} else {
// trigger already received (rare case)
FutureUtils.forward(
triggerCheckpointNowAsync(checkpointMetaData, checkpointOptions),
triggerFuture);
cleanupOldCheckpoints(checkpointMetaData.getCheckpointId());
}
}
private CompletableFuture<Boolean> triggerCheckpointNowAsync(
CheckpointMetaData checkpointMetaData, CheckpointOptions checkpointOptions) {
if (isSynchronous(checkpointOptions.getCheckpointType())) {
return triggerStopWithSavepointAsync(checkpointMetaData, checkpointOptions);
} else {
return super.triggerCheckpointAsync(checkpointMetaData, checkpointOptions);
}
}
private boolean isSynchronous(SnapshotType checkpointType) {
return checkpointType.isSavepoint() && ((SavepointType) checkpointType).isSynchronous();
}
private CompletableFuture<Boolean> triggerStopWithSavepointAsync(
CheckpointMetaData checkpointMetaData, CheckpointOptions checkpointOptions) {
CompletableFuture<Void> operatorFinished = new CompletableFuture<>();
mainMailboxExecutor.execute(
() -> {
setSynchronousSavepoint(checkpointMetaData.getCheckpointId());
FutureUtils.forward(
mainOperator.stop(
((SavepointType) checkpointOptions.getCheckpointType())
.shouldDrain()
? StopMode.DRAIN
: StopMode.NO_DRAIN),
operatorFinished);
},
"stop Flip-27 source for stop-with-savepoint");
return operatorFinished.thenCompose(
(ignore) -> super.triggerCheckpointAsync(checkpointMetaData, checkpointOptions));
}
@Override
protected void advanceToEndOfEventTime() {
output.emitWatermark(Watermark.MAX_WATERMARK);
}
@Override
protected void declineCheckpoint(long checkpointId) {
cleanupCheckpoint(checkpointId);
super.declineCheckpoint(checkpointId);
}
@Override
public Future<Void> notifyCheckpointAbortAsync(
long checkpointId, long latestCompletedCheckpointId) {
mainMailboxExecutor.execute(
() -> cleanupCheckpoint(checkpointId), "Cleanup checkpoint %d", checkpointId);
return super.notifyCheckpointAbortAsync(checkpointId, latestCompletedCheckpointId);
}
@Override
public Future<Void> notifyCheckpointSubsumedAsync(long checkpointId) {
mainMailboxExecutor.execute(
() -> cleanupCheckpoint(checkpointId), "Cleanup checkpoint %d", checkpointId);
return super.notifyCheckpointSubsumedAsync(checkpointId);
}
// --------------------------
private void triggerCheckpointForExternallyInducedSource(long checkpointId) {
UntriggeredCheckpoint untriggeredCheckpoint = untriggeredCheckpoints.remove(checkpointId);
if (untriggeredCheckpoint != null) {
// common case: RPC before external sources induces it
triggerCheckpointNowAsync(
untriggeredCheckpoint.getMetadata(),
untriggeredCheckpoint.getCheckpointOptions());
cleanupOldCheckpoints(checkpointId);
} else {
// rare case: external source induced first
triggeredCheckpoints.add(checkpointId);
if (waitForRPC.isDone()) {
waitForRPC = new CompletableFuture<>();
externallyInducedSourceInput.blockUntil(waitForRPC);
}
}
}
/**
* Cleanup any orphaned checkpoint before the given currently triggered checkpoint. These
* checkpoint may occur when the checkpoint is cancelled but the RPC is lost. Note, to be safe,
* checkpoint X is only removed when both RPC and trigger for a checkpoint Y>X is received.
*/
private void cleanupOldCheckpoints(long checkpointId) {
assert (mailboxProcessor.isMailboxThread());
triggeredCheckpoints.headSet(checkpointId).clear();
untriggeredCheckpoints.headMap(checkpointId).clear();
maybeResumeProcessing();
}
/** Resumes processing if it was blocked before or else is a no-op. */
private void maybeResumeProcessing() {
assert (mailboxProcessor.isMailboxThread());
if (triggeredCheckpoints.isEmpty()) {
waitForRPC.complete(null);
}
}
/** Remove temporary data about a canceled checkpoint. */
private void cleanupCheckpoint(long checkpointId) {
assert (mailboxProcessor.isMailboxThread());
triggeredCheckpoints.remove(checkpointId);
untriggeredCheckpoints.remove(checkpointId);
maybeResumeProcessing();
}
// ---------------------------
/** Implementation of {@link DataOutput} that wraps a specific {@link Output}. */
public static | SourceOperatorStreamTask |
java | apache__commons-lang | src/test/java/org/apache/commons/lang3/function/MethodInvokersFunctionTest.java | {
"start": 1560,
"end": 4363
} | class ____ extends MethodFixtures {
@Test
void testApply0Arg() throws NoSuchMethodException, SecurityException {
final Function<MethodFixtures, String> func = MethodInvokers.asFunction(getMethodForGetString());
assertEquals(INSTANCE.getString(), func.apply(INSTANCE));
}
@Test
void testApply0ArgThrowsUnchecked() throws NoSuchMethodException, SecurityException {
final Function<MethodFixtures, String> func = MethodInvokers.asFunction(getMethodForGetStringThrowsUnchecked());
assertThrows(CustomUncheckedException.class, () -> func.apply(INSTANCE));
}
@Test
void testBuildVarArg() throws SecurityException, NoSuchMethodException {
MethodInvokers.asFunction(getMethodForGetStringVarStringArgs());
}
@Test
void testConstructorForNull() throws SecurityException {
assertNullPointerException(() -> MethodInvokers.asFunction(null));
}
@Test
void testFindAndInvoke() throws SecurityException {
// Finding
final List<Function<Object, Object>> invokers = Stream.of(MethodFixtures.class.getDeclaredMethods())
.filter(m -> m.isAnnotationPresent(AnnotationTestFixture.class)).map(MethodInvokers::asFunction).collect(Collectors.toList());
assertEquals(2, invokers.size());
// ...
// Invoking
final Set<Object> set1 = invokers.stream().map(i -> i.apply(MethodFixtures.INSTANCE)).collect(Collectors.toSet());
assertEquals(new HashSet<>(Arrays.asList(INSTANCE.getString(), INSTANCE.getString2())), set1);
final Set<Object> set2 = Stream.of(INSTANCE).map(invokers.get(0)).collect(Collectors.toSet());
final Set<Object> set3 = Stream.of(INSTANCE).map(invokers.get(1)).collect(Collectors.toSet());
set2.addAll(set3);
assertEquals(new HashSet<>(Arrays.asList(INSTANCE.getString(), INSTANCE.getString2())), set2);
}
@Test
void testFullExample() throws SecurityException, ReflectiveOperationException {
final Method method = String.class.getMethod("length");
final Function<String, Integer> function = MethodInvokers.asFunction(method);
assertEquals(3, function.apply("ABC"));
}
@Test
void testMapComputeIfAbsent() throws NoSuchMethodException, SecurityException {
final Map<MethodFixtures, String> map = new HashMap<>();
map.computeIfAbsent(INSTANCE, MethodInvokers.asFunction(getMethodForGetString()));
assertEquals(INSTANCE.getString(), map.get(INSTANCE));
}
@Test
void testToString() throws SecurityException, ReflectiveOperationException {
// Should not blow up and must return _something_
assertFalse(MethodInvokers.asFunction(getMethodForGetString()).toString().isEmpty());
}
}
| MethodInvokersFunctionTest |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/JobVertexBackPressureHandler.java | {
"start": 2500,
"end": 10389
} | class ____
extends AbstractRestHandler<
RestfulGateway,
EmptyRequestBody,
JobVertexBackPressureInfo,
JobVertexMessageParameters> {
private final MetricFetcher metricFetcher;
public JobVertexBackPressureHandler(
GatewayRetriever<? extends RestfulGateway> leaderRetriever,
Duration timeout,
Map<String, String> responseHeaders,
MessageHeaders<EmptyRequestBody, JobVertexBackPressureInfo, JobVertexMessageParameters>
messageHeaders,
MetricFetcher metricFetcher) {
super(leaderRetriever, timeout, responseHeaders, messageHeaders);
this.metricFetcher = metricFetcher;
}
@Override
protected CompletableFuture<JobVertexBackPressureInfo> handleRequest(
@Nonnull HandlerRequest<EmptyRequestBody> request, @Nonnull RestfulGateway gateway)
throws RestHandlerException {
metricFetcher.update();
final JobID jobId = request.getPathParameter(JobIDPathParameter.class);
final JobVertexID jobVertexId = request.getPathParameter(JobVertexIdPathParameter.class);
TaskMetricStore taskMetricStore =
metricFetcher
.getMetricStore()
.getTaskMetricStore(jobId.toString(), jobVertexId.toString());
Map<String, Map<Integer, Integer>> jobRepresentativeExecutions =
metricFetcher.getMetricStore().getRepresentativeAttempts().get(jobId.toString());
Map<Integer, Integer> representativeAttempts =
jobRepresentativeExecutions != null
? jobRepresentativeExecutions.get(jobVertexId.toString())
: null;
return CompletableFuture.completedFuture(
taskMetricStore != null
? createJobVertexBackPressureInfo(taskMetricStore, representativeAttempts)
: JobVertexBackPressureInfo.deprecated());
}
private JobVertexBackPressureInfo createJobVertexBackPressureInfo(
TaskMetricStore taskMetricStore, Map<Integer, Integer> representativeAttempts) {
List<SubtaskBackPressureInfo> subtaskBackPressureInfos =
createSubtaskBackPressureInfo(taskMetricStore, representativeAttempts);
return new JobVertexBackPressureInfo(
JobVertexBackPressureInfo.VertexBackPressureStatus.OK,
getBackPressureLevel(getMaxBackPressureRatio(subtaskBackPressureInfos)),
metricFetcher.getLastUpdateTime(),
subtaskBackPressureInfos);
}
private List<SubtaskBackPressureInfo> createSubtaskBackPressureInfo(
TaskMetricStore taskMetricStore, Map<Integer, Integer> representativeAttempts) {
Map<Integer, SubtaskMetricStore> subtaskMetricStores =
taskMetricStore.getAllSubtaskMetricStores();
List<SubtaskBackPressureInfo> result = new ArrayList<>(subtaskMetricStores.size());
for (Map.Entry<Integer, SubtaskMetricStore> entry : subtaskMetricStores.entrySet()) {
int subtaskIndex = entry.getKey();
SubtaskMetricStore subtaskMetricStore = entry.getValue();
Map<Integer, ComponentMetricStore> allAttemptsMetricStores =
subtaskMetricStore.getAllAttemptsMetricStores();
if (allAttemptsMetricStores.isEmpty() || allAttemptsMetricStores.size() == 1) {
result.add(
createSubtaskAttemptBackpressureInfo(
subtaskIndex, null, subtaskMetricStore, null));
} else {
int representativeAttempt =
representativeAttempts == null
? -1
: representativeAttempts.getOrDefault(subtaskIndex, -1);
if (!allAttemptsMetricStores.containsKey(representativeAttempt)) {
// allAttemptsMetricStores is not empty here
representativeAttempt = allAttemptsMetricStores.keySet().iterator().next();
}
List<SubtaskBackPressureInfo> otherConcurrentAttempts =
new ArrayList<>(allAttemptsMetricStores.size() - 1);
for (Map.Entry<Integer, ComponentMetricStore> attemptStore :
allAttemptsMetricStores.entrySet()) {
if (attemptStore.getKey() == representativeAttempt) {
continue;
}
otherConcurrentAttempts.add(
createSubtaskAttemptBackpressureInfo(
subtaskIndex,
attemptStore.getKey(),
attemptStore.getValue(),
null));
}
result.add(
createSubtaskAttemptBackpressureInfo(
subtaskIndex,
representativeAttempt,
allAttemptsMetricStores.get(representativeAttempt),
otherConcurrentAttempts));
}
}
result.sort(Comparator.comparingInt(SubtaskBackPressureInfo::getSubtask));
return result;
}
private SubtaskBackPressureInfo createSubtaskAttemptBackpressureInfo(
int subtaskIndex,
@Nullable Integer attemptNumber,
ComponentMetricStore metricStore,
@Nullable List<SubtaskBackPressureInfo> otherConcurrentAttempts) {
double backPressureRatio = getBackPressureRatio(metricStore);
double idleRatio = getIdleRatio(metricStore);
double busyRatio = getBusyRatio(metricStore);
return new SubtaskBackPressureInfo(
subtaskIndex,
attemptNumber,
getBackPressureLevel(backPressureRatio),
backPressureRatio,
idleRatio,
busyRatio,
otherConcurrentAttempts);
}
private double getMaxBackPressureRatio(List<SubtaskBackPressureInfo> subtaskBackPressureInfos) {
return subtaskBackPressureInfos.stream()
.mapToDouble(backPressureInfo -> backPressureInfo.getBackPressuredRatio())
.max()
.getAsDouble();
}
private double getBackPressureRatio(ComponentMetricStore metricStore) {
return getMsPerSecondMetricAsRatio(metricStore, MetricNames.TASK_BACK_PRESSURED_TIME);
}
private double getIdleRatio(ComponentMetricStore metricStore) {
return getMsPerSecondMetricAsRatio(metricStore, MetricNames.TASK_IDLE_TIME);
}
private double getBusyRatio(ComponentMetricStore metricStore) {
return getMsPerSecondMetricAsRatio(metricStore, MetricNames.TASK_BUSY_TIME);
}
private double getMsPerSecondMetricAsRatio(
ComponentMetricStore metricStore, String metricName) {
return Double.valueOf(metricStore.getMetric(metricName, "0")) / 1_000;
}
/**
* Returns the back pressure level as a String.
*
* @param backPressureRatio Ratio of back pressures samples to total number of samples.
* @return Back pressure level ('ok', 'low', or 'high')
*/
private static JobVertexBackPressureInfo.VertexBackPressureLevel getBackPressureLevel(
double backPressureRatio) {
if (backPressureRatio <= 0.10) {
return JobVertexBackPressureInfo.VertexBackPressureLevel.OK;
} else if (backPressureRatio <= 0.5) {
return JobVertexBackPressureInfo.VertexBackPressureLevel.LOW;
} else {
return JobVertexBackPressureInfo.VertexBackPressureLevel.HIGH;
}
}
}
| JobVertexBackPressureHandler |
java | dropwizard__dropwizard | dropwizard-jackson/src/test/java/io/dropwizard/jackson/FuzzyEnumModuleTest.java | {
"start": 657,
"end": 758
} | class ____ {
private final ObjectMapper mapper = new ObjectMapper();
private | FuzzyEnumModuleTest |
java | apache__camel | components/camel-openapi-java/src/test/java/org/apache/camel/openapi/RestOpenApiReaderDisabledTest.java | {
"start": 1522,
"end": 5076
} | class ____ extends CamelTestSupport {
private Logger log = LoggerFactory.getLogger(getClass());
@BindToRegistry("dummy-rest")
private DummyRestConsumerFactory factory = new DummyRestConsumerFactory();
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
rest("/hello").consumes("application/json").produces("application/json").get("/hi/{name}")
.description("Saying hi").param().name("name").type(RestParamType.path)
.dataType("string").description("Who is it").example("Donald Duck").endParam()
.param().name("filter").description("Filters to apply to the entity.").type(RestParamType.query)
.dataType("array").arrayType("date-time").endParam().to("log:hi")
.get("/bye/{name}").disabled().description("Saying bye").param().name("name")
.type(RestParamType.path).dataType("string").description("Who is it").example("Donald Duck").endParam()
.responseMessage().code(200).message("A reply number")
.responseModel(float.class).example("success", "123").example("error", "-1").endResponseMessage()
.to("log:bye").post("/bye").disabled("true")
.description("To update the greeting message").consumes("application/xml").produces("application/xml")
.param().name("greeting").type(RestParamType.body)
.dataType("string").description("Message to use as greeting")
.example("application/xml", "<hello>Hi</hello>").endParam().to("log:bye");
}
};
}
@ParameterizedTest
@ValueSource(strings = { "3.1", "3.0" })
public void testReaderReadV3(String version) throws Exception {
BeanConfig config = new BeanConfig();
config.setHost("localhost:8080");
config.setSchemes(new String[] { "http" });
config.setBasePath("/api");
config.setInfo(new Info());
config.setVersion(version);
RestOpenApiReader reader = new RestOpenApiReader();
OpenAPI openApi = reader.read(context, context.getRestDefinitions(), config, context.getName(),
new DefaultClassResolver());
assertNotNull(openApi);
String json = RestOpenApiSupport.getJsonFromOpenAPIAsString(openApi, config);
log.info(json);
assertTrue(json.contains("\"url\" : \"http://localhost:8080/api\""));
assertFalse(json.contains("\"/hello/bye\""));
assertFalse(json.contains("\"summary\" : \"To update the greeting message\""));
assertFalse(json.contains("\"/hello/bye/{name}\""));
assertFalse(json.contains("\"/api/hello/bye/{name}\""));
assertTrue(json.contains("\"/hello/hi/{name}\""));
assertFalse(json.contains("\"/api/hello/hi/{name}\""));
assertFalse(json.contains("\"type\" : \"number\""));
assertFalse(json.contains("\"format\" : \"float\""));
assertFalse(json.contains("\"example\" : \"<hello>Hi</hello>\""));
assertTrue(json.contains("\"example\" : \"Donald Duck\""));
assertFalse(json.contains("\"success\" : { \"value\" : \"123\" }"));
assertFalse(json.contains("\"error\" : { \"value\" : \"-1\" }"));
assertTrue(json.contains("\"type\" : \"array\""));
context.stop();
}
}
| RestOpenApiReaderDisabledTest |
java | apache__hadoop | hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/NativeFileSystemStore.java | {
"start": 1406,
"end": 5093
} | interface ____ {
void initialize(URI uri, Configuration conf, AzureFileSystemInstrumentation instrumentation) throws IOException;
void storeEmptyFolder(String key, PermissionStatus permissionStatus)
throws AzureException;
FileMetadata retrieveMetadata(String key) throws IOException;
InputStream retrieve(String key) throws IOException;
InputStream retrieve(String key, long byteRangeStart) throws IOException;
InputStream retrieve(String key, long byteRangeStart,
Optional<Configuration> options) throws IOException;
DataOutputStream storefile(String keyEncoded,
PermissionStatus permissionStatus,
String key) throws AzureException;
boolean isPageBlobKey(String key);
boolean isAtomicRenameKey(String key);
/**
* Returns the file block size. This is a fake value used for integration
* of the Azure store with Hadoop.
* @return The file block size.
*/
long getHadoopBlockSize();
void storeEmptyLinkFile(String key, String tempBlobKey,
PermissionStatus permissionStatus) throws AzureException;
String getLinkInFileMetadata(String key) throws AzureException;
FileMetadata[] list(String prefix, final int maxListingCount,
final int maxListingDepth) throws IOException;
void changePermissionStatus(String key, PermissionStatus newPermission)
throws AzureException;
byte[] retrieveAttribute(String key, String attribute) throws IOException;
void storeAttribute(String key, String attribute, byte[] value) throws IOException;
/**
* API to delete a blob in the back end azure storage.
* @param key - key to the blob being deleted.
* @return return true when delete is successful, false if
* blob cannot be found or delete is not possible without
* exception.
* @throws IOException Exception encountered while deleting in
* azure storage.
*/
boolean delete(String key) throws IOException;
void rename(String srcKey, String dstKey) throws IOException;
void rename(String srcKey, String dstKey, boolean acquireLease, SelfRenewingLease existingLease)
throws IOException;
void rename(String srcKey, String dstKey, boolean acquireLease,
SelfRenewingLease existingLease, boolean overwriteDestination)
throws IOException;
/**
* Delete all keys with the given prefix. Used for testing.
*
* @param prefix prefix of objects to be deleted.
* @throws IOException Exception encountered while deleting keys.
*/
@VisibleForTesting
void purge(String prefix) throws IOException;
/**
* Diagnostic method to dump state to the console.
*
* @throws IOException Exception encountered while dumping to console.
*/
void dump() throws IOException;
void close();
void updateFolderLastModifiedTime(String key, SelfRenewingLease folderLease)
throws AzureException;
void updateFolderLastModifiedTime(String key, Date lastModified,
SelfRenewingLease folderLease) throws AzureException;
/**
* API to delete a blob in the back end azure storage.
* @param key - key to the blob being deleted.
* @param lease - Active lease on the blob.
* @return return true when delete is successful, false if
* blob cannot be found or delete is not possible without
* exception.
* @throws IOException Exception encountered while deleting in
* azure storage.
*/
boolean delete(String key, SelfRenewingLease lease) throws IOException;
SelfRenewingLease acquireLease(String key) throws AzureException;
DataOutputStream retrieveAppendStream(String key, int bufferSize) throws IOException;
boolean explicitFileExists(String key) throws AzureException;
}
| NativeFileSystemStore |
java | apache__camel | components/camel-cxf/camel-cxf-spring-rest/src/test/java/org/apache/camel/component/cxf/jaxrs/CxfRsProducerTest.java | {
"start": 2418,
"end": 25836
} | class ____ implements Processor {
@Override
public void process(Exchange exchange) throws Exception {
// check the query
Message inMessage = exchange.getIn();
exchange.getMessage().setBody(inMessage.getHeader(Exchange.HTTP_QUERY, String.class));
}
}
public int getPort1() {
return port1;
}
public int getPort2() {
return port2;
}
@Override
protected AbstractXmlApplicationContext createApplicationContext() {
return new ClassPathXmlApplicationContext("org/apache/camel/component/cxf/jaxrs/CxfRsSpringProducer.xml");
}
protected void setupDestinationURL(Message inMessage) {
// do nothing here
}
@Test
public void testGetCustomerWithClientProxyAPI() {
// START SNIPPET: ProxyExample
Exchange exchange = template.send("direct://proxy", new Processor() {
public void process(Exchange exchange) throws Exception {
exchange.setPattern(ExchangePattern.InOut);
Message inMessage = exchange.getIn();
// set the operation name
inMessage.setHeader(CxfConstants.OPERATION_NAME, "getCustomer");
// using the proxy client API
inMessage.setHeader(CxfConstants.CAMEL_CXF_RS_USING_HTTP_API, Boolean.FALSE);
// set a customer header
inMessage.setHeader("key", "value");
// set the parameters , if you just have one parameter
// camel will put this object into an Object[] itself
inMessage.setBody("123");
}
});
// get the response message
Customer response = (Customer) exchange.getMessage().getBody();
assertNotNull(response, "The response should not be null");
assertEquals(123, response.getId(), "Get a wrong customer id");
assertEquals("John", response.getName(), "Get a wrong customer name");
assertEquals(200, exchange.getMessage().getHeader(Exchange.HTTP_RESPONSE_CODE), "Get a wrong response code");
assertEquals("value", exchange.getMessage().getHeader("key"), "Get a wrong header value");
// END SNIPPET: ProxyExample
}
@Test
public void testGetCustomersWithClientProxyAPI() {
Exchange exchange = template.send("direct://proxy", new Processor() {
public void process(Exchange exchange) throws Exception {
exchange.setPattern(ExchangePattern.InOut);
Message inMessage = exchange.getIn();
// set the operation name
inMessage.setHeader(CxfConstants.OPERATION_NAME, "getCustomers");
// using the proxy client API
inMessage.setHeader(CxfConstants.CAMEL_CXF_RS_USING_HTTP_API, Boolean.FALSE);
// set the parameters , if you just have one parameter
// camel will put this object into an Object[] itself
inMessage.setBody(null);
}
});
// get the response message
List<Customer> response = CastUtils.cast((List<?>) exchange.getMessage().getBody());
assertNotNull(response, "The response should not be null");
assertTrue(response.contains(new Customer(113, "Dan")), "Dan is missing!");
assertTrue(response.contains(new Customer(123, "John")), "John is missing!");
assertEquals(200, exchange.getMessage().getHeader(Exchange.HTTP_RESPONSE_CODE), "Get a wrong response code");
}
@Test
public void testGetCustomerWithHttpCentralClientAPI() {
// START SNIPPET: HttpExample
Exchange exchange = template.send("direct://http", new Processor() {
public void process(Exchange exchange) throws Exception {
exchange.setPattern(ExchangePattern.InOut);
Message inMessage = exchange.getIn();
// using the http central client API
inMessage.setHeader(CxfConstants.CAMEL_CXF_RS_USING_HTTP_API, Boolean.TRUE);
// set the Http method
inMessage.setHeader(Exchange.HTTP_METHOD, "GET");
// set the relative path
inMessage.setHeader(Exchange.HTTP_PATH, "/customerservice/customers/123");
// Specify the response class , cxfrs will use InputStream as the response object type
inMessage.setHeader(CxfConstants.CAMEL_CXF_RS_RESPONSE_CLASS, Customer.class);
// set a customer header
inMessage.setHeader("key", "value");
// since we use the Get method, so we don't need to set the message body
inMessage.setBody(null);
}
});
// get the response message
Customer response = (Customer) exchange.getMessage().getBody();
assertNotNull(response, "The response should not be null");
assertEquals(123, response.getId(), "Get a wrong customer id");
assertEquals("John", response.getName(), "Get a wrong customer name");
assertEquals(200, exchange.getMessage().getHeader(Exchange.HTTP_RESPONSE_CODE), "Get a wrong response code");
assertEquals("value", exchange.getMessage().getHeader("key"), "Get a wrong header value");
// END SNIPPET: HttpExample
}
@Test
public void testSuppressGetCustomerExceptionWithCxfRsEndpoint() {
Exchange exchange
= template.send("cxfrs://http://localhost:" + getPort1() + "/" + getClass().getSimpleName()
+ "/?httpClientAPI=true&throwExceptionOnFailure=false&synchronous=true",
new Processor() {
public void process(Exchange exchange) throws Exception {
exchange.setPattern(ExchangePattern.InOut);
Message message = exchange.getIn();
// set the Http method
message.setHeader(Exchange.HTTP_METHOD, "PUT");
// set the relative path
message.setHeader(Exchange.HTTP_PATH, "/customerservice/customers");
// we just setup the customer with a wrong id
Customer customer = new Customer();
customer.setId(222);
customer.setName("user");
message.setBody(customer);
}
});
// we should get the exception here
assertNull(exchange.getException(), "Don't expect the exception here");
Message result = exchange.getMessage();
assertEquals(406, result.getHeader(Exchange.HTTP_RESPONSE_CODE), "Get a wrong http status code.");
}
@Test
public void testGetCustomerExceptionWithCxfRsEndpoint() {
Exchange exchange
= template.send("cxfrs://http://localhost:" + getPort1() + "/" + getClass().getSimpleName()
+ "/?httpClientAPI=true&synchronous=true",
new Processor() {
public void process(Exchange exchange) throws Exception {
exchange.setPattern(ExchangePattern.InOut);
Message message = exchange.getIn();
// set the Http method
message.setHeader(Exchange.HTTP_METHOD, "PUT");
// set the relative path
message.setHeader(Exchange.HTTP_PATH, "/customerservice/customers");
// we just setup the customer with a wrong id
Customer customer = new Customer();
customer.setId(222);
customer.setName("user");
message.setBody(customer);
}
});
// we should get the exception here
assertNotNull(exchange.getException(), "Expect the exception here");
CxfOperationException exception = (CxfOperationException) exchange.getException();
assertEquals("Cannot find the customer!", exception.getResponseBody(), "Get a wrong response body");
}
@Test
public void testGetCustomerWithCxfRsEndpoint() {
Exchange exchange
= template.send(
"cxfrs://http://localhost:" + getPort1() + "/" + getClass().getSimpleName() + "/?httpClientAPI=true",
new Processor() {
public void process(Exchange exchange) throws Exception {
exchange.setPattern(ExchangePattern.InOut);
Message inMessage = exchange.getIn();
// set the Http method
inMessage.setHeader(Exchange.HTTP_METHOD, "GET");
// set the relative path
inMessage.setHeader(Exchange.HTTP_PATH, "/customerservice/customers/123");
// Specify the response class , cxfrs will use InputStream as the response object type
inMessage.setHeader(CxfConstants.CAMEL_CXF_RS_RESPONSE_CLASS, Customer.class);
// since we use the Get method, so we don't need to set the message body
inMessage.setBody(null);
}
});
// get the response message
Customer response = (Customer) exchange.getMessage().getBody();
assertNotNull(response, "The response should not be null");
assertEquals(123, response.getId(), "Get a wrong customer id");
assertEquals("John", response.getName(), "Get a wrong customer name");
assertEquals(200, exchange.getMessage().getHeader(Exchange.HTTP_RESPONSE_CODE), "Get a wrong response code");
}
@Test
public void testGetCustomerWithVariableReplacementAndCxfRsEndpoint() {
Exchange exchange = template.send(
"cxfrs://http://localhost:" + getPort1() + "/" + getClass().getSimpleName() + "/?httpClientAPI=true",
new Processor() {
public void process(Exchange exchange) throws Exception {
exchange.setPattern(ExchangePattern.InOut);
Message inMessage = exchange.getIn();
// set the Http method
inMessage.setHeader(Exchange.HTTP_METHOD, "GET");
// set the relative path
inMessage.setHeader(Exchange.HTTP_PATH, "/customerservice/customers/{customerId}");
// Set variables for replacement
inMessage.setHeader(CxfConstants.CAMEL_CXF_RS_VAR_VALUES, new String[] { "123" });
// Specify the response class , cxfrs will use InputStream as the response object type
inMessage.setHeader(CxfConstants.CAMEL_CXF_RS_RESPONSE_CLASS, Customer.class);
// since we use the Get method, so we don't need to set the message body
inMessage.setBody(null);
}
});
// get the response message
Customer response = (Customer) exchange.getMessage().getBody();
assertNotNull(response, "The response should not be null");
assertEquals(123, response.getId(), "Get a wrong customer id");
assertEquals("John", response.getName(), "Get a wrong customer name");
assertEquals(200, exchange.getMessage().getHeader(Exchange.HTTP_RESPONSE_CODE), "Get a wrong response code");
}
@Test
public void testAddCustomerUniqueResponseCodeWithHttpClientAPI() {
Exchange exchange
= template.send("cxfrs://http://localhost:" + getPort1() + "/" + getClass().getSimpleName()
+ "?httpClientAPI=true&synchronous=true",
new Processor() {
public void process(Exchange exchange) throws Exception {
exchange.setPattern(ExchangePattern.InOut);
Message inMessage = exchange.getIn();
// set the Http method
inMessage.setHeader(Exchange.HTTP_METHOD, "POST");
// set the relative path
inMessage.setHeader(Exchange.HTTP_PATH, "/customerservice/customersUniqueResponseCode");
// create a new customer object
Customer customer = new Customer();
customer.setId(9999);
customer.setName("HttpClient");
inMessage.setBody(customer);
}
});
// get the response message
Response response = (Response) exchange.getMessage().getBody();
assertNotNull(response, "The response should not be null");
assertNotNull(response.getEntity(), "The response entity should not be null");
// check the response code
assertEquals(201, response.getStatus(), "Get a wrong response code");
// check the response code from message header
assertEquals(201, exchange.getMessage().getHeader(Exchange.HTTP_RESPONSE_CODE), "Get a wrong response code");
}
@Test
public void testAddCustomerUniqueResponseCodeWithProxyAPI() {
Exchange exchange = template.send("direct://proxy", new Processor() {
public void process(Exchange exchange) throws Exception {
exchange.setPattern(ExchangePattern.InOut);
Message inMessage = exchange.getIn();
// set the operation name
inMessage.setHeader(CxfConstants.OPERATION_NAME, "addCustomerUniqueResponseCode");
// using the proxy client API
inMessage.setHeader(CxfConstants.CAMEL_CXF_RS_USING_HTTP_API, Boolean.FALSE);
// set the parameters , if you just have one parameter
// camel will put this object into an Object[] itself
Customer customer = new Customer();
customer.setId(8888);
customer.setName("ProxyAPI");
inMessage.setBody(customer);
}
});
// get the response message
Response response = (Response) exchange.getMessage().getBody();
assertNotNull(response, "The response should not be null");
assertNotNull(response.getEntity(), "The response entity should not be null");
// check the response code
assertEquals(201, response.getStatus(), "Get a wrong response code");
// check the response code from message header
assertEquals(201, exchange.getMessage().getHeader(Exchange.HTTP_RESPONSE_CODE), "Get a wrong response code");
}
@Test
public void testAddCustomerUniqueResponseCode() {
Exchange exchange
= template.send("cxfrs://http://localhost:" + getPort1() + "/" + getClass().getSimpleName()
+ "?httpClientAPI=true&synchronous=true",
new Processor() {
public void process(Exchange exchange) throws Exception {
exchange.setPattern(ExchangePattern.InOut);
Message inMessage = exchange.getIn();
// set the Http method
inMessage.setHeader(Exchange.HTTP_METHOD, "POST");
// set the relative path
inMessage.setHeader(Exchange.HTTP_PATH, "/customerservice/customersUniqueResponseCode");
// put the response's entity into out message body
inMessage.setHeader(CxfConstants.CAMEL_CXF_RS_RESPONSE_CLASS, Customer.class);
// create a new customer object
Customer customer = new Customer();
customer.setId(8888);
customer.setName("Willem");
inMessage.setBody(customer);
}
});
// get the response message
Customer response = (Customer) exchange.getMessage().getBody();
assertNotNull(response, "The response should not be null");
assertNotEquals(8888, response.getId(), "Get a wrong customer id");
assertEquals("Willem", response.getName(), "Get a wrong customer name");
assertEquals(201, exchange.getMessage().getHeader(Exchange.HTTP_RESPONSE_CODE), "Get a wrong response code");
}
@Test
public void testProducerWithQueryParameters() {
Exchange exchange = template.send("cxfrs://http://localhost:" + getPort2() + "/" + getClass().getSimpleName()
+ "/testQuery?httpClientAPI=true&q1=12&q2=13&synchronous=true",
new Processor() {
public void process(Exchange exchange) throws Exception {
exchange.setPattern(ExchangePattern.InOut);
Message inMessage = exchange.getIn();
// set the Http method
inMessage.setHeader(Exchange.HTTP_METHOD, "GET");
inMessage.setHeader(CxfConstants.CAMEL_CXF_RS_RESPONSE_CLASS, InputStream.class);
inMessage.setBody(null);
}
});
// get the response message
String response = exchange.getMessage().getBody(String.class);
assertNotNull(response, "The response should not be null");
assertEquals("q1=12&q2=13", response, "The response value is wrong");
}
@Test
public void testProducerWithQueryParametersHeader() {
Exchange exchange = template.send("cxfrs://http://localhost:" + getPort2() + "/" + getClass().getSimpleName()
+ "/testQuery?httpClientAPI=true&q1=12&q2=13&synchronous=true",
new Processor() {
public void process(Exchange exchange) throws Exception {
exchange.setPattern(ExchangePattern.InOut);
Message inMessage = exchange.getIn();
// set the Http method
inMessage.setHeader(Exchange.HTTP_METHOD, "GET");
inMessage.setHeader(CxfConstants.CAMEL_CXF_RS_RESPONSE_CLASS, InputStream.class);
// override the parameter setting from URI
// START SNIPPET: QueryMapExample
Map<String, String> queryMap = new LinkedHashMap<>();
queryMap.put("q1", "new");
queryMap.put("q2", "world");
inMessage.setHeader(CxfConstants.CAMEL_CXF_RS_QUERY_MAP, queryMap);
// END SNIPPET: QueryMapExample
inMessage.setBody(null);
}
});
// get the response message
String response = exchange.getMessage().getBody(String.class);
assertNotNull(response, "The response should not be null");
assertEquals("q1=new&q2=world", response, "The response value is wrong");
}
@Test
public void testProducerWithQueryParametersMultipleValues() {
Exchange exchange = template.send("cxfrs://http://localhost:" + getPort2() + "/" + getClass().getSimpleName()
+ "/testQuery?httpClientAPI=true&synchronous=true",
new Processor() {
public void process(Exchange exchange) throws Exception {
exchange.setPattern(ExchangePattern.InOut);
Message inMessage = exchange.getIn();
// set the Http method
inMessage.setHeader(Exchange.HTTP_METHOD, "GET");
inMessage.setHeader(CxfConstants.CAMEL_CXF_RS_RESPONSE_CLASS, InputStream.class);
inMessage.setHeader(Exchange.HTTP_QUERY, "id=1&id=2");
inMessage.setBody(null);
}
});
// get the response message
String response = exchange.getMessage().getBody(String.class);
assertNotNull(response, "The response should not be null");
assertEquals("id=1&id=2", response, "The response value is wrong");
}
@Test
public void testProducerWithQueryParametersEscapeAmpersand() {
Exchange exchange = template.send("cxfrs://http://localhost:" + getPort2() + "/" + getClass().getSimpleName()
+ "/testQuery?httpClientAPI=true&synchronous=true",
new Processor() {
public void process(Exchange exchange) throws Exception {
exchange.setPattern(ExchangePattern.InOut);
Message inMessage = exchange.getIn();
// set the Http method
inMessage.setHeader(Exchange.HTTP_METHOD, "GET");
inMessage.setHeader(CxfConstants.CAMEL_CXF_RS_RESPONSE_CLASS, InputStream.class);
inMessage.setHeader(Exchange.HTTP_QUERY, "id=1%262");
inMessage.setBody(null);
}
});
// get the response message
String response = exchange.getMessage().getBody(String.class);
assertNotNull(response, "The response should not be null");
assertEquals("id=1%262", response, "The response value is wrong");
}
@Test
public void testRestServerDirectlyGetCustomer() {
// we cannot convert directly to Customer as we need camel-jaxb
String response
= template.requestBodyAndHeader("cxfrs:http://localhost:" + getPort1() + "/" + getClass().getSimpleName()
+ "/customerservice/customers/123?synchronous=true",
null, Exchange.HTTP_METHOD, "GET", String.class);
assertNotNull(response, "The response should not be null");
}
@Test
public void testRestServerDirectlyAddCustomer() {
Customer input = new Customer();
input.setName("Donald Duck");
// we cannot convert directly to Customer as we need camel-jaxb
String response
= template.requestBodyAndHeader("cxfrs:http://localhost:" + getPort1() + "/" + getClass().getSimpleName()
+ "/customerservice/customers?synchronous=true",
input, Exchange.HTTP_METHOD, "POST", String.class);
assertNotNull(response);
assertTrue(response.endsWith("<name>Donald Duck</name></Customer>"));
}
static | UndertowProcessor |
java | elastic__elasticsearch | modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateResponseTests.java | {
"start": 3445,
"end": 10127
} | class ____ search response parsing and serialization.
*/
private static SearchResponse createSearchResponse() {
long tookInMillis = randomNonNegativeLong();
int totalShards = randomIntBetween(1, Integer.MAX_VALUE);
int successfulShards = randomIntBetween(0, totalShards);
int skippedShards = randomIntBetween(0, totalShards);
return SearchResponseUtils.emptyWithTotalHits(
null,
totalShards,
successfulShards,
skippedShards,
tookInMillis,
ShardSearchFailure.EMPTY_ARRAY,
SearchResponse.Clusters.EMPTY
);
}
private static BytesReference createSource() {
try {
XContentBuilder source = XContentFactory.jsonBuilder()
.startObject()
.startObject("query")
.startObject("match")
.field(randomAlphaOfLength(5), randomAlphaOfLength(10))
.endObject()
.endObject()
.endObject();
return BytesReference.bytes(source);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
@Override
protected Predicate<String> getRandomFieldsExcludeFilter() {
String templateOutputField = SearchTemplateResponse.TEMPLATE_OUTPUT_FIELD.getPreferredName();
return field -> field.equals(templateOutputField) || field.startsWith(templateOutputField + ".");
}
/**
* Note that we can't rely on normal equals and hashCode checks, since {@link SearchResponse} doesn't
* currently implement equals and hashCode. Instead, we compare the template outputs for equality,
* and perform some sanity checks on the search response instances.
*/
@Override
protected void assertEqualInstances(SearchTemplateResponse expectedInstance, SearchTemplateResponse newInstance) {
assertNotSame(newInstance, expectedInstance);
BytesReference expectedSource = expectedInstance.getSource();
BytesReference newSource = newInstance.getSource();
assertEquals(expectedSource == null, newSource == null);
if (expectedSource != null) {
try {
assertToXContentEquivalent(expectedSource, newSource, XContentType.JSON);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
assertEquals(expectedInstance.hasResponse(), newInstance.hasResponse());
if (expectedInstance.hasResponse()) {
SearchResponse expectedResponse = expectedInstance.getResponse();
SearchResponse newResponse = newInstance.getResponse();
assertEquals(expectedResponse.getHits().getTotalHits().value(), newResponse.getHits().getTotalHits().value());
assertEquals(expectedResponse.getHits().getMaxScore(), newResponse.getHits().getMaxScore(), 0.0001);
}
}
@Override
protected boolean supportsUnknownFields() {
return true;
}
public void testSourceToXContent() throws IOException {
SearchTemplateResponse response = new SearchTemplateResponse();
try {
XContentBuilder source = XContentFactory.jsonBuilder()
.startObject()
.startObject("query")
.startObject("terms")
.field("status", new String[] { "pending", "published" })
.endObject()
.endObject()
.endObject();
response.setSource(BytesReference.bytes(source));
XContentType contentType = randomFrom(XContentType.values());
XContentBuilder expectedResponse = XContentFactory.contentBuilder(contentType)
.startObject()
.startObject("template_output")
.startObject("query")
.startObject("terms")
.field("status", new String[] { "pending", "published" })
.endObject()
.endObject()
.endObject()
.endObject();
XContentBuilder actualResponse = XContentFactory.contentBuilder(contentType);
response.toXContent(actualResponse, ToXContent.EMPTY_PARAMS);
assertToXContentEquivalent(BytesReference.bytes(expectedResponse), BytesReference.bytes(actualResponse), contentType);
} finally {
response.decRef();
}
}
public void testSearchResponseToXContent() throws IOException {
SearchHit hit = SearchHit.unpooled(1, "id");
hit.score(2.0f);
SearchHit[] hits = new SearchHit[] { hit };
SearchResponse searchResponse = new SearchResponse(
SearchHits.unpooled(hits, new TotalHits(100, TotalHits.Relation.EQUAL_TO), 1.5f),
null,
null,
false,
null,
null,
1,
null,
0,
0,
0,
0,
ShardSearchFailure.EMPTY_ARRAY,
SearchResponse.Clusters.EMPTY
);
SearchTemplateResponse response = new SearchTemplateResponse();
try {
response.setResponse(searchResponse);
XContentType contentType = randomFrom(XContentType.values());
XContentBuilder expectedResponse = XContentFactory.contentBuilder(contentType)
.startObject()
.field("took", 0)
.field("timed_out", false)
.startObject("_shards")
.field("total", 0)
.field("successful", 0)
.field("skipped", 0)
.field("failed", 0)
.endObject()
.startObject("hits")
.startObject("total")
.field("value", 100)
.field("relation", "eq")
.endObject()
.field("max_score", 1.5F)
.startArray("hits")
.startObject()
.field("_id", "id")
.field("_score", 2.0F)
.endObject()
.endArray()
.endObject()
.endObject();
XContentBuilder actualResponse = XContentFactory.contentBuilder(contentType);
response.toXContent(actualResponse, ToXContent.EMPTY_PARAMS);
assertToXContentEquivalent(BytesReference.bytes(expectedResponse), BytesReference.bytes(actualResponse), contentType);
} finally {
response.decRef();
}
}
@Override
protected void dispose(SearchTemplateResponse instance) {
instance.decRef();
}
}
| for |
java | spring-projects__spring-boot | module/spring-boot-web-server/src/main/java/org/springframework/boot/web/server/servlet/Session.java | {
"start": 1214,
"end": 3248
} | class ____ {
/**
* Session timeout. If a duration suffix is not specified, seconds will be used.
*/
@DurationUnit(ChronoUnit.SECONDS)
private @Nullable Duration timeout = Duration.ofMinutes(30);
/**
* Session tracking modes.
*/
private @Nullable Set<Session.SessionTrackingMode> trackingModes;
/**
* Whether to persist session data between restarts.
*/
private boolean persistent;
/**
* Directory used to store session data.
*/
private @Nullable File storeDir;
@NestedConfigurationProperty
private final Cookie cookie = new Cookie();
private final SessionStoreDirectory sessionStoreDirectory = new SessionStoreDirectory();
public @Nullable Duration getTimeout() {
return this.timeout;
}
public void setTimeout(@Nullable Duration timeout) {
this.timeout = timeout;
}
/**
* Return the {@link SessionTrackingMode session tracking modes}.
* @return the session tracking modes
*/
public @Nullable Set<Session.SessionTrackingMode> getTrackingModes() {
return this.trackingModes;
}
public void setTrackingModes(@Nullable Set<Session.SessionTrackingMode> trackingModes) {
this.trackingModes = trackingModes;
}
/**
* Return whether to persist session data between restarts.
* @return {@code true} to persist session data between restarts.
*/
public boolean isPersistent() {
return this.persistent;
}
public void setPersistent(boolean persistent) {
this.persistent = persistent;
}
/**
* Return the directory used to store session data.
* @return the session data store directory
*/
public @Nullable File getStoreDir() {
return this.storeDir;
}
public void setStoreDir(@Nullable File storeDir) {
this.sessionStoreDirectory.setDirectory(storeDir);
this.storeDir = storeDir;
}
public Cookie getCookie() {
return this.cookie;
}
public SessionStoreDirectory getSessionStoreDirectory() {
return this.sessionStoreDirectory;
}
/**
* Available session tracking modes (mirrors
* {@link jakarta.servlet.SessionTrackingMode}).
*/
public | Session |
java | quarkusio__quarkus | extensions/websockets-next/deployment/src/test/java/io/quarkus/websockets/next/test/subprotocol/SubprotocolNotAvailableTest.java | {
"start": 2394,
"end": 2822
} | class ____ {
static final AtomicBoolean OPEN_CALLED = new AtomicBoolean();
static final AtomicBoolean SESSION_CONTEXT_DESTROYED = new AtomicBoolean();
@OnOpen
void open() {
OPEN_CALLED.set(true);
}
static void sessionContextDestroyed(@Observes @Destroyed(SessionScoped.class) Object event) {
SESSION_CONTEXT_DESTROYED.set(true);
}
}
}
| Endpoint |
java | elastic__elasticsearch | x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/custom/CustomServiceSettingsTests.java | {
"start": 2391,
"end": 41006
} | class ____ extends AbstractBWCWireSerializationTestCase<CustomServiceSettings> {
public static CustomServiceSettings createRandom() {
var inputUrl = randomAlphaOfLength(5);
var taskType = randomFrom(TaskType.TEXT_EMBEDDING, TaskType.RERANK, TaskType.SPARSE_EMBEDDING, TaskType.COMPLETION);
SimilarityMeasure similarityMeasure = null;
Integer dims = null;
var isTextEmbeddingModel = taskType.equals(TaskType.TEXT_EMBEDDING);
if (isTextEmbeddingModel) {
similarityMeasure = SimilarityMeasure.DOT_PRODUCT;
dims = 1536;
}
var maxInputTokens = randomBoolean() ? null : randomIntBetween(128, 256);
var url = inputUrl != null ? inputUrl : randomAlphaOfLength(15);
Map<String, String> headers = randomBoolean() ? Map.of() : Map.of("key", "value");
var queryParameters = randomBoolean()
? QueryParameters.EMPTY
: new QueryParameters(List.of(new QueryParameters.Parameter("key", "value")));
var requestContentString = randomAlphaOfLength(10);
var responseJsonParser = switch (taskType) {
case TEXT_EMBEDDING -> new DenseEmbeddingResponseParser("$.result.embeddings[*].embedding", CustomServiceEmbeddingType.FLOAT);
case SPARSE_EMBEDDING -> new SparseEmbeddingResponseParser(
"$.result.sparse_embeddings[*].embedding[*].token_id",
"$.result.sparse_embeddings[*].embedding[*].weights"
);
case RERANK -> new RerankResponseParser(
"$.result.reranked_results[*].index",
"$.result.reranked_results[*].relevance_score",
"$.result.reranked_results[*].document_text"
);
case COMPLETION -> new CompletionResponseParser("$.result.text");
default -> new NoopResponseParser();
};
RateLimitSettings rateLimitSettings = new RateLimitSettings(randomLongBetween(1, 1000000));
return new CustomServiceSettings(
new CustomServiceSettings.TextEmbeddingSettings(similarityMeasure, dims, maxInputTokens),
url,
headers,
queryParameters,
requestContentString,
responseJsonParser,
rateLimitSettings
);
}
public void testFromMap() {
String similarity = SimilarityMeasure.DOT_PRODUCT.toString();
Integer dims = 1536;
Integer maxInputTokens = 512;
String url = "http://www.abc.com";
Map<String, String> headers = Map.of("key", "value");
var queryParameters = List.of(List.of("key", "value"));
String requestContentString = "request body";
var responseParser = new DenseEmbeddingResponseParser("$.result.embeddings[*].embedding", CustomServiceEmbeddingType.FLOAT);
var settings = CustomServiceSettings.fromMap(
new HashMap<>(
Map.of(
ServiceFields.SIMILARITY,
similarity,
ServiceFields.DIMENSIONS,
dims,
ServiceFields.MAX_INPUT_TOKENS,
maxInputTokens,
CustomServiceSettings.URL,
url,
CustomServiceSettings.HEADERS,
headers,
QueryParameters.QUERY_PARAMETERS,
queryParameters,
CustomServiceSettings.REQUEST,
requestContentString,
CustomServiceSettings.RESPONSE,
new HashMap<>(
Map.of(
CustomServiceSettings.JSON_PARSER,
new HashMap<>(
Map.of(DenseEmbeddingResponseParser.TEXT_EMBEDDING_PARSER_EMBEDDINGS, "$.result.embeddings[*].embedding")
)
)
),
CustomServiceSettings.BATCH_SIZE,
11
)
),
ConfigurationParseContext.REQUEST,
TaskType.TEXT_EMBEDDING
);
assertThat(
settings,
is(
new CustomServiceSettings(
new CustomServiceSettings.TextEmbeddingSettings(SimilarityMeasure.DOT_PRODUCT, dims, maxInputTokens),
url,
headers,
new QueryParameters(List.of(new QueryParameters.Parameter("key", "value"))),
requestContentString,
responseParser,
new RateLimitSettings(10_000),
11,
InputTypeTranslator.EMPTY_TRANSLATOR
)
)
);
}
public void testFromMap_EmbeddingType_Bit() {
String url = "http://www.abc.com";
String requestContentString = "request body";
var responseParser = new DenseEmbeddingResponseParser("$.result.embeddings[*].embedding", CustomServiceEmbeddingType.BIT);
var settings = CustomServiceSettings.fromMap(
new HashMap<>(
Map.of(
CustomServiceSettings.URL,
url,
CustomServiceSettings.REQUEST,
requestContentString,
CustomServiceSettings.RESPONSE,
new HashMap<>(
Map.of(
CustomServiceSettings.JSON_PARSER,
new HashMap<>(
Map.of(
DenseEmbeddingResponseParser.TEXT_EMBEDDING_PARSER_EMBEDDINGS,
"$.result.embeddings[*].embedding",
DenseEmbeddingResponseParser.EMBEDDING_TYPE,
CustomServiceEmbeddingType.BIT.toString()
)
)
)
)
)
),
ConfigurationParseContext.REQUEST,
TaskType.TEXT_EMBEDDING
);
MatcherAssert.assertThat(
settings,
is(
new CustomServiceSettings(
new CustomServiceSettings.TextEmbeddingSettings(null, null, null),
url,
Map.of(),
null,
requestContentString,
responseParser,
new RateLimitSettings(10_000)
)
)
);
}
public void testFromMap_EmbeddingType_Binary() {
String url = "http://www.abc.com";
String requestContentString = "request body";
var responseParser = new DenseEmbeddingResponseParser("$.result.embeddings[*].embedding", CustomServiceEmbeddingType.BINARY);
var settings = CustomServiceSettings.fromMap(
new HashMap<>(
Map.of(
CustomServiceSettings.URL,
url,
CustomServiceSettings.REQUEST,
requestContentString,
CustomServiceSettings.RESPONSE,
new HashMap<>(
Map.of(
CustomServiceSettings.JSON_PARSER,
new HashMap<>(
Map.of(
DenseEmbeddingResponseParser.TEXT_EMBEDDING_PARSER_EMBEDDINGS,
"$.result.embeddings[*].embedding",
DenseEmbeddingResponseParser.EMBEDDING_TYPE,
CustomServiceEmbeddingType.BINARY.toString()
)
)
)
)
)
),
ConfigurationParseContext.REQUEST,
TaskType.TEXT_EMBEDDING
);
MatcherAssert.assertThat(
settings,
is(
new CustomServiceSettings(
new CustomServiceSettings.TextEmbeddingSettings(null, null, null),
url,
Map.of(),
null,
requestContentString,
responseParser,
new RateLimitSettings(10_000)
)
)
);
}
public void testFromMap_EmbeddingType_Byte() {
String url = "http://www.abc.com";
String requestContentString = "request body";
var responseParser = new DenseEmbeddingResponseParser("$.result.embeddings[*].embedding", CustomServiceEmbeddingType.BYTE);
var settings = CustomServiceSettings.fromMap(
new HashMap<>(
Map.of(
CustomServiceSettings.URL,
url,
CustomServiceSettings.REQUEST,
requestContentString,
CustomServiceSettings.RESPONSE,
new HashMap<>(
Map.of(
CustomServiceSettings.JSON_PARSER,
new HashMap<>(
Map.of(
DenseEmbeddingResponseParser.TEXT_EMBEDDING_PARSER_EMBEDDINGS,
"$.result.embeddings[*].embedding",
DenseEmbeddingResponseParser.EMBEDDING_TYPE,
CustomServiceEmbeddingType.BYTE.toString()
)
)
)
)
)
),
ConfigurationParseContext.REQUEST,
TaskType.TEXT_EMBEDDING
);
MatcherAssert.assertThat(
settings,
is(
new CustomServiceSettings(
new CustomServiceSettings.TextEmbeddingSettings(null, null, null),
url,
Map.of(),
null,
requestContentString,
responseParser,
new RateLimitSettings(10_000)
)
)
);
assertThat(settings.elementType(), is(DenseVectorFieldMapper.ElementType.BYTE));
}
public void testFromMap_Completion_NoEmbeddingType() {
String url = "http://www.abc.com";
String requestContentString = "request body";
var responseParser = new CompletionResponseParser("$.result.text");
var settings = CustomServiceSettings.fromMap(
new HashMap<>(
Map.of(
CustomServiceSettings.URL,
url,
CustomServiceSettings.REQUEST,
requestContentString,
CustomServiceSettings.RESPONSE,
new HashMap<>(
Map.of(
CustomServiceSettings.JSON_PARSER,
new HashMap<>(Map.of(CompletionResponseParser.COMPLETION_PARSER_RESULT, "$.result.text"))
)
)
)
),
ConfigurationParseContext.REQUEST,
TaskType.COMPLETION
);
MatcherAssert.assertThat(
settings,
is(
new CustomServiceSettings(
new CustomServiceSettings.TextEmbeddingSettings(null, null, null),
url,
Map.of(),
null,
requestContentString,
responseParser,
new RateLimitSettings(10_000)
)
)
);
assertNull(settings.elementType());
}
public void testFromMap_Completion_ThrowsWhenEmbeddingIsIncludedInMap() {
String url = "http://www.abc.com";
String requestContentString = "request body";
var exception = expectThrows(
ElasticsearchStatusException.class,
() -> CustomServiceSettings.fromMap(
new HashMap<>(
Map.of(
CustomServiceSettings.URL,
url,
CustomServiceSettings.REQUEST,
requestContentString,
CustomServiceSettings.RESPONSE,
new HashMap<>(
Map.of(
CustomServiceSettings.JSON_PARSER,
new HashMap<>(
Map.of(
CompletionResponseParser.COMPLETION_PARSER_RESULT,
"$.result.text",
DenseEmbeddingResponseParser.EMBEDDING_TYPE,
"byte"
)
)
)
)
)
),
ConfigurationParseContext.REQUEST,
TaskType.COMPLETION
)
);
assertThat(
exception.getMessage(),
is(
"Configuration contains unknown settings [{embedding_type=byte}] while parsing field [json_parser] "
+ "for settings [custom_service_settings]"
)
);
}
public void testFromMap_WithOptionalsNotSpecified() {
String url = "http://www.abc.com";
String requestContentString = "request body";
var responseParser = new DenseEmbeddingResponseParser("$.result.embeddings[*].embedding", CustomServiceEmbeddingType.FLOAT);
var settings = CustomServiceSettings.fromMap(
new HashMap<>(
Map.of(
CustomServiceSettings.URL,
url,
CustomServiceSettings.REQUEST,
requestContentString,
CustomServiceSettings.RESPONSE,
new HashMap<>(
Map.of(
CustomServiceSettings.JSON_PARSER,
new HashMap<>(
Map.of(DenseEmbeddingResponseParser.TEXT_EMBEDDING_PARSER_EMBEDDINGS, "$.result.embeddings[*].embedding")
)
)
)
)
),
ConfigurationParseContext.REQUEST,
TaskType.TEXT_EMBEDDING
);
MatcherAssert.assertThat(
settings,
is(
new CustomServiceSettings(
CustomServiceSettings.TextEmbeddingSettings.DEFAULT_FLOAT,
url,
Map.of(),
null,
requestContentString,
responseParser,
new RateLimitSettings(10_000)
)
)
);
}
public void testFromMap_RemovesNullValues_FromMaps() {
String similarity = SimilarityMeasure.DOT_PRODUCT.toString();
Integer dims = 1536;
Integer maxInputTokens = 512;
String url = "http://www.abc.com";
var headersWithNulls = new HashMap<String, Object>();
headersWithNulls.put("value", "abc");
headersWithNulls.put("null", null);
String requestContentString = "request body";
var responseParser = new DenseEmbeddingResponseParser("$.result.embeddings[*].embedding", CustomServiceEmbeddingType.FLOAT);
var settings = CustomServiceSettings.fromMap(
new HashMap<>(
Map.of(
ServiceFields.SIMILARITY,
similarity,
ServiceFields.DIMENSIONS,
dims,
ServiceFields.MAX_INPUT_TOKENS,
maxInputTokens,
CustomServiceSettings.URL,
url,
CustomServiceSettings.HEADERS,
headersWithNulls,
CustomServiceSettings.REQUEST,
requestContentString,
CustomServiceSettings.RESPONSE,
new HashMap<>(
Map.of(
CustomServiceSettings.JSON_PARSER,
new HashMap<>(
Map.of(DenseEmbeddingResponseParser.TEXT_EMBEDDING_PARSER_EMBEDDINGS, "$.result.embeddings[*].embedding")
)
)
)
)
),
ConfigurationParseContext.REQUEST,
TaskType.TEXT_EMBEDDING
);
MatcherAssert.assertThat(
settings,
is(
new CustomServiceSettings(
new CustomServiceSettings.TextEmbeddingSettings(SimilarityMeasure.DOT_PRODUCT, dims, maxInputTokens),
url,
Map.of("value", "abc"),
null,
requestContentString,
responseParser,
new RateLimitSettings(10_000)
)
)
);
}
public void testFromMap_ReturnsError_IfHeadersContainsNonStringValues() {
String similarity = SimilarityMeasure.DOT_PRODUCT.toString();
Integer dims = 1536;
Integer maxInputTokens = 512;
String url = "http://www.abc.com";
String requestContentString = "request body";
var mapSettings = new HashMap<String, Object>(
Map.of(
ServiceFields.SIMILARITY,
similarity,
ServiceFields.DIMENSIONS,
dims,
ServiceFields.MAX_INPUT_TOKENS,
maxInputTokens,
CustomServiceSettings.URL,
url,
CustomServiceSettings.HEADERS,
new HashMap<>(Map.of("key", 1)),
CustomServiceSettings.REQUEST,
requestContentString,
CustomServiceSettings.RESPONSE,
new HashMap<>(
Map.of(
CustomServiceSettings.JSON_PARSER,
new HashMap<>(
Map.of(DenseEmbeddingResponseParser.TEXT_EMBEDDING_PARSER_EMBEDDINGS, "$.result.embeddings[*].embedding")
)
)
)
)
);
var exception = expectThrows(
ValidationException.class,
() -> CustomServiceSettings.fromMap(mapSettings, ConfigurationParseContext.REQUEST, TaskType.TEXT_EMBEDDING)
);
assertThat(
exception.getMessage(),
is(
"Validation Failed: 1: Map field [headers] has an entry that is not valid, [key => 1]. "
+ "Value type of [1] is not one of [String].;"
)
);
}
public void testFromMap_ReturnsError_IfQueryParamsContainsNonStringValues() {
String similarity = SimilarityMeasure.DOT_PRODUCT.toString();
Integer dims = 1536;
Integer maxInputTokens = 512;
String url = "http://www.abc.com";
String requestContentString = "request body";
var mapSettings = new HashMap<>(
Map.of(
ServiceFields.SIMILARITY,
similarity,
ServiceFields.DIMENSIONS,
dims,
ServiceFields.MAX_INPUT_TOKENS,
maxInputTokens,
CustomServiceSettings.URL,
url,
QueryParameters.QUERY_PARAMETERS,
List.of(List.of("key", 1)),
CustomServiceSettings.REQUEST,
requestContentString,
CustomServiceSettings.RESPONSE,
new HashMap<>(
Map.of(
CustomServiceSettings.JSON_PARSER,
new HashMap<>(
Map.of(DenseEmbeddingResponseParser.TEXT_EMBEDDING_PARSER_EMBEDDINGS, "$.result.embeddings[*].embedding")
)
)
)
)
);
var exception = expectThrows(
ValidationException.class,
() -> CustomServiceSettings.fromMap(mapSettings, ConfigurationParseContext.REQUEST, TaskType.TEXT_EMBEDDING)
);
assertThat(
exception.getMessage(),
is(
"Validation Failed: 1: [service_settings] failed to parse tuple list entry [0] "
+ "for setting [query_parameters], the second element must be a string but was [Integer];"
)
);
}
public void testFromMap_ReturnsError_IfRequestMapIsMissing() {
String url = "http://www.abc.com";
String requestContentString = "request body";
var mapSettings = new HashMap<String, Object>(
Map.of(
CustomServiceSettings.URL,
url,
CustomServiceSettings.HEADERS,
new HashMap<>(Map.of("key", "value")),
"invalid_request",
requestContentString,
CustomServiceSettings.RESPONSE,
new HashMap<>(
Map.of(
CustomServiceSettings.JSON_PARSER,
new HashMap<>(
Map.of(DenseEmbeddingResponseParser.TEXT_EMBEDDING_PARSER_EMBEDDINGS, "$.result.embeddings[*].embedding")
)
)
)
)
);
var exception = expectThrows(
ValidationException.class,
() -> CustomServiceSettings.fromMap(mapSettings, ConfigurationParseContext.REQUEST, TaskType.TEXT_EMBEDDING)
);
assertThat(exception.getMessage(), is("Validation Failed: 1: [service_settings] does not contain the required setting [request];"));
}
public void testFromMap_ReturnsError_IfResponseMapIsMissing() {
String url = "http://www.abc.com";
String requestContentString = "request body";
var mapSettings = new HashMap<String, Object>(
Map.of(
CustomServiceSettings.URL,
url,
CustomServiceSettings.HEADERS,
new HashMap<>(Map.of("key", "value")),
CustomServiceSettings.REQUEST,
requestContentString,
"invalid_response",
new HashMap<>(
Map.of(
CustomServiceSettings.JSON_PARSER,
new HashMap<>(
Map.of(DenseEmbeddingResponseParser.TEXT_EMBEDDING_PARSER_EMBEDDINGS, "$.result.embeddings[*].embedding")
)
)
)
)
);
var exception = expectThrows(
ValidationException.class,
() -> CustomServiceSettings.fromMap(mapSettings, ConfigurationParseContext.REQUEST, TaskType.TEXT_EMBEDDING)
);
assertThat(
exception.getMessage(),
is(
"Validation Failed: 1: [service_settings] does not contain the required setting [response];"
+ "2: [service_settings.response] does not contain the required setting [json_parser];"
)
);
}
public void testFromMap_ReturnsError_IfJsonParserMapIsNotEmptyAfterParsing() {
String url = "http://www.abc.com";
String requestContentString = "request body";
var mapSettings = new HashMap<String, Object>(
Map.of(
CustomServiceSettings.URL,
url,
CustomServiceSettings.HEADERS,
new HashMap<>(Map.of("key", "value")),
CustomServiceSettings.REQUEST,
requestContentString,
CustomServiceSettings.RESPONSE,
new HashMap<>(
Map.of(
CustomServiceSettings.JSON_PARSER,
new HashMap<>(
Map.of(
DenseEmbeddingResponseParser.TEXT_EMBEDDING_PARSER_EMBEDDINGS,
"$.result.embeddings[*].embedding",
"key",
"value"
)
)
)
)
)
);
var exception = expectThrows(
ElasticsearchStatusException.class,
() -> CustomServiceSettings.fromMap(mapSettings, ConfigurationParseContext.REQUEST, TaskType.TEXT_EMBEDDING)
);
assertThat(
exception.getMessage(),
is(
"Configuration contains unknown settings [{key=value}] while parsing field [json_parser]"
+ " for settings [custom_service_settings]"
)
);
}
public void testFromMap_ReturnsError_IfResponseMapIsNotEmptyAfterParsing() {
String url = "http://www.abc.com";
String requestContentString = "request body";
var mapSettings = new HashMap<String, Object>(
Map.of(
CustomServiceSettings.URL,
url,
CustomServiceSettings.HEADERS,
new HashMap<>(Map.of("key", "value")),
CustomServiceSettings.REQUEST,
requestContentString,
CustomServiceSettings.RESPONSE,
new HashMap<>(
Map.of(
CustomServiceSettings.JSON_PARSER,
new HashMap<>(
Map.of(DenseEmbeddingResponseParser.TEXT_EMBEDDING_PARSER_EMBEDDINGS, "$.result.embeddings[*].embedding")
),
"key",
"value"
)
)
)
);
var exception = expectThrows(
ElasticsearchStatusException.class,
() -> CustomServiceSettings.fromMap(mapSettings, ConfigurationParseContext.REQUEST, TaskType.TEXT_EMBEDDING)
);
assertThat(
exception.getMessage(),
is(
"Configuration contains unknown settings [{key=value}] while parsing field [response]"
+ " for settings [custom_service_settings]"
)
);
}
public void testFromMap_ReturnsError_IfTaskTypeIsInvalid() {
String url = "http://www.abc.com";
String requestContentString = "request body";
var mapSettings = new HashMap<String, Object>(
Map.of(
CustomServiceSettings.URL,
url,
CustomServiceSettings.HEADERS,
new HashMap<>(Map.of("key", "value")),
CustomServiceSettings.REQUEST,
requestContentString,
CustomServiceSettings.RESPONSE,
new HashMap<>(
Map.of(
CustomServiceSettings.JSON_PARSER,
new HashMap<>(
Map.of(DenseEmbeddingResponseParser.TEXT_EMBEDDING_PARSER_EMBEDDINGS, "$.result.embeddings[*].embedding")
)
)
)
)
);
var exception = expectThrows(
IllegalArgumentException.class,
() -> CustomServiceSettings.fromMap(mapSettings, ConfigurationParseContext.REQUEST, TaskType.CHAT_COMPLETION)
);
assertThat(exception.getMessage(), is("Invalid task type received [chat_completion] while constructing response parser"));
}
public void testXContent() throws IOException {
var entity = new CustomServiceSettings(
CustomServiceSettings.TextEmbeddingSettings.NON_TEXT_EMBEDDING_TASK_TYPE_SETTINGS,
"http://www.abc.com",
Map.of("key", "value"),
null,
"string",
new DenseEmbeddingResponseParser("$.result.embeddings[*].embedding", CustomServiceEmbeddingType.FLOAT),
null
);
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
entity.toXContent(builder, null);
String xContentResult = Strings.toString(builder);
var expected = XContentHelper.stripWhitespace("""
{
"url": "http://www.abc.com",
"headers": {
"key": "value"
},
"request": "string",
"response": {
"json_parser": {
"text_embeddings": "$.result.embeddings[*].embedding",
"embedding_type": "float"
}
},
"input_type": {
"translation": {},
"default": ""
},
"rate_limit": {
"requests_per_minute": 10000
},
"batch_size": 10
}
""");
assertThat(xContentResult, is(expected));
}
public void testXContent_Rerank() throws IOException {
var entity = new CustomServiceSettings(
CustomServiceSettings.TextEmbeddingSettings.NON_TEXT_EMBEDDING_TASK_TYPE_SETTINGS,
"http://www.abc.com",
Map.of("key", "value"),
null,
"string",
new RerankResponseParser(
"$.result.reranked_results[*].relevance_score",
"$.result.reranked_results[*].index",
"$.result.reranked_results[*].document_text"
),
null
);
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
entity.toXContent(builder, null);
String xContentResult = Strings.toString(builder);
var expected = XContentHelper.stripWhitespace("""
{
"url": "http://www.abc.com",
"headers": {
"key": "value"
},
"request": "string",
"response": {
"json_parser": {
"relevance_score": "$.result.reranked_results[*].relevance_score",
"reranked_index": "$.result.reranked_results[*].index",
"document_text": "$.result.reranked_results[*].document_text"
}
},
"input_type": {
"translation": {},
"default": ""
},
"rate_limit": {
"requests_per_minute": 10000
},
"batch_size": 10
}
""");
assertThat(xContentResult, is(expected));
}
public void testXContent_WithInputTypeTranslationValues() throws IOException {
var entity = new CustomServiceSettings(
CustomServiceSettings.TextEmbeddingSettings.NON_TEXT_EMBEDDING_TASK_TYPE_SETTINGS,
"http://www.abc.com",
Map.of("key", "value"),
null,
"string",
new DenseEmbeddingResponseParser("$.result.embeddings[*].embedding", CustomServiceEmbeddingType.FLOAT),
null,
null,
new InputTypeTranslator(Map.of(InputType.SEARCH, "do_search", InputType.INGEST, "do_ingest"), "a_default")
);
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
entity.toXContent(builder, null);
String xContentResult = Strings.toString(builder);
var expected = XContentHelper.stripWhitespace("""
{
"url": "http://www.abc.com",
"headers": {
"key": "value"
},
"request": "string",
"response": {
"json_parser": {
"text_embeddings": "$.result.embeddings[*].embedding",
"embedding_type": "float"
}
},
"input_type": {
"translation": {
"ingest": "do_ingest",
"search": "do_search"
},
"default": "a_default"
},
"rate_limit": {
"requests_per_minute": 10000
},
"batch_size": 10
}
""");
assertThat(xContentResult, is(expected));
}
public void testXContent_BatchSize11() throws IOException {
var entity = new CustomServiceSettings(
CustomServiceSettings.TextEmbeddingSettings.NON_TEXT_EMBEDDING_TASK_TYPE_SETTINGS,
"http://www.abc.com",
Map.of("key", "value"),
null,
"string",
new DenseEmbeddingResponseParser("$.result.embeddings[*].embedding", CustomServiceEmbeddingType.FLOAT),
null,
11,
InputTypeTranslator.EMPTY_TRANSLATOR
);
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
entity.toXContent(builder, null);
String xContentResult = Strings.toString(builder);
var expected = XContentHelper.stripWhitespace("""
{
"url": "http://www.abc.com",
"headers": {
"key": "value"
},
"request": "string",
"response": {
"json_parser": {
"text_embeddings": "$.result.embeddings[*].embedding",
"embedding_type": "float"
}
},
"input_type": {
"translation": {},
"default": ""
},
"rate_limit": {
"requests_per_minute": 10000
},
"batch_size": 11
}
""");
assertThat(xContentResult, is(expected));
}
@Override
protected NamedWriteableRegistry getNamedWriteableRegistry() {
return new NamedWriteableRegistry(InferenceNamedWriteablesProvider.getNamedWriteables());
}
@Override
protected Writeable.Reader<CustomServiceSettings> instanceReader() {
return CustomServiceSettings::new;
}
@Override
protected CustomServiceSettings createTestInstance() {
return createRandom();
}
@Override
protected CustomServiceSettings mutateInstance(CustomServiceSettings instance) {
var textEmbeddingSettings = instance.getTextEmbeddingSettings();
var url = instance.getUrl();
var headers = instance.getHeaders();
var queryParameters = instance.getQueryParameters();
var requestContentString = instance.getRequestContentString();
var responseJsonParser = instance.getResponseJsonParser();
var rateLimitSettings = instance.rateLimitSettings();
var batchSize = instance.getBatchSize();
var inputTypeTranslator = instance.getInputTypeTranslator();
switch (randomInt(8)) {
case 0 -> textEmbeddingSettings = randomValueOtherThan(
textEmbeddingSettings,
CustomServiceSettingsTests::randomTextEmbeddingSettings
);
case 1 -> url = randomValueOtherThan(url, () -> randomAlphaOfLength(5));
case 2 -> headers = randomValueOtherThan(
headers,
() -> randomMap(0, 1, () -> new Tuple<>(randomAlphaOfLength(5), randomAlphaOfLength(5)))
);
case 3 -> queryParameters = randomValueOtherThan(queryParameters, QueryParametersTests::createRandom);
case 4 -> requestContentString = randomValueOtherThan(requestContentString, () -> randomAlphaOfLength(10));
case 5 -> responseJsonParser = randomValueOtherThan(responseJsonParser, CustomServiceSettingsTests::randomResponseParser);
case 6 -> rateLimitSettings = randomValueOtherThan(rateLimitSettings, RateLimitSettingsTests::createRandom);
case 7 -> batchSize = randomValueOtherThan(batchSize, ESTestCase::randomInt);
case 8 -> inputTypeTranslator = randomValueOtherThan(inputTypeTranslator, InputTypeTranslatorTests::createRandom);
default -> throw new AssertionError("Illegal randomisation branch");
}
return new CustomServiceSettings(
textEmbeddingSettings,
url,
headers,
queryParameters,
requestContentString,
responseJsonParser,
rateLimitSettings,
batchSize,
inputTypeTranslator
);
}
private static CustomServiceSettings.TextEmbeddingSettings randomTextEmbeddingSettings() {
return new CustomServiceSettings.TextEmbeddingSettings(
randomBoolean() ? null : randomSimilarityMeasure(),
randomIntOrNull(),
randomIntOrNull()
);
}
private static CustomResponseParser randomResponseParser() {
return switch (randomInt(4)) {
case 0 -> new DenseEmbeddingResponseParser("$.result.embeddings[*].embedding", CustomServiceEmbeddingType.FLOAT);
case 1 -> new SparseEmbeddingResponseParser(
"$.result.sparse_embeddings[*].embedding[*].token_id",
"$.result.sparse_embeddings[*].embedding[*].weights"
);
case 2 -> new RerankResponseParser(
"$.result.reranked_results[*].index",
"$.result.reranked_results[*].relevance_score",
"$.result.reranked_results[*].document_text"
);
case 3 -> new CompletionResponseParser("$.result.text");
case 4 -> new NoopResponseParser();
default -> throw new AssertionError("Illegal randomisation branch");
};
}
@Override
protected CustomServiceSettings mutateInstanceForVersion(CustomServiceSettings instance, TransportVersion version) {
return instance;
}
}
| CustomServiceSettingsTests |
java | mockito__mockito | mockito-core/src/main/java/org/mockito/internal/util/reflection/GenericMetadataSupport.java | {
"start": 25917,
"end": 28085
} | class ____ an interface (parameterized or not), if no bounds declared Object is returned.
*/
@Override
public Type firstBound() {
return typeVariable.getBounds()[0]; //
}
/**
* On a Type Variable (typeVar extends C_0 & I_1 & I_2 & etc), will return an array
* containing I_1 and I_2.
*
* @return other bounds for this type, these bounds can only be interfaces as the JLS says,
* empty array if no other bound declared.
*/
@Override
public Type[] interfaceBounds() {
Type[] interfaceBounds = new Type[typeVariable.getBounds().length - 1];
System.arraycopy(
typeVariable.getBounds(),
1,
interfaceBounds,
0,
typeVariable.getBounds().length - 1);
return interfaceBounds;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
return typeVariable.equals(((TypeVarBoundedType) o).typeVariable);
}
@Override
public int hashCode() {
return typeVariable.hashCode();
}
@Override
public String toString() {
return "{firstBound="
+ firstBound()
+ ", interfaceBounds="
+ Arrays.deepToString(interfaceBounds())
+ '}';
}
public TypeVariable<?> typeVariable() {
return typeVariable;
}
}
/**
* Type representing bounds of a wildcard, allows to keep all bounds information.
*
* <p>The JLS says that lower bound and upper bound are mutually exclusive, and that multiple bounds
* are not allowed.
*
* @see <a href="https://docs.oracle.com/javase/specs/jls/se8/html/jls-4.html#jls-4.4">https://docs.oracle.com/javase/specs/jls/se8/html/jls-4.html#jls-4.4</a>
*/
public static | or |
java | quarkusio__quarkus | extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/health/DataSourceHealthCheck.java | {
"start": 935,
"end": 3458
} | class ____ implements HealthCheck {
@Inject
Instance<DataSourceSupport> dataSourceSupport;
@Inject
Instance<AgroalDataSourceSupport> agroalDataSourceSupport;
private final Map<String, DataSource> checkedDataSources = new HashMap<>();
@PostConstruct
protected void init() {
if (!dataSourceSupport.isResolvable() || !agroalDataSourceSupport.isResolvable()) {
// No configured Agroal datasources at build time.
return;
}
DataSourceSupport support = dataSourceSupport.get();
Set<String> healthCheckExcludedNames = support.getHealthCheckExcludedNames();
for (String name : agroalDataSourceSupport.get().entries.keySet()) {
if (healthCheckExcludedNames.contains(name)) {
continue;
}
Optional<AgroalDataSource> dataSource = AgroalDataSourceUtil.dataSourceIfActive(name);
if (dataSource.isPresent()) {
checkedDataSources.put(name, dataSource.get());
}
}
}
@Override
public HealthCheckResponse call() {
HealthCheckResponseBuilder builder = HealthCheckResponse.named("Database connections health check").up();
for (Map.Entry<String, DataSource> dataSource : checkedDataSources.entrySet()) {
boolean isDefault = DataSourceUtil.isDefault(dataSource.getKey());
AgroalDataSource ads = (AgroalDataSource) dataSource.getValue();
String dsName = dataSource.getKey();
try {
boolean valid = ads.isHealthy(false);
if (!valid) {
String data = isDefault ? "validation check failed for the default DataSource"
: "validation check failed for DataSource '" + dataSource.getKey() + "'";
builder.down().withData(dsName, data);
} else {
builder.withData(dsName, "UP");
}
} catch (SQLException e) {
String data = isDefault ? "Unable to execute the validation check for the default DataSource: "
: "Unable to execute the validation check for DataSource '" + dataSource.getKey() + "': ";
builder.down().withData(dsName, data + e.getMessage());
}
}
return builder.build();
}
protected Map<String, DataSource> getCheckedDataSources() {
return Collections.unmodifiableMap(checkedDataSources);
}
}
| DataSourceHealthCheck |
java | apache__camel | components/camel-pulsar/src/main/java/org/apache/camel/component/pulsar/utils/PulsarPath.java | {
"start": 925,
"end": 1900
} | class ____ {
private static final Pattern PATTERN = Pattern.compile("^(persistent|non-persistent):?/?/(.+)/(.+)/(.+)$");
private String persistence;
private String tenant;
private String namespace;
private String topic;
private boolean autoConfigurable;
public PulsarPath(String path) {
Matcher matcher = PATTERN.matcher(path);
autoConfigurable = matcher.matches();
if (autoConfigurable) {
persistence = matcher.group(1);
tenant = matcher.group(2);
namespace = matcher.group(3);
topic = matcher.group(4);
}
}
public String getPersistence() {
return persistence;
}
public String getTenant() {
return tenant;
}
public String getNamespace() {
return namespace;
}
public String getTopic() {
return topic;
}
public boolean isAutoConfigurable() {
return autoConfigurable;
}
}
| PulsarPath |
java | quarkusio__quarkus | extensions/redis-client/runtime/src/main/java/io/quarkus/redis/datasource/graph/GraphQueryResponseItem.java | {
"start": 1006,
"end": 1301
} | interface ____ extends GraphQueryResponseItem {
boolean asBoolean();
int asInteger();
double asDouble();
boolean isNull();
String asString();
@Override
default Kind kind() {
return Kind.SCALAR;
}
}
| ScalarItem |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/allocation/AllocationFileParser.java | {
"start": 2055,
"end": 10101
} | class ____ {
private static final Logger LOG =
LoggerFactory.getLogger(AllocationFileParser.class);
private static final String QUEUE_MAX_RESOURCES_DEFAULT =
"queueMaxResourcesDefault";
private static final String USER_MAX_APPS_DEFAULT = "userMaxAppsDefault";
private static final String DEFAULT_FAIR_SHARE_PREEMPTION_TIMEOUT =
"defaultFairSharePreemptionTimeout";
private static final String FAIR_SHARE_PREEMPTION_TIMEOUT =
"fairSharePreemptionTimeout";
private static final String DEFAULT_MIN_SHARE_PREEMPTION_TIMEOUT =
"defaultMinSharePreemptionTimeout";
private static final String QUEUE_MAX_APPS_DEFAULT = "queueMaxAppsDefault";
private static final String DEFAULT_FAIR_SHARE_PREEMPTION_THRESHOLD =
"defaultFairSharePreemptionThreshold";
private static final String QUEUE_MAX_AM_SHARE_DEFAULT =
"queueMaxAMShareDefault";
private static final String RESERVATION_PLANNER = "reservation-planner";
private static final String RESERVATION_AGENT = "reservation-agent";
private static final String RESERVATION_ADMISSION_POLICY =
"reservation-policy";
private static final String QUEUE_PLACEMENT_POLICY = "queuePlacementPolicy";
private static final String QUEUE = "queue";
private static final String POOL = "pool";
private static final String USER = "user";
private static final String USERNAME = "name";
private static final String MAX_RUNNING_APPS = "maxRunningApps";
private static final String DEFAULT_QUEUE_SCHEDULING_POLICY =
"defaultQueueSchedulingPolicy";
private static final String DEFAULT_QUEUE_SCHEDULING_MODE =
"defaultQueueSchedulingMode";
private static final Set<String> VALID_TAG_NAMES =
Sets.newHashSet(QUEUE_MAX_RESOURCES_DEFAULT, USER_MAX_APPS_DEFAULT,
DEFAULT_FAIR_SHARE_PREEMPTION_TIMEOUT, FAIR_SHARE_PREEMPTION_TIMEOUT,
DEFAULT_MIN_SHARE_PREEMPTION_TIMEOUT, QUEUE_MAX_APPS_DEFAULT,
DEFAULT_FAIR_SHARE_PREEMPTION_THRESHOLD, QUEUE_MAX_AM_SHARE_DEFAULT,
RESERVATION_PLANNER, RESERVATION_AGENT, RESERVATION_ADMISSION_POLICY,
QUEUE_PLACEMENT_POLICY, QUEUE, POOL, USER,
DEFAULT_QUEUE_SCHEDULING_POLICY, DEFAULT_QUEUE_SCHEDULING_MODE);
private final NodeList elements;
private final Map<String, String> textValues = Maps.newHashMap();
private Element queuePlacementPolicyElement;
private final List<Element> queueElements = new ArrayList<>();
private final Map<String, Integer> userMaxApps = new HashMap<>();
private SchedulingPolicy defaultSchedulingPolicy;
public AllocationFileParser(NodeList elements) {
this.elements = elements;
}
public void parse() throws AllocationConfigurationException {
for (int i = 0; i < elements.getLength(); i++) {
Node node = elements.item(i);
if (node instanceof Element) {
Element element = (Element) node;
final String tagName = element.getTagName();
if (VALID_TAG_NAMES.contains(tagName)) {
if (tagName.equals(QUEUE_PLACEMENT_POLICY)) {
queuePlacementPolicyElement = element;
} else if (isSchedulingPolicy(element)) {
defaultSchedulingPolicy = extractSchedulingPolicy(element);
} else if (isQueue(element)) {
queueElements.add(element);
} else if (tagName.equals(USER)) {
extractUserData(element);
} else {
textValues.put(tagName, getTrimmedTextData(element));
}
} else {
LOG.warn("Bad element in allocations file: " + tagName);
}
}
}
}
private boolean isSchedulingPolicy(Element element) {
return DEFAULT_QUEUE_SCHEDULING_POLICY.equals(element.getTagName())
|| DEFAULT_QUEUE_SCHEDULING_MODE.equals(element.getTagName());
}
private void extractUserData(Element element) {
final String userName = element.getAttribute(USERNAME);
final NodeList fields = element.getChildNodes();
for (int j = 0; j < fields.getLength(); j++) {
final Node fieldNode = fields.item(j);
if (!(fieldNode instanceof Element)) {
continue;
}
final Element field = (Element) fieldNode;
if (MAX_RUNNING_APPS.equals(field.getTagName())) {
final String text = getTrimmedTextData(field);
final int val = Integer.parseInt(text);
userMaxApps.put(userName, val);
}
}
}
private SchedulingPolicy extractSchedulingPolicy(Element element)
throws AllocationConfigurationException {
String text = getTrimmedTextData(element);
if (text.equalsIgnoreCase(FifoPolicy.NAME)) {
throw new AllocationConfigurationException("Bad fair scheduler "
+ "config file: defaultQueueSchedulingPolicy or "
+ "defaultQueueSchedulingMode can't be FIFO.");
}
return SchedulingPolicy.parse(text);
}
private boolean isQueue(Element element) {
return element.getTagName().equals(QUEUE)
|| element.getTagName().equals(POOL);
}
private String getTrimmedTextData(Element element) {
return ((Text) element.getFirstChild()).getData().trim();
}
public ConfigurableResource getQueueMaxResourcesDefault()
throws AllocationConfigurationException {
Optional<String> value = getTextValue(QUEUE_MAX_RESOURCES_DEFAULT);
if (value.isPresent()) {
return FairSchedulerConfiguration.parseResourceConfigValue(value.get());
}
return new ConfigurableResource(Resources.unbounded());
}
public int getUserMaxAppsDefault() {
Optional<String> value = getTextValue(USER_MAX_APPS_DEFAULT);
return value.map(Integer::parseInt).orElse(Integer.MAX_VALUE);
}
public long getDefaultFairSharePreemptionTimeout() {
Optional<String> value = getTextValue(FAIR_SHARE_PREEMPTION_TIMEOUT);
Optional<String> defaultValue =
getTextValue(DEFAULT_FAIR_SHARE_PREEMPTION_TIMEOUT);
if (value.isPresent() && !defaultValue.isPresent()) {
return Long.parseLong(value.get()) * 1000L;
} else if (defaultValue.isPresent()) {
return Long.parseLong(defaultValue.get()) * 1000L;
}
return Long.MAX_VALUE;
}
public long getDefaultMinSharePreemptionTimeout() {
Optional<String> value = getTextValue(DEFAULT_MIN_SHARE_PREEMPTION_TIMEOUT);
return value.map(v -> Long.parseLong(v) * 1000L).orElse(Long.MAX_VALUE);
}
public int getQueueMaxAppsDefault() {
Optional<String> value = getTextValue(QUEUE_MAX_APPS_DEFAULT);
return value.map(Integer::parseInt).orElse(Integer.MAX_VALUE);
}
public float getDefaultFairSharePreemptionThreshold() {
Optional<String> value =
getTextValue(DEFAULT_FAIR_SHARE_PREEMPTION_THRESHOLD);
if (value.isPresent()) {
float floatValue = Float.parseFloat(value.get());
return Math.max(Math.min(floatValue, 1.0f), 0.0f);
}
return 0.5f;
}
public float getQueueMaxAMShareDefault() {
Optional<String> value = getTextValue(QUEUE_MAX_AM_SHARE_DEFAULT);
if (value.isPresent()) {
float val = Float.parseFloat(value.get());
return Math.min(val, 1.0f);
}
return 0.5f;
}
// Reservation global configuration knobs
public Optional<String> getReservationPlanner() {
return getTextValue(RESERVATION_PLANNER);
}
public Optional<String> getReservationAgent() {
return getTextValue(RESERVATION_AGENT);
}
public Optional<String> getReservationAdmissionPolicy() {
return getTextValue(RESERVATION_ADMISSION_POLICY);
}
public Optional<Element> getQueuePlacementPolicy() {
return Optional.ofNullable(queuePlacementPolicyElement);
}
private Optional<String> getTextValue(String key) {
return Optional.ofNullable(textValues.get(key));
}
public List<Element> getQueueElements() {
return queueElements;
}
public Map<String, Integer> getUserMaxApps() {
return userMaxApps;
}
public SchedulingPolicy getDefaultSchedulingPolicy() {
if (defaultSchedulingPolicy != null) {
return defaultSchedulingPolicy;
}
return SchedulingPolicy.DEFAULT_POLICY;
}
}
| AllocationFileParser |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.