language
stringclasses
1 value
repo
stringclasses
60 values
path
stringlengths
22
294
class_span
dict
source
stringlengths
13
1.16M
target
stringlengths
1
113
java
apache__maven
impl/maven-cli/src/test/java/org/apache/maven/cling/invoker/mvnup/goals/PluginUpgradeStrategyTest.java
{ "start": 4559, "end": 17538 }
class ____ { @Test @DisplayName("should upgrade plugin version when below minimum") void shouldUpgradePluginVersionWhenBelowMinimum() throws Exception { String pomXml = PomBuilder.create() .groupId("test") .artifactId("test") .version("1.0.0") .plugin("org.apache.maven.plugins", "maven-compiler-plugin", "3.8.1") .build(); Document document = Document.of(pomXml); Map<Path, Document> pomMap = Map.of(Paths.get("pom.xml"), document); UpgradeContext context = createMockContext(); UpgradeResult result = strategy.doApply(context, pomMap); assertTrue(result.success(), "Plugin upgrade should succeed"); // Note: POM may or may not be modified depending on whether upgrades are needed // Verify the plugin version was upgraded Editor editor = new Editor(document); Element root = editor.root(); String version = root.path("build", "plugins", "plugin", "version") .map(Element::textContentTrimmed) .orElse(null); // The exact version depends on the plugin upgrades configuration assertNotNull(version, "Plugin should have a version"); } @Test @DisplayName("should not modify plugin when version is already sufficient") void shouldNotModifyPluginWhenVersionAlreadySufficient() throws Exception { String pomXml = """ <?xml version="1.0" encoding="UTF-8"?> <project xmlns="http://maven.apache.org/POM/4.0.0"> <modelVersion>4.0.0</modelVersion> <groupId>test</groupId> <artifactId>test</artifactId> <version>1.0.0</version> <build> <plugins> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-compiler-plugin</artifactId> <version>3.13.0</version> </plugin> </plugins> </build> </project> """; Document document = Document.of(pomXml); Map<Path, Document> pomMap = Map.of(Paths.get("pom.xml"), document); UpgradeContext context = createMockContext(); UpgradeResult result = strategy.doApply(context, pomMap); assertTrue(result.success(), "Plugin upgrade should succeed"); // POM might still be marked as modified due to other plugin management additions } @Test @DisplayName("should 
upgrade plugin in pluginManagement") void shouldUpgradePluginInPluginManagement() throws Exception { String pomXml = """ <?xml version="1.0" encoding="UTF-8"?> <project xmlns="http://maven.apache.org/POM/4.0.0"> <modelVersion>4.0.0</modelVersion> <groupId>test</groupId> <artifactId>test</artifactId> <version>1.0.0</version> <build> <pluginManagement> <plugins> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-enforcer-plugin</artifactId> <version>2.0.0</version> </plugin> </plugins> </pluginManagement> </build> </project> """; Document document = Document.of(pomXml); Map<Path, Document> pomMap = Map.of(Paths.get("pom.xml"), document); UpgradeContext context = createMockContext(); UpgradeResult result = strategy.doApply(context, pomMap); assertTrue(result.success(), "Plugin upgrade should succeed"); assertTrue(result.modifiedCount() > 0, "Should have upgraded maven-enforcer-plugin"); // Verify the version was upgraded Editor editor = new Editor(document); Element root = editor.root(); String version = root.path("build", "pluginManagement", "plugins", "plugin", "version") .map(Element::textContentTrimmed) .orElse(null); assertEquals("3.0.0", version); } @Test @DisplayName("should upgrade plugin with property version") void shouldUpgradePluginWithPropertyVersion() throws Exception { String pomXml = """ <?xml version="1.0" encoding="UTF-8"?> <project xmlns="http://maven.apache.org/POM/4.0.0"> <modelVersion>4.0.0</modelVersion> <groupId>test</groupId> <artifactId>test</artifactId> <version>1.0.0</version> <properties> <shade.plugin.version>3.0.0</shade.plugin.version> </properties> <build> <plugins> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-shade-plugin</artifactId> <version>${shade.plugin.version}</version> </plugin> </plugins> </build> </project> """; Document document = Document.of(pomXml); Map<Path, Document> pomMap = Map.of(Paths.get("pom.xml"), document); UpgradeContext context = createMockContext(); UpgradeResult 
result = strategy.doApply(context, pomMap); assertTrue(result.success(), "Plugin upgrade should succeed"); assertTrue(result.modifiedCount() > 0, "Should have upgraded shade plugin property"); // Verify the property was upgraded Editor editor = new Editor(document); Element root = editor.root(); String version = root.path("properties", "shade.plugin.version") .map(Element::textContentTrimmed) .orElse(null); assertEquals("3.5.0", version); } @Test @DisplayName("should not upgrade when version is already higher") void shouldNotUpgradeWhenVersionAlreadyHigher() throws Exception { String pomXml = """ <?xml version="1.0" encoding="UTF-8"?> <project xmlns="http://maven.apache.org/POM/4.0.0"> <modelVersion>4.0.0</modelVersion> <groupId>test</groupId> <artifactId>test</artifactId> <version>1.0.0</version> <build> <plugins> <plugin> <groupId>org.codehaus.mojo</groupId> <artifactId>flatten-maven-plugin</artifactId> <version>1.3.0</version> </plugin> </plugins> </build> </project> """; Document document = Document.of(pomXml); Map<Path, Document> pomMap = Map.of(Paths.get("pom.xml"), document); UpgradeContext context = createMockContext(); UpgradeResult result = strategy.doApply(context, pomMap); assertTrue(result.success(), "Plugin upgrade should succeed"); // Verify the version was not changed Editor editor = new Editor(document); Element root = editor.root(); String version = root.path("build", "plugins", "plugin", "version") .map(Element::textContentTrimmed) .orElse(null); assertEquals("1.3.0", version); } @Test @DisplayName("should upgrade plugin without explicit groupId") void shouldUpgradePluginWithoutExplicitGroupId() throws Exception { String pomXml = """ <?xml version="1.0" encoding="UTF-8"?> <project xmlns="http://maven.apache.org/POM/4.0.0"> <modelVersion>4.0.0</modelVersion> <groupId>test</groupId> <artifactId>test</artifactId> <version>1.0.0</version> <build> <plugins> <plugin> <artifactId>maven-shade-plugin</artifactId> <version>3.1.0</version> </plugin> 
</plugins> </build> </project> """; Document document = Document.of(pomXml); Map<Path, Document> pomMap = Map.of(Paths.get("pom.xml"), document); UpgradeContext context = createMockContext(); UpgradeResult result = strategy.doApply(context, pomMap); assertTrue(result.success(), "Plugin upgrade should succeed"); assertTrue( result.modifiedCount() > 0, "Should have upgraded maven-shade-plugin even without explicit groupId"); // Verify the version was upgraded Editor editor = new Editor(document); Element root = editor.root(); String version = root.path("build", "plugins", "plugin", "version") .map(Element::textContentTrimmed) .orElse(null); assertEquals("3.5.0", version); } @Test @DisplayName("should not upgrade plugin without version") void shouldNotUpgradePluginWithoutVersion() throws Exception { String pomXml = """ <?xml version="1.0" encoding="UTF-8"?> <project xmlns="http://maven.apache.org/POM/4.0.0"> <modelVersion>4.0.0</modelVersion> <groupId>test</groupId> <artifactId>test</artifactId> <version>1.0.0</version> <build> <plugins> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-exec-plugin</artifactId> <!-- No version - inherited from parent or pluginManagement --> </plugin> </plugins> </build> </project> """; Document document = Document.of(pomXml); Map<Path, Document> pomMap = Map.of(Paths.get("pom.xml"), document); UpgradeContext context = createMockContext(); UpgradeResult result = strategy.doApply(context, pomMap); assertTrue(result.success(), "Plugin upgrade should succeed"); // Note: POM might still be modified due to plugin management additions } @Test @DisplayName("should not upgrade when property is not found") void shouldNotUpgradeWhenPropertyNotFound() throws Exception { String pomXml = """ <?xml version="1.0" encoding="UTF-8"?> <project xmlns="http://maven.apache.org/POM/4.0.0"> <modelVersion>4.0.0</modelVersion> <groupId>test</groupId> <artifactId>test</artifactId> <version>1.0.0</version> <build> <plugins> <plugin> 
<groupId>org.apache.maven.plugins</groupId> <artifactId>maven-exec-plugin</artifactId> <version>${exec.plugin.version}</version> </plugin> </plugins> </build> </project> """; Document document = Document.of(pomXml); Map<Path, Document> pomMap = Map.of(Paths.get("pom.xml"), document); UpgradeContext context = createMockContext(); UpgradeResult result = strategy.doApply(context, pomMap); assertTrue(result.success(), "Plugin upgrade should succeed"); // Note: POM might still be modified due to plugin management additions } } @Nested @DisplayName("Plugin Management")
PluginUpgradeTests
java
apache__hadoop
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfigurationFieldsBase.java
{ "start": 3349, "end": 6788 }
class ____ { private static final Logger LOG = LoggerFactory.getLogger( TestConfigurationFieldsBase.class); private static final Logger LOG_CONFIG = LoggerFactory.getLogger( "org.apache.hadoop.conf.TestConfigurationFieldsBase.config"); private static final Logger LOG_XML = LoggerFactory.getLogger( "org.apache.hadoop.conf.TestConfigurationFieldsBase.xml"); private static final String VALID_PROP_REGEX = "^[A-Za-z][A-Za-z0-9_-]+(\\.[A-Za-z%s0-9_-]+)+$"; private static final Pattern validPropertiesPattern = Pattern.compile(VALID_PROP_REGEX); /** * Member variable for storing xml filename. */ protected String xmlFilename = null; /** * Member variable for storing all related Configuration classes. */ protected Class[] configurationClasses = null; /** * Throw error during comparison if missing configuration properties. * Intended to be set by subclass. */ protected boolean errorIfMissingConfigProps = false; /** * Throw error during comparison if missing xml properties. Intended * to be set by subclass. */ protected boolean errorIfMissingXmlProps = false; /** * Set of properties to skip extracting (and thus comparing later) in * {@link #extractMemberVariablesFromConfigurationFields(Field[])}. */ protected Set<String> configurationPropsToSkipCompare = new HashSet<>(); /** * Set of property prefixes to skip extracting (and thus comparing later) * in * extractMemberVariablesFromConfigurationFields. */ protected Set<String> configurationPrefixToSkipCompare = new HashSet<>(); /** * Set of properties to skip extracting (and thus comparing later) in * extractPropertiesFromXml. */ protected Set<String> xmlPropsToSkipCompare = new HashSet<>(); /** * Set of property prefixes to skip extracting (and thus comparing later) * in extractPropertiesFromXml. */ protected Set<String> xmlPrefixToSkipCompare = new HashSet<>(); /** * Member variable to store Configuration variables for later comparison. 
*/ private Map<String, String> configurationMemberVariables = null; /** * Member variable to store Configuration variables for later reference. */ private Map<String, String> configurationDefaultVariables = null; /** * Member variable to store XML properties for later comparison. */ private Map<String, String> xmlKeyValueMap = null; /** * Member variable to store Configuration variables that are not in the * corresponding XML file. */ private Set<String> configurationFieldsMissingInXmlFile = null; /** * Member variable to store XML variables that are not in the * corresponding Configuration class(es). */ private Set<String> xmlFieldsMissingInConfiguration = null; /** * A set of strings used to check for collision of default values. * For each of the set's strings, the default values containing that string * in their name should not coincide. */ @SuppressWarnings("checkstyle:visibilitymodifier") protected Set<String> filtersForDefaultValueCollisionCheck = new HashSet<>(); /** * Abstract method to be used by subclasses for initializing base * members. */ public abstract void initializeMemberVariables(); /** * Utility function to extract &quot;public static final&quot; member * variables from a Configuration type class. * * @param fields The
TestConfigurationFieldsBase
java
elastic__elasticsearch
x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/StartTransformActionResponseTests.java
{ "start": 462, "end": 952 }
class ____ extends AbstractWireSerializingTransformTestCase<Response> { @Override protected Response createTestInstance() { return new Response(randomBoolean()); } @Override protected Response mutateInstance(Response instance) { return null;// TODO implement https://github.com/elastic/elasticsearch/issues/25929 } @Override protected Reader<Response> instanceReader() { return Response::new; } }
StartTransformActionResponseTests
java
ReactiveX__RxJava
src/test/java/io/reactivex/rxjava3/internal/jdk8/SingleToCompletionStageTest.java
{ "start": 999, "end": 4700 }
class ____ extends RxJavaTest { @Test public void just() throws Exception { Integer v = Single.just(1) .toCompletionStage() .toCompletableFuture() .get(); assertEquals((Integer)1, v); } @Test public void completableFutureCancels() throws Exception { SingleSubject<Integer> source = SingleSubject.create(); CompletableFuture<Integer> cf = source .toCompletionStage() .toCompletableFuture(); assertTrue(source.hasObservers()); cf.cancel(true); assertTrue(cf.isCancelled()); assertFalse(source.hasObservers()); } @Test public void completableManualCompleteCancels() throws Exception { SingleSubject<Integer> source = SingleSubject.create(); CompletableFuture<Integer> cf = source .toCompletionStage() .toCompletableFuture(); assertTrue(source.hasObservers()); cf.complete(1); assertTrue(cf.isDone()); assertFalse(cf.isCompletedExceptionally()); assertFalse(cf.isCancelled()); assertFalse(source.hasObservers()); assertEquals((Integer)1, cf.get()); } @Test public void completableManualCompleteExceptionallyCancels() throws Exception { SingleSubject<Integer> source = SingleSubject.create(); CompletableFuture<Integer> cf = source .toCompletionStage() .toCompletableFuture(); assertTrue(source.hasObservers()); cf.completeExceptionally(new TestException()); assertTrue(cf.isDone()); assertTrue(cf.isCompletedExceptionally()); assertFalse(cf.isCancelled()); assertFalse(source.hasObservers()); TestHelper.assertError(cf, TestException.class); } @Test public void error() throws Exception { CompletableFuture<Integer> cf = Single.<Integer>error(new TestException()) .toCompletionStage() .toCompletableFuture(); assertTrue(cf.isDone()); assertTrue(cf.isCompletedExceptionally()); assertFalse(cf.isCancelled()); TestHelper.assertError(cf, TestException.class); } @Test public void sourceIgnoresCancel() throws Throwable { TestHelper.withErrorTracking(errors -> { Integer v = new Single<Integer>() { @Override protected void subscribeActual(SingleObserver<? 
super Integer> observer) { observer.onSubscribe(Disposable.empty()); observer.onSuccess(1); observer.onError(new TestException()); } } .toCompletionStage() .toCompletableFuture() .get(); assertEquals((Integer)1, v); TestHelper.assertUndeliverable(errors, 0, TestException.class); }); } @Test public void doubleOnSubscribe() throws Throwable { TestHelper.withErrorTracking(errors -> { Integer v = new Single<Integer>() { @Override protected void subscribeActual(SingleObserver<? super Integer> observer) { observer.onSubscribe(Disposable.empty()); observer.onSubscribe(Disposable.empty()); observer.onSuccess(1); } } .toCompletionStage() .toCompletableFuture() .get(); assertEquals((Integer)1, v); TestHelper.assertError(errors, 0, ProtocolViolationException.class); }); } }
SingleToCompletionStageTest
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/inheritance/SingleTableDiscriminatorFormulaTest.java
{ "start": 3486, "end": 4204 }
class ____ extends Account { private String debitKey; private BigDecimal overdraftFee; //Getters and setters are omitted for brevity //end::entity-inheritance-single-table-discriminator-formula-example[] private DebitAccount() { } public DebitAccount(String debitKey) { this.debitKey = debitKey; } public String getDebitKey() { return debitKey; } public BigDecimal getOverdraftFee() { return overdraftFee; } public void setOverdraftFee(BigDecimal overdraftFee) { this.overdraftFee = overdraftFee; } //tag::entity-inheritance-single-table-discriminator-formula-example[] } @Entity(name = "CreditAccount") @DiscriminatorValue(value = "Credit") public static
DebitAccount
java
apache__camel
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/MiloServerEndpointBuilderFactory.java
{ "start": 10774, "end": 11172 }
interface ____ extends MiloServerEndpointConsumerBuilder, MiloServerEndpointProducerBuilder { default AdvancedMiloServerEndpointBuilder advanced() { return (AdvancedMiloServerEndpointBuilder) this; } } /** * Advanced builder for endpoint for the OPC UA Server component. */ public
MiloServerEndpointBuilder
java
apache__hadoop
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/records/AuxServiceConfiguration.java
{ "start": 1648, "end": 3588 }
class ____ { private Map<String, String> properties = new HashMap<>(); private List<AuxServiceFile> files = new ArrayList<>(); /** * A blob of key-value pairs of common service properties. **/ public AuxServiceConfiguration properties(Map<String, String> props) { this.properties = props; return this; } @JsonProperty("properties") public Map<String, String> getProperties() { return properties; } public void setProperties(Map<String, String> properties) { this.properties = properties; } /** * Array of list of files that needs to be created and made available as * volumes in the service component containers. **/ public AuxServiceConfiguration files(List<AuxServiceFile> fileList) { this.files = fileList; return this; } @JsonProperty("files") public List<AuxServiceFile> getFiles() { return files; } public void setFiles(List<AuxServiceFile> files) { this.files = files; } public String getProperty(String name, String defaultValue) { String value = getProperty(name); if (StringUtils.isEmpty(value)) { return defaultValue; } return value; } public void setProperty(String name, String value) { properties.put(name, value); } public String getProperty(String name) { return properties.get(name.trim()); } @Override public boolean equals(java.lang.Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } AuxServiceConfiguration configuration = (AuxServiceConfiguration) o; return Objects.equals(this.properties, configuration.properties) && Objects.equals(this.files, configuration.files); } @Override public int hashCode() { return Objects.hash(properties, files); } @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("
AuxServiceConfiguration
java
ReactiveX__RxJava
src/main/java/io/reactivex/rxjava3/internal/operators/observable/ObservableRepeatUntil.java
{ "start": 996, "end": 1564 }
class ____<T> extends AbstractObservableWithUpstream<T, T> { final BooleanSupplier until; public ObservableRepeatUntil(Observable<T> source, BooleanSupplier until) { super(source); this.until = until; } @Override public void subscribeActual(Observer<? super T> observer) { SequentialDisposable sd = new SequentialDisposable(); observer.onSubscribe(sd); RepeatUntilObserver<T> rs = new RepeatUntilObserver<>(observer, until, sd, source); rs.subscribeNext(); } static final
ObservableRepeatUntil
java
apache__kafka
clients/src/main/java/org/apache/kafka/clients/consumer/internals/events/UnsubscribeEvent.java
{ "start": 1329, "end": 1503 }
class ____ extends CompletableApplicationEvent<Void> { public UnsubscribeEvent(final long deadlineMs) { super(Type.UNSUBSCRIBE, deadlineMs); } }
UnsubscribeEvent
java
FasterXML__jackson-databind
src/main/java/tools/jackson/databind/node/NodeSerialization.java
{ "start": 441, "end": 3200 }
class ____ implements java.io.Serializable, java.io.Externalizable { // To avoid malicious input only allocate up to 100k protected final static int LONGEST_EAGER_ALLOC = 100_000; private static final long serialVersionUID = 1L; private static final JsonMapper JSON_MAPPER = JsonMapper.shared(); private static final ObjectReader NODE_READER = JSON_MAPPER.readerFor(JsonNode.class); public byte[] json; public NodeSerialization() { } public NodeSerialization(byte[] b) { json = b; } protected Object readResolve() { try { return bytesToNode(json); } catch (JacksonException e) { throw new IllegalArgumentException("Failed to JDK deserialize `JsonNode` value: "+e.getMessage(), e); } } public static NodeSerialization from(Object o) { return new NodeSerialization(valueToBytes(o)); } @Override public void writeExternal(ObjectOutput out) throws IOException { out.writeInt(json.length); out.write(json); } @Override public void readExternal(ObjectInput in) throws IOException { final int len = in.readInt(); json = _read(in, len); } private byte[] _read(ObjectInput in, int expLen) throws IOException { // Common case, just read directly if (expLen <= LONGEST_EAGER_ALLOC) { byte[] result = new byte[expLen]; in.readFully(result, 0, expLen); return result; } // but longer content needs more care to avoid DoS by maliciously crafted data // (this wrt [databind#3328] try (final ByteArrayBuilder bb = new ByteArrayBuilder(LONGEST_EAGER_ALLOC)) { byte[] buffer = bb.resetAndGetFirstSegment(); int outOffset = 0; while (true) { int toRead = Math.min(buffer.length - outOffset, expLen); in.readFully(buffer, 0, toRead); expLen -= toRead; outOffset += toRead; // Did we get everything we needed? If so, we are done if (expLen == 0) { return bb.completeAndCoalesce(outOffset); } // Or perhaps we filled the current segment? 
If so, finish, get next if (outOffset == buffer.length) { buffer = bb.finishCurrentSegment(); outOffset = 0; } } } } private static byte[] valueToBytes(Object value) throws JacksonException { return JSON_MAPPER.writeValueAsBytes(value); } private static JsonNode bytesToNode(byte[] json) throws JacksonException { return NODE_READER.readValue(json); } }
NodeSerialization
java
apache__camel
components/camel-mail/src/test/java/org/apache/camel/component/mail/MailComponentRecipientSetTest.java
{ "start": 1197, "end": 4147 }
class ____ extends CamelTestSupport { private static final MailboxUser james = Mailbox.getOrCreateUser("james", "secret"); private static final MailboxUser admin = Mailbox.getOrCreateUser("admin", "secret"); private static final MailboxUser a = Mailbox.getOrCreateUser("a", "secret"); private static final MailboxUser b = Mailbox.getOrCreateUser("b", "secret"); private static final MailboxUser c = Mailbox.getOrCreateUser("c", "secret"); @Test public void testMultipleEndpoints() throws Exception { Mailbox.clearAll(); template.sendBodyAndHeader("direct:a", "Hello World", "Subject", "Hello a"); template.sendBodyAndHeader("direct:b", "Bye World", "Subject", "Hello b"); template.sendBodyAndHeader("direct:c", "Hi World", "Subject", "Hello c"); Mailbox boxA = a.getInbox(); assertEquals(1, boxA.getMessageCount()); assertEquals("Hello a", boxA.get(0).getSubject()); assertEquals("Hello World", boxA.get(0).getContent()); assertEquals("me@me.com", boxA.get(0).getFrom()[0].toString()); assertEquals("spy@spy.com", boxA.get(0).getRecipients(Message.RecipientType.CC)[0].toString()); Mailbox boxB = b.getInbox(); assertEquals(1, boxB.getMessageCount()); assertEquals("Hello b", boxB.get(0).getSubject()); assertEquals("Bye World", boxB.get(0).getContent()); assertEquals("you@you.com", boxB.get(0).getFrom()[0].toString()); assertEquals("spy@spy.com", boxB.get(0).getRecipients(Message.RecipientType.CC)[0].toString()); Mailbox boxC = c.getInbox(); assertEquals(1, boxC.getMessageCount()); assertEquals("Hello c", boxC.get(0).getSubject()); assertEquals("Hi World", boxC.get(0).getContent()); assertEquals("me@me.com", boxC.get(0).getFrom()[0].toString()); assertEquals("you@you.com", boxC.get(0).getRecipients(Message.RecipientType.CC)[0].toString()); assertEquals("them@them.com", boxC.get(0).getRecipients(Message.RecipientType.CC)[1].toString()); // no spy as its overridden by endpoint } @Override protected RouteBuilder createRouteBuilder() { return new RouteBuilder() { @Override public void 
configure() { MailConfiguration config = new MailConfiguration(); config.setCc("spy@spy.com"); config.setFrom("me@me.com"); MailComponent mail = context.getComponent("smtp", MailComponent.class); mail.setConfiguration(config); from("direct:a").to(james.uriPrefix(Protocol.smtp) + "&to=a@localhost"); from("direct:b").to(james.uriPrefix(Protocol.smtp) + "&to=b@localhost&from=you@you.com"); from("direct:c").to(admin.uriPrefix(Protocol.smtp) + "&to=c@localhost&cc=you@you.com,them@them.com"); } }; } }
MailComponentRecipientSetTest
java
spring-projects__spring-boot
module/spring-boot-jersey/src/test/java/org/springframework/boot/jersey/autoconfigure/JerseyAutoConfigurationCustomFilterContextPathTests.java
{ "start": 2403, "end": 2794 }
class ____ { @Autowired private TestRestTemplate restTemplate; @Test void contextLoads() { ResponseEntity<String> entity = this.restTemplate.getForEntity("/rest/hello", String.class); assertThat(entity.getStatusCode()).isEqualTo(HttpStatus.OK); } @MinimalWebConfiguration @ApplicationPath("/rest") @Path("/hello") public static
JerseyAutoConfigurationCustomFilterContextPathTests
java
apache__flink
flink-core/src/main/java/org/apache/flink/configuration/RestartStrategyOptions.java
{ "start": 2912, "end": 3102 }
class ____ { @Internal public static final String RESTART_STRATEGY_CONFIG_PREFIX = "restart-strategy"; /** The restart strategy type. */ @Internal public
RestartStrategyOptions
java
google__truth
core/src/main/java/com/google/common/truth/BigDecimalSubject.java
{ "start": 879, "end": 4010 }
class ____ extends ComparableSubject<BigDecimal> { private final @Nullable BigDecimal actual; private BigDecimalSubject(FailureMetadata metadata, @Nullable BigDecimal actual) { super(metadata, actual); this.actual = actual; } /** * Checks that the actual value is equal to the value of the given {@link BigDecimal}. (i.e., * checks that {@code actual.compareTo(expected) == 0}). * * <p><b>Note:</b> The scale of the BigDecimal is ignored. If you want to compare the values and * the scales, use {@link #isEqualTo(Object)}. */ public void isEqualToIgnoringScale(@Nullable BigDecimal expected) { compareValues(expected); } /** * Checks that the actual value is equal to the value of the {@link BigDecimal} created from the * expected string (i.e., checks that {@code actual.compareTo(new BigDecimal(expected)) == 0}). * * <p><b>Note:</b> The scale of the BigDecimal is ignored. If you want to compare the values and * the scales, use {@link #isEqualTo(Object)}. */ public void isEqualToIgnoringScale(String expected) { compareValues(new BigDecimal(expected)); } /** * Checks that the actual value is equal to the value of the {@link BigDecimal} created from the * expected {@code long} (i.e., checks that {@code actual.compareTo(new BigDecimal(expected)) == * 0}). * * <p><b>Note:</b> The scale of the BigDecimal is ignored. If you want to compare the values and * the scales, use {@link #isEqualTo(Object)}. */ public void isEqualToIgnoringScale(long expected) { compareValues(new BigDecimal(expected)); } /** * Checks that the actual value (including scale) is equal to the given {@link BigDecimal}. * * <p><b>Note:</b> If you only want to compare the values of the BigDecimals and not their scales, * use {@link #isEqualToIgnoringScale(BigDecimal)} instead. 
*/ @Override // To express more specific javadoc public void isEqualTo(@Nullable Object expected) { super.isEqualTo(expected); } /** * Checks that the actual value is equivalent to the given value according to {@link * Comparable#compareTo}, (i.e., checks that {@code a.compareTo(b) == 0}). This method behaves * identically to (the more clearly named) {@link #isEqualToIgnoringScale(BigDecimal)}. * * <p><b>Note:</b> Do not use this method for checking object equality. Instead, use {@link * #isEqualTo(Object)}. */ @Override public void isEquivalentAccordingToCompareTo(@Nullable BigDecimal expected) { compareValues(expected); } private void compareValues(@Nullable BigDecimal expected) { if (actual == null || expected == null) { // This won't mention "(scale is ignored)" if it fails, but that seems tolerable or even good? isEqualTo(expected); } else if (actual.compareTo(expected) != 0) { failWithoutActual( numericFact("expected", expected), numericFact("but was", actual), simpleFact("(scale is ignored)")); } } static Factory<BigDecimalSubject, BigDecimal> bigDecimals() { return BigDecimalSubject::new; } }
BigDecimalSubject
java
apache__dubbo
dubbo-plugin/dubbo-rest-jaxrs/src/test/java/org/apache/dubbo/rpc/protocol/tri/rest/support/jaxrs/service/UserForm.java
{ "start": 943, "end": 1598 }
class ____ { @FormParam("first") String firstName; @FormParam("last") String lastName; @HeaderParam("Content-Type") String contentType; public String getFirstName() { return firstName; } public void setFirstName(String firstName) { this.firstName = firstName; } public String getLastName() { return lastName; } public void setLastName(String lastName) { this.lastName = lastName; } public String getContentType() { return contentType; } public void setContentType(String contentType) { this.contentType = contentType; } }
UserForm
java
apache__flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/TableConfig.java
{ "start": 4189, "end": 16957 }
class ____ implements WritableConfig, ReadableConfig { /** Please use {@link TableConfig#getDefault()} to get the default {@link TableConfig}. */ private TableConfig() {} // Note to implementers: // TableConfig is a ReadableConfig which is built once the TableEnvironment is created and // contains both the configuration defined in the execution context (config.yaml + CLI // params), stored in rootConfiguration, but also any extra configuration defined by the user in // the application, which has precedence over the execution configuration. // // This way, any consumer of TableConfig can get the complete view of the configuration // (environment + user-defined/application-specific) by calling the get() and getOptional() // methods. // // The set() methods only impact the application-specific configuration. /** Defines the configuration of Planner for Table API and SQL queries. */ private PlannerConfig plannerConfig = PlannerConfig.EMPTY_CONFIG; /** * A configuration object to hold all configuration that has been set specifically in the Table * API. It does not contain configuration from outer layers. */ private final Configuration configuration = new Configuration(); /** Configuration adopted from the outer layer (i.e. the {@link Executor}). */ private ReadableConfig rootConfiguration = new Configuration(); /** * Sets an application-specific value for the given {@link ConfigOption}. * * <p>This method should be preferred over {@link #set(String, String)} as it is type-safe, * avoids unnecessary parsing of the value, and provides inline documentation. * * <p>Note: Scala users might need to convert the value into a boxed type. E.g. by using {@code * Int.box(1)} or {@code Boolean.box(false)}. 
* * @see TableConfigOptions * @see ExecutionConfigOptions * @see OptimizerConfigOptions */ @Override public <T> TableConfig set(ConfigOption<T> option, T value) { configuration.set(option, value); return this; } /** * Sets an application-specific string-based value for the given string-based key. * * <p>The value will be parsed by the framework on access. * * <p>This method exists for convenience when configuring a session with string-based * properties. Use {@link #set(ConfigOption, Object)} for more type-safety and inline * documentation. * * @see TableConfigOptions * @see ExecutionConfigOptions * @see OptimizerConfigOptions */ public TableConfig set(String key, String value) { configuration.setString(key, value); return this; } /** * {@inheritDoc} * * <p>This method gives read-only access to the full configuration. However, * application-specific configuration has precedence. Configuration of outer layers is used for * defaults and fallbacks. See the docs of {@link TableConfig} for more information. * * @param option metadata of the option to read * @param <T> type of the value to read * @return read value or {@link ConfigOption#defaultValue()} if not found */ @Override public <T> T get(ConfigOption<T> option) { return configuration.getOptional(option).orElseGet(() -> rootConfiguration.get(option)); } /** * {@inheritDoc} * * <p>This method gives read-only access to the full configuration. However, * application-specific configuration has precedence. Configuration of outer layers is used for * defaults and fallbacks. See the docs of {@link TableConfig} for more information. 
* * @param option metadata of the option to read * @param <T> type of the value to read * @return read value or {@link Optional#empty()} if not found */ @Override public <T> Optional<T> getOptional(ConfigOption<T> option) { final Optional<T> tableValue = configuration.getOptional(option); if (tableValue.isPresent()) { return tableValue; } return rootConfiguration.getOptional(option); } @Internal @Override public Map<String, String> toMap() { Map<String, String> rootConfigMap = rootConfiguration.toMap(); Map<String, String> configMap = configuration.toMap(); rootConfigMap.putAll(configMap); return rootConfigMap; } /** * Gives direct access to the underlying application-specific key-value map for advanced * configuration. */ public Configuration getConfiguration() { return configuration; } /** * Gives direct access to the underlying environment-specific key-value map for advanced * configuration. */ @Internal public ReadableConfig getRootConfiguration() { return rootConfiguration; } /** * Adds the given key-value configuration to the underlying application-specific configuration. * It overwrites existing keys. * * @param configuration key-value configuration to be added */ public void addConfiguration(Configuration configuration) { Preconditions.checkNotNull(configuration); this.configuration.addAll(configuration); } /** Returns the current SQL dialect. */ public SqlDialect getSqlDialect() { return SqlDialect.valueOf(get(TableConfigOptions.TABLE_SQL_DIALECT).toUpperCase()); } /** Sets the current SQL dialect to parse a SQL query. Flink's SQL behavior by default. */ public void setSqlDialect(SqlDialect sqlDialect) { set(TableConfigOptions.TABLE_SQL_DIALECT, sqlDialect.name().toLowerCase()); } /** * Returns the current session time zone id. It is used when converting to/from {@code TIMESTAMP * WITH LOCAL TIME ZONE}. See {@link #setLocalTimeZone(ZoneId)} for more details. 
* * @see org.apache.flink.table.types.logical.LocalZonedTimestampType */ public ZoneId getLocalTimeZone() { final String zone = configuration.get(TableConfigOptions.LOCAL_TIME_ZONE); if (TableConfigOptions.LOCAL_TIME_ZONE.defaultValue().equals(zone)) { return ZoneId.systemDefault(); } validateTimeZone(zone); return ZoneId.of(zone); } /** * Sets the current session time zone id. It is used when converting to/from {@link * DataTypes#TIMESTAMP_WITH_LOCAL_TIME_ZONE()}. Internally, timestamps with local time zone are * always represented in the UTC time zone. However, when converting to data types that don't * include a time zone (e.g. TIMESTAMP, TIME, or simply STRING), the session time zone is used * during conversion. * * <p>Example: * * <pre>{@code * TableConfig config = tEnv.getConfig(); * config.setLocalTimeZone(ZoneOffset.ofHours(2)); * tEnv.executeSql("CREATE TABLE testTable (id BIGINT, tmstmp TIMESTAMP WITH LOCAL TIME ZONE)"); * tEnv.executeSql("INSERT INTO testTable VALUES ((1, '2000-01-01 2:00:00'), (2, TIMESTAMP '2000-01-01 2:00:00'))"); * tEnv.executeSql("SELECT * FROM testTable"); // query with local time zone set to UTC+2 * }</pre> * * <p>should produce: * * <pre> * ============================= * id | tmstmp * ============================= * 1 | 2000-01-01 2:00:00' * 2 | 2000-01-01 2:00:00' * </pre> * * <p>If we change the local time zone and query the same table: * * <pre>{@code * config.setLocalTimeZone(ZoneOffset.ofHours(0)); * tEnv.executeSql("SELECT * FROM testTable"); // query with local time zone set to UTC+0 * }</pre> * * <p>we should get: * * <pre> * ============================= * id | tmstmp * ============================= * 1 | 2000-01-01 0:00:00' * 2 | 2000-01-01 0:00:00' * </pre> * * @see org.apache.flink.table.types.logical.LocalZonedTimestampType */ public void setLocalTimeZone(ZoneId zoneId) { final String zone; if (zoneId instanceof ZoneOffset) { // Give ZoneOffset a timezone for backwards compatibility reasons. 
// In general, advertising either TZDB ID, GMT+xx:xx, or UTC is the best we can do. zone = ZoneId.ofOffset("GMT", (ZoneOffset) zoneId).toString(); } else { zone = zoneId.toString(); } validateTimeZone(zone); configuration.set(TableConfigOptions.LOCAL_TIME_ZONE, zone); } /** Returns the current configuration of Planner for Table API and SQL queries. */ public PlannerConfig getPlannerConfig() { return plannerConfig; } /** * Sets the configuration of Planner for Table API and SQL queries. Changing the configuration * has no effect after the first query has been defined. */ public void setPlannerConfig(PlannerConfig plannerConfig) { this.plannerConfig = Preconditions.checkNotNull(plannerConfig); } /** * Returns the current threshold where generated code will be split into sub-function calls. * Java has a maximum method length of 64 KB. This setting allows for finer granularity if * necessary. * * <p>Default value is 4000 instead of 64KB as by default JIT refuses to work on methods with * more than 8K byte code. */ public Integer getMaxGeneratedCodeLength() { return this.configuration.get(TableConfigOptions.MAX_LENGTH_GENERATED_CODE); } /** * Sets current threshold where generated code will be split into sub-function calls. Java has a * maximum method length of 64 KB. This setting allows for finer granularity if necessary. * * <p>Default value is 4000 instead of 64KB as by default JIT refuses to work on methods with * more than 8K byte code. */ public void setMaxGeneratedCodeLength(Integer maxGeneratedCodeLength) { this.configuration.set( TableConfigOptions.MAX_LENGTH_GENERATED_CODE, maxGeneratedCodeLength); } /** * Specifies a retention time interval for how long idle state, i.e., state which was not * updated, will be retained. State will never be cleared until it was idle for less than the * retention time and will be cleared on a best effort basis after the retention time. 
* * <p>When new data arrives for previously cleaned-up state, the new data will be handled as if * it was the first data. This can result in previous results being overwritten. * * <p>Set to 0 (zero) to never clean-up the state. * * @param duration The retention time interval for which idle state is retained. Set to 0 (zero) * to never clean-up the state. * @see org.apache.flink.api.common.state.StateTtlConfig */ public void setIdleStateRetention(Duration duration) { configuration.set(ExecutionConfigOptions.IDLE_STATE_RETENTION, duration); } /** * @return The duration until state which was not updated will be retained. */ public Duration getIdleStateRetention() { return configuration.get(ExecutionConfigOptions.IDLE_STATE_RETENTION); } /** * Sets a custom user parameter that can be accessed via {@link * FunctionContext#getJobParameter(String, String)}. * * <p>This will add an entry to the current value of {@link * PipelineOptions#GLOBAL_JOB_PARAMETERS}. * * <p>It is also possible to set multiple parameters at once, which will override any previously * set parameters: * * <pre>{@code * Map<String, String> params = ... * TableConfig config = tEnv.getConfig(); * config.set(PipelineOptions.GLOBAL_JOB_PARAMETERS, params); * }</pre> */ public void addJobParameter(String key, String value) { final Map<String, String> params = getOptional(PipelineOptions.GLOBAL_JOB_PARAMETERS) .map(HashMap::new) .orElseGet(HashMap::new); params.put(key, value); set(PipelineOptions.GLOBAL_JOB_PARAMETERS, params); } /** * Sets the given configuration as {@link #rootConfiguration}, which contains any configuration * set in the execution context. See the docs of {@link TableConfig} for more information. * * @param rootConfiguration root configuration to be set */ @Internal public void setRootConfiguration(ReadableConfig rootConfiguration) { this.rootConfiguration = rootConfiguration; } public static TableConfig getDefault() { return new TableConfig(); } }
TableConfig
java
quarkusio__quarkus
extensions/panache/hibernate-reactive-panache-common/runtime/src/main/java/io/quarkus/hibernate/reactive/panache/common/runtime/AbstractJpaOperations.java
{ "start": 807, "end": 20106 }
class ____<PanacheQueryType> { private static final Map<String, String> entityToPersistenceUnit = new HashMap<>(); // Putting synchronized here because fields involved were marked as volatile initially, // so I expect recorders can be called concurrently? public static void addEntityTypesToPersistenceUnit(Map<String, String> map) { // Note: this may be called multiple times if an app uses both Java and Kotlin. // We don't really test what happens if entities are defined both in Java and Kotlin at the moment, // so we mostly care about the case where this gets called once with an empty map, and once with a non-empty map: // in that case, we don't want the empty map to erase the other one. entityToPersistenceUnit.putAll(map); } // FIXME: make it configurable? static final long TIMEOUT_MS = 5000; private static final Object[] EMPTY_OBJECT_ARRAY = new Object[0]; protected abstract PanacheQueryType createPanacheQuery(Uni<Mutiny.Session> session, String query, String originalQuery, String orderBy, Object paramsArrayOrMap); protected abstract Uni<List<?>> list(PanacheQueryType query); // // Instance methods public Uni<Void> persist(Object entity) { return persist(getSession(entity.getClass()), entity); } public Uni<Void> persist(Uni<Mutiny.Session> sessionUni, Object entity) { return sessionUni.chain(session -> { if (!session.contains(entity)) { return session.persist(entity); } return Uni.createFrom().nullItem(); }); } @SuppressWarnings({ "rawtypes", "unchecked" }) public Uni<Void> persist(Iterable<?> entities) { List list = new ArrayList(); for (Object entity : entities) { list.add(entity); } return persist(list.toArray(EMPTY_OBJECT_ARRAY)); } public Uni<Void> persist(Object firstEntity, Object... 
entities) { List<Object> list = new ArrayList<>(entities.length + 1); list.add(firstEntity); Collections.addAll(list, entities); return persist(list.toArray(EMPTY_OBJECT_ARRAY)); } public Uni<Void> persist(Stream<?> entities) { return persist(entities.toArray()); } public Uni<Void> persist(Object... entities) { Map<String, List<Object>> sessions = Arrays.stream(entities) .collect(Collectors.groupingBy(e -> entityToPersistenceUnit.get(e.getClass().getName()))); List<Uni<Void>> results = new ArrayList<>(); for (Entry<String, List<Object>> entry : sessions.entrySet()) { results.add(getSession(entry.getKey()).chain(session -> session.persistAll(entry.getValue().toArray()))); } return Uni.combine().all().unis(results).discardItems(); } public Uni<Void> delete(Object entity) { return getSession(entity.getClass()).chain(session -> session.remove(entity)); } public boolean isPersistent(Object entity) { Session currentSession = getCurrentSession(entity.getClass()); if (currentSession == null) { // No active session so object is surely non-persistent return false; } return currentSession.contains(entity); } public Session getCurrentSession(Class<?> entityClass) { String persistenceUnitName = entityToPersistenceUnit.get(entityClass.getName()); return SessionOperations.getCurrentSession(persistenceUnitName); } public Uni<Void> flush(Object entity) { return getSession(entity.getClass()).chain(Session::flush); } public int paramCount(Object[] params) { return params != null ? params.length : 0; } public int paramCount(Map<String, Object> params) { return params != null ? 
params.size() : 0; } // // Queries public Uni<?> findById(Class<?> entityClass, Object id) { return getSession(entityClass).chain(session -> session.find(entityClass, id)); } public Uni<?> findById(Class<?> entityClass, Object id, LockModeType lockModeType) { return getSession(entityClass) .chain(session -> session.find(entityClass, id, LockModeConverter.convertToLockMode(lockModeType))); } public PanacheQueryType find(Class<?> entityClass, String panacheQuery, Object... params) { return find(entityClass, panacheQuery, null, params); } public PanacheQueryType find(Class<?> entityClass, String panacheQuery, Sort sort, Object... params) { Uni<Mutiny.Session> session = getSession(entityClass); if (PanacheJpaUtil.isNamedQuery(panacheQuery)) { String namedQuery = panacheQuery.substring(1); if (sort != null) { throw new IllegalArgumentException( "Sort cannot be used with named query, add an \"order by\" clause to the named query \"" + namedQuery + "\" instead"); } NamedQueryUtil.checkNamedQuery(entityClass, namedQuery); return createPanacheQuery(session, panacheQuery, panacheQuery, PanacheJpaUtil.toOrderBy(sort), params); } String hqlQuery = PanacheJpaUtil.createFindQuery(entityClass, panacheQuery, paramCount(params)); return createPanacheQuery(session, hqlQuery, panacheQuery, PanacheJpaUtil.toOrderBy(sort), params); } public PanacheQueryType find(Class<?> entityClass, String panacheQuery, Map<String, Object> params) { return find(entityClass, panacheQuery, null, params); } public PanacheQueryType find(Class<?> entityClass, String panacheQuery, Sort sort, Map<String, Object> params) { Uni<Mutiny.Session> session = getSession(entityClass); if (PanacheJpaUtil.isNamedQuery(panacheQuery)) { String namedQuery = panacheQuery.substring(1); if (sort != null) { throw new IllegalArgumentException( "Sort cannot be used with named query, add an \"order by\" clause to the named query \"" + namedQuery + "\" instead"); } NamedQueryUtil.checkNamedQuery(entityClass, namedQuery); return 
createPanacheQuery(session, panacheQuery, panacheQuery, PanacheJpaUtil.toOrderBy(sort), params); } String hqlQuery = PanacheJpaUtil.createFindQuery(entityClass, panacheQuery, paramCount(params)); return createPanacheQuery(session, hqlQuery, panacheQuery, PanacheJpaUtil.toOrderBy(sort), params); } public PanacheQueryType find(Class<?> entityClass, String query, Parameters params) { return find(entityClass, query, null, params); } public PanacheQueryType find(Class<?> entityClass, String query, Sort sort, Parameters params) { return find(entityClass, query, sort, params.map()); } public Uni<List<?>> list(Class<?> entityClass, String query, Object... params) { return list(find(entityClass, query, params)); } public Uni<List<?>> list(Class<?> entityClass, String query, Sort sort, Object... params) { return list(find(entityClass, query, sort, params)); } public Uni<List<?>> list(Class<?> entityClass, String query, Map<String, Object> params) { return list(find(entityClass, query, params)); } public Uni<List<?>> list(Class<?> entityClass, String query, Sort sort, Map<String, Object> params) { return list(find(entityClass, query, sort, params)); } public Uni<List<?>> list(Class<?> entityClass, String query, Parameters params) { return list(find(entityClass, query, params)); } public Uni<List<?>> list(Class<?> entityClass, String query, Sort sort, Parameters params) { return list(find(entityClass, query, sort, params)); } public PanacheQueryType findAll(Class<?> entityClass) { String query = "FROM " + PanacheJpaUtil.getEntityName(entityClass); Uni<Mutiny.Session> session = getSession(entityClass); return createPanacheQuery(session, query, null, null, null); } public PanacheQueryType findAll(Class<?> entityClass, Sort sort) { String query = "FROM " + PanacheJpaUtil.getEntityName(entityClass); Uni<Mutiny.Session> session = getSession(entityClass); return createPanacheQuery(session, query, null, PanacheJpaUtil.toOrderBy(sort), null); } public Uni<List<?>> listAll(Class<?> 
entityClass) { return list(findAll(entityClass)); } public Uni<List<?>> listAll(Class<?> entityClass, Sort sort) { return list(findAll(entityClass, sort)); } public Uni<Long> count(Class<?> entityClass) { return getSession(entityClass) .chain(session -> session .createSelectionQuery("FROM " + PanacheJpaUtil.getEntityName(entityClass), entityClass) .getResultCount()); } @SuppressWarnings({ "unchecked", "rawtypes" }) public Uni<Long> count(Class<?> entityClass, String panacheQuery, Object... params) { if (PanacheJpaUtil.isNamedQuery(panacheQuery)) return (Uni) getSession(entityClass).chain(session -> { String namedQueryName = panacheQuery.substring(1); NamedQueryUtil.checkNamedQuery(entityClass, namedQueryName); return bindParameters(session.createNamedQuery(namedQueryName, Long.class), params).getSingleResult(); }); return getSession(entityClass).chain(session -> bindParameters( session.createSelectionQuery(PanacheJpaUtil.createQueryForCount(entityClass, panacheQuery, paramCount(params)), Object.class), params).getResultCount()) .onFailure(RuntimeException.class) .transform(x -> NamedQueryUtil.checkForNamedQueryMistake((RuntimeException) x, panacheQuery)); } public Uni<Long> count(Class<?> entityClass, String panacheQuery, Map<String, Object> params) { if (PanacheJpaUtil.isNamedQuery(panacheQuery)) return getSession(entityClass).chain(session -> { String namedQueryName = panacheQuery.substring(1); NamedQueryUtil.checkNamedQuery(entityClass, namedQueryName); return bindParameters(session.createNamedQuery(namedQueryName, Long.class), params).getSingleResult(); }); return getSession(entityClass).chain(session -> bindParameters( session.createSelectionQuery(PanacheJpaUtil.createQueryForCount(entityClass, panacheQuery, paramCount(params)), Object.class), params).getResultCount()) .onFailure(RuntimeException.class) .transform(x -> NamedQueryUtil.checkForNamedQueryMistake((RuntimeException) x, panacheQuery)); } public Uni<Long> count(Class<?> entityClass, String query, 
Parameters params) { return count(entityClass, query, params.map()); } public Uni<Boolean> exists(Class<?> entityClass) { return count(entityClass).map(c -> c > 0); } public Uni<Boolean> exists(Class<?> entityClass, String query, Object... params) { return count(entityClass, query, params).map(c -> c > 0); } public Uni<Boolean> exists(Class<?> entityClass, String query, Map<String, Object> params) { return count(entityClass, query, params).map(c -> c > 0); } public Uni<Boolean> exists(Class<?> entityClass, String query, Parameters params) { return count(entityClass, query, params).map(c -> c > 0); } public Uni<Long> deleteAll(Class<?> entityClass) { return getSession(entityClass).chain( session -> session.createMutationQuery("DELETE FROM " + PanacheJpaUtil.getEntityName(entityClass)) .executeUpdate() .map(Integer::longValue)); } public Uni<Boolean> deleteById(Class<?> entityClass, Object id) { // Impl note : we load the entity then delete it because it's the only implementation generic enough for any model, // and correct in all cases (composite key, graph of entities, ...). HQL cannot be directly used for these reasons. return findById(entityClass, id) .chain(entity -> { if (entity == null) { return Uni.createFrom().item(false); } return getSession(entityClass).chain(session -> session.remove(entity).map(v -> true)); }); } public Uni<Long> delete(Class<?> entityClass, String panacheQuery, Object... 
params) { if (PanacheJpaUtil.isNamedQuery(panacheQuery)) return getSession(entityClass).chain(session -> { String namedQueryName = panacheQuery.substring(1); NamedQueryUtil.checkNamedQuery(entityClass, namedQueryName); return bindParameters(session.createNamedQuery(namedQueryName), params).executeUpdate().map(Integer::longValue); }); return getSession(entityClass).chain(session -> bindParameters( session.createMutationQuery(PanacheJpaUtil.createDeleteQuery(entityClass, panacheQuery, paramCount(params))), params) .executeUpdate().map(Integer::longValue)) .onFailure(RuntimeException.class) .transform(x -> NamedQueryUtil.checkForNamedQueryMistake((RuntimeException) x, panacheQuery)); } public Uni<Long> delete(Class<?> entityClass, String panacheQuery, Map<String, Object> params) { if (PanacheJpaUtil.isNamedQuery(panacheQuery)) return getSession(entityClass).chain(session -> { String namedQueryName = panacheQuery.substring(1); NamedQueryUtil.checkNamedQuery(entityClass, namedQueryName); return bindParameters(session.createNamedQuery(namedQueryName), params).executeUpdate().map(Integer::longValue); }); return getSession(entityClass).chain(session -> bindParameters( session.createMutationQuery(PanacheJpaUtil.createDeleteQuery(entityClass, panacheQuery, paramCount(params))), params) .executeUpdate().map(Integer::longValue)) .onFailure(RuntimeException.class) .transform(x -> NamedQueryUtil.checkForNamedQueryMistake((RuntimeException) x, panacheQuery)); } public Uni<Long> delete(Class<?> entityClass, String query, Parameters params) { return delete(entityClass, query, params.map()); } public static IllegalStateException implementationInjectionMissing() { return new IllegalStateException( "This method is normally automatically overridden in subclasses: did you forget to annotate your entity with @Entity?"); } public Uni<Integer> executeUpdate(Class<?> entityClass, String panacheQuery, Object... 
params) { if (PanacheJpaUtil.isNamedQuery(panacheQuery)) return (Uni) getSession(entityClass).chain(session -> { String namedQueryName = panacheQuery.substring(1); NamedQueryUtil.checkNamedQuery(entityClass, namedQueryName); return bindParameters(session.createNamedQuery(namedQueryName), params).executeUpdate(); }); String updateQuery = PanacheJpaUtil.createUpdateQuery(entityClass, panacheQuery, paramCount(params)); return executeUpdate(updateQuery, params) .onFailure(RuntimeException.class) .transform(x -> NamedQueryUtil.checkForNamedQueryMistake((RuntimeException) x, panacheQuery)); } public Uni<Integer> executeUpdate(Class<?> entityClass, String panacheQuery, Map<String, Object> params) { if (PanacheJpaUtil.isNamedQuery(panacheQuery)) return (Uni) getSession(entityClass).chain(session -> { String namedQueryName = panacheQuery.substring(1); NamedQueryUtil.checkNamedQuery(entityClass, namedQueryName); return bindParameters(session.createNamedQuery(namedQueryName), params).executeUpdate(); }); String updateQuery = PanacheJpaUtil.createUpdateQuery(entityClass, panacheQuery, paramCount(params)); return executeUpdate(updateQuery, params) .onFailure(RuntimeException.class) .transform(x -> NamedQueryUtil.checkForNamedQueryMistake((RuntimeException) x, panacheQuery)); } public Uni<Integer> update(Class<?> entityClass, String query, Map<String, Object> params) { return executeUpdate(entityClass, query, params); } public Uni<Integer> update(Class<?> entityClass, String query, Parameters params) { return update(entityClass, query, params.map()); } public Uni<Integer> update(Class<?> entityClass, String query, Object... 
params) { return executeUpdate(entityClass, query, params); } // // Static helpers public Uni<Mutiny.Session> getSession() { return getSession(DEFAULT_PERSISTENCE_UNIT_NAME); } public Uni<Mutiny.Session> getSession(Class<?> clazz) { String className = clazz.getName(); String persistenceUnitName = entityToPersistenceUnit.get(className); return getSession(persistenceUnitName); } public Uni<Mutiny.Session> getSession(String persistenceUnitName) { return SessionOperations.getSession(persistenceUnitName); } public static Mutiny.Query<?> bindParameters(Mutiny.Query<?> query, Object[] params) { if (params == null || params.length == 0) return query; for (int i = 0; i < params.length; i++) { query.setParameter(i + 1, params[i]); } return query; } public static <T extends Mutiny.AbstractQuery> T bindParameters(T query, Object[] params) { if (params == null || params.length == 0) return query; for (int i = 0; i < params.length; i++) { query.setParameter(i + 1, params[i]); } return query; } public static <T extends Mutiny.AbstractQuery> T bindParameters(T query, Map<String, Object> params) { if (params == null || params.size() == 0) return query; for (Entry<String, Object> entry : params.entrySet()) { query.setParameter(entry.getKey(), entry.getValue()); } return query; } /** * Execute update on default persistence unit */ public Uni<Integer> executeUpdate(String query, Object... params) { return getSession(DEFAULT_PERSISTENCE_UNIT_NAME) .chain(session -> bindParameters(session.createMutationQuery(query), params) .executeUpdate()); } /** * Execute update on default persistence unit */ public Uni<Integer> executeUpdate(String query, Map<String, Object> params) { return getSession(DEFAULT_PERSISTENCE_UNIT_NAME) .chain(session -> bindParameters(session.createMutationQuery(query), params) .executeUpdate()); } }
AbstractJpaOperations
java
alibaba__druid
core/src/test/java/com/alibaba/druid/bvt/pool/TestOraclePrefetch.java
{ "start": 1071, "end": 3767 }
class ____ extends TestCase { private DruidDataSource dataSource; protected void setUp() throws Exception { dataSource = new DruidDataSource(); dataSource.setOracle(true); dataSource.setUrl("jdbc:mock:xxx"); dataSource.setDriver(new OracleMockDriver()); dataSource.setPoolPreparedStatements(true); dataSource.setConnectionProperties("defaultRowPrefetch=50"); // dataSource.setFilters("log4j"); } protected void tearDown() throws Exception { JdbcUtils.close(dataSource); } public void test_oracle() throws Exception { String sql = "SELECT 1"; OracleConnection oracleConn; OraclePreparedStatement oracleStmt; PreparedStatementHolder stmtHolder; { Connection conn = dataSource.getConnection(); { oracleConn = conn.unwrap(OracleConnection.class); assertEquals(50, oracleConn.getDefaultRowPrefetch()); } PreparedStatement stmt = conn.prepareStatement(sql); oracleStmt = stmt.unwrap(OraclePreparedStatement.class); assertEquals(50, oracleStmt.getRowPrefetch()); assertTrue(stmt.isWrapperFor(PreparedStatementHolder.class)); stmtHolder = stmt.unwrap(PreparedStatementHolder.class); assertNotNull(stmtHolder); assertEquals(0, stmtHolder.getHitCount()); ResultSet rs = stmt.executeQuery(); rs.next(); rs.close(); stmt.close(); conn.close(); } { Connection conn = dataSource.getConnection(); { OracleConnection oracleConn2 = conn.unwrap(OracleConnection.class); assertEquals(50, oracleConn2.getDefaultRowPrefetch()); assertSame(oracleConn, oracleConn2); } PreparedStatement stmt = conn.prepareStatement(sql); { PreparedStatementHolder stmtHolder2 = stmt.unwrap(PreparedStatementHolder.class); assertSame(stmtHolder2, stmtHolder); assertEquals(1, stmtHolder.getHitCount()); } ResultSet rs = stmt.executeQuery(); rs.next(); rs.close(); stmt.close(); { OraclePreparedStatement oracleStmt2 = stmt.unwrap(OraclePreparedStatement.class); assertSame(oracleStmt, oracleStmt2); assertEquals(2, oracleStmt.getRowPrefetch()); } conn.close(); } assertEquals(1, dataSource.getCachedPreparedStatementCount()); } }
TestOraclePrefetch
java
apache__kafka
connect/mirror/src/test/java/org/apache/kafka/connect/mirror/OffsetSyncStoreTest.java
{ "start": 1344, "end": 16650 }
/**
 * Test double for {@code OffsetSyncStore} that bypasses the KafkaBasedLog and lets
 * tests feed {@code OffsetSync} records directly via {@link #sync}. Note the
 * declaration name is masked as {@code ____} in this dump; usages below refer to
 * it as {@code FakeOffsetSyncStore}.
 */
class ____ extends OffsetSyncStore {
    // Tracks that start() ran before any simulated sync is delivered.
    private boolean startCalled = false;

    @Override
    public void start(boolean initializationMustReadToEnd) {
        startCalled = true;
        super.start(initializationMustReadToEnd);
    }

    @Override
    void backingStoreStart() {
        // do not start KafkaBasedLog
    }

    // simulate OffsetSync load as from KafkaBasedLog
    void sync(TopicPartition topicPartition, long upstreamOffset, long downstreamOffset) {
        assertTrue(startCalled); // sync in tests should only be called after store.start
        OffsetSync offsetSync = new OffsetSync(topicPartition, upstreamOffset, downstreamOffset);
        byte[] key = offsetSync.recordKey();
        byte[] value = offsetSync.recordValue();
        ConsumerRecord<byte[], byte[]> record = new ConsumerRecord<>("test.offsets.internal", 0, 3, key, value);
        handleRecord(record);
    }
}

// Basic upstream->downstream translation across syncs, resets, and rewinds.
@Test
public void testOffsetTranslation() {
    try (FakeOffsetSyncStore store = new FakeOffsetSyncStore()) {
        store.start(true);

        // Emit synced downstream offset without dead-reckoning
        store.sync(tp, 100, 200);
        assertEquals(OptionalLong.of(201), store.translateDownstream(null, tp, 150));

        // Translate exact offsets
        store.sync(tp, 150, 251);
        assertEquals(OptionalLong.of(251), store.translateDownstream(null, tp, 150));

        // Use old offset (5) prior to any sync -> can't translate
        assertEquals(OptionalLong.of(-1), store.translateDownstream(null, tp, 5));

        // Downstream offsets reset
        store.sync(tp, 200, 10);
        assertEquals(OptionalLong.of(10), store.translateDownstream(null, tp, 200));

        // Upstream offsets reset
        store.sync(tp, 20, 20);
        assertEquals(OptionalLong.of(20), store.translateDownstream(null, tp, 20));
    }
}

// Translation must return empty until start() completes, even for syncs read during startup.
@Test
public void testNoTranslationIfStoreNotStarted() {
    try (FakeOffsetSyncStore store = new FakeOffsetSyncStore() {
        @Override
        void backingStoreStart() {
            // read a sync during startup
            sync(tp, 100, 200);
            assertEquals(OptionalLong.empty(), translateDownstream(null, tp, 0));
            assertEquals(OptionalLong.empty(), translateDownstream(null, tp, 100));
            assertEquals(OptionalLong.empty(), translateDownstream(null, tp, 200));
        }
    }) {
        // no offsets exist and store is not started
        assertEquals(OptionalLong.empty(), store.translateDownstream(null, tp, 0));
        assertEquals(OptionalLong.empty(), store.translateDownstream(null, tp, 100));
        assertEquals(OptionalLong.empty(), store.translateDownstream(null, tp, 200));

        // After the store is started all offsets are visible
        store.start(true);
        assertEquals(OptionalLong.of(-1), store.translateDownstream(null, tp, 0));
        assertEquals(OptionalLong.of(200), store.translateDownstream(null, tp, 100));
        assertEquals(OptionalLong.of(201), store.translateDownstream(null, tp, 200));
    }
}

// A started store with no syncs has nothing to translate.
@Test
public void testNoTranslationIfNoOffsetSync() {
    try (FakeOffsetSyncStore store = new FakeOffsetSyncStore()) {
        store.start(true);
        assertEquals(OptionalLong.empty(), store.translateDownstream(null, tp, 0));
    }
}

// Exercises translation precision for historical offsets after a read-to-end startup.
@Test
public void testPastOffsetTranslation() {
    int maxOffsetLag = 10;
    try (FakeOffsetSyncStore store = new FakeOffsetSyncStore() {
        @Override
        void backingStoreStart() {
            for (int offset = 0; offset <= 1000; offset += maxOffsetLag) {
                sync(tp, offset, offset);
                assertSparseSyncInvariant(this, tp);
            }
        }
    }) {
        store.start(true);

        // After starting but before seeing new offsets, only the latest startup offset can be translated
        assertSparseSync(store, 1000, -1);

        for (int offset = 1000 + maxOffsetLag; offset <= 10000; offset += maxOffsetLag) {
            store.sync(tp, offset, offset);
            assertSparseSyncInvariant(store, tp);
        }

        // After seeing new offsets, we still cannot translate earlier than the latest startup offset
        // Invariant D: the last sync from the initial read-to-end is still stored
        assertSparseSync(store, 1000, -1);

        // We can translate offsets between the latest startup offset and the latest offset with variable precision
        // Older offsets are less precise and translation ends up farther apart
        assertSparseSync(store, 4840, 1000);
        assertSparseSync(store, 6760, 4840);
        assertSparseSync(store, 8680, 6760);
        assertSparseSync(store, 9160, 8680);
        assertSparseSync(store, 9640, 9160);
        assertSparseSync(store, 9880, 9640);
        assertSparseSync(store, 9940, 9880);
        assertSparseSync(store, 9970, 9940);
        assertSparseSync(store, 9990, 9970);
        assertSparseSync(store, 10000, 9990);

        // Rewinding upstream offsets should clear all historical syncs
        store.sync(tp, 1500, 11000);
        assertSparseSyncInvariant(store, tp);
        assertEquals(OptionalLong.of(-1), store.translateDownstream(null, tp, 1499));
        assertEquals(OptionalLong.of(11000), store.translateDownstream(null, tp, 1500));
        assertEquals(OptionalLong.of(11001), store.translateDownstream(null, tp, 2000));
    }
}

// this test has been written knowing the exact offsets syncs stored
@Test
public void testPastOffsetTranslationWithoutInitializationReadToEnd() {
    final int maxOffsetLag = 10;
    try (FakeOffsetSyncStore store = new FakeOffsetSyncStore() {
        @Override
        void backingStoreStart() {
            for (int offset = 0; offset <= 1000; offset += maxOffsetLag) {
                sync(tp, offset, offset);
                assertSparseSyncInvariant(this, tp);
            }
        }
    }) {
        store.start(false);

        // After starting but before seeing new offsets
        assertSparseSync(store, 480, 0);
        assertSparseSync(store, 720, 480);
        assertSparseSync(store, 1000, 990);

        for (int offset = 1000; offset <= 10000; offset += maxOffsetLag) {
            store.sync(tp, offset, offset);
            assertSparseSyncInvariant(store, tp);
        }

        // After seeing new offsets, 1000 was kicked out of the store, so
        // offsets before 3840 can only be translated to 1, only previously stored offset is 0
        assertSparseSync(store, 3840, 0);
        assertSparseSync(store, 7680, 3840);
        assertSparseSync(store, 8640, 7680);
        assertSparseSync(store, 9120, 8640);
        assertSparseSync(store, 9600, 9120);
        assertSparseSync(store, 9840, 9600);
        assertSparseSync(store, 9900, 9840);
        assertSparseSync(store, 9960, 9900);
        assertSparseSync(store, 9990, 9960);
        assertSparseSync(store, 10000, 9990);

        // Rewinding upstream offsets should clear all historical syncs
        store.sync(tp, 1500, 11000);
        assertSparseSyncInvariant(store, tp);
        assertEquals(OptionalLong.of(-1), store.translateDownstream(null, tp, 1499));
        assertEquals(OptionalLong.of(11000), store.translateDownstream(null, tp, 1500));
        assertEquals(OptionalLong.of(11001), store.translateDownstream(null, tp, 2000));
    }
}

@Test
public void testConsistentlySpacedSyncs() {
    // Under normal operation, the incoming syncs will be regularly spaced and the store should keep a set of syncs
    // which provide the best translation accuracy (expires as few syncs as possible)
    long iterations = 100;
    long maxStep = Long.MAX_VALUE / iterations;
    // Test a variety of steps (corresponding to the offset.lag.max configuration)
    for (long step = 1; step < maxStep; step = (step * 2) + 1) {
        for (long firstOffset = 0; firstOffset < 30; firstOffset++) {
            long finalStep = step;
            // Generate a stream of consistently spaced syncs
            // Each new sync should be added to the cache and expire at most one other sync from the cache
            assertSyncSpacingHasBoundedExpirations(firstOffset, LongStream.generate(() -> finalStep).limit(iterations), 1);
        }
    }
}

@Test
public void testRandomlySpacedSyncs() {
    Random random = new Random(0L); // arbitrary but deterministic seed
    int iterationBits = 10;
    long iterations = 1 << iterationBits;
    for (int n = 1; n < Long.SIZE - iterationBits; n++) {
        // A stream with at most n bits of difference between the largest and smallest steps
        // will expire n + 2 syncs at once in the worst case, because the sync store is laid out exponentially.
        long maximumDifference = 1L << n;
        int maximumExpirations = n + 2;
        assertSyncSpacingHasBoundedExpirations(0, random.longs(iterations, 0L, maximumDifference), maximumExpirations);
        // This holds true even if there is a larger minimum step size, such as caused by offsetLagMax
        long offsetLagMax = 1L << 16;
        assertSyncSpacingHasBoundedExpirations(0, random.longs(iterations, offsetLagMax, offsetLagMax + maximumDifference), maximumExpirations);
    }
}

@Test
public void testDroppedSyncsSpacing() {
    Random random = new Random(0L); // arbitrary but deterministic seed
    long iterations = 10000;
    long offsetLagMax = 100;
    // Half of the gaps will be offsetLagMax, and half will be double that, as if one intervening sync was dropped.
    LongStream stream = random.doubles()
            .mapToLong(d -> (d < 0.5 ? 2 : 1) * offsetLagMax)
            .limit(iterations);
    // This will cause up to 2 syncs to be discarded, because a sequence of two adjacent syncs followed by a
    // dropped sync will set up the following situation
    // before [d....d,c,b,a....]
    // after [e......e,d,a....]
    // and syncs b and c are discarded to make room for e and the demoted sync d.
    assertSyncSpacingHasBoundedExpirations(0, stream, 2);
}

/**
 * Simulate an OffsetSyncStore receiving a sequence of offset syncs as defined by their start offset and gaps.
 * After processing each simulated sync, assert that the store has not expired more unique syncs than the bound.
 * @param firstOffset First offset to give to the sync store after starting
 * @param steps A finite stream of gaps between syncs with some known distribution
 * @param maximumExpirations The maximum number of distinct syncs allowed to be expired after a single update.
 */
private void assertSyncSpacingHasBoundedExpirations(long firstOffset, LongStream steps, int maximumExpirations) {
    try (FakeOffsetSyncStore store = new FakeOffsetSyncStore()) {
        store.start(true);
        store.sync(tp, firstOffset, firstOffset);
        PrimitiveIterator.OfLong iterator = steps.iterator();
        long offset = firstOffset;
        int lastCount = 1;
        while (iterator.hasNext()) {
            offset += iterator.nextLong();
            assertTrue(offset >= 0, "Test is invalid, offset overflowed");
            store.sync(tp, offset, offset);
            // Invariant A: the latest sync is present
            assertEquals(offset, store.syncFor(tp, 0).upstreamOffset());
            // Invariant D: the earliest sync is present
            assertEquals(firstOffset, store.syncFor(tp, 63).upstreamOffset());
            int count = countDistinctStoredSyncs(store, tp);
            // We are adding one sync, so if the count didn't change, then exactly one sync expired.
            int expiredSyncs = lastCount - count + 1;
            assertTrue(expiredSyncs <= maximumExpirations,
                    "Store expired too many syncs: " + expiredSyncs + " > " + maximumExpirations
                            + " after receiving offset " + offset);
            lastCount = count;
        }
    }
}

// Asserts translation behavior immediately around a stored sync at syncOffset,
// where previousOffset is the next-older stored sync (-1 means "none").
private void assertSparseSync(FakeOffsetSyncStore store, long syncOffset, long previousOffset) {
    assertEquals(OptionalLong.of(previousOffset == -1 ? previousOffset : previousOffset + 1),
            store.translateDownstream(null, tp, syncOffset - 1));
    assertEquals(OptionalLong.of(syncOffset), store.translateDownstream(null, tp, syncOffset));
    assertEquals(OptionalLong.of(syncOffset + 1), store.translateDownstream(null, tp, syncOffset + 1));
    assertEquals(OptionalLong.of(syncOffset + 1), store.translateDownstream(null, tp, syncOffset + 2));
}

// Counts adjacent slots holding distinct sync objects (reference inequality).
private int countDistinctStoredSyncs(FakeOffsetSyncStore store, TopicPartition topicPartition) {
    int count = 1;
    for (int i = 1; i < OffsetSyncStore.SYNCS_PER_PARTITION; i++) {
        if (store.syncFor(topicPartition, i - 1) != store.syncFor(topicPartition, i)) {
            count++;
        }
    }
    return count;
}

// Checks the exponential-spacing invariants (B and C) between every pair of stored syncs.
private void assertSparseSyncInvariant(FakeOffsetSyncStore store, TopicPartition topicPartition) {
    for (int j = 0; j < OffsetSyncStore.SYNCS_PER_PARTITION; j++) {
        for (int i = 0; i < j; i++) {
            long jUpstream = store.syncFor(topicPartition, j).upstreamOffset();
            long iUpstream = store.syncFor(topicPartition, i).upstreamOffset();
            if (jUpstream == iUpstream) {
                continue;
            }
            int exponent = Math.max(i - 2, 0);
            long iUpstreamLowerBound = jUpstream + (1L << exponent);
            // Guard against overflow of the computed bound.
            if (iUpstreamLowerBound < 0) {
                continue;
            }
            assertTrue(
                    iUpstream >= iUpstreamLowerBound,
                    "Invariant C(" + i + "," + j + "): Upstream offset " + iUpstream + " at position " + i
                            + " should be at least " + iUpstreamLowerBound + " (" + jUpstream + " + 2^" + exponent + ")"
            );
            long iUpstreamUpperBound = jUpstream + (1L << j) - (1L << i);
            // Guard against overflow of the computed bound.
            if (iUpstreamUpperBound < 0) continue;
            assertTrue(
                    iUpstream <= iUpstreamUpperBound,
                    "Invariant B(" + i + "," + j + "): Upstream offset " + iUpstream + " at position " + i
                            + " should be no greater than " + iUpstreamUpperBound + " (" + jUpstream + " + 2^" + j + " - 2^" + i + ")"
            );
        }
    }
}
}
FakeOffsetSyncStore
java
apache__camel
core/camel-main/src/generated/java/org/apache/camel/main/AzureVaultConfigurationPropertiesConfigurer.java
{ "start": 711, "end": 12380 }
/**
 * Property configurer for {@code AzureVaultConfigurationProperties}.
 * NOTE: this file lives under src/generated — it is produced by the Camel build
 * tooling and should not be edited by hand. It maps property names (matched
 * case-insensitively when requested) to their setters, getters, and types.
 */
class ____ extends org.apache.camel.support.component.PropertyConfigurerSupport implements GeneratedPropertyConfigurer, ExtendedPropertyConfigurerGetter {

    // Immutable lookup of all supported option names to their declared types.
    private static final Map<String, Object> ALL_OPTIONS;
    static {
        Map<String, Object> map = new CaseInsensitiveMap();
        map.put("AwsVaultConfiguration", org.apache.camel.vault.AwsVaultConfiguration.class);
        map.put("AzureIdentityEnabled", boolean.class);
        map.put("AzureVaultConfiguration", org.apache.camel.vault.AzureVaultConfiguration.class);
        map.put("BlobAccessKey", java.lang.String.class);
        map.put("BlobAccountName", java.lang.String.class);
        map.put("BlobContainerName", java.lang.String.class);
        map.put("ClientId", java.lang.String.class);
        map.put("ClientSecret", java.lang.String.class);
        map.put("CyberArkVaultConfiguration", org.apache.camel.vault.CyberArkVaultConfiguration.class);
        map.put("EventhubConnectionString", java.lang.String.class);
        map.put("GcpVaultConfiguration", org.apache.camel.vault.GcpVaultConfiguration.class);
        map.put("HashicorpVaultConfiguration", org.apache.camel.vault.HashicorpVaultConfiguration.class);
        map.put("IBMSecretsManagerVaultConfiguration", org.apache.camel.vault.IBMSecretsManagerVaultConfiguration.class);
        map.put("KubernetesConfigMapVaultConfiguration", org.apache.camel.vault.KubernetesConfigMapVaultConfiguration.class);
        map.put("KubernetesVaultConfiguration", org.apache.camel.vault.KubernetesVaultConfiguration.class);
        map.put("RefreshEnabled", boolean.class);
        map.put("RefreshPeriod", long.class);
        map.put("Secrets", java.lang.String.class);
        map.put("SpringCloudConfigConfiguration", org.apache.camel.vault.SpringCloudConfigConfiguration.class);
        map.put("TenantId", java.lang.String.class);
        map.put("VaultName", java.lang.String.class);
        ALL_OPTIONS = map;
    }

    // Sets the named property on the target; returns false for unknown names.
    // Each property has two case labels: all-lowercase (used when ignoreCase) and camelCase.
    @Override
    public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
        org.apache.camel.main.AzureVaultConfigurationProperties target = (org.apache.camel.main.AzureVaultConfigurationProperties) obj;
        switch (ignoreCase ? name.toLowerCase() : name) {
        case "awsvaultconfiguration":
        case "awsVaultConfiguration": target.setAwsVaultConfiguration(property(camelContext, org.apache.camel.vault.AwsVaultConfiguration.class, value)); return true;
        case "azureidentityenabled":
        case "azureIdentityEnabled": target.setAzureIdentityEnabled(property(camelContext, boolean.class, value)); return true;
        case "azurevaultconfiguration":
        case "azureVaultConfiguration": target.setAzureVaultConfiguration(property(camelContext, org.apache.camel.vault.AzureVaultConfiguration.class, value)); return true;
        case "blobaccesskey":
        case "blobAccessKey": target.setBlobAccessKey(property(camelContext, java.lang.String.class, value)); return true;
        case "blobaccountname":
        case "blobAccountName": target.setBlobAccountName(property(camelContext, java.lang.String.class, value)); return true;
        case "blobcontainername":
        case "blobContainerName": target.setBlobContainerName(property(camelContext, java.lang.String.class, value)); return true;
        case "clientid":
        case "clientId": target.setClientId(property(camelContext, java.lang.String.class, value)); return true;
        case "clientsecret":
        case "clientSecret": target.setClientSecret(property(camelContext, java.lang.String.class, value)); return true;
        case "cyberarkvaultconfiguration":
        case "cyberArkVaultConfiguration": target.setCyberArkVaultConfiguration(property(camelContext, org.apache.camel.vault.CyberArkVaultConfiguration.class, value)); return true;
        case "eventhubconnectionstring":
        case "eventhubConnectionString": target.setEventhubConnectionString(property(camelContext, java.lang.String.class, value)); return true;
        case "gcpvaultconfiguration":
        case "gcpVaultConfiguration": target.setGcpVaultConfiguration(property(camelContext, org.apache.camel.vault.GcpVaultConfiguration.class, value)); return true;
        case "hashicorpvaultconfiguration":
        case "hashicorpVaultConfiguration": target.setHashicorpVaultConfiguration(property(camelContext, org.apache.camel.vault.HashicorpVaultConfiguration.class, value)); return true;
        case "ibmsecretsmanagervaultconfiguration":
        case "iBMSecretsManagerVaultConfiguration": target.setIBMSecretsManagerVaultConfiguration(property(camelContext, org.apache.camel.vault.IBMSecretsManagerVaultConfiguration.class, value)); return true;
        case "kubernetesconfigmapvaultconfiguration":
        case "kubernetesConfigMapVaultConfiguration": target.setKubernetesConfigMapVaultConfiguration(property(camelContext, org.apache.camel.vault.KubernetesConfigMapVaultConfiguration.class, value)); return true;
        case "kubernetesvaultconfiguration":
        case "kubernetesVaultConfiguration": target.setKubernetesVaultConfiguration(property(camelContext, org.apache.camel.vault.KubernetesVaultConfiguration.class, value)); return true;
        case "refreshenabled":
        case "refreshEnabled": target.setRefreshEnabled(property(camelContext, boolean.class, value)); return true;
        case "refreshperiod":
        case "refreshPeriod": target.setRefreshPeriod(property(camelContext, long.class, value)); return true;
        case "secrets": target.setSecrets(property(camelContext, java.lang.String.class, value)); return true;
        case "springcloudconfigconfiguration":
        case "springCloudConfigConfiguration": target.setSpringCloudConfigConfiguration(property(camelContext, org.apache.camel.vault.SpringCloudConfigConfiguration.class, value)); return true;
        case "tenantid":
        case "tenantId": target.setTenantId(property(camelContext, java.lang.String.class, value)); return true;
        case "vaultname":
        case "vaultName": target.setVaultName(property(camelContext, java.lang.String.class, value)); return true;
        default: return false;
        }
    }

    // Exposes the full option-name -> type map (shared, case-insensitive).
    @Override
    public Map<String, Object> getAllOptions(Object target) {
        return ALL_OPTIONS;
    }

    // Returns the declared type of the named option, or null if unknown.
    @Override
    public Class<?> getOptionType(String name, boolean ignoreCase) {
        switch (ignoreCase ? name.toLowerCase() : name) {
        case "awsvaultconfiguration":
        case "awsVaultConfiguration": return org.apache.camel.vault.AwsVaultConfiguration.class;
        case "azureidentityenabled":
        case "azureIdentityEnabled": return boolean.class;
        case "azurevaultconfiguration":
        case "azureVaultConfiguration": return org.apache.camel.vault.AzureVaultConfiguration.class;
        case "blobaccesskey":
        case "blobAccessKey": return java.lang.String.class;
        case "blobaccountname":
        case "blobAccountName": return java.lang.String.class;
        case "blobcontainername":
        case "blobContainerName": return java.lang.String.class;
        case "clientid":
        case "clientId": return java.lang.String.class;
        case "clientsecret":
        case "clientSecret": return java.lang.String.class;
        case "cyberarkvaultconfiguration":
        case "cyberArkVaultConfiguration": return org.apache.camel.vault.CyberArkVaultConfiguration.class;
        case "eventhubconnectionstring":
        case "eventhubConnectionString": return java.lang.String.class;
        case "gcpvaultconfiguration":
        case "gcpVaultConfiguration": return org.apache.camel.vault.GcpVaultConfiguration.class;
        case "hashicorpvaultconfiguration":
        case "hashicorpVaultConfiguration": return org.apache.camel.vault.HashicorpVaultConfiguration.class;
        case "ibmsecretsmanagervaultconfiguration":
        case "iBMSecretsManagerVaultConfiguration": return org.apache.camel.vault.IBMSecretsManagerVaultConfiguration.class;
        case "kubernetesconfigmapvaultconfiguration":
        case "kubernetesConfigMapVaultConfiguration": return org.apache.camel.vault.KubernetesConfigMapVaultConfiguration.class;
        case "kubernetesvaultconfiguration":
        case "kubernetesVaultConfiguration": return org.apache.camel.vault.KubernetesVaultConfiguration.class;
        case "refreshenabled":
        case "refreshEnabled": return boolean.class;
        case "refreshperiod":
        case "refreshPeriod": return long.class;
        case "secrets": return java.lang.String.class;
        case "springcloudconfigconfiguration":
        case "springCloudConfigConfiguration": return org.apache.camel.vault.SpringCloudConfigConfiguration.class;
        case "tenantid":
        case "tenantId": return java.lang.String.class;
        case "vaultname":
        case "vaultName": return java.lang.String.class;
        default: return null;
        }
    }

    // Reads the named option from the target; boolean options use is* getters.
    @Override
    public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
        org.apache.camel.main.AzureVaultConfigurationProperties target = (org.apache.camel.main.AzureVaultConfigurationProperties) obj;
        switch (ignoreCase ? name.toLowerCase() : name) {
        case "awsvaultconfiguration":
        case "awsVaultConfiguration": return target.getAwsVaultConfiguration();
        case "azureidentityenabled":
        case "azureIdentityEnabled": return target.isAzureIdentityEnabled();
        case "azurevaultconfiguration":
        case "azureVaultConfiguration": return target.getAzureVaultConfiguration();
        case "blobaccesskey":
        case "blobAccessKey": return target.getBlobAccessKey();
        case "blobaccountname":
        case "blobAccountName": return target.getBlobAccountName();
        case "blobcontainername":
        case "blobContainerName": return target.getBlobContainerName();
        case "clientid":
        case "clientId": return target.getClientId();
        case "clientsecret":
        case "clientSecret": return target.getClientSecret();
        case "cyberarkvaultconfiguration":
        case "cyberArkVaultConfiguration": return target.getCyberArkVaultConfiguration();
        case "eventhubconnectionstring":
        case "eventhubConnectionString": return target.getEventhubConnectionString();
        case "gcpvaultconfiguration":
        case "gcpVaultConfiguration": return target.getGcpVaultConfiguration();
        case "hashicorpvaultconfiguration":
        case "hashicorpVaultConfiguration": return target.getHashicorpVaultConfiguration();
        case "ibmsecretsmanagervaultconfiguration":
        case "iBMSecretsManagerVaultConfiguration": return target.getIBMSecretsManagerVaultConfiguration();
        case "kubernetesconfigmapvaultconfiguration":
        case "kubernetesConfigMapVaultConfiguration": return target.getKubernetesConfigMapVaultConfiguration();
        case "kubernetesvaultconfiguration":
        case "kubernetesVaultConfiguration": return target.getKubernetesVaultConfiguration();
        case "refreshenabled":
        case "refreshEnabled": return target.isRefreshEnabled();
        case "refreshperiod":
        case "refreshPeriod": return target.getRefreshPeriod();
        case "secrets": return target.getSecrets();
        case "springcloudconfigconfiguration":
        case "springCloudConfigConfiguration": return target.getSpringCloudConfigConfiguration();
        case "tenantid":
        case "tenantId": return target.getTenantId();
        case "vaultname":
        case "vaultName": return target.getVaultName();
        default: return null;
        }
    }
}
AzureVaultConfigurationPropertiesConfigurer
java
apache__flink
flink-runtime/src/test/java/org/apache/flink/runtime/operators/drivers/AllReduceDriverTest.java
{ "start": 1913, "end": 12498 }
class ____ { @Test void testAllReduceDriverImmutableEmpty() { try { TestTaskContext<ReduceFunction<Tuple2<String, Integer>>, Tuple2<String, Integer>> context = new TestTaskContext< ReduceFunction<Tuple2<String, Integer>>, Tuple2<String, Integer>>(); List<Tuple2<String, Integer>> data = DriverTestData.createReduceImmutableData(); TypeInformation<Tuple2<String, Integer>> typeInfo = TypeExtractor.getForObject(data.get(0)); MutableObjectIterator<Tuple2<String, Integer>> input = EmptyMutableObjectIterator.get(); context.setDriverStrategy(DriverStrategy.ALL_REDUCE); context.setInput1(input, typeInfo.createSerializer(new SerializerConfigImpl())); context.setCollector(new DiscardingOutputCollector<Tuple2<String, Integer>>()); AllReduceDriver<Tuple2<String, Integer>> driver = new AllReduceDriver<Tuple2<String, Integer>>(); driver.setup(context); driver.prepare(); driver.run(); } catch (Exception e) { System.err.println(e.getMessage()); e.printStackTrace(); fail(e.getMessage()); } } @Test void testAllReduceDriverImmutable() { try { { TestTaskContext<ReduceFunction<Tuple2<String, Integer>>, Tuple2<String, Integer>> context = new TestTaskContext< ReduceFunction<Tuple2<String, Integer>>, Tuple2<String, Integer>>(); List<Tuple2<String, Integer>> data = DriverTestData.createReduceImmutableData(); TypeInformation<Tuple2<String, Integer>> typeInfo = TypeExtractor.getForObject(data.get(0)); MutableObjectIterator<Tuple2<String, Integer>> input = new RegularToMutableObjectIterator<Tuple2<String, Integer>>( data.iterator(), typeInfo.createSerializer(new SerializerConfigImpl())); GatheringCollector<Tuple2<String, Integer>> result = new GatheringCollector<Tuple2<String, Integer>>( typeInfo.createSerializer(new SerializerConfigImpl())); context.setDriverStrategy(DriverStrategy.ALL_REDUCE); context.setInput1(input, typeInfo.createSerializer(new SerializerConfigImpl())); context.setCollector(result); context.setUdf(new ConcatSumFirstReducer()); AllReduceDriver<Tuple2<String, Integer>> 
driver = new AllReduceDriver<Tuple2<String, Integer>>(); driver.setup(context); driver.prepare(); driver.run(); Tuple2<String, Integer> res = result.getList().get(0); char[] foundString = res.f0.toCharArray(); Arrays.sort(foundString); char[] expectedString = "abcddeeeffff".toCharArray(); Arrays.sort(expectedString); assertThat(foundString).isEqualTo(expectedString); assertThat(res.f1).isEqualTo(78); } { TestTaskContext<ReduceFunction<Tuple2<String, Integer>>, Tuple2<String, Integer>> context = new TestTaskContext< ReduceFunction<Tuple2<String, Integer>>, Tuple2<String, Integer>>(); List<Tuple2<String, Integer>> data = DriverTestData.createReduceImmutableData(); TypeInformation<Tuple2<String, Integer>> typeInfo = TypeExtractor.getForObject(data.get(0)); MutableObjectIterator<Tuple2<String, Integer>> input = new RegularToMutableObjectIterator<Tuple2<String, Integer>>( data.iterator(), typeInfo.createSerializer(new SerializerConfigImpl())); GatheringCollector<Tuple2<String, Integer>> result = new GatheringCollector<Tuple2<String, Integer>>( typeInfo.createSerializer(new SerializerConfigImpl())); context.setDriverStrategy(DriverStrategy.ALL_REDUCE); context.setInput1(input, typeInfo.createSerializer(new SerializerConfigImpl())); context.setCollector(result); context.setUdf(new ConcatSumSecondReducer()); AllReduceDriver<Tuple2<String, Integer>> driver = new AllReduceDriver<Tuple2<String, Integer>>(); driver.setup(context); driver.prepare(); driver.run(); Tuple2<String, Integer> res = result.getList().get(0); char[] foundString = res.f0.toCharArray(); Arrays.sort(foundString); char[] expectedString = "abcddeeeffff".toCharArray(); Arrays.sort(expectedString); assertThat(foundString).isEqualTo(expectedString); assertThat(res.f1).isEqualTo(78); } } catch (Exception e) { System.err.println(e.getMessage()); e.printStackTrace(); fail(e.getMessage()); } } @Test void testAllReduceDriverMutable() { try { { TestTaskContext< ReduceFunction<Tuple2<StringValue, IntValue>>, 
Tuple2<StringValue, IntValue>> context = new TestTaskContext< ReduceFunction<Tuple2<StringValue, IntValue>>, Tuple2<StringValue, IntValue>>(); List<Tuple2<StringValue, IntValue>> data = DriverTestData.createReduceMutableData(); TypeInformation<Tuple2<StringValue, IntValue>> typeInfo = TypeExtractor.getForObject(data.get(0)); MutableObjectIterator<Tuple2<StringValue, IntValue>> input = new RegularToMutableObjectIterator<Tuple2<StringValue, IntValue>>( data.iterator(), typeInfo.createSerializer(new SerializerConfigImpl())); GatheringCollector<Tuple2<StringValue, IntValue>> result = new GatheringCollector<Tuple2<StringValue, IntValue>>( typeInfo.createSerializer(new SerializerConfigImpl())); context.setDriverStrategy(DriverStrategy.ALL_REDUCE); context.setInput1(input, typeInfo.createSerializer(new SerializerConfigImpl())); context.setCollector(result); context.setUdf(new ConcatSumFirstMutableReducer()); AllReduceDriver<Tuple2<StringValue, IntValue>> driver = new AllReduceDriver<Tuple2<StringValue, IntValue>>(); driver.setup(context); driver.prepare(); driver.run(); Tuple2<StringValue, IntValue> res = result.getList().get(0); char[] foundString = res.f0.getValue().toCharArray(); Arrays.sort(foundString); char[] expectedString = "abcddeeeffff".toCharArray(); Arrays.sort(expectedString); assertThat(foundString).isEqualTo(expectedString); assertThat(res.f1.getValue()).isEqualTo(78); } { TestTaskContext< ReduceFunction<Tuple2<StringValue, IntValue>>, Tuple2<StringValue, IntValue>> context = new TestTaskContext< ReduceFunction<Tuple2<StringValue, IntValue>>, Tuple2<StringValue, IntValue>>(); List<Tuple2<StringValue, IntValue>> data = DriverTestData.createReduceMutableData(); TypeInformation<Tuple2<StringValue, IntValue>> typeInfo = TypeExtractor.getForObject(data.get(0)); MutableObjectIterator<Tuple2<StringValue, IntValue>> input = new RegularToMutableObjectIterator<Tuple2<StringValue, IntValue>>( data.iterator(), typeInfo.createSerializer(new SerializerConfigImpl())); 
GatheringCollector<Tuple2<StringValue, IntValue>> result = new GatheringCollector<Tuple2<StringValue, IntValue>>( typeInfo.createSerializer(new SerializerConfigImpl())); context.setDriverStrategy(DriverStrategy.ALL_REDUCE); context.setInput1(input, typeInfo.createSerializer(new SerializerConfigImpl())); context.setCollector(result); context.setUdf(new ConcatSumSecondMutableReducer()); AllReduceDriver<Tuple2<StringValue, IntValue>> driver = new AllReduceDriver<Tuple2<StringValue, IntValue>>(); driver.setup(context); driver.prepare(); driver.run(); Tuple2<StringValue, IntValue> res = result.getList().get(0); char[] foundString = res.f0.getValue().toCharArray(); Arrays.sort(foundString); char[] expectedString = "abcddeeeffff".toCharArray(); Arrays.sort(expectedString); assertThat(foundString).isEqualTo(expectedString); assertThat(res.f1.getValue()).isEqualTo(78); } } catch (Exception e) { System.err.println(e.getMessage()); e.printStackTrace(); fail(e.getMessage()); } } // -------------------------------------------------------------------------------------------- // Test UDFs // -------------------------------------------------------------------------------------------- private static final
AllReduceDriverTest
java
quarkusio__quarkus
extensions/resteasy-classic/resteasy/deployment/src/test/java/io/quarkus/resteasy/test/security/authzpolicy/AbstractAuthorizationPolicyTest.java
{ "start": 6959, "end": 9981 }
class ____ @AuthorizationPolicy(policy = "permit-user") // method has no annotation, therefore expect to permit only the user RestAssured.given().auth().preemptive().basic("admin", "admin") .get("/authorization-policy-class-roles-allowed-method/no-roles-allowed") .then().statusCode(403); RestAssured.given().auth().preemptive().basic("user", "user") .get("/authorization-policy-class-roles-allowed-method/no-roles-allowed") .then().statusCode(200).body(Matchers.equalTo("user")); } @Test public void testCombinationOfAuthzPolicyAndPathConfigPolicies() { // ViewerAugmentingPolicy adds 'admin' role to the viewer // here we test that both @AuthorizationPolicy and path-matching policies work together // viewer role is required by (JAX-RS) path-matching HTTP policies, RestAssured.given().auth().preemptive().basic("admin", "admin") .get("/authz-policy-and-path-matching-policies/jax-rs-path-matching-http-perm") .then().statusCode(200).body(Matchers.equalTo("true")); RestAssured.given().auth().preemptive().basic("viewer", "viewer") .get("/authz-policy-and-path-matching-policies/jax-rs-path-matching-http-perm") .then().statusCode(200).body(Matchers.equalTo("true")); RestAssured.given().auth().preemptive().basic("user", "user") .get("/authz-policy-and-path-matching-policies/jax-rs-path-matching-http-perm") .then().statusCode(403); RestAssured.given().auth().preemptive().basic("admin", "admin") .get("/authz-policy-and-path-matching-policies/path-matching-http-perm") .then().statusCode(200).body(Matchers.equalTo("true")); RestAssured.given().auth().preemptive().basic("viewer", "viewer") .get("/authz-policy-and-path-matching-policies/path-matching-http-perm") .then().statusCode(200).body(Matchers.equalTo("true")); RestAssured.given().auth().preemptive().basic("user", "user") .get("/authz-policy-and-path-matching-policies/path-matching-http-perm") .then().statusCode(403); // endpoint is annotated with @RolesAllowed("admin"), therefore class-level @AuthorizationPolicy is not applied 
RestAssured.given().auth().preemptive().basic("admin", "admin") .get("/authz-policy-and-path-matching-policies/roles-allowed-annotation") .then().statusCode(200).body(Matchers.equalTo("admin")); RestAssured.given().auth().preemptive().basic("viewer", "viewer") .get("/authz-policy-and-path-matching-policies/roles-allowed-annotation") .then().statusCode(403); RestAssured.given().auth().preemptive().basic("user", "user") .get("/authz-policy-and-path-matching-policies/roles-allowed-annotation") .then().statusCode(403); } }
with
java
elastic__elasticsearch
libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserFailureTests.java
{ "start": 654, "end": 5967 }
class ____ extends ESTestCase { public void testParserSyntaxFailures() { PolicyParserException ppe = expectThrows( PolicyParserException.class, () -> new PolicyParser(new ByteArrayInputStream("[]".getBytes(StandardCharsets.UTF_8)), "test-failure-policy.yaml", false) .parsePolicy() ); assertEquals("[1:1] policy parsing error for [test-failure-policy.yaml]: expected object <scope name>", ppe.getMessage()); } public void testEntitlementDoesNotExist() { PolicyParserException ppe = expectThrows(PolicyParserException.class, () -> new PolicyParser(new ByteArrayInputStream(""" entitlement-module-name: - does_not_exist: {} """.getBytes(StandardCharsets.UTF_8)), "test-failure-policy.yaml", false).parsePolicy()); assertEquals( "[2:5] policy parsing error for [test-failure-policy.yaml] in scope [entitlement-module-name]: " + "unknown entitlement type [does_not_exist]", ppe.getMessage() ); } public void testEntitlementMissingParameter() { PolicyParserException ppe = expectThrows(PolicyParserException.class, () -> new PolicyParser(new ByteArrayInputStream(""" entitlement-module-name: - files: - path: test-path """.getBytes(StandardCharsets.UTF_8)), "test-failure-policy.yaml", false).parsePolicy()); assertEquals( "[2:5] policy parsing error for [test-failure-policy.yaml] in scope [entitlement-module-name] " + "for entitlement type [files]: files entitlement must contain 'mode' for every listed file", ppe.getMessage() ); } public void testEntitlementMissingDependentParameter() { PolicyParserException ppe = expectThrows(PolicyParserException.class, () -> new PolicyParser(new ByteArrayInputStream(""" entitlement-module-name: - files: - relative_path: test-path mode: read """.getBytes(StandardCharsets.UTF_8)), "test-failure-policy.yaml", false).parsePolicy()); assertEquals( "[2:5] policy parsing error for [test-failure-policy.yaml] in scope [entitlement-module-name] " + "for entitlement type [files]: files entitlement with a 'relative_path' must specify 'relative_to'", 
ppe.getMessage() ); } public void testEntitlementMutuallyExclusiveParameters() { PolicyParserException ppe = expectThrows(PolicyParserException.class, () -> new PolicyParser(new ByteArrayInputStream(""" entitlement-module-name: - files: - relative_path: test-path path: test-path mode: read """.getBytes(StandardCharsets.UTF_8)), "test-failure-policy.yaml", false).parsePolicy()); assertEquals( "[2:5] policy parsing error for [test-failure-policy.yaml] in scope [entitlement-module-name] " + "for entitlement type [files]: a files entitlement entry must contain one of " + "[path, relative_path, path_setting]", ppe.getMessage() ); } public void testEntitlementAtLeastOneParameter() { PolicyParserException ppe = expectThrows(PolicyParserException.class, () -> new PolicyParser(new ByteArrayInputStream(""" entitlement-module-name: - files: - mode: read """.getBytes(StandardCharsets.UTF_8)), "test-failure-policy.yaml", false).parsePolicy()); assertEquals( "[2:5] policy parsing error for [test-failure-policy.yaml] in scope [entitlement-module-name] " + "for entitlement type [files]: a files entitlement entry must contain one of " + "[path, relative_path, path_setting]", ppe.getMessage() ); } public void testEntitlementExtraneousParameter() { PolicyParserException ppe = expectThrows(PolicyParserException.class, () -> new PolicyParser(new ByteArrayInputStream(""" entitlement-module-name: - files: - path: test-path mode: read extra: test """.getBytes(StandardCharsets.UTF_8)), "test-failure-policy.yaml", false).parsePolicy()); assertEquals( "[2:5] policy parsing error for [test-failure-policy.yaml] in scope [entitlement-module-name] " + "for entitlement type [files]: unknown key(s) [{extra=test}] in a listed file for files entitlement", ppe.getMessage() ); } public void testEntitlementIsNotForExternalPlugins() { PolicyParserException ppe = expectThrows(PolicyParserException.class, () -> new PolicyParser(new ByteArrayInputStream(""" entitlement-module-name: - create_class_loader 
""".getBytes(StandardCharsets.UTF_8)), "test-failure-policy.yaml", true).parsePolicy()); assertEquals( "[2:5] policy parsing error for [test-failure-policy.yaml]: entitlement type [create_class_loader] is allowed only on modules", ppe.getMessage() ); } }
PolicyParserFailureTests
java
spring-projects__spring-framework
spring-test/src/test/java/org/springframework/test/web/servlet/samples/client/standalone/FrameworkExtensionTests.java
{ "start": 2155, "end": 3021 }
class ____ { private final WebTestClient client = MockMvcWebTestClient.bindToController(new SampleController()) .apply(defaultSetup()) .build(); @Test public void fooHeader() { this.client.mutateWith(headers().foo("a=b")) .get().uri("/") .exchange() .expectBody(String.class).isEqualTo("Foo"); } @Test public void barHeader() { this.client.mutateWith(headers().bar("a=b")) .get().uri("/") .exchange() .expectBody(String.class).isEqualTo("Bar"); } private static TestMockMvcConfigurer defaultSetup() { return new TestMockMvcConfigurer(); } private static TestWebTestClientConfigurer headers() { return new TestWebTestClientConfigurer(); } /** * Test WebTestClientConfigurer that re-creates the MockMvcHttpConnector * with a {@code TestRequestPostProcessor}. */ private static
FrameworkExtensionTests
java
playframework__playframework
documentation/manual/working/commonGuide/build/code/javaguide/common/build/controllers/HomeController.java
{ "start": 303, "end": 401 }
class ____ extends Controller { public Result index() { return ok("admin"); } }
HomeController
java
quarkusio__quarkus
extensions/websockets-next/deployment/src/test/java/io/quarkus/websockets/next/test/telemetry/endpoints/onbinarymessage/SingleDtoReceived_SingleDtoResponse_Endpoint.java
{ "start": 244, "end": 417 }
class ____ { @OnBinaryMessage public Dto onMessage(Dto dto) { return new Dto("echo 0: " + dto.property()); } }
SingleDtoReceived_SingleDtoResponse_Endpoint
java
alibaba__nacos
common/src/main/java/com/alibaba/nacos/common/remote/exception/ConnectionBusyException.java
{ "start": 830, "end": 1154 }
class ____ extends RemoteException { private static final int CONNECTION_BUSY = 601; public ConnectionBusyException(String msg) { super(CONNECTION_BUSY, msg); } public ConnectionBusyException(Throwable throwable) { super(CONNECTION_BUSY, throwable); } }
ConnectionBusyException
java
elastic__elasticsearch
server/src/main/java/org/elasticsearch/index/query/GeoValidationMethod.java
{ "start": 769, "end": 1112 }
enum ____ used to determine how to deal with invalid geo coordinates in geo related * queries: * * On STRICT validation invalid coordinates cause an exception to be thrown. * On IGNORE_MALFORMED invalid coordinates are being accepted. * On COERCE invalid coordinates are being corrected to the most likely valid coordinate. * */ public
is
java
apache__kafka
clients/src/test/java/org/apache/kafka/common/config/AbstractConfigTest.java
{ "start": 18058, "end": 19069 }
class ____ where some classes are not visible to thread context classloader Thread.currentThread().setContextClassLoader(restrictedClassLoader); // Properties specified as classes should succeed testConfig = new ClassTestConfig(ClassTestConfig.RESTRICTED_CLASS, Collections.singletonList(ClassTestConfig.RESTRICTED_CLASS)); testConfig.checkInstances(ClassTestConfig.RESTRICTED_CLASS, ClassTestConfig.RESTRICTED_CLASS); testConfig = new ClassTestConfig(ClassTestConfig.RESTRICTED_CLASS, Arrays.asList(ClassTestConfig.VISIBLE_CLASS, ClassTestConfig.RESTRICTED_CLASS)); testConfig.checkInstances(ClassTestConfig.RESTRICTED_CLASS, ClassTestConfig.VISIBLE_CLASS, ClassTestConfig.RESTRICTED_CLASS); // Properties specified as classNames should fail to load classes assertThrows(ConfigException.class, () -> new ClassTestConfig(ClassTestConfig.RESTRICTED_CLASS.getName(), null), "Config created with
overrides
java
google__error-prone
core/src/test/java/com/google/errorprone/bugpatterns/FieldCanBeStaticTest.java
{ "start": 1398, "end": 1725 }
class ____ { private final Duration myDuration = Duration.ofMillis(1); public Duration d() { return this.myDuration; } } """) .addOutputLines( "Test.java", """ import java.time.Duration;
Test
java
google__error-prone
core/src/main/java/com/google/errorprone/bugpatterns/time/JodaConstructors.java
{ "start": 2157, "end": 5251 }
class ____ extends BugChecker implements NewClassTreeMatcher { private static final Matcher<ExpressionTree> SELF_USAGE = packageStartsWith("org.joda.time"); private static final Matcher<ExpressionTree> DURATION_CTOR = constructor().forClass("org.joda.time.Duration").withParameters("long"); private static final Matcher<ExpressionTree> INSTANT_CTOR_NO_ARG = constructor().forClass("org.joda.time.Instant").withNoParameters(); private static final Matcher<ExpressionTree> INSTANT_CTOR_LONG_ARG = constructor().forClass("org.joda.time.Instant").withParameters("long"); private static final Matcher<ExpressionTree> DATE_TIME_CTOR_NO_ARG = constructor().forClass("org.joda.time.DateTime").withNoParameters(); private static final Matcher<ExpressionTree> DATE_TIME_CTORS_ONE_ARG = anyOf( constructor() .forClass("org.joda.time.DateTime") .withParameters("org.joda.time.Chronology"), constructor() .forClass("org.joda.time.DateTime") .withParameters("org.joda.time.DateTimeZone")); @Override public Description matchNewClass(NewClassTree tree, VisitorState state) { // Allow usage by JodaTime itself if (SELF_USAGE.matches(tree, state)) { return Description.NO_MATCH; } // ban new Duration(long) if (DURATION_CTOR.matches(tree, state)) { SuggestedFix fix = SuggestedFix.replace( getStartPosition(tree), getStartPosition(getOnlyElement(tree.getArguments())), state.getSourceForNode(tree.getIdentifier()) + ".millis("); return describeMatch(tree, fix); } // ban new Instant() if (INSTANT_CTOR_NO_ARG.matches(tree, state)) { SuggestedFix fix = SuggestedFix.replace(tree, getIdentifierSource(tree, state) + ".now()"); return describeMatch(tree, fix); } // ban new Instant(long) if (INSTANT_CTOR_LONG_ARG.matches(tree, state)) { SuggestedFix fix = SuggestedFix.replace( getStartPosition(tree), getStartPosition(getOnlyElement(tree.getArguments())), getIdentifierSource(tree, state) + ".ofEpochMilli("); return describeMatch(tree, fix); } // ban new DateTime() if (DATE_TIME_CTOR_NO_ARG.matches(tree, state)) { 
SuggestedFix fix = SuggestedFix.replace(tree, getIdentifierSource(tree, state) + ".now()"); return describeMatch(tree, fix); } // ban new DateTime(DateTimeZone) and new DateTime(Chronology) if (DATE_TIME_CTORS_ONE_ARG.matches(tree, state)) { SuggestedFix fix = SuggestedFix.replace( getStartPosition(tree), getStartPosition(getOnlyElement(tree.getArguments())), getIdentifierSource(tree, state) + ".now("); return describeMatch(tree, fix); } // otherwise, no match return Description.NO_MATCH; } private static String getIdentifierSource(NewClassTree tree, VisitorState state) { return state.getSourceForNode(tree.getIdentifier()); } }
JodaConstructors
java
apache__hadoop
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestViewDistributedFileSystemWithMountLinks.java
{ "start": 1751, "end": 6306 }
class ____ extends TestViewFileSystemOverloadSchemeWithHdfsScheme { @BeforeEach @Override public void setUp() throws IOException { super.setUp(); Configuration conf = getConf(); conf.setBoolean(DFSConfigKeys.DFS_NAMENODE_DELEGATION_TOKEN_ALWAYS_USE_KEY, true); conf.setInt( CommonConfigurationKeysPublic.IPC_CLIENT_CONNECT_MAX_RETRIES_KEY, 1); conf.set("fs.hdfs.impl", ViewDistributedFileSystem.class.getName()); conf.setBoolean(CONFIG_VIEWFS_IGNORE_PORT_IN_MOUNT_TABLE_NAME, CONFIG_VIEWFS_IGNORE_PORT_IN_MOUNT_TABLE_NAME_DEFAULT); URI defaultFSURI = URI.create(conf.get(CommonConfigurationKeys.FS_DEFAULT_NAME_KEY)); ConfigUtil.addLinkFallback(conf, defaultFSURI.getAuthority(), new Path(defaultFSURI.toString() + "/").toUri()); setConf(conf); } @Test @Timeout(value = 30) public void testCreateOnRoot() throws Exception { testCreateOnRoot(true); } @Test @Timeout(value = 30) public void testMountLinkWithNonExistentLink() throws Exception { testMountLinkWithNonExistentLink(false); } @Test public void testRenameOnInternalDirWithFallback() throws Exception { Configuration conf = getConf(); URI defaultFSURI = URI.create(conf.get(CommonConfigurationKeys.FS_DEFAULT_NAME_KEY)); final Path hdfsTargetPath1 = new Path(defaultFSURI + "/HDFSUser"); final Path hdfsTargetPath2 = new Path(defaultFSURI + "/NewHDFSUser/next"); ViewFsTestSetup.addMountLinksToConf(defaultFSURI.getAuthority(), new String[] {"/HDFSUser", "/NewHDFSUser/next"}, new String[] {hdfsTargetPath1.toUri().toString(), hdfsTargetPath2.toUri().toString()}, conf); //Making sure parent dir structure as mount points available in fallback. 
try (DistributedFileSystem dfs = new DistributedFileSystem()) { dfs.initialize(defaultFSURI, conf); dfs.mkdirs(hdfsTargetPath1); dfs.mkdirs(hdfsTargetPath2); } try (FileSystem fs = FileSystem.get(conf)) { Path src = new Path("/newFileOnRoot"); Path dst = new Path("/newFileOnRoot1"); fs.create(src).close(); verifyRename(fs, src, dst); src = new Path("/newFileOnRoot1"); dst = new Path("/NewHDFSUser/newFileOnRoot"); fs.mkdirs(dst.getParent()); verifyRename(fs, src, dst); src = new Path("/NewHDFSUser/newFileOnRoot"); dst = new Path("/NewHDFSUser/newFileOnRoot1"); verifyRename(fs, src, dst); src = new Path("/NewHDFSUser/newFileOnRoot1"); dst = new Path("/newFileOnRoot"); verifyRename(fs, src, dst); src = new Path("/HDFSUser/newFileOnRoot1"); dst = new Path("/HDFSUser/newFileOnRoot"); fs.create(src).close(); verifyRename(fs, src, dst); } } @Test public void testRenameWhenDstOnInternalDirWithFallback() throws Exception { Configuration conf = getConf(); URI defaultFSURI = URI.create(conf.get(CommonConfigurationKeys.FS_DEFAULT_NAME_KEY)); final Path hdfsTargetPath1 = new Path(defaultFSURI + "/HDFSUser"); final Path hdfsTargetPath2 = new Path(defaultFSURI + "/dstNewHDFSUser" + "/next"); ViewFsTestSetup.addMountLinksToConf(defaultFSURI.getAuthority(), new String[] {"/InternalDirDoesNotExistInFallback/test", "/NewHDFSUser/next/next1"}, new String[] {hdfsTargetPath1.toUri().toString(), hdfsTargetPath2.toUri().toString()}, conf); try (DistributedFileSystem dfs = new DistributedFileSystem()) { dfs.initialize(defaultFSURI, conf); dfs.mkdirs(hdfsTargetPath1); dfs.mkdirs(hdfsTargetPath2); dfs.mkdirs(new Path("/NewHDFSUser/next/next1")); } try (FileSystem fs = FileSystem.get(conf)) { Path src = new Path("/newFileOnRoot"); Path dst = new Path("/NewHDFSUser/next"); fs.create(src).close(); verifyRename(fs, src, dst); src = new Path("/newFileOnRoot"); dst = new Path("/NewHDFSUser/next/file"); fs.create(src).close(); verifyRename(fs, src, dst); src = new Path("/newFileOnRoot"); dst = new 
Path("/InternalDirDoesNotExistInFallback/file"); fs.create(src).close(); // If fallback does not have same structure as internal, rename will fail. assertFalse(fs.rename(src, dst)); } } private void verifyRename(FileSystem fs, Path src, Path dst) throws IOException { fs.rename(src, dst); assertFalse(fs.exists(src)); assertTrue(fs.exists(dst)); } }
TestViewDistributedFileSystemWithMountLinks
java
quarkusio__quarkus
extensions/jackson/deployment/src/test/java/io/quarkus/jackson/deployment/JacksonFieldNamePropertyNameStrategyTest.java
{ "start": 365, "end": 930 }
class ____ { @RegisterExtension static final QuarkusUnitTest config = new QuarkusUnitTest() .withApplicationRoot((jar) -> jar.addClass(Pojo.class)) .withConfigurationResource("application-field-name-property-name-strategy.properties"); @Inject ObjectMapper objectMapper; @Test public void test() throws JsonProcessingException { Assertions.assertThat(objectMapper.writeValueAsString(new Pojo("test"))).isEqualTo("{\"test-property\":\"test\"}"); } public static
JacksonFieldNamePropertyNameStrategyTest
java
apache__flink
flink-clients/src/test/java/org/apache/flink/client/program/rest/RestClusterClientTest.java
{ "start": 18679, "end": 19456 }
class ____ extends TestHandler< SavepointDisposalRequest, TriggerResponse, EmptyMessageParameters> { private TestSavepointDisposalTriggerHandler() { super(SavepointDisposalTriggerHeaders.getInstance()); } @Override protected CompletableFuture<TriggerResponse> handleRequest( @Nonnull HandlerRequest<SavepointDisposalRequest> request, @Nonnull DispatcherGateway gateway) { assertThat(request.getRequestBody().getSavepointPath()).isEqualTo(savepointPath); return CompletableFuture.completedFuture(new TriggerResponse(triggerId)); } } private
TestSavepointDisposalTriggerHandler
java
apache__kafka
streams/integration-tests/src/test/java/org/apache/kafka/streams/integration/KafkaStreamsTelemetryIntegrationTest.java
{ "start": 30471, "end": 32034 }
class ____ implements KafkaClientSupplier { @Override public Producer<byte[], byte[]> getProducer(final Map<String, Object> config) { return new KafkaProducer<>(config, new ByteArraySerializer(), new ByteArraySerializer()); } @Override public Consumer<byte[], byte[]> getConsumer(final Map<String, Object> config) { final TestingMetricsInterceptingConsumer<byte[], byte[]> consumer = new TestingMetricsInterceptingConsumer<>(config, new ByteArrayDeserializer(), new ByteArrayDeserializer()); INTERCEPTING_CONSUMERS.add(consumer); return consumer; } @Override public Consumer<byte[], byte[]> getRestoreConsumer(final Map<String, Object> config) { return new KafkaConsumer<>(config, new ByteArrayDeserializer(), new ByteArrayDeserializer()); } @Override public Consumer<byte[], byte[]> getGlobalConsumer(final Map<String, Object> config) { return new TestingMetricsInterceptingConsumer<>(config, new ByteArrayDeserializer(), new ByteArrayDeserializer()); } @Override public Admin getAdmin(final Map<String, Object> config) { assertTrue((Boolean) config.get(AdminClientConfig.ENABLE_METRICS_PUSH_CONFIG)); final TestingMetricsInterceptingAdminClient adminClient = new TestingMetricsInterceptingAdminClient(config); INTERCEPTING_ADMIN_CLIENTS.add(adminClient); return adminClient; } } public static
TestClientSupplier
java
apache__hadoop
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestCapacitySchedulerAmbiguousLeafs.java
{ "start": 1372, "end": 5787 }
class ____ { /** * Internal counter for incremental application id generation */ int appId = 0; private static final QueuePath ROOT = new QueuePath(CapacitySchedulerConfiguration.ROOT); private static final QueuePath DEFAULT = new QueuePath(CapacitySchedulerConfiguration.ROOT + ".default"); private static final QueuePath A = new QueuePath(CapacitySchedulerConfiguration.ROOT + ".a"); private static final QueuePath B = new QueuePath(CapacitySchedulerConfiguration.ROOT + ".b"); private static final QueuePath A_UNIQUE = new QueuePath(CapacitySchedulerConfiguration.ROOT + ".a.unique"); private static final QueuePath A_AMBI = new QueuePath(CapacitySchedulerConfiguration.ROOT + ".a.ambi"); private static final QueuePath B_AMBI = new QueuePath(CapacitySchedulerConfiguration.ROOT + ".b.ambi"); /** * Helper method to submit applications via RMClientService, to make sure * all submissions go through RMAppManager. * @param rm The resource manager instance * @param queue Name of the queue to submit the application to * @return ApplicationID of the submitted application * @throws IOException * @throws YarnException */ private ApplicationId submitApplication(MockRM rm, String queue) throws IOException, YarnException { //Generating incremental application id final ApplicationAttemptId appAttemptId = TestUtils .getMockApplicationAttemptId(appId++, 1); Resource resource = Resources.createResource(1024); ContainerLaunchContext amContainerSpec = ContainerLaunchContext .newInstance(null, null, null, null, null, null); ApplicationSubmissionContext asc = ApplicationSubmissionContext .newInstance(appAttemptId.getApplicationId(), "Test application", queue, null, amContainerSpec, false, true, 1, resource, "applicationType"); SubmitApplicationRequest req = SubmitApplicationRequest.newInstance(asc); rm.getClientRMService().submitApplication(req); return appAttemptId.getApplicationId(); } @Test public void testAmbiguousSubmissionWithACL() throws Exception { YarnConfiguration conf = new 
YarnConfiguration(); conf.set(YarnConfiguration.RM_SCHEDULER, CapacityScheduler.class.getName()); conf.setBoolean(YarnConfiguration.YARN_ACL_ENABLE, true); MockRM rm = new MockRM(conf); CapacityScheduler cs = (CapacityScheduler)rm.getResourceScheduler(); CapacitySchedulerConfiguration schedulerConf = cs.getConfiguration(); schedulerConf.setQueues(ROOT, new String[] {"a", "b", "default"}); schedulerConf.setAcl(ROOT, QueueACL.SUBMIT_APPLICATIONS, " "); schedulerConf.setAcl(ROOT, QueueACL.ADMINISTER_QUEUE, "forbidden forbidden"); schedulerConf.setQueues(A, new String[] {"unique", "ambi"}); schedulerConf.setAcl(A, QueueACL.SUBMIT_APPLICATIONS, "forbidden forbidden"); schedulerConf.setCapacity(A, 45); schedulerConf.setQueues(B, new String[] {"ambi"}); schedulerConf.setCapacity(B, 45); schedulerConf.setCapacity(DEFAULT, 10); schedulerConf.setCapacity(A_UNIQUE, 50); schedulerConf.setAcl(A_UNIQUE, QueueACL.SUBMIT_APPLICATIONS, "* *"); schedulerConf.setCapacity(A_AMBI, 50); schedulerConf.setAcl(A_AMBI, QueueACL.SUBMIT_APPLICATIONS, "* *"); schedulerConf.setCapacity(B_AMBI, 100); schedulerConf.set(CapacitySchedulerConfiguration.MAPPING_RULE_FORMAT, "json"); //Simple %specified mapping rule for all submissions with skip fallback //The %specified needed rule to make sure we get an //ApplicationPlacementContext which is required for validating YARN-10787 schedulerConf.set(CapacitySchedulerConfiguration.MAPPING_RULE_JSON, "{\"rules\" : [{\"type\": \"user\", \"policy\" : \"specified\", " + "\"fallbackResult\" : \"skip\", \"matches\" : \"*\"}]}"); schedulerConf.setOverrideWithQueueMappings(true); rm.start(); cs.reinitialize(schedulerConf, rm.getRMContext()); ApplicationId id = submitApplication(rm, "root.a.unique"); rm.waitForState(id, RMAppState.ACCEPTED); id = submitApplication(rm, "unique"); rm.waitForState(id, RMAppState.ACCEPTED); id = submitApplication(rm, "ambi"); rm.waitForState(id, RMAppState.FAILED); id = submitApplication(rm, "root.a.ambi"); rm.waitForState(id, 
RMAppState.ACCEPTED); rm.stop(); } }
TestCapacitySchedulerAmbiguousLeafs
java
google__error-prone
core/src/test/java/com/google/errorprone/bugpatterns/ComparingThisWithNullTest.java
{ "start": 1164, "end": 1557 }
class ____ { void f() { // BUG: Diagnostic contains: ComparingThisWithNull if (this == null) { String x = "Test"; } } } """) .doTest(); } @Test public void nullIsThis() { helper .addSourceLines( "Test.java", """
Test
java
spring-projects__spring-boot
module/spring-boot-actuator/src/test/java/org/springframework/boot/actuate/audit/listener/AuditListenerTests.java
{ "start": 1033, "end": 1416 }
class ____ { @Test void testStoredEvents() { AuditEventRepository repository = mock(AuditEventRepository.class); AuditEvent event = new AuditEvent("principal", "type", Collections.emptyMap()); AuditListener listener = new AuditListener(repository); listener.onApplicationEvent(new AuditApplicationEvent(event)); then(repository).should().add(event); } }
AuditListenerTests
java
elastic__elasticsearch
x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/RepositoryVerifyIntegrityResponseChunk.java
{ "start": 13956, "end": 14101 }
interface ____ { void writeResponseChunk(RepositoryVerifyIntegrityResponseChunk responseChunk, ActionListener<Void> listener); } }
Writer
java
apache__flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/HandlerRequestException.java
{ "start": 977, "end": 1381 }
class ____ extends FlinkException { private static final long serialVersionUID = 7310878739304006028L; public HandlerRequestException(String message) { super(message); } public HandlerRequestException(Throwable cause) { super(cause); } public HandlerRequestException(String message, Throwable cause) { super(message, cause); } }
HandlerRequestException
java
elastic__elasticsearch
x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/ChunkingConfigTests.java
{ "start": 679, "end": 3891 }
class ____ extends AbstractXContentSerializingTestCase<ChunkingConfig> { @Override protected ChunkingConfig createTestInstance() { return createRandomizedChunk(); } @Override protected Writeable.Reader<ChunkingConfig> instanceReader() { return ChunkingConfig::new; } @Override protected ChunkingConfig doParseInstance(XContentParser parser) { return ChunkingConfig.STRICT_PARSER.apply(parser, null); } public void testConstructorGivenAutoAndTimeSpan() { expectThrows(IllegalArgumentException.class, () -> new ChunkingConfig(ChunkingConfig.Mode.AUTO, TimeValue.timeValueMillis(1000))); } public void testConstructorGivenOffAndTimeSpan() { expectThrows(IllegalArgumentException.class, () -> new ChunkingConfig(ChunkingConfig.Mode.OFF, TimeValue.timeValueMillis(1000))); } public void testConstructorGivenManualAndNoTimeSpan() { expectThrows(IllegalArgumentException.class, () -> new ChunkingConfig(ChunkingConfig.Mode.MANUAL, null)); } public void testIsEnabled() { assertThat(ChunkingConfig.newAuto().isEnabled(), is(true)); assertThat(ChunkingConfig.newManual(TimeValue.timeValueMillis(1000)).isEnabled(), is(true)); assertThat(ChunkingConfig.newOff().isEnabled(), is(false)); } public static ChunkingConfig createRandomizedChunk() { ChunkingConfig.Mode mode = randomFrom(ChunkingConfig.Mode.values()); TimeValue timeSpan = null; if (mode == ChunkingConfig.Mode.MANUAL) { // time span is required to be at least 1 millis, so we use a custom method to generate a time value here timeSpan = randomPositiveSecondsMinutesHours(); } return new ChunkingConfig(mode, timeSpan); } private static TimeValue randomPositiveSecondsMinutesHours() { return new TimeValue(randomIntBetween(1, 1000), randomFrom(Arrays.asList(TimeUnit.SECONDS, TimeUnit.MINUTES, TimeUnit.HOURS))); } @Override protected ChunkingConfig mutateInstance(ChunkingConfig instance) { ChunkingConfig.Mode mode = instance.getMode(); TimeValue timeSpan = instance.getTimeSpan(); switch (between(0, 1)) { case 0 -> { List<ChunkingConfig.Mode> 
modes = new ArrayList<>(Arrays.asList(ChunkingConfig.Mode.values())); modes.remove(mode); mode = randomFrom(modes); if (mode == ChunkingConfig.Mode.MANUAL) { timeSpan = randomPositiveSecondsMinutesHours(); } else { timeSpan = null; } } case 1 -> { if (timeSpan == null) { timeSpan = randomPositiveSecondsMinutesHours(); } else { timeSpan = new TimeValue(timeSpan.getMillis() + between(10, 10000)); } // only manual mode allows a timespan mode = ChunkingConfig.Mode.MANUAL; } default -> throw new AssertionError("Illegal randomisation branch"); } return new ChunkingConfig(mode, timeSpan); } }
ChunkingConfigTests
java
lettuce-io__lettuce-core
src/main/java/io/lettuce/core/RestoreArgs.java
{ "start": 1622, "end": 5270 }
class ____ { /** * Utility constructor. */ private Builder() { } /** * Creates new {@link RestoreArgs} and set the TTL. * * @return new {@link RestoreArgs} with min idle time set. * @see RestoreArgs#ttl(long) */ public static RestoreArgs ttl(long milliseconds) { return new RestoreArgs().ttl(milliseconds); } /** * Creates new {@link RestoreArgs} and set the minimum idle time. * * @return new {@link RestoreArgs} with min idle time set. * @see RestoreArgs#ttl(Duration) */ public static RestoreArgs ttl(Duration ttl) { LettuceAssert.notNull(ttl, "Time to live must not be null"); return ttl(ttl.toMillis()); } } /** * Set TTL in {@code milliseconds} after restoring the key. * * @param milliseconds time to live. * @return {@code this}. */ public RestoreArgs ttl(long milliseconds) { this.ttl = milliseconds; return this; } /** * Set TTL in {@code milliseconds} after restoring the key. * * @param ttl time to live. * @return {@code this}. */ public RestoreArgs ttl(Duration ttl) { LettuceAssert.notNull(ttl, "Time to live must not be null"); return ttl(ttl.toMillis()); } /** * Replaces existing keys if the target key already exists. * * @return {@code this}. */ public RestoreArgs replace() { return replace(true); } /** * Replaces existing keys if the target key already exists. * * @param replace {@code true} to enable replacing of existing keys. * @return {@code this}. */ public RestoreArgs replace(boolean replace) { this.replace = replace; return this; } /** * TTL will represent an absolute Unix timestamp (in milliseconds) in which the key will expire. * * @return {@code this}. * @since 6.1 */ public RestoreArgs absttl() { return absttl(true); } /** * TTL will represent an absolute Unix timestamp (in milliseconds) in which the key will expire. * * @param absttl {@code true} to apply absolute TTL instead of a relative remaining TTL. * @return {@code this}. 
* @since 6.1 */ public RestoreArgs absttl(boolean absttl) { this.absttl = absttl; return this; } /** * Set the number of seconds since the object stored at the specified key is idle (not requested by read or write * operations). * * @param idleTime the idle time when using a LRU eviction policy. * @return {@code this}. * @since 6.1 */ public RestoreArgs idleTime(long idleTime) { this.idleTime = idleTime; return this; } /** * Set the logarithmic access frequency counter of the object stored at the specified key. * * @param frequency the access frequency when using a LFU eviction policy. * @return {@code this}. * @since 6.1 */ public RestoreArgs frequency(long frequency) { this.frequency = frequency; return this; } @Override public <K, V> void build(CommandArgs<K, V> args) { if (replace) { args.add(REPLACE); } if (absttl) { args.add(ABSTTL); } if (idleTime != null) { args.add(IDLETIME).add(idleTime); } if (frequency != null) { args.add(FREQ).add(frequency); } } }
Builder
java
mybatis__mybatis-3
src/test/java/org/apache/ibatis/submitted/dynsql2/Name.java
{ "start": 703, "end": 1055 }
class ____ { private String firstName; private String lastName; public String getFirstName() { return firstName; } public void setFirstName(String firstName) { this.firstName = firstName; } public String getLastName() { return lastName; } public void setLastName(String lastName) { this.lastName = lastName; } }
Name
java
grpc__grpc-java
api/src/main/java/io/grpc/MetricRecorder.java
{ "start": 916, "end": 7109 }
interface ____ { /** * Adds a value for a double-precision counter metric instrument. * * @param metricInstrument The counter metric instrument to add the value against. * @param value The value to add. * @param requiredLabelValues A list of required label values for the metric. * @param optionalLabelValues A list of additional, optional label values for the metric. */ default void addDoubleCounter(DoubleCounterMetricInstrument metricInstrument, double value, List<String> requiredLabelValues, List<String> optionalLabelValues) { checkArgument(requiredLabelValues != null && requiredLabelValues.size() == metricInstrument.getRequiredLabelKeys().size(), "Incorrect number of required labels provided. Expected: %s", metricInstrument.getRequiredLabelKeys().size()); checkArgument(optionalLabelValues != null && optionalLabelValues.size() == metricInstrument.getOptionalLabelKeys().size(), "Incorrect number of optional labels provided. Expected: %s", metricInstrument.getOptionalLabelKeys().size()); } /** * Adds a value for a long valued counter metric instrument. * * @param metricInstrument The counter metric instrument to add the value against. * @param value The value to add. MUST be non-negative. * @param requiredLabelValues A list of required label values for the metric. * @param optionalLabelValues A list of additional, optional label values for the metric. */ default void addLongCounter(LongCounterMetricInstrument metricInstrument, long value, List<String> requiredLabelValues, List<String> optionalLabelValues) { checkArgument(requiredLabelValues != null && requiredLabelValues.size() == metricInstrument.getRequiredLabelKeys().size(), "Incorrect number of required labels provided. Expected: %s", metricInstrument.getRequiredLabelKeys().size()); checkArgument(optionalLabelValues != null && optionalLabelValues.size() == metricInstrument.getOptionalLabelKeys().size(), "Incorrect number of optional labels provided. 
Expected: %s", metricInstrument.getOptionalLabelKeys().size()); } /** * Adds a value for a long valued up down counter metric instrument. * * @param metricInstrument The counter metric instrument to add the value against. * @param value The value to add. May be positive, negative or zero. * @param requiredLabelValues A list of required label values for the metric. * @param optionalLabelValues A list of additional, optional label values for the metric. */ default void addLongUpDownCounter(LongUpDownCounterMetricInstrument metricInstrument, long value, List<String> requiredLabelValues, List<String> optionalLabelValues) { checkArgument(requiredLabelValues != null && requiredLabelValues.size() == metricInstrument.getRequiredLabelKeys().size(), "Incorrect number of required labels provided. Expected: %s", metricInstrument.getRequiredLabelKeys().size()); checkArgument(optionalLabelValues != null && optionalLabelValues.size() == metricInstrument.getOptionalLabelKeys().size(), "Incorrect number of optional labels provided. Expected: %s", metricInstrument.getOptionalLabelKeys().size()); } /** * Records a value for a double-precision histogram metric instrument. * * @param metricInstrument The histogram metric instrument to record the value against. * @param value The value to record. * @param requiredLabelValues A list of required label values for the metric. * @param optionalLabelValues A list of additional, optional label values for the metric. */ default void recordDoubleHistogram(DoubleHistogramMetricInstrument metricInstrument, double value, List<String> requiredLabelValues, List<String> optionalLabelValues) { checkArgument(requiredLabelValues != null && requiredLabelValues.size() == metricInstrument.getRequiredLabelKeys().size(), "Incorrect number of required labels provided. 
Expected: %s", metricInstrument.getRequiredLabelKeys().size()); checkArgument(optionalLabelValues != null && optionalLabelValues.size() == metricInstrument.getOptionalLabelKeys().size(), "Incorrect number of optional labels provided. Expected: %s", metricInstrument.getOptionalLabelKeys().size()); } /** * Records a value for a long valued histogram metric instrument. * * @param metricInstrument The histogram metric instrument to record the value against. * @param value The value to record. * @param requiredLabelValues A list of required label values for the metric. * @param optionalLabelValues A list of additional, optional label values for the metric. */ default void recordLongHistogram(LongHistogramMetricInstrument metricInstrument, long value, List<String> requiredLabelValues, List<String> optionalLabelValues) { checkArgument(requiredLabelValues != null && requiredLabelValues.size() == metricInstrument.getRequiredLabelKeys().size(), "Incorrect number of required labels provided. Expected: %s", metricInstrument.getRequiredLabelKeys().size()); checkArgument(optionalLabelValues != null && optionalLabelValues.size() == metricInstrument.getOptionalLabelKeys().size(), "Incorrect number of optional labels provided. Expected: %s", metricInstrument.getOptionalLabelKeys().size()); } /** * Registers a callback to produce metric values for only the listed instruments. The returned * registration must be closed when no longer needed, which will remove the callback. * * @param callback The callback to call to record. * @param metricInstruments The metric instruments the callback will record against. */ default Registration registerBatchCallback(BatchCallback callback, CallbackMetricInstrument... metricInstruments) { return () -> { }; } /** Callback to record gauge values. */
MetricRecorder
java
mockito__mockito
mockito-core/src/test/java/org/mockito/internal/util/reflection/BeanPropertySetterTest.java
{ "start": 383, "end": 3759 }
class ____ { @Test public void use_the_correct_setter_on_the_target() throws Exception { // given SomeBean someBean = new SomeBean(); Field theField = someBean.getClass().getDeclaredField("theField"); File valueToInject = new File("path"); // when boolean injected = new BeanPropertySetter(someBean, theField, true).set(valueToInject); // then assertTrue(injected); assertTrue(someBean.theFieldSetterWasUsed); assertSame(valueToInject, someBean.getTheField()); } @Test public void use_the_setter_on_the_target_when_field_name_begins_by_at_least_2_caps() throws Exception { // given BeanWithWeirdFields someBean = new BeanWithWeirdFields(); Field theField = someBean.getClass().getDeclaredField("UUID"); UUID valueToInject = new UUID(0L, 0L); // when boolean injected = new BeanPropertySetter(someBean, theField, true).set(valueToInject); // then assertTrue(injected); assertTrue(someBean.theFieldSetterWasUSed); assertSame(valueToInject, someBean.UUID); } @Test public void should_not_fail_if_bean_class_declares_only_the_setter_for_the_property() throws Exception { // given SomeBeanWithJustASetter someBean = new SomeBeanWithJustASetter(); Field theField = someBean.getClass().getDeclaredField("theField"); File valueToInject = new File("path"); // when boolean injected = new BeanPropertySetter(someBean, theField, true).set(valueToInject); // then assertTrue(injected); assertTrue(someBean.theFieldSetterWasUsed); } @Test public void should_fail_if_matching_setter_cannot_be_found_and_if_report_failure_is_true() throws Exception { // given SomeBeanWithNoSetterMatchingFieldType bean = new SomeBeanWithNoSetterMatchingFieldType(); Field theField = bean.getClass().getDeclaredField("theField"); File valueToInject = new File("path"); try { // when new BeanPropertySetter(bean, theField, true).set(valueToInject); fail(); } catch (Exception e) { // then Assertions.assertThat(e.getMessage()).contains("setter not found"); } } @Test public void return_false_if_no_setter_was_found() throws 
Exception { // given SomeBeanWithJustAGetter bean = new SomeBeanWithJustAGetter(); Field theField = bean.getClass().getDeclaredField("theField"); File valueToInject = new File("path"); // when boolean injected = new BeanPropertySetter(bean, theField).set(valueToInject); // then assertFalse(injected); } @Test public void return_false_if_no_setter_was_found_and_if_reportNoSetterFound_is_false() throws Exception { // given SomeBeanWithNoSetterMatchingFieldType bean = new SomeBeanWithNoSetterMatchingFieldType(); Field theField = bean.getClass().getDeclaredField("theField"); File valueToInject = new File("path"); // when boolean injected = new BeanPropertySetter(bean, theField, false).set(valueToInject); // then assertFalse(injected); } static
BeanPropertySetterTest
java
dropwizard__dropwizard
dropwizard-core/src/main/java/io/dropwizard/core/cli/CheckCommand.java
{ "start": 431, "end": 1514 }
class ____<T extends Configuration> extends ConfiguredCommand<T> { private static final Logger LOGGER = LoggerFactory.getLogger(CheckCommand.class); private final Class<T> configurationClass; public CheckCommand(Application<T> application) { super("check", "Parses and validates the configuration file"); this.configurationClass = application.getConfigurationClass(); } /* * Since we don't subclass CheckCommand, we need a concrete reference to the configuration * class. */ @Override protected Class<T> getConfigurationClass() { return configurationClass; } @Override protected void run(Bootstrap<T> bootstrap, Namespace namespace, T configuration) throws Exception { LOGGER.info("Configuration is OK"); } /* The stacktrace is redundant as the message contains the yaml error location */ @Override public void onError(Cli cli, Namespace namespace, Throwable e) { cli.getStdErr().println(e.getMessage()); } }
CheckCommand
java
elastic__elasticsearch
server/src/main/java/org/elasticsearch/rest/action/RestActionListener.java
{ "start": 938, "end": 2077 }
class ____<Response> implements ActionListener<Response> { private static final Logger logger = LogManager.getLogger(RestActionListener.class); protected final RestChannel channel; protected RestActionListener(RestChannel channel) { this.channel = channel; } @Override public final void onResponse(Response response) { try { ensureOpen(); processResponse(response); } catch (Exception e) { onFailure(e); } } protected abstract void processResponse(Response response) throws Exception; protected void ensureOpen() { if (channel.request().getHttpChannel().isOpen() == false) { throw new TaskCancelledException("response channel [" + channel.request().getHttpChannel() + "] closed"); } } @Override public final void onFailure(Exception e) { try { channel.sendResponse(new RestResponse(channel, e)); } catch (Exception inner) { inner.addSuppressed(e); logger.error("failed to send failure response", inner); } } }
RestActionListener
java
spring-projects__spring-framework
spring-beans/src/test/java/org/springframework/beans/factory/annotation/InjectAnnotationBeanPostProcessorTests.java
{ "start": 41889, "end": 42084 }
class ____ { @Inject private Optional<List<TestBean>> testBean; public Optional<List<TestBean>> getTestBean() { return this.testBean; } } public static
OptionalListFieldInjectionBean
java
alibaba__druid
core/src/main/java/com/alibaba/druid/sql/dialect/oracle/ast/stmt/OracleAlterTableModify.java
{ "start": 866, "end": 1464 }
class ____ extends OracleAlterTableItem { private List<SQLColumnDefinition> columns = new ArrayList<SQLColumnDefinition>(); @Override public void accept0(OracleASTVisitor visitor) { if (visitor.visit(this)) { acceptChild(visitor, columns); } visitor.endVisit(this); } public List<SQLColumnDefinition> getColumns() { return columns; } public void addColumn(SQLColumnDefinition column) { if (column != null) { column.setParent(this); } this.columns.add(column); } }
OracleAlterTableModify
java
quarkusio__quarkus
extensions/hibernate-reactive/deployment/src/test/java/io/quarkus/hibernate/reactive/SchemaValidateTest.java
{ "start": 1723, "end": 2150 }
class ____ { public static final String TABLE = "Hero_for_validation"; @jakarta.persistence.Id @jakarta.persistence.GeneratedValue public java.lang.Long id; @Column(unique = true) public String name; public String otherName; public int level; public String picture; @Column(columnDefinition = "TEXT") public String powers; } }
Hero
java
elastic__elasticsearch
server/src/main/java/org/elasticsearch/inference/WeightedTokensUtils.java
{ "start": 1128, "end": 5099 }
class ____ { private WeightedTokensUtils() {} public static Query queryBuilderWithAllTokens( String fieldName, List<WeightedToken> tokens, MappedFieldType ft, SearchExecutionContext context ) { var qb = new BooleanQuery.Builder(); for (var token : tokens) { qb.add(new BoostQuery(ft.termQuery(token.token(), context), token.weight()), BooleanClause.Occur.SHOULD); } return new SparseVectorQueryWrapper(fieldName, qb.setMinimumNumberShouldMatch(1).build()); } public static Query queryBuilderWithPrunedTokens( String fieldName, TokenPruningConfig tokenPruningConfig, List<WeightedToken> tokens, MappedFieldType ft, SearchExecutionContext context ) throws IOException { var qb = new BooleanQuery.Builder(); int fieldDocCount = context.getIndexReader().getDocCount(fieldName); float bestWeight = tokens.stream().map(WeightedToken::weight).reduce(0f, Math::max); float averageTokenFreqRatio = getAverageTokenFreqRatio(fieldName, context.getIndexReader(), fieldDocCount); if (averageTokenFreqRatio == 0) { return new MatchNoDocsQuery("query is against an empty field"); } for (var token : tokens) { boolean keep = shouldKeepToken( fieldName, tokenPruningConfig, context.getIndexReader(), token, fieldDocCount, averageTokenFreqRatio, bestWeight ); keep ^= tokenPruningConfig != null && tokenPruningConfig.isOnlyScorePrunedTokens(); if (keep) { qb.add(new BoostQuery(ft.termQuery(token.token(), context), token.weight()), BooleanClause.Occur.SHOULD); } } return new SparseVectorQueryWrapper(fieldName, qb.setMinimumNumberShouldMatch(1).build()); } /** * We calculate the maximum number of unique tokens for any shard of data. The maximum is used to compute * average token frequency since we don't have a unique inter-segment token count. * Once we have the maximum number of unique tokens, we use the total count of tokens in the index to calculate * the average frequency ratio. 
* * @param reader * @param fieldDocCount * @return float * @throws IOException */ private static float getAverageTokenFreqRatio(String fieldName, IndexReader reader, int fieldDocCount) throws IOException { int numUniqueTokens = 0; for (var leaf : reader.getContext().leaves()) { var terms = leaf.reader().terms(fieldName); if (terms != null) { numUniqueTokens = (int) Math.max(terms.size(), numUniqueTokens); } } if (numUniqueTokens == 0) { return 0; } return (float) reader.getSumDocFreq(fieldName) / fieldDocCount / numUniqueTokens; } /** * Returns true if the token should be queried based on the {@code tokensFreqRatioThreshold} and {@code tokensWeightThreshold} * set on the query. */ private static boolean shouldKeepToken( String fieldName, TokenPruningConfig tokenPruningConfig, IndexReader reader, WeightedToken token, int fieldDocCount, float averageTokenFreqRatio, float bestWeight ) throws IOException { if (tokenPruningConfig == null) { return true; } int docFreq = reader.docFreq(new Term(fieldName, token.token())); if (docFreq == 0) { return false; } float tokenFreqRatio = (float) docFreq / fieldDocCount; return tokenFreqRatio < tokenPruningConfig.getTokensFreqRatioThreshold() * averageTokenFreqRatio || token.weight() > tokenPruningConfig.getTokensWeightThreshold() * bestWeight; } }
WeightedTokensUtils
java
apache__dubbo
dubbo-common/src/main/java/org/apache/dubbo/common/ProtocolServiceKey.java
{ "start": 979, "end": 1479 }
class ____ extends ServiceKey { private final String protocol; public ProtocolServiceKey(String interfaceName, String version, String group, String protocol) { super(interfaceName, version, group); this.protocol = protocol; } public String getProtocol() { return protocol; } public String getServiceKeyString() { return super.toString(); } public boolean isSameWith(ProtocolServiceKey protocolServiceKey) { //
ProtocolServiceKey
java
assertj__assertj-core
assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/strings/Strings_assertMatches_CharSequence_Test.java
{ "start": 1709, "end": 5268 }
class ____ extends StringsBaseTest { private String actual = "Yoda"; @Test void should_throw_error_if_regular_expression_is_null() { assertThatNullPointerException().isThrownBy(() -> { String regex = null; strings.assertMatches(someInfo(), actual, regex); }).withMessage(regexPatternIsNull()); } @Test void should_throw_error_if_syntax_of_regular_expression_is_invalid() { assertThatExceptionOfType(PatternSyntaxException.class).isThrownBy(() -> strings.assertMatches(someInfo(), actual, "*...")); } @Test void should_fail_if_actual_is_null() { assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> strings.assertMatches(someInfo(), null, matchAnything().pattern())) .withMessage(actualIsNull()); } @Test void should_fail_if_actual_does_not_match_regular_expression() { assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> strings.assertMatches(someInfo(), actual, "Luke")) .withMessage(shouldMatch(actual, "Luke").create()); } @Test void should_pass_if_actual_matches_Pattern() { strings.assertMatches(someInfo(), actual, "Yod.*"); } @Test void should_throw_error_if_regular_expression_is_null_whatever_custom_comparison_strategy_is() { assertThatNullPointerException().isThrownBy(() -> { String regex = null; stringsWithCaseInsensitiveComparisonStrategy.assertMatches(someInfo(), actual, regex); }).withMessage(regexPatternIsNull()); } @Test void should_throw_error_if_syntax_of_regular_expression_is_invalid_whatever_custom_comparison_strategy_is() { assertThatExceptionOfType(PatternSyntaxException.class).isThrownBy(() -> stringsWithCaseInsensitiveComparisonStrategy.assertMatches(someInfo(), actual, "*...")); } @Test void should_fail_if_actual_is_null_whatever_custom_comparison_strategy_is() { assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> stringsWithCaseInsensitiveComparisonStrategy.assertMatches(someInfo(), null, matchAnything().pattern())) .withMessage(actualIsNull()); } @Test void 
should_fail_if_actual_does_not_match_regular_expression_whatever_custom_comparison_strategy_is() { AssertionInfo info = someInfo(); Throwable error = catchThrowable(() -> stringsWithCaseInsensitiveComparisonStrategy.assertMatches(info, actual, "Luke")); assertThat(error).isInstanceOf(AssertionError.class); verify(failures).failure(info, shouldMatch(actual, "Luke")); } @Test void should_pass_if_actual_matches_Pattern_whatever_custom_comparison_strategy_is() { stringsWithCaseInsensitiveComparisonStrategy.assertMatches(someInfo(), actual, "Yod.*"); } }
Strings_assertMatches_CharSequence_Test
java
apache__camel
components/camel-platform-http/src/test/java/org/apache/camel/component/platform/http/JettyServerTest.java
{ "start": 1203, "end": 2261 }
class ____ { public static final String JETTY_SERVER_NAME = "JettyServerTest"; private final int port; private final ContextHandlerCollection contextHandlerCollection; private final JettyEmbeddedService service; public JettyServerTest(int port) { contextHandlerCollection = new ContextHandlerCollection(true); final JettyConfiguration configuration = JettyConfigurationBuilder.bareTemplate() .withPort(port) .withHandlerCollectionConfiguration().addHandlers(contextHandlerCollection).build().build(); this.service = new JettyEmbeddedService(configuration); this.port = port; } public void start() { service.initialize(); } public void stop() throws Exception { service.stop(); } public void addHandler(ContextHandler contextHandler) throws Exception { contextHandlerCollection.addHandler(contextHandler); contextHandler.start(); } public int getServerPort() { return port; } }
JettyServerTest
java
elastic__elasticsearch
server/src/main/java/org/elasticsearch/cluster/metadata/ReservedStateMetadata.java
{ "start": 1410, "end": 9024 }
class ____ contains information about reserved cluster state set * through file based settings or by modules/plugins. * * <p> * These types of cluster settings/entities can be read through the REST API, * but can only be modified through a versioned 'operator mode' update, e.g. * file based settings or module/plugin upgrade. */ public record ReservedStateMetadata( String namespace, Long version, Map<String, ReservedStateHandlerMetadata> handlers, ReservedStateErrorMetadata errorMetadata ) implements SimpleDiffable<ReservedStateMetadata>, ToXContentFragment { public static final Long NO_VERSION = Long.MIN_VALUE; // use min long as sentinel for uninitialized version public static final Long EMPTY_VERSION = -1L; // use -1 as sentinel for empty metadata public static final Long RESTORED_VERSION = 0L; // use 0 as sentinel for metadata restored from snapshot private static final ParseField VERSION = new ParseField("version"); private static final ParseField HANDLERS = new ParseField("handlers"); private static final ParseField ERRORS_METADATA = new ParseField("errors"); /** * ReservedStateMetadata contains information about reserved cluster settings. * * <p> * These settings cannot be updated by the end user and are set outside of the * REST layer, e.g. through file based settings or by plugin/modules. * * @param namespace The namespace of the setting creator, e.g. file_settings, security plugin, etc. * @param version The update version, must increase with each update * @param handlers Per state update handler information on key set in by this update. These keys are validated at REST time. * @param errorMetadata If the update failed for some reason, this is where we store the error information metadata. */ public ReservedStateMetadata {} /** * Creates a set intersection between cluster state keys set by a given {@link ReservedClusterStateHandler} * and the input set. 
* * <p> * This method is to be used to check if a REST action handler is allowed to modify certain cluster state. * * @param handlerName the name of the reserved state handler we need to check for keys * @param modified a set of keys we want to see if we can modify. * @return */ public Set<String> conflicts(String handlerName, Set<String> modified) { ReservedStateHandlerMetadata handlerMetadata = handlers.get(handlerName); if (handlerMetadata == null || handlerMetadata.keys().isEmpty()) { return Collections.emptySet(); } Set<String> intersect = new HashSet<>(handlerMetadata.keys()); intersect.retainAll(modified); return Collections.unmodifiableSet(intersect); } /** * Get the reserved keys for the handler name * * @param handlerName handler name to get keys for * @return set of keys for that handler */ public Set<String> keys(String handlerName) { ReservedStateHandlerMetadata handlerMetadata = handlers.get(handlerName); if (handlerMetadata == null || handlerMetadata.keys().isEmpty()) { return Collections.emptySet(); } return Collections.unmodifiableSet(handlerMetadata.keys()); } /** * Reads an {@link ReservedStateMetadata} from a {@link StreamInput} * * @param in the {@link StreamInput} to read from * @return {@link ReservedStateMetadata} * @throws IOException */ public static ReservedStateMetadata readFrom(StreamInput in) throws IOException { Builder builder = new Builder(in.readString()).version(in.readLong()); int handlersSize = in.readVInt(); for (int i = 0; i < handlersSize; i++) { ReservedStateHandlerMetadata handler = ReservedStateHandlerMetadata.readFrom(in); builder.putHandler(handler); } builder.errorMetadata(in.readOptionalWriteable(ReservedStateErrorMetadata::readFrom)); return builder.build(); } @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(namespace); out.writeLong(version); out.writeCollection(handlers.values()); out.writeOptionalWriteable(errorMetadata); } /** * Reads an {@link ReservedStateMetadata} {@link 
Diff} from {@link StreamInput} * * @param in the {@link StreamInput} to read the diff from * @return a {@link Diff} of {@link ReservedStateMetadata} * @throws IOException */ public static Diff<ReservedStateMetadata> readDiffFrom(StreamInput in) throws IOException { return SimpleDiffable.readDiffFrom(ReservedStateMetadata::readFrom, in); } /** * Convenience method for creating a {@link Builder} for {@link ReservedStateMetadata} * * @param namespace the namespace under which we'll store the {@link ReservedStateMetadata} * @return {@link Builder} */ public static Builder builder(String namespace) { return new Builder(namespace); } /** * Convenience method for creating a {@link Builder} for {@link ReservedStateMetadata} * * @param namespace the namespace under which we'll store the {@link ReservedStateMetadata} * @param metadata an existing {@link ReservedStateMetadata} * @return {@link Builder} */ public static Builder builder(String namespace, ReservedStateMetadata metadata) { return new Builder(namespace, metadata); } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(namespace()); builder.field(VERSION.getPreferredName(), version); builder.startObject(HANDLERS.getPreferredName()); for (var i = handlers.entrySet().stream().sorted(Map.Entry.comparingByKey()).iterator(); i.hasNext();) { i.next().getValue().toXContent(builder, params); } builder.endObject(); builder.field(ERRORS_METADATA.getPreferredName(), errorMetadata); builder.endObject(); return builder; } private static final ConstructingObjectParser<ReservedStateMetadata, String> PARSER = new ConstructingObjectParser<>( "reserved_state_metadata", false, (a, namespace) -> { Map<String, ReservedStateHandlerMetadata> handlers = new HashMap<>(); @SuppressWarnings("unchecked") List<ReservedStateHandlerMetadata> handlersList = (List<ReservedStateHandlerMetadata>) a[1]; handlersList.forEach(h -> handlers.put(h.name(), h)); return new 
ReservedStateMetadata(namespace, (Long) a[0], Map.copyOf(handlers), (ReservedStateErrorMetadata) a[2]); } ); static { PARSER.declareLong(constructorArg(), VERSION); PARSER.declareNamedObjects(optionalConstructorArg(), (p, c, name) -> ReservedStateHandlerMetadata.fromXContent(p, name), HANDLERS); PARSER.declareObjectOrNull(optionalConstructorArg(), (p, c) -> ReservedStateErrorMetadata.fromXContent(p), null, ERRORS_METADATA); } /** * Reads {@link ReservedStateMetadata} from {@link XContentParser} * * @param parser {@link XContentParser} * @return {@link ReservedStateMetadata} * @throws IOException */ public static ReservedStateMetadata fromXContent(final XContentParser parser) throws IOException { parser.nextToken(); return PARSER.apply(parser, parser.currentName()); } /** * Builder
that
java
netty__netty
microbench/src/main/java/io/netty/microbench/handler/ssl/AbstractSslHandlerBenchmark.java
{ "start": 6285, "end": 6939 }
class ____ extends EmbeddedChannelWriteAccumulatingHandlerContext { SslThroughputBenchmarkHandlerContext(ByteBufAllocator alloc, ChannelHandler handler, ByteToMessageDecoder.Cumulator writeCumulator) { super(alloc, handler, writeCumulator); } @Override protected void handleException(Throwable t) { handleUnexpectedException(t); } } public static void handleUnexpectedException(Throwable t) { if (t != null) { throw new IllegalStateException(t); } } }
SslThroughputBenchmarkHandlerContext
java
quarkusio__quarkus
extensions/security-jpa-reactive/deployment/src/test/java/io/quarkus/security/jpa/reactive/MinimalConfigurationTest.java
{ "start": 150, "end": 631 }
class ____ extends JpaSecurityRealmTest { @RegisterExtension static final QuarkusUnitTest config = new QuarkusUnitTest() .withApplicationRoot((jar) -> jar .addClasses(testClasses) .addClass(MinimalUserEntity.class) .addAsResource("minimal-config/import.sql", "import.sql") .addAsResource("minimal-config/application.properties", "application.properties")); }
MinimalConfigurationTest
java
spring-projects__spring-framework
spring-jms/src/main/java/org/springframework/jms/support/converter/MarshallingMessageConverter.java
{ "start": 3020, "end": 12213 }
interface ____ well */ public MarshallingMessageConverter(Marshaller marshaller) { Assert.notNull(marshaller, "Marshaller must not be null"); if (!(marshaller instanceof Unmarshaller _unmarshaller)) { throw new IllegalArgumentException( "Marshaller [" + marshaller + "] does not implement the Unmarshaller " + "interface. Please set an Unmarshaller explicitly by using the " + "MarshallingMessageConverter(Marshaller, Unmarshaller) constructor."); } else { this.marshaller = marshaller; this.unmarshaller = _unmarshaller; } } /** * Construct a new {@code MarshallingMessageConverter} with the * given Marshaller and Unmarshaller. * @param marshaller the Marshaller to use * @param unmarshaller the Unmarshaller to use */ public MarshallingMessageConverter(Marshaller marshaller, Unmarshaller unmarshaller) { Assert.notNull(marshaller, "Marshaller must not be null"); Assert.notNull(unmarshaller, "Unmarshaller must not be null"); this.marshaller = marshaller; this.unmarshaller = unmarshaller; } /** * Set the {@link Marshaller} to be used by this message converter. */ public void setMarshaller(Marshaller marshaller) { Assert.notNull(marshaller, "Marshaller must not be null"); this.marshaller = marshaller; } /** * Set the {@link Unmarshaller} to be used by this message converter. */ public void setUnmarshaller(Unmarshaller unmarshaller) { Assert.notNull(unmarshaller, "Unmarshaller must not be null"); this.unmarshaller = unmarshaller; } /** * Specify whether {@link #toMessage(Object, Session)} should marshal to * a {@link BytesMessage} or a {@link TextMessage}. * <p>The default is {@link MessageType#BYTES}, i.e. this converter marshals * to a {@link BytesMessage}. Note that the default version of this converter * supports {@link MessageType#BYTES} and {@link MessageType#TEXT} only. 
* @see MessageType#BYTES * @see MessageType#TEXT */ public void setTargetType(MessageType targetType) { Assert.notNull(targetType, "MessageType must not be null"); this.targetType = targetType; } @Override public void afterPropertiesSet() { Assert.notNull(this.marshaller, "Property 'marshaller' is required"); Assert.notNull(this.unmarshaller, "Property 'unmarshaller' is required"); } /** * This implementation marshals the given object to a {@link jakarta.jms.TextMessage} or * {@link jakarta.jms.BytesMessage}. The desired message type can be defined by setting * the {@link #setTargetType "marshalTo"} property. * @see #marshalToTextMessage * @see #marshalToBytesMessage */ @Override public Message toMessage(Object object, Session session) throws JMSException, MessageConversionException { Assert.state(this.marshaller != null, "No Marshaller set"); try { return switch (this.targetType) { case TEXT -> marshalToTextMessage(object, session, this.marshaller); case BYTES -> marshalToBytesMessage(object, session, this.marshaller); default -> marshalToMessage(object, session, this.marshaller, this.targetType); }; } catch (XmlMappingException | IOException ex) { throw new MessageConversionException("Could not marshal [" + object + "]", ex); } } /** * This implementation unmarshals the given {@link Message} into an object. 
* @see #unmarshalFromTextMessage * @see #unmarshalFromBytesMessage */ @Override public Object fromMessage(Message message) throws JMSException, MessageConversionException { Assert.state(this.unmarshaller != null, "No Unmarshaller set"); try { if (message instanceof TextMessage textMessage) { return unmarshalFromTextMessage(textMessage, this.unmarshaller); } else if (message instanceof BytesMessage bytesMessage) { return unmarshalFromBytesMessage(bytesMessage, this.unmarshaller); } else { return unmarshalFromMessage(message, this.unmarshaller); } } catch (IOException ex) { throw new MessageConversionException("Could not access message content: " + message, ex); } catch (XmlMappingException ex) { throw new MessageConversionException("Could not unmarshal message: " + message, ex); } } /** * Marshal the given object to a {@link TextMessage}. * @param object the object to be marshalled * @param session current JMS session * @param marshaller the marshaller to use * @return the resulting message * @throws JMSException if thrown by JMS methods * @throws IOException in case of I/O errors * @throws XmlMappingException in case of OXM mapping errors * @see Session#createTextMessage * @see Marshaller#marshal(Object, Result) */ protected TextMessage marshalToTextMessage(Object object, Session session, Marshaller marshaller) throws JMSException, IOException, XmlMappingException { StringWriter writer = new StringWriter(1024); Result result = new StreamResult(writer); marshaller.marshal(object, result); return session.createTextMessage(writer.toString()); } /** * Marshal the given object to a {@link BytesMessage}. 
* @param object the object to be marshalled * @param session current JMS session * @param marshaller the marshaller to use * @return the resulting message * @throws JMSException if thrown by JMS methods * @throws IOException in case of I/O errors * @throws XmlMappingException in case of OXM mapping errors * @see Session#createBytesMessage * @see Marshaller#marshal(Object, Result) */ protected BytesMessage marshalToBytesMessage(Object object, Session session, Marshaller marshaller) throws JMSException, IOException, XmlMappingException { ByteArrayOutputStream bos = new ByteArrayOutputStream(1024); StreamResult streamResult = new StreamResult(bos); marshaller.marshal(object, streamResult); BytesMessage message = session.createBytesMessage(); message.writeBytes(bos.toByteArray()); return message; } /** * Template method that allows for custom message marshalling. * Invoked when {@link #setTargetType} is not {@link MessageType#TEXT} or * {@link MessageType#BYTES}. * <p>The default implementation throws an {@link IllegalArgumentException}. * @param object the object to marshal * @param session the JMS session * @param marshaller the marshaller to use * @param targetType the target message type (other than TEXT or BYTES) * @return the resulting message * @throws JMSException if thrown by JMS methods * @throws IOException in case of I/O errors * @throws XmlMappingException in case of OXM mapping errors */ protected Message marshalToMessage(Object object, Session session, Marshaller marshaller, MessageType targetType) throws JMSException, IOException, XmlMappingException { throw new IllegalArgumentException("Unsupported message type [" + targetType + "]. MarshallingMessageConverter by default only supports TextMessages and BytesMessages."); } /** * Unmarshal the given {@link TextMessage} into an object. 
* @param message the message * @param unmarshaller the unmarshaller to use * @return the unmarshalled object * @throws JMSException if thrown by JMS methods * @throws IOException in case of I/O errors * @throws XmlMappingException in case of OXM mapping errors * @see Unmarshaller#unmarshal(Source) */ protected Object unmarshalFromTextMessage(TextMessage message, Unmarshaller unmarshaller) throws JMSException, IOException, XmlMappingException { Source source = new StreamSource(new StringReader(message.getText())); return unmarshaller.unmarshal(source); } /** * Unmarshal the given {@link BytesMessage} into an object. * @param message the message * @param unmarshaller the unmarshaller to use * @return the unmarshalled object * @throws JMSException if thrown by JMS methods * @throws IOException in case of I/O errors * @throws XmlMappingException in case of OXM mapping errors * @see Unmarshaller#unmarshal(Source) */ protected Object unmarshalFromBytesMessage(BytesMessage message, Unmarshaller unmarshaller) throws JMSException, IOException, XmlMappingException { byte[] bytes = new byte[(int) message.getBodyLength()]; message.readBytes(bytes); ByteArrayInputStream bis = new ByteArrayInputStream(bytes); StreamSource source = new StreamSource(bis); return unmarshaller.unmarshal(source); } /** * Template method that allows for custom message unmarshalling. * Invoked when {@link #fromMessage(Message)} is invoked with a message * that is not a {@link TextMessage} or {@link BytesMessage}. * <p>The default implementation throws an {@link IllegalArgumentException}. 
* @param message the message * @param unmarshaller the unmarshaller to use * @return the unmarshalled object * @throws JMSException if thrown by JMS methods * @throws IOException in case of I/O errors * @throws XmlMappingException in case of OXM mapping errors */ protected Object unmarshalFromMessage(Message message, Unmarshaller unmarshaller) throws JMSException, IOException, XmlMappingException { throw new IllegalArgumentException("Unsupported message type [" + message.getClass() + "]. MarshallingMessageConverter by default only supports TextMessages and BytesMessages."); } }
as
java
spring-projects__spring-framework
spring-webmvc/src/main/java/org/springframework/web/servlet/resource/ResourceUrlEncodingFilter.java
{ "start": 1720, "end": 2552 }
class ____ extends GenericFilterBean { private static final Log logger = LogFactory.getLog(ResourceUrlEncodingFilter.class); @Override public void doFilter(ServletRequest request, ServletResponse response, FilterChain filterChain) throws ServletException, IOException { if (!(request instanceof HttpServletRequest httpRequest) || !(response instanceof HttpServletResponse httpResponse)) { throw new ServletException("ResourceUrlEncodingFilter only supports HTTP requests"); } ResourceUrlEncodingRequestWrapper wrappedRequest = new ResourceUrlEncodingRequestWrapper(httpRequest); ResourceUrlEncodingResponseWrapper wrappedResponse = new ResourceUrlEncodingResponseWrapper(wrappedRequest, httpResponse); filterChain.doFilter(wrappedRequest, wrappedResponse); } private static
ResourceUrlEncodingFilter
java
elastic__elasticsearch
test/framework/src/main/java/org/elasticsearch/http/ResponseInjectingHttpHandler.java
{ "start": 2262, "end": 3826 }
class ____ implements RequestHandler { private final RestStatus status; private final String responseBody; private final Predicate<HttpExchange> requestMatcher; public FixedRequestHandler(RestStatus status) { this(status, null, req -> true); } /** * Create a handler that only gets executed for requests that match the supplied predicate. Note * that because the errors are stored in a queue this will prevent any subsequently queued errors from * being returned until after it returns. */ public FixedRequestHandler(RestStatus status, String responseBody, Predicate<HttpExchange> requestMatcher) { this.status = status; this.responseBody = responseBody; this.requestMatcher = requestMatcher; } @Override public boolean matchesRequest(HttpExchange exchange) { return requestMatcher.test(exchange); } @Override public void writeResponse(HttpExchange exchange, HttpHandler delegateHandler) throws IOException { if (responseBody != null) { byte[] responseBytes = responseBody.getBytes(StandardCharsets.UTF_8); exchange.sendResponseHeaders(status.getStatus(), responseBytes.length == 0 ? -1 : responseBytes.length); exchange.getResponseBody().write(responseBytes); } else { exchange.sendResponseHeaders(status.getStatus(), -1); } } } }
FixedRequestHandler
java
apache__hadoop
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/rawlocal/TestRawlocalContractGetFileStatus.java
{ "start": 1051, "end": 1268 }
class ____ extends AbstractContractGetFileStatusTest { @Override protected AbstractFSContract createContract(Configuration conf) { return new RawlocalFSContract(conf); } }
TestRawlocalContractGetFileStatus
java
alibaba__druid
core/src/main/java/com/alibaba/druid/pool/DruidDataSourceStatLoggerImpl.java
{ "start": 1093, "end": 8790 }
class ____ extends DruidDataSourceStatLoggerAdapter { private static Log LOG = LogFactory.getLog(DruidDataSourceStatLoggerImpl.class); private Log logger = LOG; public DruidDataSourceStatLoggerImpl() { this.configFromProperties(System.getProperties()); } /** * @since 0.2.21 */ @Override public void configFromProperties(Properties properties) { if (properties == null) { return; } String property = properties.getProperty("druid.stat.loggerName"); if (property != null && property.length() > 0) { setLoggerName(property); } } public Log getLogger() { return logger; } @Override public void setLoggerName(String loggerName) { logger = LogFactory.getLog(loggerName); } @Override public void setLogger(Log logger) { if (logger == null) { throw new IllegalArgumentException("logger can not be null"); } this.logger = logger; } public boolean isLogEnable() { return logger.isInfoEnabled(); } public void log(String value) { logger.info(value); } @Override public void log(DruidDataSourceStatValue statValue) { if (!isLogEnable()) { return; } Map<String, Object> map = new LinkedHashMap<String, Object>(); map.put("url", statValue.url); map.put("dbType", statValue.getDbType()); map.put("name", statValue.getName()); map.put("activeCount", statValue.getActiveCount()); if (statValue.getActivePeak() > 0) { map.put("activePeak", statValue.getActivePeak()); map.put("activePeakTime", statValue.getActivePeakTime()); } map.put("poolingCount", statValue.getPoolingCount()); if (statValue.getPoolingPeak() > 0) { map.put("poolingPeak", statValue.getPoolingPeak()); map.put("poolingPeakTime", statValue.getPoolingPeakTime()); } map.put("connectCount", statValue.getConnectCount()); map.put("closeCount", statValue.getCloseCount()); if (statValue.getWaitThreadCount() > 0) { map.put("waitThreadCount", statValue.getWaitThreadCount()); } if (statValue.getNotEmptyWaitCount() > 0) { map.put("notEmptyWaitCount", statValue.getNotEmptyWaitCount()); } if (statValue.getNotEmptyWaitMillis() > 0) { 
map.put("notEmptyWaitMillis", statValue.getNotEmptyWaitMillis()); } if (statValue.getLogicConnectErrorCount() > 0) { map.put("logicConnectErrorCount", statValue.getLogicConnectErrorCount()); } if (statValue.getPhysicalConnectCount() > 0) { map.put("physicalConnectCount", statValue.getPhysicalConnectCount()); } if (statValue.getPhysicalCloseCount() > 0) { map.put("physicalCloseCount", statValue.getPhysicalCloseCount()); } if (statValue.getPhysicalConnectErrorCount() > 0) { map.put("physicalConnectErrorCount", statValue.getPhysicalConnectErrorCount()); } if (statValue.getExecuteCount() > 0) { map.put("executeCount", statValue.getExecuteCount()); } if (statValue.getErrorCount() > 0) { map.put("errorCount", statValue.getErrorCount()); } if (statValue.getCommitCount() > 0) { map.put("commitCount", statValue.getCommitCount()); } if (statValue.getRollbackCount() > 0) { map.put("rollbackCount", statValue.getRollbackCount()); } if (statValue.getPstmtCacheHitCount() > 0) { map.put("pstmtCacheHitCount", statValue.getPstmtCacheHitCount()); } if (statValue.getPstmtCacheMissCount() > 0) { map.put("pstmtCacheMissCount", statValue.getPstmtCacheMissCount()); } if (statValue.getStartTransactionCount() > 0) { map.put("startTransactionCount", statValue.getStartTransactionCount()); map.put("transactionHistogram", rtrim(statValue.getTransactionHistogram())); } if (statValue.getConnectCount() > 0) { map.put("connectionHoldTimeHistogram", rtrim(statValue.getConnectionHoldTimeHistogram())); } if (statValue.getClobOpenCount() > 0) { map.put("clobOpenCount", statValue.getClobOpenCount()); } if (statValue.getBlobOpenCount() > 0) { map.put("blobOpenCount", statValue.getBlobOpenCount()); } if (statValue.getSqlSkipCount() > 0) { map.put("sqlSkipCount", statValue.getSqlSkipCount()); } ArrayList<Map<String, Object>> sqlList = new ArrayList<Map<String, Object>>(); if (statValue.sqlList.size() > 0) { for (JdbcSqlStatValue sqlStat : statValue.getSqlList()) { Map<String, Object> sqlStatMap = new 
LinkedHashMap<String, Object>(); sqlStatMap.put("sql", sqlStat.getSql()); if (sqlStat.getExecuteCount() > 0) { sqlStatMap.put("executeCount", sqlStat.getExecuteCount()); sqlStatMap.put("executeMillisMax", sqlStat.getExecuteMillisMax()); sqlStatMap.put("executeMillisTotal", sqlStat.getExecuteMillisTotal()); sqlStatMap.put("executeHistogram", rtrim(sqlStat.getExecuteHistogram())); sqlStatMap.put("executeAndResultHoldHistogram", rtrim(sqlStat.getExecuteAndResultHoldHistogram())); } long executeErrorCount = sqlStat.getExecuteErrorCount(); if (executeErrorCount > 0) { sqlStatMap.put("executeErrorCount", executeErrorCount); } int runningCount = sqlStat.getRunningCount(); if (runningCount > 0) { sqlStatMap.put("runningCount", runningCount); } int concurrentMax = sqlStat.getConcurrentMax(); if (concurrentMax > 0) { sqlStatMap.put("concurrentMax", concurrentMax); } if (sqlStat.getFetchRowCount() > 0) { sqlStatMap.put("fetchRowCount", sqlStat.getFetchRowCount()); sqlStatMap.put("fetchRowCountMax", sqlStat.getFetchRowCountMax()); sqlStatMap.put("fetchRowHistogram", rtrim(sqlStat.getFetchRowHistogram())); } if (sqlStat.getUpdateCount() > 0) { sqlStatMap.put("updateCount", sqlStat.getUpdateCount()); sqlStatMap.put("updateCountMax", sqlStat.getUpdateCountMax()); sqlStatMap.put("updateHistogram", rtrim(sqlStat.getUpdateHistogram())); } if (sqlStat.getInTransactionCount() > 0) { sqlStatMap.put("inTransactionCount", sqlStat.getInTransactionCount()); } if (sqlStat.getClobOpenCount() > 0) { sqlStatMap.put("clobOpenCount", sqlStat.getClobOpenCount()); } if (sqlStat.getBlobOpenCount() > 0) { sqlStatMap.put("blobOpenCount", sqlStat.getBlobOpenCount()); } sqlList.add(sqlStatMap); } map.put("sqlList", sqlList); } if (statValue.getKeepAliveCheckCount() > 0) { map.put("keepAliveCheckCount", statValue.getKeepAliveCheckCount()); } String text = JSONUtils.toJSONString(map); log(text); } }
DruidDataSourceStatLoggerImpl
java
apache__dubbo
dubbo-common/src/test/java/org/apache/dubbo/common/model/person/FullAddress.java
{ "start": 886, "end": 6016 }
class ____ implements Serializable { private static final long serialVersionUID = 5163979984269419831L; private String countryId; private String countryName; private String provinceName; private String cityId; private String cityName; private String streetAddress; private String zipCode; public FullAddress() {} public FullAddress(String countryId, String provinceName, String cityId, String streetAddress, String zipCode) { this.countryId = countryId; this.countryName = countryId; this.provinceName = provinceName; this.cityId = cityId; this.cityName = cityId; this.streetAddress = streetAddress; this.zipCode = zipCode; } public FullAddress( String countryId, String countryName, String provinceName, String cityId, String cityName, String streetAddress, String zipCode) { this.countryId = countryId; this.countryName = countryName; this.provinceName = provinceName; this.cityId = cityId; this.cityName = cityName; this.streetAddress = streetAddress; this.zipCode = zipCode; } public String getCountryId() { return countryId; } public void setCountryId(String countryId) { this.countryId = countryId; } public String getCountryName() { return countryName; } public void setCountryName(String countryName) { this.countryName = countryName; } public String getProvinceName() { return provinceName; } public void setProvinceName(String provinceName) { this.provinceName = provinceName; } public String getCityId() { return cityId; } public void setCityId(String cityId) { this.cityId = cityId; } public String getCityName() { return cityName; } public void setCityName(String cityName) { this.cityName = cityName; } public String getStreetAddress() { return streetAddress; } public void setStreetAddress(String streetAddress) { this.streetAddress = streetAddress; } public String getZipCode() { return zipCode; } public void setZipCode(String zipCode) { this.zipCode = zipCode; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((cityId == null) ? 
0 : cityId.hashCode()); result = prime * result + ((cityName == null) ? 0 : cityName.hashCode()); result = prime * result + ((countryId == null) ? 0 : countryId.hashCode()); result = prime * result + ((countryName == null) ? 0 : countryName.hashCode()); result = prime * result + ((provinceName == null) ? 0 : provinceName.hashCode()); result = prime * result + ((streetAddress == null) ? 0 : streetAddress.hashCode()); result = prime * result + ((zipCode == null) ? 0 : zipCode.hashCode()); return result; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; FullAddress other = (FullAddress) obj; if (cityId == null) { if (other.cityId != null) return false; } else if (!cityId.equals(other.cityId)) return false; if (cityName == null) { if (other.cityName != null) return false; } else if (!cityName.equals(other.cityName)) return false; if (countryId == null) { if (other.countryId != null) return false; } else if (!countryId.equals(other.countryId)) return false; if (countryName == null) { if (other.countryName != null) return false; } else if (!countryName.equals(other.countryName)) return false; if (provinceName == null) { if (other.provinceName != null) return false; } else if (!provinceName.equals(other.provinceName)) return false; if (streetAddress == null) { if (other.streetAddress != null) return false; } else if (!streetAddress.equals(other.streetAddress)) return false; if (zipCode == null) { if (other.zipCode != null) return false; } else if (!zipCode.equals(other.zipCode)) return false; return true; } @Override public String toString() { StringBuilder sb = new StringBuilder(); if (countryName != null && countryName.length() > 0) { sb.append(countryName); } if (provinceName != null && provinceName.length() > 0) { sb.append(' '); sb.append(provinceName); } if (cityName != null && cityName.length() > 0) { sb.append(' '); sb.append(cityName); } if (streetAddress != 
null && streetAddress.length() > 0) { sb.append(' '); sb.append(streetAddress); } return sb.toString(); } }
FullAddress
java
google__error-prone
core/src/test/java/com/google/errorprone/bugpatterns/JdkObsoleteTest.java
{ "start": 9190, "end": 9957 }
class ____ {", " void test(Lib lib) {", " when(lib.foos())", " .thenReturn(", " new Enumeration<Integer>() {", " public boolean hasMoreElements() {", " return false;", " }", " public Integer nextElement() {", " return null;", " }", " });", " }", "}") .doTest(); } @Test public void navigableSetRepro() { testHelper .addSourceLines( "Test.java", """ import java.util.NavigableSet; import java.util.Optional;
Test
java
google__error-prone
core/src/test/java/com/google/errorprone/bugpatterns/InterfaceWithOnlyStaticsTest.java
{ "start": 2632, "end": 2999 }
interface ____ extends A { int foo = 42; static int bar() { return 1; } } """) .doTest(); } @Test public void negative_daggerModules() { testHelper .addSourceLines( "Module.java", """ package dagger; public @
Test
java
quarkusio__quarkus
independent-projects/qute/generator/src/test/java/io/quarkus/qute/generator/MyItem.java
{ "start": 114, "end": 230 }
class ____ { public String id = "foo"; public String getBar(int limit) { return "bar"; } }
MyItem
java
apache__maven
api/maven-api-core/src/main/java/org/apache/maven/api/services/ProjectBuilderRequest.java
{ "start": 1603, "end": 5364 }
interface ____ extends RepositoryAwareRequest { /** * Gets the path to the project to build. * This is typically the path to a pom.xml file or a directory containing a pom.xml file. * * @return an optional containing the path to the project, or empty if not specified */ @Nonnull Optional<Path> getPath(); /** * Gets the source of the project to build. * This is an alternative to specifying a path, allowing the project to be built from * a model source such as a string or input stream. * * @return an optional containing the source of the project, or empty if not specified */ @Nonnull Optional<Source> getSource(); /** * Determines whether a stub model should be allowed when the POM is missing or unreadable. * A stub model contains only minimal information derived from the project's coordinates. * * @return true if a stub model should be allowed, false otherwise */ boolean isAllowStubModel(); /** * Determines whether the project builder should recursively build parent/child projects. * When true, the builder will process parent POMs and child modules as needed. * * @return true if the build should be recursive, false otherwise */ boolean isRecursive(); /** * Determines whether plugins should be processed during project building. * When true, the builder will process plugin information which may include * resolving plugin dependencies and executing plugin goals that participate in project building. * * @return true if plugins should be processed, false otherwise */ boolean isProcessPlugins(); /** * Gets the list of remote repositories to use for resolving dependencies during project building. * These repositories will be used in addition to any repositories defined in the project itself. * * @return the list of remote repositories, or null if not specified */ @Nullable List<RemoteRepository> getRepositories(); /** * Creates a new ProjectBuilderRequest with the specified session and source. 
* * @param session the Maven session * @param source the source of the project to build * @return a new ProjectBuilderRequest * @throws NullPointerException if session or source is null */ @Nonnull static ProjectBuilderRequest build(@Nonnull Session session, @Nonnull Source source) { return builder() .session(requireNonNull(session, "session cannot be null")) .source(requireNonNull(source, "source cannot be null")) .build(); } /** * Creates a new ProjectBuilderRequest with the specified session and path. * * @param session the Maven session * @param path the path to the project to build * @return a new ProjectBuilderRequest * @throws NullPointerException if session or path is null */ @Nonnull static ProjectBuilderRequest build(@Nonnull Session session, @Nonnull Path path) { return builder() .session(requireNonNull(session, "session cannot be null")) .path(requireNonNull(path, "path cannot be null")) .build(); } /** * Creates a new builder for constructing a ProjectBuilderRequest. * * @return a new ProjectBuilderRequestBuilder */ @Nonnull static ProjectBuilderRequestBuilder builder() { return new ProjectBuilderRequestBuilder(); } /** * Builder for creating ProjectBuilderRequest instances. * This builder provides a fluent API for setting the various properties of a request. */ @NotThreadSafe
ProjectBuilderRequest
java
apache__rocketmq
remoting/src/main/java/org/apache/rocketmq/remoting/protocol/header/namesrv/WipeWritePermOfBrokerResponseHeader.java
{ "start": 1060, "end": 1465 }
class ____ implements CommandCustomHeader { @CFNotNull private Integer wipeTopicCount; @Override public void checkFields() throws RemotingCommandException { } public Integer getWipeTopicCount() { return wipeTopicCount; } public void setWipeTopicCount(Integer wipeTopicCount) { this.wipeTopicCount = wipeTopicCount; } }
WipeWritePermOfBrokerResponseHeader
java
apache__hadoop
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/security/TestProxyCAManager.java
{ "start": 1676, "end": 4630 }
class ____ { @Test public void testBasics() throws Exception { ProxyCA proxyCA = spy(new ProxyCA()); RMContext rmContext = mock(RMContext.class); RMStateStore rmStateStore = mock(RMStateStore.class); when(rmContext.getStateStore()).thenReturn(rmStateStore); ProxyCAManager proxyCAManager = new ProxyCAManager(proxyCA, rmContext); proxyCAManager.init(new YarnConfiguration()); assertEquals(proxyCA, proxyCAManager.getProxyCA()); verify(rmContext, times(0)).getStateStore(); verify(rmStateStore, times(0)).storeProxyCACert(any(), any()); verify(proxyCA, times(0)).init(); assertNull(proxyCA.getCaCert()); assertNull(proxyCA.getCaKeyPair()); proxyCAManager.start(); verify(rmContext, times(1)).getStateStore(); verify(rmStateStore, times(1)).storeProxyCACert(proxyCA.getCaCert(), proxyCA.getCaKeyPair().getPrivate()); verify(proxyCA, times(1)).init(); assertNotNull(proxyCA.getCaCert()); assertNotNull(proxyCA.getCaKeyPair()); } @Test public void testRecover() throws Exception { ProxyCA proxyCA = spy(new ProxyCA()); RMContext rmContext = mock(RMContext.class); RMStateStore rmStateStore = mock(RMStateStore.class); when(rmContext.getStateStore()).thenReturn(rmStateStore); ProxyCAManager proxyCAManager = new ProxyCAManager(proxyCA, rmContext); proxyCAManager.init(new YarnConfiguration()); assertEquals(proxyCA, proxyCAManager.getProxyCA()); verify(rmContext, times(0)).getStateStore(); verify(rmStateStore, times(0)).storeProxyCACert(any(), any()); verify(proxyCA, times(0)).init(); assertNull(proxyCA.getCaCert()); assertNull(proxyCA.getCaKeyPair()); RMStateStore.RMState rmState = mock(RMStateStore.RMState.class); RMStateStore.ProxyCAState proxyCAState = mock(RMStateStore.ProxyCAState.class); // We need to use a real certificate + private key because of validation // so just grab them from another ProxyCA ProxyCA otherProxyCA = new ProxyCA(); otherProxyCA.init(); X509Certificate certificate = otherProxyCA.getCaCert(); when(proxyCAState.getCaCert()).thenReturn(certificate); PrivateKey 
privateKey = otherProxyCA.getCaKeyPair().getPrivate(); when(proxyCAState.getCaPrivateKey()).thenReturn(privateKey); when(rmState.getProxyCAState()).thenReturn(proxyCAState); proxyCAManager.recover(rmState); verify(proxyCA, times(1)).init(certificate, privateKey); assertEquals(certificate, proxyCA.getCaCert()); assertEquals(privateKey, proxyCA.getCaKeyPair().getPrivate()); proxyCAManager.start(); verify(rmContext, times(1)).getStateStore(); verify(rmStateStore, times(1)).storeProxyCACert(proxyCA.getCaCert(), proxyCA.getCaKeyPair().getPrivate()); verify(proxyCA, times(0)).init(); assertEquals(certificate, proxyCA.getCaCert()); assertEquals(privateKey, proxyCA.getCaKeyPair().getPrivate()); } }
TestProxyCAManager
java
google__dagger
javatests/dagger/internal/codegen/BindsMissingDelegateValidationTest.java
{ "start": 4345, "end": 5240 }
interface ____"); subject.hasErrorContaining( "@Binds Object test.C.TestModule.bindObject(test.C.NotBound)"); subject.hasErrorContaining("@Provides Object test.C.TestModule.provideObject()"); } catch (Error e) { errors.add(e); } com.google.common.truth.Truth.assertThat(errors.size()).isAtMost(1); }); } @Test public void bindsMissingDelegate_setBinding() { Source component = CompilerTests.javaSource( "test.C", "package test;", "", "import dagger.Binds;", "import dagger.Component;", "import dagger.Module;", "import dagger.multibindings.IntoSet;", "import java.util.Set;", "", "@Component(modules = C.TestModule.class)", "
C
java
google__guava
android/guava-tests/test/com/google/common/primitives/DoublesTest.java
{ "start": 1794, "end": 31596 }
class ____ extends TestCase { private static final double[] EMPTY = {}; private static final double[] ARRAY1 = {1.0}; private static final double[] ARRAY234 = {2.0, 3.0, 4.0}; private static final double LEAST = Double.NEGATIVE_INFINITY; private static final double GREATEST = Double.POSITIVE_INFINITY; private static final double[] NUMBERS = new double[] { LEAST, -Double.MAX_VALUE, -1.0, -0.5, -0.1, -0.0, 0.0, 0.1, 0.5, 1.0, Double.MAX_VALUE, GREATEST, Double.MIN_NORMAL, -Double.MIN_NORMAL, Double.MIN_VALUE, -Double.MIN_VALUE, Integer.MIN_VALUE, Integer.MAX_VALUE, Long.MIN_VALUE, Long.MAX_VALUE }; private static final double[] VALUES = Doubles.concat(NUMBERS, new double[] {NaN}); // We need to test that our method behaves like the JDK method. @SuppressWarnings("InlineMeInliner") public void testHashCode() { for (double value : VALUES) { assertThat(Doubles.hashCode(value)).isEqualTo(Double.hashCode(value)); } } @SuppressWarnings("InlineMeInliner") // We need to test our method. public void testIsFinite() { for (double value : NUMBERS) { assertThat(Doubles.isFinite(value)).isEqualTo(Double.isFinite(value)); } } // We need to test that our method behaves like the JDK method. 
@SuppressWarnings("InlineMeInliner") public void testCompare() { for (double x : VALUES) { for (double y : VALUES) { // note: spec requires only that the sign is the same assertWithMessage(x + ", " + y).that(Doubles.compare(x, y)).isEqualTo(Double.compare(x, y)); } } } public void testContains() { assertThat(Doubles.contains(EMPTY, 1.0)).isFalse(); assertThat(Doubles.contains(ARRAY1, 2.0)).isFalse(); assertThat(Doubles.contains(ARRAY234, 1.0)).isFalse(); assertThat(Doubles.contains(new double[] {-1.0}, -1.0)).isTrue(); assertThat(Doubles.contains(ARRAY234, 2.0)).isTrue(); assertThat(Doubles.contains(ARRAY234, 3.0)).isTrue(); assertThat(Doubles.contains(ARRAY234, 4.0)).isTrue(); for (double value : NUMBERS) { assertWithMessage("" + value) .that(Doubles.contains(new double[] {5.0, value}, value)) .isTrue(); } assertThat(Doubles.contains(new double[] {5.0, NaN}, NaN)).isFalse(); } public void testIndexOf() { assertThat(Doubles.indexOf(EMPTY, 1.0)).isEqualTo(-1); assertThat(Doubles.indexOf(ARRAY1, 2.0)).isEqualTo(-1); assertThat(Doubles.indexOf(ARRAY234, 1.0)).isEqualTo(-1); assertThat(Doubles.indexOf(new double[] {-1.0}, -1.0)).isEqualTo(0); assertThat(Doubles.indexOf(ARRAY234, 2.0)).isEqualTo(0); assertThat(Doubles.indexOf(ARRAY234, 3.0)).isEqualTo(1); assertThat(Doubles.indexOf(ARRAY234, 4.0)).isEqualTo(2); assertThat(Doubles.indexOf(new double[] {2.0, 3.0, 2.0, 3.0}, 3.0)).isEqualTo(1); for (double value : NUMBERS) { assertWithMessage("" + value) .that(Doubles.indexOf(new double[] {5.0, value}, value)) .isEqualTo(1); } assertThat(Doubles.indexOf(new double[] {5.0, NaN}, NaN)).isEqualTo(-1); } public void testIndexOf_arrayTarget() { assertThat(Doubles.indexOf(EMPTY, EMPTY)).isEqualTo(0); assertThat(Doubles.indexOf(ARRAY234, EMPTY)).isEqualTo(0); assertThat(Doubles.indexOf(EMPTY, ARRAY234)).isEqualTo(-1); assertThat(Doubles.indexOf(ARRAY234, ARRAY1)).isEqualTo(-1); assertThat(Doubles.indexOf(ARRAY1, ARRAY234)).isEqualTo(-1); assertThat(Doubles.indexOf(ARRAY1, 
ARRAY1)).isEqualTo(0); assertThat(Doubles.indexOf(ARRAY234, ARRAY234)).isEqualTo(0); assertThat(Doubles.indexOf(ARRAY234, new double[] {2.0, 3.0})).isEqualTo(0); assertThat(Doubles.indexOf(ARRAY234, new double[] {3.0, 4.0})).isEqualTo(1); assertThat(Doubles.indexOf(ARRAY234, new double[] {3.0})).isEqualTo(1); assertThat(Doubles.indexOf(ARRAY234, new double[] {4.0})).isEqualTo(2); assertThat(Doubles.indexOf(new double[] {2.0, 3.0, 3.0, 3.0, 3.0}, new double[] {3.0})) .isEqualTo(1); assertThat( Doubles.indexOf( new double[] {2.0, 3.0, 2.0, 3.0, 4.0, 2.0, 3.0}, new double[] {2.0, 3.0, 4.0})) .isEqualTo(2); assertThat( Doubles.indexOf( new double[] {2.0, 2.0, 3.0, 4.0, 2.0, 3.0, 4.0}, new double[] {2.0, 3.0, 4.0})) .isEqualTo(1); assertThat(Doubles.indexOf(new double[] {4.0, 3.0, 2.0}, new double[] {2.0, 3.0, 4.0})) .isEqualTo(-1); for (double value : NUMBERS) { assertWithMessage("" + value) .that(Doubles.indexOf(new double[] {5.0, value, value, 5.0}, new double[] {value, value})) .isEqualTo(1); } assertThat(Doubles.indexOf(new double[] {5.0, NaN, NaN, 5.0}, new double[] {NaN, NaN})) .isEqualTo(-1); } public void testLastIndexOf() { assertThat(Doubles.lastIndexOf(EMPTY, 1.0)).isEqualTo(-1); assertThat(Doubles.lastIndexOf(ARRAY1, 2.0)).isEqualTo(-1); assertThat(Doubles.lastIndexOf(ARRAY234, 1.0)).isEqualTo(-1); assertThat(Doubles.lastIndexOf(new double[] {-1.0}, -1.0)).isEqualTo(0); assertThat(Doubles.lastIndexOf(ARRAY234, 2.0)).isEqualTo(0); assertThat(Doubles.lastIndexOf(ARRAY234, 3.0)).isEqualTo(1); assertThat(Doubles.lastIndexOf(ARRAY234, 4.0)).isEqualTo(2); assertThat(Doubles.lastIndexOf(new double[] {2.0, 3.0, 2.0, 3.0}, 3.0)).isEqualTo(3); for (double value : NUMBERS) { assertWithMessage("" + value) .that(Doubles.lastIndexOf(new double[] {value, 5.0}, value)) .isEqualTo(0); } assertThat(Doubles.lastIndexOf(new double[] {NaN, 5.0}, NaN)).isEqualTo(-1); } @GwtIncompatible public void testMax_noArgs() { assertThrows(IllegalArgumentException.class, () -> max()); } 
public void testMax() { assertThat(max(LEAST)).isEqualTo(LEAST); assertThat(max(GREATEST)).isEqualTo(GREATEST); assertThat(max(8.0, 6.0, 7.0, 5.0, 3.0, 0.0, 9.0)).isEqualTo(9.0); assertThat(max(-0.0, 0.0)).isEqualTo(0.0); assertThat(max(0.0, -0.0)).isEqualTo(0.0); assertThat(max(NUMBERS)).isEqualTo(GREATEST); assertThat(Double.isNaN(max(VALUES))).isTrue(); } @GwtIncompatible public void testMin_noArgs() { assertThrows(IllegalArgumentException.class, () -> min()); } public void testMin() { assertThat(min(LEAST)).isEqualTo(LEAST); assertThat(min(GREATEST)).isEqualTo(GREATEST); assertThat(min(8.0, 6.0, 7.0, 5.0, 3.0, 0.0, 9.0)).isEqualTo(0.0); assertThat(min(-0.0, 0.0)).isEqualTo(-0.0); assertThat(min(0.0, -0.0)).isEqualTo(-0.0); assertThat(min(NUMBERS)).isEqualTo(LEAST); assertThat(Double.isNaN(min(VALUES))).isTrue(); } public void testConstrainToRange() { assertThat(Doubles.constrainToRange(1.0, 0.0, 5.0)).isEqualTo(1.0); assertThat(Doubles.constrainToRange(1.0, 1.0, 5.0)).isEqualTo(1.0); assertThat(Doubles.constrainToRange(1.0, 3.0, 5.0)).isEqualTo(3.0); assertThat(Doubles.constrainToRange(0.0, -5.0, -1.0)).isEqualTo(-1.0); assertThat(Doubles.constrainToRange(5.0, 2.0, 2.0)).isEqualTo(2.0); assertThrows(IllegalArgumentException.class, () -> Doubles.constrainToRange(1.0, 3.0, 2.0)); } public void testConcat() { assertThat(Doubles.concat()).isEqualTo(EMPTY); assertThat(Doubles.concat(EMPTY)).isEqualTo(EMPTY); assertThat(Doubles.concat(EMPTY, EMPTY, EMPTY)).isEqualTo(EMPTY); assertThat(Doubles.concat(ARRAY1)).isEqualTo(ARRAY1); assertThat(Doubles.concat(ARRAY1)).isNotSameInstanceAs(ARRAY1); assertThat(Doubles.concat(EMPTY, ARRAY1, EMPTY)).isEqualTo(ARRAY1); assertThat(Doubles.concat(ARRAY1, ARRAY1, ARRAY1)).isEqualTo(new double[] {1.0, 1.0, 1.0}); assertThat(Doubles.concat(ARRAY1, ARRAY234)).isEqualTo(new double[] {1.0, 2.0, 3.0, 4.0}); } @GwtIncompatible // different overflow behavior; could probably be made to work by using ~~ public void 
testConcat_overflow_negative() { int dim1 = 1 << 16; int dim2 = 1 << 15; assertThat(dim1 * dim2).isLessThan(0); testConcatOverflow(dim1, dim2); } @GwtIncompatible // different overflow behavior; could probably be made to work by using ~~ public void testConcat_overflow_nonNegative() { int dim1 = 1 << 16; int dim2 = 1 << 16; assertThat(dim1 * dim2).isAtLeast(0); testConcatOverflow(dim1, dim2); } private static void testConcatOverflow(int arraysDim1, int arraysDim2) { assertThat((long) arraysDim1 * arraysDim2).isNotEqualTo((long) (arraysDim1 * arraysDim2)); double[][] arrays = new double[arraysDim1][]; // it's shared to avoid using too much memory in tests double[] sharedArray = new double[arraysDim2]; Arrays.fill(arrays, sharedArray); try { Doubles.concat(arrays); fail(); } catch (IllegalArgumentException expected) { } } public void testEnsureCapacity() { assertThat(Doubles.ensureCapacity(EMPTY, 0, 1)).isSameInstanceAs(EMPTY); assertThat(Doubles.ensureCapacity(ARRAY1, 0, 1)).isSameInstanceAs(ARRAY1); assertThat(Doubles.ensureCapacity(ARRAY1, 1, 1)).isSameInstanceAs(ARRAY1); assertThat(Arrays.equals(new double[] {1.0, 0.0, 0.0}, Doubles.ensureCapacity(ARRAY1, 2, 1))) .isTrue(); } public void testEnsureCapacity_fail() { assertThrows(IllegalArgumentException.class, () -> Doubles.ensureCapacity(ARRAY1, -1, 1)); assertThrows(IllegalArgumentException.class, () -> Doubles.ensureCapacity(ARRAY1, 1, -1)); } @GwtIncompatible // Double.toString returns different value in GWT. 
public void testJoin() { assertThat(Doubles.join(",", EMPTY)).isEmpty(); assertThat(Doubles.join(",", ARRAY1)).isEqualTo("1.0"); assertThat(Doubles.join(",", 1.0, 2.0)).isEqualTo("1.0,2.0"); assertThat(Doubles.join("", 1.0, 2.0, 3.0)).isEqualTo("1.02.03.0"); } public void testJoinNonTrivialDoubles() { assertThat(Doubles.join(",", EMPTY)).isEmpty(); assertThat(Doubles.join(",", 1.2)).isEqualTo("1.2"); assertThat(Doubles.join(",", 1.3, 2.4)).isEqualTo("1.3,2.4"); assertThat(Doubles.join("", 1.4, 2.5, 3.6)).isEqualTo("1.42.53.6"); } public void testLexicographicalComparator() { List<double[]> ordered = Arrays.asList( new double[] {}, new double[] {LEAST}, new double[] {LEAST, LEAST}, new double[] {LEAST, 1.0}, new double[] {1.0}, new double[] {1.0, LEAST}, new double[] {GREATEST, Double.MAX_VALUE}, new double[] {GREATEST, GREATEST}, new double[] {GREATEST, GREATEST, GREATEST}); Comparator<double[]> comparator = Doubles.lexicographicalComparator(); Helpers.testComparator(comparator, ordered); } public void testReverse() { testReverse(new double[] {}, new double[] {}); testReverse(new double[] {1}, new double[] {1}); testReverse(new double[] {1, 2}, new double[] {2, 1}); testReverse(new double[] {3, 1, 1}, new double[] {1, 1, 3}); testReverse(new double[] {-1, 1, -2, 2}, new double[] {2, -2, 1, -1}); } private static void testReverse(double[] input, double[] expectedOutput) { input = Arrays.copyOf(input, input.length); Doubles.reverse(input); assertThat(input).isEqualTo(expectedOutput); } private static void testReverse( double[] input, int fromIndex, int toIndex, double[] expectedOutput) { input = Arrays.copyOf(input, input.length); Doubles.reverse(input, fromIndex, toIndex); assertThat(input).isEqualTo(expectedOutput); } public void testReverseIndexed() { testReverse(new double[] {}, 0, 0, new double[] {}); testReverse(new double[] {1}, 0, 1, new double[] {1}); testReverse(new double[] {1, 2}, 0, 2, new double[] {2, 1}); testReverse(new double[] {3, 1, 1}, 0, 2, new 
double[] {1, 3, 1}); testReverse(new double[] {3, 1, 1}, 0, 1, new double[] {3, 1, 1}); testReverse(new double[] {-1, 1, -2, 2}, 1, 3, new double[] {-1, -2, 1, 2}); } private static void testRotate(double[] input, int distance, double[] expectedOutput) { input = Arrays.copyOf(input, input.length); Doubles.rotate(input, distance); assertThat(input).isEqualTo(expectedOutput); } private static void testRotate( double[] input, int distance, int fromIndex, int toIndex, double[] expectedOutput) { input = Arrays.copyOf(input, input.length); Doubles.rotate(input, distance, fromIndex, toIndex); assertThat(input).isEqualTo(expectedOutput); } public void testRotate() { testRotate(new double[] {}, -1, new double[] {}); testRotate(new double[] {}, 0, new double[] {}); testRotate(new double[] {}, 1, new double[] {}); testRotate(new double[] {1}, -2, new double[] {1}); testRotate(new double[] {1}, -1, new double[] {1}); testRotate(new double[] {1}, 0, new double[] {1}); testRotate(new double[] {1}, 1, new double[] {1}); testRotate(new double[] {1}, 2, new double[] {1}); testRotate(new double[] {1, 2}, -3, new double[] {2, 1}); testRotate(new double[] {1, 2}, -1, new double[] {2, 1}); testRotate(new double[] {1, 2}, -2, new double[] {1, 2}); testRotate(new double[] {1, 2}, 0, new double[] {1, 2}); testRotate(new double[] {1, 2}, 1, new double[] {2, 1}); testRotate(new double[] {1, 2}, 2, new double[] {1, 2}); testRotate(new double[] {1, 2}, 3, new double[] {2, 1}); testRotate(new double[] {1, 2, 3}, -5, new double[] {3, 1, 2}); testRotate(new double[] {1, 2, 3}, -4, new double[] {2, 3, 1}); testRotate(new double[] {1, 2, 3}, -3, new double[] {1, 2, 3}); testRotate(new double[] {1, 2, 3}, -2, new double[] {3, 1, 2}); testRotate(new double[] {1, 2, 3}, -1, new double[] {2, 3, 1}); testRotate(new double[] {1, 2, 3}, 0, new double[] {1, 2, 3}); testRotate(new double[] {1, 2, 3}, 1, new double[] {3, 1, 2}); testRotate(new double[] {1, 2, 3}, 2, new double[] {2, 3, 1}); testRotate(new 
double[] {1, 2, 3}, 3, new double[] {1, 2, 3}); testRotate(new double[] {1, 2, 3}, 4, new double[] {3, 1, 2}); testRotate(new double[] {1, 2, 3}, 5, new double[] {2, 3, 1}); testRotate(new double[] {1, 2, 3, 4}, -9, new double[] {2, 3, 4, 1}); testRotate(new double[] {1, 2, 3, 4}, -5, new double[] {2, 3, 4, 1}); testRotate(new double[] {1, 2, 3, 4}, -1, new double[] {2, 3, 4, 1}); testRotate(new double[] {1, 2, 3, 4}, 0, new double[] {1, 2, 3, 4}); testRotate(new double[] {1, 2, 3, 4}, 1, new double[] {4, 1, 2, 3}); testRotate(new double[] {1, 2, 3, 4}, 5, new double[] {4, 1, 2, 3}); testRotate(new double[] {1, 2, 3, 4}, 9, new double[] {4, 1, 2, 3}); testRotate(new double[] {1, 2, 3, 4, 5}, -6, new double[] {2, 3, 4, 5, 1}); testRotate(new double[] {1, 2, 3, 4, 5}, -4, new double[] {5, 1, 2, 3, 4}); testRotate(new double[] {1, 2, 3, 4, 5}, -3, new double[] {4, 5, 1, 2, 3}); testRotate(new double[] {1, 2, 3, 4, 5}, -1, new double[] {2, 3, 4, 5, 1}); testRotate(new double[] {1, 2, 3, 4, 5}, 0, new double[] {1, 2, 3, 4, 5}); testRotate(new double[] {1, 2, 3, 4, 5}, 1, new double[] {5, 1, 2, 3, 4}); testRotate(new double[] {1, 2, 3, 4, 5}, 3, new double[] {3, 4, 5, 1, 2}); testRotate(new double[] {1, 2, 3, 4, 5}, 4, new double[] {2, 3, 4, 5, 1}); testRotate(new double[] {1, 2, 3, 4, 5}, 6, new double[] {5, 1, 2, 3, 4}); } public void testRotateIndexed() { testRotate(new double[] {}, 0, 0, 0, new double[] {}); testRotate(new double[] {1}, 0, 0, 1, new double[] {1}); testRotate(new double[] {1}, 1, 0, 1, new double[] {1}); testRotate(new double[] {1}, 1, 1, 1, new double[] {1}); // Rotate the central 5 elements, leaving the ends as-is testRotate(new double[] {0, 1, 2, 3, 4, 5, 6}, -6, 1, 6, new double[] {0, 2, 3, 4, 5, 1, 6}); testRotate(new double[] {0, 1, 2, 3, 4, 5, 6}, -1, 1, 6, new double[] {0, 2, 3, 4, 5, 1, 6}); testRotate(new double[] {0, 1, 2, 3, 4, 5, 6}, 0, 1, 6, new double[] {0, 1, 2, 3, 4, 5, 6}); testRotate(new double[] {0, 1, 2, 3, 4, 5, 6}, 5, 1, 6, new 
double[] {0, 1, 2, 3, 4, 5, 6}); testRotate(new double[] {0, 1, 2, 3, 4, 5, 6}, 14, 1, 6, new double[] {0, 2, 3, 4, 5, 1, 6}); // Rotate the first three elements testRotate(new double[] {0, 1, 2, 3, 4, 5, 6}, -2, 0, 3, new double[] {2, 0, 1, 3, 4, 5, 6}); testRotate(new double[] {0, 1, 2, 3, 4, 5, 6}, -1, 0, 3, new double[] {1, 2, 0, 3, 4, 5, 6}); testRotate(new double[] {0, 1, 2, 3, 4, 5, 6}, 0, 0, 3, new double[] {0, 1, 2, 3, 4, 5, 6}); testRotate(new double[] {0, 1, 2, 3, 4, 5, 6}, 1, 0, 3, new double[] {2, 0, 1, 3, 4, 5, 6}); testRotate(new double[] {0, 1, 2, 3, 4, 5, 6}, 2, 0, 3, new double[] {1, 2, 0, 3, 4, 5, 6}); // Rotate the last four elements testRotate(new double[] {0, 1, 2, 3, 4, 5, 6}, -6, 3, 7, new double[] {0, 1, 2, 5, 6, 3, 4}); testRotate(new double[] {0, 1, 2, 3, 4, 5, 6}, -5, 3, 7, new double[] {0, 1, 2, 4, 5, 6, 3}); testRotate(new double[] {0, 1, 2, 3, 4, 5, 6}, -4, 3, 7, new double[] {0, 1, 2, 3, 4, 5, 6}); testRotate(new double[] {0, 1, 2, 3, 4, 5, 6}, -3, 3, 7, new double[] {0, 1, 2, 6, 3, 4, 5}); testRotate(new double[] {0, 1, 2, 3, 4, 5, 6}, -2, 3, 7, new double[] {0, 1, 2, 5, 6, 3, 4}); testRotate(new double[] {0, 1, 2, 3, 4, 5, 6}, -1, 3, 7, new double[] {0, 1, 2, 4, 5, 6, 3}); testRotate(new double[] {0, 1, 2, 3, 4, 5, 6}, 0, 3, 7, new double[] {0, 1, 2, 3, 4, 5, 6}); testRotate(new double[] {0, 1, 2, 3, 4, 5, 6}, 1, 3, 7, new double[] {0, 1, 2, 6, 3, 4, 5}); testRotate(new double[] {0, 1, 2, 3, 4, 5, 6}, 2, 3, 7, new double[] {0, 1, 2, 5, 6, 3, 4}); testRotate(new double[] {0, 1, 2, 3, 4, 5, 6}, 3, 3, 7, new double[] {0, 1, 2, 4, 5, 6, 3}); } public void testSortDescending() { testSortDescending(new double[] {}, new double[] {}); testSortDescending(new double[] {1}, new double[] {1}); testSortDescending(new double[] {1, 2}, new double[] {2, 1}); testSortDescending(new double[] {1, 3, 1}, new double[] {3, 1, 1}); testSortDescending(new double[] {-1, 1, -2, 2}, new double[] {2, 1, -1, -2}); testSortDescending( new double[] {-1, 1, 
Double.NaN, -2, -0.0, 0, 2}, new double[] {Double.NaN, 2, 1, 0, -0.0, -1, -2}); } private static void testSortDescending(double[] input, double[] expectedOutput) { input = Arrays.copyOf(input, input.length); Doubles.sortDescending(input); for (int i = 0; i < input.length; i++) { assertThat(input[i]).isEqualTo(expectedOutput[i]); } } private static void testSortDescending( double[] input, int fromIndex, int toIndex, double[] expectedOutput) { input = Arrays.copyOf(input, input.length); Doubles.sortDescending(input, fromIndex, toIndex); for (int i = 0; i < input.length; i++) { assertThat(input[i]).isEqualTo(expectedOutput[i]); } } public void testSortDescendingIndexed() { testSortDescending(new double[] {}, 0, 0, new double[] {}); testSortDescending(new double[] {1}, 0, 1, new double[] {1}); testSortDescending(new double[] {1, 2}, 0, 2, new double[] {2, 1}); testSortDescending(new double[] {1, 3, 1}, 0, 2, new double[] {3, 1, 1}); testSortDescending(new double[] {1, 3, 1}, 0, 1, new double[] {1, 3, 1}); testSortDescending(new double[] {-1, -2, 1, 2}, 1, 3, new double[] {-1, 1, -2, 2}); testSortDescending( new double[] {-1, 1, Double.NaN, -2, 2}, 1, 4, new double[] {-1, Double.NaN, 1, -2, 2}); } @J2ktIncompatible @GwtIncompatible // SerializableTester public void testLexicographicalComparatorSerializable() { Comparator<double[]> comparator = Doubles.lexicographicalComparator(); assertThat(SerializableTester.reserialize(comparator)).isSameInstanceAs(comparator); } @J2ktIncompatible @GwtIncompatible // SerializableTester public void testStringConverterSerialization() { SerializableTester.reserializeAndAssert(Doubles.stringConverter()); } public void testToArray() { // need explicit type parameter to avoid javac warning!? 
List<Double> none = Arrays.<Double>asList(); assertThat(Doubles.toArray(none)).isEqualTo(EMPTY); List<Double> one = Arrays.asList(1.0); assertThat(Doubles.toArray(one)).isEqualTo(ARRAY1); double[] array = {0.0, 1.0, Math.PI}; List<Double> three = Arrays.asList(0.0, 1.0, Math.PI); assertThat(Doubles.toArray(three)).isEqualTo(array); assertThat(Doubles.toArray(Doubles.asList(array))).isEqualTo(array); } public void testToArray_threadSafe() { for (int delta : new int[] {+1, 0, -1}) { for (int i = 0; i < VALUES.length; i++) { List<Double> list = Doubles.asList(VALUES).subList(0, i); Collection<Double> misleadingSize = Helpers.misleadingSizeCollection(delta); misleadingSize.addAll(list); double[] arr = Doubles.toArray(misleadingSize); assertThat(arr.length).isEqualTo(i); for (int j = 0; j < i; j++) { assertThat(arr[j]).isEqualTo(VALUES[j]); } } } } public void testToArray_withNull() { List<@Nullable Double> list = Arrays.asList(0.0, 1.0, null); assertThrows(NullPointerException.class, () -> Doubles.toArray(list)); } public void testToArray_withConversion() { double[] array = {0.0, 1.0, 2.0}; List<Byte> bytes = Arrays.asList((byte) 0, (byte) 1, (byte) 2); List<Short> shorts = Arrays.asList((short) 0, (short) 1, (short) 2); List<Integer> ints = Arrays.asList(0, 1, 2); List<Float> floats = Arrays.asList(0.0f, 1.0f, 2.0f); List<Long> longs = Arrays.asList(0L, 1L, 2L); List<Double> doubles = Arrays.asList(0.0, 1.0, 2.0); assertThat(Doubles.toArray(bytes)).isEqualTo(array); assertThat(Doubles.toArray(shorts)).isEqualTo(array); assertThat(Doubles.toArray(ints)).isEqualTo(array); assertThat(Doubles.toArray(floats)).isEqualTo(array); assertThat(Doubles.toArray(longs)).isEqualTo(array); assertThat(Doubles.toArray(doubles)).isEqualTo(array); } @J2ktIncompatible // b/239034072: Kotlin varargs copy parameter arrays. 
public void testAsList_isAView() { double[] array = {0.0, 1.0}; List<Double> list = Doubles.asList(array); list.set(0, 2.0); assertThat(array).isEqualTo(new double[] {2.0, 1.0}); array[1] = 3.0; assertThat(list).containsExactly(2.0, 3.0).inOrder(); } public void testAsList_toArray_roundTrip() { double[] array = {0.0, 1.0, 2.0}; List<Double> list = Doubles.asList(array); double[] newArray = Doubles.toArray(list); // Make sure it returned a copy list.set(0, 4.0); assertThat(newArray).isEqualTo(new double[] {0.0, 1.0, 2.0}); newArray[1] = 5.0; assertThat((double) list.get(1)).isEqualTo(1.0); } // This test stems from a real bug found by andrewk public void testAsList_subList_toArray_roundTrip() { double[] array = {0.0, 1.0, 2.0, 3.0}; List<Double> list = Doubles.asList(array); assertThat(Doubles.toArray(list.subList(1, 3))).isEqualTo(new double[] {1.0, 2.0}); assertThat(Doubles.toArray(list.subList(2, 2))).isEmpty(); } // `primitives` can't depend on `collect`, so this is what the prod code has to return. @SuppressWarnings("EmptyList") public void testAsListEmpty() { assertThat(Doubles.asList(EMPTY)).isSameInstanceAs(Collections.emptyList()); } /** * A reference implementation for {@code tryParse} that just catches the exception from {@link * Double#valueOf}. */ private static @Nullable Double referenceTryParse(String input) { if (input.trim().length() < input.length()) { return null; } try { return Double.valueOf(input); } catch (NumberFormatException e) { return null; } } @GwtIncompatible // Doubles.tryParse private static void checkTryParse(String input) { Double expected = referenceTryParse(input); assertThat(Doubles.tryParse(input)).isEqualTo(expected); if (expected != null && !Doubles.FLOATING_POINT_PATTERN.matcher(input).matches()) { // TODO(cpovirk): Use SourceCodeEscapers if it is added to Guava. 
StringBuilder escapedInput = new StringBuilder(); for (char c : input.toCharArray()) { if (c >= 0x20 && c <= 0x7E) { escapedInput.append(c); } else { escapedInput.append(String.format("\\u%04x", (int) c)); } } fail("FLOATING_POINT_PATTERN should have matched valid input <" + escapedInput + ">"); } } @GwtIncompatible // Doubles.tryParse private static void checkTryParse(double expected, String input) { assertThat(Doubles.tryParse(input)).isEqualTo(Double.valueOf(expected)); assertThat(input) .matches( Pattern.compile( Doubles.FLOATING_POINT_PATTERN.pattern(), Doubles.FLOATING_POINT_PATTERN.flags())); } @GwtIncompatible // Doubles.tryParse public void testTryParseHex() { for (String signChar : ImmutableList.of("", "+", "-")) { for (String hexPrefix : ImmutableList.of("0x", "0X")) { for (String iPart : ImmutableList.of("", "0", "1", "F", "f", "c4", "CE")) { for (String fPart : ImmutableList.of("", ".", ".F", ".52", ".a")) { for (String expMarker : ImmutableList.of("p", "P")) { for (String exponent : ImmutableList.of("0", "-5", "+20", "52")) { for (String typePart : ImmutableList.of("", "D", "F", "d", "f")) { checkTryParse( signChar + hexPrefix + iPart + fPart + expMarker + exponent + typePart); } } } } } } } } @AndroidIncompatible // slow @GwtIncompatible // Doubles.tryParse public void testTryParseAllCodePoints() { // Exercise non-ASCII digit test cases and the like. 
char[] tmp = new char[2]; for (int i = Character.MIN_CODE_POINT; i < Character.MAX_CODE_POINT; i++) { Character.toChars(i, tmp, 0); checkTryParse(String.copyValueOf(tmp, 0, Character.charCount(i))); } } @GwtIncompatible // Doubles.tryParse public void testTryParseOfToStringIsOriginal() { for (double d : NUMBERS) { checkTryParse(d, Double.toString(d)); } } @J2ktIncompatible // hexadecimal doubles @GwtIncompatible // Doubles.tryParse public void testTryParseOfToHexStringIsOriginal() { for (double d : NUMBERS) { checkTryParse(d, Double.toHexString(d)); } } @GwtIncompatible // Doubles.tryParse public void testTryParseNaN() { checkTryParse("NaN"); checkTryParse("+NaN"); checkTryParse("-NaN"); } @GwtIncompatible // Doubles.tryParse public void testTryParseInfinity() { checkTryParse(Double.POSITIVE_INFINITY, "Infinity"); checkTryParse(Double.POSITIVE_INFINITY, "+Infinity"); checkTryParse(Double.NEGATIVE_INFINITY, "-Infinity"); } private static final String[] BAD_TRY_PARSE_INPUTS = { "", "+-", "+-0", " 5", "32 ", " 55 ", "infinity", "POSITIVE_INFINITY", "0x9A", "0x9A.bE-5", ".", ".e5", "NaNd", "InfinityF" }; @GwtIncompatible // Doubles.tryParse public void testTryParseFailures() { for (String badInput : BAD_TRY_PARSE_INPUTS) { assertThat(badInput) .doesNotMatch( Pattern.compile( Doubles.FLOATING_POINT_PATTERN.pattern(), Doubles.FLOATING_POINT_PATTERN.flags())); assertThat(Doubles.tryParse(badInput)).isEqualTo(referenceTryParse(badInput)); assertThat(Doubles.tryParse(badInput)).isNull(); } } @J2ktIncompatible @GwtIncompatible // NullPointerTester public void testNulls() { new NullPointerTester().testAllPublicStaticMethods(Doubles.class); } public void testStringConverter_convert() { Converter<String, Double> converter = Doubles.stringConverter(); assertThat(converter.convert("1.0")).isEqualTo(1.0); assertThat(converter.convert("0.0")).isEqualTo(0.0); assertThat(converter.convert("-1.0")).isEqualTo(-1.0); assertThat(converter.convert("1")).isEqualTo(1.0); 
assertThat(converter.convert("0")).isEqualTo(0.0); assertThat(converter.convert("-1")).isEqualTo(-1.0); assertThat(converter.convert("1e6")).isEqualTo(1e6); assertThat(converter.convert("1e-6")).isEqualTo(1e-6); } public void testStringConverter_convertError() { assertThrows( NumberFormatException.class, () -> Doubles.stringConverter().convert("notanumber")); } public void testStringConverter_nullConversions() { assertThat(Doubles.stringConverter().convert(null)).isNull(); assertThat(Doubles.stringConverter().reverse().convert(null)).isNull(); } @GwtIncompatible // Double.toString returns different value in GWT. public void testStringConverter_reverse() { Converter<String, Double> converter = Doubles.stringConverter(); assertThat(converter.reverse().convert(1.0)).isEqualTo("1.0"); assertThat(converter.reverse().convert(0.0)).isEqualTo("0.0"); assertThat(converter.reverse().convert(-1.0)).isEqualTo("-1.0"); assertThat(converter.reverse().convert(1e6)).isEqualTo("1000000.0"); assertThat(converter.reverse().convert(1e-6)).isEqualTo("1.0E-6"); } @J2ktIncompatible @GwtIncompatible // NullPointerTester public void testStringConverter_nullPointerTester() throws Exception { NullPointerTester tester = new NullPointerTester(); tester.testAllPublicInstanceMethods(Doubles.stringConverter()); } @GwtIncompatible public void testTryParse_withNullNoGwt() { assertThat(Doubles.tryParse("null")).isNull(); assertThrows(NullPointerException.class, () -> Doubles.tryParse(null)); } }
DoublesTest
java
quarkusio__quarkus
extensions/websockets-next/deployment/src/test/java/io/quarkus/websockets/next/test/telemetry/endpoints/onerror/ErroneousServerEndpoint_OnOpen.java
{ "start": 288, "end": 539 }
class ____ { @OnOpen public Uni<Void> onOpen() { return Uni.createFrom().failure(new IllegalStateException("Expected failure")); } @OnTextMessage public void onMessage(String message) { } }
ErroneousServerEndpoint_OnOpen
java
apache__hadoop
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/monitor/ContainersMonitorImpl.java
{ "start": 19061, "end": 36274 }
class ____ extends SubjectInheritingThread { MonitoringThread() { super("Container Monitor"); } @Override public void work() { while (!stopped && !Thread.currentThread().isInterrupted()) { long start = Time.monotonicNow(); // Print the processTrees for debugging. if (LOG.isDebugEnabled()) { StringBuilder tmp = new StringBuilder("[ "); for (ProcessTreeInfo p : trackingContainers.values()) { tmp.append(p.getPID()); tmp.append(" "); } tmp.append("]"); LOG.debug("Current ProcessTree list : {}", tmp); } // Temporary structure to calculate the total resource utilization of // the containers ResourceUtilization trackedContainersUtilization = ResourceUtilization.newInstance(0, 0, 0.0f); // Now do the monitoring for the trackingContainers // Check memory usage and kill any overflowing containers long vmemUsageByAllContainers = 0; long pmemByAllContainers = 0; long cpuUsagePercentPerCoreByAllContainers = 0; for (Entry<ContainerId, ProcessTreeInfo> entry : trackingContainers .entrySet()) { ContainerId containerId = entry.getKey(); ProcessTreeInfo ptInfo = entry.getValue(); try { // Initialize uninitialized process trees initializeProcessTrees(entry); String pId = ptInfo.getPID(); if (pId == null || !isResourceCalculatorAvailable()) { continue; // processTree cannot be tracked } LOG.debug( "Constructing ProcessTree for : PID = {} ContainerId = {}", pId, containerId); ResourceCalculatorProcessTree pTree = ptInfo.getProcessTree(); pTree.updateProcessTree(); // update process-tree long currentVmemUsage = pTree.getVirtualMemorySize(); long currentPmemUsage = pTree.getRssMemorySize(); if (currentVmemUsage < 0 || currentPmemUsage < 0) { // YARN-6862/YARN-5021 If the container just exited or for // another reason the physical/virtual memory is UNAVAILABLE (-1) // the values shouldn't be aggregated. 
LOG.info("Skipping monitoring container {} because " + "memory usage is not available.", containerId); continue; } // if machine has 6 cores and 3 are used, // cpuUsagePercentPerCore should be 300% float cpuUsagePercentPerCore = pTree.getCpuUsagePercent(); if (cpuUsagePercentPerCore < 0) { // CPU usage is not available likely because the container just // started. Let us skip this turn and consider this container // in the next iteration. LOG.info("Skipping monitoring container {} since " + "CPU usage is not yet available.", containerId); continue; } recordUsage(containerId, pId, pTree, ptInfo, currentVmemUsage, currentPmemUsage, trackedContainersUtilization); checkLimit(containerId, pId, pTree, ptInfo, currentVmemUsage, currentPmemUsage); // Accounting the total memory in usage for all containers vmemUsageByAllContainers += currentVmemUsage; pmemByAllContainers += currentPmemUsage; // Accounting the total cpu usage for all containers cpuUsagePercentPerCoreByAllContainers += cpuUsagePercentPerCore; reportResourceUsage(containerId, currentPmemUsage, cpuUsagePercentPerCore); } catch (Exception e) { // Log the exception and proceed to the next container. LOG.warn("Uncaught exception in ContainersMonitorImpl " + "while monitoring resource of {}", containerId, e); } } LOG.debug("Total Resource Usage stats in NM by all containers : " + "Virtual Memory= {}, Physical Memory= {}, " + "Total CPU usage(% per core)= {}", vmemUsageByAllContainers, pmemByAllContainers, cpuUsagePercentPerCoreByAllContainers); // Save the aggregated utilization of the containers setContainersUtilization(trackedContainersUtilization); long duration = Time.monotonicNow() - start; LOG.debug("Finished monitoring container cost {} ms", duration); // Publish the container utilization metrics to node manager // metrics system. 
NodeManagerMetrics nmMetrics = context.getNodeManagerMetrics(); if (nmMetrics != null) { nmMetrics.setContainerUsedMemGB( trackedContainersUtilization.getPhysicalMemory()); nmMetrics.setContainerUsedVMemGB( trackedContainersUtilization.getVirtualMemory()); nmMetrics.setContainerCpuUtilization( trackedContainersUtilization.getCPU()); nmMetrics.addContainerMonitorCostTime(duration); } try { Thread.sleep(monitoringInterval); } catch (InterruptedException e) { LOG.warn("{} is interrupted. Exiting.", ContainersMonitorImpl.class.getName()); break; } } } /** * Initialize any uninitialized processTrees. * @param entry process tree entry to fill in */ private void initializeProcessTrees( Entry<ContainerId, ProcessTreeInfo> entry) throws ContainerExecutionException { ContainerId containerId = entry.getKey(); ProcessTreeInfo ptInfo = entry.getValue(); String pId = ptInfo.getPID(); // Initialize any uninitialized processTrees if (pId == null) { // get pid from ContainerId pId = containerExecutor.getProcessId(ptInfo.getContainerId()); if (pId != null) { // pId will be null, either if the container is not spawned yet // or if the container's pid is removed from ContainerExecutor LOG.debug("Tracking ProcessTree {} for the first time", pId); ResourceCalculatorProcessTree pt = getResourceCalculatorProcessTree(pId); ptInfo.setPid(pId); ptInfo.setProcessTree(pt); if (containerMetricsEnabled) { ContainerMetrics usageMetrics = ContainerMetrics .forContainer(containerId, containerMetricsPeriodMs, containerMetricsUnregisterDelayMs); usageMetrics.recordProcessId(pId); } Container container = context.getContainers().get(containerId); if (container != null) { String[] ipAndHost = containerExecutor.getIpAndHost(container); if ((ipAndHost != null) && (ipAndHost[0] != null) && (ipAndHost[1] != null)) { container.setIpAndHost(ipAndHost); LOG.info("{}'s ip = {}, and hostname = {}", containerId, ipAndHost[0], ipAndHost[1]); } else { LOG.info("Can not get both ip and hostname: {}", 
Arrays.toString(ipAndHost)); } String exposedPorts = containerExecutor.getExposedPorts(container); container.setExposedPorts(exposedPorts); } else { LOG.info("{} is missing. Not setting ip and hostname", containerId); } } } // End of initializing any uninitialized processTrees } /** * Record usage metrics. * @param containerId container id * @param pId process id * @param pTree valid process tree entry with CPU measurement * @param ptInfo process tree info with limit information * @param currentVmemUsage virtual memory measurement * @param currentPmemUsage physical memory measurement * @param trackedContainersUtilization utilization tracker to update */ private void recordUsage(ContainerId containerId, String pId, ResourceCalculatorProcessTree pTree, ProcessTreeInfo ptInfo, long currentVmemUsage, long currentPmemUsage, ResourceUtilization trackedContainersUtilization) { // if machine has 6 cores and 3 are used, // cpuUsagePercentPerCore should be 300% and // cpuUsageTotalCoresPercentage should be 50% float cpuUsagePercentPerCore = pTree.getCpuUsagePercent(); float cpuUsageTotalCoresPercentage = cpuUsagePercentPerCore / resourceCalculatorPlugin.getNumProcessors(); // Multiply by 1000 to avoid losing data when converting to int int milliVcoresUsed = (int) (cpuUsageTotalCoresPercentage * 1000 * maxVCoresAllottedForContainers /nodeCpuPercentageForYARN); long vmemLimit = ptInfo.getVmemLimit(); long pmemLimit = ptInfo.getPmemLimit(); if (AUDITLOG.isDebugEnabled()) { int vcoreLimit = ptInfo.getCpuVcores(); long cumulativeCpuTime = pTree.getCumulativeCpuTime(); AUDITLOG.debug( "Resource usage of ProcessTree {} for container-id {}:" + " {} %CPU: {} %CPU-cores: {}" + " vCores-used: {} of {} Cumulative-CPU-ms: {}", pId, containerId, formatUsageString( currentVmemUsage, vmemLimit, currentPmemUsage, pmemLimit), cpuUsagePercentPerCore, cpuUsageTotalCoresPercentage, milliVcoresUsed / 1000, vcoreLimit, cumulativeCpuTime); } // Add resource utilization for this container 
trackedContainersUtilization.addTo( (int) (currentPmemUsage >> 20), (int) (currentVmemUsage >> 20), milliVcoresUsed / 1000.0f); // Add usage to container metrics if (containerMetricsEnabled) { ContainerMetrics.forContainer( containerId, containerMetricsPeriodMs, containerMetricsUnregisterDelayMs).recordMemoryUsage( (int) (currentPmemUsage >> 20)); ContainerMetrics.forContainer( containerId, containerMetricsPeriodMs, containerMetricsUnregisterDelayMs).recordCpuUsage((int) cpuUsagePercentPerCore, milliVcoresUsed); } } /** * Check resource limits and take actions if the limit is exceeded. * @param containerId container id * @param pId process id * @param pTree valid process tree entry with CPU measurement * @param ptInfo process tree info with limit information * @param currentVmemUsage virtual memory measurement * @param currentPmemUsage physical memory measurement */ @SuppressWarnings("unchecked") private void checkLimit(ContainerId containerId, String pId, ResourceCalculatorProcessTree pTree, ProcessTreeInfo ptInfo, long currentVmemUsage, long currentPmemUsage) { if (strictMemoryEnforcement && !elasticMemoryEnforcement) { // When cgroup-based strict memory enforcement is used alone without // elastic memory control, the oom-kill would take care of it. // However, when elastic memory control is also enabled, the oom killer // would be disabled at the root yarn container cgroup level (all child // cgroups would inherit that setting). Hence, we fall back to the // polling-based mechanism. return; } boolean isMemoryOverLimit = false; String msg = ""; int containerExitStatus = ContainerExitStatus.INVALID; long vmemLimit = ptInfo.getVmemLimit(); long pmemLimit = ptInfo.getPmemLimit(); // as processes begin with an age 1, we want to see if there // are processes more than 1 iteration old. 
long curMemUsageOfAgedProcesses = pTree.getVirtualMemorySize(1); long curRssMemUsageOfAgedProcesses = pTree.getRssMemorySize(1); if (isVmemCheckEnabled() && isProcessTreeOverLimit(containerId.toString(), currentVmemUsage, curMemUsageOfAgedProcesses, vmemLimit)) { // The current usage (age=0) is always higher than the aged usage. We // do not show the aged size in the message, base the delta on the // current usage long delta = currentVmemUsage - vmemLimit; // Container (the root process) is still alive and overflowing // memory. // Dump the process-tree and then clean it up. msg = formatErrorMessage("virtual", formatUsageString(currentVmemUsage, vmemLimit, currentPmemUsage, pmemLimit), pId, containerId, pTree, delta); isMemoryOverLimit = true; containerExitStatus = ContainerExitStatus.KILLED_EXCEEDED_VMEM; } else if (isPmemCheckEnabled() && isProcessTreeOverLimit(containerId.toString(), currentPmemUsage, curRssMemUsageOfAgedProcesses, pmemLimit)) { // The current usage (age=0) is always higher than the aged usage. We // do not show the aged size in the message, base the delta on the // current usage long delta = currentPmemUsage - pmemLimit; // Container (the root process) is still alive and overflowing // memory. // Dump the process-tree and then clean it up. msg = formatErrorMessage("physical", formatUsageString(currentVmemUsage, vmemLimit, currentPmemUsage, pmemLimit), pId, containerId, pTree, delta); isMemoryOverLimit = true; containerExitStatus = ContainerExitStatus.KILLED_EXCEEDED_PMEM; } if (isMemoryOverLimit && trackingContainers.remove(containerId) != null) { // Virtual or physical memory over limit. 
Fail the container and // remove // the corresponding process tree LOG.warn(msg); // warn if not a leader if (!pTree.checkPidPgrpidForMatch()) { LOG.error("Killed container process with PID {} " + "but it is not a process group leader.", pId); } // kill the container eventDispatcher.getEventHandler().handle( new ContainerKillEvent(containerId, containerExitStatus, msg)); LOG.info("Removed ProcessTree with root {}", pId); } } /** * Report usage metrics to the timeline service. * @param containerId container id * @param currentPmemUsage physical memory measurement * @param cpuUsagePercentPerCore CPU usage */ private void reportResourceUsage(ContainerId containerId, long currentPmemUsage, float cpuUsagePercentPerCore) { ContainerImpl container = (ContainerImpl) context.getContainers().get(containerId); if (container != null) { NMTimelinePublisher nmMetricsPublisher = container.getNMTimelinePublisher(); if (nmMetricsPublisher != null) { nmMetricsPublisher.reportContainerResourceUsage(container, currentPmemUsage, cpuUsagePercentPerCore); } } else { LOG.info("{} does not exist to report", containerId); } } /** * Format string when memory limit has been exceeded. * @param memTypeExceeded type of memory * @param usageString general memory usage information string * @param pId process id * @param containerId container id * @param pTree process tree to dump full resource utilization graph * @return formatted resource usage information */ private String formatErrorMessage(String memTypeExceeded, String usageString, String pId, ContainerId containerId, ResourceCalculatorProcessTree pTree, long delta) { return String.format("Container [pid=%s,containerID=%s] is " + "running %dB beyond the '%S' memory limit. ", pId, containerId, delta, memTypeExceeded) + "Current usage: " + usageString + ". Killing container.\n" + "Dump of the process-tree for " + containerId + " :\n" + pTree.getProcessTreeDump(); } /** * Format memory usage string for reporting. 
* @param currentVmemUsage virtual memory usage * @param vmemLimit virtual memory limit * @param currentPmemUsage physical memory usage * @param pmemLimit physical memory limit * @return formatted memory information */ private String formatUsageString(long currentVmemUsage, long vmemLimit, long currentPmemUsage, long pmemLimit) { return String.format("%sB of %sB physical memory used; " + "%sB of %sB virtual memory used", TraditionalBinaryPrefix.long2String(currentPmemUsage, "", 1), TraditionalBinaryPrefix.long2String(pmemLimit, "", 1), TraditionalBinaryPrefix.long2String(currentVmemUsage, "", 1), TraditionalBinaryPrefix.long2String(vmemLimit, "", 1)); } } private
MonitoringThread
java
eclipse-vertx__vert.x
vertx-core/src/main/java/io/vertx/core/json/jackson/BufferDeserializer.java
{ "start": 898, "end": 1345 }
class ____ extends JsonDeserializer<Buffer> { @Override public Buffer deserialize(JsonParser p, DeserializationContext ctxt) throws IOException, JsonProcessingException { String text = p.getText(); try { return Buffer.buffer(BASE64_DECODER.decode(text)); } catch (IllegalArgumentException e) { throw new InvalidFormatException(p, "Expected a base64 encoded byte array", text, Instant.class); } } }
BufferDeserializer
java
FasterXML__jackson-databind
src/main/java/tools/jackson/databind/annotation/JsonSerialize.java
{ "start": 1284, "end": 1461 }
class ____ use for * serializing associated value. Depending on what is annotated, * value is either an instance of annotated class (used globally * anywhere where
to
java
elastic__elasticsearch
x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetTrialStatusRequest.java
{ "start": 558, "end": 943 }
class ____ extends MasterNodeReadRequest<GetTrialStatusRequest> { public GetTrialStatusRequest(TimeValue masterNodeTimeout) { super(masterNodeTimeout); } public GetTrialStatusRequest(StreamInput in) throws IOException { super(in); } @Override public ActionRequestValidationException validate() { return null; } }
GetTrialStatusRequest
java
apache__flink
flink-runtime/src/main/java/org/apache/flink/streaming/api/functions/aggregation/SumFunction.java
{ "start": 3284, "end": 3531 }
class ____ extends SumFunction { private static final long serialVersionUID = 1L; @Override public Object add(Object value1, Object value2) { return (byte) ((Byte) value1 + (Byte) value2); } } }
ByteSum
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/annotations/inheritance/SingleTableInheritanceEagerAssociationTest.java
{ "start": 3347, "end": 3697 }
class ____ { @Id private String addressId; private int version; protected Address() { } public Address(String addressId) { this.addressId = addressId; } public String getId() { return this.addressId; } public abstract User getUser(); } @Entity(name = "AddressA") @DiscriminatorValue("ADDRESS_A") public static
Address
java
google__guice
core/src/com/google/inject/internal/util/LineNumbers.java
{ "start": 6668, "end": 7555 }
class ____ extends MethodVisitor { LineNumberMethodVisitor() { super(ASM_API_LEVEL); } @Override public AnnotationVisitor visitAnnotation(String desc, boolean visible) { return new LineNumberAnnotationVisitor(); } @Override public AnnotationVisitor visitAnnotationDefault() { return new LineNumberAnnotationVisitor(); } @Override public void visitFieldInsn(int opcode, String owner, String name, String desc) { if (opcode == Opcodes.PUTFIELD && LineNumberReader.this.name.equals(owner) && !lines.containsKey(name) && line != -1) { lines.put(name, line); } } @Override public void visitLineNumber(int line, Label start) { LineNumberReader.this.visitLineNumber(line, start); } }
LineNumberMethodVisitor
java
alibaba__nacos
naming/src/test/java/com/alibaba/nacos/naming/healthcheck/v2/processor/HttpHealthCheckProcessorTest.java
{ "start": 2191, "end": 9945 }
class ____ { @Mock private HealthCheckCommonV2 healthCheckCommon; @Mock private SwitchDomain switchDomain; @Mock private HealthCheckTaskV2 healthCheckTaskV2; @Mock private Service service; @Mock private ClusterMetadata clusterMetadata; @Mock private IpPortBasedClient ipPortBasedClient; @Mock private HealthCheckInstancePublishInfo healthCheckInstancePublishInfo; @Mock private RestResult restResult; @Mock private ConnectException connectException; private HttpHealthCheckProcessor httpHealthCheckProcessor; @BeforeEach void setUp() { EnvUtil.setEnvironment(new MockEnvironment()); when(switchDomain.getHttpHealthParams()).thenReturn(new SwitchDomain.HttpHealthParams()); when(healthCheckTaskV2.getClient()).thenReturn(ipPortBasedClient); when(ipPortBasedClient.getInstancePublishInfo(service)).thenReturn(healthCheckInstancePublishInfo); httpHealthCheckProcessor = new HttpHealthCheckProcessor(healthCheckCommon, switchDomain); } @Test void testProcess() { httpHealthCheckProcessor.process(healthCheckTaskV2, service, clusterMetadata); verify(healthCheckTaskV2).getClient(); verify(healthCheckInstancePublishInfo).tryStartCheck(); } @Test void testGetType() { assertEquals(httpHealthCheckProcessor.getType(), HealthCheckType.HTTP.name()); } @Test void testConstructor() throws NoSuchMethodException, IllegalAccessException, InvocationTargetException, InstantiationException { Class<HttpHealthCheckProcessor> healthCheckProcessorClass = HttpHealthCheckProcessor.class; Class<?>[] classes = healthCheckProcessorClass.getDeclaredClasses(); Class<?> aClass = Arrays.stream(classes).findFirst().get(); Constructor<?> constructor = aClass.getConstructor(HttpHealthCheckProcessor.class, HealthCheckInstancePublishInfo.class, HealthCheckTaskV2.class, Service.class); Object objects = constructor.newInstance(httpHealthCheckProcessor, healthCheckInstancePublishInfo, healthCheckTaskV2, service); assertNotNull(objects); } @Test void testOnReceiveWithOK() throws NoSuchMethodException, 
IllegalAccessException, InvocationTargetException, InstantiationException, InterruptedException { Class<HttpHealthCheckProcessor> healthCheckProcessorClass = HttpHealthCheckProcessor.class; Class<?>[] classes = healthCheckProcessorClass.getDeclaredClasses(); Class<?> aClass = Arrays.stream(classes).findFirst().get(); Constructor<?> constructor = aClass.getConstructor(HttpHealthCheckProcessor.class, HealthCheckInstancePublishInfo.class, HealthCheckTaskV2.class, Service.class); Object objects = constructor.newInstance(httpHealthCheckProcessor, healthCheckInstancePublishInfo, healthCheckTaskV2, service); int code = HttpURLConnection.HTTP_OK; when(restResult.getCode()).thenReturn(code); Method onReceive = aClass.getMethod("onReceive", RestResult.class); onReceive.invoke(objects, restResult); //verify this.verifyCall(code); List<Integer> codeList = Stream.of(HttpURLConnection.HTTP_UNAVAILABLE, HttpURLConnection.HTTP_NOT_FOUND) .collect(Collectors.toList()); } @Test void testOnReceiveWithUnavailable() throws NoSuchMethodException, IllegalAccessException, InvocationTargetException, InstantiationException, InterruptedException { Class<HttpHealthCheckProcessor> healthCheckProcessorClass = HttpHealthCheckProcessor.class; Class<?>[] classes = healthCheckProcessorClass.getDeclaredClasses(); Class<?> aClass = Arrays.stream(classes).findFirst().get(); Constructor<?> constructor = aClass.getConstructor(HttpHealthCheckProcessor.class, HealthCheckInstancePublishInfo.class, HealthCheckTaskV2.class, Service.class); Object objects = constructor.newInstance(httpHealthCheckProcessor, healthCheckInstancePublishInfo, healthCheckTaskV2, service); int code = HttpURLConnection.HTTP_UNAVAILABLE; when(restResult.getCode()).thenReturn(code); Method onReceive = aClass.getMethod("onReceive", RestResult.class); onReceive.invoke(objects, restResult); //verify this.verifyCall(code); } @Test void testOnReceiveWithNotFound() throws NoSuchMethodException, IllegalAccessException, 
InvocationTargetException, InstantiationException, InterruptedException { Class<HttpHealthCheckProcessor> healthCheckProcessorClass = HttpHealthCheckProcessor.class; Class<?>[] classes = healthCheckProcessorClass.getDeclaredClasses(); Class<?> aClass = Arrays.stream(classes).findFirst().get(); Constructor<?> constructor = aClass.getConstructor(HttpHealthCheckProcessor.class, HealthCheckInstancePublishInfo.class, HealthCheckTaskV2.class, Service.class); Object objects = constructor.newInstance(httpHealthCheckProcessor, healthCheckInstancePublishInfo, healthCheckTaskV2, service); int code = HttpURLConnection.HTTP_NOT_FOUND; when(restResult.getCode()).thenReturn(code); Method onReceive = aClass.getMethod("onReceive", RestResult.class); onReceive.invoke(objects, restResult); //verify this.verifyCall(code); } private void verifyCall(int code) { switch (code) { case HttpURLConnection.HTTP_OK: verify(healthCheckCommon).checkOk(healthCheckTaskV2, service, "http:" + restResult.getCode()); break; case HttpURLConnection.HTTP_UNAVAILABLE: verify(healthCheckCommon).checkFail(healthCheckTaskV2, service, "http:" + restResult.getCode()); verify(healthCheckCommon).reEvaluateCheckRt(healthCheckTaskV2.getCheckRtNormalized() * 2, healthCheckTaskV2, switchDomain.getHttpHealthParams()); break; case HttpURLConnection.HTTP_NOT_FOUND: verify(healthCheckCommon).checkFailNow(healthCheckTaskV2, service, "http:" + restResult.getCode()); verify(healthCheckCommon).reEvaluateCheckRt(switchDomain.getHttpHealthParams().getMax(), healthCheckTaskV2, switchDomain.getHttpHealthParams()); break; default: } } @Test void testOnError() throws NoSuchMethodException, IllegalAccessException, InvocationTargetException, InstantiationException { Class<HttpHealthCheckProcessor> healthCheckProcessorClass = HttpHealthCheckProcessor.class; Class<?>[] classes = healthCheckProcessorClass.getDeclaredClasses(); Class<?> aClass = Arrays.stream(classes).findFirst().get(); Constructor<?> constructor = 
aClass.getConstructor(HttpHealthCheckProcessor.class, HealthCheckInstancePublishInfo.class, HealthCheckTaskV2.class, Service.class); Object objects = constructor.newInstance(httpHealthCheckProcessor, healthCheckInstancePublishInfo, healthCheckTaskV2, service); Method onReceive = aClass.getMethod("onError", Throwable.class); onReceive.invoke(objects, connectException); verify(healthCheckCommon).checkFailNow(healthCheckTaskV2, service, "http:unable2connect:" + connectException.getMessage()); verify(healthCheckCommon).reEvaluateCheckRt(switchDomain.getHttpHealthParams().getMax(), healthCheckTaskV2, switchDomain.getHttpHealthParams()); } }
HttpHealthCheckProcessorTest
java
alibaba__druid
core/src/test/java/com/alibaba/druid/bvt/utils/DruidWebUtilsTest.java
{ "start": 240, "end": 2497 }
class ____ extends TestCase { public void test_getContextPath_2_5() throws Exception { new DruidWebUtils(); MockServletContext context = new MockServletContext() { public int getMajorVersion() { return 2; } public int getMinorVersion() { return 4; } public String getContextPath() { throw new NoSuchMethodError(); } }; assertNull(DruidWebUtils.getContextPath(context)); } /** * @throws Exception * @see <a href="https://github.com/alibaba/druid/issues/5180">...</a> * see <a href="https://github.com/alibaba/druid/issues/5072">...</a> */ public void test_getxforwardip() throws Exception { MockHttpServletRequest request1 = new MockHttpServletRequest() { public String getHeader(String name) { if ("X-Forwarded-For".equalsIgnoreCase(name)) { return "116.228.20.212 , 10.0.25.22"; } return super.getHeader(name); } }; String ip1 = DruidWebUtils.getRemoteAddr(request1); System.out.println("X-Forwarded-For ip1===" + ip1); assertEquals("116.228.20.212", ip1); MockHttpServletRequest request2 = new MockHttpServletRequest() { public String getHeader(String name) { if ("X-Forwarded-For".equalsIgnoreCase(name)) { return "10.0.25.22"; } return super.getHeader(name); } }; String ip2 = DruidWebUtils.getRemoteAddr(request2); System.out.println("X-Forwarded-For ip2===" + ip2); assertEquals("10.0.25.22", ip2); MockHttpServletRequest request3 = new MockHttpServletRequest() { public String getHeader(String name) { if ("X-Forwarded-For".equalsIgnoreCase(name)) { return "x10.0.25.22"; } return "192.168.1.3"; } }; String ip3 = DruidWebUtils.getRemoteAddr(request3); System.out.println("X-Forwarded-For ip3===" + ip3); assertEquals("192.168.1.3", ip3); } }
DruidWebUtilsTest
java
elastic__elasticsearch
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/OutlierDetection.java
{ "start": 1589, "end": 6010 }
class ____ implements Evaluation { public static final ParseField NAME = new ParseField("outlier_detection", "binary_soft_classification"); private static final ParseField METRICS = new ParseField("metrics"); @SuppressWarnings("unchecked") public static final ConstructingObjectParser<OutlierDetection, Void> PARSER = new ConstructingObjectParser<>( NAME.getPreferredName(), a -> new OutlierDetection((String) a[0], (String) a[1], (List<EvaluationMetric>) a[2]) ); static { PARSER.declareString(ConstructingObjectParser.constructorArg(), ACTUAL_FIELD); PARSER.declareString(ConstructingObjectParser.constructorArg(), PREDICTED_PROBABILITY_FIELD); PARSER.declareNamedObjects( ConstructingObjectParser.optionalConstructorArg(), (p, c, n) -> p.namedObject(EvaluationMetric.class, registeredMetricName(NAME.getPreferredName(), n), c), METRICS ); } public static OutlierDetection fromXContent(XContentParser parser) { return PARSER.apply(parser, null); } public static QueryBuilder actualIsTrueQuery(String actualField) { return QueryBuilders.queryStringQuery(actualField + ": (1 OR true)"); } /** * The collection of fields in the index being evaluated. * fields.getActualField() is assumed to either be 1 or 0, or true or false. * fields.getPredictedProbabilityField() is assumed to be a number in [0.0, 1.0]. * Other fields are not needed by this evaluation. 
*/ private final EvaluationFields fields; /** * The list of metrics to calculate */ private final List<EvaluationMetric> metrics; public OutlierDetection(String actualField, String predictedProbabilityField, @Nullable List<EvaluationMetric> metrics) { this.fields = new EvaluationFields( ExceptionsHelper.requireNonNull(actualField, ACTUAL_FIELD), null, null, null, ExceptionsHelper.requireNonNull(predictedProbabilityField, PREDICTED_PROBABILITY_FIELD), false ); this.metrics = initMetrics(metrics, OutlierDetection::defaultMetrics); } private static List<EvaluationMetric> defaultMetrics() { return Arrays.asList( new AucRoc(false), new Precision(Arrays.asList(0.25, 0.5, 0.75)), new Recall(Arrays.asList(0.25, 0.5, 0.75)), new ConfusionMatrix(Arrays.asList(0.25, 0.5, 0.75)) ); } public OutlierDetection(StreamInput in) throws IOException { this.fields = new EvaluationFields(in.readString(), null, null, null, in.readString(), false); this.metrics = in.readNamedWriteableCollectionAsList(EvaluationMetric.class); } @Override public String getName() { return NAME.getPreferredName(); } @Override public EvaluationFields getFields() { return fields; } @Override public List<EvaluationMetric> getMetrics() { return metrics; } @Override public String getWriteableName() { return NAME.getPreferredName(); } @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(fields.getActualField()); out.writeString(fields.getPredictedProbabilityField()); out.writeNamedWriteableCollection(metrics); } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field(ACTUAL_FIELD.getPreferredName(), fields.getActualField()); builder.field(PREDICTED_PROBABILITY_FIELD.getPreferredName(), fields.getPredictedProbabilityField()); builder.startObject(METRICS.getPreferredName()); for (EvaluationMetric metric : metrics) { builder.field(metric.getName(), metric); } builder.endObject(); builder.endObject(); 
return builder; } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; OutlierDetection that = (OutlierDetection) o; return Objects.equals(fields, that.fields) && Objects.equals(metrics, that.metrics); } @Override public int hashCode() { return Objects.hash(fields, metrics); } }
OutlierDetection
java
spring-projects__spring-boot
buildpack/spring-boot-buildpack-platform/src/test/java/org/springframework/boot/buildpack/platform/docker/type/LayerTests.java
{ "start": 1333, "end": 2624 }
class ____ { @Test @SuppressWarnings("NullAway") // Test null check void ofWhenLayoutIsNullThrowsException() { assertThatIllegalArgumentException().isThrownBy(() -> Layer.of((IOConsumer<Layout>) null)) .withMessage("'layout' must not be null"); } @Test @SuppressWarnings("NullAway") // Test null check void fromTarArchiveWhenTarArchiveIsNullThrowsException() { assertThatIllegalArgumentException().isThrownBy(() -> Layer.fromTarArchive(null)) .withMessage("'tarArchive' must not be null"); } @Test void ofCreatesLayer() throws Exception { Layer layer = Layer.of((layout) -> { layout.directory("/directory", Owner.ROOT); layout.file("/directory/file", Owner.ROOT, Content.of("test")); }); assertThat(layer.getId()) .hasToString("sha256:d03a34f73804698c875eb56ff694fc2fceccc69b645e4adceb004ed13588613b"); ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); layer.writeTo(outputStream); try (TarArchiveInputStream tarStream = new TarArchiveInputStream( new ByteArrayInputStream(outputStream.toByteArray()))) { assertThat(tarStream.getNextEntry().getName()).isEqualTo("/directory/"); assertThat(tarStream.getNextEntry().getName()).isEqualTo("/directory/file"); assertThat(tarStream.getNextEntry()).isNull(); } } }
LayerTests
java
elastic__elasticsearch
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/CheckShrinkReadyStep.java
{ "start": 5174, "end": 7799 }
class ____ implements ToXContentObject { private final String nodeId; private final long actualReplicas; private final long numberShardsLeftToAllocate; private final String message; static final ParseField NODE_ID = new ParseField("node_id"); static final ParseField EXPECTED_SHARDS = new ParseField("expected_shards"); static final ParseField SHARDS_TO_ALLOCATE = new ParseField("shards_left_to_allocate"); static final ParseField MESSAGE = new ParseField("message"); public Info(String nodeId, long expectedShards, long numberShardsLeftToAllocate) { this.nodeId = nodeId; this.actualReplicas = expectedShards; this.numberShardsLeftToAllocate = numberShardsLeftToAllocate; if (numberShardsLeftToAllocate < 0) { this.message = "Waiting for all shards to become active"; } else { this.message = Strings.format( "Waiting for node [%s] to contain [%d] shards, found [%d], remaining [%d]", nodeId, expectedShards, expectedShards - numberShardsLeftToAllocate, numberShardsLeftToAllocate ); } } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field(MESSAGE.getPreferredName(), message); builder.field(NODE_ID.getPreferredName(), nodeId); builder.field(SHARDS_TO_ALLOCATE.getPreferredName(), numberShardsLeftToAllocate); builder.field(EXPECTED_SHARDS.getPreferredName(), actualReplicas); builder.endObject(); return builder; } @Override public int hashCode() { return Objects.hash(nodeId, actualReplicas, numberShardsLeftToAllocate); } @Override public boolean equals(Object obj) { if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } CheckShrinkReadyStep.Info other = (CheckShrinkReadyStep.Info) obj; return Objects.equals(actualReplicas, other.actualReplicas) && Objects.equals(numberShardsLeftToAllocate, other.numberShardsLeftToAllocate) && Objects.equals(nodeId, other.nodeId); } @Override public String toString() { return Strings.toString(this); } } }
Info
java
apache__maven
its/core-it-suite/src/test/java/org/apache/maven/it/MavenITmng3217InterPluginDependencyTest.java
{ "start": 1043, "end": 2201 }
class ____ extends AbstractMavenIntegrationTestCase { /** * Verify that the dependency of plugin A on some plugin B does not influence the build of another module in the * reactor that uses a different version of plugin B for normal build tasks. * * @throws Exception in case of failure */ @Test public void testitMNG3217() throws Exception { File testDir = extractResources("/mng-3217"); Verifier verifier = newVerifier(testDir.getAbsolutePath()); verifier.setAutoclean(false); verifier.deleteDirectory("sub-1/target"); verifier.deleteDirectory("sub-2/target"); verifier.deleteArtifacts("org.apache.maven.its.mng3217"); verifier.filterFile("settings-template.xml", "settings.xml"); verifier.addCliArgument("--settings"); verifier.addCliArgument("settings.xml"); verifier.addCliArgument("validate"); verifier.execute(); verifier.verifyErrorFreeLog(); verifier.verifyFilePresent("sub-1/target/touch-1.txt"); verifier.verifyFilePresent("sub-2/target/touch-2.txt"); } }
MavenITmng3217InterPluginDependencyTest
java
alibaba__nacos
plugin-default-impl/nacos-default-auth-plugin/src/main/java/com/alibaba/nacos/plugin/auth/impl/configuration/AuthConfigs.java
{ "start": 1743, "end": 7450 }
class ____ extends Subscriber<ServerConfigChangeEvent> { private static final Logger LOGGER = LoggerFactory.getLogger(AuthConfigs.class); private static final String PREFIX = "nacos.core.auth.plugin"; @JustForTest private static Boolean cachingEnabled = null; /** * Whether server auth enabled. */ @Value("${" + Constants.Auth.NACOS_CORE_AUTH_ENABLED + ":false}") private boolean authEnabled; /** * Whether console auth enabled. */ @Value("${" + Constants.Auth.NACOS_CORE_AUTH_CONSOLE_ENABLED + ":true}") private boolean consoleAuthEnabled; /** * Which auth system is in use. */ @Value("${" + Constants.Auth.NACOS_CORE_AUTH_SYSTEM_TYPE + ":}") private String nacosAuthSystemType; @Value("${" + Constants.Auth.NACOS_CORE_AUTH_SERVER_IDENTITY_KEY + ":}") private String serverIdentityKey; @Value("${" + Constants.Auth.NACOS_CORE_AUTH_SERVER_IDENTITY_VALUE + ":}") private String serverIdentityValue; private boolean hasGlobalAdminRole; private Map<String, Properties> authPluginProperties = new HashMap<>(); public AuthConfigs() { NotifyCenter.registerSubscriber(this); refreshPluginProperties(); } /** * Validate auth config. * * @throws NacosException If the config is not valid. 
*/ @PostConstruct public void validate() throws NacosException { if (!authEnabled && !consoleAuthEnabled) { return; } if (StringUtils.isEmpty(nacosAuthSystemType)) { throw new NacosException(AuthErrorCode.INVALID_TYPE.getCode(), AuthErrorCode.INVALID_TYPE.getMsg()); } if (EnvUtil.getStandaloneMode()) { return; } if (StringUtils.isEmpty(serverIdentityKey) || StringUtils.isEmpty(serverIdentityValue)) { throw new NacosException(AuthErrorCode.EMPTY_IDENTITY.getCode(), AuthErrorCode.EMPTY_IDENTITY.getMsg()); } } private void refreshPluginProperties() { try { Map<String, Properties> newProperties = new HashMap<>(1); Properties properties = PropertiesUtil.getPropertiesWithPrefix(EnvUtil.getEnvironment(), PREFIX); if (properties != null) { for (String each : properties.stringPropertyNames()) { int typeIndex = each.indexOf('.'); String type = each.substring(0, typeIndex); String subKey = each.substring(typeIndex + 1); newProperties.computeIfAbsent(type, key -> new Properties()) .setProperty(subKey, properties.getProperty(each)); } } authPluginProperties = newProperties; } catch (Exception e) { LOGGER.warn("Refresh plugin properties failed ", e); } } public boolean isHasGlobalAdminRole() { return hasGlobalAdminRole; } public void setHasGlobalAdminRole(boolean hasGlobalAdminRole) { this.hasGlobalAdminRole = hasGlobalAdminRole; } public String getNacosAuthSystemType() { return nacosAuthSystemType; } public String getServerIdentityKey() { return serverIdentityKey; } public String getServerIdentityValue() { return serverIdentityValue; } /** * console auth function is open. * * @return console auth function is open */ public boolean isConsoleAuthEnabled() { return consoleAuthEnabled; } /** * server auth function is open. * * @return server auth function is open */ public boolean isAuthEnabled() { return authEnabled; } /** * Whether permission information can be cached. 
* * @return bool */ public boolean isCachingEnabled() { if (Objects.nonNull(AuthConfigs.cachingEnabled)) { return cachingEnabled; } return ConvertUtils.toBoolean(EnvUtil.getProperty(Constants.Auth.NACOS_CORE_AUTH_CACHING_ENABLED, "true")); } public Properties getAuthPluginProperties(String authType) { if (!authPluginProperties.containsKey(authType)) { LOGGER.warn("Can't find properties for type {}, will use empty properties", authType); return new Properties(); } return authPluginProperties.get(authType); } @JustForTest public static void setCachingEnabled(boolean cachingEnabled) { AuthConfigs.cachingEnabled = cachingEnabled; } @Override public void onEvent(ServerConfigChangeEvent event) { try { authEnabled = EnvUtil.getProperty(Constants.Auth.NACOS_CORE_AUTH_ENABLED, Boolean.class, false); consoleAuthEnabled = EnvUtil.getProperty(Constants.Auth.NACOS_CORE_AUTH_CONSOLE_ENABLED, Boolean.class, true); cachingEnabled = EnvUtil.getProperty(Constants.Auth.NACOS_CORE_AUTH_CACHING_ENABLED, Boolean.class, true); serverIdentityKey = EnvUtil.getProperty(Constants.Auth.NACOS_CORE_AUTH_SERVER_IDENTITY_KEY, ""); serverIdentityValue = EnvUtil.getProperty(Constants.Auth.NACOS_CORE_AUTH_SERVER_IDENTITY_VALUE, ""); nacosAuthSystemType = EnvUtil.getProperty(Constants.Auth.NACOS_CORE_AUTH_SYSTEM_TYPE, ""); refreshPluginProperties(); } catch (Exception e) { LOGGER.warn("Upgrade auth config from env failed, use old value", e); } } @Override public Class<? extends Event> subscribeType() { return ServerConfigChangeEvent.class; } }
AuthConfigs
java
lettuce-io__lettuce-core
src/main/java/io/lettuce/core/ReadFrom.java
{ "start": 9034, "end": 9384 }
interface ____ extends Iterable<RedisNodeDescription> { /** * Returns the list of nodes that are applicable for the read operation. The list is ordered by latency. * * @return the collection of nodes that are applicable for reading. * */ List<RedisNodeDescription> getNodes(); } }
Nodes
java
google__guava
android/guava/src/com/google/common/collect/MapMakerInternalMap.java
{ "start": 85095, "end": 85556 }
class ____ extends SimpleEntry<K, V> { WriteThroughEntry(K key, V value) { super(key, value); } /* * We inherit equals() and hashCode() instead of overriding them to use keyEquivalence and * valueEquivalence. */ @Override public V setValue(V newValue) { put(getKey(), newValue); return super.setValue(newValue); // done after put() so that it happens only if put() succeeds } } final
WriteThroughEntry
java
elastic__elasticsearch
x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/parser/EqlBaseParser.java
{ "start": 118826, "end": 119828 }
class ____ extends NumberContext { public TerminalNode DECIMAL_VALUE() { return getToken(EqlBaseParser.DECIMAL_VALUE, 0); } public DecimalLiteralContext(NumberContext ctx) { copyFrom(ctx); } @Override public void enterRule(ParseTreeListener listener) { if (listener instanceof EqlBaseListener) ((EqlBaseListener) listener).enterDecimalLiteral(this); } @Override public void exitRule(ParseTreeListener listener) { if (listener instanceof EqlBaseListener) ((EqlBaseListener) listener).exitDecimalLiteral(this); } @Override public <T> T accept(ParseTreeVisitor<? extends T> visitor) { if (visitor instanceof EqlBaseVisitor) return ((EqlBaseVisitor<? extends T>) visitor).visitDecimalLiteral(this); else return visitor.visitChildren(this); } } @SuppressWarnings("CheckReturnValue") public static
DecimalLiteralContext
java
spring-projects__spring-security
oauth2/oauth2-authorization-server/src/main/java/org/springframework/security/oauth2/server/authorization/jackson2/JsonNodeUtils.java
{ "start": 917, "end": 1178 }
class ____ {@code JsonNode}. * * @author Joe Grandja * @since 7.0 * @deprecated as of 7.0 in favor of * {@code org.springframework.security.oauth2.server.authorization.jackson.JsonNodeUtils} * based on Jackson 3 */ @Deprecated(forRemoval = true) abstract
for
java
apache__kafka
raft/src/main/java/org/apache/kafka/raft/internals/BatchBuilder.java
{ "start": 2228, "end": 12313 }
class ____<T> { private final ByteBuffer initialBuffer; private final Compression compression; private final ByteBufferOutputStream batchOutput; private final DataOutputStreamWritable recordOutput; private final long baseOffset; private final long appendTime; private final int leaderEpoch; private final int initialPosition; private final int maxBytes; private final RecordSerde<T> serde; private final List<T> records; private long nextOffset; private int unflushedBytes; private boolean isOpenForAppends = true; public BatchBuilder( ByteBuffer buffer, RecordSerde<T> serde, Compression compression, long baseOffset, long appendTime, int leaderEpoch, int maxBytes ) { this.initialBuffer = buffer; this.batchOutput = new ByteBufferOutputStream(buffer); this.serde = serde; this.compression = compression; this.baseOffset = baseOffset; this.nextOffset = baseOffset; this.appendTime = appendTime; this.initialPosition = batchOutput.position(); this.leaderEpoch = leaderEpoch; this.maxBytes = maxBytes; this.records = new ArrayList<>(); // field compressionType must be set before calculating the batch header size int batchHeaderSizeInBytes = batchHeaderSizeInBytes(); batchOutput.position(initialPosition + batchHeaderSizeInBytes); this.recordOutput = new DataOutputStreamWritable(new DataOutputStream( compression.wrapForOutput(this.batchOutput, RecordBatch.MAGIC_VALUE_V2))); } /** * Append a record to this batch. The caller must first verify there is room for the batch * using {@link #bytesNeeded(Collection, ObjectSerializationCache)}. 
* * @param record the record to append * @param serializationCache serialization cache for use in {@link RecordSerde#write(Object, ObjectSerializationCache, Writable)} * @return the offset of the appended batch */ public long appendRecord(T record, ObjectSerializationCache serializationCache) { if (!isOpenForAppends) { throw new IllegalStateException("Cannot append new records after the batch has been built"); } if (nextOffset - baseOffset > Integer.MAX_VALUE) { throw new IllegalArgumentException("Cannot include more than " + Integer.MAX_VALUE + " records in a single batch"); } long offset = nextOffset++; int recordSizeInBytes = writeRecord( offset, record, serializationCache ); unflushedBytes += recordSizeInBytes; records.add(record); return offset; } /** * Check whether the batch has enough room for all the record values. * * Returns an empty {@link OptionalInt} if the batch builder has room for this list of records. * Otherwise, it returns the expected number of bytes needed for a batch to contain these records. 
* * @param records the records to use when checking for room * @param serializationCache serialization cache for computing sizes * @return empty {@link OptionalInt} if there is room for the records to be appended, otherwise * returns the number of bytes needed */ public OptionalInt bytesNeeded(Collection<T> records, ObjectSerializationCache serializationCache) { int bytesNeeded = bytesNeededForRecords( records, serializationCache ); if (!isOpenForAppends) { return OptionalInt.of(Math.addExact(batchHeaderSizeInBytes(), bytesNeeded)); } int approxUnusedSizeInBytes = maxBytes - approximateSizeInBytes(); if (approxUnusedSizeInBytes >= bytesNeeded) { return OptionalInt.empty(); } else if (unflushedBytes > 0) { recordOutput.flush(); unflushedBytes = 0; int unusedSizeInBytes = maxBytes - flushedSizeInBytes(); if (unusedSizeInBytes >= bytesNeeded) { return OptionalInt.empty(); } } return OptionalInt.of(Math.addExact(batchHeaderSizeInBytes(), bytesNeeded)); } private int flushedSizeInBytes() { return batchOutput.position() - initialPosition; } /** * Get an estimate of the current size of the appended data. This estimate * is precise if no compression is in use. * * @return estimated size in bytes of the appended records */ public int approximateSizeInBytes() { return flushedSizeInBytes() + unflushedBytes; } /** * Get the base offset of this batch. This is constant upon constructing * the builder instance. * * @return the base offset */ public long baseOffset() { return baseOffset; } /** * Return the offset of the last appended record. This is updated after * every append and can be used after the batch has been built to obtain * the last offset. * * @return the offset of the last appended record */ public long lastOffset() { return nextOffset - 1; } /** * Get the number of records appended to the batch. This is updated after * each append. 
* * @return the number of appended records */ public int numRecords() { return (int) (nextOffset - baseOffset); } /** * Check whether there has been at least one record appended to the batch. * * @return true if one or more records have been appended */ public boolean nonEmpty() { return numRecords() > 0; } /** * Return the reference to the initial buffer passed through the constructor. * This is used in case the buffer needs to be returned to a pool (e.g. * in {@link org.apache.kafka.common.memory.MemoryPool#release(ByteBuffer)}). * * @return the initial buffer passed to the constructor */ public ByteBuffer initialBuffer() { return initialBuffer; } /** * Get a list of the records appended to the batch. * @return a list of records */ public List<T> records() { return records; } private void writeDefaultBatchHeader() { ByteBuffer buffer = batchOutput.buffer(); int lastPosition = buffer.position(); buffer.position(initialPosition); int size = lastPosition - initialPosition; int lastOffsetDelta = (int) (lastOffset() - baseOffset); DefaultRecordBatch.writeHeader( buffer, baseOffset, lastOffsetDelta, size, RecordBatch.MAGIC_VALUE_V2, compression.type(), TimestampType.CREATE_TIME, appendTime, appendTime, RecordBatch.NO_PRODUCER_ID, RecordBatch.NO_PRODUCER_EPOCH, RecordBatch.NO_SEQUENCE, false, false, false, leaderEpoch, numRecords() ); buffer.position(lastPosition); } public MemoryRecords build() { recordOutput.close(); writeDefaultBatchHeader(); ByteBuffer buffer = batchOutput.buffer().duplicate(); buffer.flip(); buffer.position(initialPosition); isOpenForAppends = false; return MemoryRecords.readableRecords(buffer.slice()); } public int writeRecord( long offset, T payload, ObjectSerializationCache serializationCache ) { int offsetDelta = (int) (offset - baseOffset); long timestampDelta = 0; int payloadSize = serde.recordSize(payload, serializationCache); int sizeInBytes = DefaultRecord.sizeOfBodyInBytes( offsetDelta, timestampDelta, -1, payloadSize, 
DefaultRecord.EMPTY_HEADERS ); recordOutput.writeVarint(sizeInBytes); // Write attributes (currently unused) recordOutput.writeByte((byte) 0); // Write timestamp and offset recordOutput.writeVarlong(timestampDelta); recordOutput.writeVarint(offsetDelta); // Write key, which is always null for controller messages recordOutput.writeVarint(-1); // Write value recordOutput.writeVarint(payloadSize); serde.write(payload, serializationCache, recordOutput); // Write headers (currently unused) recordOutput.writeVarint(0); return ByteUtils.sizeOfVarint(sizeInBytes) + sizeInBytes; } private int batchHeaderSizeInBytes() { return AbstractRecords.recordBatchHeaderSizeInBytes( RecordBatch.MAGIC_VALUE_V2, compression.type() ); } private int bytesNeededForRecords( Collection<T> records, ObjectSerializationCache serializationCache ) { long expectedNextOffset = nextOffset; int bytesNeeded = 0; for (T record : records) { if (expectedNextOffset - baseOffset >= Integer.MAX_VALUE) { throw new IllegalArgumentException( String.format( "Adding %d records to a batch with base offset of %d and next offset of %d", records.size(), baseOffset, expectedNextOffset ) ); } int recordSizeInBytes = DefaultRecord.sizeOfBodyInBytes( (int) (expectedNextOffset - baseOffset), 0, -1, serde.recordSize(record, serializationCache), DefaultRecord.EMPTY_HEADERS ); bytesNeeded = Math.addExact(bytesNeeded, ByteUtils.sizeOfVarint(recordSizeInBytes)); bytesNeeded = Math.addExact(bytesNeeded, recordSizeInBytes); expectedNextOffset += 1; } return bytesNeeded; } }
BatchBuilder
java
FasterXML__jackson-databind
src/test/java/tools/jackson/databind/introspect/TestAutoDetect.java
{ "start": 2757, "end": 2892 }
class ____ extends DataParent2789 { DataClassA() { super(DataType2789.CLASS_A); } } private
DataClassA
java
apache__hadoop
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/WritableSerialization.java
{ "start": 2567, "end": 3560 }
class ____ extends Configured implements Serializer<Writable> { private DataOutputStream dataOut; @Override public void open(OutputStream out) { if (out instanceof DataOutputStream) { dataOut = (DataOutputStream) out; } else { dataOut = new DataOutputStream(out); } } @Override public void serialize(Writable w) throws IOException { w.write(dataOut); } @Override public void close() throws IOException { dataOut.close(); } } @InterfaceAudience.Private @Override public boolean accept(Class<?> c) { return Writable.class.isAssignableFrom(c); } @InterfaceAudience.Private @Override public Serializer<Writable> getSerializer(Class<Writable> c) { return new WritableSerializer(); } @InterfaceAudience.Private @Override public Deserializer<Writable> getDeserializer(Class<Writable> c) { return new WritableDeserializer(getConf(), c); } }
WritableSerializer
java
elastic__elasticsearch
modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistLoader.java
{ "start": 5031, "end": 5272 }
class ____ not need to be whitelisted), the Java name of the method * (which will also be the Painless name for the method), an opening parenthesis, a * comma-delimited list of Painless type names corresponding to the type/
does
java
spring-projects__spring-framework
spring-context/src/main/java/org/springframework/instrument/classloading/WeavingTransformer.java
{ "start": 3322, "end": 3872 }
class ____ definition */ public byte[] transformIfNecessary(String className, String internalName, byte[] bytes, @Nullable ProtectionDomain pd) { byte[] result = bytes; for (ClassFileTransformer cft : this.transformers) { try { byte[] transformed = cft.transform(this.classLoader, internalName, null, pd, result); if (transformed != null) { result = transformed; } } catch (IllegalClassFormatException ex) { throw new IllegalStateException("Class file transformation failed", ex); } } return result; } }
byte
java
netty__netty
example/src/main/java/io/netty/example/http/cors/HttpCorsServer.java
{ "start": 3229, "end": 4075 }
class ____ { static final boolean SSL = System.getProperty("ssl") != null; static final int PORT = Integer.parseInt(System.getProperty("port", SSL? "8443" : "8080")); public static void main(String[] args) throws Exception { // Configure SSL. final SslContext sslCtx = ServerUtil.buildSslContext(); EventLoopGroup group = new MultiThreadIoEventLoopGroup(NioIoHandler.newFactory()); try { ServerBootstrap b = new ServerBootstrap(); b.group(group) .channel(NioServerSocketChannel.class) .handler(new LoggingHandler(LogLevel.INFO)) .childHandler(new HttpCorsServerInitializer(sslCtx)); b.bind(PORT).sync().channel().closeFuture().sync(); } finally { group.shutdownGracefully(); } } }
HttpCorsServer
java
apache__camel
components/camel-wordpress/src/main/java/org/apache/camel/component/wordpress/api/WordpressServiceProvider.java
{ "start": 1747, "end": 4010 }
class ____ { private static final WordpressServiceProvider INSTANCE = new WordpressServiceProvider(); } public static WordpressServiceProvider getInstance() { return ServiceProviderHolder.INSTANCE; } public void init(String wordpressApiUrl) { this.init(wordpressApiUrl, WordpressConstants.API_VERSION); } public void init(String wordpressApiUrl, String apiVersion) { this.init(new WordpressAPIConfiguration(wordpressApiUrl, apiVersion)); } public void init(WordpressAPIConfiguration config) { ObjectHelper.notNullOrEmpty(config.getApiUrl(), "Please inform the Wordpress API url , eg.: http://myblog.com/wp-json/wp"); if (config.getApiVersion() == null || config.getApiVersion().isEmpty()) { config.setApiVersion(WordpressConstants.API_VERSION); } final WordpressServicePosts servicePosts = new WordpressServicePostsAdapter(config.getApiUrl(), config.getApiVersion()); final WordpressServiceUsers serviceUsers = new WordpressServiceUsersAdapter(config.getApiUrl(), config.getApiVersion()); servicePosts.setWordpressAuthentication(config.getAuthentication()); serviceUsers.setWordpressAuthentication(config.getAuthentication()); this.services = new HashMap<>(); this.services.put(WordpressServicePosts.class, servicePosts); this.services.put(WordpressServiceUsers.class, serviceUsers); this.configuration = config; LOGGER.info("Wordpress Service Provider initialized using base URL: {}, API Version {}", config.getApiUrl(), config.getApiVersion()); } @SuppressWarnings("unchecked") public <T extends WordpressService> T getService(Class<T> wordpressServiceClazz) { T service = (T) this.services.get(wordpressServiceClazz); if (service == null) { throw new IllegalArgumentException(String.format("Couldn't find a Wordpress Service '%s'", wordpressServiceClazz)); } return service; } public boolean hasAuthentication() { if (this.configuration != null) { return this.configuration.getAuthentication() != null; } return false; } }
ServiceProviderHolder
java
quarkusio__quarkus
extensions/resteasy-reactive/rest/deployment/src/test/java/io/quarkus/resteasy/reactive/server/test/customproviders/PreMatchingHeadersFilterTest.java
{ "start": 769, "end": 1473 }
class ____ { @RegisterExtension static QuarkusUnitTest test = new QuarkusUnitTest() .setArchiveProducer(new Supplier<>() { @Override public JavaArchive get() { return ShrinkWrap.create(JavaArchive.class) .addClasses(Resource.class); } }); @Test public void testJsonHeaderAdded() { given() .body("{\"foo\": \"bar\"}") .when() .post("/test") .then() .statusCode(200) .body("foo", equalTo("bar")); } @Path("test") public static
PreMatchingHeadersFilterTest
java
spring-projects__spring-framework
spring-context/src/test/java/org/springframework/aop/framework/AbstractAopProxyTests.java
{ "start": 53199, "end": 53708 }
class ____ extends DefaultPointcutAdvisor { public int count; public TestDynamicPointcutAdvice(MethodInterceptor mi, final String pattern) { super(mi); setPointcut(new DynamicMethodMatcherPointcut() { @Override public boolean matches(Method m, @Nullable Class<?> targetClass, Object... args) { boolean run = m.getName().contains(pattern); if (run) { ++count; } return run; } }); } } @SuppressWarnings("serial") protected static
TestDynamicPointcutAdvice
java
apache__rocketmq
broker/src/main/java/org/apache/rocketmq/broker/client/ClientHousekeepingService.java
{ "start": 1363, "end": 3954 }
class ____ implements ChannelEventListener { private static final Logger log = LoggerFactory.getLogger(LoggerName.BROKER_LOGGER_NAME); private final BrokerController brokerController; private ScheduledExecutorService scheduledExecutorService; public ClientHousekeepingService(final BrokerController brokerController) { this.brokerController = brokerController; scheduledExecutorService = ThreadUtils.newScheduledThreadPool(1, new ThreadFactoryImpl("ClientHousekeepingScheduledThread", brokerController.getBrokerIdentity())); } public void start() { this.scheduledExecutorService.scheduleAtFixedRate(() -> { try { ClientHousekeepingService.this.scanExceptionChannel(); } catch (Throwable e) { log.error("Error occurred when scan not active client channels.", e); } }, 1000 * 10, 1000 * 10, TimeUnit.MILLISECONDS); } private void scanExceptionChannel() { this.brokerController.getProducerManager().scanNotActiveChannel(); this.brokerController.getConsumerManager().scanNotActiveChannel(); } public void shutdown() { this.scheduledExecutorService.shutdown(); } @Override public void onChannelConnect(String remoteAddr, Channel channel) { this.brokerController.getBrokerStatsManager().incChannelConnectNum(); } @Override public void onChannelClose(String remoteAddr, Channel channel) { this.brokerController.getProducerManager().doChannelCloseEvent(remoteAddr, channel); this.brokerController.getConsumerManager().doChannelCloseEvent(remoteAddr, channel); this.brokerController.getBrokerStatsManager().incChannelCloseNum(); } @Override public void onChannelException(String remoteAddr, Channel channel) { this.brokerController.getProducerManager().doChannelCloseEvent(remoteAddr, channel); this.brokerController.getConsumerManager().doChannelCloseEvent(remoteAddr, channel); this.brokerController.getBrokerStatsManager().incChannelExceptionNum(); } @Override public void onChannelIdle(String remoteAddr, Channel channel) { this.brokerController.getProducerManager().doChannelCloseEvent(remoteAddr, 
channel); this.brokerController.getConsumerManager().doChannelCloseEvent(remoteAddr, channel); this.brokerController.getBrokerStatsManager().incChannelIdleNum(); } @Override public void onChannelActive(String remoteAddr, Channel channel) { } }
ClientHousekeepingService