language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/mover/Mover.java | {
"start": 32788,
"end": 34552
} | class ____ {
private boolean hasRemaining;
private boolean noBlockMoved;
private boolean retryFailed;
Result() {
hasRemaining = false;
noBlockMoved = true;
retryFailed = false;
}
boolean isHasRemaining() {
return hasRemaining;
}
boolean isNoBlockMoved() {
return noBlockMoved;
}
void updateHasRemaining(boolean hasRemaining) {
this.hasRemaining |= hasRemaining;
}
void setNoBlockMoved(boolean noBlockMoved) {
this.noBlockMoved = noBlockMoved;
}
void setRetryFailed() {
this.retryFailed = true;
}
/**
* @return NO_MOVE_PROGRESS if no progress in move after some retry. Return
* SUCCESS if all moves are success and there is no remaining move.
* Return NO_MOVE_BLOCK if there moves available but all the moves
* cannot be scheduled. Otherwise, return IN_PROGRESS since there
* must be some remaining moves.
*/
ExitStatus getExitStatus() {
if (retryFailed) {
return ExitStatus.NO_MOVE_PROGRESS;
} else {
return !isHasRemaining() ? ExitStatus.SUCCESS
: isNoBlockMoved() ? ExitStatus.NO_MOVE_BLOCK
: ExitStatus.IN_PROGRESS;
}
}
}
/**
* Run a Mover in command line.
*
* @param args Command line arguments
*/
public static void main(String[] args) {
if (DFSUtil.parseHelpArgument(args, Cli.USAGE, System.out, true)) {
System.exit(0);
}
try {
System.exit(ToolRunner.run(new HdfsConfiguration(), new Cli(), args));
} catch (Throwable e) {
LOG.error("Exiting " + Mover.class.getSimpleName()
+ " due to an exception", e);
System.exit(-1);
}
}
}
| Result |
java | quarkusio__quarkus | extensions/container-image/container-image-openshift/deployment/src/main/java/io/quarkus/container/image/openshift/deployment/RemoveEnvVarDecorator.java | {
"start": 372,
"end": 1812
} | class ____ extends ApplicationContainerDecorator<ContainerFluent<?>> {
private final String envVarName;
public RemoveEnvVarDecorator(String envVarName) {
this(ANY, envVarName);
}
public RemoveEnvVarDecorator(String name, String envVarName) {
super(name);
this.envVarName = envVarName;
}
public void andThenVisit(ContainerFluent<?> container) {
container.removeMatchingFromEnv(e -> e.getName().equals(envVarName));
}
public String getEnvVarKey() {
return this.envVarName;
}
public Class<? extends Decorator>[] after() {
return new Class[] { ResourceProvidingDecorator.class, AddEnvVarDecorator.class };
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((envVarName == null) ? 0 : envVarName.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
RemoveEnvVarDecorator other = (RemoveEnvVarDecorator) obj;
if (envVarName == null) {
if (other.envVarName != null)
return false;
} else if (!envVarName.equals(other.envVarName))
return false;
return true;
}
}
| RemoveEnvVarDecorator |
java | spring-projects__spring-boot | module/spring-boot-restclient/src/test/java/org/springframework/boot/restclient/RestTemplateBuilderTests.java | {
"start": 3507,
"end": 21953
} | class ____ {
private final RestTemplateBuilder builder = new RestTemplateBuilder();
@Mock
@SuppressWarnings("NullAway.Init")
private HttpMessageConverter<Object> messageConverter;
@Mock
@SuppressWarnings("NullAway.Init")
private ClientHttpRequestInterceptor interceptor;
@Test
@SuppressWarnings("NullAway") // Test null check
void createWhenCustomizersAreNullShouldThrowException() {
RestTemplateCustomizer[] customizers = null;
assertThatIllegalArgumentException().isThrownBy(() -> new RestTemplateBuilder(customizers))
.withMessageContaining("'customizers' must not be null");
}
@Test
void createWithCustomizersShouldApplyCustomizers() {
RestTemplateCustomizer customizer = mock(RestTemplateCustomizer.class);
RestTemplate template = new RestTemplateBuilder(customizer).build();
then(customizer).should().customize(template);
}
@Test
void buildShouldDetectRequestFactory() {
RestTemplate restTemplate = this.builder.build();
assertThat(restTemplate.getRequestFactory()).isInstanceOf(HttpComponentsClientHttpRequestFactory.class);
}
@Test
void detectRequestFactoryWhenFalseShouldDisableDetection() {
RestTemplate restTemplate = this.builder.detectRequestFactory(false).build();
assertThat(restTemplate.getRequestFactory()).isInstanceOf(SimpleClientHttpRequestFactory.class);
}
@Test
void rootUriShouldApply() {
RestTemplate restTemplate = this.builder.rootUri("https://example.com").build();
MockRestServiceServer server = MockRestServiceServer.bindTo(restTemplate).build();
server.expect(requestTo("https://example.com/hello")).andRespond(withSuccess());
restTemplate.getForEntity("/hello", String.class);
server.verify();
}
@Test
void rootUriShouldApplyAfterUriTemplateHandler() {
UriTemplateHandler uriTemplateHandler = mock(UriTemplateHandler.class);
RestTemplate template = this.builder.uriTemplateHandler(uriTemplateHandler)
.rootUri("https://example.com")
.build();
UriTemplateHandler handler = template.getUriTemplateHandler();
handler.expand("/hello");
assertThat(handler).isInstanceOf(RootUriBuilderFactory.class);
then(uriTemplateHandler).should().expand("https://example.com/hello");
}
@Test
@SuppressWarnings("NullAway") // Test null check
void messageConvertersWhenConvertersAreNullShouldThrowException() {
assertThatIllegalArgumentException()
.isThrownBy(() -> this.builder.messageConverters((HttpMessageConverter<?>[]) null))
.withMessageContaining("'messageConverters' must not be null");
}
@Test
@SuppressWarnings("NullAway") // Test null check
void messageConvertersCollectionWhenConvertersAreNullShouldThrowException() {
assertThatIllegalArgumentException()
.isThrownBy(() -> this.builder.messageConverters((Set<HttpMessageConverter<?>>) null))
.withMessageContaining("'messageConverters' must not be null");
}
@Test
void messageConvertersShouldApply() {
RestTemplate template = this.builder.messageConverters(this.messageConverter).build();
assertThat(template.getMessageConverters()).containsOnly(this.messageConverter);
}
@Test
void messageConvertersShouldReplaceExisting() {
RestTemplate template = this.builder.messageConverters(new ResourceHttpMessageConverter())
.messageConverters(Collections.singleton(this.messageConverter))
.build();
assertThat(template.getMessageConverters()).containsOnly(this.messageConverter);
}
@Test
@SuppressWarnings("NullAway") // Test null check
void additionalMessageConvertersWhenConvertersAreNullShouldThrowException() {
assertThatIllegalArgumentException()
.isThrownBy(() -> this.builder.additionalMessageConverters((HttpMessageConverter<?>[]) null))
.withMessageContaining("'messageConverters' must not be null");
}
@Test
@SuppressWarnings("NullAway") // Test null check
void additionalMessageConvertersCollectionWhenConvertersAreNullShouldThrowException() {
assertThatIllegalArgumentException()
.isThrownBy(() -> this.builder.additionalMessageConverters((Set<HttpMessageConverter<?>>) null))
.withMessageContaining("'messageConverters' must not be null");
}
@Test
void additionalMessageConvertersShouldAddToExisting() {
HttpMessageConverter<?> resourceConverter = new ResourceHttpMessageConverter();
RestTemplate template = this.builder.messageConverters(resourceConverter)
.additionalMessageConverters(this.messageConverter)
.build();
assertThat(template.getMessageConverters()).containsOnly(resourceConverter, this.messageConverter);
}
@Test
void defaultMessageConvertersShouldSetDefaultList() {
RestTemplate template = new RestTemplate(Collections.singletonList(new StringHttpMessageConverter()));
this.builder.defaultMessageConverters().configure(template);
assertThat(template.getMessageConverters()).hasSameSizeAs(new RestTemplate().getMessageConverters());
}
@Test
void defaultMessageConvertersShouldClearExisting() {
RestTemplate template = new RestTemplate(Collections.singletonList(new StringHttpMessageConverter()));
this.builder.additionalMessageConverters(this.messageConverter).defaultMessageConverters().configure(template);
assertThat(template.getMessageConverters()).hasSameSizeAs(new RestTemplate().getMessageConverters());
}
@Test
@SuppressWarnings("NullAway") // Test null check
void interceptorsWhenInterceptorsAreNullShouldThrowException() {
assertThatIllegalArgumentException()
.isThrownBy(() -> this.builder.interceptors((ClientHttpRequestInterceptor[]) null))
.withMessageContaining("'interceptors' must not be null");
}
@Test
@SuppressWarnings("NullAway") // Test null check
void interceptorsCollectionWhenInterceptorsAreNullShouldThrowException() {
assertThatIllegalArgumentException()
.isThrownBy(() -> this.builder.interceptors((Set<ClientHttpRequestInterceptor>) null))
.withMessageContaining("'interceptors' must not be null");
}
@Test
void interceptorsShouldApply() {
RestTemplate template = this.builder.interceptors(this.interceptor).build();
assertThat(template.getInterceptors()).containsOnly(this.interceptor);
}
@Test
void interceptorsShouldReplaceExisting() {
RestTemplate template = this.builder.interceptors(mock(ClientHttpRequestInterceptor.class))
.interceptors(Collections.singleton(this.interceptor))
.build();
assertThat(template.getInterceptors()).containsOnly(this.interceptor);
}
@Test
@SuppressWarnings("NullAway") // Test null check
void additionalInterceptorsWhenInterceptorsAreNullShouldThrowException() {
assertThatIllegalArgumentException()
.isThrownBy(() -> this.builder.additionalInterceptors((ClientHttpRequestInterceptor[]) null))
.withMessageContaining("'interceptors' must not be null");
}
@Test
@SuppressWarnings("NullAway") // Test null check
void additionalInterceptorsCollectionWhenInterceptorsAreNullShouldThrowException() {
assertThatIllegalArgumentException()
.isThrownBy(() -> this.builder.additionalInterceptors((Set<ClientHttpRequestInterceptor>) null))
.withMessageContaining("'interceptors' must not be null");
}
@Test
void additionalInterceptorsShouldAddToExisting() {
ClientHttpRequestInterceptor interceptor = mock(ClientHttpRequestInterceptor.class);
RestTemplate template = this.builder.interceptors(interceptor).additionalInterceptors(this.interceptor).build();
assertThat(template.getInterceptors()).containsOnly(interceptor, this.interceptor);
}
@Test
@SuppressWarnings("NullAway") // Test null check
void requestFactoryClassWhenFactoryIsNullShouldThrowException() {
assertThatIllegalArgumentException()
.isThrownBy(() -> this.builder.requestFactory((Class<ClientHttpRequestFactory>) null))
.withMessageContaining("'requestFactoryType' must not be null");
}
@Test
void requestFactoryClassShouldApply() {
RestTemplate template = this.builder.requestFactory(SimpleClientHttpRequestFactory.class).build();
assertThat(template.getRequestFactory()).isInstanceOf(SimpleClientHttpRequestFactory.class);
}
@Test
void requestFactoryPackagePrivateClassShouldApply() {
RestTemplate template = this.builder.requestFactory(TestClientHttpRequestFactory.class).build();
assertThat(template.getRequestFactory()).isInstanceOf(TestClientHttpRequestFactory.class);
}
@Test
@SuppressWarnings("NullAway") // Test null check
void requestFactoryWhenSupplierIsNullShouldThrowException() {
assertThatIllegalArgumentException()
.isThrownBy(() -> this.builder.requestFactory((Supplier<ClientHttpRequestFactory>) null))
.withMessageContaining("requestFactorySupplier' must not be null");
}
@Test
void requestFactoryShouldApply() {
ClientHttpRequestFactory requestFactory = mock(ClientHttpRequestFactory.class);
RestTemplate template = this.builder.requestFactory(() -> requestFactory).build();
assertThat(template.getRequestFactory()).isSameAs(requestFactory);
}
@Test
@SuppressWarnings("NullAway") // Test null check
void uriTemplateHandlerWhenHandlerIsNullShouldThrowException() {
assertThatIllegalArgumentException().isThrownBy(() -> this.builder.uriTemplateHandler(null))
.withMessageContaining("'uriTemplateHandler' must not be null");
}
@Test
void uriTemplateHandlerShouldApply() {
UriTemplateHandler uriTemplateHandler = mock(UriTemplateHandler.class);
RestTemplate template = this.builder.uriTemplateHandler(uriTemplateHandler).build();
assertThat(template.getUriTemplateHandler()).isSameAs(uriTemplateHandler);
}
@Test
@SuppressWarnings("NullAway") // Test null check
void errorHandlerWhenHandlerIsNullShouldThrowException() {
assertThatIllegalArgumentException().isThrownBy(() -> this.builder.errorHandler(null))
.withMessageContaining("'errorHandler' must not be null");
}
@Test
void errorHandlerShouldApply() {
ResponseErrorHandler errorHandler = mock(ResponseErrorHandler.class);
RestTemplate template = this.builder.errorHandler(errorHandler).build();
assertThat(template.getErrorHandler()).isSameAs(errorHandler);
}
@Test
void basicAuthenticationShouldApply() {
RestTemplate template = this.builder.basicAuthentication("spring", "boot", StandardCharsets.UTF_8).build();
ClientHttpRequest request = createRequest(template);
assertThat(request.getHeaders().headerNames()).containsOnly(HttpHeaders.AUTHORIZATION);
assertThat(request.getHeaders().get(HttpHeaders.AUTHORIZATION)).containsExactly("Basic c3ByaW5nOmJvb3Q=");
}
@Test
void defaultHeaderAddsHeader() {
RestTemplate template = this.builder.defaultHeader("spring", "boot").build();
ClientHttpRequest request = createRequest(template);
assertThat(request.getHeaders().headerSet()).contains(entry("spring", Collections.singletonList("boot")));
}
@Test
void defaultHeaderAddsHeaderValues() {
String name = HttpHeaders.ACCEPT;
String[] values = { MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE };
RestTemplate template = this.builder.defaultHeader(name, values).build();
ClientHttpRequest request = createRequest(template);
assertThat(request.getHeaders().headerSet()).contains(entry(name, Arrays.asList(values)));
}
@Test // gh-17885
void defaultHeaderWhenUsingMockRestServiceServerAddsHeader() {
RestTemplate template = this.builder.defaultHeader("spring", "boot").build();
MockRestServiceServer.bindTo(template).build();
ClientHttpRequest request = createRequest(template);
assertThat(request.getHeaders().headerSet()).contains(entry("spring", Collections.singletonList("boot")));
}
@Test
@SuppressWarnings("unchecked")
void clientSettingsAppliesSettings() {
HttpClientSettings settings = HttpClientSettings.defaults()
.withConnectTimeout(Duration.ofSeconds(1))
.withReadTimeout(Duration.ofSeconds(2));
RestTemplate template = this.builder.clientSettings(settings).build();
Resolver<HttpRoute, ConnectionConfig> resolver = (Resolver<HttpRoute, ConnectionConfig>) Extractors
.byName("httpClient.connManager.connectionConfigResolver")
.apply(template.getRequestFactory());
ConnectionConfig config = resolver.resolve(mock());
assertThat(config.getConnectTimeout()).isEqualTo(Timeout.of(Duration.ofSeconds(1)));
assertThat(config.getSocketTimeout()).isEqualTo(Timeout.of(Duration.ofSeconds(2)));
}
@Test
void requestCustomizersAddsCustomizers() {
RestTemplate template = this.builder
.requestCustomizers((request) -> request.getHeaders().add("spring", "framework"))
.build();
ClientHttpRequest request = createRequest(template);
assertThat(request.getHeaders().headerSet()).contains(entry("spring", Collections.singletonList("framework")));
}
@Test
void additionalRequestCustomizersAddsCustomizers() {
RestTemplate template = this.builder
.requestCustomizers((request) -> request.getHeaders().add("spring", "framework"))
.additionalRequestCustomizers((request) -> request.getHeaders().add("for", "java"))
.build();
ClientHttpRequest request = createRequest(template);
assertThat(request.getHeaders().headerSet()).contains(entry("spring", Collections.singletonList("framework")))
.contains(entry("for", Collections.singletonList("java")));
}
@Test
@SuppressWarnings("NullAway") // Test null check
void customizersWhenCustomizersAreNullShouldThrowException() {
assertThatIllegalArgumentException().isThrownBy(() -> this.builder.customizers((RestTemplateCustomizer[]) null))
.withMessageContaining("'customizers' must not be null");
}
@Test
@SuppressWarnings("NullAway") // Test null check
void customizersCollectionWhenCustomizersAreNullShouldThrowException() {
assertThatIllegalArgumentException()
.isThrownBy(() -> this.builder.customizers((Set<RestTemplateCustomizer>) null))
.withMessageContaining("'customizers' must not be null");
}
@Test
void customizersShouldApply() {
RestTemplateCustomizer customizer = mock(RestTemplateCustomizer.class);
RestTemplate template = this.builder.customizers(customizer).build();
then(customizer).should().customize(template);
}
@Test
void customizersShouldBeAppliedLast() {
RestTemplate template = spy(new RestTemplate());
this.builder.additionalCustomizers(
(restTemplate) -> then(restTemplate).should().setRequestFactory(any(ClientHttpRequestFactory.class)));
this.builder.configure(template);
}
@Test
void customizersShouldReplaceExisting() {
RestTemplateCustomizer customizer1 = mock(RestTemplateCustomizer.class);
RestTemplateCustomizer customizer2 = mock(RestTemplateCustomizer.class);
RestTemplate template = this.builder.customizers(customizer1)
.customizers(Collections.singleton(customizer2))
.build();
then(customizer1).shouldHaveNoInteractions();
then(customizer2).should().customize(template);
}
@Test
@SuppressWarnings("NullAway") // Test null check
void additionalCustomizersWhenCustomizersAreNullShouldThrowException() {
assertThatIllegalArgumentException()
.isThrownBy(() -> this.builder.additionalCustomizers((RestTemplateCustomizer[]) null))
.withMessageContaining("'customizers' must not be null");
}
@Test
@SuppressWarnings("NullAway") // Test null check
void additionalCustomizersCollectionWhenCustomizersAreNullShouldThrowException() {
assertThatIllegalArgumentException()
.isThrownBy(() -> this.builder.additionalCustomizers((Set<RestTemplateCustomizer>) null))
.withMessageContaining("customizers' must not be null");
}
@Test
void additionalCustomizersShouldAddToExisting() {
RestTemplateCustomizer customizer1 = mock(RestTemplateCustomizer.class);
RestTemplateCustomizer customizer2 = mock(RestTemplateCustomizer.class);
RestTemplate template = this.builder.customizers(customizer1).additionalCustomizers(customizer2).build();
InOrder ordered = inOrder(customizer1, customizer2);
ordered.verify(customizer1).customize(template);
ordered.verify(customizer2).customize(template);
}
@Test
void customizerShouldBeAppliedAtTheEnd() {
ResponseErrorHandler errorHandler = mock(ResponseErrorHandler.class);
ClientHttpRequestFactory requestFactory = new HttpComponentsClientHttpRequestFactory();
this.builder.interceptors(this.interceptor)
.messageConverters(this.messageConverter)
.rootUri("http://localhost:8080")
.errorHandler(errorHandler)
.basicAuthentication("spring", "boot")
.requestFactory(() -> requestFactory)
.customizers((restTemplate) -> {
assertThat(restTemplate.getInterceptors()).hasSize(1);
assertThat(restTemplate.getMessageConverters()).contains(this.messageConverter);
assertThat(restTemplate.getUriTemplateHandler()).isInstanceOf(RootUriBuilderFactory.class);
assertThat(restTemplate.getErrorHandler()).isEqualTo(errorHandler);
ClientHttpRequestFactory actualRequestFactory = restTemplate.getRequestFactory();
assertThat(actualRequestFactory).isInstanceOf(InterceptingClientHttpRequestFactory.class);
ClientHttpRequestInitializer initializer = restTemplate.getClientHttpRequestInitializers().get(0);
assertThat(initializer).isInstanceOf(RestTemplateBuilderClientHttpRequestInitializer.class);
})
.build();
}
@Test
void buildShouldReturnRestTemplate() {
RestTemplate template = this.builder.build();
assertThat(template.getClass()).isEqualTo(RestTemplate.class);
}
@Test
void buildClassShouldReturnClassInstance() {
RestTemplateSubclass template = this.builder.build(RestTemplateSubclass.class);
assertThat(template.getClass()).isEqualTo(RestTemplateSubclass.class);
}
@Test
void configureShouldApply() {
RestTemplate template = new RestTemplate();
this.builder.configure(template);
assertThat(template.getRequestFactory()).isInstanceOf(HttpComponentsClientHttpRequestFactory.class);
}
@Test
void unwrappingDoesNotAffectRequestFactoryThatIsSetOnTheBuiltTemplate() {
SimpleClientHttpRequestFactory requestFactory = new SimpleClientHttpRequestFactory();
RestTemplate template = this.builder.requestFactory(() -> new BufferingClientHttpRequestFactory(requestFactory))
.build();
assertThat(template.getRequestFactory()).isInstanceOf(BufferingClientHttpRequestFactory.class);
}
@Test
void configureRedirects() {
assertThat(this.builder.redirects(HttpRedirects.DONT_FOLLOW)).extracting("clientSettings")
.extracting("redirects")
.isSameAs(HttpRedirects.DONT_FOLLOW);
}
private ClientHttpRequest createRequest(RestTemplate template) {
ClientHttpRequest request = ReflectionTestUtils.invokeMethod(template, "createRequest",
URI.create("http://localhost"), HttpMethod.GET);
assertThat(request).isNotNull();
return request;
}
static | RestTemplateBuilderTests |
java | elastic__elasticsearch | x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/TimeWarpedWatcher.java | {
"start": 1516,
"end": 3353
} | class ____ extends LocalStateCompositeXPackPlugin implements ReloadablePlugin {
private static final Logger logger = LogManager.getLogger(TimeWarpedWatcher.class);
// use a single clock across all nodes using this plugin, this lets keep it static
private static final ClockMock clock = new ClockMock();
private final Watcher watcher;
public TimeWarpedWatcher(final Settings settings, final Path configPath) throws Exception {
super(settings, configPath);
logger.info("using time warped watchers plugin");
TimeWarpedWatcher thisVar = this;
this.watcher = new Watcher(settings) {
@Override
protected SSLService getSslService() {
return thisVar.getSslService();
}
@Override
protected XPackLicenseState getLicenseState() {
return thisVar.getLicenseState();
}
@Override
protected Clock getClock() {
return clock;
}
@Override
protected TriggerEngine<?, ?> getTriggerEngine(Clock clock, ScheduleRegistry scheduleRegistry) {
return new ScheduleTriggerEngineMock(scheduleRegistry, clock);
}
@Override
protected WatchExecutor getWatchExecutor(ThreadPool threadPool) {
return new SameThreadExecutor();
}
@Override
protected Consumer<Iterable<TriggerEvent>> getTriggerEngineListener(ExecutionService executionService) {
return new SyncTriggerEventConsumer(executionService);
}
};
plugins.add(watcher);
}
@Override
public void reload(Settings settings) throws Exception {
this.watcher.reload(settings);
}
public static | TimeWarpedWatcher |
java | apache__kafka | clients/src/main/java/org/apache/kafka/common/utils/ImplicitLinkedHashCollection.java | {
"start": 3497,
"end": 5225
} | class ____ implements Element {
static final HeadElement EMPTY = new HeadElement();
private int prev = HEAD_INDEX;
private int next = HEAD_INDEX;
@Override
public int prev() {
return prev;
}
@Override
public void setPrev(int prev) {
this.prev = prev;
}
@Override
public int next() {
return next;
}
@Override
public void setNext(int next) {
this.next = next;
}
}
private static Element indexToElement(Element head, Element[] elements, int index) {
if (index == HEAD_INDEX) {
return head;
}
return elements[index];
}
private static void addToListTail(Element head, Element[] elements, int elementIdx) {
int oldTailIdx = head.prev();
Element element = indexToElement(head, elements, elementIdx);
Element oldTail = indexToElement(head, elements, oldTailIdx);
head.setPrev(elementIdx);
oldTail.setNext(elementIdx);
element.setPrev(oldTailIdx);
element.setNext(HEAD_INDEX);
}
private static void removeFromList(Element head, Element[] elements, int elementIdx) {
Element element = indexToElement(head, elements, elementIdx);
elements[elementIdx] = null;
int prevIdx = element.prev();
int nextIdx = element.next();
Element prev = indexToElement(head, elements, prevIdx);
Element next = indexToElement(head, elements, nextIdx);
prev.setNext(nextIdx);
next.setPrev(prevIdx);
element.setNext(INVALID_INDEX);
element.setPrev(INVALID_INDEX);
}
private | HeadElement |
java | quarkusio__quarkus | extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/RefreshableVerificationKeyResolver.java | {
"start": 132,
"end": 296
} | interface ____ extends VerificationKeyResolver {
default Uni<Void> refresh() {
return Uni.createFrom().voidItem();
}
} | RefreshableVerificationKeyResolver |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/WordStandardDeviation.java | {
"start": 3257,
"end": 7234
} | class ____ extends
Reducer<Text, LongWritable, Text, LongWritable> {
private LongWritable val = new LongWritable();
/**
* Sums all the individual values within the iterator and writes them to the
* same key.
*
* @param key
* This will be one of 2 constants: LENGTH_STR, COUNT_STR, or
* SQUARE_STR.
* @param values
* This will be an iterator of all the values associated with that
* key.
*/
public void reduce(Text key, Iterable<LongWritable> values, Context context)
throws IOException, InterruptedException {
int sum = 0;
for (LongWritable value : values) {
sum += value.get();
}
val.set(sum);
context.write(key, val);
}
}
/**
* Reads the output file and parses the summation of lengths, the word count,
* and the lengths squared, to perform a quick calculation of the standard
* deviation.
*
* @param path
* The path to find the output file in. Set in main to the output
* directory.
* @throws IOException
* If it cannot access the output directory, we throw an exception.
*/
private double readAndCalcStdDev(Path path, Configuration conf)
throws IOException {
FileSystem fs = FileSystem.get(conf);
Path file = new Path(path, "part-r-00000");
if (!fs.exists(file))
throw new IOException("Output not found!");
double stddev = 0;
BufferedReader br = null;
try {
br = new BufferedReader(new InputStreamReader(fs.open(file), StandardCharsets.UTF_8));
long count = 0;
long length = 0;
long square = 0;
String line;
while ((line = br.readLine()) != null) {
StringTokenizer st = new StringTokenizer(line);
// grab type
String type = st.nextToken();
// differentiate
if (type.equals(COUNT.toString())) {
String countLit = st.nextToken();
count = Long.parseLong(countLit);
} else if (type.equals(LENGTH.toString())) {
String lengthLit = st.nextToken();
length = Long.parseLong(lengthLit);
} else if (type.equals(SQUARE.toString())) {
String squareLit = st.nextToken();
square = Long.parseLong(squareLit);
}
}
// average = total sum / number of elements;
double mean = (((double) length) / ((double) count));
// standard deviation = sqrt((sum(lengths ^ 2)/count) - (mean ^ 2))
mean = Math.pow(mean, 2.0);
double term = (((double) square / ((double) count)));
stddev = Math.sqrt((term - mean));
System.out.println("The standard deviation is: " + stddev);
} finally {
if (br != null) {
br.close();
}
}
return stddev;
}
public static void main(String[] args) throws Exception {
ToolRunner.run(new Configuration(), new WordStandardDeviation(),
args);
}
@Override
public int run(String[] args) throws Exception {
if (args.length != 2) {
System.err.println("Usage: wordstddev <in> <out>");
return 0;
}
Configuration conf = getConf();
Job job = Job.getInstance(conf, "word stddev");
job.setJarByClass(WordStandardDeviation.class);
job.setMapperClass(WordStandardDeviationMapper.class);
job.setCombinerClass(WordStandardDeviationReducer.class);
job.setReducerClass(WordStandardDeviationReducer.class);
job.setOutputKeyClass(Text.class);
job.setOutputValueClass(LongWritable.class);
FileInputFormat.addInputPath(job, new Path(args[0]));
Path outputpath = new Path(args[1]);
FileOutputFormat.setOutputPath(job, outputpath);
boolean result = job.waitForCompletion(true);
// read output and calculate standard deviation
stddev = readAndCalcStdDev(outputpath, conf);
return (result ? 0 : 1);
}
public double getStandardDeviation() {
return stddev;
}
} | WordStandardDeviationReducer |
java | quarkusio__quarkus | test-framework/junit5/src/main/java/io/quarkus/test/junit/DisableIfBuiltWithGraalVMOlderThanCondition.java | {
"start": 599,
"end": 3276
} | class ____ implements ExecutionCondition {
private static final String QUARKUS_INTEGRATION_TEST_NAME = QuarkusIntegrationTest.class.getName();
private static final Set<String> SUPPORTED_INTEGRATION_TESTS = Set.of(QUARKUS_INTEGRATION_TEST_NAME);
@Override
public ConditionEvaluationResult evaluateExecutionCondition(ExtensionContext context) {
Optional<AnnotatedElement> element = context.getElement();
Optional<DisableIfBuiltWithGraalVMOlderThan> optional = findAnnotation(element,
DisableIfBuiltWithGraalVMOlderThan.class);
if (!optional.isPresent()) {
return ConditionEvaluationResult.enabled("@DisableIfBuiltWithGraalVMOlderThan was not found");
}
if (!isIntegrationTest(context.getRequiredTestClass())) {
return ConditionEvaluationResult.enabled("@DisableIfBuiltWithGraalVMOlderThan was added to an unsupported test");
}
GraalVMVersion annotationValue = optional.get().value();
Properties quarkusArtifactProperties = readQuarkusArtifactProperties(context);
try {
GraalVM.Version version = GraalVM.Version
.of(quarkusArtifactProperties.getProperty("metadata.graalvm.version.full").lines());
int comparison = annotationValue.getVersion().compareTo(version);
if (comparison > 0) {
return ConditionEvaluationResult.disabled("Native binary was built with GraalVM{version=" + version.toString()
+ "} but the test is disabled for GraalVM versions older than " + annotationValue);
}
return ConditionEvaluationResult
.enabled("Native binary was built with a GraalVM version compatible with the required version by the test");
} catch (NumberFormatException e) {
return ConditionEvaluationResult
.disabled("Unable to determine the GraalVM version with which the native binary was built");
}
}
private boolean isIntegrationTest(Class<?> testClass) {
do {
Annotation[] annotations = testClass.getAnnotations();
for (Annotation annotation : annotations) {
Class<? extends Annotation> annotationType = annotation.annotationType();
String annotationTypeName = annotationType.getName();
if (SUPPORTED_INTEGRATION_TESTS.contains(annotationTypeName)) {
return true;
}
}
testClass = testClass.getSuperclass();
} while (testClass != Object.class);
return false;
}
}
| DisableIfBuiltWithGraalVMOlderThanCondition |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/config/BeanOverridingDefaultConfigClassesInheritedTests.java | {
"start": 1373,
"end": 1789
} | class ____ extends DefaultConfigClassesBaseTests {
@Test
@Override
void verifyEmployeeSetFromBaseContextConfig() {
assertThat(this.employee).as("The employee should have been autowired.").isNotNull();
assertThat(this.employee.getName()).as("The employee bean should have been overridden.").isEqualTo("Yoda");
}
@Configuration(proxyBeanMethods = false)
static | BeanOverridingDefaultConfigClassesInheritedTests |
java | elastic__elasticsearch | x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/input/CachedBlobContainerIndexInputTests.java | {
"start": 15038,
"end": 16634
} | class ____ extends FilterInputStream {
private final CountingBlobContainer container;
private long position = 0L;
private long start = Long.MAX_VALUE;
private long end = Long.MIN_VALUE;
CountingInputStream(CountingBlobContainer container, InputStream input) {
super(input);
this.container = Objects.requireNonNull(container);
this.container.totalOpens.increment();
this.container.openStreams.incrementAndGet();
}
@Override
public int read() throws IOException {
if (position < start) {
start = position;
}
final int result = in.read();
if (result == -1) {
return result;
}
this.container.totalBytes.increment();
position += 1L;
if (position > end) {
end = position;
}
return result;
}
@Override
public int read(byte[] b, int offset, int len) throws IOException {
if (position < start) {
start = position;
}
final int result = in.read(b, offset, len);
this.container.totalBytes.add(len);
position += len;
if (position > end) {
end = position;
}
return result;
}
@Override
public void close() throws IOException {
super.close();
this.container.openStreams.decrementAndGet();
}
}
}
| CountingInputStream |
java | mapstruct__mapstruct | processor/src/main/java/org/mapstruct/ap/internal/gem/ReportingPolicyGem.java | {
"start": 359,
"end": 1098
} | enum ____ {
IGNORE( null, false, false ),
WARN( Kind.WARNING, true, false ),
ERROR( Kind.ERROR, true, true );
private final Diagnostic.Kind diagnosticKind;
private final boolean requiresReport;
private final boolean failsBuild;
ReportingPolicyGem(Diagnostic.Kind diagnosticKind, boolean requiresReport, boolean failsBuild) {
this.requiresReport = requiresReport;
this.diagnosticKind = diagnosticKind;
this.failsBuild = failsBuild;
}
public Diagnostic.Kind getDiagnosticKind() {
return diagnosticKind;
}
public boolean requiresReport() {
return requiresReport;
}
public boolean failsBuild() {
return failsBuild;
}
}
| ReportingPolicyGem |
java | google__truth | extensions/proto/src/main/java/com/google/common/truth/extensions/proto/MultimapWithProtoValuesSubject.java | {
"start": 2896,
"end": 29469
} | class ____<M extends Message> extends MultimapSubject {
/*
* Storing a FailureMetadata instance in a Subject subclass is generally a bad practice. For an
* explanation of why it works out OK here, see LiteProtoSubject.
*/
private final FailureMetadata metadata;
private final Multimap<?, M> actual;
private final FluentEqualityConfig config;
protected MultimapWithProtoValuesSubject(
FailureMetadata failureMetadata, @Nullable Multimap<?, M> multimap) {
this(failureMetadata, FluentEqualityConfig.defaultInstance(), multimap);
}
MultimapWithProtoValuesSubject(
FailureMetadata failureMetadata,
FluentEqualityConfig config,
@Nullable Multimap<?, M> multimap) {
super(failureMetadata, multimap);
this.metadata = failureMetadata;
this.actual = multimap;
this.config = config;
}
/**
* Returns a context-aware {@link Subject} for making assertions about the values for the given
* key within the {@link Multimap}.
*
* <p>This method performs no checks on its own and cannot cause test failures. Subsequent
* assertions must be chained onto this method call to test properties of the {@link Multimap}.
*/
/*
* This is mostly safe because we only read from the map. And if it produces NPE/CCE immediately,
* that's no worse than many existing Collection implementations....
*/
@SuppressWarnings("unchecked")
@Override
public IterableOfProtosSubject<M> valuesForKey(@Nullable Object key) {
return check("valuesForKey(%s)", key)
.about(protos())
.that(((Multimap<Object, M>) actual).get(key));
}
//////////////////////////////////////////////////////////////////////////////////////////////////
// MultimapWithProtoValuesFluentAssertion Configuration
//////////////////////////////////////////////////////////////////////////////////////////////////
MultimapWithProtoValuesFluentAssertion<M> usingConfig(FluentEqualityConfig newConfig) {
return new MultimapWithProtoValuesFluentAssertionImpl<>(
new MultimapWithProtoValuesSubject<>(metadata, newConfig, actual));
}
/**
* Specifies that the 'has' bit of individual fields should be ignored when comparing for
* equality.
*
* <p>For version 2 Protocol Buffers, this setting determines whether two protos with the same
* value for a field compare equal if one explicitly sets the value, and the other merely
* implicitly uses the schema-defined default. This setting also determines whether unknown fields
* should be considered in the comparison. By {@code ignoringFieldAbsence()}, unknown fields are
* ignored, and value-equal fields as specified above are considered equal.
*
* <p>For version 3 Protocol Buffers, this setting does not affect primitive fields, because their
* default value is indistinguishable from unset.
*/
public MultimapWithProtoValuesFluentAssertion<M> ignoringFieldAbsenceForValues() {
return usingConfig(config.ignoringFieldAbsence());
}
/**
* Specifies that the 'has' bit of these explicitly specified top-level field numbers should be
* ignored when comparing for equality. Sub-fields must be specified explicitly (via {@link
* FieldDescriptor}) if they are to be ignored as well.
*
* <p>Use {@link #ignoringFieldAbsenceForValues()} instead to ignore the 'has' bit for all fields.
*
* @see #ignoringFieldAbsenceForValues() for details
*/
public MultimapWithProtoValuesFluentAssertion<M> ignoringFieldAbsenceOfFieldsForValues(
int firstFieldNumber, int... rest) {
return usingConfig(config.ignoringFieldAbsenceOfFields(asList(firstFieldNumber, rest)));
}
/**
* Specifies that the 'has' bit of these explicitly specified top-level field numbers should be
* ignored when comparing for equality. Sub-fields must be specified explicitly (via {@link
* FieldDescriptor}) if they are to be ignored as well.
*
* <p>Use {@link #ignoringFieldAbsenceForValues()} instead to ignore the 'has' bit for all fields.
*
* @see #ignoringFieldAbsenceForValues() for details
*/
public MultimapWithProtoValuesFluentAssertion<M> ignoringFieldAbsenceOfFieldsForValues(
Iterable<Integer> fieldNumbers) {
return usingConfig(config.ignoringFieldAbsenceOfFields(fieldNumbers));
}
/**
* Specifies that the 'has' bit of these explicitly specified field descriptors should be ignored
* when comparing for equality. Sub-fields must be specified explicitly if they are to be ignored
* as well.
*
* <p>Use {@link #ignoringFieldAbsenceForValues()} instead to ignore the 'has' bit for all fields.
*
* @see #ignoringFieldAbsenceForValues() for details
*/
public MultimapWithProtoValuesFluentAssertion<M> ignoringFieldAbsenceOfFieldDescriptorsForValues(
FieldDescriptor firstFieldDescriptor, FieldDescriptor... rest) {
return usingConfig(
config.ignoringFieldAbsenceOfFieldDescriptors(asList(firstFieldDescriptor, rest)));
}
/**
* Specifies that the 'has' bit of these explicitly specified field descriptors should be ignored
* when comparing for equality. Sub-fields must be specified explicitly if they are to be ignored
* as well.
*
* <p>Use {@link #ignoringFieldAbsenceForValues()} instead to ignore the 'has' bit for all fields.
*
* @see #ignoringFieldAbsenceForValues() for details
*/
public MultimapWithProtoValuesFluentAssertion<M> ignoringFieldAbsenceOfFieldDescriptorsForValues(
Iterable<FieldDescriptor> fieldDescriptors) {
return usingConfig(config.ignoringFieldAbsenceOfFieldDescriptors(fieldDescriptors));
}
/**
* Specifies that the ordering of repeated fields, at all levels, should be ignored when comparing
* for equality.
*
* <p>This setting applies to all repeated fields recursively, but it does not ignore structure.
* For example, with {@link #ignoringRepeatedFieldOrderForValues()}, a repeated {@code int32}
* field {@code bar}, set inside a repeated message field {@code foo}, the following protos will
* all compare equal:
*
* <pre>{@code
* message1: {
* foo: {
* bar: 1
* bar: 2
* }
* foo: {
* bar: 3
* bar: 4
* }
* }
*
* message2: {
* foo: {
* bar: 2
* bar: 1
* }
* foo: {
* bar: 4
* bar: 3
* }
* }
*
* message3: {
* foo: {
* bar: 4
* bar: 3
* }
* foo: {
* bar: 2
* bar: 1
* }
* }
* }</pre>
*
* <p>However, the following message will compare equal to none of these:
*
* <pre>{@code
* message4: {
* foo: {
* bar: 1
* bar: 3
* }
* foo: {
* bar: 2
* bar: 4
* }
* }
* }</pre>
*
* <p>This setting does not apply to map fields, for which field order is always ignored. The
* serialization order of map fields is undefined, and it may change from runtime to runtime.
*/
public MultimapWithProtoValuesFluentAssertion<M> ignoringRepeatedFieldOrderForValues() {
return usingConfig(config.ignoringRepeatedFieldOrder());
}
/**
* Specifies that the ordering of repeated fields for these explicitly specified top-level field
* numbers should be ignored when comparing for equality. Sub-fields must be specified explicitly
* (via {@link FieldDescriptor}) if their orders are to be ignored as well.
*
* <p>Use {@link #ignoringRepeatedFieldOrderForValues()} instead to ignore order for all fields.
*
* @see #ignoringRepeatedFieldOrderForValues() for details.
*/
public MultimapWithProtoValuesFluentAssertion<M> ignoringRepeatedFieldOrderOfFieldsForValues(
int firstFieldNumber, int... rest) {
return usingConfig(config.ignoringRepeatedFieldOrderOfFields(asList(firstFieldNumber, rest)));
}
/**
* Specifies that the ordering of repeated fields for these explicitly specified top-level field
* numbers should be ignored when comparing for equality. Sub-fields must be specified explicitly
* (via {@link FieldDescriptor}) if their orders are to be ignored as well.
*
* <p>Use {@link #ignoringRepeatedFieldOrderForValues()} instead to ignore order for all fields.
*
* @see #ignoringRepeatedFieldOrderForValues() for details.
*/
public MultimapWithProtoValuesFluentAssertion<M> ignoringRepeatedFieldOrderOfFieldsForValues(
Iterable<Integer> fieldNumbers) {
return usingConfig(config.ignoringRepeatedFieldOrderOfFields(fieldNumbers));
}
/**
* Specifies that the ordering of repeated fields for these explicitly specified field descriptors
* should be ignored when comparing for equality. Sub-fields must be specified explicitly if their
* orders are to be ignored as well.
*
* <p>Use {@link #ignoringRepeatedFieldOrderForValues()} instead to ignore order for all fields.
*
* @see #ignoringRepeatedFieldOrderForValues() for details.
*/
public MultimapWithProtoValuesFluentAssertion<M>
ignoringRepeatedFieldOrderOfFieldDescriptorsForValues(
FieldDescriptor firstFieldDescriptor, FieldDescriptor... rest) {
return usingConfig(
config.ignoringRepeatedFieldOrderOfFieldDescriptors(asList(firstFieldDescriptor, rest)));
}
/**
* Specifies that the ordering of repeated fields for these explicitly specified field descriptors
* should be ignored when comparing for equality. Sub-fields must be specified explicitly if their
* orders are to be ignored as well.
*
* <p>Use {@link #ignoringRepeatedFieldOrderForValues()} instead to ignore order for all fields.
*
* @see #ignoringRepeatedFieldOrderForValues() for details.
*/
public MultimapWithProtoValuesFluentAssertion<M>
ignoringRepeatedFieldOrderOfFieldDescriptorsForValues(
Iterable<FieldDescriptor> fieldDescriptors) {
return usingConfig(config.ignoringRepeatedFieldOrderOfFieldDescriptors(fieldDescriptors));
}
/**
* Specifies that, for all repeated and map fields, any elements in the 'actual' proto which are
* not found in the 'expected' proto are ignored, with the exception of fields in the expected
* proto which are empty. To ignore empty repeated fields as well, use {@link
* #comparingExpectedFieldsOnlyForValues}.
*
* <p>This rule is applied independently from {@link #ignoringRepeatedFieldOrderForValues}. If
* ignoring repeated field order AND extra repeated field elements, all that is tested is that the
* expected elements comprise a subset of the actual elements. If not ignoring repeated field
* order, but still ignoring extra repeated field elements, the actual elements must contain a
* subsequence that matches the expected elements for the test to pass. (The subsequence rule does
* not apply to Map fields, which are always compared by key.)
*/
public MultimapWithProtoValuesFluentAssertion<M> ignoringExtraRepeatedFieldElementsForValues() {
return usingConfig(config.ignoringExtraRepeatedFieldElements());
}
/**
* Specifies that extra repeated field elements for these explicitly specified top-level field
* numbers should be ignored. Sub-fields must be specified explicitly (via {@link
* FieldDescriptor}) if their extra elements are to be ignored as well.
*
* <p>Use {@link #ignoringExtraRepeatedFieldElementsForValues()} instead to ignore these for all
* fields.
*
* @see #ignoringExtraRepeatedFieldElementsForValues() for details.
*/
public MultimapWithProtoValuesFluentAssertion<M>
ignoringExtraRepeatedFieldElementsOfFieldsForValues(int firstFieldNumber, int... rest) {
return usingConfig(
config.ignoringExtraRepeatedFieldElementsOfFields(asList(firstFieldNumber, rest)));
}
/**
* Specifies that extra repeated field elements for these explicitly specified top-level field
* numbers should be ignored. Sub-fields must be specified explicitly (via {@link
* FieldDescriptor}) if their extra elements are to be ignored as well.
*
* <p>Use {@link #ignoringExtraRepeatedFieldElementsForValues()} instead to ignore these for all
* fields.
*
* @see #ignoringExtraRepeatedFieldElementsForValues() for details.
*/
public MultimapWithProtoValuesFluentAssertion<M>
ignoringExtraRepeatedFieldElementsOfFieldsForValues(Iterable<Integer> fieldNumbers) {
return usingConfig(config.ignoringExtraRepeatedFieldElementsOfFields(fieldNumbers));
}
/**
* Specifies that extra repeated field elements for these explicitly specified field descriptors
* should be ignored. Sub-fields must be specified explicitly if their extra elements are to be
* ignored as well.
*
* <p>Use {@link #ignoringExtraRepeatedFieldElementsForValues()} instead to ignore these for all
* fields.
*
* @see #ignoringExtraRepeatedFieldElementsForValues() for details.
*/
public MultimapWithProtoValuesFluentAssertion<M>
ignoringExtraRepeatedFieldElementsOfFieldDescriptorsForValues(
FieldDescriptor firstFieldDescriptor, FieldDescriptor... rest) {
return usingConfig(
config.ignoringExtraRepeatedFieldElementsOfFieldDescriptors(
asList(firstFieldDescriptor, rest)));
}
/**
* Specifies that extra repeated field elements for these explicitly specified field descriptors
* should be ignored. Sub-fields must be specified explicitly if their extra elements are to be
* ignored as well.
*
* <p>Use {@link #ignoringExtraRepeatedFieldElementsForValues()} instead to ignore these for all
* fields.
*
* @see #ignoringExtraRepeatedFieldElementsForValues() for details.
*/
public MultimapWithProtoValuesFluentAssertion<M>
ignoringExtraRepeatedFieldElementsOfFieldDescriptorsForValues(
Iterable<FieldDescriptor> fieldDescriptors) {
return usingConfig(
config.ignoringExtraRepeatedFieldElementsOfFieldDescriptors(fieldDescriptors));
}
/**
* Compares double fields as equal if they are both finite and their absolute difference is less
* than or equal to {@code tolerance}.
*
* @param tolerance A finite, non-negative tolerance.
*/
public MultimapWithProtoValuesFluentAssertion<M> usingDoubleToleranceForValues(double tolerance) {
return usingConfig(config.usingDoubleTolerance(tolerance));
}
/**
* Compares double fields with these explicitly specified top-level field numbers using the
* provided absolute tolerance.
*
* @param tolerance A finite, non-negative tolerance.
*/
public MultimapWithProtoValuesFluentAssertion<M> usingDoubleToleranceForFieldsForValues(
double tolerance, int firstFieldNumber, int... rest) {
return usingConfig(
config.usingDoubleToleranceForFields(tolerance, asList(firstFieldNumber, rest)));
}
/**
* Compares double fields with these explicitly specified top-level field numbers using the
* provided absolute tolerance.
*
* @param tolerance A finite, non-negative tolerance.
*/
public MultimapWithProtoValuesFluentAssertion<M> usingDoubleToleranceForFieldsForValues(
double tolerance, Iterable<Integer> fieldNumbers) {
return usingConfig(config.usingDoubleToleranceForFields(tolerance, fieldNumbers));
}
/**
* Compares double fields with these explicitly specified fields using the provided absolute
* tolerance.
*
* @param tolerance A finite, non-negative tolerance.
*/
public MultimapWithProtoValuesFluentAssertion<M> usingDoubleToleranceForFieldDescriptorsForValues(
double tolerance, FieldDescriptor firstFieldDescriptor, FieldDescriptor... rest) {
return usingConfig(
config.usingDoubleToleranceForFieldDescriptors(
tolerance, asList(firstFieldDescriptor, rest)));
}
/**
* Compares double fields with these explicitly specified fields using the provided absolute
* tolerance.
*
* @param tolerance A finite, non-negative tolerance.
*/
public MultimapWithProtoValuesFluentAssertion<M> usingDoubleToleranceForFieldDescriptorsForValues(
double tolerance, Iterable<FieldDescriptor> fieldDescriptors) {
return usingConfig(config.usingDoubleToleranceForFieldDescriptors(tolerance, fieldDescriptors));
}
/**
* Compares float fields as equal if they are both finite and their absolute difference is less
* than or equal to {@code tolerance}.
*
* @param tolerance A finite, non-negative tolerance.
*/
public MultimapWithProtoValuesFluentAssertion<M> usingFloatToleranceForValues(float tolerance) {
return usingConfig(config.usingFloatTolerance(tolerance));
}
/**
* Compares float fields with these explicitly specified top-level field numbers using the
* provided absolute tolerance.
*
* @param tolerance A finite, non-negative tolerance.
*/
public MultimapWithProtoValuesFluentAssertion<M> usingFloatToleranceForFieldsForValues(
float tolerance, int firstFieldNumber, int... rest) {
return usingConfig(
config.usingFloatToleranceForFields(tolerance, asList(firstFieldNumber, rest)));
}
/**
* Compares float fields with these explicitly specified top-level field numbers using the
* provided absolute tolerance.
*
* @param tolerance A finite, non-negative tolerance.
*/
public MultimapWithProtoValuesFluentAssertion<M> usingFloatToleranceForFieldsForValues(
float tolerance, Iterable<Integer> fieldNumbers) {
return usingConfig(config.usingFloatToleranceForFields(tolerance, fieldNumbers));
}
/**
* Compares float fields with these explicitly specified fields using the provided absolute
* tolerance.
*
* @param tolerance A finite, non-negative tolerance.
*/
public MultimapWithProtoValuesFluentAssertion<M> usingFloatToleranceForFieldDescriptorsForValues(
float tolerance, FieldDescriptor firstFieldDescriptor, FieldDescriptor... rest) {
return usingConfig(
config.usingFloatToleranceForFieldDescriptors(
tolerance, asList(firstFieldDescriptor, rest)));
}
/**
* Compares float fields with these explicitly specified top-level field numbers using the
* provided absolute tolerance.
*
* @param tolerance A finite, non-negative tolerance.
*/
public MultimapWithProtoValuesFluentAssertion<M> usingFloatToleranceForFieldDescriptorsForValues(
float tolerance, Iterable<FieldDescriptor> fieldDescriptors) {
return usingConfig(config.usingFloatToleranceForFieldDescriptors(tolerance, fieldDescriptors));
}
/**
* Limits the comparison of Protocol buffers to the fields set in the expected proto(s). When
* multiple protos are specified, the comparison is limited to the union of set fields in all the
* expected protos.
*
* <p>The "expected proto(s)" are those passed to the method at the end of the call chain, such as
* {@link #containsEntry} or {@link #containsExactlyEntriesIn}.
*
* <p>Fields not set in the expected proto(s) are ignored. In particular, proto3 fields which have
* their default values are ignored, as these are indistinguishable from unset fields. If you want
* to assert that a proto3 message has certain fields with default values, you cannot use this
* method.
*/
public MultimapWithProtoValuesFluentAssertion<M> comparingExpectedFieldsOnlyForValues() {
return usingConfig(config.comparingExpectedFieldsOnly());
}
/**
* Limits the comparison of Protocol buffers to the defined {@link FieldScope}.
*
* <p>This method is additive and has well-defined ordering semantics. If the invoking {@link
* ProtoFluentAssertion} is already scoped to a {@link FieldScope} {@code X}, and this method is
* invoked with {@link FieldScope} {@code Y}, the resultant {@link ProtoFluentAssertion} is
* constrained to the intersection of {@link FieldScope}s {@code X} and {@code Y}.
*
* <p>By default, {@link MultimapWithProtoValuesFluentAssertion} is constrained to {@link
* FieldScopes#all()}, that is, no fields are excluded from comparison.
*/
public MultimapWithProtoValuesFluentAssertion<M> withPartialScopeForValues(
FieldScope fieldScope) {
return usingConfig(config.withPartialScope(checkNotNull(fieldScope, "fieldScope")));
}
/**
* Excludes the top-level message fields with the given tag numbers from the comparison.
*
* <p>This method adds on any previous {@link FieldScope} related settings, overriding previous
* changes to ensure the specified fields are ignored recursively. All sub-fields of these field
* numbers are ignored, and all sub-messages of type {@code M} will also have these field numbers
* ignored.
*
* <p>If an invalid field number is supplied, the terminal comparison operation will throw a
* runtime exception.
*/
public MultimapWithProtoValuesFluentAssertion<M> ignoringFieldsForValues(
int firstFieldNumber, int... rest) {
return ignoringFieldsForValues(asList(firstFieldNumber, rest));
}
/**
* Excludes the top-level message fields with the given tag numbers from the comparison.
*
* <p>This method adds on any previous {@link FieldScope} related settings, overriding previous
* changes to ensure the specified fields are ignored recursively. All sub-fields of these field
* numbers are ignored, and all sub-messages of type {@code M} will also have these field numbers
* ignored.
*
* <p>If an invalid field number is supplied, the terminal comparison operation will throw a
* runtime exception.
*/
public MultimapWithProtoValuesFluentAssertion<M> ignoringFieldsForValues(
Iterable<Integer> fieldNumbers) {
return usingConfig(config.ignoringFields(fieldNumbers));
}
/**
* Excludes all message fields matching the given {@link FieldDescriptor}s from the comparison.
*
* <p>This method adds on any previous {@link FieldScope} related settings, overriding previous
* changes to ensure the specified fields are ignored recursively. All sub-fields of these field
* descriptors are ignored, no matter where they occur in the tree.
*
* <p>If a field descriptor which does not, or cannot occur in the proto structure is supplied, it
* is silently ignored.
*/
public MultimapWithProtoValuesFluentAssertion<M> ignoringFieldDescriptorsForValues(
FieldDescriptor firstFieldDescriptor, FieldDescriptor... rest) {
return ignoringFieldDescriptorsForValues(asList(firstFieldDescriptor, rest));
}
/**
* Excludes all message fields matching the given {@link FieldDescriptor}s from the comparison.
*
* <p>This method adds on any previous {@link FieldScope} related settings, overriding previous
* changes to ensure the specified fields are ignored recursively. All sub-fields of these field
* descriptors are ignored, no matter where they occur in the tree.
*
* <p>If a field descriptor which does not, or cannot occur in the proto structure is supplied, it
* is silently ignored.
*/
public MultimapWithProtoValuesFluentAssertion<M> ignoringFieldDescriptorsForValues(
Iterable<FieldDescriptor> fieldDescriptors) {
return usingConfig(config.ignoringFieldDescriptors(fieldDescriptors));
}
/**
* Excludes all specific field paths under the argument {@link FieldScope} from the comparison.
*
* <p>This method is additive and has well-defined ordering semantics. If the invoking {@link
* ProtoFluentAssertion} is already scoped to a {@link FieldScope} {@code X}, and this method is
* invoked with {@link FieldScope} {@code Y}, the resultant {@link ProtoFluentAssertion} is
* constrained to the subtraction of {@code X - Y}.
*
* <p>By default, {@link ProtoFluentAssertion} is constrained to {@link FieldScopes#all()}, that
* is, no fields are excluded from comparison.
*/
public MultimapWithProtoValuesFluentAssertion<M> ignoringFieldScopeForValues(
FieldScope fieldScope) {
return usingConfig(config.ignoringFieldScope(checkNotNull(fieldScope, "fieldScope")));
}
/**
* If set, in the event of a comparison failure, the error message printed will list only those
* specific fields that did not match between the actual and expected values. Useful for very
* large protocol buffers.
*
* <p>This a purely cosmetic setting, and it has no effect on the behavior of the test.
*/
public MultimapWithProtoValuesFluentAssertion<M> reportingMismatchesOnlyForValues() {
return usingConfig(config.reportingMismatchesOnly());
}
/**
* Specifies the {@link TypeRegistry} and {@link ExtensionRegistry} to use for {@link
* com.google.protobuf.Any Any} messages.
*
* <p>To compare the value of an {@code Any} message, ProtoTruth looks in the given type registry
* for a descriptor for the message's type URL:
*
* <ul>
* <li>If ProtoTruth finds a descriptor, it unpacks the value and compares it against the
* expected value, respecting any configuration methods used for the assertion.
* <li>If ProtoTruth does not find a descriptor (or if the value can't be deserialized with the
* descriptor), it compares the raw, serialized bytes of the expected and actual values.
* </ul>
*
* <p>When ProtoTruth unpacks a value, it is parsing a serialized proto. That proto may contain
* extensions. To look up those extensions, ProtoTruth uses the provided {@link
* ExtensionRegistry}.
*
* @since 1.1
*/
public MultimapWithProtoValuesFluentAssertion<M> unpackingAnyUsingForValues(
TypeRegistry typeRegistry, ExtensionRegistry extensionRegistry) {
return usingConfig(config.unpackingAnyUsing(typeRegistry, extensionRegistry));
}
//////////////////////////////////////////////////////////////////////////////////////////////////
// UsingCorrespondence Methods
//////////////////////////////////////////////////////////////////////////////////////////////////
private MultimapSubject.UsingCorrespondence<M, M> usingCorrespondence(
Iterable<? extends M> expectedValues) {
return comparingValuesUsing(
config
.withExpectedMessages(expectedValues)
.<M>toCorrespondence(FieldScopeUtil.getSingleDescriptor(actual.values())));
}
// The UsingCorrespondence methods have conflicting erasure with default MapSubject methods,
// so we can't implement them both on the same class, but we want to define both so
// MultimapWithProtoValuesSubjects are interchangeable with MapSubjects when no configuration is
// specified. So, we implement a dumb, private delegator to return instead.
private static final | MultimapWithProtoValuesSubject |
java | elastic__elasticsearch | server/src/internalClusterTest/java/org/elasticsearch/persistent/ClusterAndProjectPersistentTasksSmokeIT.java | {
"start": 10053,
"end": 11128
} | class ____ implements PersistentTaskParams {
public static final TestEmptyProjectParams INSTANCE = new TestEmptyProjectParams();
public static final ObjectParser<TestEmptyProjectParams, Void> PARSER = new ObjectParser<>(
TestProjectPersistentTasksExecutor.NAME,
true,
() -> INSTANCE
);
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
return builder.startObject().endObject();
}
@Override
public void writeTo(StreamOutput out) {}
@Override
public String getWriteableName() {
return TestProjectPersistentTasksExecutor.NAME;
}
@Override
public TransportVersion getMinimalSupportedVersion() {
return TransportVersion.current();
}
public static TestEmptyProjectParams fromXContent(XContentParser parser) throws IOException {
return PARSER.parse(parser, null);
}
}
public static | TestEmptyProjectParams |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/internal/OffsetDateTimeByInstantComparatorTest.java | {
"start": 880,
"end": 1857
} | class ____ {
private OffsetDateTimeByInstantComparator comparator;
@BeforeEach
public void setUp() {
comparator = OffsetDateTimeByInstantComparator.getInstance();
}
@Test
void should_have_one_instance() {
assertThat(comparator).isSameAs(OffsetDateTimeByInstantComparator.getInstance());
}
@Test
void should_have_description() {
assertThat(comparator.description()).isEqualTo("OffsetDateTime.timeLineOrder()");
}
@Test
void should_disregard_time_zone_difference() {
ZonedDateTime now = ZonedDateTime.now();
OffsetDateTime inParis = now.withZoneSameInstant(ZoneId.of("Europe/Paris")).toOffsetDateTime();
OffsetDateTime inNewYork = now.withZoneSameInstant(ZoneId.of("America/New_York")).toOffsetDateTime();
assertThat(inParis.compareTo(inNewYork)).as("Built-in comparison should report that they differ").isNotZero();
assertThat(comparator.compare(inParis, inNewYork)).isZero();
}
}
| OffsetDateTimeByInstantComparatorTest |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/text/translate/UnicodeUnpairedSurrogateRemover.java | {
"start": 1341,
"end": 1966
} | class ____ extends CodePointTranslator {
/**
* Constructs a new instance.
*/
public UnicodeUnpairedSurrogateRemover() {
// empty
}
/**
* Implements translate that throws out unpaired surrogates.
* {@inheritDoc}
*/
@Override
public boolean translate(final int codePoint, final Writer out) throws IOException {
// true: It's a surrogate. Write nothing and say we've translated.
return codePoint >= Character.MIN_SURROGATE && codePoint <= Character.MAX_SURROGATE;
// It's not a surrogate. Don't translate it.
}
}
| UnicodeUnpairedSurrogateRemover |
java | elastic__elasticsearch | x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/DebugProxy.java | {
"start": 317,
"end": 353
} | interface ____ compatible proxy.
*/
| for |
java | spring-projects__spring-framework | spring-beans/src/main/java/org/springframework/beans/factory/config/BeanDefinition.java | {
"start": 1567,
"end": 3559
} | interface ____ extends AttributeAccessor, BeanMetadataElement {
/**
* Scope identifier for the standard singleton scope: {@value}.
* <p>Note that extended bean factories might support further scopes.
* @see #setScope
* @see ConfigurableBeanFactory#SCOPE_SINGLETON
*/
String SCOPE_SINGLETON = ConfigurableBeanFactory.SCOPE_SINGLETON;
/**
* Scope identifier for the standard prototype scope: {@value}.
* <p>Note that extended bean factories might support further scopes.
* @see #setScope
* @see ConfigurableBeanFactory#SCOPE_PROTOTYPE
*/
String SCOPE_PROTOTYPE = ConfigurableBeanFactory.SCOPE_PROTOTYPE;
/**
* Role hint indicating that a {@code BeanDefinition} is a major part
* of the application. Typically corresponds to a user-defined bean.
*/
int ROLE_APPLICATION = 0;
/**
* Role hint indicating that a {@code BeanDefinition} is a supporting
* part of some larger configuration, typically an outer
* {@link org.springframework.beans.factory.parsing.ComponentDefinition}.
* {@code SUPPORT} beans are considered important enough to be aware
* of when looking more closely at a particular
* {@link org.springframework.beans.factory.parsing.ComponentDefinition},
* but not when looking at the overall configuration of an application.
*/
int ROLE_SUPPORT = 1;
/**
* Role hint indicating that a {@code BeanDefinition} is providing an
* entirely background role and has no relevance to the end-user. This hint is
* used when registering beans that are completely part of the internal workings
* of a {@link org.springframework.beans.factory.parsing.ComponentDefinition}.
*/
int ROLE_INFRASTRUCTURE = 2;
// Modifiable attributes
/**
* Set the name of the parent definition of this bean definition, if any.
*/
void setParentName(@Nullable String parentName);
/**
* Return the name of the parent definition of this bean definition, if any.
*/
@Nullable String getParentName();
/**
* Specify the bean | BeanDefinition |
java | apache__hadoop | hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemCreate.java | {
"start": 6099,
"end": 15493
} | class ____ extends
AbstractAbfsIntegrationTest {
private static final Path TEST_FILE_PATH = new Path("testfile");
private static final String TEST_FOLDER_PATH = "testFolder";
private static final String TEST_CHILD_FILE = "childFile";
public ITestAzureBlobFileSystemCreate() throws Exception {
super();
}
@Test
public void testEnsureFileCreatedImmediately() throws Exception {
final AzureBlobFileSystem fs = getFileSystem();
FSDataOutputStream out = fs.create(TEST_FILE_PATH);
try {
assertIsFile(fs, TEST_FILE_PATH);
} finally {
out.close();
}
assertIsFile(fs, TEST_FILE_PATH);
}
@Test
@SuppressWarnings("deprecation")
public void testCreateNonRecursive() throws Exception {
final AzureBlobFileSystem fs = getFileSystem();
Path testFolderPath = path(TEST_FOLDER_PATH);
Path testFile = new Path(testFolderPath, TEST_CHILD_FILE);
try {
fs.createNonRecursive(testFile, true, 1024, (short) 1, 1024, null);
fail("Should've thrown");
} catch (FileNotFoundException expected) {
}
fs.registerListener(new TracingHeaderValidator(
fs.getAbfsStore().getAbfsConfiguration().getClientCorrelationId(),
fs.getFileSystemId(), FSOperationType.MKDIR, false, 0));
fs.mkdirs(testFolderPath);
fs.registerListener(null);
fs.createNonRecursive(testFile, true, 1024, (short) 1, 1024, null)
.close();
assertIsFile(fs, testFile);
}
@Test
@SuppressWarnings("deprecation")
public void testCreateNonRecursive1() throws Exception {
final AzureBlobFileSystem fs = getFileSystem();
Path testFolderPath = path(TEST_FOLDER_PATH);
Path testFile = new Path(testFolderPath, TEST_CHILD_FILE);
try {
fs.createNonRecursive(testFile, FsPermission.getDefault(),
EnumSet.of(CreateFlag.CREATE, CreateFlag.OVERWRITE), 1024, (short) 1,
1024, null);
fail("Should've thrown");
} catch (FileNotFoundException expected) {
}
fs.mkdirs(testFolderPath);
fs.createNonRecursive(testFile, true, 1024, (short) 1, 1024, null)
.close();
assertIsFile(fs, testFile);
}
@Test
@SuppressWarnings("deprecation")
public void testCreateNonRecursive2() throws Exception {
final AzureBlobFileSystem fs = getFileSystem();
Path testFolderPath = path(TEST_FOLDER_PATH);
Path testFile = new Path(testFolderPath, TEST_CHILD_FILE);
try {
fs.createNonRecursive(testFile, FsPermission.getDefault(), false, 1024,
(short) 1, 1024, null);
fail("Should've thrown");
} catch (FileNotFoundException e) {
}
fs.mkdirs(testFolderPath);
fs.createNonRecursive(testFile, true, 1024, (short) 1, 1024, null)
.close();
assertIsFile(fs, testFile);
}
/**
* Test createNonRecursive when parent exist.
*
* @throws Exception in case of failure
*/
@Test
public void testCreateNonRecursiveWhenParentExist() throws Exception {
AzureBlobFileSystem fs = getFileSystem();
assumeBlobServiceType();
fs.setWorkingDirectory(new Path(ROOT_PATH));
Path createDirectoryPath = new Path("hbase/A");
fs.mkdirs(createDirectoryPath);
fs.createNonRecursive(new Path(createDirectoryPath, "B"), FsPermission
.getDefault(), false, 1024,
(short) 1, 1024, null);
Assertions.assertThat(fs.exists(new Path(createDirectoryPath, "B")))
.describedAs("File should be created").isTrue();
fs.close();
}
/**
* Test createNonRecursive when parent does not exist.
*
* @throws Exception in case of failure
*/
@Test
public void testCreateNonRecursiveWhenParentNotExist() throws Exception {
AzureBlobFileSystem fs = getFileSystem();
assumeBlobServiceType();
fs.setWorkingDirectory(new Path(ROOT_PATH));
Path createDirectoryPath = new Path("A/");
fs.mkdirs(createDirectoryPath);
intercept(FileNotFoundException.class,
() -> fs.createNonRecursive(new Path("A/B/C"), FsPermission
.getDefault(), false, 1024, (short) 1, 1024, null));
Assertions.assertThat(fs.exists(new Path("A/B/C")))
.describedAs("New File should not be created.").isFalse();
fs.close();
}
/**
* Helper method to create a json file.
* @param path parent path
* @param renameJson rename json path
*
* @return file system
* @throws IOException in case of failure
*/
private AzureBlobFileSystem createJsonFile(Path path, Path renameJson) throws IOException {
final AzureBlobFileSystem fs = Mockito.spy(this.getFileSystem());
assumeBlobServiceType();
AzureBlobFileSystemStore store = Mockito.spy(fs.getAbfsStore());
doReturn(store).when(fs).getAbfsStore();
AbfsClient client = Mockito.spy(store.getClient());
doReturn(client).when(store).getClient();
fs.setWorkingDirectory(new Path(ROOT_PATH));
fs.mkdirs(new Path(path, "test3"));
VersionedFileStatus fileStatus
= (VersionedFileStatus) fs.getFileStatus(path);
new RenameAtomicity(path,
new Path("/hbase/test4"), renameJson,
getTestTracingContext(fs, true), fileStatus.getEtag(),
client).preRename();
Assertions.assertThat(fs.exists(renameJson))
.describedAs("Rename Pending Json file should exist.")
.isTrue();
return fs;
}
/**
* Test createNonRecursive when parent does not exist and rename pending exists.
* Rename redo should fail.
* Json file should be deleted.
* No new File creation.
*
* @throws Exception in case of failure
*/
@Test
public void testCreateNonRecursiveWhenParentNotExistAndRenamePendingExist() throws Exception {
AzureBlobFileSystem fs = null;
try {
Path path = new Path("/hbase/test1/test2");
Path renameJson = new Path(path.getParent(), path.getName() + SUFFIX);
fs = createJsonFile(path, renameJson);
fs.delete(path, true);
Assertions.assertThat(fs.exists(renameJson)).isTrue();
AzureBlobFileSystem finalFs = fs;
intercept(FileNotFoundException.class,
() -> finalFs.createNonRecursive(new Path(path, "test4"), FsPermission
.getDefault(), false, 1024, (short) 1, 1024, null));
Assertions.assertThat(finalFs.exists(new Path(path, "test4")))
.describedAs("New File should not be created.")
.isFalse();
Assertions.assertThat(finalFs.exists(renameJson))
.describedAs("Rename Pending Json file should be deleted.")
.isFalse();
} finally {
if (fs != null) {
fs.close();
}
}
}
/**
* Test createNonRecursive when parent and rename pending exist.
* Rename redo should be successful.
* Json file should be deleted.
* No file should be created.
*
* @throws Exception in case of failure
*/
@Test
public void testCreateNonRecursiveWhenParentAndRenamePendingExist() throws Exception {
AzureBlobFileSystem fs = null;
try {
Path path = new Path("/hbase/test1/test2");
Path renameJson = new Path(path.getParent(), path.getName() + SUFFIX);
fs = createJsonFile(path, renameJson);
AzureBlobFileSystem finalFs = fs;
intercept(FileNotFoundException.class,
() -> finalFs.createNonRecursive(new Path(path, "test4"), FsPermission
.getDefault(), false, 1024, (short) 1, 1024, null));
Assertions.assertThat(finalFs.exists(path))
.describedAs("Old path should be deleted.")
.isFalse();
Assertions.assertThat(finalFs.exists(new Path(path, "test4")))
.describedAs("New File should not be created.")
.isFalse();
Assertions.assertThat(finalFs.exists(renameJson))
.describedAs("Rename Pending Json file should be deleted.")
.isFalse();
Assertions.assertThat(finalFs.exists(new Path("/hbase/test4")))
.describedAs("Rename should be successful.")
.isTrue();
} finally {
if (fs != null) {
fs.close();
}
}
}
@Test
public void testCreateOnRoot() throws Exception {
final AzureBlobFileSystem fs = getFileSystem();
Path testFile = path(AbfsHttpConstants.ROOT_PATH);
AbfsRestOperationException ex = intercept(AbfsRestOperationException.class, () ->
fs.create(testFile, true));
if (ex.getStatusCode() != HTTP_CONFLICT) {
// Request should fail with 409.
throw ex;
}
ex = intercept(AbfsRestOperationException.class, () ->
fs.createNonRecursive(testFile, FsPermission.getDefault(),
false, 1024, (short) 1, 1024, null));
if (ex.getStatusCode() != HTTP_CONFLICT) {
// Request should fail with 409.
throw ex;
}
}
/**
* Attempts to use to the ABFS stream after it is closed.
*/
@Test
public void testWriteAfterClose() throws Throwable {
final AzureBlobFileSystem fs = getFileSystem();
Path testFolderPath = path(TEST_FOLDER_PATH);
Path testPath = new Path(testFolderPath, TEST_CHILD_FILE);
FSDataOutputStream out = fs.create(testPath);
out.close();
intercept(IOException.class, () -> out.write('a'));
intercept(IOException.class, () -> out.write(new byte[]{'a'}));
// hsync is not ignored on a closed stream
// out.hsync();
out.flush();
out.close();
}
/**
* Attempts to double close an ABFS output stream from within a
* FilterOutputStream.
* That | ITestAzureBlobFileSystemCreate |
java | elastic__elasticsearch | x-pack/plugin/core/src/test/java/org/elasticsearch/license/internal/TrialLicenseVersionTests.java | {
"start": 599,
"end": 2203
} | class ____ extends ESTestCase {
public void testCanParseAllVersions() {
for (var version : Version.getDeclaredVersions(Version.class)) {
// Only consider versions before the cut-over; the comparison becomes meaningless after the cut-over point
if (version.onOrBefore(Version.fromId(CURRENT_TRIAL_VERSION))) {
TrialLicenseVersion parsedVersion = TrialLicenseVersion.fromXContent(version.toString());
assertTrue(parsedVersion.ableToStartNewTrial());
}
}
}
public void testRoundTripParsing() {
var randomVersion = new TrialLicenseVersion(randomNonNegativeInt());
assertThat(TrialLicenseVersion.fromXContent(randomVersion.toString()), equalTo(randomVersion));
}
public void testNewTrialAllowed() {
assertTrue(new TrialLicenseVersion(randomIntBetween(7_00_00_00, 7_99_99_99)).ableToStartNewTrial());
assertTrue(new TrialLicenseVersion(randomIntBetween(8_00_00_00, 8_99_99_99)).ableToStartNewTrial());
assertFalse(new TrialLicenseVersion(CURRENT.asInt()).ableToStartNewTrial());
assertFalse(new TrialLicenseVersion(randomIntBetween(9_00_00_00, CURRENT_TRIAL_VERSION)).ableToStartNewTrial());
final int trialVersion = randomIntBetween(CURRENT_TRIAL_VERSION, CURRENT.asInt());
if (trialVersion < CURRENT.asInt()) {
assertTrue(new TrialLicenseVersion(trialVersion).ableToStartNewTrial());
} else {
assertFalse(new TrialLicenseVersion(trialVersion).ableToStartNewTrial());
}
}
}
| TrialLicenseVersionTests |
java | apache__hadoop | hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractOpenLive.java | {
"start": 1056,
"end": 1266
} | class ____ extends AbstractContractOpenTest {
@Override
protected AbstractFSContract createContract(Configuration configuration) {
return new AdlStorageContract(configuration);
}
} | TestAdlContractOpenLive |
java | resilience4j__resilience4j | resilience4j-spring-boot2/src/main/java/io/github/resilience4j/bulkhead/autoconfigure/ThreadPoolBulkheadMetricsAutoConfiguration.java | {
"start": 1974,
"end": 2894
} | class ____ {
@Bean
@ConditionalOnProperty(value = "resilience4j.thread-pool-bulkhead.metrics.legacy.enabled", havingValue = "true")
@ConditionalOnMissingBean
public TaggedThreadPoolBulkheadMetrics registerThreadPoolBulkheadMetrics(
ThreadPoolBulkheadRegistry threadPoolBulkheadRegistry) {
return TaggedThreadPoolBulkheadMetrics
.ofThreadPoolBulkheadRegistry(threadPoolBulkheadRegistry);
}
@Bean
@ConditionalOnBean(MeterRegistry.class)
@ConditionalOnProperty(value = "resilience4j.thread-pool-bulkhead.metrics.legacy.enabled", havingValue = "false", matchIfMissing = true)
@ConditionalOnMissingBean
public TaggedThreadPoolBulkheadMetricsPublisher taggedThreadPoolBulkheadMetricsPublisher(
MeterRegistry meterRegistry) {
return new TaggedThreadPoolBulkheadMetricsPublisher(meterRegistry);
}
}
| ThreadPoolBulkheadMetricsAutoConfiguration |
java | junit-team__junit5 | jupiter-tests/src/test/java/org/junit/jupiter/engine/config/CachingJupiterConfigurationTests.java | {
"start": 1265,
"end": 5662
} | class ____ {
private final JupiterConfiguration delegate = mock();
private final JupiterConfiguration cache = new CachingJupiterConfiguration(delegate);
@Test
void cachesDefaultExecutionMode() {
when(delegate.getDefaultExecutionMode()).thenReturn(ExecutionMode.CONCURRENT);
assertThat(cache.getDefaultExecutionMode()).isEqualTo(ExecutionMode.CONCURRENT);
assertThat(cache.getDefaultExecutionMode()).isEqualTo(ExecutionMode.CONCURRENT);
verify(delegate, only()).getDefaultExecutionMode();
}
@Test
void cachesDefaultTestInstanceLifecycle() {
when(delegate.getDefaultTestInstanceLifecycle()).thenReturn(Lifecycle.PER_CLASS);
assertThat(cache.getDefaultTestInstanceLifecycle()).isEqualTo(Lifecycle.PER_CLASS);
assertThat(cache.getDefaultTestInstanceLifecycle()).isEqualTo(Lifecycle.PER_CLASS);
verify(delegate, only()).getDefaultTestInstanceLifecycle();
}
@Test
void cachesExecutionConditionFilter() {
Predicate<ExecutionCondition> predicate = executionCondition -> true;
when(delegate.getExecutionConditionFilter()).thenReturn(predicate);
assertThat(cache.getExecutionConditionFilter()).isSameAs(predicate);
assertThat(cache.getExecutionConditionFilter()).isSameAs(predicate);
verify(delegate, only()).getExecutionConditionFilter();
}
@Test
void cachesExtensionAutoDetectionEnabled() {
when(delegate.isExtensionAutoDetectionEnabled()).thenReturn(true);
assertThat(cache.isExtensionAutoDetectionEnabled()).isTrue();
assertThat(cache.isExtensionAutoDetectionEnabled()).isTrue();
verify(delegate, only()).isExtensionAutoDetectionEnabled();
}
@Test
void cachesParallelExecutionEnabled() {
when(delegate.isParallelExecutionEnabled()).thenReturn(true);
assertThat(cache.isParallelExecutionEnabled()).isTrue();
assertThat(cache.isParallelExecutionEnabled()).isTrue();
verify(delegate, only()).isParallelExecutionEnabled();
}
@Test
void cachesDefaultDisplayNameGenerator() {
CustomDisplayNameGenerator customDisplayNameGenerator = new CustomDisplayNameGenerator();
when(delegate.getDefaultDisplayNameGenerator()).thenReturn(customDisplayNameGenerator);
// call `cache.getDefaultDisplayNameGenerator()` twice to verify the delegate method is called only once.
assertThat(cache.getDefaultDisplayNameGenerator()).isSameAs(customDisplayNameGenerator);
assertThat(cache.getDefaultDisplayNameGenerator()).isSameAs(customDisplayNameGenerator);
verify(delegate, only()).getDefaultDisplayNameGenerator();
}
@Test
void cachesDefaultTestMethodOrderer() {
final Optional<MethodOrderer> methodOrderer = Optional.of(new MethodOrderer.MethodName());
when(delegate.getDefaultTestMethodOrderer()).thenReturn(methodOrderer);
// call `cache.getDefaultTestMethodOrderer()` twice to verify the delegate method is called only once.
assertThat(cache.getDefaultTestMethodOrderer()).isSameAs(methodOrderer);
assertThat(cache.getDefaultTestMethodOrderer()).isSameAs(methodOrderer);
verify(delegate, only()).getDefaultTestMethodOrderer();
}
@Test
void cachesDefaultTempDirCleanupMode() {
when(delegate.getDefaultTempDirCleanupMode()).thenReturn(NEVER);
// call `cache.getDefaultTempStrategyDirCleanupMode()` twice to verify the delegate method is called only once.
assertThat(cache.getDefaultTempDirCleanupMode()).isSameAs(NEVER);
assertThat(cache.getDefaultTempDirCleanupMode()).isSameAs(NEVER);
verify(delegate, only()).getDefaultTempDirCleanupMode();
}
@Test
void cachesDefaultTempDirFactorySupplier() {
Supplier<TempDirFactory> supplier = mock();
when(delegate.getDefaultTempDirFactorySupplier()).thenReturn(supplier);
// call `cache.getDefaultTempDirFactorySupplier()` twice to verify the delegate method is called only once.
assertThat(cache.getDefaultTempDirFactorySupplier()).isSameAs(supplier);
assertThat(cache.getDefaultTempDirFactorySupplier()).isSameAs(supplier);
verify(delegate, only()).getDefaultTempDirFactorySupplier();
}
@Test
void doesNotCacheRawParameters() {
when(delegate.getRawConfigurationParameter("foo")).thenReturn(Optional.of("bar")).thenReturn(
Optional.of("baz"));
assertThat(cache.getRawConfigurationParameter("foo")).contains("bar");
assertThat(cache.getRawConfigurationParameter("foo")).contains("baz");
verify(delegate, times(2)).getRawConfigurationParameter("foo");
verifyNoMoreInteractions(delegate);
}
}
| CachingJupiterConfigurationTests |
java | apache__spark | sql/api/src/main/java/org/apache/spark/sql/api/java/UDF1.java | {
"start": 980,
"end": 1061
} | interface ____<T1, R> extends Serializable {
R call(T1 t1) throws Exception;
}
| UDF1 |
java | ReactiveX__RxJava | src/test/java/io/reactivex/rxjava3/tck/ElementAtTckTest.java | {
"start": 772,
"end": 1091
} | class ____ extends BaseTck<Integer> {
@Override
public Publisher<Integer> createPublisher(final long elements) {
return
Flowable.range(1, 10).elementAt(5).toFlowable()
;
}
@Override
public long maxElementsFromPublisher() {
return 1;
}
}
| ElementAtTckTest |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/RestEndpointBuilderFactory.java | {
"start": 2861,
"end": 5610
} | class ____.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param outType the value to set
* @return the dsl builder
*/
default RestEndpointConsumerBuilder outType(String outType) {
doSetProperty("outType", outType);
return this;
}
/**
* Media type such as: 'text/xml', or 'application/json' this REST
* service returns.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param produces the value to set
* @return the dsl builder
*/
default RestEndpointConsumerBuilder produces(String produces) {
doSetProperty("produces", produces);
return this;
}
/**
* Name of the route this REST services creates.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param routeId the value to set
* @return the dsl builder
*/
default RestEndpointConsumerBuilder routeId(String routeId) {
doSetProperty("routeId", routeId);
return this;
}
/**
* The Camel Rest component to use for the consumer REST transport, such
* as jetty, servlet, undertow. If no component has been explicitly
* configured, then Camel will lookup if there is a Camel component that
* integrates with the Rest DSL, or if a
* org.apache.camel.spi.RestConsumerFactory is registered in the
* registry. If either one is found, then that is being used.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: consumer
*
* @param consumerComponentName the value to set
* @return the dsl builder
*/
default RestEndpointConsumerBuilder consumerComponentName(String consumerComponentName) {
doSetProperty("consumerComponentName", consumerComponentName);
return this;
}
/**
* Human description to document this REST service.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: consumer
*
* @param description the value to set
* @return the dsl builder
*/
default RestEndpointConsumerBuilder description(String description) {
doSetProperty("description", description);
return this;
}
}
/**
* Advanced builder for endpoint consumers for the REST component.
*/
public | name |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/protocol/Command.java | {
"start": 1295,
"end": 5027
} | class ____<K, V, T> implements RedisCommand<K, V, T> {
protected static final byte ST_INITIAL = 0;
protected static final byte ST_COMPLETED = 1;
protected static final byte ST_CANCELLED = 2;
private final ProtocolKeyword type;
protected CommandArgs<K, V> args;
protected CommandOutput<K, V, T> output;
protected Throwable exception;
protected volatile byte status = ST_INITIAL;
/**
* Create a new command with the supplied type.
*
* @param type Command type, must not be {@code null}.
* @param output Command output, can be {@code null}.
*/
public Command(ProtocolKeyword type, CommandOutput<K, V, T> output) {
this(type, output, null);
}
/**
* Create a new command with the supplied type and args.
*
* @param type Command type, must not be {@code null}.
* @param output Command output, can be {@code null}.
* @param args Command args, can be {@code null}
*/
public Command(ProtocolKeyword type, CommandOutput<K, V, T> output, CommandArgs<K, V> args) {
LettuceAssert.notNull(type, "Command type must not be null");
this.type = type;
this.output = output;
this.args = args;
}
/**
* Get the object that holds this command's output.
*
* @return The command output object.
*/
@Override
public CommandOutput<K, V, T> getOutput() {
return output;
}
@Override
public boolean completeExceptionally(Throwable throwable) {
if (output != null) {
output.setError(throwable.getMessage());
}
exception = throwable;
this.status = ST_COMPLETED;
return true;
}
/**
* Mark this command complete and notify all waiting threads.
*/
@Override
public void complete() {
this.status = ST_COMPLETED;
}
@Override
public void cancel() {
this.status = ST_CANCELLED;
}
/**
* Encode and write this command to the supplied buffer using the new <a href="https://redis.io/topics/protocol">Unified
* Request Protocol</a>.
*
* @param buf Buffer to write to.
*/
public void encode(ByteBuf buf) {
buf.touch("Command.encode(…)");
buf.writeByte('*');
CommandArgs.IntegerArgument.writeInteger(buf, 1 + (args != null ? args.count() : 0));
buf.writeBytes(CommandArgs.CRLF);
CommandArgs.BytesArgument.writeBytes(buf, type.getBytes());
if (args != null) {
args.encode(buf);
}
}
public String getError() {
return output.getError();
}
@Override
public CommandArgs<K, V> getArgs() {
return args;
}
/**
*
* @return the resut from the output.
*/
public T get() {
if (output != null) {
return output.get();
}
return null;
}
@Override
public String toString() {
final StringBuilder sb = new StringBuilder();
sb.append(getClass().getSimpleName());
sb.append(" [type=").append(type);
sb.append(", output=").append(output);
sb.append(']');
return sb.toString();
}
public void setOutput(CommandOutput<K, V, T> output) {
if (this.status != ST_INITIAL) {
throw new IllegalStateException("Command is completed/cancelled. Cannot set a new output");
}
this.output = output;
}
@Override
public ProtocolKeyword getType() {
return type;
}
@Override
public boolean isCancelled() {
return status == ST_CANCELLED;
}
@Override
public boolean isDone() {
return status != ST_INITIAL;
}
}
| Command |
java | spring-projects__spring-boot | module/spring-boot-webflux/src/test/java/org/springframework/boot/webflux/autoconfigure/WelcomePageRouterFunctionFactoryTests.java | {
"start": 7474,
"end": 7909
} | class ____ implements View {
private final DataBufferFactory bufferFactory = new DefaultDataBufferFactory();
@Override
public Mono<Void> render(@Nullable Map<String, ?> model, @Nullable MediaType contentType,
ServerWebExchange exchange) {
DataBuffer buffer = this.bufferFactory.wrap("welcome-page-template".getBytes(StandardCharsets.UTF_8));
return exchange.getResponse().writeWith(Mono.just(buffer));
}
}
}
| TestView |
java | quarkusio__quarkus | integration-tests/micrometer-prometheus/src/main/java/io/quarkus/doc/micrometer/ExampleResource.java | {
"start": 615,
"end": 4124
} | class ____ {
// tag::gauge[]
private final LinkedList<Long> list = new LinkedList<>(); // <1>
// end::gauge[]
// tag::registry[]
private final MeterRegistry registry;
// tag::ctor[]
ExampleResource(MeterRegistry registry) {
this.registry = registry;
// tag::gauge[]
registry.gaugeCollectionSize("example.list.size", Tags.empty(), list); // <2>
// end::gauge[]
}
// end::ctor[]
// end::registry[]
// tag::gauge[]
@GET
@Path("gauge/{number}")
public Long checkListSize(@PathParam("number") long number) { // <3>
if (number == 2 || number % 2 == 0) {
// add even numbers to the list
list.add(number);
} else {
// remove items from the list for odd numbers
try {
number = list.removeFirst();
} catch (NoSuchElementException nse) {
number = 0;
}
}
return number;
}
// end::gauge[]
// tag::primeMethod[]
@GET
@Path("prime/{number}")
public String checkIfPrime(@PathParam("number") long number) {
if (number < 1) {
// tag::counted[]
registry.counter("example.prime.number", "type", "not-natural") // <1>
.increment(); // <2>
// end::counted[]
return "Only natural numbers can be prime numbers.";
}
if (number == 1) {
// tag::counted[]
registry.counter("example.prime.number", "type", "one") // <1>
.increment(); // <2>
// end::counted[]
return number + " is not prime.";
}
if (number == 2 || number % 2 == 0) {
// tag::counted[]
registry.counter("example.prime.number", "type", "even") // <1>
.increment(); // <2>
// end::counted[]
return number + " is not prime.";
}
// tag::timed[]
if (timedTestPrimeNumber(number)) { // <3>
// end::timed[]
// tag::ignore[]
registry.counter("example.prime.number", "type", "prime") // <1>
.increment();
return number + " is prime.";
} else
// end::ignore[]
// tag::default[]
if (testPrimeNumber(number)) {
// end::default[]
// tag::counted[]
registry.counter("example.prime.number", "type", "prime") // <1>
.increment(); // <2>
// end::counted[]
return number + " is prime.";
} else {
// tag::counted[]
registry.counter("example.prime.number", "type", "not-prime") // <1>
.increment(); // <2>
// end::counted[]
return number + " is not prime.";
}
}
// end::primeMethod[]
// tag::timed[]
protected boolean timedTestPrimeNumber(long number) {
Timer.Sample sample = Timer.start(registry); // <4>
boolean result = testPrimeNumber(number); // <5>
sample.stop(registry.timer("example.prime.number.test", "prime", result + "")); // <6>
return result;
}
// end::timed[]
protected boolean testPrimeNumber(long number) {
for (int i = 3; i < Math.floor(Math.sqrt(number)) + 1; i = i + 2) {
if (number % i == 0) {
return false;
}
}
return true;
}
}
// end::example[]
| ExampleResource |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/charsequence/CharSequenceAssert_doesNotContainPattern_String_Test.java | {
"start": 1010,
"end": 1515
} | class ____ extends CharSequenceAssertBaseTest {
private static CharSequence regex;
@BeforeAll
static void setUpOnce() {
regex = matchAnything().pattern();
}
@Override
protected CharSequenceAssert invoke_api_method() {
return assertions.doesNotContainPattern(regex);
}
@Override
protected void verify_internal_effects() {
verify(strings).assertDoesNotContainPattern(getInfo(assertions), getActual(assertions), regex);
}
}
| CharSequenceAssert_doesNotContainPattern_String_Test |
java | assertj__assertj-core | assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/api/future/FutureAssert_isNotDone_Test.java | {
"start": 1007,
"end": 1586
} | class ____ {
@Test
void should_pass_if_actual_is_not_done() {
// GIVEN
Future<?> actual = mock(Future.class);
// WHEN
when(actual.isDone()).thenReturn(false);
// THEN
then(actual).isNotDone();
}
@Test
void should_fail_if_actual_is_done() {
// GIVEN
Future<?> actual = mock(Future.class);
when(actual.isDone()).thenReturn(true);
// WHEN
var assertionError = expectAssertionError(() -> assertThat(actual).isNotDone());
// THEN
then(assertionError).hasMessageContaining("not to be done");
}
}
| FutureAssert_isNotDone_Test |
java | apache__camel | components/camel-salesforce/camel-salesforce-component/src/main/java/org/apache/camel/component/salesforce/api/dto/analytics/reports/FormatEnum.java | {
"start": 1047,
"end": 1671
} | enum ____ {
// Matrix
MATRIX("Matrix"),
// MultiBlock
MULTIBLOCK("MultiBlock"),
// Summary
SUMMARY("Summary"),
// Tabular
TABULAR("Tabular");
final String value;
FormatEnum(String value) {
this.value = value;
}
@JsonValue
public String value() {
return this.value;
}
@JsonCreator
public static FormatEnum fromValue(String value) {
for (FormatEnum e : FormatEnum.values()) {
if (e.value.equals(value)) {
return e;
}
}
throw new IllegalArgumentException(value);
}
}
| FormatEnum |
java | google__guice | extensions/struts2/src/com/google/inject/struts2/Struts2Factory.java | {
"start": 1487,
"end": 2808
} | class ____ extends ObjectFactory {
private static final long serialVersionUID = 1L;
private static final Logger logger = Logger.getLogger(Struts2Factory.class.getName());
private static final String ERROR_NO_INJECTOR =
"Cannot find a Guice injector. Are you sure you registered a GuiceServletContextListener "
+ "that uses the Struts2GuicePluginModule in your application's web.xml?";
private static @com.google.inject.Inject Injector injector;
private final List<ProvidedInterceptor> interceptors = new ArrayList<>();
private volatile Injector strutsInjector;
@Override
public boolean isNoArgConstructorRequired() {
return false;
}
@Inject(value = "guice.module", required = false)
void setModule(String moduleClassName) {
throw new RuntimeException(
"The struts2 plugin no longer supports"
+ " specifying a module via the 'guice.module' property in XML."
+ " Please install your module via a GuiceServletContextListener instead.");
}
Set<Class<?>> boundClasses = new HashSet<>();
@Override
public Class<?> getClassInstance(String name) throws ClassNotFoundException {
Class<?> clazz = super.getClassInstance(name);
synchronized (this) {
if (strutsInjector == null) {
// We can only bind each | Struts2Factory |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/annotation/web/socket/WebSocketMessageBrokerSecurityConfigurationTests.java | {
"start": 28159,
"end": 28899
} | class ____ implements HandshakeHandler {
Map<String, Object> attributes;
@Override
public boolean doHandshake(ServerHttpRequest request, ServerHttpResponse response, WebSocketHandler wsHandler,
Map<String, Object> attributes) throws HandshakeFailureException {
this.attributes = attributes;
if (wsHandler instanceof SockJsWebSocketHandler sockJs) {
// work around SPR-12716
WebSocketServerSockJsSession session = (WebSocketServerSockJsSession) ReflectionTestUtils
.getField(sockJs, "sockJsSession");
this.attributes = session.getAttributes();
}
return true;
}
}
@Configuration
@EnableWebSocketSecurity
@EnableWebSocketMessageBroker
@Import(SyncExecutorConfig.class)
static | TestHandshakeHandler |
java | elastic__elasticsearch | x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettings.java | {
"start": 910,
"end": 6443
} | class ____ {
public static final String ELASTIC_INFERENCE_SERVICE_SSL_CONFIGURATION_PREFIX = "xpack.inference.elastic.http.ssl.";
@Deprecated
static final Setting<String> EIS_GATEWAY_URL = Setting.simpleString("xpack.inference.eis.gateway.url", Setting.Property.NodeScope);
public static final Setting<String> ELASTIC_INFERENCE_SERVICE_URL = Setting.simpleString(
"xpack.inference.elastic.url",
Setting.Property.NodeScope
);
/**
* This setting is for testing only. It controls whether authorization is only performed once at bootup. If set to true, an
* authorization request will be made repeatedly on an interval.
*/
public static final Setting<Boolean> PERIODIC_AUTHORIZATION_ENABLED = Setting.boolSetting(
"xpack.inference.elastic.periodic_authorization_enabled",
true,
Setting.Property.NodeScope
);
private static final TimeValue DEFAULT_AUTH_REQUEST_INTERVAL = TimeValue.timeValueMinutes(10);
static final Setting<TimeValue> AUTHORIZATION_REQUEST_INTERVAL = Setting.timeSetting(
"xpack.inference.elastic.authorization_request_interval",
DEFAULT_AUTH_REQUEST_INTERVAL,
Setting.Property.NodeScope,
Setting.Property.Dynamic
);
private static final TimeValue DEFAULT_AUTH_REQUEST_JITTER = TimeValue.timeValueMinutes(5);
static final Setting<TimeValue> MAX_AUTHORIZATION_REQUEST_JITTER = Setting.timeSetting(
"xpack.inference.elastic.max_authorization_request_jitter",
DEFAULT_AUTH_REQUEST_JITTER,
Setting.Property.NodeScope,
Setting.Property.Dynamic
);
public static final SSLConfigurationSettings ELASTIC_INFERENCE_SERVICE_SSL_CONFIGURATION_SETTINGS = SSLConfigurationSettings.withPrefix(
ELASTIC_INFERENCE_SERVICE_SSL_CONFIGURATION_PREFIX,
false
);
public static final Setting<Boolean> ELASTIC_INFERENCE_SERVICE_SSL_ENABLED = Setting.boolSetting(
ELASTIC_INFERENCE_SERVICE_SSL_CONFIGURATION_PREFIX + "enabled",
true,
Setting.Property.NodeScope
);
/**
* Total time to live (TTL) defines maximum life span of persistent connections regardless of their
* expiration setting. No persistent connection will be re-used past its TTL value.
* Using a TTL of -1 will disable the expiration of persistent connections (the idle connection evictor will still apply).
*/
public static final Setting<TimeValue> CONNECTION_TTL_SETTING = Setting.timeSetting(
"xpack.inference.elastic.http.connection_ttl",
TimeValue.timeValueSeconds(60),
Setting.Property.NodeScope
);
@Deprecated
private final String eisGatewayUrl;
private final String elasticInferenceServiceUrl;
private final boolean periodicAuthorizationEnabled;
private volatile TimeValue authRequestInterval;
private volatile TimeValue maxAuthorizationRequestJitter;
private final TimeValue connectionTtl;
public ElasticInferenceServiceSettings(Settings settings) {
eisGatewayUrl = EIS_GATEWAY_URL.get(settings);
elasticInferenceServiceUrl = ELASTIC_INFERENCE_SERVICE_URL.get(settings);
periodicAuthorizationEnabled = PERIODIC_AUTHORIZATION_ENABLED.get(settings);
authRequestInterval = AUTHORIZATION_REQUEST_INTERVAL.get(settings);
maxAuthorizationRequestJitter = MAX_AUTHORIZATION_REQUEST_JITTER.get(settings);
connectionTtl = CONNECTION_TTL_SETTING.get(settings);
}
/**
* This must be called after the object is constructed to avoid leaking the this reference before the constructor
* finishes.
*
* Handles initializing the settings changes listener.
*/
public final void init(ClusterService clusterService) {
clusterService.getClusterSettings()
.addSettingsUpdateConsumer(AUTHORIZATION_REQUEST_INTERVAL, this::setAuthorizationRequestInterval);
clusterService.getClusterSettings()
.addSettingsUpdateConsumer(MAX_AUTHORIZATION_REQUEST_JITTER, this::setMaxAuthorizationRequestJitter);
}
private void setAuthorizationRequestInterval(TimeValue interval) {
authRequestInterval = interval;
}
private void setMaxAuthorizationRequestJitter(TimeValue jitter) {
maxAuthorizationRequestJitter = jitter;
}
public TimeValue getAuthRequestInterval() {
return authRequestInterval;
}
public TimeValue getMaxAuthorizationRequestJitter() {
return maxAuthorizationRequestJitter;
}
public TimeValue getConnectionTtl() {
return connectionTtl;
}
public static List<Setting<?>> getSettingsDefinitions() {
ArrayList<Setting<?>> settings = new ArrayList<>();
settings.add(EIS_GATEWAY_URL);
settings.add(ELASTIC_INFERENCE_SERVICE_URL);
settings.add(ELASTIC_INFERENCE_SERVICE_SSL_ENABLED);
settings.addAll(ELASTIC_INFERENCE_SERVICE_SSL_CONFIGURATION_SETTINGS.getEnabledSettings());
settings.add(PERIODIC_AUTHORIZATION_ENABLED);
settings.add(AUTHORIZATION_REQUEST_INTERVAL);
settings.add(MAX_AUTHORIZATION_REQUEST_JITTER);
settings.add(CONNECTION_TTL_SETTING);
return settings;
}
public String getElasticInferenceServiceUrl() {
return Strings.isEmpty(elasticInferenceServiceUrl) ? eisGatewayUrl : elasticInferenceServiceUrl;
}
public boolean isPeriodicAuthorizationEnabled() {
return periodicAuthorizationEnabled;
}
}
| ElasticInferenceServiceSettings |
java | elastic__elasticsearch | x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SumSerializationTests.java | {
"start": 1121,
"end": 2332
} | class ____ extends AbstractExpressionSerializationTests<Sum> {
@Override
protected Sum createTestInstance() {
return new Sum(randomSource(), randomChild(), randomChild(), randomChild(), randomChild());
}
@Override
protected Sum mutateInstance(Sum instance) throws IOException {
Expression field = instance.field();
Expression filter = instance.filter();
Expression window = instance.window();
Expression summationMode = instance.summationMode();
switch (randomIntBetween(0, 3)) {
case 0 -> field = randomValueOtherThan(field, AbstractExpressionSerializationTests::randomChild);
case 1 -> filter = randomValueOtherThan(filter, AbstractExpressionSerializationTests::randomChild);
case 2 -> window = randomValueOtherThan(window, AbstractExpressionSerializationTests::randomChild);
case 3 -> summationMode = randomValueOtherThan(summationMode, AbstractExpressionSerializationTests::randomChild);
default -> throw new AssertionError("unexpected value");
}
return new Sum(instance.source(), field, filter, window, summationMode);
}
public static | SumSerializationTests |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/heuristic/NXYSignificanceHeuristic.java | {
"start": 4673,
"end": 4790
} | class ____ do not contain term
frequencies.N01 = subsetSize - subsetFreq;
// documents not in | and |
java | apache__maven | api/maven-api-cli/src/main/java/org/apache/maven/api/cli/Logger.java | {
"start": 1463,
"end": 1559
} | interface ____ {
/**
* Represents the severity levels for log messages.
*/
| Logger |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/bytecode/enhance/spi/DefaultEnhancementContext.java | {
"start": 667,
"end": 3535
} | class ____ implements EnhancementContext {
private final ConcurrentHashMap<String, Type.PersistenceType> discoveredTypes = new ConcurrentHashMap<>();
/**
* @return the classloader for this class
*/
@Override
public ClassLoader getLoadingClassLoader() {
return getClass().getClassLoader();
}
/**
* look for @Entity annotation
*/
@Override
public boolean isEntityClass(UnloadedClass classDescriptor) {
return classDescriptor.hasAnnotation( Entity.class );
}
/**
* look for @Embeddable annotation
*/
@Override
public boolean isCompositeClass(UnloadedClass classDescriptor) {
return classDescriptor.hasAnnotation( Embeddable.class )
|| discoveredTypes.get( classDescriptor.getName() ) == Type.PersistenceType.EMBEDDABLE;
}
/**
* look for @MappedSuperclass annotation
*/
@Override
public boolean isMappedSuperclassClass(UnloadedClass classDescriptor) {
return classDescriptor.hasAnnotation( MappedSuperclass.class );
}
/**
* @return true
*/
@Override
public boolean doBiDirectionalAssociationManagement(UnloadedField field) {
return true;
}
/**
* @return true
*/
@Override
public boolean doDirtyCheckingInline(UnloadedClass classDescriptor) {
return true;
}
/**
* @return false
*/
@Override
public boolean doExtendedEnhancement(UnloadedClass classDescriptor) {
return false;
}
/**
* @return true
*/
@Override
public boolean hasLazyLoadableAttributes(UnloadedClass classDescriptor) {
return true;
}
/**
* @return true
*/
@Override
public boolean isLazyLoadable(UnloadedField field) {
return true;
}
/**
* look for @Transient annotation
*/
@Override
public boolean isPersistentField(UnloadedField ctField) {
return ! ctField.hasAnnotation( Transient.class );
}
/**
* look for @OneToMany, @ManyToMany and @ElementCollection annotations
*/
@Override
public boolean isMappedCollection(UnloadedField field) {
// If the collection is definitely a plural attribute, we respect that
if (field.hasAnnotation( OneToMany.class ) || field.hasAnnotation( ManyToMany.class ) || field.hasAnnotation( ElementCollection.class )) {
return true;
}
// But a collection might be treated like a singular attribute if it is annotated with `@Basic`
// If no annotations are given though, a collection is treated like a OneToMany
return !field.hasAnnotation( Basic.class );
}
/**
* keep the same order.
*/
@Override
public UnloadedField[] order(UnloadedField[] persistentFields) {
return persistentFields;
}
@Override
public boolean isDiscoveredType(UnloadedClass classDescriptor) {
return discoveredTypes.containsKey( classDescriptor.getName() );
}
@Override
public void registerDiscoveredType(UnloadedClass classDescriptor, Type.PersistenceType type) {
discoveredTypes.put( classDescriptor.getName(), type );
}
}
| DefaultEnhancementContext |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/MultipleOutputs.java | {
"start": 2479,
"end": 2744
} | class ____ with its own value
* class.
* <p>
* A named output can be a single file or a multi file. The later is referred as
* a multi named output.
* <p>
* A multi named output is an unbound set of files all sharing the same
* <code>OutputFormat</code>, key | and |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/Jt400EndpointBuilderFactory.java | {
"start": 31831,
"end": 39203
} | interface ____
extends
EndpointConsumerBuilder {
default Jt400EndpointConsumerBuilder basic() {
return (Jt400EndpointConsumerBuilder) this;
}
/**
* Allows for bridging the consumer to the Camel routing Error Handler,
* which mean any exceptions (if possible) occurred while the Camel
* consumer is trying to pickup incoming messages, or the likes, will
* now be processed as a message and handled by the routing Error
* Handler. Important: This is only possible if the 3rd party component
* allows Camel to be alerted if an exception was thrown. Some
* components handle this internally only, and therefore
* bridgeErrorHandler is not possible. In other situations we may
* improve the Camel component to hook into the 3rd party component and
* make this possible for future releases. By default the consumer will
* use the org.apache.camel.spi.ExceptionHandler to deal with
* exceptions, that will be logged at WARN or ERROR level and ignored.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer (advanced)
*
* @param bridgeErrorHandler the value to set
* @return the dsl builder
*/
default AdvancedJt400EndpointConsumerBuilder bridgeErrorHandler(boolean bridgeErrorHandler) {
doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
return this;
}
/**
* Allows for bridging the consumer to the Camel routing Error Handler,
* which mean any exceptions (if possible) occurred while the Camel
* consumer is trying to pickup incoming messages, or the likes, will
* now be processed as a message and handled by the routing Error
* Handler. Important: This is only possible if the 3rd party component
* allows Camel to be alerted if an exception was thrown. Some
* components handle this internally only, and therefore
* bridgeErrorHandler is not possible. In other situations we may
* improve the Camel component to hook into the 3rd party component and
* make this possible for future releases. By default the consumer will
* use the org.apache.camel.spi.ExceptionHandler to deal with
* exceptions, that will be logged at WARN or ERROR level and ignored.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: consumer (advanced)
*
* @param bridgeErrorHandler the value to set
* @return the dsl builder
*/
default AdvancedJt400EndpointConsumerBuilder bridgeErrorHandler(String bridgeErrorHandler) {
doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
return this;
}
/**
* To let the consumer use a custom ExceptionHandler. Notice if the
* option bridgeErrorHandler is enabled then this option is not in use.
* By default the consumer will deal with exceptions, that will be
* logged at WARN or ERROR level and ignored.
*
* The option is a: <code>org.apache.camel.spi.ExceptionHandler</code>
* type.
*
* Group: consumer (advanced)
*
* @param exceptionHandler the value to set
* @return the dsl builder
*/
default AdvancedJt400EndpointConsumerBuilder exceptionHandler(org.apache.camel.spi.ExceptionHandler exceptionHandler) {
doSetProperty("exceptionHandler", exceptionHandler);
return this;
}
/**
* To let the consumer use a custom ExceptionHandler. Notice if the
* option bridgeErrorHandler is enabled then this option is not in use.
* By default the consumer will deal with exceptions, that will be
* logged at WARN or ERROR level and ignored.
*
* The option will be converted to a
* <code>org.apache.camel.spi.ExceptionHandler</code> type.
*
* Group: consumer (advanced)
*
* @param exceptionHandler the value to set
* @return the dsl builder
*/
default AdvancedJt400EndpointConsumerBuilder exceptionHandler(String exceptionHandler) {
doSetProperty("exceptionHandler", exceptionHandler);
return this;
}
/**
* Sets the exchange pattern when the consumer creates an exchange.
*
* The option is a: <code>org.apache.camel.ExchangePattern</code> type.
*
* Group: consumer (advanced)
*
* @param exchangePattern the value to set
* @return the dsl builder
*/
default AdvancedJt400EndpointConsumerBuilder exchangePattern(org.apache.camel.ExchangePattern exchangePattern) {
doSetProperty("exchangePattern", exchangePattern);
return this;
}
/**
* Sets the exchange pattern when the consumer creates an exchange.
*
* The option will be converted to a
* <code>org.apache.camel.ExchangePattern</code> type.
*
* Group: consumer (advanced)
*
* @param exchangePattern the value to set
* @return the dsl builder
*/
default AdvancedJt400EndpointConsumerBuilder exchangePattern(String exchangePattern) {
doSetProperty("exchangePattern", exchangePattern);
return this;
}
/**
* A pluggable org.apache.camel.PollingConsumerPollingStrategy allowing
* you to provide your custom implementation to control error handling
* usually occurred during the poll operation before an Exchange have
* been created and being routed in Camel.
*
* The option is a:
* <code>org.apache.camel.spi.PollingConsumerPollStrategy</code> type.
*
* Group: consumer (advanced)
*
* @param pollStrategy the value to set
* @return the dsl builder
*/
default AdvancedJt400EndpointConsumerBuilder pollStrategy(org.apache.camel.spi.PollingConsumerPollStrategy pollStrategy) {
doSetProperty("pollStrategy", pollStrategy);
return this;
}
/**
* A pluggable org.apache.camel.PollingConsumerPollingStrategy allowing
* you to provide your custom implementation to control error handling
* usually occurred during the poll operation before an Exchange have
* been created and being routed in Camel.
*
* The option will be converted to a
* <code>org.apache.camel.spi.PollingConsumerPollStrategy</code> type.
*
* Group: consumer (advanced)
*
* @param pollStrategy the value to set
* @return the dsl builder
*/
default AdvancedJt400EndpointConsumerBuilder pollStrategy(String pollStrategy) {
doSetProperty("pollStrategy", pollStrategy);
return this;
}
}
/**
* Builder for endpoint producers for the JT400 component.
*/
public | AdvancedJt400EndpointConsumerBuilder |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/RackResolver.java | {
"start": 4252,
"end": 6442
} | class ____ yet initialized");
}
return coreResolve(hostName);
}
/**
* Utility method for getting a list of hostname resolved to a list of node
* in the network topology. This method doesn't initialize the class.
* Call {@link #init(Configuration)} explicitly.
* @param hostNames list of hostNames.
* @return nodes {@link Node} after resolving the hostnames
*/
public static List<Node> resolve(List<String> hostNames) {
if (!initCalled) {
throw new IllegalStateException("RackResolver class " +
"not yet initialized");
}
return coreResolve(hostNames);
}
private static Node coreResolve(String hostName) {
List <String> tmpList = Collections.singletonList(hostName);
return coreResolve(tmpList).get(0);
}
private static List<Node> coreResolve(List<String> hostNames) {
List<Node> nodes = new ArrayList<Node>(hostNames.size());
List<String> rNameList = dnsToSwitchMapping.resolve(hostNames);
if (rNameList == null || rNameList.isEmpty()) {
for (String hostName : hostNames) {
nodes.add(new NodeBase(hostName, NetworkTopology.DEFAULT_RACK));
}
LOG.info("Got an error when resolve hostNames. Falling back to "
+ NetworkTopology.DEFAULT_RACK + " for all.");
} else {
for (int i = 0; i < hostNames.size(); i++) {
if (Strings.isNullOrEmpty(rNameList.get(i))) {
// fallback to use default rack
nodes.add(new NodeBase(hostNames.get(i),
NetworkTopology.DEFAULT_RACK));
LOG.debug("Could not resolve {}. Falling back to {}",
hostNames.get(i), NetworkTopology.DEFAULT_RACK);
} else {
nodes.add(new NodeBase(hostNames.get(i), rNameList.get(i)));
LOG.debug("Resolved {} to {}", hostNames.get(i), rNameList.get(i));
}
}
}
return nodes;
}
/**
* Only used by tests.
*/
@Private
@VisibleForTesting
static DNSToSwitchMapping getDnsToSwitchMapping() {
return dnsToSwitchMapping;
}
/**
* Only used by tests.
*/
@Private
@VisibleForTesting
static void reset() {
initCalled = false;
dnsToSwitchMapping = null;
}
}
| not |
java | spring-projects__spring-boot | core/spring-boot/src/test/java/org/springframework/boot/diagnostics/FailureAnalyzersTests.java | {
"start": 1534,
"end": 3905
} | class ____ {
@SuppressWarnings("NullAway.Init")
private static FailureAnalyzer failureAnalyzer;
private final AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
@BeforeEach
void configureMock() {
failureAnalyzer = mock(FailureAnalyzer.class);
}
@Test
void analyzersAreLoadedAndCalled() {
RuntimeException failure = new RuntimeException();
analyzeAndReport(failure, BasicFailureAnalyzer.class, BasicFailureAnalyzer.class);
then(failureAnalyzer).should(times(2)).analyze(failure);
}
@Test
void analyzerIsConstructedWithBeanFactory() {
RuntimeException failure = new RuntimeException();
analyzeAndReport(failure, BasicFailureAnalyzer.class, BeanFactoryConstructorFailureAnalyzer.class);
then(failureAnalyzer).should(times(2)).analyze(failure);
}
@Test
void analyzerIsConstructedWithEnvironment() {
RuntimeException failure = new RuntimeException();
analyzeAndReport(failure, BasicFailureAnalyzer.class, EnvironmentConstructorFailureAnalyzer.class);
then(failureAnalyzer).should(times(2)).analyze(failure);
}
@Test
void analyzerThatFailsDuringInitializationDoesNotPreventOtherAnalyzersFromBeingCalled() {
RuntimeException failure = new RuntimeException();
analyzeAndReport(failure, BrokenInitializationFailureAnalyzer.class, BasicFailureAnalyzer.class);
then(failureAnalyzer).should().analyze(failure);
}
@Test
void analyzerThatFailsDuringAnalysisDoesNotPreventOtherAnalyzersFromBeingCalled() {
RuntimeException failure = new RuntimeException();
analyzeAndReport(failure, BrokenAnalysisFailureAnalyzer.class, BasicFailureAnalyzer.class);
then(failureAnalyzer).should().analyze(failure);
}
@SafeVarargs
private void analyzeAndReport(Throwable failure, Class<? extends FailureAnalyzer>... failureAnalyzerClasses) {
analyzeAndReport(failure, this.context, failureAnalyzerClasses);
}
@SafeVarargs
private void analyzeAndReport(Throwable failure, AnnotationConfigApplicationContext context,
Class<? extends FailureAnalyzer>... failureAnalyzerClasses) {
MockSpringFactoriesLoader loader = new MockSpringFactoriesLoader();
for (Class<? extends FailureAnalyzer> failureAnalyzerClass : failureAnalyzerClasses) {
loader.add(FailureAnalyzer.class, failureAnalyzerClass);
}
new FailureAnalyzers(context, loader).reportException(failure);
}
static | FailureAnalyzersTests |
java | apache__dubbo | dubbo-rpc/dubbo-rpc-dubbo/src/main/java/org/apache/dubbo/rpc/protocol/dubbo/ChannelWrappedInvoker.java | {
"start": 2339,
"end": 4854
} | class ____<T> extends AbstractInvoker<T> {
private final Channel channel;
private final String serviceKey;
private final ExchangeClient currentClient;
ChannelWrappedInvoker(Class<T> serviceType, Channel channel, URL url, String serviceKey) {
super(serviceType, url, new String[] {GROUP_KEY, TOKEN_KEY});
this.channel = channel;
this.serviceKey = serviceKey;
this.currentClient = new HeaderExchangeClient(new ChannelWrapper(this.channel), false);
}
@Override
@SuppressWarnings("deprecation")
protected Result doInvoke(Invocation invocation) throws Throwable {
RpcInvocation inv = (RpcInvocation) invocation;
// use interface's name as service path to export if it's not found on client side
inv.setAttachment(PATH_KEY, getInterface().getName());
inv.setAttachment(CALLBACK_SERVICE_KEY, serviceKey);
Integer payload = getUrl().getParameter(PAYLOAD, Integer.class);
Request request = new Request();
if (payload != null) {
request.setPayload(payload);
}
request.setData(inv);
request.setVersion(Version.getProtocolVersion());
try {
if (RpcUtils.isOneway(getUrl(), inv)) { // may have concurrency issue
currentClient.send(
request, getUrl().getMethodParameter(RpcUtils.getMethodName(invocation), SENT_KEY, false));
return AsyncRpcResult.newDefaultAsyncResult(invocation);
} else {
CompletableFuture<AppResponse> appResponseFuture =
currentClient.request(request).thenApply(AppResponse.class::cast);
return new AsyncRpcResult(appResponseFuture, inv);
}
} catch (RpcException e) {
throw e;
} catch (TimeoutException e) {
throw new RpcException(RpcException.TIMEOUT_EXCEPTION, e.getMessage(), e);
} catch (RemotingException e) {
throw new RpcException(RpcException.NETWORK_EXCEPTION, e.getMessage(), e);
} catch (Throwable e) { // here is non-biz exception, wrap it.
throw new RpcException(e.getMessage(), e);
}
}
@Override
public void destroy() {
// super.destroy();
// try {
// channel.close();
// } catch (Throwable t) {
// logger.warn(t.getMessage(), t);
// }
}
public static | ChannelWrappedInvoker |
java | netty__netty | microbench/src/main/java/io/netty/microbench/buffer/RandomSizeByteBufAllocationBenchmark.java | {
"start": 2365,
"end": 3798
} | enum ____ {
JEMALLOC,
ADAPTIVE
}
@Param({ "ADAPTIVE" })
public AllocatorType allocatorType = AllocatorType.ADAPTIVE;
@Param({ "128", "128000" })
public int samples;
private ByteBufAllocator allocator;
private short[] sizeSamples;
private int sampleMask;
private int nextSampleIndex;
static {
ResourceLeakDetector.setLevel(ResourceLeakDetector.Level.DISABLED);
}
@Setup
public void init() {
if (!(Thread.currentThread() instanceof FastThreadLocalThread)) {
throw new IllegalStateException("This benchmark must be run with FastThreadLocalThread: run it with: " +
"-Djmh.executor=CUSTOM -Djmh.executor.class=io.netty.microbench.util.AbstractMicrobenchmark$HarnessExecutor");
}
switch (allocatorType) {
case JEMALLOC:
allocator = new PooledByteBufAllocator(true);
break;
case ADAPTIVE:
allocator = new AdaptiveByteBufAllocator(true, true);
break;
default:
throw new IllegalArgumentException("Unknown allocator type: " + allocatorType);
}
samples = MathUtil.findNextPositivePowerOfTwo(samples);
sampleMask = samples - 1;
sizeSamples = new short[samples];
SplittableRandom rnd = new SplittableRandom(SEED);
// here we're not using random size [0, 16896] because if the size | AllocatorType |
java | spring-projects__spring-boot | module/spring-boot-webmvc/src/test/java/org/springframework/boot/webmvc/autoconfigure/actuate/web/WebMvcEndpointAccessIntegrationTests.java | {
"start": 9036,
"end": 9340
} | class ____ {
@GetMapping("/")
String get() {
return "get";
}
@PostMapping("/")
String post() {
return "post";
}
}
@org.springframework.boot.actuate.endpoint.web.annotation.ServletEndpoint(id = "customservlet")
@SuppressWarnings({ "deprecation", "removal" })
static | CustomMvcEndpoint |
java | quarkusio__quarkus | integration-tests/oidc-dpop/src/main/java/io/quarkus/it/keycloak/ProtectedResource.java | {
"start": 364,
"end": 1120
} | class ____ {
@Inject
JsonWebToken principal;
@Inject
RoutingContext routingContext;
@GET
@Produces("text/plain")
@Path("dpop-jwt")
public String hello() {
return "Hello, " + principal.getName() + "; "
+ "JWK thumbprint in JWT: " + isJwtTokenThumbprintAvailable() + ", "
+ "JWK thumbprint in introspection: " + isIntrospectionThumbprintAvailable();
}
private boolean isJwtTokenThumbprintAvailable() {
return Boolean.TRUE.equals(routingContext.get(OidcUtils.DPOP_JWT_THUMBPRINT));
}
private boolean isIntrospectionThumbprintAvailable() {
return Boolean.TRUE.equals(routingContext.get(OidcUtils.DPOP_INTROSPECTION_THUMBPRINT));
}
}
| ProtectedResource |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/util/introspection/PropertySupport_publicGetterExistsFor_Test.java | {
"start": 899,
"end": 1954
} | class ____ {
private PropertySupport propertySupport = PropertySupport.instance();
private Person bruceWayne;
private Person joker;
private SuperHero batman;
@BeforeEach
public void setUp() {
bruceWayne = new Person("Bruce Wayne");
joker = new Person("Joker");
batman = new SuperHero("Batman", bruceWayne, joker);
}
@Test
void should_return_true_if_public_getter_exists_for_field() {
assertThat(propertySupport.publicGetterExistsFor("archenemy", batman)).as("check archenemy").isTrue();
// with inherited public getter
assertThat(propertySupport.publicGetterExistsFor("name", batman)).as("check name").isTrue();
}
@Test
void should_return_false_if_public_getter_does_not_exist() {
// getter exists but is package visible
assertThat(propertySupport.publicGetterExistsFor("trueIdentity", batman)).as("package visible getter").isFalse();
assertThat(propertySupport.publicGetterExistsFor("realJob", batman)).as("with non existing getter").isFalse();
}
}
| PropertySupport_publicGetterExistsFor_Test |
java | apache__avro | lang/java/avro/src/test/java/org/apache/avro/reflect/TestReflect.java | {
"start": 39228,
"end": 39904
} | class ____ {
}
@Test
void avroAliasOnClass() {
check(AliasA.class,
"{\"type\":\"record\",\"name\":\"AliasA\",\"namespace\":\"org.apache.avro.reflect.TestReflect\",\"fields\":[],\"aliases\":[\"b.a\"]}");
check(AliasB.class,
"{\"type\":\"record\",\"name\":\"AliasB\",\"namespace\":\"org.apache.avro.reflect.TestReflect\",\"fields\":[],\"aliases\":[\"a\"]}");
check(AliasC.class,
"{\"type\":\"record\",\"name\":\"AliasC\",\"namespace\":\"org.apache.avro.reflect.TestReflect\",\"fields\":[],\"aliases\":[\"a\"]}");
}
@AvroAlias(alias = "alias1", space = "space1")
@AvroAlias(alias = "alias2", space = "space2")
private static | AliasC |
java | spring-projects__spring-security | access/src/main/java/org/springframework/security/access/intercept/MethodInvocationPrivilegeEvaluator.java | {
"start": 1689,
"end": 2056
} | class ____ to allow applications to determine whether or not the current
* principal would be allowed to at least attempt to invoke the method, irrespective of
* the "after" invocation handling.
* </p>
*
* @author Ben Alex
* @deprecated Use {@link org.springframework.security.authorization.AuthorizationManager}
* instead
*/
@NullUnmarked
@Deprecated
public | aims |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/UnusedAnonymousClassTest.java | {
"start": 3028,
"end": 3627
} | class ____ {
public static void main(String[] args) throws Exception {
new Callable<Void>() {
public Void call() throws Exception {
return null;
}
}.call();
}
}
""")
.doTest();
}
@Test
public void liveCallableViaCinit() {
compilationHelper
.addSourceLines(
"a/One.java",
"""
package a;
import java.util.concurrent.Callable;
import java.util.ArrayList;
public | One |
java | quarkusio__quarkus | integration-tests/grpc-plain-text-mutiny/src/test/java/io/quarkus/grpc/examples/hello/HelloWorldEndpointTestBase.java | {
"start": 343,
"end": 2287
} | class ____ {
@Test
public void testHelloWorldServiceUsingBlockingStub() {
String response = get("/hello/blocking/neo").asString();
assertThat(response).startsWith("Hello neo");
assertNoHeaders();
}
@Test
public void testHelloWorldServiceUsingMutinyStub() {
String response = get("/hello/mutiny/neo-mutiny").asString();
assertThat(response).startsWith("Hello neo-mutiny");
assertNoHeaders();
}
@Test
void shouldSetHeaderWithMutiny() {
String response = given().queryParam("headers", "true")
.when().get("/hello/mutiny/neo-mutiny-w-headers").asString();
assertThat(response).startsWith("Hello neo-mutiny-w-headers");
assertHasHeader("my-extra-header", "my-extra-value");
}
@Test
void shouldSetHeader() {
String response = given().queryParam("headers", "true")
.when().get("/hello/blocking/neo-w-headers").asString();
assertThat(response).startsWith("Hello neo-w-headers");
assertHasHeader("my-blocking-header", "my-blocking-value");
}
@Test
void shouldSetHeaderWithInterface() {
String response = given().queryParam("headers", "true")
.when().get("/hello/interface/i-neo-w-headers").asString();
assertThat(response).startsWith("Hello i-neo-w-headers");
assertHasHeader("my-interface-header", "my-interface-value");
}
@BeforeEach
public void setUp() {
delete("/hello").then().statusCode(204);
}
private void assertHasHeader(String key, String value) {
Map<?, ?> result = get("/hello/headers").as(Map.class);
assertThat(result).hasSize(1);
assertThat(result.get(key)).isEqualTo(value);
}
private void assertNoHeaders() {
Map<?, ?> result = get("/hello/headers").as(Map.class);
assertThat(result).hasSize(0);
}
}
| HelloWorldEndpointTestBase |
java | spring-projects__spring-boot | smoke-test/spring-boot-smoke-test-jersey/src/main/java/smoketest/jersey/Endpoint.java | {
"start": 791,
"end": 991
} | class ____ {
private final Service service;
public Endpoint(Service service) {
this.service = service;
}
@GET
public String message() {
return "Hello " + this.service.message();
}
}
| Endpoint |
java | spring-projects__spring-boot | module/spring-boot-health/src/test/java/org/springframework/boot/health/actuate/endpoint/ReactiveHealthIndicatorImplementationTests.java | {
"start": 2577,
"end": 2892
} | class ____ extends AbstractReactiveHealthIndicator {
SimpleReactiveHealthIndicator() {
super("Health check failed for simple");
}
@Override
protected Mono<Health> doHealthCheck(Health.Builder builder) {
return Mono.just(builder.up().build());
}
}
private static final | SimpleReactiveHealthIndicator |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteTrainedModelAction.java | {
"start": 1320,
"end": 3362
} | class ____ extends AcknowledgedRequest<Request> implements ToXContentFragment {
public static final ParseField FORCE = new ParseField("force");
private String id;
private boolean force;
public Request(StreamInput in) throws IOException {
super(in);
id = in.readString();
if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_1_0)) {
force = in.readBoolean();
} else {
force = false;
}
}
public Request(String id) {
super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT);
this.id = ExceptionsHelper.requireNonNull(id, TrainedModelConfig.MODEL_ID);
}
public String getId() {
return id;
}
public boolean isForce() {
return force;
}
public void setForce(boolean force) {
this.force = force;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.field(TrainedModelConfig.MODEL_ID.getPreferredName(), id);
builder.field(FORCE.getPreferredName(), force);
return builder;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
DeleteTrainedModelAction.Request request = (DeleteTrainedModelAction.Request) o;
return Objects.equals(id, request.id) && force == request.force;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeString(id);
if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_1_0)) {
out.writeBoolean(force);
}
}
@Override
public int hashCode() {
return Objects.hash(id, force);
}
}
}
| Request |
java | elastic__elasticsearch | x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMaxLongOperatorTests.java | {
"start": 1066,
"end": 3208
} | class ____ extends LuceneMaxOperatorTestCase {
@Override
public LuceneMaxFactory.NumberType getNumberType() {
return LuceneMaxFactory.NumberType.LONG;
}
@Override
protected NumberTypeTest getNumberTypeTest() {
return new NumberTypeTest() {
long max = Long.MIN_VALUE;
@Override
public IndexableField newPointField() {
return new LongField(FIELD_NAME, newValue(), randomFrom(Field.Store.values()));
}
@Override
public IndexableField newDocValuesField() {
return new SortedNumericDocValuesField(FIELD_NAME, newValue());
}
private long newValue() {
final long value = randomLong();
max = Math.max(max, value);
return value;
}
@Override
public void assertPage(Page page) {
assertThat(page.getBlock(0), instanceOf(LongBlock.class));
final LongBlock db = page.getBlock(0);
assertThat(page.getBlock(1), instanceOf(BooleanBlock.class));
final BooleanBlock bb = page.getBlock(1);
if (bb.getBoolean(0) == false) {
assertThat(db.getLong(0), equalTo(Long.MIN_VALUE));
} else {
assertThat(db.getLong(0), lessThanOrEqualTo(max));
}
}
@Override
public AggregatorFunction newAggregatorFunction(DriverContext context) {
return new MaxLongAggregatorFunctionSupplier().aggregator(context, List.of(0, 1));
}
@Override
public void assertMaxValue(Block block, boolean exactResult) {
assertThat(block, instanceOf(LongBlock.class));
final LongBlock lb = (LongBlock) block;
if (exactResult) {
assertThat(lb.getLong(0), equalTo(max));
} else {
assertThat(lb.getLong(0), lessThanOrEqualTo(max));
}
}
};
}
}
| LuceneMaxLongOperatorTests |
java | google__dagger | javatests/dagger/functional/producers/cancellation/ProducerSubcomponentCancellationTest.java | {
"start": 1151,
"end": 4848
} | class ____ {
private final ProducerTester tester = new ProducerTester();
private final CancellationComponent component =
DaggerCancellationComponent.builder()
.module(new CancellationModule(tester))
.dependency(new Dependency(tester))
.executor(MoreExecutors.directExecutor())
.build();
private final CancellationSubcomponent subcomponent =
component.subcomponentBuilder().module(new CancellationSubcomponentModule(tester)).build();
@Test
public void initialState() {
tester.assertNoStartedNodes();
}
@Test
public void cancellingSubcomponent_doesNotCancelParent() throws Exception {
ListenableFuture<String> subcomponentEntryPoint = subcomponent.subcomponentEntryPoint();
// Subcomponent entry point depends on all leaves from the parent component and on the single
// leaf in the subcomponent itself, so they should all have started.
tester.assertStarted("leaf1", "leaf2", "leaf3", "subLeaf").only();
assertThat(subcomponentEntryPoint.cancel(true)).isTrue();
assertThat(subcomponentEntryPoint.isCancelled()).isTrue();
// None of the tasks running in the parent were cancelled.
tester.assertNotCancelled("leaf1", "leaf2", "leaf3");
tester.assertCancelled("subLeaf").only();
// Finish all the parent tasks to ensure that it can still complete normally.
tester.complete(
"dependencyFuture",
"leaf1",
"leaf2",
"leaf3",
"foo",
"bar",
"baz",
"qux",
"entryPoint1",
"entryPoint2");
assertThat(component.entryPoint1().get(1, MILLISECONDS)).isEqualTo("completed");
assertThat(component.entryPoint2().get().get(1, MILLISECONDS)).isEqualTo("completed");
}
@Test
public void cancellingSubcomponent_preventsUnstartedNodesFromStarting() {
ListenableFuture<String> subcomponentEntryPoint = subcomponent.subcomponentEntryPoint();
tester.complete("subLeaf");
tester.assertNotStarted("subTask1", "subTask2");
subcomponentEntryPoint.cancel(true);
// Complete the remaining dependencies of subTask1 and subTask2.
tester.complete("leaf1", "leaf2", "leaf3", "foo", "bar", "baz", "qux");
// Since the subcomponent was cancelled, they are not started.
tester.assertNotStarted("subTask1", "subTask2");
}
@Test
public void cancellingProducerFromComponentDependency_inSubcomponent_cancelsUnderlyingTask()
throws Exception {
// Request subcomponent's entry point.
ListenableFuture<String> subcomponentEntryPoint = subcomponent.subcomponentEntryPoint();
// Finish all parent tasks so that the subcomponent's tasks can start.
tester.complete("leaf1", "leaf2", "leaf3", "foo", "bar", "baz", "qux", "subLeaf");
tester.assertStarted("subTask1", "subTask2");
tester.assertNotCancelled("subTask1", "subTask2");
// When subTask2 runs, it cancels the dependency future.
// TODO(cgdecker): Is this what we want to happen?
// On the one hand, there's a policy of "futures from component dependencies come from outside
// our control and should be cancelled unconditionally". On the other hand, the dependency is
// coming from the parent component, and the policy is also not to cancel things belonging to
// the parent unless it allows that.
tester.assertCancelled("dependencyFuture");
// The future it returns didn't depend directly on that future, though, so the subcomponent
// should be able to complete normally.
tester.complete("subTask1", "subTask2", "subEntryPoint");
assertThat(subcomponentEntryPoint.get(1, MILLISECONDS)).isEqualTo("completed");
}
}
| ProducerSubcomponentCancellationTest |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/util/connections/ConnectionCheckingConnectionProvider.java | {
"start": 905,
"end": 3372
} | class ____ extends UserSuppliedConnectionProviderImpl {
private final DataSource dataSource = new BaseDataSource( Environment.getProperties() );
/**
* Counts the "open" events. Does NOT hold the total number of open connections
* existing at a given time, just the amount of times a connection was opened.
*/
private final AtomicInteger connectionOpenEventCount = new AtomicInteger();
//Using a Vector just to avoid synchronizing on a bag
private final Vector<CheckedConnection> openedConnections = new Vector<>();
@Override
public Connection getConnection() throws SQLException {
this.connectionOpenEventCount.incrementAndGet();
final CheckedConnection opened = new CheckedConnection( dataSource.getConnection() );
this.openedConnections.add( opened );
return opened;
}
@Override
public void closeConnection(Connection connection) throws SQLException {
connection.close();
//cast is also meant to verify we're not being returned a different implementation
CheckedConnection wrapper = (CheckedConnection) connection;
boolean removed = this.openedConnections.remove( wrapper );
if ( !removed ) {
throw new IllegalStateException(
"Closing a connection which wasn't registered in this ConnectionProviderDecorator" );
}
}
/**
* Resets the counters to zero; it's useful to invoke this after Hibernate
* has booted to exclude connections being used during initialization.
* @throws IllegalStateException if any unclosed connection are being detected.
*/
public void clear() {
this.connectionOpenEventCount.set( 0 );
if ( !areAllConnectionClosed() ) {
throw new IllegalStateException( "Resetting test helper while not all connections have been closed yet" );
}
}
/**
* @return the count of connections which are currently open.
*/
public int getCurrentOpenConnections() {
return this.openedConnections.size();
}
/**
* @return {@code true} iff all known connections that have been opened are now closed.
*/
public boolean areAllConnectionClosed() {
return this.openedConnections.isEmpty();
}
/**
* @return This returns the count of connections that have been opened since
* construction, or since the last time method {@link #clear()} has
* been invoked. N.B. this count includes connections that have since been closed.
*/
public int getTotalOpenedConnectionCount() {
return this.connectionOpenEventCount.get();
}
private static final | ConnectionCheckingConnectionProvider |
java | elastic__elasticsearch | x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockServiceTests.java | {
"start": 6152,
"end": 71735
} | class ____ extends InferenceServiceTestCase {
private static final TimeValue TIMEOUT = new TimeValue(30, TimeUnit.SECONDS);
private ThreadPool threadPool;
@Before
public void init() throws Exception {
threadPool = createThreadPool(inferenceUtilityExecutors());
}
@After
public void shutdown() throws IOException {
terminate(threadPool);
}
public void testParseRequestConfig_CreatesAnAmazonBedrockModel() throws IOException {
try (var service = createAmazonBedrockService()) {
ActionListener<Model> modelVerificationListener = ActionTestUtils.assertNoFailureListener(model -> {
assertThat(model, instanceOf(AmazonBedrockEmbeddingsModel.class));
var settings = (AmazonBedrockEmbeddingsServiceSettings) model.getServiceSettings();
assertThat(settings.region(), is("region"));
assertThat(settings.modelId(), is("model"));
assertThat(settings.provider(), is(AmazonBedrockProvider.AMAZONTITAN));
var secretSettings = (AwsSecretSettings) model.getSecretSettings();
assertThat(secretSettings.accessKey().toString(), is("access"));
assertThat(secretSettings.secretKey().toString(), is("secret"));
});
service.parseRequestConfig(
"id",
TaskType.TEXT_EMBEDDING,
getRequestConfigMap(
createEmbeddingsRequestSettingsMap("region", "model", "amazontitan", null, null, null, null),
Map.of(),
getAmazonBedrockSecretSettingsMap("access", "secret")
),
modelVerificationListener
);
}
}
public void testParseRequestConfig_CreatesACohereModel() throws IOException {
try (var service = createAmazonBedrockService()) {
ActionListener<Model> modelVerificationListener = ActionTestUtils.assertNoFailureListener(model -> {
assertThat(model, instanceOf(AmazonBedrockEmbeddingsModel.class));
var settings = (AmazonBedrockEmbeddingsServiceSettings) model.getServiceSettings();
assertThat(settings.region(), is("region"));
assertThat(settings.modelId(), is("model"));
assertThat(settings.provider(), is(AmazonBedrockProvider.COHERE));
var secretSettings = (AwsSecretSettings) model.getSecretSettings();
assertThat(secretSettings.accessKey().toString(), is("access"));
assertThat(secretSettings.secretKey().toString(), is("secret"));
});
service.parseRequestConfig(
"id",
TaskType.TEXT_EMBEDDING,
getRequestConfigMap(
createEmbeddingsRequestSettingsMap("region", "model", "cohere", null, null, null, null),
AmazonBedrockEmbeddingsTaskSettingsTests.mutableMap("truncate", CohereTruncation.START),
getAmazonBedrockSecretSettingsMap("access", "secret")
),
modelVerificationListener
);
}
}
public void testParseRequestConfig_CohereSettingsWithNoCohereModel() throws IOException {
try (var service = createAmazonBedrockService()) {
ActionListener<Model> modelVerificationListener = ActionTestUtils.assertNoSuccessListener(exception -> {
assertThat(exception, instanceOf(ElasticsearchStatusException.class));
assertThat(
exception.getMessage(),
is("The [text_embedding] task type for provider [amazontitan] does not allow [truncate] field")
);
});
service.parseRequestConfig(
"id",
TaskType.TEXT_EMBEDDING,
getRequestConfigMap(
createEmbeddingsRequestSettingsMap("region", "model", "amazontitan", null, null, null, null),
AmazonBedrockEmbeddingsTaskSettingsTests.mutableMap("truncate", CohereTruncation.START),
getAmazonBedrockSecretSettingsMap("access", "secret")
),
modelVerificationListener
);
}
}
public void testParseRequestConfig_ThrowsUnsupportedModelType() throws IOException {
try (var service = createAmazonBedrockService()) {
ActionListener<Model> modelVerificationListener = ActionTestUtils.assertNoSuccessListener(exception -> {
assertThat(exception, instanceOf(ElasticsearchStatusException.class));
assertThat(exception.getMessage(), is("The [amazonbedrock] service does not support task type [sparse_embedding]"));
});
service.parseRequestConfig(
"id",
TaskType.SPARSE_EMBEDDING,
getRequestConfigMap(
createEmbeddingsRequestSettingsMap("region", "model", "amazontitan", null, false, null, null),
Map.of(),
getAmazonBedrockSecretSettingsMap("access", "secret")
),
modelVerificationListener
);
}
}
@SuppressWarnings("checkstyle:LineLength")
public void testGetConfiguration() throws Exception {
try (var service = createAmazonBedrockService()) {
String content = XContentHelper.stripWhitespace(
"""
{
"service": "amazonbedrock",
"name": "Amazon Bedrock",
"task_types": ["text_embedding", "completion"],
"configurations": {
"dimensions": {
"description": "The number of dimensions the resulting embeddings should have. For more information refer to https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters-titan-embed-text.html.",
"label": "Dimensions",
"required": false,
"sensitive": false,
"updatable": false,
"type": "int",
"supported_task_types": ["text_embedding"]
},
"secret_key": {
"description": "A valid AWS secret key that is paired with the access_key.",
"label": "Secret Key",
"required": true,
"sensitive": true,
"updatable": true,
"type": "str",
"supported_task_types": ["text_embedding", "completion"]
},
"provider": {
"description": "The model provider for your deployment.",
"label": "Provider",
"required": true,
"sensitive": false,
"updatable": false,
"type": "str",
"supported_task_types": ["text_embedding", "completion"]
},
"access_key": {
"description": "A valid AWS access key that has permissions to use Amazon Bedrock.",
"label": "Access Key",
"required": true,
"sensitive": true,
"updatable": true,
"type": "str",
"supported_task_types": ["text_embedding", "completion"]
},
"model": {
"description": "The base model ID or an ARN to a custom model based on a foundational model.",
"label": "Model",
"required": true,
"sensitive": false,
"updatable": false,
"type": "str",
"supported_task_types": ["text_embedding", "completion"]
},
"rate_limit.requests_per_minute": {
"description": "By default, the amazonbedrock service sets the number of requests allowed per minute to 240.",
"label": "Rate Limit",
"required": false,
"sensitive": false,
"updatable": false,
"type": "int",
"supported_task_types": ["text_embedding", "completion"]
},
"region": {
"description": "The region that your model or ARN is deployed in.",
"label": "Region",
"required": true,
"sensitive": false,
"updatable": false,
"type": "str",
"supported_task_types": ["text_embedding", "completion"]
}
}
}
"""
);
InferenceServiceConfiguration configuration = InferenceServiceConfiguration.fromXContentBytes(
new BytesArray(content),
XContentType.JSON
);
boolean humanReadable = true;
BytesReference originalBytes = toShuffledXContent(configuration, XContentType.JSON, ToXContent.EMPTY_PARAMS, humanReadable);
InferenceServiceConfiguration serviceConfiguration = service.getConfiguration();
assertToXContentEquivalent(
originalBytes,
toXContent(serviceConfiguration, XContentType.JSON, humanReadable),
XContentType.JSON
);
}
}
public void testCreateModel_ForEmbeddingsTask_InvalidProvider() throws IOException {
try (var service = createAmazonBedrockService()) {
ActionListener<Model> modelVerificationListener = ActionTestUtils.assertNoSuccessListener(exception -> {
assertThat(exception, instanceOf(ElasticsearchStatusException.class));
assertThat(exception.getMessage(), is("The [text_embedding] task type for provider [anthropic] is not available"));
});
service.parseRequestConfig(
"id",
TaskType.TEXT_EMBEDDING,
getRequestConfigMap(
createEmbeddingsRequestSettingsMap("region", "model", "anthropic", null, null, null, null),
Map.of(),
getAmazonBedrockSecretSettingsMap("access", "secret")
),
modelVerificationListener
);
}
}
public void testCreateModel_TopKParameter_NotAvailable() throws IOException {
try (var service = createAmazonBedrockService()) {
ActionListener<Model> modelVerificationListener = ActionTestUtils.assertNoSuccessListener(exception -> {
assertThat(exception, instanceOf(ElasticsearchStatusException.class));
assertThat(exception.getMessage(), is("The [top_k] task parameter is not available for provider [amazontitan]"));
});
service.parseRequestConfig(
"id",
TaskType.COMPLETION,
getRequestConfigMap(
createChatCompletionRequestSettingsMap("region", "model", "amazontitan"),
getChatCompletionTaskSettingsMap(1.0, 0.5, 0.2, 128),
getAmazonBedrockSecretSettingsMap("access", "secret")
),
modelVerificationListener
);
}
}
public void testParseRequestConfig_ThrowsWhenAnExtraKeyExistsInConfig() throws IOException {
try (var service = createAmazonBedrockService()) {
var config = getRequestConfigMap(
createEmbeddingsRequestSettingsMap("region", "model", "amazontitan", null, null, null, null),
Map.of(),
getAmazonBedrockSecretSettingsMap("access", "secret")
);
config.put("extra_key", "value");
ActionListener<Model> modelVerificationListener = ActionTestUtils.assertNoSuccessListener(exception -> {
assertThat(exception, instanceOf(ElasticsearchStatusException.class));
assertThat(
exception.getMessage(),
is("Configuration contains settings [{extra_key=value}] unknown to the [amazonbedrock] service")
);
});
service.parseRequestConfig("id", TaskType.TEXT_EMBEDDING, config, modelVerificationListener);
}
}
public void testParseRequestConfig_ThrowsWhenAnExtraKeyExistsInServiceSettingsMap() throws IOException {
try (var service = createAmazonBedrockService()) {
var serviceSettings = createEmbeddingsRequestSettingsMap("region", "model", "amazontitan", null, null, null, null);
serviceSettings.put("extra_key", "value");
var config = getRequestConfigMap(serviceSettings, Map.of(), getAmazonBedrockSecretSettingsMap("access", "secret"));
ActionListener<Model> modelVerificationListener = ActionTestUtils.assertNoSuccessListener(e -> {
assertThat(e, instanceOf(ElasticsearchStatusException.class));
assertThat(
e.getMessage(),
is("Configuration contains settings [{extra_key=value}] unknown to the [amazonbedrock] service")
);
});
service.parseRequestConfig("id", TaskType.TEXT_EMBEDDING, config, modelVerificationListener);
}
}
public void testParseRequestConfig_ThrowsWhenAnExtraKeyExistsInTaskSettingsMap() throws IOException {
try (var service = createAmazonBedrockService()) {
var settingsMap = createChatCompletionRequestSettingsMap("region", "model", "anthropic");
var taskSettingsMap = getChatCompletionTaskSettingsMap(1.0, 0.5, 0.2, 128);
var secretSettingsMap = getAmazonBedrockSecretSettingsMap("access", "secret");
taskSettingsMap.put("extra_key", "value");
var config = getRequestConfigMap(settingsMap, taskSettingsMap, secretSettingsMap);
ActionListener<Model> modelVerificationListener = ActionTestUtils.assertNoSuccessListener(e -> {
assertThat(e, instanceOf(ElasticsearchStatusException.class));
assertThat(
e.getMessage(),
is("Configuration contains settings [{extra_key=value}] unknown to the [amazonbedrock] service")
);
});
service.parseRequestConfig("id", TaskType.COMPLETION, config, modelVerificationListener);
}
}
public void testParseRequestConfig_ThrowsWhenAnExtraKeyExistsInSecretSettingsMap() throws IOException {
try (var service = createAmazonBedrockService()) {
var settingsMap = createChatCompletionRequestSettingsMap("region", "model", "anthropic");
var taskSettingsMap = getChatCompletionTaskSettingsMap(1.0, 0.5, 0.2, 128);
var secretSettingsMap = getAmazonBedrockSecretSettingsMap("access", "secret");
secretSettingsMap.put("extra_key", "value");
var config = getRequestConfigMap(settingsMap, taskSettingsMap, secretSettingsMap);
ActionListener<Model> modelVerificationListener = ActionTestUtils.assertNoSuccessListener(e -> {
assertThat(e, instanceOf(ElasticsearchStatusException.class));
assertThat(
e.getMessage(),
is("Configuration contains settings [{extra_key=value}] unknown to the [amazonbedrock] service")
);
});
service.parseRequestConfig("id", TaskType.COMPLETION, config, modelVerificationListener);
}
}
public void testParseRequestConfig_MovesModel() throws IOException {
try (var service = createAmazonBedrockService()) {
ActionListener<Model> modelVerificationListener = ActionTestUtils.assertNoFailureListener(model -> {
assertThat(model, instanceOf(AmazonBedrockEmbeddingsModel.class));
var settings = (AmazonBedrockEmbeddingsServiceSettings) model.getServiceSettings();
assertThat(settings.region(), is("region"));
assertThat(settings.modelId(), is("model"));
assertThat(settings.provider(), is(AmazonBedrockProvider.AMAZONTITAN));
var secretSettings = (AwsSecretSettings) model.getSecretSettings();
assertThat(secretSettings.accessKey().toString(), is("access"));
assertThat(secretSettings.secretKey().toString(), is("secret"));
});
service.parseRequestConfig(
"id",
TaskType.TEXT_EMBEDDING,
getRequestConfigMap(
createEmbeddingsRequestSettingsMap("region", "model", "amazontitan", null, null, null, null),
Map.of(),
getAmazonBedrockSecretSettingsMap("access", "secret")
),
modelVerificationListener
);
}
}
public void testParseRequestConfig_CreatesAnAmazonBedrockEmbeddingsModelWhenChunkingSettingsProvided() throws IOException {
try (var service = createAmazonBedrockService()) {
ActionListener<Model> modelVerificationListener = ActionTestUtils.assertNoFailureListener(model -> {
assertThat(model, instanceOf(AmazonBedrockEmbeddingsModel.class));
var settings = (AmazonBedrockEmbeddingsServiceSettings) model.getServiceSettings();
assertThat(settings.region(), is("region"));
assertThat(settings.modelId(), is("model"));
assertThat(settings.provider(), is(AmazonBedrockProvider.AMAZONTITAN));
var secretSettings = (AwsSecretSettings) model.getSecretSettings();
assertThat(secretSettings.accessKey().toString(), is("access"));
assertThat(secretSettings.secretKey().toString(), is("secret"));
assertThat(model.getConfigurations().getChunkingSettings(), instanceOf(ChunkingSettings.class));
});
service.parseRequestConfig(
"id",
TaskType.TEXT_EMBEDDING,
getRequestConfigMap(
createEmbeddingsRequestSettingsMap("region", "model", "amazontitan", null, null, null, null),
Map.of(),
createRandomChunkingSettingsMap(),
getAmazonBedrockSecretSettingsMap("access", "secret")
),
modelVerificationListener
);
}
}
public void testParseRequestConfig_CreatesAnAmazonBedrockEmbeddingsModelWhenChunkingSettingsNotProvided() throws IOException {
try (var service = createAmazonBedrockService()) {
ActionListener<Model> modelVerificationListener = ActionTestUtils.assertNoFailureListener(model -> {
assertThat(model, instanceOf(AmazonBedrockEmbeddingsModel.class));
var settings = (AmazonBedrockEmbeddingsServiceSettings) model.getServiceSettings();
assertThat(settings.region(), is("region"));
assertThat(settings.modelId(), is("model"));
assertThat(settings.provider(), is(AmazonBedrockProvider.AMAZONTITAN));
var secretSettings = (AwsSecretSettings) model.getSecretSettings();
assertThat(secretSettings.accessKey().toString(), is("access"));
assertThat(secretSettings.secretKey().toString(), is("secret"));
assertThat(model.getConfigurations().getChunkingSettings(), instanceOf(ChunkingSettings.class));
});
service.parseRequestConfig(
"id",
TaskType.TEXT_EMBEDDING,
getRequestConfigMap(
createEmbeddingsRequestSettingsMap("region", "model", "amazontitan", null, null, null, null),
Map.of(),
getAmazonBedrockSecretSettingsMap("access", "secret")
),
modelVerificationListener
);
}
}
public void testCreateModel_ForEmbeddingsTask_DimensionsIsNotAllowed() throws IOException {
try (var service = createAmazonBedrockService()) {
ActionListener<Model> modelVerificationListener = ActionTestUtils.assertNoSuccessListener(exception -> {
assertThat(exception, instanceOf(ValidationException.class));
assertThat(exception.getMessage(), containsString("[service_settings] does not allow the setting [dimensions]"));
});
service.parseRequestConfig(
"id",
TaskType.TEXT_EMBEDDING,
getRequestConfigMap(
createEmbeddingsRequestSettingsMap("region", "model", "amazontitan", 512, null, null, null),
Map.of(),
getAmazonBedrockSecretSettingsMap("access", "secret")
),
modelVerificationListener
);
}
}
public void testParsePersistedConfigWithSecrets_CreatesAnAmazonBedrockEmbeddingsModel() throws IOException {
try (var service = createAmazonBedrockService()) {
var settingsMap = createEmbeddingsRequestSettingsMap("region", "model", "amazontitan", null, false, null, null);
var secretSettingsMap = getAmazonBedrockSecretSettingsMap("access", "secret");
var persistedConfig = getPersistedConfigMap(settingsMap, new HashMap<>(Map.of()), secretSettingsMap);
var model = service.parsePersistedConfigWithSecrets(
"id",
TaskType.TEXT_EMBEDDING,
persistedConfig.config(),
persistedConfig.secrets()
);
assertThat(model, instanceOf(AmazonBedrockEmbeddingsModel.class));
var settings = (AmazonBedrockEmbeddingsServiceSettings) model.getServiceSettings();
assertThat(settings.region(), is("region"));
assertThat(settings.modelId(), is("model"));
assertThat(settings.provider(), is(AmazonBedrockProvider.AMAZONTITAN));
var secretSettings = (AwsSecretSettings) model.getSecretSettings();
assertThat(secretSettings.accessKey().toString(), is("access"));
assertThat(secretSettings.secretKey().toString(), is("secret"));
}
}
public void testParsePersistedConfigWithSecrets_CreatesAnAmazonBedrockEmbeddingsModelWhenChunkingSettingsProvided() throws IOException {
try (var service = createAmazonBedrockService()) {
var settingsMap = createEmbeddingsRequestSettingsMap("region", "model", "amazontitan", null, false, null, null);
var secretSettingsMap = getAmazonBedrockSecretSettingsMap("access", "secret");
var persistedConfig = getPersistedConfigMap(
settingsMap,
new HashMap<>(Map.of()),
createRandomChunkingSettingsMap(),
secretSettingsMap
);
var model = service.parsePersistedConfigWithSecrets(
"id",
TaskType.TEXT_EMBEDDING,
persistedConfig.config(),
persistedConfig.secrets()
);
assertThat(model, instanceOf(AmazonBedrockEmbeddingsModel.class));
var settings = (AmazonBedrockEmbeddingsServiceSettings) model.getServiceSettings();
assertThat(settings.region(), is("region"));
assertThat(settings.modelId(), is("model"));
assertThat(settings.provider(), is(AmazonBedrockProvider.AMAZONTITAN));
assertThat(model.getConfigurations().getChunkingSettings(), instanceOf(ChunkingSettings.class));
var secretSettings = (AwsSecretSettings) model.getSecretSettings();
assertThat(secretSettings.accessKey().toString(), is("access"));
assertThat(secretSettings.secretKey().toString(), is("secret"));
}
}
public void testParsePersistedConfigWithSecrets_CreatesAnAmazonBedrockEmbeddingsModelWhenChunkingSettingsNotProvided()
throws IOException {
try (var service = createAmazonBedrockService()) {
var settingsMap = createEmbeddingsRequestSettingsMap("region", "model", "amazontitan", null, false, null, null);
var secretSettingsMap = getAmazonBedrockSecretSettingsMap("access", "secret");
var persistedConfig = getPersistedConfigMap(settingsMap, new HashMap<String, Object>(Map.of()), secretSettingsMap);
var model = service.parsePersistedConfigWithSecrets(
"id",
TaskType.TEXT_EMBEDDING,
persistedConfig.config(),
persistedConfig.secrets()
);
assertThat(model, instanceOf(AmazonBedrockEmbeddingsModel.class));
var settings = (AmazonBedrockEmbeddingsServiceSettings) model.getServiceSettings();
assertThat(settings.region(), is("region"));
assertThat(settings.modelId(), is("model"));
assertThat(settings.provider(), is(AmazonBedrockProvider.AMAZONTITAN));
assertThat(model.getConfigurations().getChunkingSettings(), instanceOf(ChunkingSettings.class));
var secretSettings = (AwsSecretSettings) model.getSecretSettings();
assertThat(secretSettings.accessKey().toString(), is("access"));
assertThat(secretSettings.secretKey().toString(), is("secret"));
}
}
public void testParsePersistedConfigWithSecrets_ThrowsErrorTryingToParseInvalidModel() throws IOException {
try (var service = createAmazonBedrockService()) {
var settingsMap = createChatCompletionRequestSettingsMap("region", "model", "amazontitan");
var secretSettingsMap = getAmazonBedrockSecretSettingsMap("access", "secret");
var persistedConfig = getPersistedConfigMap(settingsMap, Map.of(), secretSettingsMap);
var thrownException = expectThrows(
ElasticsearchStatusException.class,
() -> service.parsePersistedConfigWithSecrets(
"id",
TaskType.SPARSE_EMBEDDING,
persistedConfig.config(),
persistedConfig.secrets()
)
);
assertThat(thrownException.getMessage(), containsString("Failed to parse stored model [id] for [amazonbedrock] service"));
assertThat(
thrownException.getMessage(),
containsString("The [amazonbedrock] service does not support task type [sparse_embedding]")
);
}
}
public void testParsePersistedConfigWithSecrets_DoesNotThrowWhenAnExtraKeyExistsInConfig() throws IOException {
try (var service = createAmazonBedrockService()) {
var settingsMap = createEmbeddingsRequestSettingsMap("region", "model", "amazontitan", null, false, null, null);
var secretSettingsMap = getAmazonBedrockSecretSettingsMap("access", "secret");
var persistedConfig = getPersistedConfigMap(settingsMap, new HashMap<>(Map.of()), secretSettingsMap);
persistedConfig.config().put("extra_key", "value");
var model = service.parsePersistedConfigWithSecrets(
"id",
TaskType.TEXT_EMBEDDING,
persistedConfig.config(),
persistedConfig.secrets()
);
assertThat(model, instanceOf(AmazonBedrockEmbeddingsModel.class));
var settings = (AmazonBedrockEmbeddingsServiceSettings) model.getServiceSettings();
assertThat(settings.region(), is("region"));
assertThat(settings.modelId(), is("model"));
assertThat(settings.provider(), is(AmazonBedrockProvider.AMAZONTITAN));
var secretSettings = (AwsSecretSettings) model.getSecretSettings();
assertThat(secretSettings.accessKey().toString(), is("access"));
assertThat(secretSettings.secretKey().toString(), is("secret"));
}
}
public void testParsePersistedConfigWithSecrets_DoesNotThrowWhenAnExtraKeyExistsInSecretsSettings() throws IOException {
try (var service = createAmazonBedrockService()) {
var settingsMap = createEmbeddingsRequestSettingsMap("region", "model", "amazontitan", null, false, null, null);
var secretSettingsMap = getAmazonBedrockSecretSettingsMap("access", "secret");
secretSettingsMap.put("extra_key", "value");
var persistedConfig = getPersistedConfigMap(settingsMap, new HashMap<>(Map.of()), secretSettingsMap);
var model = service.parsePersistedConfigWithSecrets(
"id",
TaskType.TEXT_EMBEDDING,
persistedConfig.config(),
persistedConfig.secrets()
);
assertThat(model, instanceOf(AmazonBedrockEmbeddingsModel.class));
var settings = (AmazonBedrockEmbeddingsServiceSettings) model.getServiceSettings();
assertThat(settings.region(), is("region"));
assertThat(settings.modelId(), is("model"));
assertThat(settings.provider(), is(AmazonBedrockProvider.AMAZONTITAN));
var secretSettings = (AwsSecretSettings) model.getSecretSettings();
assertThat(secretSettings.accessKey().toString(), is("access"));
assertThat(secretSettings.secretKey().toString(), is("secret"));
}
}
public void testParsePersistedConfigWithSecrets_NotThrowWhenAnExtraKeyExistsInSecrets() throws IOException {
try (var service = createAmazonBedrockService()) {
var settingsMap = createEmbeddingsRequestSettingsMap("region", "model", "amazontitan", null, false, null, null);
var secretSettingsMap = getAmazonBedrockSecretSettingsMap("access", "secret");
var persistedConfig = getPersistedConfigMap(settingsMap, new HashMap<>(Map.of()), secretSettingsMap);
persistedConfig.secrets().put("extra_key", "value");
var model = service.parsePersistedConfigWithSecrets(
"id",
TaskType.TEXT_EMBEDDING,
persistedConfig.config(),
persistedConfig.secrets()
);
assertThat(model, instanceOf(AmazonBedrockEmbeddingsModel.class));
var settings = (AmazonBedrockEmbeddingsServiceSettings) model.getServiceSettings();
assertThat(settings.region(), is("region"));
assertThat(settings.modelId(), is("model"));
assertThat(settings.provider(), is(AmazonBedrockProvider.AMAZONTITAN));
var secretSettings = (AwsSecretSettings) model.getSecretSettings();
assertThat(secretSettings.accessKey().toString(), is("access"));
assertThat(secretSettings.secretKey().toString(), is("secret"));
}
}
public void testParsePersistedConfigWithSecrets_NotThrowWhenAnExtraKeyExistsInServiceSettings() throws IOException {
try (var service = createAmazonBedrockService()) {
var settingsMap = createEmbeddingsRequestSettingsMap("region", "model", "amazontitan", null, false, null, null);
settingsMap.put("extra_key", "value");
var secretSettingsMap = getAmazonBedrockSecretSettingsMap("access", "secret");
var persistedConfig = getPersistedConfigMap(settingsMap, new HashMap<>(Map.of()), secretSettingsMap);
var model = service.parsePersistedConfigWithSecrets(
"id",
TaskType.TEXT_EMBEDDING,
persistedConfig.config(),
persistedConfig.secrets()
);
assertThat(model, instanceOf(AmazonBedrockEmbeddingsModel.class));
var settings = (AmazonBedrockEmbeddingsServiceSettings) model.getServiceSettings();
assertThat(settings.region(), is("region"));
assertThat(settings.modelId(), is("model"));
assertThat(settings.provider(), is(AmazonBedrockProvider.AMAZONTITAN));
var secretSettings = (AwsSecretSettings) model.getSecretSettings();
assertThat(secretSettings.accessKey().toString(), is("access"));
assertThat(secretSettings.secretKey().toString(), is("secret"));
}
}
public void testParsePersistedConfigWithSecrets_NotThrowWhenAnExtraKeyExistsInTaskSettings() throws IOException {
try (var service = createAmazonBedrockService()) {
var settingsMap = createChatCompletionRequestSettingsMap("region", "model", "anthropic");
var taskSettingsMap = getChatCompletionTaskSettingsMap(1.0, 0.5, 0.2, 128);
var secretSettingsMap = getAmazonBedrockSecretSettingsMap("access", "secret");
taskSettingsMap.put("extra_key", "value");
var persistedConfig = getPersistedConfigMap(settingsMap, taskSettingsMap, secretSettingsMap);
var model = service.parsePersistedConfigWithSecrets(
"id",
TaskType.COMPLETION,
persistedConfig.config(),
persistedConfig.secrets()
);
assertThat(model, instanceOf(AmazonBedrockChatCompletionModel.class));
var settings = (AmazonBedrockChatCompletionServiceSettings) model.getServiceSettings();
assertThat(settings.region(), is("region"));
assertThat(settings.modelId(), is("model"));
assertThat(settings.provider(), is(AmazonBedrockProvider.ANTHROPIC));
var taskSettings = (AmazonBedrockChatCompletionTaskSettings) model.getTaskSettings();
assertThat(taskSettings.temperature(), is(1.0));
assertThat(taskSettings.topP(), is(0.5));
assertThat(taskSettings.topK(), is(0.2));
assertThat(taskSettings.maxNewTokens(), is(128));
var secretSettings = (AwsSecretSettings) model.getSecretSettings();
assertThat(secretSettings.accessKey().toString(), is("access"));
assertThat(secretSettings.secretKey().toString(), is("secret"));
}
}
// Parsing a persisted (secret-less) config yields an embeddings model; secrets stay null
// because parsePersistedConfig is not handed the secrets map.
public void testParsePersistedConfig_CreatesAnAmazonBedrockEmbeddingsModel() throws IOException {
try (var service = createAmazonBedrockService()) {
var settingsMap = createEmbeddingsRequestSettingsMap("region", "model", "amazontitan", null, false, null, null);
var secretSettingsMap = getAmazonBedrockSecretSettingsMap("access", "secret");
var persistedConfig = getPersistedConfigMap(settingsMap, new HashMap<String, Object>(Map.of()), secretSettingsMap);
var model = service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config());
assertThat(model, instanceOf(AmazonBedrockEmbeddingsModel.class));
var settings = (AmazonBedrockEmbeddingsServiceSettings) model.getServiceSettings();
assertThat(settings.region(), is("region"));
assertThat(settings.modelId(), is("model"));
assertThat(settings.provider(), is(AmazonBedrockProvider.AMAZONTITAN));
assertNull(model.getSecretSettings());
}
}
// When the persisted config carries an explicit chunking_settings section, the parsed
// embeddings model must expose it via its configurations.
public void testParsePersistedConfig_CreatesAnAmazonBedrockEmbeddingsModelWhenChunkingSettingsProvided() throws IOException {
try (var service = createAmazonBedrockService()) {
var settingsMap = createEmbeddingsRequestSettingsMap("region", "model", "amazontitan", null, false, null, null);
var secretSettingsMap = getAmazonBedrockSecretSettingsMap("access", "secret");
var persistedConfig = getPersistedConfigMap(
settingsMap,
new HashMap<>(Map.of()),
createRandomChunkingSettingsMap(),
secretSettingsMap
);
var model = service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config());
assertThat(model, instanceOf(AmazonBedrockEmbeddingsModel.class));
var settings = (AmazonBedrockEmbeddingsServiceSettings) model.getServiceSettings();
assertThat(settings.region(), is("region"));
assertThat(settings.modelId(), is("model"));
assertThat(settings.provider(), is(AmazonBedrockProvider.AMAZONTITAN));
assertThat(model.getConfigurations().getChunkingSettings(), instanceOf(ChunkingSettings.class));
assertNull(model.getSecretSettings());
}
}
// Without an explicit chunking_settings section the service must still fall back to a
// default ChunkingSettings instance (never null) on the parsed model.
public void testParsePersistedConfig_CreatesAnAmazonBedrockEmbeddingsModelWhenChunkingSettingsNotProvided() throws IOException {
try (var service = createAmazonBedrockService()) {
var settingsMap = createEmbeddingsRequestSettingsMap("region", "model", "amazontitan", null, false, null, null);
var secretSettingsMap = getAmazonBedrockSecretSettingsMap("access", "secret");
var persistedConfig = getPersistedConfigMap(settingsMap, new HashMap<>(Map.of()), secretSettingsMap);
var model = service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config());
assertThat(model, instanceOf(AmazonBedrockEmbeddingsModel.class));
var settings = (AmazonBedrockEmbeddingsServiceSettings) model.getServiceSettings();
assertThat(settings.region(), is("region"));
assertThat(settings.modelId(), is("model"));
assertThat(settings.provider(), is(AmazonBedrockProvider.AMAZONTITAN));
assertThat(model.getConfigurations().getChunkingSettings(), instanceOf(ChunkingSettings.class));
assertNull(model.getSecretSettings());
}
}
// Persisted chat-completion configs parse into a completion model with all task settings
// (temperature/topP/topK/maxNewTokens) intact; secrets remain null without the secrets map.
public void testParsePersistedConfig_CreatesAnAmazonBedrockChatCompletionModel() throws IOException {
try (var service = createAmazonBedrockService()) {
var settingsMap = createChatCompletionRequestSettingsMap("region", "model", "anthropic");
var taskSettingsMap = getChatCompletionTaskSettingsMap(1.0, 0.5, 0.2, 128);
var secretSettingsMap = getAmazonBedrockSecretSettingsMap("access", "secret");
var persistedConfig = getPersistedConfigMap(settingsMap, taskSettingsMap, secretSettingsMap);
var model = service.parsePersistedConfig("id", TaskType.COMPLETION, persistedConfig.config());
assertThat(model, instanceOf(AmazonBedrockChatCompletionModel.class));
var settings = (AmazonBedrockChatCompletionServiceSettings) model.getServiceSettings();
assertThat(settings.region(), is("region"));
assertThat(settings.modelId(), is("model"));
assertThat(settings.provider(), is(AmazonBedrockProvider.ANTHROPIC));
var taskSettings = (AmazonBedrockChatCompletionTaskSettings) model.getTaskSettings();
assertThat(taskSettings.temperature(), is(1.0));
assertThat(taskSettings.topP(), is(0.5));
assertThat(taskSettings.topK(), is(0.2));
assertThat(taskSettings.maxNewTokens(), is(128));
assertNull(model.getSecretSettings());
}
}
// SPARSE_EMBEDDING is not supported by the Bedrock service; parsing a stored model of that
// task type must fail with a descriptive ElasticsearchStatusException.
public void testParsePersistedConfig_ThrowsErrorTryingToParseInvalidModel() throws IOException {
try (var service = createAmazonBedrockService()) {
var settingsMap = createEmbeddingsRequestSettingsMap("region", "model", "amazontitan", null, false, null, null);
var secretSettingsMap = getAmazonBedrockSecretSettingsMap("access", "secret");
var persistedConfig = getPersistedConfigMap(settingsMap, new HashMap<>(Map.of()), secretSettingsMap);
var thrownException = expectThrows(
ElasticsearchStatusException.class,
() -> service.parsePersistedConfig("id", TaskType.SPARSE_EMBEDDING, persistedConfig.config())
);
assertThat(thrownException.getMessage(), containsString("Failed to parse stored model [id] for [amazonbedrock] service"));
assertThat(
thrownException.getMessage(),
containsString("The [amazonbedrock] service does not support task type [sparse_embedding]")
);
}
}
// Unknown keys at the top level of a persisted config map are ignored (persisted data may
// come from newer/older nodes), so parsing must still succeed.
public void testParsePersistedConfig_DoesNotThrowWhenAnExtraKeyExistsInConfig() throws IOException {
try (var service = createAmazonBedrockService()) {
var settingsMap = createEmbeddingsRequestSettingsMap("region", "model", "amazontitan", null, false, null, null);
var secretSettingsMap = getAmazonBedrockSecretSettingsMap("access", "secret");
var persistedConfig = getPersistedConfigMap(settingsMap, new HashMap<>(Map.of()), secretSettingsMap);
persistedConfig.config().put("extra_key", "value");
var model = service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config());
assertThat(model, instanceOf(AmazonBedrockEmbeddingsModel.class));
var settings = (AmazonBedrockEmbeddingsServiceSettings) model.getServiceSettings();
assertThat(settings.region(), is("region"));
assertThat(settings.modelId(), is("model"));
assertThat(settings.provider(), is(AmazonBedrockProvider.AMAZONTITAN));
assertNull(model.getSecretSettings());
}
}
// Unknown keys inside the service_settings section must be tolerated during parsing.
public void testParsePersistedConfig_NotThrowWhenAnExtraKeyExistsInServiceSettings() throws IOException {
try (var service = createAmazonBedrockService()) {
var settingsMap = createEmbeddingsRequestSettingsMap("region", "model", "amazontitan", null, false, null, null);
settingsMap.put("extra_key", "value");
var secretSettingsMap = getAmazonBedrockSecretSettingsMap("access", "secret");
var persistedConfig = getPersistedConfigMap(settingsMap, new HashMap<>(Map.of()), secretSettingsMap);
// NOTE(review): this also puts the extra key at the top level of the config, which the
// dedicated "...ExistsInConfig" test already covers — looks like a copy-paste leftover.
persistedConfig.config().put("extra_key", "value");
var model = service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config());
assertThat(model, instanceOf(AmazonBedrockEmbeddingsModel.class));
var settings = (AmazonBedrockEmbeddingsServiceSettings) model.getServiceSettings();
assertThat(settings.region(), is("region"));
assertThat(settings.modelId(), is("model"));
assertThat(settings.provider(), is(AmazonBedrockProvider.AMAZONTITAN));
assertNull(model.getSecretSettings());
}
}
// Unknown keys inside the task_settings section of a persisted completion config must be
// tolerated; all recognized task settings still parse to their stored values.
public void testParsePersistedConfig_NotThrowWhenAnExtraKeyExistsInTaskSettings() throws IOException {
try (var service = createAmazonBedrockService()) {
var settingsMap = createChatCompletionRequestSettingsMap("region", "model", "anthropic");
var taskSettingsMap = getChatCompletionTaskSettingsMap(1.0, 0.5, 0.2, 128);
taskSettingsMap.put("extra_key", "value");
var secretSettingsMap = getAmazonBedrockSecretSettingsMap("access", "secret");
var persistedConfig = getPersistedConfigMap(settingsMap, taskSettingsMap, secretSettingsMap);
var model = service.parsePersistedConfig("id", TaskType.COMPLETION, persistedConfig.config());
assertThat(model, instanceOf(AmazonBedrockChatCompletionModel.class));
var settings = (AmazonBedrockChatCompletionServiceSettings) model.getServiceSettings();
assertThat(settings.region(), is("region"));
assertThat(settings.modelId(), is("model"));
assertThat(settings.provider(), is(AmazonBedrockProvider.ANTHROPIC));
var taskSettings = (AmazonBedrockChatCompletionTaskSettings) model.getTaskSettings();
assertThat(taskSettings.temperature(), is(1.0));
assertThat(taskSettings.topP(), is(0.5));
assertThat(taskSettings.topK(), is(0.2));
assertThat(taskSettings.maxNewTokens(), is(128));
assertNull(model.getSecretSettings());
}
}
// infer() with a model of the wrong type must fail fast with an "internal model was
// invalid" error, and the mocked HTTP sender must still be started and closed exactly once.
public void testInfer_ThrowsErrorWhenModelIsNotAmazonBedrockModel() throws IOException {
var sender = createMockSender();
var factory = mock(HttpRequestSender.Factory.class);
when(factory.createSender()).thenReturn(sender);
var amazonBedrockFactory = new AmazonBedrockMockRequestSender.Factory(
ServiceComponentsTests.createWithSettings(threadPool, Settings.EMPTY),
mockClusterServiceEmpty()
);
var mockModel = getInvalidModel("model_id", "service_name");
try (
var service = new AmazonBedrockService(
factory,
amazonBedrockFactory,
createWithEmptySettings(threadPool),
mockClusterServiceEmpty()
)
) {
PlainActionFuture<InferenceServiceResults> listener = new PlainActionFuture<>();
service.infer(
mockModel,
null,
null,
null,
List.of(""),
false,
new HashMap<>(),
InputType.INGEST,
InferenceAction.Request.DEFAULT_TIMEOUT,
listener
);
var thrownException = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT));
assertThat(
thrownException.getMessage(),
is("The internal model was invalid, please delete the service [service_name] with id [model_id] and add it again.")
);
verify(factory, times(1)).createSender();
verify(sender, times(1)).startAsynchronously(any());
}
// Closing the service must close the sender it created; nothing else may touch the mocks.
verify(sender, times(1)).close();
verifyNoMoreInteractions(factory);
verifyNoMoreInteractions(sender);
}
// Happy-path embeddings inference against an Amazon Titan model: the queued mock response
// must come back through infer() unchanged as float embeddings.
public void testInfer_SendsRequest_ForTitanEmbeddingsModel() throws IOException {
// The HTTP sender factory only satisfies the service constructor; actual requests are
// routed through the AmazonBedrockMockRequestSender below.
var sender = createMockSender();
var factory = mock(HttpRequestSender.Factory.class);
when(factory.createSender()).thenReturn(sender);
var amazonBedrockFactory = new AmazonBedrockMockRequestSender.Factory(
ServiceComponentsTests.createWithSettings(threadPool, Settings.EMPTY),
mockClusterServiceEmpty()
);
var model = AmazonBedrockEmbeddingsModelTests.createModel(
"id",
"region",
"model",
AmazonBedrockProvider.AMAZONTITAN,
"access",
"secret"
);
try (
var service = new AmazonBedrockService(
factory,
amazonBedrockFactory,
createWithEmptySettings(threadPool),
mockClusterServiceEmpty()
);
var requestSender = (AmazonBedrockMockRequestSender) amazonBedrockFactory.createSender()
) {
var results = new DenseEmbeddingFloatResults(List.of(new DenseEmbeddingFloatResults.Embedding(new float[] { 0.123F, 0.678F })));
requestSender.enqueue(results);
PlainActionFuture<InferenceServiceResults> listener = new PlainActionFuture<>();
service.infer(
model,
null,
null,
null,
List.of("abc"),
false,
new HashMap<>(),
InputType.INGEST,
InferenceAction.Request.DEFAULT_TIMEOUT,
listener
);
var result = listener.actionGet(TIMEOUT);
assertThat(result.asMap(), Matchers.is(buildExpectationFloat(List.of(new float[] { 0.123F, 0.678F }))));
}
}
// Same happy-path embeddings flow as the Titan test, but for the Cohere provider and with
// a CLASSIFICATION input type.
public void testInfer_SendsRequest_ForCohereEmbeddingsModel() throws IOException {
var sender = createMockSender();
var factory = mock(HttpRequestSender.Factory.class);
when(factory.createSender()).thenReturn(sender);
var amazonBedrockFactory = new AmazonBedrockMockRequestSender.Factory(
ServiceComponentsTests.createWithSettings(threadPool, Settings.EMPTY),
mockClusterServiceEmpty()
);
try (
var service = new AmazonBedrockService(
factory,
amazonBedrockFactory,
createWithEmptySettings(threadPool),
mockClusterServiceEmpty()
)
) {
try (var requestSender = (AmazonBedrockMockRequestSender) amazonBedrockFactory.createSender()) {
var results = new DenseEmbeddingFloatResults(
List.of(new DenseEmbeddingFloatResults.Embedding(new float[] { 0.123F, 0.678F }))
);
requestSender.enqueue(results);
var model = AmazonBedrockEmbeddingsModelTests.createModel(
"id",
"region",
"model",
AmazonBedrockProvider.COHERE,
"access",
"secret"
);
PlainActionFuture<InferenceServiceResults> listener = new PlainActionFuture<>();
service.infer(
model,
null,
null,
null,
List.of("abc"),
false,
new HashMap<>(),
InputType.CLASSIFICATION,
InferenceAction.Request.DEFAULT_TIMEOUT,
listener
);
var result = listener.actionGet(TIMEOUT);
assertThat(result.asMap(), Matchers.is(buildExpectationFloat(List.of(new float[] { 0.123F, 0.678F }))));
}
}
}
// Happy-path chat-completion inference: the queued mock completion result must surface
// through infer() as-is.
public void testInfer_SendsRequest_ForChatCompletionModel() throws IOException {
var sender = createMockSender();
var factory = mock(HttpRequestSender.Factory.class);
when(factory.createSender()).thenReturn(sender);
var amazonBedrockFactory = new AmazonBedrockMockRequestSender.Factory(
ServiceComponentsTests.createWithSettings(threadPool, Settings.EMPTY),
mockClusterServiceEmpty()
);
try (
var service = new AmazonBedrockService(
factory,
amazonBedrockFactory,
createWithEmptySettings(threadPool),
mockClusterServiceEmpty()
)
) {
try (var requestSender = (AmazonBedrockMockRequestSender) amazonBedrockFactory.createSender()) {
var mockResults = new ChatCompletionResults(List.of(new ChatCompletionResults.Result("test result")));
requestSender.enqueue(mockResults);
var model = AmazonBedrockChatCompletionModelTests.createModel(
"id",
"region",
"model",
AmazonBedrockProvider.AMAZONTITAN,
"access",
"secret"
);
PlainActionFuture<InferenceServiceResults> listener = new PlainActionFuture<>();
service.infer(
model,
null,
null,
null,
List.of("abc"),
false,
new HashMap<>(),
InputType.INGEST,
InferenceAction.Request.DEFAULT_TIMEOUT,
listener
);
var result = listener.actionGet(TIMEOUT);
assertThat(result.asMap(), Matchers.is(buildExpectationCompletion(List.of("test result"))));
}
}
}
// updateModelWithEmbeddingDetails only applies to embeddings models; handing it a
// chat-completion model must raise an ElasticsearchStatusException.
public void testUpdateModelWithEmbeddingDetails_InvalidModelProvided() throws IOException {
var sender = createMockSender();
var factory = mock(HttpRequestSender.Factory.class);
when(factory.createSender()).thenReturn(sender);
var amazonBedrockFactory = new AmazonBedrockMockRequestSender.Factory(
ServiceComponentsTests.createWithSettings(threadPool, Settings.EMPTY),
mockClusterServiceEmpty()
);
try (
var service = new AmazonBedrockService(
factory,
amazonBedrockFactory,
createWithEmptySettings(threadPool),
mockClusterServiceEmpty()
)
) {
var model = AmazonBedrockChatCompletionModelTests.createModel(
randomAlphaOfLength(10),
randomAlphaOfLength(10),
randomAlphaOfLength(10),
randomFrom(AmazonBedrockProvider.values()),
randomAlphaOfLength(10),
randomAlphaOfLength(10)
);
assertThrows(
ElasticsearchStatusException.class,
() -> { service.updateModelWithEmbeddingDetails(model, randomNonNegativeInt()); }
);
}
}
// Null similarity in the original model should be replaced by the provider default.
public void testUpdateModelWithEmbeddingDetails_NullSimilarityInOriginalModel() throws IOException {
testUpdateModelWithEmbeddingDetails_Successful(null);
}
// An explicitly configured similarity must be preserved through the update.
public void testUpdateModelWithEmbeddingDetails_NonNullSimilarityInOriginalModel() throws IOException {
testUpdateModelWithEmbeddingDetails_Successful(randomFrom(SimilarityMeasure.values()));
}
// Shared driver: updating an embeddings model with a discovered embedding size must set
// that size, and must fill a null similarity with the provider's default while keeping an
// explicit one unchanged.
private void testUpdateModelWithEmbeddingDetails_Successful(SimilarityMeasure similarityMeasure) throws IOException {
var sender = createMockSender();
var factory = mock(HttpRequestSender.Factory.class);
when(factory.createSender()).thenReturn(sender);
var amazonBedrockFactory = new AmazonBedrockMockRequestSender.Factory(
ServiceComponentsTests.createWithSettings(threadPool, Settings.EMPTY),
mockClusterServiceEmpty()
);
try (
var service = new AmazonBedrockService(
factory,
amazonBedrockFactory,
createWithEmptySettings(threadPool),
mockClusterServiceEmpty()
)
) {
var embeddingSize = randomNonNegativeInt();
var provider = randomFrom(AmazonBedrockProvider.values());
var model = AmazonBedrockEmbeddingsModelTests.createModel(
randomAlphaOfLength(10),
randomAlphaOfLength(10),
randomAlphaOfLength(10),
provider,
randomNonNegativeInt(),
randomBoolean(),
randomNonNegativeInt(),
similarityMeasure,
RateLimitSettingsTests.createRandom(),
createRandomChunkingSettings(),
randomAlphaOfLength(10),
randomAlphaOfLength(10)
);
Model updatedModel = service.updateModelWithEmbeddingDetails(model, embeddingSize);
SimilarityMeasure expectedSimilarityMeasure = similarityMeasure == null
? getProviderDefaultSimilarityMeasure(provider)
: similarityMeasure;
assertEquals(expectedSimilarityMeasure, updatedModel.getServiceSettings().similarity());
assertEquals(embeddingSize, updatedModel.getServiceSettings().dimensions().intValue());
}
}
// An AWS auth failure (BedrockRuntimeException) queued on the mock sender must propagate
// through infer() as an ElasticsearchException whose cause carries the AWS message.
public void testInfer_UnauthorizedResponse() throws IOException {
var sender = createMockSender();
var factory = mock(HttpRequestSender.Factory.class);
when(factory.createSender()).thenReturn(sender);
var amazonBedrockFactory = new AmazonBedrockMockRequestSender.Factory(
ServiceComponentsTests.createWithSettings(threadPool, Settings.EMPTY),
mockClusterServiceEmpty()
);
try (
var service = new AmazonBedrockService(
factory,
amazonBedrockFactory,
createWithEmptySettings(threadPool),
mockClusterServiceEmpty()
);
var requestSender = (AmazonBedrockMockRequestSender) amazonBedrockFactory.createSender()
) {
requestSender.enqueue(
BedrockRuntimeException.builder().message("The security token included in the request is invalid").build()
);
var model = AmazonBedrockEmbeddingsModelTests.createModel(
"id",
"us-east-1",
"amazon.titan-embed-text-v1",
AmazonBedrockProvider.AMAZONTITAN,
"_INVALID_AWS_ACCESS_KEY_",
"_INVALID_AWS_SECRET_KEY_"
);
PlainActionFuture<InferenceServiceResults> listener = new PlainActionFuture<>();
service.infer(
model,
null,
null,
null,
List.of("abc"),
false,
new HashMap<>(),
InputType.INTERNAL_INGEST,
InferenceAction.Request.DEFAULT_TIMEOUT,
listener
);
var exceptionThrown = assertThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT));
assertThat(exceptionThrown.getCause().getMessage(), containsString("The security token included in the request is invalid"));
}
}
// Only COMPLETION supports streaming for this service; TaskType.ANY must not stream.
public void testSupportsStreaming() throws IOException {
try (var service = new AmazonBedrockService(mock(), mock(), createWithEmptySettings(mock()), mockClusterServiceEmpty())) {
assertThat(service.supportedStreamingTasks(), is(EnumSet.of(TaskType.COMPLETION)));
assertFalse(service.canStream(TaskType.ANY));
}
}
// Chunked inference must work when the model carries explicit chunking settings.
public void testChunkedInfer_ChunkingSettingsSet() throws IOException {
    testChunkedInfer(
        AmazonBedrockEmbeddingsModelTests.createModel(
            "id",
            "region",
            "model",
            AmazonBedrockProvider.AMAZONTITAN,
            createRandomChunkingSettings(),
            "access",
            "secret"
        )
    );
}
// Chunked inference must also work when the model has no chunking settings (null),
// i.e. the service falls back to default chunking behavior.
public void testChunkedInfer_ChunkingSettingsNotSet() throws IOException {
    testChunkedInfer(
        AmazonBedrockEmbeddingsModelTests.createModel(
            "id",
            "region",
            "model",
            AmazonBedrockProvider.AMAZONTITAN,
            null,
            "access",
            "secret"
        )
    );
}
// Shared driver for the chunked-infer tests: two single-chunk inputs ("a" and "bb") are
// sent, two mock embedding responses are queued, and each result must carry the matching
// embedding plus the correct text offset for its input.
private void testChunkedInfer(AmazonBedrockEmbeddingsModel model) throws IOException {
var sender = createMockSender();
var factory = mock(HttpRequestSender.Factory.class);
when(factory.createSender()).thenReturn(sender);
var amazonBedrockFactory = new AmazonBedrockMockRequestSender.Factory(
ServiceComponentsTests.createWithSettings(threadPool, Settings.EMPTY),
mockClusterServiceEmpty()
);
try (
var service = new AmazonBedrockService(
factory,
amazonBedrockFactory,
createWithEmptySettings(threadPool),
mockClusterServiceEmpty()
)
) {
try (var requestSender = (AmazonBedrockMockRequestSender) amazonBedrockFactory.createSender()) {
// One mock response per input, in request order.
{
var mockResults1 = new DenseEmbeddingFloatResults(
List.of(new DenseEmbeddingFloatResults.Embedding(new float[] { 0.123F, 0.678F }))
);
requestSender.enqueue(mockResults1);
}
{
var mockResults2 = new DenseEmbeddingFloatResults(
List.of(new DenseEmbeddingFloatResults.Embedding(new float[] { 0.223F, 0.278F }))
);
requestSender.enqueue(mockResults2);
}
PlainActionFuture<List<ChunkedInference>> listener = new PlainActionFuture<>();
service.chunkedInfer(
model,
null,
List.of(new ChunkInferenceInput("a"), new ChunkInferenceInput("bb")),
new HashMap<>(),
InputType.INTERNAL_INGEST,
InferenceAction.Request.DEFAULT_TIMEOUT,
listener
);
var results = listener.actionGet(TIMEOUT);
assertThat(results, hasSize(2));
{
assertThat(results.get(0), CoreMatchers.instanceOf(ChunkedInferenceEmbedding.class));
var floatResult = (ChunkedInferenceEmbedding) results.get(0);
assertThat(floatResult.chunks(), hasSize(1));
assertEquals(new ChunkedInference.TextOffset(0, 1), floatResult.chunks().get(0).offset());
assertThat(floatResult.chunks().get(0).embedding(), instanceOf(DenseEmbeddingFloatResults.Embedding.class));
assertArrayEquals(
new float[] { 0.123F, 0.678F },
((DenseEmbeddingFloatResults.Embedding) floatResult.chunks().get(0).embedding()).values(),
0.0f
);
}
{
assertThat(results.get(1), CoreMatchers.instanceOf(ChunkedInferenceEmbedding.class));
var floatResult = (ChunkedInferenceEmbedding) results.get(1);
assertThat(floatResult.chunks(), hasSize(1));
assertEquals(new ChunkedInference.TextOffset(0, 2), floatResult.chunks().get(0).offset());
assertThat(floatResult.chunks().get(0).embedding(), instanceOf(DenseEmbeddingFloatResults.Embedding.class));
assertArrayEquals(
new float[] { 0.223F, 0.278F },
((DenseEmbeddingFloatResults.Embedding) floatResult.chunks().get(0).embedding()).values(),
0.0f
);
}
}
}
}
// Builds a service whose HTTP sender factory is a bare mock and whose Bedrock request
// sender is the test double, for tests that never exercise the HTTP path.
private AmazonBedrockService createAmazonBedrockService() {
var amazonBedrockFactory = new AmazonBedrockMockRequestSender.Factory(
ServiceComponentsTests.createWithSettings(threadPool, Settings.EMPTY),
mockClusterServiceEmpty()
);
return new AmazonBedrockService(
mock(HttpRequestSender.Factory.class),
amazonBedrockFactory,
createWithEmptySettings(threadPool),
mockClusterServiceEmpty()
);
}
// Hook for the shared base-class tests: supply this service implementation.
@Override
public InferenceService createInferenceService() {
return createAmazonBedrockService();
}
// Builds a request config map that additionally carries a chunking_settings section.
private Map<String, Object> getRequestConfigMap(
    Map<String, Object> serviceSettings,
    Map<String, Object> taskSettings,
    Map<String, Object> chunkingSettings,
    Map<String, Object> secretSettings
) {
    // Delegate to the three-argument overload, then layer the chunking settings on top.
    Map<String, Object> config = getRequestConfigMap(serviceSettings, taskSettings, secretSettings);
    config.put(ModelConfigurations.CHUNKING_SETTINGS, chunkingSettings);
    return config;
}
// Builds the request config map handed to parseRequestConfig: secrets are merged into the
// service_settings section, exactly as they arrive over the REST API.
private Map<String, Object> getRequestConfigMap(
    Map<String, Object> serviceSettings,
    Map<String, Object> taskSettings,
    Map<String, Object> secretSettings
) {
    // Fix: `var x = new HashMap<>()` infers HashMap<Object, Object>; type the merged map
    // explicitly. Copy-constructing from serviceSettings is equivalent to putAll.
    Map<String, Object> builtServiceSettings = new HashMap<>(serviceSettings);
    builtServiceSettings.putAll(secretSettings);
    // Mutable outer map so callers can inject extra/unknown keys.
    return new HashMap<>(
        Map.of(ModelConfigurations.SERVICE_SETTINGS, builtServiceSettings, ModelConfigurations.TASK_SETTINGS, taskSettings)
    );
}
// Builds a persisted config that additionally carries a chunking_settings section.
private Utils.PersistedConfig getPersistedConfigMap(
    Map<String, Object> serviceSettings,
    Map<String, Object> taskSettings,
    Map<String, Object> chunkingSettings,
    Map<String, Object> secretSettings
) {
    // Reuse the three-argument overload and attach the chunking settings to its config map.
    var persisted = getPersistedConfigMap(serviceSettings, taskSettings, secretSettings);
    persisted.config().put(ModelConfigurations.CHUNKING_SETTINGS, chunkingSettings);
    return persisted;
}
// Builds a persisted (config, secrets) pair as stored in the internal inference index.
// Both maps are mutable so individual tests can inject extra/unknown keys.
private Utils.PersistedConfig getPersistedConfigMap(
    Map<String, Object> serviceSettings,
    Map<String, Object> taskSettings,
    Map<String, Object> secretSettings
) {
    var config = new HashMap<String, Object>();
    config.put(ModelConfigurations.SERVICE_SETTINGS, serviceSettings);
    config.put(ModelConfigurations.TASK_SETTINGS, taskSettings);
    var secrets = new HashMap<String, Object>();
    secrets.put(ModelSecrets.SECRET_SETTINGS, secretSettings);
    return new Utils.PersistedConfig(config, secrets);
}
}
| AmazonBedrockServiceTests |
java | quarkusio__quarkus | core/runtime/src/main/java/io/quarkus/runtime/Application.java | {
"start": 914,
"end": 9872
} | class ____ implements Closeable {
// WARNING: do not inject a logger here, it's too early: the log manager has not been properly set up yet
/**
* The name of the generated application class
*/
public static final String APP_CLASS_NAME = "io.quarkus.runner.ApplicationImpl";
// Lifecycle states. Legal transitions: INITIAL -> STARTING -> (STARTED | STOPPED on
// startup failure), STARTED -> STOPPING -> STOPPED. Guarded by stateLock; every
// transition signals stateCond so waiters re-check.
private static final int ST_INITIAL = 0;
private static final int ST_STARTING = 1;
private static final int ST_STARTED = 2;
private static final int ST_STOPPING = 3;
private static final int ST_STOPPED = 4;
private final Lock stateLock = Locks.reentrantLock();
private final Condition stateCond = stateLock.newCondition();
// Current lifecycle state; read/written under stateLock (isStarted() reads it unlocked
// as a best-effort snapshot).
private int state = ST_INITIAL;
protected static volatile Application currentApplication;
/**
* Embedded applications don't set up or modify logging, and don't provide start/
* stop notifications to the {@link ApplicationStateNotification}.
*/
private final boolean auxiliaryApplication;
/**
* Construct a new instance.
*
* @param auxiliaryApplication
*/
protected Application(boolean auxiliaryApplication) {
this.auxiliaryApplication = auxiliaryApplication;
}
/**
* Start the application. If another thread is also trying to start the application, this method waits for that
* thread to finish starting. Returns immediately if the application is started already. If the application
* fails to start, an exception is thrown.
*
* @param args the command-line arguments
* @implNote The command line args are not yet used, but at some point we'll want a facility for overriding config and/or
* letting the user hook into it.
*/
public final void start(String[] args) {
if (!auxiliaryApplication) {
currentApplication = this;
}
final Lock stateLock = this.stateLock;
stateLock.lock();
try {
// Claim the STARTING slot, or wait for / defer to another thread's outcome.
loop: for (;;)
switch (state) {
case ST_INITIAL:
break loop; // normal startup
case ST_STARTING: {
try {
stateCond.await();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw interruptedOnAwaitStart();
}
break;
}
case ST_STARTED:
return; // all good
default: {
throw new IllegalStateException("The application is stopping");
}
}
state = ST_STARTING;
} finally {
stateLock.unlock();
}
// doStart runs outside the lock so waiters can block on stateCond meanwhile.
try {
doStart(args);
} catch (Throwable t) {
// Startup failed: release any installed config, mark STOPPED, wake waiters,
// notify the failure, then rethrow the original error.
stateLock.lock();
final ConfigProviderResolver cpr = ConfigProviderResolver.instance();
try {
cpr.releaseConfig(cpr.getConfig());
} catch (IllegalStateException ignored) {
// just means no config was installed, which is fine
}
try {
state = ST_STOPPED;
stateCond.signalAll();
} finally {
stateLock.unlock();
}
if (!auxiliaryApplication) {
ApplicationStateNotification.notifyStartupFailed(t);
}
throw t;
}
stateLock.lock();
try {
state = ST_STARTED;
stateCond.signalAll();
if (!auxiliaryApplication) {
ApplicationStateNotification.notifyStartupComplete();
}
} finally {
stateLock.unlock();
}
}
// Subclass hook performing the actual startup work; invoked without holding stateLock.
protected abstract void doStart(String[] args);
public final void close() {
try {
stop();
} finally {
// Best-effort release of the TCCL-associated config even if stop() failed.
try {
ConfigProviderResolver.instance()
.releaseConfig(
ConfigProviderResolver.instance().getConfig(Thread.currentThread().getContextClassLoader()));
} catch (Throwable ignored) {
}
}
}
/**
* Stop the application. If another thread is also trying to stop the application, this method waits for that
* thread to finish. Returns immediately if the application is already stopped. If an exception is thrown during
* stop, that exception is propagated.
*/
public final void stop() {
stop(null);
}
/**
* Stop the application. If another thread is also trying to stop the application, this method waits for that
* thread to finish. Returns immediately if the application is already stopped. If an exception is thrown during
* stop, that exception is propagated.
*/
public final void stop(Runnable afterStopTask) {
Logger logger = Logger.getLogger(Application.class);
logger.debugf("Stopping application");
if (logger.isTraceEnabled()) {
logger.tracef(new RuntimeException("Application Stop Stack Trace"), "Application shutting down");
}
final Lock stateLock = this.stateLock;
stateLock.lock();
try {
// Claim the STOPPING slot; wait out concurrent start/stop attempts first.
loop: for (;;)
switch (state) {
case ST_INITIAL:
throw new IllegalStateException("The application has not been started");
case ST_STARTING: {
try {
stateCond.await();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw interruptedOnAwaitStart();
}
break;
}
case ST_STARTED:
break loop; // normal shutdown
case ST_STOPPING: {
try {
stateCond.await();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw interruptedOnAwaitStop();
}
break;
}
case ST_STOPPED:
return; // all good
default:
throw Assert.impossibleSwitchCase(state);
}
state = ST_STOPPING;
} finally {
stateLock.unlock();
}
Timing.staticInitStopped(auxiliaryApplication);
try {
ShutdownRecorder.runShutdown();
doStop();
} finally {
// Always reach ST_STOPPED and wake waiters, even if doStop() threw.
if (!auxiliaryApplication) {
currentApplication = null;
}
if (afterStopTask != null) {
try {
afterStopTask.run();
} catch (Throwable t) {
Logger.getLogger(Application.class).error("Failed to run stop task", t);
}
}
stateLock.lock();
try {
state = ST_STOPPED;
//note that at the moment if these are started or stopped concurrently
//the timing will be off
Timing.printStopTime(getName(), auxiliaryApplication);
stateCond.signalAll();
if (!auxiliaryApplication) {
ApplicationStateNotification.notifyApplicationStopped();
}
} finally {
stateLock.unlock();
}
}
}
public static Application currentApplication() {
return currentApplication;
}
// Subclass hook performing the actual shutdown work; invoked without holding stateLock.
protected abstract void doStop();
public abstract String getName();
private static IllegalStateException interruptedOnAwaitStart() {
return new IllegalStateException("Interrupted while waiting for another thread to start the application");
}
private static IllegalStateException interruptedOnAwaitStop() {
return new IllegalStateException("Interrupted while waiting for another thread to stop the application");
}
// Blocks the caller until the application reaches ST_STOPPED.
public void awaitShutdown() {
final Lock stateLock = this.stateLock;
stateLock.lock();
try {
for (;;) {
if (state == ST_STOPPED) {
return; // all good
} else {
try {
stateCond.await();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw interruptedOnAwaitStop();
}
}
}
} finally {
stateLock.unlock();
}
}
// Lock-free snapshot; may be momentarily stale relative to concurrent start/stop.
public boolean isStarted() {
return state == ST_STARTED;
}
}
| Application |
java | google__error-prone | core/src/test/java/com/google/errorprone/matchers/AnnotationMatcherTest.java | {
"start": 1519,
"end": 1797
} | class ____ extends CompilerBasedAbstractTest {
final List<ScannerTest> tests = new ArrayList<>();
@Before
public void setUp() {
tests.clear();
writeFile(
"SampleAnnotation1.java",
"""
package com.google;
public @ | AnnotationMatcherTest |
java | quarkusio__quarkus | extensions/jdbc/jdbc-mariadb/runtime/src/main/java/io/quarkus/jdbc/mariadb/runtime/MariaDBServiceBindingConverter.java | {
"start": 795,
"end": 1158
} | class ____
extends DatasourceServiceBindingConfigSourceFactory.Jdbc {
// Specializes the generic JDBC service-binding URL formatter for MariaDB: the db-kind
// segment is forced to "mariadb" regardless of the type advertised by the binding.
@Override
protected String formatUrl(String urlFormat, String type, String host, String database, String portPart) {
return super.formatUrl(urlFormat, "mariadb", host, database, portPart);
}
}
}
| MariaDBDatasourceServiceBindingConfigSourceFactory |
java | spring-projects__spring-framework | spring-orm/src/test/java/org/springframework/orm/jpa/domain/Person.java | {
"start": 1317,
"end": 2658
} | class ____ {
// JPA test entity. Column mapping relies on these exact field names (first_name,
// last_name), so the fields are deliberately not camelCased.
@Id
@GeneratedValue(strategy = GenerationType.AUTO)
private Integer id;
// Not persisted (transient); carried alongside the entity for test verification.
private transient TestBean testBean;
// Lazy relationship to force use of instrumentation in JPA implementation.
@OneToOne(fetch = FetchType.LAZY, cascade = CascadeType.PERSIST)
@JoinColumn(name = "DRIVERS_LICENSE_ID")
private DriversLicense driversLicense;
private String first_name;
// Lazy basic attribute: only loaded on access when the provider supports bytecode
// enhancement for basic fetch groups.
@Basic(fetch = FetchType.LAZY)
private String last_name;
// NOTE(review): presumably assigned by a post-load lifecycle callback/listener — confirm
// against the accompanying entity-listener configuration.
public transient ApplicationContext postLoaded;
public Integer getId() {
return id;
}
public void setTestBean(TestBean testBean) {
this.testBean = testBean;
}
public TestBean getTestBean() {
return testBean;
}
public void setFirstName(String firstName) {
this.first_name = firstName;
}
public String getFirstName() {
return this.first_name;
}
public void setLastName(String lastName) {
this.last_name = lastName;
}
public String getLastName() {
return this.last_name;
}
public void setDriversLicense(DriversLicense driversLicense) {
this.driversLicense = driversLicense;
}
public DriversLicense getDriversLicense() {
return this.driversLicense;
}
@Override
public String toString() {
return getClass().getName() + ":(" + hashCode() + ") id=" + id + "; firstName=" + first_name +
"; lastName=" + last_name + "; testBean=" + testBean;
}
}
| Person |
java | apache__camel | components/camel-google/camel-google-bigquery/src/generated/java/org/apache/camel/component/google/bigquery/GoogleBigQueryEndpointConfigurer.java | {
"start": 742,
"end": 3421
} | class ____ extends PropertyConfigurerSupport implements GeneratedPropertyConfigurer, PropertyConfigurerGetter {
@Override
public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
GoogleBigQueryEndpoint target = (GoogleBigQueryEndpoint) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "connectionfactory":
case "connectionFactory": target.getConfiguration().setConnectionFactory(property(camelContext, org.apache.camel.component.google.bigquery.GoogleBigQueryConnectionFactory.class, value)); return true;
case "lazystartproducer":
case "lazyStartProducer": target.setLazyStartProducer(property(camelContext, boolean.class, value)); return true;
case "serviceaccountkey":
case "serviceAccountKey": target.getConfiguration().setServiceAccountKey(property(camelContext, java.lang.String.class, value)); return true;
case "useasinsertid":
case "useAsInsertId": target.getConfiguration().setUseAsInsertId(property(camelContext, java.lang.String.class, value)); return true;
default: return false;
}
}
@Override
public String[] getAutowiredNames() {
return new String[]{"connectionFactory"};
}
@Override
public Class<?> getOptionType(String name, boolean ignoreCase) {
switch (ignoreCase ? name.toLowerCase() : name) {
case "connectionfactory":
case "connectionFactory": return org.apache.camel.component.google.bigquery.GoogleBigQueryConnectionFactory.class;
case "lazystartproducer":
case "lazyStartProducer": return boolean.class;
case "serviceaccountkey":
case "serviceAccountKey": return java.lang.String.class;
case "useasinsertid":
case "useAsInsertId": return java.lang.String.class;
default: return null;
}
}
@Override
public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
GoogleBigQueryEndpoint target = (GoogleBigQueryEndpoint) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "connectionfactory":
case "connectionFactory": return target.getConfiguration().getConnectionFactory();
case "lazystartproducer":
case "lazyStartProducer": return target.isLazyStartProducer();
case "serviceaccountkey":
case "serviceAccountKey": return target.getConfiguration().getServiceAccountKey();
case "useasinsertid":
case "useAsInsertId": return target.getConfiguration().getUseAsInsertId();
default: return null;
}
}
}
| GoogleBigQueryEndpointConfigurer |
java | apache__flink | flink-yarn/src/test/java/org/apache/flink/yarn/YarnResourceManagerDriverTest.java | {
"start": 4574,
"end": 34242
} | class ____ extends ResourceManagerDriverTestBase<YarnWorkerNode> {
private static final Logger log = LoggerFactory.getLogger(YarnResourceManagerDriverTest.class);
private static final Resource testingResource =
Resource.newInstance(
YarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_MB,
YarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_VCORES);
private static final Priority testingPriority = Priority.newInstance(1);
private static final Container testingContainer =
createTestingContainerWithResource(testingResource, testingPriority, 1);
private static final TaskExecutorProcessSpec testingTaskExecutorProcessSpec =
new TaskExecutorProcessSpec(
new CPUResource(1),
MemorySize.ZERO,
MemorySize.ZERO,
MemorySize.ofMebiBytes(256),
MemorySize.ofMebiBytes(256),
MemorySize.ofMebiBytes(256),
MemorySize.ofMebiBytes(256),
MemorySize.ZERO,
MemorySize.ZERO,
Collections.emptyList());
@TempDir private java.nio.file.Path tmpFolder;
@Override
protected Context createContext() {
return new Context();
}
@Test
void testRunAsyncCausesFatalError() throws Exception {
new Context() {
{
final String exceptionMessage = "runAsyncCausesFatalError";
addContainerRequestFutures.add(CompletableFuture.completedFuture(null));
testingYarnAMRMClientAsyncBuilder.setGetMatchingRequestsFunction(
ignored -> {
throw new RuntimeException(exceptionMessage);
});
final CompletableFuture<Throwable> throwableCompletableFuture =
new CompletableFuture<>();
resourceEventHandlerBuilder.setOnErrorConsumer(
throwableCompletableFuture::complete);
runTest(
() -> {
runInMainThread(
() ->
getDriver()
.requestResource(
testingTaskExecutorProcessSpec));
resourceManagerClientCallbackHandler.onContainersAllocated(
ImmutableList.of(testingContainer));
Throwable t =
throwableCompletableFuture.get(TIMEOUT_SEC, TimeUnit.SECONDS);
final Optional<RuntimeException> optionalCause =
ExceptionUtils.findThrowable(t, RuntimeException.class);
assertThat(optionalCause).isPresent();
assertThat(optionalCause.get()).hasMessage(exceptionMessage);
});
}
};
}
@Test
void testShutdownRequestCausesFatalError() throws Exception {
new Context() {
{
final CompletableFuture<Throwable> throwableCompletableFuture =
new CompletableFuture<>();
resourceEventHandlerBuilder.setOnErrorConsumer(
throwableCompletableFuture::complete);
runTest(
() -> {
resourceManagerClientCallbackHandler.onShutdownRequest();
Throwable throwable =
throwableCompletableFuture.get(TIMEOUT_SEC, TimeUnit.SECONDS);
assertThat(throwable)
.satisfies(anyCauseMatches(ResourceManagerException.class))
.satisfies(anyCauseMatches(ERROR_MESSAGE_ON_SHUTDOWN_REQUEST));
});
}
};
}
@Test
void testOnErrorCausesFatalError() throws Exception {
new Context() {
{
final CompletableFuture<Throwable> throwableCompletableFuture =
new CompletableFuture<>();
resourceEventHandlerBuilder.setOnErrorConsumer(
throwableCompletableFuture::complete);
Throwable expectedThrowable = new Exception("test");
runTest(
() -> {
resourceManagerClientCallbackHandler.onError(expectedThrowable);
Throwable actualThrowable =
throwableCompletableFuture.get(TIMEOUT_SEC, TimeUnit.SECONDS);
assertThat(actualThrowable).isEqualTo(expectedThrowable);
});
}
};
}
@Test
void testOnErrorAfterTerminationIgnored() throws Exception {
new Context() {
{
final CompletableFuture<Throwable> throwableCompletableFuture =
new CompletableFuture<>();
resourceEventHandlerBuilder.setOnErrorConsumer(
throwableCompletableFuture::complete);
Throwable expectedThrowable = new Exception("test");
runTest(
() -> {
getDriver().terminate();
resourceManagerClientCallbackHandler.onError(expectedThrowable);
assertThatThrownBy(
() ->
throwableCompletableFuture.get(
TIMEOUT_SHOULD_NOT_HAPPEN_MS,
TimeUnit.MILLISECONDS))
.isInstanceOf(TimeoutException.class);
});
}
};
}
@Test
void testTerminationDoesNotBlock() throws Exception {
new Context() {
{
runTest(
() -> {
try {
runInMainThread(() -> getDriver().terminate());
} catch (Exception ex) {
log.error("cannot terminate driver", ex);
fail("termination of driver failed");
}
});
}
};
}
@Test
void testTerminationWaitsOnContainerStopSuccess() throws Exception {
new Context() {
{
final CompletableFuture<ContainerId> containerIdFuture = new CompletableFuture<>();
testingYarnNMClientAsyncBuilder.setStopContainerAsyncConsumer(
(containerId, ignored, callbackHandler) ->
containerIdFuture.complete(containerId));
resetYarnNodeManagerClientFactory();
runTest(
() -> {
// acquire a resource so we have something to release
final CompletableFuture<YarnWorkerNode> yarnWorkerFuture =
runInMainThread(
() ->
getDriver()
.requestResource(
testingTaskExecutorProcessSpec))
.thenCompose(Function.identity());
resourceManagerClientCallbackHandler.onContainersAllocated(
ImmutableList.of(testingContainer));
final YarnWorkerNode worker = yarnWorkerFuture.get();
// release the resource -- it will be blocked
// terminate driver this should wait on the callback
final CompletableFuture<Void> driverHasTerminatedFuture =
runInMainThread(
() -> {
getDriver().releaseResource(worker);
getDriver().terminate();
});
assertThatThrownBy(
() ->
driverHasTerminatedFuture.get(
20, TimeUnit.MILLISECONDS))
.isInstanceOf(TimeoutException.class);
nodeManagerClientCallbackHandler.onContainerStopped(
containerIdFuture.get());
// wait for completion of termination
// if this blocks forever, then our implementation is wrong
driverHasTerminatedFuture.get();
});
}
};
}
@Test
void testTerminationWaitsOnContainerStopError() throws Exception {
new Context() {
{
final CompletableFuture<ContainerId> containerIdFuture = new CompletableFuture<>();
testingYarnNMClientAsyncBuilder.setStopContainerAsyncConsumer(
(containerId, ignored, callbackHandler) ->
containerIdFuture.complete(containerId));
resetYarnNodeManagerClientFactory();
runTest(
() -> {
// acquire a resource so we have something to release
final CompletableFuture<YarnWorkerNode> yarnWorkerFuture =
runInMainThread(
() ->
getDriver()
.requestResource(
testingTaskExecutorProcessSpec))
.thenCompose(Function.identity());
resourceManagerClientCallbackHandler.onContainersAllocated(
ImmutableList.of(testingContainer));
final YarnWorkerNode worker = yarnWorkerFuture.get();
// release the resource -- it will be blocked
// terminate driver this should wait on the callback
final CompletableFuture<Void> driverHasTerminatedFuture =
runInMainThread(
() -> {
getDriver().releaseResource(worker);
getDriver().terminate();
});
assertThatThrownBy(
() ->
driverHasTerminatedFuture.get(
20, TimeUnit.MILLISECONDS))
.isInstanceOf(TimeoutException.class);
nodeManagerClientCallbackHandler.onStopContainerError(
containerIdFuture.get(), null);
// wait for completion of termination
// if this blocks forever, then our implementation is wrong
driverHasTerminatedFuture.get();
});
}
};
}
/**
* Tests that application files are deleted when the YARN application master is de-registered.
*/
@Test
void testDeleteApplicationFiles() throws Exception {
new Context() {
{
final File applicationDir = Files.createTempDirectory(tmpFolder, ".flink").toFile();
env.put(FLINK_YARN_FILES, applicationDir.getCanonicalPath());
runTest(
() -> {
getDriver().deregisterApplication(ApplicationStatus.SUCCEEDED, null);
assertThat(applicationDir.toPath()).doesNotExist();
});
}
};
}
@Test
void testOnContainerAllocated() throws Exception {
new Context() {
{
addContainerRequestFutures.add(new CompletableFuture<>());
testingYarnAMRMClientAsyncBuilder.setAddContainerRequestConsumer(
(ignored1, ignored2) ->
addContainerRequestFutures
.get(
addContainerRequestFuturesNumCompleted
.getAndIncrement())
.complete(null));
runTest(
() -> {
runInMainThread(
() ->
getDriver()
.requestResource(
testingTaskExecutorProcessSpec));
resourceManagerClientCallbackHandler.onContainersAllocated(
ImmutableList.of(testingContainer));
verifyFutureCompleted(addContainerRequestFutures.get(0));
verifyFutureCompleted(removeContainerRequestFuture);
verifyFutureCompleted(startContainerAsyncFuture);
});
}
};
}
@Test
void testCancelRequestedResource() throws Exception {
new Context() {
{
addContainerRequestFutures.add(new CompletableFuture<>());
testingYarnAMRMClientAsyncBuilder.setAddContainerRequestConsumer(
(ignored1, ignored2) ->
addContainerRequestFutures
.get(
addContainerRequestFuturesNumCompleted
.getAndIncrement())
.complete(null));
runTest(
() -> {
runInMainThread(
() -> {
CompletableFuture<YarnWorkerNode> requestFuture =
getDriver()
.requestResource(
testingTaskExecutorProcessSpec);
requestFuture.cancel(true);
});
verifyFutureCompleted(addContainerRequestFutures.get(0));
verifyFutureCompleted(removeContainerRequestFuture);
assertThat(startContainerAsyncFuture.isDone()).isFalse();
});
}
};
}
@Test
void testUpdateBlocklist() throws Exception {
new Context() {
{
final Set<String> yarnReceivedBlocklist = new HashSet<>();
testingYarnAMRMClientAsyncBuilder.setUpdateBlocklistConsumer(
(additions, removals) -> {
if (additions != null) {
yarnReceivedBlocklist.addAll(additions);
}
if (removals != null) {
yarnReceivedBlocklist.removeAll(removals);
}
});
final Set<String> blockedNodes = new HashSet<>();
setBlockedNodeRetriever(() -> blockedNodes);
runTest(
() -> {
blockedNodes.addAll(Arrays.asList("node1", "node2", "node3"));
runInMainThread(
() ->
getDriver()
.requestResource(
TASK_EXECUTOR_PROCESS_SPEC))
.get();
assertThat(yarnReceivedBlocklist)
.containsExactlyInAnyOrder("node1", "node2", "node3");
blockedNodes.remove("node1");
runInMainThread(
() ->
getDriver()
.requestResource(
TASK_EXECUTOR_PROCESS_SPEC))
.get();
assertThat(yarnReceivedBlocklist)
.containsExactlyInAnyOrder("node2", "node3");
});
}
};
}
@Test
void testOnSuccessfulContainerCompleted() throws Exception {
runTestOnContainerCompleted(createSuccessfulCompletedContainerStatus());
}
@Test
void testOnContainerCompletedBecauseDisksFailed() throws Exception {
runTestOnContainerCompleted(createCompletedContainerStatusBecauseDisksFailed());
}
@Test
void testOnContainerCompletedBecauseItWasAborted() throws Exception {
runTestOnContainerCompleted(createCompletedContainerStatusBecauseItWasAborted());
}
@Test
void testOnContainerCompletedBecauseItWasInvalid() throws Exception {
runTestOnContainerCompleted(createCompletedContainerStatusBecauseItWasInvalid());
}
@Test
void testOnContainerCompletedForUnknownCause() throws Exception {
runTestOnContainerCompleted(createCompletedContainerStatusForUnknownCause());
}
@Test
void testOnContainerCompletedBecauseItWasPreempted() throws Exception {
runTestOnContainerCompleted(createCompletedContainerStatusBecauseItWasPreempted());
}
void runTestOnContainerCompleted(ContainerStatus completedContainerStatus) throws Exception {
new Context() {
{
addContainerRequestFutures.add(new CompletableFuture<>());
addContainerRequestFutures.add(new CompletableFuture<>());
testingYarnAMRMClientAsyncBuilder.setAddContainerRequestConsumer(
(ignored1, ignored2) ->
addContainerRequestFutures
.get(
addContainerRequestFuturesNumCompleted
.getAndIncrement())
.complete(null));
resourceEventHandlerBuilder.setOnWorkerTerminatedConsumer(
(ignore1, ignore2) ->
getDriver().requestResource(testingTaskExecutorProcessSpec));
runTest(
() -> {
runInMainThread(
() ->
getDriver()
.requestResource(
testingTaskExecutorProcessSpec));
resourceManagerClientCallbackHandler.onContainersAllocated(
ImmutableList.of(testingContainer));
resourceManagerClientCallbackHandler.onContainersCompleted(
ImmutableList.of(completedContainerStatus));
verifyFutureCompleted(addContainerRequestFutures.get(1));
});
}
};
}
@Test
void testOnStartContainerError() throws Exception {
new Context() {
{
addContainerRequestFutures.add(new CompletableFuture<>());
addContainerRequestFutures.add(new CompletableFuture<>());
testingYarnAMRMClientAsyncBuilder.setAddContainerRequestConsumer(
(ignored1, ignored2) ->
addContainerRequestFutures
.get(
addContainerRequestFuturesNumCompleted
.getAndIncrement())
.complete(null));
resourceEventHandlerBuilder.setOnWorkerTerminatedConsumer(
(ignore1, ignore2) ->
getDriver().requestResource(testingTaskExecutorProcessSpec));
runTest(
() -> {
runInMainThread(
() ->
getDriver()
.requestResource(
testingTaskExecutorProcessSpec));
resourceManagerClientCallbackHandler.onContainersAllocated(
ImmutableList.of(testingContainer));
nodeManagerClientCallbackHandler.onStartContainerError(
testingContainer.getId(), new Exception("start error"));
verifyFutureCompleted(releaseAssignedContainerFuture);
verifyFutureCompleted(addContainerRequestFutures.get(1));
});
}
};
}
@Test
void testStartWorkerVariousSpec() throws Exception {
final TaskExecutorProcessSpec taskExecutorProcessSpec1 =
new TaskExecutorProcessSpec(
new CPUResource(1),
MemorySize.ZERO,
MemorySize.ZERO,
MemorySize.ofMebiBytes(50),
MemorySize.ofMebiBytes(50),
MemorySize.ofMebiBytes(50),
MemorySize.ofMebiBytes(50),
MemorySize.ZERO,
MemorySize.ZERO,
Collections.emptyList());
final TaskExecutorProcessSpec taskExecutorProcessSpec2 =
new TaskExecutorProcessSpec(
new CPUResource(2),
MemorySize.ZERO,
MemorySize.ZERO,
MemorySize.ofMebiBytes(500),
MemorySize.ofMebiBytes(500),
MemorySize.ofMebiBytes(500),
MemorySize.ofMebiBytes(500),
MemorySize.ZERO,
MemorySize.ZERO,
Collections.emptyList());
new Context() {
{
final String startCommand1 =
TaskManagerOptions.TASK_HEAP_MEMORY.key() + "=" + (50L << 20);
final String startCommand2 =
TaskManagerOptions.TASK_HEAP_MEMORY.key() + "=" + (100L << 20);
final CompletableFuture<Void> startContainerAsyncCommandFuture1 =
new CompletableFuture<>();
final CompletableFuture<Void> startContainerAsyncCommandFuture2 =
new CompletableFuture<>();
prepareForTestStartTaskExecutorProcessVariousSpec(
startCommand1,
startCommand2,
startContainerAsyncCommandFuture1,
startContainerAsyncCommandFuture2,
taskExecutorProcessSpec1);
testingYarnAMRMClientAsyncBuilder.setGetMatchingRequestsFunction(
tuple -> {
final Priority priority = tuple.f0;
final List<AMRMClient.ContainerRequest> matchingRequests =
new ArrayList<>();
for (CompletableFuture<AMRMClient.ContainerRequest>
addContainerRequestFuture : addContainerRequestFutures) {
final AMRMClient.ContainerRequest request =
addContainerRequestFuture.getNow(null);
if (request != null && priority.equals(request.getPriority())) {
assertThat(tuple.f2).isEqualTo(request.getCapability());
matchingRequests.add(request);
}
}
return Collections.singletonList(matchingRequests);
});
runTest(
() -> {
final Resource containerResource1 =
((YarnResourceManagerDriver) getDriver())
.getContainerResource(taskExecutorProcessSpec1)
.get();
final Resource containerResource2 =
((YarnResourceManagerDriver) getDriver())
.getContainerResource(taskExecutorProcessSpec2)
.get();
// Make sure two worker resource spec will be normalized to different
// container resources
assertThat(containerResource2).isNotEqualTo(containerResource1);
runInMainThread(
() -> getDriver().requestResource(taskExecutorProcessSpec1));
runInMainThread(
() -> getDriver().requestResource(taskExecutorProcessSpec2));
// Verify both containers requested
verifyFutureCompleted(addContainerRequestFutures.get(0));
verifyFutureCompleted(addContainerRequestFutures.get(1));
// Mock that container 1 is allocated
Container container1 =
createTestingContainerWithResource(containerResource1);
resourceManagerClientCallbackHandler.onContainersAllocated(
Collections.singletonList(container1));
// Verify that only worker with spec1 is started.
verifyFutureCompleted(startContainerAsyncCommandFuture1);
assertThat(startContainerAsyncCommandFuture2.isDone()).isFalse();
// Mock that container 1 is completed, while the worker is still pending
ContainerStatus testingContainerStatus =
createTestingContainerCompletedStatus(container1.getId());
resourceManagerClientCallbackHandler.onContainersCompleted(
Collections.singletonList(testingContainerStatus));
// Verify that only container 1 is requested again
verifyFutureCompleted(addContainerRequestFutures.get(2));
assertThat(addContainerRequestFutures.get(2).get().getCapability())
.isEqualTo(containerResource1);
assertThat(addContainerRequestFutures.get(3).isDone()).isFalse();
});
}
};
}
private boolean containsStartCommand(
ContainerLaunchContext containerLaunchContext, String command) {
return containerLaunchContext.getCommands().stream().anyMatch(str -> str.contains(command));
}
private static Container createTestingContainerWithResource(
Resource resource, Priority priority, int containerIdx) {
final ContainerId containerId =
ContainerId.newContainerId(
ApplicationAttemptId.newInstance(
ApplicationId.newInstance(System.currentTimeMillis(), 1), 1),
containerIdx);
final NodeId nodeId = NodeId.newInstance("container", 1234);
return new TestingContainer(containerId, nodeId, resource, priority);
}
private | YarnResourceManagerDriverTest |
java | apache__camel | components/camel-as2/camel-as2-api/src/main/java/org/apache/camel/component/as2/api/entity/ApplicationXMLEntity.java | {
"start": 993,
"end": 1456
} | class ____ extends ApplicationEntity {
public ApplicationXMLEntity(byte[] content, String charset, String contentTransferEncoding,
boolean isMainBody, String filename) {
super(content, ContentType.create(AS2MediaType.APPLICATION_XML, charset), contentTransferEncoding, isMainBody,
filename);
}
@Override
public void close() throws IOException {
// do nothing
}
}
| ApplicationXMLEntity |
java | quarkusio__quarkus | integration-tests/smallrye-context-propagation/src/test/java/io/quarkus/context/test/mutiny/MutinyTransactionalBean.java | {
"start": 396,
"end": 1274
} | class ____ {
@Inject
TransactionManager tm;
@Transactional(value = TxType.REQUIRES_NEW)
public void doInTx() {
Assertions.assertEquals(0, Person.count());
Person entity = new Person();
entity.name = "Stef";
entity.persist();
}
@Transactional(value = TxType.REQUIRES_NEW)
public Uni<String> doInTxUni() {
Assertions.assertEquals(0, Person.count());
Person entity = new Person();
entity.name = "Stef";
entity.persist();
return Uni.createFrom().item("OK");
}
@Transactional(value = TxType.REQUIRES_NEW)
public Multi<String> doInTxMulti() {
Assertions.assertEquals(0, Person.count());
Person entity = new Person();
entity.name = "Stef";
entity.persist();
return Multi.createFrom().items("OK");
}
}
| MutinyTransactionalBean |
java | apache__camel | components/camel-ibm/camel-ibm-cos/src/main/java/org/apache/camel/component/ibm/cos/IBMCOSOperations.java | {
"start": 885,
"end": 1100
} | enum ____ {
copyObject,
listObjects,
deleteObject,
deleteObjects,
deleteBucket,
listBuckets,
getObject,
getObjectRange,
headBucket,
createBucket,
putObject
}
| IBMCOSOperations |
java | spring-projects__spring-boot | module/spring-boot-jsonb/src/test/java/org/springframework/boot/jsonb/autoconfigure/JsonbAutoConfigurationTests.java | {
"start": 1034,
"end": 1506
} | class ____ {
private final ApplicationContextRunner contextRunner = new ApplicationContextRunner()
.withConfiguration(AutoConfigurations.of(JsonbAutoConfiguration.class));
@Test
void jsonbRegistration() {
this.contextRunner.run((context) -> {
assertThat(context).hasSingleBean(Jsonb.class);
Jsonb jsonb = context.getBean(Jsonb.class);
assertThat(jsonb.toJson(new DataObject())).isEqualTo("{\"data\":\"hello\"}");
});
}
public | JsonbAutoConfigurationTests |
java | apache__camel | components/camel-jcache/src/main/java/org/apache/camel/component/jcache/JCacheProviders.java | {
"start": 854,
"end": 2389
} | enum ____ implements JCacheProvider {
hazelcast {
{
shortName = "hazelcast";
className = "com.hazelcast.cache.HazelcastCachingProvider";
}
},
ehcache {
{
shortName = "ehcache";
className = "org.ehcache.jsr107.EhcacheCachingProvider";
}
},
caffeine {
{
shortName = "caffeine";
className = "com.github.benmanes.caffeine.jcache.spi.CaffeineCachingProvider";
}
},
ispnEmbedded {
{
shortName = "infinispan-embedded";
className = "org.infinispan.jcache.embedded.JCachingProvider";
}
};
protected String shortName;
protected String className;
@Override
public String shortName() {
return shortName;
}
@Override
public String className() {
return className;
}
public static JCacheProvider lookup(String providerName) {
if (providerName != null) {
for (JCacheProvider provider : values()) {
if (provider.shortName().equals(providerName) || provider.className().equals(providerName)) {
return provider;
}
}
}
return new JCacheProvider() {
@Override
public String shortName() {
return providerName;
}
@Override
public String className() {
return providerName;
}
};
}
}
| JCacheProviders |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/boot/model/internal/IdBagBinder.java | {
"start": 1213,
"end": 4169
} | class ____ extends BagBinder {
public IdBagBinder(
Supplier<ManagedBean<? extends UserCollectionType>> customTypeBeanResolver,
MetadataBuildingContext buildingContext) {
super( customTypeBeanResolver, buildingContext );
}
@Override
protected Collection createCollection(PersistentClass owner) {
return new IdentifierBag( getCustomTypeBeanResolver(), owner, getBuildingContext() );
}
@Override
protected boolean bindStarToManySecondPass(Map<String, PersistentClass> persistentClasses) {
final boolean result = super.bindStarToManySecondPass( persistentClasses );
final var collectionIdAnn = property.getDirectAnnotationUsage( CollectionId.class );
if ( collectionIdAnn == null ) {
throw new MappingException( "idbag mapping missing '@CollectionId' annotation" );
}
final var propertyData = new WrappedInferredData(
new PropertyInferredData(
null,
declaringClass,
property,
//default access should not be useful
null,
buildingContext
),
"id"
);
final var idColumns = AnnotatedColumn.buildColumnsFromAnnotations(
new Column[]{collectionIdAnn.column()},
// null,
null,
Nullability.FORCED_NOT_NULL,
propertyHolder,
propertyData,
Collections.emptyMap(),
buildingContext
);
// we need to make sure all id columns must be not-null.
for ( var idColumn : idColumns.getColumns() ) {
idColumn.setNullable( false );
}
final var idValueBinder = new BasicValueBinder( COLLECTION_ID, buildingContext );
idValueBinder.setTable( collection.getCollectionTable() );
idValueBinder.setColumns( idColumns );
idValueBinder.setType( property, getElementType() );
final BasicValue id = idValueBinder.make();
( (IdentifierCollection) collection ).setIdentifier( id );
final String generator = collectionIdAnn.generator();
checkLegalCollectionIdStrategy( generator );
if ( isGlobalGeneratorNameGlobal( buildingContext ) ) {
buildingContext.getMetadataCollector()
.addSecondPass( new IdBagIdGeneratorResolverSecondPass(
id,
property,
generator,
generatorName( generator ),
getBuildingContext()
) );
}
else {
makeIdGenerator(
id,
property,
generator,
generatorName( generator ),
getBuildingContext(),
localGenerators
);
}
return result;
}
private static String generatorName(String generator) {
return switch ( generator ) {
case "sequence", "increment" -> "";
default -> generator;
};
}
private static void checkLegalCollectionIdStrategy(String namedGenerator) {
switch ( namedGenerator ) {
case "identity":
throw new MappingException("IDENTITY generation not supported for @CollectionId");
case "assigned":
throw new MappingException("Assigned generation not supported for @CollectionId");
case "native":
throw new MappingException("Native generation not supported for @CollectionId");
}
}
}
| IdBagBinder |
java | apache__camel | components/camel-velocity/src/test/java/org/apache/camel/component/velocity/VelocityFileLetterWithPropertyTest.java | {
"start": 903,
"end": 1339
} | class ____ extends VelocityLetterTest {
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
System.setProperty("ENV", "src/test/resources/");
from("direct:a").to("velocity:file:{{ENV}}org/apache/camel/component/velocity/letter.vm").to("mock:result");
}
};
}
}
| VelocityFileLetterWithPropertyTest |
java | micronaut-projects__micronaut-core | core-processor/src/main/java/io/micronaut/inject/writer/BeanDefinitionVisitor.java | {
"start": 1925,
"end": 2343
} | class ____ produced by a factory method annotated with
* {@link io.micronaut.context.annotation.Bean} this method should be called.</p>
*
* @param factoryClass The factory class
* @param factoryMethod The factory method
*/
void visitBeanFactoryMethod(ClassElement factoryClass,
MethodElement factoryMethod);
/**
* <p>In the case where the produced | is |
java | elastic__elasticsearch | x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/sagemaker/SageMakerClient.java | {
"start": 2817,
"end": 8448
} | class ____ implements Closeable {
private static final Logger log = LogManager.getLogger(SageMakerClient.class);
private final Cache<RegionAndSecrets, SageMakerRuntimeAsyncClient> existingClients = CacheBuilder.<
RegionAndSecrets,
SageMakerRuntimeAsyncClient>builder()
.removalListener(removal -> removal.getValue().close())
.setExpireAfterAccess(TimeValue.timeValueMinutes(15))
.build();
private final CacheLoader<RegionAndSecrets, SageMakerRuntimeAsyncClient> clientFactory;
private final ThreadPool threadPool;
public SageMakerClient(CacheLoader<RegionAndSecrets, SageMakerRuntimeAsyncClient> clientFactory, ThreadPool threadPool) {
this.clientFactory = clientFactory;
this.threadPool = threadPool;
}
public void invoke(
RegionAndSecrets regionAndSecrets,
InvokeEndpointRequest request,
TimeValue timeout,
ActionListener<InvokeEndpointResponse> listener
) {
SageMakerRuntimeAsyncClient asyncClient;
try {
asyncClient = existingClients.computeIfAbsent(regionAndSecrets, clientFactory);
} catch (ExecutionException e) {
listener.onFailure(clientFailure(regionAndSecrets, e));
return;
}
var contextPreservingListener = new ContextPreservingActionListener<>(
threadPool.getThreadContext().newRestorableContext(false),
listener
);
var awsFuture = asyncClient.invokeEndpoint(request);
var timeoutListener = ListenerTimeouts.wrapWithTimeout(
threadPool,
timeout,
threadPool.executor(UTILITY_THREAD_POOL_NAME),
contextPreservingListener,
ignored -> {
FutureUtils.cancel(awsFuture);
contextPreservingListener.onFailure(
new ElasticsearchStatusException("Request timed out after [{}]", RestStatus.REQUEST_TIMEOUT, timeout)
);
}
);
awsFuture.thenAcceptAsync(timeoutListener::onResponse, threadPool.executor(UTILITY_THREAD_POOL_NAME))
.exceptionallyAsync(t -> failAndMaybeThrowError(t, timeoutListener), threadPool.executor(UTILITY_THREAD_POOL_NAME));
}
private static Exception clientFailure(RegionAndSecrets regionAndSecrets, Exception cause) {
return new ElasticsearchStatusException(
"failed to create SageMakerRuntime client for region [{}]",
RestStatus.INTERNAL_SERVER_ERROR,
cause,
regionAndSecrets.region()
);
}
private Void failAndMaybeThrowError(Throwable t, ActionListener<?> listener) {
if (t instanceof CompletionException ce) {
t = ce.getCause();
}
if (t instanceof Exception e) {
listener.onFailure(e);
} else {
ExceptionsHelper.maybeError(t).ifPresent(ExceptionsHelper::maybeDieOnAnotherThread);
log.atWarn().withThrowable(t).log("Unknown failure calling SageMaker.");
listener.onFailure(new RuntimeException("Unknown failure calling SageMaker.", t));
}
return null; // Void
}
public void invokeStream(
RegionAndSecrets regionAndSecrets,
InvokeEndpointWithResponseStreamRequest request,
TimeValue timeout,
ActionListener<SageMakerStream> listener
) {
SageMakerRuntimeAsyncClient asyncClient;
try {
asyncClient = existingClients.computeIfAbsent(regionAndSecrets, clientFactory);
} catch (ExecutionException e) {
listener.onFailure(clientFailure(regionAndSecrets, e));
return;
}
var contextPreservingListener = new ContextPreservingActionListener<>(
threadPool.getThreadContext().newRestorableContext(false),
listener
);
var responseStreamProcessor = new SageMakerStreamingResponseProcessor();
var cancelAwsRequestListener = new AtomicReference<CompletableFuture<?>>();
var timeoutListener = ListenerTimeouts.wrapWithTimeout(
threadPool,
timeout,
threadPool.executor(UTILITY_THREAD_POOL_NAME),
contextPreservingListener,
ignored -> {
FutureUtils.cancel(cancelAwsRequestListener.get());
contextPreservingListener.onFailure(
new ElasticsearchStatusException("Request timed out after [{}]", RestStatus.REQUEST_TIMEOUT, timeout)
);
}
);
// To stay consistent with HTTP providers, we cancel the TimeoutListener onResponse because we are measuring the time it takes to
// start receiving bytes.
var responseStreamListener = InvokeEndpointWithResponseStreamResponseHandler.builder()
.onResponse(response -> timeoutListener.onResponse(new SageMakerStream(response, responseStreamProcessor)))
.onEventStream(publisher -> responseStreamProcessor.setPublisher(FlowAdapters.toFlowPublisher(publisher)))
.build();
var awsFuture = asyncClient.invokeEndpointWithResponseStream(request, responseStreamListener);
cancelAwsRequestListener.set(awsFuture);
awsFuture.exceptionallyAsync(t -> failAndMaybeThrowError(t, timeoutListener), threadPool.executor(UTILITY_THREAD_POOL_NAME));
}
@Override
public void close() {
existingClients.invalidateAll(); // will close each cached client
}
public record RegionAndSecrets(String region, AwsSecretSettings secretSettings) {}
public static | SageMakerClient |
java | hibernate__hibernate-orm | hibernate-envers/src/test/java/org/hibernate/orm/test/envers/integration/inheritance/single/relation/PolymorphicRemovalTest.java | {
"start": 4909,
"end": 5404
} | class ____ {
@Id
@GeneratedValue
private Integer id;
private String name;
@ManyToOne(fetch = FetchType.LAZY)
private EmployeeType type;
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public EmployeeType getType() {
return type;
}
public void setType(EmployeeType type) {
this.type = type;
}
}
}
| Employee |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/metamodel/generics/embeddable/CreationDate.java | {
"start": 240,
"end": 402
} | class ____ extends AbstractValueObject<Date> {
protected CreationDate() {
super();
}
public CreationDate(final Date value) {
super( value );
}
}
| CreationDate |
java | quarkusio__quarkus | extensions/arc/deployment/src/test/java/io/quarkus/arc/test/unproxyable/SynthProxiableBeanWithoutNoArgConstructorTest.java | {
"start": 760,
"end": 2335
} | class ____ {
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(SynthBean.class))
.addBuildChainCustomizer(buildCustomizer());
static Consumer<BuildChainBuilder> buildCustomizer() {
return new Consumer<BuildChainBuilder>() {
@Override
public void accept(BuildChainBuilder builder) {
builder.addBuildStep(new BuildStep() {
@Override
public void execute(BuildContext context) {
context.produce(SyntheticBeanBuildItem.configure(SynthBean.class)
.scope(ApplicationScoped.class)
.types(SynthBean.class)
.unremovable()
.creator(cg -> {
BlockCreator bc = cg.createMethod();
bc.return_(bc.new_(SynthBean.class, Const.of("foo")));
})
.done());
}
}).produces(SyntheticBeanBuildItem.class).build();
}
};
}
@Test
public void testSyntheticBean() {
InstanceHandle<SynthBean> instance = Arc.container().instance(SynthBean.class);
assertTrue(instance.isAvailable());
assertEquals("foo", instance.get().getString());
}
@Vetoed
static | SynthProxiableBeanWithoutNoArgConstructorTest |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/sql/partition/PartitionKeyAndAssociationTest.java | {
"start": 2833,
"end": 3383
} | class ____ {
@Id
private Long id;
@PartitionKey
private Long accountId;
@OneToOne( mappedBy = "contact", cascade = CascadeType.ALL )
private ContactAddress contactAddress;
private String name;
public SalesContact() {
}
public SalesContact(Long id, Long accountId, String name) {
this.id = id;
this.accountId = accountId;
this.name = name;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
}
@Entity( name = "ContactEmail" )
public static | SalesContact |
java | spring-projects__spring-framework | spring-web/src/main/java/org/springframework/web/method/annotation/InitBinderDataBinderFactory.java | {
"start": 1432,
"end": 3433
} | class ____ extends DefaultDataBinderFactory {
private final List<InvocableHandlerMethod> binderMethods;
/**
* Create a new InitBinderDataBinderFactory instance.
* @param binderMethods {@code @InitBinder} methods
* @param initializer for global data binder initialization
*/
public InitBinderDataBinderFactory(@Nullable List<InvocableHandlerMethod> binderMethods,
@Nullable WebBindingInitializer initializer) {
super(initializer);
this.binderMethods = (binderMethods != null ? binderMethods : Collections.emptyList());
}
/**
* Initialize a WebDataBinder with {@code @InitBinder} methods.
* <p>If the {@code @InitBinder} annotation specifies attributes names,
* it is invoked only if the names include the target object name.
* @throws Exception if one of the invoked @{@link InitBinder} methods fails
* @see #isBinderMethodApplicable
*/
@Override
public void initBinder(WebDataBinder dataBinder, NativeWebRequest request) throws Exception {
for (InvocableHandlerMethod binderMethod : this.binderMethods) {
if (isBinderMethodApplicable(binderMethod, dataBinder)) {
Object returnValue = binderMethod.invokeForRequest(request, null, dataBinder);
if (returnValue != null) {
throw new IllegalStateException(
"@InitBinder methods must not return a value (should be void): " + binderMethod);
}
}
}
}
/**
* Determine whether the given {@code @InitBinder} method should be used
* to initialize the given {@link WebDataBinder} instance. By default we
* check the specified attribute names in the annotation value, if any.
*/
protected boolean isBinderMethodApplicable(HandlerMethod initBinderMethod, WebDataBinder dataBinder) {
InitBinder ann = initBinderMethod.getMethodAnnotation(InitBinder.class);
Assert.state(ann != null, "No InitBinder annotation");
String[] names = ann.value();
return (ObjectUtils.isEmpty(names) || ObjectUtils.containsElement(names, dataBinder.getObjectName()));
}
}
| InitBinderDataBinderFactory |
java | apache__camel | components/camel-stax/src/main/java/org/apache/camel/language/xtokenizer/XMLTokenizeLanguage.java | {
"start": 1749,
"end": 4020
} | class ____ extends SingleInputTypedLanguageSupport {
@Override
protected boolean supportResultType() {
// result type is handled specially in tokenizer
return false;
}
@Override
public Expression createExpression(Expression source, String expression, Object[] properties) {
Class<?> type = property(Class.class, properties, 0, null);
Character mode = property(Character.class, properties, 2, "i");
XMLTokenExpressionIterator xml = new XMLTokenExpressionIterator(source, expression, mode);
xml.setGroup(property(int.class, properties, 3, 1));
Object obj = properties[4];
if (obj != null) {
Namespaces ns;
if (obj instanceof Namespaces) {
ns = (Namespaces) obj;
} else if (obj instanceof Map) {
ns = new Namespaces();
((Map<String, String>) obj).forEach(ns::add);
} else {
throw new IllegalArgumentException(
"Namespaces is not instance of java.util.Map or " + Namespaces.class.getName());
}
xml.setNamespaces(ns.getNamespaces());
}
Expression answer = xml;
if (type != null && type != Object.class) {
// wrap iterator in a converter
final Expression delegate = xml;
answer = new ExpressionAdapter() {
@Override
public Object evaluate(Exchange exchange) {
Object value = delegate.evaluate(exchange, Object.class);
if (value instanceof Iterator<?> it) {
value = new IteratorConvertTo(exchange, it, type);
}
return value;
}
@Override
public void init(CamelContext context) {
super.init(context);
delegate.init(context);
}
@Override
public String toString() {
return delegate.toString();
}
};
}
if (getCamelContext() != null) {
answer.init(getCamelContext());
}
return answer;
}
}
| XMLTokenizeLanguage |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/sql/dialect/databricks/parser/DatabricksSelectParser.java | {
"start": 242,
"end": 568
} | class ____ extends SparkSelectParser {
public DatabricksSelectParser(SQLExprParser exprParser, SQLSelectListCache selectListCache) {
super(exprParser, selectListCache);
}
@Override
protected SQLExprParser createExprParser() {
return new DatabricksExprParser(this.lexer);
}
}
| DatabricksSelectParser |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/JSONFieldDefaultValueTest.java | {
"start": 156,
"end": 2209
} | class ____ extends TestCase {
public void test_default_value() throws Exception {
Model m = new Model();
String s = JSON.toJSONString(m);
System.out.println(s);
Model m2 = JSON.parseObject(s, Model.class);
assertEquals("string", m2.getString());
assertEquals(false, m2.getaBoolean());
assertEquals(true, m2.getaBoolean2().booleanValue());
assertEquals(0, m2.getAnInt());
assertEquals(888, m2.getInteger().intValue());
assertEquals(0, m2.getaShort());
assertEquals(88, m2.getaShort2().shortValue());
assertEquals('\u0000', m2.getaChar());
assertEquals('J', m2.getCharacter().charValue());
assertEquals(0, m2.getaByte());
assertEquals(8, m2.getaByte2().byteValue());
assertEquals(0, m2.getaLong());
assertEquals(8888, m2.getaLong2().longValue());
assertEquals("0.0", "" + m2.getaFloat());
assertEquals("8.8", "" + m2.getaFloat2());
assertEquals("0.0", "" + m2.getaDouble());
assertEquals("88.88", "" + m2.getaDouble2());
}
public void test_not_null() throws Exception {
Model m = new Model("test", true, 888, (short)88, 'J', (byte)8, 8888L, 8.8F, 88.88, false, 999, (short)99, 'C', (byte)9, 9999L, 9.9F, 99.99);
String s = JSON.toJSONString(m);
System.out.println(s);
Model m2 = JSON.parseObject(s, Model.class);
assertEquals("test", m2.getString());
assertEquals(true, m2.getaBoolean());
assertEquals(false, m2.getaBoolean2().booleanValue());
assertEquals(888, m2.getAnInt());
assertEquals(999, m2.getInteger().intValue());
assertEquals(88, m2.getaShort());
assertEquals(99, m2.getaShort2().shortValue());
assertEquals('J', m2.getaChar());
assertEquals('C', m2.getCharacter().charValue());
assertEquals(8, m2.getaByte());
assertEquals(9, m2.getaByte2().byteValue());
assertEquals(8888, m2.getaLong());
assertEquals(9999, m2.getaLong2().longValue());
assertEquals("8.8", "" + m2.getaFloat());
assertEquals("9.9", "" + m2.getaFloat2());
assertEquals("88.88", "" + m2.getaDouble());
assertEquals("99.99", "" + m2.getaDouble2());
}
public static | JSONFieldDefaultValueTest |
java | apache__dubbo | dubbo-metadata/dubbo-metadata-api/src/main/java/org/apache/dubbo/metadata/report/identifier/BaseMetadataIdentifier.java | {
"start": 863,
"end": 979
} | interface ____ {
String getUniqueKey(KeyTypeEnum keyType);
String getIdentifierKey();
}
| BaseMetadataIdentifier |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/hql/TrtPanel.java | {
"start": 319,
"end": 967
} | class ____ implements Serializable {
@Id
private Long id;
@ManyToOne(fetch = FetchType.LAZY)
private Panel panel;
private Long clientId;
private String deltaStamp;
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public Panel getPanel() {
return panel;
}
public void setPanel(Panel panel) {
this.panel = panel;
}
public Long getClientId() {
return clientId;
}
public void setClientId(Long clientId) {
this.clientId = clientId;
}
public String getDeltaStamp() {
return deltaStamp;
}
public void setDeltaStamp(String deltaStamp) {
this.deltaStamp = deltaStamp;
}
}
| TrtPanel |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/resources/XAttrNameParam.java | {
"start": 894,
"end": 1408
} | class ____ extends StringParam {
/** Parameter name. **/
public static final String NAME = "xattr.name";
/** Default parameter value. **/
public static final String DEFAULT = "";
private static Domain DOMAIN = new Domain(NAME, Pattern.compile(".*"));
public XAttrNameParam(final String str) {
super(DOMAIN, str == null || str.equals(DEFAULT) ? null : str);
}
@Override
public String getName() {
return NAME;
}
public String getXAttrName() {
return getValue();
}
}
| XAttrNameParam |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/XmlVerifierEndpointBuilderFactory.java | {
"start": 10216,
"end": 16628
} | class ____ the value depends on the type of the output node search.
* The output node search is forwarded to XmlSignature2Message.
*
* The option will be converted to a <code>java.lang.Object</code> type.
*
* Group: producer
*
* @param outputNodeSearch the value to set
* @return the dsl builder
*/
default XmlVerifierEndpointBuilder outputNodeSearch(String outputNodeSearch) {
doSetProperty("outputNodeSearch", outputNodeSearch);
return this;
}
/**
* Determines the search type for determining the output node which is
* serialized into the output message bodyF. See
* setOutputNodeSearch(Object). The supported default search types you
* can find in DefaultXmlSignature2Message.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: Default
* Group: producer
*
* @param outputNodeSearchType the value to set
* @return the dsl builder
*/
default XmlVerifierEndpointBuilder outputNodeSearchType(String outputNodeSearchType) {
doSetProperty("outputNodeSearchType", outputNodeSearchType);
return this;
}
/**
* The character encoding of the resulting signed XML document. If null
* then the encoding of the original XML document is used.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: producer
*
* @param outputXmlEncoding the value to set
* @return the dsl builder
*/
default XmlVerifierEndpointBuilder outputXmlEncoding(String outputXmlEncoding) {
doSetProperty("outputXmlEncoding", outputXmlEncoding);
return this;
}
/**
* Indicator whether the XML signature elements (elements with local
* name Signature and namespace http://www.w3.org/2000/09/xmldsig#)
* shall be removed from the document set to the output message.
* Normally, this is only necessary, if the XML signature is enveloped.
* The default value is Boolean#FALSE. This parameter is forwarded to
* XmlSignature2Message. This indicator has no effect if the output node
* search is of type
* DefaultXmlSignature2Message#OUTPUT_NODE_SEARCH_TYPE_DEFAULT.F.
*
* The option is a: <code>java.lang.Boolean</code> type.
*
* Default: false
* Group: producer
*
* @param removeSignatureElements the value to set
* @return the dsl builder
*/
default XmlVerifierEndpointBuilder removeSignatureElements(Boolean removeSignatureElements) {
doSetProperty("removeSignatureElements", removeSignatureElements);
return this;
}
/**
* Indicator whether the XML signature elements (elements with local
* name Signature and namespace http://www.w3.org/2000/09/xmldsig#)
* shall be removed from the document set to the output message.
* Normally, this is only necessary, if the XML signature is enveloped.
* The default value is Boolean#FALSE. This parameter is forwarded to
* XmlSignature2Message. This indicator has no effect if the output node
* search is of type
* DefaultXmlSignature2Message#OUTPUT_NODE_SEARCH_TYPE_DEFAULT.F.
*
* The option will be converted to a <code>java.lang.Boolean</code>
* type.
*
* Default: false
* Group: producer
*
* @param removeSignatureElements the value to set
* @return the dsl builder
*/
default XmlVerifierEndpointBuilder removeSignatureElements(String removeSignatureElements) {
doSetProperty("removeSignatureElements", removeSignatureElements);
return this;
}
/**
* Classpath to the XML Schema. Must be specified in the detached XML
* Signature case for determining the ID attributes, might be set in the
* enveloped and enveloping case. If set, then the XML document is
* validated with the specified XML schema. The schema resource URI can
* be overwritten by the header
* XmlSignatureConstants#HEADER_SCHEMA_RESOURCE_URI.
*
* This option can also be loaded from an existing file, by prefixing
* with file: or classpath: followed by the location of the file.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: producer
*
* @param schemaResourceUri the value to set
* @return the dsl builder
*/
default XmlVerifierEndpointBuilder schemaResourceUri(String schemaResourceUri) {
doSetProperty("schemaResourceUri", schemaResourceUri);
return this;
}
/**
* Enables secure validation. If true then secure validation is enabled.
*
* The option is a: <code>java.lang.Boolean</code> type.
*
* Default: true
* Group: producer
*
* @param secureValidation the value to set
* @return the dsl builder
*/
default XmlVerifierEndpointBuilder secureValidation(Boolean secureValidation) {
doSetProperty("secureValidation", secureValidation);
return this;
}
/**
* Enables secure validation. If true then secure validation is enabled.
*
* The option will be converted to a <code>java.lang.Boolean</code>
* type.
*
* Default: true
* Group: producer
*
* @param secureValidation the value to set
* @return the dsl builder
*/
default XmlVerifierEndpointBuilder secureValidation(String secureValidation) {
doSetProperty("secureValidation", secureValidation);
return this;
}
/**
* Handles the different validation failed situations. The default
* implementation throws specific exceptions for the different
* situations (All exceptions have the package name
* org.apache.camel.component.xmlsecurity.api and are a sub- | of |
java | apache__camel | core/camel-api/src/main/java/org/apache/camel/spi/AnnotationBasedProcessorFactory.java | {
"start": 1144,
"end": 1829
} | interface ____ {
/**
* Service factory key.
*/
String FACTORY = "annotation-processor-factory";
/**
* Creates dynamic router processor from the configured annotation.
*/
AsyncProcessor createDynamicRouter(CamelContext camelContext, DynamicRouter annotation);
/**
* Creates recipient list processor from the configured annotation.
*/
AsyncProcessor createRecipientList(CamelContext camelContext, RecipientList annotation);
/**
* Creates routing slip processor from the configured annotation.
*/
AsyncProcessor createRoutingSlip(CamelContext camelContext, RoutingSlip annotation);
}
| AnnotationBasedProcessorFactory |
java | netty__netty | common/src/test/java/io/netty/util/internal/TypeParameterMatcherTest.java | {
"start": 2908,
"end": 2995
} | class ____<A, B, C> {
A a;
B b;
C c;
}
public static | TypeX |
java | spring-projects__spring-framework | spring-r2dbc/src/main/java/org/springframework/r2dbc/core/DataClassRowMapper.java | {
"start": 3187,
"end": 5280
} | class ____ each row should be mapped to
*/
public DataClassRowMapper(Class<T> mappedClass) {
this(mappedClass, DefaultConversionService.getSharedInstance());
}
public DataClassRowMapper(Class<T> mappedClass, ConversionService conversionService) {
super(mappedClass, conversionService);
this.mappedConstructor = BeanUtils.getResolvableConstructor(mappedClass);
int paramCount = this.mappedConstructor.getParameterCount();
this.constructorParameterNames = (paramCount > 0 ?
BeanUtils.getParameterNames(this.mappedConstructor) : new String[0]);
for (String name : this.constructorParameterNames) {
suppressProperty(name);
}
this.constructorParameterTypes = new TypeDescriptor[paramCount];
for (int i = 0; i < paramCount; i++) {
this.constructorParameterTypes[i] = new TypeDescriptor(new MethodParameter(this.mappedConstructor, i));
}
}
@Override
protected T constructMappedInstance(Readable readable, List<? extends ReadableMetadata> itemMetadatas, TypeConverter tc) {
@Nullable Object[] args = new Object[this.constructorParameterNames.length];
for (int i = 0; i < args.length; i++) {
String name = this.constructorParameterNames[i];
int index = findIndex(itemMetadatas, lowerCaseName(name));
if (index == -1) {
index = findIndex(itemMetadatas, underscoreName(name));
}
if (index == -1) {
throw new DataRetrievalFailureException(
"Unable to map constructor parameter '" + name + "' to a column or out-parameter");
}
TypeDescriptor td = this.constructorParameterTypes[i];
Object value = getItemValue(readable, index, td.getType());
args[i] = tc.convertIfNecessary(value, td.getType(), td);
}
return BeanUtils.instantiateClass(this.mappedConstructor, args);
}
private int findIndex(List<? extends ReadableMetadata> itemMetadatas, String name) {
int index = 0;
for (ReadableMetadata itemMetadata : itemMetadatas) {
// we use equalsIgnoreCase, similar to RowMetadata#contains(String)
if (itemMetadata.getName().equalsIgnoreCase(name)) {
return index;
}
index++;
}
return -1;
}
}
| that |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/GridFsEndpointBuilderFactory.java | {
"start": 11548,
"end": 17365
} | interface ____
extends
EndpointConsumerBuilder {
default GridFsEndpointConsumerBuilder basic() {
return (GridFsEndpointConsumerBuilder) this;
}
/**
* Allows for bridging the consumer to the Camel routing Error Handler,
* which mean any exceptions (if possible) occurred while the Camel
* consumer is trying to pickup incoming messages, or the likes, will
* now be processed as a message and handled by the routing Error
* Handler. Important: This is only possible if the 3rd party component
* allows Camel to be alerted if an exception was thrown. Some
* components handle this internally only, and therefore
* bridgeErrorHandler is not possible. In other situations we may
* improve the Camel component to hook into the 3rd party component and
* make this possible for future releases. By default the consumer will
* use the org.apache.camel.spi.ExceptionHandler to deal with
* exceptions, that will be logged at WARN or ERROR level and ignored.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer (advanced)
*
* @param bridgeErrorHandler the value to set
* @return the dsl builder
*/
default AdvancedGridFsEndpointConsumerBuilder bridgeErrorHandler(boolean bridgeErrorHandler) {
doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
return this;
}
/**
* Allows for bridging the consumer to the Camel routing Error Handler,
* which mean any exceptions (if possible) occurred while the Camel
* consumer is trying to pickup incoming messages, or the likes, will
* now be processed as a message and handled by the routing Error
* Handler. Important: This is only possible if the 3rd party component
* allows Camel to be alerted if an exception was thrown. Some
* components handle this internally only, and therefore
* bridgeErrorHandler is not possible. In other situations we may
* improve the Camel component to hook into the 3rd party component and
* make this possible for future releases. By default the consumer will
* use the org.apache.camel.spi.ExceptionHandler to deal with
* exceptions, that will be logged at WARN or ERROR level and ignored.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: consumer (advanced)
*
* @param bridgeErrorHandler the value to set
* @return the dsl builder
*/
default AdvancedGridFsEndpointConsumerBuilder bridgeErrorHandler(String bridgeErrorHandler) {
doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
return this;
}
/**
* To let the consumer use a custom ExceptionHandler. Notice if the
* option bridgeErrorHandler is enabled then this option is not in use.
* By default the consumer will deal with exceptions, that will be
* logged at WARN or ERROR level and ignored.
*
* The option is a: <code>org.apache.camel.spi.ExceptionHandler</code>
* type.
*
* Group: consumer (advanced)
*
* @param exceptionHandler the value to set
* @return the dsl builder
*/
default AdvancedGridFsEndpointConsumerBuilder exceptionHandler(org.apache.camel.spi.ExceptionHandler exceptionHandler) {
doSetProperty("exceptionHandler", exceptionHandler);
return this;
}
/**
* To let the consumer use a custom ExceptionHandler. Notice if the
* option bridgeErrorHandler is enabled then this option is not in use.
* By default the consumer will deal with exceptions, that will be
* logged at WARN or ERROR level and ignored.
*
* The option will be converted to a
* <code>org.apache.camel.spi.ExceptionHandler</code> type.
*
* Group: consumer (advanced)
*
* @param exceptionHandler the value to set
* @return the dsl builder
*/
default AdvancedGridFsEndpointConsumerBuilder exceptionHandler(String exceptionHandler) {
doSetProperty("exceptionHandler", exceptionHandler);
return this;
}
/**
* Sets the exchange pattern when the consumer creates an exchange.
*
* The option is a: <code>org.apache.camel.ExchangePattern</code> type.
*
* Group: consumer (advanced)
*
* @param exchangePattern the value to set
* @return the dsl builder
*/
default AdvancedGridFsEndpointConsumerBuilder exchangePattern(org.apache.camel.ExchangePattern exchangePattern) {
doSetProperty("exchangePattern", exchangePattern);
return this;
}
/**
* Sets the exchange pattern when the consumer creates an exchange.
*
* The option will be converted to a
* <code>org.apache.camel.ExchangePattern</code> type.
*
* Group: consumer (advanced)
*
* @param exchangePattern the value to set
* @return the dsl builder
*/
default AdvancedGridFsEndpointConsumerBuilder exchangePattern(String exchangePattern) {
doSetProperty("exchangePattern", exchangePattern);
return this;
}
}
/**
* Builder for endpoint producers for the MongoDB GridFS component.
*/
public | AdvancedGridFsEndpointConsumerBuilder |
java | apache__camel | core/camel-base-engine/src/main/java/org/apache/camel/impl/engine/DefaultOptimisedComponentResolver.java | {
"start": 1082,
"end": 1601
} | class ____ implements OptimisedComponentResolver {
private final CamelContext camelContext;
public DefaultOptimisedComponentResolver(CamelContext camelContext) {
this.camelContext = camelContext;
}
@Override
public Component resolveComponent(String uri) {
String scheme = ExchangeHelper.resolveScheme(uri);
if (scheme != null) {
return camelContext.getComponent(scheme);
} else {
return null;
}
}
}
| DefaultOptimisedComponentResolver |
java | apache__flink | flink-core/src/main/java/org/apache/flink/api/java/typeutils/TypeExtractor.java | {
"start": 23571,
"end": 25511
} | interface ____; the parameter indexing
// applies to this range
final int baseParametersLen = sam.getParameterCount();
final Type output;
if (lambdaOutputTypeArgumentIndices.length > 0) {
output =
TypeExtractionUtils.extractTypeFromLambda(
baseClass,
exec,
lambdaOutputTypeArgumentIndices,
paramLen,
baseParametersLen);
} else {
output = exec.getReturnType();
TypeExtractionUtils.validateLambdaType(baseClass, output);
}
return new TypeExtractor().privateCreateTypeInfo(output, inType, null);
} else {
if (inType != null) {
validateInputType(
baseClass, function.getClass(), inputTypeArgumentIndex, inType);
}
return new TypeExtractor()
.privateCreateTypeInfo(
baseClass,
function.getClass(),
outputTypeArgumentIndex,
inType,
null);
}
} catch (InvalidTypesException e) {
if (allowMissing) {
return (TypeInformation<OUT>)
new MissingTypeInfo(
functionName != null ? functionName : function.toString(), e);
} else {
throw e;
}
}
}
/**
* Returns the binary operator's return type.
*
* <p>This method can extract a type in 4 different ways:
*
* <p>1. By using the generics of the base | has |
java | spring-projects__spring-security | web/src/test/java/org/springframework/security/web/authentication/ui/DefaultOneTimeTokenSubmitPageGeneratingFilterTests.java | {
"start": 1298,
"end": 5293
} | class ____ {
DefaultOneTimeTokenSubmitPageGeneratingFilter filter = new DefaultOneTimeTokenSubmitPageGeneratingFilter();
MockHttpServletRequest request;
MockHttpServletResponse response = new MockHttpServletResponse();
MockFilterChain filterChain = new MockFilterChain();
@BeforeEach
void setup() {
this.request = get("/login/ott").build();
}
@Test
void filterWhenTokenQueryParamThenShouldIncludeJavascriptToAutoSubmitFormAndInputHasTokenValue() throws Exception {
this.request.setParameter("token", "1234");
this.filter.doFilterInternal(this.request, this.response, this.filterChain);
String response = this.response.getContentAsString();
assertThat(response).contains(
"<input type=\"text\" id=\"token\" name=\"token\" value=\"1234\" placeholder=\"Token\" required=\"true\" autofocus=\"autofocus\"/>");
}
@Test
void setRequestMatcherWhenNullThenException() {
assertThatIllegalArgumentException().isThrownBy(() -> this.filter.setRequestMatcher(null));
}
@Test
void setLoginProcessingUrlWhenNullOrEmptyThenException() {
assertThatIllegalArgumentException().isThrownBy(() -> this.filter.setLoginProcessingUrl(null));
assertThatIllegalArgumentException().isThrownBy(() -> this.filter.setLoginProcessingUrl(""));
}
@Test
void setLoginProcessingUrlThenUseItForFormAction() throws Exception {
this.filter.setLoginProcessingUrl("/login/another");
this.filter.doFilterInternal(this.request, this.response, this.filterChain);
String response = this.response.getContentAsString();
assertThat(response).contains("<form class=\"login-form\" action=\"/login/another\" method=\"post\">");
}
@Test
void setContextThenGenerates() throws Exception {
MockHttpServletRequest request = get().requestUri("/context", "/login/ott", null).build();
this.filter.setLoginProcessingUrl("/login/another");
this.filter.doFilterInternal(request, this.response, this.filterChain);
String response = this.response.getContentAsString();
assertThat(response).contains("<form class=\"login-form\" action=\"/context/login/another\" method=\"post\">");
}
@Test
void filterWhenTokenQueryParamUsesSpecialCharactersThenValueIsEscaped() throws Exception {
this.request.setParameter("token", "this<>!@#\"");
this.filter.doFilterInternal(this.request, this.response, this.filterChain);
String response = this.response.getContentAsString();
assertThat(response).contains(
"<input type=\"text\" id=\"token\" name=\"token\" value=\"this<>!@#"\" placeholder=\"Token\" required=\"true\" autofocus=\"autofocus\"/>");
}
@Test
void filterThenRenders() throws Exception {
this.request.setParameter("token", "this<>!@#\"");
this.filter.setLoginProcessingUrl("/login/another");
this.filter.setResolveHiddenInputs((r) -> Map.of("_csrf", "csrf-token-value"));
this.filter.doFilterInternal(this.request, this.response, this.filterChain);
String response = this.response.getContentAsString();
assertThat(response).isEqualTo(
"""
<!DOCTYPE html>
<html lang="en">
<head>
<title>One-Time Token Login</title>
<meta charset="utf-8"/>
<meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no"/>
<link href="/default-ui.css" rel="stylesheet" />
</head>
<body>
<div class="container">
<form class="login-form" action="/login/another" method="post">
<h2>Please input the token</h2>
<p>
<label for="token" class="screenreader">Token</label>
<input type="text" id="token" name="token" value="this<>!@#"" placeholder="Token" required="true" autofocus="autofocus"/>
</p>
<button class="primary" type="submit">Sign in</button>
<input name="_csrf" type="hidden" value="csrf-token-value" />
</form>
</div>
</body>
</html>
""");
}
}
| DefaultOneTimeTokenSubmitPageGeneratingFilterTests |
java | elastic__elasticsearch | modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessParserVisitor.java | {
"start": 144,
"end": 362
} | interface ____ a complete generic visitor for a parse tree produced
* by {@link PainlessParser}.
*
* @param <T> The return type of the visit operation. Use {@link Void} for
* operations with no return type.
*/
| defines |
java | elastic__elasticsearch | x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPercentileTests.java | {
"start": 1545,
"end": 22739
} | class ____ extends AbstractScalarFunctionTestCase {
public MvPercentileTests(@Name("TestCase") Supplier<TestCaseSupplier.TestCase> testCaseSupplier) {
this.testCase = testCaseSupplier.get();
}
@ParametersFactory
public static Iterable<Object[]> parameters() {
List<TestCaseSupplier> cases = new ArrayList<>();
var fieldSuppliers = Stream.of(
MultivalueTestCaseSupplier.intCases(Integer.MIN_VALUE, Integer.MAX_VALUE, true),
MultivalueTestCaseSupplier.longCases(Long.MIN_VALUE, Long.MAX_VALUE, true),
MultivalueTestCaseSupplier.doubleCases(-Double.MAX_VALUE, Double.MAX_VALUE, true)
).flatMap(List::stream).toList();
var percentileSuppliers = Stream.of(
TestCaseSupplier.intCases(0, 100, true),
TestCaseSupplier.longCases(0, 100, true),
TestCaseSupplier.doubleCases(0, 100, true)
).flatMap(List::stream).toList();
for (var fieldSupplier : fieldSuppliers) {
for (var percentileSupplier : percentileSuppliers) {
cases.add(makeSupplier(fieldSupplier, percentileSupplier));
}
}
for (var percentileType : List.of(INTEGER, LONG, DOUBLE)) {
cases.addAll(
List.of(
// Doubles
new TestCaseSupplier(
"median double",
List.of(DOUBLE, percentileType),
() -> new TestCaseSupplier.TestCase(
List.of(
new TestCaseSupplier.TypedData(List.of(-10., 5., 10.), DOUBLE, "field"),
percentileWithType(50, percentileType)
),
evaluatorString(DOUBLE, percentileType),
DOUBLE,
equalTo(5.)
)
),
new TestCaseSupplier(
"single value double",
List.of(DOUBLE, percentileType),
() -> new TestCaseSupplier.TestCase(
List.of(
new TestCaseSupplier.TypedData(List.of(55.), DOUBLE, "field"),
percentileWithType(randomIntBetween(0, 100), percentileType)
),
evaluatorString(DOUBLE, percentileType),
DOUBLE,
equalTo(55.)
)
),
new TestCaseSupplier(
"p0 double",
List.of(DOUBLE, percentileType),
() -> new TestCaseSupplier.TestCase(
List.of(
new TestCaseSupplier.TypedData(List.of(-10., 5., 10.), DOUBLE, "field"),
percentileWithType(0, percentileType)
),
evaluatorString(DOUBLE, percentileType),
DOUBLE,
equalTo(-10.)
)
),
new TestCaseSupplier(
"p100 double",
List.of(DOUBLE, percentileType),
() -> new TestCaseSupplier.TestCase(
List.of(
new TestCaseSupplier.TypedData(List.of(-10., 5., 10.), DOUBLE, "field"),
percentileWithType(100, percentileType)
),
evaluatorString(DOUBLE, percentileType),
DOUBLE,
equalTo(10.)
)
),
new TestCaseSupplier(
"averaged double",
List.of(DOUBLE, percentileType),
() -> new TestCaseSupplier.TestCase(
List.of(
new TestCaseSupplier.TypedData(List.of(-10., 5., 10.), DOUBLE, "field"),
percentileWithType(75, percentileType)
),
evaluatorString(DOUBLE, percentileType),
DOUBLE,
equalTo(7.5)
)
),
new TestCaseSupplier(
"big double difference",
List.of(DOUBLE, percentileType),
() -> new TestCaseSupplier.TestCase(
List.of(
new TestCaseSupplier.TypedData(List.of(-Double.MAX_VALUE, Double.MAX_VALUE), DOUBLE, "field"),
percentileWithType(50, percentileType)
),
evaluatorString(DOUBLE, percentileType),
DOUBLE,
closeTo(0, 0.0000001)
)
),
// Int
new TestCaseSupplier(
"median int",
List.of(INTEGER, percentileType),
() -> new TestCaseSupplier.TestCase(
List.of(
new TestCaseSupplier.TypedData(List.of(-10, 5, 10), INTEGER, "field"),
percentileWithType(50, percentileType)
),
evaluatorString(INTEGER, percentileType),
INTEGER,
equalTo(5)
)
),
new TestCaseSupplier(
"single value int",
List.of(INTEGER, percentileType),
() -> new TestCaseSupplier.TestCase(
List.of(
new TestCaseSupplier.TypedData(List.of(55), INTEGER, "field"),
percentileWithType(randomIntBetween(0, 100), percentileType)
),
evaluatorString(INTEGER, percentileType),
INTEGER,
equalTo(55)
)
),
new TestCaseSupplier(
"p0 int",
List.of(INTEGER, percentileType),
() -> new TestCaseSupplier.TestCase(
List.of(
new TestCaseSupplier.TypedData(List.of(-10, 5, 10), INTEGER, "field"),
percentileWithType(0, percentileType)
),
evaluatorString(INTEGER, percentileType),
INTEGER,
equalTo(-10)
)
),
new TestCaseSupplier(
"p100 int",
List.of(INTEGER, percentileType),
() -> new TestCaseSupplier.TestCase(
List.of(
new TestCaseSupplier.TypedData(List.of(-10, 5, 10), INTEGER, "field"),
percentileWithType(100, percentileType)
),
evaluatorString(INTEGER, percentileType),
INTEGER,
equalTo(10)
)
),
new TestCaseSupplier(
"averaged int",
List.of(INTEGER, percentileType),
() -> new TestCaseSupplier.TestCase(
List.of(
new TestCaseSupplier.TypedData(List.of(-10, 5, 10), INTEGER, "field"),
percentileWithType(75, percentileType)
),
evaluatorString(INTEGER, percentileType),
INTEGER,
equalTo(7)
)
),
new TestCaseSupplier(
"big int difference",
List.of(INTEGER, percentileType),
() -> new TestCaseSupplier.TestCase(
List.of(
new TestCaseSupplier.TypedData(List.of(Integer.MIN_VALUE, Integer.MAX_VALUE), INTEGER, "field"),
percentileWithType(50, percentileType)
),
evaluatorString(INTEGER, percentileType),
INTEGER,
equalTo(-1) // Negative max is 1 smaller than positive max
)
),
// Long
new TestCaseSupplier(
"median long",
List.of(LONG, percentileType),
() -> new TestCaseSupplier.TestCase(
List.of(
new TestCaseSupplier.TypedData(List.of(-10L, 5L, 10L), LONG, "field"),
percentileWithType(50, percentileType)
),
evaluatorString(LONG, percentileType),
LONG,
equalTo(5L)
)
),
new TestCaseSupplier(
"single value long",
List.of(LONG, percentileType),
() -> new TestCaseSupplier.TestCase(
List.of(
new TestCaseSupplier.TypedData(List.of(55L), LONG, "field"),
percentileWithType(randomIntBetween(0, 100), percentileType)
),
evaluatorString(LONG, percentileType),
LONG,
equalTo(55L)
)
),
new TestCaseSupplier(
"p0 long",
List.of(LONG, percentileType),
() -> new TestCaseSupplier.TestCase(
List.of(
new TestCaseSupplier.TypedData(List.of(-10L, 5L, 10L), LONG, "field"),
percentileWithType(0, percentileType)
),
evaluatorString(LONG, percentileType),
LONG,
equalTo(-10L)
)
),
new TestCaseSupplier(
"p100 long",
List.of(LONG, percentileType),
() -> new TestCaseSupplier.TestCase(
List.of(
new TestCaseSupplier.TypedData(List.of(-10L, 5L, 10L), LONG, "field"),
percentileWithType(100, percentileType)
),
evaluatorString(LONG, percentileType),
LONG,
equalTo(10L)
)
),
new TestCaseSupplier(
"averaged long",
List.of(LONG, percentileType),
() -> new TestCaseSupplier.TestCase(
List.of(
new TestCaseSupplier.TypedData(List.of(-10L, 5L, 10L), LONG, "field"),
percentileWithType(75, percentileType)
),
evaluatorString(LONG, percentileType),
LONG,
equalTo(7L)
)
),
new TestCaseSupplier(
"big long difference",
List.of(LONG, percentileType),
() -> new TestCaseSupplier.TestCase(
List.of(
new TestCaseSupplier.TypedData(List.of(Long.MIN_VALUE, Long.MAX_VALUE), LONG, "field"),
percentileWithType(50, percentileType)
),
evaluatorString(LONG, percentileType),
LONG,
equalTo(0L)
)
)
)
);
for (var fieldType : List.of(INTEGER, LONG, DataType.DOUBLE)) {
cases.add(
new TestCaseSupplier(
"out of bounds percentile <" + fieldType + ", " + percentileType + ">",
List.of(fieldType, percentileType),
() -> {
var percentile = numberWithType(
randomBoolean() ? randomIntBetween(Integer.MIN_VALUE, -1) : randomIntBetween(101, Integer.MAX_VALUE),
percentileType
);
return new TestCaseSupplier.TestCase(
List.of(
new TestCaseSupplier.TypedData(numberWithType(0, fieldType), fieldType, "field"),
new TestCaseSupplier.TypedData(percentile, percentileType, "percentile")
),
evaluatorString(fieldType, percentileType),
fieldType,
nullValue()
).withWarning(
"Line 1:1: evaluation of [source] failed, treating result as null. Only first 20 failures recorded."
)
.withWarning(
"Line 1:1: java.lang.IllegalArgumentException: Percentile parameter must be "
+ "a number between 0 and 100, found ["
+ percentile.doubleValue()
+ "]"
);
}
)
);
}
}
cases.add(
new TestCaseSupplier(
"from example",
List.of(DOUBLE, INTEGER),
() -> new TestCaseSupplier.TestCase(
List.of(
new TestCaseSupplier.TypedData(List.of(-3.34, -6.33, 6.23, -0.31), DOUBLE, "field"),
new TestCaseSupplier.TypedData(75, INTEGER, "percentile")
),
evaluatorString(DOUBLE, INTEGER),
DOUBLE,
equalTo(1.325)
)
)
);
return parameterSuppliersFromTypedDataWithDefaultChecks(
(nullPosition, nullValueDataType, original) -> nullValueDataType == DataType.NULL && nullPosition == 0
? DataType.NULL
: original.expectedType(),
(nullPosition, nullData, original) -> original,
cases
);
}
@SuppressWarnings("unchecked")
private static TestCaseSupplier makeSupplier(
TestCaseSupplier.TypedDataSupplier fieldSupplier,
TestCaseSupplier.TypedDataSupplier percentileSupplier
) {
return new TestCaseSupplier(
"field: " + fieldSupplier.name() + ", percentile: " + percentileSupplier.name(),
List.of(fieldSupplier.type(), percentileSupplier.type()),
() -> {
var fieldTypedData = fieldSupplier.get();
var percentileTypedData = percentileSupplier.get();
var values = (List<Number>) fieldTypedData.data();
var percentile = ((Number) percentileTypedData.data()).doubleValue();
var expectedMatcher = makePercentileMatcher(values, percentile);
return new TestCaseSupplier.TestCase(
List.of(fieldTypedData, percentileTypedData),
evaluatorString(fieldSupplier.type(), percentileSupplier.type()),
fieldSupplier.type(),
expectedMatcher
);
}
);
}
private static Matcher<?> makePercentileMatcher(List<Number> rawValues, double percentile) {
if (rawValues.isEmpty() || percentile < 0 || percentile > 100) {
return nullValue();
}
if (rawValues.size() == 1) {
return equalTo(rawValues.get(0));
}
int valueCount = rawValues.size();
var p = percentile / 100.0;
var index = p * (valueCount - 1);
var lowerIndex = (int) index;
var upperIndex = lowerIndex + 1;
var fraction = index - lowerIndex;
if (rawValues.get(0) instanceof Integer) {
var values = rawValues.stream().mapToInt(Number::intValue).sorted().toArray();
int expected;
if (percentile == 0) {
expected = values[0];
} else if (percentile == 100) {
expected = values[valueCount - 1];
} else {
assert lowerIndex >= 0 && upperIndex < valueCount;
var difference = (long) values[upperIndex] - values[lowerIndex];
expected = values[lowerIndex] + (int) (fraction * difference);
}
return equalTo(expected);
}
if (rawValues.get(0) instanceof Long) {
var values = rawValues.stream().mapToLong(Number::longValue).sorted().toArray();
long expected;
if (percentile == 0) {
expected = values[0];
} else if (percentile == 100) {
expected = values[valueCount - 1];
} else {
assert lowerIndex >= 0 && upperIndex < valueCount;
expected = calculatePercentile(fraction, BigDecimal.valueOf(values[lowerIndex]), BigDecimal.valueOf(values[upperIndex]))
.longValue();
}
// Double*bigLong may lose precision, we allow a small range
return anyOf(equalTo(Math.min(expected, expected - 1)), equalTo(expected), equalTo(Math.max(expected, expected + 1)));
}
if (rawValues.get(0) instanceof Double) {
var values = rawValues.stream().mapToDouble(Number::doubleValue).sorted().toArray();
double expected;
if (percentile == 0) {
expected = values[0];
} else if (percentile == 100) {
expected = values[valueCount - 1];
} else {
assert lowerIndex >= 0 && upperIndex < valueCount;
expected = calculatePercentile(fraction, new BigDecimal(values[lowerIndex]), new BigDecimal(values[upperIndex]))
.doubleValue();
}
return closeTo(expected, Math.abs(expected * 0.0000001));
}
throw new IllegalArgumentException("Unsupported type: " + rawValues.get(0).getClass());
}
private static BigDecimal calculatePercentile(double fraction, BigDecimal lowerValue, BigDecimal upperValue) {
var difference = upperValue.subtract(lowerValue);
return lowerValue.add(new BigDecimal(fraction).multiply(difference));
}
private static TestCaseSupplier.TypedData percentileWithType(Number value, DataType type) {
return new TestCaseSupplier.TypedData(numberWithType(value, type), type, "percentile");
}
private static Number numberWithType(Number value, DataType type) {
return switch (type) {
case INTEGER -> value.intValue();
case LONG -> value.longValue();
default -> value.doubleValue();
};
}
private static String evaluatorString(DataType fieldDataType, DataType percentileDataType) {
var fieldTypeName = StringUtils.underscoreToLowerCamelCase(fieldDataType.name());
fieldTypeName = fieldTypeName.substring(0, 1).toUpperCase(Locale.ROOT) + fieldTypeName.substring(1);
var percentileEvaluator = TestCaseSupplier.castToDoubleEvaluator("Attribute[channel=1]", percentileDataType);
return "MvPercentile" + fieldTypeName + "Evaluator[values=Attribute[channel=0], percentile=" + percentileEvaluator + "]";
}
@Override
protected final Expression build(Source source, List<Expression> args) {
return new MvPercentile(source, args.get(0), args.get(1));
}
}
| MvPercentileTests |
java | elastic__elasticsearch | plugins/examples/custom-processor/src/main/java/org/elasticsearch/example/customprocessor/ExampleProcessorPlugin.java | {
"start": 683,
"end": 957
} | class ____ extends Plugin implements IngestPlugin {
@Override
public Map<String, Processor.Factory> getProcessors(Processor.Parameters parameters) {
return Map.of(ExampleRepeatProcessor.TYPE, new ExampleRepeatProcessor.Factory());
}
}
| ExampleProcessorPlugin |
java | apache__kafka | streams/src/main/java/org/apache/kafka/streams/kstream/internals/KStreamKStreamJoinRightSide.java | {
"start": 1292,
"end": 2228
} | class ____<K, VLeft, VRight, VOut> extends KStreamKStreamJoin<K, VLeft, VRight, VOut, VRight, VLeft> {
KStreamKStreamJoinRightSide(final JoinWindowsInternal windows,
final ValueJoinerWithKey<? super K, ? super VRight, ? super VLeft, ? extends VOut> joiner,
final boolean outer,
final TimeTrackerSupplier sharedTimeTrackerSupplier,
final StoreFactory otherWindowStoreFactory,
final Optional<StoreFactory> outerJoinWindowStoreFactory) {
super(windows, joiner, outer, windows.afterMs, windows.beforeMs, sharedTimeTrackerSupplier,
otherWindowStoreFactory, outerJoinWindowStoreFactory);
}
@Override
public Processor<K, VRight, K, VOut> get() {
return new KStreamKStreamRightJoinProcessor();
}
private | KStreamKStreamJoinRightSide |
java | spring-projects__spring-framework | spring-beans/src/main/java/org/springframework/beans/factory/config/AutowiredPropertyMarker.java | {
"start": 771,
"end": 1424
} | class ____ an individually autowired property value, to be added
* to {@link BeanDefinition#getPropertyValues()} for a specific bean property.
*
* <p>At runtime, this will be replaced with a {@link DependencyDescriptor}
* for the corresponding bean property's write method, eventually to be resolved
* through a {@link AutowireCapableBeanFactory#resolveDependency} step.
*
* @author Juergen Hoeller
* @since 5.2
* @see AutowireCapableBeanFactory#resolveDependency
* @see BeanDefinition#getPropertyValues()
* @see org.springframework.beans.factory.support.BeanDefinitionBuilder#addAutowiredProperty
*/
@SuppressWarnings("serial")
public final | for |
java | apache__camel | core/camel-core-model/src/main/java/org/apache/camel/model/SetBodyDefinition.java | {
"start": 1294,
"end": 2229
} | class ____ extends ExpressionNode {
public SetBodyDefinition() {
}
protected SetBodyDefinition(SetBodyDefinition source) {
super(source);
}
public SetBodyDefinition(Expression expression) {
super(expression);
}
@Override
public SetBodyDefinition copyDefinition() {
return new SetBodyDefinition(this);
}
@Override
public String toString() {
return "SetBody[" + getExpression() + "]";
}
@Override
public String getShortName() {
return "setBody";
}
@Override
public String getLabel() {
return "setBody[" + getExpression() + "]";
}
/**
* Expression that returns the new body to use
*/
@Override
public void setExpression(ExpressionDefinition expression) {
// override to include javadoc what the expression is used for
super.setExpression(expression);
}
}
| SetBodyDefinition |
java | eclipse-vertx__vert.x | vertx-core/src/main/java/io/vertx/core/internal/pool/Executor.java | {
"start": 538,
"end": 594
} | interface ____<S> {
/**
* The action.
*/
| Executor |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/inheritance/discriminator/joinedsubclass/RootEntity.java | {
"start": 605,
"end": 706
} | class ____ {
@Id
@GeneratedValue
private Long id;
public Long getId() {
return id;
}
}
| RootEntity |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.