| language | repo | path | class_span | source | target |
|---|---|---|---|---|---|
java | apache__camel | components/camel-test/camel-test-junit5/src/test/java/org/apache/camel/test/junit5/patterns/IsAutoStartupExcludePatternsTest.java | {
"start": 1047,
"end": 2097
} | class ____ extends CamelTestSupport {
@Override
public String isAutoStartupExcludePatterns() {
return "myRoute,timer*";
}
@Test
public void testRouteNotAutoStartedThenStarted() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMessageCount(0);
mock.setAssertPeriod(50);
template.sendBody("direct:start", "Hello World");
MockEndpoint.assertIsSatisfied(context);
mock.reset();
mock.expectedMessageCount(1);
context.getRouteController().startRoute("myRoute");
MockEndpoint.assertIsSatisfied(context);
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("timer:tick?delay=1").to("mock:result");
from("direct:start").to("seda:foo");
from("seda:foo").id("myRoute").to("mock:result");
}
};
}
}
| IsAutoStartupExcludePatternsTest |
java | assertj__assertj-core | assertj-core/src/main/java/org/assertj/core/api/AbstractSoftAssertions.java | {
"start": 5143,
"end": 5517
} | class ____ expected to be thrown but had
* not been.
* @since 2.6.0 / 3.6.0
*
* @see #shouldHaveThrown(Class)
*/
@Contract("_ -> fail")
public void failBecauseExceptionWasNotThrown(Class<? extends Throwable> throwableClass) {
shouldHaveThrown(throwableClass);
}
/**
* Fails with a message explaining that a {@link Throwable} of given | was |
java | google__guava | android/guava/src/com/google/common/graph/Graphs.java | {
"start": 15240,
"end": 16683
} | class ____<N, V> extends ForwardingValueGraph<N, V> {
private final ValueGraph<N, V> graph;
TransposedValueGraph(ValueGraph<N, V> graph) {
this.graph = graph;
}
@Override
ValueGraph<N, V> delegate() {
return graph;
}
@Override
public Set<N> predecessors(N node) {
return delegate().successors(node); // transpose
}
@Override
public Set<N> successors(N node) {
return delegate().predecessors(node); // transpose
}
@Override
public int inDegree(N node) {
return delegate().outDegree(node); // transpose
}
@Override
public int outDegree(N node) {
return delegate().inDegree(node); // transpose
}
@Override
public boolean hasEdgeConnecting(N nodeU, N nodeV) {
return delegate().hasEdgeConnecting(nodeV, nodeU); // transpose
}
@Override
public boolean hasEdgeConnecting(EndpointPair<N> endpoints) {
return delegate().hasEdgeConnecting(transpose(endpoints));
}
@Override
public @Nullable V edgeValueOrDefault(N nodeU, N nodeV, @Nullable V defaultValue) {
return delegate().edgeValueOrDefault(nodeV, nodeU, defaultValue); // transpose
}
@Override
public @Nullable V edgeValueOrDefault(EndpointPair<N> endpoints, @Nullable V defaultValue) {
return delegate().edgeValueOrDefault(transpose(endpoints), defaultValue);
}
}
private static final | TransposedValueGraph |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/MergeThread.java | {
"start": 1257,
"end": 3841
} | class ____<T,K,V> extends SubjectInheritingThread {
private static final Logger LOG = LoggerFactory.getLogger(MergeThread.class);
private AtomicInteger numPending = new AtomicInteger(0);
private LinkedList<List<T>> pendingToBeMerged;
protected final MergeManagerImpl<K,V> manager;
private final ExceptionReporter reporter;
private boolean closed = false;
private final int mergeFactor;
public MergeThread(MergeManagerImpl<K,V> manager, int mergeFactor,
ExceptionReporter reporter) {
this.pendingToBeMerged = new LinkedList<List<T>>();
this.manager = manager;
this.mergeFactor = mergeFactor;
this.reporter = reporter;
}
public synchronized void close() throws InterruptedException {
closed = true;
waitForMerge();
interrupt();
}
public void startMerge(Set<T> inputs) {
if (!closed) {
numPending.incrementAndGet();
List<T> toMergeInputs = new ArrayList<T>();
Iterator<T> iter=inputs.iterator();
for (int ctr = 0; iter.hasNext() && ctr < mergeFactor; ++ctr) {
toMergeInputs.add(iter.next());
iter.remove();
}
LOG.info(getName() + ": Starting merge with " + toMergeInputs.size() +
" segments, while ignoring " + inputs.size() + " segments");
synchronized(pendingToBeMerged) {
pendingToBeMerged.addLast(toMergeInputs);
pendingToBeMerged.notifyAll();
}
}
}
public synchronized void waitForMerge() throws InterruptedException {
while (numPending.get() > 0) {
wait();
}
}
public void work() {
while (true) {
List<T> inputs = null;
try {
// Wait for notification to start the merge...
synchronized (pendingToBeMerged) {
while(pendingToBeMerged.size() <= 0) {
pendingToBeMerged.wait();
}
// Pickup the inputs to merge.
inputs = pendingToBeMerged.removeFirst();
}
// Merge
merge(inputs);
} catch (InterruptedException ie) {
numPending.set(0);
return;
} catch(Throwable t) {
numPending.set(0);
reporter.reportException(t);
return;
} finally {
synchronized (this) {
numPending.decrementAndGet();
notifyAll();
}
}
}
}
public abstract void merge(List<T> inputs) throws IOException;
@VisibleForTesting
int getMergeFactor() {
return mergeFactor;
}
@VisibleForTesting
LinkedList<List<T>> getPendingToBeMerged() {
return pendingToBeMerged;
}
}
| MergeThread |
java | elastic__elasticsearch | x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsTaskSettingsTests.java | {
"start": 841,
"end": 4892
} | class ____ extends AbstractWireSerializingTestCase<AzureOpenAiEmbeddingsTaskSettings> {
public static AzureOpenAiEmbeddingsTaskSettings createRandomWithUser() {
return new AzureOpenAiEmbeddingsTaskSettings(randomAlphaOfLength(15));
}
public void testIsEmpty() {
var randomSettings = createRandom();
var stringRep = Strings.toString(randomSettings);
assertEquals(stringRep, randomSettings.isEmpty(), stringRep.equals("{}"));
}
/**
* The created settings can have the user set to null.
*/
public static AzureOpenAiEmbeddingsTaskSettings createRandom() {
return new AzureOpenAiEmbeddingsTaskSettings(randomAlphaOfLengthOrNull(15));
}
public void testUpdatedTaskSettings() {
var initialSettings = createRandom();
var newSettings = createRandom();
AzureOpenAiEmbeddingsTaskSettings updatedSettings = (AzureOpenAiEmbeddingsTaskSettings) initialSettings.updatedTaskSettings(
newSettings.user() == null ? Map.of() : Map.of(USER, newSettings.user())
);
if (newSettings.user() == null) {
assertEquals(initialSettings.user(), updatedSettings.user());
} else {
assertEquals(newSettings.user(), updatedSettings.user());
}
}
public void testFromMap_WithUser() {
assertEquals(
new AzureOpenAiEmbeddingsTaskSettings("user"),
AzureOpenAiEmbeddingsTaskSettings.fromMap(new HashMap<>(Map.of(USER, "user")))
);
}
public void testFromMap_UserIsEmptyString() {
var thrownException = expectThrows(
ValidationException.class,
() -> AzureOpenAiEmbeddingsTaskSettings.fromMap(new HashMap<>(Map.of(USER, "")))
);
MatcherAssert.assertThat(
thrownException.getMessage(),
is(Strings.format("Validation Failed: 1: [task_settings] Invalid value empty string. [user] must be a non-empty string;"))
);
}
public void testFromMap_MissingUser_DoesNotThrowException() {
var taskSettings = AzureOpenAiEmbeddingsTaskSettings.fromMap(new HashMap<>(Map.of()));
assertNull(taskSettings.user());
}
public void testOverrideWith_KeepsOriginalValuesWithOverridesAreNull() {
var taskSettings = AzureOpenAiEmbeddingsTaskSettings.fromMap(new HashMap<>(Map.of(USER, "user")));
var overriddenTaskSettings = AzureOpenAiEmbeddingsTaskSettings.of(
taskSettings,
AzureOpenAiEmbeddingsRequestTaskSettings.EMPTY_SETTINGS
);
MatcherAssert.assertThat(overriddenTaskSettings, is(taskSettings));
}
public void testOverrideWith_UsesOverriddenSettings() {
var taskSettings = AzureOpenAiEmbeddingsTaskSettings.fromMap(new HashMap<>(Map.of(USER, "user")));
var requestTaskSettings = AzureOpenAiEmbeddingsRequestTaskSettings.fromMap(new HashMap<>(Map.of(USER, "user2")));
var overriddenTaskSettings = AzureOpenAiEmbeddingsTaskSettings.of(taskSettings, requestTaskSettings);
MatcherAssert.assertThat(overriddenTaskSettings, is(new AzureOpenAiEmbeddingsTaskSettings("user2")));
}
@Override
protected Writeable.Reader<AzureOpenAiEmbeddingsTaskSettings> instanceReader() {
return AzureOpenAiEmbeddingsTaskSettings::new;
}
@Override
protected AzureOpenAiEmbeddingsTaskSettings createTestInstance() {
return createRandomWithUser();
}
@Override
protected AzureOpenAiEmbeddingsTaskSettings mutateInstance(AzureOpenAiEmbeddingsTaskSettings instance) throws IOException {
String user = randomValueOtherThan(instance.user(), () -> randomAlphaOfLengthOrNull(15));
return new AzureOpenAiEmbeddingsTaskSettings(user);
}
public static Map<String, Object> getAzureOpenAiRequestTaskSettingsMap(@Nullable String user) {
var map = new HashMap<String, Object>();
if (user != null) {
map.put(USER, user);
}
return map;
}
}
| AzureOpenAiEmbeddingsTaskSettingsTests |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/dao/gpu/PerGpuDeviceInformation.java | {
"start": 1736,
"end": 2274
} | class ____ extends
XmlAdapter<String, Float> {
@Override
public String marshal(Float v) throws Exception {
if (v == null) {
return "";
}
return String.valueOf(v);
}
@Override
public Float unmarshal(String v) throws Exception {
if (v == null) {
return -1f;
}
return Float.valueOf(v.split(" ")[0]);
}
}
/**
* Convert formats like "725 MiB" to long.
*/
@InterfaceAudience.Private
@InterfaceStability.Unstable
static | StrToFloatBeforeSpaceAdapter |
java | elastic__elasticsearch | modules/aggregations/src/test/java/org/elasticsearch/aggregations/metric/RunningStatsTests.java | {
"start": 598,
"end": 5287
} | class ____ extends BaseMatrixStatsTestCase {
/** test running stats */
public void testRunningStats() throws Exception {
final MatrixStatsResults results = new MatrixStatsResults(createRunningStats(fieldA, fieldB));
actualStats.assertNearlyEqual(results);
}
/** Test merging stats across observation shards */
public void testMergedStats() throws Exception {
// slice observations into shards
int numShards = randomIntBetween(2, 10);
double obsPerShard = Math.floor(numObs / numShards);
int start = 0;
RunningStats stats = null;
List<Double> fieldAShard, fieldBShard;
for (int s = 0; s < numShards - 1; start = ++s * (int) obsPerShard) {
fieldAShard = fieldA.subList(start, start + (int) obsPerShard);
fieldBShard = fieldB.subList(start, start + (int) obsPerShard);
if (stats == null) {
stats = createRunningStats(fieldAShard, fieldBShard);
} else {
stats.merge(createRunningStats(fieldAShard, fieldBShard));
}
}
stats.merge(createRunningStats(fieldA.subList(start, fieldA.size()), fieldB.subList(start, fieldB.size())));
final MatrixStatsResults results = new MatrixStatsResults(stats);
actualStats.assertNearlyEqual(results);
}
public void testEmptyRunningStatsMissingFieldNames() throws Exception {
final List<RunningStats> runningStats = Arrays.asList(
new RunningStats(new String[] { "b", "a", "c" }, new double[] { 10.0d, 30.0d, 25.0d }), // if moving this item change last two
// assertions
new RunningStats(new String[] { "a", "b", "c", "d" }, new double[] { 1.0d, 2.0d, 3.0d, 4.0d }),
new RunningStats(new String[] { "a", "a", "x", "x" }, new double[] { 17.0d, 28.0d, 32.0d, 44.0d }),
new RunningStats(new String[] { "a", "c" }, new double[] { 100.0d, 200.0d }),
new RunningStats(new String[] { "x", "y", "z" }, new double[] { 11.0d, 35.0d, 20.0d }),
new RunningStats(new String[] { "A", "B", "C" }, new double[] { 11.0d, 35.0d, 20.0d }),
new RunningStats(new String[] { "a" }, new double[] { 15.0d })
);
final RunningStats otherRunningStat = new RunningStats(new String[] { "a", "b", "c" }, new double[] { -12.3, 0.0, 203.56d });
final RunningStats emptyStats = new RunningStats();
assertTrue(otherRunningStat.missingFieldNames(null).isEmpty());
assertTrue(emptyStats.missingFieldNames(otherRunningStat).isEmpty());
assertTrue(otherRunningStat.missingFieldNames(emptyStats).isEmpty());
for (int i = 0; i < runningStats.size(); i++) {
final RunningStats a = runningStats.get(i);
for (int j = 0; j < runningStats.size(); j++) {
final RunningStats b = runningStats.get(j);
assertEquals("Error while merging running stats " + i + " and " + j, i == j, a.missingFieldNames(b).isEmpty());
assertEquals("Error while merging running stats " + i + " and " + j, i == j, b.missingFieldNames(a).isEmpty());
}
assertEquals("Error while merging running stats " + i, i == 0, a.missingFieldNames(otherRunningStat).isEmpty());
assertEquals("Error while merging running stats " + i, i == 0, otherRunningStat.missingFieldNames(a).isEmpty());
}
}
public void testRunningStatsMissingFieldNames() throws Exception {
final RunningStats a = new RunningStats(new String[] { "x", "y", "z" }, new double[] { 11.0d, 35.0d, 20.0d });
final RunningStats b = new RunningStats(new String[] { "x", "a", "c" }, new double[] { 2.0d, 5.0d, 7.0d });
assertEquals(a.missingFieldNames(b), new HashSet<>(Arrays.asList("a", "c", "y", "z")));
assertEquals(b.missingFieldNames(a), new HashSet<>(Arrays.asList("a", "c", "y", "z")));
}
private RunningStats createRunningStats(List<Double> fieldAObs, List<Double> fieldBObs) {
RunningStats stats = new RunningStats();
// create a document with two numeric fields
final String[] fieldNames = new String[2];
fieldNames[0] = fieldAKey;
fieldNames[1] = fieldBKey;
final double[] fieldVals = new double[2];
// running stats computation
for (int n = 0; n < fieldAObs.size(); ++n) {
fieldVals[0] = fieldAObs.get(n);
fieldVals[1] = fieldBObs.get(n);
stats.add(fieldNames, fieldVals);
}
return stats;
}
}
| RunningStatsTests |
java | elastic__elasticsearch | modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterFactory.java | {
"start": 1449,
"end": 4249
} | class ____ extends AbstractTokenFilterFactory {
private final AnalysisPredicateScript.Factory factory;
private final List<String> filterNames;
ScriptedConditionTokenFilterFactory(IndexSettings indexSettings, String name, Settings settings, ScriptService scriptService) {
super(name);
Settings scriptSettings = settings.getAsSettings("script");
Script script = Script.parse(scriptSettings);
if (script.getType() != ScriptType.INLINE) {
throw new IllegalArgumentException("Cannot use stored scripts in tokenfilter [" + name + "]");
}
this.factory = scriptService.compile(script, AnalysisPredicateScript.CONTEXT);
this.filterNames = settings.getAsList("filter");
if (this.filterNames.isEmpty()) {
throw new IllegalArgumentException("Empty list of filters provided to tokenfilter [" + name + "]");
}
}
@Override
public TokenStream create(TokenStream tokenStream) {
throw new UnsupportedOperationException("getChainAwareTokenFilterFactory should be called first");
}
@Override
public TokenFilterFactory getChainAwareTokenFilterFactory(
IndexCreationContext context,
TokenizerFactory tokenizer,
List<CharFilterFactory> charFilters,
List<TokenFilterFactory> previousTokenFilters,
Function<String, TokenFilterFactory> allFilters
) {
List<TokenFilterFactory> filters = new ArrayList<>();
List<TokenFilterFactory> existingChain = new ArrayList<>(previousTokenFilters);
for (String filter : filterNames) {
TokenFilterFactory tff = allFilters.apply(filter);
if (tff == null) {
throw new IllegalArgumentException(
"ScriptedConditionTokenFilter [" + name() + "] refers to undefined token filter [" + filter + "]"
);
}
tff = tff.getChainAwareTokenFilterFactory(context, tokenizer, charFilters, existingChain, allFilters);
filters.add(tff);
existingChain.add(tff);
}
return new TokenFilterFactory() {
@Override
public String name() {
return ScriptedConditionTokenFilterFactory.this.name();
}
@Override
public TokenStream create(TokenStream tokenStream) {
Function<TokenStream, TokenStream> filter = in -> {
for (TokenFilterFactory tff : filters) {
in = tff.create(in);
}
return in;
};
return new ScriptedConditionTokenFilter(tokenStream, filter, factory.newInstance());
}
};
}
private static | ScriptedConditionTokenFilterFactory |
java | eclipse-vertx__vert.x | vertx-core/src/main/java/io/vertx/core/json/jackson/HybridJacksonPool.java | {
"start": 3856,
"end": 6123
} | class ____ implements RecyclerPool<BufferRecycler> {
private static final int CACHE_LINE_SHIFT = 4;
private static final int CACHE_LINE_PADDING = 1 << CACHE_LINE_SHIFT;
private final XorShiftThreadProbe threadProbe;
private final AtomicReferenceArray<Node> topStacks;
private final int stripesCount;
public StripedLockFreePool(int stripesCount) {
if (stripesCount <= 0) {
throw new IllegalArgumentException("Expecting a stripesCount that is larger than 0");
}
this.stripesCount = stripesCount;
int size = roundToPowerOfTwo(stripesCount);
this.topStacks = new AtomicReferenceArray<>(size * CACHE_LINE_PADDING);
int mask = (size - 1) << CACHE_LINE_SHIFT;
this.threadProbe = new XorShiftThreadProbe(mask);
}
public int size() {
return stackSizes().sum();
}
public int[] stackStats() {
return stackSizes().toArray();
}
private IntStream stackSizes() {
return IntStream.range(0, stripesCount).map(i -> {
Node node = topStacks.get(i * CACHE_LINE_PADDING);
return node == null ? 0 : node.level;
});
}
@Override
public BufferRecycler acquirePooled() {
int index = threadProbe.index();
Node currentHead = topStacks.get(index);
while (true) {
if (currentHead == null) {
return new VThreadBufferRecycler(index);
}
if (topStacks.compareAndSet(index, currentHead, currentHead.next)) {
currentHead.next = null;
return currentHead.value;
} else {
currentHead = topStacks.get(index);
}
}
}
@Override
public void releasePooled(BufferRecycler recycler) {
VThreadBufferRecycler vThreadBufferRecycler = (VThreadBufferRecycler) recycler;
Node newHead = new Node(vThreadBufferRecycler);
Node next = topStacks.get(vThreadBufferRecycler.slot);
while (true) {
newHead.level = next == null ? 1 : next.level + 1;
if (topStacks.compareAndSet(vThreadBufferRecycler.slot, next, newHead)) {
newHead.next = next;
return;
} else {
next = topStacks.get(vThreadBufferRecycler.slot);
}
}
}
private static | StripedLockFreePool |
java | elastic__elasticsearch | x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiResponseHandlerTests.java | {
"start": 769,
"end": 3783
} | class ____ extends ESTestCase {
public void testBuildRateLimitErrorMessage() {
int statusCode = 429;
var statusLine = mock(StatusLine.class);
when(statusLine.getStatusCode()).thenReturn(statusCode);
var response = mock(HttpResponse.class);
when(response.getStatusLine()).thenReturn(statusLine);
var httpResult = new HttpResult(response, new byte[] {});
{
when(response.getFirstHeader(AzureOpenAiResponseHandler.REMAINING_REQUESTS)).thenReturn(
new BasicHeader(AzureOpenAiResponseHandler.REMAINING_REQUESTS, "2999")
);
when(response.getFirstHeader(AzureOpenAiResponseHandler.REMAINING_TOKENS)).thenReturn(
new BasicHeader(AzureOpenAiResponseHandler.REMAINING_TOKENS, "99800")
);
var error = AzureOpenAiResponseHandler.buildRateLimitErrorMessage(httpResult);
assertThat(error, containsString("Remaining tokens [99800]. Remaining requests [2999]"));
}
{
when(response.getFirstHeader(AzureOpenAiResponseHandler.REMAINING_TOKENS)).thenReturn(null);
var error = AzureOpenAiResponseHandler.buildRateLimitErrorMessage(httpResult);
assertThat(error, containsString("Remaining tokens [unknown]. Remaining requests [2999]"));
}
{
when(response.getFirstHeader(AzureOpenAiResponseHandler.REMAINING_REQUESTS)).thenReturn(
new BasicHeader(AzureOpenAiResponseHandler.REMAINING_REQUESTS, "2999")
);
when(response.getFirstHeader(AzureOpenAiResponseHandler.REMAINING_TOKENS)).thenReturn(null);
var error = AzureOpenAiResponseHandler.buildRateLimitErrorMessage(httpResult);
assertThat(error, containsString("Remaining tokens [unknown]. Remaining requests [2999]"));
}
}
private static HttpResult createContentTooLargeResult(int statusCode) {
return createResult(
statusCode,
"This model's maximum context length is 8192 tokens, however you requested 13531 tokens (13531 in your prompt;"
+ "0 for the completion). Please reduce your prompt; or completion length."
);
}
private static HttpResult createResult(int statusCode, String message) {
var statusLine = mock(StatusLine.class);
when(statusLine.getStatusCode()).thenReturn(statusCode);
var httpResponse = mock(HttpResponse.class);
when(httpResponse.getStatusLine()).thenReturn(statusLine);
String responseJson = Strings.format("""
{
"error": {
"message": "%s",
"type": "content_too_large",
"param": null,
"code": null
}
}
""", message);
return new HttpResult(httpResponse, responseJson.getBytes(StandardCharsets.UTF_8));
}
}
| AzureOpenAiResponseHandlerTests |
java | apache__camel | components/camel-salesforce/camel-salesforce-component/src/main/java/org/apache/camel/component/salesforce/internal/client/AbstractClientBase.java | {
"start": 3130,
"end": 7033
} | class ____ extends ServiceSupport
implements SalesforceSession.SalesforceSessionListener, HttpClientHolder {
protected static final String APPLICATION_JSON_UTF8 = "application/json;charset=utf-8";
protected static final String APPLICATION_XML_UTF8 = "application/xml;charset=utf-8";
private static final int DEFAULT_TERMINATION_TIMEOUT = 10;
protected final Logger log = LoggerFactory.getLogger(getClass());
protected final SalesforceHttpClient httpClient;
protected final SalesforceSession session;
protected final SalesforceLoginConfig loginConfig;
protected final String version;
protected String accessToken;
protected String instanceUrl;
private Phaser inflightRequests;
private final long terminationTimeout;
private final ObjectMapper objectMapper;
public AbstractClientBase(String version, SalesforceSession session, SalesforceHttpClient httpClient,
SalesforceLoginConfig loginConfig) {
this(version, session, httpClient, loginConfig, DEFAULT_TERMINATION_TIMEOUT);
}
AbstractClientBase(String version, SalesforceSession session, SalesforceHttpClient httpClient,
SalesforceLoginConfig loginConfig, int terminationTimeout) {
this.version = version;
this.session = session;
this.httpClient = httpClient;
this.loginConfig = loginConfig;
this.terminationTimeout = terminationTimeout;
this.objectMapper = JsonUtils.createObjectMapper();
}
@Override
protected void doStart() throws Exception {
super.doStart();
// local cache
accessToken = session.getAccessToken();
if (accessToken == null && !loginConfig.isLazyLogin()) {
try {
accessToken = session.login(null);
} catch (SalesforceException e) {
throw new RuntimeException(e);
}
}
instanceUrl = session.getInstanceUrl();
// also register this client as a session listener
session.addListener(this);
inflightRequests = new Phaser(1);
}
@Override
public void doStop() throws Exception {
super.doStop();
if (inflightRequests != null) {
inflightRequests.arrive();
if (!inflightRequests.isTerminated()) {
try {
inflightRequests.awaitAdvanceInterruptibly(0, terminationTimeout, TimeUnit.SECONDS);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
} catch (TimeoutException ignored) {
// exception is ignored
}
}
}
// deregister listener
session.removeListener(this);
}
@Override
public void onLogin(String accessToken, String instanceUrl) {
if (!accessToken.equals(this.accessToken)) {
this.accessToken = accessToken;
this.instanceUrl = instanceUrl;
}
}
@Override
public void onLogout() {
// ignore, if this client makes another request with stale token,
// SalesforceSecurityListener will auto login!
}
protected Request getRequest(HttpMethod method, String url, Map<String, List<String>> headers) {
return getRequest(method.asString(), url, headers);
}
protected Request getRequest(String method, String url, Map<String, List<String>> headers) {
HttpRequest request
= (HttpRequest) httpClient.newHttpRequest(new HttpConversation(), URI.create(url)).method(method)
.timeout(session.getTimeout(), TimeUnit.MILLISECONDS);
request.getConversation().setAttribute(SalesforceSecurityHandler.CLIENT_ATTRIBUTE, this);
addHeadersTo(request, headers);
return request;
}
protected | AbstractClientBase |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/index/codec/vectors/diskbbq/next/QuantEncodingTests.java | {
"start": 580,
"end": 3828
} | class ____ extends ESTestCase {
public void testSingleBitNibbles() {
ESNextDiskBBQVectorsFormat.QuantEncoding encoding = ESNextDiskBBQVectorsFormat.QuantEncoding.ONE_BIT_4BIT_QUERY;
int discretized = encoding.discretizedDimensions(randomIntBetween(1, 1024));
// should discretize to something that can be packed into bytes from bits and nibbles
assertEquals(0, discretized % 2);
assertEquals(0, discretized % 8);
}
public void testSingleBitNibblesPackSize() {
ESNextDiskBBQVectorsFormat.QuantEncoding encoding = ESNextDiskBBQVectorsFormat.QuantEncoding.ONE_BIT_4BIT_QUERY;
assertEquals(1, encoding.getDocPackedLength(3));
assertEquals(4, encoding.getQueryPackedLength(3));
assertEquals(1, encoding.getDocPackedLength(8));
assertEquals(4, encoding.getQueryPackedLength(8));
assertEquals(2, encoding.getDocPackedLength(15));
assertEquals(2, encoding.getDocPackedLength(16));
assertEquals(8, encoding.getQueryPackedLength(15));
assertEquals(8, encoding.getQueryPackedLength(16));
}
public void testDibitAndNibbles() {
ESNextDiskBBQVectorsFormat.QuantEncoding encoding = ESNextDiskBBQVectorsFormat.QuantEncoding.TWO_BIT_4BIT_QUERY;
int discretized = encoding.discretizedDimensions(randomIntBetween(1, 1024));
// should discretize to something that can be packed into bytes from two bits and nibbles
assertEquals(0, discretized % 2);
assertEquals(0, discretized % 4);
}
public void testDibitAndNibblesPackSize() {
ESNextDiskBBQVectorsFormat.QuantEncoding encoding = ESNextDiskBBQVectorsFormat.QuantEncoding.TWO_BIT_4BIT_QUERY;
assertEquals(2, encoding.getDocPackedLength(3));
assertEquals(4, encoding.getQueryPackedLength(3));
assertEquals(2, encoding.getDocPackedLength(8));
assertEquals(4, encoding.getQueryPackedLength(8));
assertEquals(4, encoding.getDocPackedLength(15));
assertEquals(4, encoding.getDocPackedLength(16));
assertEquals(8, encoding.getQueryPackedLength(15));
assertEquals(8, encoding.getQueryPackedLength(16));
}
public void testHalfByteAndNibbles() {
ESNextDiskBBQVectorsFormat.QuantEncoding encoding = ESNextDiskBBQVectorsFormat.QuantEncoding.FOUR_BIT_SYMMETRIC;
int discretized = encoding.discretizedDimensions(randomIntBetween(1, 1024));
// should discretize to something that can be packed into bytes from four bits and nibbles
assertEquals(0, discretized % 2);
}
public void testHalfByteAndNibblesPackSize() {
ESNextDiskBBQVectorsFormat.QuantEncoding encoding = ESNextDiskBBQVectorsFormat.QuantEncoding.FOUR_BIT_SYMMETRIC;
assertEquals(4, encoding.getDocPackedLength(3));
assertEquals(4, encoding.getQueryPackedLength(3));
assertEquals(4, encoding.getDocPackedLength(8));
assertEquals(4, encoding.getQueryPackedLength(8));
assertEquals(8, encoding.getDocPackedLength(16));
assertEquals(8, encoding.getDocPackedLength(16));
assertEquals(8, encoding.getQueryPackedLength(16));
assertEquals(8, encoding.getQueryPackedLength(16));
}
}
| QuantEncodingTests |
java | quarkusio__quarkus | independent-projects/resteasy-reactive/server/processor/src/main/java/org/jboss/resteasy/reactive/server/processor/generation/multipart/FormDataOutputMapperGenerator.java | {
"start": 11983,
"end": 12337
} | class ____ '"
+ currentClassInHierarchy.name()
+ "' is not part of the Jandex index so its fields will be ignored. If you intended to include these fields, consider making the dependency part of the Jandex index by following the advice at: https://quarkus.io/guides/cdi-reference#bean_discovery");
}
}
}
| of |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/ops/OneToManyMappedByCascadeDeleteTest.java | {
"start": 1980,
"end": 2485
} | class ____ {
@Id
private Integer id;
@OneToMany(targetEntity = Child.class, mappedBy = "parent", fetch = FetchType.LAZY)
@Cascade(CascadeType.REMOVE)
private List<Child> children = new ArrayList<>();
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public List<Child> getChildren() {
return children;
}
public void setChildren(List<Child> children) {
this.children = children;
}
}
@Entity(name = "child")
public static | Parent |
java | elastic__elasticsearch | modules/ingest-attachment/src/main/java/org/elasticsearch/ingest/attachment/AttachmentProcessor.java | {
"start": 10426,
"end": 13572
} | class ____ implements Processor.Factory {
static final Set<Property> DEFAULT_PROPERTIES = EnumSet.allOf(Property.class);
@Override
public AttachmentProcessor create(
Map<String, Processor.Factory> registry,
String processorTag,
String description,
Map<String, Object> config,
ProjectId projectId
) {
String field = readStringProperty(TYPE, processorTag, config, "field");
String resourceName = readOptionalStringProperty(TYPE, processorTag, config, "resource_name");
String targetField = readStringProperty(TYPE, processorTag, config, "target_field", "attachment");
List<String> propertyNames = readOptionalList(TYPE, processorTag, config, "properties");
int indexedChars = readIntProperty(TYPE, processorTag, config, "indexed_chars", NUMBER_OF_CHARS_INDEXED);
boolean ignoreMissing = readBooleanProperty(TYPE, processorTag, config, "ignore_missing", false);
String indexedCharsField = readOptionalStringProperty(TYPE, processorTag, config, "indexed_chars_field");
@UpdateForV10(owner = UpdateForV10.Owner.DATA_MANAGEMENT)
// Revisit whether we want to update the [remove_binary] default to be 'true' - would need to find a way to do this safely
Boolean removeBinary = readOptionalBooleanProperty(TYPE, processorTag, config, "remove_binary");
if (removeBinary == null) {
DEPRECATION_LOGGER.warn(
DeprecationCategory.PARSING,
"attachment-remove-binary",
"The default [remove_binary] value of 'false' is deprecated and will be "
+ "set to 'true' in a future release. Set [remove_binary] explicitly to "
+ "'true' or 'false' to ensure no behavior change."
);
removeBinary = false;
}
final Set<Property> properties;
if (propertyNames != null) {
properties = EnumSet.noneOf(Property.class);
for (String fieldName : propertyNames) {
try {
properties.add(Property.parse(fieldName));
} catch (Exception e) {
throw newConfigurationException(
TYPE,
processorTag,
"properties",
"illegal field option [" + fieldName + "]. valid values are " + Arrays.toString(Property.values())
);
}
}
} else {
properties = DEFAULT_PROPERTIES;
}
return new AttachmentProcessor(
processorTag,
description,
field,
targetField,
properties,
indexedChars,
ignoreMissing,
indexedCharsField,
resourceName,
removeBinary
);
}
}
| Factory |
java | google__error-prone | test_helpers/src/main/java/com/google/errorprone/DiagnosticTestHelper.java | {
"start": 12430,
"end": 12797
} | class ____ implements Predicate<String> {
private final String pattern;
SimpleStringContains(String pattern) {
this.pattern = pattern;
}
@Override
public boolean test(String input) {
return input.contains(pattern);
}
@Override
public String toString() {
return pattern;
}
}
private static | SimpleStringContains |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerWebapps.java | {
"start": 1011,
"end": 2047
} | class ____ extends HttpServerFunctionalTest {
private static final Logger LOG =
LoggerFactory.getLogger(TestHttpServerWebapps.class);
/**
* Test that the test server is loadable on the classpath
* @throws Throwable if something went wrong
*/
@Test
public void testValidServerResource() throws Throwable {
HttpServer2 server = null;
try {
server = createServer("test");
} finally {
stop(server);
}
}
/**
* Test that an invalid webapp triggers an exception
* @throws Throwable if something went wrong
*/
@Test
public void testMissingServerResource() throws Throwable {
try {
HttpServer2 server = createServer("NoSuchWebapp");
//should not have got here.
//close the server
String serverDescription = server.toString();
stop(server);
fail("Expected an exception, got " + serverDescription);
} catch (FileNotFoundException expected) {
LOG.debug("Expected exception " + expected, expected);
}
}
}
| TestHttpServerWebapps |
java | apache__camel | components/camel-scheduler/src/generated/java/org/apache/camel/component/scheduler/SchedulerEndpointUriFactory.java | {
"start": 519,
"end": 3028
} | class ____ extends org.apache.camel.support.component.EndpointUriFactorySupport implements EndpointUriFactory {
private static final String BASE = ":name";
private static final Set<String> PROPERTY_NAMES;
private static final Set<String> SECRET_PROPERTY_NAMES;
private static final Map<String, String> MULTI_VALUE_PREFIXES;
static {
Set<String> props = new HashSet<>(23);
props.add("backoffErrorThreshold");
props.add("backoffIdleThreshold");
props.add("backoffMultiplier");
props.add("bridgeErrorHandler");
props.add("delay");
props.add("exceptionHandler");
props.add("exchangePattern");
props.add("greedy");
props.add("includeMetadata");
props.add("initialDelay");
props.add("name");
props.add("pollStrategy");
props.add("poolSize");
props.add("repeatCount");
props.add("runLoggingLevel");
props.add("scheduledExecutorService");
props.add("scheduler");
props.add("schedulerProperties");
props.add("sendEmptyMessageWhenIdle");
props.add("startScheduler");
props.add("synchronous");
props.add("timeUnit");
props.add("useFixedDelay");
PROPERTY_NAMES = Collections.unmodifiableSet(props);
SECRET_PROPERTY_NAMES = Collections.emptySet();
Map<String, String> prefixes = new HashMap<>(1);
prefixes.put("schedulerProperties", "scheduler.");
MULTI_VALUE_PREFIXES = Collections.unmodifiableMap(prefixes);
}
@Override
public boolean isEnabled(String scheme) {
return "scheduler".equals(scheme);
}
@Override
public String buildUri(String scheme, Map<String, Object> properties, boolean encode) throws URISyntaxException {
String syntax = scheme + BASE;
String uri = syntax;
Map<String, Object> copy = new HashMap<>(properties);
uri = buildPathParameter(syntax, uri, "name", null, true, copy);
uri = buildQueryParameters(uri, copy, encode);
return uri;
}
@Override
public Set<String> propertyNames() {
return PROPERTY_NAMES;
}
@Override
public Set<String> secretPropertyNames() {
return SECRET_PROPERTY_NAMES;
}
@Override
public Map<String, String> multiValuePrefixes() {
return MULTI_VALUE_PREFIXES;
}
@Override
public boolean isLenientProperties() {
return false;
}
}
| SchedulerEndpointUriFactory |
java | quarkusio__quarkus | independent-projects/arc/runtime/src/main/java/io/quarkus/arc/impl/InvariantTypes.java | {
"start": 993,
"end": 1256
} | class ____ not operate on wildcards and instead delegates to
* {@link CovariantTypes}.
*
* TypeVariables are considered a specific unknown type restricted by the upper bound. No inference of type variables is
* performed.
*
* @author Jozef Hartinger
*
*/
| does |
java | google__dagger | dagger-compiler/main/java/dagger/internal/codegen/writing/ComponentCreatorImplementation.java | {
"start": 1032,
"end": 1690
} | class ____ {
/** Creates a new {@link ComponentCreatorImplementation}. */
public static ComponentCreatorImplementation create(
XTypeSpec spec, XClassName name, ImmutableMap<ComponentRequirement, XPropertySpec> fields) {
return new AutoValue_ComponentCreatorImplementation(spec, name, fields);
}
/** The type spec for the creator implementation. */
public abstract XTypeSpec spec();
/** The name of the creator implementation class. */
public abstract XClassName name();
/** All fields that are present in this implementation. */
abstract ImmutableMap<ComponentRequirement, XPropertySpec> fields();
}
| ComponentCreatorImplementation |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestRedundancyMonitor.java | {
"start": 1603,
"end": 1661
} | class ____ RedundancyMonitor in BlockManager.
*/
public | tests |
java | apache__flink | flink-metrics/flink-metrics-core/src/main/java/org/apache/flink/events/reporter/EventReporterFactory.java | {
"start": 1347,
"end": 1607
} | interface ____ {
/**
* Creates a new event reporter.
*
* @param properties configured properties for the reporter
* @return created metric reporter
*/
EventReporter createEventReporter(Properties properties);
}
| EventReporterFactory |
java | apache__kafka | group-coordinator/src/test/java/org/apache/kafka/coordinator/group/streams/CurrentAssignmentBuilderTest.java | {
"start": 1849,
"end": 44681
} | class ____ {
private static final String SUBTOPOLOGY_ID1 = Uuid.randomUuid().toString();
private static final String SUBTOPOLOGY_ID2 = Uuid.randomUuid().toString();
private static final String PROCESS_ID = "process_id";
private static final String MEMBER_NAME = "member";
@ParameterizedTest
@EnumSource(TaskRole.class)
public void testStableToStable(TaskRole taskRole) {
final int memberEpoch = 10;
StreamsGroupMember member =
new StreamsGroupMember.Builder(MEMBER_NAME)
.setState(MemberState.STABLE)
.setProcessId(PROCESS_ID)
.setMemberEpoch(memberEpoch)
.setPreviousMemberEpoch(memberEpoch)
.setAssignedTasks(
mkTasksTupleWithCommonEpoch(
taskRole,
memberEpoch,
mkTasks(SUBTOPOLOGY_ID1, 1, 2),
mkTasks(SUBTOPOLOGY_ID2, 3, 4)))
.setTasksPendingRevocation(TasksTupleWithEpochs.EMPTY)
.build();
StreamsGroupMember updatedMember = new CurrentAssignmentBuilder(member)
.withTargetAssignment(memberEpoch + 1, mkTasksTuple(taskRole,
mkTasks(SUBTOPOLOGY_ID1, 1, 2),
mkTasks(SUBTOPOLOGY_ID2, 3, 4)))
.withCurrentActiveTaskProcessId((subtopologyId, partitionId) -> PROCESS_ID)
.withCurrentStandbyTaskProcessIds(
(subtopologyId, partitionId) -> Set.of())
.withCurrentWarmupTaskProcessIds((subtopologyId, partitionId) -> Set.of())
.build();
assertEquals(
new StreamsGroupMember.Builder(MEMBER_NAME)
.setState(MemberState.STABLE)
.setProcessId(PROCESS_ID)
.setMemberEpoch(memberEpoch + 1)
.setPreviousMemberEpoch(memberEpoch)
.setAssignedTasks(mkTasksTupleWithCommonEpoch(
taskRole,
memberEpoch,
mkTasks(SUBTOPOLOGY_ID1, 1, 2),
mkTasks(SUBTOPOLOGY_ID2, 3, 4)))
.setTasksPendingRevocation(TasksTupleWithEpochs.EMPTY)
.build(),
updatedMember
);
}
@ParameterizedTest
@EnumSource(TaskRole.class)
public void testStableToStableAtTargetEpoch(TaskRole taskRole) {
final int memberEpoch = 10;
StreamsGroupMember member =
new StreamsGroupMember.Builder(MEMBER_NAME)
.setState(MemberState.STABLE)
.setProcessId(PROCESS_ID)
.setMemberEpoch(memberEpoch)
.setPreviousMemberEpoch(memberEpoch)
.setAssignedTasks(
mkTasksTupleWithCommonEpoch(
taskRole,
memberEpoch,
mkTasks(SUBTOPOLOGY_ID1, 1, 2),
mkTasks(SUBTOPOLOGY_ID2, 3, 4)))
.setTasksPendingRevocation(TasksTupleWithEpochs.EMPTY)
.build();
StreamsGroupMember updatedMember = new CurrentAssignmentBuilder(member)
.withTargetAssignment(memberEpoch, mkTasksTuple(taskRole,
mkTasks(SUBTOPOLOGY_ID1, 1, 2),
mkTasks(SUBTOPOLOGY_ID2, 3, 4)))
.withCurrentActiveTaskProcessId((subtopologyId, partitionId) -> PROCESS_ID)
.withCurrentStandbyTaskProcessIds(
(subtopologyId, partitionId) -> Set.of())
.withCurrentWarmupTaskProcessIds((subtopologyId, partitionId) -> Set.of())
.build();
assertEquals(
new StreamsGroupMember.Builder(MEMBER_NAME)
.setState(MemberState.STABLE)
.setProcessId(PROCESS_ID)
.setMemberEpoch(memberEpoch)
.setPreviousMemberEpoch(memberEpoch)
.setAssignedTasks(mkTasksTupleWithCommonEpoch(
taskRole,
memberEpoch,
mkTasks(SUBTOPOLOGY_ID1, 1, 2),
mkTasks(SUBTOPOLOGY_ID2, 3, 4)))
.setTasksPendingRevocation(TasksTupleWithEpochs.EMPTY)
.build(),
updatedMember
);
}
@ParameterizedTest
@EnumSource(TaskRole.class)
public void testStableToStableWithNewTasks(TaskRole taskRole) {
final int memberEpoch = 10;
StreamsGroupMember member = new StreamsGroupMember.Builder(MEMBER_NAME)
.setState(MemberState.STABLE)
.setProcessId(PROCESS_ID)
.setMemberEpoch(memberEpoch)
.setPreviousMemberEpoch(memberEpoch)
.setAssignedTasks(mkTasksTupleWithEpochs(taskRole,
mkTasksWithEpochs(SUBTOPOLOGY_ID1, Map.of(1, 9, 2, 8)),
mkTasksWithEpochs(SUBTOPOLOGY_ID2, Map.of(3, 9, 4, 8))))
.setTasksPendingRevocation(TasksTupleWithEpochs.EMPTY)
.build();
StreamsGroupMember updatedMember = new CurrentAssignmentBuilder(member)
.withTargetAssignment(memberEpoch + 1, mkTasksTuple(taskRole,
mkTasks(SUBTOPOLOGY_ID1, 1, 2, 4),
mkTasks(SUBTOPOLOGY_ID2, 3, 4, 7)))
.withCurrentActiveTaskProcessId((subtopologyId, partitionId) -> null)
.withCurrentStandbyTaskProcessIds(
(subtopologyId, partitionId) -> Set.of())
.withCurrentWarmupTaskProcessIds((subtopologyId, partitionId) -> Set.of())
.build();
assertEquals(
new StreamsGroupMember.Builder(MEMBER_NAME)
.setState(MemberState.STABLE)
.setProcessId(PROCESS_ID)
.setMemberEpoch(memberEpoch + 1)
.setPreviousMemberEpoch(memberEpoch)
.setAssignedTasks(mkTasksTupleWithEpochs(taskRole,
mkTasksWithEpochs(SUBTOPOLOGY_ID1, Map.of(1, 9, 2, 8, 4, memberEpoch + 1)),
mkTasksWithEpochs(SUBTOPOLOGY_ID2, Map.of(3, 9, 4, 8, 7, memberEpoch + 1))))
.setTasksPendingRevocation(TasksTupleWithEpochs.EMPTY)
.build(),
updatedMember
);
}
@ParameterizedTest
@EnumSource(TaskRole.class)
public void testStableToUnrevokedTasks(TaskRole taskRole) {
final int memberEpoch = 10;
StreamsGroupMember member = new StreamsGroupMember.Builder(MEMBER_NAME)
.setState(MemberState.STABLE)
.setProcessId(PROCESS_ID)
.setMemberEpoch(memberEpoch)
.setPreviousMemberEpoch(memberEpoch)
.setAssignedTasks(mkTasksTupleWithCommonEpoch(taskRole, memberEpoch,
mkTasks(SUBTOPOLOGY_ID1, 1, 2),
mkTasks(SUBTOPOLOGY_ID2, 3, 4)))
.setTasksPendingRevocation(TasksTupleWithEpochs.EMPTY)
.build();
StreamsGroupMember updatedMember = new CurrentAssignmentBuilder(member)
.withTargetAssignment(memberEpoch + 1, mkTasksTuple(taskRole,
mkTasks(SUBTOPOLOGY_ID1, 2, 3),
mkTasks(SUBTOPOLOGY_ID2, 4, 5)))
.withCurrentActiveTaskProcessId((subtopologyId, partitionId) -> null)
.withCurrentStandbyTaskProcessIds(
(subtopologyId, partitionId) -> Set.of())
.withCurrentWarmupTaskProcessIds((subtopologyId, partitionId) -> Set.of())
.build();
assertEquals(
new StreamsGroupMember.Builder(MEMBER_NAME)
.setState(MemberState.UNREVOKED_TASKS)
.setProcessId(PROCESS_ID)
.setMemberEpoch(memberEpoch)
.setPreviousMemberEpoch(memberEpoch)
.setAssignedTasks(mkTasksTupleWithCommonEpoch(taskRole, memberEpoch,
mkTasks(SUBTOPOLOGY_ID1, 2),
mkTasks(SUBTOPOLOGY_ID2, 4)))
.setTasksPendingRevocation(mkTasksTupleWithCommonEpoch(taskRole, memberEpoch,
mkTasks(SUBTOPOLOGY_ID1, 1),
mkTasks(SUBTOPOLOGY_ID2, 3)))
.build(),
updatedMember
);
}
@ParameterizedTest
@EnumSource(TaskRole.class)
public void testStableToUnrevokedWithEmptyAssignment(TaskRole taskRole) {
final int memberEpoch = 10;
StreamsGroupMember member =
new StreamsGroupMember.Builder(MEMBER_NAME)
.setState(MemberState.STABLE)
.setProcessId(PROCESS_ID)
.setMemberEpoch(memberEpoch)
.setPreviousMemberEpoch(memberEpoch)
.setAssignedTasks(
mkTasksTupleWithCommonEpoch(
taskRole,
memberEpoch,
mkTasks(SUBTOPOLOGY_ID1, 1, 2),
mkTasks(SUBTOPOLOGY_ID2, 3, 4)))
.setTasksPendingRevocation(TasksTupleWithEpochs.EMPTY)
.build();
StreamsGroupMember updatedMember = new CurrentAssignmentBuilder(member)
.withTargetAssignment(memberEpoch + 1, TasksTuple.EMPTY)
.withCurrentActiveTaskProcessId((subtopologyId, partitionId) -> PROCESS_ID)
.withCurrentStandbyTaskProcessIds(
(subtopologyId, partitionId) -> Set.of())
.withCurrentWarmupTaskProcessIds((subtopologyId, partitionId) -> Set.of())
.build();
assertEquals(
new StreamsGroupMember.Builder(MEMBER_NAME)
.setState(MemberState.UNREVOKED_TASKS)
.setProcessId(PROCESS_ID)
.setMemberEpoch(memberEpoch)
.setPreviousMemberEpoch(memberEpoch)
.setAssignedTasks(TasksTupleWithEpochs.EMPTY)
.setTasksPendingRevocation(
mkTasksTupleWithCommonEpoch(
taskRole,
memberEpoch,
mkTasks(SUBTOPOLOGY_ID1, 1, 2),
mkTasks(SUBTOPOLOGY_ID2, 3, 4)))
.build(),
updatedMember
);
}
@ParameterizedTest
@EnumSource(TaskRole.class)
public void testStableToUnreleasedTasks(TaskRole taskRole) {
final int memberEpoch = 10;
StreamsGroupMember member = new StreamsGroupMember.Builder(MEMBER_NAME)
.setState(MemberState.STABLE)
.setProcessId(PROCESS_ID)
.setMemberEpoch(memberEpoch)
.setPreviousMemberEpoch(memberEpoch)
.setAssignedTasks(mkTasksTupleWithCommonEpoch(taskRole, memberEpoch,
mkTasks(SUBTOPOLOGY_ID1, 1, 2),
mkTasks(SUBTOPOLOGY_ID2, 3, 4)))
.setTasksPendingRevocation(TasksTupleWithEpochs.EMPTY)
.build();
StreamsGroupMember updatedMember = new CurrentAssignmentBuilder(member)
.withTargetAssignment(memberEpoch + 1, mkTasksTuple(taskRole,
mkTasks(SUBTOPOLOGY_ID1, 1, 2, 4),
mkTasks(SUBTOPOLOGY_ID2, 3, 4, 7)))
.withCurrentActiveTaskProcessId((subtopologyId, partitionId) -> PROCESS_ID)
.withCurrentStandbyTaskProcessIds(
(subtopologyId, partitionId) -> Set.of())
.withCurrentWarmupTaskProcessIds((subtopologyId, partitionId) -> Set.of())
.build();
assertEquals(
new StreamsGroupMember.Builder(MEMBER_NAME)
.setState(MemberState.UNRELEASED_TASKS)
.setProcessId(PROCESS_ID)
.setMemberEpoch(memberEpoch + 1)
.setPreviousMemberEpoch(memberEpoch)
.setAssignedTasks(mkTasksTupleWithCommonEpoch(taskRole, memberEpoch,
mkTasks(SUBTOPOLOGY_ID1, 1, 2),
mkTasks(SUBTOPOLOGY_ID2, 3, 4)))
.setTasksPendingRevocation(TasksTupleWithEpochs.EMPTY)
.build(),
updatedMember
);
}
@ParameterizedTest
@EnumSource(TaskRole.class)
public void testStableToUnreleasedTasksWithOwnedTasksNotHavingRevokedTasks(TaskRole taskRole) {
final int memberEpoch = 10;
StreamsGroupMember member = new StreamsGroupMember.Builder(MEMBER_NAME)
.setState(MemberState.STABLE)
.setProcessId(PROCESS_ID)
.setMemberEpoch(memberEpoch)
.setPreviousMemberEpoch(memberEpoch)
.setAssignedTasks(mkTasksTupleWithCommonEpoch(taskRole, memberEpoch,
mkTasks(SUBTOPOLOGY_ID1, 1, 2),
mkTasks(SUBTOPOLOGY_ID2, 3, 4)))
.setTasksPendingRevocation(TasksTupleWithEpochs.EMPTY)
.build();
StreamsGroupMember updatedMember = new CurrentAssignmentBuilder(member)
.withTargetAssignment(memberEpoch + 1, mkTasksTuple(taskRole,
mkTasks(SUBTOPOLOGY_ID1, 1, 2),
mkTasks(SUBTOPOLOGY_ID2, 3, 5)))
.withCurrentActiveTaskProcessId((subtopologyId, __) ->
SUBTOPOLOGY_ID2.equals(subtopologyId) ? PROCESS_ID : null
)
.withCurrentStandbyTaskProcessIds(
(subtopologyId, partitionId) -> Set.of())
.withCurrentWarmupTaskProcessIds((subtopologyId, partitionId) -> Set.of())
.withOwnedAssignment(mkTasksTuple(taskRole))
.build();
assertEquals(
new StreamsGroupMember.Builder(MEMBER_NAME)
.setState(MemberState.UNRELEASED_TASKS)
.setProcessId(PROCESS_ID)
.setMemberEpoch(memberEpoch + 1)
.setPreviousMemberEpoch(memberEpoch)
.setAssignedTasks(mkTasksTupleWithCommonEpoch(taskRole, memberEpoch,
mkTasks(SUBTOPOLOGY_ID1, 1, 2),
mkTasks(SUBTOPOLOGY_ID2, 3)))
.setTasksPendingRevocation(TasksTupleWithEpochs.EMPTY)
.build(),
updatedMember
);
}
@ParameterizedTest
@EnumSource(TaskRole.class)
public void testUnrevokedTasksToStable(TaskRole taskRole) {
final int memberEpoch = 10;
StreamsGroupMember member = new StreamsGroupMember.Builder(MEMBER_NAME)
.setState(MemberState.UNREVOKED_TASKS)
.setProcessId(PROCESS_ID)
.setMemberEpoch(memberEpoch)
.setPreviousMemberEpoch(memberEpoch)
.setAssignedTasks(mkTasksTupleWithCommonEpoch(taskRole, memberEpoch,
mkTasks(SUBTOPOLOGY_ID1, 2, 3),
mkTasks(SUBTOPOLOGY_ID2, 5, 6)))
.setTasksPendingRevocation(mkTasksTupleWithCommonEpoch(taskRole, memberEpoch,
mkTasks(SUBTOPOLOGY_ID1, 1),
mkTasks(SUBTOPOLOGY_ID2, 4)))
.build();
StreamsGroupMember updatedMember = new CurrentAssignmentBuilder(member)
.withTargetAssignment(memberEpoch + 1, mkTasksTuple(taskRole,
mkTasks(SUBTOPOLOGY_ID1, 2, 3),
mkTasks(SUBTOPOLOGY_ID2, 5, 6)))
.withCurrentActiveTaskProcessId((subtopologyId, partitionId) -> null)
.withCurrentStandbyTaskProcessIds(
(subtopologyId, partitionId) -> Set.of())
.withCurrentWarmupTaskProcessIds((subtopologyId, partitionId) -> Set.of())
.withOwnedAssignment(mkTasksTuple(taskRole,
mkTasks(SUBTOPOLOGY_ID1, 2, 3),
mkTasks(SUBTOPOLOGY_ID2, 5, 6)))
.build();
assertEquals(
new StreamsGroupMember.Builder(MEMBER_NAME)
.setState(MemberState.STABLE)
.setProcessId(PROCESS_ID)
.setMemberEpoch(memberEpoch + 1)
.setPreviousMemberEpoch(memberEpoch)
.setAssignedTasks(mkTasksTupleWithCommonEpoch(taskRole, memberEpoch,
mkTasks(SUBTOPOLOGY_ID1, 2, 3),
mkTasks(SUBTOPOLOGY_ID2, 5, 6)))
.setTasksPendingRevocation(TasksTupleWithEpochs.EMPTY)
.build(),
updatedMember
);
}
@ParameterizedTest
@EnumSource(TaskRole.class)
public void testRemainsInUnrevokedTasks(TaskRole taskRole) {
final int memberEpoch = 10;
StreamsGroupMember member = new StreamsGroupMember.Builder(MEMBER_NAME)
.setState(MemberState.UNREVOKED_TASKS)
.setProcessId(PROCESS_ID)
.setMemberEpoch(memberEpoch)
.setPreviousMemberEpoch(memberEpoch)
.setAssignedTasks(mkTasksTupleWithCommonEpoch(taskRole, memberEpoch,
mkTasks(SUBTOPOLOGY_ID1, 2, 3),
mkTasks(SUBTOPOLOGY_ID2, 5, 6)))
.setTasksPendingRevocation(mkTasksTupleWithCommonEpoch(taskRole, memberEpoch,
mkTasks(SUBTOPOLOGY_ID1, 1),
mkTasks(SUBTOPOLOGY_ID2, 4)))
.build();
CurrentAssignmentBuilder currentAssignmentBuilder = new CurrentAssignmentBuilder(
member)
.withTargetAssignment(memberEpoch + 2, mkTasksTuple(taskRole,
mkTasks(SUBTOPOLOGY_ID1, 3),
mkTasks(SUBTOPOLOGY_ID2, 6)))
.withCurrentActiveTaskProcessId((subtopologyId, partitionId) -> null)
.withCurrentStandbyTaskProcessIds(
(subtopologyId, partitionId) -> Set.of())
.withCurrentWarmupTaskProcessIds(
(subtopologyId, partitionId) -> Set.of());
assertEquals(
member,
currentAssignmentBuilder
.withOwnedAssignment(null)
.build()
);
assertEquals(
member,
currentAssignmentBuilder
.withOwnedAssignment(mkTasksTuple(taskRole,
mkTasks(SUBTOPOLOGY_ID1, 1, 2, 3),
mkTasks(SUBTOPOLOGY_ID2, 5, 6)))
.build()
);
assertEquals(
member,
currentAssignmentBuilder
.withOwnedAssignment(mkTasksTuple(taskRole,
mkTasks(SUBTOPOLOGY_ID1, 2, 3),
mkTasks(SUBTOPOLOGY_ID2, 4, 5, 6)))
.build()
);
}
@ParameterizedTest
@EnumSource(TaskRole.class)
public void testUnrevokedTasksToUnrevokedTasks(TaskRole taskRole) {
final int memberEpoch = 10;
StreamsGroupMember member = new StreamsGroupMember.Builder(MEMBER_NAME)
.setState(MemberState.UNREVOKED_TASKS)
.setProcessId(PROCESS_ID)
.setMemberEpoch(memberEpoch)
.setPreviousMemberEpoch(memberEpoch)
.setAssignedTasks(mkTasksTupleWithCommonEpoch(taskRole, memberEpoch,
mkTasks(SUBTOPOLOGY_ID1, 2, 3),
mkTasks(SUBTOPOLOGY_ID2, 5, 6)))
.setTasksPendingRevocation(mkTasksTupleWithCommonEpoch(taskRole, memberEpoch,
mkTasks(SUBTOPOLOGY_ID1, 1),
mkTasks(SUBTOPOLOGY_ID2, 4)))
.build();
StreamsGroupMember updatedMember = new CurrentAssignmentBuilder(member)
.withTargetAssignment(memberEpoch + 2, mkTasksTuple(taskRole,
mkTasks(SUBTOPOLOGY_ID1, 3),
mkTasks(SUBTOPOLOGY_ID2, 6)))
.withCurrentActiveTaskProcessId((subtopologyId, partitionId) -> null)
.withOwnedAssignment(mkTasksTuple(taskRole,
mkTasks(SUBTOPOLOGY_ID1, 2, 3),
mkTasks(SUBTOPOLOGY_ID2, 5, 6)))
.build();
assertEquals(
new StreamsGroupMember.Builder(MEMBER_NAME)
.setState(MemberState.UNREVOKED_TASKS)
.setProcessId(PROCESS_ID)
.setMemberEpoch(memberEpoch + 1)
.setPreviousMemberEpoch(memberEpoch)
.setAssignedTasks(mkTasksTupleWithCommonEpoch(taskRole, memberEpoch,
mkTasks(SUBTOPOLOGY_ID1, 3),
mkTasks(SUBTOPOLOGY_ID2, 6)))
.setTasksPendingRevocation(mkTasksTupleWithCommonEpoch(taskRole, memberEpoch,
mkTasks(SUBTOPOLOGY_ID1, 2),
mkTasks(SUBTOPOLOGY_ID2, 5)))
.build(),
updatedMember
);
}
@ParameterizedTest
@EnumSource(TaskRole.class)
public void testUnrevokedTasksToUnreleasedTasks(TaskRole taskRole) {
final int memberEpoch = 11;
StreamsGroupMember member = new StreamsGroupMember.Builder(MEMBER_NAME)
.setState(MemberState.UNREVOKED_TASKS)
.setProcessId(PROCESS_ID)
.setMemberEpoch(memberEpoch)
.setPreviousMemberEpoch(memberEpoch - 1)
.setAssignedTasks(mkTasksTupleWithCommonEpoch(taskRole, memberEpoch,
mkTasks(SUBTOPOLOGY_ID1, 2, 3),
mkTasks(SUBTOPOLOGY_ID2, 5, 6)))
.setTasksPendingRevocation(mkTasksTupleWithCommonEpoch(taskRole, memberEpoch,
mkTasks(SUBTOPOLOGY_ID1, 1),
mkTasks(SUBTOPOLOGY_ID2, 4)))
.build();
StreamsGroupMember updatedMember = new CurrentAssignmentBuilder(member)
.withTargetAssignment(memberEpoch, mkTasksTuple(taskRole,
mkTasks(SUBTOPOLOGY_ID1, 2, 3, 4),
mkTasks(SUBTOPOLOGY_ID2, 5, 6, 7)))
.withCurrentActiveTaskProcessId((subtopologyId, partitionId) -> PROCESS_ID)
.withCurrentStandbyTaskProcessIds(
(subtopologyId, partitionId) -> Set.of())
.withCurrentWarmupTaskProcessIds((subtopologyId, partitionId) -> Set.of())
.withOwnedAssignment(mkTasksTuple(taskRole,
mkTasks(SUBTOPOLOGY_ID1, 2, 3),
mkTasks(SUBTOPOLOGY_ID2, 5, 6))
)
.build();
assertEquals(
new StreamsGroupMember.Builder(MEMBER_NAME)
.setState(MemberState.UNRELEASED_TASKS)
.setProcessId(PROCESS_ID)
.setMemberEpoch(memberEpoch)
.setPreviousMemberEpoch(memberEpoch)
.setAssignedTasks(mkTasksTupleWithCommonEpoch(taskRole, memberEpoch,
mkTasks(SUBTOPOLOGY_ID1, 2, 3),
mkTasks(SUBTOPOLOGY_ID2, 5, 6)))
.setTasksPendingRevocation(TasksTupleWithEpochs.EMPTY)
.build(),
updatedMember
);
}
@ParameterizedTest
@EnumSource(TaskRole.class)
public void testUnreleasedTasksToStable(TaskRole taskRole) {
final int memberEpoch = 11;
StreamsGroupMember member = new StreamsGroupMember.Builder(MEMBER_NAME)
.setState(MemberState.UNRELEASED_TASKS)
.setProcessId("process1")
.setMemberEpoch(memberEpoch)
.setPreviousMemberEpoch(memberEpoch)
.setAssignedTasks(mkTasksTupleWithCommonEpoch(taskRole, memberEpoch,
mkTasks(SUBTOPOLOGY_ID1, 2, 3),
mkTasks(SUBTOPOLOGY_ID2, 5, 6)))
.setTasksPendingRevocation(TasksTupleWithEpochs.EMPTY)
.build();
StreamsGroupMember updatedMember = new CurrentAssignmentBuilder(member)
.withTargetAssignment(memberEpoch + 1, mkTasksTuple(taskRole,
mkTasks(SUBTOPOLOGY_ID1, 2, 3),
mkTasks(SUBTOPOLOGY_ID2, 5, 6)))
.withCurrentActiveTaskProcessId((subtopologyId, partitionId) -> PROCESS_ID)
.withCurrentStandbyTaskProcessIds(
(subtopologyId, partitionId) -> Set.of(PROCESS_ID))
.withCurrentWarmupTaskProcessIds((subtopologyId, partitionId) ->
Set.of(PROCESS_ID))
.build();
assertEquals(
new StreamsGroupMember.Builder(MEMBER_NAME)
.setState(MemberState.STABLE)
.setProcessId("process1")
.setMemberEpoch(memberEpoch + 1)
.setPreviousMemberEpoch(memberEpoch)
.setAssignedTasks(mkTasksTupleWithCommonEpoch(taskRole, memberEpoch,
mkTasks(SUBTOPOLOGY_ID1, 2, 3),
mkTasks(SUBTOPOLOGY_ID2, 5, 6)))
.setTasksPendingRevocation(TasksTupleWithEpochs.EMPTY)
.build(),
updatedMember
);
}
@ParameterizedTest
@EnumSource(TaskRole.class)
public void testUnreleasedTasksToStableWithNewTasks(TaskRole taskRole) {
int memberEpoch = 11;
StreamsGroupMember member = new StreamsGroupMember.Builder(MEMBER_NAME)
.setState(MemberState.UNRELEASED_TASKS)
.setProcessId("process1")
.setMemberEpoch(memberEpoch)
.setPreviousMemberEpoch(memberEpoch)
.setAssignedTasks(mkTasksTupleWithCommonEpoch(taskRole, memberEpoch,
mkTasks(SUBTOPOLOGY_ID1, 2, 3),
mkTasks(SUBTOPOLOGY_ID2, 5, 6)))
.setTasksPendingRevocation(TasksTupleWithEpochs.EMPTY)
.build();
StreamsGroupMember updatedMember = new CurrentAssignmentBuilder(member)
.withTargetAssignment(memberEpoch, mkTasksTuple(taskRole,
mkTasks(SUBTOPOLOGY_ID1, 2, 3, 4),
mkTasks(SUBTOPOLOGY_ID2, 5, 6, 7)))
.withCurrentActiveTaskProcessId((subtopologyId, partitionId) -> null)
.withCurrentStandbyTaskProcessIds(
(subtopologyId, partitionId) -> Set.of())
.withCurrentWarmupTaskProcessIds((subtopologyId, partitionId) -> Set.of())
.build();
assertEquals(
new StreamsGroupMember.Builder(MEMBER_NAME)
.setState(MemberState.STABLE)
.setProcessId("process1")
.setMemberEpoch(memberEpoch)
.setPreviousMemberEpoch(memberEpoch)
.setAssignedTasks(mkTasksTupleWithCommonEpoch(taskRole, memberEpoch,
mkTasks(SUBTOPOLOGY_ID1, 2, 3, 4),
mkTasks(SUBTOPOLOGY_ID2, 5, 6, 7)))
.setTasksPendingRevocation(TasksTupleWithEpochs.EMPTY)
.build(),
updatedMember
);
}
@ParameterizedTest
@EnumSource(TaskRole.class)
public void testUnreleasedTasksToUnreleasedTasks(TaskRole taskRole) {
int memberEpoch = 11;
StreamsGroupMember member = new StreamsGroupMember.Builder(MEMBER_NAME)
.setState(MemberState.UNRELEASED_TASKS)
.setProcessId(PROCESS_ID)
.setMemberEpoch(memberEpoch)
.setPreviousMemberEpoch(memberEpoch)
.setAssignedTasks(mkTasksTupleWithCommonEpoch(taskRole, memberEpoch,
mkTasks(SUBTOPOLOGY_ID1, 2, 3),
mkTasks(SUBTOPOLOGY_ID2, 5, 6)))
.setTasksPendingRevocation(TasksTupleWithEpochs.EMPTY)
.build();
StreamsGroupMember updatedMember = new CurrentAssignmentBuilder(member)
.withTargetAssignment(memberEpoch, mkTasksTuple(taskRole,
mkTasks(SUBTOPOLOGY_ID1, 2, 3, 4),
mkTasks(SUBTOPOLOGY_ID2, 5, 6, 7)))
.withCurrentActiveTaskProcessId((subtopologyId, partitionId) -> PROCESS_ID)
.withCurrentStandbyTaskProcessIds(
(subtopologyId, partitionId) -> Set.of(PROCESS_ID))
.withCurrentWarmupTaskProcessIds(
(subtopologyId, partitionId) -> Set.of(PROCESS_ID))
.build();
assertEquals(member, updatedMember);
}
@ParameterizedTest
@EnumSource(TaskRole.class)
public void testUnreleasedTasksToUnreleasedTasksOtherUnreleasedTaskRole(TaskRole taskRole) {
int memberEpoch = 11;
// The unreleased task is owned by a task of a different role on the same process.
StreamsGroupMember member = new StreamsGroupMember.Builder(MEMBER_NAME)
.setState(MemberState.UNRELEASED_TASKS)
.setProcessId(PROCESS_ID)
.setMemberEpoch(memberEpoch)
.setPreviousMemberEpoch(memberEpoch)
.setAssignedTasks(mkTasksTupleWithCommonEpoch(taskRole, memberEpoch,
mkTasks(SUBTOPOLOGY_ID1, 2, 3),
mkTasks(SUBTOPOLOGY_ID2, 5, 6)))
.setTasksPendingRevocation(TasksTupleWithEpochs.EMPTY)
.build();
StreamsGroupMember updatedMember = new CurrentAssignmentBuilder(member)
.withTargetAssignment(memberEpoch, mkTasksTuple(taskRole,
mkTasks(SUBTOPOLOGY_ID1, 2, 3, 4),
mkTasks(SUBTOPOLOGY_ID2, 5, 6, 7)))
.withCurrentActiveTaskProcessId((subtopologyId, partitionId) -> null)
.withCurrentStandbyTaskProcessIds(
(subtopologyId, partitionId) -> (taskRole == TaskRole.STANDBY)
? Set.of() : Set.of(PROCESS_ID))
.withCurrentWarmupTaskProcessIds(
(subtopologyId, partitionId) -> (taskRole == TaskRole.STANDBY)
? Set.of(PROCESS_ID) : Set.of())
.build();
assertEquals(member, updatedMember);
}
@Test
public void testUnreleasedTasksToUnreleasedTasksAnyActiveOwner() {
int memberEpoch = 11;
// The unreleased task remains unreleased, because it is owned by any other instance in
// an active role, no matter the process.
// The task that is not unreleased can be assigned.
StreamsGroupMember member = new StreamsGroupMember.Builder(MEMBER_NAME)
.setState(MemberState.UNRELEASED_TASKS)
.setProcessId(PROCESS_ID)
.setMemberEpoch(memberEpoch)
.setPreviousMemberEpoch(memberEpoch)
.setAssignedTasks(mkTasksTupleWithCommonEpoch(TaskRole.ACTIVE, memberEpoch,
mkTasks(SUBTOPOLOGY_ID1, 2, 3),
mkTasks(SUBTOPOLOGY_ID2, 5, 6)))
.setTasksPendingRevocation(TasksTupleWithEpochs.EMPTY)
.build();
StreamsGroupMember expectedMember = new StreamsGroupMember.Builder(MEMBER_NAME)
.setState(MemberState.UNRELEASED_TASKS)
.setProcessId(PROCESS_ID)
.setMemberEpoch(memberEpoch)
.setPreviousMemberEpoch(memberEpoch)
.setAssignedTasks(mkTasksTupleWithCommonEpoch(TaskRole.ACTIVE, memberEpoch,
mkTasks(SUBTOPOLOGY_ID1, 2, 3),
mkTasks(SUBTOPOLOGY_ID2, 5, 6, 7)))
.setTasksPendingRevocation(TasksTupleWithEpochs.EMPTY)
.build();
StreamsGroupMember updatedMember = new CurrentAssignmentBuilder(member)
.withTargetAssignment(memberEpoch, mkTasksTuple(TaskRole.ACTIVE,
mkTasks(SUBTOPOLOGY_ID1, 2, 3, 4),
mkTasks(SUBTOPOLOGY_ID2, 5, 6, 7)))
.withCurrentActiveTaskProcessId((subtopologyId, partitionId) ->
(subtopologyId.equals(SUBTOPOLOGY_ID1) && partitionId == 4) ? "anyOtherProcess"
: null)
.withCurrentStandbyTaskProcessIds(
(subtopologyId, partitionId) -> Set.of())
.withCurrentWarmupTaskProcessIds(
(subtopologyId, partitionId) -> Set.of())
.build();
assertEquals(expectedMember, updatedMember);
}
@ParameterizedTest
@EnumSource(TaskRole.class)
public void testUnreleasedTasksToUnrevokedTasks(TaskRole taskRole) {
int memberEpoch = 11;
StreamsGroupMember member = new StreamsGroupMember.Builder(MEMBER_NAME)
.setState(MemberState.UNRELEASED_TASKS)
.setProcessId("process1")
.setMemberEpoch(memberEpoch)
.setPreviousMemberEpoch(memberEpoch)
.setAssignedTasks(mkTasksTupleWithCommonEpoch(taskRole, memberEpoch,
mkTasks(SUBTOPOLOGY_ID1, 2, 3),
mkTasks(SUBTOPOLOGY_ID2, 5, 6)))
.setTasksPendingRevocation(mkTasksTupleWithCommonEpoch(taskRole, memberEpoch,
mkTasks(SUBTOPOLOGY_ID1, 4),
mkTasks(SUBTOPOLOGY_ID2, 7)))
.build();
StreamsGroupMember updatedMember = new CurrentAssignmentBuilder(member)
.withTargetAssignment(memberEpoch + 1, mkTasksTuple(taskRole,
mkTasks(SUBTOPOLOGY_ID1, 3),
mkTasks(SUBTOPOLOGY_ID2, 6)))
.withCurrentActiveTaskProcessId((subtopologyId, partitionId) -> PROCESS_ID)
.withCurrentStandbyTaskProcessIds(
(subtopologyId, partitionId) -> Set.of())
.withCurrentWarmupTaskProcessIds((subtopologyId, partitionId) -> Set.of())
.build();
assertEquals(
new StreamsGroupMember.Builder(MEMBER_NAME)
.setState(MemberState.UNREVOKED_TASKS)
.setProcessId("process1")
.setMemberEpoch(memberEpoch)
.setPreviousMemberEpoch(memberEpoch)
.setAssignedTasks(mkTasksTupleWithCommonEpoch(taskRole, memberEpoch,
mkTasks(SUBTOPOLOGY_ID1, 3),
mkTasks(SUBTOPOLOGY_ID2, 6)))
.setTasksPendingRevocation(mkTasksTupleWithCommonEpoch(taskRole, memberEpoch,
mkTasks(SUBTOPOLOGY_ID1, 2),
mkTasks(SUBTOPOLOGY_ID2, 5)))
.build(),
updatedMember
);
}
@ParameterizedTest
@EnumSource(TaskRole.class)
public void testUnknownState(TaskRole taskRole) {
int memberEpoch = 11;
StreamsGroupMember member = new StreamsGroupMember.Builder(MEMBER_NAME)
.setState(MemberState.UNKNOWN)
.setMemberEpoch(memberEpoch)
.setPreviousMemberEpoch(memberEpoch)
.setProcessId(PROCESS_ID)
.setAssignedTasks(mkTasksTupleWithCommonEpoch(taskRole, memberEpoch,
mkTasks(SUBTOPOLOGY_ID1, 3),
mkTasks(SUBTOPOLOGY_ID2, 6)))
.setTasksPendingRevocation(mkTasksTupleWithCommonEpoch(taskRole, memberEpoch,
mkTasks(SUBTOPOLOGY_ID1, 2),
mkTasks(SUBTOPOLOGY_ID2, 5)))
.build();
// When the member is in an unknown state, the member is first fenced to force
// a reset of the client side member state.
assertThrows(FencedMemberEpochException.class, () -> new CurrentAssignmentBuilder(member)
.withTargetAssignment(memberEpoch + 1, mkTasksTuple(taskRole,
mkTasks(SUBTOPOLOGY_ID1, 3),
mkTasks(SUBTOPOLOGY_ID2, 6)))
.withCurrentActiveTaskProcessId((subtopologyId, partitionId) -> PROCESS_ID)
.build());
// Then the member rejoins with no owned tasks.
StreamsGroupMember updatedMember = new CurrentAssignmentBuilder(member)
.withTargetAssignment(memberEpoch + 1, mkTasksTuple(taskRole,
mkTasks(SUBTOPOLOGY_ID1, 3),
mkTasks(SUBTOPOLOGY_ID2, 6)))
.withCurrentActiveTaskProcessId((subtopologyId, partitionId) -> PROCESS_ID)
.withCurrentStandbyTaskProcessIds(
(subtopologyId, partitionId) -> Set.of())
.withCurrentWarmupTaskProcessIds((subtopologyId, partitionId) -> Set.of())
.withOwnedAssignment(mkTasksTuple(taskRole))
.build();
assertEquals(
new StreamsGroupMember.Builder(MEMBER_NAME)
.setState(MemberState.STABLE)
.setProcessId(PROCESS_ID)
.setMemberEpoch(memberEpoch + 1)
.setPreviousMemberEpoch(memberEpoch)
.setAssignedTasks(mkTasksTupleWithCommonEpoch(taskRole, memberEpoch,
mkTasks(SUBTOPOLOGY_ID1, 3),
mkTasks(SUBTOPOLOGY_ID2, 6)))
.setTasksPendingRevocation(TasksTupleWithEpochs.EMPTY)
.build(),
updatedMember
);
}
@Test
public void testAssignmentEpochsShouldBePreservedFromPreviousAssignment() {
final int memberEpoch = 10;
// Create a member with tasks that have specific epochs in assigned tasks
StreamsGroupMember member = new StreamsGroupMember.Builder(MEMBER_NAME)
.setState(MemberState.STABLE)
.setProcessId(PROCESS_ID)
.setMemberEpoch(memberEpoch)
.setPreviousMemberEpoch(memberEpoch)
.setAssignedTasks(mkTasksTupleWithEpochs(TaskRole.ACTIVE,
mkTasksWithEpochs(SUBTOPOLOGY_ID1, Map.of(1, 5, 2, 6)),
mkTasksWithEpochs(SUBTOPOLOGY_ID2, Map.of(3, 7, 4, 8))))
.setTasksPendingRevocation(TasksTupleWithEpochs.EMPTY)
.build();
// Same tasks in target assignment should retain their epochs from assigned tasks
StreamsGroupMember updatedMember = new CurrentAssignmentBuilder(member)
.withTargetAssignment(memberEpoch + 1, mkTasksTuple(TaskRole.ACTIVE,
mkTasks(SUBTOPOLOGY_ID1, 1, 2),
mkTasks(SUBTOPOLOGY_ID2, 3, 4)))
.withCurrentActiveTaskProcessId((subtopologyId, partitionId) -> PROCESS_ID)
.withCurrentStandbyTaskProcessIds((subtopologyId, partitionId) -> Set.of())
.withCurrentWarmupTaskProcessIds((subtopologyId, partitionId) -> Set.of())
.build();
// Verify that epochs are preserved from assigned tasks
assertEquals(
new StreamsGroupMember.Builder(MEMBER_NAME)
.setState(MemberState.STABLE)
.setProcessId(PROCESS_ID)
.setMemberEpoch(memberEpoch + 1)
.setPreviousMemberEpoch(memberEpoch)
.setAssignedTasks(mkTasksTupleWithEpochs(TaskRole.ACTIVE,
mkTasksWithEpochs(SUBTOPOLOGY_ID1, Map.of(1, 5, 2, 6)),
mkTasksWithEpochs(SUBTOPOLOGY_ID2, Map.of(3, 7, 4, 8))))
.setTasksPendingRevocation(TasksTupleWithEpochs.EMPTY)
.build(),
updatedMember
);
}
@Test
public void testNewlyAssignedTasksGetTargetAssignmentEpoch() {
final int memberEpoch = 10;
final int targetAssignmentEpoch = 11;
// Create a member with empty assignments
StreamsGroupMember member = new StreamsGroupMember.Builder(MEMBER_NAME)
.setState(MemberState.STABLE)
.setProcessId(PROCESS_ID)
.setMemberEpoch(memberEpoch)
.setPreviousMemberEpoch(memberEpoch)
.setAssignedTasks(TasksTupleWithEpochs.EMPTY)
.setTasksPendingRevocation(TasksTupleWithEpochs.EMPTY)
.build();
// New tasks are assigned
StreamsGroupMember updatedMember = new CurrentAssignmentBuilder(member)
.withTargetAssignment(targetAssignmentEpoch, mkTasksTuple(TaskRole.ACTIVE,
mkTasks(SUBTOPOLOGY_ID1, 1, 2),
mkTasks(SUBTOPOLOGY_ID2, 3, 4)))
.withCurrentActiveTaskProcessId((subtopologyId, partitionId) -> null)
.withCurrentStandbyTaskProcessIds((subtopologyId, partitionId) -> Set.of())
.withCurrentWarmupTaskProcessIds((subtopologyId, partitionId) -> Set.of())
.build();
// Verify that all tasks use the target assignment epoch
assertEquals(
new StreamsGroupMember.Builder(MEMBER_NAME)
.setState(MemberState.STABLE)
.setProcessId(PROCESS_ID)
.setMemberEpoch(targetAssignmentEpoch)
.setPreviousMemberEpoch(memberEpoch)
.setAssignedTasks(mkTasksTupleWithCommonEpoch(TaskRole.ACTIVE, targetAssignmentEpoch,
mkTasks(SUBTOPOLOGY_ID1, 1, 2),
mkTasks(SUBTOPOLOGY_ID2, 3, 4)))
.setTasksPendingRevocation(TasksTupleWithEpochs.EMPTY)
.build(),
updatedMember
);
}
/**
* Tests mixed epoch assignment scenarios.
* - Some epochs from previously assigned tasks (Tasks 1, 2).
* This happens regardless of whether the assigned task is reconciled (owned) by the client (Task 1) or not (Task 2)
* - Some newly assigned task (Task 5) which should get the target assignment epoch.
* - Some tasks are revoked by the member (Task 3, 4). One is immediately reassigned, which also gets
* the target assignment epoch (Task 3).
*/
@Test
public void testMixedPreservedAndNewAssignmentEpochs() {
final int memberEpoch = 10;
final int targetAssignmentEpoch = 11;
// Create a member with:
// - Tasks 1, 2 in assigned with epochs 5, 6
// - Tasks 3, 4 in pending revocation with epochs 7, 8
StreamsGroupMember member = new StreamsGroupMember.Builder(MEMBER_NAME)
.setState(MemberState.UNREVOKED_TASKS)
.setProcessId(PROCESS_ID)
.setMemberEpoch(memberEpoch)
.setPreviousMemberEpoch(memberEpoch)
.setAssignedTasks(mkTasksTupleWithEpochs(TaskRole.ACTIVE,
mkTasksWithEpochs(SUBTOPOLOGY_ID1, Map.of(1, 5, 2, 6))))
.setTasksPendingRevocation(mkTasksTupleWithEpochs(TaskRole.ACTIVE,
mkTasksWithEpochs(SUBTOPOLOGY_ID2, Map.of(3, 7, 4, 8))))
.build();
// The member revokes tasks 3, 4 (not in owned), transitions to next epoch
StreamsGroupMember updatedMember = new CurrentAssignmentBuilder(member)
.withTargetAssignment(targetAssignmentEpoch, mkTasksTuple(TaskRole.ACTIVE,
mkTasks(SUBTOPOLOGY_ID1, 1, 2),
mkTasks(SUBTOPOLOGY_ID2, 3, 5)))
.withCurrentActiveTaskProcessId((subtopologyId, partitionId) -> null)
.withCurrentStandbyTaskProcessIds((subtopologyId, partitionId) -> Set.of())
.withCurrentWarmupTaskProcessIds((subtopologyId, partitionId) -> Set.of())
.withOwnedAssignment(mkTasksTuple(TaskRole.ACTIVE,
mkTasks(SUBTOPOLOGY_ID1, 1))) // Only owns task 1 (task 2 is not yet reconciled, tasks 3,4 already revoked)
.build();
// Verify mixed epoch assignment:
// - Task 1 from SUBTOPOLOGY_ID1 should have epoch 5 (previous assignment epoch)
// - Task 3 from SUBTOPOLOGY_ID2 should have epoch 11 (target assignment epoch)
// - Task 5 from SUBTOPOLOGY_ID2 should have epoch 11 (target assignment epoch)
assertEquals(
new StreamsGroupMember.Builder(MEMBER_NAME)
.setState(MemberState.STABLE)
.setProcessId(PROCESS_ID)
.setMemberEpoch(targetAssignmentEpoch)
.setPreviousMemberEpoch(memberEpoch)
.setAssignedTasks(mkTasksTupleWithEpochs(TaskRole.ACTIVE,
mkTasksWithEpochs(SUBTOPOLOGY_ID1, Map.of(1, 5, 2, 6)),
mkTasksWithEpochs(SUBTOPOLOGY_ID2, Map.of(3, targetAssignmentEpoch, 5, targetAssignmentEpoch))))
.setTasksPendingRevocation(TasksTupleWithEpochs.EMPTY)
.build(),
updatedMember
);
}
}
| CurrentAssignmentBuilderTest |
java | apache__camel | dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/FlatpackComponentBuilderFactory.java | {
"start": 5383,
"end": 6293
} | class ____
extends AbstractComponentBuilder<FlatpackComponent>
implements FlatpackComponentBuilder {
@Override
protected FlatpackComponent buildConcreteComponent() {
return new FlatpackComponent();
}
@Override
protected boolean setPropertyOnComponent(
Component component,
String name,
Object value) {
switch (name) {
case "bridgeErrorHandler": ((FlatpackComponent) component).setBridgeErrorHandler((boolean) value); return true;
case "lazyStartProducer": ((FlatpackComponent) component).setLazyStartProducer((boolean) value); return true;
case "autowiredEnabled": ((FlatpackComponent) component).setAutowiredEnabled((boolean) value); return true;
default: return false;
}
}
}
} | FlatpackComponentBuilderImpl |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java | {
"start": 14435,
"end": 27133
} | class ____ extends AckedClusterStateUpdateTask {
protected boolean found = false;
protected boolean changed = false;
private final ProjectId projectId;
private final PutRepositoryRequest request;
private final RepositoriesService repositoriesService;
RegisterRepositoryTask(
final RepositoriesService repositoriesService,
final ProjectId projectId,
final PutRepositoryRequest request,
final ListenableFuture<AcknowledgedResponse> acknowledgementStep
) {
super(request, acknowledgementStep);
this.repositoriesService = repositoriesService;
this.projectId = projectId;
this.request = request;
}
/**
* Constructor used by {@link org.elasticsearch.action.admin.cluster.repositories.reservedstate.ReservedRepositoryAction}
* @param repositoriesService
* @param request
*/
public RegisterRepositoryTask(
final RepositoriesService repositoriesService,
final ProjectId projectId,
final PutRepositoryRequest request
) {
this(repositoriesService, projectId, request, null);
}
@Override
public ClusterState execute(ClusterState currentState) {
final var projectState = currentState.projectState(projectId);
RepositoriesMetadata repositories = RepositoriesMetadata.get(projectState.metadata());
List<RepositoryMetadata> repositoriesMetadata = new ArrayList<>(repositories.repositories().size() + 1);
for (RepositoryMetadata repositoryMetadata : repositories.repositories()) {
if (repositoryMetadata.name().equals(request.name())) {
rejectInvalidReadonlyFlagChange(repositoryMetadata, request.settings());
final RepositoryMetadata newRepositoryMetadata = new RepositoryMetadata(
request.name(),
// Copy the UUID from the existing instance rather than resetting it back to MISSING_UUID which would force us to
// re-read the RepositoryData to get it again. In principle the new RepositoryMetadata might point to a different
// underlying repository at this point, but if so that'll cause things to fail in clear ways and eventually (before
// writing anything) we'll read the RepositoryData again and update the UUID in the RepositoryMetadata to match. See
// also #109936.
repositoryMetadata.uuid(),
request.type(),
request.settings()
);
Repository existing = repositoriesService.repositoryOrNull(projectId, request.name());
assert existing != null : "repository [" + newRepositoryMetadata.name() + "] must exist";
assert existing.getMetadata() == repositoryMetadata;
final RepositoryMetadata updatedMetadata;
if (canUpdateInPlace(newRepositoryMetadata, existing)) {
if (repositoryMetadata.settings().equals(newRepositoryMetadata.settings())) {
if (repositoryMetadata.generation() == RepositoryData.CORRUPTED_REPO_GEN) {
// If recreating a corrupted repository with the same settings, reset the corrupt flag.
// Setting the safe generation to unknown, so that a consistent generation is found.
ensureRepositoryNotInUse(projectState, request.name());
logger.info(
"repository [{}/{}] is marked as corrupted, resetting the corruption marker",
repositoryMetadata.name(),
repositoryMetadata.uuid()
);
repositoryMetadata = repositoryMetadata.withGeneration(
RepositoryData.UNKNOWN_REPO_GEN,
repositoryMetadata.pendingGeneration()
);
} else {
// Previous version is the same as this one no update is needed.
return currentState;
}
}
// we're updating in place so the updated metadata must point at the same uuid and generations
updatedMetadata = repositoryMetadata.withSettings(newRepositoryMetadata.settings());
} else {
ensureRepositoryNotInUse(projectState, request.name());
updatedMetadata = newRepositoryMetadata;
}
found = true;
repositoriesMetadata.add(updatedMetadata);
} else {
repositoriesMetadata.add(repositoryMetadata);
}
}
if (found == false) {
repositoriesMetadata.add(new RepositoryMetadata(request.name(), request.type(), request.settings()));
}
repositories = new RepositoriesMetadata(repositoriesMetadata);
changed = true;
return ClusterState.builder(currentState)
.putProjectMetadata(ProjectMetadata.builder(projectState.metadata()).putCustom(RepositoriesMetadata.TYPE, repositories))
.build();
}
}
/**
* Ensures that we can create the repository and that its creation actually works
* <p>
* This verification method will create and then close the repository we want to create.
*
* @param request
*/
public void validateRepositoryCanBeCreated(final ProjectId projectId, final PutRepositoryRequest request) {
final RepositoryMetadata newRepositoryMetadata = new RepositoryMetadata(request.name(), request.type(), request.settings());
// Trying to create the new repository on master to make sure it works
closeRepository(createRepository(projectId, newRepositoryMetadata));
}
private void validatePutRepositoryRequest(
final ProjectId projectId,
final PutRepositoryRequest request,
ActionListener<Void> resultListener
) {
final RepositoryMetadata newRepositoryMetadata = new RepositoryMetadata(request.name(), request.type(), request.settings());
try {
final var repository = createRepository(projectId, newRepositoryMetadata);
if (request.verify()) {
// verify repository on local node only, different from verifyRepository method that runs on other cluster nodes
threadPool.executor(ThreadPool.Names.SNAPSHOT)
.execute(ActionRunnable.run(ActionListener.runBefore(resultListener, () -> closeRepository(repository)), () -> {
final var token = repository.startVerification();
if (token != null) {
repository.verify(token, clusterService.localNode());
repository.endVerification(token);
}
}));
} else {
closeRepository(repository);
resultListener.onResponse(null);
}
} catch (Exception e) {
resultListener.onFailure(e);
}
}
private void submitUnbatchedTask(@SuppressWarnings("SameParameterValue") String source, ClusterStateUpdateTask task) {
submitUnbatchedTask(clusterService, source, task);
}
@SuppressForbidden(reason = "legacy usage of unbatched task") // TODO add support for batching here
private static void submitUnbatchedTask(
ClusterService clusterService,
@SuppressWarnings("SameParameterValue") String source,
ClusterStateUpdateTask task
) {
clusterService.submitUnbatchedStateUpdateTask(source, task);
}
/**
* Set the repository UUID in the named repository's {@link RepositoryMetadata} to match the UUID in its {@link RepositoryData},
* which may involve a cluster state update.
*
* @param listener notified when the {@link RepositoryMetadata} is updated, possibly on this thread or possibly on the master service
* thread
*/
public static void updateRepositoryUuidInMetadata(
ClusterService clusterService,
final ProjectId projectId,
final String repositoryName,
RepositoryData repositoryData,
ActionListener<Void> listener
) {
final String repositoryUuid = repositoryData.getUuid();
if (repositoryUuid.equals(RepositoryData.MISSING_UUID)) {
listener.onResponse(null);
return;
}
final RepositoryMetadata repositoryMetadata = RepositoriesMetadata.get(clusterService.state().metadata().getProject(projectId))
.repository(repositoryName);
if (repositoryMetadata == null || repositoryMetadata.uuid().equals(repositoryUuid)) {
listener.onResponse(null);
return;
}
logger.info(
Strings.format(
"Registering repository %s with repository UUID [%s] and generation [%d]",
projectRepoString(projectId, repositoryName),
repositoryData.getUuid(),
repositoryData.getGenId()
)
);
submitUnbatchedTask(
clusterService,
"update repository UUID " + projectRepoString(projectId, repositoryName) + " to [" + repositoryUuid + "]",
new ClusterStateUpdateTask() {
@Override
public ClusterState execute(ClusterState currentState) {
final var project = currentState.metadata().getProject(projectId);
final RepositoriesMetadata currentReposMetadata = RepositoriesMetadata.get(project);
final RepositoryMetadata repositoryMetadata = currentReposMetadata.repository(repositoryName);
if (repositoryMetadata == null || repositoryMetadata.uuid().equals(repositoryUuid)) {
return currentState;
} else {
final RepositoriesMetadata newReposMetadata = currentReposMetadata.withUuid(repositoryName, repositoryUuid);
return ClusterState.builder(currentState)
.putProjectMetadata(ProjectMetadata.builder(project).putCustom(RepositoriesMetadata.TYPE, newReposMetadata))
.build();
}
}
@Override
public void onFailure(Exception e) {
listener.onFailure(e);
}
@Override
public void clusterStateProcessed(ClusterState oldState, ClusterState newState) {
listener.onResponse(null);
}
}
);
}
/**
* Unregisters repository in the cluster
* <p>
* This method can be only called on the master node. It removes repository information from cluster metadata.
*
* @param projectId project to look for the repository
* @param request unregister repository request
* @param listener unregister repository listener
*/
public void unregisterRepository(
final ProjectId projectId,
final DeleteRepositoryRequest request,
final ActionListener<AcknowledgedResponse> listener
) {
submitUnbatchedTask(
"delete_repository " + projectRepoString(projectId, request.name()),
new UnregisterRepositoryTask(projectId, request, listener) {
@Override
public void clusterStateProcessed(ClusterState oldState, ClusterState newState) {
if (deletedRepositories.isEmpty() == false) {
logger.info("deleted repositories [{}] for project [{}]", deletedRepositories, projectId);
}
}
@Override
public boolean mustAck(DiscoveryNode discoveryNode) {
// repository was created on both master and data nodes
return discoveryNode.isMasterNode() || discoveryNode.canContainData();
}
}
);
}
/**
* Task | RegisterRepositoryTask |
java | apache__kafka | metadata/src/main/java/org/apache/kafka/controller/PeriodicTaskFlag.java | {
"start": 887,
"end": 1011
} | enum ____ {
/**
* Set if we want to log the name and execution time on each run.
*/
VERBOSE
}
| PeriodicTaskFlag |
java | apache__logging-log4j2 | log4j-core/src/main/java/org/apache/logging/log4j/core/config/plugins/PluginValue.java | {
"start": 1530,
"end": 1717
} | interface ____ {
String value();
/** If false, standard configuration value substitution is not done on the referenced value. */
boolean substitute() default true;
}
| PluginValue |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/threadsafety/ImmutableCheckerTest.java | {
"start": 112364,
"end": 112419
} | class ____ extends ImmutableAbstractClass {}
| D |
java | apache__dubbo | dubbo-plugin/dubbo-filter-cache/src/main/java/org/apache/dubbo/cache/support/expiring/ExpiringMap.java | {
"start": 7191,
"end": 10488
} | class ____ implements Runnable {
private long timeToLiveMillis;
private long expirationIntervalMillis;
private volatile boolean running = false;
private final Thread expirerThread;
@Override
public String toString() {
return "ExpireThread{" + ", timeToLiveMillis="
+ timeToLiveMillis + ", expirationIntervalMillis="
+ expirationIntervalMillis + ", running="
+ running + ", expirerThread="
+ expirerThread + '}';
}
public ExpireThread() {
expirerThread = new Thread(this, "ExpiryMapExpire-" + expireCount.getAndIncrement());
expirerThread.setDaemon(true);
}
@Override
public void run() {
while (running) {
processExpires();
try {
Thread.sleep(expirationIntervalMillis);
} catch (InterruptedException e) {
running = false;
}
}
}
private void processExpires() {
long timeNow = System.currentTimeMillis();
if (timeToLiveMillis <= 0) {
return;
}
for (ExpiryObject o : delegateMap.values()) {
long timeIdle = timeNow - o.getLastAccessTime();
if (timeIdle >= timeToLiveMillis) {
delegateMap.remove(o.getKey());
}
}
}
/**
* start expiring Thread
*/
public void startExpiring() {
if (!running) {
running = true;
expirerThread.start();
}
}
/**
* start thread
*/
public void startExpiryIfNotStarted() {
if (running && timeToLiveMillis <= 0) {
return;
}
startExpiring();
}
/**
* stop thread
*/
public void stopExpiring() {
if (running) {
running = false;
expirerThread.interrupt();
}
}
/**
* get thread state
*
* @return thread state
*/
public boolean isRunning() {
return running;
}
/**
* get time to live
*
* @return time to live
*/
public int getTimeToLive() {
return (int) timeToLiveMillis / 1000;
}
/**
* update time to live
*
* @param timeToLive time to live
*/
public void setTimeToLive(long timeToLive) {
this.timeToLiveMillis = timeToLive * 1000;
}
/**
* get expiration interval
*
* @return expiration interval (second)
*/
public int getExpirationInterval() {
return (int) expirationIntervalMillis / 1000;
}
/**
* set expiration interval
*
* @param expirationInterval expiration interval (second)
*/
public void setExpirationInterval(long expirationInterval) {
this.expirationIntervalMillis = expirationInterval * 1000;
}
}
}
| ExpireThread |
java | spring-projects__spring-boot | buildpack/spring-boot-buildpack-platform/src/main/java/org/springframework/boot/buildpack/platform/build/Builder.java | {
"start": 14319,
"end": 15585
} | class ____ implements BuildpackResolverContext {
private final ImageFetcher imageFetcher;
private final ImagePlatform platform;
private final BuilderMetadata builderMetadata;
private final BuildpackLayersMetadata buildpackLayersMetadata;
BuilderResolverContext(ImageFetcher imageFetcher, ImagePlatform platform, BuilderMetadata builderMetadata,
BuildpackLayersMetadata buildpackLayersMetadata) {
this.imageFetcher = imageFetcher;
this.platform = platform;
this.builderMetadata = builderMetadata;
this.buildpackLayersMetadata = buildpackLayersMetadata;
}
@Override
public List<BuildpackMetadata> getBuildpackMetadata() {
return this.builderMetadata.getBuildpacks();
}
@Override
public BuildpackLayersMetadata getBuildpackLayersMetadata() {
return this.buildpackLayersMetadata;
}
@Override
public Image fetchImage(ImageReference reference, ImageType imageType) throws IOException {
return this.imageFetcher.fetchImage(imageType, reference, this.platform);
}
@Override
public void exportImageLayers(ImageReference reference, IOBiConsumer<String, TarArchive> exports)
throws IOException {
Builder.this.docker.image().exportLayers(reference, this.platform, exports);
}
}
}
| BuilderResolverContext |
java | elastic__elasticsearch | server/src/internalClusterTest/java/org/elasticsearch/versioning/ConcurrentSeqNoVersioningIT.java | {
"start": 24516,
"end": 26036
} | class ____ implements LinearizabilityChecker.SequentialSpec {
private final Version initialVersion;
private CASSequentialSpec(Version initialVersion) {
this.initialVersion = initialVersion;
}
@Override
public Object initialState() {
return casSuccess(initialVersion);
}
@Override
public Optional<Object> nextState(Object currentState, Object input, Object output) {
State state = (State) currentState;
if (output instanceof IndexResponseHistoryOutput indexResponseHistoryOutput) {
if (input.equals(state.safeVersion) || (state.lastFailed && ((Version) input).compareTo(state.safeVersion) > 0)) {
return Optional.of(casSuccess(indexResponseHistoryOutput.getVersion()));
} else {
return Optional.empty();
}
} else {
return Optional.of(state.failed());
}
}
}
private record State(Version safeVersion, boolean lastFailed) {
public State failed() {
return lastFailed ? this : casFail(safeVersion);
}
}
private static State casFail(Version stateVersion) {
return new State(stateVersion, true);
}
private static State casSuccess(Version version1) {
return new State(version1, false);
}
/**
* HistoryOutput contains the information from the output of calls.
*/
private | CASSequentialSpec |
java | apache__camel | components/camel-salesforce/camel-salesforce-component/src/main/java/org/apache/camel/component/salesforce/api/dto/PlatformEvent.java | {
"start": 1171,
"end": 2780
} | class ____ implements Serializable {
private static final long serialVersionUID = 1L;
private final ZonedDateTime created;
private final String createdById;
private final Map<String, String> eventData = new HashMap<>();
@JsonCreator
public PlatformEvent(@JsonProperty("CreatedDate") final ZonedDateTime created,
@JsonProperty("CreatedById") final String createdById) {
this.created = created;
this.createdById = createdById;
}
@Override
public boolean equals(Object obj) {
if (!(obj instanceof PlatformEvent)) {
return false;
}
final PlatformEvent other = (PlatformEvent) obj;
return Objects.equals(created, other.created) && Objects.equals(createdById, other.createdById)
&& Objects.equals(eventData, other.eventData);
}
public ZonedDateTime getCreated() {
return created;
}
public String getCreatedById() {
return createdById;
}
public Map<String, String> getEventData() {
return eventData;
}
@Override
public int hashCode() {
return Objects.hash(created, createdById, eventData);
}
@JsonAnySetter
public void set(final String name, final String value) {
eventData.put(name, value);
}
@Override
public String toString() {
return new StringBuilder().append("PlatformEvent: createdById: ").append(createdById).append(", createdId: ")
.append(created).append(", data: ").append(eventData)
.toString();
}
}
| PlatformEvent |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/type/internal/CustomMutabilityConvertedPrimitiveBasicTypeImpl.java | {
"start": 442,
"end": 1041
} | class ____<J> extends CustomMutabilityConvertedBasicTypeImpl<J> {
private final Class<J> primitiveClass;
public CustomMutabilityConvertedPrimitiveBasicTypeImpl(
String name,
String description,
JdbcType jdbcType,
BasicValueConverter<J, ?> converter,
Class<J> primitiveClass,
MutabilityPlan<J> mutabilityPlan) {
super( name, description, jdbcType, converter, mutabilityPlan );
assert primitiveClass.isPrimitive();
this.primitiveClass = primitiveClass;
}
@Override
public Class<J> getJavaType() {
return primitiveClass;
}
}
| CustomMutabilityConvertedPrimitiveBasicTypeImpl |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/query/criteria/internal/hhh14916/Author.java | {
"start": 530,
"end": 812
} | class ____ {
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
public Long authorId;
@Column
public String name;
@OneToMany(fetch = FetchType.LAZY, mappedBy = "author", orphanRemoval = true, cascade = CascadeType.ALL)
public List<Book> books = new ArrayList<>();
}
| Author |
java | alibaba__nacos | core/src/main/java/com/alibaba/nacos/core/web/NacosWebServerListener.java | {
"start": 1134,
"end": 2069
} | class ____ implements ApplicationListener<WebServerInitializedEvent> {
private static final String SPRING_MANAGEMENT_CONTEXT_NAMESPACE = "management";
private final ServerMemberManager serverMemberManager;
public NacosWebServerListener(ServerMemberManager serverMemberManager, ServletContext servletContext) {
this.serverMemberManager = serverMemberManager;
EnvUtil.setContextPath(servletContext.getContextPath());
}
@Override
public void onApplicationEvent(WebServerInitializedEvent event) {
String serverNamespace = event.getApplicationContext().getServerNamespace();
if (SPRING_MANAGEMENT_CONTEXT_NAMESPACE.equals(serverNamespace)) {
// ignore
// fix#issue https://github.com/alibaba/nacos/issues/7230
return;
}
serverMemberManager.setSelfReady(event.getWebServer().getPort());
}
}
| NacosWebServerListener |
java | spring-projects__spring-framework | spring-aop/src/test/java/org/springframework/aop/support/DelegatingIntroductionInterceptorTests.java | {
"start": 4607,
"end": 5670
} | class ____ extends DelegatingIntroductionInterceptor implements TimeStamped, ITester {
@Override
public void foo() {
}
@Override
public long getTimeStamp() {
return t;
}
}
DelegatingIntroductionInterceptor ii = new TestII();
TestBean target = new TestBean();
ProxyFactory pf = new ProxyFactory(target);
IntroductionAdvisor ia = new DefaultIntroductionAdvisor(ii);
assertThat(ia.isPerInstance()).isTrue();
pf.addAdvisor(0, ia);
//assertTrue(Arrays.binarySearch(pf.getProxiedInterfaces(), TimeStamped.class) != -1);
TimeStamped ts = (TimeStamped) pf.getProxy();
assertThat(ts).isInstanceOf(TimeStamped.class);
// Shouldn't proxy framework interfaces
assertThat(ts).isNotInstanceOf(MethodInterceptor.class);
assertThat(ts).isNotInstanceOf(IntroductionInterceptor.class);
assertThat(ts.getTimeStamp()).isEqualTo(t);
((ITester) ts).foo();
((ITestBean) ts).getAge();
// Test removal
ii.suppressInterface(TimeStamped.class);
// Note that we need to construct a new proxy factory,
// or suppress the | TestII |
java | apache__flink | flink-end-to-end-tests/flink-stream-state-ttl-test/src/main/java/org/apache/flink/streaming/tests/verify/TtlStateVerifier.java | {
"start": 1220,
"end": 2109
} | interface ____<UV, GV> {
List<TtlStateVerifier<?, ?>> VERIFIERS =
Arrays.asList(
new TtlValueStateVerifier(),
new TtlListStateVerifier(),
new TtlMapStateVerifier(),
new TtlAggregatingStateVerifier(),
new TtlReducingStateVerifier());
@Nonnull
default String getId() {
return this.getClass().getSimpleName();
}
@Nonnull
State createState(
@Nonnull FunctionInitializationContext context, @Nonnull StateTtlConfig ttlConfig);
@Nonnull
TypeSerializer<UV> getUpdateSerializer();
UV generateRandomUpdate();
GV get(@Nonnull State state) throws Exception;
void update(@Nonnull State state, Object update) throws Exception;
boolean verify(@Nonnull TtlVerificationContext<?, ?> verificationContext);
}
| TtlStateVerifier |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/processor/IdempotentConsumerAsyncTest.java | {
"start": 1284,
"end": 5731
} | class ____ extends ContextTestSupport {
protected Endpoint startEndpoint;
protected MockEndpoint resultEndpoint;
@Override
public boolean isUseRouteBuilder() {
return false;
}
@Test
public void testDuplicateMessagesAreFilteredOut() throws Exception {
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
from("direct:start")
.idempotentConsumer(header("messageId"), MemoryIdempotentRepository.memoryIdempotentRepository(200))
.threads().to("mock:result");
}
});
context.start();
resultEndpoint.expectedBodiesReceivedInAnyOrder("one", "two", "three");
sendMessage("1", "one");
sendMessage("2", "two");
sendMessage("1", "one");
sendMessage("2", "two");
sendMessage("1", "one");
sendMessage("3", "three");
assertMockEndpointsSatisfied();
}
@Test
public void testFailedExchangesNotAddedDeadLetterChannel() throws Exception {
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
errorHandler(deadLetterChannel("mock:error").maximumRedeliveries(2).redeliveryDelay(0).logStackTrace(false));
from("direct:start")
.idempotentConsumer(header("messageId"), MemoryIdempotentRepository.memoryIdempotentRepository(200))
.threads().process(new Processor() {
public void process(Exchange exchange) {
String id = exchange.getIn().getHeader("messageId", String.class);
if (id.equals("2")) {
throw new IllegalArgumentException("Damn I cannot handle id 2");
}
}
}).to("mock:result");
}
});
context.start();
// we send in 2 messages with id 2 that fails
getMockEndpoint("mock:error").expectedMessageCount(2);
resultEndpoint.expectedBodiesReceivedInAnyOrder("one", "three");
sendMessage("1", "one");
sendMessage("2", "two");
sendMessage("1", "one");
sendMessage("2", "two");
sendMessage("1", "one");
sendMessage("3", "three");
assertMockEndpointsSatisfied();
}
@Test
public void testFailedExchangesNotAdded() throws Exception {
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
from("direct:start")
.idempotentConsumer(header("messageId"), MemoryIdempotentRepository.memoryIdempotentRepository(200))
.threads().process(new Processor() {
public void process(Exchange exchange) {
String id = exchange.getIn().getHeader("messageId", String.class);
if (id.equals("2")) {
throw new IllegalArgumentException("Damn I cannot handle id 2");
}
}
}).to("mock:result");
}
});
context.start();
resultEndpoint.expectedBodiesReceivedInAnyOrder("one", "three");
sendMessage("1", "one");
sendMessage("2", "two");
sendMessage("1", "one");
sendMessage("2", "two");
sendMessage("1", "one");
sendMessage("3", "three");
assertMockEndpointsSatisfied();
}
protected void sendMessage(final Object messageId, final Object body) throws Exception {
template.send(startEndpoint, new Processor() {
public void process(Exchange exchange) {
// now lets fire in a message
Message in = exchange.getIn();
in.setBody(body);
in.setHeader("messageId", messageId);
}
});
// must sleep a little as the route is async and we can be too fast
Thread.sleep(50);
}
@Override
@BeforeEach
public void setUp() throws Exception {
super.setUp();
startEndpoint = resolveMandatoryEndpoint("direct:start");
resultEndpoint = getMockEndpoint("mock:result");
}
}
| IdempotentConsumerAsyncTest |
java | elastic__elasticsearch | x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/request/AlibabaCloudSearchRerankRequest.java | {
"start": 1445,
"end": 4628
} | class ____ implements Request {
private final AlibabaCloudSearchAccount account;
private final String query;
private final List<String> input;
private final Boolean returnDocuments;
private final Integer topN;
private final URI uri;
private final AlibabaCloudSearchRerankTaskSettings taskSettings;
private final String model;
private final String host;
private final String workspaceName;
private final String httpSchema;
private final String inferenceEntityId;
public AlibabaCloudSearchRerankRequest(
AlibabaCloudSearchAccount account,
String query,
List<String> input,
@Nullable Boolean returnDocuments,
@Nullable Integer topN,
AlibabaCloudSearchRerankModel rerankModel
) {
Objects.requireNonNull(rerankModel);
this.account = Objects.requireNonNull(account);
this.query = Objects.requireNonNull(query);
this.input = Objects.requireNonNull(input);
this.returnDocuments = returnDocuments;
this.topN = topN;
taskSettings = rerankModel.getTaskSettings();
model = rerankModel.getServiceSettings().getCommonSettings().modelId();
host = rerankModel.getServiceSettings().getCommonSettings().getHost();
workspaceName = rerankModel.getServiceSettings().getCommonSettings().getWorkspaceName();
httpSchema = rerankModel.getServiceSettings().getCommonSettings().getHttpSchema() != null
? rerankModel.getServiceSettings().getCommonSettings().getHttpSchema()
: "https";
uri = buildUri(null, AlibabaCloudSearchUtils.SERVICE_NAME, this::buildDefaultUri);
inferenceEntityId = rerankModel.getInferenceEntityId();
}
@Override
public HttpRequest createHttpRequest() {
HttpPost httpPost = new HttpPost(uri);
ByteArrayEntity byteEntity = new ByteArrayEntity(
Strings.toString(new AlibabaCloudSearchRerankRequestEntity(query, input, returnDocuments, topN, taskSettings))
.getBytes(StandardCharsets.UTF_8)
);
httpPost.setEntity(byteEntity);
httpPost.setHeader(HttpHeaders.CONTENT_TYPE, XContentType.JSON.mediaType());
httpPost.setHeader(createAuthBearerHeader(account.apiKey()));
return new HttpRequest(httpPost, getInferenceEntityId());
}
@Override
public String getInferenceEntityId() {
return inferenceEntityId;
}
@Override
public URI getURI() {
return uri;
}
@Override
public Request truncate() {
return this;
}
@Override
public boolean[] getTruncationInfo() {
return null;
}
URI buildDefaultUri() throws URISyntaxException {
return new URIBuilder().setScheme(httpSchema)
.setHost(host)
.setPathSegments(
AlibabaCloudSearchUtils.VERSION_3,
AlibabaCloudSearchUtils.OPENAPI_PATH,
AlibabaCloudSearchUtils.WORKSPACE_PATH,
workspaceName,
AlibabaCloudSearchUtils.RERANK_PATH,
model
)
.build();
}
}
| AlibabaCloudSearchRerankRequest |
java | apache__camel | tooling/maven/camel-package-maven-plugin/src/main/java/org/apache/camel/maven/packaging/PackageArchetypeCatalogMojo.java | {
"start": 8643,
"end": 9541
} | class ____ {
private String groupId;
private String artifactId;
private String version;
private String description;
public String getGroupId() {
return groupId;
}
public void setGroupId(String groupId) {
this.groupId = groupId;
}
public String getArtifactId() {
return artifactId;
}
public void setArtifactId(String artifactId) {
this.artifactId = artifactId;
}
public String getVersion() {
return version;
}
public void setVersion(String version) {
this.version = version;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
}
}
| ArchetypeModel |
java | greenrobot__greendao | tests/DaoTest/src/androidTest/java/org/greenrobot/greendao/daotest/rx/RxDaoTest.java | {
"start": 1193,
"end": 12957
} | class ____ extends AbstractDaoTest<TestEntityDao, TestEntity, Long> {
private RxDao rxDao;
public RxDaoTest() {
super(TestEntityDao.class);
}
@Override
protected void setUp() throws Exception {
super.setUp();
rxDao = dao.rx();
}
public void testScheduler() {
TestSubscriber<List<TestEntity>> testSubscriber = RxTestHelper.awaitTestSubscriber(rxDao.loadAll());
Thread lastSeenThread = testSubscriber.getLastSeenThread();
assertNotSame(lastSeenThread, Thread.currentThread());
}
public void testNoScheduler() {
RxDao<TestEntity, Long> rxDaoNoScheduler = dao.rxPlain();
TestSubscriber<List<TestEntity>> testSubscriber = RxTestHelper.awaitTestSubscriber(rxDaoNoScheduler.loadAll());
Thread lastSeenThread = testSubscriber.getLastSeenThread();
assertSame(lastSeenThread, Thread.currentThread());
}
public void testLoadAll() {
insertEntity("foo");
insertEntity("bar");
TestSubscriber<List<TestEntity>> testSubscriber = RxTestHelper.awaitTestSubscriber(rxDao.loadAll());
assertEquals(1, testSubscriber.getValueCount());
List<TestEntity> entities = testSubscriber.getOnNextEvents().get(0);
// Order of entities is unspecified
int foo = 0, bar = 0;
for (TestEntity entity : entities) {
String value = entity.getSimpleStringNotNull();
if (value.equals("foo")) {
foo++;
} else if (value.equals("bar")) {
bar++;
} else {
fail(value);
}
}
assertEquals(1, foo);
assertEquals(1, bar);
}
public void testLoad() {
TestEntity foo = insertEntity("foo");
TestSubscriber<TestEntity> testSubscriber = RxTestHelper.awaitTestSubscriber(rxDao.load(foo.getId()));
assertEquals(1, testSubscriber.getValueCount());
TestEntity foo2 = testSubscriber.getOnNextEvents().get(0);
assertEquals(foo.getSimpleStringNotNull(), foo2.getSimpleStringNotNull());
}
public void testLoad_noResult() {
TestSubscriber<TestEntity> testSubscriber = RxTestHelper.awaitTestSubscriber(rxDao.load(42));
assertEquals(1, testSubscriber.getValueCount());
// Should we really propagate null through Rx?
assertNull(testSubscriber.getOnNextEvents().get(0));
}
public void testRefresh() {
TestEntity entity = insertEntity("foo");
entity.setSimpleStringNotNull("temp");
RxTestHelper.awaitTestSubscriber(rxDao.refresh(entity));
assertEquals("foo", entity.getSimpleStringNotNull());
}
public void testInsert() {
TestEntity foo = RxTestHelper.createEntity("foo");
TestSubscriber<TestEntity> testSubscriber = RxTestHelper.awaitTestSubscriber(rxDao.insert(foo));
assertEquals(1, testSubscriber.getValueCount());
TestEntity foo2 = testSubscriber.getOnNextEvents().get(0);
assertSame(foo, foo2);
List<TestEntity> all = dao.loadAll();
assertEquals(1, all.size());
assertEquals(foo.getSimpleStringNotNull(), all.get(0).getSimpleStringNotNull());
}
public void testInsertInTx() {
TestEntity foo = RxTestHelper.createEntity("foo");
TestEntity bar = RxTestHelper.createEntity("bar");
TestSubscriber<Object[]> testSubscriber = RxTestHelper.awaitTestSubscriber(rxDao.insertInTx(foo, bar));
assertEquals(1, testSubscriber.getValueCount());
Object[] array = testSubscriber.getOnNextEvents().get(0);
assertSame(foo, array[0]);
assertSame(bar, array[1]);
List<TestEntity> all = dao.loadAll();
assertEquals(2, all.size());
assertEquals(foo.getSimpleStringNotNull(), all.get(0).getSimpleStringNotNull());
assertEquals(bar.getSimpleStringNotNull(), all.get(1).getSimpleStringNotNull());
}
public void testInsertInTxList() {
TestEntity foo = RxTestHelper.createEntity("foo");
TestEntity bar = RxTestHelper.createEntity("bar");
List<TestEntity> list = new ArrayList<>();
list.add(foo);
list.add(bar);
TestSubscriber<List<TestEntity>> testSubscriber = RxTestHelper.awaitTestSubscriber(rxDao.insertInTx(list));
assertEquals(1, testSubscriber.getValueCount());
List<TestEntity> result = testSubscriber.getOnNextEvents().get(0);
assertSame(foo, result.get(0));
assertSame(bar, result.get(1));
List<TestEntity> all = dao.loadAll();
assertEquals(2, all.size());
assertEquals(foo.getSimpleStringNotNull(), all.get(0).getSimpleStringNotNull());
assertEquals(bar.getSimpleStringNotNull(), all.get(1).getSimpleStringNotNull());
}
public void testInsertOrReplace() {
TestEntity foo = insertEntity("foo");
foo.setSimpleStringNotNull("bar");
assertUpdatedEntity(foo, rxDao.insertOrReplace(foo));
}
public void testInsertOrReplaceInTx() {
TestEntity foo = insertEntity("foo");
TestEntity bar = insertEntity("bar");
foo.setSimpleStringNotNull("foo2");
assertUpdatedEntities(foo, bar, rxDao.insertOrReplaceInTx(foo, bar));
}
public void testInsertOrReplaceInTxList() {
TestEntity foo = insertEntity("foo");
TestEntity bar = insertEntity("bar");
foo.setSimpleStringNotNull("foo2");
List<TestEntity> list = new ArrayList<>();
list.add(foo);
list.add(bar);
assertUpdatedEntities(list, rxDao.insertOrReplaceInTx(list));
}
public void testSave() {
TestEntity foo = insertEntity("foo");
foo.setSimpleStringNotNull("bar");
assertUpdatedEntity(foo, rxDao.save(foo));
}
public void testSaveInTx() {
TestEntity foo = insertEntity("foo");
TestEntity bar = insertEntity("bar");
foo.setSimpleStringNotNull("foo2");
assertUpdatedEntities(foo, bar, rxDao.saveInTx(foo, bar));
}
public void testSaveInTxList() {
TestEntity foo = insertEntity("foo");
TestEntity bar = insertEntity("bar");
foo.setSimpleStringNotNull("foo2");
List<TestEntity> list = new ArrayList<>();
list.add(foo);
list.add(bar);
assertUpdatedEntities(list, rxDao.saveInTx(list));
}
public void testUpdate() {
TestEntity foo = insertEntity("foo");
foo.setSimpleString("foofoo");
TestSubscriber testSubscriber = RxTestHelper.awaitTestSubscriber(rxDao.update(foo));
assertEquals(1, testSubscriber.getValueCount());
assertSame(foo, testSubscriber.getOnNextEvents().get(0));
List<TestEntity> testEntities = dao.loadAll();
assertEquals(1, testEntities.size());
assertNotSame(foo, testEntities.get(0));
assertEquals("foofoo", testEntities.get(0).getSimpleString());
}
public void testUpdateInTx() {
TestEntity foo = insertEntity("foo");
TestEntity bar = insertEntity("bar");
foo.setSimpleStringNotNull("foo2");
bar.setSimpleStringNotNull("bar2");
assertUpdatedEntities(foo, bar, rxDao.updateInTx(foo, bar));
}
public void testUpdateInTxList() {
TestEntity foo = insertEntity("foo");
TestEntity bar = insertEntity("bar");
foo.setSimpleStringNotNull("foo2");
bar.setSimpleStringNotNull("bar2");
List<TestEntity> list = new ArrayList<>();
list.add(foo);
list.add(bar);
assertUpdatedEntities(list, rxDao.updateInTx(list));
}
private void assertUpdatedEntity(TestEntity foo, Observable<TestEntity> observable) {
TestSubscriber<TestEntity> testSubscriber = RxTestHelper.awaitTestSubscriber(observable);
assertEquals(1, testSubscriber.getValueCount());
TestEntity bar = testSubscriber.getOnNextEvents().get(0);
assertSame(foo, bar);
List<TestEntity> all = dao.loadAll();
assertEquals(1, all.size());
assertEquals(foo.getSimpleStringNotNull(), all.get(0).getSimpleStringNotNull());
}
private void assertUpdatedEntities(TestEntity foo, TestEntity bar, Observable<Object[]> observable) {
TestSubscriber<Object[]> testSubscriber = RxTestHelper.awaitTestSubscriber(observable);
assertEquals(1, testSubscriber.getValueCount());
Object[] array = testSubscriber.getOnNextEvents().get(0);
assertSame(foo, array[0]);
assertSame(bar, array[1]);
List<TestEntity> all = dao.loadAll();
assertEquals(2, all.size());
assertEquals(foo.getSimpleStringNotNull(), all.get(0).getSimpleStringNotNull());
assertEquals(bar.getSimpleStringNotNull(), all.get(1).getSimpleStringNotNull());
}
private void assertUpdatedEntities(List<TestEntity> entities, Observable<List<TestEntity>> observable) {
TestEntity foo = entities.get(0);
TestEntity bar = entities.get(1);
TestSubscriber<List<TestEntity>> testSubscriber = RxTestHelper.awaitTestSubscriber(observable);
assertEquals(1, testSubscriber.getValueCount());
List<TestEntity> result = testSubscriber.getOnNextEvents().get(0);
assertSame(foo, result.get(0));
assertSame(bar, result.get(1));
List<TestEntity> all = dao.loadAll();
assertEquals(2, all.size());
assertEquals(foo.getSimpleStringNotNull(), all.get(0).getSimpleStringNotNull());
assertEquals(bar.getSimpleStringNotNull(), all.get(1).getSimpleStringNotNull());
}
public void testDelete() {
TestEntity foo = insertEntity("foo");
assertDeleted(rxDao.delete(foo));
}
public void testDeleteByKey() {
TestEntity foo = insertEntity("foo");
assertDeleted(rxDao.deleteByKey(foo.getId()));
}
public void testDeleteAll() {
insertEntity("foo");
insertEntity("bar");
assertDeleted(rxDao.deleteAll());
}
public void testDeleteInTx() {
TestEntity foo = insertEntity("foo");
TestEntity bar = insertEntity("bar");
assertDeleted(rxDao.deleteInTx(foo, bar));
}
public void testDeleteInTxList() {
TestEntity foo = insertEntity("foo");
TestEntity bar = insertEntity("bar");
List<TestEntity> list = new ArrayList<>();
list.add(foo);
list.add(bar);
assertDeleted(rxDao.deleteInTx(list));
}
public void testDeleteByKeyInTx() {
TestEntity foo = insertEntity("foo");
TestEntity bar = insertEntity("bar");
assertDeleted(rxDao.deleteByKeyInTx(foo.getId(), bar.getId()));
}
public void testDeleteByKeyInTxList() {
TestEntity foo = insertEntity("foo");
TestEntity bar = insertEntity("bar");
List<Long> list = new ArrayList<>();
list.add(foo.getId());
list.add(bar.getId());
assertDeleted(rxDao.deleteByKeyInTx(list));
}
private void assertDeleted(Observable<Void> observable) {
TestSubscriber testSubscriber = RxTestHelper.awaitTestSubscriber(observable);
assertEquals(1, testSubscriber.getValueCount());
assertNull(testSubscriber.getOnNextEvents().get(0));
assertEquals(0, dao.count());
}
public void testCount() {
insertEntity("foo");
TestSubscriber<Long> testSubscriber = RxTestHelper.awaitTestSubscriber(rxDao.count());
assertEquals(1, testSubscriber.getValueCount());
Long count = testSubscriber.getOnNextEvents().get(0);
assertEquals(1L, (long) count);
}
protected TestEntity insertEntity(String simpleStringNotNull) {
return RxTestHelper.insertEntity(dao, simpleStringNotNull);
}
}
| RxDaoTest |
java | apache__flink | flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/operators/window/groupwindow/assigners/SlidingWindowAssigner.java | {
"start": 3952,
"end": 5961
} | class ____ implements IterableIterator<TimeWindow> {
private final long paneSize;
private long paneStart;
private int numPanesRemaining;
PanesIterable(long paneStart, long paneSize, int numPanesPerWindow) {
this.paneStart = paneStart;
this.paneSize = paneSize;
this.numPanesRemaining = numPanesPerWindow;
}
@Override
public boolean hasNext() {
return numPanesRemaining > 0;
}
@Override
public TimeWindow next() {
TimeWindow window = new TimeWindow(paneStart, paneStart + paneSize);
numPanesRemaining--;
paneStart += paneSize;
return window;
}
@Override
public Iterator<TimeWindow> iterator() {
return this;
}
}
// ------------------------------------------------------------------------
// Utilities
// ------------------------------------------------------------------------
/**
* Creates a new {@code SlidingEventTimeWindows} {@link
* org.apache.flink.streaming.api.windowing.assigners.WindowAssigner} that assigns elements to
* sliding time windows based on the element timestamp.
*
* @param size The size of the generated windows.
* @param slide The slide interval of the generated windows.
* @return The time policy.
*/
public static SlidingWindowAssigner of(Duration size, Duration slide) {
return new SlidingWindowAssigner(size.toMillis(), slide.toMillis(), 0, true);
}
public SlidingWindowAssigner withOffset(Duration offset) {
return new SlidingWindowAssigner(size, slide, offset.toMillis(), isEventTime);
}
public SlidingWindowAssigner withEventTime() {
return new SlidingWindowAssigner(size, slide, offset, true);
}
public SlidingWindowAssigner withProcessingTime() {
return new SlidingWindowAssigner(size, slide, offset, false);
}
}
| PanesIterable |
java | hibernate__hibernate-orm | hibernate-hikaricp/src/main/java/org/hibernate/hikaricp/internal/HikariCPConnectionProvider.java | {
"start": 2286,
"end": 6303
} | class ____ implements ConnectionProvider, Configurable, Stoppable {
@Serial
private static final long serialVersionUID = -9131625057941275711L;
/**
* HikariCP configuration.
*/
private HikariConfig hikariConfig = null;
/**
* HikariCP data source.
*/
private HikariDataSource hikariDataSource = null;
// *************************************************************************
// Configurable
// *************************************************************************
@Override
public void configure(Map<String, Object> configuration) throws HibernateException {
try {
CONNECTION_INFO_LOGGER.configureConnectionPool( "HikariCP" );
hikariConfig = loadConfiguration( configuration );
hikariDataSource = new HikariDataSource( hikariConfig );
}
catch (Exception e) {
CONNECTION_INFO_LOGGER.unableToInstantiateConnectionPool( e );
throw new ConnectionProviderConfigurationException(
"Could not configure HikariCP: " + e.getMessage(), e );
}
}
// *************************************************************************
// ConnectionProvider
// *************************************************************************
@Override
public Connection getConnection() throws SQLException {
return hikariDataSource != null ? hikariDataSource.getConnection() : null;
}
@Override
public void closeConnection(Connection connection) throws SQLException {
connection.close();
}
@Override
public boolean supportsAggressiveRelease() {
return false;
}
@Override
public DatabaseConnectionInfo getDatabaseConnectionInfo(Dialect dialect) {
try ( var connection = hikariDataSource.getConnection() ) {
final var info = new DatabaseConnectionInfoImpl(
HikariCPConnectionProvider.class,
hikariConfig.getJdbcUrl(),
// Attempt to resolve the driver name from the dialect,
// in case it wasn't explicitly set and access to the
// database metadata is allowed
isBlank( hikariConfig.getDriverClassName() )
? getDriverName( connection )
: hikariConfig.getDriverClassName(),
dialect.getClass(),
dialect.getVersion(),
hasSchema( connection ),
hasCatalog( connection ),
hikariConfig.getSchema() != null
? hikariConfig.getSchema()
: getSchema( connection ),
hikariConfig.getCatalog() != null
? hikariConfig.getCatalog()
: getCatalog( connection ),
Boolean.toString( hikariConfig.isAutoCommit() ),
hikariConfig.getTransactionIsolation() != null
? hikariConfig.getTransactionIsolation()
: toIsolationNiceName( getIsolation( connection ) ),
hikariConfig.getMinimumIdle(),
hikariConfig.getMaximumPoolSize(),
getFetchSize( connection )
);
if ( !connection.getAutoCommit() ) {
connection.rollback();
}
return info;
}
catch (SQLException e) {
throw new JDBCConnectionException( "Could not create connection", e );
}
}
@Override
public boolean isUnwrappableAs(Class<?> unwrapType) {
return unwrapType.isAssignableFrom( HikariCPConnectionProvider.class )
|| unwrapType.isAssignableFrom( HikariDataSource.class )
|| unwrapType.isAssignableFrom( HikariConfig.class );
}
@Override
@SuppressWarnings("unchecked")
public <T> T unwrap(Class<T> unwrapType) {
if ( unwrapType.isAssignableFrom( HikariCPConnectionProvider.class ) ) {
return (T) this;
}
else if ( unwrapType.isAssignableFrom( HikariDataSource.class ) ) {
return (T) hikariDataSource;
}
else if ( unwrapType.isAssignableFrom( HikariConfig.class ) ) {
return (T) hikariConfig;
}
else {
throw new UnknownUnwrapTypeException( unwrapType );
}
}
// *************************************************************************
// Stoppable
// *************************************************************************
@Override
public void stop() {
if ( hikariDataSource != null ) {
CONNECTION_INFO_LOGGER.cleaningUpConnectionPool( "HikariCP" );
hikariDataSource.close();
}
}
}
| HikariCPConnectionProvider |
java | spring-projects__spring-security | rsocket/src/test/java/org/springframework/security/rsocket/core/PayloadSocketAcceptorInterceptorTests.java | {
"start": 1661,
"end": 4305
} | class ____ {
@Mock
private PayloadInterceptor interceptor;
@Mock
private SocketAcceptor socketAcceptor;
@Mock
private ConnectionSetupPayload setupPayload;
@Mock
private RSocket rSocket;
@Mock
private Payload payload;
private List<PayloadInterceptor> interceptors;
private PayloadSocketAcceptorInterceptor acceptorInterceptor;
@BeforeEach
public void setup() {
this.interceptors = Arrays.asList(this.interceptor);
this.acceptorInterceptor = new PayloadSocketAcceptorInterceptor(this.interceptors);
}
@Test
public void applyWhenDefaultMetadataMimeTypeThenDefaulted() {
given(this.setupPayload.dataMimeType()).willReturn(MediaType.APPLICATION_JSON_VALUE);
PayloadExchange exchange = captureExchange();
assertThat(exchange.getMetadataMimeType().toString())
.isEqualTo(WellKnownMimeType.MESSAGE_RSOCKET_COMPOSITE_METADATA.getString());
assertThat(exchange.getDataMimeType()).isEqualTo(MediaType.APPLICATION_JSON);
}
@Test
public void acceptWhenDefaultMetadataMimeTypeOverrideThenDefaulted() {
this.acceptorInterceptor.setDefaultMetadataMimeType(MediaType.APPLICATION_JSON);
given(this.setupPayload.dataMimeType()).willReturn(MediaType.APPLICATION_JSON_VALUE);
PayloadExchange exchange = captureExchange();
assertThat(exchange.getMetadataMimeType()).isEqualTo(MediaType.APPLICATION_JSON);
assertThat(exchange.getDataMimeType()).isEqualTo(MediaType.APPLICATION_JSON);
}
@Test
public void acceptWhenDefaultDataMimeTypeThenDefaulted() {
this.acceptorInterceptor.setDefaultDataMimeType(MediaType.APPLICATION_JSON);
PayloadExchange exchange = captureExchange();
assertThat(exchange.getMetadataMimeType().toString())
.isEqualTo(WellKnownMimeType.MESSAGE_RSOCKET_COMPOSITE_METADATA.getString());
assertThat(exchange.getDataMimeType()).isEqualTo(MediaType.APPLICATION_JSON);
}
private PayloadExchange captureExchange() {
given(this.socketAcceptor.accept(any(), any())).willReturn(Mono.just(this.rSocket));
given(this.interceptor.intercept(any(), any())).willReturn(Mono.empty());
SocketAcceptor wrappedAcceptor = this.acceptorInterceptor.apply(this.socketAcceptor);
RSocket result = wrappedAcceptor.accept(this.setupPayload, this.rSocket).block();
assertThat(result).isInstanceOf(PayloadInterceptorRSocket.class);
given(this.rSocket.fireAndForget(any())).willReturn(Mono.empty());
result.fireAndForget(this.payload).block();
ArgumentCaptor<PayloadExchange> exchangeArg = ArgumentCaptor.forClass(PayloadExchange.class);
verify(this.interceptor, times(2)).intercept(exchangeArg.capture(), any());
return exchangeArg.getValue();
}
}
| PayloadSocketAcceptorInterceptorTests |
java | apache__camel | components/camel-google/camel-google-pubsub/src/main/java/org/apache/camel/component/google/pubsub/GooglePubsubConstants.java | {
"start": 906,
"end": 2575
} | class ____ {
@Metadata(description = "The ID of the message, assigned by the server when the message is published.", javaType = "String")
public static final String MESSAGE_ID = "CamelGooglePubsubMessageId";
@Metadata(label = "consumer", description = "The ID used to acknowledge the received message.", javaType = "String")
public static final String ACK_ID = "CamelGooglePubsubMsgAckId";
@Metadata(label = "consumer", description = "The time at which the message was published",
javaType = "com.google.protobuf.Timestamp")
public static final String PUBLISH_TIME = "CamelGooglePubsubPublishTime";
@Deprecated(since = "4.15")
@Metadata(description = "The attributes of the message.", javaType = "Map<String, String>")
public static final String ATTRIBUTES = "CamelGooglePubsubAttributes";
@Metadata(label = "producer",
description = "If non-empty, identifies related messages for which publish order should be\n" +
" respected.",
javaType = "String")
public static final String ORDERING_KEY = "CamelGooglePubsubOrderingKey";
@Metadata(label = "consumer", description = "Can be used to manually acknowledge or negative-acknowledge a " +
"message when ackMode=NONE.",
javaType = "org.apache.camel.component.google.pubsub.consumer.GooglePubsubAcknowledge")
public static final String GOOGLE_PUBSUB_ACKNOWLEDGE = "CamelGooglePubsubAcknowledge";
@Deprecated(since = "4.15")
public static final String RESERVED_GOOGLE_CLIENT_ATTRIBUTE_PREFIX = "goog";
public | GooglePubsubConstants |
java | spring-projects__spring-framework | spring-webmvc/src/test/java/org/springframework/web/servlet/mvc/method/annotation/RequestPartMethodArgumentResolverTests.java | {
"start": 26707,
"end": 27164
} | class ____ implements WebDataBinderFactory {
@Override
public WebDataBinder createBinder(NativeWebRequest webRequest, @Nullable Object target,
String objectName) {
LocalValidatorFactoryBean validator = new LocalValidatorFactoryBean();
validator.afterPropertiesSet();
WebDataBinder dataBinder = new WebDataBinder(target, objectName);
dataBinder.setValidator(validator);
return dataBinder;
}
}
private static | ValidatingBinderFactory |
java | dropwizard__dropwizard | dropwizard-logging/src/main/java/io/dropwizard/logging/common/UdpSocketAppenderFactory.java | {
"start": 991,
"end": 1840
} | class ____<E extends DeferredProcessingAware> extends AbstractOutputStreamAppenderFactory<E> {
@NotEmpty
private String host = "localhost";
@PortRange
private int port = 514;
@JsonProperty
public String getHost() {
return host;
}
@JsonProperty
public void setHost(String host) {
this.host = host;
}
@JsonProperty
public int getPort() {
return port;
}
@JsonProperty
public void setPort(int port) {
this.port = port;
}
@Override
protected OutputStreamAppender<E> appender(LoggerContext context) {
final DropwizardUdpSocketAppender<E> appender = new DropwizardUdpSocketAppender<>(host, port);
appender.setContext(context);
appender.setName("udp-socket-appender");
return appender;
}
}
| UdpSocketAppenderFactory |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/timezones/ColumnZonedTest.java | {
"start": 1209,
"end": 3151
} | class ____ {
@Test void test(SessionFactoryScope scope) {
final ZonedDateTime nowZoned;
final OffsetDateTime nowOffset;
final Dialect dialect = scope.getSessionFactory().getJdbcServices().getDialect();
if ( dialect instanceof SybaseDialect ) {
// Sybase has 1/300th sec precision
nowZoned = ZonedDateTime.now().withZoneSameInstant( ZoneId.of("CET") )
.with( ChronoField.NANO_OF_SECOND, 0L );
nowOffset = OffsetDateTime.now().withOffsetSameInstant( ZoneOffset.ofHours(3) )
.with( ChronoField.NANO_OF_SECOND, 0L );
}
else if ( dialect.getDefaultTimestampPrecision() == 6 ) {
nowZoned = ZonedDateTime.now().withZoneSameInstant( ZoneId.of("CET") ).truncatedTo( ChronoUnit.MICROS );
nowOffset = OffsetDateTime.now().withOffsetSameInstant( ZoneOffset.ofHours(3) ).truncatedTo( ChronoUnit.MICROS );
}
else {
nowZoned = ZonedDateTime.now().withZoneSameInstant( ZoneId.of("CET") );
nowOffset = OffsetDateTime.now().withOffsetSameInstant( ZoneOffset.ofHours(3) );
}
long id = scope.fromTransaction( s-> {
Zoned z = new Zoned();
z.zonedDateTime = nowZoned;
z.offsetDateTime = nowOffset;
s.persist(z);
return z.id;
});
scope.inSession( s -> {
Zoned z = s.find(Zoned.class, id);
Instant expected = DateTimeUtils.adjustToDefaultPrecision( nowZoned.toInstant(), dialect );
Instant actual = DateTimeUtils.adjustToDefaultPrecision( z.zonedDateTime.toInstant(), dialect );
assertEquals(
expected,
actual
);
expected = DateTimeUtils.adjustToDefaultPrecision( nowOffset.toInstant(), dialect );
actual = DateTimeUtils.adjustToDefaultPrecision( z.offsetDateTime.toInstant(), dialect );
assertEquals(
expected,
actual
);
assertEquals( nowZoned.toOffsetDateTime().getOffset(), z.zonedDateTime.toOffsetDateTime().getOffset() );
assertEquals( nowOffset.getOffset(), z.offsetDateTime.getOffset() );
});
}
@Entity
public static | ColumnZonedTest |
java | spring-projects__spring-framework | spring-webflux/src/main/java/org/springframework/web/reactive/resource/ResourceTransformer.java | {
"start": 962,
"end": 1379
} | interface ____ {
/**
* Transform the given resource.
* @param exchange the current exchange
* @param resource the resource to transform
* @param transformerChain the chain of remaining transformers to delegate to
* @return the transformed resource (never empty)
*/
Mono<Resource> transform(ServerWebExchange exchange, Resource resource,
ResourceTransformerChain transformerChain);
}
| ResourceTransformer |
java | spring-projects__spring-security | access/src/test/java/org/springframework/security/access/expression/method/PrePostAnnotationSecurityMetadataSourceTests.java | {
"start": 9015,
"end": 9264
} | interface ____ {
@PreAuthorize("interfaceMethodAuthzExpression")
@PreFilter(filterTarget = "param", value = "interfacePreFilterExpression")
List<?> doSomething(List<?> param);
}
@PreAuthorize("someExpression")
public static | ReturnAnotherList |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/StrAlgoArgs.java | {
"start": 1843,
"end": 4316
} | class ____ {
/**
* Utility constructor.
*/
private Builder() {
}
/**
* Creates new {@link StrAlgoArgs} by keys.
*
* @return new {@link StrAlgoArgs} with {@literal By KEYS} set.
*/
public static StrAlgoArgs keys(String... keys) {
return new StrAlgoArgs().by(By.KEYS, keys);
}
/**
* Creates new {@link StrAlgoArgs} by strings.
*
* @return new {@link StrAlgoArgs} with {@literal By STRINGS} set.
*/
public static StrAlgoArgs strings(String... strings) {
return new StrAlgoArgs().by(By.STRINGS, strings);
}
/**
* Creates new {@link StrAlgoArgs} by strings and charset.
*
* @return new {@link StrAlgoArgs} with {@literal By STRINGS} set.
*/
public static StrAlgoArgs strings(Charset charset, String... strings) {
return new StrAlgoArgs().by(By.STRINGS, strings).charset(charset);
}
}
/**
* restrict the list of matches to the ones of a given minimal length.
*
* @return {@code this} {@link StrAlgoArgs}.
*/
public StrAlgoArgs minMatchLen(int minMatchLen) {
this.minMatchLen = minMatchLen;
return this;
}
/**
* Request just the length of the match for results.
*
* @return {@code this} {@link StrAlgoArgs}.
*/
public StrAlgoArgs justLen() {
justLen = true;
return this;
}
/**
* Request match len for results.
*
* @return {@code this} {@link StrAlgoArgs}.
*/
public StrAlgoArgs withMatchLen() {
withMatchLen = true;
return this;
}
/**
* Request match position in each strings for results.
*
* @return {@code this} {@link StrAlgoArgs}.
*/
public StrAlgoArgs withIdx() {
withIdx = true;
return this;
}
public StrAlgoArgs by(By by, String... keys) {
LettuceAssert.notNull(by, "By-selector must not be null");
LettuceAssert.notEmpty(keys, "Keys must not be empty");
this.by = by;
this.keys = keys;
return this;
}
public boolean isWithIdx() {
return withIdx;
}
public StrAlgoArgs charset(Charset charset) {
LettuceAssert.notNull(charset, "Charset must not be null");
this.charset = charset;
return this;
}
public | Builder |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/lazytoone/mappedby/JoinFetchedInverseToOneAllowProxyTests.java | {
"start": 7038,
"end": 7905
} | class ____ {
@Id
private Integer id;
@OneToOne( fetch = LAZY, mappedBy = "supplementalInfo", optional = false )
// @LazyToOne( value = NO_PROXY )
@Fetch( JOIN )
private Customer customer;
private String something;
public SupplementalInfo() {
}
public SupplementalInfo(Integer id, Customer customer, String something) {
this.id = id;
this.customer = customer;
this.something = something;
customer.setSupplementalInfo( this );
}
public Integer getId() {
return id;
}
private void setId(Integer id) {
this.id = id;
}
public Customer getCustomer() {
return customer;
}
public void setCustomer(Customer customer) {
this.customer = customer;
}
public String getSomething() {
return something;
}
public void setSomething(String something) {
this.something = something;
}
}
}
| SupplementalInfo |
java | quarkusio__quarkus | extensions/panache/rest-data-panache/deployment/src/main/java/io/quarkus/rest/data/panache/deployment/RestDataProcessor.java | {
"start": 1457,
"end": 7521
} | class ____ {
@BuildStep
void supportingBuildItems(Capabilities capabilities,
BuildProducer<RuntimeInitializedClassBuildItem> runtimeInitializedClassBuildItemBuildProducer,
BuildProducer<ResteasyJaxrsProviderBuildItem> resteasyJaxrsProviderBuildItemBuildProducer,
BuildProducer<ContainerRequestFilterBuildItem> containerRequestFilterBuildItemBuildProducer) {
boolean isResteasyClassicAvailable = capabilities.isPresent(Capability.RESTEASY);
boolean isResteasyReactiveAvailable = capabilities.isPresent(Capability.RESTEASY_REACTIVE);
if (!isResteasyClassicAvailable && !isResteasyReactiveAvailable) {
throw new IllegalStateException(
"REST Data Panache can only work if 'quarkus-rest' or 'quarkus-resteasy' is present");
}
if (isResteasyClassicAvailable) {
runtimeInitializedClassBuildItemBuildProducer
.produce(new RuntimeInitializedClassBuildItem("org.jboss.resteasy.links.impl.EL"));
resteasyJaxrsProviderBuildItemBuildProducer
.produce(new ResteasyJaxrsProviderBuildItem(SortQueryParamFilter.class.getName()));
} else {
containerRequestFilterBuildItemBuildProducer
.produce(new ContainerRequestFilterBuildItem.Builder(SortQueryParamFilter.class.getName())
.setNameBindingNames(Collections.singleton(SortQueryParamValidator.class.getName())).build());
}
}
@BuildStep
void implementResources(CombinedIndexBuildItem index,
List<RestDataResourceBuildItem> resourceBuildItems,
List<ResourcePropertiesBuildItem> resourcePropertiesBuildItems,
List<BuildTimeConditionBuildItem> buildTimeConditions,
Capabilities capabilities,
BuildProducer<GeneratedBeanBuildItem> resteasyClassicImplementationsProducer,
BuildProducer<GeneratedJaxRsResourceBuildItem> resteasyReactiveImplementationsProducer) {
boolean isReactivePanache = capabilities.isPresent(Capability.HIBERNATE_REACTIVE);
boolean isResteasyClassic = capabilities.isPresent(Capability.RESTEASY);
if (isReactivePanache && isResteasyClassic) {
throw new IllegalStateException(
"Reactive REST Data Panache does not work with 'quarkus-resteasy'. Only 'quarkus-rest' extensions are supported");
}
Set<String> excludedClasses = getExcludedClasses(buildTimeConditions);
ClassOutput classOutput = isResteasyClassic ? new GeneratedBeanGizmoAdaptor(resteasyClassicImplementationsProducer)
: new GeneratedJaxRsResourceGizmoAdaptor(resteasyReactiveImplementationsProducer);
JaxRsResourceImplementor jaxRsResourceImplementor = new JaxRsResourceImplementor(capabilities);
ResourcePropertiesProvider resourcePropertiesProvider = new ResourcePropertiesProvider(index.getIndex());
for (RestDataResourceBuildItem resourceBuildItem : resourceBuildItems) {
if (!excludedClasses.contains(resourceBuildItem.getResourceMetadata().getResourceName())) {
ResourceMetadata resourceMetadata = resourceBuildItem.getResourceMetadata();
ResourceProperties resourceProperties = getResourceProperties(resourcePropertiesProvider,
resourceMetadata, resourcePropertiesBuildItems);
if (resourceProperties.isHal()) {
if (isResteasyClassic && !hasAnyJsonCapabilityForResteasyClassic(capabilities)) {
throw new IllegalStateException("Cannot generate HAL endpoints without "
+ "either 'quarkus-resteasy-jsonb' or 'quarkus-resteasy-jackson'");
} else if (!isResteasyClassic && !hasAnyJsonCapabilityForResteasyReactive(capabilities)) {
throw new IllegalStateException("Cannot generate HAL endpoints without "
+ "either 'quarkus-rest-jsonb' or 'quarkus-rest-jackson'");
}
}
if (resourceProperties.isExposed()) {
jaxRsResourceImplementor.implement(classOutput, resourceMetadata, resourceProperties, capabilities);
}
}
}
}
private ResourceProperties getResourceProperties(ResourcePropertiesProvider resourcePropertiesProvider,
ResourceMetadata resourceMetadata, List<ResourcePropertiesBuildItem> resourcePropertiesBuildItems) {
for (ResourcePropertiesBuildItem resourcePropertiesBuildItem : resourcePropertiesBuildItems) {
if (resourcePropertiesBuildItem.getResourceType().equals(resourceMetadata.getResourceClass())
|| resourcePropertiesBuildItem.getResourceType().equals(resourceMetadata.getResourceName())) {
return resourcePropertiesBuildItem.getResourcePropertiesInfo();
}
}
return resourcePropertiesProvider.getFromClass(resourceMetadata.getResourceName());
}
private boolean hasAnyJsonCapabilityForResteasyClassic(Capabilities capabilities) {
return capabilities.isPresent(Capability.RESTEASY_JSON_JSONB)
|| capabilities.isPresent(Capability.RESTEASY_JSON_JACKSON);
}
private boolean hasAnyJsonCapabilityForResteasyReactive(Capabilities capabilities) {
return capabilities.isPresent(Capability.RESTEASY_REACTIVE_JSON_JSONB)
|| capabilities.isPresent(Capability.RESTEASY_REACTIVE_JSON_JACKSON);
}
private static Set<String> getExcludedClasses(List<BuildTimeConditionBuildItem> buildTimeConditions) {
return buildTimeConditions.stream()
.filter(item -> !item.isEnabled())
.map(BuildTimeConditionBuildItem::getTarget)
.filter(target -> target.kind() == AnnotationTarget.Kind.CLASS)
.map(target -> target.asClass().toString())
.collect(Collectors.toSet());
}
}
| RestDataProcessor |
java | elastic__elasticsearch | modules/lang-painless/src/main/java/org/elasticsearch/painless/phase/DefaultStaticConstantExtractionPhase.java | {
"start": 1367,
"end": 3419
} | class ____ extends IRTreeBaseVisitor<ScriptScope> {
private ClassNode classNode;
@Override
public void visitClass(ClassNode irClassNode, ScriptScope scope) {
this.classNode = irClassNode;
super.visitClass(irClassNode, scope);
}
@Override
public void visitConstant(ConstantNode irConstantNode, ScriptScope scope) {
super.visitConstant(irConstantNode, scope);
Object constant = irConstantNode.getDecorationValue(IRDConstant.class);
if (constant instanceof String
|| constant instanceof Double
|| constant instanceof Float
|| constant instanceof Long
|| constant instanceof Integer
|| constant instanceof Character
|| constant instanceof Short
|| constant instanceof Byte
|| constant instanceof Boolean) {
/*
* Constant can be loaded into the constant pool so we let the byte
* code generation phase do that.
*/
return;
}
/*
* The constant *can't* be loaded into the constant pool so we make it
* a static constant and register the value with ScriptScope. The byte
* code generation will load the static constant.
*/
String fieldName = scope.getNextSyntheticName("constant");
scope.addStaticConstant(fieldName, constant);
FieldNode constantField = new FieldNode(irConstantNode.getLocation());
constantField.attachDecoration(new IRDModifiers(Modifier.PUBLIC | Modifier.STATIC));
constantField.attachDecoration(irConstantNode.getDecoration(IRDConstant.class));
Class<?> type = irConstantNode.getDecorationValue(IRDExpressionType.class);
constantField.attachDecoration(new IRDFieldType(type));
constantField.attachDecoration(new IRDName(fieldName));
classNode.addFieldNode(constantField);
irConstantNode.attachDecoration(new IRDConstantFieldName(fieldName));
}
}
| DefaultStaticConstantExtractionPhase |
java | grpc__grpc-java | xds/src/main/java/io/grpc/xds/WeightedTargetLoadBalancerProvider.java | {
"start": 1459,
"end": 4527
} | class ____ extends LoadBalancerProvider {
@Nullable
private final LoadBalancerRegistry lbRegistry;
// We can not call this(LoadBalancerRegistry.getDefaultRegistry()), because it will get stuck
// recursively loading LoadBalancerRegistry and WeightedTargetLoadBalancerProvider.
public WeightedTargetLoadBalancerProvider() {
this(null);
}
@VisibleForTesting
WeightedTargetLoadBalancerProvider(@Nullable LoadBalancerRegistry lbRegistry) {
this.lbRegistry = lbRegistry;
}
@Override
public boolean isAvailable() {
return true;
}
@Override
public int getPriority() {
return 5;
}
@Override
public String getPolicyName() {
return XdsLbPolicies.WEIGHTED_TARGET_POLICY_NAME;
}
@Override
public LoadBalancer newLoadBalancer(Helper helper) {
return new WeightedTargetLoadBalancer(helper);
}
@Override
public ConfigOrError parseLoadBalancingPolicyConfig(Map<String, ?> rawConfig) {
try {
Map<String, ?> targets = JsonUtil.getObject(rawConfig, "targets");
if (targets == null || targets.isEmpty()) {
return ConfigOrError.fromError(Status.INTERNAL.withDescription(
"No targets provided for weighted_target LB policy:\n " + rawConfig));
}
Map<String, WeightedPolicySelection> parsedChildConfigs = new LinkedHashMap<>();
for (String name : targets.keySet()) {
Map<String, ?> rawWeightedTarget = JsonUtil.getObject(targets, name);
if (rawWeightedTarget == null || rawWeightedTarget.isEmpty()) {
return ConfigOrError.fromError(Status.INTERNAL.withDescription(
"No config for target " + name + " in weighted_target LB policy:\n " + rawConfig));
}
Integer weight = JsonUtil.getNumberAsInteger(rawWeightedTarget, "weight");
if (weight == null || weight < 1) {
return ConfigOrError.fromError(Status.INTERNAL.withDescription(
"Wrong weight for target " + name + " in weighted_target LB policy:\n " + rawConfig));
}
LoadBalancerRegistry lbRegistry =
this.lbRegistry == null ? LoadBalancerRegistry.getDefaultRegistry() : this.lbRegistry;
ConfigOrError childConfig = GracefulSwitchLoadBalancer.parseLoadBalancingPolicyConfig(
JsonUtil.getListOfObjects(rawWeightedTarget, "childPolicy"), lbRegistry);
if (childConfig.getError() != null) {
return ConfigOrError.fromError(GrpcUtil.statusWithDetails(
Status.Code.INTERNAL,
"Could not parse weighted_target's child policy: " + name,
childConfig.getError()));
}
parsedChildConfigs.put(name, new WeightedPolicySelection(weight, childConfig.getConfig()));
}
return ConfigOrError.fromConfig(new WeightedTargetConfig(parsedChildConfigs));
} catch (RuntimeException e) {
return ConfigOrError.fromError(
Status.INTERNAL.withCause(e).withDescription(
"Failed to parse weighted_target LB config: " + rawConfig));
}
}
static final | WeightedTargetLoadBalancerProvider |
java | apache__flink | flink-core/src/test/java/org/apache/flink/util/ReferenceTest.java | {
"start": 973,
"end": 1746
} | class ____ {
@Test
void testOwnedReferenceIsOwned() {
final Reference<String> value = Reference.owned("foobar");
assertThat(value.isOwned()).isTrue();
}
@Test
void testBorrowedReferenceIsNotOwned() {
final Reference<String> value = Reference.borrowed("foobar");
assertThat(value.isOwned()).isFalse();
}
@Test
void testOwnedReferenceReturnsSomeOwned() {
final String value = "foobar";
final Reference<String> owned = Reference.owned(value);
assertThat(owned.owned()).hasValue(value);
}
@Test
void testBorrowedReferenceReturnsEmptyOwned() {
Reference<String> value = Reference.borrowed("foobar");
assertThat(value.owned()).isEmpty();
}
}
| ReferenceTest |
java | apache__dubbo | dubbo-plugin/dubbo-filter-cache/src/main/java/org/apache/dubbo/cache/support/expiring/ExpiringCache.java | {
"start": 1251,
"end": 2018
} | class ____ {@link ExpiringCacheFactory} to store method's returns value
* to server from store without making method call.
* <pre>
* e.g. 1) <dubbo:service cache="expiring" cache.seconds="60" cache.interval="10"/>
* 2) <dubbo:consumer cache="expiring" />
* </pre>
* <li>It used constructor argument url instance <b>cache.seconds</b> value to decide time to live of cached object.Default value of it is 180 second.</li>
* <li>It used constructor argument url instance <b>cache.interval</b> value for cache value expiration interval.Default value of this is 4 second</li>
* @see Cache
* @see ExpiringCacheFactory
* @see org.apache.dubbo.cache.support.AbstractCacheFactory
* @see org.apache.dubbo.cache.filter.CacheFilter
*/
public | using |
java | apache__camel | core/camel-management/src/test/java/org/apache/camel/management/AddEventNotifierOrderedTest.java | {
"start": 1441,
"end": 3331
} | class ____ extends ContextTestSupport {
private static final List<String> events = new ArrayList<>();
@Override
@BeforeEach
public void setUp() throws Exception {
events.clear();
super.setUp();
}
@Override
protected CamelContext createCamelContext() throws Exception {
DefaultCamelContext context = new DefaultCamelContext(createCamelRegistry());
context.getManagementStrategy().addEventNotifier(new MyNotifier("notifier1", -100));
context.getManagementStrategy().addEventNotifier(new MyNotifier("notifier2", -200));
return context;
}
@Test
public void testAdd() throws Exception {
getMockEndpoint("mock:result").expectedMessageCount(1);
template.sendBody("direct:start", "Hello World");
assertMockEndpointsSatisfied();
Assertions.assertEquals(2, events.size());
Assertions.assertEquals("notifier2", events.get(0));
Assertions.assertEquals("notifier1", events.get(1));
// add new notifier after started
resetMocks();
events.clear();
context.getManagementStrategy().addEventNotifier(new MyNotifier("notifier3", -300));
getMockEndpoint("mock:result").expectedMessageCount(1);
template.sendBody("direct:start", "Hello World");
assertMockEndpointsSatisfied();
Assertions.assertEquals(3, events.size());
Assertions.assertEquals("notifier3", events.get(0));
Assertions.assertEquals("notifier2", events.get(1));
Assertions.assertEquals("notifier1", events.get(2));
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start").to("mock:result");
}
};
}
private static | AddEventNotifierOrderedTest |
java | spring-projects__spring-boot | buildpack/spring-boot-buildpack-platform/src/main/java/org/springframework/boot/buildpack/platform/io/Content.java | {
"start": 1124,
"end": 3049
} | interface ____ {
/**
* The size of the content in bytes.
* @return the content size
*/
int size();
/**
* Write the content to the given output stream.
* @param outputStream the output stream to write to
* @throws IOException on IO error
*/
void writeTo(OutputStream outputStream) throws IOException;
/**
* Create a new {@link Content} from the given UTF-8 string.
* @param string the string to write
* @return a new {@link Content} instance
*/
static Content of(String string) {
Assert.notNull(string, "'string' must not be null");
return of(string.getBytes(StandardCharsets.UTF_8));
}
/**
* Create a new {@link Content} from the given input stream.
* @param bytes the bytes to write
* @return a new {@link Content} instance
*/
static Content of(byte[] bytes) {
Assert.notNull(bytes, "'bytes' must not be null");
return of(bytes.length, () -> new ByteArrayInputStream(bytes));
}
/**
* Create a new {@link Content} from the given file.
* @param file the file to write
* @return a new {@link Content} instance
*/
static Content of(File file) {
Assert.notNull(file, "'file' must not be null");
return of((int) file.length(), () -> new FileInputStream(file));
}
/**
* Create a new {@link Content} from the given input stream. The stream will be closed
* after it has been written.
* @param size the size of the supplied input stream
* @param supplier the input stream supplier
* @return a new {@link Content} instance
*/
static Content of(int size, IOSupplier<InputStream> supplier) {
Assert.isTrue(size >= 0, "'size' must not be negative");
Assert.notNull(supplier, "'supplier' must not be null");
return new Content() {
@Override
public int size() {
return size;
}
@Override
public void writeTo(OutputStream outputStream) throws IOException {
FileCopyUtils.copy(supplier.get(), outputStream);
}
};
}
}
| Content |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/internal/bytearrays/ByteArrays_assertContains_at_Index_with_Integer_Argument_Test.java | {
"start": 1738,
"end": 7132
} | class ____ extends ByteArraysBaseTest {
@Test
void should_fail_if_actual_is_null() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> arrays.assertContains(someInfo(), null, 8, someIndex()))
.withMessage(actualIsNull());
}
@Test
void should_fail_if_actual_is_empty() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> arrays.assertContains(someInfo(), emptyArray(), 8,
someIndex()))
.withMessage(actualIsEmpty());
}
@Test
void should_throw_error_if_Index_is_null() {
assertThatNullPointerException().isThrownBy(() -> arrays.assertContains(someInfo(), actual, 8, null))
.withMessage("Index should not be null");
}
@Test
void should_throw_error_if_Index_is_out_of_bounds() {
assertThatExceptionOfType(IndexOutOfBoundsException.class).isThrownBy(() -> arrays.assertContains(someInfo(),
actual, 8,
atIndex(6)))
.withMessageContaining("Index should be between <0> and <2> (inclusive) but was:%n <6>".formatted());
}
@Test
void should_fail_if_actual_does_not_contain_value_at_index() {
AssertionInfo info = someInfo();
Index index = atIndex(1);
Throwable error = catchThrowable(() -> arrays.assertContains(info, actual, 6, index));
assertThat(error).isInstanceOf(AssertionError.class);
byte found = 8;
verify(failures).failure(info, shouldContainAtIndex(actual, (byte) 6, index, found));
}
@Test
void should_pass_if_actual_contains_value_at_index() {
arrays.assertContains(someInfo(), actual, 8, atIndex(1));
}
@Test
void should_fail_if_actual_is_null_whatever_custom_comparison_strategy_is() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> arraysWithCustomComparisonStrategy.assertContains(someInfo(),
null, -8,
someIndex()))
.withMessage(actualIsNull());
}
@Test
void should_fail_if_actual_is_empty_whatever_custom_comparison_strategy_is() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> arraysWithCustomComparisonStrategy.assertContains(someInfo(),
emptyArray(),
-8,
someIndex()))
.withMessage(actualIsEmpty());
}
@Test
void should_throw_error_if_Index_is_null_whatever_custom_comparison_strategy_is() {
assertThatNullPointerException().isThrownBy(() -> arraysWithCustomComparisonStrategy.assertContains(someInfo(),
actual, -8,
null))
.withMessage("Index should not be null");
}
@Test
void should_throw_error_if_Index_is_out_of_bounds_whatever_custom_comparison_strategy_is() {
assertThatExceptionOfType(IndexOutOfBoundsException.class).isThrownBy(() -> arraysWithCustomComparisonStrategy.assertContains(someInfo(),
actual,
-8,
atIndex(6)))
.withMessageContaining("Index should be between <0> and <2> (inclusive) but was:%n <6>".formatted());
}
@Test
void should_fail_if_actual_does_not_contain_value_at_index_according_to_custom_comparison_strategy() {
AssertionInfo info = someInfo();
Index index = atIndex(1);
Throwable error = catchThrowable(() -> arraysWithCustomComparisonStrategy.assertContains(info, actual, 6, index));
assertThat(error).isInstanceOf(AssertionError.class);
byte found = 8;
verify(failures).failure(info, shouldContainAtIndex(actual, (byte) 6, index, found, absValueComparisonStrategy));
}
@Test
void should_pass_if_actual_contains_value_at_index_according_to_custom_comparison_strategy() {
arraysWithCustomComparisonStrategy.assertContains(someInfo(), actual, -8, atIndex(1));
}
}
| ByteArrays_assertContains_at_Index_with_Integer_Argument_Test |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest-common/deployment/src/main/java/io/quarkus/resteasy/reactive/common/deployment/JaxRsSecurityConfig.java | {
"start": 344,
"end": 989
} | interface ____ {
/**
* if set to true, access to all JAX-RS resources will be denied by default
*/
@WithName("deny-unannotated-endpoints")
@WithDefault("false")
boolean denyJaxRs();
/**
* If no security annotations are affecting a method then they will default to requiring these roles,
* (equivalent to adding an @RolesAllowed annotation with the roles to every endpoint class).
*
* The role of '**' means any authenticated user, which is equivalent to the {@code io.quarkus.security.Authenticated}
* annotation.
*/
Optional<List<String>> defaultRolesAllowed();
}
| JaxRsSecurityConfig |
java | quarkusio__quarkus | independent-projects/resteasy-reactive/server/processor/src/main/java/org/jboss/resteasy/reactive/server/processor/ResteasyReactiveDeploymentManager.java | {
"start": 4478,
"end": 5173
} | class ____ {
final IndexView index;
int inputBufferSize = 10000;
int minChunkSize = 128;
int outputBufferSize = 8192;
/**
* By default, we assume a default produced media type of "text/plain"
* for String endpoint return types. If this is disabled, the default
* produced media type will be "[text/plain, */*]" which is more
* expensive due to negotiation.
*/
private boolean singleDefaultProduces;
/**
* When one of the quarkus-resteasy-reactive-jackson or quarkus-resteasy-reactive-jsonb extension are active
* and the result type of an endpoint is an application | ScanStep |
java | apache__camel | core/camel-util/src/main/java/org/apache/camel/util/ObjectHelper.java | {
"start": 15682,
"end": 16017
} | class ____
clazz = doLoadClass(name, Thread.currentThread().getContextClassLoader());
}
if (clazz == null) {
// then the provided loader
clazz = doLoadClass(name, loader);
}
if (clazz == null) {
        // and fallback to the loader that loaded the ObjectHelper | loader
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/hql/SelectWithFkInPackageTest.java | {
"start": 2125,
"end": 2302
} | class ____ {
@Id
@GeneratedValue
public Long id;
@Column
public String title;
public Book() {
}
public Book(String title) {
this.title = title;
}
}
}
| Book |
java | mybatis__mybatis-3 | src/test/java/org/apache/ibatis/datasource/jndi/JndiDataSourceFactoryTest.java | {
"start": 3185,
"end": 3583
} | class ____ extends InitialContext {
private static final Map<String, Object> bindings = new HashMap<>();
MockContext(boolean lazy) throws NamingException {
super(lazy);
}
@Override
public Object lookup(String name) {
return bindings.get(name);
}
@Override
public void bind(String name, Object obj) {
bindings.put(name, obj);
}
}
}
| MockContext |
java | junit-team__junit5 | junit-jupiter-api/src/main/java/org/junit/jupiter/api/parallel/ResourceLocks.java | {
"start": 1232,
"end": 1379
} | interface ____ {
/**
* An array of one or more {@linkplain ResourceLock @ResourceLock} declarations.
*/
ResourceLock[] value();
}
| ResourceLocks |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/ASTHelpersSuggestionsTest.java | {
"start": 1203,
"end": 1641
} | class ____ {
void f(Symbol s) {
s.isStatic();
s.packge();
}
}
""")
.addOutputLines(
"Test.java",
"""
import static com.google.errorprone.util.ASTHelpers.enclosingPackage;
import static com.google.errorprone.util.ASTHelpers.isStatic;
import com.sun.tools.javac.code.Symbol;
| Test |
java | bumptech__glide | library/src/main/java/com/bumptech/glide/RequestManager.java | {
"start": 1871,
"end": 2464
} | class ____ managing and starting requests for Glide. Can use activity, fragment and connectivity
* lifecycle events to intelligently stop, start, and restart requests. Retrieve either by
 * instantiating a new object, or to take advantage of built in Activity and Fragment lifecycle
* handling, use the static Glide.load methods with your Fragment or Activity.
*
* @see Glide#with(android.app.Activity)
* @see Glide#with(androidx.fragment.app.FragmentActivity)
* @see Glide#with(android.app.Fragment)
* @see Glide#with(androidx.fragment.app.Fragment)
* @see Glide#with(Context)
*/
public | for |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/producer/disposer/DisposerTest.java | {
"start": 3748,
"end": 4273
} | class ____ {
static final AtomicInteger DESTROYED = new AtomicInteger();
static final AtomicReference<Object> DISPOSED = new AtomicReference<>();
@ApplicationScoped
@Produces
Comparable<BigDecimal> produce() {
return BigDecimal.ONE;
}
        void dispose(@Disposes Comparable<BigDecimal> value) {
DISPOSED.set(value);
}
@PreDestroy
void destroy() {
DESTROYED.incrementAndGet();
}
}
}
| BigDecimalProducer |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/asm/ClassReader.java | {
"start": 168099,
"end": 170536
} | class ____ or
* adapters.</i>
*
* @param offset the start offset of an unsigned short value in this {@link ClassReader}, whose
* value is the index of a CONSTANT_Package entry in class's constant pool table.
* @param charBuffer the buffer to be used to read the item. This buffer must be sufficiently
* large. It is not automatically resized.
* @return the String corresponding to the specified CONSTANT_Package entry.
*/
public String readPackage(final int offset, final char[] charBuffer) {
return readStringish(offset, charBuffer);
}
/**
* Reads a CONSTANT_Dynamic constant pool entry in {@link #classFileBuffer}.
*
* @param constantPoolEntryIndex the index of a CONSTANT_Dynamic entry in the class's constant
* pool table.
* @param charBuffer the buffer to be used to read the string. This buffer must be sufficiently
* large. It is not automatically resized.
* @return the ConstantDynamic corresponding to the specified CONSTANT_Dynamic entry.
*/
private ConstantDynamic readConstantDynamic(
final int constantPoolEntryIndex, final char[] charBuffer) {
ConstantDynamic constantDynamic = constantDynamicValues[constantPoolEntryIndex];
if (constantDynamic != null) {
return constantDynamic;
}
int cpInfoOffset = cpInfoOffsets[constantPoolEntryIndex];
int nameAndTypeCpInfoOffset = cpInfoOffsets[readUnsignedShort(cpInfoOffset + 2)];
String name = readUTF8(nameAndTypeCpInfoOffset, charBuffer);
String descriptor = readUTF8(nameAndTypeCpInfoOffset + 2, charBuffer);
int bootstrapMethodOffset = bootstrapMethodOffsets[readUnsignedShort(cpInfoOffset)];
Handle handle = (Handle) readConst(readUnsignedShort(bootstrapMethodOffset), charBuffer);
Object[] bootstrapMethodArguments = new Object[readUnsignedShort(bootstrapMethodOffset + 2)];
bootstrapMethodOffset += 4;
for (int i = 0; i < bootstrapMethodArguments.length; i++) {
bootstrapMethodArguments[i] = readConst(readUnsignedShort(bootstrapMethodOffset), charBuffer);
bootstrapMethodOffset += 2;
}
return constantDynamicValues[constantPoolEntryIndex] =
new ConstantDynamic(name, descriptor, handle, bootstrapMethodArguments);
}
/**
* Reads a numeric or string constant pool entry in this {@link ClassReader}. <i>This method is
* intended for {@link Attribute} sub classes, and is normally not needed by | generators |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/filter/StatFilterExecuteFirstResultSetTest.java | {
"start": 1089,
"end": 2497
} | class ____ extends TestCase {
private DruidDataSource dataSource;
protected void setUp() throws Exception {
dataSource = new DruidDataSource();
dataSource.setUrl("jdbc:mock:xxx");
dataSource.setFilters("stat");
dataSource.setTestOnBorrow(false);
MockDriver driver = new MockDriver() {
public MockPreparedStatement createMockPreparedStatement(MockConnection conn, String sql) {
return new MyMockPreparedStatement(conn, sql);
}
};
dataSource.setDriver(driver);
dataSource.init();
}
protected void tearDown() throws Exception {
JdbcUtils.close(dataSource);
}
public void test_stat() throws Exception {
assertTrue(dataSource.isInited());
final String sql = "select 1";
Connection conn = dataSource.getConnection();
PreparedStatement stmt = conn.prepareStatement(sql);
boolean firstResult = stmt.execute();
assertTrue(firstResult);
ResultSet rs = stmt.getResultSet();
rs.next();
rs.close();
stmt.close();
conn.close();
JdbcSqlStat sqlStat = dataSource.getDataSourceStat().getSqlStat(sql);
assertEquals(1, sqlStat.getHistogramSum());
assertEquals(1, sqlStat.getExecuteAndResultHoldTimeHistogramSum());
}
static | StatFilterExecuteFirstResultSetTest |
java | alibaba__nacos | api/src/test/java/com/alibaba/nacos/api/ai/model/mcp/registry/PackageTest.java | {
"start": 1067,
"end": 4838
} | class ____ extends BasicRequestTest {
@Test
void testSerialize() throws JsonProcessingException {
Package pkg = new Package();
pkg.setRegistryType("maven");
pkg.setRegistryBaseUrl("https://repo.maven.apache.org/maven2/");
pkg.setIdentifier("com.alibaba.nacos:test-package");
pkg.setVersion("1.0.0");
pkg.setFileSha256("abc123");
pkg.setRuntimeHint("java11");
// Create test arguments
NamedArgument namedArgument = new NamedArgument();
namedArgument.setName("arg1");
namedArgument.setValue("value1");
PositionalArgument positionalArgument = new PositionalArgument();
positionalArgument.setValueHint("posValue");
pkg.setRuntimeArguments(Collections.singletonList(namedArgument));
pkg.setPackageArguments(Arrays.asList(namedArgument, positionalArgument));
KeyValueInput envVar = new KeyValueInput();
envVar.setName("ENV_VAR");
envVar.setValue("env_value");
pkg.setEnvironmentVariables(Collections.singletonList(envVar));
String json = mapper.writeValueAsString(pkg);
assertNotNull(json);
assertTrue(json.contains("\"registryType\":\"maven\""));
assertTrue(json.contains("\"registryBaseUrl\":\"https://repo.maven.apache.org/maven2/\""));
assertTrue(json.contains("\"identifier\":\"com.alibaba.nacos:test-package\""));
assertTrue(json.contains("\"version\":\"1.0.0\""));
assertTrue(json.contains("\"fileSha256\":\"abc123\""));
assertTrue(json.contains("\"runtimeHint\":\"java11\""));
assertTrue(json.contains("\"runtimeArguments\":["));
assertTrue(json.contains("\"packageArguments\":["));
assertTrue(json.contains("\"environmentVariables\":["));
}
@Test
void testDeserialize() throws JsonProcessingException {
String json = "{"
+ "\"registryType\":\"maven\","
+ "\"registryBaseUrl\":\"https://repo.maven.apache.org/maven2/\","
+ "\"identifier\":\"com.alibaba.nacos:test-package\","
+ "\"version\":\"1.0.0\","
+ "\"fileSha256\":\"abc123\","
+ "\"runtimeHint\":\"java11\","
+ "\"runtimeArguments\":[{\"type\":\"named\",\"name\":\"arg1\",\"value\":\"value1\"}],"
+ "\"packageArguments\":["
+ " {\"type\":\"named\",\"name\":\"arg1\",\"value\":\"value1\"},"
+ " {\"type\":\"positional\",\"valueHint\":\"posValue\"}"
+ "],"
+ "\"environmentVariables\":[{\"name\":\"ENV_VAR\",\"value\":\"env_value\"}]"
+ "}";
Package pkg = mapper.readValue(json, Package.class);
assertNotNull(pkg);
assertEquals("maven", pkg.getRegistryType());
assertEquals("https://repo.maven.apache.org/maven2/", pkg.getRegistryBaseUrl());
assertEquals("com.alibaba.nacos:test-package", pkg.getIdentifier());
assertEquals("1.0.0", pkg.getVersion());
assertEquals("abc123", pkg.getFileSha256());
assertEquals("java11", pkg.getRuntimeHint());
assertEquals(1, pkg.getRuntimeArguments().size());
assertEquals("named", ((NamedArgument) pkg.getRuntimeArguments().get(0)).getType());
assertEquals(2, pkg.getPackageArguments().size());
assertEquals("named", ((NamedArgument) pkg.getPackageArguments().get(0)).getType());
assertEquals("positional", ((PositionalArgument) pkg.getPackageArguments().get(1)).getType());
assertEquals(1, pkg.getEnvironmentVariables().size());
assertEquals("ENV_VAR", pkg.getEnvironmentVariables().get(0).getName());
}
} | PackageTest |
java | spring-projects__spring-boot | module/spring-boot-data-redis/src/main/java/org/springframework/boot/data/redis/autoconfigure/DataRedisConnectionConfiguration.java | {
"start": 8631,
"end": 8699
} | enum ____ {
STANDALONE, CLUSTER, MASTER_REPLICA, SENTINEL
}
}
| Mode |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/streaming/api/datastream/CoGroupedStreams.java | {
"start": 23735,
"end": 24035
} | class ____<T1, T2> implements MapFunction<T1, TaggedUnion<T1, T2>> {
private static final long serialVersionUID = 1L;
@Override
public TaggedUnion<T1, T2> map(T1 value) throws Exception {
return TaggedUnion.one(value);
}
}
private static | Input1Tagger |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/dynamic/support/ReflectionUtils.java | {
"start": 1387,
"end": 1595
} | class ____ the supplied name and no parameters. Searches all
* superclasses up to {@code Object}.
* <p>
* Returns {@code null} if no {@link Method} can be found.
*
* @param clazz the | with |
java | spring-projects__spring-framework | spring-web/src/test/java/org/springframework/http/ContentDispositionTests.java | {
"start": 1094,
"end": 11184
} | class ____ {
@Test
void parseFilenameQuoted() {
assertThat(parse("form-data; name=\"foo\"; filename=\"foo.txt\""))
.isEqualTo(ContentDisposition.formData()
.name("foo")
.filename("foo.txt")
.build());
}
@Test
void parseFilenameUnquoted() {
assertThat(parse("form-data; filename=unquoted"))
.isEqualTo(ContentDisposition.formData()
.filename("unquoted")
.build());
}
@Test // SPR-16091
void parseFilenameWithSemicolon() {
assertThat(parse("attachment; filename=\"filename with ; semicolon.txt\""))
.isEqualTo(ContentDisposition.attachment()
.filename("filename with ; semicolon.txt")
.build());
}
@Test
void parseEncodedFilename() {
assertThat(parse("form-data; name=\"name\"; filename*=UTF-8''%E4%B8%AD%E6%96%87.txt"))
.isEqualTo(ContentDisposition.formData()
.name("name")
.filename("中文.txt", StandardCharsets.UTF_8)
.build());
}
@Test // gh-24112
void parseEncodedFilenameWithPaddedCharset() {
assertThat(parse("attachment; filename*= UTF-8''some-file.zip"))
.isEqualTo(ContentDisposition.attachment()
.filename("some-file.zip", StandardCharsets.UTF_8)
.build());
}
@Test // gh-26463
void parseBase64EncodedFilename() {
String input = "attachment; filename=\"=?UTF-8?B?5pel5pys6KqeLmNzdg==?=\"";
assertThat(parse(input).getFilename()).isEqualTo("日本語.csv");
}
@Test
void parseBase64EncodedFilenameMultipleSegments() {
String input =
"attachment; filename=\"=?utf-8?B?U3ByaW5n5qGG5p625Li65Z+65LqOSmF2YeeahOeOsOS7o+S8geS4muW6lA==?= " +
"=?utf-8?B?55So56iL5bqP5o+Q5L6b5LqG5YWo6Z2i55qE57yW56iL5ZKM6YWN572u5qih?= " +
"=?utf-8?B?5Z6LLnR4dA==?=\"";
assertThat(parse(input).getFilename()).isEqualTo("Spring框架为基于Java的现代企业应用程序提供了全面的编程和配置模型.txt");
}
@Test // gh-26463
void parseBase64EncodedShiftJISFilename() {
String input = "attachment; filename=\"=?SHIFT_JIS?B?k/qWe4zqLmNzdg==?=\"";
assertThat(parse(input).getFilename()).isEqualTo("日本語.csv");
}
@Test
void parseQuotedPrintableFilename() {
String input = "attachment; filename=\"=?UTF-8?Q?=E6=97=A5=E6=9C=AC=E8=AA=9E.csv?=\"";
assertThat(parse(input).getFilename()).isEqualTo("日本語.csv");
}
@Test
void parseQuotedPrintableFilenameMultipleSegments() {
String input =
"attachment; filename=\"=?utf-8?Q?Spring=E6=A1=86=E6=9E=B6=E4=B8=BA=E5=9F=BA=E4=BA=8E?=" +
"=?utf-8?Q?Java=E7=9A=84=E7=8E=B0=E4=BB=A3=E4=BC=81=E4=B8=9A=E5=BA=94?=" +
"=?utf-8?Q?=E7=94=A8=E7=A8=8B=E5=BA=8F=E6=8F=90=E4=BE=9B=E4=BA=86=E5=85=A8?=" +
"=?utf-8?Q?=E9=9D=A2=E7=9A=84=E7=BC=96=E7=A8=8B=E5=92=8C=E9=85=8D=E7=BD=AE?=" +
"=?utf-8?Q?=E6=A8=A1=E5=9E=8B.txt?=\"";
assertThat(parse(input).getFilename()).isEqualTo("Spring框架为基于Java的现代企业应用程序提供了全面的编程和配置模型.txt");
}
@Test
void parseQuotedPrintableShiftJISFilename() {
String input = "attachment; filename=\"=?SHIFT_JIS?Q?=93=FA=96{=8C=EA.csv?=\"";
assertThat(parse(input).getFilename()).isEqualTo("日本語.csv");
}
@Test
void parseEncodedFilenameWithoutCharset() {
assertThat(parse("form-data; name=\"name\"; filename*=test.txt"))
.isEqualTo(ContentDisposition.formData()
.name("name")
.filename("test.txt")
.build());
}
@Test
void parseEncodedFilenameWithInvalidCharset() {
assertThatIllegalArgumentException()
.isThrownBy(() -> parse("form-data; name=\"name\"; filename*=UTF-16''test.txt"));
}
@Test
void parseEncodedFilenameWithInvalidName() {
assertThatIllegalArgumentException()
.isThrownBy(() -> parse("form-data; name=\"name\"; filename*=UTF-8''%A"));
assertThatIllegalArgumentException()
.isThrownBy(() -> parse("form-data; name=\"name\"; filename*=UTF-8''%A.txt"));
}
@Test
void parseBackslash() {
String s = "form-data; name=\"foo\"; filename=\"foo\\\\bar \\\"baz\\\" qux \\\\\\\" quux.txt\"";
ContentDisposition cd = ContentDisposition.parse(
s);
assertThat(cd.getName()).isEqualTo("foo");
assertThat(cd.getFilename()).isEqualTo("foo\\bar \"baz\" qux \\\" quux.txt");
assertThat(cd.toString()).isEqualTo(s);
}
@Test
void parseBackslashInLastPosition() {
ContentDisposition cd = ContentDisposition.parse("form-data; name=\"foo\"; filename=\"bar\\\"");
assertThat(cd.getName()).isEqualTo("foo");
assertThat(cd.getFilename()).isEqualTo("bar\\");
assertThat(cd.toString()).isEqualTo("form-data; name=\"foo\"; filename=\"bar\\\\\"");
}
@Test
void parseWindowsPath() {
ContentDisposition cd = ContentDisposition.parse("form-data; name=\"foo\"; filename=\"D:\\foo\\bar.txt\"");
assertThat(cd.getName()).isEqualTo("foo");
assertThat(cd.getFilename()).isEqualTo("D:\\foo\\bar.txt");
assertThat(cd.toString()).isEqualTo("form-data; name=\"foo\"; filename=\"D:\\\\foo\\\\bar.txt\"");
}
@Test
void parseWithExtraSemicolons() {
assertThat(parse("form-data; name=\"foo\";; ; filename=\"foo.txt\";"))
.isEqualTo(ContentDisposition.formData()
.name("foo")
.filename("foo.txt")
.build());
}
@Test
void parseAttributesCaseInsensitively() {
ContentDisposition cd = ContentDisposition.parse("form-data; Name=\"foo\"; FileName=\"bar.txt\"");
assertThat(cd.getName()).isEqualTo("foo");
assertThat(cd.getFilename()).isEqualTo("bar.txt");
assertThat(cd.toString()).isEqualTo("form-data; name=\"foo\"; filename=\"bar.txt\"");
}
@Test
void parseEmpty() {
assertThatIllegalArgumentException().isThrownBy(() -> parse(""));
}
@Test
void parseNoType() {
assertThatIllegalArgumentException().isThrownBy(() -> parse(";"));
}
@Test
void parseInvalidParameter() {
assertThatIllegalArgumentException().isThrownBy(() -> parse("foo;bar"));
}
@Test
void format() {
assertThat(
ContentDisposition.formData()
.name("foo")
.filename("foo.txt")
.build().toString())
.isEqualTo("form-data; name=\"foo\"; filename=\"foo.txt\"");
}
@Test
void formatWithEncodedFilename() {
assertThat(
ContentDisposition.formData()
.name("name")
.filename("中文.txt", StandardCharsets.UTF_8)
.build().toString())
.isEqualTo("form-data; name=\"name\"; " +
"filename=\"=?UTF-8?Q?=E4=B8=AD=E6=96=87.txt?=\"; " +
"filename*=UTF-8''%E4%B8%AD%E6%96%87.txt");
}
@Test
void formatWithEncodedFilenameUsingUsAscii() {
assertThat(
ContentDisposition.formData()
.name("name")
.filename("test.txt", StandardCharsets.US_ASCII)
.build()
.toString())
.isEqualTo("form-data; name=\"name\"; filename=\"test.txt\"");
}
@Test // gh-24220
void formatWithFilenameWithQuotes() {
BiConsumer<String, String> tester = (input, output) -> {
assertThat(ContentDisposition.formData().filename(input).build().toString())
.isEqualTo("form-data; filename=\"" + output + "\"");
assertThat(ContentDisposition.formData().filename(input, StandardCharsets.US_ASCII).build().toString())
.isEqualTo("form-data; filename=\"" + output + "\"");
};
String filename = "\"foo.txt";
tester.accept(filename, "\\\"foo.txt");
filename = "\\\"foo.txt";
tester.accept(filename, "\\\\\\\"foo.txt");
filename = "\\\\\"foo.txt";
tester.accept(filename, "\\\\\\\\\\\"foo.txt");
filename = "\\\\\\\"foo.txt";
tester.accept(filename, "\\\\\\\\\\\\\\\"foo.txt");
filename = "\\\\\\\\\"foo.txt";
tester.accept(filename, "\\\\\\\\\\\\\\\\\\\"foo.txt");
tester.accept("\"\"foo.txt", "\\\"\\\"foo.txt");
tester.accept("\"\"\"foo.txt", "\\\"\\\"\\\"foo.txt");
tester.accept("foo.txt\\", "foo.txt\\\\");
tester.accept("foo.txt\\\\", "foo.txt\\\\\\\\");
tester.accept("foo.txt\\\\\\", "foo.txt\\\\\\\\\\\\");
}
@Test
void formatWithUtf8FilenameWithQuotes() {
String filename = "\"中文.txt";
assertThat(ContentDisposition.formData().filename(filename, StandardCharsets.UTF_8).build().toString())
.isEqualTo("form-data; filename=\"=?UTF-8?Q?=22=E4=B8=AD=E6=96=87.txt?=\"; filename*=UTF-8''%22%E4%B8%AD%E6%96%87.txt");
}
@Test
void formatWithEncodedFilenameUsingInvalidCharset() {
assertThatIllegalArgumentException().isThrownBy(() ->
ContentDisposition.formData()
.name("name")
.filename("test.txt", StandardCharsets.UTF_16)
.build()
.toString());
}
@Test
void parseFormatted() {
ContentDisposition cd = ContentDisposition.builder("form-data")
.name("foo")
.filename("foo\\bar \"baz\" qux \\\" quux.txt").build();
ContentDisposition parsed = ContentDisposition.parse(cd.toString());
assertThat(parsed).isEqualTo(cd);
assertThat(parsed.toString()).isEqualTo(cd.toString());
}
@Test // gh-30252
void parseFormattedWithQuestionMark() {
String filename = "filename with ?问号.txt";
ContentDisposition cd = ContentDisposition.attachment()
.filename(filename, StandardCharsets.UTF_8)
.build();
String result = cd.toString();
assertThat(result).isEqualTo("attachment; " +
"filename=\"=?UTF-8?Q?filename_with_=3F=E9=97=AE=E5=8F=B7.txt?=\"; " +
"filename*=UTF-8''filename%20with%20%3F%E9%97%AE%E5%8F%B7.txt");
String[] parts = result.split("; ");
String quotedPrintableFilename = parts[0] + "; " + parts[1];
assertThat(ContentDisposition.parse(quotedPrintableFilename).getFilename())
.isEqualTo(filename);
String rfc5987Filename = parts[0] + "; " + parts[2];
assertThat(ContentDisposition.parse(rfc5987Filename).getFilename())
.isEqualTo(filename);
}
@Test
void attachmentType(){
ContentDisposition attachment = ContentDisposition.attachment().build();
assertThat(attachment.isAttachment()).isTrue();
assertThat(attachment.isFormData()).isFalse();
assertThat(attachment.isInline()).isFalse();
}
@Test
void formDataType(){
ContentDisposition formData = ContentDisposition.formData().build();
assertThat(formData.isAttachment()).isFalse();
assertThat(formData.isFormData()).isTrue();
assertThat(formData.isInline()).isFalse();
}
@Test
void inlineType(){
ContentDisposition inline = ContentDisposition.inline().build();
assertThat(inline.isAttachment()).isFalse();
assertThat(inline.isFormData()).isFalse();
assertThat(inline.isInline()).isTrue();
}
}
| ContentDispositionTests |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/DirectoryWithSnapshotFeature.java | {
"start": 4854,
"end": 10614
} | class ____ extends
AbstractINodeDiff<INodeDirectory, INodeDirectoryAttributes, DirectoryDiff> {
/** The size of the children list at snapshot creation time. */
private final int childrenSize;
/** The children list diff. */
private final ChildrenDiff diff;
private boolean isSnapshotRoot = false;
private DirectoryDiff(int snapshotId, INodeDirectory dir) {
this(snapshotId, dir, new ChildrenDiff());
}
public DirectoryDiff(int snapshotId, INodeDirectory dir,
ChildrenDiff diff) {
super(snapshotId, null, null);
this.childrenSize = dir.getChildrenList(Snapshot.CURRENT_STATE_ID).size();
this.diff = diff;
}
/** Constructor used by FSImage loading */
DirectoryDiff(int snapshotId, INodeDirectoryAttributes snapshotINode,
DirectoryDiff posteriorDiff, int childrenSize, List<INode> createdList,
List<INode> deletedList, boolean isSnapshotRoot) {
super(snapshotId, snapshotINode, posteriorDiff);
this.childrenSize = childrenSize;
this.diff = new ChildrenDiff(createdList, deletedList);
this.isSnapshotRoot = isSnapshotRoot;
}
public ChildrenDiff getChildrenDiff() {
return diff;
}
void setSnapshotRoot(INodeDirectoryAttributes root) {
this.snapshotINode = root;
this.isSnapshotRoot = true;
}
public boolean isSnapshotRoot() {
return isSnapshotRoot;
}
@Override
void combinePosteriorAndCollectBlocks(
final INode.ReclaimContext reclaimContext,
final INodeDirectory currentDir,
final DirectoryDiff posterior) {
// DeletionOrdered: must not combine posterior
assert !SnapshotManager.isDeletionOrdered();
diff.combinePosterior(posterior.diff, new Diff.Processor<INode>() {
/** Collect blocks for deleted files. */
@Override
public void process(INode inode) {
if (inode != null) {
inode.destroyAndCollectBlocks(reclaimContext);
}
}
});
}
/**
* @return The children list of a directory in a snapshot.
* Since the snapshot is read-only, the logical view of the list is
* never changed although the internal data structure may mutate.
*/
private ReadOnlyList<INode> getChildrenList(final INodeDirectory currentDir) {
return new ReadOnlyList<INode>() {
private List<INode> children = null;
private List<INode> initChildren() {
if (children == null) {
final ChildrenDiff combined = new ChildrenDiff();
DirectoryDiffList directoryDiffList =
currentDir.getDirectoryWithSnapshotFeature().diffs;
final int diffIndex =
directoryDiffList.getDiffIndexById(getSnapshotId());
List<DirectoryDiff> diffList = directoryDiffList
.getDiffListBetweenSnapshots(diffIndex,
directoryDiffList.asList().size(), currentDir);
for (DirectoryDiff d : diffList) {
combined.combinePosterior(d.diff, null);
}
children = combined.apply2Current(ReadOnlyList.Util
.asList(currentDir.getChildrenList(Snapshot.CURRENT_STATE_ID)));
}
return children;
}
@Override
public Iterator<INode> iterator() {
return initChildren().iterator();
}
@Override
public boolean isEmpty() {
return childrenSize == 0;
}
@Override
public int size() {
return childrenSize;
}
@Override
public INode get(int i) {
return initChildren().get(i);
}
};
}
/** @return the child with the given name. */
INode getChild(byte[] name, boolean checkPosterior,
INodeDirectory currentDir) {
for(DirectoryDiff d = this; ; d = d.getPosterior()) {
final Container<INode> returned = d.diff.accessPrevious(name);
if (returned != null) {
// the diff is able to determine the inode
return returned.getElement();
} else if (!checkPosterior) {
// Since checkPosterior is false, return null, i.e. not found.
return null;
} else if (d.getPosterior() == null) {
// no more posterior diff, get from current inode.
return currentDir.getChild(name, Snapshot.CURRENT_STATE_ID);
}
}
}
@Override
public String toString() {
return super.toString() + " childrenSize=" + childrenSize + ", " + diff;
}
int getChildrenSize() {
return childrenSize;
}
@Override
void write(DataOutput out, ReferenceMap referenceMap) throws IOException {
writeSnapshot(out);
out.writeInt(childrenSize);
// Write snapshotINode
out.writeBoolean(isSnapshotRoot);
if (!isSnapshotRoot) {
if (snapshotINode != null) {
out.writeBoolean(true);
FSImageSerialization.writeINodeDirectoryAttributes(snapshotINode, out);
} else {
out.writeBoolean(false);
}
}
// Write diff. Node need to write poseriorDiff, since diffs is a list.
diff.write(out, referenceMap);
}
@Override
void destroyDiffAndCollectBlocks(
INode.ReclaimContext reclaimContext, INodeDirectory currentINode) {
// this diff has been deleted
diff.destroyDeletedList(reclaimContext);
INodeDirectoryAttributes snapshotINode = getSnapshotINode();
if (snapshotINode != null && snapshotINode.getAclFeature() != null) {
AclStorage.removeAclFeature(snapshotINode.getAclFeature());
}
}
}
/** A list of directory diffs. */
public static | DirectoryDiff |
java | grpc__grpc-java | okhttp/third_party/okhttp/main/java/io/grpc/okhttp/internal/framed/Variant.java | {
"start": 841,
"end": 1266
} | interface ____ {
/** The protocol as selected using ALPN. */
Protocol getProtocol();
/**
* @param client true if this is the HTTP client's reader, reading frames from a server.
*/
FrameReader newReader(BufferedSource source, boolean client);
/**
* @param client true if this is the HTTP client's writer, writing frames to a server.
*/
FrameWriter newWriter(BufferedSink sink, boolean client);
}
| Variant |
java | apache__camel | core/camel-support/src/main/java/org/apache/camel/support/ClassicUuidGenerator.java | {
"start": 1256,
"end": 5105
} | class ____ implements UuidGenerator {
private static final Logger LOG = LoggerFactory.getLogger(ClassicUuidGenerator.class);
private static final Lock LOCK = new ReentrantLock();
private static final String UNIQUE_STUB;
private static int instanceCount;
private static String hostName;
private String seed;
// must use AtomicLong to ensure atomic get and update operation that is thread-safe
private final AtomicLong sequence = new AtomicLong(1);
private final int length;
static {
String stub = "";
boolean canAccessSystemProps = true;
try {
System.getProperty("java.version");
} catch (SecurityException se) {
canAccessSystemProps = false;
}
if (canAccessSystemProps) {
try {
if (hostName == null) {
hostName = InetAddressUtil.getLocalHostName();
}
stub = "-" + System.currentTimeMillis() + "-";
} catch (Exception e) {
if (LOG.isTraceEnabled()) {
LOG.trace("Cannot generate unique stub by using DNS", e);
} else {
LOG.warn("Cannot generate unique stub by using DNS due {}. This exception is ignored.", e.getMessage());
}
}
}
// fallback to use localhost
if (hostName == null) {
hostName = "localhost";
}
hostName = sanitizeHostName(hostName);
if (ObjectHelper.isEmpty(stub)) {
stub = "-1-" + System.currentTimeMillis() + "-";
}
UNIQUE_STUB = stub;
}
public ClassicUuidGenerator(String prefix) {
LOCK.lock();
try {
this.seed = prefix + UNIQUE_STUB + (instanceCount++) + "-";
// let the ID be friendly for URL and file systems
this.seed = generateSanitizedId(this.seed);
this.length = seed.length() + (Long.toString(Long.MAX_VALUE)).length();
} finally {
LOCK.unlock();
}
}
public ClassicUuidGenerator() {
this("ID-" + hostName);
}
/**
* As we have to find the hostname as a side-affect of generating a unique stub, we allow it's easy retrieval here
*
* @return the local host name
*/
public static String getHostName() {
return hostName;
}
public static String sanitizeHostName(String hostName) {
boolean changed = false;
StringBuilder sb = new StringBuilder();
for (char ch : hostName.toCharArray()) {
// only include ASCII chars
if (ch < 127) {
sb.append(ch);
} else {
changed = true;
}
}
if (changed) {
String newHost = sb.toString();
LOG.info("Sanitized hostname from: {} to: {}", hostName, newHost);
return newHost;
} else {
return hostName;
}
}
@Override
public String generateUuid() {
StringBuilder sb = new StringBuilder(length);
sb.append(seed);
sb.append(sequence.getAndIncrement());
return sb.toString();
}
/**
* Generate a unique ID - that is friendly for a URL or file system
*
* @return a unique id
*/
public String generateSanitizedId() {
return generateSanitizedId(generateUuid());
}
/**
* Ensures that the id is friendly for a URL or file system
*
* @param id the unique id
* @return the id as file friendly id
*/
public static String generateSanitizedId(String id) {
id = id.replace(':', '-');
id = id.replace('_', '-');
id = id.replace('.', '-');
id = id.replace('/', '-');
return id;
}
}
| ClassicUuidGenerator |
java | spring-projects__spring-security | buildSrc/src/main/java/org/springframework/gradle/classpath/CheckClasspathForProhibitedDependencies.java | {
"start": 1266,
"end": 2979
} | class ____ extends DefaultTask {
private Configuration classpath;
public CheckClasspathForProhibitedDependencies() {
getOutputs().upToDateWhen((task) -> true);
}
public void setClasspath(Configuration classpath) {
this.classpath = classpath;
}
@Classpath
public FileCollection getClasspath() {
return this.classpath;
}
@TaskAction
public void checkForProhibitedDependencies() throws IOException {
ResolvedConfiguration resolvedConfiguration = this.classpath.getResolvedConfiguration();
TreeSet<String> prohibited = resolvedConfiguration.getResolvedArtifacts().stream()
.map((artifact) -> artifact.getModuleVersion().getId()).filter(this::prohibited)
.map((id) -> id.getGroup() + ":" + id.getName()).collect(Collectors.toCollection(TreeSet::new));
if (!prohibited.isEmpty()) {
StringBuilder message = new StringBuilder(String.format("Found prohibited dependencies in '%s':%n", this.classpath.getName()));
for (String dependency : prohibited) {
message.append(String.format(" %s%n", dependency));
}
throw new GradleException(message.toString());
}
}
private boolean prohibited(ModuleVersionIdentifier id) {
String group = id.getGroup();
if (group.equals("javax.batch")) {
return false;
}
if (group.equals("javax.cache")) {
return false;
}
if (group.equals("javax.money")) {
return false;
}
if (group.startsWith("javax")) {
return true;
}
if (group.equals("org.slf4j") && id.getName().equals("jcl-over-slf4j")) {
return true;
}
if (group.startsWith("org.jboss.spec")) {
return true;
}
if (group.equals("org.apache.geronimo.specs")) {
return true;
}
return false;
}
}
| CheckClasspathForProhibitedDependencies |
java | spring-projects__spring-boot | configuration-metadata/spring-boot-configuration-processor/src/test/java/org/springframework/boot/configurationsample/simple/InnerClassWithPrivateConstructor.java | {
"start": 1136,
"end": 1339
} | class ____ {
private String name;
private Nested(String ignored) {
}
public String getName() {
return this.name;
}
public void setName(String name) {
this.name = name;
}
}
}
| Nested |
java | quarkusio__quarkus | extensions/panache/hibernate-reactive-rest-data-panache/runtime/src/main/java/io/quarkus/hibernate/reactive/rest/data/panache/runtime/RestDataPanacheExceptionMapper.java | {
"start": 408,
"end": 2201
} | class ____ {
private static final Logger LOGGER = Logger.getLogger(RestDataPanacheExceptionMapper.class);
@ServerExceptionMapper({ RestDataPanacheException.class, CompositeException.class })
public RestResponse<Response> mapExceptions(Exception exception) {
LOGGER.warnf(exception, "Mapping an unhandled %s", exception.getClass().getSimpleName());
RestResponse<Response> response = throwableToResponse(exception, exception.getMessage());
if (response == null) {
response = RestResponse.status(Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), exception.getMessage());
}
return response;
}
private RestResponse<Response> throwableToResponse(Throwable throwable, String message) {
if (throwable instanceof org.hibernate.exception.ConstraintViolationException
|| throwable instanceof HibernateException) {
return RestResponse.status(Response.Status.CONFLICT.getStatusCode(), message);
}
if (throwable instanceof jakarta.validation.ConstraintViolationException) {
return RestResponse.status(Response.Status.BAD_REQUEST.getStatusCode(), message);
}
if (throwable instanceof CompositeException) {
CompositeException compositeException = (CompositeException) throwable;
for (Throwable cause : compositeException.getCauses()) {
RestResponse<Response> response = throwableToResponse(cause, message);
if (response != null) {
return response;
}
}
} else if (throwable.getCause() != null) {
return throwableToResponse(throwable.getCause(), message);
}
return null;
};
}
| RestDataPanacheExceptionMapper |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/AppManagerTestBase.java | {
"start": 1711,
"end": 3348
} | class ____ extends RMAppManager {
private final RMStateStore stateStore;
public TestRMAppManager(RMContext context, Configuration conf) {
super(context, null, null, new ApplicationACLsManager(conf), conf);
this.stateStore = context.getStateStore();
}
public TestRMAppManager(RMContext context,
ClientToAMTokenSecretManagerInRM clientToAMSecretManager,
YarnScheduler scheduler, ApplicationMasterService masterService,
ApplicationACLsManager applicationACLsManager, Configuration conf) {
super(context, scheduler, masterService, applicationACLsManager, conf);
this.stateStore = context.getStateStore();
}
public void checkAppNumCompletedLimit() {
super.checkAppNumCompletedLimit();
}
public void finishApplication(ApplicationId appId) {
super.finishApplication(appId);
}
public int getCompletedAppsListSize() {
return super.getCompletedAppsListSize();
}
public int getNumberOfCompletedAppsInStateStore() {
return this.completedAppsInStateStore;
}
public void submitApplication(
ApplicationSubmissionContext submissionContext, String user)
throws YarnException {
super.submitApplication(submissionContext, System.currentTimeMillis(),
UserGroupInformation.createRemoteUser(user));
}
public String getUserNameForPlacement(final String user,
final ApplicationSubmissionContext context,
final PlacementManager placementManager) throws YarnException {
return super.getUserNameForPlacement(user, context, placementManager);
}
}
}
| TestRMAppManager |
java | apache__camel | components/camel-spring-parent/camel-spring-xml/src/test/java/org/apache/camel/spring/postprocessor/TestPojo.java | {
"start": 1006,
"end": 1330
} | class ____ {
@MagicAnnotation("Changed Value")
private String testValue = "Initial Value";
@Produce("mock:foo")
private ProducerTemplate producer;
public String getTestValue() {
return this.testValue;
}
public void sendToFoo(String msg) {
producer.sendBody(msg);
}
}
| TestPojo |
java | apache__flink | flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/ScalarFunction.java | {
"start": 2921,
"end": 3130
} | class ____ extends ScalarFunction {
* public String eval(Object o) {
* return o.toString();
* }
* }
*
* // a function that accepts any data type as argument and computes a STRING
* | StringifyFunction |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/jobmaster/DefaultSlotPoolServiceSchedulerFactory.java | {
"start": 3778,
"end": 14362
} | class ____
implements SlotPoolServiceSchedulerFactory {
private static final Logger LOG =
LoggerFactory.getLogger(DefaultSlotPoolServiceSchedulerFactory.class);
private final SlotPoolServiceFactory slotPoolServiceFactory;
private final SchedulerNGFactory schedulerNGFactory;
private DefaultSlotPoolServiceSchedulerFactory(
SlotPoolServiceFactory slotPoolServiceFactory, SchedulerNGFactory schedulerNGFactory) {
this.slotPoolServiceFactory = slotPoolServiceFactory;
this.schedulerNGFactory = schedulerNGFactory;
}
@VisibleForTesting
SchedulerNGFactory getSchedulerNGFactory() {
return schedulerNGFactory;
}
@Override
public SlotPoolService createSlotPoolService(
JobID jid,
DeclarativeSlotPoolFactory declarativeSlotPoolFactory,
@Nonnull ComponentMainThreadExecutor componentMainThreadExecutor) {
return slotPoolServiceFactory.createSlotPoolService(
jid, declarativeSlotPoolFactory, componentMainThreadExecutor);
}
@Override
public JobManagerOptions.SchedulerType getSchedulerType() {
return schedulerNGFactory.getSchedulerType();
}
@Override
public SchedulerNG createScheduler(
Logger log,
ExecutionPlan executionPlan,
Executor ioExecutor,
Configuration configuration,
SlotPoolService slotPoolService,
ScheduledExecutorService futureExecutor,
ClassLoader userCodeLoader,
CheckpointRecoveryFactory checkpointRecoveryFactory,
Duration rpcTimeout,
BlobWriter blobWriter,
JobManagerJobMetricGroup jobManagerJobMetricGroup,
Duration slotRequestTimeout,
ShuffleMaster<?> shuffleMaster,
JobMasterPartitionTracker partitionTracker,
ExecutionDeploymentTracker executionDeploymentTracker,
long initializationTimestamp,
ComponentMainThreadExecutor mainThreadExecutor,
FatalErrorHandler fatalErrorHandler,
JobStatusListener jobStatusListener,
Collection<FailureEnricher> failureEnrichers,
BlocklistOperations blocklistOperations)
throws Exception {
return schedulerNGFactory.createInstance(
log,
executionPlan,
ioExecutor,
configuration,
slotPoolService,
futureExecutor,
userCodeLoader,
checkpointRecoveryFactory,
rpcTimeout,
blobWriter,
jobManagerJobMetricGroup,
slotRequestTimeout,
shuffleMaster,
partitionTracker,
executionDeploymentTracker,
initializationTimestamp,
mainThreadExecutor,
fatalErrorHandler,
jobStatusListener,
failureEnrichers,
blocklistOperations);
}
public static DefaultSlotPoolServiceSchedulerFactory create(
SlotPoolServiceFactory slotPoolServiceFactory, SchedulerNGFactory schedulerNGFactory) {
return new DefaultSlotPoolServiceSchedulerFactory(
slotPoolServiceFactory, schedulerNGFactory);
}
public static DefaultSlotPoolServiceSchedulerFactory fromConfiguration(
Configuration configuration, JobType jobType, boolean isDynamicGraph) {
final Duration rpcTimeout = configuration.get(RpcOptions.ASK_TIMEOUT_DURATION);
final Duration slotIdleTimeout = configuration.get(JobManagerOptions.SLOT_IDLE_TIMEOUT);
final Duration batchSlotTimeout = configuration.get(JobManagerOptions.SLOT_REQUEST_TIMEOUT);
final SlotPoolServiceFactory slotPoolServiceFactory;
final SchedulerNGFactory schedulerNGFactory;
JobManagerOptions.SchedulerType schedulerType =
getSchedulerType(configuration, jobType, isDynamicGraph);
final Duration slotRequestMaxInterval = configuration.get(SLOT_REQUEST_MAX_INTERVAL);
final TaskManagerLoadBalanceMode mode = configuration.get(TASK_MANAGER_LOAD_BALANCE_MODE);
boolean deferSlotAllocation =
mode == TaskManagerLoadBalanceMode.TASKS && jobType == JobType.STREAMING;
if (configuration
.getOptional(JobManagerOptions.HYBRID_PARTITION_DATA_CONSUME_CONSTRAINT)
.isPresent()) {
Preconditions.checkState(
schedulerType == JobManagerOptions.SchedulerType.AdaptiveBatch,
"Only adaptive batch scheduler supports setting "
+ JobManagerOptions.HYBRID_PARTITION_DATA_CONSUME_CONSTRAINT.key());
}
switch (schedulerType) {
case Default:
schedulerNGFactory = new DefaultSchedulerFactory();
slotPoolServiceFactory =
new DeclarativeSlotPoolBridgeServiceFactory(
SystemClock.getInstance(),
rpcTimeout,
slotIdleTimeout,
batchSlotTimeout,
slotRequestMaxInterval,
deferSlotAllocation,
getRequestSlotMatchingStrategy(configuration, jobType));
break;
case Adaptive:
schedulerNGFactory = new AdaptiveSchedulerFactory();
slotPoolServiceFactory =
new DeclarativeSlotPoolServiceFactory(
SystemClock.getInstance(),
slotIdleTimeout,
rpcTimeout,
slotRequestMaxInterval);
break;
case AdaptiveBatch:
schedulerNGFactory = new AdaptiveBatchSchedulerFactory();
slotPoolServiceFactory =
new DeclarativeSlotPoolBridgeServiceFactory(
SystemClock.getInstance(),
rpcTimeout,
slotIdleTimeout,
batchSlotTimeout,
slotRequestMaxInterval,
deferSlotAllocation,
getRequestSlotMatchingStrategy(configuration, jobType));
break;
default:
throw new IllegalArgumentException(
String.format(
"Illegal value [%s] for config option [%s]",
schedulerType, JobManagerOptions.SCHEDULER.key()));
}
return new DefaultSlotPoolServiceSchedulerFactory(
slotPoolServiceFactory, schedulerNGFactory);
}
private static JobManagerOptions.SchedulerType getSchedulerType(
Configuration configuration, JobType jobType, boolean isDynamicGraph) {
JobManagerOptions.SchedulerType schedulerType;
if (jobType == JobType.BATCH) {
if (configuration.get(JobManagerOptions.SCHEDULER_MODE)
== SchedulerExecutionMode.REACTIVE
|| configuration.get(JobManagerOptions.SCHEDULER)
== JobManagerOptions.SchedulerType.Adaptive) {
LOG.info(
"Adaptive Scheduler configured, but Batch job detected. Changing scheduler type to 'AdaptiveBatch'.");
// overwrite
schedulerType = JobManagerOptions.SchedulerType.AdaptiveBatch;
} else {
schedulerType =
configuration
.getOptional(JobManagerOptions.SCHEDULER)
.orElse(
isDynamicGraph
? JobManagerOptions.SchedulerType.AdaptiveBatch
: JobManagerOptions.SchedulerType.Default);
}
} else {
if (configuration.get(JobManagerOptions.SCHEDULER_MODE)
== SchedulerExecutionMode.REACTIVE) {
schedulerType = JobManagerOptions.SchedulerType.Adaptive;
} else {
schedulerType =
configuration
.getOptional(JobManagerOptions.SCHEDULER)
.orElse(
System.getProperties()
.containsKey(
"flink.tests.enable-adaptive-scheduler")
? JobManagerOptions.SchedulerType.Adaptive
: JobManagerOptions.SchedulerType.Default);
}
}
return schedulerType;
}
public static RequestSlotMatchingStrategy getRequestSlotMatchingStrategy(
Configuration configuration, JobType jobType) {
final boolean isLocalRecoveryEnabled =
configuration.get(StateRecoveryOptions.LOCAL_RECOVERY);
final TaskManagerLoadBalanceMode mode = configuration.get(TASK_MANAGER_LOAD_BALANCE_MODE);
if (isLocalRecoveryEnabled) {
if (jobType == JobType.STREAMING) {
final RequestSlotMatchingStrategy rollback =
mode == TaskManagerLoadBalanceMode.TASKS
? TasksBalancedRequestSlotMatchingStrategy.INSTANCE
: SimpleRequestSlotMatchingStrategy.INSTANCE;
return PreferredAllocationRequestSlotMatchingStrategy.create(rollback);
} else {
LOG.warn(
"Batch jobs do not support local recovery. Falling back for request slot matching strategy to {}.",
SimpleRequestSlotMatchingStrategy.class.getSimpleName());
return SimpleRequestSlotMatchingStrategy.INSTANCE;
}
} else {
if (jobType == JobType.STREAMING && mode == TaskManagerLoadBalanceMode.TASKS) {
return TasksBalancedRequestSlotMatchingStrategy.INSTANCE;
}
return SimpleRequestSlotMatchingStrategy.INSTANCE;
}
}
}
| DefaultSlotPoolServiceSchedulerFactory |
java | quarkusio__quarkus | extensions/vertx-http/runtime/src/main/java/io/quarkus/vertx/http/runtime/security/HttpSecurityImpl.java | {
"start": 1605,
"end": 12105
} | class ____ implements HttpSecurity {
private static final Logger LOG = Logger.getLogger(HttpSecurityImpl.class.getName());
private final List<HttpPermissionCarrier> httpPermissions;
private final List<HttpAuthenticationMechanism> mechanisms;
private final VertxHttpConfig vertxHttpConfig;
private RolesMapping rolesMapping;
private ClientAuth clientAuth;
private Optional<String> httpServerTlsConfigName;
private CORSConfig corsConfig;
private CSRF csrf;
HttpSecurityImpl(ClientAuth clientAuth, VertxHttpConfig vertxHttpConfig, Optional<String> httpServerTlsConfigName) {
this.rolesMapping = null;
this.httpPermissions = new ArrayList<>();
this.mechanisms = new ArrayList<>();
this.clientAuth = clientAuth;
this.vertxHttpConfig = vertxHttpConfig;
this.httpServerTlsConfigName = httpServerTlsConfigName;
this.corsConfig = vertxHttpConfig == null ? null : vertxHttpConfig.cors();
this.csrf = null;
}
@Override
public HttpSecurity cors(String origin) {
Objects.requireNonNull(origin);
return cors(Set.of(origin));
}
@Override
public HttpSecurity cors(Set<String> origins) {
return cors(CORS.origins(origins).build());
}
@Override
public HttpSecurity cors(CORS cors) {
if (cors == null) {
throw new IllegalArgumentException("CORS must not be null");
}
final boolean alreadyConfiguredInAppProps = corsConfig.accessControlAllowCredentials().isPresent()
|| corsConfig.accessControlMaxAge().isPresent()
|| corsConfig.headers().isPresent()
|| corsConfig.methods().isPresent()
|| corsConfig.exposedHeaders().isPresent();
if (alreadyConfiguredInAppProps) {
throw new IllegalStateException(
"CORS cannot be configured both programmatically and in the 'application.properties' file");
}
final CORSConfig newCorsConfig = (CORSConfig) cors;
if (!corsConfig.origins().orElse(List.of()).isEmpty()) {
// for example SmallRye OpenAPI extension adds a management URL to 'origins'
// and we want users know that they are loosing some configuration
final List<String> newOrigins = newCorsConfig.origins().orElse(List.of());
final String missingOrigins = corsConfig.origins().get().stream()
.filter(origin -> !newOrigins.contains(origin)).collect(Collectors.joining(","));
if (!missingOrigins.isEmpty()) {
LOG.warnf(
"CORS are configured programmatically, but previously configured '%s' origins are missing in the new configuration",
missingOrigins);
}
}
corsConfig = newCorsConfig;
return this;
}
@Override
public HttpSecurity csrf(CSRF csrf) {
if (csrf == null) {
throw new IllegalArgumentException("CSRF must not be null");
}
this.csrf = csrf;
return this;
}
@Override
public HttpSecurity mechanism(HttpAuthenticationMechanism mechanism) {
Objects.requireNonNull(mechanism);
if (mechanism.getClass() == FormAuthenticationMechanism.class) {
final FormAuthConfig defaults = HttpSecurityUtils.getDefaultAuthConfig().auth().form();
final FormAuthConfig actualConfig = vertxHttpConfig.auth().form();
if (!actualConfig.equals(defaults)) {
throw new IllegalArgumentException("Cannot configure form-based authentication programmatically "
+ "because it has already been configured in the 'application.properties' file");
}
} else if (mechanism.getClass() == BasicAuthenticationMechanism.class) {
String actualRealm = vertxHttpConfig.auth().realm().orElse(null);
if (actualRealm != null) {
throw new IllegalArgumentException("Cannot configure basic authentication programmatically because "
+ "the authentication realm has already been configured in the 'application.properties' file");
}
} else if (mechanism.getClass() == MtlsAuthenticationMechanism.class) {
boolean mTlsEnabled = !ClientAuth.NONE.equals(clientAuth);
if (mTlsEnabled) {
// current we do not allow "merging" (or overriding) of the configuration provided in application.properties
// there shouldn't be a technical issue allowing that, but that's the behavior we have for other mechanisms
// as well, so this method only allows to "enable" mTLS, never disable or change configuration provided
// properties file
throw new IllegalArgumentException("TLS client authentication has already been enabled with this API or"
+ " with the 'quarkus.http.ssl.client-auth' configuration property");
}
var mTLS = ((MtlsAuthenticationMechanism) mechanism);
clientAuth = mTLS.getTlsClientAuth();
if (mTLS.getHttpServerTlsConfigName().isPresent()) {
if (httpServerTlsConfigName.isPresent()) {
throw new IllegalArgumentException("Cannot configure TLS configuration name programmatically because it "
+ " has already been configured with the 'quarkus.http.tls-configuration-name' configuration property");
}
httpServerTlsConfigName = mTLS.getHttpServerTlsConfigName();
if (mTLS.getInitialTlsConfiguration() != null) {
TlsConfigurationRegistry tlsConfigurationRegistry = Arc.container().instance(TlsConfigurationRegistry.class)
.get();
if (tlsConfigurationRegistry.get(httpServerTlsConfigName.get()).isPresent()) {
throw new IllegalArgumentException(("Cannot register the TLS configuration '%s' in the TLS "
+ "Configuration registry because configuration with this name has already"
+ " been registered").formatted(httpServerTlsConfigName.get()));
}
tlsConfigurationRegistry.register(httpServerTlsConfigName.get(), mTLS.getInitialTlsConfiguration());
}
}
}
this.mechanisms.add(mechanism);
return this;
}
@Override
public HttpSecurity basic() {
return mechanism(Basic.create());
}
@Override
public HttpSecurity basic(String authenticationRealm) {
return mechanism(Basic.realm(authenticationRealm));
}
@Override
public HttpSecurity mTLS() {
return mTLS(ClientAuth.REQUIRED);
}
@Override
public HttpSecurity mTLS(String tlsConfigurationName, TlsConfiguration tlsConfiguration) {
return mechanism(MTLS.required(tlsConfigurationName, tlsConfiguration));
}
@Override
public HttpSecurity mTLS(MtlsAuthenticationMechanism mTLSAuthenticationMechanism) {
return mechanism(mTLSAuthenticationMechanism);
}
@Override
public HttpSecurity mTLS(ClientAuth tlsClientAuth) {
if (tlsClientAuth == null) {
throw new IllegalArgumentException("Client authentication cannot be null");
}
return switch (tlsClientAuth) {
case REQUIRED -> mechanism(MTLS.required());
case REQUEST -> mechanism(MTLS.request());
case NONE -> throw new IllegalArgumentException("Client authentication cannot be disabled with this API");
};
}
@Override
public HttpPermission path(String... patterns) {
if (patterns == null || patterns.length == 0) {
throw new IllegalArgumentException("Paths must not be empty");
}
var httpPermission = new HttpPermissionImpl(patterns);
httpPermissions.add(httpPermission);
return httpPermission;
}
@Override
public HttpPermission get(String... paths) {
return path(paths).methods("GET");
}
@Override
public HttpPermission put(String... paths) {
return path(paths).methods("PUT");
}
@Override
public HttpPermission post(String... paths) {
return path(paths).methods("POST");
}
@Override
public HttpPermission delete(String... paths) {
return path(paths).methods("DELETE");
}
@Override
public HttpSecurity rolesMapping(Map<String, List<String>> roleToRoles) {
if (rolesMapping != null) {
throw new IllegalStateException("Roles mapping is already configured");
}
if (roleToRoles == null || roleToRoles.isEmpty()) {
throw new IllegalArgumentException("Roles must not be empty");
}
roleToRoles.forEach(new BiConsumer<String, List<String>>() {
@Override
public void accept(String sourceRole, List<String> targetRoles) {
if (sourceRole.isEmpty()) {
throw new IllegalArgumentException("Source role must not be empty");
}
if (targetRoles == null || targetRoles.isEmpty()) {
throw new IllegalArgumentException("Target roles for role '%s' must not be empty".formatted(sourceRole));
}
}
});
this.rolesMapping = RolesMapping.of(roleToRoles);
return this;
}
@Override
public HttpSecurity rolesMapping(String sourceRole, List<String> targetRoles) {
if (sourceRole == null) {
throw new IllegalArgumentException("Source role must not be null");
}
if (targetRoles == null) {
throw new IllegalArgumentException("Target roles for role '%s' must not be null".formatted(sourceRole));
}
return rolesMapping(Map.of(sourceRole, targetRoles));
}
@Override
public HttpSecurity rolesMapping(String sourceRole, String targetRole) {
if (targetRole == null) {
throw new IllegalArgumentException("Target role for role '%s' must not be null".formatted(sourceRole));
}
return rolesMapping(sourceRole, List.of(targetRole));
}
void addHttpPermissions(List<HttpPermissionCarrier> httpPermissions) {
this.httpPermissions.addAll(httpPermissions);
}
private final | HttpSecurityImpl |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/converter/hbm/MoneyConverter.java | {
"start": 274,
"end": 664
} | class ____
implements AttributeConverter<Money, Long> {
@Override
public Long convertToDatabaseColumn(Money attribute) {
return attribute == null ? null : attribute.getCents();
}
@Override
public Money convertToEntityAttribute(Long dbData) {
return dbData == null ? null : new Money(dbData);
}
}
//end::basic-hbm-attribute-converter-mapping-moneyconverter-example[]
| MoneyConverter |
java | micronaut-projects__micronaut-core | core-processor/src/main/java/io/micronaut/inject/ast/ElementQuery.java | {
"start": 7541,
"end": 7622
} | interface ____ building a query.
* @param <T> The element type.
*/
| when |
java | alibaba__nacos | naming/src/main/java/com/alibaba/nacos/naming/healthcheck/interceptor/HealthCheckInterceptorChain.java | {
"start": 893,
"end": 1302
} | class ____ extends AbstractNamingInterceptorChain<NacosHealthCheckTask> {
private static final HealthCheckInterceptorChain INSTANCE = new HealthCheckInterceptorChain();
private HealthCheckInterceptorChain() {
super(AbstractHealthCheckInterceptor.class);
}
public static HealthCheckInterceptorChain getInstance() {
return INSTANCE;
}
}
| HealthCheckInterceptorChain |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/cdi/bcextensions/SyntheticBeanWithLookupTest.java | {
"start": 3768,
"end": 4155
} | class ____ implements SyntheticBeanCreator<MyPojo> {
static final AtomicInteger counter = new AtomicInteger();
@Override
public MyPojo create(Instance<Object> lookup, Parameters params) {
counter.incrementAndGet();
lookup.select(MyDependentBean.class).get();
return new MyPojo();
}
}
public static | MyPojoCreator |
java | jhy__jsoup | src/main/java/org/jsoup/nodes/LeafNode.java | {
"start": 294,
"end": 3146
} | class ____ extends Node {
Object value; // either a string value, or an attribute map (in the rare case multiple attributes are set)
public LeafNode() {
value = "";
}
protected LeafNode(String coreValue) {
Validate.notNull(coreValue);
value = coreValue;
}
@Override protected final boolean hasAttributes() {
return value instanceof Attributes;
}
@Override
public final Attributes attributes() {
ensureAttributes();
return (Attributes) value;
}
private void ensureAttributes() {
if (!hasAttributes()) { // then value is String coreValue
String coreValue = (String) value;
Attributes attributes = new Attributes();
value = attributes;
attributes.put(nodeName(), coreValue);
}
}
String coreValue() {
return attr(nodeName());
}
@Override @Nullable
public Element parent() {
return parentNode;
}
@Override
public String nodeValue() {
return coreValue();
}
void coreValue(String value) {
attr(nodeName(), value);
}
@Override
public String attr(String key) {
if (!hasAttributes()) {
return nodeName().equals(key) ? (String) value : EmptyString;
}
return super.attr(key);
}
@Override
public Node attr(String key, String value) {
if (!hasAttributes() && key.equals(nodeName())) {
this.value = value;
} else {
ensureAttributes();
super.attr(key, value);
}
return this;
}
@Override
public boolean hasAttr(String key) {
ensureAttributes();
return super.hasAttr(key);
}
@Override
public Node removeAttr(String key) {
ensureAttributes();
return super.removeAttr(key);
}
@Override
public String absUrl(String key) {
ensureAttributes();
return super.absUrl(key);
}
@Override
public String baseUri() {
return parentNode != null ? parentNode.baseUri() : "";
}
@Override
protected void doSetBaseUri(String baseUri) {
// noop
}
@Override
public int childNodeSize() {
return 0;
}
@Override
public Node empty() {
return this;
}
@Override
protected List<Node> ensureChildNodes() {
return EmptyNodes;
}
@Override
void outerHtmlTail(QuietAppendable accum, Document.OutputSettings out) {}
@Override
protected LeafNode doClone(Node parent) {
LeafNode clone = (LeafNode) super.doClone(parent);
// Object value could be plain string or attributes - need to clone
if (hasAttributes())
clone.value = ((Attributes) value).clone();
return clone;
}
}
| LeafNode |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest-client/runtime/src/main/java/io/quarkus/rest/client/reactive/ComputedParamContext.java | {
"start": 217,
"end": 528
} | interface ____ {
/**
* The name of the parameter whose value is being computed
*/
String name();
/**
* Information about the method parameters of the REST Client method for which the computed value is needed
*/
List<MethodParameter> methodParameters();
| ComputedParamContext |
java | square__retrofit | retrofit-adapters/rxjava/src/test/java/retrofit2/adapter/rxjava/CompletableThrowingSafeSubscriberTest.java | {
"start": 4431,
"end": 4921
} | class ____ implements CompletableSubscriber {
private final RecordingSubscriber<Void> delegate;
ForwardingCompletableObserver(RecordingSubscriber<Void> delegate) {
this.delegate = delegate;
}
@Override
public void onSubscribe(Subscription d) {}
@Override
public void onCompleted() {
delegate.onCompleted();
}
@Override
public void onError(Throwable throwable) {
delegate.onError(throwable);
}
}
}
| ForwardingCompletableObserver |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/oracle/createTable/OracleCreateTableTest46.java | {
"start": 1026,
"end": 6043
} | class ____ extends OracleTest {
public void test_types() throws Exception {
String sql = //
" CREATE TABLE \"SC_001\".\"TB_001\" \n" +
" ( \"ID\" NUMBER NOT NULL ENABLE, \n" +
" \"GMT_CREATE\" DATE NOT NULL ENABLE, \n" +
" \"GMT_MODIFIED\" DATE NOT NULL ENABLE, \n" +
" \"SETTING_LEVEL\" VARCHAR2(8) NOT NULL ENABLE, \n" +
" \"OWNER\" NUMBER DEFAULT 0 NOT NULL ENABLE NOVALIDATE, \n" +
" \"ITEM_KEY\" VARCHAR2(64) NOT NULL ENABLE NOVALIDATE, \n" +
" \"ITEM_VALUE\" VARCHAR2(64), \n" +
" \"MODIFIER\" NUMBER NOT NULL ENABLE NOVALIDATE, \n" +
" \"MEMO\" VARCHAR2(512), \n" +
" CONSTRAINT \"AMAIL_USER_SETTING_PK\" PRIMARY KEY (\"ID\")\n" +
" USING INDEX PCTFREE 10 INITRANS 2 MAXTRANS 255 COMPUTE STATISTICS \n" +
" STORAGE(INITIAL 65536 NEXT 1048576 MINEXTENTS 1 MAXEXTENTS 2147483645\n" +
" PCTINCREASE 0 FREELISTS 1 FREELIST GROUPS 1 BUFFER_POOL DEFAULT)\n" +
" TABLESPACE \"APPDATA1M\" ENABLE\n" +
" ) PCTFREE 10 PCTUSED 40 INITRANS 1 MAXTRANS 255 NOCOMPRESS LOGGING\n" +
" STORAGE(INITIAL 65536 NEXT 1048576 MINEXTENTS 1 MAXEXTENTS 2147483645\n" +
" PCTINCREASE 0 FREELISTS 1 FREELIST GROUPS 1 BUFFER_POOL DEFAULT)\n" +
" TABLESPACE \"APPDATA1M\" ";
OracleStatementParser parser = new OracleStatementParser(sql);
List<SQLStatement> statementList = parser.parseStatementList();
SQLStatement stmt = statementList.get(0);
print(statementList);
assertEquals(1, statementList.size());
assertEquals("CREATE TABLE \"SC_001\".\"TB_001\" (\n" +
"\t\"ID\" NUMBER NOT NULL ENABLE,\n" +
"\t\"GMT_CREATE\" DATE NOT NULL ENABLE,\n" +
"\t\"GMT_MODIFIED\" DATE NOT NULL ENABLE,\n" +
"\t\"SETTING_LEVEL\" VARCHAR2(8) NOT NULL ENABLE,\n" +
"\t\"OWNER\" NUMBER DEFAULT 0 NOT NULL ENABLE,\n" +
"\t\"ITEM_KEY\" VARCHAR2(64) NOT NULL ENABLE,\n" +
"\t\"ITEM_VALUE\" VARCHAR2(64),\n" +
"\t\"MODIFIER\" NUMBER NOT NULL ENABLE,\n" +
"\t\"MEMO\" VARCHAR2(512),\n" +
"\tCONSTRAINT \"AMAIL_USER_SETTING_PK\" PRIMARY KEY (\"ID\")\n" +
"\t\tUSING INDEX\n" +
"\t\tPCTFREE 10\n" +
"\t\tINITRANS 2\n" +
"\t\tMAXTRANS 255\n" +
"\t\tTABLESPACE \"APPDATA1M\"\n" +
"\t\tSTORAGE (\n" +
"\t\t\tINITIAL 65536\n" +
"\t\t\tNEXT 1048576\n" +
"\t\t\tMINEXTENTS 1\n" +
"\t\t\tMAXEXTENTS 2147483645\n" +
"\t\t\tPCTINCREASE 0\n" +
"\t\t\tFREELISTS 1\n" +
"\t\t\tFREELIST GROUPS 1\n" +
"\t\t\tBUFFER_POOL DEFAULT\n" +
"\t\t)\n" +
"\t\tCOMPUTE STATISTICS\n" +
"\t\tENABLE\n" +
")\n" +
"PCTFREE 10\n" +
"PCTUSED 40\n" +
"INITRANS 1\n" +
"MAXTRANS 255\n" +
"NOCOMPRESS\n" +
"LOGGING\n" +
"TABLESPACE \"APPDATA1M\"\n" +
"STORAGE (\n" +
"\tINITIAL 65536\n" +
"\tNEXT 1048576\n" +
"\tMINEXTENTS 1\n" +
"\tMAXEXTENTS 2147483645\n" +
"\tPCTINCREASE 0\n" +
"\tFREELISTS 1\n" +
"\tFREELIST GROUPS 1\n" +
"\tBUFFER_POOL DEFAULT\n" +
")",
SQLUtils.toSQLString(stmt, JdbcConstants.ORACLE));
OracleSchemaStatVisitor visitor = new OracleSchemaStatVisitor();
stmt.accept(visitor);
System.out.println("Tables : " + visitor.getTables());
System.out.println("fields : " + visitor.getColumns());
System.out.println("coditions : " + visitor.getConditions());
System.out.println("relationships : " + visitor.getRelationships());
System.out.println("orderBy : " + visitor.getOrderByColumns());
assertEquals(1, visitor.getTables().size());
assertEquals(9, visitor.getColumns().size());
assertTrue(visitor.containsColumn("SC_001.TB_001", "ID"));
}
}
| OracleCreateTableTest46 |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/annotation/authentication/NamespacePasswordEncoderTests.java | {
"start": 2973,
"end": 3406
} | class ____ {
@Autowired
void configure(AuthenticationManagerBuilder auth) throws Exception {
BCryptPasswordEncoder encoder = new BCryptPasswordEncoder();
// @formatter:off
auth
.inMemoryAuthentication()
.withUser("user").password(encoder.encode("password")).roles("USER").and()
.passwordEncoder(encoder);
// @formatter:on
}
}
@Configuration
@EnableWebSecurity
static | PasswordEncoderWithInMemoryConfig |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/security/AppPriorityACLsManager.java | {
"start": 1842,
"end": 7273
} | class ____ {
private Priority priority;
private Priority defaultPriority;
private AccessControlList acl;
PriorityACL(Priority priority, Priority defaultPriority,
AccessControlList acl) {
this.setPriority(priority);
this.setDefaultPriority(defaultPriority);
this.setAcl(acl);
}
public Priority getPriority() {
return priority;
}
public void setPriority(Priority maxPriority) {
this.priority = maxPriority;
}
public Priority getDefaultPriority() {
return defaultPriority;
}
public void setDefaultPriority(Priority defaultPriority) {
this.defaultPriority = defaultPriority;
}
public AccessControlList getAcl() {
return acl;
}
public void setAcl(AccessControlList acl) {
this.acl = acl;
}
}
private boolean isACLsEnable;
private final ConcurrentMap<String, List<PriorityACL>> allAcls =
new ConcurrentHashMap<>();
public AppPriorityACLsManager(Configuration conf) {
this.isACLsEnable = conf.getBoolean(YarnConfiguration.YARN_ACL_ENABLE,
YarnConfiguration.DEFAULT_YARN_ACL_ENABLE);
}
/**
* Clear priority acl during refresh.
*
* @param queueName
* Queue Name
*/
public void clearPriorityACLs(String queueName) {
allAcls.remove(queueName);
}
/**
* Each Queue could have configured with different priority acl's groups. This
* method helps to store each such ACL list against queue.
*
* @param priorityACLGroups
* List of Priority ACL Groups.
* @param queueName
* Queue Name associate with priority acl groups.
*/
public void addPrioirityACLs(List<AppPriorityACLGroup> priorityACLGroups,
String queueName) {
List<PriorityACL> priorityACL = allAcls.get(queueName);
if (null == priorityACL) {
priorityACL = new ArrayList<PriorityACL>();
allAcls.put(queueName, priorityACL);
}
// Ensure lowest priority PriorityACLGroup comes first in the list.
Collections.sort(priorityACLGroups);
for (AppPriorityACLGroup priorityACLGroup : priorityACLGroups) {
priorityACL.add(new PriorityACL(priorityACLGroup.getMaxPriority(),
priorityACLGroup.getDefaultPriority(),
priorityACLGroup.getACLList()));
if (LOG.isDebugEnabled()) {
LOG.debug("Priority ACL group added: max-priority - "
+ priorityACLGroup.getMaxPriority() + "default-priority - "
+ priorityACLGroup.getDefaultPriority());
}
}
}
/**
* Priority based checkAccess to ensure that given user has enough permission
* to submit application at a given priority level.
*
* @param callerUGI
* User who submits the application.
* @param queueName
* Queue to which application is submitted.
* @param submittedPriority
* priority of the application.
* @return True or False to indicate whether application can be submitted at
* submitted priority level or not.
*/
public boolean checkAccess(UserGroupInformation callerUGI, String queueName,
Priority submittedPriority) {
if (!isACLsEnable) {
return true;
}
List<PriorityACL> acls = allAcls.get(queueName);
if (acls == null || acls.isEmpty()) {
return true;
}
PriorityACL approvedPriorityACL = getMappedPriorityAclForUGI(acls,
callerUGI, submittedPriority);
if (approvedPriorityACL == null) {
return false;
}
return true;
}
/**
* If an application is submitted without any priority, and submitted user has
* a default priority, this method helps to update this default priority as
* app's priority.
*
* @param queueName
* Submitted queue
* @param user
* User who submitted this application
* @return Default priority associated with given user.
*/
public Priority getDefaultPriority(String queueName,
UserGroupInformation user) {
if (!isACLsEnable) {
return null;
}
List<PriorityACL> acls = allAcls.get(queueName);
if (acls == null || acls.isEmpty()) {
return null;
}
PriorityACL approvedPriorityACL = getMappedPriorityAclForUGI(acls, user,
null);
if (approvedPriorityACL == null) {
return null;
}
Priority defaultPriority = Priority
.newInstance(approvedPriorityACL.getDefaultPriority().getPriority());
return defaultPriority;
}
private PriorityACL getMappedPriorityAclForUGI(List<PriorityACL> acls ,
UserGroupInformation user, Priority submittedPriority) {
// Iterate through all configured ACLs starting from lower priority.
// If user is found corresponding to a configured priority, then store
// that entry. if failed, continue iterate through whole acl list.
PriorityACL selectedAcl = null;
for (PriorityACL entry : acls) {
AccessControlList list = entry.getAcl();
if (list.isUserAllowed(user)) {
selectedAcl = entry;
// If submittedPriority is passed through the argument, also check
// whether submittedPriority is under max-priority of each ACL group.
if (submittedPriority != null) {
selectedAcl = null;
if (submittedPriority.getPriority() <= entry.getPriority()
.getPriority()) {
return entry;
}
}
}
}
return selectedAcl;
}
}
| PriorityACL |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/parser/deser/FieldDeserializerTest7.java | {
"start": 750,
"end": 795
} | class ____ {
public double id;
}
}
| V1 |
java | apache__flink | flink-tests/src/test/java/org/apache/flink/test/accumulators/AccumulatorITCase.java | {
"start": 9234,
"end": 10668
} | class ____
extends RichGroupReduceFunction<Tuple2<String, Integer>, Tuple2<String, Integer>>
implements GroupCombineFunction<Tuple2<String, Integer>, Tuple2<String, Integer>> {
private IntCounter reduceCalls;
private IntCounter combineCalls;
@Override
public void open(OpenContext openContext) {
this.reduceCalls = getRuntimeContext().getIntCounter("reduce-calls");
this.combineCalls = getRuntimeContext().getIntCounter("combine-calls");
}
@Override
public void reduce(
Iterable<Tuple2<String, Integer>> values, Collector<Tuple2<String, Integer>> out) {
reduceCalls.add(1);
reduceInternal(values, out);
}
@Override
public void combine(
Iterable<Tuple2<String, Integer>> values, Collector<Tuple2<String, Integer>> out) {
combineCalls.add(1);
reduceInternal(values, out);
}
private void reduceInternal(
Iterable<Tuple2<String, Integer>> values, Collector<Tuple2<String, Integer>> out) {
int sum = 0;
String key = null;
for (Tuple2<String, Integer> e : values) {
key = e.f0;
sum += e.f1;
}
out.collect(new Tuple2<>(key, sum));
}
}
/** Custom accumulator. */
public static | CountWords |
java | quarkusio__quarkus | extensions/hibernate-envers/runtime/src/main/java/io/quarkus/hibernate/envers/HibernateEnversRecorder.java | {
"start": 465,
"end": 775
} | class ____ {
public HibernateOrmIntegrationStaticInitListener createStaticInitListener(HibernateEnversBuildTimeConfig buildTimeConfig,
String puName) {
return new HibernateEnversIntegrationStaticInitListener(buildTimeConfig, puName);
}
private static final | HibernateEnversRecorder |