function_name
stringlengths 1
57
| function_code
stringlengths 20
4.99k
| documentation
stringlengths 50
2k
| language
stringclasses 5
values | file_path
stringlengths 8
166
| line_number
int32 4
16.7k
| parameters
listlengths 0
20
| return_type
stringlengths 0
131
| has_type_hints
bool 2
classes | complexity
int32 1
51
| quality_score
float32 6
9.68
| repo_name
stringclasses 34
values | repo_stars
int32 2.9k
242k
| docstring_style
stringclasses 7
values | is_async
bool 2
classes |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
_is_label_or_level_reference
|
def _is_label_or_level_reference(self, key: Level, axis: AxisInt = 0) -> bool:
    """
    Check whether `key` refers to either a label or a level on `axis`.

    To qualify, `key` must be a string that:
    - (axis=0): matches a column label or an index level
    - (axis=1): matches an index label or a column level

    Parameters
    ----------
    key : Hashable
        Potential label or level name.
    axis : int, default 0
        Axis that levels are associated with (0 for index, 1 for columns).

    Returns
    -------
    bool
    """
    # Level references are checked first; fall back to label references.
    if self._is_level_reference(key, axis=axis):
        return True
    return self._is_label_reference(key, axis=axis)
|
Test whether a key is a label or level reference for a given axis.
To be considered either a label or a level reference, `key` must be a
string that:
- (axis=0): Matches a column label or an index level
- (axis=1): Matches an index label or a column level
Parameters
----------
key : Hashable
Potential label or level name
axis : int, default 0
Axis that levels are associated with (0 for index, 1 for columns)
Returns
-------
bool
|
python
|
pandas/core/generic.py
| 1,666
|
[
"self",
"key",
"axis"
] |
bool
| true
| 2
| 6.4
|
pandas-dev/pandas
| 47,362
|
numpy
| false
|
_prepare_dump
|
def _prepare_dump(self, existing_dump: CacheDump | None) -> CacheDump:
    """Prepare a cache dump from the current Memoizer state.

    Takes the existing dump (if any) and merges it with the current
    in-memory cache entries.

    Args:
        existing_dump: The existing dump to merge with, or None if starting fresh.

    Returns:
        The prepared cache dump ready to be written to disk, with
        ``cache_entries`` merged and ``cache_size`` recomputed.
    """
    # Start with existing data or empty structure
    if existing_dump is not None:
        dump = existing_dump
    else:
        dump: CacheDump = {"cache_entries": {}, "cache_size": 0}
    # Ensure cache_entries exists (a pre-existing dump may lack the key)
    if "cache_entries" not in dump:
        dump["cache_entries"] = {}
    # Format cache entries as {"params": ..., "result": ...}
    formatted_cache: dict[str, CacheDumpEntry] = {}
    for key, value in self._cache._memory.items():
        entry = cast(CacheEntry, value)
        formatted_cache[key] = CacheDumpEntry(
            params=entry.encoded_params,
            result=entry.encoded_result,
        )
    # Merge based on sub_key
    if self._sub_key:
        # Store under sub_key — note this replaces any previously dumped
        # entries for the same sub_key rather than merging into them.
        dump["cache_entries"][self._sub_key] = formatted_cache
    else:
        # Merge directly into cache_entries
        dump["cache_entries"].update(formatted_cache)
    # Calculate total cache size across all entries
    total_size = 0
    for value in dump["cache_entries"].values():
        if isinstance(value, dict):
            # Check if it's a CacheDumpEntry (has 'params' and 'result') or a sub_key dict
            if "params" in value and "result" in value:
                # Direct entry
                total_size += 1
            else:
                # Sub_key with nested entries
                total_size += len(value)
        else:
            total_size += 1
    dump["cache_size"] = total_size
    return dump
|
Prepare a cache dump from the current Memoizer state.
Takes the existing dump (if any) and merges it with the current
in-memory cache entries.
Args:
existing_dump: The existing dump to merge with, or None if starting fresh.
Returns:
The prepared cache dump ready to be written to disk.
|
python
|
torch/_inductor/runtime/caching/interfaces.py
| 339
|
[
"self",
"existing_dump"
] |
CacheDump
| true
| 13
| 8.24
|
pytorch/pytorch
| 96,034
|
google
| false
|
resetCaches
|
/**
 * Reset this cache manager's caches: clear each cache's entries, and in
 * 'dynamic' mode additionally drop the caches entirely so that they are
 * re-created on demand.
 */
@Override
public void resetCaches() {
    for (Cache cache : this.cacheMap.values()) {
        cache.clear();
    }
    if (this.dynamic) {
        this.cacheMap.clear();
    }
}
|
Reset this cache manager's caches, removing them completely for on-demand
re-creation in 'dynamic' mode, or simply clearing their entries otherwise.
@since 6.2.14
|
java
|
spring-context/src/main/java/org/springframework/cache/concurrent/ConcurrentMapCacheManager.java
| 187
|
[] |
void
| true
| 2
| 6.56
|
spring-projects/spring-framework
| 59,386
|
javadoc
| false
|
selectInvocableMethod
|
/**
 * Select an invocable method on the target type: the given method itself if
 * actually exposed on the target type, otherwise a corresponding method
 * resolved via {@link MethodIntrospector#selectInvocableMethod}.
 * @param method the method to check
 * @param targetType the target type to search methods on (may be {@code null})
 * @return a corresponding invocable method on the target type
 * @throws IllegalStateException if the resolved method is a private instance
 * method on a Spring proxy and therefore cannot be delegated to the target bean
 */
public static Method selectInvocableMethod(Method method, @Nullable Class<?> targetType) {
    if (targetType == null) {
        return method;
    }
    Method resolved = MethodIntrospector.selectInvocableMethod(method, targetType);
    int mods = resolved.getModifiers();
    boolean privateInstanceMethod = Modifier.isPrivate(mods) && !Modifier.isStatic(mods);
    if (privateInstanceMethod && SpringProxy.class.isAssignableFrom(targetType)) {
        throw new IllegalStateException(String.format(
                "Need to invoke method '%s' found on proxy for target class '%s' but cannot " +
                "be delegated to target bean. Switch its visibility to package or protected.",
                method.getName(), method.getDeclaringClass().getSimpleName()));
    }
    return resolved;
}
|
Select an invocable method on the target type: either the given method itself
if actually exposed on the target type, or otherwise a corresponding method
on one of the target type's interfaces or on the target type itself.
@param method the method to check
@param targetType the target type to search methods on (typically an AOP proxy)
@return a corresponding invocable method on the target type
@throws IllegalStateException if the given method is not invocable on the given
target type (typically due to a proxy mismatch)
@since 4.3
@see MethodIntrospector#selectInvocableMethod(Method, Class)
|
java
|
spring-aop/src/main/java/org/springframework/aop/support/AopUtils.java
| 147
|
[
"method",
"targetType"
] |
Method
| true
| 5
| 7.6
|
spring-projects/spring-framework
| 59,386
|
javadoc
| false
|
getOrDeduceName
|
/**
 * Deduce the name for this registration: the user-specified name if set,
 * otherwise the bean name, otherwise a convention-based name derived from
 * the given value (or the literal {@code "null"} when the value is null).
 * @param value the object used for convention-based names
 * @return the deduced name
 */
protected final String getOrDeduceName(@Nullable Object value) {
    if (this.name != null) {
        return this.name;
    }
    if (this.beanName != null) {
        return this.beanName;
    }
    return (value != null) ? Conventions.getVariableName(value) : "null";
}
|
Deduces the name for this registration. Will return user specified name or fallback
to the bean name. If the bean name is not available, convention based naming is
used.
@param value the object used for convention based names
@return the deduced name
|
java
|
core/spring-boot/src/main/java/org/springframework/boot/web/servlet/DynamicRegistrationBean.java
| 159
|
[
"value"
] |
String
| true
| 4
| 8.24
|
spring-projects/spring-boot
| 79,428
|
javadoc
| false
|
aot_module
|
def aot_module(mod: nn.Module, *args, **kwargs) -> nn.Module:
    """
    Traces the forward and backward graph of :attr:`mod` using torch dispatch
    tracing mechanism. It is wrapper function, that underneath uses
    :func:`aot_function` to perform tracing and compilation.

    :func:`aot_module` lifts the parameters and buffers of ``nn.Module`` as inputs
    to a new callable which is then compiled through :func:`aot_function`.

    .. warning::
        This API is experimental and likely to change.

    Args:
        mod (Callable): A ``nn.Module`` module.
        args : args to be passed to :func:`aot_function`
        kwargs : kwargs to be passed to :func:`aot_function`

    Returns:
        Returns a ``nn.Module`` that retains the eager behavior of the original
        :attr:`mod`, but with forward and backward graph compiled.
    """
    # See Note: [Fake Modules and AOTAutograd]
    torch._dynamo.utils.assert_no_fake_params_or_buffers(mod)

    def functional_call(named_params, named_buffers, *args, **kwargs):
        # Re-attach params/buffers so ``mod`` can be traced as a pure function
        # of (params, buffers, *inputs).
        params_and_buffers = {**named_params, **named_buffers}
        return torch.func.functional_call(mod, params_and_buffers, args, kwargs)

    # NOTE(review): params/buffers are snapshotted into these dicts once, here.
    # Parameters added to or re-assigned on ``mod`` after this call are not
    # picked up by the compiled wrapper — confirm this is the intended contract.
    named_params = dict(mod.named_parameters(remove_duplicate=False))
    named_buffers = dict(mod.named_buffers(remove_duplicate=False))
    num_params_buffers = len(named_params) + len(named_buffers)
    compiled_f = aot_function(
        functional_call, *args, num_params_buffers=num_params_buffers, **kwargs
    )

    class AOTModule(nn.Module):
        def __init__(self) -> None:
            super().__init__()
            # Keep a handle to the original module so its state stays reachable
            # from the wrapper (e.g. for inspection by callers).
            self.orig_module = mod

        def forward(self, *args, **kwargs):
            # The captured param/buffer dicts are passed as the leading inputs
            # that ``functional_call`` expects.
            return compiled_f(
                named_params,
                named_buffers,
                *args,
                **kwargs,
            )

    return AOTModule()
|
Traces the forward and backward graph of :attr:`mod` using torch dispatch
tracing mechanism. It is wrapper function, that underneath uses
:func:`aot_function` to perform tracing and compilation.
:func:`aot_module` lifts the parameters and buffers of ``nn.Module`` as inputs
to a new callable which is then compiled through :func:`aot_function`.
.. warning::
This API is experimental and likely to change.
Args:
mod (Callable): A ``nn.Module`` module.
args : args to be passed to :func:`aot_function`
kwargs : kwargs to be passed to :func:`aot_function`
Returns:
Returns a ``nn.Module`` that retains the eager behavior of the original
:attr:`mod`, but with forward and backward graph compiled.
|
python
|
torch/_functorch/aot_autograd.py
| 844
|
[
"mod"
] |
nn.Module
| true
| 1
| 6.56
|
pytorch/pytorch
| 96,034
|
google
| false
|
findPackageJSON
|
/**
 * Finds the path of the package.json governing `specifier`, resolved relative
 * to `base`.
 * @param {string | URL} specifier The location for which to get the "root" package.json
 * @param {string | URL} [base] The location of the current module (ex file://tmp/foo.js)
 * @returns {string | undefined} Path to the relevant package.json, if one exists
 */
function findPackageJSON(specifier, base = 'data:') {
  if (arguments.length === 0) {
    throw new ERR_MISSING_ARGS('specifier');
  }
  // Coerce to a primitive string; for values whose coercion itself throws
  // (e.g. a Symbol), delegate to validateString so the error is consistent.
  try {
    specifier = `${specifier}`;
  } catch {
    validateString(specifier, 'specifier');
  }
  let parentURL = base;
  if (!isURL(base)) {
    validateString(base, 'base');
    // Absolute filesystem paths become file:// URLs; anything else must
    // already be a parsable URL string.
    parentURL = path.isAbsolute(base) ? pathToFileURL(base) : new URL(base);
  }
  if (specifier && specifier[0] !== '.' && specifier[0] !== '/' && !URLCanParse(specifier)) {
    // If `specifier` is a bare specifier.
    const { packageJSONPath } = getPackageJSONURL(specifier, parentURL);
    return packageJSONPath;
  }
  let resolvedTarget;
  // Lazily load the resolver to avoid a startup-time dependency.
  defaultResolve ??= require('internal/modules/esm/resolve').defaultResolve;
  try {
    // TODO(@JakobJingleheimer): Detect whether findPackageJSON is being used within a loader
    // (possibly piggyback on `isForAsyncLoaderHookWorker` from the loader?) and if so:
    // - When inside, use the default resolve
    // - (I think it's impossible to use the chain because of re-entry & a deadlock from atomics).
    // - When outside, use cascadedLoader.resolveSync (not implemented yet, but the pieces exist).
    resolvedTarget = defaultResolve(specifier, { parentURL: `${parentURL}` }).url;
  } catch (err) {
    // Directory imports are unsupported by the resolver, but the URL it
    // reports still identifies where to search for the nearest package.json.
    if (err.code === 'ERR_UNSUPPORTED_DIR_IMPORT') {
      resolvedTarget = err.url;
    } else {
      throw err;
    }
  }
  const pkg = getNearestParentPackageJSON(fileURLToPath(resolvedTarget));
  return pkg?.path;
}
|
@param {string | URL} specifier The location for which to get the "root" package.json
@param {string | URL} [base] The location of the current module (ex file://tmp/foo.js).
@returns {string}
|
javascript
|
lib/internal/modules/package_json_reader.js
| 326
|
[
"specifier"
] | false
| 12
| 6.4
|
nodejs/node
| 114,839
|
jsdoc
| false
|
|
case_when
|
def case_when(
    self,
    caselist: list[
        tuple[
            ArrayLike | Callable[[Series], Series | np.ndarray | Sequence[bool]],
            ArrayLike | Scalar | Callable[[Series], Series | np.ndarray],
        ],
    ],
) -> Series:
    """
    Replace values where the conditions are True.

    .. versionadded:: 2.2.0

    Parameters
    ----------
    caselist : A list of tuples of conditions and expected replacements
        Takes the form: ``(condition0, replacement0)``,
        ``(condition1, replacement1)``, ... .
        ``condition`` should be a 1-D boolean array-like object
        or a callable. If ``condition`` is a callable,
        it is computed on the Series
        and should return a boolean Series or array.
        The callable must not change the input Series
        (though pandas doesn't check it). ``replacement`` should be a
        1-D array-like object, a scalar or a callable.
        If ``replacement`` is a callable, it is computed on the Series
        and should return a scalar or Series. The callable
        must not change the input Series
        (though pandas doesn't check it).

    Returns
    -------
    Series
        A new Series with values replaced based on the provided conditions.

    See Also
    --------
    Series.mask : Replace values where the condition is True.

    Examples
    --------
    >>> c = pd.Series([6, 7, 8, 9], name="c")
    >>> a = pd.Series([0, 0, 1, 2])
    >>> b = pd.Series([0, 3, 4, 5])
    >>> c.case_when(
    ...     caselist=[
    ...         (a.gt(0), a),  # condition, replacement
    ...         (b.gt(0), b),
    ...     ]
    ... )
    0    6
    1    3
    2    1
    3    2
    Name: c, dtype: int64
    """
    # Validate the container and every entry before evaluating anything,
    # so errors point at the offending argument rather than a failed mask.
    if not isinstance(caselist, list):
        raise TypeError(
            f"The caselist argument should be a list; instead got {type(caselist)}"
        )

    if not caselist:
        raise ValueError(
            "provide at least one boolean condition, "
            "with a corresponding replacement."
        )

    for num, entry in enumerate(caselist):
        if not isinstance(entry, tuple):
            raise TypeError(
                f"Argument {num} must be a tuple; instead got {type(entry)}."
            )
        if len(entry) != 2:
            raise ValueError(
                f"Argument {num} must have length 2; "
                "a condition and replacement; "
                f"instead got length {len(entry)}."
            )
    # Evaluate callable conditions/replacements against this Series.
    caselist = [
        (
            com.apply_if_callable(condition, self),
            com.apply_if_callable(replacement, self),
        )
        for condition, replacement in caselist
    ]
    # Positions not matched by any condition keep their original value.
    default = self.copy(deep=False)
    conditions, replacements = zip(*caselist, strict=True)
    common_dtypes = [infer_dtype_from(arg)[0] for arg in [*replacements, default]]
    if len(set(common_dtypes)) > 1:
        # Coerce all replacements (and the default) to a single common dtype
        # up front, so the per-step masking below cannot silently upcast.
        common_dtype = find_common_type(common_dtypes)
        updated_replacements = []
        for condition, replacement in zip(conditions, replacements, strict=True):
            if is_scalar(replacement):
                replacement = construct_1d_arraylike_from_scalar(
                    value=replacement, length=len(condition), dtype=common_dtype
                )
            elif isinstance(replacement, ABCSeries):
                replacement = replacement.astype(common_dtype)
            else:
                replacement = pd_array(replacement, dtype=common_dtype)
            updated_replacements.append(replacement)
        replacements = updated_replacements
        default = default.astype(common_dtype)
    # Apply cases from last to first so that earlier conditions take
    # precedence when multiple conditions match the same position.
    counter = range(len(conditions) - 1, -1, -1)
    for position, condition, replacement in zip(
        counter, reversed(conditions), reversed(replacements), strict=True
    ):
        try:
            default = default.mask(
                condition, other=replacement, axis=0, inplace=False, level=None
            )
        except Exception as error:
            raise ValueError(
                f"Failed to apply condition{position} and replacement{position}."
            ) from error
    return default
|
Replace values where the conditions are True.
.. versionadded:: 2.2.0
Parameters
----------
caselist : A list of tuples of conditions and expected replacements
Takes the form: ``(condition0, replacement0)``,
``(condition1, replacement1)``, ... .
``condition`` should be a 1-D boolean array-like object
or a callable. If ``condition`` is a callable,
it is computed on the Series
and should return a boolean Series or array.
The callable must not change the input Series
(though pandas doesn't check it). ``replacement`` should be a
1-D array-like object, a scalar or a callable.
If ``replacement`` is a callable, it is computed on the Series
and should return a scalar or Series. The callable
must not change the input Series
(though pandas doesn't check it).
Returns
-------
Series
A new Series with values replaced based on the provided conditions.
See Also
--------
Series.mask : Replace values where the condition is True.
Examples
--------
>>> c = pd.Series([6, 7, 8, 9], name="c")
>>> a = pd.Series([0, 0, 1, 2])
>>> b = pd.Series([0, 3, 4, 5])
>>> c.case_when(
... caselist=[
... (a.gt(0), a), # condition, replacement
... (b.gt(0), b),
... ]
... )
0 6
1 3
2 1
3 2
Name: c, dtype: int64
|
python
|
pandas/core/series.py
| 6,053
|
[
"self",
"caselist"
] |
Series
| true
| 12
| 8.24
|
pandas-dev/pandas
| 47,362
|
numpy
| false
|
toStringTrueFalse
|
/**
 * Converts a boolean to a String by delegating to the shared
 * {@code toString(boolean, String, String)} helper with the {@code TRUE}
 * and {@code FALSE} constants.
 *
 * <pre>
 *   BooleanUtils.toStringTrueFalse(true)  = "true"
 *   BooleanUtils.toStringTrueFalse(false) = "false"
 * </pre>
 *
 * @param bool the boolean to convert
 * @return {@code "true"} or {@code "false"}
 */
public static String toStringTrueFalse(final boolean bool) {
    return toString(bool, TRUE, FALSE);
}
|
Converts a boolean to a String returning {@code 'true'}
or {@code 'false'}.
<pre>
BooleanUtils.toStringTrueFalse(true) = "true"
BooleanUtils.toStringTrueFalse(false) = "false"
</pre>
@param bool the Boolean to check
@return {@code 'true'}, {@code 'false'}, or {@code null}
|
java
|
src/main/java/org/apache/commons/lang3/BooleanUtils.java
| 1,089
|
[
"bool"
] |
String
| true
| 1
| 6.48
|
apache/commons-lang
| 2,896
|
javadoc
| false
|
enqueueConsumerRebalanceListenerCallback
|
/**
 * Enqueue a {@link ConsumerRebalanceListenerCallbackNeededEvent} to trigger execution
 * of the given {@link ConsumerRebalanceListener} method on the application thread.
 *
 * @param methodName callback method that needs to be executed on the application thread
 * @param partitions partitions to supply to the callback method
 * @return the event's future, used to continue once the callback has completed
 */
private CompletableFuture<Void> enqueueConsumerRebalanceListenerCallback(ConsumerRebalanceListenerMethodName methodName,
                                                                         Set<TopicPartition> partitions) {
    // Sort the partitions so the listener observes them in a deterministic order.
    SortedSet<TopicPartition> sortedPartitions = new TreeSet<>(TOPIC_PARTITION_COMPARATOR);
    sortedPartitions.addAll(partitions);
    CompletableBackgroundEvent<Void> event = new ConsumerRebalanceListenerCallbackNeededEvent(methodName, sortedPartitions);
    backgroundEventHandler.add(event);
    log.debug("The event to trigger the {} method execution was enqueued successfully", methodName.fullyQualifiedMethodName());
    return event.future();
}
|
Enqueue a {@link ConsumerRebalanceListenerCallbackNeededEvent} to trigger the execution of the
appropriate {@link ConsumerRebalanceListener} {@link ConsumerRebalanceListenerMethodName method} on the
application thread.
<p/>
Because the reconciliation process (run in the background thread) will be blocked by the application thread
until it completes this, we need to provide a {@link CompletableFuture} by which to remember where we left off.
@param methodName Callback method that needs to be executed on the application thread
@param partitions Partitions to supply to the callback method
@return Future that will be chained within the rest of the reconciliation logic
|
java
|
clients/src/main/java/org/apache/kafka/clients/consumer/internals/ConsumerMembershipManager.java
| 456
|
[
"methodName",
"partitions"
] | true
| 1
| 6.08
|
apache/kafka
| 31,560
|
javadoc
| false
|
|
listConfigResources
|
/**
 * Lists the configuration resources of the requested types. Sends a
 * {@code ListConfigResourcesRequest} to the least-loaded node and completes the
 * returned result's future with the discovered resources, or exceptionally if
 * the response carries an error or the call fails.
 *
 * @param configResourceTypes the types of config resources to request
 * @param options the request options (supplies the timeout)
 * @return a result wrapping a future of the cluster's config resources
 */
@Override
public ListConfigResourcesResult listConfigResources(Set<ConfigResource.Type> configResourceTypes, ListConfigResourcesOptions options) {
    final long now = time.milliseconds();
    final KafkaFutureImpl<Collection<ConfigResource>> future = new KafkaFutureImpl<>();
    final Call call = new Call("listConfigResources", calcDeadlineMs(now, options.timeoutMs()), new LeastLoadedNodeProvider()) {
        @Override
        ListConfigResourcesRequest.Builder createRequest(int timeoutMs) {
            // Translate the requested resource types into their wire-protocol ids.
            return new ListConfigResourcesRequest.Builder(
                new ListConfigResourcesRequestData()
                    .setResourceTypes(
                        configResourceTypes
                            .stream()
                            .map(ConfigResource.Type::id)
                            .collect(Collectors.toList())
                    )
            );
        }
        @Override
        void handleResponse(AbstractResponse abstractResponse) {
            ListConfigResourcesResponse response = (ListConfigResourcesResponse) abstractResponse;
            if (response.error().isFailure()) {
                future.completeExceptionally(response.error().exception());
            } else {
                future.complete(response.configResources());
            }
        }
        @Override
        void handleFailure(Throwable throwable) {
            future.completeExceptionally(throwable);
        }
    };
    runnable.call(call, now);
    return new ListConfigResourcesResult(future);
}
|
Lists the configuration resources of the requested types available on the cluster.
<p>
Sends a {@code ListConfigResourcesRequest} to the least-loaded node and completes the
returned result's future with the discovered resources, or exceptionally on error.
</p>
@param configResourceTypes The types of configuration resources to list.
@param options The options to use when listing configuration resources.
@return a {@link ListConfigResourcesResult} whose future yields the config resources.
|
java
|
clients/src/main/java/org/apache/kafka/clients/admin/KafkaAdminClient.java
| 4,883
|
[
"configResourceTypes",
"options"
] |
ListConfigResourcesResult
| true
| 2
| 7.44
|
apache/kafka
| 31,560
|
javadoc
| false
|
stopTrace
|
/**
 * Stop the span registered for the given {@link Traceable}, if one is open:
 * remove its context from the tracking map and end the span.
 */
@Override
public void stopTrace(Traceable traceable) {
    final String spanId = traceable.getSpanId();
    final var context = spans.remove(spanId);
    final var span = Span.fromContextOrNull(context);
    if (span == null) {
        return;
    }
    logger.trace("Finishing trace [{}]", spanId);
    span.end();
}
|
Most of the examples of how to use the OTel API look something like this, where the span context
is automatically propagated:
<pre>{@code
Span span = tracer.spanBuilder("parent").startSpan();
try (Scope scope = parentSpan.makeCurrent()) {
// ...do some stuff, possibly creating further spans
} finally {
span.end();
}
}</pre>
This typically isn't useful in Elasticsearch, because a {@link Scope} can't be used across threads.
However, if a scope is active, then the APM agent can capture additional information, so this method
exists to make it possible to use scopes in the few situation where it makes sense.
@param traceable provides the ID of a currently-open span for which to open a scope.
@return a method to close the scope when you are finished with it.
|
java
|
modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/tracing/APMTracer.java
| 388
|
[
"traceable"
] |
void
| true
| 2
| 7.92
|
elastic/elasticsearch
| 75,680
|
javadoc
| false
|
toString
|
/**
 * Gets the default formatted representation of the specified
 * {@link Formattable}, by formatting it with the simplest format string
 * ({@code SIMPLEST_FORMAT}).
 *
 * @param formattable the instance to convert to a string, not null
 * @return the resulting string, not null
 */
public static String toString(final Formattable formattable) {
    return String.format(SIMPLEST_FORMAT, formattable);
}
|
Gets the default formatted representation of the specified
{@link Formattable}.
@param formattable the instance to convert to a string, not null.
@return the resulting string, not null.
|
java
|
src/main/java/org/apache/commons/lang3/text/FormattableUtils.java
| 136
|
[
"formattable"
] |
String
| true
| 1
| 6.96
|
apache/commons-lang
| 2,896
|
javadoc
| false
|
obtrudeException
|
/**
 * Not supported: always throws, so callers cannot forcibly overwrite this
 * future's outcome via {@code obtrudeException}.
 *
 * @param ex the exception (ignored; the call always fails)
 */
@Override
public void obtrudeException(Throwable ex) {
    throw erroneousCompletionException();
}
|
Not supported: always throws, preventing user code from forcibly overwriting
this future's outcome. For internal use by the Kafka clients, not by user code.
@param ex the exception (ignored; the call always fails)
|
java
|
clients/src/main/java/org/apache/kafka/common/internals/KafkaCompletableFuture.java
| 67
|
[
"ex"
] |
void
| true
| 1
| 6.64
|
apache/kafka
| 31,560
|
javadoc
| false
|
of
|
/**
 * Create an {@code ErrorAttributeOptions} that includes the specified attribute
 * {@link Include} options, delegating to the collection-based factory.
 *
 * @param includes error attributes to include
 * @return the resulting {@code ErrorAttributeOptions}
 */
public static ErrorAttributeOptions of(Include... includes) {
    return of(Arrays.asList(includes));
}
|
Create an {@code ErrorAttributeOptions} that includes the specified attribute
{@link Include} options.
@param includes error attributes to include
@return an {@code ErrorAttributeOptions}
|
java
|
core/spring-boot/src/main/java/org/springframework/boot/web/error/ErrorAttributeOptions.java
| 116
|
[] |
ErrorAttributeOptions
| true
| 1
| 6
|
spring-projects/spring-boot
| 79,428
|
javadoc
| false
|
resolveEmbeddedValue
|
/**
 * Resolve the given embedded value, for example, an annotation attribute.
 *
 * @param value the value to resolve
 * @return the resolved value (may be the original value as-is, or {@code null})
 */
@Nullable String resolveEmbeddedValue(String value);
|
Resolve the given embedded value, for example, an annotation attribute.
@param value the value to resolve
@return the resolved value (may be the original value as-is)
@since 3.0
|
java
|
spring-beans/src/main/java/org/springframework/beans/factory/config/ConfigurableBeanFactory.java
| 249
|
[
"value"
] |
String
| true
| 1
| 6.64
|
spring-projects/spring-framework
| 59,386
|
javadoc
| false
|
packAsBinaryImpl
|
/**
 * Packs a vector of 0/1 ints into {@code packed}, eight elements per byte,
 * with the first element of each group of eight stored in the most
 * significant bit (see the {@code vector[i] << 7} below).
 * The unrolled main loop handles full groups of eight; the tail loop packs
 * any remaining elements into the high bits of one final byte.
 *
 * @param packed the output byte array; must hold at least
 *               {@code ceil(vector.length / 8.0)} bytes
 * @param vector the input vector; every element must be 0 or 1 (asserted)
 */
public static void packAsBinaryImpl(int[] vector, byte[] packed) {
    int limit = vector.length - 7;
    int i = 0;
    int index = 0;
    // Full groups of eight elements per output byte, MSB first.
    for (; i < limit; i += 8, index++) {
        assert vector[i] == 0 || vector[i] == 1;
        assert vector[i + 1] == 0 || vector[i + 1] == 1;
        assert vector[i + 2] == 0 || vector[i + 2] == 1;
        assert vector[i + 3] == 0 || vector[i + 3] == 1;
        assert vector[i + 4] == 0 || vector[i + 4] == 1;
        assert vector[i + 5] == 0 || vector[i + 5] == 1;
        assert vector[i + 6] == 0 || vector[i + 6] == 1;
        assert vector[i + 7] == 0 || vector[i + 7] == 1;
        int result = vector[i] << 7 | (vector[i + 1] << 6) | (vector[i + 2] << 5) | (vector[i + 3] << 4) | (vector[i + 4] << 3)
            | (vector[i + 5] << 2) | (vector[i + 6] << 1) | (vector[i + 7]);
        packed[index] = (byte) result;
    }
    if (i == vector.length) {
        return;
    }
    // Tail: fewer than eight elements remain; fill from the high bit down.
    byte result = 0;
    for (int j = 7; j >= 0 && i < vector.length; i++, j--) {
        assert vector[i] == 0 || vector[i] == 1;
        result |= (byte) ((vector[i] & 1) << j);
    }
    packed[index] = result;
}
|
Packs a bit vector (values 0-1) into a byte array, eight elements per byte,
with the first element of each group stored in the most significant bit.
@param vector the input vector with values 0-1
@param packed the output packed byte array
|
java
|
libs/simdvec/src/main/java/org/elasticsearch/simdvec/internal/vectorization/DefaultESVectorUtilSupport.java
| 376
|
[
"vector",
"packed"
] |
void
| true
| 14
| 6.72
|
elastic/elasticsearch
| 75,680
|
javadoc
| false
|
advance
|
/**
 * Closes the current input stream and, if the underlying iterator has
 * another source, opens the next one as the current stream.
 *
 * @throws IOException if closing the current stream or opening the next fails
 */
private void advance() throws IOException {
    close();
    if (it.hasNext()) {
        in = it.next().openStream();
    }
}
|
Closes the current input stream and opens the next one, if any.
|
java
|
android/guava/src/com/google/common/io/MultiInputStream.java
| 62
|
[] |
void
| true
| 2
| 6.8
|
google/guava
| 51,352
|
javadoc
| false
|
sample
|
/**
 * Gets a random element from `collection`, dispatching to the array-specific
 * sampler for arrays and the generic sampler otherwise.
 *
 * @param {Array|Object} collection The collection to sample.
 * @returns {*} Returns the random element.
 */
function sample(collection) {
  if (isArray(collection)) {
    return arraySample(collection);
  }
  return baseSample(collection);
}
|
Gets a random element from `collection`.
@static
@memberOf _
@since 2.0.0
@category Collection
@param {Array|Object} collection The collection to sample.
@returns {*} Returns the random element.
@example
_.sample([1, 2, 3, 4]);
// => 2
|
javascript
|
lodash.js
| 9,873
|
[
"collection"
] | false
| 2
| 7.28
|
lodash/lodash
| 61,490
|
jsdoc
| false
|
|
_num2excel
|
def _num2excel(self, index: int) -> str:
"""
Convert 0-based column index to Excel column name.
Parameters
----------
index : int
The numeric column index to convert to a Excel column name.
Returns
-------
column_name : str
The column name corresponding to the index.
Raises
------
ValueError
Index is negative
"""
if index < 0:
raise ValueError(f"Index cannot be negative: {index}")
column_name = ""
# while loop in case column name needs to be longer than 1 character
while index > 0 or not column_name:
index, remainder = divmod(index, 26)
column_name = chr(65 + remainder) + column_name
return column_name
|
Convert 0-based column index to Excel column name.
Parameters
----------
index : int
The numeric column index to convert to a Excel column name.
Returns
-------
column_name : str
The column name corresponding to the index.
Raises
------
ValueError
Index is negative
|
python
|
pandas/io/formats/excel.py
| 880
|
[
"self",
"index"
] |
str
| true
| 4
| 6.88
|
pandas-dev/pandas
| 47,362
|
numpy
| false
|
autograd_cache_key
|
def autograd_cache_key(
    gm: torch.fx.GraphModule,
    example_inputs,
    config: AOTConfig,
    fx_config: _CompileFxKwargs,
    # TODO: add args and parameters
) -> tuple[str, list[str]]:
    """
    Generate a unique hash of the FX graph for caching.

    Returns the cache key (prefixed with "a" to namespace it among other
    kinds of cached objects) together with the debug lines that went into
    the hash. If key generation fails and the fallback config flag is set,
    a random nonce key and empty debug lines are returned instead.
    """
    try:
        check_cacheable(gm)
        if has_triton_package():
            # Due to https://github.com/triton-lang/triton/issues/3729,
            # if triton is < 3.2.0, AOTAutogradCache may cause us to
            # attempt to load a cache entry without initializing
            # the CUDA context on the autograd thread.
            # Without caching, we naturally do this initialization when
            # tracing through the graph with the autograd engine.
            import triton

            # NOTE(review): this is a lexicographic string comparison, not a
            # semantic version compare (e.g. "10.0.0" < "3.2.0") — confirm
            # that is acceptable for the triton versions in the wild.
            if triton.__version__ < "3.2.0":
                raise BypassAOTAutogradCache("AOTAutogradCache requires triton 3.2.0")
        details = AOTAutogradCacheDetails(gm, example_inputs, config, fx_config)
        pickler = AOTAutogradCachePickler(gm)
        # The prefix distinguishes among the other kinds of objects we cache
        key = "a" + pickler.get_hash(details)
        debug_lines = pickler.debug_lines(details)
        log.debug(
            "Autograd graph cache hash details for key %s:\n%s",
            key,
            LazyString(lambda: "\n".join(debug_lines)),
        )
        return key, debug_lines
    except Exception:
        # If enable_aot_compile is set, we're in AOT precompile mode where we always
        # want to use fallback nonce keys. Unlike caching, it's fine if we can't generate
        # a proper key because we are guaranteed in an AOT precompile world users are in
        # complete control of distributing and loading artifacts.
        if torch._functorch.config.bypass_autograd_cache_key:
            log.info(
                "Failed to generate AOTAutograd cache key; falling back to nonce due to enable_aot_compile",
                exc_info=True,
            )
            return str(random.random()), []
        else:
            raise
|
Generate a unique hash of the FX graph for caching.
|
python
|
torch/_functorch/_aot_autograd/autograd_cache.py
| 519
|
[
"gm",
"example_inputs",
"config",
"fx_config"
] |
tuple[str, list[str]]
| true
| 5
| 6
|
pytorch/pytorch
| 96,034
|
unknown
| false
|
visitorNoAsyncModifier
|
/**
 * Visitor that removes the `async` modifier: yields `undefined` for an
 * `AsyncKeyword` node and returns every other node unchanged.
 */
function visitorNoAsyncModifier(node: Node): VisitResult<Node | undefined> {
    return node.kind === SyntaxKind.AsyncKeyword ? undefined : node;
}
|
Visitor that removes the `async` modifier from a node's modifier list,
returning `undefined` for `AsyncKeyword` nodes and the node itself otherwise.
@param node The modifier node to visit.
|
typescript
|
src/compiler/transformers/es2018.ts
| 245
|
[
"node"
] | true
| 2
| 6.08
|
microsoft/TypeScript
| 107,154
|
jsdoc
| false
|
|
doc
|
def doc(*docstrings: None | str | Callable, **params: object) -> Callable[[F], F]:
    """
    A decorator that concatenates docstring templates and performs string
    substitution on them.

    The wrapped callable also gains a ``_docstring_components`` attribute
    holding the original (unformatted) templates, so that decorated callables
    can themselves be passed as templates to later ``doc`` calls. Templates
    kept as strings are formatted with ``params``; callable components
    contribute their dedented ``__doc__`` instead.

    Parameters
    ----------
    *docstrings : None, str, or callable
        The string / docstring / docstring template to be appended in order
        after default docstring under callable.
    **params
        The string which would be used to format docstring template.
    """

    def decorator(decorated: F) -> F:
        # Gather raw pieces: the target's own (dedented) docstring first,
        # then every supplied template in order.
        components: list[str | Callable] = []
        if decorated.__doc__:
            components.append(dedent(decorated.__doc__))
        for piece in docstrings:
            if piece is None:
                continue
            if hasattr(piece, "_docstring_components"):
                # A previously-decorated callable: reuse its raw templates.
                components.extend(
                    piece._docstring_components  # pyright: ignore[reportAttributeAccessIssue]
                )
            elif isinstance(piece, str) or piece.__doc__:
                components.append(piece)

        # Substitute params into string templates only; callables pass through.
        rendered: list[str | Callable] = []
        for component in components:
            if isinstance(component, str) and len(params) > 0:
                rendered.append(component.format(**params))
            else:
                rendered.append(component)

        # Assemble the final docstring; callables contribute their dedented
        # __doc__ (empty string when missing).
        texts: list[str] = []
        for component in rendered:
            if isinstance(component, str):
                texts.append(component)
            else:
                texts.append(dedent(component.__doc__ or ""))
        decorated.__doc__ = "".join(texts)

        # error: "F" has no attribute "_docstring_components"
        decorated._docstring_components = (  # type: ignore[attr-defined]
            components
        )
        return decorated

    return decorator
|
A decorator to take docstring templates, concatenate them and perform string
substitution on them.
This decorator will add a variable "_docstring_components" to the wrapped
callable to keep track the original docstring template for potential usage.
If it should be consider as a template, it will be saved as a string.
Otherwise, it will be saved as callable, and later user __doc__ and dedent
to get docstring.
Parameters
----------
*docstrings : None, str, or callable
The string / docstring / docstring template to be appended in order
after default docstring under callable.
**params
The string which would be used to format docstring template.
|
python
|
pandas/util/_decorators.py
| 346
|
[] |
Callable[[F], F]
| true
| 11
| 6.8
|
pandas-dev/pandas
| 47,362
|
numpy
| false
|
substituteExpressionIdentifier
|
/**
 * Substitutes an expression identifier: returns the namespace-exported name
 * when one applies, otherwise the identifier itself.
 * @param node The identifier to substitute.
 */
function substituteExpressionIdentifier(node: Identifier): Expression {
    const substituted = trySubstituteNamespaceExportedName(node);
    if (substituted) {
        return substituted;
    }
    return node;
}
|
Substitutes an expression identifier, replacing it with a namespace-exported
name when one applies, or returning the node unchanged otherwise.
@param node The identifier to substitute.
|
typescript
|
src/compiler/transformers/ts.ts
| 2,679
|
[
"node"
] | true
| 2
| 6.64
|
microsoft/TypeScript
| 107,154
|
jsdoc
| false
|
|
offsetOfMaxTimestamp
|
/**
 * Iterates all records to find the offset of the record with the max timestamp.
 * Notes:
 * 1) the earliest offset is returned if multiple records share the (max) timestamp
 * 2) always returns empty if {@link RecordBatch#magic()} equals
 *    {@link RecordBatch#MAGIC_VALUE_V0}
 *
 * @return offset of the record with the max timestamp, if any
 */
default Optional<Long> offsetOfMaxTimestamp() {
    if (magic() == RecordBatch.MAGIC_VALUE_V0) return Optional.empty();
    long maxTimestamp = maxTimestamp();
    // Streaming iterator avoids materializing the whole batch at once; the
    // try-with-resources ensures it is closed even on early return.
    try (CloseableIterator<Record> iter = streamingIterator(BufferSupplier.create())) {
        while (iter.hasNext()) {
            Record record = iter.next();
            // First match wins: earliest offset among max-timestamp records.
            if (maxTimestamp == record.timestamp()) return Optional.of(record.offset());
        }
    }
    return Optional.empty();
}
|
iterate all records to find the offset of max timestamp.
noted:
1) that the earliest offset will return if there are multi records having same (max) timestamp
2) it always returns None if the {@link RecordBatch#magic()} is equal to {@link RecordBatch#MAGIC_VALUE_V0}
@return offset of max timestamp
|
java
|
clients/src/main/java/org/apache/kafka/common/record/RecordBatch.java
| 257
|
[] | true
| 4
| 7.76
|
apache/kafka
| 31,560
|
javadoc
| false
|
|
get_default_engine
|
def get_default_engine(ext: str, mode: Literal["reader", "writer"] = "reader") -> str:
"""
Return the default reader/writer for the given extension.
Parameters
----------
ext : str
The excel file extension for which to get the default engine.
mode : str {'reader', 'writer'}
Whether to get the default engine for reading or writing.
Either 'reader' or 'writer'
Returns
-------
str
The default engine for the extension.
"""
_default_readers = {
"xlsx": "openpyxl",
"xlsm": "openpyxl",
"xlsb": "pyxlsb",
"xls": "xlrd",
"ods": "odf",
}
_default_writers = {
"xlsx": "openpyxl",
"xlsm": "openpyxl",
"xlsb": "pyxlsb",
"ods": "odf",
}
assert mode in ["reader", "writer"]
if mode == "writer":
# Prefer xlsxwriter over openpyxl if installed
xlsxwriter = import_optional_dependency("xlsxwriter", errors="warn")
if xlsxwriter:
_default_writers["xlsx"] = "xlsxwriter"
return _default_writers[ext]
else:
return _default_readers[ext]
|
Return the default reader/writer for the given extension.
Parameters
----------
ext : str
The excel file extension for which to get the default engine.
mode : str {'reader', 'writer'}
Whether to get the default engine for reading or writing.
Either 'reader' or 'writer'
Returns
-------
str
The default engine for the extension.
|
python
|
pandas/io/excel/_util.py
| 50
|
[
"ext",
"mode"
] |
str
| true
| 4
| 6.88
|
pandas-dev/pandas
| 47,362
|
numpy
| false
|
sanitize_for_serialization
|
def sanitize_for_serialization(obj: V1Pod):
"""
Convert pod to dict.... but *safely*.
When pod objects created with one k8s version are unpickled in a python
env with a more recent k8s version (in which the object attrs may have
changed) the unpickled obj may throw an error because the attr
expected on new obj may not be there on the unpickled obj.
This function still converts the pod to a dict; the only difference is
it populates missing attrs with None. You may compare with
https://github.com/kubernetes-client/python/blob/5a96bbcbe21a552cc1f9cda13e0522fafb0dbac8/kubernetes/client/api_client.py#L202
If obj is None, return None.
If obj is str, int, long, float, bool, return directly.
If obj is datetime.datetime, datetime.date
convert to string in iso8601 format.
If obj is list, sanitize each element in the list.
If obj is dict, return the dict.
If obj is OpenAPI model, return the properties dict.
:param obj: The data to serialize.
:return: The serialized form of data.
:meta private:
"""
if obj is None:
return None
if isinstance(obj, (float, bool, bytes, str, int)):
return obj
if isinstance(obj, list):
return [sanitize_for_serialization(sub_obj) for sub_obj in obj]
if isinstance(obj, tuple):
return tuple(sanitize_for_serialization(sub_obj) for sub_obj in obj)
if isinstance(obj, (datetime.datetime, datetime.date)):
return obj.isoformat()
if isinstance(obj, dict):
obj_dict = obj
else:
obj_dict = {
obj.attribute_map[attr]: getattr(obj, attr)
for attr, _ in obj.openapi_types.items()
# below is the only line we change, and we just add default=None for getattr
if getattr(obj, attr, None) is not None
}
return {key: sanitize_for_serialization(val) for key, val in obj_dict.items()}
|
Convert pod to dict.... but *safely*.
When pod objects created with one k8s version are unpickled in a python
env with a more recent k8s version (in which the object attrs may have
changed) the unpickled obj may throw an error because the attr
expected on new obj may not be there on the unpickled obj.
This function still converts the pod to a dict; the only difference is
it populates missing attrs with None. You may compare with
https://github.com/kubernetes-client/python/blob/5a96bbcbe21a552cc1f9cda13e0522fafb0dbac8/kubernetes/client/api_client.py#L202
If obj is None, return None.
If obj is str, int, long, float, bool, return directly.
If obj is datetime.datetime, datetime.date
convert to string in iso8601 format.
If obj is list, sanitize each element in the list.
If obj is dict, return the dict.
If obj is OpenAPI model, return the properties dict.
:param obj: The data to serialize.
:return: The serialized form of data.
:meta private:
|
python
|
airflow-core/src/airflow/utils/sqlalchemy.py
| 158
|
[
"obj"
] | true
| 8
| 8.24
|
apache/airflow
| 43,597
|
sphinx
| false
|
|
canFollowGetOrSetKeyword
|
function canFollowGetOrSetKeyword(): boolean {
return token() === SyntaxKind.OpenBracketToken
|| isLiteralPropertyName();
}
|
Reports a diagnostic error for the current token being an invalid name.
@param blankDiagnostic Diagnostic to report for the case of the name being blank (matched tokenIfBlankName).
@param nameDiagnostic Diagnostic to report for all other cases.
@param tokenIfBlankName Current token if the name was invalid for being blank (not provided / skipped).
|
typescript
|
src/compiler/parser.ts
| 2,819
|
[] | true
| 2
| 6.64
|
microsoft/TypeScript
| 107,154
|
jsdoc
| false
|
|
length
|
public final int length() {
return longs.length();
}
|
Returns the length of the array.
@return the length of the array
|
java
|
android/guava/src/com/google/common/util/concurrent/AtomicDoubleArray.java
| 90
|
[] | true
| 1
| 6.8
|
google/guava
| 51,352
|
javadoc
| false
|
|
diff
|
def diff(self, periods: int = 1) -> Index:
"""
Computes the difference between consecutive values in the Index object.
If periods is greater than 1, computes the difference between values that
are `periods` number of positions apart.
Parameters
----------
periods : int, optional
The number of positions between the current and previous
value to compute the difference with. Default is 1.
Returns
-------
Index
A new Index object with the computed differences.
Examples
--------
>>> import pandas as pd
>>> idx = pd.Index([10, 20, 30, 40, 50])
>>> idx.diff()
Index([nan, 10.0, 10.0, 10.0, 10.0], dtype='float64')
"""
return Index(self.to_series().diff(periods))
|
Computes the difference between consecutive values in the Index object.
If periods is greater than 1, computes the difference between values that
are `periods` number of positions apart.
Parameters
----------
periods : int, optional
The number of positions between the current and previous
value to compute the difference with. Default is 1.
Returns
-------
Index
A new Index object with the computed differences.
Examples
--------
>>> import pandas as pd
>>> idx = pd.Index([10, 20, 30, 40, 50])
>>> idx.diff()
Index([nan, 10.0, 10.0, 10.0, 10.0], dtype='float64')
|
python
|
pandas/core/indexes/base.py
| 7,230
|
[
"self",
"periods"
] |
Index
| true
| 1
| 7.12
|
pandas-dev/pandas
| 47,362
|
numpy
| false
|
waitFor
|
@SuppressWarnings("GoodTime") // should accept a java.time.Duration
public boolean waitFor(Guard guard, long time, TimeUnit unit) throws InterruptedException {
long timeoutNanos = toSafeNanos(time, unit);
if (!((guard.monitor == this) && lock.isHeldByCurrentThread())) {
throw new IllegalMonitorStateException();
}
if (guard.isSatisfied()) {
return true;
}
if (Thread.interrupted()) {
throw new InterruptedException();
}
return awaitNanos(guard, timeoutNanos, true);
}
|
Waits for the guard to be satisfied. Waits at most the given time, and may be interrupted. May
be called only by a thread currently occupying this monitor.
@return whether the guard is now satisfied
@throws InterruptedException if interrupted while waiting
|
java
|
android/guava/src/com/google/common/util/concurrent/Monitor.java
| 860
|
[
"guard",
"time",
"unit"
] | true
| 5
| 6.72
|
google/guava
| 51,352
|
javadoc
| false
|
|
equals
|
@Override
public boolean equals(Object o) {
if (o == null || getClass() != o.getClass()) return false;
ZeroBucket that = (ZeroBucket) o;
if (count() != that.count()) return false;
if (Double.compare(zeroThreshold(), that.zeroThreshold()) != 0) return false;
if (compareExponentiallyScaledValues(index(), scale(), that.index(), that.scale()) != 0) return false;
return true;
}
|
Collapses all buckets from the given iterator whose lower boundaries are smaller than the zero threshold.
The iterator is advanced to point at the first, non-collapsed bucket.
@param buckets The iterator whose buckets may be collapsed.
@return A potentially updated {@link ZeroBucket} with the collapsed buckets' counts and an adjusted threshold.
|
java
|
libs/exponential-histogram/src/main/java/org/elasticsearch/exponentialhistogram/ZeroBucket.java
| 269
|
[
"o"
] | true
| 6
| 7.92
|
elastic/elasticsearch
| 75,680
|
javadoc
| false
|
|
weakValues
|
@GwtIncompatible // java.lang.ref.WeakReference
@CanIgnoreReturnValue
public CacheBuilder<K, V> weakValues() {
return setValueStrength(Strength.WEAK);
}
|
Specifies that each value (not key) stored in the cache should be wrapped in a {@link
WeakReference} (by default, strong references are used).
<p>Weak values will be garbage collected once they are weakly reachable. This makes them a poor
candidate for caching; consider {@link #softValues} instead.
<p><b>Note:</b> when this method is used, the resulting cache will use identity ({@code ==})
comparison to determine equality of values.
<p>Entries with values that have been garbage collected may be counted in {@link Cache#size},
but will never be visible to read or write operations; such entries are cleaned up as part of
the routine maintenance described in the class javadoc.
@return this {@code CacheBuilder} instance (for chaining)
@throws IllegalStateException if the value strength was already set
|
java
|
android/guava/src/com/google/common/cache/CacheBuilder.java
| 660
|
[] | true
| 1
| 6.72
|
google/guava
| 51,352
|
javadoc
| false
|
|
load
|
List<Document> load(boolean expandLists) throws IOException {
List<Document> documents = new ArrayList<>();
Document document = new Document();
StringBuilder buffer = new StringBuilder();
try (CharacterReader reader = new CharacterReader(this.resource)) {
while (reader.read()) {
if (reader.isCommentPrefixCharacter()) {
char commentPrefixCharacter = reader.getCharacter();
if (isNewDocument(reader)) {
if (!document.isEmpty()) {
documents.add(document);
}
document = new Document();
}
else {
if (document.isEmpty() && !documents.isEmpty()) {
document = documents.remove(documents.size() - 1);
}
reader.setLastLineCommentPrefixCharacter(commentPrefixCharacter);
reader.skipComment();
}
}
else {
reader.setLastLineCommentPrefixCharacter(-1);
loadKeyAndValue(expandLists, document, reader, buffer);
}
}
}
if (!document.isEmpty() && !documents.contains(document)) {
documents.add(document);
}
return documents;
}
|
Load {@code .properties} data and return a map of {@code String} ->
{@link OriginTrackedValue}.
@param expandLists if list {@code name[]=a,b,c} shortcuts should be expanded
@return the loaded properties
@throws IOException on read error
|
java
|
core/spring-boot/src/main/java/org/springframework/boot/env/OriginTrackedPropertiesLoader.java
| 76
|
[
"expandLists"
] | true
| 9
| 7.44
|
spring-projects/spring-boot
| 79,428
|
javadoc
| false
|
|
substituteBinaryExpression
|
function substituteBinaryExpression(node: BinaryExpression): Expression {
// When we see an assignment expression whose left-hand side is an exported symbol,
// we should ensure all exports of that symbol are updated with the correct value.
//
// - We do not substitute generated identifiers unless they are file-level reserved names.
// - We do not substitute identifiers tagged with the LocalName flag.
// - We only substitute identifiers that are exported at the top level.
if (
isAssignmentOperator(node.operatorToken.kind)
&& isIdentifier(node.left)
&& (!isGeneratedIdentifier(node.left) || isFileLevelReservedGeneratedIdentifier(node.left))
&& !isLocalName(node.left)
) {
const exportedNames = getExports(node.left);
if (exportedNames) {
// For each additional export of the declaration, apply an export assignment.
let expression: Expression = node;
for (const exportName of exportedNames) {
expression = createExportExpression(exportName, preventSubstitution(expression));
}
return expression;
}
}
return node;
}
|
Substitution for a BinaryExpression that may contain an imported or exported symbol.
@param node The node to substitute.
|
typescript
|
src/compiler/transformers/module/system.ts
| 1,946
|
[
"node"
] | true
| 7
| 6.56
|
microsoft/TypeScript
| 107,154
|
jsdoc
| false
|
|
stripFloatLiteralFraction
|
std::optional<std::string>
stripFloatLiteralFraction(const MatchFinder::MatchResult &Result,
const Expr &Node) {
if (const auto *LitFloat = llvm::dyn_cast<FloatingLiteral>(&Node))
// Attempt to simplify a `Duration` factory call with a literal argument.
if (std::optional<llvm::APSInt> IntValue = truncateIfIntegral(*LitFloat))
return toString(*IntValue, /*radix=*/10);
return std::nullopt;
}
|
Returns `true` if `Node` is a value which evaluates to a literal `0`.
|
cpp
|
clang-tools-extra/clang-tidy/abseil/DurationRewriter.cpp
| 158
|
[] | true
| 3
| 7.2
|
llvm/llvm-project
| 36,021
|
doxygen
| false
|
|
cleanup
|
void cleanup() {
log.trace("Closing the consumer network thread");
Timer timer = time.timer(closeTimeout);
try {
// If an error was thrown from initializeResources(), it's possible that the list of request managers
// is null, so check before using. If the request manager list is null, there wasn't any real work
// performed, so not being able to close the request managers isn't so bad.
if (requestManagers != null && networkClientDelegate != null)
runAtClose(requestManagers.entries(), networkClientDelegate, time.milliseconds());
} catch (Exception e) {
log.error("Unexpected error during shutdown. Proceed with closing.", e);
} finally {
// Likewise, if an error was thrown from initializeResources(), it's possible for the network client
// to be null, so check before using. If the network client is null, things have failed catastrophically
// enough that there aren't any outstanding requests to be sent anyway.
if (networkClientDelegate != null)
sendUnsentRequests(timer);
asyncConsumerMetrics.recordApplicationEventExpiredSize(applicationEventReaper.reap(applicationEventQueue));
closeQuietly(requestManagers, "request managers");
closeQuietly(networkClientDelegate, "network client delegate");
log.debug("Closed the consumer network thread");
}
}
|
Check the unsent queue one last time and poll until all requests are sent or the timer runs out.
|
java
|
clients/src/main/java/org/apache/kafka/clients/consumer/internals/ConsumerNetworkThread.java
| 409
|
[] |
void
| true
| 5
| 7.04
|
apache/kafka
| 31,560
|
javadoc
| false
|
ceiling
|
public static Date ceiling(final Date date, final int field) {
return modify(toCalendar(date), field, ModifyType.CEILING).getTime();
}
|
Gets a date ceiling, leaving the field specified as the most
significant field.
<p>For example, if you had the date-time of 28 Mar 2002
13:45:01.231, if you passed with HOUR, it would return 28 Mar
2002 14:00:00.000. If this was passed with MONTH, it would
return 1 Apr 2002 0:00:00.000.</p>
@param date the date to work with, not null.
@param field the field from {@link Calendar} or {@code SEMI_MONTH}.
@return the different ceil date, not null.
@throws NullPointerException if the date is {@code null}.
@throws ArithmeticException if the year is over 280 million.
@since 2.5
|
java
|
src/main/java/org/apache/commons/lang3/time/DateUtils.java
| 369
|
[
"date",
"field"
] |
Date
| true
| 1
| 6.64
|
apache/commons-lang
| 2,896
|
javadoc
| false
|
bulkGet
|
public Iterable<L> bulkGet(Iterable<? extends Object> keys) {
// Initially using the list to store the keys, then reusing it to store the respective L's
List<Object> result = newArrayList(keys);
if (result.isEmpty()) {
return ImmutableList.of();
}
int[] stripes = new int[result.size()];
for (int i = 0; i < result.size(); i++) {
stripes[i] = indexFor(result.get(i));
}
Arrays.sort(stripes);
// optimize for runs of identical stripes
int previousStripe = stripes[0];
result.set(0, getAt(previousStripe));
for (int i = 1; i < result.size(); i++) {
int currentStripe = stripes[i];
if (currentStripe == previousStripe) {
result.set(i, result.get(i - 1));
} else {
result.set(i, getAt(currentStripe));
previousStripe = currentStripe;
}
}
/*
* Note that the returned Iterable holds references to the returned stripes, to avoid
* error-prone code like:
*
* Striped<Lock> stripedLock = Striped.lazyWeakXXX(...)'
* Iterable<Lock> locks = stripedLock.bulkGet(keys);
* for (Lock lock : locks) {
* lock.lock();
* }
* operation();
* for (Lock lock : locks) {
* lock.unlock();
* }
*
* If we only held the int[] stripes, translating it on the fly to L's, the original locks might
* be garbage collected after locking them, ending up in a huge mess.
*/
@SuppressWarnings("unchecked") // we carefully replaced all keys with their respective L's
List<L> asStripes = (List<L>) result;
return Collections.unmodifiableList(asStripes);
}
|
Returns the stripes that correspond to the passed objects, in ascending (as per {@link
#getAt(int)}) order. Thus, threads that use the stripes in the order returned by this method
are guaranteed to not deadlock each other.
<p>It should be noted that using a {@code Striped<L>} with relatively few stripes, and {@code
bulkGet(keys)} with a relative large number of keys can cause an excessive number of shared
stripes (much like the birthday paradox, where much fewer than anticipated birthdays are needed
for a pair of them to match). Please consider carefully the implications of the number of
stripes, the intended concurrency level, and the typical number of keys used in a {@code
bulkGet(keys)} operation. See <a href="http://www.mathpages.com/home/kmath199.htm">Balls in
Bins model</a> for mathematical formulas that can be used to estimate the probability of
collisions.
@param keys arbitrary non-null keys
@return the stripes corresponding to the objects (one per each object, derived by delegating to
{@link #get(Object)}; may contain duplicates), in an increasing index order.
|
java
|
android/guava/src/com/google/common/util/concurrent/Striped.java
| 141
|
[
"keys"
] | true
| 5
| 8.08
|
google/guava
| 51,352
|
javadoc
| false
|
|
polyadd
|
def polyadd(c1, c2):
"""
Add one polynomial to another.
Returns the sum of two polynomials `c1` + `c2`. The arguments are
sequences of coefficients from lowest order term to highest, i.e.,
[1,2,3] represents the polynomial ``1 + 2*x + 3*x**2``.
Parameters
----------
c1, c2 : array_like
1-D arrays of polynomial coefficients ordered from low to high.
Returns
-------
out : ndarray
The coefficient array representing their sum.
See Also
--------
polysub, polymulx, polymul, polydiv, polypow
Examples
--------
>>> from numpy.polynomial import polynomial as P
>>> c1 = (1, 2, 3)
>>> c2 = (3, 2, 1)
>>> sum = P.polyadd(c1,c2); sum
array([4., 4., 4.])
>>> P.polyval(2, sum) # 4 + 4(2) + 4(2**2)
28.0
"""
return pu._add(c1, c2)
|
Add one polynomial to another.
Returns the sum of two polynomials `c1` + `c2`. The arguments are
sequences of coefficients from lowest order term to highest, i.e.,
[1,2,3] represents the polynomial ``1 + 2*x + 3*x**2``.
Parameters
----------
c1, c2 : array_like
1-D arrays of polynomial coefficients ordered from low to high.
Returns
-------
out : ndarray
The coefficient array representing their sum.
See Also
--------
polysub, polymulx, polymul, polydiv, polypow
Examples
--------
>>> from numpy.polynomial import polynomial as P
>>> c1 = (1, 2, 3)
>>> c2 = (3, 2, 1)
>>> sum = P.polyadd(c1,c2); sum
array([4., 4., 4.])
>>> P.polyval(2, sum) # 4 + 4(2) + 4(2**2)
28.0
|
python
|
numpy/polynomial/polynomial.py
| 215
|
[
"c1",
"c2"
] | false
| 1
| 6.32
|
numpy/numpy
| 31,054
|
numpy
| false
|
|
getOrCreateInflater
|
Inflater getOrCreateInflater() {
Deque<Inflater> inflaterCache = this.inflaterCache;
if (inflaterCache != null) {
synchronized (inflaterCache) {
Inflater inflater = this.inflaterCache.poll();
if (inflater != null) {
return inflater;
}
}
}
return new Inflater(true);
}
|
Get previously used {@link Inflater} from the cache, or create a new one.
@return a usable {@link Inflater}
|
java
|
loader/spring-boot-loader/src/main/java/org/springframework/boot/loader/jar/NestedJarFileResources.java
| 115
|
[] |
Inflater
| true
| 3
| 8.24
|
spring-projects/spring-boot
| 79,428
|
javadoc
| false
|
describeClassicGroups
|
default DescribeClassicGroupsResult describeClassicGroups(Collection<String> groupIds) {
return describeClassicGroups(groupIds, new DescribeClassicGroupsOptions());
}
|
Describe some classic groups in the cluster, with the default options.
<p>
This is a convenience method for {@link #describeClassicGroups(Collection, DescribeClassicGroupsOptions)}
with default options. See the overload for more details.
@param groupIds The IDs of the groups to describe.
@return The DescribeClassicGroupsResult.
|
java
|
clients/src/main/java/org/apache/kafka/clients/admin/Admin.java
| 2,079
|
[
"groupIds"
] |
DescribeClassicGroupsResult
| true
| 1
| 6.32
|
apache/kafka
| 31,560
|
javadoc
| false
|
equals
|
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
final LatLng latLng = (LatLng) o;
return Double.compare(latLng.lon, lon) == 0 && Double.compare(latLng.lat, lat) == 0;
}
|
Determines the minimum latitude of the great circle defined by this LatLng to the provided LatLng.
@param latLng The LatLng.
@return The minimum latitude of the great circle in radians.
|
java
|
libs/h3/src/main/java/org/elasticsearch/h3/LatLng.java
| 181
|
[
"o"
] | true
| 5
| 7.92
|
elastic/elasticsearch
| 75,680
|
javadoc
| false
|
|
processBackgroundEventsOnClose
|
void processBackgroundEventsOnClose() {
if (backgroundEventQueue == null || backgroundEventHandler == null) {
return;
}
try {
processBackgroundEvents();
} catch (Exception e) {
if (!(e instanceof GroupAuthorizationException || e instanceof TopicAuthorizationException || e instanceof InvalidTopicException))
throw e;
}
}
|
Process background events on close. Except some expected exceptions which might occur
during close, exceptions encountered are thrown.
|
java
|
clients/src/main/java/org/apache/kafka/clients/consumer/internals/ShareConsumerImpl.java
| 1,208
|
[] |
void
| true
| 7
| 6.56
|
apache/kafka
| 31,560
|
javadoc
| false
|
resolvePatternEmptyDirectories
|
private Set<StandardConfigDataResource> resolvePatternEmptyDirectories(StandardConfigDataReference reference) {
String directory = reference.getDirectory();
Assert.state(directory != null, "'directory' must not be null");
Resource[] subdirectories = this.resourceLoader.getResources(directory, ResourceType.DIRECTORY);
ConfigDataLocation location = reference.getConfigDataLocation();
if (!location.isOptional() && ObjectUtils.isEmpty(subdirectories)) {
String message = String.format("Config data location '%s' contains no subdirectories", location);
throw new ConfigDataLocationNotFoundException(location, message, null);
}
return Arrays.stream(subdirectories)
.filter(Resource::exists)
.map((resource) -> new StandardConfigDataResource(reference, resource, true))
.collect(Collectors.toCollection(LinkedHashSet::new));
}
|
Create a new {@link StandardConfigDataLocationResolver} instance.
@param logFactory the factory for loggers to use
@param binder a binder backed by the initial {@link Environment}
@param resourceLoader a {@link ResourceLoader} used to load resources
|
java
|
core/spring-boot/src/main/java/org/springframework/boot/context/config/StandardConfigDataLocationResolver.java
| 296
|
[
"reference"
] | true
| 3
| 6.08
|
spring-projects/spring-boot
| 79,428
|
javadoc
| false
|
|
subtract
|
public Fraction subtract(final Fraction fraction) {
return addSub(fraction, false /* subtract */);
}
|
Subtracts the value of another fraction from the value of this one,
returning the result in reduced form.
@param fraction the fraction to subtract, must not be {@code null}
@return a {@link Fraction} instance with the resulting values
@throws NullPointerException if the fraction is {@code null}
@throws ArithmeticException if the resulting numerator or denominator
cannot be represented in an {@code int}.
|
java
|
src/main/java/org/apache/commons/lang3/math/Fraction.java
| 869
|
[
"fraction"
] |
Fraction
| true
| 1
| 6.32
|
apache/commons-lang
| 2,896
|
javadoc
| false
|
compileScript
|
function compileScript(name, body, baseUrl) {
const hostDefinedOptionId = Symbol(name);
async function importModuleDynamically(specifier, _, importAttributes, phase) {
const cascadedLoader = require('internal/modules/esm/loader').getOrInitializeCascadedLoader();
return cascadedLoader.import(specifier, baseUrl, importAttributes,
phase === 'source' ? kSourcePhase : kEvaluationPhase);
}
return makeContextifyScript(
body, // code
name, // filename,
0, // lineOffset
0, // columnOffset,
undefined, // cachedData
false, // produceCachedData
undefined, // parsingContext
hostDefinedOptionId, // hostDefinedOptionId
importModuleDynamically, // importModuleDynamically
);
}
|
@param {string} name - The filename of the script.
@param {string} body - The code of the script.
@param {string} baseUrl Path of the parent importing the module.
@returns {ContextifyScript} The created contextify script.
|
javascript
|
lib/internal/process/execution.js
| 381
|
[
"name",
"body",
"baseUrl"
] | false
| 2
| 6.08
|
nodejs/node
| 114,839
|
jsdoc
| true
|
|
performCacheEvicts
|
private void performCacheEvicts(List<CacheOperationContext> contexts, @Nullable Object result) {
for (CacheOperationContext context : contexts) {
CacheEvictOperation operation = (CacheEvictOperation) context.metadata.operation;
if (isConditionPassing(context, result)) {
Object key = context.getGeneratedKey();
for (Cache cache : context.getCaches()) {
if (operation.isCacheWide()) {
logInvalidating(context, operation, null);
doClear(cache, operation.isBeforeInvocation());
}
else {
if (key == null) {
key = generateKey(context, result);
}
logInvalidating(context, operation, key);
doEvict(cache, key, operation.isBeforeInvocation());
}
}
}
}
}
|
Find a cached value only for {@link CacheableOperation} that passes the condition.
@param contexts the cacheable operations
@return a {@link Cache.ValueWrapper} holding the cached value,
or {@code null} if none is found
|
java
|
spring-context/src/main/java/org/springframework/cache/interceptor/CacheAspectSupport.java
| 687
|
[
"contexts",
"result"
] |
void
| true
| 4
| 7.92
|
spring-projects/spring-framework
| 59,386
|
javadoc
| false
|
_box_as_indexlike
|
def _box_as_indexlike(
dt_array: ArrayLike, utc: bool = False, name: Hashable | None = None
) -> Index:
"""
Properly boxes the ndarray of datetimes to DatetimeIndex
if it is possible or to generic Index instead
Parameters
----------
dt_array: 1-d array
Array of datetimes to be wrapped in an Index.
utc : bool
Whether to convert/localize timestamps to UTC.
name : string, default None
Name for a resulting index
Returns
-------
result : datetime of converted dates
- DatetimeIndex if convertible to sole datetime64 type
- general Index otherwise
"""
if lib.is_np_dtype(dt_array.dtype, "M"):
tz = "utc" if utc else None
return DatetimeIndex(dt_array, tz=tz, name=name)
return Index(dt_array, name=name, dtype=dt_array.dtype)
|
Properly boxes the ndarray of datetimes to DatetimeIndex
if it is possible or to generic Index instead
Parameters
----------
dt_array: 1-d array
Array of datetimes to be wrapped in an Index.
utc : bool
Whether to convert/localize timestamps to UTC.
name : string, default None
Name for a resulting index
Returns
-------
result : datetime of converted dates
- DatetimeIndex if convertible to sole datetime64 type
- general Index otherwise
|
python
|
pandas/core/tools/datetimes.py
| 266
|
[
"dt_array",
"utc",
"name"
] |
Index
| true
| 3
| 6.4
|
pandas-dev/pandas
| 47,362
|
numpy
| false
|
publishEvent
|
protected void publishEvent(Object event, @Nullable ResolvableType typeHint) {
Assert.notNull(event, "Event must not be null");
ResolvableType eventType = null;
// Decorate event as an ApplicationEvent if necessary
ApplicationEvent applicationEvent;
if (event instanceof ApplicationEvent applEvent) {
applicationEvent = applEvent;
eventType = typeHint;
}
else {
ResolvableType payloadType = null;
if (typeHint != null && ApplicationEvent.class.isAssignableFrom(typeHint.toClass())) {
eventType = typeHint;
}
else {
payloadType = typeHint;
}
applicationEvent = new PayloadApplicationEvent<>(this, event, payloadType);
}
// Determine event type only once (for multicast and parent publish)
if (eventType == null) {
eventType = ResolvableType.forInstance(applicationEvent);
if (typeHint == null) {
typeHint = eventType;
}
}
// Multicast right now if possible - or lazily once the multicaster is initialized
if (this.earlyApplicationEvents != null) {
this.earlyApplicationEvents.add(applicationEvent);
}
else if (this.applicationEventMulticaster != null) {
this.applicationEventMulticaster.multicastEvent(applicationEvent, eventType);
}
// Publish event via parent context as well...
if (this.parent != null) {
if (this.parent instanceof AbstractApplicationContext abstractApplicationContext) {
abstractApplicationContext.publishEvent(event, typeHint);
}
else {
this.parent.publishEvent(event);
}
}
}
|
Publish the given event to all listeners.
<p>This is the internal delegate that all other {@code publishEvent}
methods refer to. It is not meant to be called directly but rather serves
as a propagation mechanism between application contexts in a hierarchy,
potentially overridden in subclasses for a custom propagation arrangement.
@param event the event to publish (may be an {@link ApplicationEvent}
or a payload object to be turned into a {@link PayloadApplicationEvent})
@param typeHint the resolved event type, if known.
The implementation of this method also tolerates a payload type hint for
a payload object to be turned into a {@link PayloadApplicationEvent}.
However, the recommended way is to construct an actual event object via
{@link PayloadApplicationEvent#PayloadApplicationEvent(Object, Object, ResolvableType)}
instead for such scenarios.
@since 4.2
@see ApplicationEventMulticaster#multicastEvent(ApplicationEvent, ResolvableType)
|
java
|
spring-context/src/main/java/org/springframework/context/support/AbstractApplicationContext.java
| 413
|
[
"event",
"typeHint"
] |
void
| true
| 10
| 6.4
|
spring-projects/spring-framework
| 59,386
|
javadoc
| false
|
isNodeImport
|
function isNodeImport(node: Node): boolean {
const { parent } = node;
switch (parent.kind) {
case SyntaxKind.ImportEqualsDeclaration:
return (parent as ImportEqualsDeclaration).name === node && isExternalModuleImportEquals(parent as ImportEqualsDeclaration);
case SyntaxKind.ImportSpecifier:
// For a rename import `{ foo as bar }`, don't search for the imported symbol. Just find local uses of `bar`.
return !(parent as ImportSpecifier).propertyName;
case SyntaxKind.ImportClause:
case SyntaxKind.NamespaceImport:
Debug.assert((parent as ImportClause | NamespaceImport).name === node);
return true;
case SyntaxKind.BindingElement:
return isInJSFile(node) && isVariableDeclarationInitializedToBareOrAccessedRequire(parent.parent.parent);
default:
return false;
}
}
|
Given a local reference, we might notice that it's an import/export and recursively search for references of that.
If at an import, look locally for the symbol it imports.
If at an export, look for all imports of it.
This doesn't handle export specifiers; that is done in `getReferencesAtExportSpecifier`.
@param comingFromExport If we are doing a search for all exports, don't bother looking backwards for the imported symbol, since that's the reason we're here.
@internal
|
typescript
|
src/services/importTracker.ts
| 733
|
[
"node"
] | true
| 3
| 6.56
|
microsoft/TypeScript
| 107,154
|
jsdoc
| false
|
|
log
|
def log(x):
"""
Compute the natural logarithm of `x`.
Return the "principal value" (for a description of this, see `numpy.log`)
of :math:`log_e(x)`. For real `x > 0`, this is a real number (``log(0)``
returns ``-inf`` and ``log(np.inf)`` returns ``inf``). Otherwise, the
complex principle value is returned.
Parameters
----------
x : array_like
The value(s) whose log is (are) required.
Returns
-------
out : ndarray or scalar
The log of the `x` value(s). If `x` was a scalar, so is `out`,
otherwise an array is returned.
See Also
--------
numpy.log
Notes
-----
For a log() that returns ``NAN`` when real `x < 0`, use `numpy.log`
(note, however, that otherwise `numpy.log` and this `log` are identical,
i.e., both return ``-inf`` for `x = 0`, ``inf`` for `x = inf`, and,
notably, the complex principle value if ``x.imag != 0``).
Examples
--------
>>> import numpy as np
>>> np.emath.log(np.exp(1))
1.0
Negative arguments are handled "correctly" (recall that
``exp(log(x)) == x`` does *not* hold for real ``x < 0``):
>>> np.emath.log(-np.exp(1)) == (1 + np.pi * 1j)
True
"""
x = _fix_real_lt_zero(x)
return nx.log(x)
|
Compute the natural logarithm of `x`.
Return the "principal value" (for a description of this, see `numpy.log`)
of :math:`log_e(x)`. For real `x > 0`, this is a real number (``log(0)``
returns ``-inf`` and ``log(np.inf)`` returns ``inf``). Otherwise, the
complex principle value is returned.
Parameters
----------
x : array_like
The value(s) whose log is (are) required.
Returns
-------
out : ndarray or scalar
The log of the `x` value(s). If `x` was a scalar, so is `out`,
otherwise an array is returned.
See Also
--------
numpy.log
Notes
-----
For a log() that returns ``NAN`` when real `x < 0`, use `numpy.log`
(note, however, that otherwise `numpy.log` and this `log` are identical,
i.e., both return ``-inf`` for `x = 0`, ``inf`` for `x = inf`, and,
notably, the complex principle value if ``x.imag != 0``).
Examples
--------
>>> import numpy as np
>>> np.emath.log(np.exp(1))
1.0
Negative arguments are handled "correctly" (recall that
``exp(log(x)) == x`` does *not* hold for real ``x < 0``):
>>> np.emath.log(-np.exp(1)) == (1 + np.pi * 1j)
True
|
python
|
numpy/lib/_scimath_impl.py
| 243
|
[
"x"
] | false
| 1
| 6.48
|
numpy/numpy
| 31,054
|
numpy
| false
|
|
send
|
long send(int sockfd, CloseableByteBuffer buffer, int flags);
|
Send a message to a socket.
@param sockfd The open socket file descriptor
@param buffer The message bytes to send
@param flags Flags that may adjust how the message is sent
@return The number of bytes sent, or -1 on failure with errno set
@see <a href="https://man7.org/linux/man-pages/man2/sendto.2.html">send manpage</a>
|
java
|
libs/native/src/main/java/org/elasticsearch/nativeaccess/lib/PosixCLibrary.java
| 139
|
[
"sockfd",
"buffer",
"flags"
] | true
| 1
| 6.8
|
elastic/elasticsearch
| 75,680
|
javadoc
| false
|
|
binaryToByte
|
public static byte binaryToByte(final boolean[] src, final int srcPos, final byte dstInit, final int dstPos, final int nBools) {
if (src.length == 0 && srcPos == 0 || 0 == nBools) {
return dstInit;
}
if (nBools - 1 + dstPos >= Byte.SIZE) {
throw new IllegalArgumentException("nBools - 1 + dstPos >= 8");
}
byte out = dstInit;
for (int i = 0; i < nBools; i++) {
final int shift = i + dstPos;
final int bits = (src[i + srcPos] ? 1 : 0) << shift;
final int mask = 0x1 << shift;
out = (byte) (out & ~mask | bits);
}
return out;
}
|
Converts binary (represented as boolean array) into a byte using the default (little-endian, LSB0) byte and bit ordering.
@param src the binary to convert.
@param srcPos the position in {@code src}, in boolean unit, from where to start the conversion.
@param dstInit initial value of the destination byte.
@param dstPos the position of the LSB, in bits, in the result byte.
@param nBools the number of booleans to convert.
@return a byte containing the selected bits.
@throws NullPointerException if {@code src} is {@code null}.
@throws IllegalArgumentException if {@code nBools - 1 + dstPos >= 8}.
@throws ArrayIndexOutOfBoundsException if {@code srcPos + nBools > src.length}.
|
java
|
src/main/java/org/apache/commons/lang3/Conversion.java
| 155
|
[
"src",
"srcPos",
"dstInit",
"dstPos",
"nBools"
] | true
| 7
| 7.92
|
apache/commons-lang
| 2,896
|
javadoc
| false
|
|
indexOfAny
|
public static int indexOfAny(final CharSequence cs, final char... searchChars) {
return indexOfAny(cs, 0, searchChars);
}
|
Search a CharSequence to find the first index of any character in the given set of characters.
<p>
A {@code null} String will return {@code -1}. A {@code null} or zero length search array will return {@code -1}.
</p>
<pre>
StringUtils.indexOfAny(null, *) = -1
StringUtils.indexOfAny("", *) = -1
StringUtils.indexOfAny(*, null) = -1
StringUtils.indexOfAny(*, []) = -1
StringUtils.indexOfAny("zzabyycdxx", ['z', 'a']) = 0
StringUtils.indexOfAny("zzabyycdxx", ['b', 'y']) = 3
StringUtils.indexOfAny("aba", ['z']) = -1
</pre>
@param cs the CharSequence to check, may be null.
@param searchChars the chars to search for, may be null.
@return the index of any of the chars, -1 if no match or null input.
@since 2.0
@since 3.0 Changed signature from indexOfAny(String, char[]) to indexOfAny(CharSequence, char...)
|
java
|
src/main/java/org/apache/commons/lang3/StringUtils.java
| 2,710
|
[
"cs"
] | true
| 1
| 6.8
|
apache/commons-lang
| 2,896
|
javadoc
| false
|
|
newValue
|
private Object newValue(Class<?> type, @Nullable TypeDescriptor desc, String name) {
try {
if (type.isArray()) {
return createArray(type);
}
else if (Collection.class.isAssignableFrom(type)) {
TypeDescriptor elementDesc = (desc != null ? desc.getElementTypeDescriptor() : null);
return CollectionFactory.createCollection(type, (elementDesc != null ? elementDesc.getType() : null), 16);
}
else if (Map.class.isAssignableFrom(type)) {
TypeDescriptor keyDesc = (desc != null ? desc.getMapKeyTypeDescriptor() : null);
return CollectionFactory.createMap(type, (keyDesc != null ? keyDesc.getType() : null), 16);
}
else {
Constructor<?> ctor = type.getDeclaredConstructor();
if (Modifier.isPrivate(ctor.getModifiers())) {
throw new IllegalAccessException("Auto-growing not allowed with private constructor: " + ctor);
}
return BeanUtils.instantiateClass(ctor);
}
}
catch (Throwable ex) {
throw new NullValueInNestedPathException(getRootClass(), this.nestedPath + name,
"Could not instantiate property type [" + type.getName() + "] to auto-grow nested property path", ex);
}
}
|
Retrieve a Property accessor for the given nested property.
Create a new one if not found in the cache.
<p>Note: Caching nested PropertyAccessors is necessary now,
to keep registered custom editors for nested properties.
@param nestedProperty property to create the PropertyAccessor for
@return the PropertyAccessor instance, either cached or newly created
|
java
|
spring-beans/src/main/java/org/springframework/beans/AbstractNestablePropertyAccessor.java
| 895
|
[
"type",
"desc",
"name"
] |
Object
| true
| 10
| 7.44
|
spring-projects/spring-framework
| 59,386
|
javadoc
| false
|
value
|
public XContentBuilder value(String value) throws IOException {
if (value == null) {
return nullValue();
}
generator.writeString(value);
return this;
}
|
@return the value of the "human readable" flag. When the value is equal to true,
some types of values are written in a format easier to read for a human.
|
java
|
libs/x-content/src/main/java/org/elasticsearch/xcontent/XContentBuilder.java
| 756
|
[
"value"
] |
XContentBuilder
| true
| 2
| 7.04
|
elastic/elasticsearch
| 75,680
|
javadoc
| false
|
postProcessBeanFactory
|
@Override
@SuppressWarnings("unchecked")
public void postProcessBeanFactory(ConfigurableListableBeanFactory beanFactory) throws BeansException {
if (this.customQualifierTypes != null) {
if (!(beanFactory instanceof DefaultListableBeanFactory dlbf)) {
throw new IllegalStateException(
"CustomAutowireConfigurer needs to operate on a DefaultListableBeanFactory");
}
if (!(dlbf.getAutowireCandidateResolver() instanceof QualifierAnnotationAutowireCandidateResolver)) {
dlbf.setAutowireCandidateResolver(new QualifierAnnotationAutowireCandidateResolver());
}
QualifierAnnotationAutowireCandidateResolver resolver =
(QualifierAnnotationAutowireCandidateResolver) dlbf.getAutowireCandidateResolver();
for (Object value : this.customQualifierTypes) {
Class<? extends Annotation> customType = null;
if (value instanceof Class) {
customType = (Class<? extends Annotation>) value;
}
else if (value instanceof String className) {
customType = (Class<? extends Annotation>) ClassUtils.resolveClassName(className, this.beanClassLoader);
}
else {
throw new IllegalArgumentException(
"Invalid value [" + value + "] for custom qualifier type: needs to be Class or String.");
}
if (!Annotation.class.isAssignableFrom(customType)) {
throw new IllegalArgumentException(
"Qualifier type [" + customType.getName() + "] needs to be annotation type");
}
resolver.addQualifierType(customType);
}
}
}
|
Register custom qualifier annotation types to be considered
when autowiring beans. Each element of the provided set may
be either a Class instance or a String representation of the
fully-qualified class name of the custom annotation.
<p>Note that any annotation that is itself annotated with Spring's
{@link org.springframework.beans.factory.annotation.Qualifier}
does not require explicit registration.
@param customQualifierTypes the custom types to register
|
java
|
spring-beans/src/main/java/org/springframework/beans/factory/annotation/CustomAutowireConfigurer.java
| 89
|
[
"beanFactory"
] |
void
| true
| 7
| 6.24
|
spring-projects/spring-framework
| 59,386
|
javadoc
| false
|
iterator
|
CloseableIterator<Record> iterator(BufferSupplier bufferSupplier) {
if (isCompressed())
return new DeepRecordsIterator(this, false, Integer.MAX_VALUE, bufferSupplier);
return new CloseableIterator<>() {
private boolean hasNext = true;
@Override
public void close() {}
@Override
public boolean hasNext() {
return hasNext;
}
@Override
public Record next() {
if (!hasNext)
throw new NoSuchElementException();
hasNext = false;
return AbstractLegacyRecordBatch.this;
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
};
}
|
Get an iterator for the nested entries contained within this batch. Note that
if the batch is not compressed, then this method will return an iterator over the
shallow record only (i.e. this object).
@return An iterator over the records contained within this batch
|
java
|
clients/src/main/java/org/apache/kafka/common/record/AbstractLegacyRecordBatch.java
| 234
|
[
"bufferSupplier"
] | true
| 3
| 7.04
|
apache/kafka
| 31,560
|
javadoc
| false
|
|
errorCounts
|
public abstract Map<Errors, Integer> errorCounts();
|
The number of each type of error in the response, including {@link Errors#NONE} and top-level errors as well as
more specifically scoped errors (such as topic or partition-level errors).
@return A count of errors.
|
java
|
clients/src/main/java/org/apache/kafka/common/requests/AbstractResponse.java
| 65
|
[] | true
| 1
| 6.64
|
apache/kafka
| 31,560
|
javadoc
| false
|
|
categorize_pools
|
def categorize_pools(self, pool_names: set) -> tuple[dict, set, set]:
"""
Categorize the given pool_names into matched_pool_names and not_found_pool_names based on existing pool_names.
Existing pools are returned as a dict of {pool_name : Pool}.
:param pool_names: set of pool_names
:return: tuple of dict of existed pools, set of matched pool_names, set of not found pool_names
"""
existed_pools = self.session.execute(select(Pool).filter(Pool.pool.in_(pool_names))).scalars()
existing_pools_dict = {pool.pool: pool for pool in existed_pools}
matched_pool_names = set(existing_pools_dict.keys())
not_found_pool_names = pool_names - matched_pool_names
return existing_pools_dict, matched_pool_names, not_found_pool_names
|
Categorize the given pool_names into matched_pool_names and not_found_pool_names based on existing pool_names.
Existing pools are returned as a dict of {pool_name : Pool}.
:param pool_names: set of pool_names
:return: tuple of dict of existed pools, set of matched pool_names, set of not found pool_names
|
python
|
airflow-core/src/airflow/api_fastapi/core_api/services/public/pools.py
| 116
|
[
"self",
"pool_names"
] |
tuple[dict, set, set]
| true
| 1
| 7.04
|
apache/airflow
| 43,597
|
sphinx
| false
|
splitMatcher
|
public static StrMatcher splitMatcher() {
return SPLIT_MATCHER;
}
|
Gets the matcher for the same characters as StringTokenizer,
namely space, tab, newline and form-feed.
@return the split matcher.
|
java
|
src/main/java/org/apache/commons/lang3/text/StrMatcher.java
| 341
|
[] |
StrMatcher
| true
| 1
| 6.48
|
apache/commons-lang
| 2,896
|
javadoc
| false
|
insert
|
public static boolean[] insert(final int index, final boolean[] array, final boolean... values) {
if (array == null) {
return null;
}
if (isEmpty(values)) {
return clone(array);
}
if (index < 0 || index > array.length) {
throw new IndexOutOfBoundsException("Index: " + index + ", Length: " + array.length);
}
final boolean[] result = new boolean[array.length + values.length];
System.arraycopy(values, 0, result, index, values.length);
if (index > 0) {
System.arraycopy(array, 0, result, 0, index);
}
if (index < array.length) {
System.arraycopy(array, index, result, index + values.length, array.length - index);
}
return result;
}
|
Inserts elements into an array at the given index (starting from zero).
<p>
When an array is returned, it is always a new array.
</p>
<pre>
ArrayUtils.insert(index, null, null) = null
ArrayUtils.insert(index, array, null) = cloned copy of 'array'
ArrayUtils.insert(index, null, values) = null
</pre>
@param index the position within {@code array} to insert the new values.
@param array the array to insert the values into, may be {@code null}.
@param values the new values to insert, may be {@code null}.
@return The new array or {@code null} if the given array is {@code null}.
@throws IndexOutOfBoundsException if {@code array} is provided and either {@code index < 0} or {@code index > array.length}.
@since 3.6
|
java
|
src/main/java/org/apache/commons/lang3/ArrayUtils.java
| 2,800
|
[
"index",
"array"
] | true
| 7
| 8.08
|
apache/commons-lang
| 2,896
|
javadoc
| false
|
|
equals
|
@Deprecated
public static boolean equals(final Object object1, final Object object2) {
return Objects.equals(object1, object2);
}
|
Compares two objects for equality, where either one or both
objects may be {@code null}.
<pre>
ObjectUtils.equals(null, null) = true
ObjectUtils.equals(null, "") = false
ObjectUtils.equals("", null) = false
ObjectUtils.equals("", "") = true
ObjectUtils.equals(Boolean.TRUE, null) = false
ObjectUtils.equals(Boolean.TRUE, "true") = false
ObjectUtils.equals(Boolean.TRUE, Boolean.TRUE) = true
ObjectUtils.equals(Boolean.TRUE, Boolean.FALSE) = false
</pre>
@param object1 the first object, may be {@code null}.
@param object2 the second object, may be {@code null}.
@return {@code true} if the values of both objects are the same.
@deprecated this method has been replaced by {@code java.util.Objects.equals(Object, Object)} in Java 7 and will
be removed from future releases.
|
java
|
src/main/java/org/apache/commons/lang3/ObjectUtils.java
| 564
|
[
"object1",
"object2"
] | true
| 1
| 6.32
|
apache/commons-lang
| 2,896
|
javadoc
| false
|
|
toByte
|
public static byte toByte(final String str, final byte defaultValue) {
try {
return Byte.parseByte(str);
} catch (final RuntimeException e) {
return defaultValue;
}
}
|
Converts a {@link String} to a {@code byte}, returning a default value if the conversion fails.
<p>
If the string is {@code null}, the default value is returned.
</p>
<pre>
NumberUtils.toByte(null, 1) = 1
NumberUtils.toByte("", 1) = 1
NumberUtils.toByte("1", 0) = 1
</pre>
@param str the string to convert, may be null.
@param defaultValue the default value.
@return the byte represented by the string, or the default if conversion fails.
@since 2.5
|
java
|
src/main/java/org/apache/commons/lang3/math/NumberUtils.java
| 1,380
|
[
"str",
"defaultValue"
] | true
| 2
| 8.08
|
apache/commons-lang
| 2,896
|
javadoc
| false
|
|
oss_read
|
def oss_read(self, remote_log_location, return_error=False):
"""
Return the log at the remote_log_location or '' if no logs are found or there is an error.
:param remote_log_location: the log's location in remote storage
:param return_error: if True, returns a string error message if an
error occurs. Otherwise, returns '' when an error occurs.
"""
oss_remote_log_location = f"{self.base_folder}/{remote_log_location}"
try:
self.log.info("read remote log: %s", oss_remote_log_location)
return self.hook.read_key(self.bucket_name, oss_remote_log_location)
except Exception:
msg = f"Could not read logs from {oss_remote_log_location}"
self.log.exception(msg)
# return error if needed
if return_error:
return msg
|
Return the log at the remote_log_location or '' if no logs are found or there is an error.
:param remote_log_location: the log's location in remote storage
:param return_error: if True, returns a string error message if an
error occurs. Otherwise, returns '' when an error occurs.
|
python
|
providers/alibaba/src/airflow/providers/alibaba/cloud/log/oss_task_handler.py
| 110
|
[
"self",
"remote_log_location",
"return_error"
] | false
| 2
| 6.24
|
apache/airflow
| 43,597
|
sphinx
| false
|
|
read
|
public String read() throws IOException {
Closer closer = Closer.create();
try {
Reader reader = closer.register(openStream());
return CharStreams.toString(reader);
} catch (Throwable e) {
throw closer.rethrow(e);
} finally {
closer.close();
}
}
|
Reads the contents of this source as a string.
@throws IOException if an I/O error occurs while reading from this source
|
java
|
android/guava/src/com/google/common/io/CharSource.java
| 292
|
[] |
String
| true
| 2
| 6.88
|
google/guava
| 51,352
|
javadoc
| false
|
_get_fieldspec
|
def _get_fieldspec(dtype):
"""
Produce a list of name/dtype pairs corresponding to the dtype fields
Similar to dtype.descr, but the second item of each tuple is a dtype, not a
string. As a result, this handles subarray dtypes
Can be passed to the dtype constructor to reconstruct the dtype, noting that
this (deliberately) discards field offsets.
Examples
--------
>>> import numpy as np
>>> dt = np.dtype([(('a', 'A'), np.int64), ('b', np.double, 3)])
>>> dt.descr
[(('a', 'A'), '<i8'), ('b', '<f8', (3,))]
>>> _get_fieldspec(dt)
[(('a', 'A'), dtype('int64')), ('b', dtype(('<f8', (3,))))]
"""
if dtype.names is None:
# .descr returns a nameless field, so we should too
return [('', dtype)]
else:
fields = ((name, dtype.fields[name]) for name in dtype.names)
# keep any titles, if present
return [
(name if len(f) == 2 else (f[2], name), f[0])
for name, f in fields
]
|
Produce a list of name/dtype pairs corresponding to the dtype fields
Similar to dtype.descr, but the second item of each tuple is a dtype, not a
string. As a result, this handles subarray dtypes
Can be passed to the dtype constructor to reconstruct the dtype, noting that
this (deliberately) discards field offsets.
Examples
--------
>>> import numpy as np
>>> dt = np.dtype([(('a', 'A'), np.int64), ('b', np.double, 3)])
>>> dt.descr
[(('a', 'A'), '<i8'), ('b', '<f8', (3,))]
>>> _get_fieldspec(dt)
[(('a', 'A'), dtype('int64')), ('b', dtype(('<f8', (3,))))]
|
python
|
numpy/lib/recfunctions.py
| 71
|
[
"dtype"
] | false
| 4
| 6.8
|
numpy/numpy
| 31,054
|
unknown
| false
|
|
length
|
public int length() {
return bitSet.length();
}
|
Returns the "logical size" of this {@link BitSet}: the index of the highest set bit in the {@link BitSet} plus one.
Returns zero if the {@link BitSet} contains no set bits.
@return the logical size of this {@link BitSet}.
|
java
|
src/main/java/org/apache/commons/lang3/util/FluentBitSet.java
| 303
|
[] | true
| 1
| 6.8
|
apache/commons-lang
| 2,896
|
javadoc
| false
|
|
isCandidateComponent
|
protected boolean isCandidateComponent(AnnotatedBeanDefinition beanDefinition) {
AnnotationMetadata metadata = beanDefinition.getMetadata();
return (metadata.isIndependent() && (metadata.isConcrete() ||
(metadata.isAbstract() && metadata.hasAnnotatedMethods(Lookup.class.getName()))));
}
|
Determine whether the given bean definition qualifies as a candidate component.
<p>The default implementation checks whether the class is not dependent on an
enclosing class as well as whether the class is either concrete (and therefore
not an interface) or has {@link Lookup @Lookup} methods.
<p>Can be overridden in subclasses.
@param beanDefinition the bean definition to check
@return whether the bean definition qualifies as a candidate component
|
java
|
spring-context/src/main/java/org/springframework/context/annotation/ClassPathScanningCandidateComponentProvider.java
| 571
|
[
"beanDefinition"
] | true
| 4
| 7.76
|
spring-projects/spring-framework
| 59,386
|
javadoc
| false
|
|
determineServiceLocatorExceptionConstructor
|
@SuppressWarnings("unchecked")
protected Constructor<Exception> determineServiceLocatorExceptionConstructor(Class<? extends Exception> exceptionClass) {
try {
return (Constructor<Exception>) exceptionClass.getConstructor(String.class, Throwable.class);
}
catch (NoSuchMethodException ex) {
try {
return (Constructor<Exception>) exceptionClass.getConstructor(Throwable.class);
}
catch (NoSuchMethodException ex2) {
try {
return (Constructor<Exception>) exceptionClass.getConstructor(String.class);
}
catch (NoSuchMethodException ex3) {
throw new IllegalArgumentException(
"Service locator exception [" + exceptionClass.getName() +
"] neither has a (String, Throwable) constructor nor a (String) constructor");
}
}
}
}
|
Determine the constructor to use for the given service locator exception
class. Only called in case of a custom service locator exception.
<p>The default implementation looks for a constructor with one of the
following parameter types: {@code (String, Throwable)}
or {@code (Throwable)} or {@code (String)}.
@param exceptionClass the exception class
@return the constructor to use
@see #setServiceLocatorExceptionClass
|
java
|
spring-beans/src/main/java/org/springframework/beans/factory/config/ServiceLocatorFactoryBean.java
| 280
|
[
"exceptionClass"
] | true
| 4
| 7.44
|
spring-projects/spring-framework
| 59,386
|
javadoc
| false
|
|
json
|
public static Object json(Object fieldValue) {
return JsonProcessor.apply(fieldValue, false, true);
}
|
Uses {@link JsonProcessor} to convert a JSON string to a structured JSON
object.
@param fieldValue JSON string
@return structured JSON object
|
java
|
modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/Processors.java
| 62
|
[
"fieldValue"
] |
Object
| true
| 1
| 6.64
|
elastic/elasticsearch
| 75,680
|
javadoc
| false
|
encode
|
public static String encode(AnsiElement element) {
if (isEnabled()) {
return ENCODE_START + element + ENCODE_END;
}
return "";
}
|
Encode a single {@link AnsiElement} if output is enabled.
@param element the element to encode
@return the encoded element or an empty string
|
java
|
core/spring-boot/src/main/java/org/springframework/boot/ansi/AnsiOutput.java
| 85
|
[
"element"
] |
String
| true
| 2
| 7.76
|
spring-projects/spring-boot
| 79,428
|
javadoc
| false
|
_dump_dynamic_shapes
|
def _dump_dynamic_shapes(
dynamic_shapes: Union[dict[str, Any], tuple[Any], list[Any], None],
args: tuple[Any],
kwargs: Optional[dict[str, Any]] = None,
to_dict: Optional[bool] = False,
) -> Union[DynamicShapesSpec, dict[str, Any]]:
"""
Utility function for dynamic shapes serialization, serializing a dynamic_shapes spec.
Returns a DynamicShapesSpec dataclass containing 2 fields, "dynamic_shapes" and "dims".
Uses args & kwargs to distinguish between tensor-level and dim-level specs (only for Nones).
dynamic_shapes: A pytree structure mirroring the dynamic_shapes input to export():
- Each tensor input is represented with a list of values, non-tensor inputs with None.
- dynamic dimensions (i.e. symbols) in tensors and Dim enums are represented with strings.
- static dimensions are represented with ints.
dims: A dictionary mapping each symbol name to the min/max range and derived dim names.
For example:
```
dx = Dim("dx", min=4, max=16)
dy = dx + 1
inputs = (
[
torch.randn(4, 4),
torch.randn(5, 4),
],
torch.randn(4),
torch.randn(4, 4),
"hello",
)
dynamic_shapes = {
"a": [
(dx, 4),
(dy, 4),
],
"b": (Dim.STATIC,),
"c": None,
"d": None,
}
out = _dump_dynamic_shapes(dynamic_shapes, inputs, to_dict=True)
```
would generate the following output:
```
{
"dynamic_shapes": (
[
["dx", 4],
["dx + 1", 4],
],
["_DimHint.STATIC"],
["_DimHint.STATIC", "_DimHint.STATIC"],
None,
),
"dims": {
"dx": {
"min": 4,
"max": 16,
"derived": ["dx + 1"],
},
},
}
```
"""
dims: dict[str, dict[str, Any]] = {}
def _standardize_shapes(path, tensor, shape): # type: ignore[no-untyped-def]
"""
Helps standardize the dynamic_shapes tree structure we serialize,
returning lists for each tensor shape, handling tensor-level Nones.
"""
if not isinstance(tensor, torch.Tensor):
return None
if shape is None:
return [Dim.STATIC] * len(tensor.shape)
out = []
if isinstance(shape, dict):
for i, s in enumerate(tensor.shape):
out.append(s if shape.get(i) is None else shape.get(i))
else:
assert isinstance(shape, (tuple, list))
for i, s in enumerate(tensor.shape):
out.append(s if shape[i] is None else shape[i])
return out
def _track_dim_from_dims(
val: Union[None, int, _DimHint, Dim],
) -> Union[None, int, str]:
"""
Tracks dims, ranges, derived dims from the standardized dynamic_shapes spec.
"""
if val is None or isinstance(val, int): # non-tensor input or static
return val
if isinstance(val, _DimHint): # store enum as string
return val.__class__.__name__ + "." + val.type.name
assert isinstance(val, Dim)
# track root dim
root = val.root if isinstance(val, _DerivedDim) else val # type: ignore[attr-defined]
if root.__name__ not in dims:
dims[root.__name__] = {
"min": root.min, # type: ignore[attr-defined,union-attr]
"max": root.max, # type: ignore[attr-defined,union-attr]
"derived": set(),
}
# track derived dims
if isinstance(val, _DerivedDim):
dims[root.__name__]["derived"].add(val.__name__)
return val.__name__
if dynamic_shapes is None:
return {"dynamic_shapes": None, "dims": {}}
# convert to tuple of specs, for each arg/kwarg
kwargs = kwargs or {}
if isinstance(dynamic_shapes, dict):
dynamic_shapes = dynamic_shapes.values() # type: ignore[assignment]
# pyrefly: ignore [bad-assignment, bad-argument-type]
dynamic_shapes = tuple(dynamic_shapes)
combined_args = tuple(args) + tuple(kwargs.values())
# run same check when we're processing shapes for export - is this too lazy?
_check_dynamic_shapes(dict(enumerate(combined_args)), dynamic_shapes) # type: ignore[arg-type]
tree_shapes = _tree_map_with_path(
_standardize_shapes, combined_args, dynamic_shapes, tree_name="inputs"
)
serialized_shapes = tree_map(_track_dim_from_dims, tree_shapes)
return _postprocess_serialized_shapes(serialized_shapes, dims, to_dict=to_dict)
|
Utility function for dynamic shapes serialization, serializing a dynamic_shapes spec.
Returns a DynamicShapesSpec dataclass containing 2 fields, "dynamic_shapes" and "dims".
Uses args & kwargs to distinguish between tensor-level and dim-level specs (only for Nones).
dynamic_shapes: A pytree structure mirroring the dynamic_shapes input to export():
- Each tensor input is represented with a list of values, non-tensor inputs with None.
- dynamic dimensions (i.e. symbols) in tensors and Dim enums are represented with strings.
- static dimensions are represented with ints.
dims: A dictionary mapping each symbol name to the min/max range and derived dim names.
For example:
```
dx = Dim("dx", min=4, max=16)
dy = dx + 1
inputs = (
[
torch.randn(4, 4),
torch.randn(5, 4),
],
torch.randn(4),
torch.randn(4, 4),
"hello",
)
dynamic_shapes = {
"a": [
(dx, 4),
(dy, 4),
],
"b": (Dim.STATIC,),
"c": None,
"d": None,
}
out = _dump_dynamic_shapes(dynamic_shapes, inputs, to_dict=True)
```
would generate the following output:
```
{
"dynamic_shapes": (
[
["dx", 4],
["dx + 1", 4],
],
["_DimHint.STATIC"],
["_DimHint.STATIC", "_DimHint.STATIC"],
None,
),
"dims": {
"dx": {
"min": 4,
"max": 16,
"derived": ["dx + 1"],
},
},
}
```
|
python
|
torch/_export/serde/dynamic_shapes.py
| 66
|
[
"dynamic_shapes",
"args",
"kwargs",
"to_dict"
] |
Union[DynamicShapesSpec, dict[str, Any]]
| true
| 18
| 7.04
|
pytorch/pytorch
| 96,034
|
unknown
| false
|
contains
|
boolean contains(String propertyName);
|
Is there a property value (or other processing entry) for this property?
@param propertyName the name of the property we're interested in
@return whether there is a property value for this property
|
java
|
spring-beans/src/main/java/org/springframework/beans/PropertyValues.java
| 92
|
[
"propertyName"
] | true
| 1
| 6.32
|
spring-projects/spring-framework
| 59,386
|
javadoc
| false
|
|
E
|
function E(sym, val, def, ...otherClasses) {
// Special case for SystemError that formats the error message differently
// The SystemErrors only have SystemError as their base classes.
messages.set(sym, val);
const ErrClass = def === SystemError ?
makeSystemErrorWithCode(sym) :
makeNodeErrorWithCode(def, sym);
if (otherClasses.length !== 0) {
if (otherClasses.includes(HideStackFramesError)) {
if (otherClasses.length !== 1) {
otherClasses.forEach((clazz) => {
if (clazz !== HideStackFramesError) {
ErrClass[clazz.name] = makeNodeErrorWithCode(clazz, sym);
ErrClass[clazz.name].HideStackFramesError = makeNodeErrorForHideStackFrame(ErrClass[clazz.name], clazz);
}
});
}
} else {
otherClasses.forEach((clazz) => {
ErrClass[clazz.name] = makeNodeErrorWithCode(clazz, sym);
});
}
}
if (otherClasses.includes(HideStackFramesError)) {
ErrClass.HideStackFramesError = makeNodeErrorForHideStackFrame(ErrClass, def);
}
codes[sym] = ErrClass;
}
|
This function removes unnecessary frames from Node.js core errors.
@template {(...args: unknown[]) => unknown} T
@param {T} fn
@returns {T}
|
javascript
|
lib/internal/errors.js
| 555
|
[
"sym",
"val",
"def"
] | false
| 8
| 6.24
|
nodejs/node
| 114,839
|
jsdoc
| false
|
|
is_monotonic_increasing
|
def is_monotonic_increasing(self) -> Series:
"""
Return whether each group's values are monotonically increasing.
Returns
-------
Series
See Also
--------
SeriesGroupBy.is_monotonic_decreasing : Return whether each group's values
are monotonically decreasing.
Examples
--------
>>> s = pd.Series([2, 1, 3, 4], index=["Falcon", "Falcon", "Parrot", "Parrot"])
>>> s.groupby(level=0).is_monotonic_increasing
Falcon False
Parrot True
dtype: bool
"""
return self.apply(lambda ser: ser.is_monotonic_increasing)
|
Return whether each group's values are monotonically increasing.
Returns
-------
Series
See Also
--------
SeriesGroupBy.is_monotonic_decreasing : Return whether each group's values
are monotonically decreasing.
Examples
--------
>>> s = pd.Series([2, 1, 3, 4], index=["Falcon", "Falcon", "Parrot", "Parrot"])
>>> s.groupby(level=0).is_monotonic_increasing
Falcon False
Parrot True
dtype: bool
|
python
|
pandas/core/groupby/generic.py
| 1,742
|
[
"self"
] |
Series
| true
| 1
| 6.64
|
pandas-dev/pandas
| 47,362
|
unknown
| false
|
withoutConfigDataOption
|
ConfigDataEnvironmentContributor withoutConfigDataOption(ConfigData.Option option) {
return new ConfigDataEnvironmentContributor(this.kind, this.location, this.resource,
this.fromProfileSpecificImport, this.propertySource, this.configurationPropertySource, this.properties,
this.configDataOptions.without(option), this.children, this.conversionService);
}
|
Return if the contributor has a specific config data option.
@param option the option to check
@return {@code true} if the option is present
|
java
|
core/spring-boot/src/main/java/org/springframework/boot/context/config/ConfigDataEnvironmentContributor.java
| 184
|
[
"option"
] |
ConfigDataEnvironmentContributor
| true
| 1
| 7.04
|
spring-projects/spring-boot
| 79,428
|
javadoc
| false
|
createExecutor
|
private ExecutorService createExecutor() {
return Executors.newFixedThreadPool(getTaskCount());
}
|
Creates the {@link ExecutorService} to be used. This method is called if
no {@link ExecutorService} was provided at construction time.
@return the {@link ExecutorService} to be used.
|
java
|
src/main/java/org/apache/commons/lang3/concurrent/BackgroundInitializer.java
| 223
|
[] |
ExecutorService
| true
| 1
| 6.8
|
apache/commons-lang
| 2,896
|
javadoc
| false
|
wrapWithAtLeast
|
function wrapWithAtLeast(body: string, input: DMMF.InputType) {
if (input.constraints?.fields && input.constraints.fields.length > 0) {
const fields = input.constraints.fields.map((f) => `"${f}"`).join(' | ')
return `Prisma.AtLeast<${body}, ${fields}>`
}
return body
}
|
Wraps an input type with `Prisma.AtLeast`
@param body type string to wrap
@param input original input type
@returns
|
typescript
|
packages/client-generator-js/src/TSClient/Input.ts
| 172
|
[
"body",
"input"
] | false
| 3
| 6.8
|
prisma/prisma
| 44,834
|
jsdoc
| false
|
|
getClasses
|
public static Class<?>[] getClasses(Collection<Configurations> configurations) {
List<Configurations> collated = collate(configurations);
LinkedHashSet<Class<?>> classes = collated.stream()
.flatMap(Configurations::streamClasses)
.collect(Collectors.toCollection(LinkedHashSet::new));
return ClassUtils.toClassArray(classes);
}
|
Return the classes from all the specified configurations in the order that they
would be registered.
@param configurations the source configuration
@return configuration classes in registration order
|
java
|
core/spring-boot/src/main/java/org/springframework/boot/context/annotation/Configurations.java
| 152
|
[
"configurations"
] | true
| 1
| 6.4
|
spring-projects/spring-boot
| 79,428
|
javadoc
| false
|
|
_get_compatible_log_stream
|
def _get_compatible_log_stream(
log_messages: LogMessages,
) -> RawLogStream:
"""
Convert legacy log message blobs into a generator that yields log lines.
:param log_messages: List of legacy log message strings.
:return: A generator that yields interleaved log lines.
"""
yield from chain.from_iterable(
_stream_lines_by_chunk(io.StringIO(log_message)) for log_message in log_messages
)
|
Convert legacy log message blobs into a generator that yields log lines.
:param log_messages: List of legacy log message strings.
:return: A generator that yields interleaved log lines.
|
python
|
airflow-core/src/airflow/utils/log/file_task_handler.py
| 401
|
[
"log_messages"
] |
RawLogStream
| true
| 1
| 7.04
|
apache/airflow
| 43,597
|
sphinx
| false
|
_maybe_infer_dtype_type
|
def _maybe_infer_dtype_type(element):
"""
Try to infer an object's dtype, for use in arithmetic ops.
Uses `element.dtype` if that's available.
Objects implementing the iterator protocol are cast to a NumPy array,
and from there the array's type is used.
Parameters
----------
element : object
Possibly has a `.dtype` attribute, and possibly the iterator
protocol.
Returns
-------
tipo : type
Examples
--------
>>> from collections import namedtuple
>>> Foo = namedtuple("Foo", "dtype")
>>> _maybe_infer_dtype_type(Foo(np.dtype("i8")))
dtype('int64')
"""
tipo = None
if hasattr(element, "dtype"):
tipo = element.dtype
elif is_list_like(element):
element = np.asarray(element)
tipo = element.dtype
return tipo
|
Try to infer an object's dtype, for use in arithmetic ops.
Uses `element.dtype` if that's available.
Objects implementing the iterator protocol are cast to a NumPy array,
and from there the array's type is used.
Parameters
----------
element : object
Possibly has a `.dtype` attribute, and possibly the iterator
protocol.
Returns
-------
tipo : type
Examples
--------
>>> from collections import namedtuple
>>> Foo = namedtuple("Foo", "dtype")
>>> _maybe_infer_dtype_type(Foo(np.dtype("i8")))
dtype('int64')
|
python
|
pandas/core/dtypes/cast.py
| 826
|
[
"element"
] | false
| 3
| 7.04
|
pandas-dev/pandas
| 47,362
|
numpy
| false
|
|
requiresEagerInitForType
|
private boolean requiresEagerInitForType(@Nullable String factoryBeanName) {
return (factoryBeanName != null && isFactoryBean(factoryBeanName) && !containsSingleton(factoryBeanName));
}
|
Check whether the specified bean would need to be eagerly initialized
in order to determine its type.
@param factoryBeanName a factory-bean reference that the bean definition
defines a factory method for
@return whether eager initialization is necessary
|
java
|
spring-beans/src/main/java/org/springframework/beans/factory/support/DefaultListableBeanFactory.java
| 722
|
[
"factoryBeanName"
] | true
| 3
| 7.52
|
spring-projects/spring-framework
| 59,386
|
javadoc
| false
|
|
escapeHtml
|
function escapeHtml(string: string) {
if (__DEV__) {
checkHtmlStringCoercion(string);
}
const str = '' + string;
const match = matchHtmlRegExp.exec(str);
if (!match) {
return str;
}
let escape;
let html = '';
let index;
let lastIndex = 0;
for (index = match.index; index < str.length; index++) {
switch (str.charCodeAt(index)) {
case 34: // "
escape = '"';
break;
case 38: // &
escape = '&';
break;
case 39: // '
escape = '''; // modified from escape-html; used to be '''
break;
case 60: // <
escape = '<';
break;
case 62: // >
escape = '>';
break;
default:
continue;
}
if (lastIndex !== index) {
html += str.slice(lastIndex, index);
}
lastIndex = index + 1;
html += escape;
}
return lastIndex !== index ? html + str.slice(lastIndex, index) : html;
}
|
Escapes special characters and HTML entities in a given html string.
@param {string} string HTML string to escape for later insertion
@return {string}
@public
|
javascript
|
packages/react-dom-bindings/src/server/escapeTextForBrowser.js
| 53
|
[
"string"
] | false
| 6
| 6.24
|
facebook/react
| 241,750
|
jsdoc
| false
|
|
resetCachedUrlHandlers
|
private static void resetCachedUrlHandlers() {
try {
URL.setURLStreamHandlerFactory(null);
}
catch (Error ex) {
// Ignore
}
}
|
Reset any cached handlers just in case a jar protocol has already been used. We
reset the handler by trying to set a null {@link URLStreamHandlerFactory} which
should have no effect other than clearing the handlers cache.
|
java
|
loader/spring-boot-loader/src/main/java/org/springframework/boot/loader/net/protocol/Handlers.java
| 54
|
[] |
void
| true
| 2
| 6.88
|
spring-projects/spring-boot
| 79,428
|
javadoc
| false
|
max
|
public static double max(final double... array) {
Objects.requireNonNull(array, "array");
Validate.isTrue(array.length != 0, "Array cannot be empty.");
// Finds and returns max
double max = array[0];
for (int j = 1; j < array.length; j++) {
max = max(array[j], max);
}
return max;
}
|
Returns the maximum value in an array.
@param array an array, must not be null or empty.
@return the maximum value in the array.
@throws NullPointerException if {@code array} is {@code null}.
@throws IllegalArgumentException if {@code array} is empty.
@since 3.4 Changed signature from max(double[]) to max(double...)
|
java
|
src/main/java/org/apache/commons/lang3/math/IEEE754rUtils.java
| 41
|
[] | true
| 2
| 8.08
|
apache/commons-lang
| 2,896
|
javadoc
| false
|
|
flush
|
def flush(self, fsync: bool = False) -> None:
"""
Force all buffered modifications to be written to disk.
Parameters
----------
fsync : bool (default False)
call ``os.fsync()`` on the file handle to force writing to disk.
Notes
-----
Without ``fsync=True``, flushing may not guarantee that the OS writes
to disk. With fsync, the operation will block until the OS claims the
file has been written; however, other caching layers may still
interfere.
"""
if self._handle is not None:
self._handle.flush()
if fsync:
with suppress(OSError):
os.fsync(self._handle.fileno())
|
Force all buffered modifications to be written to disk.
Parameters
----------
fsync : bool (default False)
call ``os.fsync()`` on the file handle to force writing to disk.
Notes
-----
Without ``fsync=True``, flushing may not guarantee that the OS writes
to disk. With fsync, the operation will block until the OS claims the
file has been written; however, other caching layers may still
interfere.
|
python
|
pandas/io/pytables.py
| 782
|
[
"self",
"fsync"
] |
None
| true
| 3
| 6.88
|
pandas-dev/pandas
| 47,362
|
numpy
| false
|
isDefaultCandidate
|
public static boolean isDefaultCandidate(ConfigurableBeanFactory beanFactory, String beanName) {
try {
BeanDefinition mbd = beanFactory.getMergedBeanDefinition(beanName);
return (!(mbd instanceof AbstractBeanDefinition abd) || abd.isDefaultCandidate());
}
catch (NoSuchBeanDefinitionException ex) {
// A manually registered singleton instance not backed by a BeanDefinition.
return true;
}
}
|
Check the default-candidate status for the specified bean.
@param beanFactory the bean factory
@param beanName the name of the bean to check
@return whether the specified bean qualifies as a default candidate
@since 6.2.4
@see AbstractBeanDefinition#isDefaultCandidate()
|
java
|
spring-beans/src/main/java/org/springframework/beans/factory/support/AutowireUtils.java
| 292
|
[
"beanFactory",
"beanName"
] | true
| 3
| 7.6
|
spring-projects/spring-framework
| 59,386
|
javadoc
| false
|
|
of
|
static SslBundle of(@Nullable SslStoreBundle stores, @Nullable SslBundleKey key, @Nullable SslOptions options,
@Nullable String protocol, @Nullable SslManagerBundle managers) {
SslManagerBundle managersToUse = (managers != null) ? managers : SslManagerBundle.from(stores, key);
return new SslBundle() {
@Override
public SslStoreBundle getStores() {
return (stores != null) ? stores : SslStoreBundle.NONE;
}
@Override
public SslBundleKey getKey() {
return (key != null) ? key : SslBundleKey.NONE;
}
@Override
public SslOptions getOptions() {
return (options != null) ? options : SslOptions.NONE;
}
@Override
public String getProtocol() {
return (!StringUtils.hasText(protocol)) ? DEFAULT_PROTOCOL : protocol;
}
@Override
public SslManagerBundle getManagers() {
return managersToUse;
}
@Override
public String toString() {
ToStringCreator creator = new ToStringCreator(this);
creator.append("key", getKey());
creator.append("options", getOptions());
creator.append("protocol", getProtocol());
creator.append("stores", getStores());
return creator.toString();
}
};
}
|
Factory method to create a new {@link SslBundle} instance.
@param stores the stores or {@code null}
@param key the key or {@code null}
@param options the options or {@code null}
@param protocol the protocol or {@code null}
@param managers the managers or {@code null}
@return a new {@link SslBundle} instance
|
java
|
core/spring-boot/src/main/java/org/springframework/boot/ssl/SslBundle.java
| 145
|
[
"stores",
"key",
"options",
"protocol",
"managers"
] |
SslBundle
| true
| 6
| 7.92
|
spring-projects/spring-boot
| 79,428
|
javadoc
| false
|
findSource
|
private @Nullable File findSource(@Nullable Class<?> sourceClass) {
try {
ProtectionDomain domain = (sourceClass != null) ? sourceClass.getProtectionDomain() : null;
CodeSource codeSource = (domain != null) ? domain.getCodeSource() : null;
URL location = (codeSource != null) ? codeSource.getLocation() : null;
File source = (location != null) ? findSource(location) : null;
if (source != null && source.exists() && !isUnitTest()) {
return source.getAbsoluteFile();
}
}
catch (Exception ex) {
// Ignore
}
return null;
}
|
Create a new {@link ApplicationHome} instance for the specified source class.
@param sourceClass the source class or {@code null}
|
java
|
core/spring-boot/src/main/java/org/springframework/boot/system/ApplicationHome.java
| 93
|
[
"sourceClass"
] |
File
| true
| 9
| 6.88
|
spring-projects/spring-boot
| 79,428
|
javadoc
| false
|
__eq__
|
def __eq__(self, other: object) -> bool:
"""
Check whether 'other' is equal to self.
By default, 'other' is considered equal if either
* it's a string matching 'self.name'.
* it's an instance of this type and all of the attributes
in ``self._metadata`` are equal between `self` and `other`.
Parameters
----------
other : Any
Returns
-------
bool
"""
if isinstance(other, str):
try:
other = self.construct_from_string(other)
except TypeError:
return False
if isinstance(other, type(self)):
return all(
getattr(self, attr) == getattr(other, attr) for attr in self._metadata
)
return False
|
Check whether 'other' is equal to self.
By default, 'other' is considered equal if either
* it's a string matching 'self.name'.
* it's an instance of this type and all of the attributes
in ``self._metadata`` are equal between `self` and `other`.
Parameters
----------
other : Any
Returns
-------
bool
|
python
|
pandas/core/dtypes/base.py
| 121
|
[
"self",
"other"
] |
bool
| true
| 3
| 6.88
|
pandas-dev/pandas
| 47,362
|
numpy
| false
|
createDefaultKeyManagerFactory
|
private static KeyManagerFactory createDefaultKeyManagerFactory() {
String defaultAlgorithm = KeyManagerFactory.getDefaultAlgorithm();
KeyManagerFactory keyManagerFactory;
try {
keyManagerFactory = KeyManagerFactory.getInstance(defaultAlgorithm);
keyManagerFactory.init(null, null);
}
catch (NoSuchAlgorithmException | KeyStoreException | UnrecoverableKeyException ex) {
throw new IllegalStateException(
"Unable to create KeyManagerFactory for default '%s' algorithm".formatted(defaultAlgorithm), ex);
}
return keyManagerFactory;
}
|
Factory method to create a new {@link SslManagerBundle} using the given
{@link TrustManager TrustManagers} and the default {@link KeyManagerFactory}.
@param trustManagers the trust managers to use
@return a new {@link SslManagerBundle} instance
@since 3.5.0
|
java
|
core/spring-boot/src/main/java/org/springframework/boot/ssl/SslManagerBundle.java
| 170
|
[] |
KeyManagerFactory
| true
| 2
| 7.44
|
spring-projects/spring-boot
| 79,428
|
javadoc
| false
|
currentInvocation
|
public static MethodInvocation currentInvocation() throws IllegalStateException {
MethodInvocation mi = invocation.get();
if (mi == null) {
throw new IllegalStateException(
"No MethodInvocation found: Check that an AOP invocation is in progress and that the " +
"ExposeInvocationInterceptor is upfront in the interceptor chain. Specifically, note that " +
"advices with order HIGHEST_PRECEDENCE will execute before ExposeInvocationInterceptor! " +
"In addition, ExposeInvocationInterceptor and ExposeInvocationInterceptor.currentInvocation() " +
"must be invoked from the same thread.");
}
return mi;
}
|
Return the AOP Alliance MethodInvocation object associated with the current invocation.
@return the invocation object associated with the current invocation
@throws IllegalStateException if there is no AOP invocation in progress,
or if the ExposeInvocationInterceptor was not added to this interceptor chain
|
java
|
spring-aop/src/main/java/org/springframework/aop/interceptor/ExposeInvocationInterceptor.java
| 71
|
[] |
MethodInvocation
| true
| 2
| 7.12
|
spring-projects/spring-framework
| 59,386
|
javadoc
| false
|
validate_unique_buffer_names
|
def validate_unique_buffer_names(
nodes: list[BaseSchedulerNode],
name_to_buf: dict[str, SchedulerBuffer],
name_to_freeable_input_buf: dict[str, FreeableInputBuffer],
) -> None:
"""
Validate that for each node's output buffer, the name_to_buf mapping is correct.
For each output buffer buf, we should have name_to_buf[buf.get_name()] == buf.
Also validate that no buffer names overlap with freeable input buffer names.
Raises:
RuntimeError: If buffer name mapping is incorrect or names overlap
"""
for node in nodes:
for buf in node.get_outputs():
buf_name = buf.get_name()
# Check if buffer name exists in the mapping
if buf_name not in name_to_buf:
raise RuntimeError(
f"{buf_name} from {node.get_name()} is not found in name_to_buf mapping."
f" This indicates a missing buffer mapping."
)
# Check if the mapping points to the correct buffer object
if name_to_buf[buf_name] != buf:
raise RuntimeError(
f"Buffer name mapping is incorrect for '{buf_name}'."
f"Expected name_to_buf['{buf_name}'] to be {buf.debug_str()}"
f"but got {name_to_buf[buf_name].debug_str()}"
f"This indicates some buffers share the same name"
)
# Check if buffer name conflicts with freeable input buffer names
if buf_name in name_to_freeable_input_buf:
raise RuntimeError(
f"Buffer name conflict detected: '{buf_name}' from node {node.get_name()} "
f"is also used as a freeable input buffer name. "
)
|
Validate that for each node's output buffer, the name_to_buf mapping is correct.
For each output buffer buf, we should have name_to_buf[buf.get_name()] == buf.
Also validate that no buffer names overlap with freeable input buffer names.
Raises:
RuntimeError: If buffer name mapping is incorrect or names overlap
|
python
|
torch/_inductor/memory.py
| 843
|
[
"nodes",
"name_to_buf",
"name_to_freeable_input_buf"
] |
None
| true
| 6
| 6.88
|
pytorch/pytorch
| 96,034
|
unknown
| false
|
getValue
|
@Deprecated
@Override
public Boolean getValue() {
return Boolean.valueOf(this.value);
}
|
Gets the value as a Boolean instance.
@return the value as a Boolean, never null.
@deprecated Use {@link #get()}.
|
java
|
src/main/java/org/apache/commons/lang3/mutable/MutableBoolean.java
| 118
|
[] |
Boolean
| true
| 1
| 7.04
|
apache/commons-lang
| 2,896
|
javadoc
| false
|
availableBytes
|
private int availableBytes(int position, int size) {
// Cache current size in case concurrent write changes it
int currentSizeInBytes = sizeInBytes();
if (position < 0)
throw new IllegalArgumentException("Invalid position: " + position + " in read from " + this);
// position should always be relative to the start of the file hence compare with file size
// to verify if the position is within the file.
if (position > currentSizeInBytes)
throw new IllegalArgumentException("Slice from position " + position + " exceeds end position of " + this);
if (size < 0)
throw new IllegalArgumentException("Invalid size: " + size + " in read from " + this);
int end = this.start + position + size;
// Handle integer overflow or if end is beyond the end of the file
if (end < 0 || end > start + currentSizeInBytes)
end = this.start + currentSizeInBytes;
return end - (this.start + position);
}
|
Return a slice of records from this instance, the difference with {@link FileRecords#slice(int, int)} is
that the position is not necessarily on an offset boundary.
This method is reserved for cases where offset alignment is not necessary, such as in the replication of raft
snapshots.
@param position The start position to begin the read from
@param size The number of bytes after the start position to include
@return A unaligned slice of records on this message set limited based on the given position and size
|
java
|
clients/src/main/java/org/apache/kafka/common/record/FileRecords.java
| 166
|
[
"position",
"size"
] | true
| 6
| 7.92
|
apache/kafka
| 31,560
|
javadoc
| false
|
|
updateRequested
|
public synchronized boolean updateRequested() {
return this.needFullUpdate || this.needPartialUpdate;
}
|
Check whether an update has been explicitly requested.
@return true if an update was requested, false otherwise
|
java
|
clients/src/main/java/org/apache/kafka/clients/Metadata.java
| 265
|
[] | true
| 2
| 8
|
apache/kafka
| 31,560
|
javadoc
| false
|
|
isBlank
|
public static boolean isBlank(final CharSequence cs) {
final int strLen = length(cs);
for (int i = 0; i < strLen; i++) {
if (!Character.isWhitespace(cs.charAt(i))) {
return false;
}
}
return true;
}
|
Tests if a CharSequence is empty ({@code "")}, null, or contains only whitespace as defined by {@link Character#isWhitespace(char)}.
<pre>
StringUtils.isBlank(null) = true
StringUtils.isBlank("") = true
StringUtils.isBlank(" ") = true
StringUtils.isBlank("bob") = false
StringUtils.isBlank(" bob ") = false
</pre>
@param cs the CharSequence to check, may be null.
@return {@code true} if the CharSequence is null, empty or whitespace only.
@since 2.0
@since 3.0 Changed signature from isBlank(String) to isBlank(CharSequence)
|
java
|
src/main/java/org/apache/commons/lang3/StringUtils.java
| 3,507
|
[
"cs"
] | true
| 3
| 7.76
|
apache/commons-lang
| 2,896
|
javadoc
| false
|
|
equals
|
@Override
public boolean equals(@Nullable Object other) {
return (this == other || (other instanceof GenericBeanDefinition that &&
ObjectUtils.nullSafeEquals(this.parentName, that.parentName) && super.equals(other)));
}
|
Create a new GenericBeanDefinition as deep copy of the given
bean definition.
@param original the original bean definition to copy from
|
java
|
spring-beans/src/main/java/org/springframework/beans/factory/support/GenericBeanDefinition.java
| 88
|
[
"other"
] | true
| 4
| 6.4
|
spring-projects/spring-framework
| 59,386
|
javadoc
| false
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.