function_name
stringlengths
1
57
function_code
stringlengths
20
4.99k
documentation
stringlengths
50
2k
language
stringclasses
5 values
file_path
stringlengths
8
166
line_number
int32
4
16.7k
parameters
listlengths
0
20
return_type
stringlengths
0
131
has_type_hints
bool
2 classes
complexity
int32
1
51
quality_score
float32
6
9.68
repo_name
stringclasses
34 values
repo_stars
int32
2.9k
242k
docstring_style
stringclasses
7 values
is_async
bool
2 classes
currentName
@Override public String currentName() throws IOException { if (level() == 1) { return new StringBuilder(parentName).append(DELIMITER).append(delegate().currentName()).toString(); } return delegate().currentName(); }
Retrieves the name of the current field being parsed. If the current parsing level is 1, the returned field name will be constructed by prepending the parent name to the delegate's currentFieldName, otherwise just delegate. @return The current field name, potentially modified by prepending the parent name as a prefix. @throws IOException If an I/O error occurs during parsing.
java
libs/x-content/src/main/java/org/elasticsearch/xcontent/FlatteningXContentParser.java
41
[]
String
true
2
7.92
elastic/elasticsearch
75,680
javadoc
false
searchForTimestamp
public TimestampAndOffset searchForTimestamp(long targetTimestamp, int startingPosition, long startingOffset) { for (RecordBatch batch : batchesFrom(startingPosition)) { if (batch.maxTimestamp() >= targetTimestamp) { // We found a message for (Record record : batch) { long timestamp = record.timestamp(); if (timestamp >= targetTimestamp && record.offset() >= startingOffset) return new TimestampAndOffset(timestamp, record.offset(), maybeLeaderEpoch(batch.partitionLeaderEpoch())); } } } return null; }
Search forward for the first message that meets the following requirements: - Message's timestamp is greater than or equals to the targetTimestamp. - Message's position in the log file is greater than or equals to the startingPosition. - Message's offset is greater than or equals to the startingOffset. @param targetTimestamp The timestamp to search for. @param startingPosition The starting position to search. @param startingOffset The starting offset to search. @return The timestamp and offset of the message found. Null if no message is found.
java
clients/src/main/java/org/apache/kafka/common/record/FileRecords.java
356
[ "targetTimestamp", "startingPosition", "startingOffset" ]
TimestampAndOffset
true
4
8.24
apache/kafka
31,560
javadoc
false
compose
default FailableDoubleUnaryOperator<E> compose(final FailableDoubleUnaryOperator<E> before) { Objects.requireNonNull(before); return (final double v) -> applyAsDouble(before.applyAsDouble(v)); }
Returns a composed {@link FailableDoubleUnaryOperator} like {@link DoubleUnaryOperator#compose(DoubleUnaryOperator)}. @param before the operator to apply before this one. @return a composed {@link FailableDoubleUnaryOperator} like {@link DoubleUnaryOperator#compose(DoubleUnaryOperator)}. @throws NullPointerException if before is null. @see #andThen(FailableDoubleUnaryOperator)
java
src/main/java/org/apache/commons/lang3/function/FailableDoubleUnaryOperator.java
90
[ "before" ]
true
1
6
apache/commons-lang
2,896
javadoc
false
invokeAdviceMethodWithGivenArgs
protected @Nullable Object invokeAdviceMethodWithGivenArgs(@Nullable Object[] args) throws Throwable { @Nullable Object[] actualArgs = args; if (this.aspectJAdviceMethod.getParameterCount() == 0) { actualArgs = null; } Object aspectInstance = this.aspectInstanceFactory.getAspectInstance(); if (aspectInstance.equals(null)) { // Possibly a NullBean -> simply proceed if necessary. if (getJoinPoint() instanceof ProceedingJoinPoint pjp) { return pjp.proceed(); } return null; } try { ReflectionUtils.makeAccessible(this.aspectJAdviceMethod); return this.aspectJAdviceMethod.invoke(aspectInstance, actualArgs); } catch (IllegalArgumentException ex) { throw new AopInvocationException("Mismatch on arguments to advice method [" + this.aspectJAdviceMethod + "]; pointcut expression [" + this.pointcut.getPointcutExpression() + "]", ex); } catch (InvocationTargetException ex) { throw ex.getTargetException(); } }
Invoke the advice method. @param jpMatch the JoinPointMatch that matched this execution join point @param returnValue the return value from the method execution (may be null) @param ex the exception thrown by the method execution (may be null) @return the invocation result @throws Throwable in case of invocation failure
java
spring-aop/src/main/java/org/springframework/aop/aspectj/AbstractAspectJAdvice.java
633
[ "args" ]
Object
true
6
7.76
spring-projects/spring-framework
59,386
javadoc
false
and
public static boolean and(final boolean... array) { ObjectUtils.requireNonEmpty(array, "array"); for (final boolean element : array) { if (!element) { return false; } } return true; }
Performs an 'and' operation on a set of booleans. <pre> BooleanUtils.and(true, true) = true BooleanUtils.and(false, false) = false BooleanUtils.and(true, false) = false BooleanUtils.and(true, true, false) = false BooleanUtils.and(true, true, true) = true </pre> @param array an array of {@code boolean}s @return the result of the logical 'and' operation. That is {@code false} if any of the parameters is {@code false} and {@code true} otherwise. @throws NullPointerException if {@code array} is {@code null} @throws IllegalArgumentException if {@code array} is empty. @since 3.0.1
java
src/main/java/org/apache/commons/lang3/BooleanUtils.java
100
[]
true
2
7.76
apache/commons-lang
2,896
javadoc
false
replace
public String replace(final CharSequence source, final int offset, final int length) { if (source == null) { return null; } final StrBuilder buf = new StrBuilder(length).append(source, offset, length); substitute(buf, 0, length); return buf.toString(); }
Replaces all the occurrences of variables with their matching values from the resolver using the given source as a template. The source is not altered by this method. <p> Only the specified portion of the buffer will be processed. The rest of the buffer is not processed, and is not returned. </p> @param source the buffer to use as a template, not changed, null returns null. @param offset the start offset within the array, must be valid. @param length the length within the array to be processed, must be valid. @return the result of the replace operation. @since 3.2
java
src/main/java/org/apache/commons/lang3/text/StrSubstitutor.java
574
[ "source", "offset", "length" ]
String
true
2
8.24
apache/commons-lang
2,896
javadoc
false
toArray
public static <O> Collector<O, ?, O[]> toArray(final Class<O> elementType) { return new ArrayCollector<>(elementType); }
Returns a {@link Collector} that accumulates the input elements into a new array. @param elementType Type of an element in the array. @param <O> the type of the input elements. @return a {@link Collector} which collects all the input elements into an array, in encounter order.
java
src/main/java/org/apache/commons/lang3/Streams.java
548
[ "elementType" ]
true
1
6.64
apache/commons-lang
2,896
javadoc
false
post_compile
def post_compile(self, compiled_fn, aot_config, *, runtime_metadata) -> Callable: """ Given an output of the compiler, wrap it with information received from prologue. Args: compiled_fn: Callable after calling compiler_fn aot_config: AOTConfig after calling prologue runtime_metadata: ViewAndMutationMeta after calling all wrappers's pre_compile steps. Example: def wrapped_compiled_fn(args): # do something with args, aot_config, fw_metadata return compiled_fn(args) return wrapped_compiled_fn """ return compiled_fn
Given an output of the compiler, wrap it with information received from prologue. Args: compiled_fn: Callable after calling compiler_fn aot_config: AOTConfig after calling prologue runtime_metadata: ViewAndMutationMeta after calling all wrappers's pre_compile steps. Example: def wrapped_compiled_fn(args): # do something with args, aot_config, fw_metadata return compiled_fn(args) return wrapped_compiled_fn
python
torch/_functorch/_aot_autograd/schemas.py
1,103
[ "self", "compiled_fn", "aot_config", "runtime_metadata" ]
Callable
true
1
6.64
pytorch/pytorch
96,034
google
false
await
public boolean await(long timeout, TimeUnit unit) throws InterruptedException { return latch.await(timeout, unit); }
Await the completion of this request (up to the given time interval) @param timeout The maximum time to wait @param unit The unit for the max time @return true if the request completed, false if we timed out
java
clients/src/main/java/org/apache/kafka/clients/producer/internals/ProduceRequestResult.java
119
[ "timeout", "unit" ]
true
1
6.32
apache/kafka
31,560
javadoc
false
shape
def shape(self) -> tuple[int, int]: """ Return a tuple representing the dimensionality of the DataFrame. Unlike the `len()` method, which only returns the number of rows, `shape` provides both row and column counts, making it a more informative method for understanding dataset size. See Also -------- numpy.ndarray.shape : Tuple of array dimensions. Examples -------- >>> df = pd.DataFrame({"col1": [1, 2], "col2": [3, 4]}) >>> df.shape (2, 2) >>> df = pd.DataFrame({"col1": [1, 2], "col2": [3, 4], "col3": [5, 6]}) >>> df.shape (2, 3) """ return len(self.index), len(self.columns)
Return a tuple representing the dimensionality of the DataFrame. Unlike the `len()` method, which only returns the number of rows, `shape` provides both row and column counts, making it a more informative method for understanding dataset size. See Also -------- numpy.ndarray.shape : Tuple of array dimensions. Examples -------- >>> df = pd.DataFrame({"col1": [1, 2], "col2": [3, 4]}) >>> df.shape (2, 2) >>> df = pd.DataFrame({"col1": [1, 2], "col2": [3, 4], "col3": [5, 6]}) >>> df.shape (2, 3)
python
pandas/core/frame.py
1,045
[ "self" ]
tuple[int, int]
true
1
6.08
pandas-dev/pandas
47,362
unknown
false
nextCorrelationId
int nextCorrelationId() { if (SaslClientAuthenticator.isReserved(correlation)) { // the numeric overflow is fine as negative values is acceptable correlation = SaslClientAuthenticator.MAX_RESERVED_CORRELATION_ID + 1; } return correlation++; }
Return true if the ApiKey belongs to the Telemetry API.
java
clients/src/main/java/org/apache/kafka/clients/NetworkClient.java
1,480
[]
true
2
6.88
apache/kafka
31,560
javadoc
false
of
@Contract("_, !null -> !null") static @Nullable ConfigurationProperty of(ConfigurationPropertyName name, @Nullable OriginTrackedValue value) { if (value == null) { return null; } return new ConfigurationProperty(name, value.getValue(), value.getOrigin()); }
Return the value of the configuration property. @return the configuration property value
java
core/spring-boot/src/main/java/org/springframework/boot/context/properties/source/ConfigurationProperty.java
128
[ "name", "value" ]
ConfigurationProperty
true
2
6.4
spring-projects/spring-boot
79,428
javadoc
false
mapDefinitionInProjectIfFileInProject
function mapDefinitionInProjectIfFileInProject( definition: DocumentPosition, project: Project, ) { // If the definition is actually from the project, definition is correct as is if ( project.containsFile(toNormalizedPath(definition.fileName)) && !isLocationProjectReferenceRedirect(project, definition) ) { return definition; } }
@param projects Projects initially known to contain {@link initialLocation} @param defaultProject The default project containing {@link initialLocation} @param initialLocation Where the search operation was triggered @param getResultsForPosition This is where you plug in `findReferences`, `renameLocation`, etc @param forPositionInResult Given an item returned by {@link getResultsForPosition} enumerate the positions referred to by that result @returns In the common case where there's only one project, returns an array of results from {@link getResultsForPosition}. If multiple projects were searched - even if they didn't return results - the result will be a map from project to per-project results.
typescript
src/server/session.ts
844
[ "definition", "project" ]
false
3
6.48
microsoft/TypeScript
107,154
jsdoc
false
name
public String name() { return this.name; }
Get the name of the metric. @return the metric name; never null
java
clients/src/main/java/org/apache/kafka/common/MetricNameTemplate.java
78
[]
String
true
1
6.8
apache/kafka
31,560
javadoc
false
indexesOf
public static BitSet indexesOf(final float[] array, final float valueToFind) { return indexesOf(array, valueToFind, 0); }
Finds the indices of the given value in the array. <p>This method returns an empty BitSet for a {@code null} input array.</p> @param array the array to search for the object, may be {@code null}. @param valueToFind the value to find. @return a BitSet of all the indices of the value within the array, an empty BitSet if not found or {@code null} array input. @since 3.10
java
src/main/java/org/apache/commons/lang3/ArrayUtils.java
2,139
[ "array", "valueToFind" ]
BitSet
true
1
6.8
apache/commons-lang
2,896
javadoc
false
execute
private ClassicHttpResponse execute(HttpUriRequest request, URI url, String description) { try { HttpHost host = HttpHost.create(url); request.addHeader("User-Agent", "SpringBootCli/" + getClass().getPackage().getImplementationVersion()); return getHttp().executeOpen(host, request, null); } catch (IOException ex) { throw new ReportableException( "Failed to " + description + " from service at '" + url + "' (" + ex.getMessage() + ")"); } }
Retrieves the meta-data of the service at the specified URL. @param url the URL @return the response
java
cli/spring-boot-cli/src/main/java/org/springframework/boot/cli/command/init/InitializrService.java
184
[ "request", "url", "description" ]
ClassicHttpResponse
true
2
8.24
spring-projects/spring-boot
79,428
javadoc
false
validate_graph_acyclic
def validate_graph_acyclic(nodes: list[BaseSchedulerNode]) -> None: """ Validate that the graph is acyclic by checking predecessor relationships. Raises: RuntimeError: If a cycle is detected in the graph """ # DFS coloring scheme for cycle detection: # WHITE (0): Node has not been visited yet # GRAY (1): Node is currently being processed (in the recursion stack) # BLACK (2): Node has been completely processed (finished exploring all its predecessors) # A back edge (cycle) is detected when we encounter a GRAY node during DFS traversal WHITE, GRAY, BLACK = 0, 1, 2 color = dict.fromkeys(nodes, WHITE) path: list[BaseSchedulerNode] = [] # Track current DFS path def dfs_visit(node: BaseSchedulerNode) -> None: if color[node] == BLACK: return if color[node] == GRAY: path.append(node) path_info = " -> ".join([node.get_name() for node in path]) raise RuntimeError( f"Cycle detected in memory planning graph" f"Path containing cycle (i -> j: j is a dependency of i): {path_info} " f"This indicates invalid dependency relationships in the scheduler graph" ) color[node] = GRAY path.append(node) for pred_node in node.mpi_node.pred_nodes: assert pred_node != node dfs_visit(pred_node) path.pop() color[node] = BLACK # Start DFS from all unvisited nodes for node in nodes: if color[node] == WHITE: dfs_visit(node)
Validate that the graph is acyclic by checking predecessor relationships. Raises: RuntimeError: If a cycle is detected in the graph
python
torch/_inductor/memory.py
797
[ "nodes" ]
None
true
6
6.72
pytorch/pytorch
96,034
unknown
false
describe_execution
def describe_execution(self, execution_arn: str) -> dict: """ Describe a State Machine Execution. .. seealso:: - :external+boto3:py:meth:`SFN.Client.describe_execution` :param execution_arn: ARN of the State Machine Execution. :return: Dict with execution details. """ return self.get_conn().describe_execution(executionArn=execution_arn)
Describe a State Machine Execution. .. seealso:: - :external+boto3:py:meth:`SFN.Client.describe_execution` :param execution_arn: ARN of the State Machine Execution. :return: Dict with execution details.
python
providers/amazon/src/airflow/providers/amazon/aws/hooks/step_function.py
90
[ "self", "execution_arn" ]
dict
true
1
6.24
apache/airflow
43,597
sphinx
false
_extract_dtype_and_bounds
def _extract_dtype_and_bounds( *args: CuteDSLArg, ) -> tuple[Optional[torch.dtype], ValueRanges[sympy.Expr]]: """Extract dtype and bounds from CSEVariable arguments (including OpsValue wrappers).""" for arg in args: cse_var = CuteDSLOpOverrides._get_cse_var(arg) if cse_var is not None: return cse_var.dtype, cse_var.bounds return None, ValueRanges.unknown()
Extract dtype and bounds from CSEVariable arguments (including OpsValue wrappers).
python
torch/_inductor/codegen/cutedsl/cutedsl_op_overrides.py
88
[]
tuple[Optional[torch.dtype], ValueRanges[sympy.Expr]]
true
3
6.08
pytorch/pytorch
96,034
unknown
false
isNarrowableReference
function isNarrowableReference(expr: Expression): boolean { switch (expr.kind) { case SyntaxKind.Identifier: case SyntaxKind.ThisKeyword: case SyntaxKind.SuperKeyword: case SyntaxKind.MetaProperty: return true; case SyntaxKind.PropertyAccessExpression: case SyntaxKind.ParenthesizedExpression: case SyntaxKind.NonNullExpression: return isNarrowableReference((expr as PropertyAccessExpression | ParenthesizedExpression | NonNullExpression).expression); case SyntaxKind.ElementAccessExpression: return (isStringOrNumericLiteralLike((expr as ElementAccessExpression).argumentExpression) || isEntityNameExpression((expr as ElementAccessExpression).argumentExpression)) && isNarrowableReference((expr as ElementAccessExpression).expression); case SyntaxKind.BinaryExpression: return (expr as BinaryExpression).operatorToken.kind === SyntaxKind.CommaToken && isNarrowableReference((expr as BinaryExpression).right) || isAssignmentOperator((expr as BinaryExpression).operatorToken.kind) && isLeftHandSideExpression((expr as BinaryExpression).left); } return false; }
Declares a Symbol for the node and adds it to symbols. Reports errors for conflicting identifier names. @param symbolTable - The symbol table which node will be added to. @param parent - node's parent declaration. @param node - The declaration to be added to the symbol table @param includes - The SymbolFlags that node has in addition to its declaration type (eg: export, ambient, etc.) @param excludes - The flags which node cannot be declared alongside in a symbol table. Used to report forbidden declarations.
typescript
src/compiler/binder.ts
1,268
[ "expr" ]
true
6
6.72
microsoft/TypeScript
107,154
jsdoc
false
instantiate
Object instantiate(RootBeanDefinition bd, @Nullable String beanName, BeanFactory owner, @Nullable Object factoryBean, Method factoryMethod, @Nullable Object... args) throws BeansException;
Return an instance of the bean with the given name in this factory, creating it via the given factory method. @param bd the bean definition @param beanName the name of the bean when it is created in this context. The name can be {@code null} if we are autowiring a bean which doesn't belong to the factory. @param owner the owning BeanFactory @param factoryBean the factory bean instance to call the factory method on, or {@code null} in case of a static factory method @param factoryMethod the factory method to use @param args the factory method arguments to apply @return a bean instance for this bean definition @throws BeansException if the instantiation attempt failed
java
spring-beans/src/main/java/org/springframework/beans/factory/support/InstantiationStrategy.java
83
[ "bd", "beanName", "owner", "factoryBean", "factoryMethod" ]
Object
true
1
6.64
spring-projects/spring-framework
59,386
javadoc
false
checkedException
static Throwable checkedException(final Throwable ex) { Validate.isTrue(ExceptionUtils.isChecked(ex), "Not a checked exception: %s", ex); return ex; }
Tests whether the specified {@link Throwable} is a checked exception. If not, an exception is thrown. @param ex the {@link Throwable} to check @return a flag whether the passed in exception is a checked exception @throws IllegalArgumentException if the {@link Throwable} is not a checked exception
java
src/main/java/org/apache/commons/lang3/concurrent/ConcurrentUtils.java
108
[ "ex" ]
Throwable
true
1
6.64
apache/commons-lang
2,896
javadoc
false
readlink
function readlink(path, options, callback) { callback = makeCallback(typeof options === 'function' ? options : callback); options = getOptions(options); const req = new FSReqCallback(); req.oncomplete = callback; binding.readlink(getValidatedPath(path), options.encoding, req); }
Reads the contents of a symbolic link referred to by `path`. @param {string | Buffer | URL} path @param {{ encoding?: string; } | string} [options] @param {( err?: Error, linkString?: string | Buffer ) => any} callback @returns {void}
javascript
lib/fs.js
1,730
[ "path", "options", "callback" ]
false
2
6.08
nodejs/node
114,839
jsdoc
false
isIdentifierReference
function isIdentifierReference(node) { const parent = node.parent; switch (parent.type) { case 'LabeledStatement': case 'BreakStatement': case 'ContinueStatement': case 'ArrayPattern': case 'RestElement': case 'ImportSpecifier': case 'ImportDefaultSpecifier': case 'ImportNamespaceSpecifier': case 'CatchClause': return false; case 'FunctionDeclaration': case 'ComponentDeclaration': case 'HookDeclaration': case 'FunctionExpression': case 'ArrowFunctionExpression': case 'ClassDeclaration': case 'ClassExpression': case 'VariableDeclarator': return parent.id !== node; case 'Property': case 'PropertyDefinition': case 'MethodDefinition': return parent.key !== node || parent.computed || parent.shorthand; case 'AssignmentPattern': return parent.key !== node; default: return true; } }
Checks that a given identifier node is a reference or not. This is used to detect the first throwable node in a `try` block. @param {ASTNode} node An Identifier node to check. @returns {boolean} `true` if the node is a reference.
javascript
packages/eslint-plugin-react-hooks/src/code-path-analysis/code-path-analyzer.js
133
[ "node" ]
false
3
6.24
facebook/react
241,750
jsdoc
false
closeQuietly
public static void closeQuietly(@Nullable InputStream inputStream) { try { close(inputStream, true); } catch (IOException impossible) { throw new AssertionError(impossible); } }
Closes the given {@link InputStream}, logging any {@code IOException} that's thrown rather than propagating it. <p>While it's not safe in the general case to ignore exceptions that are thrown when closing an I/O resource, it should generally be safe in the case of a resource that's being used only for reading, such as an {@code InputStream}. Unlike with writable resources, there's no chance that a failure that occurs when closing the stream indicates a meaningful problem such as a failure to flush all bytes to the underlying resource. @param inputStream the input stream to be closed, or {@code null} in which case this method does nothing @since 17.0
java
android/guava/src/com/google/common/io/Closeables.java
112
[ "inputStream" ]
void
true
2
6.72
google/guava
51,352
javadoc
false
getStartOfZipContent
private static long getStartOfZipContent(FileDataBlock data, ZipEndOfCentralDirectoryRecord eocd, Zip64EndOfCentralDirectoryRecord zip64Eocd) throws IOException { long specifiedOffsetToStartOfCentralDirectory = (zip64Eocd != null) ? zip64Eocd.offsetToStartOfCentralDirectory() : Integer.toUnsignedLong(eocd.offsetToStartOfCentralDirectory()); long sizeOfCentralDirectoryAndEndRecords = getSizeOfCentralDirectoryAndEndRecords(eocd, zip64Eocd); long actualOffsetToStartOfCentralDirectory = data.size() - sizeOfCentralDirectoryAndEndRecords; return actualOffsetToStartOfCentralDirectory - specifiedOffsetToStartOfCentralDirectory; }
Returns the location in the data that the archive actually starts. For most files the archive data will start at 0, however, it is possible to have prefixed bytes (often used for startup scripts) at the beginning of the data. @param data the source data @param eocd the end of central directory record @param zip64Eocd the zip64 end of central directory record or {@code null} @return the offset within the data where the archive begins @throws IOException on I/O error
java
loader/spring-boot-loader/src/main/java/org/springframework/boot/loader/zip/ZipContent.java
636
[ "data", "eocd", "zip64Eocd" ]
true
2
8.08
spring-projects/spring-boot
79,428
javadoc
false
maybe_upcast_numeric_to_64bit
def maybe_upcast_numeric_to_64bit(arr: NumpyIndexT) -> NumpyIndexT: """ If array is an int/uint/float bit size lower than 64 bit, upcast it to 64 bit. Parameters ---------- arr : ndarray or ExtensionArray Returns ------- ndarray or ExtensionArray """ dtype = arr.dtype if dtype.kind == "i" and dtype != np.int64: return arr.astype(np.int64) elif dtype.kind == "u" and dtype != np.uint64: return arr.astype(np.uint64) elif dtype.kind == "f" and dtype != np.float64: return arr.astype(np.float64) else: return arr
If array is an int/uint/float bit size lower than 64 bit, upcast it to 64 bit. Parameters ---------- arr : ndarray or ExtensionArray Returns ------- ndarray or ExtensionArray
python
pandas/core/dtypes/cast.py
395
[ "arr" ]
NumpyIndexT
true
8
6.56
pandas-dev/pandas
47,362
numpy
false
allowCoreThreadTimeOut
public ThreadPoolTaskExecutorBuilder allowCoreThreadTimeOut(boolean allowCoreThreadTimeOut) { return new ThreadPoolTaskExecutorBuilder(this.queueCapacity, this.corePoolSize, this.maxPoolSize, allowCoreThreadTimeOut, this.keepAlive, this.acceptTasksAfterContextClose, this.awaitTermination, this.awaitTerminationPeriod, this.threadNamePrefix, this.taskDecorator, this.customizers); }
Set whether core threads are allowed to time out. When enabled, this enables dynamic growing and shrinking of the pool. @param allowCoreThreadTimeOut if core threads are allowed to time out @return a new builder instance
java
core/spring-boot/src/main/java/org/springframework/boot/task/ThreadPoolTaskExecutorBuilder.java
152
[ "allowCoreThreadTimeOut" ]
ThreadPoolTaskExecutorBuilder
true
1
6.72
spring-projects/spring-boot
79,428
javadoc
false
newIdentityHashMap
public static <K extends @Nullable Object, V extends @Nullable Object> IdentityHashMap<K, V> newIdentityHashMap() { return new IdentityHashMap<>(); }
Creates an {@code IdentityHashMap} instance. <p><b>Note:</b> this method is now unnecessary and should be treated as deprecated. Instead, use the {@code IdentityHashMap} constructor directly, taking advantage of <a href="https://docs.oracle.com/javase/tutorial/java/generics/genTypeInference.html#type-inference-instantiation">"diamond" syntax</a>. @return a new, empty {@code IdentityHashMap}
java
android/guava/src/com/google/common/collect/Maps.java
454
[]
true
1
6
google/guava
51,352
javadoc
false
unmerge_node
def unmerge_node(self, node: fx.Node) -> None: """Remove a node from its merge set, making it singleton.""" old_set = self.merge_sets[node] # If already singleton, nothing to do if len(old_set) == 1: return # Remove from old set old_set.remove(node) # Make node singleton self.merge_sets[node] = OrderedSet([node])
Remove a node from its merge set, making it singleton.
python
torch/_inductor/augmented_graph_helper.py
60
[ "self", "node" ]
None
true
2
6
pytorch/pytorch
96,034
unknown
false
nanos
public long nanos() { return timeUnit.toNanos(duration); }
@return the number of {@link #timeUnit()} units this value contains
java
libs/core/src/main/java/org/elasticsearch/core/TimeValue.java
114
[]
true
1
6
elastic/elasticsearch
75,680
javadoc
false
load
private T load() throws IOException { EmbeddedImplClassLoader loader = EmbeddedImplClassLoader.getInstance(parentLoader, providerName); if (loadAsProviderModule) { return loadAsModule(loader); } else { return loadAsNonModule(loader); } }
Checks that the module of the given type declares that it uses said type.
java
libs/core/src/main/java/org/elasticsearch/core/internal/provider/ProviderLocator.java
103
[]
T
true
2
6.88
elastic/elasticsearch
75,680
javadoc
false
collect
public <A, R> R collect(final Collector<? super T, A, R> collector) { makeTerminated(); return stream().collect(collector); }
Performs a mutable reduction operation on the elements of this stream using a {@link Collector}. A {@link Collector} encapsulates the functions used as arguments to {@link #collect(Supplier, BiConsumer, BiConsumer)}, allowing for reuse of collection strategies and composition of collect operations such as multiple-level grouping or partitioning. <p> If the underlying stream is parallel, and the {@link Collector} is concurrent, and either the stream is unordered or the collector is unordered, then a concurrent reduction will be performed (see {@link Collector} for details on concurrent reduction.) </p> <p> This is a terminal operation. </p> <p> When executed in parallel, multiple intermediate results may be instantiated, populated, and merged so as to maintain isolation of mutable data structures. Therefore, even when executed in parallel with non-thread-safe data structures (such as {@link ArrayList}), no additional synchronization is needed for a parallel reduction. </p> Note The following will accumulate strings into an ArrayList: <pre> {@code List<String> asList = stringStream.collect(Collectors.toList()); } </pre> <p> The following will classify {@code Person} objects by city: </p> <pre> {@code Map<String, List<Person>> peopleByCity = personStream.collect(Collectors.groupingBy(Person::getCity)); } </pre> <p> The following will classify {@code Person} objects by state and city, cascading two {@link Collector}s together: </p> <pre> {@code Map<String, Map<String, List<Person>>> peopleByStateAndCity = personStream .collect(Collectors.groupingBy(Person::getState, Collectors.groupingBy(Person::getCity))); } </pre> @param <R> the type of the result @param <A> the intermediate accumulation type of the {@link Collector} @param collector the {@link Collector} describing the reduction @return the result of the reduction @see #collect(Supplier, BiConsumer, BiConsumer) @see Collectors
java
src/main/java/org/apache/commons/lang3/stream/Streams.java
296
[ "collector" ]
R
true
1
7.76
apache/commons-lang
2,896
javadoc
false
nul
@SuppressWarnings("unchecked") public static <T> Supplier<T> nul() { return NUL; }
Gets the singleton supplier that always returns null. <p> This supplier never throws an exception. </p> @param <T> Supplied type. @return The NUL singleton. @since 3.14.0
java
src/main/java/org/apache/commons/lang3/function/Suppliers.java
59
[]
true
1
6.96
apache/commons-lang
2,896
javadoc
false
drainValueReferenceQueue
@GuardedBy("this") void drainValueReferenceQueue() { Reference<? extends V> ref; int i = 0; while ((ref = valueReferenceQueue.poll()) != null) { @SuppressWarnings("unchecked") ValueReference<K, V> valueReference = (ValueReference<K, V>) ref; map.reclaimValue(valueReference); if (++i == DRAIN_MAX) { break; } } }
Drain the key and value reference queues, cleaning up internal entries containing garbage collected keys or values.
java
android/guava/src/com/google/common/cache/LocalCache.java
2,403
[]
void
true
3
6.88
google/guava
51,352
javadoc
false
getAggregateBinder
private @Nullable AggregateBinder<?> getAggregateBinder(Bindable<?> target, Context context) { Class<?> resolvedType = target.getType().resolve(Object.class); if (Map.class.isAssignableFrom(resolvedType)) { return new MapBinder(context); } if (Collection.class.isAssignableFrom(resolvedType)) { return new CollectionBinder(context); } if (target.getType().isArray()) { return new ArrayBinder(context); } return null; }
Bind the specified target {@link Bindable} using this binder's {@link ConfigurationPropertySource property sources} or create a new instance using the type of the {@link Bindable} if the result of the binding is {@code null}. @param name the configuration property name to bind @param target the target bindable @param handler the bind handler (may be {@code null}) @param <T> the bound or created type @return the bound or created object @since 2.2.0
java
core/spring-boot/src/main/java/org/springframework/boot/context/properties/bind/Binder.java
448
[ "target", "context" ]
true
4
7.92
spring-projects/spring-boot
79,428
javadoc
false
parse
public static FetchSnapshotRequest parse(Readable readable, short version) { return new FetchSnapshotRequest(new FetchSnapshotRequestData(readable, version), version); }
Finds the PartitionSnapshot for a given topic partition. @param data the fetch snapshot request data @param topicPartition the topic partition to find @return the request partition snapshot if found, otherwise an empty Optional
java
clients/src/main/java/org/apache/kafka/common/requests/FetchSnapshotRequest.java
70
[ "readable", "version" ]
FetchSnapshotRequest
true
1
6.32
apache/kafka
31,560
javadoc
false
h3ToNotIntersectingChildrenSize
public static int h3ToNotIntersectingChildrenSize(String h3Address) { return h3ToNotIntersectingChildrenSize(stringToH3(h3Address)); }
h3ToNotIntersectingChildrenSize returns the exact number of children intersecting the given parent but not part of the children set. @param h3Address H3 address to find the number of children. @return int Exact number of children, 5 for Pentagons and 6 for hexagons,
java
libs/h3/src/main/java/org/elasticsearch/h3/H3.java
527
[ "h3Address" ]
true
1
6.48
elastic/elasticsearch
75,680
javadoc
false
setTo
@Override public void setTo(String... to) throws MailParseException { try { this.helper.setTo(to); } catch (MessagingException ex) { throw new MailParseException(ex); } }
Return the JavaMail MimeMessage that this MimeMailMessage is based on.
java
spring-context-support/src/main/java/org/springframework/mail/javamail/MimeMailMessage.java
106
[]
void
true
2
6.4
spring-projects/spring-framework
59,386
javadoc
false
flip
public FluentBitSet flip(final int bitIndex) { bitSet.flip(bitIndex); return this; }
Sets the bit at the specified index to the complement of its current value. @param bitIndex the index of the bit to flip. @throws IndexOutOfBoundsException if the specified index is negative. @return {@code this} instance.
java
src/main/java/org/apache/commons/lang3/util/FluentBitSet.java
215
[ "bitIndex" ]
FluentBitSet
true
1
6.64
apache/commons-lang
2,896
javadoc
false
anyExists
private boolean anyExists(ResourcePatternResolver resolver) throws IOException { String searchPath = this.path; if (searchPath.startsWith(ResourceLoader.CLASSPATH_URL_PREFIX)) { searchPath = ResourcePatternResolver.CLASSPATH_ALL_URL_PREFIX + searchPath.substring(ResourceLoader.CLASSPATH_URL_PREFIX.length()); } if (searchPath.startsWith(ResourcePatternResolver.CLASSPATH_ALL_URL_PREFIX)) { Resource[] resources = resolver.getResources(searchPath); for (Resource resource : resources) { if (resource.exists()) { return true; } } } return false; }
Determine if this template location exists using the specified {@link ResourcePatternResolver}. @param resolver the resolver used to test if the location exists @return {@code true} if the location exists.
java
core/spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/template/TemplateLocation.java
60
[ "resolver" ]
true
4
7.44
spring-projects/spring-boot
79,428
javadoc
false
_is_device_backend_autoload_enabled
def _is_device_backend_autoload_enabled() -> builtins.bool: """ Whether autoloading out-of-the-tree device extensions is enabled. The switch depends on the value of the environment variable `TORCH_DEVICE_BACKEND_AUTOLOAD`. Returns: bool: Whether to enable autoloading the extensions. Enabled by default. Examples: >>> torch._is_device_backend_autoload_enabled() True """ # enabled by default return os.getenv("TORCH_DEVICE_BACKEND_AUTOLOAD", "1") == "1"
Whether autoloading out-of-the-tree device extensions is enabled. The switch depends on the value of the environment variable `TORCH_DEVICE_BACKEND_AUTOLOAD`. Returns: bool: Whether to enable autoloading the extensions. Enabled by default. Examples: >>> torch._is_device_backend_autoload_enabled() True
python
torch/__init__.py
2,945
[]
builtins.bool
true
1
8
pytorch/pytorch
96,034
unknown
false
min_scalar_type
def min_scalar_type(a, /): """ min_scalar_type(a, /) For scalar ``a``, returns the data type with the smallest size and smallest scalar kind which can hold its value. For non-scalar array ``a``, returns the vector's dtype unmodified. Floating point values are not demoted to integers, and complex values are not demoted to floats. Parameters ---------- a : scalar or array_like The value whose minimal data type is to be found. Returns ------- out : dtype The minimal data type. See Also -------- result_type, promote_types, dtype, can_cast Examples -------- >>> import numpy as np >>> np.min_scalar_type(10) dtype('uint8') >>> np.min_scalar_type(-260) dtype('int16') >>> np.min_scalar_type(3.1) dtype('float16') >>> np.min_scalar_type(1e50) dtype('float64') >>> np.min_scalar_type(np.arange(4, dtype=np.float64)) dtype('float64') """ return (a,)
min_scalar_type(a, /) For scalar ``a``, returns the data type with the smallest size and smallest scalar kind which can hold its value. For non-scalar array ``a``, returns the vector's dtype unmodified. Floating point values are not demoted to integers, and complex values are not demoted to floats. Parameters ---------- a : scalar or array_like The value whose minimal data type is to be found. Returns ------- out : dtype The minimal data type. See Also -------- result_type, promote_types, dtype, can_cast Examples -------- >>> import numpy as np >>> np.min_scalar_type(10) dtype('uint8') >>> np.min_scalar_type(-260) dtype('int16') >>> np.min_scalar_type(3.1) dtype('float16') >>> np.min_scalar_type(1e50) dtype('float64') >>> np.min_scalar_type(np.arange(4, dtype=np.float64)) dtype('float64')
python
numpy/_core/multiarray.py
663
[ "a" ]
false
1
6
numpy/numpy
31,054
numpy
false
_binop
def _binop(self, other: Series, func, level=None, fill_value=None) -> Series: """ Perform generic binary operation with optional fill value. Parameters ---------- other : Series func : binary operator fill_value : float or object Value to substitute for NA/null values. If both Series are NA in a location, the result will be NA regardless of the passed fill value. level : int or level name, default None Broadcast across a level, matching Index values on the passed MultiIndex level. Returns ------- Series """ this = self if not self.index.equals(other.index): this, other = self.align(other, level=level, join="outer") this_vals, other_vals = ops.fill_binop(this._values, other._values, fill_value) with np.errstate(all="ignore"): result = func(this_vals, other_vals) name = ops.get_op_result_name(self, other) out = this._construct_result(result, name, other) return cast(Series, out)
Perform generic binary operation with optional fill value. Parameters ---------- other : Series func : binary operator fill_value : float or object Value to substitute for NA/null values. If both Series are NA in a location, the result will be NA regardless of the passed fill value. level : int or level name, default None Broadcast across a level, matching Index values on the passed MultiIndex level. Returns ------- Series
python
pandas/core/series.py
6,705
[ "self", "other", "func", "level", "fill_value" ]
Series
true
2
6.56
pandas-dev/pandas
47,362
numpy
false
getTypeDescriptor
public TypeDescriptor getTypeDescriptor() { TypeDescriptor typeDescriptor = this.typeDescriptor; if (typeDescriptor == null) { typeDescriptor = (this.field != null ? new TypeDescriptor(getResolvableType(), getDependencyType(), getAnnotations()) : new TypeDescriptor(obtainMethodParameter())); this.typeDescriptor = typeDescriptor; } return typeDescriptor; }
Build a {@link TypeDescriptor} object for the wrapped parameter/field. @since 5.1.4
java
spring-beans/src/main/java/org/springframework/beans/factory/config/DependencyDescriptor.java
282
[]
TypeDescriptor
true
3
6.4
spring-projects/spring-framework
59,386
javadoc
false
createPropertyResolver
public static ConfigurablePropertyResolver createPropertyResolver(MutablePropertySources propertySources) { return new ConfigurationPropertySourcesPropertyResolver(propertySources); }
Create a new {@link PropertyResolver} that resolves property values against an underlying set of {@link PropertySources}. Provides an {@link ConfigurationPropertySource} aware and optimized alternative to {@link PropertySourcesPropertyResolver}. @param propertySources the set of {@link PropertySource} objects to use @return a {@link ConfigurablePropertyResolver} implementation @since 2.5.0
java
core/spring-boot/src/main/java/org/springframework/boot/context/properties/source/ConfigurationPropertySources.java
61
[ "propertySources" ]
ConfigurablePropertyResolver
true
1
6.16
spring-projects/spring-boot
79,428
javadoc
false
_load_serializers
def _load_serializers(): """ Register built-in and settings-defined serializers. This is done lazily so that user code has a chance to (e.g.) set up custom settings without needing to be careful of import order. """ global _serializers serializers = {} for format in BUILTIN_SERIALIZERS: register_serializer(format, BUILTIN_SERIALIZERS[format], serializers) if hasattr(settings, "SERIALIZATION_MODULES"): for format in settings.SERIALIZATION_MODULES: register_serializer( format, settings.SERIALIZATION_MODULES[format], serializers ) _serializers = serializers
Register built-in and settings-defined serializers. This is done lazily so that user code has a chance to (e.g.) set up custom settings without needing to be careful of import order.
python
django/core/serializers/__init__.py
149
[]
false
4
6.24
django/django
86,204
unknown
false
getMigrations
private List<PropertyMigration> getMigrations(ConfigurationPropertySource propertySource, ConfigurationMetadataProperty metadataProperty) { ConfigurationPropertyName propertyName = asConfigurationPropertyName(metadataProperty); List<PropertyMigration> migrations = new ArrayList<>(); addMigration(propertySource, metadataProperty, propertyName, false, migrations); if (isMapType(metadataProperty) && propertySource instanceof IterableConfigurationPropertySource iterable) { iterable.stream() .filter(propertyName::isAncestorOf) .forEach((ancestorPropertyName) -> addMigration(propertySource, metadataProperty, ancestorPropertyName, true, migrations)); } return migrations; }
Analyse the {@link ConfigurableEnvironment environment} and attempt to rename legacy properties if a replacement exists. @return a report of the migration
java
core/spring-boot-properties-migrator/src/main/java/org/springframework/boot/context/properties/migrator/PropertiesMigrationReporter.java
120
[ "propertySource", "metadataProperty" ]
true
3
6.08
spring-projects/spring-boot
79,428
javadoc
false
geterrcall
def geterrcall(): """ Return the current callback function used on floating-point errors. When the error handling for a floating-point error (one of "divide", "over", "under", or "invalid") is set to 'call' or 'log', the function that is called or the log instance that is written to is returned by `geterrcall`. This function or log instance has been set with `seterrcall`. Returns ------- errobj : callable, log instance or None The current error handler. If no handler was set through `seterrcall`, ``None`` is returned. See Also -------- seterrcall, seterr, geterr Notes ----- For complete documentation of the types of floating-point exceptions and treatment options, see `seterr`. **Concurrency note:** see :ref:`fp_error_handling` Examples -------- >>> import numpy as np >>> np.geterrcall() # we did not yet set a handler, returns None >>> orig_settings = np.seterr(all='call') >>> def err_handler(type, flag): ... print("Floating point error (%s), with flag %s" % (type, flag)) >>> old_handler = np.seterrcall(err_handler) >>> np.array([1, 2, 3]) / 0.0 Floating point error (divide by zero), with flag 1 array([inf, inf, inf]) >>> cur_handler = np.geterrcall() >>> cur_handler is err_handler True >>> old_settings = np.seterr(**orig_settings) # restore original >>> old_handler = np.seterrcall(None) # restore original """ return _get_extobj_dict()["call"]
Return the current callback function used on floating-point errors. When the error handling for a floating-point error (one of "divide", "over", "under", or "invalid") is set to 'call' or 'log', the function that is called or the log instance that is written to is returned by `geterrcall`. This function or log instance has been set with `seterrcall`. Returns ------- errobj : callable, log instance or None The current error handler. If no handler was set through `seterrcall`, ``None`` is returned. See Also -------- seterrcall, seterr, geterr Notes ----- For complete documentation of the types of floating-point exceptions and treatment options, see `seterr`. **Concurrency note:** see :ref:`fp_error_handling` Examples -------- >>> import numpy as np >>> np.geterrcall() # we did not yet set a handler, returns None >>> orig_settings = np.seterr(all='call') >>> def err_handler(type, flag): ... print("Floating point error (%s), with flag %s" % (type, flag)) >>> old_handler = np.seterrcall(err_handler) >>> np.array([1, 2, 3]) / 0.0 Floating point error (divide by zero), with flag 1 array([inf, inf, inf]) >>> cur_handler = np.geterrcall() >>> cur_handler is err_handler True >>> old_settings = np.seterr(**orig_settings) # restore original >>> old_handler = np.seterrcall(None) # restore original
python
numpy/_core/_ufunc_config.py
329
[]
false
1
6.32
numpy/numpy
31,054
unknown
false
evalTypeScript
function evalTypeScript(name, source, breakFirstLine, print, shouldLoadESM = false) { const origModule = globalThis.module; // Set e.g. when called from the REPL. const module = createModule(name); const baseUrl = pathToFileURL(module.filename).href; if (shouldUseModuleEntryPoint(name, source)) { return evalTypeScriptModuleEntryPoint(source, print); } let compiledScript; // This variable can be modified if the source code is stripped. let sourceToRun = source; try { compiledScript = compileScript(name, source, baseUrl); } catch (originalError) { try { sourceToRun = stripTypeScriptModuleTypes(source, kEvalTag); // Retry the CJS/ESM syntax detection after stripping the types. if (shouldUseModuleEntryPoint(name, sourceToRun)) { return evalTypeScriptModuleEntryPoint(source, print); } // If the ContextifiedScript was successfully created, execute it. // outside the try-catch block to avoid catching runtime errors. compiledScript = compileScript(name, sourceToRun, baseUrl); } catch (tsError) { // If it's invalid or unsupported TypeScript syntax, rethrow the original error // with the TypeScript error message added to the stack. if (tsError.code === 'ERR_INVALID_TYPESCRIPT_SYNTAX' || tsError.code === 'ERR_UNSUPPORTED_TYPESCRIPT_SYNTAX') { originalError.stack = decorateCJSErrorWithTSMessage(originalError.stack, tsError.message); throw originalError; } throw tsError; } } const evalFunction = () => runScriptInContext(name, sourceToRun, breakFirstLine, print, module, baseUrl, compiledScript, origModule); if (shouldLoadESM) { return require('internal/modules/run_main').runEntryPointWithESMLoader(evalFunction); } evalFunction(); }
Wrapper of evalScript This function wraps the evaluation of the source code in a try-catch block. If the source code fails to be evaluated, it will retry evaluating the source code with the TypeScript parser. If the source code fails to be evaluated with the TypeScript parser, it will rethrow the original error, adding the TypeScript error message to the stack. This way we don't change the behavior of the code, but we provide a better error message in case of a typescript error. @param {string} name The name of the file @param {string} source The source code to evaluate @param {boolean} breakFirstLine Whether to break on the first line @param {boolean} print If the result should be printed @param {boolean} shouldLoadESM If the code should be loaded as an ESM module @returns {void}
javascript
lib/internal/process/execution.js
247
[ "name", "source", "breakFirstLine", "print" ]
false
8
6.08
nodejs/node
114,839
jsdoc
false
debugger
def debugger(): """Return the current debugger instance, or create if none.""" rdb = _current[0] if rdb is None or not rdb.active: rdb = _current[0] = Rdb() return rdb
Return the current debugger instance, or create if none.
python
celery/contrib/rdb.py
175
[]
false
3
6.08
celery/celery
27,741
unknown
false
skipPast
public void skipPast(String thru) { int thruStart = this.in.indexOf(thru, this.pos); this.pos = thruStart == -1 ? this.in.length() : (thruStart + thru.length()); }
Returns the current position and the entire input string. @return the current position and the entire input string.
java
cli/spring-boot-cli/src/json-shade/java/org/springframework/boot/cli/json/JSONTokener.java
518
[ "thru" ]
void
true
2
6.8
spring-projects/spring-boot
79,428
javadoc
false
escapeHtml3
public static final String escapeHtml3(final String input) { return ESCAPE_HTML3.translate(input); }
Escapes the characters in a {@link String} using HTML entities. <p>Supports only the HTML 3.0 entities.</p> @param input the {@link String} to escape, may be null @return a new escaped {@link String}, {@code null} if null string input @since 3.0
java
src/main/java/org/apache/commons/lang3/StringEscapeUtils.java
470
[ "input" ]
String
true
1
6.96
apache/commons-lang
2,896
javadoc
false
calculate
def calculate(cls, dags: dict[str, LazyDeserializedDAG], *, session: Session) -> Self: """ Query the run counts from the db. :param dags: dict of dags to query """ # Skip these queries entirely if no DAGs can be scheduled to save time. if not any(dag.timetable.can_be_scheduled for dag in dags.values()): return cls({}, {}) latest_runs = {run.dag_id: run for run in session.scalars(_get_latest_runs_stmt(dag_ids=dags.keys()))} active_run_counts = DagRun.active_runs_of_dags( dag_ids=dags.keys(), exclude_backfill=True, session=session, ) return cls(latest_runs, active_run_counts)
Query the run counts from the db. :param dags: dict of dags to query
python
airflow-core/src/airflow/dag_processing/collection.py
144
[ "cls", "dags", "session" ]
Self
true
2
6.88
apache/airflow
43,597
sphinx
false
createName
function createName(node: PropertyName) { if (isIdentifier(node) && node.escapedText === "constructor") { return factory.createComputedPropertyName(factory.createStringLiteral(idText(node), quotePreference === QuotePreference.Single)); } return getSynthesizedDeepClone(node, /*includeTrivia*/ false); }
(#49811) Note that there are cases in which the symbol declaration is not present. For example, in the code below both `MappedIndirect.ax` and `MappedIndirect.ay` have no declaration node attached (due to their mapped-type parent): ```ts type Base = { ax: number; ay: string }; type BaseKeys = keyof Base; type MappedIndirect = { [K in BaseKeys]: boolean }; ``` In such cases, we assume the declaration to be a `PropertySignature`.
typescript
src/services/codefixes/helpers.ts
349
[ "node" ]
false
3
6.48
microsoft/TypeScript
107,154
jsdoc
false
isAllBlank
public static boolean isAllBlank(final CharSequence... css) { if (ArrayUtils.isEmpty(css)) { return true; } for (final CharSequence cs : css) { if (isNotBlank(cs)) { return false; } } return true; }
Tests if all of the CharSequences are empty (""), null or whitespace only. <p> Whitespace is defined by {@link Character#isWhitespace(char)}. </p> <pre> StringUtils.isAllBlank(null) = true StringUtils.isAllBlank(null, "foo") = false StringUtils.isAllBlank(null, null) = true StringUtils.isAllBlank("", "bar") = false StringUtils.isAllBlank("bob", "") = false StringUtils.isAllBlank(" bob ", null) = false StringUtils.isAllBlank(" ", "bar") = false StringUtils.isAllBlank("foo", "bar") = false StringUtils.isAllBlank(new String[] {}) = true </pre> @param css the CharSequences to check, may be null or empty. @return {@code true} if all of the CharSequences are empty or null or whitespace only. @since 3.6
java
src/main/java/org/apache/commons/lang3/StringUtils.java
3,129
[]
true
3
7.76
apache/commons-lang
2,896
javadoc
false
measurable
public Measurable measurable() { if (isMeasurable()) return (Measurable) metricValueProvider; else throw new IllegalStateException("Not a measurable: " + this.metricValueProvider.getClass()); }
Get the underlying metric provider, which should be a {@link Measurable} @return Return the metric provider @throws IllegalStateException if the underlying metric is not a {@link Measurable}.
java
clients/src/main/java/org/apache/kafka/common/metrics/KafkaMetric.java
96
[]
Measurable
true
2
7.28
apache/kafka
31,560
javadoc
false
make_swiss_roll
def make_swiss_roll(n_samples=100, *, noise=0.0, random_state=None, hole=False): """Generate a swiss roll dataset. Read more in the :ref:`User Guide <sample_generators>`. Adapted with permission from Stephen Marsland's code [1]_. Parameters ---------- n_samples : int, default=100 The number of sample points on the Swiss Roll. noise : float, default=0.0 The standard deviation of the gaussian noise. random_state : int, RandomState instance or None, default=None Determines random number generation for dataset creation. Pass an int for reproducible output across multiple function calls. See :term:`Glossary <random_state>`. hole : bool, default=False If True generates the swiss roll with hole dataset. Returns ------- X : ndarray of shape (n_samples, 3) The points. t : ndarray of shape (n_samples,) The univariate position of the sample according to the main dimension of the points in the manifold. Notes ----- The algorithm is from Marsland [1]_. References ---------- .. [1] S. Marsland, "Machine Learning: An Algorithmic Perspective", 2nd edition, Chapter 6, 2014. https://homepages.ecs.vuw.ac.nz/~marslast/Code/Ch6/lle.py Examples -------- >>> from sklearn.datasets import make_swiss_roll >>> X, t = make_swiss_roll(noise=0.05, random_state=0) >>> X.shape (100, 3) >>> t.shape (100,) """ generator = check_random_state(random_state) if not hole: t = 1.5 * np.pi * (1 + 2 * generator.uniform(size=n_samples)) y = 21 * generator.uniform(size=n_samples) else: corners = np.array( [[np.pi * (1.5 + i), j * 7] for i in range(3) for j in range(3)] ) corners = np.delete(corners, 4, axis=0) corner_index = generator.choice(8, n_samples) parameters = generator.uniform(size=(2, n_samples)) * np.array([[np.pi], [7]]) t, y = corners[corner_index].T + parameters x = t * np.cos(t) z = t * np.sin(t) X = np.vstack((x, y, z)) X += noise * generator.standard_normal(size=(3, n_samples)) X = X.T t = np.squeeze(t) return X, t
Generate a swiss roll dataset. Read more in the :ref:`User Guide <sample_generators>`. Adapted with permission from Stephen Marsland's code [1]_. Parameters ---------- n_samples : int, default=100 The number of sample points on the Swiss Roll. noise : float, default=0.0 The standard deviation of the gaussian noise. random_state : int, RandomState instance or None, default=None Determines random number generation for dataset creation. Pass an int for reproducible output across multiple function calls. See :term:`Glossary <random_state>`. hole : bool, default=False If True generates the swiss roll with hole dataset. Returns ------- X : ndarray of shape (n_samples, 3) The points. t : ndarray of shape (n_samples,) The univariate position of the sample according to the main dimension of the points in the manifold. Notes ----- The algorithm is from Marsland [1]_. References ---------- .. [1] S. Marsland, "Machine Learning: An Algorithmic Perspective", 2nd edition, Chapter 6, 2014. https://homepages.ecs.vuw.ac.nz/~marslast/Code/Ch6/lle.py Examples -------- >>> from sklearn.datasets import make_swiss_roll >>> X, t = make_swiss_roll(noise=0.05, random_state=0) >>> X.shape (100, 3) >>> t.shape (100,)
python
sklearn/datasets/_samples_generator.py
1,860
[ "n_samples", "noise", "random_state", "hole" ]
false
3
7.12
scikit-learn/scikit-learn
64,340
numpy
false
alterConsumerGroupOffsets
AlterConsumerGroupOffsetsResult alterConsumerGroupOffsets(String groupId, Map<TopicPartition, OffsetAndMetadata> offsets, AlterConsumerGroupOffsetsOptions options);
<p>Alters offsets for the specified group. In order to succeed, the group must be empty. <p>This operation is not transactional so it may succeed for some partitions while fail for others. @param groupId The group for which to alter offsets. @param offsets A map of offsets by partition with associated metadata. Partitions not specified in the map are ignored. @param options The options to use when altering the offsets. @return The AlterOffsetsResult.
java
clients/src/main/java/org/apache/kafka/clients/admin/Admin.java
1,293
[ "groupId", "offsets", "options" ]
AlterConsumerGroupOffsetsResult
true
1
6.48
apache/kafka
31,560
javadoc
false
findAllAnnotationsOnBean
@Override public <A extends Annotation> Set<A> findAllAnnotationsOnBean( String beanName, Class<A> annotationType, boolean allowFactoryBeanInit) throws NoSuchBeanDefinitionException { Class<?> beanType = getType(beanName, allowFactoryBeanInit); return (beanType != null ? AnnotatedElementUtils.findAllMergedAnnotations(beanType, annotationType) : Collections.emptySet()); }
Add a new singleton bean. <p>Will overwrite any existing instance for the given name. @param name the name of the bean @param bean the bean instance
java
spring-beans/src/main/java/org/springframework/beans/factory/support/StaticListableBeanFactory.java
481
[ "beanName", "annotationType", "allowFactoryBeanInit" ]
true
2
6.88
spring-projects/spring-framework
59,386
javadoc
false
getMergedBeanDefinition
protected RootBeanDefinition getMergedBeanDefinition( String beanName, BeanDefinition bd, @Nullable BeanDefinition containingBd) throws BeanDefinitionStoreException { synchronized (this.mergedBeanDefinitions) { RootBeanDefinition mbd = null; RootBeanDefinition previous = null; // Check with full lock now in order to enforce the same merged instance. if (containingBd == null) { mbd = this.mergedBeanDefinitions.get(beanName); } if (mbd == null || mbd.stale) { previous = mbd; if (bd.getParentName() == null) { // Use copy of given root bean definition. if (bd instanceof RootBeanDefinition rootBeanDef) { mbd = rootBeanDef.cloneBeanDefinition(); } else { mbd = new RootBeanDefinition(bd); } } else { // Child bean definition: needs to be merged with parent. BeanDefinition pbd; try { String parentBeanName = transformedBeanName(bd.getParentName()); if (!beanName.equals(parentBeanName)) { pbd = getMergedBeanDefinition(parentBeanName); } else { if (getParentBeanFactory() instanceof ConfigurableBeanFactory parent) { pbd = parent.getMergedBeanDefinition(parentBeanName); } else { throw new NoSuchBeanDefinitionException(parentBeanName, "Parent name '" + parentBeanName + "' is equal to bean name '" + beanName + "': cannot be resolved without a ConfigurableBeanFactory parent"); } } } catch (NoSuchBeanDefinitionException ex) { throw new BeanDefinitionStoreException(bd.getResourceDescription(), beanName, "Could not resolve parent bean definition '" + bd.getParentName() + "'", ex); } // Deep copy with overridden values. mbd = new RootBeanDefinition(pbd); mbd.overrideFrom(bd); } // Set default singleton scope, if not configured before. if (!StringUtils.hasLength(mbd.getScope())) { mbd.setScope(SCOPE_SINGLETON); } // A bean contained in a non-singleton bean cannot be a singleton itself. 
// Let's correct this on the fly here, since this might be the result of // parent-child merging for the outer bean, in which case the original inner bean // definition will not have inherited the merged outer bean's singleton status. if (containingBd != null && !containingBd.isSingleton() && mbd.isSingleton()) { mbd.setScope(containingBd.getScope()); } // Cache the merged bean definition for the time being // (it might still get re-merged later on in order to pick up metadata changes) if (containingBd == null && (isCacheBeanMetadata() || isBeanEligibleForMetadataCaching(beanName))) { cacheMergedBeanDefinition(mbd, beanName); } } if (previous != null) { copyRelevantMergedBeanDefinitionCaches(previous, mbd); } return mbd; } }
Return a RootBeanDefinition for the given bean, by merging with the parent if the given bean's definition is a child bean definition. @param beanName the name of the bean definition @param bd the original bean definition (Root/ChildBeanDefinition) @param containingBd the containing bean definition in case of inner bean, or {@code null} in case of a top-level bean @return a (potentially merged) RootBeanDefinition for the given bean @throws BeanDefinitionStoreException in case of an invalid bean definition
java
spring-beans/src/main/java/org/springframework/beans/factory/support/AbstractBeanFactory.java
1,396
[ "beanName", "bd", "containingBd" ]
RootBeanDefinition
true
17
6.48
spring-projects/spring-framework
59,386
javadoc
false
logStartupProfileInfo
protected void logStartupProfileInfo(ConfigurableApplicationContext context) { Log log = getApplicationLog(); if (log.isInfoEnabled()) { List<String> activeProfiles = quoteProfiles(context.getEnvironment().getActiveProfiles()); if (ObjectUtils.isEmpty(activeProfiles)) { List<String> defaultProfiles = quoteProfiles(context.getEnvironment().getDefaultProfiles()); String message = String.format("%s default %s: ", defaultProfiles.size(), (defaultProfiles.size() <= 1) ? "profile" : "profiles"); log.info("No active profile set, falling back to " + message + StringUtils.collectionToDelimitedString(defaultProfiles, ", ")); } else { String message = (activeProfiles.size() == 1) ? "1 profile is active: " : activeProfiles.size() + " profiles are active: "; log.info("The following " + message + StringUtils.collectionToDelimitedString(activeProfiles, ", ")); } } }
Called to log active profile information. @param context the application context
java
core/spring-boot/src/main/java/org/springframework/boot/SpringApplication.java
644
[ "context" ]
void
true
5
6.4
spring-projects/spring-boot
79,428
javadoc
false
asByteArray
byte[] asByteArray() { ByteBuffer buffer = ByteBuffer.allocate(MINIMUM_SIZE); buffer.order(ByteOrder.LITTLE_ENDIAN); buffer.putInt(SIGNATURE); buffer.putShort(this.versionNeededToExtract); buffer.putShort(this.generalPurposeBitFlag); buffer.putShort(this.compressionMethod); buffer.putShort(this.lastModFileTime); buffer.putShort(this.lastModFileDate); buffer.putInt(this.crc32); buffer.putInt(this.compressedSize); buffer.putInt(this.uncompressedSize); buffer.putShort(this.fileNameLength); buffer.putShort(this.extraFieldLength); return buffer.array(); }
Return the contents of this record as a byte array suitable for writing to a zip. @return the record as a byte array
java
loader/spring-boot-loader/src/main/java/org/springframework/boot/loader/zip/ZipLocalFileHeaderRecord.java
87
[]
true
1
7.04
spring-projects/spring-boot
79,428
javadoc
false
convertToTypedArray
private Object convertToTypedArray(Object input, @Nullable String propertyName, Class<?> componentType) { if (input instanceof Collection<?> coll) { // Convert Collection elements to array elements. Object result = Array.newInstance(componentType, coll.size()); int i = 0; for (Iterator<?> it = coll.iterator(); it.hasNext(); i++) { Object value = convertIfNecessary( buildIndexedPropertyName(propertyName, i), null, it.next(), componentType); Array.set(result, i, value); } return result; } else if (input.getClass().isArray()) { // Convert array elements, if necessary. if (componentType.equals(input.getClass().componentType()) && !this.propertyEditorRegistry.hasCustomEditorForElement(componentType, propertyName)) { return input; } int arrayLength = Array.getLength(input); Object result = Array.newInstance(componentType, arrayLength); for (int i = 0; i < arrayLength; i++) { Object value = convertIfNecessary( buildIndexedPropertyName(propertyName, i), null, Array.get(input, i), componentType); Array.set(result, i, value); } return result; } else { // A plain value: convert it to an array with a single component. Object result = Array.newInstance(componentType, 1); Object value = convertIfNecessary( buildIndexedPropertyName(propertyName, 0), null, input, componentType); Array.set(result, 0, value); return result; } }
Convert the given text value using the given property editor. @param oldValue the previous value, if available (may be {@code null}) @param newTextValue the proposed text value @param editor the PropertyEditor to use @return the converted value
java
spring-beans/src/main/java/org/springframework/beans/TypeConverterDelegate.java
438
[ "input", "propertyName", "componentType" ]
Object
true
7
7.76
spring-projects/spring-framework
59,386
javadoc
false
handleResponse
public boolean handleResponse(FetchResponse response, short version) { if (response.error() != Errors.NONE) { log.info("Node {} was unable to process the fetch request with {}: {}.", node, nextMetadata, response.error()); if (response.error() == Errors.FETCH_SESSION_ID_NOT_FOUND) { nextMetadata = FetchMetadata.INITIAL; } else { nextMetadata = nextMetadata.nextCloseExistingAttemptNew(); } return false; } Set<TopicPartition> topicPartitions = response.responseData(sessionTopicNames, version).keySet(); if (nextMetadata.isFull()) { if (topicPartitions.isEmpty() && response.throttleTimeMs() > 0) { // Normally, an empty full fetch response would be invalid. However, KIP-219 // specifies that if the broker wants to throttle the client, it will respond // to a full fetch request with an empty response and a throttleTimeMs // value set. We don't want to log this with a warning, since it's not an error. // However, the empty full fetch response can't be processed, so it's still appropriate // to return false here. if (log.isDebugEnabled()) { log.debug("Node {} sent a empty full fetch response to indicate that this " + "client should be throttled for {} ms.", node, response.throttleTimeMs()); } nextMetadata = FetchMetadata.INITIAL; return false; } String problem = verifyFullFetchResponsePartitions(topicPartitions, response.topicIds(), version); if (problem != null) { log.info("Node {} sent an invalid full fetch response with {}", node, problem); nextMetadata = FetchMetadata.INITIAL; return false; } else if (response.sessionId() == INVALID_SESSION_ID) { if (log.isDebugEnabled()) log.debug("Node {} sent a full fetch response{}", node, responseDataToLogString(topicPartitions)); nextMetadata = FetchMetadata.INITIAL; return true; } else { // The server created a new incremental fetch session. 
if (log.isDebugEnabled()) log.debug("Node {} sent a full fetch response that created a new incremental " + "fetch session {}{}", node, response.sessionId(), responseDataToLogString(topicPartitions)); nextMetadata = FetchMetadata.newIncremental(response.sessionId()); return true; } } else { String problem = verifyIncrementalFetchResponsePartitions(topicPartitions, response.topicIds(), version); if (problem != null) { log.info("Node {} sent an invalid incremental fetch response with {}", node, problem); nextMetadata = nextMetadata.nextCloseExistingAttemptNew(); return false; } else if (response.sessionId() == INVALID_SESSION_ID) { // The incremental fetch session was closed by the server. if (log.isDebugEnabled()) log.debug("Node {} sent an incremental fetch response closing session {}{}", node, nextMetadata.sessionId(), responseDataToLogString(topicPartitions)); nextMetadata = FetchMetadata.INITIAL; return true; } else { // The incremental fetch session was continued by the server. // We don't have to do anything special here to support KIP-219, since an empty incremental // fetch request is perfectly valid. if (log.isDebugEnabled()) log.debug("Node {} sent an incremental fetch response with throttleTimeMs = {} " + "for session {}{}", node, response.throttleTimeMs(), response.sessionId(), responseDataToLogString(topicPartitions)); nextMetadata = nextMetadata.nextIncremental(); return true; } } }
Handle the fetch response. @param response The response. @param version The version of the request. @return True if the response is well-formed; false if it can't be processed because of missing or unexpected partitions.
java
clients/src/main/java/org/apache/kafka/clients/FetchSessionHandler.java
527
[ "response", "version" ]
true
15
8.16
apache/kafka
31,560
javadoc
false
finalize_async_tasks
def finalize_async_tasks( outputs: list[Output], results: list[ApplyResult], skip_cleanup: bool, success_message: str ) -> bool: """ Finalize async tasks by checking results and cleaning up temporary files. :param outputs: List of Output objects containing file names and titles. :param results: List of ApplyResult objects containing the results of the tasks. :param skip_cleanup: Whether to skip cleanup of temporary files. :param success_message: Message to print if all tasks were successful. :return: True if there were errors, False otherwise. """ errors = False for result in results: if result.get()[0] != 0: errors = True if errors: get_console().print("\n[error]There were errors when running some tasks. Quitting.[/]\n") else: get_console().print(f"\n[success]{success_message}[/]\n") if not skip_cleanup: for output in outputs: Path(output.file_name).unlink(missing_ok=True) from airflow_breeze.utils.docker_command_utils import fix_ownership_using_docker fix_ownership_using_docker() return errors
Finalize async tasks by checking results and cleaning up temporary files. :param outputs: List of Output objects containing file names and titles. :param results: List of ApplyResult objects containing the results of the tasks. :param skip_cleanup: Whether to skip cleanup of temporary files. :param success_message: Message to print if all tasks were successful. :return: True if there were errors, False otherwise.
python
dev/breeze/src/airflow_breeze/utils/parallel.py
458
[ "outputs", "results", "skip_cleanup", "success_message" ]
bool
true
7
8.08
apache/airflow
43,597
sphinx
false
errorsByTopicId
public Map<Uuid, Errors> errorsByTopicId() { Map<Uuid, Errors> errors = new HashMap<>(); for (MetadataResponseTopic metadata : data.topics()) { if (metadata.topicId().equals(Uuid.ZERO_UUID)) { throw new IllegalStateException("Use errors() when managing topic using topic name"); } if (metadata.errorCode() != Errors.NONE.code()) errors.put(metadata.topicId(), Errors.forCode(metadata.errorCode())); } return errors; }
Get a map of the topicIds which had metadata errors @return the map
java
clients/src/main/java/org/apache/kafka/common/requests/MetadataResponse.java
115
[]
true
3
7.44
apache/kafka
31,560
javadoc
false
requestInFlight
public boolean requestInFlight() { return requestInFlight; }
@return True if no response has been received after the last send, indicating that there is a request in-flight.
java
clients/src/main/java/org/apache/kafka/clients/consumer/internals/RequestState.java
98
[]
true
1
6.96
apache/kafka
31,560
javadoc
false
column
Map<R, V> column(@ParametricNullness C columnKey);
Returns a view of all mappings that have the given column key. For each row key / column key / value mapping in the table with that column key, the returned map associates the row key with the value. If no mappings in the table have the provided column key, an empty map is returned. <p>Changes to the returned map will update the underlying table, and vice versa. @param columnKey key of column to search for in the table @return the corresponding map from row keys to values
java
android/guava/src/com/google/common/collect/Table.java
199
[ "columnKey" ]
true
1
6.64
google/guava
51,352
javadoc
false
whenFalse
public Source<T> whenFalse() { return when(Boolean.FALSE::equals); }
Return a filtered version of the source that will only map values that are {@code false}. @return a new filtered source instance
java
core/spring-boot/src/main/java/org/springframework/boot/context/properties/PropertyMapper.java
225
[]
true
1
6.8
spring-projects/spring-boot
79,428
javadoc
false
checkForStaticContext
function checkForStaticContext(nodeToCheck: Node, containingClass: Node) { let current: Node = nodeToCheck; while (current !== containingClass) { if (current.kind === SyntaxKind.PropertyDeclaration) { if (isStatic(current)) { rangeFacts |= RangeFacts.InStaticRegion; } break; } else if (current.kind === SyntaxKind.Parameter) { const ctorOrMethod = getContainingFunction(current)!; if (ctorOrMethod.kind === SyntaxKind.Constructor) { rangeFacts |= RangeFacts.InStaticRegion; } break; } else if (current.kind === SyntaxKind.MethodDeclaration) { if (isStatic(current)) { rangeFacts |= RangeFacts.InStaticRegion; } } current = current.parent; } }
Attempt to refine the extraction node (generally, by shrinking it) to produce better results. @param node The unrefined extraction node.
typescript
src/services/refactors/extractSymbol.ts
584
[ "nodeToCheck", "containingClass" ]
false
10
6.08
microsoft/TypeScript
107,154
jsdoc
false
resolveApplication
@Nullable File resolveApplication() throws IOException;
Resolves the file for the application. @return the file for the application or {@code null} if the application should be skipped @throws IOException if something went wrong
java
loader/spring-boot-jarmode-tools/src/main/java/org/springframework/boot/jarmode/tools/ExtractCommand.java
398
[]
File
true
1
6.8
spring-projects/spring-boot
79,428
javadoc
false
hasUnprocessedImports
boolean hasUnprocessedImports(ImportPhase importPhase) { if (getImports().isEmpty()) { return false; } return !this.children.containsKey(importPhase); }
Return true if this contributor has imports that have not yet been processed in the given phase. @param importPhase the import phase @return if there are unprocessed imports
java
core/spring-boot/src/main/java/org/springframework/boot/context/config/ConfigDataEnvironmentContributor.java
204
[ "importPhase" ]
true
2
7.92
spring-projects/spring-boot
79,428
javadoc
false
forTypes
public static BindableRuntimeHintsRegistrar forTypes(Iterable<Class<?>> types) { Assert.notNull(types, "'types' must not be null"); return forTypes(StreamSupport.stream(types.spliterator(), false).toArray(Class<?>[]::new)); }
Create a new {@link BindableRuntimeHintsRegistrar} for the specified types. @param types the types to process @return a new {@link BindableRuntimeHintsRegistrar} instance
java
core/spring-boot/src/main/java/org/springframework/boot/context/properties/bind/BindableRuntimeHintsRegistrar.java
111
[ "types" ]
BindableRuntimeHintsRegistrar
true
1
6.16
spring-projects/spring-boot
79,428
javadoc
false
quantile
public abstract double quantile(double q);
Returns an estimate of a cutoff such that a specified fraction of the data added to this TDigest would be less than or equal to the cutoff. @param q The desired fraction @return The smallest value x such that cdf(x) &ge; q
java
libs/tdigest/src/main/java/org/elasticsearch/tdigest/TDigest.java
153
[ "q" ]
true
1
6.8
elastic/elasticsearch
75,680
javadoc
false
estimateRank
private static long estimateRank(BucketIterator buckets, double value, boolean inclusive, double maxValue) { long rank = 0; while (buckets.hasNext()) { double bucketMidpoint = ExponentialScaleUtils.getPointOfLeastRelativeError(buckets.peekIndex(), buckets.scale()); bucketMidpoint = Math.min(bucketMidpoint, maxValue); if (bucketMidpoint < value || (inclusive && bucketMidpoint == value)) { rank += buckets.peekCount(); buckets.advance(); } else { break; } } return rank; }
Estimates the rank of a given value in the distribution represented by the histogram. In other words, returns the number of values which are less than (or less-or-equal, if {@code inclusive} is true) the provided value. @param histo the histogram to query @param value the value to estimate the rank for @param inclusive if true, counts values equal to the given value as well @return the number of elements less than (or less-or-equal, if {@code inclusive} is true) the given value
java
libs/exponential-histogram/src/main/java/org/elasticsearch/exponentialhistogram/ExponentialHistogramQuantile.java
108
[ "buckets", "value", "inclusive", "maxValue" ]
true
5
8.08
elastic/elasticsearch
75,680
javadoc
false
triggerDebouncedNotebookDocumentChangeEvent
function triggerDebouncedNotebookDocumentChangeEvent() { if (timer) { clearTimeout(timer); } if (!mergedEvents) { return; } const args = mergedEvents; mergedEvents = undefined; onDidChangeNotebookCells(args); }
Code here is used to ensure the Notebook Model is in sync the ipynb JSON file. E.g. assume you add a new cell, this new cell will not have any metadata at all. However when we save the ipynb, the metadata will be an empty object `{}`. Now thats completely different from the metadata os being `empty/undefined` in the model. As a result, when looking at things like diff view or accessing metadata, we'll see differences. This code ensures that the model is in sync with the ipynb file.
typescript
extensions/ipynb/src/notebookModelStoreSync.ts
35
[]
false
3
6.24
microsoft/vscode
179,840
jsdoc
false
doConnect
protected boolean doConnect(SocketChannel channel, InetSocketAddress address) throws IOException { try { return channel.connect(address); } catch (UnresolvedAddressException e) { throw new IOException("Can't resolve address: " + address, e); } }
Begin connecting to the given address and add the connection to this nioSelector associated with the given id number. <p> Note that this call only initiates the connection, which will be completed on a future {@link #poll(long)} call. Check {@link #connected()} to see which (if any) connections have completed after a given poll call. @param id The id for the new connection @param address The address to connect to @param sendBufferSize The send buffer for the new connection @param receiveBufferSize The receive buffer for the new connection @throws IllegalStateException if there is already a connection for that id @throws IOException if DNS resolution fails on the hostname or if the broker is down
java
clients/src/main/java/org/apache/kafka/common/network/Selector.java
276
[ "channel", "address" ]
true
2
6.72
apache/kafka
31,560
javadoc
false
is_extension_array_dtype
def is_extension_array_dtype(arr_or_dtype) -> bool: """ Check if an object is a pandas extension array type. See the :ref:`Use Guide <extending.extension-types>` for more. Parameters ---------- arr_or_dtype : object For array-like input, the ``.dtype`` attribute will be extracted. Returns ------- bool Whether the `arr_or_dtype` is an extension array type. See Also -------- api.extensions.ExtensionArray : Abstract base class for pandas extension arrays. Notes ----- This checks whether an object implements the pandas extension array interface. In pandas, this includes: * Categorical * Sparse * Interval * Period * DatetimeArray * TimedeltaArray Third-party libraries may implement arrays or types satisfying this interface as well. Examples -------- >>> from pandas.api.types import is_extension_array_dtype >>> arr = pd.Categorical(["a", "b"]) >>> is_extension_array_dtype(arr) True >>> is_extension_array_dtype(arr.dtype) True >>> arr = np.array(["a", "b"]) >>> is_extension_array_dtype(arr.dtype) False """ dtype = getattr(arr_or_dtype, "dtype", arr_or_dtype) if isinstance(dtype, ExtensionDtype): return True elif isinstance(dtype, np.dtype): return False else: try: with warnings.catch_warnings(): # pandas_dtype(..) can raise UserWarning for class input warnings.simplefilter("ignore", UserWarning) dtype = pandas_dtype(dtype) except (TypeError, ValueError): # np.dtype(..) can raise ValueError return False return isinstance(dtype, ExtensionDtype)
Check if an object is a pandas extension array type. See the :ref:`Use Guide <extending.extension-types>` for more. Parameters ---------- arr_or_dtype : object For array-like input, the ``.dtype`` attribute will be extracted. Returns ------- bool Whether the `arr_or_dtype` is an extension array type. See Also -------- api.extensions.ExtensionArray : Abstract base class for pandas extension arrays. Notes ----- This checks whether an object implements the pandas extension array interface. In pandas, this includes: * Categorical * Sparse * Interval * Period * DatetimeArray * TimedeltaArray Third-party libraries may implement arrays or types satisfying this interface as well. Examples -------- >>> from pandas.api.types import is_extension_array_dtype >>> arr = pd.Categorical(["a", "b"]) >>> is_extension_array_dtype(arr) True >>> is_extension_array_dtype(arr.dtype) True >>> arr = np.array(["a", "b"]) >>> is_extension_array_dtype(arr.dtype) False
python
pandas/core/dtypes/common.py
1,481
[ "arr_or_dtype" ]
bool
true
4
7.92
pandas-dev/pandas
47,362
numpy
false
chomp
@Deprecated public static String chomp(final String str, final String separator) { return Strings.CS.removeEnd(str, separator); }
Removes {@code separator} from the end of {@code str} if it's there, otherwise leave it alone. <p> NOTE: This method changed in version 2.0. It now more closely matches Perl chomp. For the previous behavior, use {@link #substringBeforeLast(String, String)}. This method uses {@link String#endsWith(String)}. </p> <pre> StringUtils.chomp(null, *) = null StringUtils.chomp("", *) = "" StringUtils.chomp("foobar", "bar") = "foo" StringUtils.chomp("foobar", "baz") = "foobar" StringUtils.chomp("foo", "foo") = "" StringUtils.chomp("foo ", "foo") = "foo " StringUtils.chomp(" foo", "foo") = " " StringUtils.chomp("foo", "foooo") = "foo" StringUtils.chomp("foo", "") = "foo" StringUtils.chomp("foo", null) = "foo" </pre> @param str the String to chomp from, may be null. @param separator separator String, may be null. @return String without trailing separator, {@code null} if null String input. @deprecated This feature will be removed in Lang 4, use {@link StringUtils#removeEnd(String, String)} instead.
java
src/main/java/org/apache/commons/lang3/StringUtils.java
723
[ "str", "separator" ]
String
true
1
6.48
apache/commons-lang
2,896
javadoc
false
sortedIndex
function sortedIndex(array, value) { return baseSortedIndex(array, value); }
Uses a binary search to determine the lowest index at which `value` should be inserted into `array` in order to maintain its sort order. @static @memberOf _ @since 0.1.0 @category Array @param {Array} array The sorted array to inspect. @param {*} value The value to evaluate. @returns {number} Returns the index at which `value` should be inserted into `array`. @example _.sortedIndex([30, 50], 40); // => 1
javascript
lodash.js
8,047
[ "array", "value" ]
false
1
6.24
lodash/lodash
61,490
jsdoc
false
findLibSSLInLocations
async function findLibSSLInLocations(directories: string[]) { for (const dir of directories) { const libssl = await findLibSSL(dir) if (libssl) { return libssl } } return undefined }
Looks for libssl in specified directories, returns the first one found @param directories @returns
typescript
packages/get-platform/src/getPlatform.ts
410
[ "directories" ]
false
2
6.48
prisma/prisma
44,834
jsdoc
true
stream
Stream<ConfigDataEnvironmentContributor> stream() { return StreamSupport.stream(spliterator(), false); }
Returns a {@link Stream} that traverses this contributor and all its children in priority order. @return the stream
java
core/spring-boot/src/main/java/org/springframework/boot/context/config/ConfigDataEnvironmentContributor.java
225
[]
true
1
6.8
spring-projects/spring-boot
79,428
javadoc
false
hermpow
def hermpow(c, pow, maxpower=16): """Raise a Hermite series to a power. Returns the Hermite series `c` raised to the power `pow`. The argument `c` is a sequence of coefficients ordered from low to high. i.e., [1,2,3] is the series ``P_0 + 2*P_1 + 3*P_2.`` Parameters ---------- c : array_like 1-D array of Hermite series coefficients ordered from low to high. pow : integer Power to which the series will be raised maxpower : integer, optional Maximum power allowed. This is mainly to limit growth of the series to unmanageable size. Default is 16 Returns ------- coef : ndarray Hermite series of power. See Also -------- hermadd, hermsub, hermmulx, hermmul, hermdiv Examples -------- >>> from numpy.polynomial.hermite import hermpow >>> hermpow([1, 2, 3], 2) array([81., 52., 82., 12., 9.]) """ return pu._pow(hermmul, c, pow, maxpower)
Raise a Hermite series to a power. Returns the Hermite series `c` raised to the power `pow`. The argument `c` is a sequence of coefficients ordered from low to high. i.e., [1,2,3] is the series ``P_0 + 2*P_1 + 3*P_2.`` Parameters ---------- c : array_like 1-D array of Hermite series coefficients ordered from low to high. pow : integer Power to which the series will be raised maxpower : integer, optional Maximum power allowed. This is mainly to limit growth of the series to unmanageable size. Default is 16 Returns ------- coef : ndarray Hermite series of power. See Also -------- hermadd, hermsub, hermmulx, hermmul, hermdiv Examples -------- >>> from numpy.polynomial.hermite import hermpow >>> hermpow([1, 2, 3], 2) array([81., 52., 82., 12., 9.])
python
numpy/polynomial/hermite.py
559
[ "c", "pow", "maxpower" ]
false
1
6.32
numpy/numpy
31,054
numpy
false
andThen
default FailableBiConsumer<T, U, E> andThen(final FailableBiConsumer<? super T, ? super U, E> after) { Objects.requireNonNull(after); return (t, u) -> { accept(t, u); after.accept(t, u); }; }
Returns a composed {@link FailableBiConsumer} like {@link BiConsumer#andThen(BiConsumer)}. @param after the operation to perform after this one. @return a composed {@link FailableBiConsumer} like {@link BiConsumer#andThen(BiConsumer)}. @throws NullPointerException when {@code after} is null.
java
src/main/java/org/apache/commons/lang3/function/FailableBiConsumer.java
67
[ "after" ]
true
1
6.24
apache/commons-lang
2,896
javadoc
false
containsAllWords
public static boolean containsAllWords(final CharSequence word, final CharSequence... words) { if (StringUtils.isEmpty(word) || ArrayUtils.isEmpty(words)) { return false; } for (final CharSequence w : words) { if (StringUtils.isBlank(w)) { return false; } final Pattern p = Pattern.compile(".*\\b" + Pattern.quote(w.toString()) + "\\b.*"); if (!p.matcher(word).matches()) { return false; } } return true; }
Checks if the String contains all words in the given array. <p> A {@code null} String will return {@code false}. A {@code null}, zero length search array or if one element of array is null will return {@code false}. </p> <pre> WordUtils.containsAllWords(null, *) = false WordUtils.containsAllWords("", *) = false WordUtils.containsAllWords(*, null) = false WordUtils.containsAllWords(*, []) = false WordUtils.containsAllWords("abcd", "ab", "cd") = false WordUtils.containsAllWords("abc def", "def", "abc") = true </pre> @param word The CharSequence to check, may be null. @param words The array of String words to search for, may be null. @return {@code true} if all search words are found, {@code false} otherwise. @since 3.5
java
src/main/java/org/apache/commons/lang3/text/WordUtils.java
193
[ "word" ]
true
5
8.08
apache/commons-lang
2,896
javadoc
false
equals
@Override public boolean equals(@Nullable Object other) { return (this == other || (other instanceof MethodOverride that && this.methodName.equals(that.methodName) && ObjectUtils.nullSafeEquals(this.source, that.source))); }
Subclasses must override this to indicate whether they <em>match</em> the given method. This allows for argument list checking as well as method name checking. @param method the method to check @return whether this override matches the given method
java
spring-beans/src/main/java/org/springframework/beans/factory/support/MethodOverride.java
107
[ "other" ]
true
4
8.08
spring-projects/spring-framework
59,386
javadoc
false
bind_len
def bind_len(self, size: int) -> None: """ Bind this DimList to a specific length. Args: size: Number of dimensions to bind to Raises: DimensionBindError: If already bound to a different size """ if self._bound: if len(self._dims) != size: raise DimensionBindError( f"Dimlist has size {len(self._dims)} but it is being bound to size {size}" ) else: self._bound = True self._dims = [] for i in range(size): dim_name = f"{self._name}{i}" if self._name else f"dim{i}" self._dims.append(Dim(dim_name))
Bind this DimList to a specific length. Args: size: Number of dimensions to bind to Raises: DimensionBindError: If already bound to a different size
python
functorch/dim/__init__.py
189
[ "self", "size" ]
None
true
6
6.56
pytorch/pytorch
96,034
google
false
getResourceAsStream
@Override public @Nullable InputStream getResourceAsStream(String name) { Assert.state(this.resourceLoader != null, "ResourceLoaderClassLoadHelper not initialized"); Resource resource = this.resourceLoader.getResource(name); if (resource.exists()) { try { return resource.getInputStream(); } catch (IOException ex) { if (logger.isWarnEnabled()) { logger.warn("Could not load " + resource); } return null; } } else { return getClassLoader().getResourceAsStream(name); } }
Create a new ResourceLoaderClassLoadHelper for the given ResourceLoader. @param resourceLoader the ResourceLoader to delegate to
java
spring-context-support/src/main/java/org/springframework/scheduling/quartz/ResourceLoaderClassLoadHelper.java
109
[ "name" ]
InputStream
true
4
6.08
spring-projects/spring-framework
59,386
javadoc
false
maximumTimeToWait
public long maximumTimeToWait() { return cachedMaximumTimeToWait; }
Returns the delay for which the application thread can safely wait before it should be responsive to results from the request managers. For example, the subscription state can change when heartbeats are sent, so blocking for longer than the heartbeat interval might mean the application thread is not responsive to changes. Because this method is called by the application thread, it's not allowed to access the request managers that actually provide the information. As a result, the consumer network thread periodically caches the information from the request managers and this can then be read safely using this method. @return The maximum delay in milliseconds
java
clients/src/main/java/org/apache/kafka/clients/consumer/internals/ConsumerNetworkThread.java
340
[]
true
1
6.96
apache/kafka
31,560
javadoc
false
record
def record( self, custom_params_encoder: Callable[_P, object] | None = None, custom_result_encoder: Callable[_P, Callable[[_R], _EncodedR]] | None = None, ) -> Callable[[Callable[_P, _R]], Callable[_P, _R]]: """Record a function call result with custom encoding to both caches. This is a decorator that wraps a function to enable memoization with custom encoding/decoding logic. Results are stored in both the in-memory cache and the on-disk cache. Args: custom_params_encoder: Optional encoder for function parameters. If None, parameters are pickled directly. custom_result_encoder: Optional encoder factory for function results. Takes function parameters and returns an encoder function that converts R -> _EncodedR. Returns: A decorator function that can be applied to functions. Example: @persistent_memoizer.record( custom_params_encoder=my_param_encoder, custom_result_encoder=my_result_encoder_factory, ) def expensive_function(x, y): return x + y """ def wrapper(fn: Callable[_P, _R]) -> Callable[_P, _R]: """Wrap the function to enable memoization. Args: fn: The function to wrap. Returns: A wrapped version of the function. """ # If caching is disabled, return the original function unchanged if not config.IS_CACHING_MODULE_ENABLED(): return fn # Get the memory-cached version from the memoizer memory_record_fn = self._memoizer.record( custom_params_encoder, custom_result_encoder )(fn) def inner(*args: _P.args, **kwargs: _P.kwargs) -> _R: """Call the original function and cache the result in both caches. Args: *args: Positional arguments to pass to the function. **kwargs: Keyword arguments to pass to the function. Returns: The result of calling the original function. 
""" # Call the memory-cached version (which calls fn and caches in memory) result = memory_record_fn(*args, **kwargs) # Also store in disk cache cache_key = self._make_key(custom_params_encoder, *args, **kwargs) # Get the cache entry from memory cache # We know it must be there since memory_record_fn just cached it cached_hit = self._memoizer._cache.get(cache_key) assert cached_hit, "Cache entry must exist in memory cache" cache_entry = cast(CacheEntry, cached_hit.value) # Store the full CacheEntry in disk cache for easier debugging pickled_entry: bytes = pickle.dumps(cache_entry) self._disk_cache.insert(cache_key, pickled_entry) return result return inner return wrapper
Record a function call result with custom encoding to both caches. This is a decorator that wraps a function to enable memoization with custom encoding/decoding logic. Results are stored in both the in-memory cache and the on-disk cache. Args: custom_params_encoder: Optional encoder for function parameters. If None, parameters are pickled directly. custom_result_encoder: Optional encoder factory for function results. Takes function parameters and returns an encoder function that converts R -> _EncodedR. Returns: A decorator function that can be applied to functions. Example: @persistent_memoizer.record( custom_params_encoder=my_param_encoder, custom_result_encoder=my_result_encoder_factory, ) def expensive_function(x, y): return x + y
python
torch/_inductor/runtime/caching/interfaces.py
632
[ "self", "custom_params_encoder", "custom_result_encoder" ]
Callable[[Callable[_P, _R]], Callable[_P, _R]]
true
2
9.12
pytorch/pytorch
96,034
google
false
get
public Object get(String name) { BoundField field = schema.get(name); if (field == null) throw new SchemaException("No such field: " + name); return getFieldOrDefault(field); }
Get the record value for the field with the given name by doing a hash table lookup (slower!) @param name The name of the field @return The value in the record @throws SchemaException If no such field exists
java
clients/src/main/java/org/apache/kafka/common/protocol/types/Struct.java
84
[ "name" ]
Object
true
2
7.6
apache/kafka
31,560
javadoc
false
generateInstanceSupplierForFactoryMethod
/**
 * Generate the instance-supplier code for a bean produced by a factory method.
 *
 * @param factoryMethod the factory method that creates the bean
 * @param suppliedType the type the supplier provides
 * @param targetClass the class declaring the factory method
 * @param factoryMethodName the name of the factory method
 * @return a {@code CodeBlock} invoking {@code BeanInstanceSupplier.forFactoryMethod}
 */
private CodeBlock generateInstanceSupplierForFactoryMethod(Method factoryMethod, Class<?> suppliedType,
        Class<?> targetClass, String factoryMethodName) {

    boolean hasParameters = factoryMethod.getParameterCount() > 0;
    if (!hasParameters) {
        // No arguments to resolve: emit the short two-argument form.
        return CodeBlock.of("return $T.<$T>forFactoryMethod($T.class, $S)",
                BeanInstanceSupplier.class, suppliedType, targetClass, factoryMethodName);
    }
    CodeBlock argumentTypes = generateParameterTypesCode(factoryMethod.getParameterTypes());
    return CodeBlock.of("return $T.<$T>forFactoryMethod($T.class, $S, $L)",
            BeanInstanceSupplier.class, suppliedType, targetClass, factoryMethodName, argumentTypes);
}
Generate the instance supplier code for a bean created via a factory method. @param factoryMethod the factory method that creates the bean @param suppliedType the type supplied by the resulting supplier @param targetClass the class declaring the factory method @param factoryMethodName the name of the factory method @return the generated code
java
spring-beans/src/main/java/org/springframework/beans/factory/aot/InstanceSupplierCodeGenerator.java
341
[ "factoryMethod", "suppliedType", "targetClass", "factoryMethodName" ]
CodeBlock
true
2
7.44
spring-projects/spring-framework
59,386
javadoc
false
smart_split
def smart_split(text):
    r"""
    Generator that splits a string by spaces, leaving quoted phrases together.
    Supports both single and double quotes, and supports escaping quotes with
    backslashes. In the output, strings will keep their initial and trailing
    quote marks and escaped quotes will remain escaped (the results can then
    be further processed with unescape_string_literal()).

    >>> list(smart_split(r'This is "a person\'s" test.'))
    ['This', 'is', '"a person\\\'s"', 'test.']
    >>> list(smart_split(r"Another 'person\'s' test."))
    ['Another', "'person\\'s'", 'test.']
    >>> list(smart_split(r'A "\"funky\" style" test.'))
    ['A', '"\\"funky\\" style"', 'test.']
    """
    # Each regex match is one space-separated token, with quoted runs intact.
    yield from (match[0] for match in smart_split_re.finditer(str(text)))
r""" Generator that splits a string by spaces, leaving quoted phrases together. Supports both single and double quotes, and supports escaping quotes with backslashes. In the output, strings will keep their initial and trailing quote marks and escaped quotes will remain escaped (the results can then be further processed with unescape_string_literal()). >>> list(smart_split(r'This is "a person\'s" test.')) ['This', 'is', '"a person\\\'s"', 'test.'] >>> list(smart_split(r"Another 'person\'s' test.")) ['Another', "'person\\'s'", 'test.'] >>> list(smart_split(r'A "\"funky\" style" test.')) ['A', '"\\"funky\\" style"', 'test.']
python
django/utils/text.py
428
[ "text" ]
false
2
6.32
django/django
86,204
unknown
false
asBiFunction
/**
 * Adapts a {@link FailableBiFunction} to a standard {@link BiFunction},
 * rethrowing any checked exception via {@code apply}.
 *
 * @param <O1> the type of the first input argument
 * @param <O2> the type of the second input argument
 * @param <O> the type of the output
 * @param function the failable function to adapt
 * @return an equivalent standard {@link BiFunction}
 * @since 3.10
 */
public static <O1, O2, O> BiFunction<O1, O2, O> asBiFunction(final FailableBiFunction<O1, O2, O, ?> function) {
    return (left, right) -> {
        return apply(function, left, right);
    };
}
Converts the given {@link FailableBiFunction} into a standard {@link BiFunction}. @param <O1> the type of the first argument of the input of the functions @param <O2> the type of the second argument of the input of the functions @param <O> the type of the output of the functions @param function a {@link FailableBiFunction} @return a standard {@link BiFunction} @since 3.10
java
src/main/java/org/apache/commons/lang3/Functions.java
366
[ "function" ]
true
1
6.24
apache/commons-lang
2,896
javadoc
false
getMaximumWeight
/**
 * Returns the effective maximum weight for the cache being built.
 *
 * <p>If either expiry duration is zero every entry expires immediately, so the
 * effective weight limit is {@code 0}. Otherwise, when no weigher is configured
 * each entry weighs 1 and {@code maximumSize} acts as the weight bound; with a
 * weigher, {@code maximumWeight} applies.
 */
long getMaximumWeight() {
    boolean entriesExpireImmediately =
        expireAfterWriteNanos == 0 || expireAfterAccessNanos == 0;
    if (entriesExpireImmediately) {
        return 0;
    }
    return weigher == null ? maximumSize : maximumWeight;
}
Returns the effective maximum weight for the cache. If either {@code expireAfterWrite} or {@code expireAfterAccess} is zero, entries expire immediately and the result is {@code 0}. Otherwise, when no weigher is configured the result is {@code maximumSize} (each entry weighing 1); with a weigher configured, the result is {@code maximumWeight}.
java
android/guava/src/com/google/common/cache/CacheBuilder.java
597
[]
true
4
7.92
google/guava
51,352
javadoc
false
setAsText
/**
 * Parses a comma-delimited list of class names into a {@code Class[]} value,
 * resolving each trimmed name against the configured {@code ClassLoader}.
 * Blank or empty text clears the value to {@code null}.
 *
 * @param text the comma-delimited class names, may be empty or {@code null}
 * @throws IllegalArgumentException if a class name cannot be resolved
 */
@Override
public void setAsText(String text) throws IllegalArgumentException {
    if (!StringUtils.hasText(text)) {
        setValue(null);
        return;
    }
    String[] classNames = StringUtils.commaDelimitedListToStringArray(text);
    Class<?>[] resolved = new Class<?>[classNames.length];
    for (int i = 0; i < classNames.length; i++) {
        resolved[i] = ClassUtils.resolveClassName(classNames[i].trim(), this.classLoader);
    }
    setValue(resolved);
}
Parse the given text into a {@code Class} array: the text is split on commas, each trimmed class name is resolved against the configured {@code ClassLoader}, and the resulting array becomes the editor's value. Empty or whitespace-only text sets the value to {@code null}. @param text the comma-delimited class names @throws IllegalArgumentException if a class name cannot be resolved
java
spring-beans/src/main/java/org/springframework/beans/propertyeditors/ClassArrayEditor.java
64
[ "text" ]
void
true
3
6.24
spring-projects/spring-framework
59,386
javadoc
false
initiateConnect
private void initiateConnect(Node node, long now) { String nodeConnectionId = node.idString(); try { connectionStates.connecting(nodeConnectionId, now, node.host()); InetAddress address = connectionStates.currentAddress(nodeConnectionId); log.debug("Initiating connection to node {} using address {}", node, address); selector.connect(nodeConnectionId, new InetSocketAddress(address, node.port()), this.socketSendBuffer, this.socketReceiveBuffer); } catch (IOException e) { log.warn("Error connecting to node {}", node, e); // Attempt failed, we'll try again after the backoff connectionStates.disconnected(nodeConnectionId, now); // Notify metadata updater of the connection failure metadataUpdater.handleServerDisconnect(now, nodeConnectionId, Optional.empty()); } }
Initiate a connection to the given node @param node the node to connect to @param now current time in epoch milliseconds
java
clients/src/main/java/org/apache/kafka/clients/NetworkClient.java
1,136
[ "node", "now" ]
void
true
2
6.56
apache/kafka
31,560
javadoc
false
ensureActiveGroup
/**
 * Block until the group is active (i.e. joined and synced), retrying
 * indefinitely with an effectively unbounded timer per attempt.
 */
public void ensureActiveGroup() {
    boolean active = false;
    while (!active) {
        active = ensureActiveGroup(time.timer(Long.MAX_VALUE));
        if (!active) {
            log.warn("still waiting to ensure active group");
        }
    }
}
Ensure that the group is active (i.e. joined and synced)
java
clients/src/main/java/org/apache/kafka/clients/consumer/internals/AbstractCoordinator.java
400
[]
void
true
2
6.4
apache/kafka
31,560
javadoc
false
dtype
def dtype(self) -> CategoricalDtype:
    """
    The :class:`~pandas.api.types.CategoricalDtype` for this instance.

    See Also
    --------
    astype : Cast argument to a specified dtype.
    CategoricalDtype : Type for categorical data.

    Examples
    --------
    >>> cat = pd.Categorical(["a", "b"], ordered=True)
    >>> cat
    ['a', 'b']
    Categories (2, str): ['a' < 'b']
    >>> cat.dtype
    CategoricalDtype(categories=['a', 'b'], ordered=True, categories_dtype=str)
    """
    # The dtype is cached at construction time; simply expose it.
    return self._dtype
The :class:`~pandas.api.types.CategoricalDtype` for this instance. See Also -------- astype : Cast argument to a specified dtype. CategoricalDtype : Type for categorical data. Examples -------- >>> cat = pd.Categorical(["a", "b"], ordered=True) >>> cat ['a', 'b'] Categories (2, str): ['a' < 'b'] >>> cat.dtype CategoricalDtype(categories=['a', 'b'], ordered=True, categories_dtype=str)
python
pandas/core/arrays/categorical.py
517
[ "self" ]
CategoricalDtype
true
1
6.48
pandas-dev/pandas
47,362
unknown
false