name stringlengths 12 178 | code_snippet stringlengths 8 36.5k | score float64 3.26 3.68 |
|---|---|---|
flink_AbstractPagedOutputView_getCurrentPositionInSegment | /**
* Gets the current write position (the position where the next bytes will be written) in the
* current memory segment.
*
* @return The current write offset in the current memory segment.
*/
public int getCurrentPositionInSegment() {
return this.positionInSegment;
} | 3.68 |
hbase_Compactor_getFileDetails | /**
* Extracts some details about the files to compact that are commonly needed by compactors.
* @param filesToCompact Files.
* @param allFiles Whether all files are included for compaction
 * @param major If major compaction
* @return The result.
*/
private FileDetails getFileDetails(Collection<HStoreFile> ... | 3.68 |
framework_DataProvider_getId | /**
* Gets an identifier for the given item. This identifier is used by the
* framework to determine equality between two items.
* <p>
* Default is to use item itself as its own identifier. If the item has
* {@link Object#equals(Object)} and {@link Object#hashCode()} implemented
* in a way that it can be compared... | 3.68 |
framework_CSSInjectWithColorpicker_createFontSizeSelect | /**
* Creates a font size selection control
*/
private Component createFontSizeSelect() {
final ComboBox<Integer> select = new ComboBox<>(null,
Arrays.asList(8, 9, 10, 12, 14, 16, 20, 25, 30, 40, 50));
select.setWidth("100px");
select.setValue(12);
select.setPlaceholder("Font size");
... | 3.68 |
pulsar_ProducerImpl_failPendingBatchMessages | /**
 * Fail any pending batch messages that were enqueued but whose batch was not closed out.
*
*/
private void failPendingBatchMessages(PulsarClientException ex) {
if (batchMessageContainer.isEmpty()) {
return;
}
final int numMessagesInBatch = batchMessageContainer.getNumMessagesInBatch();
fin... | 3.68 |
hadoop_IOStatisticsStoreImpl_trackDuration | /**
* If the store is tracking the given key, return the
* duration tracker for it. If not tracked, return the
* stub tracker.
* @param key statistic key prefix
* @param count #of times to increment the matching counter in this
* operation.
* @return a tracker.
*/
@Override
public DurationTracker trackDuration... | 3.68 |
hadoop_PathLocation_getSourcePath | /**
* Get the source path in the global namespace for this path location.
*
* @return The path in the global namespace.
*/
public String getSourcePath() {
return this.sourcePath;
} | 3.68 |
pulsar_LinuxInfoUtils_isUsable | /**
* Determine whether nic is usable.
* @param nicPath Nic path
* @return whether nic is usable.
*/
private static boolean isUsable(Path nicPath) {
try {
String operstate = readTrimStringFromFile(nicPath.resolve("operstate"));
Operstate operState = Operstate.valueOf(operstate.toUpperCase(Locale... | 3.68 |
graphhopper_ViaRouting_buildEdgeRestrictions | /**
* Determines restrictions for the start/target edges to account for the heading, pass_through and curbside parameters
* for a single via-route leg.
*
* @param fromHeading the heading at the start node of this leg, or NaN if no restriction should be applied
* @param toHeading the heading at the target node ... | 3.68 |
hadoop_HsController_singleJobCounter | /*
* (non-Javadoc)
* @see org.apache.hadoop.mapreduce.v2.app.webapp.AppController#singleJobCounter()
*/
@Override
public void singleJobCounter() throws IOException{
super.singleJobCounter();
} | 3.68 |
hadoop_ResourceEstimatorService_getEstimatedResourceAllocation | /**
 * Get estimated {@code Resource} allocation for the pipeline.
*
* @param pipelineId id of the pipeline.
* @return Json format of {@link RLESparseResourceAllocation}.
 * @throws SkylineStoreException if it fails to get the estimated {@code Resource}
* allocation from {@link SkylineStore}.
... | 3.68 |
framework_VAbstractTextualDate_updateBufferedResolutions | /**
* Updates {@link VDateField#bufferedResolutions bufferedResolutions} before
* sending a response to the server.
* <p>
* The method can be overridden by subclasses to provide a custom logic for
* date variables to avoid overriding the {@link #onChange(ChangeEvent)}
* method.
*
* <p>
* Note that this method ... | 3.68 |
MagicPlugin_Targeting_getCurBlock | /**
* Returns the current block along the line of vision
*
* @return The block
*/
public Block getCurBlock()
{
return currentBlock;
} | 3.68 |
flink_InputSelection_fairSelectNextIndex | /**
* Fairly select one of the available inputs for reading.
*
* @param inputMask The mask of inputs that are selected. Note -1 for this is interpreted as all
* of the 32 inputs are available.
* @param availableInputsMask The mask of all available inputs. Note -1 for this is interpreted
* as all of the 32... | 3.68 |
hbase_WALKey_toStringMap | /**
* Produces a string map for this key. Useful for programmatic use and manipulation of the data
* stored in an WALKeyImpl, for example, printing as JSON.
* @return a Map containing data from this key
*/
default Map<String, Object> toStringMap() {
Map<String, Object> stringMap = new HashMap<>();
stringMap.put... | 3.68 |
framework_TreeTable_setAnimationsEnabled | /**
* Animations can be enabled by passing true to this method. Currently
* expanding rows slide in from the top and collapsing rows slide out the
* same way. NOTE! not supported in Internet Explorer 6 or 7.
*
* @param animationsEnabled
* true or false whether to enable animations or not.
*/
public vo... | 3.68 |
hbase_LruBlockCache_getCachedFileNamesForTest | /**
* Used in testing. May be very inefficient.
* @return the set of cached file names
*/
SortedSet<String> getCachedFileNamesForTest() {
SortedSet<String> fileNames = new TreeSet<>();
for (BlockCacheKey cacheKey : map.keySet()) {
fileNames.add(cacheKey.getHfileName());
}
return fileNames;
} | 3.68 |
hadoop_GetGroupsBase_run | /**
* Get the groups for the users given and print formatted output to the
* {@link PrintStream} configured earlier.
*/
@Override
public int run(String[] args) throws Exception {
if (args.length == 0) {
args = new String[] { UserGroupInformation.getCurrentUser().getUserName() };
}
for (String username : ... | 3.68 |
flink_StreamGraphGenerator_transform | /**
* Transforms one {@code Transformation}.
*
* <p>This checks whether we already transformed it and exits early in that case. If not it
* delegates to one of the transformation specific methods.
*/
private Collection<Integer> transform(Transformation<?> transform) {
if (alreadyTransformed.containsKey(transfo... | 3.68 |
framework_VAbstractCalendarPanel_isAcceptedByRangeEnd | /**
* Accepts dates earlier than or equal to rangeStart, depending on the
* resolution. If the resolution is set to DAY, the range will compare on a
* day-basis. If the resolution is set to YEAR, only years are compared. So
* even if the range is set to one millisecond in next year, also next year
* will be includ... | 3.68 |
hadoop_AbstractDNSToSwitchMapping_getSwitchMap | /**
* Get a copy of the map (for diagnostics)
* @return a clone of the map or null for none known
*/
public Map<String, String> getSwitchMap() {
return null;
} | 3.68 |
framework_Table_getColumnFooter | /**
* Gets the footer caption beneath the rows.
*
* @param propertyId
 *            The propertyId of the column
* @return The caption of the footer or NULL if not set
*/
public String getColumnFooter(Object propertyId) {
return columnFooters.get(propertyId);
} | 3.68 |
hbase_MetricsSource_setTimeStampNextToReplicate | /**
* TimeStamp of next edit targeted for replication. Used for calculating lag, as if this timestamp
* is greater than timestamp of last shipped, it means there's at least one edit pending
* replication.
* @param timeStampNextToReplicate timestamp of next edit in the queue that should be replicated.
*/
public voi... | 3.68 |
AreaShop_GeneralRegion_saveRequired | /**
* Indicate this region needs to be saved, saving will happen by a repeating task.
*/
public void saveRequired() {
saveRequired = true;
} | 3.68 |
framework_VAbstractSplitPanel_constructDom | /**
* Constructs the DOM structure for this widget.
*/
protected void constructDom() {
DOM.appendChild(splitter, DOM.createDiv()); // for styling
DOM.appendChild(getElement(), wrapper);
wrapper.getStyle().setPosition(Position.RELATIVE);
wrapper.getStyle().setWidth(100, Unit.PCT);
wrapper.getStyle(... | 3.68 |
framework_Table_getItemIdsInRange | /**
* Gets items ids from a range of key values
*
* @param itemId
* The start key
* @param length
* amount of items to be retrieved
* @return
*/
private LinkedHashSet<Object> getItemIdsInRange(Object itemId,
final int length) {
LinkedHashSet<Object> ids = new LinkedHashSet<Obje... | 3.68 |
flink_SkipListUtils_getPrevIndexNode | /**
* Returns previous key pointer on the given index level.
*
* @param memorySegment memory segment for key space.
* @param offset offset of key space in the memory segment.
* @param totalLevel the level of the node.
* @param level on which level to get the previous key pointer of the node.
*/
public static lon... | 3.68 |
hbase_MiniHBaseCluster_getLiveMasterThreads | /** Returns List of live master threads (skips the aborted and the killed) */
public List<JVMClusterUtil.MasterThread> getLiveMasterThreads() {
return this.hbaseCluster.getLiveMasters();
} | 3.68 |
flink_CopyOnWriteSkipListStateMapSnapshot_writeValue | /** Write value from bytes. */
private void writeValue(long valuePointer, DataOutputView outputView) throws IOException {
outputView.write(owningStateMap.helpGetBytesForState(valuePointer));
} | 3.68 |
flink_CoGroupOperatorBase_setGroupOrderForInputOne | /**
* Sets the order of the elements within a group for the first input.
*
* @param order The order for the elements in a group.
*/
public void setGroupOrderForInputOne(Ordering order) {
setGroupOrder(0, order);
} | 3.68 |
dubbo_NetUtils_isPortInUsed | /**
* Check the port whether is in use in os
* @param port port to check
* @return true if it's occupied
*/
public static boolean isPortInUsed(int port) {
try (ServerSocket ignored = new ServerSocket(port)) {
return false;
} catch (IOException e) {
// continue
}
return true;
} | 3.68 |
hbase_KeyStoreFileType_fromPropertyValue | /**
* Converts a property value to a StoreFileType enum. If the property value is <code>null</code>
* or an empty string, returns <code>null</code>.
* @param propertyValue the property value.
* @return the KeyStoreFileType, or <code>null</code> if <code>propertyValue</code> is
* <code>null</code> or empty.... | 3.68 |
hbase_TableSplit_toString | /**
* Returns the details about this instance as a string.
* @return The values of this instance as a string.
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("Split(");
sb.append("tablename=").append(tableName);
// null scan input ... | 3.68 |
morf_ConnectionResourcesBean_setPassword | /**
* @see org.alfasoftware.morf.jdbc.AbstractConnectionResources#setPassword(java.lang.String)
*/
@Override
public void setPassword(String password) {
this.password = password;
} | 3.68 |
framework_Window_removeResizeListener | /**
* Remove a resize listener.
*
* @param listener
*/
@Deprecated
public void removeResizeListener(ResizeListener listener) {
removeListener(ResizeEvent.class, listener);
} | 3.68 |
hbase_AsyncTable_ifEquals | /**
* Check for equality.
* @param value the expected value
*/
default CheckAndMutateBuilder ifEquals(byte[] value) {
return ifMatches(CompareOperator.EQUAL, value);
} | 3.68 |
hudi_CleanPlanner_isFileSliceNeededForPendingMajorOrMinorCompaction | /**
* Determine if file slice needed to be preserved for pending compaction or log compaction.
* @param fileSlice File slice
* @return true if file slice needs to be preserved, false otherwise.
*/
private boolean isFileSliceNeededForPendingMajorOrMinorCompaction(FileSlice fileSlice) {
return isFileSliceNeededForPe... | 3.68 |
graphhopper_StringUtils_getLevenshteinDistance | /**
* <p>Find the Levenshtein distance between two Strings.</p>
* <p>
* <p>This is the number of changes needed to change one String into
* another, where each change is a single character modification (deletion,
* insertion or substitution).</p>
* <p>
* <p>The implementation uses a single-dimensional array of l... | 3.68 |
zxing_MultiFormatReader_decodeWithState | /**
* Decode an image using the state set up by calling setHints() previously. Continuous scan
* clients will get a <b>large</b> speed increase by using this instead of decode().
*
* @param image The pixel data to decode
* @return The contents of the image
* @throws NotFoundException Any errors which occurred
*/... | 3.68 |
flink_GroupCombineNode_getOperator | /**
* Gets the operator represented by this optimizer node.
*
* @return The operator represented by this optimizer node.
*/
@Override
public GroupCombineOperatorBase<?, ?, ?> getOperator() {
return (GroupCombineOperatorBase<?, ?, ?>) super.getOperator();
} | 3.68 |
hadoop_SuccessData_getDescription | /**
* @return any description text.
*/
public String getDescription() {
return description;
} | 3.68 |
zxing_Detector_extractParameters | /**
* Extracts the number of data layers and data blocks from the layer around the bull's eye.
*
* @param bullsEyeCorners the array of bull's eye corners
* @return the number of errors corrected during parameter extraction
* @throws NotFoundException in case of too many errors or invalid parameters
*/
private int... | 3.68 |
hbase_SaslClientAuthenticationProviders_getSimpleProvider | /**
* Returns the provider and token pair for SIMPLE authentication. This method is a "hack" while
* SIMPLE authentication for HBase does not flow through the SASL codepath.
*/
public Pair<SaslClientAuthenticationProvider, Token<? extends TokenIdentifier>>
getSimpleProvider() {
Optional<SaslClientAuthenticationP... | 3.68 |
flink_RemoteStorageScanner_run | /** Iterate the watched segment ids and check related file status. */
@Override
public void run() {
try {
Iterator<
Map.Entry<
Tuple2<TieredStoragePartitionId, TieredStorageSubpartitionId>,
Integer>>
iter... | 3.68 |
framework_Table_setTableFieldFactory | /**
* Sets the TableFieldFactory that is used to create editor for table cells.
*
* The TableFieldFactory is only used if the Table is editable. By default
* the DefaultFieldFactory is used.
*
* @param fieldFactory
* the field factory to set.
* @see #isEditable
* @see DefaultFieldFactory
*/
public ... | 3.68 |
hadoop_TimelineAuthenticationFilterInitializer_initFilter | /**
* Initializes {@link TimelineAuthenticationFilter}.
* <p>
* Propagates to {@link TimelineAuthenticationFilter} configuration all YARN
* configuration properties prefixed with
* {@value org.apache.hadoop.yarn.conf.YarnConfiguration#TIMELINE_HTTP_AUTH_PREFIX}.
*
* @param container
* The filter contai... | 3.68 |
querydsl_StringExpression_length | /**
* Create a {@code this.length()} expression
*
* <p>Return the length of this String</p>
*
* @return this.length()
* @see java.lang.String#length()
*/
public NumberExpression<Integer> length() {
if (length == null) {
length = Expressions.numberOperation(Integer.class, Ops.STRING_LENGTH, mixin);
... | 3.68 |
flink_TaskSlot_add | /**
* Add the given task to the task slot. This is only possible if there is not already another
* task with the same execution attempt id added to the task slot. In this case, the method
* returns true. Otherwise the task slot is left unchanged and false is returned.
*
* <p>In case that the task slot state is not... | 3.68 |
hbase_MasterCoprocessorHost_preBalanceSwitch | // This hook allows Coprocessor change value of balance switch.
public void preBalanceSwitch(final boolean b) throws IOException {
if (this.coprocEnvironments.isEmpty()) {
return;
}
execOperation(new MasterObserverOperation() {
@Override
public void call(MasterObserver observer) throws IOException {
... | 3.68 |
hbase_RequestConverter_buildClearRegionBlockCacheRequest | /**
* Creates a protocol buffer ClearRegionBlockCacheRequest
* @return a ClearRegionBlockCacheRequest
*/
public static ClearRegionBlockCacheRequest
buildClearRegionBlockCacheRequest(List<RegionInfo> hris) {
ClearRegionBlockCacheRequest.Builder builder = ClearRegionBlockCacheRequest.newBuilder();
hris.forEach(h... | 3.68 |
druid_IPAddress_isClassC | /**
     * Check if the IP address belongs to a Class C IP address.
*
* @return Return <code>true</code> if the encapsulated IP address belongs to a class C IP address, otherwise
     *         return <code>false</code>.
*/
public final boolean isClassC() {
return (ipAddress & 0x00000007) == 3;
} | 3.68 |
hbase_Reference_getFileRegion | /**
* */
public Range getFileRegion() {
return this.region;
} | 3.68 |
hbase_HBaseConfiguration_create | /**
* Creates a Configuration with HBase resources
* @param that Configuration to clone.
* @return a Configuration created with the hbase-*.xml files plus the given configuration.
*/
public static Configuration create(final Configuration that) {
Configuration conf = create();
merge(conf, that);
return conf;
} | 3.68 |
AreaShop_GeneralRegion_getFriendsFeature | /**
* Get the friends feature to query and manipulate friends of this region.
* @return The FriendsFeature of this region
*/
public FriendsFeature getFriendsFeature() {
return getFeature(FriendsFeature.class);
} | 3.68 |
hbase_ForeignExceptionDispatcher_dispatch | /**
* Sends an exception to all listeners.
* @param e {@link ForeignException} containing the cause. Can be null.
*/
private void dispatch(ForeignException e) {
// update all the listeners with the passed error
for (ForeignExceptionListener l : listeners) {
l.receive(e);
}
} | 3.68 |
hbase_ScanResultConsumerBase_onScanMetricsCreated | /**
* If {@code scan.isScanMetricsEnabled()} returns true, then this method will be called prior to
* all other methods in this interface to give you the {@link ScanMetrics} instance for this scan
* operation. The {@link ScanMetrics} instance will be updated on-the-fly during the scan, you can
* store it somewhere ... | 3.68 |
morf_AbstractSelectStatementBuilder_leftOuterJoin | /**
* Specifies an left outer join to a subselect:
*
* <blockquote><pre>
* TableReference sale = tableRef("Sale");
* TableReference customer = tableRef("Customer");
*
* // Define the subselect - a group by showing total sales by age in the
* // previous month.
* SelectStatement amountsByAgeLastMonth = select(f... | 3.68 |
zxing_LocaleManager_getProductSearchCountryTLD | /**
* The same as above, but specifically for Google Product Search.
*
* @param context application's {@link Context}
* @return The top-level domain to use.
*/
public static String getProductSearchCountryTLD(Context context) {
return doGetTLD(GOOGLE_PRODUCT_SEARCH_COUNTRY_TLD, context);
} | 3.68 |
AreaShop_AreaShop_registerDynamicPermissions | /**
* Register dynamic permissions controlled by config settings.
*/
private void registerDynamicPermissions() {
// Register limit groups of amount of regions a player can have
ConfigurationSection section = getConfig().getConfigurationSection("limitGroups");
if(section == null) {
return;
}
for(String group : ... | 3.68 |
hadoop_SchedulerHealth_getAggregateFulFilledReservationsCount | /**
* Get the aggregate of all the fulfilled reservations count.
*
* @return aggregate fulfilled reservations count
*/
public Long getAggregateFulFilledReservationsCount() {
return getAggregateOperationCount(Operation.FULFILLED_RESERVATION);
} | 3.68 |
hadoop_ItemInfo_getRetryCount | /**
* Get the attempted retry count of the block for satisfy the policy.
*/
public int getRetryCount() {
return retryCount;
} | 3.68 |
dubbo_GenericBeanPostProcessorAdapter_doPostProcessBeforeInitialization | /**
* Adapter BeanPostProcessor#postProcessBeforeInitialization(Object, String) method , sub-type
* could override this method.
*
* @param bean Bean Object
* @param beanName Bean Name
* @return Bean Object
* @see BeanPostProcessor#postProcessBeforeInitialization(Object, String)
*/
protected T doPostProcessB... | 3.68 |
hbase_Bytes_binarySearch | /**
* Binary search for keys in indexes.
* @param arr array of byte arrays to search for
* @param key the key you want to find
* @param comparator a comparator to compare.
* @return zero-based index of the key, if the key is present in the array. Otherwise, a value -(i
* + 1) such that the k... | 3.68 |
hbase_MetricsHeapMemoryManager_setCurMemStoreSizeGauge | /**
* Set the current global memstore size used gauge
* @param memStoreSize the current memory usage in memstore, in bytes.
*/
public void setCurMemStoreSizeGauge(final long memStoreSize) {
source.setCurMemStoreSizeGauge(memStoreSize);
} | 3.68 |
morf_SqlUtils_bracket | /**
* Method that wraps a first elements of an (sub)expression with a bracket.
* <p>
* For example, in order to generate "(a + b) / c" SQL Math expression, we
* need to put first two elements (first subexpression) into a bracket. That
* could be achieved by the following DSL statement.
* </p>
*
* <pre>
* brack... | 3.68 |
hbase_User_getTokens | /**
* Returns all the tokens stored in the user's credentials.
*/
public Collection<Token<? extends TokenIdentifier>> getTokens() {
return ugi.getTokens();
} | 3.68 |
flink_StreamProjection_projectTupleX | /**
* Chooses a projectTupleX according to the length of {@link
* org.apache.flink.streaming.api.datastream.StreamProjection#fieldIndexes}.
*
* @return The projected DataStream.
* @see org.apache.flink.api.java.operators.ProjectOperator.Projection
*/
@SuppressWarnings("unchecked")
public <OUT extends Tuple> Singl... | 3.68 |
graphhopper_PbfFieldDecoder_decodeLongitude | /**
* Decodes a raw longitude value into degrees.
* <p>
*
* @param rawLongitude The PBF encoded value.
* @return The longitude in degrees.
*/
public double decodeLongitude(long rawLongitude) {
return COORDINATE_SCALING_FACTOR * (coordLongitudeOffset + (coordGranularity * rawLongitude));
} | 3.68 |
flink_Tuple20_toString | /**
* Creates a string representation of the tuple in the form (f0, f1, f2, f3, f4, f5, f6, f7, f8,
* f9, f10, f11, f12, f13, f14, f15, f16, f17, f18, f19), where the individual fields are the
* value returned by calling {@link Object#toString} on that field.
*
* @return The string representation of the tuple.
*/... | 3.68 |
pulsar_Transactions_getPendingAckStatsAsync | /**
* Get transaction pending ack stats.
*
* @param topic the topic of this transaction pending ack stats
* @param subName the subscription name of this transaction pending ack stats
* @return the stats of transaction pending ack.
*/
default CompletableFuture<TransactionPendingAckStats> getPendingAckStatsAsync(St... | 3.68 |
hadoop_ActiveAuditManagerS3A_removeActiveSpanFromMap | /**
* remove the span from the reference map, shrinking the map in the process.
* if/when a new span is activated in the thread, a new entry will be created.
* and if queried for a span, the unbounded span will be automatically
* added to the map for this thread ID.
*
*/
@VisibleForTesting
boolean removeActiveSpa... | 3.68 |
flink_TimeUtils_singular | /**
* @param label the original label
* @return the singular format of the original label
*/
private static String[] singular(String label) {
return new String[] {label};
} | 3.68 |
hbase_GroupingTableMap_extractKeyValues | /**
* Extract columns values from the current record. This method returns null if any of the columns
* are not found. Override this method if you want to deal with nulls differently.
* @return array of byte values
*/
protected byte[][] extractKeyValues(Result r) {
byte[][] keyVals = null;
ArrayList<byte[]> foun... | 3.68 |
hbase_IndividualBytesFieldCell_getTimestamp | // 4) Timestamp
@Override
public long getTimestamp() {
return timestamp;
} | 3.68 |
flink_JobExecutionResult_getAccumulatorResult | /**
* Gets the accumulator with the given name. Returns {@code null}, if no accumulator with that
* name was produced.
*
* @param accumulatorName The name of the accumulator.
* @param <T> The generic type of the accumulator value.
* @return The value of the accumulator with the given name.
*/
@SuppressWarnings("... | 3.68 |
morf_TableOutputter_getBoldFormat | /**
* @return the format to use for bold cells
* @throws WriteException if the format could not be created
*/
private WritableCellFormat getBoldFormat() throws WriteException {
WritableFont boldFont = new WritableFont(WritableFont.ARIAL, 8, WritableFont.BOLD);
WritableCellFormat boldHeading = new WritableCellFor... | 3.68 |
framework_VFilterSelect_updateReadOnly | /** For internal use only. May be removed or replaced in the future. */
public void updateReadOnly() {
debug("VFS: updateReadOnly()");
tb.setReadOnly(readonly || !textInputEnabled);
} | 3.68 |
framework_SpaceSelectHandler_setDeselectAllowed | /**
* Sets whether pressing space for the currently selected row should
* deselect the row.
*
* @param deselectAllowed
* <code>true</code> to allow deselecting the selected row;
* otherwise <code>false</code>
*/
public void setDeselectAllowed(boolean deselectAllowed) {
this.deselectAllo... | 3.68 |
graphhopper_MaxSpeedCalculator_fillMaxSpeed | /**
* This method sets max_speed values where the value is UNSET_SPEED to a value determined by
* the default speed library which is country-dependent.
*/
public void fillMaxSpeed(Graph graph, EncodingManager em) {
// In DefaultMaxSpeedParser and in OSMMaxSpeedParser we don't have the rural/urban info,
// bu... | 3.68 |
framework_Panel_getActionManager | /*
* ACTIONS
*/
@Override
protected ActionManager getActionManager() {
if (actionManager == null) {
actionManager = new ActionManager(this);
}
return actionManager;
} | 3.68 |
hbase_SaslClientAuthenticationProvider_relogin | /**
* Executes any necessary logic to re-login the client. Not all implementations will have any
* logic that needs to be executed.
*/
default void relogin() throws IOException {
} | 3.68 |
flink_DistinctType_newBuilder | /** Creates a builder for a {@link DistinctType}. */
public static DistinctType.Builder newBuilder(
ObjectIdentifier objectIdentifier, LogicalType sourceType) {
return new DistinctType.Builder(objectIdentifier, sourceType);
} | 3.68 |
framework_AbstractRendererConnector_createRenderer | /**
* Creates a new Renderer instance associated with this renderer connector.
* <p>
* You should typically not override this method since the framework by
* default generates an implementation that uses
* {@link com.google.gwt.core.client.GWT#create(Class)} to create a renderer
* of the same type as returned by ... | 3.68 |
MagicPlugin_Messages_getSpace | /**
* This relies on the negative space font RP:
* https://github.com/AmberWat/NegativeSpaceFont
*/
@Nonnull
@Override
public String getSpace(int pixels) {
if (pixels == 0) {
return "";
}
if (spaceAmounts.containsKey(pixels)) {
return spaceAmounts.get(pixels);
}
int totalPixels ... | 3.68 |
hbase_KeyValueHeap_getComparator | /**
* */
public CellComparator getComparator() {
return this.kvComparator;
} | 3.68 |
flink_MultipleInputNodeCreationProcessor_canBeInSameGroupWithOutputs | /**
* A node can only be assigned into the same multiple input group of its outputs if all outputs
* have a group and are the same.
*
* @return the {@link MultipleInputGroup} of the outputs if all outputs have a group and are the
* same, null otherwise
*/
private MultipleInputGroup canBeInSameGroupWithOutputs... | 3.68 |
hadoop_RenameOperation_endOfLoopActions | /**
* Operations to perform at the end of every loop iteration.
* <p>
* This may block the thread waiting for copies to complete
* and/or delete a page of data.
*/
private void endOfLoopActions() throws IOException {
if (keysToDelete.size() == pageSize) {
// finish ongoing copies then delete all queued keys.... | 3.68 |
flink_StateTable_put | /**
* Maps the composite of active key and given namespace to the specified state.
*
* @param namespace the namespace. Not null.
* @param state the state. Can be null.
*/
public void put(N namespace, S state) {
put(keyContext.getCurrentKey(), keyContext.getCurrentKeyGroupIndex(), namespace, state);
} | 3.68 |
hadoop_SequentialBlockGroupIdGenerator_hasValidBlockInRange | /**
* @param b A block object whose id is set to the starting point for check
* @return true if any ID in the range
* {id, id+HdfsConstants.MAX_BLOCKS_IN_GROUP} is pointed-to by a stored
* block.
*/
private boolean hasValidBlockInRange(Block b) {
final long id = b.getBlockId();
for (int i = 0; i < MA... | 3.68 |
hbase_ZKUtil_listChildrenAndWatchThem | /**
* List all the children of the specified znode, setting a watch for children changes and also
* setting a watch on every individual child in order to get the NodeCreated and NodeDeleted
* events.
* @param zkw zookeeper reference
* @param znode node to get children of and watch
* @return list of znode names,... | 3.68 |
hbase_AbstractRpcBasedConnectionRegistry_transformMetaRegionLocations | /**
* Simple helper to transform the result of getMetaRegionLocations() rpc.
*/
private static RegionLocations transformMetaRegionLocations(GetMetaRegionLocationsResponse resp) {
List<HRegionLocation> regionLocations = new ArrayList<>();
resp.getMetaLocationsList()
.forEach(location -> regionLocations.add(Pro... | 3.68 |
hadoop_CommitContext_switchToIOStatisticsContext | /**
* Switch to the context IOStatistics context,
* if needed.
*/
public void switchToIOStatisticsContext() {
IOStatisticsContext.setThreadIOStatisticsContext(ioStatisticsContext);
} | 3.68 |
hudi_HoodieTableConfig_setMetadataPartitionsInflight | /**
* Enables the specified metadata table partition as inflight.
*
* @param partitionTypes The list of partitions to enable as inflight.
*/
public void setMetadataPartitionsInflight(HoodieTableMetaClient metaClient, List<MetadataPartitionType> partitionTypes) {
Set<String> partitionsInflight = getMetadataPartiti... | 3.68 |
hbase_QuotaObserverChore_getTableQuotaSnapshots | /**
* Returns an unmodifiable view over the current {@link SpaceQuotaSnapshot} objects for each HBase
* table with a quota defined.
*/
public Map<TableName, SpaceQuotaSnapshot> getTableQuotaSnapshots() {
return readOnlyTableQuotaSnapshots;
} | 3.68 |
hadoop_DatanodeVolumeInfo_getReservedSpace | /**
* get reserved space.
*/
public long getReservedSpace() {
return reservedSpace;
} | 3.68 |
flink_RexNodeJsonDeserializer_deserializeSqlOperator | /** Logic shared with {@link AggregateCallJsonDeserializer}. */
static SqlOperator deserializeSqlOperator(JsonNode jsonNode, SerdeContext serdeContext) {
final SqlSyntax syntax;
if (jsonNode.has(FIELD_NAME_SYNTAX)) {
syntax =
serializableToCalcite(
SqlSyntax.class... | 3.68 |
framework_Table_setColumnIcon | /**
* Sets the icon Resource for the specified column.
* <p>
* Throws IllegalArgumentException if the specified column is not visible.
* </p>
*
* @param propertyId
* the propertyId identifying the column.
* @param icon
* the icon Resource to set.
*/
public void setColumnIcon(Object prope... | 3.68 |
hbase_PrivateCellUtil_compareKeyBasedOnColHint | /**
* Used to compare two cells based on the column hint provided. This is specifically used when we
* need to optimize the seeks based on the next indexed key. This is an advanced usage API
* specifically needed for some optimizations.
* @param nextIndexedCell the next indexed cell
* @param currentCell the ce... | 3.68 |
framework_AriaHelper_clearCaption | /**
* Removes a binding to a caption added with bindCaption() from the provided
* Widget.
*
* @param widget
* Widget, that was bound to a caption before
*/
private static void clearCaption(Widget widget) {
Roles.getTextboxRole()
.removeAriaLabelledbyProperty(widget.getElement());
} | 3.68 |
Activiti_AbstractOperation_executeExecutionListeners | /**
* Executes the execution listeners defined on the given element, with the given event type,
* and passing the provided execution to the {@link ExecutionListener} instances.
*/
protected void executeExecutionListeners(HasExecutionListeners elementWithExecutionListeners,
ExecutionEntity executionEntity, String... | 3.68 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.