patch
stringlengths
18
160k
callgraph
stringlengths
4
179k
summary
stringlengths
4
947
msg
stringlengths
6
3.42k
@@ -172,12 +172,13 @@ class TaskRunner: tickets = [] while True: for q in queues: - tickets.append(q.get(timeout=2)) # timeout after 2 seconds + try: + tickets.append(q.get(timeout=2)) # timeout after 2 seconds + except TornadoError: + for ticket, q in zip(tickets, queues): + q.put(ticket) if len(tickets) == len(queues): break - else: # release tickets - for ticket, q in zip(tickets, queues): - q.put(ticket) try: state = self.get_pre_run_state(
[TaskRunner->[get_retry_state->[,Retrying,get,info,utcnow],get_pre_run_state->[Skipped,TRIGGERFAIL,trigger,all,any,is_running,,DONTRUN,Running,is_pending,get,isinstance,cache_validator,is_finished,Success],get_run_state->[is_running,debug,DONTRUN,CachedState,get,SKIP,str,format,run,utcnow,Success],run->[context,Pending,get_pre_run_state,append,get,update,flatten_seq,len,get_run_state,str,set,isinstance,items,put,zip,values,get_post_run_state],__init__->[getLogger,type],get_post_run_state->[get_retry_state,DONTRUN,get,isinstance,is_finished]],handle_signals->[inner->[get_retry_state,debug,type,Failed,get,format,method],wraps]]
This method is the main entry point for the TaskRunners. It is the base method Get a that can be used to determine if a task has a result that can be.
This is a very specific catch -- based on your note above, is it necessary or could it be expanded to catch any `Exception` (which would be more robust for future queue types that could be added in the future)
@@ -578,6 +578,7 @@ func RegisterRoutes(m *macaron.Macaron) { // Organizations m.Get("/user/orgs", reqToken(), org.ListMyOrgs) m.Get("/users/:username/orgs", org.ListUserOrgs) + m.Post("/orgs", reqToken(), bind(api.CreateOrgOption{}), org.Create) m.Group("/orgs/:orgname", func() { m.Get("/repos", user.ListOrgRepos) m.Combo("").Get(org.Get).
[RequireCSRF,IsOrganizationOwner,Patch,Status,APIContexter,IsErrOrgNotExist,Delete,RepoRef,UserID,GetRepositoryByName,IsWriter,AllowsPulls,IsErrRepoNotExist,Put,IsErrRepoRedirectNotExist,UnitEnabled,Error,GetTeamByID,Post,JSON,Trace,IsOrganizationMember,RedirectToRepo,LookupRepoRedirect,Any,ParamsInt64,GetOrgByName,ToLower,IsErrUserNotExist,Get,GetUserByName,Query,HasAccess,ReferencesGitRepo,RepoRefByType,AccessLevel,Combo,Group,Params]
Group of all components of the API. Put - Put a new member in the organization.
Can you add Gitea copyright to header of this file?
@@ -102,8 +102,13 @@ func (d *Service) Handle(c net.Conn) { logReg = newLogRegister(d.logForwarder, d.G().Log) defer logReg.UnregisterLogger() } - - if err := d.RegisterProtocols(server, xp, connID, logReg, d.G()); err != nil { + shutdowners, err := d.RegisterProtocols(server, xp, connID, logReg, d.G()) + defer func() { + for _, shutdowner := range shutdowners { + shutdowner.Shutdown() + } + }() + if err != nil { d.G().Log.Warning("RegisterProtocols error: %s", err) return }
[GetExclusiveLock->[GetExclusiveLockWithoutAutoUnlock,ReleaseLock],writeServiceInfo->[ensureRuntimeDir],GetExclusiveLockWithoutAutoUnlock->[ensureRuntimeDir],SimulateGregorCrashForTesting->[HasGregor],Handle->[RegisterProtocols],ListenLoop->[Handle]]
Handle handles a connection.
Does this also work when the service is shutdown for non-CLI connections? i.e., do we stop server.Run() cleanly?
@@ -15,6 +15,7 @@ module Engine @routes = routes @override = override @game = game + @stops = [] restore_connections(connection_hexes) if connection_hexes end
[Route->[touch_node->[next_connection,segment,select],check_distance!->[distance],revenue->[check_distance!,check_cycles!,check_connected!,check_overlap!],select->[select]]]
Initialize a new instance of the class.
this should be nil line 45 is not needed make restore_connections private
@@ -156,9 +156,15 @@ module.exports = class extends BaseGenerator { type: String }); - // This adds support for a `--blueprint` flag which can be used to specify a blueprint to use for generation + // NOTE: Deprecated!!! Use --blueprints instead this.option('blueprint', { - desc: 'Specify a generator blueprint to use for the sub generators', + desc: 'Specify one or more generator blueprints to use for the sub generators', + type: String + }); + // This adds support for a `--blueprints` flag which can be used to specify one or more blueprints to use for generation + this.option('blueprints', { + desc: + 'A comma separated list of one or more generator blueprints to use for the sub generators, e.g. --blueprints kotlin,vuejs', type: String });
[No CFG could be retrieved]
Adds support for server side generation flags. Set entity suffix from config or options.
We should add an example here, also I think yeomen have a List type if I'm not wrong
@@ -140,7 +140,7 @@ class TypesDatabaseTests(base.PulpServerTests): self.assertTrue(DEF_1.id in e.missing_type_ids) self.assertTrue(DEF_2.id in e.missing_type_ids) self.assertTrue(DEF_3.id in e.missing_type_ids) - print(e) # used to test the __str__ impl + str(e) # used to test the __str__ impl def test_update_failed_create(self): """
[TypesDatabaseTests->[test_update_missing_no_error->[assertTrue,all_type_collection_names,index_information,assertEqual,len,unit_collection_name,type_units_collection,update_database],test_update_no_changes->[assertTrue,all_type_collection_names,index_information,assertEqual,len,unit_collection_name,type_units_collection,update_database],test_all_type_ids->[assertTrue,TypeDefinition,all_type_ids,assertEqual,len,_create_or_update_type],test_update_unit_key_single_field->[assertTrue,TypeDefinition,index_information,assertEqual,_update_unit_key,len,get_collection,unit_collection_name],test_update_failed_create->[TypeDefinition,update_database,print,assertEqual,len,fail],test_all_type_ids_no_entries->[assertTrue,all_type_ids,assertEqual,len,isinstance],test_drop_indexes->[TypeDefinition,_drop_indexes,index_information,assertEqual,_update_unit_key,len,get_collection,unit_collection_name],test_all_type_definitions->[assertEqual,all_type_definitions,update_database,len],test_create_or_update_existing_type_collection->[assertTrue,TypeDefinition,list,assertEqual,get_database,len,_create_or_update_type,get_collection,unit_collection_name],test_all_type_collection_names->[TypeDefinition,all_type_collection_names,assertEqual,len,_create_or_update_type,unit_collection_name],test_type_units_unit_key->[type_units_unit_key,_create_or_update_type,TypeDefinition,assertEqual],test_all_type_collection_names_no_entries->[assertTrue,assertEqual,all_type_collection_names,len],test_update_search_indexes->[TypeDefinition,index_information,_update_search_indexes,assertEqual,len,get_collection,unit_collection_name],test_create_or_update_type_collection->[assertTrue,TypeDefinition,list,assertEqual,get_database,len,_create_or_update_type,get_collection,unit_collection_name],clean->[super,clean],test_update_missing_with_error->[assertTrue,update_database,print,assertEqual,len,fail],test_update_unit_key_multiple_fields->[assertTrue,TypeDefinition,index_information,assertEqual,_update_unit_key,len
,get_collection,unit_collection_name],test_type_units_unique_indexes_missing_def->[assertTrue,type_units_unit_key],test_update_clean_database->[assertTrue,all_type_collection_names,index_information,assertEqual,len,unit_collection_name,type_units_collection,update_database]],TypeDefinition]
Tests that updating a previously loaded database with some missing type definitions correctly throws an error when requested.
couldn't help myself. I know this isn't related to Collection caching at all. :)
@@ -2159,6 +2159,10 @@ int s_client_main(int argc, char **argv) int foundit = 0; BIO *fbio = BIO_new(BIO_f_buffer()); + if (fbio == NULL) { + BIO_printf(bio_err, "Unable to create BIO\n"); + goto end; + } BIO_push(fbio, sbio); /* Wait for multi-line response to end from LMTP or SMTP */ do {
[No CFG could be retrieved]
Reads and processes the n - line response of an OCSP message. END of MAIL.
I'd use `goto shut` here and in the gotos below.
@@ -1014,6 +1014,8 @@ namespace System.DirectoryServices.AccountManagement if (_queryCtx != null) _queryCtx.Dispose(); + _credValidate.Dispose(); + _disposed = true; GC.SuppressFinalize(this); }
[CredentialValidator->[Validate->[BindLdap,BindSam]],PrincipalContext->[StoreCtx->[Initialize],ValidateCredentials->[Validate],Dispose->[Dispose],ReadServerConfig->[Dispose],Initialize]]
Dispose of a specific object.
We do not need a null check here because the constructor of `PrincipalContext` always assigns an instance of `CredentialValidator` to `_credValidate` and this is the only place where `_credValidate` is modified (the field is readonly).
@@ -15,7 +15,8 @@ class Devise::TwoFactorAuthenticationController < DeviseController if user_fully_authenticated? && current_user.unconfirmed_mobile.blank? redirect_to dashboard_index_url end - @user_decorator = UserDecorator.new(current_user) + + @phone_number = UserDecorator.new(current_user).two_factor_phone_number end def update
[show->[new],handle_invalid_otp->[new],reset_attempt_count_if_user_no_longer_locked_out->[update],update_authenticated_resource->[update],send_number_change_sms_if_needed->[new],handle_second_factor_locked_resource->[new]]
shows the nexus otp user object.
Perhaps make @user_decorator -> @phone_number into a separate change?
@@ -21,8 +21,13 @@ function $SnifferProvider() { this.$get = ['$window', '$document', function($window, $document) { var eventSupport = {}, // Chrome Packaged Apps are not allowed to access `history.pushState`. They can be detected by - // the presence of `chrome.app.runtime` (see https://developer.chrome.com/apps/api_index) - isChromePackagedApp = $window.chrome && $window.chrome.app && $window.chrome.app.runtime, + // the presence of `chrome.app.runtime` (see https://developer.chrome.com/apps/api_index). + // For sandboxed apps, check for an extension runtime ID, but no access to other Chrome + // runtime APIs. See https://developer.chrome.com/apps/manifest/sandbox + isChromePackagedApp = + $window.chrome && + ($window.chrome.app && $window.chrome.app.runtime || + !$window.chrome.app && $window.chrome.runtime && $window.chrome.runtime.id), hasHistoryPushState = !isChromePackagedApp && $window.history && $window.history.pushState, android = toInt((/android (\d+)/.exec(lowercase(($window.navigator || {}).userAgent)) || [])[1]),
[No CFG could be retrieved]
Provides a sniffer for the given object. Check if the browser has history. pushState.
I don't think this check can distinguish sandboxed apps from extensions. We need a check that can reliably detect sandboxed apps _only_.
@@ -92,8 +92,6 @@ public final class ClientLoginValidator implements ILoginValidator { if (hashedMac == null) { return ErrorMessages.UNABLE_TO_OBTAIN_MAC; - } else if (!MacFinder.isValidHashedMacAddress(hashedMac)) { - return ErrorMessages.INVALID_MAC; } if (Boolean.TRUE.toString().equals(propertiesSentToClient.get(PASSWORD_REQUIRED_PROPERTY))) {
[ClientLoginValidator->[authenticate->[authenticate]]]
Verify connection.
System id does not have the same hashing applied to it, as it's random. Since the system id is client-side generated, any validity requirements based on hashing can be elided. Hence, we're making it more difficult for system-id/mac to be spoofed, but ultimately we're not really adding security. I'm curious if we think we should try to make it a bit harder for someone to modify/hardcode a different system-id. The scenario is basically someone changes their system id by custom compiling code, do we think that a hash guarantee is going to actually stop them if they get an error for invalid system id, or will the attacker be sophisticated enough already to hash their injected value so it passes?
@@ -160,8 +160,15 @@ class RemoteManager(object): urls.pop(EXPORT_SOURCES_TGZ_NAME, None) return urls - zipped_files = self._call_remote(remote, "get_recipe", conan_reference, dest_folder, - filter_function) + t1 = time.time() + conan_reference, urls = self._call_remote(remote, "get_recipe_urls", conan_reference) + urls = filter_function(urls) + if not urls: + return conan_reference + + dest_folder = self._client_cache.export(conan_reference) + zipped_files = self._call_remote(remote, "download_files_to_folder", urls, dest_folder) + duration = time.time() - t1 log_recipe_download(conan_reference, duration, remote, zipped_files)
[unzip_and_get_files->[remove],RemoteManager->[upload_package->[_package_integrity_check]],_compress_recipe_files->[add_tgz]]
Get a recipe from the remote.
Previously these actions were done in the `rest_client` but we used that ugly filter_files and the ugliness was increasing, so now the rest_api: a) gets urls b) download urls And is the remote manager who manages the file filtering calling first to a), then filtering then b)
@@ -36,6 +36,7 @@ import { useState, } from '../../../src/preact'; import {useMountEffect, useResourcesNotify} from '../../../src/preact/utils'; +import {useStyles} from './autoplay.jss'; /** * @param {?{getMetadata: (function():?JsonObject|undefined)}} player
[No CFG could be retrieved]
Component that has a unique id and its children. A video wrapper that exports a single component.
@samouri I'm not entirely sure if it's a good idea to keep this per-feature. I'm not sure if there's a restriction on one `useStyles` per component?
@@ -26,4 +26,15 @@ public interface PasswordEncoder { * @return the transformed version of the password */ String encode(String password); + + /** + * Method that checks if a plaintext password matches an encrypted/hashed + * version. Useful when the encoding method manages a salt, such as + * bcrypt/scrypt. + * + * @param rawPassword the password to check + * @param encodedPassword the encoded password + * @return whether or not the password matches the encoded password + */ + boolean matches(CharSequence rawPassword, String encodedPassword); }
[No CFG could be retrieved]
Encode a password.
Turn this into `boolean matches(CharSequence rawPassword);`
@@ -83,6 +83,12 @@ var ( // ErrKnownTransaction is returned if a transaction that is already in the pool // attempting to be added to the pool. ErrKnownTransaction = errors.New("known transaction") + + // ErrBlacklistFrom is returned if a transaction's from/source address is blacklisted + ErrBlacklistFrom = errors.New("`from` address of transaction is blacklisted") + + // ErrBlacklistTo is returned if a transaction's to/destination address is blacklisted + ErrBlacklistTo = errors.New("`to` address of transaction is blacklisted") ) var (
[Get->[Get],removeTx->[enqueueTx,Get],demoteUnexecutables->[enqueueTx,Get],validateTx->[GasPrice],addTxsLocked->[add],enqueueTx->[Add],promoteTx->[Add],add->[validateTx,GasPrice,add],addTx->[add],promoteExecutables->[promoteTx,removeTx],containsTx->[contains],sanitize]
This function returns a list of errors that can be used to determine the required gas for a This is a bit of a hack to avoid the problem with the tx_block_transaction.
... transaction in blacklist
@@ -195,7 +195,7 @@ def handle_routechange(state, state_change): def handle_transferrefund(state, state_change): if state_change.sender == state.route.node_address: - iteration = cancel_current_route(state) + iteration = cancel_current_route(state, send_reveal_secret=True) else: iteration = TransitionResult(state, list())
[handle_cancelroute->[cancel_current_route],handle_secretrequest->[cancel_current_route],state_transition->[handle_cancelroute,handle_secretrequest,try_new_route,handle_canceltransfer,handle_routechange,handle_block,handle_secretreveal,handle_transferrefund],handle_canceltransfer->[user_cancel_transfer],handle_transferrefund->[cancel_current_route]]
Handle a transferrefund transition.
Please, validate *all* the fields. There is nothing checking that the amount from the refund is correct.
@@ -81,6 +81,16 @@ public abstract class AbstractFlinkCombineRunner< PipelineOptions options, SideInputReader sideInputReader, Collection<? extends BoundedWindow> windows); + + default void setup( + PipelineOptions options, + SideInputReader sideInputReader + ) {} + + default void teardown( + PipelineOptions options, + SideInputReader sideInputReader + ) {} } /**
[AbstractFlinkCombineRunner->[FinalFlinkCombiner->[extractOutput->[extractOutput]],PartialFlinkCombiner->[firstInput->[addInput],addInput->[addInput]],CompleteFlinkCombiner->[firstInput->[addInput],addInput->[addInput],extractOutput->[extractOutput]]]]
ExtractOutput extracts the output of a single accumulator from the pipeline.
This seems like maybe it would leave out places where these should be called.
@@ -1741,6 +1741,12 @@ ROM_START( sun4_20 ) ROM_LOAD( "520-2748-04.rom", 0x0000, 0x20000, CRC(e85b3fd8) SHA1(4cbc088f589375e2d5983f481f7d4261a408702e)) ROM_END +// SPARCstation ELC (Sun 4/25) +ROM_START(sun4_25) + ROM_REGION32_BE( 0x80000, "user1", ROMREGION_ERASEFF ) + ROM_LOAD( "520-3085-03.rom", 0x0000, 0x40000, CRC(faafaf0d) SHA1(23a1b78392883b06eff9f7828e955399b6daa3d6)) +ROM_END + // SPARCstation 1 (Sun 4/60) /* SCC init 1 for the keyboard is identical to Sun 4/75 init 3 */ ROM_START( sun4_60 )
[No CFG could be retrieved]
region 16 - bit Baudrate region 0x80000 - 0x80000 - 0x80000 - 0x.
We use one tab indent per scope level - you can see this for all the other ROM definitions in the file. Please follow the format of the surrounding code.
@@ -511,6 +511,14 @@ void SPIClass::transferBytes(const uint8_t * out, uint8_t * in, uint32_t size) { } } +/** + * Note: + * in and out need to be aligned to 32Bit + * or you get an Fatal exception (9) + * @param out uint8_t * + * @param in uint8_t * + * @param size uint8_t (max 64) + */ void SPIClass::transferBytes_(const uint8_t * out, uint8_t * in, uint8_t size) { while(SPI1CMD & SPIBUSY) {} // Set in/out Bits to transfer
[write32->[write32],write16->[write16],transfer16->[transfer]]
transferBytes - Transfer bytes from one memory to another.
I suggest an ASSERT() here checking the alignment of the inputs would be handy here and print a useful error when debugging was enabled...
@@ -497,6 +497,12 @@ export class AmpAdNetworkDoubleclickImpl extends AmpA4A { } this.fireDelayedImpressions(responseHeaders.get('X-AmpImps')); this.fireDelayedImpressions(responseHeaders.get('X-AmpRSImps'), true); + + const refreshInterval = Number(responseHeaders.get('force-refresh')); + if (refreshInterval) { + this.initializeRefreshManagerIfEligible_(refreshInterval); + } + // If the server returned a size, use that, otherwise use the size that we // sent in the ad request. let size = super.extractSize(responseHeaders);
[AmpAdNetworkDoubleclickImpl->[extractSize->[height,extractAmpAnalyticsConfig,get,setGoogleLifecycleVarsFromHeaders,width],getBlockParameters_->[serializeTargeting_,dev,user,isInManualExperiment,assign,join,googleBlockParameters,getMultiSizeDimensions,map],constructor->[resolver,experimentFeatureEnabled,extensionsFor,rejector,getMode,promise,SRA],tearDownSlot->[promise,rejector,removeElement,resolver],executeRtc_->[resolve,timerFor,status,text,xhrFor,user,append,textContent,isSecureUrl,message,verifyRtcConfigMember,now,tryParseJson,isObject,getElementById],initLifecycleReporter->[googleLifecycleReporterFactory],onCreativeRender->[height,dev,addCsiSignalsToAmpAnalyticsConfig,insertAnalyticsElement,isReportingEnabled,setStyles,width],generateAdKey_->[getAttribute,domFingerprintPlain,stringHash32],shouldSendRequestWithoutRtc->[resolve,verifyRtcConfigMember,RTC_FAILURE,parseUrl,reject,match],buildCallback->[getExperimentBranch,dev,getVisibilityState,PAUSED,addExperimentIdToElement,onVisibilityChanged,viewerForDoc,EXPERIMENT,randomlySelectUnsetExperiments],populateAdUrlState->[tryParseJson,Number],getSlotSize->[Number],layoutCallback->[user,getEnclosingContainerTypes],isValidElement->[querySelector,isGoogleAdsA4AValidEnvironment],fireDelayedImpressions->[split,dev,dict,isSecureUrl,createElementWithAttributes],getAdUrl->[getPageLevelParameters_,resolve,all,googleAdUrl,dev,now,assign,isExperimentOn],groupSlotsForSra->[groupAmpAdsByType],onNetworkFailure->[dev,maybeAppendErrorParameter],mergeRtc->[then,resolve,rtcResponse,RTC_SUCCESS,rtcTotalTime,message,success,parseUrl,deepMerge],delayAdRequestEnabled->[experimentFeatureEnabled,DELAYED_REQUEST],initiateSraRequests->[all,dev,shift,lineDelimitedStreamer,attemptCollapse,SAFEFRAME,map,metaJsonCreativeGrouper,hasAdPromise,resetAdUrl,element,length,isCancellation,sraResponseRejector,keys,xhrFor,checkStillCurrent,assignAdUrlToError,sraResponseResolver,constructSRARequest_,forEach,utf8Encode]],dev,isInManualExperiment,join,encode
URIComponent,map,isArray,googlePageParameters,registerElement,initialSize_,devicePixelRatio,getAttribute,extension,truncAndTimeUrl,adKey_,user,constructSRABlockParameters,now,assign,element,length,serializeItem_,push,getPageLevelParameters_,split,serializeTargeting_,keys,getFirstInstanceValue_,jsonTargeting_,extractFn,forEach,combiner]
Extract the size from the response headers.
let's put amp somewhere in this header name
@@ -572,12 +572,12 @@ describes.sandboxed('amp-img', {}, (env) => { }); it('does not interfere with SSR img creation', () => { - const impl = getImgWithBlur(true, true); + const impl = getImgWithBlur(true, true, true); const ampImg = impl.element; - ampImg.setAttribute('i-amphtml-ssr', ''); impl.buildCallback(); impl.layoutCallback(); + // debugger; expect(ampImg.querySelector('img[src*="sample.jpg"]')).to.exist; expect(ampImg.querySelector('img[src*="image/svg+xml"]')).to.exist; });
[No CFG could be retrieved]
This method checks that the image has a blur and that the image is rendered before the placeholder It does not interfere with SSR img after placeholder.
Oops, left this in.
@@ -48,9 +48,7 @@ class InProcessBundleFactory implements BundleFactory { @Override public <T> UncommittedBundle<T> createBundle(CommittedBundle<?> input, PCollection<T> output) { - return input.isKeyed() - ? InProcessBundle.keyed(output, input.getKey()) - : InProcessBundle.unkeyed(output); + return InProcessBundle.keyed(output, input.getKey()); } @Override
[InProcessBundleFactory->[create->[InProcessBundleFactory],InProcessBundle->[commit->[toString->[toString]],add->[add]]]]
Create a bundle with a key.
Based on the nullability, it makes sense that this can work, but it does read strangely.
@@ -281,11 +281,9 @@ namespace Microsoft.Xna.Framework.Graphics featureLevels.Add(FeatureLevel.Level_9_1); var driverType = GraphicsAdapter.UseReferenceDevice ? DriverType.Reference : DriverType.Hardware; - -#if DEBUG + try { -#endif // Create the Direct3D device. using (var defaultDevice = new SharpDX.Direct3D11.Device(driverType, creationFlags, featureLevels.ToArray())) _d3dDevice = defaultDevice.QueryInterface<SharpDX.Direct3D11.Device1>();
[No CFG could be retrieved]
Creates a Direct3D device and returns it. Get the Direct3D device.
Does anyone know why this code is inside the try block? I didn't add it, but it seems like it should be outside the try/catch in case the device creation fails the first time.
@@ -181,7 +181,7 @@ setup( }, install_requires=install_requires, tests_require=test_requirements, - python_requires='>=3.5', + python_requires='>=3.6', entry_points={ 'console_scripts': [ 'raiden = raiden.__main__:main'
[PyTest->[finalize_options->[finalize_options]],BuildPyCommand->[run->[run]],read_version_from_git]
Define the module that will be used to compile the n - core contract.
What are the breaking changes that make it impossible to run this project with the version 3.5 of the interpreter?
@@ -623,11 +623,13 @@ define([ /** * Set to true to copy the depth texture after rendering the globe. Makes czm_globeDepthTexture valid. + * Set to false if Entities on terrain or GroundPrimitives are not used for a potential performance improvement. + * * @type {Boolean} - * @default false + * @default true * @private */ - this.copyGlobeDepth = false; + this.copyGlobeDepth = true; /** * Blends the atmosphere to geometry far from the camera for horizon views. Allows for additional
[No CFG could be retrieved]
Displays a single . Highlights the color of unclassified 3D Tile geometry when the color s alpha is.
I would just remove this option and always copy depth.
@@ -205,6 +205,9 @@ public class OMBucketCreateRequest extends OMClientRequest { // Add default acls from volume. addDefaultAcls(omBucketInfo, omVolumeArgs); + // update used namespace for volume + omVolumeArgs.incrUsedNamespace(1L); + // Update table cache. metadataManager.getBucketTable().addCacheEntry(new CacheKey<>(bucketKey), new CacheValue<>(Optional.of(omBucketInfo), transactionLogIndex));
[OMBucketCreateRequest->[getBeinfo->[getBeinfo,getKeyName,warmUpEncryptedKeys,build,getMetadata,getCipher,setSuite,convert,OMException],checkQuotaBytesValid->[listBuckets,getVolume,IllegalArgumentException,getQuotaInBytes],addDefaultAcls->[toList,setAcls,getAcls,addAll,collect,inheritDefaultAcls],validateAndUpdateCache->[acquireWriteLock,acquireReadLock,OMBucketCreateResponse,getUserInfo,getFromProtobuf,getBucketInfo,getObjectIdFromTxId,getVolumeKey,incNumBucketCreates,getAclsEnabled,getOmRequest,getVolumeName,getMetrics,of,createErrorOMResponse,toAuditMap,getBucketName,getBucketKey,getCreateBucketRequest,auditLog,releaseWriteLock,incNumBucketCreateFails,setUpdateID,debug,isRatisEnabled,error,addDefaultAcls,isExist,setCreateBucketResponse,getReadCopy,incNumBuckets,OMException,getAuditLogger,buildAuditMessage,build,addCacheEntry,releaseReadLock,checkQuotaBytesValid,setObjectID,addResponseToDoubleBuffer,checkAcls,getMetadataManager,getOMResponseBuilder],preExecute->[getBeinfo,hasSourceVolume,hasBeinfo,build,getKmsProvider,validateBucketName,getBucketName,toBuilder,getBucketInfo,getCreateBucketRequest,now,setBucketInfo,hasSourceBucket,OMException,setModificationTime,setBeinfo],getLogger]]
Validate and update cache. This method is called when a bucket is created.
We should also update the volume table cache like above, as omVolumeArgs be updated.
@@ -9,6 +9,7 @@ using System.Linq; using System.IO; using System.Reflection.PortableExecutable; using System.Runtime.InteropServices; +using System.IO.Compression; namespace Microsoft.NET.HostModel.Bundle {
[Bundler->[ShouldExclude->[ShouldExclude],FileType->[IsAssembly],GenerateBundle->[IsHost,AddToBundle,ShouldExclude,ShouldIgnore]]]
Creates a Bundler which bundles a managed app into the host native binary. Adds a file offset to the end of a bundle.
Nit: sort usings
@@ -57,6 +57,7 @@ class SMACTuner(Tuner): self.smbo_solver = None self.first_one = True self.update_ss_done = False + self.loguniform_key = set() def _main_cli(self): '''
[SMACTuner->[update_search_space->[_main_cli],__init__->[OptimizeMode]]]
Initialize the object variables. Get an optimizer object for a .
should also update the function `generate_parameters`.
@@ -144,7 +144,7 @@ class ImageClassificationDatasetCreater(preprocess_util.DatasetCreater): return create_dataset_from_list(path) label_set = preprocess_util.get_label_set_from_dir(path) data = [] - for l_name in label_set.keys(): + for l_name in list(label_set.keys()): image_paths = preprocess_util.list_images( os.path.join(path, l_name)) for p in image_paths:
[DiskImage->[convert_to_paddle_format->[read_image],read_image->[resize_image],convert_to_array->[read_image]],ImageClassificationDatasetCreater->[create_dataset_from_dir->[DiskImage,create_dataset_from_list],create_dataset_from_list->[DiskImage],__init__->[__init__]]]
Create a dataset from a directory.
This change is not necessary
@@ -222,6 +222,13 @@ public class BasicRootedOzoneClientAdapterImpl OzoneBucket bucket; try { bucket = proxy.getBucketDetails(volumeStr, bucketStr); + String bucketLayout = bucket.getBucketLayout().name(); + if (!StringUtils.equalsIgnoreCase(bucketLayout, + OMConfigKeys.OZONE_DEFAULT_BUCKET_LAYOUT_FILE_SYSTEM_OPTIMIZED)) { + throw new IllegalArgumentException(bucketLayout + " does not support" + + " file system semantics. Bucket Layout must be " + + OMConfigKeys.OZONE_DEFAULT_BUCKET_LAYOUT_FILE_SYSTEM_OPTIMIZED); + } } catch (OMException ex) { if (createIfNotExist) { // getBucketDetails can throw VOLUME_NOT_FOUND when the parent volume
[BasicRootedOzoneClientAdapterImpl->[getTrashRoots->[getFileStatus],deleteObjects->[incrementCounter,deleteObjects,getBucket,areInSameBucket],getKeyProviderUri->[getKeyProviderUri],deleteObject->[deleteObject,incrementCounter,getBucket],IteratorAdapter->[next->[next],hasNext->[hasNext]],getDelegationToken->[getDelegationToken],listStatus->[listStatusRoot,incrementCounter,listStatus,listStatusVolume,getBucket],listKeys->[listKeys,incrementCounter,getBucket],getFileStatus->[getFileStatus,incrementCounter,getBucket],getKeyProvider->[getKeyProvider],readFile->[getBucket],getCanonicalServiceName->[getCanonicalServiceName],close->[close],getFileStatusAdapterForVolume->[getGroupName],createFile->[createFile,incrementCounter,getBucket],getBucket->[getBucket],rename->[renameKey,incrementCounter,getBucket],createDirectory->[createDirectory,incrementCounter,getBucket]]]
Get the bucket with the given name.
I think need similar kind of change in BasicOzoneClientAdapterImpl also
@@ -16,6 +16,8 @@ import {BaseElement} from '../src/base-element'; import {dev} from '../src/log'; +import {isExperimentOn} from '../src/experiments'; +import {guaranteeSrcForSrcsetUnsupportedBrowsers} from '../src/utils/img'; import {isLayoutSizeDefined} from '../src/layout'; import {listen} from '../src/event-helper'; import {registerElement} from '../src/service/custom-element-registry';
[No CFG could be retrieved]
Creates an object that represents an image of a specific type. The callback for the that is called when a new is added to the.
This and other calls to guaranteeSrcForSrcsetUnsupportedBrowsers were not implemented by me, but are in the current version of amp-img.js. It's just a two lines, but idk how important it is to ensure the changes that show up are only yours, so just wanted to point it out if you had advice on fixing it. Otherwise, I will try and solution as well! Edit: I think it was resolved thanks to @choumx 's help!
@@ -58,7 +58,7 @@ public class S3Utils public static <T> T retryS3Operation(Callable<T> f) throws IOException, S3ServiceException, InterruptedException { int nTry = 0; - final int maxTries = 3; + final int maxTries = 10; while (true) { try { nTry++;
[S3Utils->[closeStreamsQuietly->[closeDataInputStream],awaitNextRetry->[pow,max,sleep,nextGaussian,info],isObjectInBucket->[getResponseCode,getS3ErrorCode,equals,S3Bucket,getObjectDetails],retryS3Operation->[call,propagate,getS3ErrorCode,equals,getCause,awaitNextRetry],Logger]]
Retry the given callable until it throws an exception or until max tries is reached.
can we make this configurable with a default?
@@ -281,8 +281,8 @@ def cwt_morlet(X, sfreq, freqs, use_fft=True, n_cycles=7.0, zero_mean=False, """ mode = 'same' # mode = "valid" - n_signals, n_times = X.shape decim = int(decim) + n_signals, n_times = X[:, ::decim].shape # Precompute wavelets for given frequency range to save time Ws = morlet(sfreq, freqs, n_cycles=n_cycles, zero_mean=zero_mean)
[_cwt->[_centered],AverageTFR->[plot->[_preproc_tfr],__iadd__->[_check_compat],__isub__->[_check_compat],__add__->[_check_compat],__sub__->[_check_compat],plot_topo->[_preproc_tfr]],_induced_power_cwt->[morlet],_time_frequency->[_cwt],tfr_morlet->[AverageTFR,_induced_power_cwt,_get_data],tfr_multitaper->[_prepare_picks,AverageTFR,_induced_power_mtm,_get_data],_induced_power_mtm->[_dpss_wavelet],cwt->[_cwt],cwt_morlet->[morlet],read_tfrs->[AverageTFR],single_trial_power->[morlet,cwt],write_tfrs->[_prepare_write_tfr]]
Compute time - frequency decomposition with Morlet wavelets.
I'm actually wondering whether we should have this `int(decim)` in the first place. It would crash on the next line if it wasn't an int, and we don't really want people to pass `float` that will get rounded.
@@ -3308,8 +3308,9 @@ public final class FilePath implements SerializableOnlyOverRemoting { private static final long serialVersionUID = 1L; - @SuppressFBWarnings(value = "MS_SHOULD_BE_FINAL", justification = "TODO needs triage") - public static int SIDE_BUFFER_SIZE = 1024; + @Restricted(NoExternalUse.class) + @RestrictedSince("TODO") + public static final int SIDE_BUFFER_SIZE = 1024; private static final Logger LOGGER = Logger.getLogger(FilePath.class.getName());
[FilePath->[UntarFrom->[invoke->[extract]],unzip->[FilePath,unzip],AbstractInterceptorCallableWrapper->[call->[call],getClassLoader->[getClassLoader]],ValidateAntFileMask->[invoke->[equals],hasMatch->[isCaseSensitive->[isCaseSensitive,Cancel]]],UnzipLocal->[invoke->[getRemote,unzip]],copyTo->[copyTo,write,act],LastModified->[invoke->[lastModified]],DeleteRecursive->[invoke->[deleteRecursive]],Mode->[invoke->[mode]],renameTo->[act],createTempDir->[FilePath,act],CopyRecursiveRemoteToLocal->[invoke->[compress]],createLauncher->[call],chmod->[act,isUnix],CreateTextTempFile->[invoke->[mkdirs,createTempFile]],CopyTo->[invoke->[close]],isDirectory->[act],getUsableDiskSpace->[act],glob->[isAbsolute],FileCallableWrapper->[toString->[toString],call->[invoke],checkRoles->[checkRoles],getClassLoader],lastModified->[act],MoveAllChildrenTo->[invoke->[renameTo,getName,getRemote]],Exists->[invoke->[exists]],getHomeDirectory->[call],zip->[zip],deleteSuffixesRecursive->[act],isSymlink->[isSymlink],digest->[act],validateFileMask->[exists,validateAntFileMask,validateFileMask],getRealPath->[windowsToRealPath],Length->[invoke->[length]],actAsync->[wrap],absolutize->[FilePath],copyToWithPermission->[chmod,setLastModifiedIfPossible,mode,act,copyTo,lastModified],CallableWith->[call->[act],checkRoles->[checkRoles]],tar->[archive],deleteContents->[act],UnzipFrom->[invoke->[unzip]],DirectoryFilter->[accept->[isDirectory]],length->[act],mkdirsE->[exists,mkdirs],list->[list,act,getClassLoader],IsDirectory->[invoke->[isDirectory]],setLastModifiedIfPossible->[act],readFromOffset->[close->[close],read->[read],actAsync],readToString->[act],copyRecursiveTo->[actAsync,copyRecursiveTo,act,extract],CreateTempDir->[invoke->[getName]],GetHomeDirectory->[call->[FilePath]],getTotalDiskSpace->[act],readFromTar->[symlinkTo,mkdirs,getName,isDirectory,_chmod],hashCode->[hashCode],child->[FilePath],WritePipe->[invoke->[mkdirs]],mode->[act,isUnix],Write->[invoke->[write,mkdirs]],Read->[invoke->[newInputStreamDeny
ingSymlinkAsNeeded]],ReadFromTar->[invoke->[extract]],IsDescendant->[invoke->[isAbsolute,length,normalize,toString],getDirectChild->[getParent,equals]],UnzipRemote->[invoke->[unzip]],CopyRecursiveLocal->[invoke->[exists]],SymlinkDiscardingFileFilter->[accept->[isSymlink]],delete->[act],toURI->[act],ListFilter->[invoke->[FilePath]],moveAllChildrenTo->[act],isDescendant->[act],archive->[archive],touch->[act],withSuffix->[FilePath],mkdirs->[exists,act],equals->[equals],write->[mkdirs,act],WriteToTar->[invoke->[compress]],Touch->[invoke->[exists]],OffsetPipeSecureFileCallable->[invoke->[read]],writeToTar->[close],ListGlob->[invoke->[FilePath]],exists->[act],validateAntFileMask->[validateAntFileMask,act],act->[call,invoke,act],getFreeDiskSpace->[act],ToURI->[invoke->[toURI]],createTempFile->[FilePath,act],CreateTempFile->[invoke->[getName,createTempFile]],installIfNecessaryFrom->[installIfNecessaryFrom,untarFrom,unzipFrom],createTextTempFile->[createTextTempFile,FilePath,act],readObject->[_getChannelForSerialization],validateRelativeDirectory->[validateRelativeDirectory,validateRelativePath],validateRelativePath->[exists,isDirectory,child],isFileAncestorSymlink->[isSymlink,getParent,equals],deleteRecursive->[act],listDirectories->[list],ignoringSymlinks->[visit->[isSymlink,visit]],copyFrom->[copyFrom],getParent->[FilePath],Unpack->[invoke->[extract,unzip]],read->[actAsync,read],Mkdirs->[invoke->[mkdirs]],getName]]
Returns a channel that can be used to serialize a FilePath object.
I searched for usages in sources (with GitHub) and binaries (with `usage-in-plugins`) and could not find any references in the `jenkinsci` GitHub organization or in plugin binaries, so this is safe to restrict. Since it is restricted, it is safe to mark it final, as no plugins will inline the value during compilation.
@@ -56,15 +56,15 @@ public class MinimalWordCountJava8 { Pipeline p = Pipeline.create(options); p.apply(TextIO.Read.from("gs://apache-beam-samples/shakespeare/*")) + .apply(FlatMapElements .into(TypeDescriptors.strings()) - .via((String word) -> Arrays.asList(word.split("[^a-zA-Z']+")))) + .via((String word) -> Arrays.asList(word.split(TOKENIZER_PATTERN)))) .apply(Filter.by((String word) -> !word.isEmpty())) .apply(Count.<String>perElement()) .apply(MapElements .into(TypeDescriptors.strings()) .via((KV<String, Long> wordCount) -> wordCount.getKey() + ": " + wordCount.getValue())) - // CHANGE 3/3: The Google Cloud Storage path is required for outputting the results to. .apply(TextIO.Write.to("gs://YOUR_OUTPUT_BUCKET/AND_OUTPUT_PREFIX"));
[MinimalWordCountJava8->[main->[waitUntilFinish,to,create,apply]]]
This method is intended to be run from the command line.
We can keep the regex inline, instead of defining a new variable. I think this makes more sense for a minimal example.
@@ -1,7 +1,6 @@ /* - * Copyright 2007-2021 The OpenSSL Project Authors. All Rights Reserved. - * Copyright Nokia 2007-2019 - * Copyright Siemens AG 2015-2019 + * Copyright 2007-2022 The OpenSSL Project Authors. All Rights Reserved. + * Copyright Siemens AG 2015-2022 * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy
[No CFG could be retrieved]
Plots a list of test cases that are contained in a single line of code.
??? Should this really be removed???
@@ -194,7 +194,7 @@ func TestAccessOriginWebConsoleMultipleIdentityProviders(t *testing.T) { linkRegexps := make([]string, 0) // Verify that the plain /login URI is unavailable when multiple IDPs are in use. - urlMap["/login"] = urlResults{http.StatusForbidden, ""} + urlMap["/login"] = urlResults{http.StatusNotFound, ""} // Create the common base URLs escapedPublicURL := url.QueryEscape(masterOptions.OAuthConfig.AssetPublicURL)
[CleanupMasterEtcd,QueryEscape,NewRequest,DefaultMasterOptions,Encode,Set,RoundTrip,Add,QuoteMeta,New,SetTransportDefaults,Errorf,Logf,SplitN,Match,Join,Execute,Must,FindAvailableBindAddress,Fatalf,Get,StartConfiguredMaster,Sprintf,Unmarshal,String,Parse,ReadAll]
Initialize the common base URLs for the given identity providers and redirect to the login page. expects the providerSelectionURL to be valid.
this status code changed because the oauth server no longer falls through to our "normal" chain. If you request a path handled by the oauth server, it is the end of the chain and will 404 on you if you fall through.
@@ -186,7 +186,7 @@ def get_experiment_port(args): exit(1) experiment_config = Experiments() experiment_dict = experiment_config.get_all_experiments() - return experiment_dict[experiment_id]['port'] + return experiment_dict[experiment_id].get('port', 'N/A') def convert_time_stamp_to_date(content): '''Convert time stamp to date time format'''
[check_rest->[get_config_filename],get_config->[get_config_filename],parse_ids->[update_experiment],set_monitor->[update_experiment,get_experiment_status,show_experiment_info],webui_url->[get_config_filename,get_config],update_experiment->[update_experiment],check_experiment_id->[update_experiment],experiment_list->[update_experiment],stop_experiment->[parse_ids],trial_kill->[get_config_filename],experiment_status->[get_config_filename],save_experiment->[get_config],log_stderr->[log_internal],log_trial->[get_config_filename,log_trial_adl_helper],platform_clean->[hdfs_clean,update_experiment,remote_clean,get_platform_dir],get_experiment_port->[check_experiment_id],monitor_experiment->[set_monitor],trial_ls->[convert_time_stamp_to_date,get_config_filename],export_trials_data->[groupby_trial_id,get_config_filename,get_config],load_experiment->[get_config],log_internal->[get_config_filename],trial_codegen->[get_config_filename,check_experiment_id],show_experiment_info->[get_time_interval,convert_time_stamp_to_date,update_experiment],get_config_filename->[check_experiment_id],list_experiment->[convert_time_stamp_to_date,get_config_filename],experiment_clean->[hdfs_clean,get_config,local_clean,remote_clean],log_stdout->[log_internal]]
Get the port of an experiment.
just curious, why some `get('port'` has default value 'N/A' while some others do not have default value?
@@ -101,9 +101,9 @@ def test_cov_estimation_on_raw_segment(): raw = Raw(raw_fname, preload=False) cov = compute_raw_covariance(raw) cov_mne = read_cov(erm_cov_fname) - assert_true(cov_mne.ch_names == cov.ch_names) - assert_true(linalg.norm(cov.data - cov_mne.data, ord='fro') / - linalg.norm(cov.data, ord='fro') < 1e-4) + assert_equal(cov_mne.ch_names, cov.ch_names) + assert_equal(cov_mne.nfree, cov.nfree) + assert_snr(cov.data, cov_mne.data, 100) # test IO when computation done in Python cov.save(op.join(tempdir, 'test-cov.fif')) # test saving
[test_ad_hoc_cov->[assert_array_almost_equal,make_ad_hoc_cov,_TempDir,read_evokeds,read_cov,assert_true,repr,join,save],test_evoked_whiten->[all,read_evokeds,regularize,read_cov,mean,whiten_evoked,assert_true,pick_types,abs],test_regularize_cov->[Raw,regularize,read_cov,mean,assert_true,info],test_arithmetic_cov->[assert_array_almost_equal,read_cov,assert_true],test_auto_low_rank->[get_data->[randn,rand,copy,svd,RandomState,dot],_auto_low_rank_model,get_data,catch_warnings,assert_equal,len,assert_raises,simplefilter],test_io_cov->[assert_array_almost_equal,_TempDir,assert_naming,read_cov,catch_warnings,assert_equal,assert_true,len,repr,join,pick_channels_cov,save,simplefilter,write_cov],test_rank->[,read_cov,sum,channel_type,compute_proj_raw,add_proj,dict,pick_info,assert_equal,copy,startswith,set,product,range,ix_,prepare_noise_cov,count,list,compute_raw_covariance,_get_sss_rank,len,Raw,read_evokeds,assert_true,_estimate_rank_meeg_cov,pick_types],test_cov_scaling->[assert_array_equal,read_evokeds,max,dict,read_cov,_undo_scaling_cov,pick_channels,_picks_by_type,assert_true,_apply_scaling_cov,pick_types],test_cov_estimation_on_raw_segment->[assert_array_almost_equal,Raw,_TempDir,compute_raw_covariance,read_cov,catch_warnings,pick_channels,assert_true,norm,len,crop,join,save,simplefilter],test_cov_estimation_with_triggers->[assert_array_almost_equal,Raw,all,_TempDir,dict,compute_covariance,read_cov,find_events,catch_warnings,assert_true,norm,assert_raises,len,join,Epochs,save,simplefilter,merge_events],test_compute_covariance_auto_reg->[Raw,dict,compute_covariance,find_events,assert_equal,assert_true,merge_events,assert_raises,crop,set,Epochs,pick_types,diff],dirname,run_tests_if_main,simplefilter,join]
Test estimation on continuous recordings.
I had to lower the tolerance here from 10000 to 100, but by eye looking at `cov.plot()` comparisons of spectra and images of the covariances they look very similar, so I'm pretty sure the calculations are nearly identical.
@@ -6,8 +6,12 @@ class UserNotification < ActiveRecord::Base after_save :send_email - def self.last_notifications(user, last_notification_id = nil, per_page = 10) - last_notification_id = 999999999999999999999999 if last_notification_id < 1 + def self.last_notifications( + user, + last_notification_id = nil, + per_page = Constants::ACTIVITY_AND_NOTIF_SEARCH_LIMIT + ) + last_notification_id = Constants::INFINITY if last_notification_id < 1 Notification.joins(:user_notifications) .where('user_notifications.user_id = ?', user.id) .where('notifications.id < ?', last_notification_id)
[UserNotification->[unseen_notification_count->[count],last_notifications->[limit],seen_by_user->[update_all],recent_notifications->[limit],send_email->[system_message_notification_email,recent_notification_email,type_of,assignments_notification_email,send_email_notification],after_save,include,belongs_to]]
Returns the last notifications of a user.
fix that! this solution doesn't work
@@ -45,6 +45,9 @@ public class DummyTaskEngine implements TaskEngine { switch (DummyTaskTypes.valueOf(taskName)) { case SUCCESSFUL_TASK: return (CompletableFuture<T>) CompletableFuture.completedFuture("result"); + case PARAMETERIZED_TASK: + Map<String, ?> params = context.getParameters().get(); + return (CompletableFuture<T>) CompletableFuture.completedFuture(params.get("parameter")); case FAILING_TASK: CompletableFuture<T> f = new CompletableFuture<>(); f.completeExceptionally(new Exception("exception"));
[DummyTaskEngine->[handles->[contains],runTask->[IllegalArgumentException,completeExceptionally,valueOf,completedFuture,Exception],getTasks->[forEach,add,DummyTask],add,values,toString]]
This method is called when a task is executed.
what if it is Optional.empty? In any case, for me this is an anti-pattern. We should not be using an Optional<Map> this way but an empty map IMO
@@ -1160,6 +1160,9 @@ function numberInputType(scope, element, attr, ctrl, $sniffer, $browser) { } ctrl.$formatters.push(function(value) { + if (isString(value) && !isNaN(value)) { + value = Number(value); + } return validate(ctrl, 'number', ctrl.$isEmpty(value) || isNumber(value), value); }); }
[No CFG could be retrieved]
Input type handler for number inputs.
isNaN might not be the best choice here, but I think it's probably ok
@@ -159,6 +159,10 @@ function wpseo_upsert_new_title( $post_id, $new_title, $original_title ) { */ function wpseo_upsert_meta( $post_id, $new_meta_value, $orig_meta_value, $meta_key, $return_key ) { + $post_id = $post_id; + $sanitized_new_meta_value = strip_tags( $new_meta_value ); + $orig_meta_value = strip_tags( $orig_meta_value ); + $upsert_results = array( 'status' => 'success', 'post_id' => $post_id,
[No CFG could be retrieved]
This function upserts a post meta value in the database.
Eh.. is it just me or is this line completely superfluous ?
@@ -950,6 +950,7 @@ export function onContentPauseRequested(global) { adsManagerHeightOnLoad = null; } adsActive = true; + playerState = PlayerStates.PLAYING; postMessage({event: VideoEvents.AD_START}); removeHoverEventFromElement( /** @type {!Element} */ (videoPlayer),
[No CFG could be retrieved]
Displays the countdown of an ad. Handle an additional control of the video that is being played.
is `playerState` only tracking for ad playing state?
@@ -39,7 +39,7 @@ } _%> - <%= beans.map(bean => `private final ${bean.class} ${bean.instance};`).join('\n\n ') %> + <%= beans.map(bean => `protected final ${bean.class} ${bean.instance};`).join('\n\n ') %> public <%= constructorName %>(<%= beans.map(bean => `${bean.class} ${bean.instance}`).join(', ') %>) { <%= beans.map(bean => `this.${bean.instance} = ${bean.instance};`).join('\n ') %>
[No CFG could be retrieved]
Constructor for the CABAB class.
This is from another commit, and I know we must discuss this, but we can't merge this with this PR anyway
@@ -103,6 +103,7 @@ namespace { //// TEUCHOS_UNIT_TEST_TEMPLATE_1_DECL( CrsMatrix, NodeConversion, N2 ) { +#ifdef TPETRA_ENABLE_DEPRECATED_CODE typedef Tpetra::Map<> Map1; typedef Tpetra::CrsMatrix<>::scalar_type SCALAR; typedef Map1::local_ordinal_type LO;
[No CFG could be retrieved]
region Private functions Deduction of the Hilbert - Holm - Holm - H.
Best practice would be to make the test not run at all, not just disable the contents of the test. This would mean 1. wrapping the entire test declaration in the macro, and 2. protecting instantiation of the test (see bottom of file) with the macro.
@@ -66,8 +66,13 @@ func (cfg *TableManagerConfig) RegisterFlags(f *flag.FlagSet) { f.Int64Var(&cfg.ProvisionedReadThroughput, "dynamodb.periodic-table.read-throughput", 300, "DynamoDB periodic tables read throughput") f.Int64Var(&cfg.InactiveWriteThroughput, "dynamodb.periodic-table.inactive-write-throughput", 1, "DynamoDB periodic tables write throughput for inactive tables.") f.Int64Var(&cfg.InactiveReadThroughput, "dynamodb.periodic-table.inactive-read-throughput", 300, "DynamoDB periodic tables read throughput for inactive tables") + f.Int64Var(&cfg.ChunkTableProvisionedWriteThroughput, "dynamodb.chunk-table.write-throughput", 3000, "DynamoDB chunk tables write throughput") + f.Int64Var(&cfg.ChunkTableProvisionedReadThroughput, "dynamodb.chunk-table.read-throughput", 300, "DynamoDB chunk tables read throughput") + f.Int64Var(&cfg.ChunkTableInactiveWriteThroughput, "dynamodb.chunk-table.inactive-write-throughput", 1, "DynamoDB chunk tables write throughput for inactive tables.") + f.Int64Var(&cfg.ChunkTableInactiveReadThroughput, "dynamodb.chunk-table.inactive-read-throughput", 300, "DynamoDB chunk tables read throughput for inactive tables") cfg.PeriodicTableConfig.RegisterFlags(f) + cfg.PeriodicChunkTableConfig.RegisterFlags(f) } // PeriodicTableConfig for the use of periodic tables (ie, weekly tables). Can
[CreateTable->[CreateTable],DescribeTable->[DescribeTable],loop->[Stop],RegisterFlags->[RegisterFlags],UpdateTable->[UpdateTable]]
RegisterFlags registers flags for the TableManagerConfig.
Shouldn't these flags be on `PeriodicChunkTableConfig`? Better yet, could these be implemented in such a way that we don't have duplication with the periodic table config?
@@ -2731,7 +2731,7 @@ void ask_for_upgrade(const gchar *dbname, const gboolean has_gui) char *label_text = g_markup_printf_escaped(_("the database schema has to be upgraded for\n" "\n" "<span style='italic'>%s</span>\n" - "\n" + "\nthis might take a very long time in case of a large database\n\n" "do you want to proceed or quit now to do a backup\n"), dbname);
[No CFG could be retrieved]
Check if a database lock file is available and, if so, ask the user whether to proceed with the upgrade or quit now to make a backup of the database.
I would remove the "very" as it is a bit frightening to me, saying it could takes hours or days. The upgrade is generally fast or done in "just" several minutes.
@@ -752,7 +752,7 @@ class PickleCoder(_PickleCoderBase): lambda x: dumps(x, protocol), pickle.loads) def as_deterministic_coder(self, step_label, error_message=None): - return DeterministicFastPrimitivesCoder(self, step_label) + return FastPrimitivesCoder(self, requires_deterministic=step_label) def to_type_hint(self): return Any
[ShardedKeyCoder->[as_cloud_object->[as_cloud_object],is_deterministic->[is_deterministic],_create_impl->[get_impl],to_type_hint->[to_type_hint],from_type_hint->[ShardedKeyCoder]],FastPrimitivesCoder->[as_cloud_object->[as_cloud_object],as_deterministic_coder->[DeterministicFastPrimitivesCoder,is_deterministic],is_deterministic->[is_deterministic],_create_impl->[get_impl],__init__->[PickleCoder]],ProtoCoder->[from_type_hint->[ProtoCoder]],_pickle_from_runner_api_parameter->[deserialize_coder],TupleCoder->[as_cloud_object->[as_cloud_object,_get_component_coders,is_kv_coder],as_deterministic_coder->[as_deterministic_coder,is_deterministic,TupleCoder],is_deterministic->[is_deterministic],to_runner_api_parameter->[is_kv_coder,coders],_create_impl->[get_impl],__eq__->[coders],to_type_hint->[to_type_hint],from_runner_api_parameter->[TupleCoder],from_type_hint->[TupleCoder],register_urn],Coder->[key_coder->[is_kv_coder],register_urn->[register],as_cloud_object->[serialize_coder,as_cloud_object,_get_component_coders],as_deterministic_coder->[is_deterministic],estimate_size->[encode],value_coder->[is_kv_coder],to_runner_api_parameter->[serialize_coder],__eq__->[_dict_without_impl],register_structured_urn->[register_urn,_get_component_coders],to_runner_api->[Coder],get_impl->[_create_impl]],WindowedValueCoder->[key_coder->[key_coder],as_cloud_object->[as_cloud_object,_get_component_coders],is_deterministic->[is_deterministic],value_coder->[value_coder],_create_impl->[get_impl],__init__->[PickleCoder,TimestampCoder],is_kv_coder->[is_kv_coder]],AvroGenericCoder->[to_runner_api_parameter->[encode],from_runner_api_parameter->[AvroGenericCoder,decode],register_urn],NullableCoder->[to_type_hint->[to_type_hint],_create_impl->[get_impl],is_deterministic->[is_deterministic]],StrUtf8Coder->[decode->[decode],encode->[encode]],_TimerCoder->[_create_impl->[get_impl],is_deterministic->[is_deterministic]],_PickleCoderBase->[as_cloud_object->[as_cloud_object]],DeterministicFastPrimitivesCode
r->[_create_impl->[get_impl]],FastCoder->[decode->[get_impl],estimate_size->[get_impl],encode->[get_impl]],StateBackedIterableCoder->[to_runner_api_parameter->[_get_component_coders],_create_impl->[get_impl],from_runner_api_parameter->[StateBackedIterableCoder],register_urn],IterableCoder->[as_cloud_object->[as_cloud_object],as_deterministic_coder->[IterableCoder,as_deterministic_coder,is_deterministic],is_deterministic->[is_deterministic],_create_impl->[get_impl],__eq__->[value_coder],to_type_hint->[to_type_hint],from_type_hint->[IterableCoder]],ParamWindowedValueCoder->[_create_impl->[get_impl],from_runner_api_parameter->[ParamWindowedValueCoder],is_deterministic->[is_deterministic],register_urn],TupleSequenceCoder->[as_deterministic_coder->[as_deterministic_coder,is_deterministic,TupleSequenceCoder],is_deterministic->[is_deterministic],_create_impl->[get_impl],__eq__->[value_coder],from_type_hint->[TupleSequenceCoder]],MapCoder->[to_type_hint->[to_type_hint],_create_impl->[get_impl]],LengthPrefixCoder->[_create_impl->[get_impl],as_cloud_object->[as_cloud_object],is_deterministic->[is_deterministic],estimate_size->[estimate_size]],register_urn,register_structured_urn]
Returns a deterministic encoder for the sequence.
The name `requires_deterministic` is confusing. I thought it was a boolean and that passing `step_label` was a bug. Would it make more sense to have a separate `step_label` arg? Or maybe rename the single arg to `deterministic_step_label`?
@@ -78,6 +78,10 @@ public class VirtualColumns implements Cacheable Map<String, VirtualColumn> withDotSupport = Maps.newHashMap(); Map<String, VirtualColumn> withoutDotSupport = Maps.newHashMap(); for (VirtualColumn vc : virtualColumns) { + if (vc.getOutputName() == null || vc.getOutputName().isEmpty()) { + throw new IAE("Empty or null virtualColumn name"); + } + if (vc.getOutputName().equals(Column.TIME_COLUMN_NAME)) { throw new IAE("virtualColumn name[%s] not allowed", vc.getOutputName()); }
[VirtualColumns->[makeDimensionSelector->[getVirtualColumn,makeDimensionSelector],makeFloatColumnSelector->[getVirtualColumn,makeFloatColumnSelector],detectCycles->[getVirtualColumn,detectCycles],getColumnCapabilities->[getVirtualColumn],equals->[equals],getVirtualColumn->[splitColumnName],create->[VirtualColumns],hashCode->[hashCode],makeObjectColumnSelector->[getVirtualColumn,makeObjectColumnSelector],makeLongColumnSelector->[getVirtualColumn,makeLongColumnSelector],getColumnCapabilitiesWithFallback->[getColumnCapabilities],isEmpty->[isEmpty],toString->[toString],VirtualColumns]]
Creates a new virtual columns object.
Could use Strings.isNullOrEmpty
@@ -1011,7 +1011,7 @@ def str_to_msg(txt, ignore_fields=''): if key not in ignore_fields.split(','): msg[key] = convert(key, value) msg['episode_done'] = msg.get('episode_done', False) - return msg + return Message(msg) def msg_to_str(msg, ignore_fields=''):
[Opt->[__reduce__->[__getstate__],__deepcopy__->[Opt]],display_messages->[_ellipse,clip_text],TimeLogger->[time->[time],log->[reset,time],__init__->[Timer]],PaddingUtils->[pad_text->[valid]],OffensiveLanguageDetector->[__contains__->[contains_offensive_language],__init__->[_path],str_segment->[logprob->[log],segment->[segment],segment,log],add_words->[add_phrase],contains_offensive_language->[_check_sequence]],str_to_msg->[tolist->[tostr],convert->[tolist,tostr],convert],round_sigfigs->[round_sigfigs],maintain_dialog_history->[parse],Timer->[time->[time]],msg_to_str->[add_field->[filter],add_field],NoLock]
Convert a string to a message dict.
What, does this handle it for FbDialogTeacher or ParlaiDialogTeacher? This is obviously correct, but I don't know who is using this...
@@ -247,5 +247,12 @@ module Idv def user_uuid document_capture_session&.user&.uuid end + + def throttler + @throttler ||= Throttle.for( + user: document_capture_session.user, + throttle_type: :idv_acuant, + ) + end end end
[ApiImageUploadForm->[update_funnel->[liveness_checking_enabled?],validate_pii_from_doc->[submit],track_event->[track_event],update_analytics->[track_event]]]
Returns the UUID of the currently logged in user or nil if no such user exists.
Same question about naming `throttler` vs. `throttle`.
@@ -320,6 +320,13 @@ class Application { trigger_error("Undefined property: " . __CLASS__ . ":\${$name}"); } + /** + * + */ + public function service($name) { + return $this->_services->{$name}; + } + /** * Make the global $CONFIG a reference to this application's config service *
[Application->[install->[run],upgrade->[run],bootCore->[start,loadCore],run->[bootCore]]]
Get a service by name.
I added this just to temporarily bypass the `public_services` property so I get access to all services.
@@ -506,6 +506,8 @@ public class DeltaSync implements Serializable { return Pair.of(scheduledCompactionInstant, writeStatusRDD); } + + /** * Try to start a new commit. * <p>
[DeltaSync->[getHoodieClientConfig->[getHoodieClientConfig],registerAvroSchemas->[registerAvroSchemas],startCommit->[startCommit],syncMeta->[getSyncClassShortName],syncHive->[syncHive],syncOnce->[refreshTimeline],close->[close]]]
Writes the records to the sink. Schedules compaction if needed.
unnecessary empty lines
@@ -406,7 +406,8 @@ void Client::step(float dtime) // Control local player (0ms) LocalPlayer *player = m_env.getLocalPlayer(); assert(player); - player->applyControl(dtime, &m_env); + player->applyControl(dtime, player->getPlayerControl(), + player->getPlayerSettings(), &m_env); // Step environment m_env.step(dtime);
[No CFG could be retrieved]
This function is called when a client receives a response from the map.
why do we pass already-owned objects to the player class?
@@ -117,6 +117,7 @@ func (s *balanceRegionScheduler) transferPeer(cluster schedule.Cluster, region * target := cluster.GetStore(newPeer.GetStoreId()) avgScore := cluster.GetStoresAverageScore(core.RegionKind) + log.Debugf("source store is %v, target store is %v", source, target) if !shouldBalance(source, target, avgScore, core.RegionKind, region, opInfluence, cluster.GetTolerantSizeRatio()) { schedulerCounter.WithLabelValues(s.GetName(), "skip").Inc() return nil
[transferPeer->[GetTolerantSizeRatio,GetLocationLabels,GetStoreId,NewReplicaChecker,GetStore,SelectBestPeerToAddReplica,GetStoresAverageScore,GetId,GetName,CreateMovePeerOperator,Inc,WithLabelValues,GetRegionStores,NewDistinctScoreFilter],IsScheduleAllowed->[OperatorCount,GetRegionScheduleLimit],Schedule->[GetPeers,transferPeer,GetStoreId,Put,GetName,IsRegionHot,Inc,WithLabelValues,GetId,GetMaxReplicas],RegisterScheduler,NewStateFilter,NewBalanceSelector,NewSnapshotCountFilter,NewCacheFilter,NewPendingPeerCountFilter,NewStorageThresholdFilter,NewHealthFilter,NewIDTTL]
transferPeer transfers a peer from one cluster to another.
log store id should be enough.
@@ -17,12 +17,13 @@ def create_unique_slug_for_warehouses(apps, schema_editor): first_char = warehouse.name[0].lower() if first_char != previous_char: previous_char = first_char - slug_values = Warehouse.objects.filter( + slug_values = list(Warehouse.objects.filter( slug__istartswith=first_char - ).values_list("slug", flat=True) + ).values_list("slug", flat=True)) slug = generate_unique_slug(warehouse, slug_values) warehouse.slug = slug + warehouse.save(update_fields=["slug"]) slug_values.append(slug)
[generate_unique_slug->[slugify],create_unique_slug_for_warehouses->[append,get_model,filter,generate_unique_slug,name],Migration->[AddField,RunPython,SlugField,AlterField]]
Create a unique slug for Warehouse objects.
what if some warehouses have names starting with the same first letter? (same for ProductType and collection)
@@ -65,6 +65,7 @@ try { makeBodyVisible(document, waitForExtensions(window)); } catch (e) { makeBodyVisible(document); + throw e; } finally { perf.tick('e_is'); // TODO(erwinm): move invocation of the `flush` method when we have the
[No CFG could be retrieved]
Initialize the tags.
I never investigated this originally, but will `perf.tick` itself throw an error if something went wrong with the above `try` block?
@@ -207,6 +207,15 @@ def test_persyst_annotations(tmpdir): # make sure annotation with a "," character is in there assert 'seizure1,2' in annotations.description + assert 'CLip2' in annotations.description + + # roundtripping annotations should work. + tmp_fpath = tmp_path / 'tmp_file.edf' + with pytest.warns(RuntimeWarning, match='EDF format'): + raw.export(tmp_fpath) + raw_edf = read_raw_edf(tmp_fpath) + for annot in raw.annotations: + assert annot['description'] in raw_edf.annotations.description @requires_testing_data
[test_persyst_lay_load->[all,load_data,read_raw_persyst,len,pick_types,lower],test_persyst_raw->[assert_array_equal,max,get_data,min,squeeze,read_raw_persyst,pick_types],test_persyst_moved_file->[split,write,basename,enumerate,dirname,read_raw_persyst,str,copy,startswith,join,raises,open],test_persyst_errors->[raises,write,basename,enumerate,read_raw_persyst,str,copy,replace,startswith,join,remove,open,warns],test_persyst_standard->[_test_raw_reader],test_persyst_wrong_file->[basename,read_raw_persyst,str,copy,join,raises],test_persyst_dates->[write,basename,enumerate,read_raw_persyst,str,copy,startswith,join,remove,open],test_persyst_annotations->[count_nonzero,basename,read_raw_persyst,str,copy,join],data_path,join]
Test for reading in Persyst annotations.
remove this. Your assert above is enough. Don't test EDF export features here. Keep this in the export module. thx
@@ -634,9 +634,13 @@ class OrFinally(AfterAny): class TriggerContext(object): - def __init__(self, outer, window): + def __init__(self, outer, window, clock): self._outer = outer self._window = window + self._clock = clock + + def get_current_time(self): + return self._clock.time() def set_timer(self, name, time_domain, timestamp): self._outer.set_timer(self._window, name, time_domain, timestamp)
[OrFinally->[to_runner_api->[OrFinally],from_runner_api->[OrFinally,from_runner_api]],AfterEach->[from_runner_api->[AfterEach,from_runner_api],reset->[_sub_context,reset],on_merge->[_sub_context],on_element->[_sub_context],should_fire->[_sub_context],on_fire->[_sub_context],to_runner_api->[AfterEach,to_runner_api],_CombiningValueStateTag],AfterCount->[from_runner_api->[AfterCount],_CombiningValueStateTag],InMemoryUnmergedState->[copy->[InMemoryUnmergedState,copy],get_and_clear_timers->[get_timers]],AfterWatermark->[from_runner_api->[AfterWatermark,from_runner_api],reset->[reset],on_merge->[on_merge,is_late],on_element->[on_element,is_late],should_fire->[should_fire,is_late],on_fire->[on_fire,is_late],to_runner_api->[to_runner_api],_CombiningValueStateTag],_CombiningValueStateTag->[with_prefix->[_CombiningValueStateTag]],_ListStateTag->[with_prefix->[_ListStateTag]],SimpleState->[at->[TriggerContext]],GeneralTriggerDriver->[process_timer->[on_fire,get_state,known_windows,MergeableStateAdapter,get_window,should_fire,at],_output->[clear_state,get_state,add_state],process_elements->[TriggerMergeContext->[merge->[merge,on_merge,at]],TriggerMergeContext,merge,on_fire,clear_state,get_state,on_element,known_windows,MergeableStateAdapter,add_state,should_fire,at],__init__->[_WatermarkHoldStateTag],_ListStateTag,_CombiningValueStateTag],TriggerDriver->[process_entire_key->[process_timer,process_elements]],DefaultTrigger->[from_runner_api->[DefaultTrigger]],MergeableStateAdapter->[clear_timer->[clear_timer],_persist_window_ids->[set_global_state],clear_state->[clear_state],set_timer->[set_timer],get_state->[get_state],__init__->[get_global_state],add_state->[add_state],_ValueStateTag],_ValueStateTag->[with_prefix->[_ValueStateTag]],TriggerContext->[clear_timer->[clear_timer],clear_state->[clear_state],set_timer->[set_timer],get_state->[get_state],add_state->[add_state]],Repeatedly->[from_runner_api->[Repeatedly,from_runner_api],reset->[reset],on_merge->[on_merge],on_element->[
on_element],should_fire->[should_fire],on_fire->[reset,on_fire],to_runner_api->[to_runner_api]],_WatermarkHoldStateTag->[with_prefix->[_WatermarkHoldStateTag]],NestedContext->[clear_timer->[clear_timer],clear_state->[clear_state,with_prefix],set_timer->[set_timer],get_state->[get_state,with_prefix],add_state->[with_prefix,add_state]],CombiningTriggerDriver->[process_timer->[process_timer],process_elements->[TriggerMergeContext->[merge->[]],process_elements]],_ParallelTriggerFn->[from_runner_api->[from_runner_api],reset->[reset],on_merge->[on_merge],on_element->[on_element],should_fire->[combine_op,should_fire],on_fire->[combine_op,should_fire,on_fire],to_runner_api->[to_runner_api]],DefaultGlobalBatchTriggerDriver->[process_elements->[TriggerMergeContext->[merge->[]],_UnwindowedValues]]]
Initialize the object.
<!--new_thread; commit:5ddb81e7534625fa7f44e5ab7dbada5eb605bf3b; resolved:0--> Let's not make this `@property` to make it clearer that this is actually retrieving the time on each call.
@@ -51,6 +51,7 @@ KNOWN_PRECISIONS = { } KNOWN_TASK_TYPES = { 'action_recognition', + 'audio_classification', 'classification', 'colorization', 'detection',
[FileSourceHttp->[deserialize->[validate_string]],Model->[deserialize->[DeserializationError,validate_string,deserialization_context,validate_string_enum,deserialize]],TaggedBase->[deserialize->[DeserializationError]],run_in_parallel->[start->[JobWithQueuedOutput,QueuedOutputContext],complete,cancel],PostprocUnpackArchive->[apply->[print_section_heading],deserialize->[validate_string,validate_relative_path]],load_models->[DeserializationError,deserialization_context,deserialize],JobWithQueuedOutput->[complete->[print],cancel->[cancel,interrupt]],deserialization_context->[DeserializationError],validate_nonnegative_int->[DeserializationError],DirectOutputContext->[print->[print]],Reporter->[log_error->[print,printf],print_section_heading->[printf],with_event_context->[Reporter],print->[printf],log_details->[print],log_warning->[print,printf],print_progress->[print],emit_event->[print],end_progress->[print],print_group_heading->[print,printf]],FileSourceGoogleDrive->[deserialize->[validate_string]],ModelFile->[deserialize->[DeserializationError,validate_string,deserialization_context,validate_nonnegative_int,validate_relative_path,deserialize]],validate_string->[DeserializationError],load_models_from_args->[print,load_models_or_die],PostprocRegexReplace->[apply->[print_section_heading],deserialize->[validate_string,validate_nonnegative_int,validate_relative_path]],load_models_or_die->[print,load_models],validate_string_enum->[validate_string,DeserializationError],JobContext->[printf->[print]],validate_relative_path->[validate_string,DeserializationError]]
IMPORTS OF THE MODEL INFORMATION AND THE PROGRAM INFORMATION A class to create a JobContext object that will print a single unique identifier.
Please update `README.txt` accordingly.
@@ -45,7 +45,7 @@ - window.addEventListener('scroll', function () { + function setSidebar () { // if footer is in frame, removed fixed style (otherwise add it, if it doesn't exist) if ((footer.getBoundingClientRect().top - window.innerHeight) <= 0) { if (sidebar.classList.contains('fixed')) {
[No CFG could be retrieved]
fullscreen sidenav expansion onClick - click on a menu item.
I like seeing this in a named function! Makes the intent much clearer
@@ -38,6 +38,8 @@ namespace MonoGame.Framework.Utilities return MonoGamePlatform.PlayStation4; #elif PSVITA return MonoGamePlatform.PSVita; +#elif STADIA + return MonoGamePlatform.Stadia; #endif } }
[PlatformInfo->[iOS,DesktopGL,Android,PlayStation4,OpenGL,NintendoSwitch,WebGL,tvOS,DirectX,XboxOne,Windows,WindowsUniversal,PSVita]]
Graphics backend that provides the base system for the Nintendo system.
@Jjagg @harry-cpp - May be good to clean up these #ifs and replace it with partial class stuff?
@@ -21,9 +21,11 @@ class DependencyIsStageFileError(DvcException): class BaseDependency: IS_DEPENDENCY = True - DoesNotExistError = DependencyDoesNotExistError - IsNotFileOrDirError = DependencyIsNotFileOrDirError - IsStageFileError = DependencyIsStageFileError + DoesNotExistError = DependencyDoesNotExistError # type: Type[DvcException] + IsNotFileOrDirError = ( + DependencyIsNotFileOrDirError + ) # type: Type[DvcException] + IsStageFileError = DependencyIsStageFileError # type: Type[DvcException] def update(self, rev=None): pass
[DependencyIsNotFileOrDirError->[__init__->[super]],DependencyIsStageFileError->[__init__->[super]],DependencyDoesNotExistError->[__init__->[super]]]
Update the current node with the new node id.
Found a way to override subtypes. :)
@@ -0,0 +1,9 @@ +import java.io.IOException; + +public class PlayerLoadException extends Exception { + + public PlayerLoadException(IOException io) { + super(io); + } + +}
[No CFG could be retrieved]
No Summary Found.
Change the tab for spaces here.
@@ -241,7 +241,10 @@ func (g *generator) genNode(w io.Writer, n hcl2.Node) { func (g *generator) genResource(w io.Writer, r *hcl2.Resource) { resName := r.Name() - _, mod, typ, _ := r.DecomposeToken() + pkg, mod, typ, _ := r.DecomposeToken() + if mod == "" || strings.HasPrefix(mod, "/") || strings.HasPrefix(mod, "index/") { + mod = pkg + } // Add conversions to input properties for _, input := range r.Inputs {
[collectScopeRoots->[VisitExpressions,Add,Assert],genTemps->[genTempsMultiReturn],collectImports->[VisitExpressions,Range,genFunctionPackages,NewStringSet,AsString,Sprintf,Errorf,Assert,Packages,Add,DecomposeToken,SyntaxNode],genResource->[Title,lowerExpression,genTemps,Has,Fgenf,Sprintf,Contains,Fgen,Name,Type,ResolveOutputs,String,Traverse,HasPrefix,DecomposeToken],useLookupInvokeForm->[has,Split,DecomposeToken],genTempsMultiReturn->[Fgenf,Failf,Sprintf,Contains,ToUpper,argumentTypeName,Type,Split],genNode->[genLocalVariable,genResource,genOutputAssignment],genPostamble->[Fprint,Fprintf,genHelpers],genOutputAssignment->[lowerExpression,genTemps,Fgenf,Name,Type],genPreamble->[Fprint,Fprintf,collectImports,SortedValues],genLocalVariable->[lowerExpression,genTemps,Has,Fgenf,Name,Type],genHelpers->[generateHelperMethod],collectScopeRoots,Linearize,NewStringSet,ImportLanguages,genNode,genPostamble,Source,String,Errorf,genPreamble,Bytes,Packages,NewFormatter]
genResource generates a single resource in the writer. generate the resource instantiation if the options. Range is not nil.
Do the latter two cases actually occur? I thought we normalized all that to `""`.
@@ -534,6 +534,12 @@ class Runner { $assoc_args['orderby'] = 'post__in'; $assoc_args['field'] = 'url'; break; + case 'term': + $term_ids = array_slice( $args, 3 ); + $args = array( 'term', 'list', $args[2] ); + $assoc_args['include'] = implode( ',', $term_ids ); + $assoc_args['field'] = 'url'; + break; case 'site': $site_ids = array_slice( $args, 2 ); $args = array( 'site', 'list' );
[Runner->[init_config->[get_project_config_path,get_global_config_path],_run_command->[run_command],run_command->[find_command_to_run],load_wordpress->[check_wp_version,get_wp_config_code],start->[wp_exists,init_config,_run_command,do_early_invoke,find_command_to_run,cmd_starts_with,get_packages_dir_path,run_alias_group,init_colorization,init_logger,set_alias,get_wp_config_code,check_root,run_ssh_command,check_wp_version,find_wp_root],init_logger->[in_color],check_wp_version->[wp_exists]]]
Back - compatibility for the command line options. CLI command line interface This function is used to convert the post type and site to an associative array.
Do we need to make sure the returned terms are ordered by the order they were originally supplied?
@@ -119,6 +119,9 @@ public class HiveMetaStoreUtils { propValue + " is not a valid Hive table/partition property"); table.getParameters().put(tokens.get(0), tokens.get(1)); } + if (table.getParameters().containsKey(RUNTIME_PROPS)) { + table.getParameters().remove(RUNTIME_PROPS); + } } table.setPartitionKeys(getFieldSchemas(hiveTable.getPartitionKeys())); table.setSd(getStorageDescriptor(hiveTable));
[HiveMetaStoreUtils->[getSerDeInfo->[getParameters],getStorageDescriptor->[getParameters],getFieldSchemas->[getColumns,getFieldSchemas]]]
This method returns a Table object based on the given Hive table.
Instead of removing it here, can you figure out why the parameter is being added in the first place and remove it there?
@@ -456,6 +456,7 @@ class RestAPI: # pragma: no unittest self.sent_success_payment_schema = EventPaymentSentSuccessSchema() self.received_success_payment_schema = EventPaymentReceivedSuccessSchema() self.failed_payment_schema = EventPaymentSentFailedSchema() + self.notification_schema = NotificationSchema() @property def rpc_client(self) -> JSONRPCClient:
[RestAPI->[send_udc_transaction->[_plan_withdraw_from_udc,_deposit_to_udc,_withdraw_from_udc],patch_channel->[_close,_withdraw,get_channel,_deposit,_set_channel_reveal_timeout],get_partners_by_token->[get_channel_list],_close->[_updated_channel_state],mint_token_for->[mint_token_for],get_raiden_events_payment_history_with_timestamps->[get_raiden_events_payment_history_with_timestamps],get_pending_transfers->[get_pending_transfers],_withdraw->[_updated_channel_state],_deposit->[_updated_channel_state],_set_channel_reveal_timeout->[_updated_channel_state],get_tokens_list->[get_tokens_list],get_channel_list->[get_channel_list]],APIServer->[stop->[stop],__init__->[restapi_setup_type_converters,restapi_setup_urls]]]
Initialize the object.
Not related to your PR, but I wonder why all the schemas get instantiated here. Doing it as needed would avoid all these attributes and should not bring any noticeable performance problems.
@@ -109,7 +109,7 @@ public class ParametersAction implements RunAction2, Iterable<ParameterValue>, Q * @param additionalSafeParameters additional safe parameters * @since 1.651.2, 2.3 */ - public ParametersAction(List<ParameterValue> parameters, Collection<String> additionalSafeParameters) { + public ParametersAction(@CheckForNull List<ParameterValue> parameters, Collection<String> additionalSafeParameters) { this(parameters); if (additionalSafeParameters != null) { safeParameters.addAll(additionalSafeParameters);
[ParametersAction->[merge->[createUpdated,ParametersAction],createUpdated->[ParametersAction],createVariableResolver->[createVariableResolver],getAssignedLabel->[getParameters,getAssignedLabel],iterator->[iterator]]]
Construct a new action with additional safe parameters. A utility method to perform variable substitution on a given text.
Should be `@Nonnull` too.
@@ -62,7 +62,7 @@ and it takes care of all the other things for you`, }, cli.StringFlag{ Name: "pid, P", - Value: "custom/run/app.pid", + Value: "/var/run/gitea.pid", Usage: "Custom pid file path", }, },
[Handle,FileMode,Dir,GitHookService,Gziper,ServeData,Close,Mailer,RepoRef,SetURLPrefix,MultipartForm,Static,Redirect,IsSet,GetBranchCommit,Info,Error,CanEnableEditor,Post,New,NotFound,Captchaer,Route,ListenUnix,Locale,Logger,I18n,CommitsCount,Any,Join,Toggle,RequireRepoAdmin,ServeFileContent,Sessioner,RequireRepoWriter,Head,Recovery,Remove,RegisterRoutes,Get,RepoAssignment,Serve,GetAttachmentByUUID,InitMailRender,Csrfer,Renderer,Use,Cacher,GlobalInit,IsErrAttachmentNotExist,LocalPath,Contexter,OrgAssignment,Sprintf,Toolboxer,Chmod,String,Fatal,Combo,Open,Replace,Group,Params]
Commands for the Gitea web server. newMacaron returns a new instance of VerChecker.
Why changing the current default?
@@ -87,5 +87,15 @@ class TestLRNOp(OpTest): self.check_grad(['X'], 'Out', max_relative_error=0.01) +class TestLRNMKLDNNOp(TestLRNOp): + def get_attrs(self): + attrs = TestLRNOp.get_attrs(self) + attrs['use_mkldnn'] = True + return attrs + + def test_check_output(self): + self.check_output(atol=0.002) + + if __name__ == "__main__": unittest.main()
[TestLRNOp->[setUp->[get_input,get_attrs,get_out]]]
check_grad - main function.
There are two algorithms ACROSS_CHANNELS and WITHIN_CHANNEL, but TestLRNMKLDNNOp here only test the default one.
@@ -246,8 +246,9 @@ class ArticlesController < ApplicationController # handle series/collections if params["article"]["series"].present? - params["article"]["collection_id"] = Collection.find_series(params["article"]["series"], @user)&.id - elsif params["article"]["series"] == "" + collection = Collection.find_series(params["article"]["series"], @user) + params["article"]["collection_id"] = collection.id + elsif params["article"]["series"] == "" # reset collection? params["article"]["collection_id"] = nil end
[ArticlesController->[preview->[new],new->[new],update->[update]]]
This method is used to provide the necessary params for the missing_nail_key action.
So, the collection is the same thing as the series? :thinking:
@@ -106,6 +106,13 @@ describe("resource", function() { R.get({a: 'foo', b: 'bar'}); }); + it('should support por unescaped url', function() { + var R = $resource('http://localhost:8080/Path/:a'); + + $httpBackend.expect('GET', 'http://localhost:8080/Path/foo').respond(); + R.get({a: 'foo'}); + }); + it('should correctly encode url params', function() { var R = $resource('/Path/:a');
[No CFG could be retrieved]
This function checks that the url template is correctly encoded and correctly encodes the url params. should not encode params in url.
typo : "por" -> "an"
@@ -59,13 +59,14 @@ class User_Command extends \WP_CLI\CommandWithDBObject { /** * Delete one or more users. * - * @synopsis <id>... [--reassign=<id>] + * @synopsis <user>... [--reassign=<id>] */ public function delete( $args, $assoc_args ) { $assoc_args = wp_parse_args( $assoc_args, array( 'reassign' => null ) ); + $args[0] = self::get_user_from_first_arg( $args[0] )->ID; parent::delete( $args, $assoc_args ); }
[User_Command->[add_role->[add_role],set_role->[set_role],remove_role->[remove_role]]]
Delete a user.
That won't cut it. `<user>...` in the synopsis means that the command should support a variable number of arguments.
@@ -189,7 +189,8 @@ class CsNetfilters(object): def add_chain(self, rule): """ Add the given chain if it is not already present """ if not self.has_chain(rule.get_table(), rule.get_chain()): - CsHelper.execute("iptables -t %s -N %s" % (rule.get_table(), rule.get_chain())) + if rule.get_chain() != "": + CsHelper.execute("iptables -t %s -N %s" % (rule.get_table(), rule.get_chain())) self.chain.add(rule.get_table(), rule.get_chain()) def del_standard(self):
[CsNetfilter->[__eq__->[get_chain,get_rule,get_table]],CsNetfilters->[compare->[add,get_count,has_rule,add_rule,get_unseen],add_chain->[has_chain,add],get_all_rules->[get_count,last,add_rule,add],has_table->[get],__init__->[CsChain,CsTable],has_chain->[has_chain],has_rule->[get_count,get]]]
Add a rule to the chain if it is not already present.
so when rule.get_chain() != self.chain && rule.get_chain() != "", we still add it to self.chain?
@@ -31,5 +31,5 @@ func NewSecretStoreAll(mctx MetaContext) SecretStoreAll { // Use system keychain but fall back to file store if not available. return SecretStoreFallbackBehaviorOnError } - return NewSecretStoreUpgradeable(ssecretservice, sfile, shouldUpgradeOpportunistically, shouldStoreInFallback) + return NewSecretStoreUpgradeable(ssecretservice, sfile, "system keyring", "file-based secret store (see https://keybase.io/docs/crypto/local-key-security)", shouldUpgradeOpportunistically, shouldStoreInFallback) }
[ForceSecretStoreFile,GetForceLinuxKeyring,G,GetDataDir]
This function is called when a secret service is not available.
do we need the keybase.io url here? does user see this somewhere?
@@ -76,7 +76,8 @@ public Task Handle(MyReplyToOriginator message, IMessageHandlerContext context) { - TestContext.Intent = (MessageIntentEnum) Enum.Parse(typeof(MessageIntentEnum), context.MessageHeaders[Headers.MessageIntent]); + TestContext.Intent = (MessageIntentEnum)Enum.Parse(typeof(MessageIntentEnum), context.MessageHeaders[Headers.MessageIntent]); + TestContext.ReceivedCorrelationId = context.MessageHeaders[Headers.CorrelationId]; TestContext.Done = true; return Task.FromResult(0); }
[When_using_ReplyToOriginator->[Task->[Reply,Intent,AreEqual,Run],Endpoint->[RequestingSaga->[Task->[SendLocal,MarkAsComplete,ReplyToOriginator,SomeCorrelationId,CorrIdForResponse],ConfigureHowToFindSaga->[CorrIdForResponse,ToSaga]],MyReplyToOriginatorHandler->[Task->[FromResult,Intent,Parse,MessageHeaders,Done,MessageIntent]],config]]]
Handle the MyReplyToOriginator message.
we can remove the Done condition and check whether `ReceivedCorrelationId` has a value
@@ -293,7 +293,7 @@ namespace panzer { Thyra::LinearOpTester<double> tester; tester.show_all_tests(true); - tester.set_all_error_tol(1e-16); + tester.set_all_error_tol(std::numeric_limits<double>::epsilon()); tester.num_random_vectors(20); Teuchos::FancyOStream fout(Teuchos::rcpFromRef(std::cout));
[buildAssemblyPieces->[buildAssemblyPieces,testInitialzation]]
This test tests the missing values of a missing variable. This method creates the necessary components for the model. NOT HROW - This function is called from the main method of the RCP runtime. Initialize the global evaluation data for the given object.
Is that going to be enough? That does not allow for any accumulated roundoff. Could we make this `2*eps` or even `10*eps`? It is hard for a defect to hide in the difference between `eps` and `10*eps`.
@@ -401,7 +401,7 @@ public final class CentralAuthenticationServiceImpl implements CentralAuthentica final ServiceTicket serviceTicket = this.serviceTicketRegistry.getTicket(serviceTicketId, ServiceTicket.class); if (serviceTicket == null) { - logger.info("ServiceTicket [{}] does not exist.", serviceTicketId); + logger.info("Service ticket [{}] does not exist.", serviceTicketId); throw new InvalidTicketException(serviceTicketId); }
[CentralAuthenticationServiceImpl->[grantServiceTicket->[grantServiceTicket]]]
Validate a service ticket. Get the attributes of the principal.
changes like this are unrelated to this code review, no?
@@ -47,8 +47,15 @@ public class DatasetCleanerTask extends BaseAbstractTask { DatasetCleaner datasetCleaner = new DatasetCleaner(FileSystem.get(new Configuration()), this.taskContext.getTaskState().getProperties()); datasetCleaner.clean(); + this.workingState = WorkUnitState.WorkingState.SUCCESSFUL; } catch (IOException e) { + this.workingState = WorkUnitState.WorkingState.FAILED; throw new RuntimeException(e); } } + + @Override + public void commit() { + log.info("task {} commits with state {}", this.taskContext.getTaskState().getTaskId(), this.workingState); + } }
[DatasetCleanerTask->[run->[getProperties,clean,RuntimeException,Configuration,get,DatasetCleaner]]]
This method is invoked when the task is run.
Just curious, if this task runs in mapper or driver? I think most gobblin retention job runs on driver, if that's the case, what's the taskID here?
@@ -737,6 +737,15 @@ class _list_attr_splat(list): def __setattr__(self, attr, value): for x in self: setattr(x, attr, value) + def __getattribute__(self, attr): + if len(self) == 0: + raise AttributeError("Trying to access %r attribute on an empty 'splattable' list" % attr) + if len(self) == 1: + return getattr(self[0], attr) + try: + return _list_attr_splat([getattr(x, attr) for x in self]) + except: + raise AttributeError("Trying to access %r attribute on a 'splattable' list, but list items have no %r attribute" % (attr, attr)) def __dir__(self): if len(set(type(x) for x in self)) == 1:
[Plot->[xgrid->[_grid],column->[column],ygrid->[_grid],xaxis->[_axis],yaxis->[_axis],row->[row]]]
Set attribute of all objects in the sequence.
`except Exception` is the preferred form. Otherwise system exceptions are going to be caught here as well.
@@ -506,11 +506,15 @@ func (i *Ingester) flushUser(userID string, immediate bool) { } func (i *Ingester) flushSeries(u *userState, fp model.Fingerprint, series *memorySeries, immediate bool) { - // Enqueue this series flushing if the oldest chunk is older than the threshold u.fpLocker.Lock(fp) - firstTime := series.head().FirstTime() + if len(series.chunkDescs) <= 0 { + u.fpLocker.Unlock(fp) + return + } + + firstTime := series.chunkDescs[0].FirstTime() flush := immediate || model.Now().Sub(firstTime) > i.cfg.MaxChunkAge u.fpLocker.Unlock(fp)
[Ready->[Ready],UserStats->[getStateFor],append->[getStateFor],Collect->[Collect],Describe->[Describe],Query->[getStateFor],LabelValuesForLabelName->[getStateFor]]
flushSeries queues a series flush if the oldest chunk is older than the threshold or if an immediate flush was requested.
It sucks that there now have to be two distinct calls to `Unlock`, but I can't think of a better way right now. (Well, you could define a method on `memorySeries` that returns `(firstTime, ok)`, then define `flush` as `ok && immediate || model.Now().Sub(firstTime) > i.cfg.MaxChunkAge`, but maybe a bit much.)
@@ -130,6 +130,7 @@ add_action( 'wp_ajax_wpseo_replace_vars', 'wpseo_ajax_replace_vars' ); */ function wpseo_save_title() { check_ajax_referer( 'wpseo-bulk-editor' ); + wp_verify_nonce( WPSEO_Utils::filter_input( INPUT_POST, '_ajax_nonce' ) ); $new_title = $_POST['new_value']; $id = intval( $_POST['wpseo_post_id'] );
[No CFG could be retrieved]
Save the new page title.
This is double, check_ajax_referrer already does this.
@@ -199,7 +199,7 @@ CONF_SCHEMA = { 'ccxt_config': {'type': 'object'}, 'ccxt_async_config': {'type': 'object'} }, - 'required': ['name', 'key', 'secret', 'pair_whitelist'] + 'required': ['pair_whitelist'] }, 'edge': { 'type': 'object',
[No CFG could be retrieved]
Magic number of unique items Enumerates all the expectation properties of a transaction.
i think "name" should be required and not have a default - a user should make a conscious decision on what exchange he wants to use. I agree on the rest though.
@@ -497,9 +497,9 @@ public class FileTypeRegistry else { globalDisplay_.showErrorMessage( - "File Download Error", - "Unable to show file because file downloads are " + - "restricted on this server.\n"); + constants_.fileDownloadErrorCaption(), + constants_.fileDownloadErrorMessage() + + constants_.restrictedOnServerMessage()); } } }
[FileTypeRegistry->[getTypeForFile->[getTypeForFile],openFile->[execute->[openFile],onError->[execute],onResponseReceived->[openFile,execute],openFile],editFile->[editFile,openFile],getTextTypeForFile->[getTypeForFile],satelliteEditFile->[editFile]]]
Opens a file in the browser or the file type.
Use a single string, don't concat two strings together.
@@ -625,9 +625,8 @@ class QuantizationFreezePass(object): self._activation_bits = activation_bits self._weight_quantize_type = weight_quantize_type self._quantizable_ops = quantizable_op_type - supported_quantizable_ops = ['conv2d', 'depthwise_conv2d', 'mul'] for op in self._quantizable_ops: - assert op in supported_quantizable_ops, \ + assert op in QuantizationTransformPass._supported_quantizable_op_type, \ op + " is not supported for quantization." self._conv_ops = ['conv2d', 'depthwise_conv2d'] self._fake_quant_op_names = _fake_quant_op_list
[ScaleForTrainingPass->[apply->[_init_var_node]],AddQuantDequantPass->[_inser_quant_dequant_moving_average_abs_max_op->[_init_var_node]],ConvertToInt8Pass->[apply->[_remove_unused_var_nodes],_convert_to_int8->[_load_var]],QuantizationTransformPass->[_insert_quant_range_abs_max_op->[_init_var_node],apply->[_transform_backward,_quant_preprocess,_transform_forward],_create_global_step->[_init_var_node],_insert_quant_moving_average_abs_max_op->[_init_var_node]],ScaleForInferencePass->[apply->[_scale_name]],QuantizationFreezePass->[_insert_post_channel_dequant_op->[_dequantized_var_name,_init_var_node],_insert_post_dequant_op->[_dequantized_var_name]]]
Initializes the object properties from a given base - node.
`assert op in QuantizationFreezePass._supported_quantizable_op_type, \`
@@ -113,6 +113,18 @@ class AuthHandler(object): cont_resp = self.cont_auth.perform(self.cont_c) if self.dv_c: dv_resp = self.dv_auth.perform(self.dv_c) + for achall, response in zip(self.dv_c, dv_resp): + if isinstance(response, challenges.HTTP01Response): + if not response.simple_verify( + achall.chall, achall.domain, + achall.account_key.public_key(), response.PORT): + logger.warning("Self-verify of challenge failed.") + elif isinstance(response, challenges.TLSSNI01Response): + if not response.simple_verify( + achall.chall, achall.domain, + achall.account_key.public_key()): + logger.warning("Self-verify of challenge failed.") + except errors.AuthorizationError: logger.critical("Failure in setting up challenges.") logger.info("Attempting to clean up outstanding challenges...")
[_report_failed_challs->[dict,getUtility,add_message,setdefault,_generate_failed_chall_msg,itervalues],_find_smart_path->[AuthorizationError,enumerate,fatal,get],_find_dumb_path->[append,len,set,isinstance,add,enumerate,is_preferred],_generate_failed_chall_msg->[dict,append,sorted,setdefault,format,join],gen_challenge_path->[_find_smart_path,_find_dumb_path],mutually_exclusive->[isinstance],challb_to_achall->[KeyAuthorizationAnnotatedChallenge,Error,ProofOfPossession,DNS,isinstance,info,RecoveryContact],AuthHandler->[_find_updated_challb->[AuthorizationError,type],_solve_challenges->[perform,ErrorHandler,critical,len,info],get_authorizations->[_solve_challenges,request_domain_challenges,_choose_challenges,info,verify_authzr_complete,_respond,values],_choose_challenges->[gen_challenge_path,extend,_challenge_factory,info,_get_chall_pref],_send_responses->[append,chall_update,answer_challenge,izip],_handle_check->[poll,_find_updated_challb,update,append],_challenge_factory->[append,isinstance,challb_to_achall],_poll_challenges->[_report_failed_challs,keys,sleep,_handle_check,len,update,set,FailedChallenges,chall_update,add,clear],verify_authzr_complete->[AuthorizationError,values],_respond->[dict,_send_responses,extend,_poll_challenges,_cleanup_challenges],_cleanup_challenges->[info,remove,cleanup,isinstance],__init__->[dict],_get_chall_pref->[get_chall_pref,extend]],is_preferred->[mutually_exclusive],getLogger]
Get Responses for challenges from authenticators.
`self.config.http01_port` should be used, as it was previously
@@ -138,6 +138,6 @@ def test_ch_loc(): # test when more than one marker file provided mrks = [mrk_path, mrk2_path, mrk3_path] read_raw_kit(sqd_path, mrks, elp_path, hsp_path, preload=False) - + assert_dig_allclose(raw_py.info, raw_bin.info) run_tests_if_main()
[test_ch_loc->[assert_array_almost_equal,Raw,read_raw_kit,read_sns,bin_ch,join,zip],test_raw_events->[evts->[append],read_raw_kit,assert_array_equal,find_events,evts],test_data->[assert_array_almost_equal,list,assert_array_equal,Raw,assert_true,assert_raises,read_raw_kit,repr,join,loadmat,_test_raw_reader,range,pick_types,read_stim_ch,array],test_epochs->[read_events,assert_array_equal,get_data,read_raw_kit,read_epochs_kit,Epochs],abspath,currentframe,run_tests_if_main,dirname,join,getfile]
Test if the raw kit loc is not a duplicate of the ch_loc.
@teonlamont I was getting some nasty head shape sphere fits with this KIT data, so I wrote this function. This test fails...
@@ -135,7 +135,6 @@ describe('GoogleAdLifecycleReporter', () => { urlBuilt: '1', adRequestStart: '2', adRequestEnd: '3', - extractCreativeAndSignature: '4', adResponseValidateStart: '5', renderFriendlyStart: '6', renderCrossDomainStart: '7',
[No CFG could be retrieved]
Describe the expectation of a single ping. Tests that the server responds to the .
Should 5...9 be renumbered as 4...8?
@@ -328,7 +328,7 @@ public class Fork implements Closeable, Runnable, FinalState { .writeTo(Destination.of(this.taskContext.getDestinationType(this.branches, this.index), this.taskState)) .writeInFormat(this.taskContext.getWriterOutputFormat(this.branches, this.index)) .withWriterId(this.taskId) - .withSchema(this.convertedSchema) + .withSchema(this.convertedSchema.get()) .withBranches(this.branches) .forBranch(this.index) .build();
[Fork->[processRecords->[buildWriter],commitData->[commit,updateByteMetrics],updateRecordMetrics->[updateRecordMetrics],close->[close],getFinalState->[getFinalState],updateByteMetrics->[updateByteMetrics]]]
Build a writer for a missing key.
This will throw an NPE if `convertedSchema` is absent.
@@ -134,6 +134,9 @@ class MediaManager implements MediaManagerInterface */ private $downloadPath; + /** @var FFProbe */ + private $ffprobe; + /** * @var int */
[MediaManager->[modifyMedia->[save,getEntityById],buildData->[save],setDataToMedia->[get],getCurrentUser->[getUser],getFormatUrls->[getByIds],delete->[getEntityById]]]
Creates an instance of the class. Constructor for the missing - node - manager class.
Split over three lines
@@ -18,7 +18,7 @@ def _dummy(*args,**kw): return None def nice_join(seq, sep=", "): - seq = map(str, seq) + seq = list(map(str, seq)) if len(seq) <= 1: return sep.join(seq)
[Tuple->[__init__->[__init__]],Property->[__set__->[validate,__get__,matches],is_valid->[validate]],List->[validate->[is_valid]],MetaHasProps->[__new__->[__new__,autocreate]],PrimitiveProperty->[validate->[nice_join]],Enum->[validate->[nice_join]],HasProps->[properties_containers->[accumulate_from_subclasses],properties_with_refs->[accumulate_from_subclasses],properties_with_values->[properties],changed_vars->[properties_containers,properties_with_refs],pprint_props->[properties_with_values],__init__->[nice_join],class_properties->[accumulate_from_subclasses],changed_properties_with_values->[changed_vars],clone->[to_dict]],DashPattern->[__init__->[__init__]],ColorSpec->[__get__->[isconst],to_dict->[_formattuple,isconst]],Dict->[__init__->[__init__]]]
Join a sequence of strings with a nice join.
As long as this line is changing, how about `[str(x) for x in seq]` instead?
@@ -1239,7 +1239,7 @@ public class DependencyManager implements InstallShinyEvent.Handler, if (dep == null) { Debug.logWarning("No dependency record found for package '" + - packages.get(i) + "' (required by feature '" + + packages.get(i) + constants_.requiredByFeatureLog() + feature + "')"); continue; }
[DependencyManager->[getFeatureDescription->[getFeatureDescription],withRSConnect->[withDependencies],withDataImportJDBC->[execute->[execute],withDependencies],withTestPackage->[execute->[execute],withDependencies],withTutorialDependencies->[execute,withDependencies],withRagg->[withDependencies],withDataImportJSON->[execute->[execute],withDependencies],withRenv->[withDependencies],onInstallShiny->[withShiny],withRSQLite->[execute->[execute],withDependencies],withShiny->[execute->[execute],withDependencies],getFeatureDependencies->[getFeatureDependencies],withOdbc->[execute->[execute],withDependencies],withDataImportMongo->[execute->[execute],withDependencies],withDBI->[execute->[execute],withDependencies],withRoxygen->[withDependencies],withDataImportODBC->[execute->[execute],withDependencies],withShinyAddins->[execute->[execute],withDependencies],withRMarkdown->[execute,withRMarkdown,withDependencies],withKeyring->[execute->[execute],withDependencies],processDependencyRequest->[onError->[execute,onError],onResponseReceived->[execute->[execute],execute],execute],withUnsatisfiedDependencies->[onError->[onError],onResponseReceived->[execute->[],onResponseReceived],withUnsatisfiedDependencies,onResponseReceived],withRPlumber->[execute->[execute],withDependencies],withDataImportCSV->[execute->[execute],withDependencies],withThemes->[withDependencies],withDependencies->[execute->[execute],DependencyRequest,withDependencies],withStan->[execute,withDependencies],ifDependenciesSatisifed->[onError->[execute],onResponseReceived->[execute->[],execute]],withDataImportSAV->[execute->[execute],withDependencies],withDataImportXLS->[execute->[execute],withDependencies],installDependencies->[onError->[execute],onResponseReceived->[execute->[],execute],execute,installDependencies],withTinyTeX->[execute,withDependencies],withConnectionPackage->[execute->[execute],withDependencies],withReticulate->[execute->[execute],withDependencies],withDataImportXML->[execute->[execute],withDependen
cies],confirmPackageInstallation->[execute->[execute],execute],withProfvis->[execute->[execute],withDependencies],withR2D3->[withDependencies],withPackrat->[execute->[execute],withDependencies]]]
Get the list of R package dependencies for a feature.
Don't localize Debug logging calls.
@@ -469,3 +469,9 @@ ds_mgmt_dev_set_faulty(uuid_t uuid, Mgmt__DevStateResp *resp) { return 0; } + +int +ds_mgmt_dev_replace(uuid_t old_uuid, uuid_t new_uuid, Mgmt__DevReplaceResp *resp) +{ + return 0; +}
[mock_ds_mgmt_tgt_update_setup->[uuid_clear],mock_ds_mgmt_pool_overwrite_acl_teardown->[daos_acl_free,daos_prop_free],ds_mgmt_pool_delete_acl->[uuid_copy,daos_prop_dup],ds_mgmt_pool_update_acl->[uuid_copy,daos_acl_dup,daos_prop_dup],ds_mgmt_pool_list_cont->[D_ALLOC_ARRAY,memcpy],mock_ds_mgmt_pool_overwrite_acl_setup->[uuid_clear],mock_ds_mgmt_pool_get_acl_teardown->[daos_prop_free],mock_ds_mgmt_pool_update_acl_teardown->[daos_acl_free,daos_prop_free],ds_mgmt_pool_query->[uuid_copy],mock_ds_mgmt_pool_query_setup->[memset,uuid_clear],mock_ds_mgmt_pool_set_prop_teardown->[daos_prop_free],mock_ds_mgmt_pool_evict_setup->[uuid_clear],mock_ds_mgmt_cont_set_owner_teardown->[D_FREE],ds_mgmt_pool_overwrite_acl->[uuid_copy,daos_acl_dup,daos_prop_dup],ds_mgmt_pool_extend->[uuid_copy],ds_mgmt_cont_set_owner->[uuid_copy,D_STRNDUP],mock_ds_mgmt_pool_list_cont_teardown->[D_FREE],ds_mgmt_pool_get_acl->[uuid_copy,daos_prop_copy,daos_prop_alloc],mock_ds_mgmt_pool_delete_acl_setup->[uuid_clear],mock_ds_mgmt_pool_delete_acl_teardown->[daos_prop_free],mock_ds_mgmt_pool_get_acl_setup->[uuid_clear],ds_mgmt_pool_target_update_state->[uuid_copy],mock_ds_mgmt_pool_extend_setup->[uuid_clear],mock_ds_mgmt_list_cont_gen_cont->[D_ALLOC_ARRAY,uuid_generate],mock_ds_mgmt_pool_update_acl_setup->[uuid_clear],ds_mgmt_pool_set_prop->[daos_prop_alloc,daos_prop_copy,daos_prop_dup],ds_mgmt_evict_pool->[uuid_copy],mock_ds_mgmt_cont_set_owner_setup->[uuid_clear]]
Adds a stub implementation of ds_mgmt_dev_replace that takes the old and new device UUIDs and always returns 0.
(style) line over 80 characters
@@ -600,7 +600,7 @@ class SignupView(BaseSignupView): # We should only see GitHub/Google users. assert self.sociallogin.account.provider in ("github", "google") - initial_username = form.initial.get("username", "") + initial_username = form.initial.get("username") or "" # When no username is provided, try to derive one from the email address. if not initial_username:
[user_delete->[scrub_user,donate_attributions,force_logout,delete_user],SignupView->[get->[get]],create_stripe_subscription->[get],recover->[get]]
Determines the initial username for the social signup form, falling back to deriving one from the email address when no username is provided.
hehe. a sneak-in aftereffect :)
@@ -53,12 +53,13 @@ public final class GroupingOperationRewriter else { checkState(groupIdSymbol.isPresent(), "groupId symbol is missing"); - RelationId relationId = columnReferenceFields.get(NodeRef.of(expression.getGroupingColumns().get(0))).getRelationId(); + RelationId relationId = columnReferenceFields.get(NodeRef.of(expression.getGroupingColumns().get(0))).getFieldId().getRelationId(); List<Integer> columns = expression.getGroupingColumns().stream() .map(NodeRef::of) .peek(groupingColumn -> checkState(columnReferenceFields.containsKey(groupingColumn), "the grouping column is not in the columnReferencesField map")) .map(columnReferenceFields::get) + .map(ResolvedField::getFieldId) .map(fieldId -> translateFieldToInteger(fieldId, relationId)) .collect(toImmutableList());
[GroupingOperationRewriter->[calculateGrouping->[contains,get,size],rewriteGroupingOperation->[checkState,ArithmeticBinaryExpression,SubscriptExpression,size,GenericLiteral,isPresent,toSymbolReference,requireNonNull,getRelationId,LongLiteral,collect,ArrayConstructor,toImmutableList],translateFieldToInteger->[checkState,getFieldIndex,equals]]]
Rewrites a grouping() operation into an expression over the group-id symbol, translating each grouping column to its field index within the relation.
If we want to ensure that all arguments to `grouping()` match the grouping columns, we should compare their `relationId` with the expected `relationId`. Currently, it is only checked that they all come from the same relation. Instead of comparing to the first argument's `relationId`, we could have the "expectedRelationId" passed by the caller.
@@ -210,7 +210,16 @@ def declarative(function=None, input_spec=None): # for usage: `declarative(foo, ...)` if function is not None: - return decorated(function) + if isinstance(function, Layer): + if isinstance(function.forward, StaticLayer): + class_name = function.__class__.__name__ + warnings.warn( + "`{}.forward` has already been decorated somewhere. It will be redecorated to replace previous one.". + format(class_name)) + function.forward = decorated(function.forward) + return function + else: + return decorated(function) # for usage: `@declarative` return decorated
[_trace->[create_program_from_desc,extract_vars],_extract_vars->[_extract_vars],extract_vars->[_extract_vars],declarative->[decorated->[copy_decorator_attrs],decorated],TracedLayer->[trace->[_trace,TracedLayer],save_inference_model->[save_inference_model,get_feed_fetch],__call__->[_run,_build_feed,_compile]],save->[get_inout_spec,SaveLoadConfig]]
Decorator that converts an imperative dygraph function into a declarative (statically traced) function.
Please check whether ``function`` has ``forward``, if not, I think we should throw our exception to indicate users?
@@ -41,7 +41,7 @@ module RepositoryDatatableHelper def display_cell_value(cell, team) if cell.value_type == 'RepositoryAssetValue' # Return simple file_name if we call this method not from controller - return cell.value.asset.file_file_name unless defined?(render) + return cell.value.asset.file_name unless defined?(render) render partial: 'shared/asset_link', locals: { asset: cell.value.asset, display_image_tag: false }, formats: :html
[default_table_order_as_js_array->[to_s],default_table_columns->[to_json],assigned_row->[include?],can_perform_repository_actions->[team,can_manage_repository_rows?,can_manage_repository?,can_create_repositories?],prepare_row_columns->[created_at,assigned_row,repository_row_path,escape_input,name,edit_repository_repository_row_path,each,full_name,repository_repository_row_path,id,l,display_cell_value],display_cell_value->[display_tooltip,asset,render,custom_auto_link,data,value_type,file_file_name],include]
Renders the display value of a repository table cell, returning the plain file name when the method is not called from a controller context.
Layout/EmptyLineAfterGuardClause: Add empty line after guard clause.
@@ -1436,6 +1436,16 @@ void homeaxis(const AxisEnum axis) { #endif + // Put away the Z probe + #if HOMING_Z_WITH_PROBE + if (axis == Z_AXIS) { + #if ENABLED(BLTOUCH) + if (set_bltouch_deployed(false)) return; + #endif + if (STOW_PROBE()) return; + } + #endif + // Clear retracted status if homing the Z axis #if ENABLED(FWRETRACT) if (axis == Z_AXIS) fwretract.hop_amount = 0.0;
[No CFG could be retrieved]
Homes the specified axis, stowing the Z probe when homing the Z axis and updating the axis's endstop/retract state.
Moved this down to make the revert more complete, but if the other reverts solve the Z position problem then this may turn out to be ok moved back up above.