patch stringlengths 18 160k | callgraph stringlengths 4 179k | summary stringlengths 4 947 | msg stringlengths 6 3.42k |
|---|---|---|---|
@@ -304,6 +304,16 @@ func (s *BoltState) getContainerFromDB(id []byte, ctr *Container, ctrsBkt *bolt.
}
ctr.lock = lock
+ if ctr.config.OCIRuntime == "" {
+ ctr.ociRuntime = s.runtime.defaultOCIRuntime
+ } else {
+ ociRuntime, ok := s.runtime.ociRuntimes[ctr.config.OCIRuntime]
+ if !ok {
+ return errors.Wrapf(ErrInternal, "container %s was created with OCI runtime %s, but that runtime is not available in the current configuration", ctr.ID(), ctr.config.OCIRuntime)
+ }
+ ctr.ociRuntime = ociRuntime
+ }
+
ctr.runtime = s.runtime
ctr.valid = valid
| [getContainerFromDB->[Wrapf,Equal,Bucket,RetrieveLock,Unmarshal,Get],closeDBCon->[Close,Unlock],removeContainer->[ID,Wrapf,Join,Bucket,ForEach,Name,Dependencies,Delete,Errorf,Get,DeleteBucket],addContainer->[ID,Wrapf,Bucket,Equal,Marshal,closeDBCon,Update,Put,Name,CreateBucket,Dependencies,Namespace,Get,getDBCon],getPodFromDB->[Wrapf,Equal,Bucket,RetrieveLock,Unmarshal,Get],getVolumeFromDB->[Get,Wrapf,Bucket,Unmarshal],getDBCon->[Wrapf,Lock,Open],Wrapf,IsRootless,Bucket,Put,Get,GetRootlessUID,DefaultStoreOptions,Update] | getContainerFromDB retrieves a container from the database. | we support specifying the runtime by an absolute path, we needed it to avoid a breaking change. When the runtime starts with `/` we use directly the specified path without any lookup. Should we take care of this case here and skip the error when `ctr.config.OCIRuntime[0] == '/'` ? |
@@ -297,7 +297,8 @@ inner_evp_generic_fetch(struct evp_method_data_st *methdata, int operation_id,
unsupported = 1;
if (meth_id == 0
- || !ossl_method_store_cache_get(store, meth_id, properties, &method)) {
+ || !ossl_method_store_cache_get(store, prov, meth_id, properties,
+ &method)) {
OSSL_METHOD_CONSTRUCT_METHOD mcm = {
get_tmp_evp_method_store,
get_evp_method_from_store,
| [int->[evp_set_default_properties_int],EVP_default_properties_enable_fips->[evp_default_properties_enable_fips_int],EVP_set_default_properties->[evp_set_default_properties_int]] | This function is called by the generic fetch code for the evp_generic_fetch_ This method is called by the OSSL library when a method is not available. up_ref_method - Fetch the next algorithm in the chain. | Removing this call to `ossl_method_store_cache_get()` I can get it to work consistently. This call returns 0 (not in cache?) in many cases, so the wrong fetch is not always seen. Something still not quite right with fetching from cache? |
@@ -97,7 +97,7 @@ metadata_schema = {
# Category Details
'anpa-category': {
- 'type': 'dict',
+ 'type': 'list',
'nullable': True,
'mapping': {
'type': 'object',
| [rel,namedtuple] | A collection of all the fields of a single object. Return a dictionary of all the possible metadata for a single node. | Can we schema definition of each dict here so cerberus validates it? |
@@ -34,6 +34,7 @@ class Bml(CMakePackage):
url = "https://github.com/lanl/bml"
version('develop', git='https://github.com/lanl/bml', branch='master')
+ version('1.2.2', git='https://github.com/lanl/bml', tag='v1.2.2')
version('1.1.0', git='https://github.com/lanl/bml', tag='v1.1.0')
variant('shared', default=True, description='Build shared libs')
| [Bml->[cmake_args->[format],variant,depends_on,version]] | Creates a CMakePackage object for the given basic matrix library. | Can you use `version('1.2.2', 'checksum')` instead? |
@@ -34,7 +34,7 @@ module Verify
private
def idv_profile_complete?
- idv_session.resolution.try(:success?)
+ idv_session.profile_confirmation == true
end
def idv_finance_complete?
| [ReviewController->[valid_password?->[valid_password?]]] | idv_profile_complete? - check if profile is complete or not. | just a naming nit. The profile response is a `resolution` not a `confirmation`. That's the language that most of the vendors use. Resolving the contact info to a single record is step 1, then confirming financials and phone are steps 2 and 3. |
@@ -564,6 +564,14 @@ type UserChangedHandler interface {
HandleUserChanged(uid keybase1.UID) error
}
+type ConnectivityMonitorResult int
+
+const (
+ ConnectivityMonitor_YES ConnectivityMonitorResult = iota
+ ConnectivityMonitor_NO
+ ConnectivityMonitor_UNKNOWN
+)
+
type ConnectivityMonitor interface {
- IsConnected(ctx context.Context) bool
+ IsConnected(ctx context.Context) ConnectivityMonitorResult
}
| [No CFG could be retrieved] | HandleUserChanged implements the ConnectivityMonitor interface for UserChanged. | `go vet/lint` will probably complain about the `_` here |
@@ -881,11 +881,12 @@ class WheelBuilder(object):
try:
call_subprocess(wheel_args, cwd=req.setup_py_dir,
show_stdout=False, spinner=spinner)
- return True
except Exception:
spinner.finish("error")
logger.error('Failed building wheel for %s', req.name)
- return False
+ return None
+ # listdir's return value is sorted to be deterministic
+ return os.path.join(tempd, sorted(os.listdir(tempd))[0])
def _clean_one(self, req):
base_args = self._base_setup_args(req)
| [WheelBuilder->[_clean_one->[_base_setup_args],_build_one_legacy->[_base_setup_args],_build_one_pep517->[replace_python_tag],build->[_build_one,should_use_ephemeral_cache]],move_wheel_files->[record_installed->[normpath],clobber->[record_installed],open_for_csv,root_is_purelib,message_about_scripts_not_on_PATH,sorted_outrows,clobber,get_entrypoints,get_csv_rows_for_installed],get_entrypoints->[_split_ep],get_csv_rows_for_installed->[normpath,rehash],should_use_ephemeral_cache->[_contains_egg_info]] | Build one legacy version of a package. | I would call `sorted()` prior to accessing the first element so any unexpected case will be reproducible (as in the original issue). Or if it shouldn't proceed if there is more than one element, add an assertion about the length prior to accessing. Also, is this supposed to be a filename, or a path like in the other method? |
@@ -6,6 +6,7 @@ package models
import (
"fmt"
+ "os"
"sort"
"strings"
| [APIFormat->[ZipURL,APIURL,TarURL,APIFormat],LoadAttributes->[loadAttributes],loadAttributes,toConds,APIFormat,LoadAttributes] | loadAttributes loads all attributes of a single release of repository. GetReleaseAttachments returns a list of all the attachments of a release. | Can you add Gitea copyright to header of this file? |
@@ -1043,16 +1043,9 @@ void Client::handleCommand_DeleteParticleSpawner(NetworkPacket* pkt)
u32 server_id;
*pkt >> server_id;
- u32 client_id;
- auto i = m_particles_server_to_client.find(server_id);
- if (i != m_particles_server_to_client.end())
- client_id = i->second;
- else
- return;
-
ClientEvent *event = new ClientEvent();
event->type = CE_DELETE_PARTICLESPAWNER;
- event->delete_particlespawner.id = client_id;
+ event->delete_particlespawner.id = server_id;
m_client_event_queue.push(event);
}
| [handleCommand_DeathScreen->[push],handleCommand_StopSound->[stopSound,find,end],handleCommand_AccessDenied->[getCommand,empty,wide_to_utf8,getSize],handleCommand_HudChange->[find,push,end],handleCommand_FormspecPrepend->[assert,getLocalPlayer],handleCommand_HudSetParam->[size,getLocalPlayer,c_str,getTextureSource,empty,readS32,assert],handleCommand_NodeDef->[decompressZlib,sanity_check,deSerialize,readLongString,str,isRunning,getSize],handleCommand_BlockData->[getPos,deSerialize,addUpdateMeshTaskWithEdge,getString,getBlockNoCreateNoEx,getMap,assert,insertBlock,deSerializeNetworkSpecific,getSize],handleCommand_Inventory->[getLocalPlayer,deSerialize,getString,assert,getSize],handleCommand_ItemDef->[decompressZlib,sanity_check,deSerialize,readLongString,str,isRunning,getSize],handleCommand_ModChannelSignal->[setChannelState,on_modchannel_signal,leaveChannel],handleCommand_ShowFormSpec->[readLongString,push],handleCommand_ActiveObjectMessages->[readU16,getString,what,deSerializeString,good,processActiveObjectMessage,getSize],handleCommand_HudRemove->[erase,find,push,end],handleCommand_AddNode->[deSerialize,getU8,addNode,getU8Ptr,getSize],handleCommand_DenySudoMode->[deleteAuthData,pushToChatQueue],handleCommand_OverrideDayNightRatio->[push],handleCommand_FadeSound->[fadeSound,find,end],handleCommand_RemoveNode->[removeNode,getSize],handleCommand_SrpBytesSandB->[srp_user_process_challenge,size,Send,c_str],handleCommand_MovePlayer->[assert,setPosition,push,getLocalPlayer],handleCommand_Hello->[Disconnect,srp_user_delete,ser_ver_supported,choseAuthMech,getBool,getServerAddress,startAuth,promptConfirmRegistration,getSize],handleCommand_InventoryFormSpec->[assert,readLongString,getLocalPlayer],handleCommand_CloudParams->[push],handleCommand_DeleteParticleSpawner->[find,push,end],handleCommand_PlaySound->[playSound,playSoundAt,getPosition,getActiveObject],handleCommand_NodemetaChanged->[decompressZlib,isValidPosition,deSerialize,setNodeMetadata,readLongString,end,begin,get
Map,getSize],handleCommand_Media->[isStarted,sanity_check,readLongString,isRunning,conventionalTransferDone,getSize],handleCommand_AddParticleSpawner->[getRemainingString,deSerialize,readLongString,getRemainingBytes,getSpawnerId,push,readU8],handleCommand_DetachedInventory->[deSerialize,erase,find,end],handleCommand_Breath->[assert,setBreath,getLocalPlayer],handleCommand_ModChannelMsg->[getPeerId,on_modchannel_message,channelRegistered],handleCommand_Movement->[assert,getLocalPlayer],handleCommand_SpawnParticle->[readF32,readU8,deSerialize,getString,push,readV3F32,deSerializeLongString,getSize],handleCommand_AnnounceMedia->[isStarted,trim,step,sanity_check,at_end,addFile,empty,base64_decode,isRunning,addRemoteServer,next,getSize],handleCommand_AuthAccept->[size,setPosition,deleteAuthData,getLocalPlayer,assert,gettext,Send,v3f],handleCommand_HudSetSky->[readU16,readU8,readARGB8,getString,deSerializeString,push_back,push,getSize],handleCommand_HudAdd->[getSize,push,getString],handleCommand_HP->[getLocalPlayer,on_hp_modification,assert,moddingEnabled,push],handleCommand_LocalPlayerAnimations->[assert,getLocalPlayer],handleCommand_EyeOffset->[assert,getLocalPlayer],handleCommand_Deprecated->[getCommand,getPeerId],handleCommand_AcceptSudoMode->[deleteAuthData,startAuth],handleCommand_ActiveObjectRemoveAdd->[what,readLongString,removeActiveObject,addActiveObject],handleCommand_ChatMessage->[moddingEnabled,on_receiving_message,pushToChatQueue,wide_to_utf8],handleCommand_TimeOfDay->[setTimeOfDaySpeed,setTimeOfDay,getDayNightRatio,getSize],handleCommand_Privileges->[clear,insert],handleCommand_UpdatePlayerList->[addPlayerName,removePlayerName],handleCommand_HudSetFlags->[assert,setMinimapMode,getLocalPlayer],handleCommand_CSMRestrictionFlags->[loadMods]] | Handles a command that deletes a particle spawner v2f - constructor for HudaddEvent. | there should be a sanity check here for the unlikely case that the server id is > `m_next_particle_spawner_id` |
@@ -0,0 +1,16 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11.23 on 2020-02-03 05:33
+from __future__ import unicode_literals
+
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('users', '0014_auto_20200110_0519'),
+ ('users', '0014_user_is_newsletter_subscribed'),
+ ]
+
+ operations = [
+ ]
| [No CFG could be retrieved] | No Summary Found. | Why is there an explicit merge migration? We can have just 1 single migrations .py file. I think all you need to do is put `('users', '0014_user_is_newsletter_subscribed'),` instead on line 16 inside the `kuma/users/migrations/0014_auto_20200110_0519.py` file and then delete this file here. |
@@ -151,6 +151,12 @@ func (c *MasterConfig) RunDNSServer() {
case "tcp6":
config.BindNetwork = "ipv6"
}
+ if strings.HasSuffix(c.Options.DNSConfig.ClusterDomain, ".") {
+ config.Domain = c.Options.DNSConfig.ClusterDomain
+ } else {
+ config.Domain = c.Options.DNSConfig.ClusterDomain + "."
+ }
+ config.Local = "openshift.default.svc." + config.Domain
config.DnsAddr = c.Options.DNSConfig.BindAddress
config.NoRec = !c.Options.DNSConfig.AllowRecursiveQueries
| [RunBuildConfigChangeController->[BuildConfigChangeControllerClients,Create,NewOSClientBuildConfigInstantiatorClient,Run],RunImageImportController->[Create,Infof,Duration,V,NewTokenBucketRateLimiter,ImageImportControllerClient,RunUntil,Run],RunDeploymentConfigController->[Codec,DeploymentConfigControllerClients,Run,Create],RunSDNController->[NewPlugin,SDNControllerClients,Fatalf,StartMaster],RunPolicyCache->[Run],RunDNSServer->[GetAndTestEtcdClient,TryListen,Infof,DNSServerClient,NewServerDefaults,SplitHostPort,Warningf,WaitForSuccessfulDial,Fatalf,ListenAndServe],RunServiceAccountsController->[KubeClient,NewServiceAccountsController,Infof,DefaultServiceAccountsControllerOptions,Run],RunProjectCache->[Run,Infof],RunBuildController->[Create,NewFromPlugins,CreateDeleteController,LegacyCodec,Run,BuildControllerClients,ImageFor,NewOSClientBuildClient],RunOriginNamespaceController->[Run,Create,OriginNamespaceControllerClients],RunProjectAuthorizationCache->[Run],RunDeploymentController->[Create,Join,EnvVars,Codec,DeploymentControllerClients,Fatalf,GetKubeClient,Run,ImageFor],RunGroupCache->[Run],RunServiceAccountTokensController->[ReadPrivateKey,KubeClient,Infof,ReadFile,CertsFromPEM,Fatalf,JWTTokenGenerator,Run,NewTokensController],RunSecurityAllocationController->[DefaultMCSAllocation,Create,NewRepair,Infof,Resource,SecurityAllocationControllerClient,New,V,RunOnce,ParseRange,Fatalf,NewEtcd,NewContiguousAllocationMap,Run,Namespaces],RunBuildPodController->[BuildPodControllerClients,Create,CreateDeleteController,Run,NewOSClientBuildClient],RunDeploymentImageChangeTriggerController->[DeploymentImageChangeTriggerControllerClient,Create,Run],RunDeploymentConfigChangeController->[Codec,Create,Run,DeploymentConfigChangeControllerClients],RunServiceAccountPullSecretsControllers->[NewDockercfgController,KubeClient,NewDockercfgTokenDeletedController,NewDockerRegistryServiceController,Run,NewDockercfgDeletedController],RunDeployerPodController->[DeployerPodControllerClients,Cre
ate,Run,Codec],RunBuildImageChangeTriggerController->[Run,BuildImageChangeTriggerControllerClients,Create,NewOSClientBuildConfigInstantiatorClient]] | RunDNSServer starts the DNS server c. Options. DNSConfig. BindAddress. | need to ensure trailing dot here as well, right? |
@@ -791,12 +791,10 @@ class Stage(params.StageParams):
class PipelineStage(Stage):
- def __init__(self, *args, name=None, meta=None, **kwargs):
+ def __init__(self, *args, name=None, **kwargs):
super().__init__(*args, **kwargs)
self.name = name
self.cmd_changed = False
- # This is how the Stage will discover any discrepancies
- self.meta = meta or {}
def __eq__(self, other):
return super().__eq__(other) and self.name == other.name
| [create_stage->[loads_from],Stage->[_run->[_warn_if_fish],dumpd->[resolve_wdir],_status->[update],relpath->[relpath],remove->[unprotect_outs,remove,remove_outs],_fill_stage_outputs->[loads_from],check_can_commit->[stage_changed,save,_changed_entries],_outs_cached->[changed],get_all_files_number->[_filter_outs],_fill_stage_dependencies->[loads_from],get_used_cache->[get_used_cache,_filter_outs,update],update->[reproduce],run->[_run,_save_deps,commit,save,stage_changed,is_cached,remove_outs],status->[_status,stage_status],_changed->[stage_changed,_changed_deps,_changed_outs],is_cached->[_changed_outs,reload],_compute_md5->[dumpd],save->[_save_deps,save,_compute_md5],checkout->[checkout,_filter_outs],resolve_wdir->[relpath],commit->[commit],_fix_outs_deps_path->[relpath],reproduce->[changed],_deps_cached->[changed],stage_status->[stage_changed]]] | Initialize a with the given name meta and cmd_changed. | Note that this `meta` was never used before. The idea during the implementation was to provide metadata to the stage on the discrepancies between lockfile and pipeline file (eg: someone added an out in pipeline file manually). But, since we are moving away to having less warnings and messages, this _future-proofing_ is not needed. |
@@ -72,6 +72,18 @@ from _setup_support import (
show_help
)
+
+import os
+if os.environ.get('READTHEDOCS') == 'True':
+ # Workaround to upgrade ``npm`` since Read the Docs build environment have
+ # ``3.5.2`` and we need at least ``npm>=6.0``
+ os.system('cd ~ ; mkdir bin ; npm install npm')
+ os.environ['PATH'] = '/home/docs/node_modules/.bin/:{}'.format(os.environ.get('PATH'))
+ print('PATH: {}'.format(os.environ.get('PATH')))
+
+ # Tell Bokeh install script to always build the Javascript files
+ os.environ['BOKEH_BUILD_JS'] = '1'
+
# immediately bail for ancient pythons
if sys.version_info[:2] < (2, 7):
raise RuntimeError("Bokeh requires python >= 2.7")
| [package_path,RuntimeError,join,get_version,copy,conda_rendering,find_packages,show_bokehjs,append,len,show_help,fixup_building_sdist,open,get_package_data,fixup_for_packaged,package_files,install_js,get_cmdclass,build_or_install_bokehjs,setup,exit] | Creates a new package with the given name and version. The functions below are used to find the n - ary object that can be used to find. | Let's put all this in a function in `_setup_helpers.py`, e.g. `fixup_for_rtd()` |
@@ -1058,6 +1058,9 @@ func sendRelayPayment(mctx libkb.MetaContext, walletState *WalletState,
}
rres, err := walletState.SubmitRelayPayment(mctx.Ctx(), post)
if err != nil {
+ if rerr := walletState.RemovePendingTx(mctx.Ctx(), accountID, stellar1.TransactionID(relay.FundTx.TxHash)); rerr != nil {
+ mctx.CDebugf("error calling RemovePendingTx: %s", rerr)
+ }
return res, err
}
mctx.CDebugf("sent payment (relay) kbTxID:%v txID:%v pending:%v", rres.KeybaseID, rres.StellarID, rres.Pending)
| [StatusCode,GetMeUV,IsNativeXLM,NewPaymentID,BackgroundWithLogTags,RunEngine2,LocalSigchainGuard,StartStandaloneChat,WithUID,Post,New,SendMsgByName,AccountID,PaymentXLMTransaction,Details,Time,IsAppStatusErrorCode,NewMetaContext,IsResolutionNotFoundError,RuneCountInString,Lookup,RelocateTransaction,RecipientInput,Relay,informAcceptedDisclaimer,Split,WithCtx,WithPublicKeyOptional,Network,DecryptB64,SendMsgByNameNonblock,DeviceID,Result,Sync,IsValid,GetStellar,SubmitPayment,WithDesc,NewSeedStr,Verbose,GetCurrencyLocal,PaymentDetails,FetchAccountBundle,CreateAccountXLMTransaction,LookupByAddress,SeedStr,ShouldCreate,SubmitRelayPayment,SubmitRelayClaim,Stellar,Now,WriteByte,AddPendingTx,InformHasWallet,RecentPayments,Add,CreateNewAccount,ToSocialAssertion,Direct,OutsideCurrencyCode,GetNormalizedName,AssertionParse,ExchangeRate,AdvanceAccounts,GetUPAKLoader,AddAccount,Since,AddressStr,Refresh,ParseStellarSecretKey,AssetNative,Create,NewLoadUserByNameArg,NewLoadUserByUIDArg,ParseAmount,Typ,NewPerUserKeyUpgrade,GetUsernameAndUserVersionIfValid,UpdateUnreadCount,NewInitial,SecretKey,KickAutoClaimRunner,RemovePendingTx,Load,GetPerUserKeyring,NewAddressStr,AdvanceBundle,FetchSecretlessBundle,LookupUsername,GetServerDefinitions,WithForcePoll,CurrentGeneration,Eq,PrimaryAccount,ConvertOutsideToXLM,NewLoadUserArgWithMetaContext,CDebugf,SecureNoLogString,Balances,GetAccountDisplayCurrency,CTraceTimed,GetKey,WithNetContext,ActiveDevice,NewNormalizedUsername,Equal,ParseStellarAccountID,PostWithChainlink,Sprintf,LoadV2,BuildPaymentID,CachedHasWallet,String,StellarSimplifyAmount,EncodeToString,ToUserVersion,NewChatConversationID,NewMessageBodyWithSendpayment,MarkAsRead,SetDefaultDisplayCurrency,SubmitRequest,StringFromStellarAmount,CWarningf,Slice,Username,StellarAccountID,FloatString,Error,UID,Seconds,TransactionID,Ctx,FetchBundleWithGens,LoadUser,IsNil,Errorf,GetName,NewResolveThenIdentify2,Clock,Debug,SliceStable,NewMessageBodyWithRequestpayment,Unix,ConvertXLMToOutside,Join,has
AcceptedDisclaimer,Contains,IsAvailable,CurrentUsername,G,Background,RandBytes,ParseStellarAmount,LookupUnverified,ServerTimeboundsRecommendation] | sendPayment is called by the chat system when a new payment is received. claims a waiting relay. | this is the fix 2 of 2. |
@@ -59,6 +59,11 @@ class AddressMetaForm(forms.ModelForm):
class Meta:
model = Address
fields = ['country', 'preview']
+ labels = {
+ 'country': pgettext_lazy(
+ 'Country', 'Country'),
+ 'preview': pgettext_lazy(
+ 'Address preview', 'Preview')}
def clean(self):
data = super().clean()
| [construct_address_form->[update_base_fields],get_form_i18n_lines->[_convert_to_bound_fields],AddressForm->[PossiblePhoneNumberFormField],CountryAwareAddressForm->[validate_address->[add_field_errors],clean->[validate_address]],construct_address_form] | Clean - returns a copy of the data object with preview flag set to False. | Is this meaning specific to an address object? Is this field visible in the UI? |
@@ -210,6 +210,8 @@ class AddPeopleDialog extends Component {
this.setState({
addToCallInProgress: false
});
+
+ return true;
})
.catch(() => {
this.setState({
| [No CFG could be retrieved] | Handles the action of the add button. A component that converts a state object into a map of props that can be used to provide. | I'm unsure this will do what you want it to do. The return value of _onSubmit is always true so what happens is invitePeople will get fired then true is returned below and the modal is closed. Returning true here on line 218 is also inconsequential because Dialog does not know what to do with promises so it will not chain onto your invitePeople call, even if your invitePeople call were returned from _onSubmit. |
@@ -169,7 +169,8 @@ func (r *ReplicaChecker) checkDownPeer(region *core.RegionInfo) *Operator {
if stats.GetDownSeconds() < uint64(r.cluster.GetMaxStoreDownTime().Seconds()) {
continue
}
- return CreateRemovePeerOperator("removeDownReplica", r.cluster, OpReplica, region, peer.GetStoreId())
+
+ return r.handleReplica(region, peer, "Down")
}
return nil
}
| [checkBestReplacement->[selectWorstPeer,SelectBestReplacementStore],checkOfflinePeer->[SelectBestReplacementStore]] | checkDownPeer checks if the cluster is able to remove down replica peers. This is a helper function that creates a new operator that removes a replica from the cluster and. | Maybe `replacePeer` or `fixPeer`. |
@@ -105,6 +105,12 @@ public final class DatanodeIdYaml {
.setCurrentVersion(datanodeDetailsYaml.getCurrentVersion());
datanodeDetails = builder.build();
+ } catch (NullPointerException e) {
+ DatanodeDetails details = DatanodeDetails.newBuilder()
+ .setUuid(UUID.randomUUID()).build();
+ details.setInitialVersion(DatanodeVersions.CURRENT_VERSION);
+ details.setCurrentVersion(DatanodeVersions.CURRENT_VERSION);
+ return details;
}
return datanodeDetails;
| [DatanodeIdYaml->[getDatanodeDetailsYaml->[getPersistedOpStateExpiryEpochSec,getCertSerialId,getInitialVersion,getCurrentVersion,getIpAddress,getHostName,getPersistedOpState,DatanodeDetailsYaml]]] | Reads a datanode id file. | Handling an unchecked exception like this is usually bad practice. Especially in a large try/catch block like this where it's unclear what the cause is. Can we refactor this to explicitly handle the null reference before it is dereferenced? |
@@ -46,9 +46,10 @@ ax2.set_title('Band-pass filtered data')
# In addition, there are functions for applying the Hilbert transform, which is
# useful to calculate phase / amplitude of your signal
-# Filter signal, then take hilbert transform
+# Filter signal with a fairly steep filter, then take hilbert transform
raw_band = raw.copy()
-raw_band.filter(12, 18)
+raw_band.filter(12, 18, l_trans_bandwidth=2., h_trans_bandwidth=2.,
+ filter_length='auto', phase='zero')
raw_hilb = raw_band.copy()
hilb_picks = mne.pick_types(raw_band.info, meg=False, eeg=True)
raw_hilb.apply_hilbert(hilb_picks)
| [plot,print,subplots,set_title,data_path,apply_function,copy,read_raw_fif,filter,legend,crop,join,apply_hilbert,pick_types] | Plots the signal and the phase of the Plot amplitudes and phases of the frequency band. | Maybe while you're at it: new line 60/old line 58 is missing a noun (probably 'function'). |
@@ -131,7 +131,7 @@ def check_geth_version_for_tests(blockchain_type):
).communicate()
supported, _, our_version = is_supported_client(geth_version_string.decode())
if not supported:
- sys.exit(
+ pytest.exit(
f"You are trying to run tests with an unsupported GETH version. "
f"Your Version: {our_version} "
f"Min Supported Version {LOWEST_SUPPORTED_GETH_VERSION} "
| [timeout_for_setup_and_call->[handler->[report],report],pytest_runtest_call->[timeout_for_setup_and_call],pytest_runtest_teardown->[handler->[report]],set_item_timeouts->[timeout_from_marker],pytest_runtest_setup->[timeout_for_setup_and_call],pytest_runtest_protocol->[set_item_timeouts]] | Check GETH version for tests. | huh, I never know that `sys.exit` accepted a str. |
@@ -132,7 +132,7 @@ public class KVMGuru extends HypervisorGuruBase implements HypervisorGuru {
guestOsMapping = _guestOsHypervisorDao.findByOsIdAndHypervisor(guestOS.getId(), getHypervisorType().toString(), host.getHypervisorVersion());
}
if (guestOsMapping == null || host == null) {
- to.setPlatformEmulator("Other");
+ to.setPlatformEmulator(guestOS.getDisplayName());
} else {
to.setPlatformEmulator(guestOsMapping.getGuestOsName());
}
| [KVMGuru->[implement->[setVmQuotaPercentage],setVmQuotaPercentage->[getVmSpeed,getHostCPUSpeed],getCommandHostDelegation->[getHypervisorType]]] | Override to add a missing relation to a VM. | @ustcweizhou how about we check and fallback to `"Other"` in case `guestOS.getDisplayName()` is null or empty (unlikely but just to make code more defensive)? |
@@ -40,6 +40,10 @@ def archive_model(serialization_prefix: str,
logger.error("weights file %s does not exist, unable to archive model", weights_file)
return
+ config_file = os.path.join(serialization_prefix, "_model_params.json")
+ if not os.path.exists(config_file):
+ logger.error("config file %s does not exist, unable to archive model", config_file)
+
archive_file = os.path.join(serialization_prefix, "model.tar.gz")
logger.info("archiving weights and vocabulary to %s", archive_file)
with tarfile.open(archive_file, 'w:gz') as archive:
| [archive_model->[error,info,add,join,exists,open],load_archive->[Archive,extractall,mkdtemp,from_file,cached_path,duplicate,join,info,open,load,rmtree],NamedTuple,getLogger] | Archives the model weights training configuration and its vocabulary to a tar. gz file. | The prepended underscore here is a holdover from when the "serialization prefix" was actually a prefix, not a directory. It should be removed (and the other places we have underscores, like where we're saving stdout, stderr, and the logs). Also, we should rename `serialization_prefix`... Another issue is that if you're using a newer version of the code with an older serialized model, and the default parameters have changed (like recently happened with our tokenizer), the loading could break. It'd be a bit more robust to parse the log file to get the _actual_ parameters that were used (including defaults). But that's way too much work to worry about right now. |
@@ -220,6 +220,9 @@ public abstract class AbstractMappingMessageRouter extends AbstractMessageRouter
MessageChannel channel = resolveChannelForName(channelName, message);
if (channel != null) {
channels.add(channel);
+ if (!mapped && !this.dynamicChannels.contains(channelName)) {
+ this.dynamicChannels.add(channelName);
+ }
}
}
| [AbstractMappingMessageRouter->[addChannelFromString->[addChannelFromString,resolveChannelForName],addToCollection->[addChannelFromString,addToCollection],determineTargetChannels->[getChannelKeys]]] | Adds a channel to the given collection of channels based on the given channel key. This method adds a channel to the list of channels. | I wonder if this logic should be kinda cache with some simple eviction policy in face of expiration time. The general point that we may route to some channel only once, but it stands in the `List` forever. |
@@ -3305,6 +3305,14 @@ def log_sigmoid(x, name=None):
Specifically, `y = log(1 / (1 + exp(-x)))`. For numerical stability,
we use `y = -tf.nn.softplus(-x)`.
+ Usage Example:
+ >>> tensor1 = [0.0, 1.0, 2.0, 3.0, 4.0, 5.0]
+ >>> tensor2 = [2.0, 5.7]
+ >>> print(tf.math.log_sigmoid(tensor1, name='log_sigmoid_tensor1'))
+ tf.Tensor([-0.6931472 -0.31326166 -0.12692805 -0.04858733 -0.01814996 -0.00671535], shape=(6,), dtype=float32)
+ >>> print(tf.math.log_sigmoid(tensor2, name='log_sigmoid_tensor2'))
+ tf.Tensor([-0.12692805 -0.00334038], shape=(2,), dtype=float32)
+
Args:
x: A Tensor with type `float32` or `float64`.
name: A name for the operation (optional).
| [reduce_max->[_ReductionDims],truediv->[_truediv_python3],reduce_sum_with_dims->[_may_reduce_to_scalar],tensor_not_equals->[not_equal],tensordot->[_tensordot_axes->[range],_tensordot_reshape->[range,reduce_prod],matmul,_tensordot_axes,_tensordot_reshape],reduced_shape->[range,cast],sparse_segment_mean->[sparse_segment_mean],reduce_min->[_may_reduce_to_scalar,_ReductionDims],bincount->[reduce_max,reduce_prod,bincount,cast],conj->[conj],reduce_euclidean_norm->[_may_reduce_to_scalar,_ReductionDims],bincount_v1->[bincount],div->[_div_python2],to_complex128->[cast],reduce_any->[_may_reduce_to_scalar,_ReductionDims],matvec->[matmul],erfinv->[erfinv],cumprod->[cumprod],_may_reduce_to_scalar->[_has_fully_defined_shape],sparse_segment_sqrt_n_v2->[sparse_segment_sqrt_n],to_double->[cast],reduce_sum->[_ReductionDims],reduce_all->[_may_reduce_to_scalar,_ReductionDims],cast->[cast],sparse_segment_mean_v2->[sparse_segment_mean],reduce_variance->[reduce_mean],reduce_prod->[_may_reduce_to_scalar,_ReductionDims],sparse_segment_sqrt_n->[sparse_segment_sqrt_n],div_no_nan->[div_no_nan],trace->[reduce_sum],add_n->[add_n],sqrt->[sqrt],reduce_prod_v1->[reduce_prod],angle->[angle],_sparse_dense_truediv->[cast],to_bfloat16->[cast],scalar_mul_v2->[scalar_mul],tensor_equals->[equal],_ReductionDims->[range],accumulate_n->[add_n,_input_error],exp->[exp],_as_indexed_slices_list->[_as_indexed_slices,cast],sigmoid->[sigmoid],saturate_cast->[cast],unsorted_segment_sqrt_n->[_unsorted_segment_N],reduce_logsumexp->[_may_reduce_to_scalar,reduce_max_with_dims,_ReductionDims,reduce_sum_with_dims],sign->[sign],matmul->[cast],to_complex64->[cast],real->[real],_range_tensor_conversion_function->[range],round->[round],not_equal->[not_equal],_truediv_python3->[cast],cumsum->[cumsum],reciprocal_no_nan->[div_no_nan],reduce_std->[reduce_variance],xlog1py->[xlog1py],to_int64->[cast],sparse_segment_sum_v2->[sparse_segment_sum],reduce_max_with_dims->[_may_reduce_to_scalar],sobol_sample->[sobol_sample],equal->[equ
al],divide->[DivideDelegateWithName],to_float->[cast],reduce_mean->[_may_reduce_to_scalar,_ReductionDims],logical_and->[logical_and],sparse_segment_sum->[sparse_segment_sum],_as_indexed_slices->[range],count_nonzero_v2->[reduce_sum,cast,not_equal],to_int32->[cast],ceil->[ceil],imag->[imag],cumulative_logsumexp->[cumulative_logsumexp],unsorted_segment_mean->[_unsorted_segment_N],range->[cast],ndtri->[ndtri],_set_doc,_OverrideBinaryOperatorHelper] | Computes the log sigmoid of x element - wise. | You don't need to use print() here. |
@@ -68,13 +68,13 @@ func GetFileUserSid(name string) (*windows.SID, error) {
return userSID, nil
}
-func IsPipeowner(name string) (bool, error) {
+func IsPipeowner(log logger.Logger, name string) (bool, error) {
userSid, err := currentProcessUserSid()
if err != nil {
return false, err
}
- fileSid, err := GetFileUserSid(name)
+ pipeSid, err := GetFileUserSid(name)
if err == PipeBusyError {
// If at least one instance of the pipe has been created, this function
// will wait timeout milliseconds for it to become available.
| [NewLazyDLL,GetTokenUser,NewProc,Pointer,Close,EqualSid,OpenCurrentProcessToken,UTF16PtrFromString,Call] | GetUserSid returns the SID of the user in the named pipe. | it would be nice to put debugs on the entry and exit of this function. |
@@ -300,7 +300,7 @@ class DataclassTransformer:
Context(line=attr.line, column=attr.column),
)
- found_default = found_default or attr.has_default
+ found_default = found_default or attr.has_default and attr.is_in_init
return all_attrs
| [DataclassTransformer->[_freeze->[to_var],collect_attributes->[DataclassAttribute,deserialize],transform->[serialize,to_argument]],dataclass_class_maker_callback->[DataclassTransformer,transform]] | Collect all attributes declared in the dataclass and its parents. A base class for all dataclass attributes. Emulate frozen classes. | Style nit: add parentheses to make precedence explicit. |
@@ -11,6 +11,7 @@ using System.Reflection.PortableExecutable;
using Internal.TypeSystem;
using Internal.TypeSystem.Ecma;
+using System.Runtime.InteropServices;
namespace ILCompiler
{
| [CompilerTypeSystemContext->[Instantiation->[ConvertInstantiationToCanonForm,Disabled,CanonicalReferenceTypes,Assert],ModuleHashtable->[GetValueHashCode->[GetHashCode],ModuleData->[Fail],GetKeyHashCode->[GetHashCode],CompareKeyToValue->[ReferenceEquals,Module],CompareValueToValue->[ReferenceEquals,Module]],TypeDesc->[Disabled,CanonicalReferenceTypes,Assert,ConvertToCanon],PEReader->[DangerousGetHandle,ByteLength,None,SafeMemoryMappedViewHandle,Dispose,Open,Read,CreateFromFile,Length,CreateViewAccessor],VirtualMethodAlgorithm->[IsArray,Assert],EcmaModule->[AddModule,Create,GetOrAddModuleFromPath,FilePath,TryGetValue,OpenAssociatedSymbolFile,OpenPEFile,Name,Dispose,GetString,Equals,Module,MetadataReader,OrdinalIgnoreCase,Get,AddOrGetExisting,FileLoadErrorGeneric,ThrowFileNotFoundException],PdbSymbolReader->[ChangeExtension,CodeView,Path,Exists,TryOpen,IsPathRooted,TryOpenSymbolReaderForMetadataFile,GetMetadataStringDecoder,GetDirectoryName,ReadDebugDirectory,Type],SimpleNameHashtable->[GetValueHashCode->[SimpleName,GetHashCode],ModuleData->[Fail],GetKeyHashCode->[GetHashCode],CompareKeyToValue->[SimpleName,Equals],CompareValueToValue->[SimpleName,Equals],OrdinalIgnoreCase],ModuleDesc->[GetModuleForSimpleName,Name],Disabled]] | Creates an instance of a class that represents a set of metadata objects that can be read from Returns true if the key is not found in the hash map. | Is this needed? |
@@ -76,11 +76,10 @@ func startClusterOperatorMonitoring(ctx context.Context, m Recorder, client conf
var conditions []monitorapi.Condition
for i := range co.Status.Conditions {
c := &co.Status.Conditions[i]
- previous := findOperatorStatusCondition(oldCO.Status.Conditions, c.Type)
- if previous == nil {
- continue
- }
- if c.Status != previous.Status {
+ previousCondition := findOperatorStatusCondition(oldCO.Status.Conditions, c.Type)
+ // If we don't have a previous state, then we should always mark the starting state with an event.
+ // We recently had a PR that caused the kube-apiserver operator be permanently degraded and it didn't show up.
+ if previousCondition == nil || c.Status != previousCondition.Status {
var msg string
switch {
case len(c.Reason) > 0 && len(c.Message) > 0:
| [Before,Now,AddSampler,NewSharedIndexInformer,HasPrefix,Add,Done,ClusterStatusConditionType,AddEventHandler,Watch,SplitN,ConfigV1,Join,GetStore,ClusterOperators,Split,ConditionStatus,OperatorLocator,Sprintf,List,ClusterVersions,Run,Record] | startClusterOperatorMonitoring starts monitoring the given condition. missing conditions are not supported. | do we want to log here that previous condition was missing so when we go through the log we have more context? |
@@ -106,8 +106,13 @@ class LinkCandidate(Candidate):
return self._dist
def get_dependencies(self):
- # type: () -> Sequence[InstallRequirement]
- return [self._make_install_req(str(r)) for r in self.dist.requires()]
+ # type: () -> Sequence[Requirement]
+ return [
+ self._factory.make_requirement(
+ self._factory.make_install_req(str(r), self._ireq),
+ )
+ for r in self.dist.requires()
+ ]
def get_install_requirement(self):
# type: () -> Optional[InstallRequirement]
| [LinkCandidate->[__ne__->[__eq__],__init__->[make_install_req_from_link]]] | Returns a sequence of all dependencies of the distribution. | We can probably just merge `factory.make_requirement(factory.make_install_req(spec, ireq))` into `factory.make_requirement(spec, ireq)`. |
@@ -0,0 +1,13 @@
+module Idv
+ module Acuant
+ class FakeLiveness
+ # rubocop:disable all
+ GOOD_LIVENESS_DATA = {'LivenessResult':{'Score':99,'LivenessAssessment':'Live'},'Error':nil,'ErrorCode':nil,'TransactionId':'4a11ceed-7a54-45fa-9528-3945b51a1e23'}
+ # rubocop:enable
+
+ def liveness(body)
+ [true, GOOD_LIVENESS_DATA.to_json]
+ end
+ end
+ end
+end
| [No CFG could be retrieved] | No Summary Found. | WDYT of adding line breaks in here? |
@@ -43,7 +43,7 @@ public class CronValidator extends AbstractValidator
Object value = element.getValue();
String str = String.valueOf(value);
- if (!CronExpression.isValidExpression(str))
+ if (!StringUtils.isEmpty(str) && !CronExpression.isValidExpression(str))
{
ctx.addResult(new Message(element.path(), "\"%1$s\" is not in Cron format", str));
}
| [CronValidator->[validate->[getValue,path,Message,dataElement,addResult,isValidExpression,valueOf]]] | Checks if the element is missing a cron expression. | Why this change? The original intention is that if a schedule is set then it should have a valid cron expression. Run once occurs when a schedule is not set. |
@@ -22,6 +22,12 @@ export const GTAG_CONFIG = /** @type {!JsonObject} */ ({
'dr': 'DOCUMENT_REFERRER',
'dl': 'SOURCE_URL',
},
+ 'conversion': {
+ 'gclsrc': 'QUERY_PARAM(gclsrc)',
+ 'hasGcl': '$IF(QUERY_PARAM(gclid, 1, 0))',
+ 'hasDcl': '$IF(QUERY_PARAM(dclid, 1, 0))',
+ 'enabled': true,
+ },
},
},
'vars': {
| [No CFG could be retrieved] | A GTAG config for a specific . The url - based version of the standard library. | Looks like the parenthesis is still in the wrong place :( |
@@ -24,10 +24,10 @@ type ComponentMatch struct {
// The source of the match. Generally only a single source is
// available.
- Image *imageapi.DockerImage
- ImageStream *imageapi.ImageStream
+ DockerImage *dockerv10.DockerImage
+ ImageStream *imagev1.ImageStream
ImageTag string
- Template *templateapi.Template
+ Template *templatev1.Template
// Input to generators extracted from the source
Builder bool
| [Exact->[Exact],Inexact->[Exact]] | app imports a component from the api. exact returns all ComponentMatches that are not an exact match. | There are places in the code where you're using `imageapi.DockerIamge` please fix them to use this type only. |
@@ -1158,6 +1158,17 @@ class ProductVariantCreate(ModelMutation):
):
cleaned_input = super().clean_input(info, instance, data)
+ weight = cleaned_input.get("weight")
+ if weight and weight.value < 0:
+ raise ValidationError(
+ {
+ "weight": ValidationError(
+ "Product variant can't have negative weight.",
+ code=ProductErrorCode.INVALID.value,
+ )
+ }
+ )
+
if "cost_price" in cleaned_input:
cost_price = cleaned_input.pop("cost_price")
if cost_price and cost_price < 0:
| [CollectionCreate->[save->[save],Arguments->[CollectionCreateInput]],ProductImageDelete->[perform_mutation->[ProductImageDelete]],ProductImageReorder->[perform_mutation->[save,ProductImageReorder]],ProductImageCreate->[Arguments->[ProductImageCreateInput],perform_mutation->[ProductImageCreate]],VariantImageAssign->[perform_mutation->[VariantImageAssign]],CollectionReorderProducts->[perform_mutation->[CollectionReorderProducts]],ProductTypeUpdate->[Arguments->[ProductTypeInput]],ProductUpdate->[save->[save],Arguments->[ProductInput]],CollectionUpdate->[save->[save],Arguments->[CollectionInput]],VariantImageUnassign->[perform_mutation->[VariantImageUnassign]],AttributeAssignmentMixin->[save->[_pre_save_values],_validate_input->[_check_input_for_product,_check_input_for_variant],clean_input->[_resolve_attribute_global_id,_resolve_attribute_nodes,_validate_input]],ProductImageUpdate->[Arguments->[ProductImageUpdateInput],perform_mutation->[save,ProductImageUpdate]],CategoryCreate->[save->[save],Arguments->[CategoryInput]],ProductVariantUpdate->[Arguments->[ProductVariantInput]],CollectionAddProducts->[perform_mutation->[CollectionAddProducts]],CategoryUpdate->[Arguments->[CategoryInput]],ProductCreate->[save->[save],Arguments->[ProductCreateInput],clean_attributes->[clean_input],clean_input->[clean_attributes]],ProductTypeCreate->[Arguments->[ProductTypeInput]],ProductVariantCreate->[save->[save,create_variant_stocks],Arguments->[ProductVariantCreateInput],clean_attributes->[clean_input],clean_input->[check_for_duplicates_in_stocks,clean_attributes,validate_duplicated_attribute_values]],CollectionRemoveProducts->[perform_mutation->[CollectionRemoveProducts]]] | This method is called to clean the input of a . clean_input - Clean input for product_type . | can be extracted to something like `validate_weight` and used here and there as there are already 3 places with the same code |
@@ -16,12 +16,12 @@ import (
"github.com/Unknwon/com"
"github.com/go-xorm/xorm"
- "github.com/go-gitea/git"
- api "github.com/go-gitea/go-sdk/gitea"
+ "code.gitea.io/git"
+ api "code.gitea.io/go-sdk/gitea"
- "github.com/go-gitea/gitea/modules/base"
- "github.com/go-gitea/gitea/modules/log"
- "github.com/go-gitea/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
)
type ActionType int
| [GetRepoLink->[GetRepoPath],GetIssueTitle->[GetIssueInfos],AvatarLink->[AvatarLink],GetIssueContent->[GetIssueInfos],ShortRepoPath->[ShortRepoName,ShortRepoUserName],ToApiPayloadCommits] | This function is exported to the main function of the functions below. represents user operation type and other information to repository. | This is totally wrong... |
@@ -93,6 +93,11 @@ func dataSourceAwsInstance() *schema.Resource {
Computed: true,
Elem: &schema.Schema{Type: schema.TypeString},
},
+ "ipv6_addresses": {
+ Type: schema.TypeList,
+ Computed: true,
+ Elem: &schema.Schema{Type: schema.TypeString},
+ },
"iam_instance_profile": {
Type: schema.TypeString,
Computed: true,
| [IgnoreAws,BoolValue,Ec2KeyValueTags,HasPrefix,Set,Int64Value,GetOk,DescribeInstances,Errorf,SetId,InstanceProfileARNToName,IgnoreConfig,DescribeInstanceAttribute,Id,Get,Map,Printf,StringValue,Sprintf,String,WriteString] | The tags schema computed function. The network interface schema has a special number of fields that can be computed by the network_. | Unless the order of the returned IPv6 addresses is significant, we prefer `schema.TypeSet` for collections. |
@@ -37,6 +37,7 @@ class UpdateCommand extends Command
new InputOption('no-custom-installers', null, InputOption::VALUE_NONE, 'Disables all custom installers.'),
new InputOption('no-scripts', null, InputOption::VALUE_NONE, 'Skips the execution of all scripts defined in composer.json file.'),
new InputOption('verbose', 'v', InputOption::VALUE_NONE, 'Shows more details including new commits pulled in when updating packages.'),
+ new InputOption('optimize-autoloaders', null, InputOption::VALUE_NONE, 'Optimize autoloaders during autoloader dump')
))
->setHelp(<<<EOT
The <info>update</info> command reads the composer.json file from the
| [UpdateCommand->[execute->[disableCustomInstallers,getOption,getArgument,getComposer,run,getIO,setUpdateWhitelist],configure->[setHelp]]] | Configures the command that will update the current lease. | you should also add the `-o` shortcut, like in `dump-autoload` (especially as the option name is even longer here) |
@@ -1082,7 +1082,7 @@ def plot_evoked_joint(evoked, times="peaks", title='', picks=None,
ts_args_def = dict(picks=None, unit=True, ylim=None, xlim='tight',
proj=False, hline=None, units=None, scalings=None,
titles=None, gfp=False, window_title=None,
- spatial_colors=True)
+ spatial_colors=True, zorder='std')
for key in ts_args_def:
if key not in ts_args:
ts_args_pass[key] = ts_args_def[key]
| [plot_evoked_joint->[plot_evoked_joint,_connection_line,_plot_evoked],plot_evoked_image->[_plot_evoked],_plot_evoked_white->[whitened_gfp],plot_evoked->[_plot_evoked],_plot_evoked->[_plot_legend,_rgb]] | Plot evoked data as butterfly plot and add topomaps for selected time points Plots a single object. Plot a butterfly topomap of the . plot the topomap of the time series and the connection lines between the time series and. | Why does this default to 'std'? The docs say it defaults to 'unsorted'. |
@@ -522,6 +522,14 @@ class TorchAgent(ABC, Agent):
help='Decay factor for LR scheduler, or how much LR is multiplied by '
'when it is lowered.',
)
+ lr_group.add_argument(
+ '--max-lr-steps',
+ type=int,
+ default=-1,
+ help='Number of train steps the scheduler should take after warmup. '
+ 'Training is terminated after this many steps. This should only be '
+ 'set for --lr_scheduler invsqrt, cosine, or linear',
+ )
lr_group.add_argument(
'--warmup-updates',
type=int,
| [History->[_update_vecs->[parse],update_history->[_update_raw_strings,_update_vecs,_update_strings],add_reply->[_update_raw_strings,_update_vecs,_update_strings]],TorchAgent->[_vectorize_text->[_add_start_end_tokens],_set_text_vec->[get_history_str,_check_truncate,get_history_vec],vectorize->[_set_text_vec,_set_label_cands_vec,_set_label_vec],reset->[reset],build_history->[history_class],build_dictionary->[dictionary_class],act->[self_observe],_set_label_vec->[_vectorize_text,_check_truncate],batch_act->[batchify,match_batch],batchify->[is_valid,Batch],zero_grad->[zero_grad],backward->[backward],load->[load,load_state_dict],init_optim->[optim_opts],observe->[update_history,vectorize],self_observe->[reset,add_reply],state_dict->[state_dict],_set_label_cands_vec->[_vectorize_text,_check_truncate],save->[save,state_dict],update_params->[_is_lr_warming_up],load_state_dict->[load_state_dict],_copy_embeddings->[_get_embtype,_project_vec],add_cmdline_args->[optim_opts,dictionary_class],receive_metrics->[_is_lr_warming_up]]] | Add command line arguments for the TorchAgent. Adds arguments to the optim group and the optimizer group for the given . Adds a sequence of arguments to the command line. Adds arguments related to a sequence of flags to the command line. | It might make sense to move the whole LR group parameters into the LRScheduler.add_cmdline_args, and then just call that from here. |
@@ -207,7 +207,7 @@ $space_check = (disk_free_space($config['install_dir']) / 1024 / 1024);
if ($space_check < 512 && $space_check > 1) {
print_warn('Disk space where '.$config['install_dir'].' is located is less than 512Mb');
}
-
+d
if ($space_check < 1) {
print_fail('Disk space where '.$config['install_dir'].' is located is empty!!!');
}
| [format,rewind] | Test if the system has a known number of unique identifiers. - - - - - - - - - - - - - - - - - -. | Think this was added by accident :) |
@@ -169,7 +169,7 @@ public final class KeyValueContainerUtil {
File metadataPath = new File(kvContainerData.getMetadataPath());
// Verify Checksum
- ContainerUtils.verifyChecksum(kvContainerData);
+ ContainerUtils.verifyChecksum(kvContainerData, config);
File dbFile = KeyValueContainerLocationUtil.getContainerDBFile(
metadataPath, containerID);
| [KeyValueContainerUtil->[createContainerMetaData->[IllegalArgumentException,error,equals,ReferenceCountedDB,getParentFile,addDB,checkNotNull,mkdirs,IOException,DatanodeStoreSchemaTwoImpl,DatanodeStoreSchemaOneImpl,deleteDirectory,getAbsolutePath],parseKVContainerData->[getSchemaVersion,size,updateDeleteTransactionId,warn,setDbFile,updateBlockCommitSequenceId,verifyChecksum,getUncachedDatanodeStore,getContainerID,File,getContainerDBFile,setSchemaVersion,setBytesUsed,error,setKeyCount,stop,incrPendingDeletionBlocks,getStore,getMetadataPath,exists,close,initializeUsedBytesAndBlockCount,RuntimeException,getMetadataTable,getDB,get,getDeletingKeyFilter],getBlockLength->[getChunks,getLen,getFromProtoBuf],getMetadataDirectory->[checkNotNull,get,getMetadataPath],getDataDirectory->[checkNotNull,get,getChunksPath],initializeUsedBytesAndBlockCount->[hasNext,setBytesUsed,error,getBlockLength,setKeyCount,getContainerID,getDeletingKeyFilter,getBlockIterator,getUnprefixedKeyFilter,nextBlock],removeContainer->[getChunksPath,removeDB,getParentFile,checkNotNull,getMetadataPath,deleteDirectory,File],getLogger]] | Parse the KeyValueContainerData and fill in the necessary fields. region KvContainerData Implementation. | Any reason why moved the config check into the method? If we keep it here, it is more readable. `if (enabled) { verifyChecksum }` What do you think? |
@@ -146,10 +146,10 @@ void process_lcd_eb_command(const char* command) {
char message_buffer[MAX_CURLY_COMMAND];
sprintf_P(message_buffer,
- PSTR("{T0:%03.0f/%03i}{T1:000/000}{TP:%03.0f/%03i}{TQ:%03i}{TT:%s}"),
- thermalManager.degHotend(0), thermalManager.degTargetHotend(0),
+ PSTR("{T0:%03i/%03i}{T1:000/000}{TP:%03i/%03i}{TQ:%03i}{TT:%s}"),
+ int(thermalManager.degHotend(0)), thermalManager.degTargetHotend(0),
#if HAS_HEATED_BED
- thermalManager.degBed(), thermalManager.degTargetBed(),
+ int(thermalManager.degBed()), thermalManager.degTargetBed(),
#else
0, 0,
#endif
| [No CFG could be retrieved] | Process an LCD command. Process an LCD J command. | This is the right thing to do - I'll test it but I expect it to just work, and it fixes the complaint when people don't select the right menu options when building. |
@@ -15,6 +15,7 @@ namespace CoreNodeModels
[NodeDescription("WatchNodeDescription", typeof(Resources))]
[NodeSearchTags("WatchNodeSearchTags", typeof(Resources))]
[IsDesignScriptCompatible]
+ [OutPortTypes("var")]
[AlsoKnownAs("Dynamo.Nodes.Watch", "DSCoreNodesUI.Watch")]
public class Watch : NodeModel
{
| [Watch->[OnEvaluationComplete->[CachedValue,EvaluationComplete,HasRunOnce],BuildOutputAst->[BuildStringNode,BuildFunctionObject,GenerateBridgeDataAst,BuildAssignment,GetAstIdentifierForOutputIndex,ToString,BuildIdentifier,BuildNullNode],Dispose->[ToString,Dispose,UnregisterCallback],OnBuilt->[ToString,RegisterCallback,OnBuilt],RegisterAllPorts,CORE_VIEW,WatchPortDataInputToolTip,Disabled,WatchPortDataResultToolTip,Add]] | Creates a new Watch object. if - Returns an array of identifiers if the object is not partially applied. | need to add "node output: var" to library tooltip |
@@ -14,7 +14,8 @@
<fieldset class="crayons-field">
<legend class="crayons-field__label">Site Theme</legend>
<div class="theme-selector-field grid gap-4 grid-cols-2 l:grid-cols-3">
- <% Users::Setting.config_themes.keys.each do |theme| %>
+ <%# This is temporary while we're migrating themes %>
+ <% %w[light_theme dark_theme].each do |theme| %>
<%= render partial: "theme_selector", locals: { f: f, theme: theme } %>
<% end %>
</div>
| [No CFG could be retrieved] | Displays a hidden field with a theme selector and a navbar with a customizable form for Renders the nag records that are part of the system. | We can't delete the other themes just yet because of the timing of our DUS but we don't want users to select them anymore. |
@@ -700,10 +700,7 @@ class CookedPostProcessor
def post_process_images
extract_images.each do |img|
- unless add_image_placeholder!(img)
- limit_size!(img)
- convert_to_link!(img)
- end
+ convert_to_link!(img) unless add_image_placeholder!(img)
end
end
| [CookedPostProcessor->[grant_badges->[has_emoji?],create_icon_node->[create_node],downloaded_images->[downloaded_images],post_process_oneboxes->[add_image_placeholder!,limit_size!],create_span_node->[create_node],post_process_images->[add_image_placeholder!,convert_to_link!,limit_size!],optimize_image!->[loading_image,each_responsive_ratio],create_link_node->[create_node]]] | This method is called after the user has created a node with a link to the image and. | Does this mean we now onebox every image in a post, regardless of size? |
@@ -415,8 +415,9 @@ TEMPLATE_DIRS = (
)
STATICFILES_FINDERS = (
- 'django.contrib.staticfiles.finders.AppDirectoriesFinder',
- 'django.contrib.staticfiles.finders.FileSystemFinder',
+ 'pipeline.finders.AppDirectoriesFinder',
+ 'pipeline.finders.FileSystemFinder',
+ 'pipeline.finders.CachedFileFinder',
'pipeline.finders.PipelineFinder',
)
| [get_user_url->[reverse],get_locales->[_Language,items,join,open,load],JINJA_CONFIG->[MemcachedBytecodeCache,isinstance],abspath,path,node,_,dict,dirname,namedtuple,setup_loader,sorted,tuple,dumps,join,items,get_locales,%,lower,reverse_lazy] | Creates a list of all components of a given object. This function returns a list of all packages that are installed in the system. | It turns out this finder tries to ignore files in a `build` folder, effectively making our system moot since we use "build" as the name of the folder we generate the built files into |
@@ -67,8 +67,16 @@ namespace :deploy do
end
end
+ desc 'Modify permissions on /srv/idp'
+ task :mod_perms do
+ on roles(:web), in: :parallel do
+ execute :sudo, :chown, '-R', 'ubuntu:nogroup', deploy_to
+ end
+ end
+
before 'assets:precompile', :browserify
after 'deploy:updated', 'newrelic:notice_deployment'
after 'deploy:log_revision', :deploy_json
+ after :deploy, 'deploy:mod_perms'
end
# rubocop:enable Metrics/BlockLength
| [fetch,desc,upload!,ask,namespace,within,new,on,roles,to_json,before,task,set,require,after,match,execute] | end of function _browserify_. | should we make this a string `'deploy'` for consistency? |
@@ -679,7 +679,7 @@ class FileSystem(with_metaclass(abc.ABCMeta, BeamPlugin)):
def _match(pattern, limit):
"""Find all matching paths to the pattern provided."""
- if pattern.endswith('/'):
+ if pattern.endswith('/') or pattern.endswith('\\'):
pattern += '*'
# Get the part of the pattern before the first globbing character.
# For example scheme://path/foo* will become scheme://path/foo for
| [CompressedFile->[closed->[closed],_fetch_to_internal_buffer->[write],readline->[_fetch_to_internal_buffer,write,_read_from_internal_buffer,readline],write->[write],_rewind->[_clear_read_buffer,_rewind_file,_initialize_decompressor],__init__->[is_valid_compression_type],seek->[_rewind,read],close->[close,write,writeable,readable],__exit__->[close],read->[_fetch_to_internal_buffer,_read_from_internal_buffer,read],flush->[write,writeable,flush]],FileSystem->[match->[_match->[_list,FileMetadata,has_dirs,match,MatchResult,_url_dirname,match_files],BeamIOError,_match],_url_dirname->[_split_scheme,_combine_scheme],_get_compression_type->[is_valid_compression_type,detect_compression_type]]] | Returns a list of FileMetadata objects that match the patterns provided. Match the patterns and limits and return a list of matches. | Do we other places that needs this change? |
@@ -49,6 +49,11 @@ namespace Js
return this->prefixEnumerator == nullptr && this->arrayEnumerator == nullptr && this->propertyEnumerator.IsNullEnumerator();
}
+ bool JavascriptStaticEnumerator::CanUseJITFastPath() const
+ {
+ return this->propertyEnumerator.CanUseJITFastPath() && this->currentEnumerator == nullptr;
+ }
+
uint32 JavascriptStaticEnumerator::GetCurrentItemIndex()
{
if (currentEnumerator)
| [Clear->[Clear],Reset->[Reset],MoveAndGetNext->[MoveAndGetNextFromEnumerator,MoveAndGetNext],Initialize->[Initialize],GetCurrentItemIndex->[GetCurrentItemIndex],IsNullEnumerator->[IsNullEnumerator]] | IsNullEnumerator - check if the current Enumerator is null - enumerable. | nit : should you check the this->currentEnumerator check first? |
@@ -52,12 +52,16 @@ def _scopes_to_resource(*scopes):
__all__ = [
+ "_scopes_to_resource",
"AadClient",
"AadClientBase",
"AuthCodeRedirectServer",
"ConfidentialClientCredential",
+ "get_default_authority",
+ "InteractiveCredential",
"MsalTransportAdapter",
"MsalTransportResponse",
+ "normalize_authority",
"PublicClientCredential",
"wrap_exceptions",
]
| [_scopes_to_resource->[ValueError,endswith,len],normalize_authority->[,rstrip,ValueError,urlparse],get_default_authority->[normalize_authority,get]] | All classes in this module are declared as private. | It looks weird we want to expose a method start with '_' |
@@ -4878,6 +4878,10 @@ p {
return untrailingslashit( $base ) . '/xmlrpc.php';
}
+ public static function connection() {
+ return self::init()->connection_manager;
+ }
+
/**
* Creates two secret tokens and the end of life timestamp for them.
*
| [Jetpack->[generate_secrets->[generate_secrets],verify_json_api_authorization_request->[add_nonce],get_locale->[guess_locale_from_lang],authenticate_jetpack->[verify_xml_rpc_signature],wp_rest_authenticate->[verify_xml_rpc_signature],admin_page_load->[disconnect,unlink_user],jetpack_getOptions->[get_connected_user_data],build_connect_url->[build_connect_url],get_secrets->[get_secrets],delete_secrets->[delete_secrets],register->[do_stats,stat,validate_remote_register_response]]] | Get the URL of the XML - RPC API. | some indentation weirdness in this method |
@@ -17,6 +17,7 @@ public class DefaultWorker {
public static void main(String[] args) {
try {
System.setProperty("ray.worker.mode", "WORKER");
+ System.setProperty("ray.run-mode", "CLUSTER");
Thread.setDefaultUncaughtExceptionHandler((Thread t, Throwable e) -> {
LOGGER.error("Uncaught worker exception in thread {}: {}", t, e);
});
| [DefaultWorker->[main->[internal,RuntimeException,error,init,setProperty,run,info,setDefaultUncaughtExceptionHandler],getLogger]] | Main method of RayWorker. | Why add this line? |
@@ -277,7 +277,7 @@ namespace System.Xml
public override Encoder GetEncoder()
{
- return null;
+ return null!;
}
internal static Encoding UCS4_Littleendian
| [Ucs4Decoder1234->[GetFullChars->[Ucs4ToUTF16]],UTF16Decoder->[GetChars->[GetCharCount],GetCharCount->[GetCharCount]],Ucs4Decoder3412->[GetFullChars->[Ucs4ToUTF16]],Ucs4Encoding->[GetChars->[GetChars],GetCharCount->[GetCharCount]],Ucs4Decoder2143->[GetFullChars->[Ucs4ToUTF16]],Ucs4Decoder->[Convert->[GetFullChars],GetChars->[GetFullChars]],Ucs4Decoder4321->[GetFullChars->[Ucs4ToUTF16]]] | Returns a encoder for a sequence number. | Depending on how pervasive this particular subtype is, it might indicate that the base class might need to be annotated differently. |
@@ -492,7 +492,7 @@ export class AmpAnalytics extends AMP.BaseElement {
initializeLinker_() {
const type = this.element.getAttribute('type');
this.linkerManager_ = new LinkerManager(this.getAmpDoc(),
- this.config_, type, this.element);
+ /** @type {!JsonObject} */ (this.config_), type, this.element);
this.linkerManager_.init();
}
| [No CFG could be retrieved] | Handle nested requests. Handle a request for a JSON config block. | I think it should already have this type? |
@@ -59,12 +59,12 @@ function initCommands() {
'display-name':
APP.conference.changeLocalDisplayName.bind(APP.conference),
'toggle-audio': () => {
- sendAnalyticsEvent(API_TOGGLE_AUDIO);
+ sendAnalytics(createApiEvent('toggle-audio'));
logger.log('Audio toggle: API command received');
APP.conference.toggleAudioMuted(false /* no UI */);
},
'toggle-video': () => {
- sendAnalyticsEvent(API_TOGGLE_VIDEO);
+ sendAnalytics(createApiEvent('toggle-video'));
logger.log('Video toggle: API command received');
APP.conference.toggleVideoMuted(false /* no UI */);
},
| [No CFG could be retrieved] | Initializes the commands for the given . on request to check if command is available. | I'm surprised more api events don't have analytics. Maybe they need to be added for this pass? |
@@ -158,6 +158,7 @@ def test_transfer(raiden_network):
assert isinstance(a1_recv_messages[0], DirectTransfer)
+@pytest.mark.timeout(10)
@pytest.mark.parametrize('blockchain_type', ['mock'])
@pytest.mark.parametrize('channels_per_node', [2])
@pytest.mark.parametrize('number_of_nodes', [10])
| [test_mediated_transfer->[get_paths_of_length,assert_path_in_shortest_paths,get_channel,MediatedTransferTestHelper],test_receive_mediatedtransfer_invalid_address->[get_paths_of_length,sign_and_send,MediatedTransferTestHelper],test_receive_mediatedtransfer_outoforder->[get_paths_of_length,get_app_from_address,sign_and_send,MediatedTransferTestHelper],MediatedTransferTestHelper->[get_paths_of_length->[get_paths_of_length]],test_receive_hashlocktransfer_unknown->[sign_and_send],test_transfer_from_outdated->[sign_and_send],test_receive_mediatedtransfer_unknown->[sign_and_send],test_receive_directtransfer_unknown->[sign_and_send],test_receive_directtransfer_invalidlocksroot->[sign_and_send],test_receive_directtransfer_wrongtoken->[sign_and_send],test_receive_directtransfer_outoforder->[sign_and_send]] | Test that a token is in a medium transfer. | Why such a small timeout addition here? |
@@ -868,7 +868,7 @@ export default Component.extend(
`#${this.selectKit.uniqueID}-body`
);
- const placementStrategy = this.site.mobileView ? "absolute" : "fixed";
+ const placementStrategy = this?.site?.mobileView ? "absolute" : "fixed";
const verticalOffset = 3;
this.popper = createPopper(anchor, popper, {
| [No CFG could be retrieved] | Create a popper that will show the reference element when the popup is opened. Adjusts the popper width of the popper element based on the width of the modal. | site is not defined in wizard |
@@ -348,6 +348,12 @@ func (r *replicaChecker) checkOfflinePeer(region *RegionInfo) Operator {
if store.isUp() {
continue
}
+
+ // check the number of replicas firstly
+ if len(region.GetPeers()) > r.opt.GetMaxReplicas() {
+ return newRemovePeer(region, peer)
+ }
+
newPeer, _ := r.selectBestPeer(region)
if newPeer == nil {
return nil
| [selectBestReplacement->[selectBestPeer],Schedule->[GetResourceKind],checkBestReplacement->[selectBestReplacement,selectWorstPeer],SelectSourceRegion->[Len],adjustBalanceLimit->[Len],selectBestStore->[Len],checkOfflinePeer->[selectBestPeer],transferPeer->[GetResourceKind]] | checkOfflinePeer checks if there is a peer that is offline. | the peer here is in the offline store? |
@@ -485,8 +485,7 @@ func TestTxManager_BumpGasUntilSafe_confirmed(t *testing.T) {
gasThreshold := sentAt + config.EthGasBumpThreshold()
minConfs := config.MinOutgoingConfirmations() - 1
require.NoError(t, app.Store.ORM.CreateHead(cltest.Head(gasThreshold+minConfs-1)))
- ethMock, err := app.MockStartAndConnect()
- require.NoError(t, err)
+ require.NoError(t, app.StartAndConnect())
txm := store.TxManager
from := cltest.GetAccountAddress(t, store)
| [NewAddress,JustError,NewStore,New,Connect,ReloadNonce,Accounts,NotNil,Len,GetStore,NewEthTxManager,OnNewHead,Parallel,NoError,Register,EqualError,PublicLastSafeNonce,EthTx,AllCalled,TxFrom,NewTestConfig,NewConfig,StartAndConnect,Greater,NewBig,ToInt,NewLink,WithdrawLINK,NewApplicationWithConfigAndKey,NotEqual,MockCallerSubscriberClient,EthGasPriceDefault,Head,GetAvailableAccount,Twice,AssertError,HexToAddress,NewAccount,Return,EthGasBumpThreshold,NewInt,CreateTxWithNonce,Context,GetAndIncrementNonce,BumpGasUntilSafe,MockStartAndConnect,ChainID,EventuallyAllCalled,SetLastSafeNonce,Nil,MinOutgoingConfirmations,Once,Equal,Times,Hex,CheckAttempt,NewManagedAccount,DecodeString,AssertExpectations,NewApplicationWithKey,NextActiveAccount,Transactions,During,Run,True,Disconnect,NewHash,Unlock,KeysDir,Set,GetAccountAddress,MustDecode,Error,Int,AddTxAttempt,NewKeyStore,Uint64ToHex,On,ShouldCall,Contains,CreateHead,RegisterError,CreateTxWithGas,CreateTx,FindTx,Nonce] | TestTxManager_BumpGasUntilSafe_confirmed tests the transaction manager for the transaction TestTxManager_BumpGasUntilSafe_safe tests the state of a transaction by. | Maybe put StartAndConnect onto its own line? The pattern should probably be moved away from the code base, it would be clearer. |
@@ -246,7 +246,7 @@ Vector& ElasticIsotropic3D::CalculateValue(
r_flags.Set( ConstitutiveLaw::COMPUTE_STRESS, true );
ElasticIsotropic3D::CalculateMaterialResponsePK2(rParameterValues);
- noalias(rValue) = rParameterValues.GetStressVector();
+ noalias(rValue) = static_cast<Vector>(rParameterValues.GetStressVector());
// Previous flags restored
r_flags.Set( ConstitutiveLaw::COMPUTE_CONSTITUTIVE_TENSOR, flag_const_tensor );
| [InitializeMaterialResponseKirchhoff->[InitializeMaterialResponseCauchy],FinalizeMaterialResponseKirchhoff->[FinalizeMaterialResponseCauchy],CalculateElasticMatrix->[CheckClearElasticMatrix]] | Calculate the value of a node in the chain. get the value of the last non - null variable. | this is not correct... beware, ou cannot statically cast an array_1d to a Vector! |
@@ -197,7 +197,7 @@ class OpTest(unittest.TestCase):
def _get_io_vars(self, block, numpy_inputs):
inputs = {}
- for name, value in numpy_inputs.iteritems():
+ for name, value in numpy_inputs.items():
if isinstance(value, list):
var_list = [
block.var(sub_name) for sub_name, sub_value in value
| [OpTest->[_get_inputs->[_get_io_vars],_calc_output->[_get_outputs,_get_inputs,feed_var,_append_ops],check_output->[check_output_with_place,_get_places],_get_outputs->[_get_io_vars],infer_dtype_from_inputs_outputs->[infer_dtype->[try_call_once],infer_dtype],check_grad_with_place->[__assert_is_close,get_numeric_gradient],check_output_customized->[calc_output,_get_places],_get_gradient->[_get_inputs,feed_var,_append_ops],_append_ops->[infer_dtype_from_inputs_outputs],check_grad->[_get_places],check_output_with_place->[_calc_output,find_actual],__assert_is_close->[err_msg]],get_numeric_gradient->[__set_elem__,product,__get_elem__,get_output]] | Get the variables in the block that are in the numpy inputs. | add a todo to make them six.iteritems? |
@@ -66,6 +66,10 @@ def morlet(sfreq, freqs, n_cycles=7.0, sigma=None, zero_mean=False):
Ws = list()
n_cycles = np.atleast_1d(n_cycles)
+ if np.any([f <= 0 for f in freqs]):
+ raise ValueError("all computed frequencies in freqs must be "
+ "greater than 0.")
+
if (n_cycles.size != 1) and (n_cycles.size != len(freqs)):
raise ValueError("n_cycles should be fixed or defined for "
"each frequency.")
| [AverageTFR->[__iadd__->[_check_compat],__isub__->[_check_compat],plot_joint->[_plot,plot_joint],__add__->[copy,_check_compat],__sub__->[copy,_check_compat],plot_topo->[copy]],_preproc_tfr_instance->[copy,_preproc_tfr],_get_data->[data],tfr_morlet->[_tfr_aux],_tfr_aux->[_compute_tfr],tfr_multitaper->[_tfr_aux],tfr_array_morlet->[_compute_tfr],_time_frequency_loop->[_cwt],cwt->[_cwt],EpochsTFR->[__abs__->[copy],average->[copy,AverageTFR]],read_tfrs->[AverageTFR],_compute_tfr->[morlet,_make_dpss]] | Compute Morlet wavelets for a given frequency range and a given number of cycles Watermann Watermann Wert - Wert - Wert - W. | I would make sure freqs is an array and do: if np.any(freqs <= 0) list comprehensions are slow and usually avoided in numerical code. |
@@ -35,8 +35,11 @@ class PyMpi4py(PythonPackage):
homepage = "https://pypi.python.org/pypi/mpi4py"
url = "https://pypi.io/packages/source/m/mpi4py/mpi4py-1.3.1.tar.gz"
+ version('develop', git='https://github.com/mpi4py/mpi4py.git', branch='master')
version('2.0.0', '4f7d8126d7367c239fd67615680990e3')
version('1.3.1', 'dbe9d22bdc8ed965c23a7ceb6f32fc3c')
+ depends_on('python@2.7:2.8,3.3:')
depends_on('py-setuptools', type='build')
depends_on('mpi')
+ depends_on('py-cython',when='@develop', type='build')
| [PyMpi4py->[depends_on,version]] | This site is for a list of packages that are not installed on the system. | if you know that this will be in the next release (say `2.1.0`), you can do `when='@2.1.0:'` which at this time will only be satisfied by `@develop`. |
@@ -36,7 +36,7 @@ class TestDatasetUtils:
utils.download_url(url, tmpdir)
def test_download_url_dispatch_download_from_google_drive(self, mocker, tmpdir):
- url = "https://drive.google.com/file/d/1hbzc_P1FuxMkcabkgn9ZKinBwW683j45/view"
+ url = "https://drive.google.com/file/d/1GO-BHUYRuvzr1Gtp2_fqXRsr9TIeYbhV/view"
id = "1hbzc_P1FuxMkcabkgn9ZKinBwW683j45"
filename = "filename"
| [TestDatasetUtils->[test_download_url_dont_exist->[raises,download_url],test_download_url_dispatch_download_from_google_drive->[assert_called_once_with,download_url,patch],test_download_url->[skip,download_url,listdir,len],test_download_url_retry_http->[skip,download_url,listdir,len]],main] | Download file from Google drive using download_url. | Perhaps, you need to update `id` here. |
@@ -85,6 +85,8 @@ public class HiveDataset implements PrioritizedCopyableDataset {
public static final String DATASET_NAME_PATTERN_KEY = "hive.datasetNamePattern";
public static final String DATABASE = "Database";
public static final String TABLE = "Table";
+ public static final String DATASET_STAGING_PATH = "dataset.staging.path";
+ public static final String DATASET_PREFIX_TOBEREPLACED = "hive.dataset.copy.target.table.prefixToBeReplaced";
public static final String DATABASE_TOKEN = "$DB";
public static final String TABLE_TOKEN = "$TABLE";
| [HiveDataset->[getPartitionsFromDataset->[sortPartitions]]] | Creates a new instance of HiveDataset. The object is a singleton object that represents the sequence of all the columns in the. | Isn't the same thing already defined somewhere else ? |
@@ -891,15 +891,15 @@ public abstract class HttpObjectDecoder extends ByteToMessageDecoder {
this.maxLength = maxLength;
}
- public AppendableCharSequence parse(ByteBuf buffer) {
+ public AppendableCharSequence parse(Buffer buffer) {
final int oldSize = size;
seq.reset();
- int i = buffer.forEachByte(this);
+ int i = buffer.openCursor().process(this);
if (i == -1) {
size = oldSize;
return null;
}
- buffer.readerIndex(i + 1);
+ buffer.skipReadable(i + 1);
return seq;
}
| [HttpObjectDecoder->[userEventTriggered->[userEventTriggered],LineParser->[parse->[reset,parse],process->[increaseCount,process]],readHeaders->[isContentAlwaysEmpty],resetNow->[isSwitchingToNonHttp1Protocol,reset],splitHeader->[isDecodingRequest],HeaderParser->[parse->[reset]],decodeLast->[decodeLast]]] | Parse the sequence. | do we need to "close" the cursor or something similar ? |
@@ -11,7 +11,12 @@ namespace System.Runtime.Intrinsics.Arm
/// This class provides access to the ARM base hardware instructions via intrinsics
/// </summary>
[CLSCompliant(false)]
- public abstract class ArmBase
+#if USE_INTERNAL_ACCESSIBILITY
+ internal
+#else
+ public
+#endif
+ abstract class ArmBase
{
internal ArmBase() { }
| [No CFG could be retrieved] | Creates an abstract class that implements the standard C ++ API for the given unused parameter. Dead - count of zero - bit types in the system. | I think it would make sense to remove the CLSCompliant attribute when `USE_INTERNAL_ACCESSIBILITY` is enabled. Getting build warnings which I have to explicitly disable. |
@@ -28,6 +28,13 @@ func TestMain(m *testing.M) {
if err := StartStackContainers(); err != nil {
log.Fatal(err)
}
+ log.Println("INFO: cleaning up Elasticsearch...")
+ if err := cleanupElasticsearch(); err != nil {
+ log.Fatal(err)
+ }
+ if err := InitFleet(); err != nil {
+ log.Fatal(err)
+ }
log.Println("INFO: setting up fleet...")
if err := InitFleet(); err != nil {
log.Fatal(err)
| [Run,Exit,Fatal,Println] | TestMain is the main entry point for the tests. It starts the stack containers sets up. | How come we initialize fleet both here and in line 38? |
@@ -61,6 +61,10 @@ module SubmissionsHelper
end
end
+ def get_grouping_tags(grouping)
+ return grouping.tags
+ end
+
def get_grouping_group_name(assignment, grouping)
group_name = ''
if !grouping.has_submission?
| [construct_file_manager_table_rows->[construct_file_manager_table_row],get_grouping_final_grades->[get_grouping_state]] | Returns any tr attributes for the given node. | Redundant `return` detected. |
@@ -524,7 +524,7 @@ class TextSinkTest(unittest.TestCase):
def test_write_dataflow(self):
pipeline = TestPipeline()
- pcoll = pipeline | beam.core.Create('Create', self.lines)
+ pcoll = pipeline | 'Create' >> beam.core.Create(self.lines)
pcoll | 'Write' >> WriteToText(self.path) # pylint: disable=expression-not-assigned
pipeline.run()
| [TextSourceTest->[test_read_single_file->[_run_read_test],test_read_empty_single_file->[_run_read_test],test_read_single_file_windows_eol->[_run_read_test],test_read_single_file_last_line_no_eol_gzip->[_run_read_test],test_read_single_file_single_line_no_eol->[_run_read_test],test_read_single_file_larger_than_default_buffer->[_run_read_test],test_read_file_pattern->[_run_read_test],test_read_file_pattern_with_empty_files->[_run_read_test],test_read_empty_single_file_no_eol_gzip->[_run_read_test],test_dataflow_single_file_with_coder->[DummyCoder],test_read_single_file_with_empty_lines->[_run_read_test],test_read_single_file_last_line_no_eol->[_run_read_test],test_read_single_file_smaller_than_default_buffer->[_run_read_test],test_read_single_file_mixed_eol->[_run_read_test],test_read_single_file_single_line_no_eol_gzip->[_run_read_test]],TextSinkTest->[test_write_text_file->[_write_lines],test_write_gzip_file->[_write_lines],test_write_text_file_empty->[_write_lines],test_write_bzip2_file->[_write_lines],test_write_gzip_file_auto->[_write_lines],test_write_gzip_file_empty->[_write_lines],test_write_bzip2_file_auto->[_write_lines]]] | Test write dataflow. | The 'Create' Labels here are unneeded. |
@@ -33,9 +33,10 @@ func ConvertJSONSchemaPropsToOpenAPIv2Schema(in *apiextensions.JSONSchemaProps)
}
// dirty hack to temporarily set the type at the root. See continuation at the func bottom.
- // TODO: remove for Kubernetes 1.15
oldRootType := in.Type
if len(in.Type) == 0 {
+ shallowClone := *in
+ in = &shallowClone
in.Type = "object"
}
| [HasPrefix,ConvertJSONSchemaPropsWithPostProcess,String] | ConvertJSONSchemaPropsToOpenAPIv2Schema converts a JSON schemaProps object into an afddb578e9db18ffb6b7802d92R. | have no idea why we didn't do that in the first place. But I remember a discussion, but not the arguments. |
@@ -137,7 +137,7 @@ class RelatedDummy extends ParentDummy
/**
* Set relatedToDummyFriend.
*
- * @param relatedToDummyFriend the value to set
+ * @param relatedToDummyFriend $relatedToDummyFriend
*/
public function setRelatedToDummyFriend(RelatedToDummyFriend $relatedToDummyFriend)
{
| [No CFG could be retrieved] | set relatedToDummyFriend - set relatedToDummyFriend - set relatedToDummy. | It should be `@param RelatedToDummyFriend $relatedToDummyFriend` |
@@ -285,7 +285,7 @@ int ossl_rsa_sp800_56b_derive_params_from_pq(RSA *rsa, int nbits,
if (rsa->dmp1 == NULL)
rsa->dmp1 = BN_secure_new();
if (rsa->dmp1 == NULL)
- goto err;
+ goto err;
BN_set_flags(rsa->dmp1, BN_FLG_CONSTTIME);
if (!BN_mod(rsa->dmp1, rsa->d, p1, ctx))
goto err;
| [ossl_rsa_sp800_56b_generate_key->[ossl_rsa_sp800_56b_validate_strength,ossl_rsa_sp800_56b_derive_params_from_pq,ossl_rsa_fips186_4_gen_prob_primes]] | ossl_rsa_sp800_56b_derive_params_from_ Private methods of BN_read_bits if rsa = null return ret ;. | Too much indentation. That goes for all of this file |
@@ -73,7 +73,11 @@ def default_sources_for_target_type(tgt_type: Type[Target]) -> Tuple[str, ...]:
@frozen_after_init
@dataclass(order=True, unsafe_hash=True)
class PutativeTarget:
- """A potential target to add, detected by various heuristics."""
+ """A potential target to add, detected by various heuristics.
+
+ This class uses the term "target" in the loose sense. It can also represent an invocation of a
+ target-genernating macro.
+ """
# Note that field order is such that the dataclass order will be by address (path+name).
path: str
| [restrict_conflicting_sources->[DisjointSourcePutativeTarget,restrict_sources],make_content_str->[generate_build_file_stanza],edit_build_files->[make_content->[make_content_str],make_content,group_by_build_file,EditedBuildFiles],determine_all_owned_sources->[AllOwnedSources],PutativeTarget->[for_target_type->[default_sources_for_target_type],generate_build_file_stanza->[fmt_val->[fmt_val],fmt_val]],tailor->[merge,PutativeTargetsSearchPaths,Tailor,specs_to_dirs,group_by_build_file,EditBuildFilesRequest,alias_for,realias,PutativeTargets],rename_conflicting_targets->[PutativeTargets,rename,UniquelyNamedPutativeTargets]] | A tuple of tuples containing the path name and names of the files that should be created. This is a helper method to allow the creation of a new target from the given. | Noticed this typo. Going to fix and then merge without CI, to save a tree. |
@@ -39,7 +39,7 @@ public class ContainerReplicaHistoryListCodec
implements Codec<ContainerReplicaHistoryList> {
// UUID takes 2 long to store. Each timestamp takes 1 long to store.
- static final int SIZE_PER_ENTRY = 4 * Long.BYTES;
+ static final int SIZE_PER_ENTRY = 5 * Long.BYTES;
private final Codec<Long> lc = new LongCodec();
@Override
| [ContainerReplicaHistoryListCodec->[toPersistedFormat->[toPersistedFormat]]] | Serializes the contents of a container replica history list to a byte array. | Where is this used, is it in a map? Do we need to account for reference overheads? |
@@ -79,7 +79,8 @@ module.exports = UpgradeGenerator.extend({
this.log(`Regenerating application with JHipster ${version}...`);
let generatorCommand = 'yo jhipster';
if (semver.gte(version, FIRST_CLI_SUPPORTED_VERSION)) {
- generatorCommand = this.clientPackageManager === 'yarn' ? '$(yarn bin)/jhipster' : '$(npm bin)/jhipster';
+ const generatorDir = this.clientPackageManager === 'yarn' ? shelljs.exec('yarn bin') : shelljs.exec('npm bin');
+ generatorCommand = `${generatorDir.replace('\n', '')}/jhipster`;
}
shelljs.exec(`${generatorCommand} --with-entities --force --skip-install`, { silent: this.silent }, (code, msg, err) => {
if (code === 0) this.log(chalk.green(`Successfully regenerated application with JHipster ${version}`));
| [No CFG could be retrieved] | This function is responsible for generating the application with JHipster Check if the node package has a version number and if so generate it. | I would turn off console output: ~~~js shelljs.exec('yarn bin', {silent: true}) ~~~ |
@@ -22,7 +22,7 @@ class Portage(CMakePackage):
version('3.0.0', sha256='7a5a21ffbc35fa54a5136d937cfda6f836c7496ff2b5adf54deb4107501333da')
version('master', branch='master', submodules=True)
- variant('mpi', default=True, description='Support MPI')
+ variant('mpi', default=False, description='Support MPI')
variant('tangram', default=False, description='Use Tangram interface reconstruction package')
variant('jali', default=False, description='Include support for Jali mesh framework')
variant('flecsisp', default=False, description='Include support for FleCSI mesh framework')
| [Portage->[cmake_args->[append],variant,conflicts,depends_on,version]] | A framework that can build a highly customized hybrid parallel portage application with a single - This function is used to check if a sequence number has a required dependency on it. | Remind me why we are making this change? |
@@ -34,7 +34,7 @@ type Operations interface {
SetHostname(hostname string, aliases ...string) error
Apply(endpoint *NetworkEndpoint) error
- MountLabel(ctx context.Context, label, target string) error
+ MountLabel(ctx context.Context, devicePath, target string) error
Fork() error
SessionLog(session *SessionConfig) (dio.DynamicMultiWriter, error)
| [No CFG could be retrieved] | Invite returns a function to invoke after the session state has been persisted. | Please add the type (devicePath string) there to ease reading instead of saving some keypresses. |
@@ -108,8 +108,7 @@ class RequirementSet(object):
tags = pep425tags.get_supported()
if (self.check_supported_wheels and not wheel.supported(tags)):
raise InstallationError(
- "%s is not a supported wheel on this platform." %
- wheel.filename
+ "{} is not a supported wheel on this platform.".format(wheel.filename)
)
# This next bit is really a sanity check.
| [RequirementSet->[add_requirement->[add_unnamed_requirement,add_named_requirement]]] | Adds a new requirement to the requirements set. Add a dependency if it doesn t already exist. Return the existing requirement and the new one. | This should be split across multiple lines. |
@@ -946,7 +946,7 @@ class RaidenMessageHandler(object):
channel = graph.partneraddress_channel[message.sender]
- if not channel.isopen:
+ if not channel.can_transfer:
raise TransferWhenClosed(
'Direct transfer received for a closed channel: {}'.format(
pex(channel.channel_address),
| [RaidenMessageHandler->[message_secret->[register_secret,handle_secret],message_mediatedtransfer->[mediate_mediated_transfer,target_mediated_transfer],message_revealsecret->[register_secret]],StateMachineEventHandler->[handle_tokenadded->[register_channel_manager],handle_settled->[find_channel_by_address],handle_withdraw->[register_secret],handle_closed->[find_channel_by_address],handle_channelnew->[register_netting_channel,connection_manager_for_token],dispatch->[dispatch],handle_balance->[connection_manager_for_token],on_event->[sign,handle_secret,send_async]],RaidenService->[register_registry->[get_channel_details,get_block_number],handle_secret->[sign,register_secret,send_async],send_and_wait->[send_and_wait],get_channel_details->[register_channel_for_hashlock->[register_channel_for_hashlock]],transfer_async->[create_default_identifier],stop->[stop],_direct_or_mediated_transfer->[sign,send_async],mediate_mediated_transfer->[get_block_number],sign->[sign],register_secret->[sign,register_secret,send_async],register_netting_channel->[get_channel_details,get_block_number],target_mediated_transfer->[get_block_number],start_mediated_transfer->[RandomSecretGenerator,get_block_number,create_default_identifier],register_channel_manager->[get_channel_details,get_block_number],send_async->[send_async]]] | Direct transfer a message from a node to a channel. | Same thing with the direct transfer, a channel must accept a message if it can't transfer |
@@ -41,7 +41,6 @@ public class LocalJobLauncherTest extends JobLauncherTestBase {
this.properties = new Properties();
this.properties.load(new FileReader("gobblin-test/resource/gobblin.test.properties"));
this.properties.setProperty(ConfigurationKeys.JOB_HISTORY_STORE_ENABLED_KEY, "true");
- this.properties.setProperty(ConfigurationKeys.METRICS_ENABLED_KEY, "true");
this.properties
.setProperty(ConfigurationKeys.JOB_HISTORY_STORE_JDBC_DRIVER_KEY, "org.apache.derby.jdbc.EmbeddedDriver");
this.properties.setProperty(ConfigurationKeys.JOB_HISTORY_STORE_URL_KEY, "jdbc:derby:memory:gobblin1;create=true");
| [LocalJobLauncherTest->[tearDown->[getConnection],testCancelJob->[runTestWithCancellation,loadJobProps],startUp->[getProperty,prepareJobHistoryStoreDatabase,setProperty,Properties,FsStateStore,FileReader,load],loadJobProps->[putAll,setProperty,Properties,FileReader,load],testLaunchJob->[runTest,loadJobProps],testLaunchJobWithPullLimit->[setProperty,loadJobProps,runTestWithPullLimit],testLaunchJobWithFork->[runTestWithFork,setProperty,name,loadJobProps],testLaunchJobWithMultiWorkUnit->[runTest,toString,setProperty,loadJobProps]]] | Method to start up the configuration. | Why is this being removed? |
@@ -208,13 +208,12 @@ int PKCS12_add_safe(STACK_OF(PKCS7) **psafes, STACK_OF(PKCS12_SAFEBAG) *bags,
PKCS7 *p7 = NULL;
int free_safes = 0;
- if (!*psafes) {
+ if (*psafes == NULL) {
*psafes = sk_PKCS7_new_null();
- if (!*psafes)
+ if (*psafes == NULL)
return 0;
free_safes = 1;
- } else
- free_safes = 0;
+ }
if (nid_safe == 0)
#ifdef OPENSSL_NO_RC2
| [No CFG could be retrieved] | Add a key to the bag and the safe bags. Package private methods. | Why is the else removed? |
@@ -58,6 +58,7 @@ class Flow(Serializable):
self.tasks = set() # type: Set[Task]
self.edges = set() # type: Set[Edge]
+ self.set_key_tasks([])
for t in tasks or []:
self.add_task(t)
| [Flow->[copy->[copy],sorted_tasks->[copy,upstream_tasks,downstream_tasks,update],upstream_tasks->[edges_to],update->[add_edge,add_task],generate_task_ids->[generate_flow_id,sorted_tasks,edges_to,edges_from],chain->[add_edge],edges_to->[all_upstream_edges],set_dependencies->[add_edge,add_task],run->[parameters,run],edges_from->[all_downstream_edges],serialize->[parameters,serialize,_generate_obj_ids],downstream_tasks->[edges_from],add_edge->[copy,add_task]]] | Initialize a object with the given parameters. | I'd vote to include this as an init argument as well, so users don't have to declaratively call this method if they don't want to. |
@@ -178,7 +178,7 @@ void IntBounds::SetBound(
// Aggressively merge the constant lower or upper bound of the base value, adjusted by the offset
ValueInfo const * const baseValueInfo = baseValue->GetValueInfo();
- int constantBoundBase;
+ int constantBoundBase = 0xCCCCCCCC;
const bool success =
Lower
? baseValueInfo->TryGetIntConstantLowerBound(&constantBoundBase, true)
| [No CFG could be retrieved] | This is a private method for the IntBounds class. This method assumes that the value is in the bounds set and that the offset is in the. | >0xCCCCCCCC [](start = 28, length = 10) Why these value? |
@@ -1,10 +1,6 @@
module PersonalKeyValidator
extend ActiveSupport::Concern
- included do
- validate :valid_personal_key?
- end
-
private
def normalize_personal_key(personal_key = nil)
| [normalize_personal_key->[normalize,blank?],personal_key_generator->[new],valid_personal_key?->[regexp,verify],included,validate,extend] | normalize_personal_key returns nil if no such key exists. | I think the `PersonalKeyFormatter#regexp` should return a regex object. I'm assuming it returns a plain string for something in a view or with JS, should we break out two methods? `regexp_str` and `regexp`? |
@@ -171,7 +171,14 @@ def deserialize_properties(props_struct: struct_pb2.Struct) -> Any:
raise AssertionError("Unrecognized signature when unmarshaling resource property")
# Struct is duck-typed like a dictionary, so we can iterate over it in the normal ways.
- return {k: deserialize_property(v) for (k, v) in list(props_struct.items())}
+ output = {}
+ for k, v in list(props_struct.items()):
+ value = deserialize_property(v)
+ # We treat values that deserialize to "None" as if they don't exist.
+ if value is not None:
+ output[k] = value
+
+ return output
def deserialize_property(value: Any) -> Any:
| [serialize_property->[serialize_property],translate_output_properties->[translate_output_properties],deserialize_property->[deserialize_properties,deserialize_property],resolve_outputs->[deserialize_properties,serialize_property,translate_output_properties,deserialize_property]] | Deserializes a protobuf struct_pb2. Struct into a dictionary containing normal Python types Deserialize - deserialization of . | This code (and the corresponding serialization code) is also subtle. `deserialize_property`, when called on the `UNKNOWN` sentinel, produces None. If value is `None`, that implies that it was unknown to the engine. By not adding it to the output dictionary, when we resolve all of the outstanding futures on a resource when `RegisterResource` returns, we'll report that an output is known if its value is not None. So, from this direction, everything works fine - user code never observes UNKNOWN because applies on unknown outputs don't run during previews. From the serialization direction, all resources initialize their output properties to the value `None`, so if they are not provided they are not known to the SDK when calling `RegisterResource` and it does not make sense to include them in the `RegisterResource` RPC call. |
@@ -167,6 +167,15 @@ def add_parser(subparsers, parent_parser):
help=DIFF_DESCRIPTION,
formatter_class=argparse.RawDescriptionHelpFormatter,
)
+ diff_parser.add_argument(
+ "--targets",
+ nargs="*",
+ help=(
+ "Source paths to a data files or directories. Default None. "
+ "If not specified, compares all files and directories `). "
+ "that are under DVC control in the current working space."
+ ),
+ ).complete = completion.FILE
diff_parser.add_argument(
"a_rev",
help="Old Git commit to compare (defaults to HEAD)",
| [_show_md->[_digest],add_parser->[add_parser],CmdDiff->[_format->[_digest],run->[_show_md,_format]]] | Adds a parser to subparsers to diff a specific branch of a git repository or a workspace. | How about "Limit command scope to these tracked files or directories. Accepts one or more file paths." |
@@ -73,11 +73,12 @@ public abstract class GlobalConfigurationCategory implements ExtensionPoint, Mod
public static class Security extends GlobalConfigurationCategory {
@Override
public String getShortDescription() {
- return Messages.GlobalSecurityConfiguration_Description();
+ return hudson.security.Messages.GlobalSecurityConfiguration_Description();
}
public String getDisplayName() {
return hudson.security.Messages.GlobalSecurityConfiguration_DisplayName();
}
}
+
}
| [GlobalConfigurationCategory->[all->[lookup],get->[get],Unclassified->[getDisplayName->[ConfigureLink_DisplayName],getShortDescription->[ConfigureLink_Description]],Security->[getDisplayName->[GlobalSecurityConfiguration_DisplayName],getShortDescription->[GlobalSecurityConfiguration_Description]]]] | Get the short description of the global security configuration. | BTW not an issue introduced in this PR, but would be nice at some point to use `core/move-l10n.groovy` to put this message in the natural place. |
@@ -393,11 +393,11 @@ class ASTConverter:
def visit_Module(self, mod: ast3.Module) -> MypyFile:
body = self.fix_function_overloads(self.translate_stmt_list(mod.body))
-
+ ignores = [ti.lineno for ti in mod.type_ignores] + self.extra_type_ignores
return MypyFile(body,
self.imports,
False,
- {ti.lineno for ti in mod.type_ignores},
+ {*ignores},
)
# --- stmt ---
| [parse_type_string->[parse_type_comment],parse_type_comment->[ast3_parse],stringify_name->[stringify_name],TypeConverter->[visit_UnaryOp->[invalid_type,visit],visit_Num->[numeric_type],visit_Str->[parse_type_string],visit_Attribute->[invalid_type,visit],_extract_argument_name->[fail],visit_Subscript->[invalid_type,visit,translate_expr_list,fail],translate_argument_list->[visit],visit_raw_str->[parse_type_comment],visit_Constant->[parse_type_string,invalid_type],visit_List->[translate_argument_list],translate_expr_list->[visit],visit_Tuple->[translate_expr_list],visit_Call->[invalid_type,parent,visit,fail],visit->[invalid_type]],parse->[ast3_parse],ASTConverter->[visit_IfExp->[visit,set_line],visit_Str->[set_line],visit_Delete->[visit,translate_expr_list,set_line],visit_AsyncWith->[as_required_block,visit,parse_type_comment,set_line],visit_Raise->[visit,set_line],visit_While->[as_block,visit,as_required_block,set_line],visit_If->[as_block,visit,as_required_block,set_line],visit_Pass->[set_line],do_func_def->[fail,note,in_class,translate_expr_list,as_required_block,set_line,ast3_parse,is_no_type_check_decorator],visit_BinOp->[from_operator,visit,set_line],visit_Await->[visit,set_line],visit_ClassDef->[visit,translate_expr_list,as_required_block,set_line],visit_AugAssign->[from_operator,visit,set_line],visit_Dict->[translate_expr_list,set_line],visit_YieldFrom->[visit,set_line],visit_BoolOp->[translate_expr_list],visit_AsyncFor->[parse_type_comment,as_block,as_required_block,set_line,visit],visit_With->[as_required_block,visit,parse_type_comment,set_line],visit_Assign->[visit,translate_expr_list,parse_type_comment,set_line],visit_JoinedStr->[translate_expr_list,set_line],visit_Index->[visit],visit_Ellipsis->[set_line],visit_NamedExpr->[fail],visit_Import->[translate_module_id,set_line],visit_ImportFrom->[translate_module_id,set_line],as_block->[translate_stmt_list,set_line],visit_Try->[as_block,visit,as_required_block,set_line],visit_Lambda->[as_required_block,transf
orm_args,set_line],visit_List->[visit,set_line],translate_expr_list->[visit],group->[group,set_line],visit_Compare->[from_comp_operator,translate_expr_list,set_line],visit_DictComp->[visit,translate_expr_list,set_line],fail_arg->[fail],visit_Expr->[visit,set_line],visit_Continue->[set_line],visit_Starred->[visit,set_line],visit_FormattedValue->[visit,set_line],visit_ExtSlice->[translate_expr_list],visit_Subscript->[visit,set_line],visit_Yield->[visit,set_line],visit_NameConstant->[set_line],visit_AnnAssign->[visit,set_line],visit_GeneratorExp->[visit,translate_expr_list,set_line],visit_Return->[visit,set_line],visit_Bytes->[set_line],visit_Constant->[set_line],make_argument->[parse_type_comment,visit,fail],visit_For->[parse_type_comment,as_block,as_required_block,set_line,visit],visit_Tuple->[visit,set_line],visit_ListComp->[set_line],visit_Set->[translate_expr_list,set_line],visit_Break->[set_line],visit_Nonlocal->[set_line],visit_Global->[set_line],visit_UnaryOp->[visit,set_line],visit_Num->[set_line],visit_Attribute->[visit,set_line],visit_SetComp->[set_line],visit_Assert->[visit,set_line],visit_Module->[fix_function_overloads,translate_stmt_list],visit_Name->[set_line],visit_Slice->[visit],as_required_block->[translate_stmt_list,set_line],visit_Call->[visit,translate_expr_list,set_line],translate_stmt_list->[visit]]] | Return a MypyFile representing the module. | That's an extra copy -- maybe use `.extend()`? |
@@ -18,6 +18,8 @@ describe Idv::CaptureDocController do
before do
stub_analytics
allow(@analytics).to receive(:track_event)
+ allow(LoginGov::Hostdata::EC2).to receive(:load).
+ and_return(OpenStruct.new(region: 'us-west-2', domain: 'example.com'))
end
describe '#index' do
| [mock_next_step->[to,and_return],create,script_src,let,mock_next_step,describe,to_not,render_template,create!,it,idv_capture_doc_step_url,travel,env,to,before,request_token,with,require,day,include,have_actions,receive,now,each,id,call,redirect_to,context,uuid,get,mock_session,and_return] | requires rails_helper + Idv idv_capture_doc_step_url idv_capture_doc_step. | I feel like we should update the hostdata gem to make `LoginGov::Hostdata::EC2` a plain struct so we can just do like: `LoginGov::Hostdata::EC2.new(region:.., domain:...)` |
@@ -71,12 +71,12 @@ public class System2Test {
}
@Test
- public void testIsOsWindows() throws Exception {
+ public void testIsOsWindows() {
assertThat(System2.INSTANCE.isOsWindows()).isEqualTo(SystemUtils.IS_OS_WINDOWS);
}
@Test
- public void testIsJavaAtLeast17() throws Exception {
+ public void testIsJavaAtLeast17() {
if (SystemUtils.IS_JAVA_1_6) {
assertThat(System2.INSTANCE.isJavaAtLeast17()).isFalse();
} else {
| [System2Test->[close->[close,MyCloseable],close_throws_exception_on_error->[close]]] | Test if system is OS Windows or JavaAtLeast17. | Stupid question here: since SonarQube is now using Java8 and that classes won't be loaded by Java 7 or Java 6, does this test shouldn't be dropped ? |
@@ -178,6 +178,8 @@ public class RemoteInterpreterServer extends Thread
server = new TThreadPoolServer(
new TThreadPoolServer.Args(serverTransport).processor(processor));
remoteWorksResponsePool = Collections.synchronizedMap(new HashMap<String, Object>());
+
+ clusterManagerClient.start(interpreterGroupId);
}
@Override
| [RemoteInterpreterServer->[getProgress->[getProgress,getInterpreter],angularRegistryPush->[info],completion->[getInterpreter,completion],angularObjectAdd->[angularObjectUpdate],getFormType->[getInterpreter],getApplicationContext->[createAppOutput],loadApplication->[getApplicationContext,info],runApplication->[run,info],main->[handle->[shutdown],RemoteInterpreterServer],unloadApplication->[info],getStatus->[info],InterpretJob->[jobRun->[processInterpreterHooks,open,interpret],processInterpreterHooks->[onPreExecute,onPostExecute]],convert->[convert],close->[close],cancel->[cancel,getInterpreter,info],open->[getInterpreter,open],interpret->[getInterpreter]]] | Starts the interpreter process if necessary. | Even in single node deployment, this code is still called ? |
@@ -1165,7 +1165,7 @@ namespace System.Xml
{
if (!_useCurNode)
{
- IXmlLineInfo lineInfo = reader as IXmlLineInfo;
+ IXmlLineInfo? lineInfo = reader as IXmlLineInfo;
if (lineInfo != null)
{
return lineInfo.LineNumber;
| [XmlSubtreeReader->[ReadContentAsBoolean->[ReadContentAsBoolean],ReadContentAsFloat->[ReadContentAsFloat],ReadValueChunk->[ReadValueChunk,ReadElementContentAsBase64,ReadElementContentAsBinHex,ReadContentAsBase64,ReadContentAsBinHex],ReadContentAs->[ReadContentAs],MoveToFirstAttribute->[MoveToFirstAttribute],ReadContentAsDecimal->[ReadContentAsDecimal],AddNamespace->[AddNamespace,Set],ReadContentAsInt->[ReadContentAsInt],ReadElementContentAsBase64->[ReadContentAsBase64,Read,ReadElementContentAsBase64],ReadContentAsBase64->[ReadContentAsBase64],ReadContentAsObject->[ReadContentAsObject],Close->[Read,MoveToElement],MoveToNextAttribute->[MoveToNextAttribute],ReadContentAsDouble->[ReadContentAsDouble],ReadContentAsLong->[ReadContentAsLong],LookupPrefix->[LookupPrefix],GetAttribute->[GetAttribute],Read->[Read,MoveToElement],ReadElementContentAsBinHex->[Read,ReadElementContentAsBase64,ReadElementContentAsBinHex,ReadContentAsBase64,ReadContentAsBinHex],FinishReadContentAsType->[ProcessNamespaces],ReadContentAsBinHex->[ReadContentAsBase64,ReadElementContentAsBase64,ReadContentAsBinHex],ProcessNamespaces->[MoveToFirstAttribute,MoveToNextAttribute,LookupNamespace,MoveToElement],FinishReadContentAsBinary->[ProcessNamespaces,ReadContentAsBase64,ReadContentAsBinHex],Skip->[Read,Skip,MoveToElement],MoveToNsAttribute->[MoveToElement],InitReadElementContentAsBinary->[ProcessNamespaces,Read],LookupNamespace->[LookupNamespace],FinishReadElementContentAsBinary->[ProcessNamespaces,Read,ReadElementContentAsBase64,ReadElementContentAsBinHex,ReadContentAsBase64,ReadContentAsBinHex],InitReadContentAsType->[ReadContentAsBase64,ReadContentAsBinHex,ReadElementContentAsBase64,ReadElementContentAsBinHex],MoveToAttribute->[MoveToAttribute],ReadAttributeValue->[ReadAttributeValue],MoveToElement->[MoveToElement],ReadContentAsString->[ReadContentAsString],Dispose->[Close],GetNamespacesInScope->[GetNamespacesInScope],Set,ReadElementContentAsBase64,ReadElementContentAsBinHex,ReadContentAsBase64,Read
ContentAsBinHex]] | region IXmlReader interface. region IXmlLineInfo implementation. | please use `as` => `is` here and above |
@@ -631,6 +631,8 @@ $config['auth_ldap_groupmemberattr'] = 'memberUid';
$config['auth_ldap_emailattr'] = 'mail';
$config['auth_ldap_cache_ttl'] = 300;
// How long in seconds should ldap* module cache user information in $_SESSION
+$config['auth_ldap_userdn'] = true;
+// Uses a users full DN as the value of the member attribute in a group (instead of member: username, it’s member: uid=username,ou=groups,dc=domain,dc=com).
// Active Directory Authentication
$config['auth_ad_user_filter'] = "(objectclass=user)";
| [No CFG could be retrieved] | Configure the authentication options for a given node Check if the system is on the system and if so set the configuration options. | This changes the default for all installs. Should be set to `false` to maintain backwards compatibility. |
@@ -13,7 +13,13 @@ import (
"github.com/weaveworks/cortex/util"
)
-// NewQueryable creates a new promql.Engine for cortex.
+// NewEngine creates a new promql.Engine for cortex.
+func NewEngine(distributor Querier, chunkStore chunk.Store) *promql.Engine {
+ queryable := NewQueryable(distributor, chunkStore)
+ return promql.NewEngine(queryable, nil)
+}
+
+// NewQueryable creates a new Queryable for cortex.
func NewQueryable(distributor Querier, chunkStore chunk.Store) Queryable {
return Queryable{
Q: MergeQuerier{
| [QueryRange->[Query],LabelValuesForLabelName->[LabelValuesForLabelName],QueryInstant->[QueryRange],MetricsForLabelMatchers->[MetricsForLabelMatchers]] | NewQueryable returns a new Queryable object that queries all samples in a given time range LabelValuesForLabelName returns all of the label values that are associated with a given label. | Should remove `NewQueryable` function below and use `NewEngine` in `main.go`, no point in having both. |
@@ -217,6 +217,10 @@ abstract class CommandWithUpgrade extends \WP_CLI_Command {
// Only attempt to update if there is something to update
if ( !empty( $items_to_update ) ) {
+ $cache_manager = \WP_CLI::get_http_cache_manager();
+ foreach ($items_to_update as $item) {
+ $cache_manager->whitelist_package($item['update_package'], $this->item_type, $item['name'], $item['update_version']);
+ }
$upgrader = $this->get_upgrader( $assoc_args );
$result = $upgrader->bulk_upgrade( wp_list_pluck( $items_to_update, 'update_id' ) );
}
| [CommandWithUpgrade->[_list->[get_all_items],install->[install,install_from_repo],update_many->[filter_item_list,get_item_list,get_upgrader],get_upgrader->[get_upgrader_class],status->[status_single],status_all->[get_all_items]]] | Update many items in the database. \ brief Show a page if the user is not authorized to access the page. \. | Wouldn't it be esier to pass `$item` as a whole? |
@@ -94,6 +94,7 @@ class CppInfo(_CppInfo):
class _BaseDepsCppInfo(_CppInfo):
def __init__(self):
super(_BaseDepsCppInfo, self).__init__()
+ self.public_deps = []
def update(self, dep_cpp_info):
| [DepsCppInfo->[loads->[DepsCppInfo,CppInfo],__getattr__->[_get_cpp_info->[],_BaseDepsCppInfo]],CppInfo->[__getattr__->[_get_cpp_info->[_CppInfo],_get_cpp_info]],_BaseDepsCppInfo->[update->[merge_lists]]] | Initialize dependencies CppInfo object. | ``public_deps`` shouldn't be necessary here, it is fixed in the other PR. |
@@ -66,7 +66,7 @@
return Task.FromResult(0);
}
- public Task Handle(object message, IMessageProcessingContext context)
+ public Task Handle(object message, IMessageProcessingContext context, Type sagaType)
{
testContext.NotFoundHandlerCalled = true;
return Task.FromResult(0);
| [When_message_has_a_saga_id->[Task->[MessageHandlerCalled,False,NotFoundHandlerCalled,TimeoutHandlerCalled,Run,True],SagaEndpoint->[MessageWithSagaIdSaga->[Task->[NotFoundHandlerCalled,FromResult,MessageHandlerCalled,TimeoutHandlerCalled],ConfigureHowToFindSaga->[DataId,ToSaga]],MessageWithSagaIdHandler->[Task->[FromResult,Done]]]]] | This method is called when a message is received from the Saga. | Nitpick: Would it be more logical to pass the saga type as the second argument since the message and the saga type is the "input" and the processing context contains the "actions" that the user can take? (Could we even consider a SagaHandlerContext perhaps?) |
@@ -22,6 +22,8 @@ import (
"fmt"
"time"
+ "github.com/elastic/apm-server/model"
+
"github.com/santhosh-tekuri/jsonschema"
"github.com/elastic/apm-server/model/metricset/generated/schema"
| [decodeSamples->[New,Float64,Errorf],Transform->[NewLogger,Warnf,IsZero,Inc,Set,Put],Prune,MapStr,CreateSchema,TimeEpochMicro,New,NewInt,NewRegistry,decodeSamples] | Creates a new metricset with the given name and description. ModelSchema returns a JSON schema that represents a single event in the model. | again nitpick with combining imports from apm-server |
@@ -50,6 +50,16 @@ func NewDeleteStoreCommand() *cobra.Command {
return d
}
+// NewSetStoreStateCommand returns a state subcommand of storeCmd.
+func NewSetStoreStateCommand() *cobra.Command {
+ d := &cobra.Command{
+ Use: "state <store_id> [Up|Offline|Tombstone]",
+ Short: "set the store's state",
+ Run: setStoreStateCommandFunc,
+ }
+ return d
+}
+
// NewLabelStoreCommand returns a label subcommand of storeCmd.
func NewLabelStoreCommand() *cobra.Command {
l := &cobra.Command{
| [Printf,Join,Println,Sprintf,AddCommand,ParseFloat,Atoi] | NewStoreCommand returns a command that returns a store subcommand of a root command. deleteStoreCommandFunc handles the delete - store command. | can we set the tombstone state directly? Seem it is dangerous. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.