patch
stringlengths
18
160k
callgraph
stringlengths
4
179k
summary
stringlengths
4
947
msg
stringlengths
6
3.42k
@@ -111,8 +111,8 @@ class CoreferenceResolver(Model): def forward(self, # type: ignore text: Dict[str, torch.LongTensor], spans: torch.IntTensor, - span_labels: torch.IntTensor = None, - metadata: List[Dict[str, Any]] = None) -> Dict[str, torch.Tensor]: + metadata: List[Dict[str, Any]], + span_labels: torch.IntTensor = None) -> Dict[str, torch.Tensor]: # pylint: disable=arguments-differ """ Parameters
[CoreferenceResolver->[forward->[,_compute_span_pair_embeddings,size,max,_endpoint_span_extractor,_lexical_dropout,_text_field_embedder,_attentive_span_extractor,_context_layer,int,_mention_pruner,logsumexp,min,get_device_of,_mention_recall,cat,flatten_and_batch_shift_indices,masked_log_softmax,log,get_text_field_mask,long,batched_index_select,_compute_coreference_scores,flattened_index_select,_conll_coref_scores,floor,_compute_antecedent_gold_labels,_generate_valid_antecedents,relu,unsqueeze],_compute_span_pair_embeddings->[size,_distance_embedding,cat,bucket_values,expand,unsqueeze],_compute_coreference_scores->[cat,new_zeros,_antecedent_scorer,size],_compute_antecedent_gold_labels->[,expand_as,cat],decode->[output_dict,item,append,len,span,zip,enumerate,clusters],_generate_valid_antecedents->[,get_range_vector,relu],get_metrics->[get_metric],__init__->[Sequential,SelfAttentiveSpanExtractor,EndpointSpanExtractor,TimeDistributed,get_output_dim,InitializerApplicator,Linear,super,Dropout,ConllCorefScores,Pruner,initializer,MentionRecall,Embedding]],register,getLogger]
Forward computation of the n - grammatical clustering. Get the embeddings of the missing - field in the last batch. This function is called to compute the final indices of the non - empty antecedent \ static method for computing the top - level antecedent embedding and scores.
My first thought was that changing the order of this was a problem for backwards compatibility, but because it's a `Model.forward` method, we'll pass a dictionary of parameters, anyway, so this should probably be ok.
@@ -59,4 +59,10 @@ public enum BucketLayout { "Error: BucketLayout not found, type=" + this); } } + + public static BucketLayout fromString(String value) { + // Todo: should we throw error if user configured unsupported value + // during OM startup or bucket creation time. + return StringUtils.isBlank(value) ? LEGACY : BucketLayout.valueOf(value); + } }
[toProto->[IllegalArgumentException]]
Returns the proto representation of this bucket layout.
If we validate the value from config from startup, we don't need this isBlank check here.
@@ -16,15 +16,10 @@ const ( ) func TestBackgroundIdentifier(t *testing.T) { - doWithSigChainVersions(func(sigVersion libkb.SigVersion) { - _testBackgroundIdentifier(t, sigVersion) - }) -} - -func _testBackgroundIdentifier(t *testing.T, sigVersion libkb.SigVersion) { t.Skip() tc := SetupEngineTest(t, "track") defer tc.Cleanup() + sigVersion := libkb.GetDefaultSigVersion(tc.G) fu := CreateAndSignupFakeUser(tc, "track") fakeClock := clockwork.NewFakeClockAt(time.Now())
[GetUID,Now,Duration,NewFakeClockAt,Add,Errorf,Skip,After,Logf,Debug,Cleanup,Advance,Remove,Fatalf,Sprintf,SetSnooperChannel,ResetLoginState,Fatal,SetClock]
engine import imports a background identifier. TestArgs is the arguments for the BackgroundIdentifier test.
note: currently this returns V1 unless you're feature flagged as admin
@@ -5,10 +5,13 @@ from github import Github from datetime import date, datetime import subprocess as sp from azure.storage.blob import BlobClient +import reply_generator as rg +from update_issue_body import update_issue_body +import traceback _NULL = ' ' _FILE_OUT = 'release_issue_status.csv' -_PYTHON_SDK_ADMINISTRATORS = {'msyyc', 'RAY-316', 'BigCat20196'} +_PYTHON_SDK_ADMINISTRATORS = ['msyyc', 'RAY-316', 'BigCat20196'] def my_print(cmd):
[print_check->[my_print],main->[_extract_language,_whether_author_comment,_extract_author_latest_comment,_judge_status,_extract,_latest_comment_time,IssueStatus,output,print_check,my_print],main]
Print the command and return the nanoseconds.
it is better to use set to use its built-in function `difference`
@@ -161,6 +161,14 @@ class InfoTest(unittest.TestCase): self.assertEqual(["Hello2/0.1@PROJECT", "Hello0/0.1@lasote/stable", "Date", "Hello1/0.1@lasote/stable", "Date"], lines) + self.client.run("info --only=invalid", ignore_error=True) + self.assertIn("Invalid --only value", self.client.user_io.out) + self.assertNotIn("with --path specfied, allowed values:", self.client.user_io.out) + + self.client.run("info --paths --only=bad", ignore_error=True) + self.assertIn("Invalid --only value", self.client.user_io.out) + self.assertIn("with --path specfied, allowed values:", self.client.user_io.out) + def reuse_test(self): self.client = TestClient() self._create("Hello0", "0.1")
[InfoTest->[reuse_test->[_create,clean_output],graph_test->[check_file->[check_digraph_line],create_export->[_create,create_export],check_digraph_line->[check_conan_ref],check_file,create_export],only_names_test->[_create],diamond_build_order_test->[_create],graph_html_test->[create_export->[_create,create_export],create_export],build_order_test->[_create]]]
This test tests the names of all the components of a . Check if a specific is present in the user s output.
specfied => specified
@@ -192,13 +192,8 @@ class ProductVariant(CountableDjangoObjectType): "This field will be removed after 2020-07-31." ), ) - price_override = graphene.Field( - Money, - description=( - "Override the base price of a product if necessary. A value of `null` " - "indicates that the default product price is used." - ), - ) + price = graphene.Field(Money, description=("Base price of a product variant."),) + pricing = graphene.Field( VariantPricingInfo, description=(
[ProductVariant->[resolve_meta->[resolve_meta],resolve_pricing->[calculate_pricing_info->[calculate_pricing_with_product->[calculate_pricing_with_collections->[VariantPricingInfo]],calculate_pricing_with_variants->[calculate_pricing_with_collections->[]]]],resolve_private_meta->[resolve_private_meta]],Category->[resolve_meta->[resolve_meta],resolve_private_meta->[resolve_private_meta]],Product->[resolve_meta->[resolve_meta],resolve_pricing->[calculate_pricing_info->[calculate_pricing_with_product->[calculate_pricing_with_collections->[]],calculate_pricing_with_variants->[calculate_pricing_with_collections->[ProductPricingInfo]]]],resolve_private_meta->[resolve_private_meta],resolve_margin->[Margin]],Collection->[resolve_meta->[resolve_meta],resolve_private_meta->[resolve_private_meta]],ProductType->[resolve_available_attributes->[resolve_attributes],resolve_meta->[resolve_meta],resolve_private_meta->[resolve_private_meta]]]
This function returns a list of all the attributes that are assigned to the product type. Represents availability of a product variant in the storefront.
I would extend this description with sth like: "This field is restricted for admins. Use the `pricing` field to get the public price for customers." It will be less confusing for API users when they see that the regular price field is protected with permissions.
@@ -198,9 +198,16 @@ ADDON_TYPE_CHOICES_API = { ADDON_LPAPP: 'language', _ADDON_PERSONA: 'persona', ADDON_STATICTHEME: 'statictheme', + ADDON_PERMISSION_ENABLER: 'enabler', } -ADDON_TYPES_WITH_STATS = (ADDON_EXTENSION, ADDON_STATICTHEME, ADDON_DICT, ADDON_LPAPP) +ADDON_TYPES_WITH_STATS = ( + ADDON_EXTENSION, + ADDON_STATICTHEME, + ADDON_DICT, + ADDON_LPAPP, + ADDON_PERMISSION_ENABLER, +) # Edit addon information MAX_TAGS = 10
[_,namedtuple,join,compile,_dimensions]
A slug to ID map for the update API. A list of all possible components of a network network.
Some of the constants above are `enabler` and some are `permission-enabler`. Not sure which is better but can we have them the same for consistency?
@@ -1404,7 +1404,7 @@ export class AmpStory extends AMP.BaseElement { // Step out if trying to navigate to the currently active page. if (this.activePage_ && this.activePage_.element.id === targetPageId) { - return Promise.resolve(); + return this.activePage_.rewindAllMedia(); } // If the next page might be paywall protected, and the access
[AmpStory->[isBrowserSupported->[Boolean,CSS],initializeMediaQueries_->[forEach,getMediaQueryService,onMediaQueryMatch,getAttribute],isStandalone_->[STANDALONE],onUIStateUpdate_->[DESKTOP_PANELS,VERTICAL,scopedQuerySelectorAll,setImportantStyles,LOAD_END,MOBILE,DESKTOP_FULLBLEED,element,length],maybePreloadBookend_->[CAN_SHOW_BOOKEND],closeOpacityMask_->[dev,toggle],initializeStoryNavigationPath_->[NAVIGATION_PATH,getHistoryState,SET_NAVIGATION_PATH],initializeLiveStory_->[ADD_TO_ACTIONS_WHITELIST,DOM_UPDATE,UI_STATE,DESKTOP_PANELS],onNoPreviousPage_->[dict,ADVANCEMENT_MODE,CAN_SHOW_PREVIOUS_PAGE_HELP],maybeLoadStoryEducation_->[extensionsFor],layoutCallback->[resolve,isBrowserSupported,TOGGLE_SUPPORTED_BROWSER],switchTo_->[DESKTOP_PANELS,setState,isAd,shift,MUTED_STATE,isAutoAdvance,CURRENT_PAGE_INDEX,removeAttributeInMutate,TOGGLE_AD,ADVANCE_TO_ADS,VISITED,SET_ADVANCEMENT_MODE,UI_STATE,muteAllMedia,NOT_ACTIVE,CHANGE_PAGE,PAUSED_STATE,beforeVisible,PLAYING,element,length,resolve,unqueueStepInRAF,AD_SHOWING,TOGGLE_ACCESS,setAttributeInMutate],pause_->[TOGGLE_PAUSED,PAUSED_STATE],triggerActiveEventForPage_->[actionServiceForDoc,HIGH],isLandscapeSupported_->[SUPPORTS_LANDSCAPE],getElementDistance->[getDistance],getMaxMediaElementCounts->[min,VIDEO,AUDIO],onAdStateUpdate_->[MUTED_STATE],setOrientationAttribute_->[ORIENTATION],setThemeColor_->[dev,content,name,computedStyle],getInitialPageId_->[lastItem,NAVIGATION_PATH,getHistoryState,parseQueryString,id],onPausedStateUpdate_->[PLAYING,PAUSED],registerAndPreloadBackgroundAudio_->[upgradeBackgroundAudio,childElement,LOAD_END,tagName],initializeStandaloneStory_->[classList],initializeListeners_->[STORY_UNMUTED,AD_STATE,NEXT_PAGE,PREVIOUS_PAGE,MUTED_STATE,getMode,NO_NEXT_PAGE,PAGE_PROGRESS,setWhitelist,STORY_MUTED,BOOKEND_STATE,STORY_IS_MUTED,stopPropagation,endsWith,MOBILE,preventDefault,UI_STATE,DISPATCH_ACTION,NO_PREVIOUS_PAGE,getState,NEXT,ADVANCEMENT_MODE,REPLAY,SIDEBAR_STATE,PAUSED_STATE,ACTIONS_WHITELIST,parseQueryS
tring,getDetail,STORY_ADVANCEMENT_MODE,actionServiceForDoc,SUPPORTED_BROWSER_STATE,SWITCH_PAGE,debounce,slice],getUIType_->[DESKTOP_PANELS,VERTICAL,MOBILE,DESKTOP_FULLBLEED],openOpacityMask_->[dev,toggle],insertPage->[setAttribute,RETURN_TO,PUBLIC_ADVANCE_TO,isAd,ADVANCE_TO,AUTO_ADVANCE_TO,dev,CAN_INSERT_AUTOMATIC_AD,element,id,isExperimentOn],initializeBookend_->[dict,whenUpgradedToCustomElement,getImpl,createElementWithAttributes],isActualPage_->[escapeCssSelectorIdent],setDesktopPositionAttributes_->[element,escapeCssSelectorIdent,removeAttribute,getPreviousPageId,DESKTOP_POSITION,scopedQuerySelectorAll,prototype,getNextPageId,forEach,push],isLayoutSupported->[CONTAINER],onSidebarStateUpdate_->[CLOSE,HIGH,actionServiceForDoc,TOGGLE_SIDEBAR,OPEN,execute],initializeStoryPlayer_->[extensionsFor],initializePageIds_->[SET_PAGE_IDS,user,prototype,length,id,map],initializeListenersForDev_->[getMode,getDetail,DEV_LOG_ENTRIES_AVAILABLE],getPageById->[devAssert],maybeLockScreenOrientation_->[mozLockOrientation,dev,message,lockOrientation,msLockOrientation],hideBookend_->[TOGGLE_BOOKEND],forceRepaintForSafari_->[DESKTOP_PANELS,toggle,UI_STATE],onKeyDown_->[BOOKEND_STATE,RTL_STATE,RIGHT_ARROW,key,LEFT_ARROW,SET_ADVANCEMENT_MODE,MANUAL_ADVANCE],initializePages_->[ADD_TO_ACTIONS_WHITELIST,all,prototype,getImpl,isExperimentOn],resume_->[TOGGLE_PAUSED],upgradeCtaAnchorTagsForTracking_->[element,scopedQuerySelectorAll,setAttribute,prototype],getPageDistanceMapHelper_->[getAdjacentPageIds],validateConsent_->[tagName,dev,indexOf,childElementByTag,removeChild,forEach,length,childElements],onSelectPage_->[VIEWER_SELECT_PAGE,SET_ADVANCEMENT_MODE],onResize->[TOGGLE_UI,MOBILE,TOGGLE_VIEWPORT_WARNING],initializeStoryAccess_->[hasAttribute,areFirstAuthorizationsCompleted,removeAttribute,user,accessServiceForDocOrNull,onApplyAuthorizations],replay_->[then,BOOKEND_STATE,removeAttributeInMutate,dev,NEXT,VISITED,SET_NAVIGATION_PATH],onAccessApplyAuthorizations_->[element,TOGGLE_ACCESS,NEXT],o
nNoNextPage_->[dict,ADVANCEMENT_MODE],lockBody_->[setImportantStyles,documentElement,body],maybeTriggerViewportWarning_->[TOGGLE_PAUSED,PAUSED_STATE,TOGGLE_VIEWPORT_WARNING,VIEWPORT_WARNING_STATE],initializeStyles_->[length],rewriteStyles_->[textContent,isExperimentOn],whenPagesLoaded_->[DESKTOP_PANELS,all,filter,LOAD_END,element,UI_STATE],updateAudioIcon_->[TOGGLE_STORY_HAS_BACKGROUND_AUDIO,TOGGLE_STORY_HAS_AUDIO],constructor->[timerFor,forElement,getStoreService,registerServiceBuilder,platformFor,TOGGLE_RTL,isRTL,getVariableService,for,viewerForDoc,getAnalyticsService,createPseudoLocale],isSwipeLargeEnoughForHint_->[abs],next_->[next,devAssert],addPage->[isAd],showBookend_->[TOGGLE_BOOKEND],buildCallback->[setAttribute,nodeType,then,TEXT_NODE,resolve,childNodes,NEXT,TOGGLE_UI,SIDEBAR_STATE,TOGGLE_CAN_SHOW_BOOKEND,GO_TO_PAGE,forEach,historyForDoc,SET_ADVANCEMENT_MODE,isExperimentOn],getPagesByDistance_->[indexOf,isExperimentOn,NAVIGATION_PATH,keys],onSupportedBrowserStateUpdate_->[dev,TOGGLE_PAUSED,PAUSED_STATE],toggleElementsOnBookend_->[DESKTOP_PANELS,resetStyles,scopedQuerySelectorAll,prototype,setImportantStyles,UI_STATE],previous_->[previous,devAssert],buildPaginationButtons_->[create,CAN_SHOW_PAGINATION_BUTTONS],updateNavigationPath_->[NEXT,pop,NAVIGATION_PATH,setHistoryState,SET_NAVIGATION_PATH,length,PREVIOUS,push],performTapNavigation_->[DESKTOP_PANELS,NEXT,PREVIOUS,SET_ADVANCEMENT_MODE,UI_STATE,MANUAL_ADVANCE],preloadPagesByDistance_->[forEach,setDistance],getPageIndexById->[findIndex,user,element],hasBookend_->[components,resolve,CAN_SHOW_BOOKEND,MOBILE,UI_STATE],initializeOpacityMask_->[HIGH,dev,actionServiceForDoc,classList,addEventListener,toggle,execute],installGestureRecognizers_->[HIDDEN,INTERACTIVE_COMPONENT_STATE,state,BOOKEND_STATE,SYSTEM_UI_IS_VISIBLE_STATE,CAN_SHOW_NAVIGATION_OVERLAY_HINT,get,data,event,ACCESS_STATE,SIDEBAR_STATE,onGesture],pauseStoryUntilConsentIsResolved_->[getConsentPolicyState,then,TOGGLE_PAUSED],initializeSidebar_->[TOGGL
E_HAS_SIDEBAR,ADD_TO_ACTIONS_WHITELIST],layoutStory_->[setState,then,all,NOT_ACTIVE,shouldShowStoryUrlInfo,NEXT,user,build,getHistoryState,BOOKEND_ACTIVE,whenUpgradedToCustomElement,whenBuilt,ATTACHMENT_PAGE_ID],getPageIndex->[findIndex],markStoryAsLoaded_->[INI_LOAD,STORY_LOADED,dispatch],getNextPage->[getNextPageId],getPageContainingElement_->[findIndex,element,closest],BaseElement],VIDEO,registerServiceForDoc,extension,AUDIO,registerElement]
Switches to the specified page in the given direction. Private method for handling action that does not impact the UI or UX. Private method for handling a single page in the navigation.
There is more to rewinding a page than just rewinding its media. Having this code here might have side effects as well. If this is trying to solve the replay case, there's a `replay_()` method where you could set the page to `PageState.NOT_ACTIVE` and back to `PLAYING`, which will restart media, page advancement (time or media based), analytics, etc.
@@ -351,7 +351,6 @@ func unversionedRESTMux(grpcURI string, dopts []grpc.DialOption) (http.Handler, "gateway": pb_gateway.RegisterGatewayHandlerFromEndpoint, "legacy": pb_legacy.RegisterLegacyDataCollectorHandlerFromEndpoint, "license": pb_license.RegisterLicenseHandlerFromEndpoint, - "authz": pb_authz.RegisterAuthorizationHandlerFromEndpoint, "secrets": pb_secrets.RegisterSecretsServiceHandlerFromEndpoint, "cc_reporting": pb_cc_reporting.RegisterReportingServiceHandlerFromEndpoint, "cc_stats": pb_cc_stats.RegisterStatsServiceHandlerFromEndpoint,
[ProfileTarHandler->[Header,Write,Itoa,Error,Infof,Sprintf,Decode,authRequest,ComplianceProfilesServiceClient,Recv,GetData,NewDecoder,Split,Set,ReadTar],ProfileCreateHandler->[Copy,Write,Create,Error,Sprintf,Marshal,Decode,authRequest,Bytes,ComplianceProfilesServiceClient,Close,FormFile,Send,CloseAndRecv,Get,NewDecoder,Split,Query],RegisterGRPCServices->[Warn,ComplianceReportingServiceClient,ChefIngesterClient,RegisterStatsServiceServer,NewJobsHandler,NewChefIngestJobSchedulerServer,NewEventFeedServer,IngestStatusClient,NewApplicationsHandler,NewProfilesHandler,RegisterJobSchedulerServer,RegisterEventFeedServer,NewReportingHandler,Register,RegisterLicenseServer,TeamsV1Client,ComplianceStatsServiceClient,DatafeedClient,ProjectsClient,LicenseControlClient,DeploymentServiceClient,AuthorizationClient,RegisterTokensServer,PoliciesClient,Wrap,Notifier,NewLicenseServer,NewCfgMgmtServer,NewInfraProxyHandler,RegisterConfigMgmtServer,RegisterApplicationsServiceServer,RegisterDeploymentServer,InfraProxyClient,RegisterLegacyDataCollectorServer,RegisterInfraProxyServer,FeedClient,NodeManagerClient,NewServer,NodesClient,ComplianceProfilesServiceClient,Fatal,NewNodeManagerHandler,RegisterTeamsServer,WithFields,RegisterDataLifecycleServer,RegisterTelemetryServer,TokensMgmtClient,ApplicationsClient,SecretClient,automateURL,RegisterSecretsServiceServer,RegisterGatewayServer,NewTeamsServer,ComplianceVersionServiceClient,RegisterReportingServiceServer,PurgeClient,NewAuthzServer,UsersMgmtClient,NewLegacyIngestServer,NewNodesHandler,RegisterAuthorizationServer,NewChefIngestServer,NewStatsHandler,RegisterUsersServer,NewDeploymentServer,RegisterJobsServiceServer,AuthorizationV2Client,NewGatewayServer,RegisterRulesServer,NewNotificationsServer,RegisterProfilesServiceServer,CfgMgmtClient,NewTelemetryServer,ComplianceJobsServiceClient,NewDatafeedHandler,trialLicenseURL,RegisterNotificationsServer,NotificationsClient,RegisterChefIngesterServer,ChefIngesterJobSchedulerClient,RegisterDatafeedSer
viceServer,RegisterNodesServiceServer,TeamsV2Client,NewSecretsHandler,RegisterPoliciesServer,RegisterNodeManagerServiceServer],configMgmtNodeExportHandler->[Write,Error,CfgMgmtClient,NodeExport,Decode,authRequest,GetContent,NewDecoder,Recv],configMgmtReportExportHandler->[Write,Error,CfgMgmtClient,Decode,authRequest,ReportExport,GetContent,NewDecoder,Recv],authRequest->[Context,ServiceSubjectFromCert,IsAuthorized,NewOutgoingContext,Authenticate,New,AuthenticationClient,NewOutgoingProjectsContext,Ctx,ProjectsFromMetadata,Wrap,HasPrefix,Err],NodeExportHandler->[Error,ComplianceReportingServiceClient,Decode,authRequest,NewDecoder,ExportNode],DeploymentStatusHandler->[Header,DeploymentServiceClient,Status,Write,Error,Marshal,authRequest,MarshalIndent,String,Set,WriteHeader,Query],ReportExportHandler->[Error,ComplianceReportingServiceClient,Decode,authRequest,Export,NewDecoder],Write,Wrapf,Error,Infof,WithMetadata,HandlerFunc,Background,WithCancel,GetContent,NewServeMux,WithMarshalerOption,Recv,WithIncomingHeaderMatcher,MapMethodTo]
Universal REST Gateway registration InfraProxyHandlerFromEndpoint creates a RESTMux that can serve a single ethernet proxy.
Remove from the `/api/v0` top-level path component here; add to the `/apis` path component in L378 below.
@@ -433,7 +433,7 @@ final class ProTechAi { } if (sea) { final Route r = new Route(neighbor, current); - if (MoveValidator.validateCanal(r, null, player, data) != null) { + if (MoveValidator.validateCanal(r, null, player) != null) { continue; } }
[ProTechAi->[determineEnemyBlitzStrength->[strength]]]
Finds attackers. This method is called when a route is missing.
Avoid deeply nested control flow statements.
@@ -157,7 +157,13 @@ public class ExpressionTypeManager { final SqlType leftSchema = expressionTypeContext.getSqlType(); process(node.getRight(), expressionTypeContext); final SqlType rightSchema = expressionTypeContext.getSqlType(); - ComparisonUtil.isValidComparison(leftSchema, node.getType(), rightSchema); + if (!ComparisonUtil.isValidComparison(leftSchema, node.getType(), rightSchema)) { + throw new KsqlStatementException("Cannot compare " + + node.getLeft().toString() + " (" + leftSchema.toString() + ") to " + + node.getRight().toString() + " (" + rightSchema.toString() + ") " + + "with " + node.getType() + ".", + node.toString()); + } expressionTypeContext.setSqlType(SqlTypes.BOOLEAN); return null; }
[ExpressionTypeManager->[Visitor->[visitSearchedCaseExpression->[setSqlType],visitSubscriptExpression->[getSqlType,setSqlType],visitFunctionCall->[getSqlType,setSqlType,getExpressionSqlType],visitCreateMapExpression->[setSqlType],visitDereferenceExpression->[getSqlType,setSqlType],visitLikePredicate->[setSqlType],visitIsNotNullPredicate->[setSqlType],visitDoubleLiteral->[setSqlType],visitStringLiteral->[setSqlType],visitStructExpression->[getSqlType,setSqlType],visitBooleanLiteral->[setSqlType],visitNullLiteral->[setSqlType],visitIsNullPredicate->[setSqlType],visitLongLiteral->[setSqlType],visitComparisonExpression->[getSqlType,setSqlType],visitCreateArrayExpression->[setSqlType],visitArithmeticBinary->[getSqlType,setSqlType],visitCast->[getSqlType,setSqlType],visitColumnReference->[setSqlType],visitBetweenPredicate->[setSqlType],visitIntegerLiteral->[setSqlType],validateWhenClauses->[getSqlType],visitDecimalLiteral->[setSqlType],visitNotExpression->[setSqlType]]]]
Visit a ComparisonExpression. This method checks if the node is a BETWEEN predicate and if so.
Another misuse of `KsqlStatementException` - just throw `KsqlException`.
@@ -1364,6 +1364,9 @@ uint32_t mame_ui_manager::handler_ingame(render_container &container) if (machine().ui_input().pressed(IPT_UI_THROTTLE)) machine().video().set_throttled(!machine().video().throttled()); + // update unthrottle mute state + machine().sound().unthrottle_mute(!machine().video().throttled() && m_unthrottle_mute); + // check for fast forward if (machine().ioport().type_pressed(IPT_UI_FAST_FORWARD)) {
[No CFG could be retrieved]
Handle a user input and handle a specific . - - - - - - - - - - - - - - - - - -.
You only want to be setting the mute state if you actually toggled the throttle setting here (and also once on start when you initially set it), otherwise it's an out-of-line call into the sound manager and a virtual call into the OSD sound module every frame for no reason.
@@ -104,8 +104,9 @@ def download(url, module_name, md5sum, save_name=None): def fetch_all(): - for module_name in filter(lambda x: not x.startswith("__"), - dir(paddle.dataset)): + for module_name in [ + x for x in dir(paddle.dataset) if not x.startswith("__") + ]: if "fetch" in dir( importlib.import_module("paddle.dataset.%s" % module_name)): getattr(
[fetch_all_recordio->[must_mkdirs],download->[md5file],convert->[write_data,reader],must_mkdirs]
Fetch all missing items from all modules.
(x for x in ...)
@@ -13,7 +13,7 @@ module Users def send_code result = otp_delivery_selection_form.submit(delivery_params) - analytics.track_event(Analytics::OTP_DELIVERY_SELECTION, result.to_h) + add_tracking(result) if result.success? handle_valid_otp_params(user_select_delivery_preference, user_selected_default_number) update_otp_delivery_preference_if_needed
[TwoFactorAuthenticationController->[exceeded_otp_send_limit?->[exceeded_otp_send_limit?],phone_configuration->[phone_configuration],phone_redirect->[phone_enabled?],delivery_preference->[delivery_preference],redirect_to_otp_verification_with_error->[delivery_preference]]]
This method is called when a user selects a new code and selects a new one. It.
Couldn't this also send a voice call?
@@ -57,11 +57,11 @@ define([ * The following properties are part of the {@link Cesium3DTileContent} interface. */ this.state = Cesium3DTileContentState.UNLOADED; - this.contentReadyToProcessPromise = when.defer(); - this.readyPromise = when.defer(); this.batchTableResources = undefined; this.featurePropertiesDirty = false; + this._contentReadyToProcessPromise = when.defer(); + this._readyPromise = when.defer(); this._featuresLength = 0; this._features = undefined; }
[No CFG could be retrieved]
A batched 3D tile - based tile - based tile - based tile - based tile Cesium 3D Tile Content.
Up to you, but I would recommend changing `_readyPromise` to `_readyDeferred` and `contentReadyToProcessPromise` to `contentReadyToProcessDeferred` throughout this PR to avoid confusion in the future and make it clear that these are deferreds and not promises.
@@ -222,13 +222,13 @@ namespace Dynamo.Publish.Models } } - internal void SendAsynchronously(IEnumerable<IWorkspaceModel> workspaces, WorkspaceProperties workspaceProperties = null) + internal void SendAsynchronously(HomeWorkspaceModel workspace, WorkspaceProperties workspaceProperties = null) { State = UploadState.Uploading; Task.Factory.StartNew(() => { - var result = this.Send(workspaces, workspaceProperties); + var result = this.Send(workspace, workspaceProperties); var serverResponce = serverResponceRegex.Match(result); if (serverResponce.Success)
[PublishModel->[ClearState->[None,Uninitialized],Authenticate->[Login,AuthProviderNotFound,AuthenticationFailed],OnCustomizerURLChanged->[CustomizerURLChanged],SendAsynchronously->[Match,Value,None,Succeeded,InvalidNodes,Send,StartNew,UnknownServerError,Concat,Uploading,Success],Send->[AuthProviderNotFound,AddRange,FailedMessage,TryGetFunctionWorkspace,Add,First,Nodes,Contains,InvalidNodeNames,Dependencies,AuthenticationFailed,FunctionId,Send,IsNullOrWhiteSpace,CustomNodeWorkspaces,Username,ServerNotFound],OnUploadStateChanged->[UploadStateChanged],ManagerErrorMessage,Value,LoginState,IgnoreCase,OpenExeConfiguration,Failed,None,ServerNotFoundMessage,GetSection,Uninitialized,LoggedIn,OnCustomizerURLChanged,Location,PageErrorMessage,WorkspacesSendSucceededServerResponse,IsNullOrWhiteSpace,OnUploadStateChanged]]
This method authenticates the user and sends the workspaces asynchronously.
It was an error to pass all the workspaces to this method. We now isolate out the `HomeWorkspaceModel` and pass it directly.
@@ -3118,17 +3118,7 @@ namespace tools } std::string wallet_file = req.filename.empty() ? "" : (m_wallet_dir + "/" + req.filename); { - std::vector<std::string> languages; - crypto::ElectrumWords::get_language_list(languages, false); - std::vector<std::string>::iterator it; - - it = std::find(languages.begin(), languages.end(), req.language); - if (it == languages.end()) - { - crypto::ElectrumWords::get_language_list(languages, true); - it = std::find(languages.begin(), languages.end(), req.language); - } - if (it == languages.end()) + if (!crypto::ElectrumWords::is_valid_language(req.language)) { er.code = WALLET_RPC_ERROR_CODE_UNKNOWN_ERROR; er.message = "Unknown language: " + req.language;
[on_stop_mining->[not_open],on_sign_multisig->[not_open],on_finalize_multisig->[not_open],on_set_tx_notes->[not_open],on_get_tx_key->[not_open],on_getbalance->[not_open],on_check_reserve_proof->[not_open],on_get_transfer_by_txid->[fill_transfer_entry,not_open],on_tag_accounts->[not_open],on_make_integrated_address->[not_open],on_start_mining->[not_open],on_restore_deterministic_wallet->[handle_rpc_exception],on_set_daemon->[not_open],on_untag_accounts->[not_open],on_exchange_multisig_keys->[not_open],on_get_payments->[not_open],on_incoming_transfers->[not_open],on_transfer->[fill_response,validate_transfer,not_open],on_split_integrated_address->[not_open],on_store->[not_open],on_get_account_tags->[not_open],on_close_wallet->[not_open],on_set_attribute->[not_open],on_import_multisig->[not_open],on_submit_transfer->[not_open],on_sweep_all->[fill_response,validate_transfer,not_open],on_get_address_book->[not_open],on_generate_from_keys->[handle_rpc_exception],on_label_address->[not_open],on_add_address_book->[not_open],on_getheight->[not_open],on_describe_transfer->[not_open],on_get_accounts->[,not_open],on_sweep_single->[fill_response,validate_transfer,not_open],on_is_multisig->[not_open],on_export_multisig->[not_open],on_edit_address_book->[not_open],on_submit_multisig->[not_open],on_check_tx_proof->[not_open],on_get_bulk_payments->[not_open],on_stop_wallet->[not_open],on_get_reserve_proof->[not_open],on_check_spend_proof->[not_open],on_delete_address_book->[not_open],on_change_wallet_password->[not_open],on_refresh->[not_open],on_get_transfers->[fill_transfer_entry,not_open],on_label_account->[not_open],on_prepare_multisig->[not_open],on_get_tx_proof->[not_open],on_import_outputs->[not_open],on_check_tx_key->[not_open],on_sweep_dust->[fill_response,not_open],on_export_key_images->[not_open],on_query_key->[not_open],on_estimate_tx_size_and_weight->[not_open],on_get_attribute->[not_open],on_relay_tx->[not_open],on_get_tx_notes->[not_open],on_parse_uri->[not_open],t_ex
ecutor->[create_daemon->[t_daemon],run_interactive->[t_daemon],run_non_interactive->[t_daemon]],on_transfer_split->[fill_response,validate_transfer,not_open],fill_transfer_entry->[set_confirmations],on_rescan_spent->[not_open],t_daemon->[run->[set_wallet,init,stop,run]],on_set_account_tag_description->[not_open],on_create_account->[not_open],on_getaddress->[not_open],on_sign->[not_open],on_make_uri->[not_open],on_getaddress_index->[not_open],on_import_key_images->[not_open],on_validate_address->[not_open],on_create_address->[not_open],on_verify->[not_open],on_make_multisig->[not_open],init->[],on_get_spend_proof->[not_open],on_sign_transfer->[not_open],on_export_outputs->[not_open],handle_rpc_exception->[],on_rescan_blockchain->[not_open]]
on_create_wallet - create a new wallet This is the main entry point for the wallet - rpc. It is called from the command.
Just wondering if we should make these error messages more uniform between 2 of these cases. I guess these strings aren't part of the translation project are they?
@@ -102,7 +102,7 @@ func ListPullRequests(ctx *context.APIContext, form api.ListPullRequestsOptions) ctx.Error(http.StatusInternalServerError, "GetHeadRepo", err) return } - apiPrs[i] = prs[i].APIFormat() + apiPrs[i] = convert.ToPullRequest(prs[i]) } ctx.SetLinkHeader(int(maxResults), models.ItemsPerPage)
[Status,GetHeadRepo,IsPoster,Merge,IsErrDependenciesLeft,Info,UpdateIssueDeadline,ChangeStatus,IsPullCommitStatusPass,IsErrPullRequestNotExist,GetUserRepoPermission,ChangeMilestoneAssign,IsErrUserDoesNotHaveAccessToRepo,IsErrRebaseConflicts,ServerError,MakeIDsFromAPIAssigneesToAdd,IsUserRepoAdmin,GetBaseRepo,ParamsInt64,GetUserByName,Split,IsTrace,Day,SetLinkHeader,UpdateAssignees,IsErrMergeConflicts,Close,PullRequests,CanReadIssuesOrPulls,TimeStamp,RepoPath,GetLabelsInRepoByIDs,LoadIssue,JSON,StateType,IsErrUserNotExist,IsBranchExist,Month,HasForkedRepo,IsErrMergePushOutOfDate,NotifyNewPullRequest,CanWrite,GetPullRequestByIndex,GetUnmergedPullRequest,GetUserByID,NotFound,IsWorkInProgress,IsErrMergeUnrelatedHistories,Trace,TrimSpace,QueryTrim,GetDefaultMergeMessage,QueryStrings,CanAutoMerge,ReplaceLabels,MergeStyle,UpdateIssueByAPI,APIFormat,GetMilestoneByRepoID,Sprintf,QueryInt,QueryInt64,GetCompareInfo,IsErrMilestoneNotExist,LoadAttributes,IsZero,IsErrInvalidMergeStyle,Error,OpenRepository,Year,Location,CanRead,ReadBy,Unix,Written,CanBeAssigned,Date,GetDefaultSquashMessage,NewPullRequest]
GetPullRequest returns a list of pull requests that have a specific number of commits. returns the base which is the base of the repository and the pull request that.
I'm OK with this, but we should consider naming these functions `ToPullRequestAPI` or `ToAPIPullRequest` in case there will be conversions in the other direction or for other purposes.
@@ -19,6 +19,7 @@ from __future__ import division from __future__ import print_function import collections +import numpy as np import numpy as np
[tile_batch->[_tile_batch],_maybe_tensor_gather_helper->[_check_maybe],BeamSearchDecoder->[output_dtype->[BeamSearchDecoderOutput,_rnn_output_size],step->[_merge_batch_beams,_split_batch_beams],_maybe_split_batch_beams->[_check_maybe,_split_batch_beams],finalize->[FinalBeamSearchDecoderOutput],initialize->[BeamSearchDecoderState],output_size->[BeamSearchDecoderOutput],_maybe_merge_batch_beams->[_check_maybe,_merge_batch_beams]],_beam_search_step->[BeamSearchDecoderOutput,BeamSearchDecoderState]]
A decoder that performs beam search. Returns a BeamSearchDecoderOutput object for the given sequence of non - empty items.
please remove this (already imported below). I think this may caused by the rebase.
@@ -268,12 +268,6 @@ public class DefaultHttp2ConnectionEncoder implements Http2ConnectionEncoder { // There were previous DATA frames sent. We need to send the HEADERS only after the most // recent DATA frame to keep them in sync... - // Wrap the original promise in an aggregate which will complete the original promise - // once the headers are written. - final ChannelPromiseAggregator aggregatePromise = new ChannelPromiseAggregator(promise); - final ChannelPromise innerPromise = ctx.newPromise(); - aggregatePromise.add(innerPromise); - // Only write the HEADERS frame after the previous DATA frame has been written. final Http2Stream theStream = stream; lastDataWrite.addListener(new ChannelFutureListener() {
[DefaultHttp2ConnectionEncoder->[lastWriteForStream->[lastWriteForStream],Builder->[build->[DefaultHttp2ConnectionEncoder]],writeSettings->[writeSettings],writePriority->[writePriority],newBuilder->[Builder],writeWindowUpdate->[writeWindowUpdate],initialOutboundWindowSize->[initialOutboundWindowSize],configuration->[configuration],writeRstStream->[writeRstStream],writeData->[writeData],writePushPromise->[writePushPromise],writePing->[writePing],updateOutboundWindowSize->[updateOutboundWindowSize],writeGoAway->[writeGoAway],writeFrame->[writeFrame],writeSettingsAck->[writeSettingsAck],close->[close],writeHeaders->[operationComplete->[writeHeaders],writeHeaders,lastWriteForStream]]]
Sends the given headers to the given stream. A promise that is resolved when the headers have been written.
+1. I thought this had already been removed ;)
@@ -120,6 +120,10 @@ AmpDateCountdown['props'] = { 'whenEnded': {attr: 'when-ended', type: 'string'}, 'locale': {attr: 'locale', type: 'string'}, 'biggestUnit': {attr: 'biggest-unit', type: 'string'}, + 'countUp': { + attrs: ['data-count-up'], + parseAttrs: (el) => el.hasAttribute('data-count-up'), + }, }; /**
[No CFG could be retrieved]
Provides a countdown timer for the given element.
This is to match the behavior in `0.1` which checks if the attribute `data-count-up` exists at all. So `data-count-up="false"` should still register as the component SHOULD do the count up functionality. Doing `attr: 'data-count-up', type: 'boolean'` does not catch this use case properly. Can also consider updating `0.1` if this behavior seems to strange.
@@ -73,12 +73,11 @@ class PubSubSource(dataflow_io.NativeSource): 'PubSubSource is not supported in local execution.') -class PubSubSink(dataflow_io.NativeSink): +class _PubSubSink(dataflow_io.NativeSink): """Sink for writing to a given Cloud Pub/Sub topic.""" - def __init__(self, topic, coder=coders.StrUtf8Coder()): + def __init__(self, topic): self.topic = topic - self.coder = coder @property def format(self):
[PubSubSource->[display_data->[DisplayDataItem],__init__->[StrUtf8Coder],reader->[NotImplementedError]],PubSubSink->[display_data->[DisplayDataItem],writer->[NotImplementedError],__init__->[StrUtf8Coder]]]
Initialize the object with a topic and a coder.
For the same reason as above, can you rename this to `_PubSubByteSink`?
@@ -138,6 +138,8 @@ class Charm(Package): for libdir in spec["mpi"].libs.directories ]) if "+papi" in spec: + if "+tracing" not in spec: + raise InstallError("+papi variant requires +tracing variant.") options.extend(["papi", "--basedir=%s" % spec["papi"].prefix]) if "+smp" in spec: if 'backend=multicore' in spec:
[Charm->[install->[join_path,walk,basename,machine,append,build,Executable,copy2,extend,startswith,rename,format,islink,remove,InstallError,rmtree],variant,depends_on,version,patch]]
Installs the Charm package for the given specification, checking variant compatibility and building the appropriate machine target; raises InstallError when incompatible variants are selected.
make it a conflict
@@ -281,11 +281,8 @@ public class ShortcutManager implements NativePreviewHandler, if (handleKeyDown(event.getNativeEvent())) { event.cancel(); - resetKeyBuffer(); - } - else - { - updateKeyBuffer(event.getNativeEvent()); + keyBuffer_.clear(); + events_.fireEvent(new RStudioCommandExecutedEvent()); } } }
[ShortcutManager->[onPreviewNativeEvent->[cancel,updateKeyBuffer,resetKeyBuffer],dispatch->[dispatch,isEnabled],onKeyDown->[cancel,updateKeyBuffer,resetKeyBuffer],updateKeyBuffer->[cancel,resetKeyBuffer,isEnabled],register->[register],ShortcutManager]]
onPreviewNativeEvent - This method is called when a native preview event is received.
This event should probably be renamed, since currently it's only fired in response to command execution via a keyboard shortcut. Alternatively, if possible, it should be inserted in a place so that it really is executed after an AppCommand is executed.
@@ -9,10 +9,6 @@ import ( "github.com/smartcontractkit/chainlink/core/store/models" ) -func (rs *RegistrySynchronizer) OnConnect() {} - -func (rs *RegistrySynchronizer) OnDisconnect() {} - func (rs *RegistrySynchronizer) JobID() models.JobID { return models.JobID{} }
[HandleLog->[Deliver,DecodedLog,Hex,TypeOf,ValueOf,Debugw,Warnf,IsNil,Errorf,RawLog]]
OnConnect and OnDisconnect are no-op connection lifecycle callbacks on RegistrySynchronizer.
I look forward to the day that run manager v1 is removed and we can axe these callbacks forever.
@@ -50,7 +50,7 @@ public class SegmentLoaderConfig private int announceIntervalMillis = 0; // do not background announce @JsonProperty("numLoadingThreads") - private int numLoadingThreads = JvmUtils.getRuntimeInfo().getAvailableProcessors(); + private int numLoadingThreads = Math.min(1, JvmUtils.getRuntimeInfo().getAvailableProcessors() / 6); @JsonProperty("numBootstrapThreads") private Integer numBootstrapThreads = null;
[SegmentLoaderConfig->[withLocations->[SegmentLoaderConfig]]]
Configuration for a segment loader. This class is a public interface to the object that is passed to the SegmentManager.
Oops, I think this should be `Math.max` so the value isn't always 0 or 1?
@@ -1936,7 +1936,12 @@ public class TemplateManagerImpl extends ManagerBase implements TemplateManager, UserVmVO userVm = _userVmDao.findById(vmId); if (userVm != null) { _userVmDao.loadDetails(userVm); - details.putAll(userVm.getDetails()); + Map<String, String> vmDetails = userVm.getDetails(); + vmDetails = vmDetails.entrySet() + .stream() + .filter(map -> map.getValue() != null) + .collect(Collectors.toMap(map -> map.getKey(), map -> map.getValue())); + details.putAll(vmDetails); } } }
[TemplateManagerImpl->[addTemplateToZone->[addTemplateToZone],deleteIso->[delete,templateIsDeleteable,getAdapter],registerTemplate->[getAdapter],extract->[extract],copyTemplate->[copy,getImageStore],attachISOToVM->[attachISOToVM,prepareIso],prepareTemplateInAllStoragePools->[prepareTemplateInOneStoragePool],deleteTemplate->[delete,getAdapter],registerIso->[getAdapter],delete->[delete,getAdapter],registerTemplateForPostUpload->[registerPostUploadInternal,getAdapter],registerIsoForPostUpload->[registerPostUploadInternal,getAdapter]]]
Creates a private template record from the specified volume or snapshot, filtering out VM details whose values are null; throws a CloudRuntimeException if the template cannot be found or created.
@shwstppr changes LGTM, to allow template creation from volume. Do you know any reason to keep some keys (eg. _kvm.vnc.address_) with a null value? Ideally, values in the details table shouldn't accept null.
@@ -126,7 +126,7 @@ func CreateConfigToOCISpec(config *CreateConfig) (*spec.Spec, error) { //nolint g.RemoveMount("/dev/mqueue") devMqueue := spec.Mount{ Destination: "/dev/mqueue", - Type: "bind", + Type: bindMount, Source: "/dev/mqueue", Options: []string{"bind", "nosuid", "noexec", "nodev"}, }
[GetVolumesFrom,IsRootless,SetLinuxResourcesCPURealtimePeriod,RemoveHostname,SetLinuxResourcesMemoryReservation,IsContainer,AddLinuxSysctl,AddLinuxGIDMapping,AddProcessRlimits,ParseIDMapFile,Hostname,SetLinuxResourcesPidsLimit,SetLinuxResourcesCPUCpus,Wrap,GetVolumeMounts,HasPrefix,SetLinuxResourcesMemorySwap,SetLinuxResourcesCPUPeriod,SetProcessApparmorProfile,AddLinuxMaskedPaths,ParseUlimit,TweakCapabilities,Clean,IsHost,SetLinuxResourcesCPUShares,ParseTmpfsOptions,New,AddLinuxUIDMapping,Errorf,CreateBlockIO,SetLinuxResourcesMemorySwappiness,AddAnnotation,SetupPrivileged,AddProcessEnv,RemoveMount,Debugf,SetLinuxResourcesCPUQuota,SplitN,Wrapf,SetProcessNoNewPrivileges,SetLinuxResourcesCPUMems,Debug,IsNone,SetLinuxResourcesMemoryKernel,SetLinuxResourcesMemoryDisableOOMKiller,AddPrivilegedDevices,ToUpper,GetMounts,SetProcessArgs,AddMount,SetProcessTerminal,Split,SetProcessOOMScoreAdj,IsBridge,SetHostname,initFSMounts,IsSlirp4netns,SetRootReadonly,AddOrReplaceLinuxNamespace,RemoveLinuxNamespace,IsUserDefined,SetProcessCwd,AddLinuxReadonlyPaths,Abs,SetLinuxResourcesCPURealtimeRuntime,SetLinuxResourcesMemoryLimit]
Helper that constructs the bind mountpoints (such as /dev/mqueue) and adds the necessary mounts and cgroup settings to the OCI spec when required.
we could probably just drop the `Type` altogether. The OCI runtime looks at the options to see if it is a bind mount (either contains `bind` or `rbind`)
@@ -833,6 +833,14 @@ abstract class AbstractChannelHandlerContext extends DefaultAttributeMap task = WriteTask.newInstance(next, m, promise); } safeExecute(executor, task, promise, m); + if (!flush) { + /** + * Whenever there is a write, if auto-flush is enabled, the eventloop is required to be woken up, + * otherwise the write may never be flushed or have latency. + * This method just wakes up the eventloop once till the next auto-flush task is run. + */ + pipeline.wakeUpForAutoFlushIfRequired(); + } } }
[AbstractChannelHandlerContext->[invokeWrite->[write],channel->[channel],invokeUserEventTriggered->[run->[invokeUserEventTriggered],executor,fireUserEventTriggered,invokeUserEventTriggered],connect->[executor,connect],invokeChannelReadComplete->[run->[invokeChannelReadComplete],executor,invokeChannelReadComplete,fireChannelReadComplete],write->[executor,invokeWriteAndFlush,write,invokeWrite],invokeRead->[read],invokeDisconnect->[disconnect],close->[executor,close],invokeChannelRegistered->[run->[invokeChannelRegistered],executor,invokeChannelRegistered,fireChannelRegistered],invokeFlush0->[flush],invokeChannelWritabilityChanged->[run->[invokeChannelWritabilityChanged],executor,invokeChannelWritabilityChanged,fireChannelWritabilityChanged],invokeDeregister->[deregister],voidPromise->[voidPromise],newSucceededFuture->[executor,channel],invokeConnect->[connect],WriteTask->[newObject->[WriteTask],newInstance->[init]],invokeChannelActive->[run->[invokeChannelActive],executor,fireChannelActive,invokeChannelActive],deregister->[executor,deregister],disconnect->[executor,disconnect],newFailedFuture->[executor,channel],newProgressivePromise->[executor,channel],invokeWriteAndFlush->[invokeWrite0,writeAndFlush,invokeFlush0],notifyHandlerException->[invokeExceptionCaught],attr->[attr],WriteAndFlushTask->[newObject->[WriteAndFlushTask],write->[invokeFlush,write],newInstance->[init]],newPromise->[executor,channel],invokeBind->[bind],invokeExceptionCaught->[run->[invokeExceptionCaught],executor,invokeExceptionCaught,fireExceptionCaught],invokeChannelUnregistered->[run->[invokeChannelUnregistered],executor,invokeChannelUnregistered,fireChannelUnregistered],bind->[executor,bind],writeAndFlush->[write,writeAndFlush],invokeWrite0->[write],invokeChannelInactive->[run->[invokeChannelInactive],executor,fireChannelInactive,invokeChannelInactive],AbstractWriteTask->[write->[invokeWrite],run->[write]],invokeFlush->[flush],hasAttr->[hasAttr],invokeChannelRead->[run->[invokeChannelRead],execu
tor,fireChannelRead,invokeChannelRead],invokeClose->[close],toString->[channel],validatePromise->[channel],read->[executor],flush->[executor]]]
Write the given message to the channel.
Hmm ... can't we do that in the channel handler in the `write` method? I think it would be best if we kept the auto-flush stuff contained in a channel handler.
@@ -2611,7 +2611,7 @@ namespace System.Windows.Forms int result = NativeMethods.S_FALSE; try { - // The activityId can be any string. It cannot be null. It isn�t used currently. + // The activityId can be any string. It cannot be null. It isnt used currently. result = UnsafeNativeMethods.UiaRaiseNotificationEvent( this, notificationKind,
[AccessibleObject->[GetItem->[GetItem],GetPropertyValue->[GetPropertyValue],accDoDefaultAction->[accDoDefaultAction,DoDefaultAction],get_accRole->[get_accRole],get_accState->[get_accState],get_accDescription->[get_accDescription],set_accName->[set_accName],GetMethods->[GetMethods],SysNavigate->[GetSysChild,AsVariant],Reset->[Reset],GetFields->[GetFields],get_accHelp->[get_accHelp],GetColumnHeaders->[GetColumnHeaders],SetValue->[SetValue],SetFocus->[SetFocus],Navigate->[FragmentNavigate],GetMember->[GetMember],set_accValue->[set_accValue],Object->[Navigate],GetProperties->[GetProperties],InvokeMember->[GetMember,InvokeMember],Skip->[Skip],accLocation->[accLocation],GetMembers->[GetMembers],GetWindow->[GetWindow],get_accHelpTopic->[GetHelpTopic,get_accHelpTopic],accSelect->[Select,accSelect],ElementProviderFromPoint->[ElementProviderFromPoint],Select->[Select,accSelect],Collapse->[Collapse],Clone->[Clone],UseStdAccessibleObjects->[UseStdAccessibleObjects],get_accKeyboardShortcutInternal->[get_accKeyboardShortcut],Toggle->[Toggle],Invoke->[Invoke],GetPatternProvider->[IsPatternSupported],GetEmbeddedFragmentRoots->[GetEmbeddedFragmentRoots],Expand->[Expand],GetFocus->[GetFocus],GetRowHeaderItems->[GetRowHeaderItems],GetColumnHeaderItems->[GetColumnHeaderItems],get_accChild->[get_accChild],get_accDefaultAction->[get_accDefaultAction],get_accNameInternal->[get_accName],ContextSensitiveHelp->[ContextSensitiveHelp],Next->[Next],EnumVariantObject->[NextFromSystem->[Next],Reset->[Reset],GotoItem->[Next,Skip,Reset],Skip->[Skip],Next->[GetSysChildOrder,GetChildCount]],DoDefaultAction->[accDoDefaultAction,DoDefaultAction],get_accValue->[get_accValue],GetRowHeaders->[GetRowHeaders],QueryService->[IsIAccessibleExSupported],GetChildId,GetChildCount],InternalAccessibleObject->[GetItem->[GetItem,AsNativeAccessible],get_accSelection->[AsNativeAccessible],get_accFocus->[AsNativeAccessible],Toggle->[Toggle],get_accKeyboardShortcut->[get_accKeyboardShortcut],GetPropertyValue->[GetPropert
yValue],accDoDefaultAction->[accDoDefaultAction],get_accRole->[get_accRole],get_accState->[get_accState],get_accDescription->[get_accDescription],set_accName->[set_accName],GetMethods->[GetMethods],Reset->[Reset],accHitTest->[AsNativeAccessible,accHitTest],GetFields->[GetFields],GetColumnHeaderItems->[GetColumnHeaderItems,AsArrayOfNativeAccessibles],GetColumnHeaders->[GetColumnHeaders,AsArrayOfNativeAccessibles],SetValue->[SetValue],SetFocus->[SetFocus],Navigate->[AsNativeAccessible,Navigate],GetMember->[GetMember],set_accValue->[set_accValue],get_accName->[get_accName],GetObjectForChild->[GetObjectForChild],GetRowHeaderItems->[GetRowHeaderItems,AsArrayOfNativeAccessibles],InvokeMember->[InvokeMember],Skip->[Skip],accLocation->[accLocation],GetMembers->[GetMembers],GetWindow->[GetWindow],get_accHelpTopic->[get_accHelpTopic],accSelect->[accSelect],ElementProviderFromPoint->[ElementProviderFromPoint,AsNativeAccessible],Select->[Select],Collapse->[Collapse],accNavigate->[accNavigate,AsNativeAccessible],get_accParent->[AsNativeAccessible],AsArrayOfNativeAccessibles->[AsNativeAccessible],Clone->[Clone],get_accHelp->[get_accHelp],Invoke->[Invoke],GetProperties->[GetProperties],GetPatternProvider->[GetPatternProvider],GetEmbeddedFragmentRoots->[GetEmbeddedFragmentRoots,AsArrayOfNativeAccessibles],get_accChild->[AsNativeAccessible,get_accChild],Next->[Next],ConvertReturnedElement->[ConvertReturnedElement],get_accDefaultAction->[get_accDefaultAction],ContextSensitiveHelp->[ContextSensitiveHelp],GetRuntimeId->[GetRuntimeId],GetFocus->[AsNativeAccessible,GetFocus],GetSelection->[GetSelection,AsArrayOfNativeAccessibles],Expand->[Expand],DoDefaultAction->[DoDefaultAction],get_accValue->[get_accValue],GetRowHeaders->[GetRowHeaders,AsArrayOfNativeAccessibles],QueryService->[QueryService],AsNativeAccessible]]
Raise an AutomationNotification.
Consider `is not`
@@ -194,7 +194,9 @@ class PuppeteerController { const nodeListHandle = await frame.waitForFunction( (root, selector) => { const nodeList = root./*OK*/ querySelectorAll(selector); - return nodeList.length > 0 ? nodeList : null; + return nodeList.length > 0 + ? Array.prototype.slice.call(nodeList) + : null; }, {timeout: DEFAULT_WAIT_TIMEOUT}, root,
[PuppeteerController->[type->[type],maybeInstallXpath_->[evaluate],evaluate->[evaluate]]]
Finds elements in the current frame.
`toArray` (or `Array.from`) would be more clear on the intent.
@@ -9,10 +9,10 @@ class GobiertoBudgets::BudgetsController < GobiertoBudgets::ApplicationControlle @site_stats = GobiertoBudgets::SiteStats.new site: @site, year: @year - @top_income_budget_lines = GobiertoBudgets::TopBudgetLine.limit(5).where(site: current_site, year: @year, place: @site.place, kind: GobiertoBudgets::BudgetLine::INCOME).all - @top_expense_budget_lines = GobiertoBudgets::TopBudgetLine.limit(5).where(site: current_site, year: @year, place: @site.place, kind: GobiertoBudgets::BudgetLine::EXPENSE).all - @place_budget_lines = GobiertoBudgets::BudgetLine.all(where: { site: current_site, place: @place, level: 1, year: @year, kind: @kind, area_name: @area_name }) - @interesting_expenses = GobiertoBudgets::BudgetLine.all(where: { site: current_site, place: @place, level: 2, year: @year, kind: GobiertoBudgets::BudgetLine::EXPENSE, area_name: @interesting_area }) + @top_income_budget_lines = GobiertoBudgets::TopBudgetLine.limit(5).where(site: current_site, year: @year, kind: GobiertoBudgets::BudgetLine::INCOME).all + @top_expense_budget_lines = GobiertoBudgets::TopBudgetLine.limit(5).where(site: current_site, year: @year, kind: GobiertoBudgets::BudgetLine::EXPENSE).all + @place_budget_lines = GobiertoBudgets::BudgetLine.all(where: { site: current_site, level: 1, year: @year, kind: @kind, area_name: @area_name }) + @interesting_expenses = GobiertoBudgets::BudgetLine.all(where: { site: current_site, level: 2, year: @year, kind: GobiertoBudgets::BudgetLine::EXPENSE, area_name: @interesting_area }) @sample_budget_lines = (@top_income_budget_lines + @top_expense_budget_lines).sample(3)
[guide->[last,new],load_year->[gobierto_budgets_budgets_path,redirect_to,nil?,to_i,last],index->[all,new,any_data?,budgets_data_updated_at,budgets_execution_summary,sample,area_name],load_place->[nil?,place],before_action]
Loads the top and interesting budget lines for the index action; returns true if there is a budget line with any data.
Line is too long. [187/180]
@@ -423,4 +423,10 @@ public class LobbyMenu extends JMenuBar { parentMenu.add(menuFileExit); } } + + private static Date toDate(final TemporalAmount amount) { + return !amount.equals(ChronoUnit.FOREVER.getDuration()) + ? Date.from(LocalDateTime.ofInstant(Instant.now(), ZoneOffset.UTC).plus(amount).toInstant(ZoneOffset.UTC)) + : null; + } }
[LobbyMenu->[addUpdateAccountMenu->[addActionListener,isAnonymousLogin,updateAccountDetails,JMenuItem,add,setEnabled],createAdminMenu->[add,createDiagnosticsMenu,createToolboxMenu,JMenu],addHelpMenu->[newOpenUrlConfirmationDialog,addActionListener,add,JMenuItem],updateAccountDetails->[getPassword,getUserInfo,getName,show,getEmail,nullToEmpty,emptyToNull,getUserName,getRemote,crypt,showMessageDialog,hashPasswordWithSalt,newUpdatePanel,save,updateUser,load],createAccountMenu->[add,addUpdateAccountMenu,JMenu],createFileMenu->[add,addExitMenu,JMenu],addBanUsernameMenu->[addActionListener,banUsername,getByName,showConfirmDialog,getModeratorControllerName,plusMillis,from,logQuietly,Node,isValidUserName,showInputDialog,getRemote,JMenuItem,add,length,setEnabled,requestTimespanSupplication],addUnbanMacAddressMenu->[addActionListener,getByName,showConfirmDialog,from,matches,logQuietly,Node,banMac,showInputDialog,getRemote,JMenuItem,add,length,startsWith,setEnabled,getModeratorControllerName],addDisplayPlayersInformationMenu->[Font,Thread,start,getOnlinePlayers,toString,StringBuilder,setHorizontalScrollBarPolicy,setEnabled,setViewportView,getModeratorControllerName,setWrapStyleWord,setDefaultCloseOperation,setText,of,JTextArea,invokeLater,setBackground,setLocation,setLayout,setLineWrap,setFont,BorderLayout,setEditable,getBackground,append,getPlayersThatLeft_Last10,setVisible,getRemote,JMenuItem,setFocusable,JButton,setAutoscrolls,setResizable,setVerticalScrollBarPolicy,addActionListener,setMinimumSize,setLocationRelativeTo,JDialog,dispose,Dimension,setSize,add,JScrollPane],addExitMenu->[of,isMac,JMenuItem,add,shutdown],createToolboxMenu->[JMenu,addBanUsernameMenu,addUnbanMacAddressMenu,addUnbanUsernameMenu,add,addBanMacAddressMenu],addChatTimeMenu->[setSelected,addActionListener,isSelected,JCheckBoxMenuItem,add,setShowChatTime],createSettingsMenu->[JMenu,addToMenu,addGlobalSoundSwitchMenu,addChatTimeMenu,add],addUnbanUsernameMenu->[addActionListener,banUsername,getByName,showC
onfirmDialog,from,logQuietly,Node,isValidUserName,showInputDialog,getRemote,JMenuItem,add,length,setEnabled,getModeratorControllerName],addBanMacAddressMenu->[addActionListener,getByName,showConfirmDialog,getModeratorControllerName,plusMillis,from,matches,logQuietly,Node,banMac,showInputDialog,getRemote,JMenuItem,add,length,startsWith,setEnabled,requestTimespanSupplication],createHelpMenu->[add,addHelpMenu,JMenu],createDiagnosticsMenu->[add,addDisplayPlayersInformationMenu,JMenu],requestTimespanSupplication->[toArray,equals,showOptionDialog,showInputDialog,parseLong,add],createAdminMenu,createAccountMenu,createFileMenu,isMac,isAdmin,createSettingsMenu,createHelpMenu,registerMacShutdownHandler]]
Add an Exit menu item to the given parent menu.
Reading through this a second time, I realized that this method is doing more than simply converting a `TemporalAmount` to a `Date`. `TemporalAmount` represents a duration, while a `Date` is an absolute point in time, so it's really adding the given duration to "now," and then doing the conversion. That is, this function isn't "pure" in the sense that it has a hidden dependency on global state. Capturing that in the method name would be helpful. Is there a better method name here? `nowPlusDuration()`? `nowOffsetBy()`? Those could be a bit too generic in this context. Maybe something more context-dependent like `getBanExpirationDate()`? Meh, I hate naming things...
@@ -14,6 +14,13 @@ import ( "github.com/sirupsen/logrus" ) +// logDrivers stores the currently available log drivers, do not modify +var logDrivers []string + +func init() { + logDrivers = append(logDrivers, define.KubernetesLogging, define.NoLogging) +} + // Log is a runtime function that can read one or more container logs. func (r *Runtime) Log(ctx context.Context, containers []*Container, options *logs.LogOptions, logChannel chan *logs.LogLine) error { for _, ctr := range containers {
[ReadLog->[Wrapf,LogDriver,readFromJournal,readFromLogFile],Log->[ReadLog],readFromLogFile->[Partial,Events,Done,StopAtEOF,Add,LogPath,IsNotExist,GetLogFile,NewLogLine,Cause,Errorf,State,Since,ID,Until,Wrapf,Name,Sprintf,Sleep]]
Log reads the logs of the given containers.
Isn't journald supported here?
@@ -8,8 +8,8 @@ class TestAddOp(unittest.TestCase): def setUp(self): self.type = "add_two" - self.X = numpy.random.random((342, 345)).astype("float32") - self.Y = numpy.random.random((342, 345)).astype("float32") + self.X = numpy.random.random((102, 105)).astype("float32") + self.Y = numpy.random.random((102, 105)).astype("float32") self.Out = self.X + self.Y
[TestAddOp->[setUp->[random]],main]
Set up the object with random values.
Sometimes GPU memory is not enough for the unit test, so we reduce the size.
@@ -107,7 +107,7 @@ export default (state: UserManagementState = initialState, action): UserManageme ...state, updating: false, updateSuccess: true, - user: action.payload.data + user: {} }; case SUCCESS(ACTION_TYPES.DELETE_USER): return {
[No CFG could be retrieved]
Reducer that returns the next UserManagement state for the given action, including the actions that fetch the list of users.
@sazeez maybe you can remove all the return statements here, as they are the same — only the case label differs.
@@ -278,7 +278,10 @@ class ConditionalRandomField(torch.nn.Module): return torch.sum(log_numerator - log_denominator) - def viterbi_tags(self, logits: torch.Tensor, mask: torch.Tensor) -> List[List[int]]: + def viterbi_tags(self, + logits: torch.Tensor, + mask: torch.Tensor, + keep_scores: bool = False) -> Union[List[List[int]], Tuple[List[List[int]], List[float]]]: """ Uses viterbi algorithm to find most likely tags for the given inputs. If constraints are applied, disallows all other transitions.
[ConditionalRandomField->[forward->[_input_likelihood,_joint_likelihood]]]
Computes the log likelihood of the input sequence; viterbi_tags uses the Viterbi algorithm to find the most likely tags for the given inputs.
I don't like functions that return union types, they make for a muddy API. I would rather just change the function to return (say) `List[Tuple[List[int], float]]` always, and then modify the handful of downstream consumers to behave gracefully and ignore the scores if they don't want them.
@@ -90,7 +90,10 @@ namespace Dynamo .Select(x => x.CachedValue); var geoms = new List<GeometryObject>(); - values.ToList().ForEach(md=>RevitGeometryFromMirrorData(md, ref geoms)); + foreach (var value in values) + { + RevitGeometryFromMirrorData(value, ref geoms); + } Draw(geoms); }
[RevitVisualizationManager->[RevitGeometryFromMirrorData->[RevitGeometryFromMirrorData]]]
Collects Revit geometry from the cached mirror data of the selected nodes and draws it.
This was introduced just as a readability/performance fix. I don't think it is related.
@@ -480,7 +480,12 @@ class Stage(object): else: print_errors(errors) - err_msg = 'All fetchers failed for {0}'.format(self.name) + # Simplify the error name by removing the prefix when present + name = self.name if not self.name.startswith(stage_prefix) else \ + self.name.replace(stage_prefix, '') + err = 'Manual download is required' if manual_download else \ + 'All fetchers failed' + err_msg = '{0} for {1}'.format(err, name) self.fetcher = self.default_fetcher raise fs.FetchError(err_msg, None)
[get_stage_root->[_resolve_paths,_first_accessible_path],get_checksums_for_versions->[fetch,Stage],_first_accessible_path->[_create_stage_root],Stage->[check->[check],fetch->[generate_fetchers,fetch,print_errors],cache_mirror->[check,fetch],__init__->[get_stage_root]],StageComposite->[__exit__->[__exit__],__enter__->[__enter__]],purge->[get_stage_root]]
Downloads an archive or checks out code from a repository. Yields a sequence of objects from the fetcher.
What do you think if, instead of passing a Boolean like `manual_download`, we permit packages to pass a custom error message (a string)? If I am not missing something that would permit, without a lot of changes in the current logic, to have packages that can point you at how to obtain the software in the download error message etc.
@@ -372,7 +372,7 @@ bootstrap4 = { TEST_RUNNER = '' ALLOWED_HOSTS = get_list( - os.environ.get('ALLOWED_HOSTS', 'localhost,127.0.0.1')) + os.environ.get('ALLOWED_HOSTS', '*')) SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
[get_list->[strip,split],get_bool_from_env->[literal_eval,ValueError,format],get_host->[get_current],pgettext_lazy,getenv,join,config,literal_eval,int,get_list,insert,CACHES,bool,parse,append,dirname,setdefault,normpath,get_currency_fraction,_,get,get_bool_from_env]
Django settings helpers: get_list parses a comma-separated environment variable into a list, and get_bool_from_env reads a boolean value from the environment.
I'm not sure what are the downsides of having a wildcard as ALLOWED_HOSTS?
@@ -2480,6 +2480,10 @@ describe('$location', function() { expect(parseLinkAndReturn(locationUrl, 'someIgnoredAbsoluteHref', '#test')).toEqual('http://server/pre/otherPath#test'); }); + it('should cope with double slashes in the path', function() { + expect(parseLinkAndReturn(locationUrl, 'http://server/pre///other/path')).toEqual('http://server/pre///other/path'); + }); + it('should complain if no base tag present', function() { module(function($locationProvider) {
[No CFG could be retrieved]
Registers a test module with the given $location configuration and checks that links and URLs are parsed as expected.
I wonder if a more integration-style test that checks how the app URL is used to resolve a template would be useful? With this change, we still end up with multiple slashes in the URL, so we could still accidentally drop a path segment if we later parse it using `<a href>` again (not sure we do, but protecting from it would be nice).
@@ -80,15 +80,16 @@ class Cuda(Package): depends_on('libxml2', when='@10.1.243:') def setup_build_environment(self, env): - env.set('CUDAHOSTCXX', self.compiler.cxx) if self.spec.satisfies('@10.1.243:'): libxml2_home = self.spec['libxml2'].prefix env.set('LIBXML2HOME', libxml2_home) env.append_path('LD_LIBRARY_PATH', libxml2_home.lib) + def setup_dependent_build_environment(self, env, dependent_spec): + env.set('CUDAHOSTCXX', self.compiler.cxx) + def setup_run_environment(self, env): env.set('CUDA_HOME', self.prefix) - env.set('CUDAHOSTCXX', self.compiler.cxx) def install(self, spec, prefix): if os.path.exists('/tmp/cuda-installer.log'):
[Cuda->[install->[join_path,remove,die,satisfies,append,which,runfile,chmod,glob,exists],setup_run_environment->[set],setup_build_environment->[append_path,set,satisfies],libs->[append,find_libraries,split,LibraryList],system,depends_on,machine,conflicts,get,format,items,version]]
Setup the build environment for the CUDA build. This is the main entry point for the toolkit.
should probably be the compiler of the dependent spec?
@@ -480,8 +480,9 @@ class TensorFlowEstimator(BaseEstimator): if not os.path.exists(model_def_filename): raise ValueError("Restore folder doesn't contain model definition.") # list of parameters that are allowed to be reconfigured - reconfigurable_params = ['config_addon'] - with open(model_def_filename) as fmodel: + reconfigurable_params = ['_config'] + _config = config + with gfile.Open(model_def_filename) as fmodel: model_def = json.loads(fmodel.read()) # TensorFlow binding requires parameters to be strings not unicode. # Only issue in Python2.
[TensorFlowEstimator->[partial_fit->[fit],save->[save,_write_with_backup],predict_proba->[_predict],get_tensor_value->[get_tensor],predict->[_predict],fit->[_setup_summary_writer,_setup_training],restore->[_restore,TensorFlowEstimator]]]
Restores model from given path.
We'll probably need to check this in restore/save tests.
@@ -57,6 +57,9 @@ import org.apache.hadoop.hdds.annotation.InterfaceStability; import org.apache.hadoop.hdds.conf.ConfigurationSource; import org.apache.hadoop.hdds.conf.OzoneConfiguration; import org.apache.hadoop.hdds.utils.LegacyHadoopConfigurationSource; +import org.apache.hadoop.http.FilterContainer; +import org.apache.hadoop.http.FilterInitializer; +import org.apache.hadoop.http.lib.StaticUserWebFilter; import org.apache.hadoop.jmx.JMXJsonServlet; import org.apache.hadoop.log.LogLevel; import org.apache.hadoop.security.AuthenticationFilterInitializer;
[HttpServer2->[getAttribute->[getAttribute],join->[join],addDefaultApps->[addNoCacheFilter],userHasAdministratorAccess->[getAttribute],addInternalServlet->[setPathSpec,setName,addInternalServlet,addServlet],setAttribute->[setAttribute],bindForSinglePort->[bindListener,constructBindException],Builder->[createHttpsChannelConnector->[createHttpChannelConnector],loadSSLConfiguration->[getPasswordString],build->[HttpServer2,loadSSLConfiguration]],createWebAppContext->[build],StackServlet->[doGet->[isInstrumentationAccessAllowed]],getFilterHolder->[setName],start->[start],getEnum->[toString],isInstrumentationAccessAllowed->[getAttribute],bindForPortRange->[bindListener,constructBindException],stop->[stop],setHeaders->[toString],addContext->[addNoCacheFilter],hasAdministratorAccess->[getAttribute],QuotingInputFilter->[initHttpHeaderMap->[hasMoreElements,nextElement],doFilter->[doFilter,RequestQuoter],RequestQuoter->[getParameter->[getParameter],getServerName->[getServerName],getParameterValues->[getParameterValues],getParameterMap->[getParameterMap],getRequestURL->[toString],getParameterNames->[hasMoreElements->[hasMoreElements],nextElement->[nextElement],getParameterNames]]],constructBindException->[getPort],defineFilter->[addFilter,defineFilter],openListeners->[bindForPortRange,bindForSinglePort,getPort],addFilterPathMapping->[setPathSpec],toString->[toString],initSpnego->[defineFilter],constructSecretProvider->[constructSecretProvider]]]
Imports a single object from the Hibernate package. region lag - import.
Are these classes copied from Hadoop being removed and dependency on Hadoop being re-introduced intentionally? CC @elek
@@ -13,7 +13,7 @@ if (isset($totalarray['pos'])) { while ($i < $totalarray['nbfield']) { $i++; if (!empty($totalarray['pos'][$i])) { - print '<td class="right">'.price($totalarray['val'][$totalarray['pos'][$i]]).'</td>'; + print '<td class="right">'.price(!empty($totalarray['val'][$totalarray['pos'][$i]])?:0).'</td>'; } else { if ($i == 1) { if (is_null($limit) || $num < $limit) {
[textwithpicto,transnoentitiesnoconv,trans]
<?php Show total line of totalizable fields.
Avoid using ?:, result is not as expected.
@@ -16532,13 +16532,13 @@ example usage control.Visible = false; hwndParent = IntPtr.Zero; - if (inPlaceUiWindow != null && UnsafeNativeMethods.IsComObject(inPlaceUiWindow)) { - UnsafeNativeMethods.ReleaseComObject(inPlaceUiWindow); + if (inPlaceUiWindow != null && Marshal.IsComObject(inPlaceUiWindow)) { + Marshal.ReleaseComObject(inPlaceUiWindow); inPlaceUiWindow = null; } - if (inPlaceFrame != null && UnsafeNativeMethods.IsComObject(inPlaceFrame)) { - UnsafeNativeMethods.ReleaseComObject(inPlaceFrame); + if (inPlaceFrame != null && Marshal.IsComObject(inPlaceFrame)) { + Marshal.ReleaseComObject(inPlaceFrame); inPlaceFrame = null; } }
[Control->[OnSystemColorsChanged->[OnSystemColorsChanged,Invalidate],UpdateRoot->[GetTopLevel],OnFontChanged->[GetAnyDisposingInHierarchy,Font,DisposeFontHandle,GetStyle,Invalidate],AccessibilityNotifyClients->[AccessibilityNotifyClients],OnParentFontChanged->[OnFontChanged],AutoValidate->[AutoValidate],OnParentBackColorChanged->[OnBackColorChanged],WmKeyChar->[ProcessKeyMessage,DefWndProc],WmWindowPosChanging->[ActiveXUpdateBounds,DefWndProc],WmMouseHover->[OnMouseHover,DefWndProc],AdjustWindowRectEx->[AdjustWindowRectEx],ScaleBitmapLogicalToDevice->[ScaleBitmapLogicalToDevice],OnDragOver->[OnDragOver],Close->[Close],GetNeighboringToolsRectangles->[GetNeighboringToolsRectangles],CreateControl->[CreateHandle,CreateControl],WmParentNotify->[DefWndProc,ReflectMessageInternal],WmExitMenuLoop->[DefWndProc],ScaleFont->[DisposeFontHandle],OnParentEnabledChanged->[OnEnabledChanged,GetState],Refresh->[Invalidate],WmNotifyFormat->[DefWndProc,ReflectMessageInternal],ResetPadding->[ResetPadding],DefWndProc->[DefWndProc],SetVisibleCore->[SelectNextIfFocused,SetState,OnVisibleChanged,GetState,GetVisibleCore,GetTopLevel,CreateControl],CanShowToolTipsNow->[CanShowToolTipsNow],OnForeColorChanged->[GetAnyDisposingInHierarchy,Invalidate],OnRightToLeftChanged->[GetAnyDisposingInHierarchy],OnDragLeave->[OnDragLeave],SetAutoSizeMode->[SetAutoSizeMode],WmCtlColorControl->[DefWndProc],SetAcceptDrops->[GetState],PreProcessControlState->[GetState2,IsInputKey,PreProcessMessage,IsInputChar,OnPreviewKeyDown],ShouldSerializeVisible->[GetState],UpdateStyles->[OnStyleChanged],ProcessKeyMessage->[ProcessKeyEventArgs],WmPrintClient->[OnPrint],OnDragDrop->[OnDragDrop],InitLayout->[InitLayout],WmPaint->[PaintWithErrorHandling,Dispose,GetStyle],OnParentRightToLeftChanged->[OnRightToLeftChanged],GetExtent->[ToString,GetExtent],WmMenuSelect->[DefWndProc],PerformContainerValidation->[PerformControlValidation,GetStyle,PerformContainerValidation],SelectNextControl->[Select],WmCaptureChanged->[OnMouseCaptur
eChanged,DefWndProc],OnVisibleChanged->[GetAnyDisposingInHierarchy,OnParentBecameInvisible,OnParentVisibleChanged,CreateControl],OnParentChanged->[OnTopMostActiveXParentChanged],SetClientSite->[SetClientSite],ShouldPerformContainerValidation->[GetStyle],WmKillFocus->[InvokeLostFocus,DefWndProc],GetUserClassID->[ToString],CreateHandle->[CreateHandle],ThreadMethodEntry->[Close],Invoke->[Invoke],PrintToMetaFile->[Size],ProcessKeyPreview->[ProcessKeyPreview],WmDpiChangedAfterParent->[OnDpiChangedAfterParent,DefWndProc],TranslateAccelerator->[TranslateAccelerator],SendToBack->[GetTopLevel],ActiveXImpl->[AmbientProperty->[ToString],Unadvise->[ThrowHr,RemoveAt],InPlaceDeactivate->[UIDeactivate],Load->[GetStreamName,Load,IsResourceProp,FromBase64WrappedString,ToString],Advise->[Add],ShowProperties->[EnableModeless],SetExtent->[ToString,Size],InPlaceActivate->[FocusInternal,GetWindow,SetObjectRects,CreateControl],GetControlInfo->[ToString],Draw->[PrintToMetaFile,CreateHandle],Close->[InPlaceDeactivate],DoVerb->[InPlaceDeactivate,ToString,SelectNextControl,UIDeactivate],Save->[IsResourceProp,Save,GetStreamName],OnHandleChange->[OnHandleChange],AdviseHelper->[ComConnectionPoint->[Advise->[Invoke]],AdviseConnectionPoint->[AdviseConnectionPoint,Advise],ComConnectionPointContainer->[ComConnectionPoint->[Advise->[],Invoke]]],SetObjectRects->[ToString,Invalidate],FromBase64WrappedString->[ToString],OnAmbientPropertyChange->[CallParentPropertyChanged,GetAmbientProperty],GetExtent->[Size],PropertyBagStream->[Read->[Read,Contains]],SetClientSite->[OnTopMostActiveXParentChanged,Invoke,Dispose,GetAmbientProperty],OnMessage->[OnMessage],QuickActivate->[SetAdvise,GetMiscStatus,SetClientSite],CallParentPropertyChanged->[OnParentFontChanged,OnParentEnabledChanged,OnParentRightToLeftChanged,OnParentBackColorChanged,OnParentBindingContextChanged,OnParentBackgroundImageChanged,OnParentForeColorChanged,OnParentVisibleChanged],TranslateAccelerator->[ToString,TranslateAccelerator,Contains],GetAmb
ientProperty->[Invoke],OnFocus->[ToString,InPlaceActivate,OnFocus],GetMnemonicList->[GetMnemonicList,Add],ToString],WmGetControlName->[MarshalStringToMessage],WmOwnerDraw->[DefWndProc,ReflectMessageInternal],OnMnemonic->[ToString,ProcessMnemonic],Dispose->[DestroyHandle,Dispose,ResetBindings],PrintToMetaFile_SendPrintMessage->[Size,GetStyle],WndProc->[WmDestroy,WmGetControlType,WmNotify,WmKeyChar,WmEraseBkgnd,WmWindowPosChanging,WmClose,WmMouseUp,WmMouseHover,WmUpdateUIState,WmCommand,WmMove,WmParentNotify,WmExitMenuLoop,WmNotifyFormat,InvokeMarshaledCallbacks,WmHelp,WmMouseMove,DefWndProc,SetState,OnNotifyMessage,GetStyle,WmCtlColorControl,WmShowWindow,WmMenuChar,WmCreate,WmMouseDown,WmPrintClient,WmPaint,WmInitMenuPopup,ReflectMessageInternal,WmMeasureItem,WmMenuSelect,WmCaptureChanged,WmMouseLeave,WmKillFocus,WmDisplayChange,WmGetObject,WmSetCursor,WmMouseWheel,WmQueryNewPalette,WmSetFocus,WmDrawItem,WmContextMenu,WmDpiChangedBeforeParent,WmDpiChangedAfterParent,WmWindowPosChanged,WmMouseEnter,WmGetControlName],GetContentExtent->[GetExtent],GetOwnNeighboringToolsRectangles->[Add],ResumeLayout->[InitLayout,ResumeLayout,PerformLayout,OnLayoutResuming,GetState],Unadvise->[Unadvise],SetContentExtent->[SetExtent],Load->[Load],GetCaptionForTool->[GetCaptionForTool],PaintTransparentBackground->[InvokePaintBackground,Control,PaintTransparentBackground,InvokePaint],WmMouseUp->[OnMouseDoubleClick,DefWndProc,SetState,GetStyle,OnMouseClick,OnClick,OnMouseUp,OnDoubleClick,GetState],DisposeAxControls->[DisposeAxControls],OnLayoutResuming->[OnChildLayoutResuming],GetVisibleCore->[GetVisibleCore],OnHandleCreated->[GetState2,GetStyle,ListenToUserPreferenceChanged,GetTopLevel,GetState],ProcessKeyEventArgs->[OnKeyDown,OnKeyUp,OnKeyPress],WmMove->[UpdateBounds,DefWndProc],DoVerb->[ToString,DoVerb],OnMove->[Invalidate],WmHelp->[OnHelpRequested,DefWndProc],GetClientSite->[GetClientSite],OnParentBindingContextChanged->[OnBindingContextChanged],ScaleCore->[AssertLayoutSuspendCount,Scale
,ResumeLayout],PreProcessMessage->[IsInputKey,GetState2,IsInputChar],UpdateStylesCore->[Invalidate,SetState],ControlNativeWindow->[OnMessage->[WndProc],OnHandleChange->[OnHandleChange,SetHandle],OnThreadException->[WndProcException],ToString->[ToString],WndProc->[OnMessage,HookMouseEvent,SetState,UnhookMouseEvent,ResetMouseEventArgs,GetState]],OnTopMostActiveXParentChanged->[OnTopMostActiveXParentChanged],CanProcessMnemonic->[CanProcessMnemonic,TraceCanProcessMnemonic],WmMenuChar->[WmMenuChar],ListenToUserPreferenceChanged->[GetState2],WmMouseDown->[FocusInternal,DefWndProc,SetState,GetState2,OnMouseDown,GetStyle],WmShowWindow->[DefWndProc,SetState,OnVisibleChanged,GetState,GetTopLevel,CreateControl],OnPrint->[DefWndProc,GetStyle],IsDirty->[IsDirty],EnumVerbs->[EnumVerbs],OnResize->[Invalidate,GetState],UnhookMouseEvent->[SetState],OnBackgroundImageChanged->[GetAnyDisposingInHierarchy,Invalidate],WmMeasureItem->[WmMeasureItem],UserPreferenceChanged->[OnSystemColorsChanged],WmMouseLeave->[OnMouseLeave,DefWndProc],ControlVersionInfo->[IndexOf],Select->[Select],FontHandleWrapper->[Dispose->[ToString,Dispose],ToString,Dispose,Add],GetAdvise->[GetAdvise],SetTopLevelInternal->[SetParentHandle,GetState2,SetState,ListenToUserPreferenceChanged,GetTopLevel,CreateControl],OnDocWindowActivate->[OnDocWindowActivate,ToString],OnBackgroundImageLayoutChanged->[GetAnyDisposingInHierarchy,Invalidate],WmSetFocus->[InvokeGotFocus,DefWndProc],SelectNextControlInternal->[SelectNextControl],IsHoveredWithMouse->[Contains],OnParentBackgroundImageChanged->[OnBackgroundImageChanged],Scale->[AssertLayoutSuspendCount,Scale,ResumeLayout],WmWindowPosChanged->[UpdateChildControlIndex,UpdateBounds,DefWndProc],UpdateBounds->[OnSizeChanged,OnClientSizeChanged,AdjustWindowRectEx,OnLocationChanged,UpdateBounds,GetTopLevel],WmDestroy->[DefWndProc,UnhookMouseEvent,SetState,ReleaseUiaProvider,OnHandleDestroyed,OnMouseLeave,GetState],OnFrameWindowActivate->[OnFrameWindowActivate],WmGetControlType->[Marshal
StringToMessage],MarshaledInvoke->[WaitForWaitHandle,InvokeMarshaledCallbacks],HookMouseEvent->[GetState],WmNotify->[DefWndProc,ReflectMessageInternal],InPlaceDeactivate->[InPlaceDeactivate],OnQueryContinueDrag->[OnQueryContinueDrag],Advise->[Advise],ResetMouseEventArgs->[HookMouseEvent,GetState],MetafileDCWrapper->[Dispose],CheckParentingCycle->[CheckParentingCycle],LogicalToDeviceUnits->[LogicalToDeviceUnits],SetExtent->[SetExtent],Draw->[Draw],OnHandleDestroyed->[GetAnyDisposingInHierarchy,ListenToUserPreferenceChanged,GetState],WmCommand->[DefWndProc,ReflectMessageInternal],OnParentInvalidated->[Invalidate],OnSizeChanged->[OnResize],InvokeMarshaledCallbacks->[InvokeMarshaledCallback],PaintWithErrorHandling->[PaintException,GetStyle,OnPaint,Invalidate,OnPaintBackground,GetState],ScaleChildControls->[Scale],OnDragEnter->[OnDragEnter],OnChildLayoutResuming->[OnChildLayoutResuming],ControlCollection->[IndexOf->[IndexOf],Clear->[SuspendLayout,RemoveAt,ResumeLayout],Clone->[AddRange],Add->[OnControlAdded,InitLayout,SetParentHandle,ResumeLayout,SuspendLayout,AssertLayoutSuspendCount,AssignParent,Add,CheckParentingCycle,SendToBack,GetTopLevel,CreateControl],AddRange->[AssertLayoutSuspendCount,SuspendLayout,Add,ResumeLayout],GetChildIndex->[GetChildIndex,IndexOf],SetChildIndex->[SetChildIndexInternal],RemoveAt->[Remove],Contains->[Contains],RemoveByKey->[IndexOfKey,RemoveAt,IsValidIndex],ArrayList->[Add],Remove->[Remove,AssignParent,SetParentHandle,OnControlRemoved],SetChildIndexInternal->[GetChildIndex,UpdateZOrder],IndexOfKey,ToString,IsValidIndex],WmCreate->[DefWndProc,GetStyle,UpdateChildZOrder,UpdateBounds,OnHandleCreated],GetWindow->[GetWindow],OnParentVisibleChanged->[GetState],DrawToBitmap->[CreateHandle],OnEnabledChanged->[GetAnyDisposingInHierarchy,Invalidate,GetStyle],WmDisplayChange->[DefWndProc,Invalidate],OnInvalidated->[ActiveXViewChanged],WmGetObject->[DefWndProc],WmMouseWheel->[OnMouseWheel,DefWndProc],Font->[Font],ProcessCmdKey->[ProcessCmdKey],IntPtr->
[Dispose,CreateGraphicsInternal,GetStyle],UIDeactivate->[UIDeactivate],WmQueryNewPalette->[DefWndProc,Invalidate],ScaleControl->[Size,ScaleControl,AdjustWindowRectEx],PaintBackground->[RenderColorTransparent,PaintBackground],OnGiveFeedback->[OnGiveFeedback],ContextSensitiveHelp->[ToString,ContextSensitiveHelp,OnHelpRequested],WmContextMenu->[Show,WmContextMenu,DefWndProc,Contains],GetClassID->[ToString],PerformLayout->[GetAnyDisposingInHierarchy,GetState2,OnLayout,PerformLayout,GetState],ShouldSerializeEnabled->[GetState],RecreateHandleCore->[FocusInternal,OnParentHandleRecreated,DestroyHandle,OnParentHandleRecreating,CreateControl,GetState,CreateHandle],ChildGotFocus->[ChildGotFocus,ActiveXOnFocus],SetAdvise->[SetAdvise],ActiveXVerbEnum->[Next->[ToString]],SetBounds->[Size,SetBoundsCore,InitScaling],GetMiscStatus->[ToString,GetStyle],ProcessDialogChar->[ProcessDialogChar],DestroyHandle->[DestroyHandle],OnLayout->[ActiveXViewChanged],WmEraseBkgnd->[PaintWithErrorHandling,DefWndProc,GetStyle],OnParentCursorChanged->[OnCursorChanged],WmClose->[DefWndProc],WmUpdateUIState->[OnChangeUICues,Invalidate,DefWndProc],EndUpdateInternal->[EndUpdateInternal],GetControlInfo->[IsInputKey,GetControlInfo],ActiveXPropPage->[Size,Show,Add],ControlAccessibleObject->[GetSysChild->[IndexOf,GetChildControlsInTabOrder,Next],NotifyClients->[ToString],GetSysChildOrder->[GetSysChildOrder,GetChildWindowsInTabOrder,GetStyle],IsIAccessibleExSupported->[IsIAccessibleExSupported],GetPropertyValue->[GetPropertyValue],ToString->[ToString],GetHelpTopic->[GetHelpTopic],GetStyle],SetBoundsCore->[InitScaling,InitLayout,ResumeLayout,GetState],Save->[Save],SetParentHandle->[GetTopLevel,RecreateHandle],OnGotFocus->[ChildGotFocus,ActiveXOnFocus],WmMouseMove->[OnMouseMove,DefWndProc,GetStyle],SetClientSizeCore->[OnClientSizeChanged],WmDrawItemMenuItem->[WmDrawItem],OnPaddingChanged->[Invalidate,GetStyle],OnLostFocus->[ActiveXOnFocus],IContainerControl->[IsFocusManagingContainerControl],Rectangle->[Size],Set
ObjectRects->[SetObjectRects],ShowsOwnToolTip->[ShowsOwnToolTip],OnParentBecameInvisible->[OnParentBecameInvisible,GetState],OnBackColorChanged->[GetAnyDisposingInHierarchy,Invalidate,GetState],PrintToMetaFileRecursive->[PrintToMetaFileRecursive],OnAmbientPropertyChange->[OnAmbientPropertyChange],WmInitMenuPopup->[DefWndProc],PerformControlValidation->[NotifyValidated,NotifyValidating],SuspendLayout->[OnLayoutSuspended],AllowsToolTip->[AllowsToolTip],AreCommonNavigationalKeysDown->[IsKeyDown],AllowsChildrenToShowToolTips->[AllowsChildrenToShowToolTips],WmSetCursor->[DefWndProc],QuickActivate->[QuickActivate],SelectNextIfFocused->[SelectNextControlInternal],ProcessDialogKey->[ProcessDialogKey],EndInvoke->[WaitForWaitHandle],Size->[Size,LogicalToDeviceUnits,GetStyle,AdjustWindowRectEx],OnParentForeColorChanged->[OnForeColorChanged],UpdateWindowFontIfNeeded->[GetStyle],WmMouseEnter->[DefWndProc,OnMouseEnter],WmDpiChangedBeforeParent->[OnDpiChangedBeforeParent,Font,RescaleConstantsForDpi,DefWndProc,Size],Invalidate->[CreateGraphicsInternal,Invalidate,GetStyle],AddReflectChild,AssertLayoutSuspendCount,Control,GetAllocationStack,DisposeFontHandle,Size,RemoveReflectChild,GetState]]
InPlaceDeactivate - Deactivate this object if it is in place active.
I think it is a good idea in general to remove the obfuscation between Marshaling and the source of the marshall call.
@@ -51,6 +51,18 @@ static int cms_set_pkey_param(EVP_PKEY_CTX *pctx, # define SMIME_ENCRYPTED_ENCRYPT (14 | SMIME_OP) # define SMIME_SIGN_RECEIPT (15 | SMIME_IP | SMIME_OP) # define SMIME_VERIFY_RECEIPT (16 | SMIME_IP) +# define CMS_CADES 0x100000 + + +struct ESS_cert_id_v2_st { + X509_ALGOR *hash_alg; /* Default: SHA-256 */ + ASN1_OCTET_STRING *hash; + ESS_ISSUER_SERIAL *issuer_serial; +}; +struct ESS_signing_cert_v2_st { + STACK_OF(ESS_CERT_ID_V2) *cert_ids; + STACK_OF(POLICYINFO) *policy_info; +}; static int verify_err = 0;
[No CFG could be retrieved]
This function creates a new receipt request from the given arguments. Enumerates the types of options that can be used to encrypt and decrypt a file.
Declaring again these structures means that a few access methods are missing. You better declare them in a first commit.
@@ -32,6 +32,7 @@ class PyPytables(Package): url = "https://github.com/PyTables/PyTables/archive/v.3.2.2.tar.gz" version('3.2.2', '7cbb0972e4d6580f629996a5bed92441') + version('3.3.0', '056c161ae0fd2d6e585b766adacf3b0b', url='https://github.com/PyTables/PyTables/archive/v3.3.0.tar.gz') extends('python') depends_on('hdf5')
[PyPytables->[install->[setup_py],depends_on,version,extends]]
PyPytables is a package for managing hierarchical datasets, designed to efficiently and easily cope with extremely large amounts of data.
Can the URL go on the next line? Also... 3.3.0 needs to come before 3.2.2.
@@ -3601,12 +3601,11 @@ def is_more_general_arg_prefix(t: FunctionLike, s: FunctionLike) -> bool: """Does t have wider arguments than s?""" # TODO should an overload with additional items be allowed to be more # general than one with fewer items (or just one item)? - # TODO check argument kinds and otherwise make more general if isinstance(t, CallableType): if isinstance(s, CallableType): - t, s = unify_generic_callables(t, s) - return all(is_proper_subtype(args, argt) - for argt, args in zip(t.arg_types, s.arg_types)) + return is_callable_compatible(t, s, + is_compat=is_proper_subtype, + ignore_return=True) elif isinstance(t, FunctionLike): if isinstance(s, FunctionLike): if len(t.items()) == len(s.items()):
[TypeChecker->[analyze_async_iterable_item_type->[accept],visit_try_without_finally->[check_assignment,accept],visit_class_def->[accept],iterable_item_type->[lookup_typeinfo],visit_for_stmt->[accept_loop],visit_operator_assignment_stmt->[check_assignment,accept],check_return_stmt->[get_generator_return_type,accept,get_coroutine_return_type],visit_del_stmt->[accept],check_multi_assignment->[check_assignment,accept],check_async_with_item->[check_assignment,accept],check_multi_assignment_from_union->[check_multi_assignment],visit_decorator->[check_method_override,accept,check_func_item],_visit_overloaded_func_def->[accept],should_suppress_optional_error->[contains_none],visit_assert_stmt->[accept],accept->[accept],set_inference_error_fallback_type->[set_inferred_type],partition_by_callable->[make_fake_callable,partition_by_callable],check_multi_assignment_from_tuple->[check_multi_assignment_from_union,check_assignment,accept,check_rvalue_count_in_assignment],check_override->[erase_override],visit_try_stmt->[accept],check_member_assignment->[check_simple_assignment],conditional_callable_type_map->[partition_by_callable],warn->[warn],check_except_handler_test->[accept],visit_with_stmt->[accept],try_infer_partial_type_from_indexed_assignment->[accept],check_for_missing_annotations->[get_coroutine_return_type,is_unannotated_any],function_type->[named_type,function_type],visit_print_stmt->[accept],make_fake_callable->[intersect_instance_callable],visit_assignment_stmt->[accept],fail->[fail],note->[note],check_with_item->[check_assignment,accept],visit_block->[accept],analyze_index_variables->[check_assignment],type_type->[named_type],check_func_def->[accept,get_generator_receive_type,get_coroutine_return_type,get_generator_return_type,get_generator_yield_type,is_async_generator_return_type,is_generator_return_type],type_check_raise->[accept],contains_none->[contains_none],check_overlapping_op_methods->[check_overlapping_op_methods],check_lvalue->[check_lvalue,accept],get_ge
nerator_receive_type->[is_generator_return_type,is_async_generator_return_type],visit_while_stmt->[accept_loop],analyze_iterable_item_type->[accept],check_compatibility_super->[accept],accept_loop->[accept],check_assignment_to_multiple_lvalues->[check_assignment],visit_expression_stmt->[accept],find_isinstance_check->[conditional_callable_type_map,find_isinstance_check],lookup_qualified->[lookup],check_multi_assignment_from_iterable->[type_is_iterable,check_assignment],get_generator_return_type->[is_generator_return_type],check_indexed_assignment->[accept],check_simple_assignment->[accept],get_generator_yield_type->[is_generator_return_type,is_async_generator_return_type],flatten_lvalues->[flatten_lvalues],visit_if_stmt->[accept],lvalue_type_for_inference->[append_types_for_inference],check_assignment->[accept,check_setattr_method,check_getattr_method],str_type->[named_type],get_types_from_except_handler->[get_types_from_except_handler]],flatten->[flatten],get_isinstance_type->[flatten_types],is_static->[is_static],CheckerScope->[active_self_type->[active_class],enclosing_class->[top_function]],flatten_types->[flatten_types],expand_func->[accept],is_valid_inferred_type_component->[is_valid_inferred_type_component]]
Checks if t has more general arguments than s.
Why can we skip unification here?
@@ -91,7 +91,7 @@ namespace Dynamo.PackageManager if (packagesDirectories == null) throw new ArgumentNullException("packagesDirectories"); - this.packagesDirectories.AddRange(packagesDirectories); + //this.packagesDirectories.AddRange(packagesDirectories); this.packagesDirectories.Add(StandardLibraryDirectory); var error = PathHelper.CreateFolderIfNotExist(DefaultPackagesDirectory);
[PackageLoader->[TryLoadPackageIntoLibrary->[OnRequestLoadNodeLibrary,OnRequestLoadCustomNodeDirectory,Add],DoCachedPackageUninstalls->[Add],Load->[TryLoadPackageIntoLibrary,OnPackagesLoaded],Add->[OnPackageAdded,Add],IsUnderPackageControl->[IsUnderPackageControl],LoadCustomNodesAndPackages->[LoadAll,Add],Remove->[OnPackageRemoved,Remove],LoadPackages->[TryLoadPackageIntoLibrary,OnPackagesLoaded],Package->[Add]]]
PackageLoader provides a way to load a single package from the node library. Remove a package file from the system.
Don't load all packages, just the ones in the standard lib.
@@ -20,14 +20,7 @@ public final class DefaultProfileUtil { * @param app the spring application */ public static void addDefaultProfile(SpringApplication app) { - Map<String, Object> defProperties = new HashMap<>(); - /* - * The default profile to use when no other profiles are defined - * This cannot be set in the <code>application.yml</code> file. - * See https://github.com/spring-projects/spring-boot/issues/1219 - */ - defProperties.put(SPRING_PROFILE_DEFAULT, Constants.SPRING_PROFILE_DEVELOPMENT); - app.setDefaultProperties(defProperties); + app.setAdditionalProfiles(Constants.SPRING_PROFILE_DEVELOPMENT, Constants.SPRING_PROFILE_SWAGGER); } /**
[No CFG could be retrieved]
This class is used to determine the active profiles.
@jdubois If we run by default this would cause a double loading of the `dev` profile, as it would have already been loaded from the `application.yml` file once. If we run in prod, it will also load `dev`, which is very bad. If we decide to do this conditionally, at this point we would not have any profile info in the first place to decide whether we need to set this or not. That is why I went with `app.setDefaultProperties(defProperties);`, as Spring would disregard it when a profile is set using any of the Spring-suggested ways.
@@ -237,8 +237,7 @@ namespace Kratos /***********************************************************************************/ void MPMExplicitUtilities::CalculateMUSLGridVelocity( - Element& rElement, - const Vector& rN) + Element& rElement) { KRATOS_TRY
[No CFG could be retrieved]
Updates the MUSLGridVelocity and MUSLPosition based on the current acceleration necessary for the R - KR.
creating on the fly? ouch :sweat_smile:
@@ -54,10 +54,14 @@ class XmlFileLoader11 extends XmlFileLoader10 /* @var \DOMNode $node */ $template = $node->nodeValue; $type = $node->attributes->getNamedItem('type')->nodeValue; + $parentTemplateNode = $node->attributes->getNamedItem('parent-template'); + if ($parentTemplateNode) { + $parentTemplate = $parentTemplateNode->nodeValue; + } - $webspace->addDefaultTemplate($type, $template); + $webspace->addDefaultTemplate($type, $template, isset($parentTemplate) ? $parentTemplate : null); if ('homepage' === $type) { - $webspace->addDefaultTemplate('home', $template); + $webspace->addDefaultTemplate('home', $template, isset($parentTemplate) ? $parentTemplate : null); } }
[XmlFileLoader11->[generateDefaultTemplates->[addDefaultTemplate,query,getKey,getNamedItem,getDefaultTemplates],generateTemplates->[addTemplate,query,getNamedItem],parseXml->[item,generateExcludedTemplates,setResourceLocatorStrategy],generateExcludedTemplates->[query,addExcludedTemplate]]]
Generates the default templates for the webspace.
Why did you handle that here in a different and more complicated way as opposed to `XmlFileLoader10`?
@@ -2915,7 +2915,10 @@ out_lock: &out->pqo_space); out_svc: - out->pqo_op.po_map_version = ds_pool_get_version(svc->ps_pool); + if (map_version == 0) + out->pqo_op.po_map_version = ds_pool_get_version(svc->ps_pool); + else + out->pqo_op.po_map_version = map_version; ds_rsvc_set_hint(&svc->ps_rsvc, &out->pqo_op.po_hint); pool_svc_put_leader(svc); out:
[No CFG could be retrieved]
read the n - ary entry from the DP and write it to the DP. Get the target state of the leader.
This looks like a regression introduced by 758b4d7. My bad. Thanks.
@@ -142,14 +142,15 @@ func (c *tableNamespaceClassifier) GetRegionNamespace(regionInfo *core.RegionInf c.RLock() defer c.RUnlock() + isMeta := Key(regionInfo.StartKey).IsMeta() tableID := Key(regionInfo.StartKey).TableID() - if tableID == 0 { + if tableID == 0 && !isMeta { return namespace.DefaultNamespace } for name, ns := range c.nsInfo.namespaces { _, ok := ns.TableIDs[tableID] - if ok { + if ok || (isMeta && ns.Meta) { return name } }
[saveNamespace->[GetID,namespacePath],AddNamespaceStoreID->[AddStoreID],AddNamespaceTableID->[AddTableID],loadNamespaces->[GetID,getNamespaceCount,setNamespace,namespacePath]]
GetRegionNamespace returns the region namespace for the given region.
any test to cover meta here?
@@ -72,7 +72,7 @@ else: # backslash replacement for all versions. def backslashreplace_decode_fn(err): raw_bytes = (err.object[i] for i in range(err.start, err.end)) - if sys.version_info[0] == 2: + if PY2: # Python 2 gave us characters - convert to numeric bytes raw_bytes = (ord(b) for b in raw_bytes) return u"".join(u"\\x%x" % c for c in raw_bytes), err.end
[expanduser->[expanduser],samefile->[samefile],get_terminal_size->[ioctl_GWINSZ,get_terminal_size],console_to_str->[str_to_display]]
This function is used to decode a sequence of bytes into a string, using backslash replacement for undecodable bytes.
This check can be removed now since this function is only used in Python 2.
@@ -1,3 +1,5 @@ +var STORAGE_TOGGLE_KEY = "scinote-sidebar-toggled"; + (function(global) { 'use strict'; global.SideBarToggle = (function() {
[No CFG could be retrieved]
Initialize the sidebar toggle.
Do you need to declare this variable globally?
@@ -524,7 +524,7 @@ def extract_tarball(spec, filename, allow_root=False, unsigned=False, # Delay creating spec.prefix until verification is complete # and any relocation has been done. else: - install_tree(workdir, spec.prefix, symlinks=True) + shutil.copytree(workdir, spec.prefix, symlinks=True) finally: shutil.rmtree(tmpdir)
[make_package_relative->[read_buildinfo_file],make_package_placeholder->[read_buildinfo_file],tarball_path_name->[tarball_name,tarball_directory_name],build_tarball->[tarball_name,checksum_tarball,NoOverwriteException,tarball_directory_name,tarball_path_name,sign_tarball,write_buildinfo_file,generate_index],read_buildinfo_file->[buildinfo_file_name],download_tarball->[tarball_path_name],sign_tarball->[NoGpgException,NoOverwriteException,PickKeyException,has_gnupg2,NoKeyException],write_buildinfo_file->[buildinfo_file_name],extract_tarball->[tarball_name,checksum_tarball,NoOverwriteException,NewLayoutException,NoChecksumException,NoVerifyException,relocate_package],relocate_package->[read_buildinfo_file]]
Extracts a binary tarball for a given package into the install area. This function is called when a package is installed. It will extract the tarball in a temporary directory.
For some reason, using the new `install_tree` wrapper here caused `test_buildcache` to crash. I have no idea why.
@@ -103,4 +103,13 @@ public class ReconServer extends GenericCli { .getInstance(OzoneManagerServiceProvider.class); ozoneManagerServiceProvider.stop(); } + + OzoneManagerServiceProvider getOzoneManagerServiceProvider() { + return injector.getInstance(OzoneManagerServiceProvider.class); + } + + ReconStorageContainerManager getReconStorageContainerManager() { + return injector.getInstance(ReconStorageContainerManager.class); + } + }
[ReconServer->[call->[configureServlets->[packages],error,createOzoneConfiguration,start,createReconSchema,ReconTaskBindingModule,ReconSchemaGenerationModule,ReconRestServletModule,createInjector,setInjector,getInstance,stop,addShutdownHook,ReconControllerModule,info,Thread,setConfiguration],main->[run],stop->[info,getInstance,stop],getLogger]]
Stop the Recon server.
If using DI properly this should never be needed, unless used in non-DI code path
@@ -12,9 +12,9 @@ class Comment < ApplicationRecord COMMENTABLE_TYPES = %w[Article PodcastEpisode].freeze TITLE_DELETED = "[deleted]".freeze TITLE_HIDDEN = "[hidden by post author]".freeze - MAX_USER_MENTIONS = 7 # Explicitly set to 7 to accomodate DEV Top Seven Posts - # The date that we began limiting the nubmer of user mentions in a comment. - MAX_USER_MENTION_LIVE_AT = Time.utc(2021, 3, 11).freeze + MAX_USER_MENTIONS = 7 # Explicitly set to 7 to accommodate DEV Top 7 Posts + # The date that we began limiting the number of user mentions in a comment. + MAX_USER_MENTION_LIVE_AT = Time.utc(2021, 3, 12).freeze belongs_to :commentable, polymorphic: true, optional: true belongs_to :user
[Comment->[update_descendant_notifications->[update_notifications],wrap_timestamps_if_video_present!->[path],update_notifications->[update_notifications],expire_root_fragment->[root_exists?]]]
Comment class for a single application record. The published article.
I chose to not make this a SiteConfig configurable date because I don't expect we'll change it again -- and we definitely don't want this to be configurable for Forem admins, so that also factored into my decision. The other thing to consider is that this would be a non-UI SiteConfig, which feels a bit hacky/like an antipattern, so that was another reason why I chose to just bump it here
@@ -83,6 +83,11 @@ module Users render :edit end + def update_user + resource.update(confirmed_at: Time.current) unless resource.confirmed? + resource.update(password: user_params[:password]) + end + def mark_profile_inactive resource.active_profile&.deactivate(:password_reset) end
[ResetPasswordsController->[handle_successful_password_reset->[update]]]
if the user responds with a token that has expired redirect to the new user password path otherwise.
not sure how you feel about removing Devise, I have been trying to do it when I can since I think at the level of customization we have it is more confusing than helpful. If you are interested in moving away from Devise, removing references to `resource` would help :)
@@ -83,6 +83,9 @@ class RemoteBASE(object): DEFAULT_NO_TRAVERSE = True DEFAULT_VERIFY = False + CACHE_MODE = None + SHARED_MODE_MAP = {None: (None, None), "group": (None, None)} + state = StateNoop() def __init__(self, repo, config):
[RemoteBASE->[_check_requires->[RemoteMissingDepsError],all->[path_to_checksum,list_cache_paths],_download_dir->[walk_files],remove->[RemoteActionNotImplemented],changed_cache->[is_dir_checksum,changed_cache_file,_changed_dir_cache],_cache_is_copy->[link],load_dir_cache->[DirCacheError],symlink->[RemoteActionNotImplemented],upload->[_handle_transfer_exception,RemoteActionNotImplemented],copy->[RemoteActionNotImplemented],move->[remove],download->[isdir,RemoteActionNotImplemented],gc->[is_dir_checksum,all,checksum_to_path_info,remove],hardlink->[RemoteActionNotImplemented],_checkout_dir->[safe_remove,changed,exists,get_dir_cache,save,makedirs,checksum_to_path_info,link],open->[RemoteActionNotImplemented],already_cached->[changed_cache,get_checksum],get_files_number->[is_dir_checksum,get_dir_cache],_save_dir->[get_dir_cache,_save_file],_remove_redundant_files->[safe_remove,walk_files],extract_used_local_checksums->[_get_unpacked_dir_names],get_checksum->[is_dir_checksum,get_dir_checksum,get_file_checksum],_download_file->[_handle_transfer_exception],safe_remove->[exists,already_cached,remove],_save->[isdir,_save_dir,_save_file],_changed_dir_cache->[get_dir_cache,changed_cache_file],_checkout->[is_dir_checksum,_checkout_file,_checkout_dir],reflink->[RemoteActionNotImplemented],save->[RemoteActionNotImplemented],_checkout_file->[safe_remove,exists,save,checksum_to_path_info,link],checkout->[changed,changed_cache,safe_remove],cache_exists->[exists_with_progress->[exists],all],get_dir_checksum->[_collect_dir],save_info->[get_checksum],_collect_dir->[_calculate_checksums],changed->[get_checksum],_get_dir_info_checksum->[get_file_checksum],_save_file->[link],_try_links->[_verify_link],changed_cache_file->[exists,checksum_to_path_info,remove,get_checksum]]]
Initialize a cache object from a repository and configuration.
Generalising it because SSH could use the same optimization a bit later.
@@ -24,6 +24,7 @@ import io.prestosql.connector.CatalogName; import io.prestosql.metadata.QualifiedObjectName; import io.prestosql.plugin.base.security.AllowAllSystemAccessControl; import io.prestosql.plugin.base.security.FileBasedSystemAccessControl; +import io.prestosql.plugin.base.security.ForwardingSystemAccessControl; import io.prestosql.plugin.base.security.ReadOnlySystemAccessControl; import io.prestosql.spi.PrestoException; import io.prestosql.spi.connector.CatalogSchemaName;
[AccessControlManager->[checkCanInsertIntoTable->[checkCanInsertIntoTable],checkCanRenameSchema->[checkCanRenameSchema],filterSchemas->[filterSchemas],checkCanShowColumns->[checkCanShowColumns],catalogAuthorizationCheck->[getConnectorAccessControl],checkCanDropSchema->[checkCanDropSchema],checkCanShowTables->[checkCanShowTables],checkCanCreateSchema->[checkCanCreateSchema],checkCanSetUser->[checkCanSetUser],checkCanDropColumn->[checkCanDropColumn],checkCanSelectFromColumns->[checkCanSelectFromColumns],checkCanShowRoles->[checkCanShowRoles],checkCanDeleteFromTable->[checkCanDeleteFromTable],checkCanRenameColumn->[checkCanRenameColumn],checkCanKillQueryOwnedBy->[checkCanKillQueryOwnedBy],checkCanRevokeTablePrivilege->[checkCanRevokeTablePrivilege],checkCanRevokeRoles->[checkCanRevokeRoles],checkCanSetRole->[checkCanSetRole],checkCanExecuteQuery->[checkCanExecuteQuery],CatalogAccessControlEntry->[toConnectorSecurityContext->[toConnectorSecurityContext,getCatalogName]],checkCanShowRoleGrants->[checkCanShowRoleGrants],checkCanRenameTable->[checkCanRenameTable],filterCatalogs->[filterCatalogs],checkCanShowSchemas->[checkCanShowSchemas],checkCanSetTableComment->[checkCanSetTableComment],filterColumns->[filterColumns],checkCanDropTable->[checkCanDropTable],checkCanShowCurrentRoles->[checkCanShowCurrentRoles],filterTables->[filterTables],checkCanSetSystemSessionProperty->[checkCanSetSystemSessionProperty],checkCanAccessCatalog->[checkCanAccessCatalog],checkCanExecuteProcedure->[checkCanExecuteProcedure],checkCanRenameView->[checkCanRenameView],checkCanGrantRoles->[checkCanGrantRoles],checkCanDropView->[checkCanDropView],checkCanDropRole->[checkCanDropRole],checkCanCreateViewWithSelectFromColumns->[checkCanCreateViewWithSelectFromColumns],checkCanViewQueryOwnedBy->[checkCanViewQueryOwnedBy],checkCanCreateRole->[checkCanCreateRole],checkCanCreateTable->[checkCanCreateTable],checkCanShowCreateTable->[checkCanShowCreateTable],checkCanSetCatalogSessionProperty->[checkCanSetCatalo
gSessionProperty],checkCanGrantTablePrivilege->[checkCanGrantTablePrivilege],checkCanSetSchemaAuthorization->[checkCanSetSchemaAuthorization],checkCanCreateView->[checkCanCreateView],checkCanImpersonateUser->[checkCanImpersonateUser]]]
Reads a single integer value from a pre - STOSQL database. Imports all the components of the NIO - related object.
I'd rephrase the commit as "Improve robustness of InitializeSystemAccessControl", and in the commit message indicate that this makes it easier to evolve the SystemAccessControl interface by adding new methods.
@@ -85,7 +85,7 @@ public final class DefaultExtensionManager implements ExtensionManager, MuleCont private ExtensionActivator extensionActivator; @Override - public void initialise() throws InitialisationException { + public void initialise() { extensionRegistry = new ExtensionRegistry(new DefaultRegistry(muleContext)); extensionErrorsRegistrant = new ExtensionErrorsRegistrant(muleContext.getErrorTypeRepository(),
[DefaultExtensionManager->[registerExtension->[registerExtension],createImplicitConfiguration->[registerConfigurationProvider],getConfigurationProvider->[getConfigurationProvider],getExtensions->[getExtensions],getExtension->[getExtension],stop->[stop],registerConfigurationProvider->[registerConfigurationProvider],start->[start],getConfiguration->[getConfiguration]]]
Initialise the extension registry.
You are throwing exceptions, make them throw `InitialisationException` if not, you are not compliant with the API.
@@ -311,9 +311,9 @@ namespace ILCompiler foreach (string inputFile in _inputFiles) { string relativeMsilPath = Path.GetRelativePath(_compositeRootPath, inputFile); - if (relativeMsilPath.StartsWith(s_folderUpPrefix)) + if (relativeMsilPath == inputFile || relativeMsilPath.StartsWith(s_folderUpPrefix)) { - // Input file not in the composite root, emit to root output folder + // Input file not under the composite root, emit to root output folder relativeMsilPath = Path.GetFileName(inputFile); } string standaloneMsilOutputFile = Path.Combine(outputDirectory, relativeMsilPath);
[ReadyToRunCodegenCompilation->[IsLayoutFixedInCurrentVersionBubble->[IsLayoutFixedInCurrentVersionBubble],IsInheritanceChainLayoutFixedInCurrentVersionBubble->[IsLayoutFixedInCurrentVersionBubble]],Compilation->[IsEffectivelySealed->[IsEffectivelySealed],MethodIL->[MethodIL],CanInline->[CanInline]]]
Compiles the components in the composite and writes the resulting MSIL file to the specified outputFile.
does this check need to be case agnostic?
@@ -816,9 +816,11 @@ out: reset_req_limit(dx, spi, SCHED_REQ_UPDATE, u_max); reset_req_limit(dx, spi, SCHED_REQ_FETCH, f_max); reset_req_limit(dx, spi, SCHED_REQ_GC, gc_max); + reset_req_limit(dx, spi, SCHED_REQ_SCRUB, scrub_max); reset_req_limit(dx, spi, SCHED_REQ_MIGRATE, mig_max); process_req_list(dx, pool2req_list(spi, SCHED_REQ_GC)); + process_req_list(dx, pool2req_list(spi, SCHED_REQ_SCRUB)); process_req_list(dx, pool2req_list(spi, SCHED_REQ_MIGRATE)); return 0;
[No CFG could be retrieved]
The function that handles the throttling of the in the case of a light or se Enqueue a request on the FIFO of a stream.
So the scrub ULT isn't throttled yet in current patch? I don't see you adjust the 'scrub_max' according to pending I/O ULTs. And so far there is only single scrub ULT per pool (scrub_max is always 1 before adjusting), so actually the throttling algorithm is to decide delay this single ULT to next cycle or not?
@@ -351,6 +351,16 @@ class OpTest(unittest.TestCase): # fetch_list = map(block.var, fetch_list) if not isinstance(fetch_list[0], fluid.framework.Variable): fetch_list = list(map(block.var, fetch_list)) + + if enable_inplace is not None: + build_strategy = fluid.BuildStrategy() + build_strategy.enable_inplace = enable_inplace + build_strategy.memory_optimize = False + + compiled_prog = fluid.CompiledProgram(program).with_data_parallel( + build_strategy=build_strategy, places=place) + program = compiled_prog + executor = Executor(place) outs = executor.run(program, feed=feed_map,
[OpTest->[_calc_dygraph_output->[_create_var_from_numpy],_get_inputs->[_get_io_vars],_calc_output->[_get_outputs,_get_inputs,feed_var,_append_ops],check_output->[check_output_with_place,_get_places],_get_outputs->[_get_io_vars],infer_dtype_from_inputs_outputs->[infer_dtype->[try_call_once],infer_dtype],check_grad_with_place->[_assert_is_close,get_numeric_gradient],check_output_customized->[calc_output,_get_places],_get_gradient->[_get_inputs,feed_var,_append_ops],_append_ops->[infer_dtype_from_inputs_outputs],check_grad->[_get_places],_assert_is_close->[err_msg],check_output_with_place->[_calc_dygraph_output,_calc_output,find_actual]],get_numeric_gradient->[__set_elem__,product,__get_elem__,get_output]]
Calculate the output of the .
Why set `build_strategy.memory_optimize = False`?
@@ -85,7 +85,7 @@ func NewCommandDiagnostics(name string, fullName string, out io.Writer) *cobra.C cmd := &cobra.Command{ Use: name, Short: "This utility helps you troubleshoot and diagnose.", - Long: fmt.Sprintf(longDescription, fullName), + Long: fmt.Sprintf(longDescription, fullName, strings.Join(availableDiagnostics().List(), ",")), Run: func(c *cobra.Command, args []string) { kcmdutil.CheckErr(o.Complete())
[Run->[Warnings,Error,Sprintf,CanRun,Name,Description,LogEntry,Check,Errors,Notice,Logs,Stack],RunDiagnostics->[IsNotExist,Error,Stat,NewAggregate,Sprintf,detectClientConfig,buildRawConfig,buildClusterDiagnostics,List,buildHostDiagnostics,EvalTemplate,Insert,NewString,Errorf,buildClientDiagnostics,Notice,Run,Stack],Complete->[NewLogger],StringVar,RecommendedLoggerOptionFlags,Exit,GLog,Has,New,AddFlag,Complete,NewString,CheckErr,BindDiagnosticFlag,RunDiagnostics,Lookup,Summary,NewFlagSet,Sprintf,SetOutput,BindLoggerOptionFlags,Insert,NewRecommendedDiagnosticFlag,Flags,BoolVar]
NewCommandDiagnostics creates a new command that runs the diagnostic of a specific node. Flags for config file.
join with ", "
@@ -516,7 +516,7 @@ aggregate_pool_space(struct daos_pool_space *agg_ps, D_ASSERT(agg_ps && ps); if (ps->ps_ntargets == 0) { - D_ERROR("Skip emtpy space info\n"); + D_DEBUG(DB_TRACE, "Skip emtpy space info\n"); return; }
[No CFG could be retrieved]
Services for the in the pool. - - - - - - - - - - - - - - - - - -.
Isn't this an unexpected error? Why it's changed from error to debug?
@@ -141,7 +141,15 @@ public class CountText extends AbstractProcessor { .allowableValues(getStandardCharsetNames()) .defaultValue(StandardCharsets.UTF_8.displayName()) .build(); - + public static final PropertyDescriptor ADJUST_IMMEDIATELY = new PropertyDescriptor.Builder() + .name("ajust-immediately") + .displayName("Call Immediate Adjustment") + .description("If true, the counter will be updated immediately, without regard to whether the ProcessSession is commit or rolled back; otherwise, the counter will be incremented only if and when the ProcessSession is committed.") + .required(true) + .allowableValues("true", "false") + .defaultValue("false") + .addValidator(StandardValidators.BOOLEAN_VALIDATOR) + .build(); private static Set<String> getStandardCharsetNames() { return STANDARD_CHARSETS.stream().map(c -> c.displayName()).collect(Collectors.toSet()); }
[CountText->[onTrigger->[getMessage,DecimalFormat,InputStreamReader,nanoTime,isDebugEnabled,info,put,generateMetricsMessage,putAllAttributes,countWordsInLine,set,valueOf,transfer,debug,error,AtomicBoolean,adjustCounter,readLine,read,format,isInfoEnabled,length,get,BufferedReader],generateMetricsMessage->[append,toString,join,StringBuilder,add],onSchedule->[isSet,getValue,asBoolean],countWordsInLine->[split,debug,join,isDebugEnabled,asList,length],getStandardCharsetNames->[toSet,collect],unmodifiableList,build,unmodifiableSet,compile,asList,name]]
Get the set of standard charset names.
This line causes the following check-style issue: > [WARNING] src/main/java/org/apache/nifi/processors/standard/CountText.java:[148] (sizes) LineLength: Line is longer than 200 characters (found 241).
@@ -1291,6 +1291,12 @@ void nmk16_state::bjtwin_map(address_map &map) map(0x0f0000, 0x0fffff).ram().share("mainram"); } +void nmk16_state::sabotenb_map(address_map &map) +{ + bjtwin_map(map); + map(0x094003, 0x094003).w(FUNC(nmk16_state::sabotenb_scroll_w)); // sabotenb specific? +} + static INPUT_PORTS_START( vandyke ) PORT_START("IN0") PORT_BIT( 0x01, IP_ACTIVE_LOW, IPT_COIN1 )
[No CFG could be retrieved]
region 0x07ff 0x08ff 0x08ff 0x08ff 0 Get the values in the NIC.
I highly doubt Saboten Bombers uses scrolly any different than Bombjack Twin, more likely they have the same meaning and the two memory maps can be merged (and the note for bjtwin at $94002/3 being wrong).
@@ -128,11 +128,11 @@ func syncTables() error { return x.StoreEngine("InnoDB").Sync2(tables...) } -// NewInstallTestEngine creates a new xorm.Engine for testing during install +// InitInstallTestEngineWithMigration creates a new xorm.Engine for testing during install // // This function will cause the basic database schema to be created -func NewInstallTestEngine(ctx context.Context, migrateFunc func(*xorm.Engine) error) (err error) { - x, err = GetNewEngine() +func InitInstallTestEngineWithMigration(ctx context.Context, migrateFunc func(*xorm.Engine) error) (err error) { + x, err = NewEngine() if err != nil { return fmt.Errorf("failed to connect to database: %w", err) }
[TableName,NewEngine,Exec,SetMapper,Count,ValueOf,Exist,Find,DBType,ShowSQL,DBConnStr,SetParams,SetDefaultContext,SetMaxOpenConns,Limit,Ping,New,Dialect,Select,OrderBy,Errorf,Type,Sync2,ColumnsSeq,TrimSpace,Indirect,SetConnMaxLifetime,SetSchema,Name,ToLower,Where,TableInfo,Get,SetLogger,DumpTablesToFile,Sprintf,Table,StoreEngine,SetMaxIdleConns,DumpTables]
NewInstallTestEngine creates a new xorm. Engine for the given database. It will SetEngine sets the engine for the new database.
I think it is `InitInstallEngineWithMigration`, no `Test` here
@@ -446,7 +446,7 @@ define([ var vertexBuffer = collection._vertexBuffer; var createVertexBuffer = collection._createVertexBuffer; - var instancesLength = collection.length; + var featuresLength = collection.length; var dynamic = collection._dynamic; var viewMatrix = context.uniformState.view; var center = dynamic ? Cartesian3.ZERO : collection._boundingVolume.center;
[No CFG could be retrieved]
Returns a function that can be used to create a unique id for a vertex. region Instance Buffer.
Since `ModelInstanceCollection` isn't strictly a 3D Tiles class, I think `instances` makes more sense here.
@@ -21,7 +21,9 @@ define("NOCSRFCHECK",1); // We accept to go on this page from external web site. $entity=(! empty($_GET['entity']) ? (int) $_GET['entity'] : (! empty($_POST['entity']) ? (int) $_POST['entity'] : 1)); if (is_numeric($entity)) define("DOLENTITY", $entity); -require '../../main.inc.php'; +$res=0; +if (! $res && file_exists("../../main.inc.php")) $res=@include("../../main.inc.php"); // to work if your module directory is into a subdir of root htdocs directory +if (! $res) die("Include of main fails"); if (empty($conf->stripe->enabled)) accessforbidden('',0,0,1); require_once DOL_DOCUMENT_ROOT.'/core/lib/admin.lib.php';
[fetch,getStripeCustomerAccount,addPaymentToBank,create,add_url_line,setDefaultLang,fetch_object,getSommePaiement,addline,createFromOrder,createPaymentStripe,classifyBilled,begin,switchEntity,load,getSumCreditNotesUsed,escape,getrights,generateDocument,query,getStripeAccount,validate,trans,num_rows,set_paid,commit,getSumDepositsUsed]
Displays a single in the current page. This method is used to find the endpoint s secret in your webhook settings.
Don't understand this. ipn.php file is always at same place and the main.inc.php always at same place too compared to ipn.php So this should be useless.
@@ -153,7 +153,8 @@ public class MongoMetadata @Override public void createTable(ConnectorSession session, ConnectorTableMetadata tableMetadata, boolean ignoreExisting) { - mongoSession.createTable(tableMetadata.getTable(), buildColumnHandles(tableMetadata)); + MongoTableHandle table = new MongoTableHandle(tableMetadata.getTable()); + mongoSession.createTable(table, buildColumnHandles(tableMetadata)); } @Override
[MongoMetadata->[dropTable->[dropTable],listTableColumns->[listTables],beginCreateTable->[createTable,dropTable],createTable->[createTable],getTableProperties->[getColumnHandles],getTableHandle->[getTableHandle],getTableMetadata->[getTableMetadata,getTableHandle],listTables->[listTables,listSchemaNames]]]
Drops a table from the database.
Revert the change for removing the empty line.
@@ -1741,9 +1741,11 @@ app.use('(/dist)?/rtv/*/v0/analytics-vendors/:vendor.json', (req, res) => { }); // Used by test/unit/test-3p.js to test script loading. -app.use('/test/script', function (req, res) { +app.use('/test/script', function (req, res, next) { + res.statusCode = 200; + res.setHeader('Content-Type', 'text/javascript'); res.send("typeof require === 'function' && require('foo.js');"); - res.status(200).send(); + next(); }); module.exports = app;
[No CFG could be retrieved]
Used by unit tests to load test script.
I don't believe this is correct. When you call `next`, you're asking for laster handlers to be called. But you've already called `send()`, which means the later handlers cannot set header nor push more data. So there doesn't seem to be a point in calling into later handlers.
@@ -1,9 +1,12 @@ -"""Functions to plot EEG sensor montages +import numpy as np + + +"""Functions to plot EEG sensor montages or digitizer montages """ def plot_montage(montage, scale_factor=1.5, show_names=False, show=True): - """Plot EEG sensor montage + """Plot a montage Parameters ----------
[plot_montage->[text,show,add_subplot,set_xlabel,set_ylabel,set_zlabel,scatter,zip,figure]]
Plot EEG sensor montage.
? this should go before the imports
@@ -1144,14 +1144,7 @@ public abstract class MethodHandle /*[ENDIF]*/ VMLangAccess access = VM.getVMLangAccess(); Object internalRamClass = access.createInternalRamClass(j9class); - Class<?> classObject = null; - if (JITHELPERS.is32Bit()) { - classObject = JITHELPERS.getClassFromJ9Class32((int)j9class); - } else { - classObject = JITHELPERS.getClassFromJ9Class64(j9class); - } - - Objects.requireNonNull(classObject); + Class<?> classObject = getClassFromJ9Class(j9class); type = MethodType.vmResolveFromMethodDescriptorString(methodDescriptor, access.getClassloader(classObject), null); final MethodHandles.Lookup lookup = new MethodHandles.Lookup(classObject, false);
[No CFG could be retrieved]
Resolves a method handle for a dynamic entry. Reads the bootstrap_method_ref from the binary - data.
As part of this refactoring, can you move the local definition here so it's `MethodType type = MethodType.vmResolveFromMethodDescriptorString(....)`
@@ -166,7 +166,10 @@ module ApplicationHelper end user_name = user.full_name - user_name << ' ' + I18n.t('atwho.res.removed') if !user_still_in_team + + unless skip_user_status || user_still_in_team + user_name << " #{I18n.t('atwho.res.removed')}" + end raw("<img src='#{user_avatar_absolute_url(user, :icon_small)}'" \ "alt='avatar' class='atwho-user-img-popover'" \
[project_page?->[in?],display_tooltip->[truncate,length,sanitize_input,strip],sample_groups_page_my_module?->[nil?],smart_annotation_filter_resources->[html],sample_groups_page_project?->[nil?],popover_for_user_name->[email,t,strftime,include?,missing_avatar,user_avatar_absolute_url,first,name,full_name,capitalize,raw],smart_annotation_notification->[fetch,base62_decode,include?,generate_annotation_notification,each,gsub,match,find_by_id],missing_avatar->[avatar],generate_annotation_notification->[assignments_notification,create,sanitize_input],user_avatar_absolute_url->[empty?,avatar,include?,missing_avatar,ssl?,url_for,present?,respond_to?],sample_types_page_my_module?->[nil?],sample_types_page_project?->[nil?],sample_groups_page_experiment?->[nil?],all_projects_page?->[in?],module_repository_page?->[nil?],sample_types_page_expermient?->[nil?],smart_annotation_filter_users->[base62_decode,popover_for_user_name,gsub,match,find_by_id],smart_annotation_parser->[is_a?,smart_annotation_filter_users,smart_annotation_filter_resources],include]
Displays a popover for a user. \ nag - user.
Layout/TrailingWhitespace: Trailing whitespace detected.
@@ -1259,11 +1259,12 @@ public class AppenderatorImpl implements Appenderator final File persistedFile; final File persistDir = createPersistDirIfNeeded(identifier); + final IndexSpec indexSpec = tuningConfig.getIndexSpec(); persistedFile = indexMerger.persist( indexToPersist.getIndex(), identifier.getInterval(), new File(persistDir, String.valueOf(indexToPersist.getCount())), - tuningConfig.getIndexSpecForIntermediatePersists(), + indexSpec, tuningConfig.getSegmentWriteOutMediumFactory() );
[AppenderatorImpl->[abandonSegment->[apply->[close],getBytesInMemory,pushBarrier,add],getBytesInMemory->[getBytesInMemory],persistHydrant->[createPersistDirIfNeeded],getQueryRunnerForIntervals->[getQueryRunnerForIntervals],createPersistDirIfNeeded->[computeIdentifierFile,computePersistDir],getQueryRunnerForSegments->[getQueryRunnerForSegments],push->[persistAll,add],computeIdentifierFile->[computePersistDir],close->[add],persistAll->[getBytesInMemory,getDataSource,add,throwPersistErrorIfExists],getDataSource->[getDataSource],add->[getDataSource,add,throwPersistErrorIfExists],clear->[add,throwPersistErrorIfExists],computeDescriptorFile->[computePersistDir],getOrCreateSink->[add],mergeAndPush->[push,add],unlockBasePersistDirectory->[close],bootstrapSinksFromDisk->[add]]]
Persist the given Hydrant.
Change doesn't belong to this PR
@@ -66,12 +66,12 @@ namespace Content.IntegrationTests.Tests.Destructible { var bluntDamageType = sPrototypeManager.Index<DamageTypePrototype>("Blunt"); - Assert.True(sDamageableComponent.ChangeDamage(bluntDamageType, 10, true)); + Assert.True(sDamageableComponent.TryChangeDamage(bluntDamageType, 10, true)); // No thresholds reached yet, the earliest one is at 20 damage Assert.IsEmpty(sThresholdListenerComponent.ThresholdsReached); - Assert.True(sDamageableComponent.ChangeDamage(bluntDamageType, 10, true)); + Assert.True(sDamageableComponent.TryChangeDamage(bluntDamageType, 10, true)); // Only one threshold reached, 20 Assert.That(sThresholdListenerComponent.ThresholdsReached.Count, Is.EqualTo(1));
[DestructibleThresholdActivationTest->[Task->[server,Count,IoCManager,Breakage,Damage,WaitRunTicks,sDestructibleEntity,ChangeDamage,Max,Min,Acts,Threshold,Empty,WaitPost,StartServerDummyTicker,CreateMap,Null,Behaviors,WaitAssertion,Key,TriggersOnce,EqualTo,WaitIdleAsync,IsEmpty,SpawnEntity,That,NotNull,Spawn,Clear,TotalDamage,Items,ThresholdsReached,sPrototypeManager,Trigger,Heal,Thresholds,Sound,Triggered,True]]]
Tests if a specific object is missing a threshold. Threshold 20 has a value that is greater than 1 and is greater than 1. Heal the entity down to 50 damage and down to 50 damage and Private methods Check if the given threshold is defined in the prototype.
same here with test prototypes
@@ -36,6 +36,7 @@ import java.util.List; public class LookupExprMacro implements ExprMacroTable.ExprMacro { + private static final byte LOOKUP_EXPR_KEY = 0x01; private static final String FN_NAME = "lookup"; private final LookupExtractorFactoryContainerProvider lookupExtractorFactoryContainerProvider;
[LookupExprMacro->[apply->[LookupExpr->[eval->[apply],visit->[LookupExpr,visit],stringify->[stringify]],LookupExpr,name]]]
Creates a macro that can be used to create a new object. Creates a new object for the given lookup name.
Can we centralize these? Otherwise I worry about accidentally reusing IDs.
@@ -51,9 +51,15 @@ func promHistogramToES(cc CounterCache, name string, labels common.MapStr, histo } // take count for this period (rate) + deacumulate - count := cc.RateUint64(name+labels.String()+fmt.Sprintf("%f", bucket.GetUpperBound()), bucket.GetCumulativeCount()) - sumCount - counts = append(counts, count) - sumCount += count + count := cc.RateUint64(name+labels.String()+fmt.Sprintf("%f", bucket.GetUpperBound()), bucket.GetCumulativeCount()) + + if count == 0 { + counts = append(counts, count) + continue + } + + counts = append(counts, count-sumCount) + sumCount = count } res := common.MapStr{
[RateUint64,GetCumulativeCount,Sprintf,GetUpperBound,GetBucket,Inf,String,NaN]
Get the values and counts for a single .
@exekias I have seen with CockroachDB that this rate is sometimes zero when `sumCount` has already accumulated other counters, so the value overflows here and data fails to be ingested. Not sure why it happens, I want to investigate more, but I think that it may be that CockroachDB changes or at least adds buckets on the go, not sure about the implications of this if this is the case. By now I am workarounding this issue by explicitly handling zero-valued count rate.
@@ -141,6 +141,10 @@ public class BuildCommand extends CLICommand { QueueTaskFuture<? extends AbstractBuild> f = job.scheduleBuild2(0, new CLICause(Jenkins.getAuthentication().getName()), a); if (wait || sync || follow) { + if (f == null) { + stderr.println("Build scheduling Refused by an extension, hence not in Queue"); + return -1; + } AbstractBuild b = f.waitForStart(); // wait for the start stdout.println("Started "+b.getFullDisplayName());
[BuildCommand->[run->[findNearest,getValue,getParameterDefinitions,createValue,isDisabled,ParametersAction,getResult,cancel,containsKey,StreamTaskListener,getParameterDefinition,CLICause,getParameterDefinitionNames,AbortException,getProperty,poll,BuildCommand_CLICause_CannotBuildConfigNotSaved,getKey,isEmpty,getDefaultParameterValue,waitForStart,sleep,println,isBuildable,getFullDisplayName,format,isHoldOffBuildUntilSave,scheduleBuild2,entrySet,getName,writeWholeLogTo,checkPermission,getClientCharset,get,BuildCommand_CLICause_CannotBuildUnknownReasons,add,BuildCommand_CLICause_CannotBuildDisabled],CLICause->[print->[BuildCommand_CLICause_ShortDescription,println,encodeTo],getShortDescription->[BuildCommand_CLICause_ShortDescription]],getShortDescription->[BuildCommand_ShortDescription],printUsageSummary->[println]]]
Runs the build. Wait for a build to finish.
Would recommend making this text a constant so `BuildCommandTest.groovy` can refer to it symbolically. (Most user text would go into a `Messages.properties` but I am not sure what the policy is for text rendered to CLI commands.)
@@ -5,4 +5,16 @@ const {powerMonitor, PowerMonitor} = process.atomBinding('power_monitor') Object.setPrototypeOf(PowerMonitor.prototype, EventEmitter.prototype) EventEmitter.call(powerMonitor) +powerMonitor.on('newListener', (event) => { + if (event === 'shutdown' && powerMonitor.listenerCount('shutdown') === 0) { + powerMonitor.blockShutdown() + } +}) + +powerMonitor.on('removeListener', (event) => { + if (event === 'shutdown' && powerMonitor.listenerCount('shutdown') === 0) { + powerMonitor.unblockShutdown() + } +}) + module.exports = powerMonitor
[No CFG could be retrieved]
This module is exported to allow the power monitor to be used in a future version of the.
Is the new "shutdown" listener added only _after_ this function is called?
@@ -44,7 +44,7 @@ public class RedisWebAuthnCredentialRepository extends BaseWebAuthnCredentialRep @Override public Collection<CredentialRegistration> getRegistrationsByUsername(final String username) { - val keys = (Set<String>) this.redisTemplate.keys(buildRedisKeyForRecord(username)); + val keys = (Set<String>) RedisUtils.keys(this.redisTemplate, buildRedisKeyForRecord(username)); if (keys != null) { return toCredentialRegistrationsAsStream(keys).collect(Collectors.toSet()); }
[RedisWebAuthnCredentialRepository->[toCredentialRegistrationsAsStream->[flatMap],update->[toList,build,buildRedisKeyForRecord,writeValueAsString,set,collect,isEmpty,encode,delete],buildRedisKeyForRecord->[toLowerCase],stream->[getPatternRedisKey,empty,toCredentialRegistrationsAsStream,keys],getRegistrationsByUsername->[toSet,buildRedisKeyForRecord,collect,keys],getSimpleName]]
Gets registrations by username.
Remove the cast
@@ -204,6 +204,7 @@ func (fs *filesystem) fileAttr(f *pfs.File) (*fuse.Attr, fuse.Status) { if err != nil { return nil, toStatus(err) } + fmt.Printf("Size: %d\n", fi.SizeBytes) return &fuse.Attr{ Mode: fileMode(fi), Size: fi.SizeBytes,
[OpenDir->[ListRepo,commit,parsePath,ListFileF],getAttr->[fileAttr,parsePath,repoAttr],parsePath->[Join,NewRepo,Split,NewFile,commit],repoAttr->[InspectRepo],GetAttr->[getAttr],commit->[InspectBranch,Unlock,RLock,Lock,IsUUIDWithoutDashes,RUnlock],fileAttr->[InspectFile],Root,Error,getUnmount,Contains,MountRoot,Notify,getCommits,getFuse,Errorf,Unmount,NewDefaultFileSystem,Serve,NewPathNodeFs]
fileAttr returns fuse. Attr for a file in a commit.
Is this a debug line?
@@ -328,6 +328,9 @@ type folderBranchOps struct { // Closed on shutdown shutdownChan chan struct{} + // Closed once we're done shutting down. Any goroutine that logs must be + // registered with this WaitGroup, to avoid test races. + doneWg sync.WaitGroup // Can be used to turn off notifications for a while (e.g., for testing) updatePauseChan chan (<-chan struct{})
[waitForRootBlockFetch->[logIfErr],kickOffPartialMarkAndSweep->[Lock,Unlock,doPartialMarkAndSweep],handleTLFBranchChange->[setHeadSuccessorLocked,isUnmergedLocked,id,Lock,handleUnflushedEditNotifications,Unlock,setBranchIDLocked],SetInitialHeadFromServer->[kickOffPartialSyncIfNeeded,kickOffRootBlockFetch,waitForRootBlockFetch,id,startOp,endOp,Lock,validateHeadLocked,identifyOnce,getHead,Unlock,setInitialHeadTrustedLocked],runUnlessShutdown->[newCtxWithFBOID],setMtimeLocked->[notifyAndSyncOrSignal,pathFromNodeForMDWriteLocked,getMDForWriteLockedForFilename,nowUnixNano],processMissedLookup->[makeFakeFileEntry,makeFakeDirEntry],setHeadSuccessorLocked->[id,Lock,setHeadLocked,Unlock,setInitialHeadTrustedLocked],kickOffPartialMarkAndSweepIfNeeded->[getProtocolSyncConfigUnlocked,kickOffPartialMarkAndSweep],makeFakeFileEntry->[makeFakeEntryID],NewNotificationChannel->[getEditMonitoringChannel],getUnmergedMDUpdatesLocked->[id,getCurrMDRevision],pathFromNodeForRead->[pathFromNodeHelper],updateLastGetHeadTimestamp->[Lock,Unlock],makeFakeDirEntry->[makeFakeEntryID,nowUnixNano],applyMDUpdates->[Lock,applyMDUpdatesLocked,Unlock],getCachedDirOpsCount->[Lock,Unlock],canonicalPath->[String,pathFromNodeForRead,PathType],onMDFlush->[newCtxWithFBOID,handleMDFlush],getHead->[commitHeadLocked,updateLastGetHeadTimestamp],finalizeResolution->[Lock,finalizeResolutionLocked,Unlock],getProtocolSyncConfigUnlocked->[getProtocolSyncConfig],reResolveAndIdentify->[id],commitHeadLocked->[id],startMonitorChat->[Lock,Unlock],getDirChildren->[pathFromNodeForRead,getMDForReadNeedIdentify],cacheHashBehavior->[id],finalizeMDRekeyWriteLocked->[setHeadSuccessorLocked,Lock,waitForJournalLocked,Unlock,loadCachedMDChanges,setBranchIDLocked],makeRecentFilesSyncConfig->[id],createEntryLocked->[syncDirUpdateOrSignal,checkForUnlinkedDir,id,branch,canonicalPath,pathFromNodeForMDWriteLocked,getMDForWriteLockedForFilename,nowUnixNano],handleEditNotifications->[sendEditNotifications,makeEditNotifications],SetInitialH
eadToNew->[initMDLocked,startOp,endOp,Lock,identifyOnce,Unlock],makeEncryptedPartialPathsLocked->[id,cacheHashBehavior,getProtocolSyncConfig],maybeUnembedAndPutBlocks->[id],getMDForWriteLockedForFilename->[getMDForWriteOrRekeyLocked],setInitialHeadTrustedLocked->[setHeadLocked],doFastForwardLocked->[setHeadSuccessorLocked,kickOffRootBlockFetch,waitForRootBlockFetch],doPartialMarkAndSweep->[id,startOp,endOp,markRecursive],locallyFinalizeTLF->[setHeadSuccessorLocked,id,Lock,oa,Unlock],doFavoritesOp->[deleteFromFavorites,addToFavoritesByHandle,addToFavorites],blockUnmergedWrites->[Lock],getAndApplyNewestUnmergedHead->[setHeadSuccessorLocked,id,Lock,notifyBatchLocked,Unlock],finalizeResolutionLocked->[newCtxWithFBOID,id,Lock,notifyOneOpLocked,finalizeBlocks,handleEditNotifications,handleUnflushedEditNotifications,setHeadConflictResolvedLocked,Unlock,loadCachedMDChanges,setBranchIDLocked],checkNodeForRead->[branch,GetTLFHandle,checkNode],ClearPrivateFolderMD->[id,Lock,Unlock],SetSyncConfig->[newCtxWithFBOID,kickOffRootBlockFetch,id,Lock,getLatestMergedMD,kickOffPartialSync,reResolveAndIdentify,getProtocolSyncConfig,branch,Unlock,makeEncryptedPartialPathsLocked,triggerMarkAndSweepLocked],waitForJournalLocked->[id],rekeyLocked->[getAndApplyMDUpdates,finalizeMDRekeyWriteLocked,isUnmergedLocked,startOp,endOp,getMDForRekeyWriteLocked,getHead],removeEntryLocked->[checkForUnlinkedDir,notifyAndSyncOrSignal,unrefEntryLocked],SetEx->[startOp,endOp,doMDWriteWithRetryUnlessCanceled,checkNodeForWrite,setExLocked],GetEditHistory->[id,getHead],kickOffPartialSync->[Lock,Unlock,doPartialSync],notifyOneOpLocked->[id,pathFromNodeForRead,getUnlinkPathBeforeUpdatingPointers,searchForNode],getProtocolSyncConfig->[id],getMDForReadHelper->[getMDForRead],unstageLocked->[getAndApplyMDUpdates,isUnmergedLocked,id,finalizeMDWriteLocked,notifyBatchLocked,undoUnmergedMDUpdatesLocked,getSuccessorMDForWriteLocked,maybeUnembedAndPutBlocks],checkNodeForWrite->[String,checkNode],receiveNewEditChat->[getEdi
tMonitoringChannel],notifyOneOp->[Lock,Unlock,notifyOneOpLocked],setExLocked->[notifyAndSyncOrSignal,pathFromNodeForMDWriteLocked,getMDForWriteLockedForFilename,nowUnixNano],getMDForReadNeedIdentify->[getMDForReadHelper],getCurrMDRevision->[getCurrMDRevisionLocked],getMDForMigrationLocked->[getMDForWriteOrRekeyLocked],finalizeGCOp->[setBranchIDLocked,setHeadSuccessorLocked,Lock,finalizeBlocks,getSuccessorMDForWriteLocked,Unlock,loadCachedMDChanges,maybeUnembedAndPutBlocks],commitFlushedMD->[id,kickOffPartialSyncIfNeeded,kickOffRootBlockFetch],Rename->[renameLocked,startOp,endOp,doMDWriteWithRetryUnlessCanceled,checkNodeForWrite],Lock->[Lock],setHeadPredecessorLocked->[setHeadLocked],CreateDir->[startOp,endOp,doMDWriteWithRetryUnlessCanceled,checkNodeForWrite,createEntryLocked],RemoveEntry->[removeEntryLocked,startOp,endOp,doMDWriteWithRetryUnlessCanceled,checkNodeForWrite,pathFromNodeForMDWriteLocked,getMDForWriteLockedForFilename],clearConflictView->[id,Lock,Unlock],getMDForReadNoIdentify->[getMDForReadHelper],makeEditNotifications->[id],kickOffPartialSyncIfNeeded->[getProtocolSyncConfigUnlocked,makeRecentFilesSyncConfig,kickOffPartialSync],getMDForRekeyWriteLocked->[getMDForWriteOrRekeyLocked],Lookup->[transformReadError,startOp,endOp,lookup,checkNodeForRead],notifyAndSyncOrSignal->[syncDirUpdateOrSignal],finalizeBlocks->[id,cacheHashBehavior],pathFromNodeForMDWriteLocked->[pathFromNodeHelper],statEntry->[transformReadError,pathFromNodeForRead,getMDForReadNoIdentify,checkNodeForRead,statUsingFS,getMDForReadNeedIdentify],MigrateToImplicitTeam->[Lock,Unlock,finalizeMDRekeyWriteLocked,getMDForMigrationLocked],getSuccessorMDForWriteLocked->[getSuccessorMDForWriteLockedForFilename],SetMtime->[startOp,endOp,doMDWriteWithRetryUnlessCanceled,checkNodeForWrite,setMtimeLocked],createLinkLocked->[checkForUnlinkedDir,notifyAndSyncOrSignal,pathFromNodeForMDWriteLocked,getMDForWriteLockedForFilename,nowUnixNano],GetTLFHandle->[getHead],applyMDUpdatesLocked->[kickOffPartialSyncI
fNeeded,setHeadSuccessorLocked,kickOffRootBlockFetch,isUnmergedLocked,waitForRootBlockFetch,Lock,getJournalPredecessorRevision,notifyOneOpLocked,getCurrMDRevisionLocked,Unlock],recomputeEditHistory->[getEditMessages,id],getAndApplyMDUpdates->[id,getLatestMergedRevision],removeDirLocked->[removeEntryLocked,pathFromNodeForMDWriteLocked,getMDForWriteLockedForFilename],SyncAll->[syncAllLocked,startOp,doMDWriteWithRetryUnlessCanceled,endOp],getUnlinkPathBeforeUpdatingPointers->[pathFromNodeForRead],waitForAndProcessUpdates->[maybeFastForward,getAndApplyMDUpdates,deferLogIfErr],sendEditNotifications->[id,getConvID],Shutdown->[Shutdown],kickOffEditActivityPartialSync->[getProtocolSyncConfigUnlocked,makeRecentFilesSyncConfig,kickOffPartialSync],markRecursive->[markRecursive],Reset->[Lock,Unlock,GetTLFHandle],registerForUpdatesShouldFireNow->[Lock,Unlock],Write->[startOp,endOp,Write,checkNodeForWrite,signalWrite,getMDForRead],Read->[transformReadError,startOp,endOp,pathFromNodeForRead,checkNodeForRead,Read,getMDForReadNeedIdentify],setHeadLocked->[commitFlushedMD,id,getJournalPredecessorRevision,validateHeadLocked,startMonitorChat,setBranchIDLocked],oa->[id],GetNodeMetadata->[id,startOp,endOp,oa,statEntry],undoUnmergedMDUpdatesLocked->[getUnmergedMDUpdatesLocked,Lock,id,setHeadPredecessorLocked,setLatestMergedRevisionLocked,undoMDUpdatesLocked,Unlock,setBranchIDLocked],syncAllLocked->[id,startOp,endOp,finalizeMDWriteLocked,branch,startSyncLocked,getSuccessorMDForWriteLocked,getHead],CreateFile->[id,startOp,endOp,doMDWriteWithRetryUnlessCanceled,checkNodeForWrite,createEntryLocked],PushStatusChange->[PushStatusChange],getLatestMergedMD->[id,getLatestMergedRevision],doPartialSync->[startOp,endOp,syncOneNode],registerAndWaitForUpdates->[ctxWithFBOID,locallyFinalizeTLF,Lock,runUnlessShutdown,Unlock],GetUpdateHistory->[id,startOp,endOp,oa,String],getTrustedHead->[commitHeadLocked,updateLastGetHeadTimestamp],getUnmergedMDUpdates->[id,Lock,Unlock,getCurrMDRevision],doMDWriteWithRet
ry->[Lock,Unlock,maybeWaitForSquash],handleEditActivity->[id,getEditMessages,initEditChatChannels,getLatestMergedRevision,recomputeEditHistory,kickOffEditActivityPartialSync,String],syncAllUnlocked->[syncAllLocked,Lock,Unlock],getEditMonitoringChannel->[Lock,Unlock],checkForUnlinkedDir->[String],getConvID->[id,Lock,Unlock],lookup->[statUsingFS,getMDForReadNeedIdentify,processMissedLookup],ForceFastForward->[newCtxWithFBOID,kickOffPartialSyncIfNeeded,doFastForwardLocked,id,Lock,Unlock],RemoveDir->[removeDirLocked,startOp,endOp,doMDWriteWithRetryUnlessCanceled,RemoveDir,checkNodeForWrite],SyncFromServer->[syncAllUnlocked,getAndApplyMDUpdates,id,startOp,endOp,isUnmerged],monitorEditsChat->[newCtxWithFBOID,Lock,recomputeEditHistory,Unlock,handleEditActivity],maybeFastForward->[kickOffPartialSyncIfNeeded,doFastForwardLocked,isUnmergedLocked,id,Lock,getJournalPredecessorRevision,isUnmerged,Unlock],getMDForWriteOrRekeyLocked->[id,Lock,identifyOnce,setHeadLocked,getTrustedHead,Unlock],registerForUpdates->[id,registerForUpdatesShouldFireNow,startOp,endOp,getLatestMergedRevision],backgroundFlusher->[id,getCachedDirOpsCount,SyncAll,runUnlessShutdown],isUnmerged->[Lock,Unlock],GetDirChildren->[getDirChildren,transformReadError,startOp,endOp,checkNodeForRead],partialMarkAndSweepLoop->[kickOffPartialMarkAndSweepIfNeeded],getSuccessorMDForWriteLockedForFilename->[getMDForWriteLockedForFilename],getRootNode->[startOp,endOp,Lock,getMDForWriteOrRekeyLocked,Unlock,getMDForRead],TeamAbandoned->[newCtxWithFBOID,locallyFinalizeTLF],statUsingFS->[makeFakeFileEntry,makeFakeDirEntry],GetSyncConfig->[id,branch,getHead,getProtocolSyncConfigUnlocked],CreateLink->[startOp,endOp,doMDWriteWithRetryUnlessCanceled,checkNodeForWrite,createLinkLocked],newCtxWithFBOID->[ctxWithFBOID],unblockUnmergedWrites->[Unlock],initMDLocked->[cacheHashBehavior,id,Lock,setNewInitialHeadLocked,oa,getHead,Unlock,maybeUnembedAndPutBlocks],doMDWriteWithRetryUnlessCanceled->[doMDWriteWithRetry],getJournalPredecessorRevi
sion->[id,String],finalizeMDWriteLocked->[setHeadSuccessorLocked,isUnmergedLocked,id,Lock,finalizeBlocks,handleEditNotifications,handleUnflushedEditNotifications,Unlock,loadCachedMDChanges,setBranchIDLocked],makeFakeEntryID->[String],TeamNameChanged->[newCtxWithFBOID,id,Lock,oa,Unlock],initEditChatChannels->[id,String],InvalidateNodeAndChildren->[startOp,endOp],transformReadError->[id,GetTLFHandle],onTLFBranchChange->[newCtxWithFBOID,handleTLFBranchChange],markForReIdentifyIfNeeded->[Lock,Unlock],setNewInitialHeadLocked->[setHeadLocked],identifyOnce->[Lock,Unlock],setHeadConflictResolvedLocked->[setHeadLocked],handleUnflushedEditNotifications->[id,makeEditNotifications],Truncate->[startOp,endOp,signalWrite,checkNodeForWrite,Truncate,getMDForRead],getMDForRead->[getTrustedHead,identifyOnce],PushConnectionStatusChange->[getEditMonitoringChannel],getMostRecentFullyMergedMD->[getMDForReadHelper,getJournalPredecessorRevision,id],syncDirUpdateOrSignal->[signalWrite],renameLocked->[checkForUnlinkedDir,notifyAndSyncOrSignal,pathFromNodeForMDWriteLocked,unrefEntryLocked,getMDForWriteLockedForFilename,nowUnixNano],Stat->[statEntry,deferLogIfErr],handleMDFlush->[commitFlushedMD,id,Lock,setLatestMergedRevisionLocked,handleEditNotifications,Unlock],FolderStatus->[startOp,endOp],getMDForReadNeedIdentifyOnMaybeFirstAccess->[Lock,Unlock,getMDForRead,getMDForWriteOrRekeyLocked],UnstageForTesting->[startOp,endOp,isUnmerged,unstageLocked,doMDWriteWithRetry],undoMDUpdatesLocked->[Lock,setHeadPredecessorLocked,notifyOneOpLocked,getCurrMDRevisionLocked,Unlock],Unlock->[Unlock],String]
Adds a doneWg WaitGroup to folderBranchOps, closed once shutdown completes; any goroutine that logs must be registered with it to avoid races in tests.
By searching around in this file for `go `, I see a number of goroutines that you aren't tracking with this waitgroup. I guess some of them are because they're already being tracked by another waitgroup, like `fbo.partialSyncs` and `fbo.editActivity`. However those are only waited on as part of `SyncFromServer`, which is only called during shutdown by the first `KBFSOps` instance to shutdown, so those goroutines could still cause problems for the other instances. There are others, like the one in `fbo.commitHeadLocked()` that only log in error conditions -- maybe we don't care about those ones? Though they could still easily get shutdown errors, and thus log.
@@ -148,6 +148,14 @@ public class PrefixParser implements Callable<Void>, SubcommandWithParent { return; } + BucketLayout bucketLayout = info.getBucketLayout(); + if (!bucketLayout.isFileSystemOptimized()) { + System.out.println("Prefix tool only works for FileSystem Optimized" + + "bucket. Bucket Type is:" + bucketLayout); + metadataManager.stop(); + return; + } + long lastObjectId = info.getObjectID(); WithParentObjectId objectBucketId = new WithParentObjectId(); objectBucketId.setObjectID(lastObjectId);
[PrefixParser->[call->[parse],dumpTableInfo->[getValue,dumpInfo,getRangeKVs,getEffectivePath,get,getKey,toString,getPrefixFilter],dumpInfo->[getObjectID,ordinal,getParentObjectID,println],getParserStats->[ordinal],getEffectivePath->[Path],parse->[Path,getOzonePathKey,getParent,start,OzoneConfiguration,toString,getVolumeKey,OmMetadataManagerImpl,dumpTableInfo,WithParentObjectId,getBucketKey,getKeyTable,set,getDirectoryTable,hasNext,getEffectivePath,println,stop,isExist,getObjectID,iterator,exists,next,getBucketLayout,dumpInfo,get,setObjectID,ordinal],getPrefixFilter->[toString,addFilter],main->[call],values]]
Parse the given file and add it to the database. Dump the files and directories at the specified level.
@mukul1987 can you please modify message `"Bucket Type is:"` -> to -> `"Bucket Layout is:"`
@@ -273,6 +273,14 @@ class Article < ApplicationRecord end end + def contrived_description + text_portion = body_text.present? ? body_text[0..100].tr("\n", " ").strip.to_s : "" + text_portion = + text_portion.strip + "..." if body_text.size > 100 + return "A post by #{user.name}" if text_portion.blank? + + text_portion.strip + end + def body_text ActionView::Base.full_sanitizer.sanitize(processed_html)[0..7000] end
[Article->[username->[username],update_notifications->[update_notifications],set_cached_object->[username],readable_edit_date->[edited?]]]
trigger_index triggers the index for the given record.
can we call this `artificial_description` ? I had to google what `contrived` means #nonnativespeakersproblems
@@ -916,7 +916,7 @@ class TokenNetwork: partner_locked_amount, partner_locksroot, ) is False: - raise ChannelIncorrectStateError('local state can not be used to call settle') + raise RaidenUnrecoverableError('local state can not be used to call settle') our_maximum = transferred_amount + locked_amount partner_maximum = partner_transferred_amount + partner_locked_amount
[TokenNetwork->[settle_block_number->[detail_channel],channel_is_closed->[detail_channel],all_events_filter->[events_filter],update_transfer->[channel_is_closed],channel_is_settled->[detail_channel],_check_for_outdated_channel->[detail_channel],close->[channel_is_opened],channel_is_opened->[detail_channel],detail_participants->[ParticipantsDetails,_inspect_channel_identifier,detail_participant],unlock->[channel_is_settled],_verify_settle_state->[detail_participants],detail_participant->[ParticipantDetails,_call_and_check_result],settle->[channel_is_closed,channel_exists_and_not_settled],settlement_timeout_max->[settlement_timeout_max],channelnew_filter->[events_filter],detail_channel->[_inspect_channel_identifier,_call_and_check_result,ChannelData],set_total_deposit->[token_address,channel_is_opened,detail_participant],_inspect_channel_identifier->[_call_and_check_result],detail->[ChannelDetails,detail_participants,detail_channel],closing_address->[detail_participants,detail_channel],can_transfer->[channel_is_opened,detail_participant],settlement_timeout_min->[settlement_timeout_min],withdraw->[channel_is_opened]]]
Attempts to settle a channel, raising RaidenUnrecoverableError when the local state cannot be used to call settle.
I don't understand this check. Why does it exist and why is it unrecoverable?
@@ -53,3 +53,15 @@ func ServiceURL(serviceName string) *url.URL { return s } + +// Update the VM display name on vSphere UI +func DisplayName(id, name string) string { + shortID := id[:shortIDLen] + nameMaxLen := maxVMNameLength - len(shortID) + prettyName := name + if len(prettyName) > nameMaxLen-1 { + prettyName = prettyName[:nameMaxLen-1] + } + + return fmt.Sprintf("%s-%s", prettyName, shortID) +}
[Parse,Hostname,Fatal]
DisplayName builds a VM display name for the vSphere UI by truncating the given name and appending a shortened ID so the result fits within maxVMNameLength.
do we need to make it smarter? like if pretty name is shorter than nameMaxLen, can we make the id longer than a shortID
@@ -203,6 +203,8 @@ mkdir -p %{?buildroot}/%{_unitdir} install -m 644 utils/systemd/daos-server.service %{?buildroot}/%{_unitdir} install -m 644 utils/systemd/daos-agent.service %{?buildroot}/%{_unitdir} +%pre server +getent group daos_admins >/dev/null || groupadd -r daos_admins %post server -p /sbin/ldconfig %postun server -p /sbin/ldconfig
[No CFG could be retrieved]
RPM spec change: adds a %pre scriptlet to the server package that creates the daos_admins system group if it does not already exist, alongside the systemd unit installation.
probably need to bump release version for this
@@ -1143,7 +1143,13 @@ public abstract class SslContext { static TrustManagerFactory buildTrustManagerFactory( X509Certificate[] certCollection, TrustManagerFactory trustManagerFactory) throws NoSuchAlgorithmException, CertificateException, KeyStoreException, IOException { - final KeyStore ks = KeyStore.getInstance(KeyStore.getDefaultType()); + return buildTrustManagerFactory(certCollection, trustManagerFactory, KeyStore.getDefaultType()); + } + + static TrustManagerFactory buildTrustManagerFactory( + X509Certificate[] certCollection, TrustManagerFactory trustManagerFactory, String keyStore) + throws NoSuchAlgorithmException, CertificateException, KeyStoreException, IOException { + final KeyStore ks = KeyStore.getInstance(keyStore); ks.load(null, null); int i = 1;
[SslContext->[toPrivateKeyInternal->[toPrivateKey],newClientContextInternal->[defaultClientProvider,verifyNullSslContextProvider],newHandler->[newEngine,newHandler],newClientContext->[newClientContext],getPrivateKeyFromByteBuffer->[generateKeySpec],toX509CertificatesInternal->[toX509Certificates],newServerContext->[newServerContext],buildKeyManagerFactory->[buildKeyStore,buildKeyManagerFactory],newServerContextInternal->[defaultServerProvider],buildTrustManagerFactory->[buildTrustManagerFactory]]]
Build a trust manager factory from a list of X509Certificates.
Instead of creating a new function, just modify the old one, since it was package-private. Unless there's a lot a usages.
@@ -290,7 +290,7 @@ func bulkEncodePublishRequest( log.Errorf("Failed to encode event meta data: %+v", err) continue } - if opType, err := event.GetMetaStringValue(opTypeKey); err == nil && opType == opTypeDelete { + if opType, err := events.GetMetaStringValue(*event, events.FieldMetaOpType); err == nil && opType == events.FieldMetaOpTypeDelete.String() { // We don't include the event source in a bulk DELETE bulkItems = append(bulkItems, meta) } else {
[publishEvents->[SetLabel,Error,NewBatch,Failed,Bulk,Now,Duplicate,End,GetVersion,StartSpan,Acked,CaptureError,Errorf,Send,Sub,ErrTooMany,Dropped,Debugf],Test->[Test],Connect->[Connect],Close->[Close],Publish->[Events,RetryEvents,ACK,publishEvents],Unlock,IsEmpty,NewLogger,GetMetaStringValue,Error,New,Warnf,NewConnection,Select,Lock,Errorf,EncodeToString,Debugf]
bulkEncodePublishRequest encodes all bulk requests and returns the slice of events successfully added; for DELETE operations only the metadata is included, not the event source.
do we still store a string for `opType` in the event? Shouldn't it be the enum. Maybe a helper `events.GetOpType(*event)` would be helpful.
@@ -68,7 +68,7 @@ LocalPlayer::LocalPlayer(Client *client, const char *name): m_sneak_node(32767,32767,32767), m_sneak_node_exists(false), m_need_to_get_new_sneak_node(true), - m_sneak_node_bb_ymax(0), + m_sneak_node_bb_top(0,0,0,0,0,0), m_old_node_below(32767,32767,32767), m_old_node_below_type("air"), m_can_jump(false),
[getLightPosition->[v3f,floatToInt],getEyeOffset->[v3f],accelerateHorizontal->[normalize,getLength],getStandingNodePos->[v3f,getPosition,floatToInt],applyControl->[accelerateHorizontal,normalize,setYaw,crossProduct,getSpeed,accelerateVertical,getBool,setSpeed,event,v3f,getYaw,rotateXZBy,checkLocalPrivilege,setPitch], Player->[v3f,idef],getFootstepNodePos->[v3f,getPosition,floatToInt],move->[size,getStandingNodePos,getEyePosition,getPosition,fabs,move,setPosition,sanity_check,end,begin,v3s16,setSpeed,push_back,checkLocalPrivilege,getDistanceFrom,event,ndef,getMap,v3f,getSpeed,getNodeNoEx,intToFloat,collisionMoveSimple,getContent,get,MYMAX,getBool,itemgroup_get,rangelim,getCollisionBoxes,floatToInt]]
Initializes a new LocalPlayer, replacing the scalar m_sneak_node_bb_ymax member with the full bounding-box member m_sneak_node_bb_top.
Spaces after commas?
@@ -117,7 +117,9 @@ function dbQuery($sql, $parameters = array()) $mysql_error = mysqli_error($database_link); if (isset($config['mysql_log_level']) && ((in_array($config['mysql_log_level'], array('INFO', 'ERROR')) && !preg_match('/Duplicate entry/', $mysql_error)) || in_array($config['mysql_log_level'], array('DEBUG')))) { if (!empty($mysql_error)) { - logfile(date($config['dateformat']['compact']) . " MySQL Error: $mysql_error ($fullSql)"); + $error_msg = "MySQL Error: $mysql_error ($fullSql)"; + c_echo("%R$error_msg%n\n", $debug); + logfile(date($config['dateformat']['compact']) . ' ' . $error_msg); } } }
[No CFG could be retrieved]
Queries the database and returns a single result or false if no result is found.
Maybe drop the `$debug`? This is the kind of thing we want to see regardless of debug being enabled or not.
@@ -271,6 +271,12 @@ class TransformExecutor(_ExecutorService.CallableTask): self._transform_evaluator_registry = transform_evaluator_registry self._evaluation_context = evaluation_context self._input_bundle = input_bundle + # For non-empty bundles, store the window of the max EOW + if input_bundle._elements: + self._latest_main_input_window = input_bundle._elements[0].windows[0] + for elem in input_bundle.get_elements_iterable(): + if elem.windows[0].end > self._latest_main_input_window.end: + self._latest_main_input_window = elem.windows[0] self._fired_timers = fired_timers self._applied_ptransform = applied_ptransform self._completion_callback = completion_callback
[_CompletionCallback->[handle_result->[handle_result]],Executor->[start->[start],await_completion->[await_completion]],_ExecutorServiceParallelExecutor->[await_completion->[shutdown,await_completion],schedule_consumption->[parallel,schedule,serial,TransformExecutor],__init__->[_CompletionCallback,_TransformExecutorServices,_ExecutorService],start->[submit],_MonitorTask->[_add_work_if_necessary->[_is_executing,schedule_consumption],call->[_VisibleExecutorUpdate,poll,schedule_consumers,schedule_unprocessed_bundle,offer,submit],_fire_timers->[_CompletionCallback,schedule_consumption],_should_shutdown->[shutdown,offer,_VisibleExecutorUpdate]]],_ExecutorService->[shutdown->[shutdown],_ExecutorServiceWorker->[run->[_get_task_or_none,call,_update_name]],__init__->[_ExecutorServiceWorker]],TransformExecutor->[call->[handle_exception,complete],attempt_call->[handle_result]],_TransformEvaluationState->[schedule->[submit]],_TransformExecutorServices->[serial->[_SerialEvaluationState],__init__->[_ParallelEvaluationState]]]
Initialize the object with the given parameters.
<!--new_thread; commit:0d82258b7674656d3a6f86748f47d23de8cf512f; resolved:0--> Period at end of sentence. Can you add a TODO to modify the Bundle class to retrieve this more efficiently?
@@ -1252,6 +1252,7 @@ def test_create_stoploss_order_insufficient_funds(mocker, default_conf, caplog, @pytest.mark.usefixtures("init_persistence") def test_handle_stoploss_on_exchange_trailing(mocker, default_conf, fee, limit_buy_order, limit_sell_order) -> None: + # TODO-lev: test for short # When trailing stoploss is set stoploss = MagicMock(return_value={'id': 13434334}) patch_RPCManager(mocker)
[test_create_trade_no_signal->[patch_RPCManager],test_order_book_depth_of_market_high_delta->[patch_RPCManager],test_execute_trade_exit_market_order->[patch_RPCManager],test_check_handle_cancelled_buy->[patch_RPCManager],test_tsl_only_offset_reached->[patch_RPCManager],test_enter_positions_no_pairs_left->[patch_RPCManager],test_edge_should_ignore_strategy_stoploss->[patch_RPCManager],test_add_stoploss_on_exchange->[patch_RPCManager],test_handle_trade_roi->[patch_RPCManager],test_get_trade_stake_amount->[patch_RPCManager],test_check_handle_timedout_buy_usercustom->[patch_RPCManager],test_create_trades_multiple_trades->[patch_RPCManager],test_locked_pairs->[patch_RPCManager],test_handle_cancel_enter_corder_empty->[patch_RPCManager],test_execute_trade_exit_with_stoploss_on_exchange->[patch_RPCManager],test_create_trade->[patch_RPCManager],test_create_trade_too_small_stake_amount->[patch_RPCManager],test_disable_ignore_roi_if_buy_signal->[patch_RPCManager],test_may_execute_trade_exit_after_stoploss_on_exchange_hit->[patch_RPCManager],test_edge_overrides_stake_amount->[patch_RPCManager],test_enter_positions_no_pairs_in_whitelist->[patch_RPCManager],test_edge_overrides_stoploss->[patch_RPCManager],test_process_exchange_failures->[patch_RPCManager],test_order_dict_dry_run->[patch_RPCManager],test_sell_profit_only_enable_profit->[patch_RPCManager],test_check_available_stake_amount->[patch_RPCManager],test_get_valid_price->[patch_RPCManager],test_close_trade->[patch_RPCManager],test__safe_exit_amount_error->[patch_RPCManager],test_handle_overlapping_signals->[patch_RPCManager],test_handle_stoploss_on_exchange->[patch_RPCManager],test_sell_profit_only_disable_loss->[patch_RPCManager],test_create_trades_preopen->[patch_RPCManager],test_check_handle_timedout_buy_exception->[patch_RPCManager],test_handle_sle_cancel_cant_recreate->[patch_RPCManager],test_handle_cancel_enter->[patch_RPCManager],test_create_trade_zero_stake_amount->[patch_RPCManager],test_execute_trade_exit_down->[
patch_RPCManager],test_create_trade_limit_reached->[patch_RPCManager],test_process_trade_handling->[patch_RPCManager],test_process_informative_pairs_added->[patch_RPCManager],test_check_handle_timedout_partial_fee->[patch_RPCManager],test_order_dict_live->[patch_RPCManager],test_check_handle_timedout_exception->[patch_RPCManager],test_process_trade_no_whitelist_pair->[patch_RPCManager],test_process_trade_creation->[patch_RPCManager],test_execute_trade_exit_down_stoploss_on_exchange_dry_run->[patch_RPCManager],test_check_handle_timedout_partial->[patch_RPCManager],test__safe_exit_amount->[patch_RPCManager],test_trailing_stop_loss_offset->[patch_RPCManager],test_update_closed_trades_without_assigned_fees->[patch_with_fee],test_enter_positions_global_pairlock->[patch_RPCManager],test_check_handle_timedout_buy->[patch_RPCManager],test_execute_entry->[patch_RPCManager],test_handle_cancel_exit_limit->[patch_RPCManager],test_check_handle_cancelled_sell->[patch_RPCManager],test_sell_not_enough_balance->[patch_RPCManager],test_order_book_depth_of_market->[patch_RPCManager],test_check_handle_timedout_partial_except->[patch_RPCManager],test_handle_trade_use_sell_signal->[patch_RPCManager],test_handle_stoploss_on_exchange_trailing->[patch_RPCManager],test_trailing_stop_loss->[patch_RPCManager],test_handle_cancel_exit_cancel_exception->[patch_RPCManager],test_handle_cancel_enter_exchanges->[patch_RPCManager],test_execute_trade_exit_up->[patch_RPCManager],test_edge_called_in_process->[patch_RPCManager],test_process_operational_exception->[patch_RPCManager],test_tsl_on_exchange_compatible_with_edge->[patch_RPCManager],test_check_handle_timedout_sell_usercustom->[patch_RPCManager],test_create_trade_no_stake_amount->[patch_RPCManager],test_order_book_ask_strategy->[patch_RPCManager],test_create_stoploss_order_invalid_order->[patch_RPCManager],test_create_trade_minimal_amount->[patch_RPCManager],test_sell_profit_only_disable_profit->[patch_RPCManager],test_ignore_roi_if_buy_signal->[
patch_RPCManager],test_total_open_trades_stakes->[patch_RPCManager],test_check_handle_timedout_sell->[patch_RPCManager],test_sell_profit_only_enable_loss->[patch_RPCManager],test_execute_trade_exit_custom_exit_price->[patch_RPCManager],test_handle_trade->[patch_RPCManager],test_handle_stoploss_on_exchange_custom_stop->[patch_RPCManager],test_refind_lost_order->[reset_open_orders],test_trailing_stop_loss_positive->[patch_RPCManager]]
Tests handling of a trailing stoploss that is set on the exchange; a TODO notes that a short-side variant of the test is still needed.
I left these as todo's because I have another PR that I'm going to make for freqtradebot where I make a lot more changes to the stoploss, but since I had to edit `create_stoploss_order` and `handle_trailing_stoploss_on_exchange` in freqtradebot to give the side argument, I wanted to make sure this wasn't forgotten
@@ -319,7 +319,10 @@ public class SecurityConfiguration extends WebSecurityConfigurerAdapter { // each scope as a GrantedAuthority, which we don't care about. if (authority instanceof OidcUserAuthority) { OidcUserAuthority oidcUserAuthority = (OidcUserAuthority) authority; - mappedAuthorities.addAll(SecurityUtils.extractAuthorityFromClaims(oidcUserAuthority.getUserInfo().getClaims())); + OidcUserInfo userInfo = oidcUserAuthority.getUserInfo(); + if (userInfo != null) { + mappedAuthorities.addAll(SecurityUtils.extractAuthorityFromClaims(userInfo.getClaims())); + } } }); return mappedAuthorities;
[No CFG could be retrieved]
Provides a converter that maps the ID Token to Spring Security Authorities. Set the claim set converter.
In which scenario you should get null over here? Isn't the UserInfo endpoint a requirement of `OIDC`?