patch
stringlengths
18
160k
callgraph
stringlengths
4
179k
summary
stringlengths
4
947
msg
stringlengths
6
3.42k
@@ -863,7 +863,7 @@ class SubmissionsController < ApplicationController else collected = collect_submissions_for_section(params[:section_to_collect], assignment, errors) if collected > 0 - flash[:success] = I18n.t('collect_submissions.successfully_collected', :collected => collected) + flash[:success] = I18n.t('collect_submissions.successfully_collected', collected: collected) end end end
[SubmissionsController->[downloads->[find,find_entry,group_name,join,first,send_file,nil?,get_output_stream,download_as_string,message,repo_name,get_revision,revision_number,t,last,count,to_i,access_repo,open,repository_folder,each,id,puts,short_identifier,get_latest_revision,render,files_at_path,mkdir,find_appropriate_grouping],update_submissions->[call,redirect_to,short_identifier,nil?,find,empty?,set_results_statistics,instance,raise,log,collect_submissions_for_section,post?,set_release_on_results,blank?,ta?,t,id,push],download_groupings_files->[submission_files,find,find_entry,join,send_file,map,get_output_stream,message,tr,repo_name,current_submission_used,exist?,t,filename,blank?,each,open,puts,delete,redirect_to,short_identifier,render,user_name,retrieve_file,mkdir],set_filebrowser_vars->[get_latest_revision,path_exists?,access_repo,filename,files_at_path,join,each,repository_folder,push],server_time->[render],populate_repo_browser->[directories_at_path,find,to_i,respond_to,js,render,access_repo,construct_repo_browser_directory_table_row,files_at_path,object_id,get_revision,join,construct_repo_browser_table_row,first,each,repository_folder,assignment],download_svn_repo_list->[get_svn_repo_list,short_identifier,send_data,find],collect_ta_submissions->[redirect_to,short_identifier,all,find,can_collect_now?,instance,push_groupings_to_queue,t,id],file_manager->[redirect_to,group,nil?,find,accepted_grouping_for,set_filebrowser_vars,id],download_svn_export_commands->[short_identifier,send_data,find,join,get_svn_export_commands],all_assignments_marked?->[where,size],collect_all_submissions->[redirect_to,short_identifier,groupings,find,can_collect_now?,instance,push_groupings_to_queue,t,id],browse->[get_filters,error_collecting,handle_paginate_event,to_s,all,find,to_i,detect,any?,blank?,ta?,present?,t,id,map],collect_and_begin_grading->[redirect_to,find,group_name,can_collect_grouping_now?,t,push_grouping_to_priority_queue,id],update_converted_pdfs->[nil?,find,is_con
verted,current_submission_used,each,is_pdf?],index->[all,render],update_files->[find,new,original_filename,can_collect_now?,commit_after_collection_message,accepted_grouping_for,set_filebrowser_vars,join,remove,has_jobs?,group,nil?,merge!,conflicts,message,replace,log,sanitize_file_name,t,to_i,instance,repository_external_commits_only?,access_repo,read,get_transaction,each,repository_folder,push,short_identifier,is_valid?,render,user_name,raise,rewind,add,commit,content_type],unrelease->[redirect_to,short_identifier,nil?,find,instance,unrelease_results,log,post?,each,t,length,id],download->[nil?,get_latest_revision,find,to_i,render,send_data,access_repo,download_as_string,find_appropriate_grouping,files_at_path,message,get_revision,join,is_binary?,t,repository_folder,escapeHTML,id],manually_collect_and_begin_grading->[redirect_to,to_i,find,manually_collect_submission,id],repo_browser->[repo,path_exists?,find,to_i,timestamp,close,repository_name,message,raise,get_revision,revision_number,join,first,each,repository_folder,assignment],populate_file_manager->[directories_at_path,find,respond_to,accepted_grouping_for,join,first,construct_file_manager_table_row,group,nil?,js,construct_file_manager_dir_table_row,get_revision,to_i,repository_external_commits_only?,access_repo,object_id,each,repository_folder,id,get_latest_revision,files_at_path],download_simple_csv_report->[get_simple_csv_report,send_data,find,short_identifier],download_detailed_csv_report->[short_identifier,send_data,find,get_detailed_csv_report],all,include,section,released_to_students,downcase,has_submission?,revision_timestamp,total_mark,remark_submitted?,grace_period_deduction_single,collect,before_filter,lambda,grouping,t,select,marking_state,helper_method],require]
Updates a single specific assignment s groupings or a single groupings based on the user s set_results_statistics Sets flash flash errors and redirect to browse.
Line is too long. [102/80]
@@ -68,6 +68,7 @@ function ajax_autocompleter($selected, $htmlname, $url, $urloption = '', $minLen $("input#search_'.$htmlname.'").keydown(function(e) { if (e.keyCode != 9) /* If not "Tab" key */ { + if (e.keyCode == 13) {return false;} /*disable "ENTER" key useful for barcode readers*/ console.log("Clear id previously selected for field '.$htmlname.'"); $("#'.$htmlname.'").val(""); }
[ajax_object_onoff->[trans],ajax_constantonoff->[transnoentities,trans]]
This function is used to display the autocomplete form for a given field in ajax. This method is used to search for a term in the editor. It will search for a Function to handle the change of the item This method is called when the user selects an option on the UI element and the user selects loop on each ui.
This seems ok. However, i don't understand why changing parameter 0 to 1 into ajax_autocompleter The goal of this parameter is to have field automatically filled when in the database, there is only 1 product. (in such a case, having a combo is useless). So i merged manually this change only. Please check if it's ok.
@@ -50,8 +50,8 @@ func TestTransactionsController_Index_Success(t *testing.T) { assert.Empty(t, links["prev"].Href) require.Len(t, txs, 2) - require.Equal(t, string(tx3.SentAt), txs[0].SentAt, "expected tx attempts order by sentAt descending") - require.Equal(t, string(tx2.SentAt), txs[1].SentAt, "expected tx attempts order by sentAt descending") + require.Equal(t, "4", txs[0].SentAt, "expected tx attempts order by sentAt descending") + require.Equal(t, "3", txs[1].SentAt, "expected tx attempts order by sentAt descending") } func TestTransactionsController_Index_Error(t *testing.T) {
[Context,EthTx,CreateTxAndAttempt,GetAccountAddress,AddTxAttempt,NewHTTPClient,Empty,Start,Len,AssertServerResponse,Equal,ParseJSONAPIResponse,GetStore,ParseResponseBody,Parallel,NoError,NewTx,Get,Register,NewApplicationWithKey,Transactions,NotEmpty,ParsePaginatedResponse,NewInt,String,MockEthClient,Run]
TestTransactionsController_Index_Error tests the number of transactions in the store. getTransactionCount - get transaction count.
So without the `string()` this tends to panic instead of fail properly.
@@ -4,8 +4,8 @@ class ShotsController < ApplicationController def show(username) @user = User.find_by(username: username) columns = ['works.id', 'works.title', 'seasons.name as season_name'] - works1 = @user.watching_works.where.not(season_id: nil).order(released_at: :desc).joins(:season).select(columns) - works2 = @user.watching_works.where(season_id: nil).select(:id, :title) + works1 = @user.works.watching.where.not(season_id: nil).order(released_at: :desc).joins(:season).select(columns) + works2 = @user.works.watching.where(season_id: nil).select(:id, :title) @seasons = works1.group_by(&:season_name) @seasons = @seasons.merge('ใใฎไป–' => works2.to_a) if works2.present? end
[ShotsController->[show->[merge,to_a,group_by,find_by,select,present?],layout]]
shows a specific user s last nworks.
Line is too long. [116/80]
@@ -45,8 +45,12 @@ def init_save_key(key_size, key_dir, keyname="key-letsencrypt.pem"): logger.exception(err) raise err + import zope.component + from letsencrypt import interfaces + config = zope.component.getUtility(interfaces.IConfig) # Save file - le_util.make_or_verify_dir(key_dir, 0o700, os.geteuid()) + le_util.make_or_verify_dir(key_dir, 0o700, os.geteuid(), + config.strict_permissions) key_f, key_path = le_util.unique_file( os.path.join(key_dir, keyname), 0o600) key_f.write(key_pem)
[pyopenssl_load_certificate->[_pyopenssl_load],get_sans_from_csr->[_get_sans_from_cert_or_req],get_sans_from_cert->[_get_sans_from_cert_or_req],dump_pyopenssl_chain->[_dump_cert]]
Initializes and saves a privkey. Key must be in PEM format on the filesystem.
Safe to move these imports to the top, also reduces duplicate code in lines 79-80
@@ -143,6 +143,13 @@ class Comment < ApplicationRecord HTMLEntities.new.decode(truncated_text) end + # A helper method to encode logic formerly part of spam detection. + def from_recently_registered_user? + return true unless user&.registered_at + + user.registered_at <= 5.days.ago + end + def video nil end
[Comment->[update_descendant_notifications->[update_notifications],wrap_timestamps_if_video_present!->[path],update_notifications->[update_notifications],expire_root_fragment->[root_exists?]]]
title returns the title of the page if it exists otherwise returns the title of the page.
SpamAssassin has a rule called "day old bread" for recently registered domain names that's effectively the same idea.
@@ -286,9 +286,6 @@ Number of files: {0.stats.nfiles}'''.format( yield item def add_item(self, item): - unknown_keys = set(item) - ITEM_KEYS - assert not unknown_keys, ('unknown item metadata keys detected, please update constants.ITEM_KEYS: %s', - ','.join(k.decode('ascii') for k in unknown_keys)) if self.show_progress: self.stats.show_progress(item=item, dt=0.2) self.items_buffer.add(item)
[ArchiveChecker->[check->[load],rebuild_refcounts->[mark_as_possibly_superseded->[add],verify_file_chunks->[add_reference],robust_iterator->[resync,report,RobustUnpacker,feed],mark_as_possibly_superseded,verify_file_chunks,robust_iterator,ChunkBuffer,add_reference,add,flush],orphan_chunks_check->[delete]],RobustUnpacker->[__next__->[feed],feed->[feed]],ChunkBuffer->[flush->[write_chunk]],Archive->[process_file->[add_item,show_progress,update,stat_attrs],iter_items->[unpack_many],load->[_load_meta],process_stdin->[add_item],process_fifo->[stat_attrs,add_item,update],process_dir->[stat_attrs,add_item,update],add_item->[show_progress,add],extract_item->[fetch_many,IncompatibleFilesystemEncodingError,flush],__init__->[DoesNotExist,CacheChunkBuffer,DownloadPipeline,Statistics,AlreadyExists],process_symlink->[stat_attrs,add_item,update],process_dev->[stat_attrs,add_item,update],save->[AlreadyExists,update,flush],list_archives->[Archive],calc_stats->[add_file_chunks->[add],add->[update],add_file_chunks,Statistics,add],set_meta->[_load_meta],rename->[AlreadyExists,set_meta]],ArchiveRecreater->[matcher_add_tagged_dirs->[exclude,iter_items,add],FakeTargetArchive->[__init__->[Statistics]],process_partial_chunks->[add],create_target_archive->[Archive],try_resume->[iter_items,update,add_item,Statistics,show_progress,delete],recreate->[is_temporary_archive,save],open_archive->[Archive],create_chunk_iterator->[fetch_many,_chunk_iterator],process_items->[iter_items,item_is_hardlink_master,show_progress],create_target_or_resume->[FakeTargetArchive],save->[update,Statistics,save,delete,rename],process_item->[add_item],process_chunks->[show_progress,Interrupted,add]]]
Add an item to the item buffer.
this validation is now done in Item class
@@ -23,6 +23,9 @@ var bucketName = []byte("index") const ( separator = "\000" dbReloadPeriod = 10 * time.Minute + + DBOperationRead = iota + DBOperationWrite ) // BoltDBConfig for a BoltDB index client.
[query->[Seek,Bucket,Equal,Cursor,Next,getDB,HasPrefix,View],Stop->[Close,Wait,Lock,Unlock],BatchWrite->[CreateBucketIfNotExists,getDB,Put,Update],getDB->[Join,Unlock,RLock,Lock,Open,RUnlock],loop->[NewTicker,reload,Stop,Done],RegisterFlags->[StringVar],QueryPages->[DoParallelQueries],reload->[IsNotExist,Join,Unlock,Stat,Error,RLock,Close,Lock,Log,RUnlock,Debug],IsNotExist,Stat,Add,IsDir,loop,Errorf,MkdirAll]
Local function to import a single chunk of a specific type from local storage. reload reloads the last known node id from the boltdb.
Could you explain why we may want this change into Cortex, instead of having Loki implementing its own `BoltIndexClient` with the feature it needs, please?
@@ -522,6 +522,8 @@ hidden_dim = 512 depth = 8 mix_hidden_lr = 1e-3 embedding_name = 'emb' +is_sparse = True +use_nccl_allreduce = False def db_lstm(word, predicate, ctx_n2, ctx_n1, ctx_0, ctx_p1, ctx_p2, mark,
[SE_ResNeXt50Small->[bottleneck_block,conv_bn_layer],TestTransformer->[test_main->[check_network_convergence],setUpClass->[prepare_batch_input]],shortcut->[conv_bn_layer],transformer->[transformer],TestMNIST->[test_batchnorm_fc->[check_network_convergence],test_simple_fc->[check_network_convergence]],TestResnet->[test_resnet->[check_network_convergence]],bottleneck_block->[squeeze_excitation,shortcut,conv_bn_layer],prepare_batch_input->[__pad_batch_data],TestCRFModel->[test_all->[db_lstm]],ParallelExecutorTestingDuringTraining->[test_parallel_testing->[simple_fc_net]]]
DB LSTM. Missing feature - cell feature.
Does it cover is_sparse=False and use_nccl_allreduce=True?
@@ -262,7 +262,9 @@ class StorageAccountHostsMixin(object): # pylint: disable=too-many-instance-att return config, Pipeline(config.transport, policies=policies) def _batch_send( - self, *reqs, # type: HttpRequest + self, + *reqs, # type: HttpRequest + container_name, # type: str **kwargs ): """Given a series of request, do a Storage batch call.
[StorageAccountHostsMixin->[close->[close],__exit__->[__exit__],__enter__->[__enter__]],TransportWrapper->[send->[send]]]
Send a batch request to the storage service.
_batch_send will not work if we add delete_blobs on blob_service_client because there won't be a container name there
@@ -80,7 +80,7 @@ func (c *collection) indexDir(index Index, indexVal string) string { indexDir := c.prefix // remove trailing slash indexDir = strings.TrimRight(indexDir, "/") - return fmt.Sprintf("%s__index_%s/%s", indexDir, index, indexVal) + return fmt.Sprintf("%s__index_%s/%v", indexDir, index, indexVal) } // See the documentation for `Index` for details.
[getIndexPath->[indexPath],WatchByIndex->[indexDir,Path,Get,Watch],Create->[Put,Get,Path],GetByIndex->[indexDir,Get],getMultiIndexPaths->[indexPath],Get->[Path,Get],Count->[Get],List->[Get],Delete->[getIndexPath,Get,getMultiIndexPaths,Path],WatchOne->[Path],Next->[Get],PutTTL->[getIndexPath,getMultiIndexPaths,Get,Path,Put],indexPath->[indexDir],DecrementBy->[Put,Get,Path],IncrementBy->[Put,Get,Path]]
indexDir returns the path to the index in the collection.
I think this will require a migration, right? I think we actually found this issue before (i.e. that `indexVal` is not printed correctly) or a similar one, but decided to not fix it because it'd break existing deployments.
@@ -227,9 +227,10 @@ class ExpressionChecker(ExpressionVisitor[Type]): def analyze_var_ref(self, var: Var, context: Context) -> Type: if var.type: - if isinstance(var.type, Instance): - if self.is_literal_context() and var.type.last_known_value is not None: - return var.type.last_known_value + var_type = get_proper_type(var.type) + if isinstance(var_type, Instance): + if self.is_literal_context() and var_type.last_known_value is not None: + return var_type.last_known_value if var.name() in {'True', 'False'}: return self.infer_literal_expr_type(var.name() == 'True', 'builtins.bool') return var.type
[ExpressionChecker->[visit_star_expr->[accept],analyze_ordinary_member_access->[analyze_ref_expr],check_overload_call->[check_call,infer_arg_types_in_empty_context],visit_await_expr->[accept],check_any_type_call->[infer_arg_types_in_empty_context],erased_signature_similarity->[check_argument_count,check_argument_types],visit_int_expr->[infer_literal_expr_type],check_op->[check_method_call_by_name,check_op_reversible,combine_function_signatures],defn_returns_none->[defn_returns_none],check_method_call_by_name->[is_literal_context],check_call_expr_with_callee_type->[transform_callee_type,method_fullname],visit_enum_call_expr->[accept],transform_callee_type->[apply_method_signature_hook],infer_overload_return_type->[check_call],check_lst_expr->[check_call],visit_unicode_expr->[infer_literal_expr_type],visit_index_expr->[is_literal_context],accept->[visit_call_expr,accept],named_type->[named_type],has_member->[has_member],visit_yield_from_expr->[check_awaitable_expr,accept,check_method_call_by_name],infer_function_type_arguments_pass2->[infer_function_type_arguments,infer_arg_types_in_context],visit_index_with_type->[visit_index_with_type,check_method_call_by_name],object_type->[named_type],check_callable_call->[apply_function_plugin],plausible_overload_call_targets->[has_shape,check_argument_count],visit_dict_expr->[check_call,check_method_call_by_name,check_typeddict_call_with_dict],infer_literal_expr_type->[is_literal_context],apply_type_arguments_to_callable->[apply_generic_arguments],check_awaitable_expr->[named_type,check_method_call_by_name],check_method_call->[transform_callee_type,method_fullname,check_call],check_union_method_call_by_name->[check_method_call_by_name],analyze_type_type_callee->[analyze_type_type_callee],union_overload_result->[union_overload_result,infer_overload_return_type],check_union_call_expr->[method_fullname,check_call_expr_with_callee_type],check_op_reversible->[lookup_operator->[make_local_errors,is_literal_context],lookup_definer,make
_local_errors,check_method_call,check_method_call_by_name,lookup_operator],visit_str_expr->[infer_literal_expr_type],visit_tuple_expr->[check_lst_expr],visit_yield_expr->[accept],is_valid_keyword_var_arg->[named_type],apply_generic_arguments->[apply_generic_arguments],check_list_multiply->[check_op],visit_member_expr->[extract_refexpr_names],visit_bytes_expr->[infer_literal_expr_type],check_generator_or_comprehension->[check_call],visit_unary_expr->[check_method_call_by_name],infer_function_type_arguments->[infer_function_type_arguments,infer_arg_types_in_context],find_typeddict_context->[find_typeddict_context],visit_dictionary_comprehension->[check_call],visit_super_expr->[is_literal_context],check_call->[transform_callee_type,check_call],check_union_call->[check_call],bool_type->[named_type],visit_name_expr->[extract_refexpr_names],apply_method_signature_hook->[apply_method_signature_hook]],has_any_type->[accept],ArgInferSecondPassQuery->[visit_callable_type->[accept]],is_literal_type_like->[is_literal_type_like],has_bytes_component->[has_bytes_component],has_erased_component->[accept],has_uninhabited_component->[accept],any_causes_overload_ambiguity->[has_any_type],custom_equality_method->[custom_equality_method],arg_approximate_similarity->[is_typetype_like,arg_approximate_similarity]]
Analyze a variable reference.
Do things end up breaking if we return `var_type` and make the return value be ProperType?
@@ -14,10 +14,12 @@ module Redcarpet @options[:link_attributes]&.each do |attribute, value| link_attributes += %( #{attribute}="#{value}") end - if (/\A(https?:\/\/)/.match? link) || link.nil? + if (/https?:\/\/[\S]+/.match? link) || link.nil? %(<a href="#{link}"#{link_attributes}>#{content}</a>) - else + elsif /\.{1}/.match? link %(<a href="//#{link}"#{link_attributes}>#{content}</a>) + else + %(<a href="https://dev.to#{link}"#{link_attributes}>#{content}</a>) end end
[HTMLRouge->[link->[each,nil?,match?],slugify->[gsub,sanitize],header->[slugify],include],require]
Link to a node with a .
Instead of hardcoding dev.to into the url maybe we could use the `APP_DOMAIN` environment variable so links are sane in whatever environment we are?
@@ -92,7 +92,7 @@ func (cs *ContainerService) setAddonsConfig(isUpdate bool) { defaultBlobfuseFlexVolumeAddonsConfig := KubernetesAddon{ Name: BlobfuseFlexVolumeAddonName, - Enabled: to.BoolPtr(common.IsKubernetesVersionGe(o.OrchestratorVersion, "1.8.0") && DefaultBlobfuseFlexVolumeAddonEnabled && !cs.Properties.HasCoreOS()), + Enabled: to.BoolPtr(DefaultBlobfuseFlexVolumeAddonEnabled && common.IsKubernetesVersionGe(o.OrchestratorVersion, "1.8.0") && !cs.Properties.HasCoreOS() && !cs.Properties.IsAzureStackCloud()), Containers: []KubernetesContainerSpec{ { Name: BlobfuseFlexVolumeAddonName,
[setAddonsConfig->[Itoa,HasNSeriesSKU,BoolPtr,GetAzureCNICidr,IsKubernetesVersionGe,HasCoreOS,GetCloudSpecConfig,GetNonMasqueradeCIDR],BoolPtr,IsKubernetesVersionGe,GetAddonContainersIndexByName,Bool,IsAzureCNI]
setAddonsConfig sets the addons config Spec for the cluster - spec AddonConfig - A plugin to configure the necessary configuration for a new add - on. This function returns a list of KubernetesContainerSpec objects that can be used to create a new This is the default configuration for the cluster.
Carried this additional default gate over from the "last mile" "concatenate addons string" business logic area.
@@ -50,7 +50,7 @@ public class OverlordProxyServlet extends ProxyServlet try { final Server indexer = selector.pick(); if (indexer == null) { - throw new ISE("Cannot find instance of indexingService"); + throw new ISE("Can't find indexingService, did you configure druid.selectors.indexing.serviceName same as druid.service at overlord?"); } return new URI( request.getScheme(),
[OverlordProxyServlet->[rewriteURI->[getScheme,propagate,getRequestURI,pick,URI,getQueryString,ISE,getHost]]]
Rewrite the URI of a .
minor nit, not a big fan of contractions, I prefer to be explicit
@@ -236,6 +236,8 @@ public class ReadWriteIT { pipeline.getOptions().as(TestPipelineOptions.class).setBlockOnRun(false); TopicPath topic = createTopic(getProject(pipeline.getOptions())); + // Sleep for topic creation to propagate. + Thread.sleep(10000); SubscriptionPath subscription = createSubscription(topic); // Publish some messages
[ReadWriteIT->[getProject->[getProject],createTopic->[newAdminClient],readMessages->[apply],createSubscription->[newAdminClient],writeMessages->[CustomCreate,apply],testReadWrite->[getTestQuickstartReceived,getProject,createTopic,readMessages,createSubscription,writeMessages,apply,collectTestQuickstart],collectTestQuickstart->[apply->[addMessageReceived]]]]
This test method performs a read - write test on the specified pipeline.
Generally, its better to not add sleep to code like this, but rather some logic that checks if a condition is met. What if the sleep is not long enough in all situations? This seems like it would introduce flakey/incorrect behavior
@@ -94,7 +94,7 @@ class AdyenGatewayPlugin(BasePlugin): ), "label": "Client Key", }, - "Origin Url": { + "origin-url": { "type": ConfigurationTypeField.STRING, "help_text": ( "The origin URL of the page where you are rendering the Drop-in. This "
[AdyenGatewayPlugin->[get_supported_currencies->[get_supported_currencies,_get_gateway_config],_process_additional_action->[_get_gateway_config],confirm_payment->[_process_additional_action,_get_gateway_config],get_payment_gateway_for_checkout->[_get_gateway_config]]]
Configuration for the API key. Configuration for a specific user - defined .
What's a Drop-in? Should this link to Adyen docs?
@@ -77,7 +77,7 @@ describe "Api::V1::Casts" do "description_en" => cast.character.description_en, "description_source" => cast.character.description_source, "description_source_en" => cast.character.description_source_en, - "favorite_characters_count" => cast.character.favorite_characters_count, + "favorite_characters_count" => cast.character.favorite_users_count, }, "person" => { "id" => cast.person.id,
[create,favorite_characters_count,twitter_username,let,describe,api,nickname,favorite_people_count,twitter_username_en,blood_type,title_kana,released_at_about,it,twitter_image_url,name,media,birthday,work_records_with_body_count,to,blood_type_en,weight_en,facebook_og_image_url,sort_number,before,description_source_en,description_source,watchers_count,let!,gender_text,recommended_image_url,twitter_hashtag,nationality_en,name_kana,age,staffs_count,occupation_en,official_site_url,to_s,height,age_en,include,name_en,url_en,strftime,casts_count,media_text,nationality,twitter_avatar_url,episodes_count,url,wikipedia_url_en,title,nickname_en,id,context,token,wikipedia_url,get,no_episodes?,birthday_en,weight,eq,occupation,description_en,description,height_en]
This method returns a list of all the properties of a character. missing nanoseconds for missing values.
Style/TrailingCommaInHashLiteral: Avoid comma after the last item of a hash.
@@ -1,9 +1,5 @@ # coding=utf-8 # -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated.
[No CFG could be retrieved]
Creates a new object.
This is not generated using the official command line, otherwise this should not be in the diff. Please share how you generated this client,
@@ -350,8 +350,9 @@ class ClassLoader if (isset($this->prefixLengthsPsr4[$first])) { foreach ($this->prefixLengthsPsr4[$first] as $prefix => $length) { if (0 === strpos($class, $prefix)) { + $logicalPath=substr($logicalPathPsr4, $length); foreach ($this->prefixDirsPsr4[$prefix] as $dir) { - if (file_exists($file = $dir . DIRECTORY_SEPARATOR . substr($logicalPathPsr4, $length))) { + if (is_file($file = $dir . DIRECTORY_SEPARATOR . $logicalPath)) { return $file; } }
[ClassLoader->[findFile->[findFileWithExtension],loadClass->[findFile]]]
Finds a file with the given extension. Returns a new instance of the class that will be used to create the class.
why do you change `file_exists` with `is_file`?
@@ -408,6 +408,9 @@ namespace MonoGame.Tools.Pipeline try #endif { + if (!asm.ToString().Contains("MonoGame")) + continue; + var types = asm.GetTypes(); ProcessTypes(types); }
[ImporterTypeDescription->[Equals->[Equals],GetHashCode->[GetHashCode]],ProcessorTypeDescription->[ProcessorPropertyCollection->[IEnumerator->[GetEnumerator],GetEnumerator->[GetEnumerator],Equals]],PipelineTypes->[ImporterTypeDescription->[Equals->[],GetHashCode->[],Equals,RemapOldNames],Load->[Contains],ProcessorTypeDescription->[ProcessorPropertyCollection->[IEnumerator->[],GetEnumerator->[]],Equals,RemapOldNames]]]
Resolves all assemblies that contain the given paths.
This would fail on any custom pipeline extensions that don't include "MonoGame" in the file name. We need a better way to filter this.
@@ -15,9 +15,9 @@ module Api fields = fields_to_render(@@default_fields) respond_to do |format| - format.xml{render :xml => users.to_xml(:only => fields, :root => 'users', - :skip_types => 'true')} - format.json{render :json => users.to_json(:only => fields)} + format.xml{render xml: users.to_xml(only: fields, root: 'users', + skip_types: 'true')} + format.json{render json: users.to_json(only: fields)} end end
[UsersController->[create->[nil?,render,new,downcase,process_attributes,type,has_missing_params?,find_by_user_name,save,delete],show->[nil?,xml,render,respond_to,to_json,fields_to_render,json,to_xml,find_by_id],process_attributes->[each,find_by_name,id,blank?],update->[nil?,render,attributes,process_attributes,blank?,find_by_user_name,save,find_by_id],destroy->[render],index->[xml,render,respond_to,to_json,fields_to_render,json,to_xml,get_collection]]]
This view shows the n - node user record that is not found in the system.
`end` at 19, 29 is not aligned with `format.xml{render xml: users.to_xml(only: fields, root: 'users',` at 18, 8<br>Align the elements of a hash literal if they span more than one line.
@@ -62,13 +62,14 @@ public class SearchActionTest { @Before public void setUp() { - userSession.logIn().setGlobalPermissions(GlobalPermissions.SYSTEM_ADMIN); db.users().insertUser(newUserDto().setLogin(GRACE_HOPPER)); db.users().insertUser(newUserDto().setLogin(ADA_LOVELACE)); } @Test public void search_json_example() { + userSession.logIn().setRoot(); + dbClient.userTokenDao().insert(dbSession, newUserToken() .setCreatedAt(1448523067221L) .setName("Project scan on Travis")
[SearchActionTest->[fail_when_insufficient_privileges->[setGlobalPermissions,newRequest,expect],a_user_can_search_its_own_token->[setLogin,insert,isEqualTo,newRequest,setGlobalPermissions,commit],fail_when_login_does_not_exist->[expectMessage,newRequest,expect],newRequest->[parseFrom,propagate,IllegalStateException,getInputStream,setParam,setMediaType,execute],setUp->[setGlobalPermissions,setLogin,insertUser],search_json_example->[getResource,getInput,isSimilarTo,setLogin,insert,commit],SearchAction,create,getDbClient,standalone,WsActionTester,none,getSession]]
Setup the test.
same in `SearchActionTest` seems there is no test for anonymous user
@@ -344,6 +344,16 @@ module.exports = class extends BaseGenerator { } } } + + if (!this.options['skip-git']) { + this.gitExec('init', () => { + this.log('git init successful'); + this.gitExec(['add -A'], () => { + this.log('git add successful'); + this.gitExec(['commit -am "Initial application generated by JHipster"'], () => this.log('git commit successful')); + }); + }); + } }, afterRunHook() {
[No CFG could be retrieved]
End of the generator.
Shouldn't `checkGit` be called to see if the user has git installed ?
@@ -17,7 +17,7 @@ describe '้€š็Ÿฅใƒšใƒผใ‚ธ' do end end - context '่‡ชๅˆ†ใฎใƒใ‚งใƒƒใ‚ฏใ‚คใƒณใŒใ€Œใ„ใ„ใญ๏ผใ€ใ•ใ‚ŒใŸใจใ' do + context '่‡ชๅˆ†ใฎ่จ˜้ŒฒใŒใ€Œใ„ใ„ใญ๏ผใ€ใ•ใ‚ŒใŸใจใ' do let(:user1) { create(:registered_user) } let(:user2) { create(:registered_user) } let(:checkin) { create(:checkin, user: user1) }
[visit,create,let,to_not,describe,like_r,it,name,to,have_content,before,require,work_episode_checkin_path,have_link,unlike_r,work,context,episode,login_as,follow]
Requires the spec_helper module. http://www. webcontainer. org.
Prefer double-quoted strings unless you need single quotes to avoid extra backslashes for escaping.
@@ -20,9 +20,10 @@ namespace DSCoreNodesUI if (Equals(_value, null) || !_value.Equals(value)) { _value = value; - RequiresRecalc = !Equals(value, null); - RaisePropertyChanged("Value"); + RequiresRecalc = !Equals(value, null); } + //If the value exceeds the limit (overflow) UI has to be updated with MAX / MIN value + RaisePropertyChanged("Value"); } }
[BasicInteractive->[DeserializeCore->[DeserializeCore],SerializeCore->[SerializeValue,SerializeCore],SaveNode->[SerializeValue]]]
Abstract base class for handling null - reference exceptions. Deserialize an object from an XML element.
@ramramps, This won't just affect sliders. It will affect a lot of nodes. Is there a way to do this without modifying `BasicInteractive`?
@@ -12,7 +12,7 @@ class ServiceProviderController < ApplicationController private def authorize - if authorization_token == AppConfig.env.dashboard_api_token + if authorization_token == IdentityConfig.store.dashboard_api_token yield else head :unauthorized
[ServiceProviderController->[authorize->[head,dashboard_api_token],authorization_token->[headers],update->[authorize,render,run,use_dashboard_service_providers?],protect_from_forgery]]
If the current user is an authorization token and the block is given yield the otherwise head.
While we're here, should we secure compare this?
@@ -272,7 +272,8 @@ class CI_Session_redis_driver extends CI_Session_driver implements SessionHandle if (isset($this->_redis)) { try { - if ($this->_redis->ping() === '+PONG') + $ping = $this->_redis->ping(); + if ($ping === $this->_ping_response) { $this->_release_lock(); if ($this->_redis->close() === FALSE)
[CI_Session_redis_driver->[write->[setTimeout,_fail,_get_lock,_release_lock,set],close->[getMessage,_fail,ping,_release_lock,close],_get_lock->[setTimeout,set,setex,ttl],_release_lock->[delete],read->[_get_lock,get,_fail],destroy->[_cookie_destroy,_fail,delete],open->[_fail,php5_validate_id,connect,auth,select],validateSessionId->[exists]]]
Closes the session.
This is no longer necessary.
@@ -38,6 +38,7 @@ class OrderExtensionTest extends TestCase { $aggregationBuilderProphecy = $this->prophesize(Builder::class); + $aggregationBuilderProphecy->getStage(0)->shouldBeCalled()->willThrow(new OutOfRangeException('message')); $aggregationBuilderProphecy->sort(['name' => 'asc'])->shouldBeCalled(); $classMetadataProphecy = $this->prophesize(ClassMetadata::class);
[OrderExtensionTest->[testApplyToCollectionWithOrderOverriddenWithAssociation->[willReturn,prophesize,reveal,applyToCollection,shouldBeCalled],testApplyToCollectionWithWrongOrder->[willReturn,shouldNotBeCalled,prophesize,reveal,applyToCollection],testApplyToCollectionWithOrderOverriddenWithNoDirection->[willReturn,prophesize,reveal,applyToCollection,shouldBeCalled],testApplyToCollectionWithOrderOverridden->[willReturn,prophesize,reveal,applyToCollection,shouldBeCalled],testApplyToCollectionWithValidOrder->[willReturn,prophesize,reveal,applyToCollection,shouldBeCalled]]]
This method is used to test if the collection has a valid order.
Please remove the `shouldBeCalled` calls.
@@ -85,10 +85,10 @@ public class HotRodMergeTest extends BasePartitionHandlingTest { TestingUtil.waitForNoRebalance(cache(p0.node(0))); expectPartialTopology(client, initialTopology + 1); partition(0).merge(partition(1)); - eventuallyExpectCompleteTopology(client, initialTopology + 8); + eventuallyExpectCompleteTopology(client, initialTopology + 7); } - public void testNewTopologySentAfterOverlappingMerge(Method m) { + public void testNewTopologySentAfterOverlappingMerge() { TestingUtil.waitForNoRebalance(caches()); int initialTopology = advancedCache(0).getRpcManager().getTopologyId(); expectCompleteTopology(client, initialTopology);
[HotRodMergeTest->[createBeforeClass->[createBeforeClass],destroy->[destroy]]]
Test new topology sent after clean merge.
Why was the previous assumption `+ 8` and now it's `+ 7`?
@@ -0,0 +1,14 @@ +package tenantfederation + +import ( + "flag" +) + +type Config struct { + // Enabled switches on support for multi tenant query federation + Enabled bool `yaml:"enabled"` +} + +func (cfg *Config) RegisterFlags(f *flag.FlagSet) { + f.BoolVar(&cfg.Enabled, "tenant-federation.enabled", false, "If enabled on all components, queries can be federated across multiple tenants. The tenant IDs involved need to be specified separated by a `|` character in the `X-Scope-OrgID` header (experimental).") +}
[No CFG could be retrieved]
No Summary Found.
**suggestion(optional):** I think this help text can be reworked to explain how to use multi-tenant queries using `|` character in the X-Scope-OrgID. **nit(optional,if-minor):** It might make sense to reword the phrase `multi tenant query federation can be used`. Changing it to something that describes query data from multiple tenants without using phrasing similar to the config field. Ideally, the help text should describe what a feature means to someone who can't parse the meaning from the config field name.
@@ -106,6 +106,7 @@ namespace ProtoCore public Guid GraphNodeGuid; public int AstID; public string FileName; + public SymbolNode symbol; } }
[WebOutputStream->[Close->[Close]],BuildStatus->[LogSemanticError->[Write],ReportBuildResult->[Write],LogSyntaxError->[Write],LogWarning->[Write]]]
Exception message class. This constructor is used to create a message object from a source location related message.
I'm afraid it is inappropriate to add `symbol` to `warningEntry`. All fields are about when/where/what. They should be common and meaningful for every `WarningEntry`. So IMHO, firstly, it is hard to figure out the meaning of this field based on name `symbol` -- updated symbols or symbols referenced in this warning (it raise the other questions -- why not a list of symbols?). Secondly, most warning entries don't have this field, we can see in the following code, we need to check if `symbol != null`. So my idea is, if we really need this symbol filed, why not inherit from `WarningEntry`, for example, `SymbolResolutionWarning` so that it is category for warnings like symbol redefined, symbol not found and so on.
@@ -342,9 +342,9 @@ daos_test_cb_add(test_arg_t *arg, struct test_op_record *op, char **rbuf, daos_size_t *rbuf_size) { print_message("add rank %u\n", op->ae_arg.ua_rank); - test_rebuild_wait(&arg, 1); daos_reint_server(arg->pool.pool_uuid, arg->group, arg->dmg_config, op->ae_arg.ua_rank); + test_rebuild_wait(&arg, 1); return 0; }
[No CFG could be retrieved]
This function is called by the UF when it is called from the UF. -DER_NOMEM - DEPTH - START.
Why not move the rebuild_wait() after reint_server()? Do we need to wait rebuild for reintegration case?
@@ -68,9 +68,7 @@ class Address(models.Model): def __eq__(self, other): return self.as_data() == other.as_data() - def __hash__(self): - # FIXME: in Django 2.2 this is not present if __eq__ is defined - return super().__hash__() + __hash__ = models.Model.__hash__ def as_data(self): """Return the address as a dict suitable for passing as kwargs.
[UserManager->[create_superuser->[create_user]],User->[UserManager],Address->[get_copy->[as_data],PossiblePhoneNumberField]]
A method to check if two objects are equal.
Are there cases where `__eq__` would return `False` while `__hash__` remains `True`? Because this will break Python.
@@ -81,8 +81,16 @@ public class CrumbExclusionTest { try { fail(path + " should have been rejected: " + r.createWebClient().login("admin").getPage(new WebRequest(new URL(r.getURL(), path + "?script=11*11"), HttpMethod.POST)).getWebResponse().getContentAsString()); } catch (FailingHttpStatusCodeException x) { - assertEquals("status code using " + path, 403, x.getStatusCode()); - assertThat("error message using " + path, x.getResponse().getContentAsString(), containsString("No valid crumb was included in the request")); + switch (x.getStatusCode()) { + case 403: + assertThat("error message using " + path, x.getResponse().getContentAsString(), containsString("No valid crumb was included in the request")); + break; + case 400: // from Jetty + assertThat("error message using " + path, x.getResponse().getContentAsString(), containsString("Ambiguous path parameter in URI")); + break; + default: + fail("unexpected error code"); + } } } }
[CrumbExclusionTest->[regular->[assertTrue,getPage,getURL,lookupSingleton,WebRequest,URL],pathInfo->[to,createDummySecurityRealm,containsString,setSecurityRealm,setAuthorizationStrategy,getStatusCode,getContentAsString,assertThat,assertEquals,fail],CrumbExclusionImpl->[process->[getPathInfo,startsWith,doFilter]],JenkinsRule]]
This method checks whether the request is a regular action.
Or could just delete the `/..;/cli` branch, which is no longer really testing anything interesting here.
@@ -117,10 +117,13 @@ class WPSEO_Help_Center { ); $formatted_data['videoDescriptions'][] = array( - 'title' => __( 'Want to be a Yoast SEO Expert?', 'wordpress-seo' ), - 'description' => __( 'Follow our Yoast SEO for WordPress training and become a certified Yoast SEO Expert!', 'wordpress-seo' ), + /* translators: %s expands to Yoast SEO */ + 'title' => sprintf( __( 'Want to be a %s Expert?', 'wordpress-seo' ), 'Yoast SEO' ), + /* translators: %1$s expands to Yoast SEO */ + 'description' => sprintf( __( 'Follow our %1$s for WordPress training and become a certified %1$s Expert!', 'wordpress-seo' ), 'Yoast SEO' ), 'link' => WPSEO_Shortlinker::get( 'https://yoa.st/wordpress-training-vt' ), - 'linkText' => __( 'Enroll in the Yoast SEO for WordPress training ยป', 'wordpress-seo' ), + /* translators: %s expands to Yoast SEO */ + 'linkText' => sprintf( __( 'Enroll in the %s for WordPress training', 'wordpress-seo' ), 'Yoast SEO' ), ); }
[WPSEO_Help_Center->[localize_data->[add_contact_support_item,get_tabs,format_data,enqueue_localized_data],format_helpcenter_tab->[get_content,get_label,get_identifier],add_contact_support_item->[get_premium_message],format_data->[get_video_url,get_active_tab,get_label,get_name,get_extra_tabs],enqueue_localized_data->[localize_script],__construct->[get_name,add_tab]]]
Formats the data for the plugin. Add support - team links to the data array Get the necunicallcite object.
Please make `WordPress` not translatable as well (also below).
@@ -57,9 +57,15 @@ class AttributeAssignmentMixin: be unable to build or might only be partially built. """ - REFERENCE_VALUE_NAME_MAPPING = {AttributeEntityType.PAGE: "title"} - - ENTITY_TYPE_TO_MODEL_MAPPING = {AttributeEntityType.PAGE: page_models.Page} + REFERENCE_VALUE_NAME_MAPPING = { + AttributeEntityType.PAGE: "title", + AttributeEntityType.PRODUCT: "name", + } + + ENTITY_TYPE_TO_MODEL_MAPPING = { + AttributeEntityType.PAGE: page_models.Page, + AttributeEntityType.PRODUCT: product_models.Product, + } @classmethod def _resolve_attribute_nodes(
[AttributeAssignmentMixin->[save->[_pre_save_file_value,_pre_save_values,_pre_save_reference_values],clean_input->[AttrValuesInput,_validate_attributes_input,_resolve_attribute_global_id,_resolve_attribute_nodes]]]
Retrieve attributes nodes from given global IDs and or slugs.
@maarcingebala Maybe we should store this mapping in `saleor/attribute/__init__.py`?
@@ -1117,14 +1117,15 @@ class RenewableCert(object): os.path.join(self.archive_dir, "{0}{1}.pem".format(kind, target_version))) for kind in ALL_FOUR]) + old_privkey = os.path.join( + self.archive_dir, "privkey{0}.pem".format(prior_version)) + # Distinguish the cases where the privkey has changed and where it # has not changed (in the latter case, making an appropriate symlink # to an earlier privkey version) if new_privkey is None: # The behavior below keeps the prior key by creating a new # symlink to the old key or the target of the old key symlink. - old_privkey = os.path.join( - self.archive_dir, "privkey{0}.pem".format(prior_version)) if os.path.islink(old_privkey): old_privkey = os.readlink(old_privkey) else:
[RenewableCert->[next_free_version->[newest_available_version],save_successor->[next_free_version,update_configuration,config_with_defaults],new_lineage->[_write_live_readme_to,full_archive_path,lineagename_for_filename,_full_live_path,relevant_values,write_renewal_config,_relpath_from_file],_fix_symlinks->[_previous_symlinks],should_autodeploy->[autodeployment_is_enabled,add_time_interval,has_pending_deployment],available_versions->[current_target],names->[version,current_target],update_all_links_to->[_previous_symlinks,current_target,_update_link_to],version->[current_target],relative_archive_dir->[_relpath_from_file],newest_available_version->[available_versions],archive_dir->[full_archive_path],_update_symlinks->[get_link_target,relative_archive_dir],latest_common_version->[available_versions],_check_symlinks->[get_link_target],__init__->[config_with_defaults,lineagename_for_filename],current_target->[get_link_target],should_autorenew->[ocsp_revoked,latest_common_version,version,autorenewal_is_enabled,add_time_interval],current_version->[current_target],ensure_deployed->[latest_common_version],_consistent->[get_link_target],has_pending_deployment->[latest_common_version,current_version]],delete_files->[full_archive_path,renewal_file_for_certname,_full_live_path],relevant_values->[_relevant],update_configuration->[write_renewal_config],cert_path_for_cert_name->[renewal_file_for_certname]]
Save a new version of a private key as a successor of a new version of a prior Writes a new version of to the target file.
This probably isn't a big deal, but after writing out the file and before calling `chown`, the current effective group has permissions on the file rather than the group set by the user on the previous key. I think it might be worth avoiding this by: 1. Writing out the file with permissions in `BASE_PRIVKEY_MODE`. 2. Calling `chown`. 3. Calling `chmod`.
@@ -9,6 +9,6 @@ require 'bundler/setup' # Set up gems listed in the Gemfile. # it exists. Put just a line like this in there: # ENV['RAILS_ENV'] = 'production' rails_env_file = File.expand_path(File.join(File.dirname(__FILE__), 'rails_env.rb')) -if File.exists?(rails_env_file) +if File.exist?(rails_env_file) require rails_env_file end
[exists?,dirname,join,require,expand_path]
Check if a exists in the Rails environment.
Favor modifier if usage when having a single-line body. Another good alternative is the usage of control flow &&/||.
@@ -941,6 +941,9 @@ class IntelPackage(PackageBase): ['mkl_cblas', 'mkl_lapacke'], root=self.component_include_dir('mkl'), recursive=False) + if '+tbb' in self.spec or self.provides('tbb'): + result += self.tbb_headers() + debug_print(result) return result
[IntelPackage->[install->[install],blas_libs->[component_lib_dir,raise_lib_error,debug_print],setup_environment->[_expand_fields],setup_dependent_package->[debug_print],mpi_compiler_wrappers->[component_bin_dir],filter_compiler_wrappers->[component_bin_dir],_setup_dependent_env_callback->[component_lib_dir,debug_print,mpi_setup_dependent_environment,normalize_path],libs->[component_lib_dir,debug_print],headers->[debug_print,component_include_dir],uninstall_ism->[debug_print,normalize_path],normalize_path->[debug_print,normalize_suite_dir,_expand_fields],pset_components->[debug_print],component_bin_dir->[debug_print,_expand_fields,normalize_path],normalize_suite_dir->[debug_print],_tbb_abi->[debug_print],scalapack_libs->[component_lib_dir,raise_lib_error,debug_print],tbb_libs->[component_lib_dir,debug_print],_determine_license_type->[debug_print],component_lib_dir->[debug_print,_expand_fields,normalize_path],mpi_setup_dependent_environment->[debug_print,normalize_path],_gcc_executable->[debug_print],configure_rpath->[component_lib_dir,component_bin_dir],openmp_libs->[component_lib_dir,raise_lib_error,debug_print],file_to_source->[normalize_path,_expand_fields],component_include_dir->[debug_print,_expand_fields,normalize_path]]]
Returns a list of HeaderList objects for all component headers.
This should be: `result += self.tbb_headers()`
@@ -4,4 +4,9 @@ if ( ! class_exists( 'CLI_Command' ) ) { require_once __DIR__ . '/src/CLI_Command.php'; } +if ( ! class_exists( 'Alias_Command' ) ) { + require_once __DIR__ . '/src/Alias_Command.php'; +} + WP_CLI::add_command( 'cli', 'CLI_Command' ); +WP_CLI::add_command( 'alias', 'Alias_Command' );
[No CFG could be retrieved]
Adds a CLI_Command to the WordPress CLI.
With `WP_CLI::add_command( 'cli alias', 'Alias_Command' );` this should enable usage as `wp cli alias <get|list|add|..>`
@@ -185,14 +185,14 @@ class SamlResponseDoc end def transforms_nodeset - @_transforms ||= response_doc.xpath( + @transforms_nodeset ||= response_doc.xpath( '//ds:Reference/ds:Transforms', ds: Saml::XML::Namespaces::SIGNATURE, ) end def transform(algorithm) - @_transform ||= transforms_nodeset[0].xpath( + transforms_nodeset[0].xpath( "//ds:Transform[@Algorithm='#{algorithm}']", ds: Saml::XML::Namespaces::SIGNATURE, )[0]
[SamlResponseDoc->[assertion_statement_node->[xpath],asserted_session_index->[content],status->[xpath],signature_nodeset->[xpath],metadata_name_id_format->[content],status_code->[xpath],conditions_nodeset->[xpath],logout_assertion->[first],logout_asserted_session_index->[content],attribute_value_for->[to_s],transform->[xpath],organization_display_name->[content],transforms_nodeset->[xpath],response_session_index_assertion->[to_s],signed_info_nodeset->[xpath],xml_response->[value,decode64],phone_number->[at],signature_method_nodeset->[xpath],html_response->[at_css,decode64],attribute_node_for->[at],request_assertion->[xpath],raw_xml_response->[body,match?],saml_response->[new],response_assertion_nodeset->[xpath],issuer_nodeset->[xpath],signature_canon_method_nodeset->[xpath],organization_nodeset->[xpath],organization_name->[content],digest_method_nodeset->[xpath],saml_document->[parse],attribute_authority_organization_nodeset->[xpath],uuid->[to_s],attribute_authority_organization_display_name->[content],metadata_nodeset->[xpath],response_doc->[to_s,match?],attribute_authority_organization_name->[content],subject_nodeset->[xpath],logout_status_assertion->[content],attr_reader,include],require_relative]
Get the node set of a .
Since there's an `algorithm` argument that changes the result, I decided it was safer not to memoize this one at all
@@ -867,6 +867,17 @@ class NewSemanticAnalyzer(NodeVisitor[None], self.prepare_class_def(defn, info) return + is_named_tuple, info = self.named_tuple_analyzer.analyze_namedtuple_classdef(defn) + if is_named_tuple: + if info is None: + self.mark_incomplete(defn.name) + else: + self.prepare_class_def(defn, info) + with self.scope.class_scope(defn.info): + with self.named_tuple_analyzer.save_namedtuple_body(info): + self.analyze_class_body_common(defn) + return + # Create TypeInfo for class now that base classes and the MRO can be calculated. self.prepare_class_def(defn)
[names_modified_in_lvalue->[names_modified_in_lvalue],NewSemanticAnalyzer->[analyze_comp_for->[analyze_lvalue],name_not_defined->[is_incomplete_namespace,add_fixture_note,record_incomplete_ref,lookup_fully_qualified_or_none],check_classvar->[is_self_member_ref],visit_lambda_expr->[analyze_function],visit_for_stmt->[fail_invalid_classvar,anal_type,is_classvar,store_declared_types,visit_block,visit_block_maybe,analyze_lvalue],add_module_symbol->[add_symbol],add_symbol->[is_func_scope],visit_import_all->[dereference_module_cross_ref,add_submodules_to_parent_modules,process_import_over_existing_name,correct_relative_import],analyze_namedtuple_classdef->[analyze_namedtuple_classdef,analyze_class_body_common],basic_new_typeinfo->[object_type],visit_index_expr->[anal_type],accept->[accept],record_incomplete_ref->[defer],mark_incomplete->[defer],analyze_simple_literal_type->[named_type_or_none],anal_type->[type_analyzer],add_builtin_aliases->[found_incomplete_ref,track_incomplete_refs,mark_incomplete,named_type_or_none],analyze_type_expr->[accept,tvar_scope_frame],visit_overloaded_func_def->[add_func_to_symbol_table],is_class_scope->[is_func_scope],visit_with_stmt->[fail_invalid_classvar,anal_type,is_classvar,store_declared_types,visit_block,analyze_lvalue],fail_blocker->[fail],analyze_types->[anal_type],process_typevar_parameters->[expr_to_analyzed_type,object_type],process_type_annotation->[anal_type],visit_type_application->[anal_type],add_unknown_symbol->[qualified_name,add_symbol],process_module_assignment->[process_module_assignment],analyze_try_stmt->[analyze_lvalue],add_symbol_table_node->[is_func_scope],store_declared_types->[store_declared_types],visit_block_maybe->[visit_block],visit_member_expr->[dereference_module_cross_ref],add_imported_symbol->[add_symbol_table_node],visit_while_stmt->[visit_block_maybe],current_symbol_kind->[is_func_scope,is_class_scope],apply_class_plugin_hooks->[get_fullname->[get_fullname],get_fullname],analyze_tuple_or_list_lvalue->[anal
yze_lvalue],correct_relative_import->[correct_relative_import],visit_cast_expr->[anal_type],lookup_qualified->[dereference_module_cross_ref,lookup],check_and_set_up_type_alias->[analyze_alias,add_type_alias_deps],is_valid_del_target->[is_valid_del_target],visit_if_stmt->[visit_block_maybe,visit_block],visit_import_from->[add_submodules_to_parent_modules],visit__promote_expr->[anal_type],flatten_lvalues->[flatten_lvalues],is_module_scope->[is_func_scope,is_class_scope],add_local->[is_func_scope,add_symbol],analyze_lvalue->[analyze_lvalue],lookup_fully_qualified_or_none->[is_incomplete_namespace,record_incomplete_ref],check_classvar_in_signature->[check_classvar_in_signature]],replace_implicit_first_type->[replace_implicit_first_type],make_any_non_explicit->[accept]]
Analyze a class and its sub - classes. Analyzes a class definition and its decorators and checks for missing class attributes.
I'd suggest moving the new code to a separate method, as the method is already quite long.
@@ -32,6 +32,7 @@ public abstract class SourceStep<K> implements ExecutionStep<K> { final Formats formats; final Optional<TimestampColumn> timestampColumn; final LogicalSchema sourceSchema; + final int versionNumber; @VisibleForTesting public SourceStep(
[SourceStep->[getSources->[emptyList],requireNonNull]]
This class is used to create a new instance of a SourceStep based on the given parameters Returns the logical schema of the source column.
Can we rename this to `pseudocolumnVersion`? The current name is misleading since it makes it sound like we're versioning the step itself, but really we're just versioning the pseudocolumns.
@@ -34,6 +34,14 @@ namespace Microsoft.Xna.Framework private DisplayOrientation _supportedOrientations; private bool _synchronizedWithVerticalRetrace = true; private bool _drawBegun; + private bool _hardwareModeSwitch = true; + +#if WINDOWS && DIRECTX + + // FIXME : FULLSCREEN + + private bool _firstLaunch = true; +#endif bool disposed; #if !WINRT
[GraphicsDeviceManager->[Dispose->[Dispose],Initialize->[ApplyChanges]]]
Creates a new object of type GraphicsDeviceManager that can be used to manage GraphicsDevice objects Private methods Get the preferred back buffer format and height.
Another FIXME that needs an explanation.
@@ -330,6 +330,13 @@ namespace System.Xml.Schema CheckDupFlag(facet, RestrictionFlags.FractionDigits, SR.Sch_DupFractionDigitsFacet); _derivedRestriction.FractionDigits = XmlBaseConverter.DecimalToInt32((decimal)ParseFacetValue(_nonNegativeInt, facet, SR.Sch_FractionDigitsFacetInvalid, null, null)); + if ((_baseFixedFlags & RestrictionFlags.FractionDigits) != 0) + { + if (_datatype.Restriction.FractionDigits != _derivedRestriction.FractionDigits) + { + throw new XmlSchemaException(SR.Sch_FacetBaseFixed, facet); + } + } if ((_derivedRestriction.FractionDigits != 0) && (_datatype.TypeCode != XmlTypeCode.Decimal)) { throw new XmlSchemaException(SR.Sch_FractionDigitsFacetInvalid, SR.Sch_FractionDigitsNotOnDecimal, facet);
[Numeric2FacetsChecker->[MatchEnumeration->[MatchEnumeration],Exception->[MatchEnumeration]],UnionFacetsChecker->[Exception->[MatchEnumeration]],DurationFacetsChecker->[MatchEnumeration->[MatchEnumeration],Exception->[MatchEnumeration]],ListFacetsChecker->[Exception->[MatchEnumeration]],QNameFacetsChecker->[MatchEnumeration->[MatchEnumeration],Exception->[MatchEnumeration]],BinaryFacetsChecker->[MatchEnumeration->[MatchEnumeration],Exception->[MatchEnumeration]],DateTimeFacetsChecker->[MatchEnumeration->[MatchEnumeration],Exception->[MatchEnumeration]],StringFacetsChecker->[MatchEnumeration->[MatchEnumeration],Exception->[MatchEnumeration]],FacetsChecker->[RestrictionFacets->[CompileMinLengthFacet,CompileMinInclusiveFacet,CompilePatternFacet,CompileWhitespaceFacet,CompileMaxLengthFacet,FinishFacetCompile,CompileMinExclusiveFacet,CompileTotalDigitsFacet,CompileLengthFacet,CompileFractionDigitsFacet,CompileFacetCombinations,CompileMaxInclusiveFacet,CompileEnumerationFacet,CompileMaxExclusiveFacet]],Numeric10FacetsChecker->[MatchEnumeration->[MatchEnumeration],Exception->[MatchEnumeration,Power]]]
CompileFractionDigitsFacet is called to compile the fraction digits facet.
I think before this check better to check `if ((_derivedRestriction.FractionDigits != 0) && (_datatype.TypeCode != XmlTypeCode.Decimal))` and throw corresponding exception, please move down
@@ -258,6 +258,7 @@ class HyperoptTuner(Tuner): trial['state'] = hp.JOB_STATE_DONE trials.insert_trial_docs([trial]) trials.refresh() + self.update_parameters_queue() def miscs_update_idxs_vals(self, miscs, idxs, vals, assert_all_vals_used=True,
[json2vals->[json2vals],HyperoptTuner->[receive_trial_result->[json2vals],update_search_space->[_choose_tuner,json2space],generate_parameters->[_split_index,json2parameter],__init__->[OptimizeMode]],json2parameter->[json2parameter],json2space->[json2space]]
Receives a result of a . Unpack idxs - vals format into a list of dictionaries that can be used by miscs.
release memory of queue?
@@ -14,6 +14,11 @@ def bundler_2_available? ENV["SUITE_NAME"] == "bundler2" end +# Load project files prepended with the bundler version, which is currently only ever bundler1 +def bundler_project_dependency_files(project) + project_dependency_files(File.join("bundler1", project)) +end + RSpec.configure do |config| config.around do |example| if bundler_2_available? && example.metadata[:bundler_v1_only]
[common_dir->[gem_dir],require_common_spec->[require],skip,metadata,run,bundler_2_available?,around,require,configure]
Checks if the Bundler2 environment variable is available.
This method provides a forward compatible way to load any fixtures modified in this PR, I left any existing calls to `project_dependency_files` alone to avoid bloating the diff.
@@ -593,7 +593,15 @@ func (l LoginUI) PromptRevokePaperKeys(arg keybase1.PromptRevokePaperKeysArg) (b if arg.Index == 0 { l.parent.Printf("Generating a new paper key.\n") } - return l.parent.PromptYesNo(fmt.Sprintf("Also revoke existing %q ?", arg.Device.Name), PromptDefaultNo) + prompt := fmt.Sprintf("Also revoke existing paper key \"%s...\" ?", arg.Device.Name) + + // XXX not sure if we need to support our existing paper keys, but without this + // someone is surely going to complain: + if strings.HasPrefix(arg.Device.Name, "Paper Key") { + prompt = fmt.Sprintf("Also revoke existing %q ?", arg.Device.Name) + } + + return l.parent.PromptYesNo(prompt, PromptDefaultNo) } func (l LoginUI) DisplayPaperKeyPhrase(arg keybase1.DisplayPaperKeyPhraseArg) error {
[FinishWebProofCheck->[GetDomain,GetCheckText,GetCached,GetHostname,GetDiff,GetHumanURL,GetHint,ReportHook,ToDisplayString,GetError,GetProtocol],Warning->[Warning],Printf->[OutputWriter],DisplayRecheckWarning->[render],FinishSocialProofCheck->[GetService,GetCached,GetHumanURL,GetDiff,GetHint,GetRemoteUsername,ReportHook,ToDisplayString,GetError],Shutdown->[Shutdown],OutputPrechecks->[render],PromptForConfirmation->[PromptForConfirmation,Prompt],Tablify->[Tablify],DisplayKey->[ReportHook],Prompt->[Prompt],OutputInstructions->[render],ppprompt->[GetSecret],PromptYesNo->[PromptYesNo],Println->[OutputWriter],DisplayCryptocurrency->[ReportHook],ReportLastTrack->[ReportHook],DefaultTabWriter->[NewTabWriter],PreProofWarning->[render],Confirm->[baseConfirm,ReportRevoked]]
PromptRevokePaperKeys prompts the user to revoke a new paper key.
We do not need to support existing paper keys!
@@ -201,9 +201,9 @@ public class TrashPolicyOzone extends TrashPolicyDefault { if (now >= end) { Collection<FileStatus> trashRoots; trashRoots = fs.getTrashRoots(true); // list all trash dirs - LOG.debug("Trash root Size: " + trashRoots.size()); + LOG.debug("Trash root Size: {}" + trashRoots.size()); for (FileStatus trashRoot : trashRoots) { // dump each trash - LOG.debug("Trashroot:" + trashRoot.getPath().toString()); + LOG.debug("Trashroot: {}" + trashRoot.getPath()); if (!trashRoot.isDirectory()) { continue; }
[TrashPolicyOzone->[Emptier->[run->[TrashPolicyOzone]],moveToTrash->[moveToTrash],initialize]]
This method is run in a thread that periodically checks if there is a checkpoint in the trash Returns a new instance of the class that will be used to create the class.
same here. replace + with ,
@@ -25,7 +25,7 @@ import ( const ( defaultPort = 8220 - retryOnBadConnTimeout = 5 * time.Minute + retryOnBadConnTimeout = 6 * time.Minute ) type requestFunc func(string, string, url.Values, io.Reader) (*http.Request, error)
[No CFG could be retrieved]
NewConfigFromURL returns a new Config object based on a given URL. Parse a URL and return a Config object.
Why this was changed as part of this PR?
@@ -49,7 +49,9 @@ class Jetpack_Simple_Payments { public function init_hook_action() { add_filter( 'rest_api_allowed_post_types', array( $this, 'allow_rest_api_types' ) ); add_filter( 'jetpack_sync_post_meta_whitelist', array( $this, 'allow_sync_post_meta' ) ); - $this->register_scripts_and_styles(); + if ( ! is_admin() ) { + $this->register_scripts_and_styles(); + } $this->register_shortcode(); $this->setup_cpts();
[Jetpack_Simple_Payments->[parse_shortcode->[get_blog_id,is_enabled_jetpack_simple_payments],is_enabled_jetpack_simple_payments->[get_blog_id],init_hook_action->[register_scripts_and_styles,register_shortcode]]]
Initialize the hook action.
These scripts are not useful even if we're on wp-admin without Gutenberg? I.e. classic editor won't be missing them?
@@ -101,9 +101,7 @@ namespace System.Linq.Expressions.Tests }"); } - // IsNotLinqExpressionsBuiltWithIsInterpretingOnly is not directly required, - // but this functionality relies on private reflection and that would not work with AOT - [ConditionalFact(typeof(PlatformDetection), nameof(PlatformDetection.IsNotLinqExpressionsBuiltWithIsInterpretingOnly))] + [Fact] [ActiveIssue ("https://github.com/dotnet/runtime/issues/53599", platforms: TestPlatforms.MacCatalyst, runtimes: TestRuntimes.Mono)] public static void ConstructorThrows_StackTrace() {
[InterpreterTests->[AssertStackTrace->[AssertStackTrace]]]
Checks if the exception of a node is thrown by a method call that throws an exception. Thrower. ctor.
Original motivation sounds .NET Native related.
@@ -34,7 +34,7 @@ from nni.feature_engineering.gradient_selector import FeatureGradientSelector class Benchmark(): - def __init__(self, files, test_size = 0.2): + def __init__(self, files=None, test_size = 0.2): self.files = files self.test_size = test_size
[Benchmark->[run_test->[train_test_split,print,download,score,load_svmlight_file,fit],download->[write,decode,read,close,exists,urlretrieve,open],run_all_test->[run_test]],Benchmark,SelectFromModel,ExtraTreesClassifier,FeatureGradientSelector,print,LogisticRegression,run_all_test,make_pipeline]
Initialize the class.
inconsistent spaces on parameters, better to remove the spaces after `test_size`
@@ -41,9 +41,14 @@ class CloudOutputDeviceManager: def __init__(self) -> None: # Persistent dict containing the remote clusters for the authenticated user. self._remote_clusters = {} # type: Dict[str, CloudOutputDevice] + + # Dictionary containing all the cloud printers loaded in Cura + self._um_cloud_printers = {} # type: Dict[str, GlobalStack] + self._account = CuraApplication.getInstance().getCuraAPI().account # type: Account self._api = CloudApiClient(CuraApplication.getInstance(), on_error = lambda error: Logger.log("e", str(error))) self._account.loginStateChanged.connect(self._onLoginStateChanged) + self.removed_printers_message = None # type: Optional[Message] # Ensure we don't start twice. self._running = False
[CloudOutputDeviceManager->[_onLoginStateChanged->[start,stop],_connectToOutputDevice->[_setOutputDeviceMetadata]]]
Initialize the object.
Is this intentionally public?
@@ -0,0 +1,5 @@ +from ._home import * +from . import decoder, utils, datapipes + +# Load this last, since itself but especially _builtin/* depends on the above being available +from ._api import *
[No CFG could be retrieved]
No Summary Found.
I thought asterisk import is prohibited by `flake8`. Are there such many symbols to be imported here?
@@ -375,7 +375,13 @@ module Engine revenue: 5, desc: '$5 revenue each time ANY tile is laid or upgraded.', sym: 'P5', - # abilities: [{ type: 'blocks_hexes', owner_type: 'player', hexes: ['C7'] }], + abilities: [ + { + type: 'tile_income', + terrain: nil, + income: 5, + }, + ], color: nil, }, {
[Game->[upgrades_to?->[include?,name,color],all_potential_upgrades->[name,include?],operating_round->[new],init_round->[new],setup->[partition,type],operating_order->[sort!,sort_by!,find_index,select,share_price,price],include_meta,register_colors,freeze],require_relative]
Descriptions for all possible tokens of a specific type. Railroad - specific configuration for a specific node.
Don't include terrain in the configuration, its confusing. Not specifying will default to nil.
@@ -1573,7 +1573,7 @@ namespace DotNetNuke.Entities.Urls bool foundAlias = false; //Do a specified PortalAlias check first - PortalAliasInfo portalAliasInfo = portalAliasCollection.SingleOrDefault(a => a.HTTPAlias == portalAlias.ToLower()); + PortalAliasInfo portalAliasInfo = portalAliasCollection.SingleOrDefault(a => a.HTTPAlias == portalAlias.ToLowerInvariant()); if (portalAliasInfo != null) { if (portalAliasInfo.PortalID == portalId)
[TabIndexController->[AddTabToTabDict->[AddCustomRedirectsToDictionary,AddPermanentRedirectToDictionary],FetchTabDictionary->[BuildTabDictionary],GetTabPath->[FetchTabPathDictionary],BuildTabDictionary->[AddCustomRedirectsToDictionary,AddSiteRootRedirects,AddTabToTabDict,AddStandardPagesToDict,AddToTabDict,AddInternalAliases],CreateRewritePath->[CreateRewritePath]]]
Looks up the PortalAliasInfo in the collection whose HTTP alias matches the specified portal alias, then checks whether it belongs to the specified portal id.
Please use `String#Equals(String, StringComparison)`
@@ -1474,7 +1474,9 @@ struct Cxx11Generator : GeneratorBase } } else { if (af.cls_ & CL_ARRAY) { - initializer = "{}"; + // With array no initialize, gives a warning with gcc 4.8.5, for example + // warning: missing initializer for member โ€˜std::array<std::basic_string<char>, 5ul>::_M_elemsโ€™ [-Wmissing-field-initializers] + initializer = ""; } be_global->add_include("<utility>", BE_GlobalData::STREAM_LANG_H); be_global->lang_header_ <<
[No CFG could be retrieved]
End of function parse_value_seq. Generates the members of a non-empty struct.
Does this change initialization for struct members that are arrays of fundamental types?
@@ -22,15 +22,15 @@ from pants.util.logging import LogLevel @dataclass(frozen=True) -class PutativeProtobufTargetsRequest: +class PutativeProtobufTargetsRequest(PutativeTargetsRequest): pass @rule(level=LogLevel.DEBUG, desc="Determine candidate Protobuf targets to create") async def find_putative_targets( - _: PutativeProtobufTargetsRequest, all_owned_sources: AllOwnedSources + req: PutativeProtobufTargetsRequest, all_owned_sources: AllOwnedSources ) -> PutativeTargets: - all_proto_files = await Get(Paths, PathGlobs(["**/*.proto"])) + all_proto_files = await Get(Paths, PathGlobs, req.search_paths.path_globs("*.proto")) unowned_proto_files = set(all_proto_files.files) - set(all_owned_sources) pts = [ PutativeTarget.for_target_type(
[find_putative_targets->[group_by_dir,basename,PutativeTargets,PathGlobs,sorted,set,for_target_type,Get],rules->[collect_rules,UnionRule],dataclass,rule]
Determine candidate (putative) Protobuf targets to create.
Should the file collection move into the `tailor` goal itself, and have the file name(s) to match be a class property of the `PutativeTargetsRequest` type? The "I expect that you will want to match files, so here's an API to help do that" is a bit roundabout if we think that all `tailor` implementations will do roughly the same thing.
@@ -86,8 +86,13 @@ public abstract class AbstractMessageSplitter extends AbstractReplyProducingMess return null; } - final MessageHeaders headers = message.getHeaders(); - final Object correlationId = headers.getId(); + Map<String, Object> messageHeaders = message.getHeaders(); + if (willAddHeaders(message)) { + messageHeaders = new HashMap<>(messageHeaders); + addHeaders(message, messageHeaders); + } + final Map<String, Object> headers = messageHeaders; + final Object correlationId = message.getHeaders().getId(); final AtomicInteger sequenceNumber = new AtomicInteger(1); return new FunctionIterator<Object, AbstractIntegrationMessageBuilder<?>>(iterator,
[AbstractMessageSplitter->[produceOutput->[produceOutput]]]
This method is called to handle a single message.
Diamond isn't feasible for Java < 8. Will be fixed on merge
@@ -11,6 +11,12 @@ from pants.engine.rules import goal_rule from pants.engine.selectors import Get +class DependencyType(Enum): + INTERNAL = "internal" + EXTERNAL = "external" + INTERNAL_AND_EXTERNAL = "internal-and-external" + + # TODO(#8762) Get this rule to feature parity with the dependencies task. class DependenciesOptions(LineOriented, GoalSubsystem): name = 'dependencies2'
[dependencies->[update,print_stdout,sorted,Dependencies,set,line_oriented],DependenciesOptions->[register_options->[register,super]]]
Goal rule that computes and prints the sorted dependencies of the specified targets.
Internal and external are really unclear names to me, and I didn't know what they meant until I read your helptext on the flag. How would you feel about source and 3rdparty?
@@ -278,7 +278,9 @@ public interface EmbeddedCacheManager extends CacheContainer, Listenable, Closea * memory and in any backing cache store. * * @param cacheName name of cache to remove + * @deprecated obtain a {@link org.infinispan.commons.api.CacheContainerAdmin} instance using {@link #administration()} and invoke the {@link org.infinispan.commons.api.CacheContainerAdmin#removeCache(String)} method */ + @Deprecated // since 9.2 void removeCache(String cacheName); /**
[getCacheConfigurationNames->[UnsupportedOperationException],executor->[UnsupportedOperationException]]
Remove a cache from the cache manager.
there are still usages of this method in the server. I guess they can be removed/upgraded.
@@ -1092,10 +1092,10 @@ static int auto_info(X509_REQ *req, STACK_OF(CONF_VALUE) *dn_sk, */ for (p = v->name; *p; p++) { #ifndef CHARSET_EBCDIC - spec_char = ((*p == ':') || (*p == ',') || (*p == '.')); + spec_char = *p == ':' || *p == ',' || *p == '.'; #else - spec_char = ((*p == os_toascii[':']) || (*p == os_toascii[',']) - || (*p == os_toascii['.'])); + spec_char = *p == os_toascii[':'] || *p == os_toascii[','] + || *p == os_toascii['.']; #endif if (spec_char) { p++;
[No CFG could be retrieved]
auto_info - auto-info function. add_DN_object - adds an object to the list of objects specified in the config.
I would keep the outer parentheses here.
@@ -37,6 +37,11 @@ public class AuthenticationResult */ private final String authorizerName; + + /** + * Name of authentiator whom created the results + */ + private final String authenticatedBy; /** * parameter containing additional context information from an Authenticator */
[No CFG could be retrieved]
Creates an AuthenticationResult object that can be used to authenticate a user by using the specified identity.
authentiator -> authenticator
@@ -244,6 +244,11 @@ X509 *TLSGenerateCertFromPrivKey(RSA *privkey) goto err3; } + if (getenv("CFENGINE_TEST_PURIFY_OPENSSL") != NULL) + { + RSA_blinding_off(privkey); + } + /* Not really needed since the other side does not verify the signature. */ ret = X509_sign(x509, pkey, md);
[No CFG could be retrieved]
Generates an x509 certificate from the given private key and signs it.
I would prefer a check for "and the resulting string is non-empty" (or some "is this boolean true ?" test). If the environment variable is set but empty, getenv() returns an empty string, not NULL. So, in my shell, if I've exported CFENGINE_TEST_PURIFY_OPENSSL, setting it back to empty doesn't turn this feature off; I need to explicitly unexport the environment variable. This shall be an easy thing to get wrong. (In bash, export -n NAME; in dash (commonly used as sh), unset NAME; not sure what, if anything, does it in csh; but the main problem is that it'll catch someone out when they thought clearing the variable, by setting it empty, would suffice.)
@@ -118,6 +118,10 @@ class PythonInterpreterCache(Subsystem): def _interpreter_from_path(self, path, filters=()): try: executable = os.readlink(os.path.join(path, 'python')) + if not os.path.exists(executable): + if os.path.dirname(path) == self._cache_dir: + self._purge_interpreter(path) + return None except OSError: return None interpreter = PythonInterpreter.from_binary(executable, include_site_extras=False)
[PythonInterpreterCache->[_setup_cached->[_interpreter_from_path],setup->[unsatisfied_filters->[_matching],_setup_paths,_setup_cached,unsatisfied_filters,_matching],_setup_paths->[_setup_interpreter,_matching,_interpreter_from_path],select_interpreter_for_targets->[_matching,partition_targets_by_compatibility,UnsatisfiableInterpreterConstraintsError],_interpreter_from_path->[_matches],_resolve_and_link->[_safe_link],_matching->[_matches]]]
Returns the interpreter that matches the given path.
This is always `True` in production use from my reading of call-sites in this file. How about re-structuring this private function to allow killing this misleading conditional. Maybe re-name to `_interpreter_from_relpath` and do the `path = os.path.joint(self._cache_dir, relpath)` here allowing unambiguous removal of the conditional.
@@ -460,7 +460,8 @@ public abstract class AbstractLazyLoadRunMap<R> extends AbstractMap<Integer,R> i * If non-null, update this data structure. * Otherwise do a copy-on-write of {@link #index} */ - protected synchronized R load(File dataDir, Index editInPlace) { + private R load(File dataDir, Index editInPlace) { + assert Thread.holdsLock(this); try { R r = retrieve(dataDir); if (r==null) return null;
[AbstractLazyLoadRunMap->[headMap->[subMap],copy->[Index],getByNumber->[get],load->[put,get,Index,load],subMap->[subMap],purgeCache->[Index],reset->[put,getNumberOf,Index,createReference],putAll->[put],entrySet->[baseDirInitialized],get->[get],unwrap->[get],getIdOf->[getNumberOf],createReference->[getIdOf],put->[put],removeValue->[copy,getNumberOf,removeValue],_put->[put],tailMap->[subMap]]]
Load a node from the data directory.
So IIUC this will throw `AssertionException` if any concurrent call is done to this method without holding the thread lock, right? If so, is this not going to - possibly - break current calls (not taking this into account)?
@@ -95,6 +95,8 @@ def _save_split(epochs, fname, part_idx, n_parts): last = first + len(epochs.times) - 1 write_int(fid, FIFF.FIFF_FIRST_SAMPLE, first) write_int(fid, FIFF.FIFF_LAST_SAMPLE, last) + name = 'Unknown' if epochs._name is None else epochs._name + write_string(fid, FIFF.FIFF_COMMENT, name) # save baseline if epochs.baseline is not None:
[BaseEpochs->[equalize_event_counts->[_keys_to_idx,drop,drop_bad],plot_drop_log->[plot_drop_log],_get_data->[_get_epoch_from_raw,_detrend_offset_decim,_is_good_epoch,_project_epoch],get_data->[_get_data],drop_bad->[_reject_setup],resample->[resample],next->[_get_epoch_from_raw,_detrend_offset_decim,_is_good_epoch,_project_epoch],__getitem__->[_keys_to_idx],save->[_save_split,drop_bad]],EpochsArray->[__init__->[copy,_detrend_offset_decim,drop_bad]],combine_event_ids->[copy],equalize_epoch_counts->[drop,drop_bad],_minimize_time_diff->[_fix_fill],_concatenate_epochs->[_compare_epochs_infos,get_data],_finish_concat->[BaseEpochs,drop_bad],concatenate_epochs->[_finish_concat,_concatenate_epochs],average_movements->[copy,_evoked_from_epoch_data],EpochsFIF->[__init__->[_read_one_epoch_file,BaseEpochs,_RawContainer,copy]],_segment_raw->[Epochs],add_channels_epochs->[_check_merge_epochs,_dep_eeg_ref,get_data],bootstrap->[copy]]
Save one split of the epochs. Writes the first- and last-sample information, and a comment string, to the file descriptor.
Why add this? We're deprecating it.
@@ -92,6 +92,7 @@ namespace Dynamo.Wpf.Views Actions.Close, Categories.Preferences); viewModel.PackagePathsViewModel.SaveSettingCommand.Execute(null); + viewModel.CommitPackagePathsForInstall(); PackagePathView.Dispose(); Close(); }
[PreferencesView->[CloseButton_Click->[Preferences,Execute,Dispose,Close,TrackEvent],AddStyle_SaveButton_Click->[FindName,IsWarningEnabled,ValidateExistingStyle,Visibility,IsNullOrEmpty,IsEnabled,Collapsed,GroupName,Parent,AddStyle,ToString,ResetAddStyleControl,Text],RemoveStyle_Click->[ToString,RemoveStyleEntry,FindName,Parent],Geometry_Scaling_Checked->[Preferences,PreferencesViewVisualSettingsGeoScaling,ConvertUIToScaleFactor,Format,HasUnsavedChanges,UpdateSavedChangesLabel,Switch,Item2,Nodes,MarkNodesAsModifiedAndRequestRun,Log,Item3,Children,ScaleFactorLog,Item1,TrackEvent],AddStyle_CancelButton_Click->[IsEnabled,Visibility,ResetAddStyleControl,Collapsed],OnMoreInfoClicked->[Relative,NodeAutocompleteDocumentationUriString,Execute],ReloadCPython_Click->[OnRequestPythonReset],InitRadioButtonsDescription->[DescriptionScaleRange,Bold,Add,ChangeScaleFactorPromptDescriptionDefaultSetting],ButtonColorPicker_Click->[OK,B,Background,G,ShowDialog,R,FromRgb],Log->[Info,Log],InstalledPackagesExpander_OnExpanded->[TrackCommandEvent,Source,OriginalSource],InstalledPackagesExpander_OnCollapsed->[Source,OriginalSource],AddStyleButton_Click->[IsEnabled,Visibility,Visible,Focus],SetupPreferencesViewModel->[SavedChangesTooltip,SavedChangesLabel,Empty],PreferencesPanel_MouseDown->[Move,Preferences,Left,ChangedButton,DragMove,TrackEvent],InitializeComponent,DataContext,Preferences,PreferencesViewModel,InitRadioButtonsDescription,Open,SetupPreferencesViewModel,TrackEvent]]
Close button click.
it's a bit weird to me that some of this logic lives in `PackagePathsViewModel` and some on `PreferencesViewModel` directly, guess it would be nice if it all lived in PackagePathViewModel and was one method we could call.
@@ -200,7 +200,11 @@ public abstract class Job { } private synchronized void completeWithError(Throwable error) { - setResult(error.getMessage()); + // TODO(jl): Remove this trick. Most of error are covered by InterpreterException + if (error instanceof InterpreterException && null != error.getCause()) { + error = error.getCause(); + } + setResult(new InterpreterResult(Code.ERROR, getStack(error))); setException(error); dateFinished = new Date(); }
[Job->[abort->[jobAbort],isTerminated->[isRunning,isReady,isPending],equals->[hashCode],isRunning->[isRunning],setException->[getStack],hashCode->[hashCode]]]
Complete with an error.
do we need to return the whole stack as the result? that seems a bit much to the user. how about just the message as before and log the exception?
@@ -210,7 +210,7 @@ namespace System.Net.Security } } - private Task? ProcessAuthenticationWithTelemetry(bool isAsync, bool isApm, CancellationToken cancellationToken) + private async ValueTask ProcessAuthenticationWithTelemetryAsync(bool isAsync, bool isApm, CancellationToken cancellationToken) { NetSecurityTelemetry.Log.HandshakeStart(_context!.IsServer, _sslAuthenticationOptions!.TargetHost); ValueStopwatch stopwatch = ValueStopwatch.StartNew();
[SslStream->[SendAuthResetSignal->[SetException],EnsureFullTlsFrameAsync->[HaveFullTlsFrame],ReadAsyncInternal->[HaveFullTlsFrame,ReturnReadBufferIfEmpty]]]
Process authentication with optional telemetry. Logs handshake failure and success.
The only call site either assumes this completes synchronously or uses AsTask. This should just be `async Task`.
@@ -89,6 +89,11 @@ public class CursorProviderJanitor { public void releaseCursor(Cursor cursor) { try { cursor.release(); + if (cursors.remove(cursor)) { + if (provider.isClosed() && cursors.isEmpty()) { + releaseResources(); + } + } } catch (Exception e) { LOGGER.warn("Exception was found trying to release cursor resources. Execution will continue", e); } finally {
[CursorProviderJanitor->[close->[close],releaseResources->[close,releaseResources]]]
Release a cursor.
why not put this with an `&&` in the previous `if`?
@@ -40,9 +40,10 @@ public class LifecycleCallbacks implements ModuleLifecycle { cacheRegistry.registerInternalCache(SERVER_STATE_CACHE, getServerStateCacheConfig(globalConfiguration).build(), EnumSet.of(InternalCacheRegistry.Flag.PERSISTENT)); + SerializationContext serCtx = gcr.getComponent(PersistenceMarshallerImpl.class, KnownComponentNames.PERSISTENCE_MARSHALLER).getSerializationContext(); ClassWhiteList classWhiteList = gcr.getComponent(EmbeddedCacheManager.class).getClassWhiteList(); ClassLoader classLoader = globalConfiguration.classLoader(); - Marshaller marshaller = jbossMarshaller(classLoader, classWhiteList); + Marshaller jbossMarshaller = getJbossMarshaller(classLoader, classWhiteList); EncoderRegistry encoderRegistry = gcr.getComponent(EncoderRegistry.class); JsonTranscoder jsonTranscoder = new JsonTranscoder(classLoader, classWhiteList);
[LifecycleCallbacks->[jbossMarshaller->[asSubclass,newInstanceOrNull,loadClass],cacheManagerStarting->[running,getClassWhiteList,jbossMarshaller,JsonTranscoder,of,classLoader,XMLTranscoder,build,getComponent,JavaSerializationTranscoder,ProtostreamBinaryTranscoder,overrideMarshaller,registerTranscoder,registerInternalCache,getTranscoder,JBossMarshallingTranscoder],getServerStateCacheConfig->[ConfigurationBuilder,isClustered,cacheMode]]]
Registers the necessary components for the cache manager.
I have to admit I am not a fan of referencing `PersistenceMarshallerImpl` everywhere. The `PersistenceMarshaller` interface is already intrinsically bound to protostream through the `register` method, it doesn't seem far fetched for it to also be able to return the `SerializationContext` instance. But this can be fixed in a different PR.
@@ -352,6 +352,13 @@ RtpsUdpReceiveStrategy::deliver_sample_i(ReceivedDataSample& sample, link_->received(data, receiver_.source_guid_prefix_); recvd_sample_ = 0; + const RepoIdSet* readers = link_->updateWriterSeqReaders(sample.header_.publication_id_, sample.header_.sequence_); + if (readers) { + for (RepoIdSet::const_iterator i = readers->begin(); i != readers->end(); ++i) { + readers_withheld_.insert(*i); + } + } + if (data.readerId != ENTITYID_UNKNOWN) { RepoId reader; std::memcpy(reader.guidPrefix, link_->local_prefix(),
[No CFG could be retrieved]
The main method of this class.
This will crash if there's an association occurring at the same time, since both threads will be accessing / changing the internal RepoIdSet at the same time. I'd change the method signature of updateWriterSeqReaders (or whatever we call it) to take in readers_withheld_ and then insert the GUIDs from the internal set while the RtpsUdpDataLink lock is held.
@@ -92,6 +92,15 @@ class FileDownloader implements DownloaderInterface $hostname = 'github.com'; } + $extra = $package->getExtra(); + + // in case the package provides custom context options we use them + if (isset($extra['context-options'])) { + $options = $extra['context-options']; + $oldOptions = $this->rfs->getOptions(); + $this->rfs->setOptions($options); + } + try { try { if (!$this->cache || !$this->cache->copyTo($this->getCacheKey($package), $fileName)) {
[FileDownloader->[getFileName->[getDistUrl],update->[remove,download],download->[processUrl,isInteractive,authorizeOAuthInteractively,authorizeOAuth,copy,ensureDirectoryExists,getCacheKey,removeDirectory,getDistUrl,getDistSha1Checksum,copyFrom,getFileName,isVerbose,clearCache,hasAuthentication,copyTo,getCode,write,getName],getCacheKey->[getDistReference,getVersion,getDistType,getName],clearCache->[remove,getCacheKey,getFileName],remove->[removeDirectory,write,getName],__construct->[gc,get]]]
Downloads a package into a given path. Checks if the user has access to the private repos and, if so, copies the file from the cache.
would it make sense to array_merge $options and $oldOptions before setting it in rfs? just in case there are $oldOptions defined which are not overwritten with context-options?
@@ -14,7 +14,9 @@ <div class="status-block" style="background: <%= status[:color] %>"> <%= status[:name] %> </div> - <div class="status-comment"><%= status[:status_comment] %></div> + <div class="status-comment"> + <%= status.next_status&.my_module_status_conditions&.collect(&:description)&.join('<br>')&.html_safe %> + </div> </div> <% end %>
[No CFG could be retrieved]
Renders a single status block.
Just use simple loop instead of collect, in order to avoid html_safe
@@ -145,6 +145,11 @@ void WorldSession::HandleArenaTeamInviteOpcode(WorldPacket & recvData) return; } + //check for fake packets and disaster addons what make client crash + std::string arenaTeamName = arenaTeam->GetName(); + if (!ChatHandler(this).isValidChatMessage(arenaTeamName.c_str())) + return; + #if defined(ENABLE_EXTRAS) && defined(ENABLE_EXTRA_LOGS) sLog->outDebug(LOG_FILTER_BATTLEGROUND, "Player %s Invited %s to Join his ArenaTeam", GetPlayer()->GetName().c_str(), invitedName.c_str()); #endif
[HandleArenaTeamLeaderOpcode->[SetCaptain,GetArenaTeamById,outDebug,GetMember,BroadcastEvent,GetName,GetCaptain,GetGUID,SendArenaTeamCommandResult,normalizePlayerName],HandleArenaTeamDisbandOpcode->[GetArenaTeamById,outDebug,GetCaptain,IsFighting,Disband,GetGUID],SendNotInArenaTeamPacket->[SendPacket,uint8,uint32],HandleArenaTeamRemoveOpcode->[GetArenaTeamById,outDebug,DelMember,GetMember,BroadcastEvent,GetName,GetCaptain,IsFighting,GetGUID,SendArenaTeamCommandResult,normalizePlayerName],HandleArenaTeamQueryOpcode->[SendStats,GetArenaTeamById,outDebug,Query],HandleArenaTeamInviteOpcode->[getIntConfig,GetId,GetArenaTeamId,GetSession,GetSlot,c_str,getLevel,empty,GetName,getBoolConfig,GetType,GetPlayer,GetSocial,GetTeamId,normalizePlayerName,GetArenaTeamById,outDebug,GetMembersSize,GetArenaTeamIdInvited,SendArenaTeamCommandResult,SetArenaTeamIdInvited],HandleArenaTeamDeclineOpcode->[SetArenaTeamIdInvited,outDebug],HandleArenaTeamAcceptOpcode->[getBoolConfig,GetGUID,GetArenaTeamById,GetArenaTeamId,outDebug,GetSlot,GetPlayerTeamIdByGUID,GetName,GetCaptain,AddMember,GetArenaTeamIdInvited,SendArenaTeamCommandResult,GetTeamId,BroadcastEvent],HandleArenaTeamLeaveOpcode->[GetArenaTeamById,outDebug,GetMembersSize,DelMember,GetName,IsFighting,GetCaptain,Disband,GetGUID,SendArenaTeamCommandResult,BroadcastEvent],SendArenaTeamCommandResult->[SendPacket,length,uint32],HandleArenaTeamRosterOpcode->[GetArenaTeamById,outDebug,Roster],HandleInspectArenaTeamsOpcode->[GetArenaTeamById,GetArenaTeamId,GuidHigh2TypeId,outDebug,GUID_HIPART,GUID_LOPART,GetGUID,Inspect]]
Handle an Arena Team invite opcode. If there is no match, send a request to the other side.
please reword in `// check for fake packets and bad addons that cause client to crash`
@@ -97,11 +97,6 @@ namespace NServiceBus.Transports.Msmq return MessageQueueTransactionType.None; } - if (SuppressDistributedTransactions) - { - return MessageQueueTransactionType.Single; - } - return Transaction.Current != null ? MessageQueueTransactionType.Automatic : MessageQueueTransactionType.Single;
[MsmqMessageSender->[MessageQueueTransactionType->[Single,Current,None,Automatic,UseTransactionalQueues],ThrowFailedToSendException->[Machine,Queue,Format],Send->[Transaction,MessageQueueErrorCode,EnlistInReceiveTransaction,Format,HasActiveTransaction,UseJournalQueue,QueueNotFound,GetTransactionTypeForSend,TimeToReachQueue,ReplyToAddress,ResponseQueue,GetReturnAddress,UseConnectionCache,Send,Convert,ToString,ThrowFailedToSendException,GetFullPath,Destination,UseDeadLetterQueue]]]
GetTransactionTypeForSend - Get the transaction type for sending a message.
@johnsimons do u know why this is deleted as part of this change?
@@ -406,6 +406,8 @@ def thermald_thread(): power_monitor.calculate(peripheralState, startup_conditions["ignition"]) msg.deviceState.offroadPowerUsageUwh = power_monitor.get_power_used() msg.deviceState.carBatteryCapacityUwh = max(0, power_monitor.get_car_battery_capacity()) + current_power_draw = HARDWARE.get_current_power_draw() + msg.deviceState.powerDrawW = current_power_draw if current_power_draw is not None else 0 # Check if we need to disable charging (handled by boardd) msg.deviceState.chargingDisabled = power_monitor.should_disable_charging(startup_conditions["ignition"], in_car, off_ts)
[thermald_thread->[read_thermal,setup_eon_fan,set_offroad_alert_if_changed],handle_fan_eon->[set_eon_fan],read_thermal->[read_tz],main->[thermald_thread],main]
Thermald thread: reads thermal and power state, populates the device state message (power usage, battery capacity, power draw), checks the last update time and raises offroad alerts if needed, and determines whether charging should be disabled.
Think the linter is unhappy about the implementation of this function in `selfdrive/hardware/eon/hardware.py` that returns None, but should probably just return 0. But think this warning is a bit lame, since it doesn't care that you already check for None here. Feel free to disable it in the pylintrc.
@@ -2173,7 +2173,6 @@ obj_comp_cb(tse_task_t *task, void *data) obj_auxi = tse_task_stack_pop(task, sizeof(*obj_auxi)); obj_auxi->to_leader = 0; obj_auxi->io_retry = 0; - obj_auxi->csum_retry = 0; switch (obj_auxi->opc) { case DAOS_OBJ_DKEY_RPC_ENUMERATE: arg = data;
[No CFG could be retrieved]
DKey object update callback. Dkey-specific enumerate.
if you remove it it means it cannot be clear once it is set, probably with problem for example: 1) fetch first time met a csum err and retry with another shard_B 2) the RPC to that shard get temporary network err for some reason, or just pool map ESTALE, then retry 3) in the retried fetch see that csum retry flag and retry shard_C, if no other shard it will return error to user. but possibly just ESTALE and can be finished refresh pool map and retry to shard_B again. Any reason for the removing?
@@ -45,6 +45,14 @@ class NodeDistribution(object): NodeDistribution.VALID_PACKAGE_MANAGER_LIST.keys())) register('--yarnpkg-version', advanced=True, default='v0.19.1', fingerprint=True, help='Yarnpkg version. Used for binary utils') + register('--eslint-setupdir', advanced=True, fingerprint=True, + help='Find the package.json under this dir for installing eslint and plugins.') + register('--eslint-supportdir', advanced=True, default='bin/eslint', + help='Find the ESLint distribution under this dir.') + register('--eslint-config', advanced=True, fingerprint=True, + help='The path to the global eslint configuration file specifying all the rules') + register('--eslint-ignore', advanced=True, fingerprint=True, + help='The path to the global eslint ignore path') def create(self): # NB: create is an instance method to allow the user to choose global or scoped.
[NodeDistribution->[install_yarnpkg->[unpack_package],install_node->[unpack_package],__init__->[_normalize_version,validate_package_manager],_command_gen->[Command],node_command->[_command_gen],npm_command->[_command_gen],Command->[check_output->[_prepare_env,check_output],run->[_prepare_env]],Factory->[create->[create,NodeDistribution]],yarnpkg_command->[_command_gen]]]
Register options for this NodeDistributionFactory.
It seems like all of these options should potentially be on an "eslint" subsystem instead, which could depend on the `NodeDistribution` subsystem? Also, I don't think that having an explicit `setupdir` option is a good idea, as you could always just do it in a temp directory, right?
@@ -77,10 +77,11 @@ public class MetadataNotifierClient { // Create a method instance. HttpPost method = new HttpPost(notifier.getUrl()); - final UrlEncodedFormEntity entity = new UrlEncodedFormEntity(data); + final UrlEncodedFormEntity entity = new UrlEncodedFormEntity(data, Charset.forName("UTF-8")); final RequestConfig.Builder configBuilder = RequestConfig.custom(); configBuilder.setMaxRedirects(3); + method.addHeader("accept-charset", "UTF-8"); method.setEntity(entity); final boolean authenticationEnabled = StringUtils.isNotBlank(notifier.getUsername()) && notifier.getPassword() != null &&
[MetadataNotifierClient->[execute->[execute],webDelete->[execute]]]
Execute the web update.
I would use `StandardCharsets.UTF_8` constant here.
@@ -784,7 +784,7 @@ PlayerSAO::PlayerSAO(ServerEnvironment *env_, RemotePlayer *player_, u16 peer_id m_prop.hp_max = PLAYER_MAX_HP; m_prop.physical = false; m_prop.weight = 75; - m_prop.collisionbox = aabb3f(-0.3f, -1.0f, -0.3f, 0.3f, 0.75f, 0.3f); + m_prop.collisionbox = aabb3f(-0.3f, 0.0f, -0.3f, 0.3f, 1.75f, 0.3f); // start of default appearance, this should be overwritten by LUA m_prop.visual = "upright_sprite"; m_prop.visual_size = v2f(1, 2);
[checkMovementCheat->[getName,isAttached,setBasePosition],sendPosition->[isAttached],getDescription->[getName],step->[isAttached,getPropertyPacket,step,setHP],rightClick->[isAttached],setWieldedItem->[getWieldList,getInventory],setYawAndSend->[setYaw],getInventoryLocation->[getName],getWieldedItem->[getWieldList,getInventory],getWieldedItemOrHand->[getWieldList,getInventory],getSelectionBox->[getCollisionBox],setPitchAndSend->[setPitch],punch->[isAttached,getDescription,getHP,getName,getType,setHP],setPos->[isAttached,setBasePosition],getClientInitializationData->[getPropertyPacket,getName,getClientInitializationData,getHP], UnitSAO->[getType],moveTo->[isAttached,setBasePosition]]
PlayerSAO constructor: initializes the default object properties (max HP, collision box, default appearance).
On merge this can be changed to 1.77f to apply #6154
@@ -231,8 +231,12 @@ public class AccountResource { it.getAuthority()).collect(Collectors.toSet()) ); } - ) - .orElseThrow(RuntimeException::new); + ); + if (user.isPresent()) { + return user.get(); + } else { + throw new DefaultException("User could not be found"); + } } <%_ } _%> }
[No CFG could be retrieved]
The account REST resource (<%= packageName %>.web.rest.vm.KeyAndPasswordVM).
`.orElseThrow(() -> new DefaultException("User could not be found"));` would be better
@@ -104,6 +104,16 @@ function Action(props) { externalLink = `https://twitter.com/intent/tweet?text=${encodeURIComponent( getContentToShare(props.action, props.locale) )}` + } else if (type === 'FacebookShare') { + buttonLink = undefined + foregroundImgSrc = 'images/growth/facebook-icon.svg' + title = fbt('Share this on Facebook', 'RewardActions.postThisOnFacebook') + externalLink = [ + 'https://www.facebook.com/dialog/share?', + `app_id=${process.env.FACEBOOK_CLIENT_ID}`, + `&href=${encodeURIComponent(props.action.content.link)}`, + '&display=popup' + ].join('') } else if (type === 'TwitterFollow') { buttonLink = undefined foregroundImgSrc = 'images/growth/twitter-icon.svg'
[No CFG could be retrieved]
Renders a reward action: share/follow links for external services such as Twitter and Facebook.
@sparrowDom Just a thought, What do you think about moving these external links stuff to growth rules?
@@ -174,6 +174,9 @@ describe('3p-frame', () => { '"type":"_ping_",' + '"_context":{"referrer":"http://acme.org/",' + '"ampcontextVersion": "$internalRuntimeVersion$",' + + '"ampcontextFilepath": "https://cdn.ampproject.org/' + + '$internalRuntimeVersion$' + + '/ampcontext-v0.js",' + '"canonicalUrl":"' + docInfo.canonicalUrl + '",' + '"sourceUrl":"' + locationHref + '",' + '"pageViewId":"' + docInfo.pageViewId + '","clientId":"cidValue",' +
[No CFG could be retrieved]
Creates an object. Expects a hash of the values of the object.
move it to the previous line?
@@ -34,7 +34,7 @@ namespace System.Net.Test.Common Task serverTask = serverFunc(server); await new Task[] { clientTask, serverTask }.WhenAllOrAnyFailed().ConfigureAwait(false); - }, options: options).TimeoutAfter(millisecondsTimeout); + }, options: options).WaitAsync(TimeSpan.FromMilliseconds(millisecondsTimeout)); } }
[HttpRequestData->[FromHttpRequestMessageAsync->[ToString],GetSingleHeaderValue->[GetHeaderValues]],GenericLoopbackServer->[AcceptConnectionSendResponseAndCloseAsync->[HandleRequestAsync]]]
Creates a server asynchronously with the given client and server functions.
Wouldn't it make sense to also add a `WaitAsync(int millisecondsTimeout)` overload?
@@ -128,6 +128,8 @@ func TestStackCommands(t *testing.T) { // Error out, err := e.RunCommandExpectError("pulumi", "stack", "rm", "anor-londo", "--yes") assert.Empty(t, out) - assert.Contains(t, err, ".pulumi/stacks/pulumi-test/anor-londo.json: no such file or directory") + // local: .pulumi/stacks/pulumi-test/anor-londo.json: no such file or directory + // cloud: Stack 'integration-test-59f645ba/pulumi-test/anor-londo' not found + assert.Contains(t, err, "anor-londo") }) }
[DeleteEnvironment,Join,Equal,NewEnvironment,RunCommand,Contains,Empty,RunCommandExpectError,NoError,NotNil,GetStacks,Run,WriteFile]
Check if there is an anor-londo stack in the current working directory.
Can you file a bug so we can unify these messages? This is the sort of thing I would expect should not vary depending on backend.
@@ -141,6 +141,16 @@ class SimpleTagger(Model): predictions = output_dict["class_probabilities"].data.squeeze(0) _, argmax = predictions.max(-1) indices = argmax.squeeze(1).numpy() - tags = [self.vocabulary.get_token_from_index(x, namespace="tags") for x in indices] + tags = [self.vocab.get_token_from_index(x, namespace="tags") for x in indices] return {"tags": tags, "class_probabilities": predictions.numpy()} + + @classmethod + def from_params(cls, vocab: Vocabulary, params: Params) -> 'SimpleTagger': + hidden_size = params.pop("hidden_size", 200) + num_layers = params.pop("num_layers", 2) + token_embedder = TokenEmbedder.from_params(vocab, params.pop("token_embedder")) + return cls(vocab=vocab, + token_embedder=token_embedder, + hidden_size=hidden_size, + num_layers=num_layers)
[SimpleTagger->[forward->[softmax,size,tag_projection_layer,max,dim,sequence_loss,embedding,stacked_encoders,view],__init__->[TimeDistributed,get_vocab_size,LSTM,super,Linear,Embedding,CrossEntropyLoss],tag->[forward,output_dict,get_padding_lengths,max,squeeze,get_token_from_index,as_array,numpy,Variable,index]]]
Perform inference on a TextField to produce predicted tags and class probabilities over the possible tags.
Is there a benefit to using `cls()` over `SimpleTagger` (more wondering than anything else, I think it's marginally clearer.)?
@@ -757,6 +757,11 @@ func (b TLFIdentifyBehavior) WarningInsteadOfErrorOnBrokenTracks() bool { return b == TLFIdentifyBehavior_CHAT_GUI } +// All of the chat modes want to prevent tracker popups. +func (b TLFIdentifyBehavior) ShouldSuppressTrackerPopups() bool { + return b != TLFIdentifyBehavior_DEFAULT_KBFS +} + func (c CanonicalTLFNameAndIDWithBreaks) Eq(r CanonicalTLFNameAndIDWithBreaks) bool { if c.CanonicalName != r.CanonicalName { return false
[Match->[IsNil,String],Exists->[IsNil],ToShortIDString->[ToBytes],ToMediumID->[toBytes],UnixMicroseconds->[Time],ToJsonw->[IsNil],GetUID->[GetUID],UnixMilliseconds->[Time],GetKeyType->[ToBytes],GetName->[GetName],FindDevice->[Eq],GoError->[Error],Duration->[Duration],String->[String],NotEqual->[Equal],ToShortID->[toBytes],UnixSeconds->[Time],MarshalJSON->[String],IsIn->[Equal],Export->[GetUID,GetName],Error]
WarningInsteadOfErrorOnBrokenTracks returns true if the given TLFNameAndIDWithBreaks.
Maybe make this inclusive of the chat types, instead of in this direction?
@@ -2494,6 +2494,17 @@ def concatenate(tensors, axis=-1): Returns: A tensor. + + Example: + ```python + >>> a = tf.constant([[1, 2, 3], [4, 5, 6], [7, 8, 9]]) + >>> b = tf.constant([[10, 20, 30], [40, 50, 60], [70, 80, 90]]) + >>> tf.keras.backend.concatenate((a, b), axis=1) + <tf.Tensor: id=14, shape=(3, 6), dtype=int32, numpy= + array([[ 1, 2, 3, 10, 20, 30], + [ 4, 5, 6, 40, 50, 60], + [ 7, 8, 9, 70, 80, 90]], dtype=int32)> + ``` """ if axis < 0: rank = ndim(tensors[0])
[var->[cast],all->[cast],batch_set_value->[get_session,placeholder,dtype,get_graph],gradients->[gradients],argmin->[argmin],set_value->[get_session,placeholder,dtype,get_graph],repeat->[ndim],gather->[gather],binary_crossentropy->[_constant_to_tensor,log],resize_images->[constant,permute_dimensions,shape,int_shape],_broadcast_normalize_batch_in_training->[ndim,shape],_get_session->[_current_graph],_get_available_gpus->[get_session],eye->[eye,variable],less->[less],freezable_variable->[get_graph],random_binomial->[ones,zeros,random_uniform],squeeze->[squeeze],conv2d->[_preprocess_padding,transpose,_preprocess_conv2d_input],track_tf_optimizer->[get_graph],shape->[shape],clear_session->[reset_uids],minimum->[minimum],l2_normalize->[l2_normalize],arange->[cast],in_top_k->[in_top_k],ctc_batch_cost->[expand_dims,ctc_label_dense_to_sparse,squeeze,transpose,cast,log],_is_current_explicit_device->[_get_current_tf_device],learning_phase_scope->[set_learning_phase],cumprod->[cumprod],_preprocess_conv1d_input->[transpose,_has_nchw_support],sin->[sin],pool2d->[_preprocess_padding,transpose,_preprocess_conv2d_input],zeros->[track_variable,zeros,variable],reshape->[reshape],softsign->[softsign],tanh->[tanh],batch_dot->[ndim,shape],dot->[ndim,is_sparse,shape,int_shape],switch->[ndim,reshape,shape,tile,ones_like,cast],random_normal_variable->[variable],cast->[cast],log->[log],in_test_phase->[in_train_phase],random_normal->[random_normal],separable_conv1d->[_preprocess_padding,expand_dims,_preprocess_conv1d_input,transpose,squeeze],get_session->[_get_session],ctc_decode->[transpose,cast,log],batch_normalization->[ndim,_has_nchw_support,zeros_like,batch_normalization,ones_like],abs->[abs],one_hot->[one_hot],_has_nchw_support->[_is_current_explicit_device,_get_available_gpus],sqrt->[_constant_to_tensor,sqrt],placeholder->[placeholder,get_graph],track_variable->[get_graph],cos->[cos],get_value->[eval,get_session],separable_conv2d->[_preprocess_padding,transpose,_preprocess_conv2d_input,
separable_conv2d],_preprocess_conv2d_input->[transpose,_has_nchw_support],relu->[_to_tensor,greater,_constant_to_tensor,relu,cast],_initialize_variables->[_get_variables,get_graph],configure_and_create_distributed_session->[_create_session->[get_default_session_config,set_session],in_multi_worker_mode,_create_session],categorical_crossentropy->[_constant_to_tensor,log],rnn->[_process_single_input_t->[reverse],set_shape->[set_shape],swap_batch_timestep->[transpose],_expand_mask->[tile,expand_dims],_step->[flatten,_expand_mask],_process_single_input_t,flatten,expand_dims,_get_input_tensor,zeros_like,shape,constant,reverse,swap_batch_timestep,_expand_mask,cast,stack],ctc_label_dense_to_sparse->[range_less_than->[expand_dims],reshape,shape,reverse,tile,stack,transpose,cast,concatenate],pool3d->[_preprocess_conv3d_input,_preprocess_padding,transpose],batch_flatten->[prod,shape,reshape],exp->[exp],truncated_normal->[truncated_normal],permute_dimensions->[transpose],try_convert_scipy_to_sparse->[flatten,is_tensor_or_composite_tensor,_try_process_scipy_sparse_input],sigmoid->[sigmoid],map_fn->[map_fn],flatten->[reshape],print_tensor->[identity,get_graph],square->[square],ones->[ones,track_variable,variable],normalize_batch_in_training->[ndim,_broadcast_normalize_batch_in_training,_has_nchw_support,_fused_normalize_batch_in_training,_regular_normalize_batch_in_training],depthwise_conv2d->[depthwise_conv2d,_preprocess_padding,transpose,_preprocess_conv2d_input],function->[GraphExecutionFunction,EagerExecutionFunction],EagerExecutionFunction->[__call__->[try_convert_scipy_to_sparse,flatten,cast],__init__->[_scratch_graph,identity,flatten,get_graph]],expand_dims->[expand_dims],greater->[greater],softmax->[softmax],mean->[cast],local_conv2d->[local_conv],sign->[sign],random_uniform_variable->[variable],concatenate->[ndim,to_dense,is_sparse],softplus->[softplus],cast_variables_to_tensor->[_cast_variables_to_tensor->[identity]],random_uniform->[random_uniform],maximum->[maximum],l
ocal_conv1d->[local_conv],any->[cast],std->[cast],round->[round],not_equal->[not_equal],conv2d_transpose->[_preprocess_padding,shape,_preprocess_conv2d_input,conv2d_transpose,stack,transpose],stack->[stack],greater_equal->[greater_equal],to_dense->[is_sparse],cumsum->[cumsum],conv3d->[_preprocess_conv3d_input,_preprocess_padding,transpose],foldr->[foldr],identity->[identity],batch_get_value->[get_session],conv1d->[_preprocess_conv1d_input,temporal_padding,_preprocess_padding,transpose],constant->[constant],eval->[to_dense,get_value],in_train_phase->[switch,learning_phase],tile->[tile],bias_add->[ndim,_has_nchw_support,reshape,int_shape,bias_add],ones_like->[ones_like],transpose->[transpose],clip->[_constant_to_tensor],_get_current_tf_device->[_TfDeviceCaptureOp,get_graph],elu->[elu],pow->[pow],equal->[equal],zeros_like->[zeros_like],_fused_normalize_batch_in_training->[constant],sparse_categorical_crossentropy->[flatten,reshape,shape,_constant_to_tensor,transpose,cast,log],eager_learning_phase_scope->[learning_phase],GraphExecutionFunction->[_make_callable->[cast],__call__->[flatten,_call_fetch_callbacks,get_session,_make_callable,try_convert_scipy_to_sparse],__init__->[identity,flatten]],hard_sigmoid->[_constant_to_tensor],_preprocess_conv3d_input->[transpose,_has_nchw_support],foldl->[foldl],reverse->[reverse],repeat_elements->[reshape,shape,constant,ones,concatenate],unique_object_name->[get_default_graph_uid_map],less_equal->[less_equal],argmax->[argmax],stop_gradient->[stop_gradient],local_conv->[reshape,batch_dot,int_shape,permute_dimensions,concatenate],conv3d_transpose->[_preprocess_conv3d_input,_preprocess_padding,shape,conv3d_transpose,stack,transpose],_try_process_scipy_sparse_input->[expand_dims,concatenate]]
Concatenates a list of tensors alongside the specified axis.
say -1 even though it's the same in this case, since that's the default arg value.
@@ -30,6 +30,7 @@ import org.apache.beam.sdk.extensions.sql.impl.ParseException; import org.apache.beam.sdk.extensions.sql.impl.parser.impl.BeamSqlParserImpl; import org.apache.beam.sdk.extensions.sql.impl.utils.CalciteUtils; import org.apache.beam.sdk.extensions.sql.meta.Table; +import org.apache.beam.sdk.extensions.sql.meta.provider.test.TestTableProvider; import org.apache.beam.sdk.schemas.Schema; import org.junit.Test;
[BeamDDLTest->[testParseDropTable->[assertNotNull,executeDdl,get,assertNull,TestTableProvider,withTableProvider],testParseCreateTable_withoutTableComment->[executeDdl,get,JSONObject,mockTable,TestTableProvider,add,withTableProvider,JSONArray,put,assertEquals],testParseCreateTable_full->[executeDdl,get,JSONObject,mockTable,TestTableProvider,add,withTableProvider,JSONArray,put,assertEquals],testParseCreateTable_withoutLocation->[executeDdl,get,JSONObject,TestTableProvider,withTableProvider,assertEquals,mockTable],testParseCreateTable_withoutTblProperties->[executeDdl,get,JSONObject,TestTableProvider,withTableProvider,assertEquals,mockTable],testParseCreateTable_minimal->[executeDdl,build,get,TestTableProvider,withTableProvider,assertEquals],testParseCreateTable_withoutType->[withTableProvider,TestTableProvider,executeDdl],mockTable->[build,mockTable]]]
Test for parsing a single object. hello. add("james") properties. add("bond") .
why these imports are needed?
@@ -214,6 +214,7 @@ func (s *scheduler) addNewConfigs(now time.Time, cfgs map[string]configs.Version generation := s.latestConfig s.Unlock() +userLoop: for userID, config := range cfgs { rulesByGroup, err := config.Config.Parse() if err != nil {
[workItemDone->[addWorkItem,Defer],addNewConfigs->[computeNextEvalTime]]
addNewConfigs adds new configs to the scheduler. There are no other cases where there are no config updates in the current configuration.
~~Thank you, kind sir. :-D~~ I've been shown the error of my ways! Or at least, have been told that this is a no-no. I'm sure I'll understand why one day.
@@ -2,6 +2,7 @@ package io.quarkus.reactive.pg.client.runtime; import static io.quarkus.credentials.CredentialsProvider.PASSWORD_PROPERTY_NAME; import static io.quarkus.credentials.CredentialsProvider.USER_PROPERTY_NAME; +import static io.quarkus.vertx.core.runtime.SSLConfigHelper.*; import java.util.Map;
[PgPoolRecorder->[legacyToPostgreSQLConnectOptions->[PgConnectOptions,getAsInt,setPipeliningLimit,isPresent,matches,get,fromUri,substring,setCachePreparedStatements,setPassword,length,setUser],legacyToPoolOptionsLegacy->[setMaxSize,PoolOptions],legacyInitialize->[legacyToPostgreSQLConnectOptions,legacyToPoolOptionsLegacy,pool],toPgConnectOptions->[PgConnectOptions,find,getAsInt,setPipeliningLimit,isPresent,matches,get,fromUri,orElse,substring,setCachePreparedStatements,setPassword,getCredentials,length,setUser],toPoolOptions->[setMaxSize,isPresent,getAsInt,PoolOptions],initialize->[toPgConnectOptions,toPoolOptions,pool],configurePgPool->[getValue,legacyInitialize,instance,addShutdownTask,initialize]]]
Package for testing. get the producePgPoolProducer.
We try to avoid import statements using `*`.
@@ -135,14 +135,14 @@ public abstract class AbstractResourceBasedServiceRegistryDao extends AbstractSe if (!newService.equals(oldService)) { update(newService); - publishEvent(new CasRegisteredServicesRefreshEvent(this)); + createServiceRefreshEvent.andThen(publish); } else { LOGGER.debug("Service [{}] loaded from [{}] is identical to the existing entry. Entry may have already been saved " + "in the event processing pipeline", newService.getId(), file.getName()); } } }; - this.serviceRegistryConfigWatcher = new ServiceRegistryConfigWatcher(serviceRegistryDirectory, onCreate, onModify, onDelete); + this.serviceRegistryConfigWatcher = new PathWatcher(serviceRegistryDirectory, onCreate, onModify, onDelete, INTERVAL); this.serviceRegistryWatcherThread = new Thread(this.serviceRegistryConfigWatcher); this.serviceRegistryWatcherThread.setName(this.getClass().getName()); this.serviceRegistryWatcherThread.start();
[AbstractResourceBasedServiceRegistryDao->[delete->[delete],size->[size],save->[findServiceById]]]
Initialize the service registry.
If this actually needs to be 0 to limit monitoring intervals, how about an overloaded constructor that removes the need for the passing of that value?
@@ -27,6 +27,7 @@ from test_compute_mass_moment_of_inertia import TestComputeMassMomentOfInertia a # Simple patch tests from test_patch_test_small_strain import TestPatchTestSmallStrain as TTestPatchTestSmallStrain from test_patch_test_small_strain_bbar import TestPatchTestSmallStrainBbar as TTestPatchTestSmallStrainBbar +from test_patch_test_small_displacement_mixed_volumetric_strain import TestPatchTestSmallDisplacementMixedVolumetricStrain as TTestPatchTestSmallDisplacementMixedVolumetricStrain from test_patch_test_large_strain import TestPatchTestLargeStrain as TTestPatchTestLargeStrain from test_quadratic_elements import TestQuadraticElements as TTestQuadraticElements from test_patch_test_shells import TestPatchTestShells as TTestPatchTestShells
[AssembleTestSuites->[T3D2NTrussNonLinearTensionPlasticTest,T3D2NBeamCrNonLinearTest,TBigCubeSmallDeformationPlasticityVMTest,TTensileTestStructuralTest,TRayleighProcessTest,TShellT3ThickLinearStaticTests,T3D2NBeamCrTest,TShellT3AndQ4LinearStaticStructPinchedHemisphereTests,TFofi4PointTentCableTests,T3D2NTrussLinearTest,TTLTwoDTensionTriPatchTest,TSimpleJ2PlasticityTest,TBigCubeSmallDeformationPlasticityTTest,TTestAdjointStressResponseFunction,TSimpleMeshMovingTest,TTestMassResponseFunction,TMembraneQ4PointLoadTests,T2D2NBeamCrTest,TShellQ4ThickBendingRollUpTests,T3D2NTrussTest,T3D2NTrussNonLinearSnapthroughPlasticTest,T3D2NBeamCrLinearTest,TULTwoDTensionQuaPatchTest,TTestTotalLagrangian2D3N,TShellQ4ThickOrthotropicLaminateLinearStaticTests,TExplicitSolidBeam,TTLTwoDShearQuaPatchTest,TShellQ4ThinOrthotropicLaminateLinearStaticTests,T3D2NBeamCrDynamicTest,T3D2NTrussLinearCompressionPlasticTest,TShellT3ThickNonLinearDynamicTests,TTestEigenfrequencyResponseFunction,TSprismBendingPatchTests,TSprismMembranePatchTests,TShellT3AndQ4NonLinearStaticUnstructHingedCylRoofSnapthroughTests,TRigidSphereFailingExplicit,TEigen3D3NThinCircleTests,TSDTwoDTensionQuaPatchTest,TShellT3IsotropicLinearStaticStructScordelisLoRoofTests,TestLoader,TShellT3ThinBendingRollUpTests,TULThreeDShearTetraPatchTest,TShellT3AndQ4NonLinearStaticStructHingedCylRoofSnapthroughTests,TTestStrainEnergyResponseFunction,TULThreeDTensionTetraPatchTest,TRigidFaceTestWithImposeRigidMovementProcess,TPendulusULTest,TSDTwoDShearTriPatchTest,TBigCubeSmallDeformationPlasticityMCTest,TShellQ4ThinNonLinearStaticTests,TSDThreeDTensionHexaPatchTest,TULTwoDTensionTriPatchTest,TTestUpdatedLagrangian3D8N,TTLThreeDTensionHexaPatchTest,TTLThreeDShearTetraPatchTest,TTLTwoDShearTriPatchTest,hasattr,TTLThreeDTensionTetraPatchTest,TShellQ4ThinLinearDynamicTests,TShellT3ThickNonLinearStaticTests,TShellT3ThickOrthotropicLaminateLinearStaticTests,TShellT3ThinOrthotropicLaminateLinearStaticTests,TShellT3AndQ4LinearStaticUnstructPinch
edHemisphereTests,TSmallDeformationPlasticityTest,T3D2NTrussLinearTensionPlasticTest,TShellT3AndQ4LinearStaticUnstructScordelisLoRoofTests,TTestSmallDisplacement2D4N,TULTwoDShearQuaPatchTest,TULThreeDShearHexaPatchTest,print,TShellT3AndQ4LinearStaticStructPinchedCylinderTests,TSDTwoDTensionTriPatchTest,TEigenTL3D8NCubeTests,TULTwoDShearTriPatchTest,TShellQ4ThinLinearStaticTests,addTest,TShellT3ThinDrillingRollUpTests,TSimpleSmallDeformationPlasticityTTest,T3D2NTrussDynamicTest,TTestAdjointStrainEnergyResponseFunction,TSDTwoDShearQuaPatchTest,TTestAdjointMaxStressResponseFunction,TShellT3AndQ4LinearStaticUnstructUnstructPinchedCylinderTests,TPendulusTLTest,TRigidSphereFailing,TTestAdjointDisplacementResponseFunction,TMembraneQ4TrussPointLoadTests,TSimpleSmallDeformationPlasticityMCTest,TBigCubeSmallDeformationPlasticityDPTest,TShellT3ThickLinearDynamicTests,TSDThreeDTensionTetraPatchTest,TTLThreeDShearHexaPatchTest,TShellT3AndQ4LinearStaticStructScordelisLoRoofTests,TRigidBlockTest,TSimpleSmallDeformationPlasticityDPTest,TShellQ4ThinNonLinearDynamicTests,TSimpleSmallDeformationPlasticityVMTest,TRigidEliminationTest,TEigenQ4Thick2x2PlateTests,TSDThreeDShearHexaPatchTest,TULThreeDTensionHexaPatchTest,TFofi4PointTentnoCableTests,TShellT3IsotropicScordelisTests,TSDThreeDShearTetraPatchTest,TTLTwoDTensionQuaPatchTest,addTests],abspath,PrintInfo,IsMPIAvailable,AssembleTestSuites,dirname,wait,CheckIfApplicationsAvailable,Popen,run,runTests]
Imports all of the types of a node in the system. Imports all the classes in the system that are not part of the current environment.
I think you forgot `test_cook_membrane`
@@ -43,6 +43,13 @@ namespace Dynamo.ViewModels SearchTextChanged(this, e); } + public event EventHandler WorkspaceChanged; + public void OnWorkspaceChanged(object sender, EventArgs e) + { + if (WorkspaceChanged != null) + WorkspaceChanged(this, e); + } + #endregion #region Properties/Fields
[SearchViewModel->[FocusSearch->[OnRequestFocusSearch],DefineFullCategoryNames->[DefineFullCategoryNames],SearchAndUpdateResults->[SearchAndUpdateResults],GetVisibleSearchResults->[GetVisibleSearchResults],Search->[SearchAndUpdateResults,Search],InsertClassesIntoTree->[InsertClassesIntoTree],OnSearchTextChanged]]
SearchTextChanged - Notify SearchTextChanged if it s not null.
When workspace has been changed, top result should be hidden.
@@ -58,8 +58,16 @@ public abstract class AbstractCacheTransaction implements CacheTransaction { /** Holds all the keys that were actually locked on the local node. */ private final AtomicReference<Set<Object>> lockedKeys = new AtomicReference<>(); - /** Holds all the locks for which the local node is a secondary data owner. */ - private final AtomicReference<Set<Object>> backupKeyLocks = new AtomicReference<>(); + /** + * Holds all the locks for which the local node is a secondary data owner. + * <p> + * A {@link CompletableFuture} is created for each key and it is completed when the backup lock is release for that + * key. A transaction, before acquiring the locks, must wait for all the backup locks (i.e. the {@link + * CompletableFuture}) is released, for all transaction created in the previous topology. + */ + @GuardedBy("this") + private Map<Object, CompletableFuture<Void>> backupKeyLocks; + //should we merge the locked and backup locked keys in a single map? protected final int topologyId;
[AbstractCacheTransaction->[getReleaseFutureForKeys->[getBackupLockedKeys,getLockedKeys],findAnyLockedOrBackupLocked->[getBackupLockedKeys,getLockedKeys],hasModification->[getModifications]]]
A base class for all cache transactions.
Nitpicking: is released, all transactions
@@ -357,7 +357,7 @@ function api_auth_key() { } // check that it is active - $api_user = get_api_user($CONFIG->site_id, $api_key); + $api_user = get_api_user($CONFIG->site_guid, $api_key); if (!$api_user) { // key is not active or does not exist throw new APIException(elgg_echo('APIException:BadAPIKey'));
[_php_api_exception_handler->[getMessage,getCode],get_call_method->[get],get_and_validate_api_headers->[get],pam_auth_usertoken->[isBanned],api_auth_hmac->[get],authenticate_method->[getFailureMessage,authenticate],cache_hmac_check_replay->[save,load]]
Check that an API key is present and that it is active and that the user is logged.
Plugins shouldn't use `$CONFIG`.