patch stringlengths 18 160k | callgraph stringlengths 4 179k | summary stringlengths 4 947 | msg stringlengths 6 3.42k |
|---|---|---|---|
@@ -114,7 +114,7 @@ function Unstable_FileUploader(props: PropsT) {
{afterFileDrop && (
<React.Fragment>
- {!!props.progressAmount && (
+ {props.progressAmount ? (
<ProgressBar
value={props.progressAmount}
overrides={{
| [No CFG could be retrieved] | A component that renders a single national file upload or national file drop in a drop React component for showing the negative button. | For an initial `0` progress it will render the spinner. Or is the expectation to have the progressAmount > 0? |
@@ -104,6 +104,7 @@ class ServiceBusServiceBusTest(ServiceBusTestCase):
self.sbs.create_subscription(topic_name, subscription_name, None, True)
#--Test cases for service bus service -------------------------------------
+
def test_create_service_bus_missing_arguments(self):
# Arrange
if AZURE_SERVICEBUS_NAMESPACE in os.environ:
| [ServiceBusServiceBusTest->[test_list_subscriptions->[_create_topic_and_subscription],test_receive_queue_message_timeout_50_http_timeout->[_create_queue],test_create_subscription_fail_on_exist->[_create_topic],test_create_rule_with_options_correlation_filter->[_create_topic_and_subscription],test_list_queues->[_create_queue],test_create_rule_with_options_empty_rule_action->[_create_topic_and_subscription],test_receive_subscription_message_delete->[_create_topic_and_subscription],test_unicode_receive_queue_message_binary_data->[_create_queue_and_send_msg],test_receive_subscription_message_peek_lock_mode->[_create_topic_and_subscription],test_receive_subscription_message_delete_with_slash->[_create_topic_and_subscription],test_receive_queue_message_read_delete_mode->[_create_queue_and_send_msg],test_send_queue_message_batch->[_create_queue],test_get_subscription_with_existing_subscription->[_create_topic_and_subscription],test_list_topics->[_create_topic],test_delete_rule_with_non_existing_rule->[_create_topic_and_subscription],test_send_queue_message_unicode->[_create_queue],test_delete_topic_with_existing_topic_fail_not_exist->[_create_topic],test_create_subscription_with_already_existing_subscription->[_create_topic],test_receive_queue_message_with_broker_properties->[_create_queue_and_send_msg],test_create_rule_with_options_sql_filter->[_create_topic_and_subscription],test_delete_rule_with_existing_rule->[_create_topic_and_subscription],test_receive_subscription_message_unlock->[_create_topic_and_subscription],test_receive_queue_message_timeout_50->[_create_queue],test_receive_subscription_message_read_delete_mode_throws_on_delete->[_create_topic_and_subscription],test_unicode_receive_subscription_message_binary_data->[_create_topic_and_subscription],test_get_rule_with_existing_rule_with_options->[_create_topic_and_subscription],test_receive_queue_message_delete->[_create_queue_and_send_msg],test_create_rule_with_options_true_filter->[_create_topic_and_subscript
ion],test_create_subscription->[_create_topic],test_receive_queue_message_unlock->[_create_queue_and_send_msg],test_create_rule_with_already_existing_rule_fail_on_exist->[_create_topic_and_subscription],test_delete_topic_with_existing_topic->[_create_topic],test_get_queue_with_existing_queue->[_create_queue],test_unicode_receive_subscription_message_unicode_data->[_create_topic_and_subscription],test_list_topics_with_special_chars->[_create_topic],test_send_topic_message_batch->[_create_topic_and_subscription],test_receive_queue_message_peek_lock_mode->[_create_queue_and_send_msg],test_unicode_receive_queue_message_unicode_data->[_create_queue_and_send_msg],test_receive_queue_message_with_broker_properties_as_a_dict->[_create_queue_and_send_msg],test_create_subscription_with_already_existing_subscription_fail_on_exist->[_create_topic],test_receive_queue_message_read_delete_mode_throws_on_delete->[_create_queue_and_send_msg],test_create_rule_with_already_existing_rule->[_create_topic_and_subscription],test_get_subscription_with_non_existing_subscription->[_create_topic_and_subscription],test_delete_queue_with_existing_queue_fail_not_exist->[_create_queue],test_receive_queue_message_timeout_5->[_create_queue],test_send_topic_message_unicode->[_create_topic_and_subscription],test_delete_subscription_with_non_existing_subscription_fail_not_exist->[_create_topic],test_send_topic_message->[_create_topic_and_subscription],test_delete_rule_with_non_existing_rule_fail_not_exist->[_create_topic_and_subscription],test_delete_subscription_with_existing_subscription->[_create_subscription,_create_topic],test_unicode_create_subscription_unicode_name->[_create_topic],test_send_queue_message_with_custom_message_properties->[_create_queue],test_get_dead_letter_subscription->[_create_topic_and_subscription],test_send_queue_message->[_create_queue],test_delete_subscription_with_existing_subscription_fail_not_exist->[_create_subscription,_create_topic],test_create_rule_with_options_fal
se_filter->[_create_topic_and_subscription],test_delete_rule_with_existing_rule_fail_not_exist->[_create_topic_and_subscription],test_send_queue_message_with_custom_message_type->[_create_queue],test_delete_subscription_with_non_existing_subscription->[_create_topic],test_receive_subscription_message_read_delete_mode->[_create_topic_and_subscription],test_list_queues_with_special_chars->[_create_queue],test_create_rule_with_options_sql_rule_action->[_create_topic_and_subscription],test_create_rule_no_options->[_create_topic_and_subscription],test_create_subscription_with_options->[_create_topic],test_unicode_create_rule_unicode_name->[_create_topic_and_subscription],test_delete_queue_with_existing_queue->[_create_queue],test_list_rules->[_create_topic_and_subscription],test_get_rule_with_non_existing_rule->[_create_topic_and_subscription],test_receive_queue_message_read_delete_mode_throws_on_unlock->[_create_queue_and_send_msg],_create_topic_and_subscription->[_create_topic],test_get_topic_with_existing_topic->[_create_topic],_create_queue_and_send_msg->[_create_queue],test_get_dead_letter_queue->[_create_queue],test_create_rule_no_options_fail_on_exist->[_create_topic_and_subscription],test_receive_queue_message_delete_with_slash->[_create_queue_and_send_msg],test_get_rule_with_existing_rule->[_create_topic_and_subscription],test_receive_subscription_message_read_delete_mode_throws_on_unlock->[_create_topic_and_subscription]]] | Test if create_service_bus_missing_arguments fails. | Bunch of newlines here too. |
@@ -76,7 +76,7 @@ static void zephyr_domain_thread_fn(void *p1, void *p2, void *p3)
for (;;) {
/* immediately go to sleep, waiting to be woken up by the timer */
- k_thread_suspend(_current);
+ k_sem_take(&zephyr_domain->sem, K_FOREVER);
dt->handler(dt->arg);
}
| [bool->[ll_sch_domain_get_pdata,platform_timer_get_atomic],void->[k_thread_suspend,k_wakeup,cpu_get_id],int->[k_thread_start,tr_info,k_timer_init,K_USEC,atomic_read,k_thread_cpu_mask_clear,k_thread_abort,k_thread_cpu_mask_enable,k_thread_create,k_thread_name_set,cpu_get_id,k_timer_stop,ll_sch_domain_get_pdata,tr_dbg,k_timer_start],timer_domain_init->[ll_sch_domain_set_pdata,domain_init,rzalloc],K_THREAD_STACK_DEFINE,K_KERNEL_STACK_ARRAY_DEFINE] | This function is called from the async thread thread thread. | Good catch. Yeah, code like this where you try to suspend yourself directly and be woken up from somewhere else is inherently racy, as the decision you just made ("I should be suspended") is based on data that isn't inside any kind of lock. Semaphore are the perfect primitive to use here. But if you have more complicated usages note we just added a condition variable that allows you to have arbitrary state inside a mutex, and atomically "go to sleep and release the lock" in a single call. |
@@ -83,12 +83,14 @@ $mesg=(GETPOST("msg") ? GETPOST("msg") : GETPOST("mesg"));
$search_day=GETPOST("search_day","int");
$search_month=GETPOST("search_month","int");
$search_year=GETPOST("search_year","int");
+$search_btn=GETPOST('button_search','alpha');
+$search_remove_btn=GETPOST('button_removefilter','alpha');
$limit = GETPOST('limit')?GETPOST('limit','int'):$conf->liste_limit;
$sortfield = GETPOST("sortfield",'alpha');
$sortorder = GETPOST("sortorder",'alpha');
$page = GETPOST("page",'int');
-if (empty($page) || $page == -1) { $page = 0; } // If $page is not defined, or '' or -1
+if (empty($page) || $page == -1 || !empty($search_btn) || !empty($search_remove_btn) || (empty($toselect) && $massaction === '0')) { $page = 0; } // If $page is not defined, or '' or -1
$offset = $limit * $page;
$pageprev = $page - 1;
$pagenext = $page + 1;
| [fetch,selectMassAction,showOutputField,select_country,fetch_object,jdate,getAlignFlag,select_all_categories,order,getDocumentsLink,select_salesrepresentatives,getNomUrl,get_form,getLoginUrl,select_dolusers,initHooks,typent_array,idate,load,plimit,transnoentities,executeHooks,selectProposalStatus,LibStatut,escape,thirdparty_and_contact_email_array,showCheckAddButtons,close,multiSelectArrayWithCheckbox,free,query,fetch_name_optionals_label,trans,num_rows,select_year,getOptionalsFromPost,selectarray,showFilterButtons,showdocuments] | Get the list of all product identifiers. Initialize a restricted area of a specific user. | What is this for ? || (empty($toselect) && $massaction === '0') |
@@ -2128,7 +2128,7 @@ public class ManagementServerImpl extends ManagerBase implements ManagementServe
_accountMgr.buildACLSearchBuilder(sb, domainId, isRecursive, permittedAccounts, listProjectResourcesCriteria);
}
- buildParameters(sb, cmd);
+ buildParameters(sb, cmd, true);
SearchCriteria<IPAddressVO> sc = sb.create();
setParameters(sc, cmd, vlanType);
| [ManagementServerImpl->[updateClusterPassword->[getHypervisorType,updateHostsInCluster],excludeNonDPDKEnabledHosts->[getId],updateVmGroup->[updateVmGroup,getId],getHypervisors->[getId],searchForVlans->[getId],startSystemVM->[startConsoleProxy,startSecondaryStorageVm],listCapabilities->[getId],searchForAlerts->[getId],stopConsoleProxy->[getId],AlertPurgeTask->[runInContext->[getId]],getCloudIdentifierResponse->[signRequest],updateGuestOs->[getId],stopSystemVM->[getId,stopSecondaryStorageVm,stopConsoleProxy],searchForClusters->[getId],getVMPassword->[getId,getCaller],listStoragePoolsForMigrationOfVolume->[getHypervisorType,getId],getConsoleAccessUrlRoot->[getConsoleProxyForVm],findDetail->[findDetail],stopSecondaryStorageVm->[getId],cleanupDownloadUrlsInZone->[getId],generateRandomPassword->[generateRandomPassword],deleteEvents->[getId],addGuestOsMapping->[getId],destroySystemVM->[getId,destroyConsoleProxy,destroySecondaryStorageVm],updateHostPassword->[doInTransactionWithoutResult->[getId,findDetail],getHypervisorType],getStorageUsed->[getId],listGuestOSMappingByCriteria->[getId],uploadCertificate->[getId],listGuestOSCategoriesByCriteria->[getId],getHypervisorType->[getHypervisorType,getId],searchForPods->[getId],listGuestOSByCriteria->[getId],rebootSystemVM->[rebootSecondaryStorageVm,getId,rebootConsoleProxy],upgradeSystemVM->[getHypervisorType,getId],listHostsForMigrationOfVM->[getHypervisorType,searchForServers,getId,findDetail],updateHostsInCluster->[doInTransactionWithoutResult->[getId,findDetail]],removeGuestOsMapping->[getId,removeGuestOsMapping],updateGuestOsMapping->[getId],removeGuestOs->[getId],listCapacities->[getId],EventPurgeTask->[runInContext->[getId]],archiveEvents->[getId,archiveEvents],upgradeStoppedSystemVm->[getCaller],searchForServers->[getHypervisorType,searchForServers,getId],setParameters->[setParameters,getId],cleanupVMReservations->[cleanupVMReservations],hasSuitablePoolsForVolume->[getHypervisorType,getId],buildParameters->[getId],update
HypervisorCapabilities->[getId],addGuestOs->[getId],searchForIPAddresses->[getId],getVncPort->[getId],findAllSuitableStoragePoolsForVm->[getId],searchForSystemVm->[setParameters,getId],destroySecondaryStorageVm->[cleanupDownloadUrlsInZone]]] | search for public ip addresses. find the node in the network and the domain private int ipIndex = - 1 ; Checks if the specified virtual network is allocated or not. This method is called from the scan engine to find a . | @weizhouapache If _isAllocated_ is hardcoded in both the _buildParameters()_ calls, where exactly we use the 'allocatedonly' parameter from listPublicIpAddresses API cmd. |
@@ -250,7 +250,7 @@ const listingSchemaMetadata = {
{
schema: 'forRent-farmGarden_1.0.0.json',
translationName: {
- id: 'schema.farmGarden_1',
+ id: 'schema.farmGarden',
defaultMessage: 'Farm and Garden'
}
},
| [const,export,default] | schema - schema for the last announcing of a resource Demonstrates how to translate a missing header from a schema to a human readable string. | @jordajm this was probably a typo? |
@@ -351,7 +351,12 @@ func storageCondition(ruleType storage.RuleType, apiCondition *api.Condition) (s
return storage.Condition{}, err
}
- return storage.NewCondition(ruleType, apiCondition.Values, condAttr, storage.MemberOf)
+ condOp, err := fromAPIProjectRuleConditionOperators(apiCondition.Operator)
+ if err != nil {
+ return storage.Condition{}, err
+ }
+
+ return storage.NewCondition(ruleType, apiCondition.Values, condAttr, condOp)
}
// we want to reserve the option to return an error in this conversion
| [ListProjects->[ListProjects],DeleteProject->[DeleteProject],CreateProject->[CreateProject],CreateRule->[CreateRule],GetProject->[GetProject],UpdateProject->[UpdateProject]] | CreateRule creates a new rule in the store. storageCondition returns a storage. Condition for the given rule type and api. Condition. | shorten to `fromAPIConditionOperators`? |
@@ -1128,6 +1128,13 @@ void Server::handleCommand_Damage(NetworkPacket* pkt)
}
if (g_settings->getBool("enable_damage")) {
+ if (playerSAO->isDead()) {
+ verbosestream << "Server::ProcessData(): Info: "
+ "Ignoring damage as player " << player->getName()
+ << " is already dead." << std::endl;
+ return;
+ }
+
actionstream << player->getName() << " damaged by "
<< (int)damage << " hp at " << PP(playersao->getBasePosition() / BS)
<< std::endl;
| [handleCommand_Interact->[process_PlayerPos],handleCommand_PlayerPos->[process_PlayerPos]] | Damage a packet from a remote player. | note if i finish my pr on damage only serverside this will become obsolete |
@@ -57,7 +57,7 @@ class AssignmentsController < ApplicationController
def peer_review
assignment = Assignment.find(params[:id])
@assignment = assignment.is_peer_review? ? assignment : assignment.pr_assignment
- if @assignment.nil? || @assignment.is_hidden
+ if @assignment.nil? || @assignment.is_hidden || @section_hidden
render 'shared/http_status',
formats: [:html],
locals: {
| [AssignmentsController->[start_timed_assignment->[update],new->[new],create->[new],set_boolean_graders_options->[update]]] | shows a page with a nail node if the user has not submitted any of the. | `@section_hidden` is not defined here. Every time the client sends a new request, a new controller instance is created. So even though this is defined this in the `show` method, it will not be available here. |
@@ -77,12 +77,18 @@ static OSSL_FUNC_core_get_libctx_fn *c_get_libctx = NULL;
typedef struct fips_global_st {
const OSSL_CORE_HANDLE *handle;
+ SELF_TEST_POST_PARAMS selftest_params;
+ int fips_security_checks;
+ const char *fips_security_check_option;
} FIPS_GLOBAL;
static void *fips_prov_ossl_ctx_new(OSSL_LIB_CTX *libctx)
{
FIPS_GLOBAL *fgbl = OPENSSL_zalloc(sizeof(*fgbl));
+ fgbl->fips_security_checks = 1;
+ fgbl->fips_security_check_option = "1";
+
return fgbl;
}
| [No CFG could be retrieved] | These functions are used by the core of the library. Creates a new Handle from the given OSSL context. | do null check here |
@@ -48,5 +48,5 @@ class AllineaForge(Package):
return url + "allinea-forge-%s-Redhat-6.0-x86_64.tar" % version
def install(self, spec, prefix):
- textinstall = which('textinstall.sh')
+ textinstall = which(join_path(self.stage.source_path, 'textinstall.sh'))
textinstall('--accept-licence', prefix)
| [AllineaForge->[install->[which,textinstall],version]] | Installs the given spec. | Adding `./` should also work. Probably better to use `Executable` instead of `which` since we aren't searching `PATH`, we know exactly where it is already. |
@@ -69,11 +69,10 @@ public final class AsciiString extends ByteString implements CharSequence, Compa
int length2 = o2.length();
int minLength = Math.min(length1, length2);
if (a1 != null && a2 != null) {
- byte[] thisValue = a1.value;
- byte[] thatValue = a2.value;
- for (int i = 0; i < minLength; i++) {
- byte v1 = thisValue[i];
- byte v2 = thatValue[i];
+ final int a1Len = minLength + a1.arrayOffset();
+ for (int i = a1.arrayOffset(), j = a2.arrayOffset(); i < a1Len; i++, j++) {
+ byte v1 = a1.value[i];
+ byte v2 = a2.value[j];
if (v1 == v2) {
continue;
}
| [AsciiString->[contains->[indexOf],lastIndexOf->[b2c,lastIndexOf,charAt],of->[AsciiString],equals->[equals],compareToIgnoreCase->[compare],toUpperCase->[toUpperCase,AsciiString],toString->[toString],copy->[b2c],subSequence->[subSequence],compare->[compare],concat->[charAt,AsciiString,c2b],compareTo->[charAt,b2c],toAsciiStringArray->[AsciiString],startsWith->[regionMatches,startsWith],replace->[AsciiString,c2b],toCharArray->[toCharArray,b2c],regionMatches->[charAt,toLowerCase,regionMatches,b2c],parseChar->[charAt],matches->[matches],contentEquals->[regionMatches],equalsIgnoreCase->[charAt,equalsIgnoreCase,toLowerCase,b2c],trim->[AsciiString],split->[AsciiString,split,charAt],indexOf->[charAt,b2c,indexOf],toLowerCase->[toLowerCase,AsciiString],AsciiString]] | Compares two CharSequence objects. length1 - length2. | @Scottmitch can you add a benchmark that shows that keeping this large amounts of code is significantly faster than just using a single loop with `charAt`? |
@@ -13,7 +13,7 @@ return array(
'thewire' => "Микроблог",
'item:object:thewire' => "Микроблог",
- 'collection:object:thewire' => 'Wire posts',
+ 'collection:object:thewire' => 'Записи микроблога',
'collection:object:thewire:all' => "Все записи микроблога",
'collection:object:thewire:owner' => "Микроблог пользователя %s",
'collection:object:thewire:friends' => "Микроблоги друзей",
| [No CFG could be retrieved] | <?php uration of menu items and titles Собщении � Поль боль� Помикробл. | Don't make non English translations. These will get lost when we merge the translations from Transifex. Just add the English translations and after the merge translate it on Transifex, when we release all translations get merged. |
@@ -1398,7 +1398,9 @@ class Variable(object):
# TODO(minqiyang): Support lod_level in dygraph mode
if in_dygraph_mode():
raise Exception("Dygraph model DO NOT supprt lod")
- return self.desc.lod_level()
+
+ if self.type == core.VarDesc.VarType.LOD_TENSOR:
+ return self.desc.lod_level()
@property
def type(self):
| [cuda_places->[is_compiled_with_cuda,_cuda_ids],_varbase_creator->[convert_np_dtype_to_dtype_],Program->[_construct_from_desc->[Program,_sync_with_cpp,Block],__repr__->[__str__],parse_from_string->[Program,_sync_with_cpp,Block],to_string->[_debug_string_,to_string],_version->[_version],_prune_with_input->[Program,_sync_with_cpp,Block],_copy_param_info_from->[global_block],__init__->[Block],_create_block->[block,Block,current_block],_rollback->[current_block],_copy_data_info_from->[global_block],num_blocks->[num_blocks],_prune->[Program,_sync_with_cpp,Block],_sync_with_cpp->[Block,_sync_with_cpp,num_blocks],_inference_optimize->[type,_remove_var,has_attr,Program,_set_attr,Block,name,op,_remove_op,_sync_with_cpp],clone->[Program,_sync_with_cpp,_copy_param_info_from,Block],__str__->[to_string]],program_guard->[switch_main_program,switch_startup_program],_dygraph_only_->[__impl__->[in_dygraph_mode]],name_scope->[parent,in_dygraph_mode,child],NameScope->[child->[NameScope]],VariableMetaClass->[__instancecheck__->[in_dygraph_mode]],IrNode->[var->[var],append_output->[append_output],node_type->[node_type],remove_output->[remove_output],id->[id],append_input->[append_input],is_ctrl_var->[is_ctrl_var],is_var->[is_var],remove_input->[remove_input],clear_inputs->[clear_inputs],name->[name],op->[op],inputs->[IrNode],outputs->[IrNode],is_op->[is_op],clear_outputs->[clear_outputs]],is_compiled_with_cuda->[is_compiled_with_cuda],IrGraph->[create_op_node->[create_op_node,set_type,_update_desc_attr,name,IrOpNode],update_output_link->[append_output,remove_output,rename_output,append_input,remove_input,name],create_var_node->[create_var_node,set_type,set_shape,IrVarNode],all_var_nodes->[is_var,IrVarNode],build_adjacency_list->[build_adjacency_list,IrNode],_update_desc_attr->[_set_attr],resolve_hazard->[resolve_hazard,is_op,op],_find_node_by_name->[name],all_persistable_nodes->[var,is_var,IrVarNode],link_to->[append_output,append_input],all_op_nodes->[is_op,IrOpNode],has_circle->[has
_circle],update_input_link->[append_output,remove_output,append_input,remove_input,name,rename_input],create_persistable_node->[set_type,set_shape,IrVarNode],create_var_node_from_desc->[create_var_node,IrVarNode],draw->[safe_remove_nodes,_convert_to_pdf,all_var_nodes,is_ctrl_var,all_op_nodes],create_op_node_from_desc->[create_op_node,IrOpNode],graph_num->[graph_num],topology_sort->[topology_sort,IrNode],all_nodes->[IrNode],clone->[IrGraph,clone]],_getitem_impl_->[get_new_list_tensor->[fill_constant],get_new_list_tensor,contain_var,fill_constant],ParamBase->[__str__->[to_string],__init__->[convert_np_dtype_to_dtype_,_dygraph_tracer]],Parameter->[__str__->[to_string],to_string->[to_string],__init__->[__init__]],Variable->[type->[type,in_dygraph_mode],lod_level->[lod_level,in_dygraph_mode],persistable->[in_dygraph_mode,persistable],_detectContinuesSlice->[_slice_indices],to_string->[_debug_string_,in_dygraph_mode],_sliceVar->[_cloneVar],shape->[in_dygraph_mode,shape],__init__->[convert_np_dtype_to_dtype_],stop_gradient->[in_dygraph_mode],_concatVar->[_cloneVar],__getitem__->[_getitem_impl_],dtype->[dtype,in_dygraph_mode],name->[name,in_dygraph_mode],_sliceAndConcatVar->[_sliceVar,_cloneVar,_slice_indices,_concatVar],_reconstructSliceinfo->[_detectEllipsis]],OpProtoHolder->[update_op_proto->[get_all_op_protos],__init__->[get_all_op_protos]],_full_name_scope->[parent,name],get_all_op_protos->[get_all_op_protos],cuda_pinned_places->[is_compiled_with_cuda,_cuda_ids],cpu_places->[_cpu_num],ParameterMetaClass->[__instancecheck__->[in_dygraph_mode]],IrVarNode->[type->[var],persistable->[var],set_shape->[var],shape->[var],__init__->[is_var],dtype->[var]],_get_var->[default_main_program,global_block],_dygraph_not_support_->[__impl__->[in_dygraph_mode]],IrOpNode->[set_attr->[_update_desc_attr],output_arg_names->[op],set_type->[op],rename_output->[op],__init__->[is_op],output->[op],_update_desc_attr->[_set_attr,op],inputs->[IrVarNode],outputs->[IrVarNode],input_arg_names->[op],re
name_input->[op],input->[op]],dtype_is_floating->[convert_np_dtype_to_dtype_],Operator->[attr_type->[attr_type],to_string->[_debug_string_],_block_attr->[_block_attr_id],output->[output],_rename_input->[_rename_input],has_attr->[has_attr],_blocks_attr_ids->[_blocks_attr_ids],output_names->[output_names],all_attrs->[attr,attr_type,_block_attr,_blocks_attr],_update_desc_attr->[_set_attr],__str__->[to_string],type->[type,in_dygraph_mode],output_arg_names->[output_arg_names],__init__->[type,in_dygraph_mode,_full_name_scope,find_name,instance],attr->[attr],attr_names->[attr_names],input->[input],_block_attr_id->[_block_attr_id],_rename_output->[_rename_output],input_names->[input_names],input_arg_names->[input_arg_names]],require_version->[version_cmp],Block->[var->[type],_remove_var->[_remove_var],create_parameter->[_is_inited_by,in_dygraph_mode],_find_var_recursive->[var],_clone_variable->[create_var],to_string->[_debug_string_,to_string],append_op->[Operator,_dygraph_tracer,append_op,in_dygraph_mode],_copy_param_info_from->[iter_parameters,in_dygraph_mode],_set_forward_block_idx->[_set_forward_block_idx],_rename_var->[has_var,var,type,in_dygraph_mode,shape,_rename_var,dtype,Variable],_insert_op->[Operator,_insert_op],_prepend_op->[Operator,_dygraph_tracer,in_dygraph_mode,_prepend_op],_sync_with_cpp->[has_var,type,Operator,name,create_var],_remove_op->[_remove_op],create_var->[_varbase_creator,in_dygraph_mode,Variable],_var_recursive->[_find_var_recursive],__str__->[to_string]],load_op_library->[instance,load_op_library],NameScope,Program] | Returns a function to create a new variable with a lod level. | self.type != core.VarDesc.VarType.LOD_TENSOR |
@@ -51,6 +51,7 @@ GOOD_SOURCE = FileContent(f"{PACKAGE}/test_good.py", b"def test():\n pass\n")
BAD_SOURCE = FileContent(f"{PACKAGE}/test_bad.py", b"def test():\n assert False\n")
PY3_ONLY_SOURCE = FileContent(f"{PACKAGE}/test_py3.py", b"def test() -> None:\n pass\n")
LIBRARY_SOURCE = FileContent(f"{PACKAGE}/library.py", b"def add_two(x):\n return x + 2\n")
+BINARY_SOURCE = FileContent(f"{PACKAGE}/say_hello.py", b"print('Hello, test!')")
def create_python_library(
| [test_conftest_handling->[create_test_target,run_pytest],test_respects_passthrough_args->[create_test_target,run_pytest],test_single_failing_test->[create_test_target,run_pytest],test_coverage->[create_test_target,run_pytest],test_absolute_import->[create_test_target,create_python_library,run_pytest],test_thirdparty_transitive_dep->[create_test_target,create_python_library,setup_thirdparty_dep,run_pytest],test_transitive_dep->[create_test_target,create_python_library,run_pytest],test_execution_slot_variable->[create_test_target,run_pytest],test_uses_correct_python_version->[create_test_target,run_pytest],test_junit->[create_test_target,run_pytest],test_mixed_sources->[create_test_target,run_pytest],test_single_passing_test->[create_test_target,run_pytest],test_thirdparty_dep->[create_test_target,setup_thirdparty_dep,run_pytest],test_relative_import->[create_test_target,create_python_library,run_pytest],test_extra_env_vars->[create_test_target,run_pytest]] | Creates a python library from a list of source files. | Nit: better to inline this, as it's only used by one very specific test. |
@@ -301,6 +301,7 @@ Discourse::Application.routes.draw do
get "session/current" => "session#current"
get "session/csrf" => "session#csrf"
get "composer_messages" => "composer_messages#index"
+ post "composer/parse_html" => "composer#parse_html"
resources :static
post "login" => "static#enter", constraints: { format: /(json|html)/ }
| [new,redirect,mount,put,draw,resources,member,root,scope,constraints,post,require,each,match,development?,delete,namespace,collection,merge,get,each_with_index,require_dependency] | View of all the email routes. Get a list of all users. | A controller test or 2 please! What if the html content is 3 megabytes? can this take out the parser? |
@@ -189,7 +189,7 @@ static void get_language_names(GList *languages)
goto end;
}
- char *saved_locale = strdup(setlocale(LC_ALL, NULL));
+ saved_locale = strdup(setlocale(LC_ALL, NULL));
int n_elements = json_reader_count_elements(reader);
for(int i = 0; i < n_elements; i++)
| [No CFG could be retrieved] | Reads the json file and returns the object with the keys name and locale. region Language - specific functions. | And here we know that saved_locale cannot be NULL. |
@@ -129,6 +129,7 @@ void AddLoggerToPython(pybind11::module& m) {
logger_scope.def_static("Print", printDefault); // raw_function(printDefault,1))
logger_scope.def_static("PrintInfo",printInfo); // raw_function(printInfo,1))
logger_scope.def_static("PrintWarning", printWarning); //raw_function(printWarning,1))
+ logger_scope.def_static("Flush", flush);
logger_scope.def_static("GetDefaultOutput", &Logger::GetDefaultOutputInstance, return_value_policy::reference); //_internal )
;
| [printInfo->[printImpl],printWarning->[printImpl],printDefault->[printImpl]] | Adds a Logger to a Kratos module. | Why do you need an auxiliary function for this? |
@@ -94,6 +94,7 @@ import reactor.core.publisher.Mono;
public class ModuleFlowProcessingPhase
extends NotificationFiringProcessingPhase<ModuleFlowProcessingPhaseTemplate> implements Initialisable {
+ private static final String FLOW_BACK_PRESSURE_ERROR_TYPE_NOT_FOUND = "FLOW_BACK_PRESSURE error type not found";
private ErrorType sourceResponseGenerateErrorType;
private ErrorType sourceResponseSendErrorType;
private ErrorType sourceErrorResponseGenerateErrorType;
| [ModuleFlowProcessingPhase->[FlowProcessor->[getAnnotation->[getAnnotation],getLocation->[getLocation],getRootContainerLocation->[getRootContainerLocation],getAnnotations->[getAnnotations]]]] | Creates a new instance of the ModuleFlowProcessingPhase class. | who uses this? |
@@ -48,10 +48,13 @@ if (isset($_REQUEST['change_column'])) {
if (isset($_REQUEST['do_save_data'])) {
$regenerate = PMA_updateColumns($db, $table);
if ($regenerate) {
+ $message = PMA_Message::error('Error in the update SQL Query');
// @todo: find in which situation this happens, then
// do something appropriate
} else {
// continue to show the table's structure
+ $message = PMA_Message::success(__('Table %1$s has been altered successfully '));
+ $message->addParam($table);
unset($_REQUEST['selected']);
unset($_REQUEST['true_selected']);
}
| [isAjax,getScripts,addFile,getColumns,isUnique,addHTML,getChoice,hasColumn,getHeader] | Handle the user input for the structure action This function is used to determine the type of the group of rows to submit. | Not required to add message here again. |
@@ -161,9 +161,12 @@ func (ot *OneTime) Stop() {
// RunJobAt wait until the Stop() function has been called on the run
// or the specified time for the run is after the present time.
func (ot *OneTime) RunJobAt(initr models.Initiator, job models.JobSpec) {
+ if !initr.Time.Valid {
+ logger.Panicf("RunJobAt must have initiator with valid run at time: %v", initr)
+ }
select {
case <-ot.done:
- case <-ot.Clock.After(initr.Time.DurationFromNow()):
+ case <-ot.Clock.After(utils.DurationFromNow(initr.Time.Time)):
if err := ot.Store.MarkRan(&initr, true); err != nil {
logger.Error(err.Error())
return
| [Stop->[Wait,Stop,Lock,Unlock],AddJob->[Ended,AddFunc,Error,Errorw,InitiatorsFor,addJob,RLock,Now,RunJobAt,RUnlock],addJob->[AddJob],Start->[Unlock,New,addJob,Start,Lock,Jobs],RunJobAt->[After,DurationFromNow,MarkRan,Error],New] | RunJobAt runs a job at the given initiator. | Seems not very confident. We check this on creation of the initiator, do we need this check again? Could we use a time type that doesn't allow for null values? |
@@ -241,9 +241,9 @@ public class SecurityConfiguration extends WebSecurityConfigurerAdapter {
.and()
.headers()
<%_ if (clientTheme !== 'none') { _%>
- .contentSecurityPolicy("default-src 'self'; frame-src 'self' data:; script-src 'self' 'unsafe-inline' 'unsafe-eval' https://storage.googleapis.com; style-src 'self' https://fonts.googleapis.com 'unsafe-inline'; img-src 'self' data:; font-src 'self' https://fonts.gstatic.com data:")
+ .contentSecurityPolicy(environment.getProperty("security.headers.content-security-policy","default-src 'self'; frame-src 'self' data:; script-src 'self' 'unsafe-inline' 'unsafe-eval' https://storage.googleapis.com; style-src 'self' https://fonts.googleapis.com 'unsafe-inline'; img-src 'self' data:; font-src 'self' https://fonts.gstatic.com data:"))
<%_ } else { _%>
- .contentSecurityPolicy("default-src 'self'; frame-src 'self' data:; script-src 'self' 'unsafe-inline' 'unsafe-eval' https://storage.googleapis.com; style-src 'self' 'unsafe-inline'; img-src 'self' data:; font-src 'self' data:")
+ .contentSecurityPolicy(environment.getProperty("security.headers.content-security-policy","default-src 'self'; frame-src 'self' data:; script-src 'self' 'unsafe-inline' 'unsafe-eval' https://storage.googleapis.com; style-src 'self' 'unsafe-inline'; img-src 'self' data:; font-src 'self' data:"))
<%_ } _%>
.and()
.referrerPolicy(ReferrerPolicyHeaderWriter.ReferrerPolicy.STRICT_ORIGIN_WHEN_CROSS_ORIGIN)
| [No CFG could be retrieved] | A JSP - like API that allows to access the user s authentication key. This method is called by the application code to register and process requests. It is called by. | Are you sure this works? Does it require a `spring.` prefix? |
@@ -372,7 +372,7 @@ func (a *APIServer) runJob(ctx context.Context, jobInfo *pps.JobInfo, pool *pool
}
failed := false
- limiter := limit.New(a.numWorkers)
+ limiter := limit.New(a.numWorkers * 10)
// process all datums
df, err := newDatumFactory(ctx, pfsClient, jobInfo.Input)
if err != nil {
| [getTreeFromTag->[Bytes,WriteFromStreamingBytesClient,GetTag,Deserialize],getMasterLogger->[SetOutput,SetFlags],putTree->[putObject,Finish,Serialize],putObject->[CloseAndRecv,Chunk,Send,PutObject],scaleDownWorkers->[PipelineRcName,Get,ReplicationControllers,Update],runJob->[NewHashTree,InputCommits,InspectJob,Unlock,InspectCommit,BuildCommit,NewInfiniteBackOff,Now,Close,NewClientFromURLAndSecret,Clone,Merge,PushObj,Acquire,NewFile,Done,Put,updateJobState,New,Go,putObject,SetMaxConcurrentStreams,Lock,Errorf,Len,Process,Logf,Wait,Since,Datum,InspectFile,Release,getTreeFromTag,Join,NewSTM,InspectPipeline,Get,PutFile,MarshalToString,ReadWrite,WithCancel,putTree,TrimPrefix,Parse,RetryNotify],scaleUpWorkers->[GetExpectedNumWorkers,ReplicationControllers,PipelineRcName,Get,Update],jobSpawner->[InspectJob,InspectCommit,AfterFunc,Close,Clone,PipelineRcName,GetByIndex,Done,CreateJob,DurationFromProto,VisitInput,scaleUpWorkers,Errorf,newBranchSetFactory,Logf,PachDialOptions,Chan,Next,scaleDownWorkers,NewPool,GetExpectedNumWorkers,ReadOnly,runJob],master->[getMasterLogger,ReadWrite,Join,Unlock,NewSTM,NewDLock,Background,WithCancel,NewInfiniteBackOff,Lock,RetryNotify,Get,Logf,jobSpawner,Put],Percentile,DurationProto,Error,Mean,Contains,DurationFromProto,NewCommit,Now,TimestampProto,StandardDeviation] | runJob runs a job in a separate goroutine This function is used to find the parent job and the new branch. This function is used to update the total number of processed and skipped data in the process. | Not sure if I'm missing something, but why is this 10 while the other number is 5? |
@@ -556,6 +556,17 @@ func (c *coordinator) collectHotSpotMetrics() {
hotSpotStatusGauge.WithLabelValues(storeAddress, storeLabel, "hot_read_region_as_leader").Set(0)
}
+ stat, ok = status.AsPeer[storeID]
+ if ok {
+ hotSpotStatusGauge.WithLabelValues(storeAddress, storeLabel, "total_read_bytes_as_peer").Set(stat.TotalLoads[statistics.RegionReadBytes])
+ hotSpotStatusGauge.WithLabelValues(storeAddress, storeLabel, "total_read_keys_as_peer").Set(stat.TotalLoads[statistics.RegionReadKeys])
+ hotSpotStatusGauge.WithLabelValues(storeAddress, storeLabel, "hot_read_region_as_peer").Set(float64(stat.Count))
+ } else {
+ hotSpotStatusGauge.WithLabelValues(storeAddress, storeLabel, "total_read_bytes_as_peer").Set(0)
+ hotSpotStatusGauge.WithLabelValues(storeAddress, storeLabel, "total_read_keys_as_peer").Set(0)
+ hotSpotStatusGauge.WithLabelValues(storeAddress, storeLabel, "hot_read_region_as_peer").Set(0)
+ }
+
if infl := pendings[storeID]; infl != nil {
hotSpotStatusGauge.WithLabelValues(storeAddress, storeLabel, "read_pending_influence_byte_rate").Set(infl.Loads[statistics.ByteDim])
hotSpotStatusGauge.WithLabelValues(storeAddress, storeLabel, "read_pending_influence_key_rate").Set(infl.Loads[statistics.KeyDim])
| [Schedule->[Schedule],run->[drivePushOperator,patrolRegions]] | collectHotSpotMetrics collects hot spot metrics Collect hot read and write metrics Updates metrics for hot read requests. | metrics also need to be updated |
@@ -25,7 +25,8 @@
<div class="wrapLeft">
<p><%= I18n.t(:welcome_to_markus, {:user_name => @current_user.first_name}) %></p>
- <% @assignments.each do |a| %>
+ <% # Display assignments and sort them in ASC order by their latest due date %>
+ <% @assignments.sort_by{|a| a.latest_due_date()}.each do |a| %>
<%= render :partial => "assignment_summary", :locals => { :assignment => a } %>
<% end %>
| [No CFG could be retrieved] | Renders the list of assignments for the nag. | Would it be better to sort `@assignments` in the controller or in the model instead of in the view? |
@@ -231,8 +231,10 @@ class NavierStokesEmbeddedMonolithicSolver(FluidSolver):
"maximum_iterations": 7,
"echo_level": 0,
"time_order": 2,
+ "time_scheme": "bdf2",
"compute_reactions": false,
"reform_dofs_at_each_step": false,
+ "consider_periodic_conditions": false,
"relative_velocity_tolerance": 1e-3,
"absolute_velocity_tolerance": 1e-5,
"relative_pressure_tolerance": 1e-3,
| [NavierStokesEmbeddedMonolithicSolver->[__UpdateFMALEStepCounter->[_is_fm_ale_step],__CreateDistanceModificationProcess->[__GetDistanceModificationDefaultSettings],_set_virtual_mesh_values->[_get_mesh_moving_util,_is_fm_ale_step],__init__->[EmbeddedFormulation],_set_embedded_formulation->[SetProcessInfo],__UndoFMALEOperations->[_get_mesh_moving_util,_is_fm_ale_step],ValidateSettings->[GetDefaultSettings,_get_fm_ale_solver_default_settings],_get_fm_ale_solver_default_settings->[_get_fm_ale_explicit_default_settings,_get_fm_ale_implicit_default_settings],_do_fm_ale_operations->[_get_mesh_moving_util,_is_fm_ale_step]]] | Returns the default settings for the KratosMultiphysics class. Missing parameters are not allowed in the solver. | I am wondering: if the user specifies this in the input but the element integrates in time, should this be an error? Or does the element also have a different way for the time-integration? |
@@ -386,8 +386,11 @@ static void x509v3_cache_extensions(X509 *x)
BASIC_CONSTRAINTS_free(bs);
x->ex_flags |= EXFLAG_BCONS;
}
+ else if (i != -1) {
+ x->ex_flags |= EXFLAG_INVALID;
+ }
/* Handle proxy certificates */
- if ((pci = X509_get_ext_d2i(x, NID_proxyCertInfo, NULL, NULL))) {
+ if ((pci = X509_get_ext_d2i(x, NID_proxyCertInfo, &i, NULL))) {
if (x->ex_flags & EXFLAG_CA
|| X509_get_ext_by_NID(x, NID_subject_alt_name, -1) >= 0
|| X509_get_ext_by_NID(x, NID_issuer_alt_name, -1) >= 0) {
| [No CFG could be retrieved] | - - - - - - - - - - - - - - - - - - Handle key usage and key usage. | Style nit: on previous line. |
@@ -2035,6 +2035,11 @@ class Operator(object):
del op_attrs[role_var_name]
if len(self.desc.type()) != 0:
+ # NOTE(Aurelius84): prog.clone() will lead that var.op is always None,
+ # we add this to fix the problem.
+ for arg in self.desc.output_arg_names():
+ if block.has_var(arg) and block.var(arg).op is None:
+ block.var(arg).op = self
return
if type is None:
raise ValueError(
| [is_compiled_with_rocm->[is_compiled_with_rocm],_set_expected_place->[_set_dygraph_tracer_expected_place],cuda_places->[is_compiled_with_cuda,_cuda_ids],_dygraph_place_guard->[_set_dygraph_tracer_expected_place],_get_paddle_place->[is_compiled_with_cuda,is_compiled_with_xpu],_varbase_creator->[convert_np_dtype_to_dtype_],Program->[_construct_from_desc->[Program,_sync_with_cpp,Block],to_string->[type,_debug_string_,to_string],_copy_data_info_from->[type,var,num_blocks],clone->[Program,_copy_param_info_from,_sync_with_cpp,Block],_prune_with_input->[type,_copy_param_info_from,_is_optimize_op,Program,Block,_sync_with_cpp],all_parameters->[all_parameters],set_state_dict->[type,list_vars],_to_readable_code->[type,_to_readable_code],_sync_with_cpp->[Block,_sync_with_cpp,num_blocks],__str__->[_to_readable_code],__repr__->[__str__],parse_from_string->[Program,_sync_with_cpp,Block],_version->[_version],_copy_param_info_from->[type,global_block],state_dict->[condition->[is_belong_to_optimizer,is_parameter],is_persistable->[type],is_belong_to_optimizer->[is_persistable],type,list_vars],__init__->[Block],_create_block->[block,Block,current_block],_rollback->[current_block],num_blocks->[num_blocks],_remove_training_info->[Program,_sync_with_cpp,Block],_inference_optimize->[type,_remove_var,remove_output,has_attr,Program,_set_attr,Block,name,op,_remove_op,_sync_with_cpp],random_seed->[type],_copy_dist_param_info_from->[type]],set_flags->[_global_flags],program_guard->[switch_main_program,switch_startup_program],_dygraph_only_->[__impl__->[in_dygraph_mode]],name_scope->[parent,in_dygraph_mode,child],NameScope->[child->[NameScope]],is_compiled_with_xpu->[is_compiled_with_xpu],VariableMetaClass->[__instancecheck__->[in_dygraph_mode]],IrNode->[var->[var],append_output->[append_output],node_type->[node_type],remove_output->[remove_output],id->[id],append_input->[append_input],is_ctrl_var->[is_ctrl_var],is_var->[is_var],remove_input->[remove_input],clear_inputs->[clear_inputs],name->[
name],op->[op],inputs->[IrNode],outputs->[IrNode],is_op->[is_op],clear_outputs->[clear_outputs]],is_compiled_with_cuda->[is_compiled_with_cuda],_static_only_->[__impl__->[in_dygraph_mode]],IrGraph->[create_op_node->[create_op_node,set_type,_update_desc_attr,name,IrOpNode],update_output_link->[append_output,remove_output,rename_output,append_input,remove_input,name],create_var_node->[create_var_node,set_type,set_shape,IrVarNode],all_var_nodes->[is_var,IrVarNode],build_adjacency_list->[build_adjacency_list,IrNode],_update_desc_attr->[_set_attr],resolve_hazard->[resolve_hazard,is_op,op],_find_node_by_name->[name],all_persistable_nodes->[var,is_var,IrVarNode],link_to->[append_output,append_input],all_op_nodes->[is_op,IrOpNode],has_circle->[has_circle],update_input_link->[append_output,remove_output,append_input,remove_input,name,rename_input],create_persistable_node->[set_type,set_shape,IrVarNode],create_var_node_from_desc->[create_var_node,IrVarNode],draw->[safe_remove_nodes,_convert_to_pdf,all_var_nodes,is_ctrl_var,all_op_nodes],create_control_dep_var->[create_control_dep_var,IrVarNode],create_op_node_from_desc->[create_op_node,IrOpNode],graph_num->[graph_num],topology_sort->[topology_sort,IrNode],all_nodes->[IrNode],clone->[IrGraph,clone]],ParamBase->[trainable->[type],__deepcopy__->[id,ParamBase],_copy_to->[ParamBase],__init__->[convert_np_dtype_to_dtype_]],Parameter->[__str__->[_to_readable_code],to_string->[to_string],__init__->[__init__]],Variable->[type->[type],lod_level->[lod_level],persistable->[persistable],_detectContinuesSlice->[_slice_indices],get_value->[type],to_string->[_debug_string_],_sliceVar->[_cloneVar],shape->[shape],__init__->[convert_np_dtype_to_dtype_],stop_gradient->[stop_gradient],_concatVar->[_cloneVar],set_value->[type,shape],dtype->[dtype],name->[name],is_parameter->[is_parameter],_sliceAndConcatVar->[_sliceVar,_cloneVar,_slice_indices,_concatVar],_reconstructSliceinfo->[_detectEllipsis]],OpProtoHolder->[update_op_proto->[get_all_op_protos
],__init__->[get_all_op_protos]],_full_name_scope->[parent,name],get_all_op_protos->[get_all_op_protos],cuda_pinned_places->[is_compiled_with_cuda,_cuda_ids],cpu_places->[_cpu_num],ParameterMetaClass->[__instancecheck__->[in_dygraph_mode]],IrVarNode->[type->[var],persistable->[var],set_shape->[var],shape->[var],__init__->[is_var],dtype->[var]],_get_var->[default_main_program,global_block],_dygraph_not_support_->[__impl__->[in_dygraph_mode]],IrOpNode->[set_attr->[_update_desc_attr],output_arg_names->[op],set_type->[op],rename_output->[op],__init__->[is_op],output->[op],_update_desc_attr->[_set_attr,op],inputs->[IrVarNode],outputs->[IrVarNode],input_arg_names->[op],rename_input->[op],input->[op]],_get_paddle_place_list->[_get_paddle_place],dtype_is_floating->[convert_np_dtype_to_dtype_],Operator->[attr_type->[attr_type],to_string->[_debug_string_],_block_attr->[_block_attr_id],_is_optimize_op->[attr,has_attr],output->[output],_rename_input->[_rename_input],has_attr->[has_attr],_blocks_attr_ids->[_blocks_attr_ids],output_names->[output_names],all_attrs->[attr,attr_type,_block_attr,_blocks_attr],_to_readable_code->[type],_update_desc_attr->[_set_attr],__str__->[_to_readable_code],type->[type],output_arg_names->[output_arg_names],__init__->[type,in_dygraph_mode,_full_name_scope,find_name,instance],attr->[attr],attr_names->[attr_names],input->[input],_block_attr_id->[_block_attr_id],_rename_output->[_rename_output],_is_backward_op->[attr,has_attr],input_names->[input_names],input_arg_names->[input_arg_names]],require_version->[version_cmp],Block->[var->[type],_remove_var->[_remove_var],create_parameter->[_is_inited_by,in_dygraph_mode],_find_var_recursive->[var],_clone_variable->[create_var],to_string->[_debug_string_,to_string],append_op->[Operator,_dygraph_tracer,append_op,in_dygraph_mode],_copy_param_info_from->[iter_parameters,in_dygraph_mode],_set_forward_block_idx->[_set_forward_block_idx],_rename_var->[has_var,var,type,in_dygraph_mode,shape,_rename_var,dtype,Variabl
e],_prepend_op->[Operator,_dygraph_tracer,in_dygraph_mode,_prepend_op],_to_readable_code->[type,_to_readable_code],_sync_with_cpp->[has_var,type,create_parameter,Operator,shape,stop_gradient,dtype,name,is_parameter,create_var],_remove_op->[_remove_op],create_var->[_varbase_creator,in_dygraph_mode,Variable],_var_recursive->[_find_var_recursive],_insert_op_without_sync->[Operator,_insert_op],__str__->[_to_readable_code]],xpu_places->[_xpu_ids,is_compiled_with_xpu],get_flags->[_global_flags],device_guard->[switch_device],NameScope,Program] | Initialize a new object with a single . Get the protocol buffer for a single n - node op. Adds the necessary information to the network descriptor to generate a . Check if a node in the block has a missing node in the block. | why not fix this program in prog.clone? |
@@ -66,8 +66,10 @@ public class DocumentValidationReportJsonWriterTest extends
json.has("has_error").isTrue();
json.has("number").isEquals(2);
json = json.has("violations").length(2);
- json.has(0).properties(4).has("message").isText();
- json.has(1).properties(4).has("message").isText();
+ json.has(0).properties(5).has("message").isText();
+ json.has(1).properties(5).has("message").isText();
+ json.childrenContains("messageKey", "label.schema.constraint.violation.NotNullConstraint.dublincore.title",
+ "label.schema.constraint.violation.PatternConstraint.dublincore.description");
json.childrenContains("invalid_value", null, null);
json.childrenContains("constraint.entity-type", "validation_constraint", "validation_constraint");
json.childrenContains("constraint.name", "NotNullConstraint", "PatternConstraint");
| [DocumentValidationReportJsonWriterTest->[test->[isText,childrenContains,properties,getSchema,DocumentValidationReport,jsonAssert,isEquals,get,getField,PathNode,PatternConstraint,asList,length,isTrue,ConstraintViolation],testNoErrors->[isFalse,properties,DocumentValidationReport,jsonAssert,isEquals,length]]] | Test if schema has validation violation. | Worth testing that we also have the `messageKey` field here? |
@@ -106,6 +106,11 @@ EC2 = None
SECURITY_GROUP_NAME = 'certbot-security-group'
SUBNET_NAME = 'certbot-subnet'
+class Status(object):
+ """Possible statuses of client tests."""
+ PASS = 'pass'
+ FAIL = 'fail'
+
# Boto3/AWS automation functions
#-------------------------------------------------------------------------------
def should_use_subnet(subnet):
| [test_client_process->[install_and_launch_certbot,block_until_instance_ready],create_client_instances->[make_instance],block_until_instance_ready->[block_until_ssh_open],block_until_http_ready,make_security_group,cleanup,should_use_subnet,create_client_instances,block_until_instance_ready,make_instance,config_and_launch_boulder] | Should we use the given subnet? | Why not just use a boolean? `True` -> PASSED, `False` -> FAILED |
@@ -738,6 +738,9 @@ namespace Dynamo.DSEngine
var inputVar = node.GetAstIdentifierForOutputIndex(i).Value;
outputMap[inputVar] = string.Empty;
}
+
+ var previewVar = node.AstIdentifierForPreview.Value;
+ outputMap[previewVar] = string.Empty;
}
}
}
| [NodeToCodeUtils->[VariableNumbering->[BumpID],IdentifierVisitor->[VisitIdentifierListNode->[VisitIdentifierListNode]],MarkConnectivityForNode->[MarkConnectivityForNode],NodeToCodeResult->[VariableNumbering,GetNextName,GetInputOutputMap,ShortNameMapping,VariableRemapping],GetCliques->[MarkConnectivityForNode],ShortNameGenerator->[GetNextName->[BumpID]],ShortNameMapping->[GetNextName]]] | Get input and output map from selected nodes. | Two questions: 1. Does this apply also to `DSFunction` node that has only one output port? 2. The reason you can use `AstIdentifierForPreview`, is it because it happens to be the intermediate variable name that we need for conversion? |
@@ -192,11 +192,14 @@ class Install extends BaseModule
'$title' => $install_title,
'$checks' => self::$installer->getChecks(),
'$pass' => L10n::t('Site settings'),
+ '$hostname' => $configCache->get('config', 'hostname'),
+ '$ssl_policy' => $configCache->get('system', 'ssl_policy'),
+ '$basepath' => $configCache->get('system', 'basepath'),
+ '$urlpath' => $configCache->get('system', 'urlpath'),
'$dbhost' => $configCache->get('database', 'hostname'),
'$dbuser' => $configCache->get('database', 'username'),
'$dbpass' => $configCache->get('database', 'password'),
'$dbdata' => $configCache->get('database', 'database'),
- '$phpath' => $configCache->get('config', 'php_path'),
'$adminmail' => ['config-admin_email',
L10n::t('Site administrator email address'),
$configCache->get('config', 'admin_email'),
| [Install->[init->[getArgumentValue,getBaseURL,isInstall],checkSetting->[set,get],whatNext->[getBaseUrl],post->[getBasePath,getConfigCache,checkDB,getProfiler,installDatabase,createConfig],content->[checkEnvironment,getConfigCache,getChecks,get,getBaseURL]]] | Renders the content of the confirmation page. Renders the install settings page. This function returns a string with a link to the install page. a - > a - > a - > a - > a - > a - >. | Formatting: Indentation isn't consistent with the rest of the list (tabs). |
@@ -58,6 +58,10 @@ class UltraLightweightFaceDetection(Model):
resized_image = resize_image(image, (self.w, self.h))
meta = {'original_shape': image.shape,
'resized_shape': resized_image.shape}
+ if self.input_transform.reverse_input_channels:
+ resized_image = cv2.cvtColor(resized_image, cv2.COLOR_BGR2RGB)
+ resized_image = (resized_image - self.input_transform.mean_values) / self.input_transform.scale_values
+
resized_image = resized_image.transpose((2, 0, 1)) # Change data layout from HWC to CHW
resized_image = resized_image.reshape((self.n, self.c, self.h, self.w))
| [UltraLightweightFaceDetection->[postprocess->[Detection,transpose,nms,zip],_parse_outputs->[RuntimeError,format,items],preprocess->[reshape,transpose,resize_image],__init__->[len,_parse_outputs,super,iter,next]]] | Preprocess the input image for the object. | This code is repeated everywhere; it would make sense to move it to the `InputTransform` class itself (e.g. `resized_image = self.input_transform.apply(resized_image)`). |
@@ -369,13 +369,7 @@ public abstract class Cause {
@Exported(visibility = 3)
public String getUserName() {
- String userName = "anonymous";
- if (userId != null) {
- User user = User.get(userId, false);
- if (user != null)
- userName = user.getDisplayName();
- }
- return userName;
+ return User.get(userId, true).getDisplayName();
}
@Override
| [Cause->[print->[getShortDescription],UserCause->[hashCode->[hashCode],equals->[equals]],UserIdCause->[print->[getUserId,getUserName],getShortDescription->[getUserName],hashCode->[hashCode],equals->[equals]],UpstreamCause->[trim->[UpstreamCause,trim],equals->[equals],print->[print,indent],pointsTo->[pointsTo,equals],indent->[print],hashCode->[hashCode]],RemoteCause->[hashCode->[hashCode],equals->[equals]]]] | Gets the user name. | What if `userId == null`? Is that possible? The former code implies that it is. |
@@ -105,6 +105,7 @@ __all__ = [
'TypeCheckError',
]
+logger = logging.getLogger(__name__)
# This is missing in the builtin types module. str.upper is arbitrary, any
# method on a C-implemented type will do.
| [with_output_types->[annotate->[get_type_hints]],IOTypeHints->[copy->[IOTypeHints],with_defaults->[IOTypeHints]],with_input_types->[annotate->[get_type_hints]],_positional_arg_hints->[_positional_arg_hints],get_type_hints->[set_input_types,IOTypeHints],getcallargs_forhints->[getfullargspec,_unpack_positional_arg_hints,TypeCheckError],getfullargspec->[getfullargspec],_interleave_type_check->[wrapper->[_check_instance_type]],_check_instance_type->[TypeCheckError],_unpack_positional_arg_hints->[TypeCheckError,_unpack_positional_arg_hints],WithTypeHints->[with_output_types->[annotate->[],_get_or_create_type_hints],with_input_types->[annotate->[],_get_or_create_type_hints],get_type_hints->[_get_or_create_type_hints,get_type_hints],__init__->[IOTypeHints],_get_or_create_type_hints->[IOTypeHints]]] | This module defines a type - hinting function that can be used to hint the type of Return an ArgSpec with at least one positional argument and any number of other arguments whose name. | For my education, does this line prepend logging messages with the filename? |
@@ -3383,10 +3383,11 @@ class Archiver:
help='print statistics for the deleted archive')
subparser.add_argument('--cache-only', dest='cache_only', action='store_true',
help='delete only the local cache for the given repository')
- subparser.add_argument('--force', dest='forced',
- action='count', default=0,
+ subparser.add_argument('--force', dest='forced', action='count', default=0,
help='force deletion of corrupted archives, '
'use ``--force --force`` in case ``--force`` does not work.')
+ subparser.add_argument('--keep-security-info', dest='keep_security_info', action='store_true',
+ help='keep the local security info when deleting a repository')
subparser.add_argument('--save-space', dest='save_space', action='store_true',
help='work slower, but using less space')
subparser.add_argument('location', metavar='REPOSITORY_OR_ARCHIVE', nargs='?', default='',
| [main->[get_args,run,Archiver],with_repository->[decorator->[wrapper->[argument]]],Archiver->[do_debug_search_repo_objs->[print_error,print_finding],_export_tar->[item_to_tarinfo->[print_warning,item_content_stream],build_filter,print_warning,build_matcher,item_to_tarinfo],do_prune->[print_error],do_mount->[print_error],do_check->[print_error],do_extract->[build_filter,print_warning,build_matcher],_list_archive->[_list_inner,build_matcher],do_delete->[print_error],run->[_setup_topic_debugging,prerun_checks,get_func,_setup_implied_logging],do_debug_dump_archive->[output->[do_indent],output],do_recreate->[print_error,build_matcher],do_key_export->[print_error],do_benchmark_crud->[measurement_run,test_files],_info_archives->[format_cmdline],_process->[print_file_status,_process,print_warning],do_config->[print_error,list_config],build_parser->[define_archive_filters_group->[add_argument],define_exclusion_group->[define_exclude_and_patterns],define_borg_mount->[define_archive_filters_group,add_argument,define_exclusion_group],add_common_group,define_archive_filters_group,CommonOptions,define_borg_mount,add_argument,process_epilog,define_exclusion_group],do_key_import->[print_error],CommonOptions->[add_common_group->[add_argument->[add_argument]]],do_diff->[build_matcher,print_output,print_warning],do_debug_dump_repo_objs->[decrypt_dump],parse_args->[get_func,resolve,preprocess_args,parse_args,build_parser],do_list->[print_error],do_create->[create_inner->[print_file_status,print_error,print_warning],create_inner],with_repository],main] | Build a parser for a specific node. Add options to the command line. Format a file with a sequence of flags. Add arguments and options for a specific . Command line interface for borg - Deduplicated Backups This command is used to mount a file or archive with a specific . | ... **local** security info ... (see help text of --cache-only). |
@@ -422,7 +422,9 @@ void CardReader::manage_media() {
if (stat) { // Media Inserted
safe_delay(500); // Some boards need a delay to get settled
- mount(); // Try to mount the media
+ if (ENABLED(SD_MOUNT_ON_INSERT) && old_stat == 2) {
+ mount(); // Try to mount the media
+ }
#if MB(FYSETC_CHEETAH, FYSETC_CHEETAH_V12, FYSETC_AIO_II)
reset_stepper_drivers(); // Workaround for Cheetah bug
#endif
| [No CFG could be retrieved] | Manage the SD card events. The main entry point for the network. | This change breaks Marlin's SD card handling. - Before: Mount the SD card if an insert is detected - `SD_MOUNT_ON_INSERT` enabled: Only mount SD here when first booting up - `SD_MOUNT_ON_INSERT` disabled: Never mount an inserted SD card I'll see if I can fix this so it matches your stated intention. |
@@ -72,7 +72,7 @@ typedef struct dt_lib_print_settings_t
int unit;
int v_intent, v_pintent;
char *v_iccprofile, *v_piccprofile, *v_style;
- gboolean v_style_append;
+ gboolean v_style_append, v_black_point_compensation;
} dt_lib_print_settings_t;
typedef struct dt_lib_export_profile_t
| [No CFG could be retrieved] | Includes the header files of all of the relevant components. name - name of the element in the combo box. | This is going to invalidate old presets. I don't think that it is a problem right now since it's a new and unreleased module (and not an iop), but soonish you should think about doing that with `legacy_params`. |
@@ -34,6 +34,14 @@ namespace Microsoft.Xna.Framework
private DisplayOrientation _supportedOrientations;
private bool _synchronizedWithVerticalRetrace = true;
private bool _drawBegun;
+ private bool _hardwareModeSwitch = true;
+
+#if WINDOWS && DIRECTX
+
+ // FIXME : FULLSCREEN
+
+ private bool _firstLaunch = true;
+#endif
bool disposed;
#if !WINRT
| [GraphicsDeviceManager->[Dispose->[Dispose],Initialize->[ApplyChanges]]] | Creates a new object of type GraphicsDeviceManager that can be used to manage GraphicsDevice objects Private methods Get the preferred back buffer format and height. | Another FIXME that needs an explanation. |
@@ -136,7 +136,7 @@ public final class SparkRunner extends PipelineRunner<EvaluationResult> {
jssc.start();
// if recovering from checkpoint, we have to reconstruct the EvaluationResult instance.
- return contextFactory.getCtxt() == null ? new EvaluationContext(jssc.sc(),
+ return contextFactory.getCtxt() == null ? new EvaluationContext(jssc.sparkContext(),
pipeline, jssc) : contextFactory.getCtxt();
} else {
JavaSparkContext jsc = SparkContextFactory.getSparkContext(mOptions);
| [SparkRunner->[create->[SparkRunner],fromOptions->[SparkRunner]]] | Runs the pipeline using the SparkRunner. | apparently this was deprecated long ago... |
@@ -274,8 +274,8 @@ describes.realWin('AccessIframeAdapter', {
});
});
- // TODO(dvoytenko, #14336): Fails due to console errors.
- it.skip('should tolerate storage failures', () => {
+ it('should tolerate storage failures', () => {
+ const devErrorStub = sandbox.stub(dev(), 'error');
storageMock.expects('getItem')
.withExactArgs('amp-access-iframe')
.throws(new Error('intentional'))
| [No CFG could be retrieved] | This function mocks the response from AMP - 2. 0 and AMP - 3. missing storage should ignore absent storage. | It is no longer necessary to use this stub. You can use `allowConsoleError`, as described in #14406. |
@@ -61,13 +61,7 @@ import com.google.gwt.view.client.ListDataProvider;
import com.google.gwt.view.client.ProvidesKey;
import com.google.inject.Inject;
-import org.rstudio.core.client.CommandWithArg;
-import org.rstudio.core.client.ElementIds;
-import org.rstudio.core.client.Pair;
-import org.rstudio.core.client.ParallelCommandList;
-import org.rstudio.core.client.SerializedCommand;
-import org.rstudio.core.client.SerializedCommandQueue;
-import org.rstudio.core.client.StringUtil;
+import org.rstudio.core.client.*;
import org.rstudio.core.client.command.*;
import org.rstudio.core.client.command.EditorCommandManager.EditorKeyBinding;
import org.rstudio.core.client.command.EditorCommandManager.EditorKeyBindings;
| [ModifyKeyboardShortcutsWidget->[createMainWidget->[onPreviewNativeEvent->[test->[getElement,equals]],resetState],showToolTip->[getElement,bindNativeClickToSelectRow,describeCommand],shortcutInput->[shortcutInput],embedIcon->[getElement],sort->[compare->[getName,getKeySequence],sort],getAppCommandName->[getId],collectShortcuts->[onExecute->[execute->[getContext,setDefaultKeySequence,getName,getId,getKeySequence,execute,getValue,KeyboardShortcutEntry],compare->[getName,getContext],isCustomBinding,execute,KeyboardShortcutEntry,filter,sort],execute->[getName,KeyboardShortcutEntry],execute],describeCommand->[getKeySequence],onShortcutCellPreview->[getElement,getOriginalKeySequence,equals],onClick->[test->[equals]],addHandlers->[onColumnSort->[equals]],isExcludedCommand->[getId],filter->[getName,isCustomBinding,isModified,getValue],updateData->[getContext,getCommandType,equals,isModified,isCustomBinding,getKeySequence],addColumns->[getValue->[getName,getDisplayType,getKeySequence]],applyChanges->[getKeySequence,getId,getCommandType,getValue],editableTextColumn->[getValue->[getValue],onBrowserEvent->[onBrowserEvent],update->[getContext,getCommandType,restoreOriginalKeySequence,equals,getName,getId,setKeySequence,getOriginalKeySequence,KeyboardShortcutEntry]],textColumn->[getValue->[getValue]],onCellPreview->[onCellPreview]]] | Imports a single version of the GUI. Imports all components of the RSTudio library. | We try not to use wildcard imports. Please revert back to individual imports. |
@@ -93,8 +93,11 @@ auto parseShimcacheData(const std::string& token,
// Check to make sure the shimcache entry is greater than zero
if (!string_path.empty()) {
- // Get last modified time of the file
- shimcache.last_modified = littleEndianToUnixTime(shimcache_time);
+ // Sometimes Shimcache artifacts have 0 as timestamp, if so skip filetime
+ // converstion
+ shimcache.last_modified = (shimcache_time == "0000000000000000")
+ ? 0LL
+ : littleEndianToUnixTime(shimcache_time);
} else {
shimcache.last_modified = 0LL;
}
| [No CFG could be retrieved] | Reads the extra data from the Windows Store and returns the object. Entry for a Registry key. | This is a nit.... Looking at this together I wonder if `if (!string_path.empty() || shimcache_time == "0000000000000000"){` is cleaner |
@@ -484,6 +484,7 @@ non_tested_cars = [
TOYOTA.HIGHLANDER,
TOYOTA.HIGHLANDERH,
TOYOTA.HIGHLANDERH_TSS2,
+ TOYOTA.PRIUS_TSS2,
]
if __name__ == "__main__":
| [wait_for_sockets->[time,SubMaster,append,update,len],get_route_log->[write,print,get,isfile,replace,join,open,exit],all,getattr,join,put,manager_start,getpgid,clear_all,wait_for_sockets,values,prepare_managed_process,killpg,append,print,cast,start_managed_process,get_route_log,len,kill_managed_process,items,from_bytes,get,get_running,Popen,exit,Params,all_known_cars] | This function returns a list of the routes that should be applied to the given cars. This function is called by the HierarchyH_TSS2 module. It is called. | we'll need a route before merging this |
@@ -35,15 +35,6 @@ import org.apache.hudi.common.util.HoodieTimer;
import org.apache.hudi.common.util.Option;
import org.apache.hudi.common.util.ValidationUtils;
import org.apache.hudi.hive.util.HiveSchemaUtil;
-
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.IMetaStoreClient;
-import org.apache.hadoop.hive.ql.metadata.Hive;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
-import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hudi.sync.common.AbstractSyncHoodieClient;
import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;
| [HoodieHiveClient->[close->[close],getAllTables->[getAllTables],constructChangePartitions->[getPartitionClause]]] | Imports a single object from a Hudi service. Imports the given object as a HoodieClient. | please revert the change to avoid using import * |
@@ -2930,14 +2930,8 @@ inline void gcode_G28() {
#endif
feedrate = homing_feedrate[Z_AXIS];
-
- #if HAS_BED_PROBE
- do_blocking_move_to_z(z_dest);
- #else
- line_to_z(z_dest);
- stepper.synchronize();
- #endif
-
+ line_to_z(z_dest);
+ stepper.synchronize();
destination[Z_AXIS] = current_position[Z_AXIS] = z_dest;
}
}
| [No CFG could be retrieved] | Synchronizes the components of a network segment with the current position. region Private methods. | I figured on saving one function call if not needed. I know it's a very minor thing. In the long run I'm sure we can continue to consolidate all these movement helpers. |
@@ -36,11 +36,11 @@ def CreateAnalyzer(optimization_settings, model_part_controller, external_analyz
internal_analyzer = EmptyAnalyzer()
dependency_graph, exist_dependencies = _CreateDependencyGraphRecursively(objectives)
- # dependency_graph = [ (response_id_1, [], weight_1),
- # (response_id_2, [], weight_2),
- # (response_id_3, [ (response_id_3a, [], weight_3a),
- # (response_id_3b, [], weight_3b),
- # (response_id_3c, [...], weight_3c) ], weight_3),
+ # dependency_graph = [ (response_id_1, weight_1, []),
+ # (response_id_2, weight_2, []),
+ # (response_id_3, [ (response_id_3a, weight_3a, []),
+ # (response_id_3b, weight_3b, []),
+ # (response_id_3c, weight_3c, [...]) ], weight_3),
# ... ]
if exist_dependencies:
| [_IdentifyInternalResponsesRecursively->[_IdentifyInternalResponsesRecursively],AnalyzerWithDependencies->[__ComputeCombinedGradientsRecursively->[__ComputeCombinedGradientsRecursively],__ComputeCombinedValuesRecursively->[__ComputeCombinedValuesRecursively],__GetIdentifiersRecursively->[__GetIdentifiersRecursively],__WriteResultsOfCombinedResponses->[__GetIdentifiersRecursively],__ComputeGradientNorms->[__GetIdentifiersRecursively],__RequestResponsesAccordingDependencies->[__GetIdentifiersRecursively]],Analyzer->[InitializeBeforeOptimizationLoop->[InitializeBeforeOptimizationLoop],FinalizeAfterOptimizationLoop->[FinalizeAfterOptimizationLoop],AnalyzeDesignAndReportToCommunicator->[AnalyzeDesignAndReportToCommunicator]],_CreateDependencyGraphRecursively->[_CreateDependencyGraphRecursively]] | Creates an analyzer based on the given optimization settings. | dependency type is missing in this description of the tree |
@@ -31,6 +31,18 @@ def test_register_secret_happy_path(secret_registry_proxy: SecretRegistry):
block_identifier='latest',
), 'Test setup is invalid, secret must be unknown'
+ chain = BlockChainService(
+ jsonrpc_client=secret_registry_proxy.client,
+ contract_manager=contract_manager,
+ )
+ chain.wait_until_block(129)
+
+ with pytest.raises(NoStateForBlockIdentifier):
+ secret_registry_proxy.is_secret_registered(
+ secrethash=secrethash_unregistered,
+ block_identifier=0,
+ )
+
secret_registry_proxy.register_secret(
secret=secret,
given_block_identifier='latest',
| [test_register_secret_batch_happy_path->[register_secret_batch,must_have_event,get_all_entries,secret_registered_filter,keccak,decode_event,range,make_secret,get_secret_registration_block_by_secrethash],test_register_secret_happy_path->[must_have_event,get_all_entries,register_secret,secret_registered_filter,keccak,decode_event,is_secret_registered,make_secret,get_secret_registration_block_by_secrethash],test_concurrent_secret_registration->[count_transactions->[transact],context,all,setattr,joinall,defaultdict,set,add,range,spawn,make_secret]] | Test happy path of SecretRegistry with a single secret. Get the secret registration block by secrethash. | please make this dependent on the configured value `STATE_PRUNING_AFTER_BLOCKS` |
@@ -297,9 +297,10 @@ export function invitePeopleAndChatRooms( // eslint-disable-line max-params
* @returns {boolean} Indication of whether adding people is currently enabled.
*/
export function isAddPeopleEnabled(state: Object): boolean {
+ const { peopleSearchUrl } = state['features/base/config'];
const { isGuest } = state['features/base/jwt'];
- return !isGuest;
+ return !isGuest && Boolean(peopleSearchUrl);
}
/**
| [No CFG could be retrieved] | Invite a user to a specific type of items. Checks if the given string looks like it could be a phone number. | Hum, we use add people dialog for adding people using peopleSearchUrl and/or for jigasi calling. So I don't thing this is correct. Cause you need to enable peopleSearchUrl whenever you have jigasi. |
@@ -1,5 +1,5 @@
class User < ApplicationRecord
- include SearchableModel, SettingsModel
+ include SearchableModel, SettingsModel, VariablesModel
include User::TeamRoles, User::ProjectRoles
acts_as_token_authenticatable
| [User->[has_linked_account?->[exists?],from_omniauth->[take],projects_by_teams->[try,zero?,group_by,joins],confirmation_required?->[enable_email_confirmations],statistics->[find_each,count],find_by_valid_wopi_token->[first,warn],time_zone_check->[add,nil?],get_wopi_token->[create,to_i,token,ttl,distinct,friendly_token,warn,each,delete],active_status_str->[t,active?],current_team->[current_team_id,find_by_id],filter_paperclip_errors->[key?,join,add,each,clear,delete],projects_tree->[is_admin_of_team?,where,order,includes],empty_avatar->[avatar_file_size,to_i,avatar_content_type,avatar_file_name,last,mime_type],search->[all,distinct,where,present?,not,id],active?->[present?],from_azure_jwt_token->[take],destroy_notifications->[in_groups_of,pluck,destroy_all],last_activities->[order],teams_ids->[pluck],fetch,megabyte,avatar,store_accessor,define_method,before_destroy,full_name,auto_strip_attributes,t,include,after_validation,parse,has_many,each,validates_attachment,gsub,has_attached_file,devise,default_settings,to_sym,validate,validates]] | A User class that represents a user record. The user_identities table is a list of all the user identities that have a single. | Style/MixinGrouping: Put include mixins in separate statements. |
@@ -23,6 +23,11 @@ import numpy as np
from lxml import etree
from tqdm import tqdm
+try:
+ xrange # Python 2
+except NameError:
+ xrange = range # Python 3
+
BBoxDesc = namedtuple('BBoxDesc', 'id, label, det_conf, xmin, ymin, xmax, ymax')
MatchDesc = namedtuple('MatchDesc', 'gt, pred')
Range = namedtuple('Range', 'start, end, label')
| [process_tracks->[extract_events],calculate_metrics->[match_events],match_detections->[iou],main->[split_to_tracks,process_tracks,calculate_metrics,match_detections,add_matched_predictions,load_annotation,load_detections],extract_events->[_extrapolate,_merge,_interpolate,_filter,_smooth],main] | Creates a new object with all the fields of a single object. Loads a list of BBoxDesc objects for the specified frame_id. | @cclauss, I think the more elegant way to fix the compatibility issue is to add `from builtins import range` import row and replace all `xrange` onto `range`. |
@@ -160,11 +160,7 @@ def generate_synapse_config() -> ContextManager:
)
return server_name, config_file
- try:
- yield generate_config
- finally:
- if delete_base_dir:
- shutil.rmtree(synapse_base_dir)
+ yield generate_config
@contextmanager
| [No CFG could be retrieved] | Context manager that generates a configuration file for the matrix server. Generate the config file for the server. | - How is this related to this PR? - Don't we need to clean up the `synapse_base_dir` any more? |
@@ -47,6 +47,9 @@ class TestContext(Context):
class DummyRunTracker(object):
"""A runtracker stand-in that does no actual tracking."""
+ def __init__(self):
+ self.logger = RunTrackerLogger(self)
+
class DummyArtifactCacheStats(object):
def add_hits(self, cache_name, targets): pass
| [create_context_from_options->[TestContext,DummyRunTracker],TestContext->[new_workunit->[DummyWorkUnit],DummyRunTracker->[DummyArtifactCacheStats]]] | Add misses to the cache. | Needs a blank line above to separate from doc string. |
@@ -28,12 +28,12 @@ type Team struct {
// IsOwnerTeam returns true if team is owner team.
func (t *Team) IsOwnerTeam() bool {
- return t.Name == OWNER_TEAM
+ return t.Name == ownerTeamName
}
-// IsTeamMember returns true if given user is a member of team.
-func (t *Team) IsMember(uid int64) bool {
- return IsTeamMember(t.OrgID, t.ID, uid)
+// IsMember returns true if given user is a member of team.
+func (t *Team) IsMember(uID int64) bool {
+ return IsTeamMember(t.OrgID, t.ID, uID)
}
func (t *Team) getRepositories(e Engine) (err error) {
| [RemoveRepository->[HasRepository,removeRepository],GetRepositories->[getRepositories],AddRepository->[HasRepository,addRepository],addRepository->[getMembers],GetMembers->[getMembers],HasRepository->[hasRepository],removeRepository->[getMembers],getRepositories,IsOwnerTeam,GetRepositories] | IsOwnerTeam returns true if the user is an owner of the team. | I think UID |
@@ -0,0 +1,3 @@
+# rubocop:disable Lint/EmptyClass
+class CreatorSetting; end
+# rubocop:enable Lint/EmptyClass
| [No CFG could be retrieved] | No Summary Found. | Tbh, I'm a bit confused about what purpose this model serves. I'd at least expect to see some `attr_accessor`s or similar. |
@@ -58,7 +58,7 @@ void IncompressiblePotentialFlowVelocityElement<TDim, TNumNodes>::GetValueOnInte
rValues[g] = velocity;
}
} else {
- KRATOS_ERROR << "GetValueOnIntegrationPoints for variable "
+ KRATOS_ERROR << "CalculateOnIntegrationPoints for variable "
<< rVariable.Name() << " not defined for " << this->Info();
}
| [No CFG could be retrieved] | Get the velocity of a variable in the system. | I think that you should call the base class in here. In any case this is out of the scope of this PR. |
@@ -367,11 +367,14 @@ class BuildExtension(build_ext, object):
self.build_lib = self.output_dir
def build_extensions(self):
+ if OS_NAME.startswith("darwin"):
+ self._valid_clang_compiler()
+
self._check_abi()
# Note(Aurelius84): If already compiling source before, we should check whether
# cflags have changed and delete the built shared library to re-compile the source
- # even though source file content keep unchanaged.
+ # even though source file content keep unchanged.
so_name = self.get_ext_fullpath(self.extensions[0].name)
clean_object_if_change_cflags(
os.path.abspath(so_name), self.extensions[0])
| [BuildCommand->[with_options->[cls_with_options->[__init__->[__init__]]]],setup->[setup],BuildExtension->[with_options->[cls_with_options->[__init__->[__init__]]],build_extensions->[object_filenames_with_cuda,build_extensions]]] | Build the extensions for the given object. Add missing flags to the compiler. Wrapper for the n - CUDA object suffix. xx. cu. o file. | why only need to check mac compiler |
@@ -113,6 +113,18 @@ public class SetupWizard {
*/
public HttpResponse doCompleteInstall(StaplerRequest req, StaplerResponse rsp) throws IOException, ServletException {
Jenkins j = Jenkins.getActiveInstance();
+ User u = j.getUser("admin");
+ // JENKINS-33572 - without creating a new 'admin' user, auth key erroneously remained
+ if(u != null && u.getProperty(AuthenticationKey.class) != null) {
+ // There must be a better way of removing things...
+ Iterator<Map.Entry<Descriptor<UserProperty>,UserProperty>> entries = u.getProperties().entrySet().iterator();
+ while(entries.hasNext()) {
+ Map.Entry<?, ?> entry = entries.next();
+ if(entry.getValue() instanceof AuthenticationKey) {
+ entries.remove();
+ }
+ }
+ }
j.setInstallState(InstallState.INITIAL_SETUP_COMPLETED);
InstallUtil.saveLastExecVersion();
PluginServletFilter.removeFilter(FORCE_SETUP_WIZARD_FILTER);
| [SetupWizard->[doCompleteInstall->[setSetupWizard,getActiveInstance,okJSON,removeFilter,setInstallState,saveLastExecVersion],doFilter->[getRequestURI->[getContextPath],HttpServletRequestWrapper,getRequestURI,doFilter,equals],setMasterKillSwitch,HudsonPrivateSecurityRealm,info,addProperty,getProperty,Filter,getKey,AuthenticationKey,addFilter,FullControlOnceLoggedInAuthorizationStrategy,setAllowAnonymousRead,commit,DefaultCrumbIssuer,createAccount,AssertionError,setSlaveAgentPort,getUser,BulkChange,getName,setCrumbIssuer,getLogger,setSecurityRealm,setAuthorizationStrategy,toLowerCase,save,getSecurityRealm]] | Complete the installation of the plugin. | I suppose you want to save the user after it. Otherwise the change won't be persisted |
@@ -41,7 +41,7 @@ class BidirectionalAttentionFlow(Model):
phrase_layer : ``Seq2SeqEncoder``
The encoder (with its own internal stacking) that we will use in between embedding tokens
and doing the bidirectional attention.
- attention_similarity_function : ``SimilarityFunction``
+ similarity_function : ``SimilarityFunction``
The similarity function that we will use when comparing encoded passage and question
representations.
modeling_layer : ``Seq2SeqEncoder``
| [BidirectionalAttentionFlow->[from_params->[from_params]]] | This is a basic layout for the passage model. This is a hack to avoid the shuffling of the n - tuple when using. | Same here, and with anywhere where you've changed the named arguments |
@@ -179,6 +179,7 @@ namespace Content.Shared.Body.Components
foreach (var component in Owner.GetAllComponents<IBodyPartRemoved>())
{
+ EntitySystem.Get<SharedHumanoidAppearanceSystem>().BodyPartRemoved(Owner.Uid, args);
component.BodyPartRemoved(args);
}
| [SharedBodyComponent->[TryAddPart->[TryAddPart,CanAddPart],SetPart->[SetPart],LookForFootRecursion->[TryGetSlot,LookForFootRecursion],HandleComponentState->[HandleComponentState,RemovePart,SetPart],OnRemove->[OnRemove],Initialize->[Initialize],Gib->[RemovePart,Gib],RemovePart->[RemovePart],ConnectedToCenterPartRecursion->[ConnectedToCenterPartRecursion],GetPartsOfType->[GetSlotsOfType],TryGetSlot->[GetSlot],TryDropPart->[GetHangingParts,RemovePart],CanAddPart->[CanAddPart]]] | private static final int DAMAGE_OFFSET = 0 ;. | You might want to move this just outside the `foreach`, otherwise this will be called for each component with a `IBodyPartRemoved` interface implemented! |
@@ -1,7 +1,5 @@
<?php
if (!$os) {
- if (strstr($sysObjectId, '.1.3.6.1.4.1.1588.2.2.1.1.1.5') || stristr($sysDescr, 'Brocade Communications Systems')) {
- $os = 'nos';
- }
-}
+ if (strstr($sysDescr, "Brocade VDX")) { $os = "nos"; }
+}
\ No newline at end of file
| [No CFG could be retrieved] | <?php ethernet_id = > ethernet_id. | Will this new check pick up the two checks it's removing or should it be in addition to? Can you separate the { $os = "nos"; } out as before so we can try and keep the code standards up :) |
@@ -43,6 +43,7 @@ class SuluCategoryExtension extends Extension implements PrependExtensionInterfa
$loader = new Loader\XmlFileLoader($container, new FileLocator(__DIR__ . '/../Resources/config'));
$loader->load('services.xml');
+ $loader->load('command.xml');
$this->configurePersistence($config['objects'], $container);
}
| [SuluCategoryExtension->[prepend->[prependExtensionConfig,hasExtension],load->[processConfiguration,configurePersistence,load]]] | Loads a node configuration. | Aren't commands discovered by symfony automatically? Why is this necessary? |
@@ -1949,5 +1949,7 @@ void Temperature::isr() {
}
#endif
+ cli();
+ in_temp_isr = false;
SBI(TIMSK0, OCIE0B); //re-enable Temperature ISR
}
| [No CFG could be retrieved] | region Elevation System Functions. | @Sebastianv650 Is clearing _all interrupts_ here at the end of the temperature ISR actually needed? This won't cause the stepper ISR to cease? |
@@ -285,7 +285,9 @@ export class Templates {
} else if (opt_querySelector) {
return scopedQuerySelector(parent, opt_querySelector);
} else {
- return childElementByTag(parent, 'template');
+ const template = childElementByTag(parent, 'template');
+ const templateByScript = childElementByTag(parent, 'script');
+ return template || templateByScript;
}
}
| [No CFG could be retrieved] | Find a specified template inside the parent. Returns null if not present. Get a promise that will be triggered when the element has a template class of the given type. | Use a query selector to do this in one pass. |
@@ -86,6 +86,7 @@ func (cfg *MemcachedClientConfig) RegisterFlagsWithPrefix(prefix, description st
f.UintVar(&cfg.CBFailures, prefix+"memcached.circuit-breaker-consecutive-failures", 10, description+"Trip circuit-breaker after this number of consecutive dial failures (if zero then circuit-breaker is disabled).")
f.DurationVar(&cfg.CBTimeout, prefix+"memcached.circuit-breaker-timeout", 10*time.Second, description+"Duration circuit-breaker remains open after tripping (if zero then 60 seconds is used).")
f.DurationVar(&cfg.CBInterval, prefix+"memcached.circuit-breaker-interval", 10*time.Second, description+"Reset circuit-breaker counts after this long (if zero then never reset).")
+ f.IntVar(&cfg.MaxItemSize, prefix+"memcached.max-item-size", 1024*1024, description+"The maximum size of an item stored in memcached. Bigger items are not stored. If set to 0, no maximum size is enforced.")
}
// NewMemcachedClient creates a new MemcacheClient that gets its server list
| [Set->[Set],updateMemcacheServers->[Set],updateLoop->[Stop]] | RegisterFlagsWithPrefix is used to add flags that are specific to memcached client. | I would default it to 0, otherwise it's a "breaking change". |
@@ -1173,10 +1173,11 @@ module.exports = class extends Generator {
let fieldType;
let fieldName;
const relationshipType = relationship.relationshipType;
+ const otherEntityIsEmbedded = relationship.otherEntityIsEmbedded;
if (relationshipType === 'one-to-many' || relationshipType === 'many-to-many') {
fieldType = `I${relationship.otherEntityAngularName}[]`;
fieldName = relationship.relationshipFieldNamePlural;
- } else if (dto === 'no') {
+ } else if (dto === 'no' || otherEntityIsEmbedded) {
fieldType = `I${relationship.otherEntityAngularName}`;
fieldName = relationship.relationshipFieldName;
} else {
| [No CFG could be retrieved] | Private functions - Functions - Functions - Functions - Functions - Functions - Functions - Functions - Functions The type of the variable. | otherEntityIsEmbedded is used only once, use relationship.otherEntityIsEmbedded instead |
@@ -193,7 +193,9 @@ public class SearchQueryQueryToolChest extends QueryToolChest<Result<SearchResul
@Override
public Object apply(Result<SearchResultValue> input)
{
- return Lists.newArrayList(input.getTimestamp().getMillis(), input.getValue());
+ return dimensionSpecs != null
+ ? Lists.newArrayList(input.getTimestamp().getMillis(), input.getValue(), dimOutputNames)
+ : Lists.newArrayList(input.getTimestamp().getMillis(), input.getValue());
}
};
}
| [SearchQueryQueryToolChest->[preMergeQueryDecoration->[run->[run]],SearchThresholdAdjustingQueryRunner->[run->[run]]]] | Gets the cache strategy. This method creates a Function that creates a SearchResultValue object from a list of objects. | will dimensionSpecs ever be null? Looks like it will either be an empty list or the query's dimension spec list |
@@ -667,6 +667,13 @@ class ProductTypeFilter(MetadataFilterBase):
model = ProductType
fields = ["search", "configurable", "product_type"]
+ @classmethod
+ def filter_product_type_searchable(cls, queryset, _name, value):
+ if not value:
+ return queryset
+ name_slug_qs = Q(name__ilike=value) | Q(slug__ilike=value)
+ return queryset.filter(name_slug_qs)
+
class ProductFilterInput(ChannelFilterInputObjectType):
class Meta:
| [_filter_minimal_price->[filter_products_by_minimal_price],_filter_stock_availability->[filter_products_by_stock_availability],_filter_variant_price->[filter_products_by_variant_price],filter_categories->[filter_products_by_categories],_filter_attributes->[filter_products_by_attributes],ProductFilter->[filter_stock_availability->[_filter_stock_availability],filter_is_published->[_filter_products_is_published],filter_minimal_price->[_filter_minimal_price],filter_attributes->[_filter_attributes],filter_variant_price->[_filter_variant_price]],filter_products_by_attributes->[_clean_product_attributes_boolean_filter_input,filter_products_by_attributes_values,_clean_product_attributes_date_time_range_filter_input,_clean_product_attributes_range_filter_input,_clean_product_attributes_filter_input],CollectionFilter->[filter_is_published->[_filter_collections_is_published]],filter_search->[product_search],filter_collections->[filter_products_by_collections]] | Creates a filter object for all category objects. | Would be good to cover this method |
@@ -186,6 +186,11 @@ def setup_logging(verbosity, no_color, user_log_file):
"()": IndentingFormatter,
"format": "%(message)s",
},
+ "indent_and_timestamp": {
+ "()": IndentingFormatter,
+ "format": "%(message)s",
+ "timestamp": True,
+ },
},
"handlers": {
"console": {
| [BetterRotatingFileHandler->[_open->[_open]],ColorizedStreamHandler->[format->[should_color,format],__init__->[__init__],_color_wrap],IndentingFormatter->[format->[format,get_indentation]]] | Configures and sets up logging for a specific level. The top - level object that represents a bunch of resources that can be reserved for logging. | `indent_with_timestamp` would be a little better (since timestamp is an argument to the indenting formatter). |
@@ -1558,13 +1558,13 @@ class BaseRaw(ProjMixin, ContainsMixin, UpdateChannelsMixin, SetChannelsMixin,
highpass=None, lowpass=None, filtorder=4, clipping=None,
show_first_samp=False, proj=True, group_by='type',
butterfly=False, decim='auto', noise_cov=None, event_id=None,
- show_scrollbars=True, verbose=None):
+ show_scrollbars=True, use_scalebars=True, verbose=None):
return plot_raw(self, events, duration, start, n_channels, bgcolor,
color, bad_color, event_color, scalings, remove_dc,
order, show_options, title, show, block, highpass,
lowpass, filtorder, clipping, show_first_samp, proj,
group_by, butterfly, decim, noise_cov=noise_cov,
- event_id=event_id, show_scrollbars=show_scrollbars,
+ event_id=event_id, show_scrollbars=show_scrollbars, use_scalebars=use_scalebars,
verbose=verbose)
@verbose
| [_start_writing_raw->[append],BaseRaw->[notch_filter->[notch_filter],_preload_data->[_read_segment],crop->[_update_times,set_annotations],resample->[_update_times,resample],__setitem__->[_parse_get_set_params],append->[_read_segment,_update_times,append,set_annotations],estimate_rank->[time_as_index],__getitem__->[_read_segment,_parse_get_set_params],save->[time_as_index]],_write_raw->[close,_write_raw],ToDataFrameMixin->[to_data_frame->[_set_pandas_dtype]]] | Plot a single - channel PSD. Config options for the area mode. | I think "show_scalebars" is more consistent with how we name other parameters, and is more specific too ("use" is a rather generic verb) |
@@ -191,12 +191,12 @@ class Motion {
/**
* Callback for regardless whether the motion succeeds or fails.
- * @param {!Function=} opt_callback
+ * @param {function()=} opt_callback
* @return {!Promise}
*/
thenAlways(opt_callback) {
const callback = opt_callback || NOOP_CALLBACK_;
- return this.then(callback, callback);
+ return /** @type {!Promise} */ (this.then(callback, callback));
}
/**
| [No CFG could be retrieved] | Runs the given context node and returns a promise that resolves when the motion succeeds or fails. Checks if the next action of the motion process should continue. | @dvoytenko had to cast here |
@@ -1030,6 +1030,12 @@ func PresentMessageUnboxed(ctx context.Context, g *globals.Context, rawMsg chat1
strings.ToLower(rawMsg.GetMessageType().String())))
}
}
+ // Disable reading exploding messages until fully we release support
+ if valid.IsEphemeral() && !valid.IsEphemeralExpired(time.Now()) {
+ if ekLib := g.GetEKLib(); ekLib != nil && !ekLib.ShouldRun(ctx) {
+ return miscErr(fmt.Errorf("Unable to decrypt because current client is out of date. Please update your version of Keybase"))
+ }
+ }
var strOutboxID *string
if valid.ClientHeader.OutboxID != nil {
so := valid.ClientHeader.OutboxID.String()
| [showLog->[showVerbose],Debug->[showLog],Less->[Less],Trace->[showLog],Debug] | PresentMessageUnboxed fills in the UI with the message that was previously unboxed Presents a message s name mentions. | @mmaxim could you confirm that this looks like the right error type to return here? |
@@ -399,11 +399,6 @@ def get_file_content(url, comes_from=None, session=None):
:param comes_from: Origin description of requirements.
:param session: Instance of pip.download.PipSession.
"""
- if session is None:
- raise TypeError(
- "get_file_content() missing 1 required keyword argument: 'session'"
- )
-
match = _scheme_re.search(url)
if match:
scheme = match.group(1).lower()
| [PipSession->[__init__->[MultiDomainBasicAuth,SafeFileCache,InsecureHTTPAdapter,LocalFSAdapter,user_agent]],_download_url->[resp_read,written_chunks],PipXmlrpcTransport->[__init__->[__init__]],_download_http_url->[_download_url,get],is_vcs_url->[_get_used_vcs_backend],unpack_http_url->[_copy_file],unpack_file_url->[is_dir_url,_copy_file,url_to_path],is_dir_url->[url_to_path],get_file_content->[get],unpack_url->[is_vcs_url,unpack_vcs_link,unpack_http_url,is_file_url,unpack_file_url,PipSession]] | Gets the content of a file. | I think it's nicer that `get_file_content` would fail if it's not passed `session`. I don't have to think about what the function does when using it. |
@@ -10,6 +10,7 @@ from devtools_testutils import AzureMgmtTestCase
class KeyVaultTestCase(AzureMgmtTestCase):
def setUp(self):
self.list_test_size = 7
+ self.plain_text = b"5063e6aaa845f150200547944fd199679c98ed6f99da0a0b2dafeaf1f4684496fd532c1c229968cb9dee44957fcef7ccef59ceda0b362e56bcd78fd3faee5781c623c0bb22b35beabde0664fd30e0e824aba3dd1b0afffc4a3d955ede20cf6a854d52cfd"
super(KeyVaultTestCase, self).setUp()
def tearDown(self):
| [KeyVaultTestCase->[tearDown->[super],setUp->[super]]] | Set up the test case. | Can this be a local variable in the test that needs it? |
@@ -50,7 +50,13 @@ packages_path = os.path.join(repos_path, "builtin")
mock_packages_path = os.path.join(repos_path, "builtin.mock")
#: User configuration location
-user_config_path = os.path.expanduser('~/.spack')
+user_config_path = re.sub(r'\$(?:spack\b|\{spack\})',
+ spack_root,
+ os.path.expanduser(
+ os.environ.get(
+ 'SPACK_USER_CONFIG',
+ '~/.spack')))
+
user_bootstrap_path = os.path.join(user_config_path, 'bootstrap')
user_bootstrap_store = os.path.join(user_bootstrap_path, 'store')
reports_path = os.path.join(user_config_path, "reports")
| [expanduser,ancestor,join] | Creates a list of paths to the built - in Spack packages and mock packages. | Isn't it better to make an empty value default to `~/.spack` too? `os.environ.get('SPACK_USER_CONFIG') or '~/.spack'`. Also I would prefer `SPACK_USER_CONFIG_PATH`. It's super long, but it can only mean one thing. |
@@ -2418,13 +2418,11 @@ err:
static int test_EVP_rsa_pss_with_keygen_bits(void)
{
int ret;
- OSSL_PROVIDER *provider;
EVP_PKEY_CTX *ctx;
EVP_PKEY *pkey;
const EVP_MD *md;
pkey = NULL;
ret = 0;
- provider = OSSL_PROVIDER_load(NULL, "default");
md = EVP_get_digestbyname("sha256");
ret = TEST_ptr((ctx = EVP_PKEY_CTX_new_id(EVP_PKEY_RSA_PSS, NULL)))
| [No CFG could be retrieved] | - - - - - - - - - - - - - - - - - - region Public Key Provider. | This is optional, but preexisting nit: blank line here. |
@@ -489,7 +489,7 @@ bool CopyFileContents(const std::string &source, const std::string &target)
}
if(ferror(targetfile)){
errorstream<<target<<": IO error: "
- <<strerror(errno)<<std::endl;
+ <<porting::strerrno(errno)<<std::endl;
retval = false;
done = true;
}
| [No CFG could be retrieved] | Copy file contents from source to target. Copy source to target. | Can you do this in a separate commit? If you do already, then don't squash before merging :) |
@@ -0,0 +1 @@
+from pip.models.index import Index, PyPI # noqa
| [No CFG could be retrieved] | No Summary Found. | Instead of a `# noqa` here I'd prefer to define an `__all__` that has `Index` and `PyPI`. |
@@ -207,8 +207,9 @@ namespace System.Net.Http.Functional.Tests
return cookieHeaderValue;
}
- [ConditionalFact]
- public async Task GetAsync_SetCookieContainerAndCookieHeader_BothCookiesSent()
+ [Theory]
+ [MemberData(nameof(Async))]
+ public async Task GetAsync_SetCookieContainerAndCookieHeader_BothCookiesSent(bool async)
{
await LoopbackServerFactory.CreateServerAsync(async (server, url) =>
{
| [HttpClientHandlerTest_Cookies->[Task->[CookieContainer,GetCookieValue,GetCookieHeaderValue],CookieNamesValuesAndUseCookies->[GenerateCookie]]] | Get the cookie value. | Why were these conditional? Did they used to throw Skip exceptions and now they don't? |
@@ -130,6 +130,15 @@ func (c *Client) getConfigMap() *osmConfig {
}
cfg.ZipkinTracing = modeBool
+ // Parse MeshCIDRRanges: only required if egress is enabled
+ cidr, ok := configMap.Data[meshCIDRRangesKey]
+ if !ok {
+ if cfg.Egress {
+ log.Error().Err(errMissingKeyInConfigMap).Msgf("Missing key=%s, required when egress is enabled", meshCIDRRangesKey)
+ }
+ }
+ cfg.MeshCIDRRanges = cidr
+
return cfg
}
| [getConfigMapCacheKey->[Sprintf],run->[Msg,Error,Msgf,getConfigMapCacheKey,WaitForCacheSync,Info,Run],getConfigMap->[Error,Msgf,getConfigMapCacheKey,Err,GetByKey],Informer,Error,WithNamespace,AddEventHandler,ConfigMaps,GetStore,ParseBool,V1,ValueOf,Msgf,NewSharedInformerFactoryWithOptions,Core,FieldByName,String,Elem,run,Err,GetKubernetesEventHandlers] | getConfigMap returns the ConfigMap object if it exists. | This is a small thing - but I don't know what the alue of `cidr` is going to be when `ok != true` - is it ok to assign this then? Does it make sense for this to be `if cidr, ok := ...; ok {} else {}` or something like that? |
@@ -91,6 +91,10 @@ func GetBootstrapClusterRoles() []authorizationapi.ClusterRole {
{
ObjectMeta: kapi.ObjectMeta{
Name: ClusterAdminRoleName,
+ Annotations: map[string]string{
+ oapi.OpenShiftDescription: "A super-user that can perform any action in the cluster. When granted to a user within a project, they have full control over quota and membership and can perform every action on every resource in the project.",
+ roleSystemOnly: roleIsSystemOnly,
+ },
},
Rules: []authorizationapi.PolicyRule{
authorizationapi.NewRule("*").Groups("*").Resources("*").RuleOrDie(),
| [NormalizeResources,Sprintf,RuleOrDie,NewRule,Names,Convert,Groups,NewString,AllRoles,Resources] | GetBootstrapClusterRoles returns a list of bootstrap cluster roles appsGive a list of all the available apps. | instead of saying "granted to a user within a local policy" can you say "granted to a user on a project" |
@@ -25,8 +25,15 @@ try:
fluid_DEM_coupling_imports_available = True
except ImportError:
fluid_DEM_coupling_imports_available = False
-# List of tests that are available
-available_tests = []
+
+from KratosMultiphysics import Vector, Logger, Parameters
+
+
+def Say(*args):
+ KratosMultiphysics.Logger.GetDefaultOutput().SetSeverity(KratosMultiphysics.Logger.Severity.DETAIL)
+ Logger.PrintInfo("SwimmingDEM", *args)
+ Logger.Flush()
+ KratosMultiphysics.Logger.GetDefaultOutput().SetSeverity(KratosMultiphysics.Logger.Severity.WARNING)
if interpolation_imports_available:
class interpolation_test_linear(InterpolationTF.TestFactory):
| [AssembleTestSuites->[SetTestSuite],AssembleTestSuites] | Definitions of the classes that are used in the SMALL TESTS. Reads the neccesary file names for the candelier project and returns the corresponding. | Is this function used somewhere? |
@@ -20,6 +20,7 @@ package monitors
import (
"context"
"fmt"
+ "github.com/elastic/beats/v7/x-pack/functionbeat/function/core"
"github.com/elastic/beats/v7/heartbeat/eventext"
"github.com/elastic/beats/v7/heartbeat/monitors/jobs"
| [makeSchedulerTaskFunc->[prepareSchedulerJob],Start->[makeSchedulerTaskFunc]] | newConfiguredJob creates a configuredJob instance based on a configuration object. JobConfig is used to specify the configuration of a single job. | What I would like to see is moving `core.SyncClient` to libbeat, so everyone can use it. Also, it's not a good practice to use code from a different Beat. |
@@ -1503,7 +1503,7 @@ CHAKRA_API JsDeleteProperty(_In_ JsValueRef object, _In_ JsPropertyIdRef propert
CHAKRA_API JsDefineProperty(_In_ JsValueRef object, _In_ JsPropertyIdRef propertyId, _In_ JsValueRef propertyDescriptor, _Out_ bool *result)
{
- return ContextAPIWrapper<true>([&] (Js::ScriptContext *scriptContext, TTDRecorder& _actionEntryPopper) -> JsErrorCode {
+ return ContextAPINoScriptWrapper([&] (Js::ScriptContext *scriptContext, TTDRecorder& _actionEntryPopper) -> JsErrorCode {
PERFORM_JSRT_TTD_RECORD_ACTION(scriptContext, RecordJsRTDefineProperty, object, (Js::PropertyRecord *)propertyId, propertyDescriptor);
VALIDATE_INCOMING_OBJECT(object, scriptContext);
| [No CFG could be retrieved] | This function deletes a property of a JS object. Creates an array of unique keys. | `DefineOwnPropertyDescriptor` can also be proxied. |
@@ -3102,7 +3102,7 @@ def _ssim_helper(x, y, reducer, max_val, compensation=1.0, k1=0.01, k2=0.03):
compensation: Compensation factor. See above.
k1: Default value 0.01
k2: Default value 0.03 (SSIM is less sensitivity to K2 for lower values, so
- it would be better if we taken the values in range of 0< K2 <0.4).
+ it would be better if we took the values in the range of 0< K2 <0.4).
Returns:
A pair containing the luminance measure, and the contrast-structure measure.
| [resize_image_with_pad_v2->[_resize_fn->[resize_images_v2],_resize_image_with_pad_common],resize_bicubic->[resize_bicubic],_resize_images_common->[_ImageDimensions],crop_to_bounding_box->[_assert,_ImageDimensions,_CheckAtLeast3DImage,_is_tensor],resize_image_with_crop_or_pad->[max_->[_is_tensor],equal_->[_is_tensor],min_->[_is_tensor],_ImageDimensions,_assert,max_,crop_to_bounding_box,min_,pad_to_bounding_box,_CheckAtLeast3DImage,_is_tensor,equal_],resize_images_v2->[resize_fn->[resize_with_scale_and_translate],_resize_images_common],resize_nearest_neighbor->[resize_nearest_neighbor],ssim_multiscale->[_ssim_per_channel,_verify_compatible_image_shapes,do_pad,convert_image_dtype],draw_bounding_boxes->[draw_bounding_boxes_v2],transpose->[_AssertAtLeast3DImage,transpose],draw_bounding_boxes_v2->[draw_bounding_boxes_v2],resize_images->[_resize_images_common],rgb_to_grayscale->[convert_image_dtype],ssim->[_ssim_per_channel,_verify_compatible_image_shapes,convert_image_dtype],sobel_edges->[transpose],per_image_standardization->[_AssertAtLeast3DImage],adjust_gamma->[_assert],sample_distorted_bounding_box->[sample_distorted_bounding_box_v2],pad_to_bounding_box->[_assert,_ImageDimensions,_CheckAtLeast3DImage,_is_tensor],adjust_saturation->[convert_image_dtype,adjust_saturation],adjust_hue->[adjust_hue,convert_image_dtype],grayscale_to_rgb->[_AssertGrayscaleImage],generate_bounding_box_proposals->[generate_bounding_box_proposals],extract_glimpse_v2->[extract_glimpse],_ssim_per_channel->[_fspecial_gauss,_ssim_helper],resize_image_with_pad_v1->[_resize_fn->[resize_images],_resize_image_with_pad_common],_flip->[fix_image_flip_shape,_AssertAtLeast3DImage],extract_glimpse->[extract_glimpse],_Assert3DImage->[_Check3DImage],psnr->[_verify_compatible_image_shapes,convert_image_dtype],adjust_jpeg_quality->[convert_image_dtype,_is_tensor],decode_image->[_gif->[convert_image_dtype],_jpeg->[convert_image_dtype],_png->[convert_image_dtype],check_png->[_is_png],_bmp->[convert_image_dtype]
,is_jpeg],_random_flip->[fix_image_flip_shape,_AssertAtLeast3DImage],resize_bilinear->[resize_bilinear],central_crop->[_get_dim,_AssertAtLeast3DImage],rot90->[_AssertAtLeast3DImage],non_max_suppression_with_overlaps->[non_max_suppression_with_overlaps],combined_non_max_suppression->[combined_non_max_suppression],_resize_image_with_pad_common->[max_->[_is_tensor],resize_fn,_ImageDimensions,_assert,max_,pad_to_bounding_box,_CheckAtLeast3DImage]] | Helper function for computing the SSIM. is nanometric measure of the luminance and contrast - structure measures. | `0 < K2 < 0.4` since you fixed spacing on `None,None,None` above |
@@ -241,7 +241,7 @@ void GcodeSuite::G34() {
linear_fit_data lfd;
incremental_LSF_reset(&lfd);
LOOP_L_N(i, NUM_Z_STEPPER_DRIVERS) {
- SERIAL_ECHOLNPAIR("PROBEPT_", ('0' + i), ": ", z_measured[i]);
+ SERIAL_ECHOLNPAIR("PROBEPT_", int(i), ": ", z_measured[i]);
incremental_LSF(&lfd, z_stepper_align.xy[i], z_measured[i]);
}
finish_incremental_LSF(&lfd);
| [No CFG could be retrieved] | Creates the clearance height for the next probe. END of CALCULATED STEPPER POSITIONS. | Other recent changes suggest that a zero-based index is preferred with the `PROBEPT_` prefix. |
@@ -1,5 +1,6 @@
# frozen_string_literal: true
-ENV["BUNDLE_GEMFILE"] ||= File.expand_path("../Gemfile", __dir__)
-require "bundler/setup" # Set up gems listed in the Gemfile.
-require "bootsnap/setup" # Speed up boot time by caching expensive operations.
+ENV['BUNDLE_GEMFILE'] ||= File.expand_path('../Gemfile', __dir__)
+
+require 'bundler/setup' # Set up gems listed in the Gemfile.
+require 'bootsnap/setup' # Speed up boot time by caching expensive operations.
| [require,expand_path] | The main method of the class. | Style/StringLiterals: Prefer double-quoted strings unless you need single quotes to avoid extra backslashes for escaping. |
@@ -59,10 +59,10 @@ module.exports = {
preprocessors: {
'./test/fixtures/*.html': ['html2js'],
- './test/**/*.js': preprocessors,
- './ads/**/test/test-*.js': preprocessors,
- './extensions/**/test/**/*.js': preprocessors,
- './testing/**/*.js': preprocessors,
+ './test/**/*.js': ['browserify'],
+ './ads/**/test/test-*.js': ['browserify'],
+ './extensions/**/test/**/*.js': ['browserify'],
+ './testing/**/*.js': ['browserify'],
},
// TODO(rsimha, #15510): Sauce labs on Safari doesn't reliably support
| [No CFG could be retrieved] | The main entry point for the tests. Browserify config for the given module. | Here's an example of the 'caller' inlined, I think this is a cleaner change for this PR. |
@@ -189,7 +189,7 @@ class CI_Security {
$expire = time() + $this->_csrf_expire;
$secure_cookie = (bool) config_item('cookie_secure');
- if ($secure_cookie && ( ! isset($_SERVER['HTTPS']) OR $_SERVER['HTTPS'] == 'off' OR ! $_SERVER['HTTPS']))
+ if ($secure_cookie && (empty($_SERVER['HTTPS']) OR $_SERVER['HTTPS'] === 'off'))
{
return FALSE;
}
| [CI_Security->[_csrf_set_hash->[csrf_set_cookie],xss_clean->[xss_clean],_decode_entity->[entity_decode],_validate_entities->[xss_hash]]] | Set the CSRF cookie. | Maybe it's an idea to declare a function that checks if HTTPS is true or false... |
@@ -45,9 +45,17 @@ public class CloseSubcommand implements Callable<Void> {
@Override
public Void call() throws Exception {
try (ScmClient scmClient = parent.getParent().createScmClient()) {
- parent.getParent().checkContainerExists(scmClient, containerId);
+ checkContainerExists(scmClient, containerId);
scmClient.closeContainer(containerId);
return null;
}
}
+
+ public static void checkContainerExists(ScmClient scmClient, long containerId)
+ throws IOException {
+ ContainerInfo container = scmClient.getContainer(containerId);
+ if (container == null) {
+ throw new IllegalArgumentException("No such container " + containerId);
+ }
+ }
}
| [CloseSubcommand->[call->[closeContainer,checkContainerExists,createScmClient]]] | This method is called when the container is not found. | I think this belongs more to `ContainerCommands`, rather than one of the container-specific subcommands. |
@@ -479,11 +479,10 @@ function notifier_run(&$argv, &$argc){
if ($relocate)
$r = $recipients_relocate;
else
- $r = q("SELECT * FROM `contact` WHERE `id` IN (%s) AND NOT `blocked` AND NOT `pending` AND NOT `archive`",
+ $r = q("SELECT * FROM `contact` WHERE `id` IN (%s) AND NOT `blocked` AND NOT `pending` AND NOT `archive`".$sql_extra,
dbesc($recip_str)
);
-
$interval = ((get_config('system','delivery_interval') === false) ? 2 : intval(get_config('system','delivery_interval')));
// If we are using the worker we don't need a delivery interval
| [notifier_run->[get_curl_code,set_baseurl,get_hostname]] | This function is the main function that is invoked when the notifier is invoked. It is the This function is used to fetch a single record from the database. finds all contacts that are not self and not self This function is used to avoid race conditions with deletion of items. | Standards: Please add braces to this conditional statement. |
@@ -0,0 +1,17 @@
+# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyCovCore(PythonPackage):
+ """plugin core for use by pytest-cov, nose-cov and nose2-cov"""
+
+ homepage = "https://github.com/schlamar/cov-core"
+ url = "https://pypi.io/packages/source/c/cov-core/cov-core-1.15.0.tar.gz"
+
+ version('1.15.0', sha256='4a14c67d520fda9d42b0da6134638578caae1d374b9bb462d8de00587dba764c')
+
+ depends_on('py-coverage@3.6:', type=('build', 'run'))
| [No CFG could be retrieved] | No Summary Found. | Missing dependency on setuptools |
@@ -40,7 +40,9 @@ class BeamSearchDecoder(Adapter):
),
'softmaxed_probabilities': BoolField(
optional=True, default=False, description="Indicator that model uses softmax for output layer "
- )
+ ),
+ 'logits_output': StringField(optional=True, description='Logits output layer name'),
+ 'custom_label_map': StringField(optional=True, description='Label map')
})
return parameters
| [LPRAdapter->[process->[decode]],CTCGreedySearchDecoder->[process->[decode]],BeamSearchDecoder->[decode->[log_sum_exp,make_new_beam]]] | Return a dictionary of parameters for the CTC. | how it should be used? is it path to file with label map or just string or list of tokens? please note that as the rule, we use dictionary as label map |
@@ -155,6 +155,11 @@ public abstract class ZeppelinSparkClusterTest extends AbstractTestRestApi {
@Test
public void scalaOutputTest() throws IOException, InterruptedException {
+ if (!isHadoopVersionMatch()) {
+ LOGGER.info("Hadoop version mismatch, skip test");
+ return;
+ }
+
String noteId = null;
try {
// create new note
| [ZeppelinSparkClusterTest->[scalaOutputTest->[waitForRunning,waitForFinish],testSparkZeppelinContextDynamicForms->[waitForFinish],verifySparkVersionNumber->[waitForFinish],zRunTest->[waitForFinish],pySparkTest->[waitForFinish]]] | Test method for spark - notebook notebook. This method is called when a note is added to the notebook. It will wait for all. | IMO this would be better by using JUnit Assumption APIs, e.g. `assumeThat(isHadoopVersionMatch(), "Hadoop version mismatch, skipping the test")`. No need of the `return;` |
@@ -91,7 +91,9 @@ class ClientBase:
kwargs.setdefault('new_nonce_url', getattr(self.directory, 'newNonce'))
return self.net.post(*args, **kwargs)
- def update_registration(self, regr, update=None):
+ def update_registration(self, regr: messages.RegistrationResource,
+ update: Optional[Union[Dict[str, Any], messages.Registration]] = None
+ ) -> messages.RegistrationResource:
"""Update registration.
:param messages.RegistrationResource regr: Registration Resource.
| [ClientV2->[new_order->[_authzr_from_response,_post],revoke->[_revoke],poll->[_authzr_from_response],finalize_order->[_post],poll_authorizations->[_authzr_from_response],_get_v2_account->[_post],new_account->[_regr_from_response,_post],query_registration->[_regr_from_response,_post],_post_as_get->[_post]],Client->[revoke->[_revoke],agree_to_tos->[update_registration],poll->[_authzr_from_response],request_challenges->[_authzr_from_response,_post],check_cert->[_get_cert],fetch_chain->[_get_cert],refresh->[check_cert],query_registration->[_send_recv_regr],request_domain_challenges->[request_challenges],poll_and_request_issuance->[retry_after,poll,request_issuance],request_issuance->[_post],register->[_regr_from_response,_post]],ClientNetwork->[_get_nonce->[_check_response,_add_nonce,head],_post_once->[_check_response,_wrap_in_jws,_send_request,_add_nonce,_get_nonce],get->[_check_response,_send_request],head->[_send_request]],BackwardsCompatibleClientV2->[new_order->[request_domain_challenges],revoke->[revoke],finalize_order->[fetch_chain,request_issuance],new_account_and_tos->[agree_to_tos,_assess_tos,new_account,register],__init__->[ClientV2,Client]],ClientBase->[_revoke->[_post],update_registration->[_send_recv_regr],deactivate_registration->[update_registration],_send_recv_regr->[_regr_from_response],deactivate_authorization->[_post],answer_challenge->[_post]],_ClientDeprecationModule] | Wrapper around self. net. post that adds the acme_version. and. | Would it be better to fix `deactivate_registration` than to have this union? It wouldn't be a compatibility break even if external users are passing in a dict, right? |
@@ -40,8 +40,12 @@ namespace Pulumi
// If no parent was provided, parent to the root resource.
LogExcessive($"Getting parent urn: t={type}, name={name}, custom={custom}, remote={remote}");
var parentUrn = options.Parent != null
- ? await options.Parent.Urn.GetValueAsync().ConfigureAwait(false)
- : await GetRootResourceAsync(type).ConfigureAwait(false);
+ ? await options.Parent.Urn.GetValueAsync(whenUnknown: "").ConfigureAwait(false)
+ : "";
+ if (parentUrn == "")
+ {
+ parentUrn = await GetRootResourceAsync(type).ConfigureAwait(false);
+ }
LogExcessive($"Got parent urn: t={type}, name={name}, custom={custom}, remote={remote}");
string? providerRef = null;
| [Deployment->[AddTransitivelyReferencedChildResourcesOfComponentResources->[AddTransitivelyReferencedChildResourcesOfComponentResources]]] | Asynchronously prepares a resource. Wait for all dependencies to be processed. This method creates a PrepareResult object that will log a warning if the object is not found. | This seemed like a reasonable thing to default to branch 2 when options.Parent is not null but unknown; however I'm not sure. Perhaps better to revert to original behavior simply with `default!`. |
@@ -0,0 +1,9 @@
+module DataUpdateScripts
+ class BackfillBroadcastableTypeForBroadcasts
+ def run
+ Broadcast.where(broadcastable_type: nil).each do |cast|
+ cast.update!(broadcastable_type: broadcast.type_of)
+ end
+ end
+ end
+end
| [No CFG could be retrieved] | No Summary Found. | this should probably use `find_each` but as I don't think we have so much data it's not a blocker |
@@ -137,6 +137,14 @@ void AddCustomStrategiesToPython()
(
"EigensolverDynamicScheme", init<>() )
;
+
+ // Explicit Central Differences Scheme Type
+ class_< ExplicitCentralDifferencesSchemeType,
+ bases< BaseSchemeType >, boost::noncopyable >
+ (
+ "ExplicitCentralDifferencesScheme", init< const double, const double, const double>() )
+ .def("Initialize", &ExplicitCentralDifferencesScheme<SparseSpaceType, LocalSpaceType>::Initialize)
+ ;
//********************************************************************
//*******************CONVERGENCE CRITERIA CLASSES*********************
| [AddCustomStrategiesToPython->[]] | Add custom strategies to the Python interpreter. Residual - Based Arc Length Strategy and Residual - Based Relaxation Scheme. Package containing all of the base scheme classes for a sequence of non - duplicate types. | this is already exposed by the baseclass |
@@ -1125,8 +1125,16 @@ func validateAppautoscalingCustomizedMetricSpecificationStatistic(v interface{},
func validateAppautoscalingPredefinedMetricSpecification(v interface{}, k string) (ws []string, errors []error) {
validMetrics := []string{
- "DynamoDBReadCapacityUtilization",
- "DynamoDBWriteCapacityUtilization",
+ applicationautoscaling.MetricTypeAlbrequestCountPerTarget,
+ applicationautoscaling.MetricTypeDynamoDbreadCapacityUtilization,
+ applicationautoscaling.MetricTypeDynamoDbwriteCapacityUtilization,
+ applicationautoscaling.MetricTypeEc2spotFleetRequestAverageCpuutilization,
+ applicationautoscaling.MetricTypeEc2spotFleetRequestAverageNetworkIn,
+ applicationautoscaling.MetricTypeEc2spotFleetRequestAverageNetworkOut,
+ applicationautoscaling.MetricTypeEcsserviceAverageCpuutilization,
+ applicationautoscaling.MetricTypeEcsserviceAverageMemoryUtilization,
+ applicationautoscaling.MetricTypeRdsreaderAverageCpuutilization,
+ applicationautoscaling.MetricTypeRdsreaderAverageDatabaseConnections,
}
metric := v.(string)
for _, o := range validMetrics {
| [MatchString,Sprintf,Contains,NormalizeJsonString,ToLower,String,Parse,Errorf,ParseCIDR,MustCompile,HasSuffix,Query] | validateAppautoscalingServiceNamespace validates a service namespace and returns an error if the service namespace validateConfigRuleSourceOwner - validates a config key for the type. | Something tells me this list is going to continue to grow and we just shouldn't be validating it at all, instead of playing catch up with Amazon... |
@@ -270,6 +270,18 @@ public class ThreadLeakCheckRule extends TestWatcher {
} else if (threadName.contains("ObjectCleanerThread")) {
// Required since upgrade to Netty 4.1.22 maybe because https://github.com/netty/netty/commit/739e70398ccb6b11ffa97c6b5f8d55e455a2165e
return true;
+ } else if (threadName.contains("RMI TCP")) {
+ return true;
+ } else if (threadName.contains("RMI Scheduler")) {
+ return true;
+ } else if (threadName.contains("RMI RenewClean")) {
+ return true;
+ } else if (threadName.contains("Signal Dispatcher")) {
+ return true;
+ } else if (threadName.contains("ForkJoinPool.commonPool")) {
+ return true;
+ } else if (threadName.contains("GC Daemon")) {
+ return true;
} else {
for (StackTraceElement element : thread.getStackTrace()) {
if (element.getClassName().contains("org.jboss.byteman.agent.TransformListener")) {
| [ThreadLeakCheckRule->[DumbReference->[finalize->[finalize]]]] | Checks if a thread is expected to be run. B11ffa97c6b5f55e455a2165e. | Are you sure you're not hiding a thread leakage? can't you just stop the RMI Stub on this case? |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.