patch
stringlengths
18
160k
callgraph
stringlengths
4
179k
summary
stringlengths
4
947
msg
stringlengths
6
3.42k
@@ -21,6 +21,6 @@ class AddressBookForm(forms.ModelForm): user_id=self.instance.user_id, alias=self.cleaned_data.get('alias') ).exclude(address=self.instance.address_id).exists(): self._errors['alias'] = self.error_class( - ['You are already using such alias for another address']) + [_('You are already using such alias for another address')]) return self.cleaned_data
[AddressBookForm->[clean->[error_class,filter,super]]]
Check if there is a duplicate alias for this user or if there is another alias for this.
Alias is a poor name for this field. I am not really sure we need the field at all (the UI does its best to always show you the full address).
@@ -150,7 +150,13 @@ class ShortcutDialog extends WidgetDialog { onchange: () => { if (this.dialog.get_value("type") == "DocType") { this.dialog.fields_dict.link_to.get_query = () => { - return { filters: { istable: false } }; + return { + query: "frappe.core.report.permitted_documents_for_user.permitted_documents_for_user.query_doctypes", + filters: { + istable: false, + user: frappe.session.user + } + }; }; } else { this.dialog.fields_dict.link_to.get_query = null;
[No CFG could be retrieved]
This method returns an array of fields that can be used to display a ShortcutDialog. This function is called when the user clicks on a DocType.
`istable: false` is not used in `query_doctypes`. You may need to update `query_doctypes` method if you need this filter.
@@ -31,4 +31,16 @@ const tiny_rom_entry *k573mcr_device::device_rom_region() const return ROM_NAME( k573mcr ); } + +INPUT_PORTS_START( k573mcr_meta_controls ) + PORT_START("META") + PORT_BIT( 0x01, IP_ACTIVE_LOW, IPT_SERVICE1) PORT_TOGGLE PORT_NAME("Insert/Eject Memory Card 1") + PORT_BIT( 0x02, IP_ACTIVE_LOW, IPT_SERVICE2) PORT_TOGGLE PORT_NAME("Insert/Eject Memory Card 2") +INPUT_PORTS_END + +ioport_constructor k573mcr_device::device_input_ports() const +{ + return INPUT_PORTS_NAME(k573mcr_meta_controls); +} + DEFINE_DEVICE_TYPE(KONAMI_573_MEMORY_CARD_READER, k573mcr_device, "k573mcr", "Konami 573 Memory Card Reader")
[No CFG could be retrieved]
region Public API.
I implemented meta controls for the memory card reader to simulate the action of inserting and ejecting the memory card slot. Is this ok? The intent was to make interactions like ending a credit in DDR smoother when it tells you to eject your card. Instead of waiting for the counter to tick down, it's possible to press the toggle button to eject your card and then press the button again to insert it, like you would on a real machine.
@@ -78,6 +78,7 @@ public class SearchAction implements QProfileWsAction { .setSince("5.2") .setDescription("Search quality profiles") .setHandler(this) + .setChangelog(new Change("6.5", format("The parameters '%s', '%s' and '%s' can be combined without any constraint", PARAM_DEFAULTS, PARAM_PROJECT, PARAM_LANGUAGE))) .setResponseExample(getClass().getResource("search-example.json")); action
[SearchAction->[doHandle->[buildResponse,load,validateRequest],writeParentFields->[getName,get,setParentKey,getParentKee,setParentName],writeLanguageFields->[getLanguage,setLanguageName,setLanguage,getName],toSearchWsRequest->[param,setLanguage],buildResponse->[toMap,writeParentFields,setNullable,setIsBuiltIn,identity,setActiveDeprecatedRuleCount,isDefault,getActiveDeprecatedRuleCount,getOrganizationUuid,getKey,isBuiltIn,getLastUsed,getParentKee,setProjectCount,getUserUpdatedAt,setLastUsed,formatDateTime,setActiveRuleCount,getActiveRuleCount,getProfiles,getRulesUpdatedAt,setKey,getKee,addProfilesBuilder,setIsInherited,setUserUpdatedAt,getName,getProjectCount,build,writeLanguageFields,newBuilder,setOrganization,collect,setIsDefault],validateRequest->[getLanguage,checkRequest,getDefaults,getProjectKey,getProfileName],getProject->[selectByKey,projectUuid,isRootProject,selectOrFailByUuid,checkFoundWithOptional],define->[getResource,setExampleValue,setPossibleValues,setBooleanPossibleValues,getLanguageKeys,setSince,setResponseExample],load->[IllegalArgumentException,getOrganizationByKey,orElseThrow,equals,toList,IllegalStateException,getOrganizationUuid,setDefaultProfileKeys,getKey,selectExistingQProfileUuids,getUuid,format,findProfiles,collect,openSession,getProjectKey,getOrganizationKey,getProject],handle->[doHandle,toSearchWsRequest,writeProtobuf]]]
This method defines the missing configuration for a specific key.
The Change (or at least the String) could be created once as a private static field since it's immutable.
@@ -18,5 +18,6 @@ final class OperationType public const ITEM = 'item'; public const COLLECTION = 'collection'; public const SUBRESOURCE = 'subresource'; + public const RESOURCE = 'resource'; public const TYPES = [self::ITEM, self::COLLECTION, self::SUBRESOURCE]; }
[No CFG could be retrieved]
Enumerate all the items in the system.
item, collection and subresources are "resources." What about "metadata" for instance?
@@ -343,7 +343,8 @@ class BucketSentenceIter : public DataIter { chars.push_back(c); } } - return {map, chars}; + // Note: Can't use {} because this would hit the explicit constructor + return tuple<unordered_map<wchar_t, mx_float>, vector<wchar_t>>(map, chars); } vector<vector<mx_float>> convertTextToSequences(const wstring& content, wchar_t spliter) {
[train->[LoadCheckpoint,NDArray,OutputPerplexity,SaveCheckpoint,saveCharIndices,LSTMUnroll,characterSize,maxSequenceLength],predictWithBuiltInRNNOp->[LSTMWithBuiltInRNNOp,LoadCheckpoint,NDArray],BucketSentenceIter->[void->[shuffle]],main->[predict,trainWithBuiltInRNNOp,predictWithBuiltInRNNOp,train],LSTMUnroll->[LSTM],predict->[LSTMUnroll,LoadCheckpoint,NDArray],trainWithBuiltInRNNOp->[LoadCheckpoint,RNNXavier,NDArray,OutputPerplexity,SaveCheckpoint,LSTMWithBuiltInRNNOp,saveCharIndices,characterSize,maxSequenceLength]]
load chars from file and return unordered_map chars ;.
My gcc-5.4 won't allow this
@@ -236,6 +236,17 @@ dss_tgt_nr_get(int ncores, int nr) return nr_default; } +static bool +enable_target_oversubscribe(void) +{ + bool oversub = false; + + if (getenv("ENABLE_TARGET_OVERSUBSCRIBE") == NULL) + return oversub; + d_getenv_bool("ENABLE_TARGET_OVERSUBSCRIBE", &oversub); + return oversub; +} + static int dss_topo_init() {
[int->[setenv,crt_finalize,printf,hwloc_bitmap_asprintf,hwloc_bitmap_set,daos_errno2der,hwloc_get_nbobjs_by_depth,hwloc_get_type_depth,crt_group_size,pl_fini,drpc_init,server_init_state_fini,hwloc_topology_load,D_ERROR,free,set_abt_max_num_xstreams,strnlen,strtoul,ABT_init,ABT_cond_create,pl_init,server_init_state_wait,strdup,dss_module_load,daos_fini,ABT_mutex_free,daos_debug_init,strsep,D_PRINT,modules_load,dss_topo_init,daos_hhash_fini,dss_tgt_nr_get,crt_group_config_save,D_GOTO,dss_module_fini,D_ASSERT,atoi,hwloc_get_obj_by_depth,d_hhash_set_ptrtype,daos_crt_init_opt_get,strlen,abt_init,abt_max_num_xstreams,dbtree_class_register,crt_group_config_path_set,dss_module_unload_all,D_FREE,hwloc_bitmap_isincluded,crt_group_rank,register_dbtree_classes,dss_abterr2der,D_INFO,max,gethostname,ds_iv_fini,getenv,getopt_long,crt_init_opt,daos_init,dss_srv_init,dss_module_setup_all,dss_module_init,dss_pmixless,hwloc_bitmap_alloc,daos_hhash_init,sprintf,D_ASPRINTF,sigaction,strcmp,ABT_mutex_create,hwloc_topology_init,server_init_state_init,usage,hwloc_get_nbobjs_by_type,D_ASSERTF,ds_iv_init,daos_debug_fini,getpid,snprintf,dss_srv_fini,drpc_fini,abt_fini],dss_pmixless->[d_getenv_bool,getenv],main->[perror,sigwait,dss_dump_ABT_state,sigaddset,parse,sigfillset,pthread_sigmask,server_fini,sigemptyset,D_ERROR,sigdelset,exit,server_init,daos_register_sighand],dss_self_rank->[D_ASSERTF,crt_group_rank],void->[dss_module_fini,strerror,backtrace_symbols_fd,ds_iv_fini,daos_fini,backtrace,ABT_mutex_free,crt_finalize,fprintf,ABT_mutex_lock,ABT_cond_free,fileno,dss_module_unload_all,ABT_mutex_unlock,pl_fini,sigaction,server_init_state_fini,D_ERROR,dss_module_cleanup_all,memset,daos_debug_fini,daos_hhash_fini,getpid,raise,ABT_cond_wait,ABT_finalize,dss_srv_fini,drpc_fini,abt_fini,exit,D_INFO],dss_init_state_set->[ABT_mutex_lock,ABT_mutex_unlock,D_INFO,ABT_cond_broadcast]]
This function is used to get the number of target threads for DSS. This function is used to get the objects that are available in the DSS topology. Allocate a core allocation bitmap.
the above two lines is not needed as d_get_env_bool() can handle it. and, tgt_oversub_enabled() maybe more align with naming rule.
@@ -1307,6 +1307,7 @@ namespace System.Diagnostics.Tracing Debug.Assert(m_eventData != null); // You must have initialized this if you enabled the source. try { + m_activeWritesCount++; ref EventMetadata metadata = ref m_eventData[eventId]; EventOpcode opcode = (EventOpcode)metadata.Descriptor.Opcode;
[No CFG could be retrieved]
This method writes an event with the specified related activity id. OnStop - This is called when the activity is stopped.
In order for the lockless two volatile system to work the writer thread needs to increase m_activeWritesCount count, then immediately check m_eventSourceEnabled and bail if it is false
@@ -0,0 +1,13 @@ +package client + +// PreallocTimeseries is a TimeSeries which preallocs slices on Unmarshall. +type PreallocTimeseries struct { + TimeSeries +} + +// Unmarshal implements proto.Message. +func (p *PreallocTimeseries) Unmarshal(dAtA []byte) error { + p.Labels = make([]LabelPair, 0, 20) + p.Samples = make([]Sample, 0, 10) + return p.TimeSeries.Unmarshal(dAtA) +}
[No CFG could be retrieved]
No Summary Found.
Do we expect the call have more than one sample per series, usually?
@@ -234,9 +234,14 @@ int_rprimitive = RPrimitive('builtins.int', is_unboxed=True, is_refcounted=True, short_int_rprimitive = RPrimitive('short_int', is_unboxed=True, is_refcounted=False, ctype='CPyTagged') # type: Final -# low level integer (corresponds to C's 'int'). -c_int_rprimitive = RPrimitive('c_int', is_unboxed=True, is_refcounted=False, +# low level integer (corresponds to C's 'int's). +c_int32_rprimitive = RPrimitive('c_int32', is_unboxed=True, is_refcounted=False, + ctype='int') # type: Final +c_int64_rprimitive = RPrimitive('c_int64', is_unboxed=True, is_refcounted=False, ctype='Py_ssize_t') # type: Final +# integer alias +c_int_rprimitive = c_int32_rprimitive +c_pyssize_t_rprimitive = c_int64_rprimitive # Floats are represent as 'float' PyObject * values. (In the future # we'll likely switch to a more efficient, unboxed representation.)
[RPrimitive->[accept->[visit_rprimitive]],RTuple->[deserialize->[deserialize_type,RTuple],serialize->[serialize],accept->[visit_rtuple],__init__->[TupleNameVisitor,accept]],RType->[__str__->[short_name],short_name->[short_name]],is_optional_type->[optional_value_type],TupleNameVisitor->[visit_rtuple->[accept]],RVoid->[accept->[visit_rvoid]],is_sequence_rprimitive->[is_str_rprimitive,is_list_rprimitive,is_tuple_rprimitive],RUnion->[serialize->[serialize],accept->[visit_runion],deserialize->[deserialize_type,RUnion]],RInstance->[accept->[visit_rinstance],setter_index->[getter_index],attr_type->[attr_type],struct_name->[struct_name]],RPrimitive,RTuple,RVoid]
A tagged pointer is represented as a tagged PyObject and can be coerced to the corresponding Return 2 - tuple of type CObjectError.
I have a few nits. I think that we can leave the `c_` prefix out, since "32-bit integer" is a fairly generic concept not specific to C and not likely to be confused with Python types. Also, the correspondence to C `int` is a bit of an accident (and not generally true, on all possible platforms). It would be better to use `int32_t`, which happens to be the same as `int` on all supported platforms.
@@ -533,3 +533,16 @@ def add_address_to_admin(email): address = create_address() user = User.objects.get(email=email) store_user_address(user, address, True, True) + + +def create_fake_collection(): + collection = get_or_create_collection(name='%s collection' % fake.word()) + for product in Product.objects.all().order_by('?')[:4]: + collection.products.add(product) + return collection + + +def create_collections(how_many=2): + for dummy in range(how_many): + collection = create_fake_collection() + yield 'Collection: %s' % (collection,)
[create_product_images->[create_product_image],create_fake_user->[create_address,get_email],create_fake_order->[create_delivery_group,create_address,get_email,create_payment,create_order_lines],create_product_sales->[create_fake_sale],create_product_type_with_attributes->[create_attributes_and_values],create_delivery_group->[price,shipping_method],create_order_lines->[create_order_line],add_address_to_admin->[create_address],create_users->[create_fake_user],create_groups->[create_fake_group],create_products_by_schema->[create_product_types_by_schema,create_products_by_type],create_shipping_methods->[price],create_product->[price],create_product_types_by_schema->[create_product_type_with_attributes],create_products_by_type->[get_variant_combinations,get_price_override,set_product_attributes],create_variant->[create_stock],create_orders->[create_fake_order]]
Adds an email address to the admin.
We can skip `all()`, as that's the default
@@ -187,6 +187,12 @@ public interface HoodieTimeline extends Serializable { */ Stream<HoodieInstant> getInstants(); + /** + * @return Get the stream of completed instants in reverse order + * TODO Change code references to getInstants() that reverse the instants later on to use this method instead. + */ + Stream<HoodieInstant> getReverseOrderedInstants(); + /** * @return true if the passed in instant is before the first completed instant in the timeline */
[makeInflightSavePointFileName->[join],getCommitFromCommitFile->[split],getCompactionRequestedInstant->[HoodieInstant],makeCommitFileName->[join],getCompletedInstant->[getTimestamp,getAction,HoodieInstant],makeInflightDeltaFileName->[join],makeRestoreFileName->[join],compareTimestamps->[test],makeRollbackFileName->[join],makeInflightCleanerFileName->[join],makeSavePointFileName->[join],makeInflightCompactionFileName->[join],makeInflightRollbackFileName->[join],makeFileNameAsComplete->[replace],makeRequestedCompactionFileName->[join],makeInflightRestoreFileName->[join],makeFileNameAsInflight->[join],makeInflightCommitFileName->[join],makeCleanerFileName->[join],getInflightInstant->[getTimestamp,getAction,HoodieInstant],getCompactionInflightInstant->[HoodieInstant],compareTo,join]
Returns true if the given timestamp is before the timeline starts.
@bhasudha can you file a ticket for this work for later ? and may be assign a "newbie" component
@@ -500,3 +500,10 @@ class OrderEvent(models.Model): def __repr__(self): return f"{self.__class__.__name__}(type={self.type!r}, user={self.user!r})" + + +class Invoice(models.Model): + order = models.ForeignKey(Order, on_delete=models.CASCADE) + number = models.CharField(max_length=64) + created = models.DateTimeField(auto_now_add=True) + url = models.URLField(max_length=256)
[OrderLine->[is_digital->[is_digital]],Order->[can_capture->[can_capture,get_last_payment],get_payment_status_display->[get_last_payment],can_void->[can_void,get_last_payment],total_authorized->[get_last_payment],total_captured->[get_last_payment],can_refund->[can_refund,get_last_payment],get_payment_status->[get_last_payment]]]
Return a string representation of the object.
should this be cascade?
@@ -28,8 +28,14 @@ class OgnDistributor { // Transfer NUM_TOKENS to the specified wallet. const value = this.token.toNaturalUnit(NUM_TOKENS) const contractAddress = this.token.contractAddress(networkId) - const receipt = await this.token.credit(networkId, wallet, value) - const txHash = receipt.transactionHash + const txHash = await this.token.credit(wallet, value) + const { status } = await this.token.waitForTxConfirmation(txHash, { + numBlocks: NumBlockConfirmation, + timeoutSec: ConfirmationTimeoutSec + }) + if (status !== 'confirmed') { + throw new Error(`Failure. status=${status} txHash=${txHash}`) + } logger.info(`${NUM_TOKENS} OGN -> ${wallet} TxHash=${txHash}`) // Send response back to client.
[No CFG could be retrieved]
OGN token credit process.
I'm not sure this needs to be dealt with now, but you might want to consider what happens in production if the tx takes longer than 2 minutes to get mined. For instance, will this record the error, assume it won't get mined(but it does), then retry later creating multiple transactions?
@@ -133,14 +133,14 @@ module Api user.attributes = attributes unless user.save # Some error occurred - render 'shared/http_status', :locals => { :code => '500', :message => - HttpStatusHelper::ERROR_CODE['message']['500'] }, :status => 500 + render 'shared/http_status', locals: { code: '500', message: + HttpStatusHelper::ERROR_CODE['message']['500'] }, status: 500 return end # Otherwise everything went alright. - render 'shared/http_status', :locals => {:code => '200', :message => - HttpStatusHelper::ERROR_CODE['message']['200']}, :status => 200 + render 'shared/http_status', locals: {code: '200', message: + HttpStatusHelper::ERROR_CODE['message']['200']}, status: 200 end # Process the parameters passed for user creation and update
[UsersController->[create->[nil?,render,new,downcase,process_attributes,type,has_missing_params?,find_by_user_name,save,delete],show->[nil?,xml,render,respond_to,to_json,fields_to_render,json,to_xml,find_by_id],process_attributes->[each,find_by_name,id,blank?],update->[nil?,render,attributes,process_attributes,blank?,find_by_user_name,save,find_by_id],destroy->[render],index->[xml,render,respond_to,to_json,fields_to_render,json,to_xml,get_collection]]]
Updates a with the given parameters. If the user is not found it will render an.
Align the elements of a hash literal if they span more than one line.<br>Space inside } missing.
@@ -599,10 +599,6 @@ ROM_START( hp9k310 ) ROM_LOAD16_BYTE( "1818-3771.bin", 0x000001, 0x008000, CRC(b9e4e3ad) SHA1(ed6f1fad94a15d95362701dbe124b52877fc3ec4) ) ROM_LOAD16_BYTE( "1818-3772.bin", 0x000000, 0x008000, CRC(a3665919) SHA1(ec1bc7e5b7990a1b09af947a06401e8ed3cb0516) ) - ROM_REGION( 0x800, IOCPU_TAG, 0 ) - ROM_LOAD( "1820-4784_1.bin", 0x000000, 0x000800, CRC(e929044a) SHA1(90849a10bdb8c6e38e73ce027c9c0ad8b3956b1b) ) - ROM_LOAD( "1820-4784_2.bin", 0x000000, 0x000800, CRC(8defcf50) SHA1(d3abfea468a43db7c2369500a3e390e77a8e22e6) ) - ROM_REGION( 0x4000, "graphics", ROMREGION_ERASEFF | ROMREGION_BE ) ROM_LOAD16_BYTE( "98544_1818-1999.bin", 0x000000, 0x002000, CRC(8c7d6480) SHA1(d2bcfd39452c38bc652df39f84c7041cfdf6bd51) ) ROM_END
[No CFG could be retrieved]
Configuration for HP9k320 A list of all possible hash regions for all of the NICs. SHA - 1 hash.
Is `1820-4784_2.bin` a legitimate alternate 8042 program? If so, it should be kept as an alternate BIOS for the `human_interface` device, not simply removed from MAME.
@@ -356,13 +356,12 @@ final public class GenerateExtensionConfigurationDoc { configItem.setWithinAMap(true); } } else { - type = typeArguments.get(0).toString(); + // FIXME: I assume this is for Optional<T> + TypeMirror realTypeMirror = typeArguments.get(0); + type = simpleTypeToString(realTypeMirror); } } else { - final String knownGenericType = getKnownGenericType(declaredType); - if (knownGenericType != null) { - type = knownGenericType; - } + type = simpleTypeToString(declaredType); } }
[GenerateExtensionConfigurationDoc->[recordConfigItems->[recordConfigItems]]]
Method recordConfigItems. Adds a config item to the list of config items.
Thanks for spotting this, it is for `Optional<T>` and `List<T>`.
@@ -37,7 +37,7 @@ class ManualAuthenticator(common.Plugin): Make sure your web server displays the following content at {uri} before continuing: -{achall.token} +{validation} Content-Type header MUST be set to {ct}.
[ManualAuthenticator->[perform->[append,_perform_single],cleanup->[killpg,debug,poll,conf,rmtree],add_parser_arguments->[add],_notify_and_wait->[raw_input,write],_perform_single->[debug,quote,Error,poll,sleep,simple_verify,public_key,uri,_notify_and_wait,format,gen_response_and_validation,Popen,conf,encode,json_dumps],more_info->[replace],__init__->[conf,super,mkdtemp],implements,classProvides],getLogger]
Imports the given object and creates the necessary components. The template for the .
(I know this is irrelevant to this particular PR) This can probably be cleaned up to make it more clear. In order to prove ownership of the webserver. Please place this file on the target webserver at .well-known.... If you don't have a webserver currently installed.... run....
@@ -235,6 +235,13 @@ ipcMain.on('ELECTRON_BROWSER_REQUIRE', function (event, module) { } }) +ipcMain.on('ELECTRON_BROWSER_READ_FILE', function (event, file) { + fs.readFile(file, (err, data) => { + if (err) event.returnValue = {err: err.message} + else event.returnValue = {data: data.toString()} + }) +}) + ipcMain.on('ELECTRON_BROWSER_GET_BUILTIN', function (event, module) { try { event.returnValue = valueToMeta(event.sender, electron[module])
[No CFG could be retrieved]
This function is used to call remote functions that need to be handled by the browser. onBrowserEvent - > Get meta data from the object registry.
The buffer can be sent directly, there is no need to convert it to string.
@@ -29,6 +29,12 @@ public class TuningConfigs // In the main tuningConfig class constructor, we set the maxBytes to 0 if null to avoid setting // maxBytes to max jvm memory of the process that starts first. Instead we set the default based on // the actual task node's jvm memory. - return maxBytesInMemory == 0 ? TuningConfig.DEFAULT_MAX_BYTES_IN_MEMORY : maxBytesInMemory; + long newMaxBytesInMemory = maxBytesInMemory; + if (maxBytesInMemory == 0) { + newMaxBytesInMemory = TuningConfig.DEFAULT_MAX_BYTES_IN_MEMORY; + } else if (maxBytesInMemory == -1) { + newMaxBytesInMemory = Long.MAX_VALUE; + } + return newMaxBytesInMemory; } }
[No CFG could be retrieved]
Returns the maxBytes in memory or the default value if null.
`else if maxBytesInMemory < 0`?
@@ -48,7 +48,11 @@ module Dependabot "/_apis/git/repositories/" + source.unscoped_repo + "/stats/branches?name=" + branch) - JSON.parse(response.body).fetch("commit").fetch("commitId") + commit = JSON.parse(response.body).fetch("commit", nil) + + raise NotFound unless commit + + commit.fetch("commitId") end def fetch_default_branch(_repo)
[Azure->[truncate_pr_description->[length,force_encoding],fetch_default_branch->[unscoped_repo,project,get,api_endpoint,gsub,organization],fetch_repo_contents_treeroot->[fetch,unscoped_repo,empty?,project,get,api_endpoint,organization],branch->[unscoped_repo,project,get,api_endpoint,first,organization],create_commit->[unscoped_repo,compact,project,to_json,api_endpoint,post,organization],fetch_repo_contents->[fetch,fetch_repo_contents_treeroot,unscoped_repo,project,get,api_endpoint,organization],auth_header_for->[ascii_only?,include?,delete],get->[fetch,status,excon_defaults,retry_connection_failures,get,raise],pull_requests->[fetch,unscoped_repo,project,get,api_endpoint,organization],post->[fetch,status,excon_defaults,merge,raise,post],commits->[fetch,unscoped_repo,empty?,project,get,api_endpoint,organization],fetch_commit->[fetch,unscoped_repo,project,get,api_endpoint,organization],create_pull_request->[truncate_pr_description,unscoped_repo,project,to_json,api_endpoint,post,organization,map],for_source->[new,hostname,find],initialize->[fetch,auth_header_for],fetch_file_contents->[unscoped_repo,project,get,body,api_endpoint,organization],attr_reader,freeze],require]
fetch commit id for a given branch.
Could we instead check if the `response.status` is 400 instead of checking if the commit is present? It seems to align better with what we expect the API to do
@@ -98,8 +98,8 @@ namespace System.Threading internal AsyncLocalValueChangedArgs([AllowNull] T previousValue, [AllowNull] T currentValue, bool contextChanged) { - PreviousValue = previousValue; - CurrentValue = currentValue; + PreviousValue = previousValue!; + CurrentValue = currentValue!; ThreadContextChanged = contextChanged; } }
[AsyncLocalValueMap->[ManyElementAsyncLocalValueMap->[IAsyncLocalValueMap->[UnsafeStore]]]]
Extended version of the that allows to store the value of an object in the Execution IAsyncLocalValueMap Crea un objeto IAsyncLocalValueMap.
@cston why do we get a warning here? This should just be flowing the maybe null state.
@@ -155,6 +155,12 @@ class Qmcpack(CMakePackage): elif '~mpi' in spec: args.append('-DQMC_MPI=0') + # Default is parallel collective I/O enabled + if '+phdf5' in spec: + args.append('-DENABLE_PHDF5=1') + elif '~phdf5' in spec: + args.append('-DENABLE_PHDF5=0') + # Default is real-valued single particle orbitals if '+complex' in spec: args.append('-DQMC_COMPLEX=1')
[Qmcpack->[install->[working_dir,mkdirp,install_tree],cmake_args->[join_path,append,joined,extend,format,join],check->[msg,working_dir,ctest],patch->[filter_file],depends_on,conflicts,version,patch,on_package_attributes,variant,run_after]]
Return the arguments for CMake. A function to set the flags for a single - node . Add flags to the command line for missing missing header files.
I would use `else` here, it is a binary variant. Same for other if-statements in the `package.py`.
@@ -19,7 +19,7 @@ namespace System.Text.Json.SourceGeneration public string TypeRef { get; private set; } /// <summary> - /// The name of the public JsonTypeInfo<T> property for this type on the generated context class. + /// The name of the public JsonTypeInfo&lt;T&gt; property for this type on the generated context class. /// For example, if the context class is named MyJsonContext, and the value of this property is JsonMessage; /// then users will call MyJsonContext.JsonMessage to access generated metadata for the type. /// </summary>
[TypeGenerationSpec->[FastPathIsSupported->[Object,IDictionaryOfTKeyTValue,IDictionary,AllowNamedFloatingPointLiterals,ConverterInstantiationLogic,Dictionary,IReadOnlyDictionary,IsStringType,WriteAsString,IsObjectType,NumberHandling,ImmutableDictionary,NotApplicable],Initialize->[GetTypeInfoPropertyName,GetCompilableName,IsValueType],TryFilterSerializableProps->[CanUseGetter,Always,IsPublic,ClrName,HasJsonInclude,Add,Count,DefaultIgnoreCondition,RuntimePropertyName,Assert,IncludeFields,WhenWritingNull,IsProperty,CanBeNull,TryAdd],GenerationModeIsSpecified->[Default],GetImmutableEnumerableConstructingTypeName,ImmutableEnumerable,CreateRangeMethodName,Metadata,GetImmutableDictionaryConstructingTypeName,Assert,FastPathIsSupported,Serialization,GenerationModeIsSpecified,ImmutableDictionary]]
Creates a new type object that can be used to create a new object. The CollectionKeyMetadata object contains the metadata for the collection elements.
@carlossanlop we can't use cref's here since the source generator doesn't reference STJ. Would it perhaps make sense to use xref's though?
@@ -127,7 +127,9 @@ public class CloseContainerCommandHandler implements CommandHandler { break; } } catch (NotLeaderException e) { - LOG.debug("Follower cannot close container #{}.", containerId); + if (LOG.isDebugEnabled()) { + LOG.debug("Follower cannot close container #{}.", containerId); + } } catch (IOException e) { LOG.error("Can't close container #{}", containerId, e); } finally {
[CloseContainerCommandHandler->[getAverageRunTime->[get],getContainerCommandRequestProto->[setContainerID,exportCurrentSpan,getDefaultInstance,getUuidString,setTraceID,setCloseContainer,build,setEncodedToken,newBuilder,setDatanodeUuid,setCmdType],getInvocationCount->[get],handle->[getContainerCommandRequestProto,getPipelineID,getProto,isDebugEnabled,info,markContainerUnhealthy,markContainerForClose,getDatanodeDetails,getEncodedToken,submitRequest,getContainerID,getContainer,getContainerState,getController,getForce,debug,error,incrementAndGet,isExist,monotonicNow,closeContainer],AtomicLong,getLogger]]
Handle a close container command. Check if the container can be closed.
I don't think we need isDebugEnabled
@@ -116,6 +116,12 @@ public class ListingBasedRollbackHelper implements Serializable { .withDeletedFileResults(filesToDeletedStatus).build()); } case APPEND_ROLLBACK_BLOCK: { + // collect all log files that is supposed to be deleted with this rollback + Map<FileStatus, Long> writtenLogFileSizeMap = FSUtils.getAllLogFiles(metaClient.getFs(), + FSUtils.getPartitionPath(config.getBasePath(), rollbackRequest.getPartitionPath()), + rollbackRequest.getFileId().get(), HoodieFileFormat.HOODIE_LOG.getFileExtension(), + rollbackRequest.getLatestBaseInstant().get()).collect(Collectors.toMap(HoodieLogFile::getFileStatus, value -> value.getFileStatus().getLen())); + Writer writer = null; try { writer = HoodieLogFormat.newWriterBuilder()
[ListingBasedRollbackHelper->[generateHeader->[getTimestamp,put,valueOf,ordinal],performRollback->[size,max,getSimpleName,min,getRollbackParallelism,maybeDeleteAndCollectStats,collect,setJobStatus],maybeDeleteAndCollectStats->[generateHeader,HoodieIOException,getSparkContext,build,singletonMap,getPath,getTimestamp,IllegalStateException,appendBlock,mapToPair,getType,HoodieRollbackException,getFileStatus,close,getPartitionPath,deleteBaseAndLogFiles,HoodieCommandBlock,deleteBaseFiles],collectRollbackStats->[size,max,getSimpleName,min,getRollbackParallelism,maybeDeleteAndCollectStats,collect,setJobStatus],deleteBaseAndLogFiles->[getBasePath,getName,equals,getBaseCommitTimeFromLogPath,listStatus,endsWith,getFileExtension,isLogFile,getFs,getCommitTime,info,getPartitionPath,put,getPath,delete],deleteBaseFiles->[getBasePath,getName,equals,listStatus,getFileExtension,getFs,contains,getCommitTime,info,getPartitionPath,put,getPath,delete],getLogger]]
If doDelete is true then delete the base files and log files and collect the stats. This method is called when a rollback action fails.
i think we should guard the option variables here. `rollbackRequest.getFileId()`, `rollbackRequest.getLatestBaseInstant()` with a `isPresent()` check
@@ -625,8 +625,13 @@ def save_inference_model(dirname, main_program._distributed_lookup_table, main_program._endpoints) - if not os.path.isdir(dirname): + # when a pserver and a trainer running on the same machine, mkdir may conflict + try: os.makedirs(dirname) + except OSError as e: + if e.errno != errno.EEXIST: + raise + if model_filename is not None: model_basename = os.path.basename(model_filename) else:
[get_parameter_value_by_name->[get_parameter_value],save_persistables->[save_vars],load_vars->[_clone_var_in_block_,load_vars],load_inference_model->[load_persistables],load_persistables->[load_vars],save_vars->[save_vars,_clone_var_in_block_],save_params->[save_vars],save_inference_model->[append_fetch_ops,prepend_feed_ops,save_persistables],load_params->[load_vars],get_parameter_value->[_clone_var_in_block_,is_parameter]]
Save the given model of a given to a given directory. This function is called when the user has requested to infer the model. This function is called when a new node is created. It will create a new node with.
throw the original exception if the exception is not errno.EEXIST
@@ -344,7 +344,8 @@ class ReviewForm(happyforms.Form): self.fields['versions'].queryset = ( self.helper.addon.versions.distinct().filter( channel=amo.RELEASE_CHANNEL_LISTED, - files__status=amo.STATUS_PUBLIC).order_by('created')) + files__status__in=(amo.STATUS_PUBLIC, amo.STATUS_PENDING)). + order_by('created')) # For the canned responses, we're starting with an empty one, which # will be hidden via CSS.
[ThemeReviewForm->[save->[save]],AllAddonSearchForm->[clean_application_id->[version_choices_for_app_id]],ReviewForm->[NonValidatingChoiceField]]
Initialize the object with a c . Add all actions that are not in a group.
`STATUS_PENDING` is for LWT
@@ -505,7 +505,7 @@ export class FixedLayer { if (!fe.placeholder) { // Never been transfered before: ensure that it's properly configured. setStyle(element, 'pointer-events', 'initial'); - fe.placeholder = this.doc.createElement('i-amp-fp'); + fe.placeholder = this.ampdoc.win.document.createElement('i-amp-fp'); fe.placeholder.setAttribute('i-amp-fixedid', fe.id); setStyle(fe.placeholder, 'display', 'none'); }
[No CFG could be retrieved]
Transfer an element to a fixed positioning layer. Failed move the element to the fixed position layer. + .
a `createElement` wrapper on ampDoc would be nice, maybe a TODO/task issue?
@@ -416,7 +416,7 @@ func GetReviewerByIssueIDAndUserID(issueID, userID int64) (review *Review, err e func getReviewerByIssueIDAndUserID(e Engine, issueID, userID int64) (review *Review, err error) { review = new(Review) - if _, err := e.SQL("SELECT * FROM review WHERE id IN (SELECT max(id) as id FROM review WHERE issue_id = ? AND reviewer_id = ? AND type in (?, ?, ?))", + if _, err := e.SQL("SELECT * FROM review WHERE id IN (SELECT max(id) as id FROM review WHERE issue_id = ? AND reviewer_id = ? AND original_author_id = 0 AND type in (?, ?, ?))", issueID, userID, ReviewTypeApprove, ReviewTypeReject, ReviewTypeRequest). Get(review); err != nil { return nil, err
[LoadAttributes->[loadAttributes],LoadReviewer->[loadReviewer],loadAttributes->[loadIssue,loadCodeComments,loadReviewer],LoadCodeComments->[loadCodeComments],HTMLURL->[HTMLURL],loadReviewer,loadCodeComments,toCond]
Get latest review of each reviwer sorted in order of review. updated_unix ASC MarkReviewsAsNotStale marks reviews as not stale for a given issue and commit.
What's the reason behind the original_author_id = 0 lock here?
@@ -328,7 +328,6 @@ def read_resource(res: 'CustomResource', async def do_read(): try: - log.debug(f"preparing read: ty={ty}, name={name}, id={opts.id}") resolver = await prepare_resource(res, ty, True, False, props, opts, typ) # Resolve the ID that we were given. Note that we are explicitly discarding the list of
[convert_providers->[convert_providers],get_resource->[do_get->[prepare_resource],resource_output],register_resource->[do_register->[_translate_ignore_changes,prepare_resource,_translate_additional_secret_outputs],resource_output],prepare_resource->[ResourceResolverOperations],read_resource->[do_read->[prepare_resource,_translate_additional_secret_outputs],resource_output]]
Reads a single resource with a specific reserved ID. Reads a single non - secret - managed resource. Reads a resource.
Doesn't really add much new information beyond what was logged at the beginning of the function, and Node.js doesn't have this additional logging.
@@ -340,11 +340,12 @@ public class Maven extends Builder { } } + Set<String> sensitiveVars = build.getSensitiveBuildVariables(); + final VariableResolver<String> resolver = new Union<String>(new ByMap<String>(env), vr); + args.addKeyValuePairsFromPropertyString("-D", this.properties, resolver, sensitiveVars); + if (isInjectBuildVariables()) { - Set<String> sensitiveVars = build.getSensitiveBuildVariables(); - args.addKeyValuePairs("-D",build.getBuildVariables(),sensitiveVars); - final VariableResolver<String> resolver = new Union<String>(new ByMap<String>(env), vr); - args.addKeyValuePairsFromPropertyString("-D",this.properties,resolver,sensitiveVars); + args.addKeyValuePairs("-D", build.getBuildVariables(), sensitiveVars); } if (usesPrivateRepository())
[Maven->[perform->[DecideDefaultMavenCommand,isInjectBuildVariables,getSettings,getGlobalSettings,usesPrivateRepository,getMaven],getDescriptor->[getDescriptor],buildEnvVars->[buildEnvVars],DescriptorImpl->[getHelpFile->[getHelpFile],getInstallations->[],setInstallations->[]],MavenInstallation->[forNode->[MavenInstallation],isMaven2_1->[meetsMavenReqVersion],getExecutable->[call],forEnvironment->[MavenInstallation],DescriptorImpl->[getHelpFile->[],getInstallations->[getInstallations],setInstallations->[setInstallations]],getExists->[getExecutable]]]]
Performs the necessary tasks to execute the specified command. Checks if a target can be built based on the specified arguments.
Thanks sincerely for this fix. I was completely perplexed when my maven jobs were not honoring properties passed in the properties section, but were honoring them when passed in the JVM properties. I think this may be severe enough that it justifies an exception to the LTS policy so that it can be included in the next LTS release.
@@ -184,6 +184,10 @@ type SourceRepositoryEnumerator struct { Tester dockerfile.Tester } +// ErrNoLanguageDetected is the error returned when no language can be detected by all +// source code detectors. +var ErrNoLanguageDetected = fmt.Errorf("No language matched the source repository") + // Detect extracts source code information about the provided source repository func (e SourceRepositoryEnumerator) Detect(dir string) (*SourceRepositoryInfo, error) { info := &SourceRepositoryInfo{
[LocalPath->[String],RemoteURL,IsDockerBuild,ContextDir]
Detect returns a SourceRepositoryInfo for the given directory.
Seems that we favor `errors.New` in our code base.
@@ -37,7 +37,7 @@ class SuluSecurityListenerTest extends ProphecyTestCase { parent::setUp(); - $this->securityChecker = $this->prophesize('Sulu\Bundle\SecurityBundle\Permission\SecurityCheckerInterface'); + $this->securityChecker = $this->prophesize(SecurityCheckerInterface::class); $this->securityListener = new SuluSecurityListener($this->securityChecker->reveal()); $this->filterControllerEvent = $this->prophesize('Symfony\Component\HttpKernel\Event\FilterControllerEvent'); }
[SuluSecurityListenerTest->[testSubject->[willReturn,checkPermission,prophesize,onKernelController,reveal],testMethodPermissionMapping->[willReturn,prophesize,onKernelController,shouldHaveBeenCalled,reveal],testRestController->[willReturn,prophesize,onKernelController,shouldHaveBeenCalled,reveal],testLocale->[willReturn,prophesize,onKernelController,shouldHaveBeenCalled,reveal],testNonRestControllerAbstain->[willReturn,prophesize,onKernelController,reveal,shouldNotHaveBeenCalled],setUp->[reveal,prophesize],tearDown->[assertPostConditions]]]
Set up the object.
Didn't know that was a thing
@@ -119,14 +119,9 @@ class DeviceNDArrayBase(object): else: ptr = 0 - if array_core(self).flags['C_CONTIGUOUS']: - strides = None - else: - strides = tuple(self.strides) - return { 'shape': tuple(self.shape), - 'strides': strides, + 'strides': None if is_contiguous(self) else tuple(self.strides), 'data': (ptr, False), 'typestr': self.dtype.str, 'version': 2,
[DeviceNDArray->[reshape->[reshape],_do_setitem->[reshape,_assign_kernel,__getitem__,_default_stream,view],ravel->[ravel,_default_stream],__array__->[copy_to_host],_do_getitem->[__getitem__,view,_default_stream]],from_array_like->[DeviceNDArray,reshape],check_array_compatibility->[squeeze],DeviceNDArrayBase->[copy_to_host->[_default_stream],split->[_default_stream],copy_to_device->[_default_stream],to_host->[copy_to_host,_default_stream],transpose->[transpose],squeeze->[_default_stream,squeeze]],sentry_contiguous->[array_core],IpcArrayHandle->[close->[close],__exit__->[close],__enter__->[open],open->[DeviceNDArray,open]],verify_cuda_ndarray_interface->[requires_attr],auto_device->[sentry_contiguous,from_array_like,copy_to_device,from_record_like],require_cuda_ndarray->[is_cuda_ndarray],from_record_like->[DeviceRecord]]
Return dict with information about the nanoseconds in the array.
FWIW I think we could also just do `self.flags['C_CONTIGUOUS']`, which doesn't appear to use a CUDA context. Edit: This may simplify things in this PR a bit.
@@ -0,0 +1,16 @@ +<?php + +$oids = snmpwalk_cache_oid($device, 'cmdExcFrequency', [], 'RS-XX8000-DVB-TX-MIB'); + +echo 'Output-Frequency '; + +$count = 1; +foreach ($oids as $id => $data) { + $num_oid = '.1.3.6.1.4.1.2566.127.1.2.167.4.1.1.1.64.' . $count; + $index = 'cmdExcFrequency.' . $id; + $descr = (count($oids) > 1) ? 'Frequency ' . $id : 'Frequency'; + $type = 'rs'; + $current = $data['cmdExcFrequency']; + discover_sensor($valid['sensor'], 'frequency', $device, $num_oid, $index, $type, $descr, '1', '1', null, null, null, null, $current); + $count++; +}
[No CFG could be retrieved]
No Summary Found.
Any reason you couldn't do this in yaml?
@@ -1,4 +1,4 @@ ENV['BUNDLE_GEMFILE'] ||= File.expand_path('../Gemfile', __dir__) -require 'bundler/setup' # Set up gems listed in the Gemfile. -require 'bootsnap/setup' # Speed up boot time by caching expensive operations. +require "bundler/setup" # Set up gems listed in the Gemfile. +require "bootsnap/setup" # Speed up boot time by caching expensive operations.
[require,expand_path]
Set up the gemfile.
Style/StringLiterals: Prefer single-quoted strings when you don't need string interpolation or special symbols.
@@ -67,6 +67,12 @@ class ProviderIndex(object): self.update(spec) def update(self, spec): + """Update the provider index with the virtual specs provided by the + spec passed as input + + Args: + spec: spec potentially providing additional virtual specs + """ if not isinstance(spec, spack.spec.Spec): spec = spack.spec.Spec(spec)
[ProviderIndex->[copy->[ProviderIndex,copy],from_json->[ProviderIndex],providers_for->[update],_transform->[_transform],satisfies->[_cross_provider_maps],__str__->[_transform]],_transform->[mapiter]]
Update the in the index.
Love the addition of the docstring. Nit: Is it more accurate and concise to say ``Update the provider index with additional virtual specs`` given the inclusion of the `Args` description?
@@ -24,6 +24,7 @@ import com.google.common.collect.Maps; import com.google.gson.Gson; import com.google.gson.GsonBuilder; import com.google.gson.reflect.TypeToken; +import java.util.concurrent.locks.ReentrantReadWriteLock; import org.apache.commons.io.FileUtils; import org.apache.commons.lang.ArrayUtils; import org.apache.commons.lang.StringUtils;
[InterpreterSettingManager->[setInterpreterBinding->[saveToFile],get->[get],remove->[saveToFile,get,remove],close->[run->[close],close],getAllResourcesExcept->[getAllInterpreterGroup],removeRepository->[saveToFile],restart->[copyDependenciesFromLocalPath],removeResourcesBelongsToParagraph->[getAllInterpreterGroup],createNewSetting->[initInterpreterSetting,saveToFile],closeNote->[getInterpreterSettings],getEditorSetting->[getInterpreterSettings],init->[saveToFile,loadFromFile],removeNoteInterpreterSettingBinding->[setInterpreterBinding],removeResourcesBelongsToNote->[removeResourcesBelongsToParagraph],addInterpreterSetting->[initInterpreterSetting],setPropertyAndRestart->[saveToFile,loadFromFile],loadFromFile->[initInterpreterSetting],addRepository->[saveToFile],getSettingIds->[get],recursiveBuildLibList->[recursiveBuildLibList]]]
Imports a single type token from a Java source. Imports the object that represents a remote interpreter.
this could be removed
@@ -0,0 +1,7 @@ +""" +This module contains classes related to handling data for semantic parsing. That includes knowledge graphs, type +declarations, and world representations for all the domains for which we want to build semantic parsers. +""" +from allennlp.data.semparse.knowledge_graphs import KnowledgeGraph, TableKnowledgeGraph +from allennlp.data.semparse.type_declarations import wikitables_type_declaration +from allennlp.data.semparse.worlds import WikitablesWorld, NLVRWorld
[No CFG could be retrieved]
No Summary Found.
But I probably _wouldn't_ import `TableKnowledgeGraph` here. The things that go in `allennlp.data.semparse` are top-level API classes, like `KnowledgeGraph`. That's the only import out of all of these that should be in here. The rest should be in their respective submodules.
@@ -0,0 +1,16 @@ +from spack import * + +class Atop(Package): + """Atop is an ASCII full-screen performance monitor for Linux""" + homepage = "http://www.atoptool.nl/index.php" + url = "http://www.atoptool.nl/download/atop-2.2-3.tar.gz" + + version('2.2-3', '034dc1544f2ec4e4d2c739d320dc326d') + + def install(self, spec, prefix): + make() + mkdirp(prefix.bin) + install("atop", join_path(prefix.bin, "atop")) + mkdirp(join_path(prefix.man, "man1")) + install(join_path("man", "atop.1"), + join_path(prefix.man, "man1", "atop.1"))
[No CFG could be retrieved]
No Summary Found.
Is the indentation here correct?
@@ -52,12 +52,17 @@ class StepsController < ApplicationController respond_to do |format| if @step.save + # Post process all assets @step.assets.each do |asset| asset.post_process_file(@protocol.team) end + #link tiny_mce_assets to the step + link_tiny_mce_assets(@step.description, @step) + create_annotation_notifications(@step) + # Generate activity if @protocol.in_module? Activity.create(
[StepsController->[destroy->[destroy],update->[create],create->[new,create],update_protocol_ts->[update],new->[new],toggle_step_state->[create],extract_destroy_params->[has_destroy_params,extract_destroy_params],checklistitem_state->[create]]]
Creates a new object. endregion region ecs.
Missing space after #.
@@ -79,10 +79,13 @@ public abstract class AbstractMonitorFactory implements MonitorFactory { } final URL monitorUrl = url; - final ListenableFutureTask<Monitor> listenableFutureTask = ListenableFutureTask.create(new MonitorCreator(monitorUrl)); - listenableFutureTask.addListener(new MonitorListener(key)); - executor.execute(listenableFutureTask); - FUTURES.put(key, listenableFutureTask); + final CompletableFuture<Monitor> completableFuture = CompletableFuture.supplyAsync(()->{ + Monitor newMonitor = AbstractMonitorFactory.this.createMonitor(monitorUrl); + return newMonitor; + + },executor); + completableFuture.thenRunAsync(new MonitorListener(key)); + FUTURES.put(key, completableFuture); return null; } finally {
[AbstractMonitorFactory->[MonitorCreator->[call->[createMonitor]]]]
Returns a monitor with the given name or null if no monitor with the given name exists.
We already defined an executor, specify this executor for `thenRunAsync ` may be better.
@@ -58,8 +58,8 @@ module MarksGradersHelper table_row[:id] = grader.id table_row[:filter_table_row_contents] = - render_to_string :partial => 'marks_graders/table_row/filter_table_grader_row', - :locals => { :grader => grader } + render_to_string partial: 'marks_graders/table_row/filter_table_grader_row', + locals: { grader: grader } #These used only for searching table_row[:first_name] = grader.first_name
[construct_table_rows->[each,id,construct_table_row],construct_table_row->[nil?,render_to_string,user_name,last_name,find_by_user_id,first_name,join,name,id],construct_grader_table_row->[render_to_string,user_name,last_name,first_name,get_membership_count_by_grade_entry_form,id],construct_grader_table_rows->[each,id,construct_grader_table_row]]
Construct a filter table row for the grader.
Align the elements of a hash literal if they span more than one line.
@@ -359,13 +359,14 @@ if ( ! function_exists('meta')) // Turn single array into multidimensional $name = array($name); } - + $allowed_type = array('charset', 'http-equiv', 'name', 'property'); $str = ''; foreach ($name as $meta) { - $type = (isset($meta['type']) && $meta['type'] !== 'name') ? 'http-equiv' : 'name'; + $meta['type'] = (isset($meta['type']) && ($meta['type'] == 'equiv')) ? 'http-equiv' : $meta['type']; // backward compatibility + $type = (isset($meta['type']) && in_array($meta['type'], $allowed_type))? $meta['type'] : 'name'; $name = isset($meta['name']) ? $meta['name'] : ''; - $content = isset($meta['content']) ? $meta['content'] : ''; + $content = (isset($meta['content']) && $type != 'charset') ? $meta['content'] : ''; $newline = isset($meta['newline']) ? $meta['newline'] : "\n"; $str .= '<meta '.$type.'="'.$name.'" content="'.$content.'" />'.$newline;
[link_tag->[site_url,slash_item],img->[site_url,slash_item]]
This function will generate a meta tag with the given name content and type.
Don't change the alignment please.
@@ -131,7 +131,7 @@ class ConvertImageDtype(object): self.dtype = dtype def __call__(self, image: torch.Tensor) -> torch.Tensor: - return F.convert_image_dtype(image, self.dtype) + return F_t.convert_image_dtype(image, self.dtype) class ToPILImage(object):
[RandomAffine->[__call__->[get_params]],RandomPerspective->[__call__->[get_params]],RandomCrop->[forward->[get_params]],RandomResizedCrop->[forward->[get_params]],ColorJitter->[get_params->[Compose,Lambda]],RandomRotation->[__call__->[get_params]],RandomErasing->[forward->[get_params]]]
Convert image to missing tensor.
Can we revert the changes in this file, given that we have now exposed `convert_image_dtype` to `functional.py`? This way `transforms.py` only depends on `functional.py`
@@ -1112,9 +1112,9 @@ void gui_init(struct dt_iop_module_t *self) c->select_by = dt_bauhaus_combobox_new(self); dt_bauhaus_widget_set_label(c->select_by, NULL, _("select by")); gtk_widget_set_tooltip_text(c->select_by, _("choose selection criterion, will be the abscissa in the graph")); - dt_bauhaus_combobox_add(c->select_by, _("hue")); - dt_bauhaus_combobox_add(c->select_by, _("saturation")); - dt_bauhaus_combobox_add(c->select_by, _("lightness")); + dt_bauhaus_combobox_add(c->select_by, C_("select by", "hue")); + dt_bauhaus_combobox_add(c->select_by, C_("select by", "saturation")); + dt_bauhaus_combobox_add(c->select_by, C_("select by", "lightness")); gtk_box_pack_start(GTK_BOX(self->widget), c->select_by, TRUE, TRUE, 0); g_signal_connect(G_OBJECT(c->select_by), "value-changed", G_CALLBACK(select_by_changed), (gpointer)self);
[No CFG could be retrieved]
gtk_box_pack_start - start of widget widget widget widget widget widget widget REGISTER_ALL GUILD_EVENTS_MASK - > COLOR_COUNTER_MASK.
Here the context is more "color", we are talking about hue, saturation or lightness in the "color" context.
@@ -1424,11 +1424,12 @@ class _SDFBoundedSourceWrapper(ptransform.PTransform): def current_restriction(self): start_pos = self._delegate_range_tracker.start_position() stop_pos = self._delegate_range_tracker.stop_position() - return SourceBundle( - self._weight, - self._source, - start_pos, - stop_pos) + return _SDFBoundedSourceWrapper._SDFBoundedSourceRestriction( + SourceBundle( + self._weight, + self._source, + start_pos, + stop_pos), self._delegate_range_tracker) def start_pos(self): return self._delegate_range_tracker.start_position()
[_finalize_write->[close,finalize_write,open_writer],_SDFBoundedSourceWrapper->[_SDFBoundedSourceRestrictionProvider->[create_tracker->[_SDFBoundedSourceRestrictionTracker],split->[split],initial_restriction->[stop_position,start_position,get_range_tracker]],expand->[split_source->[],_create_sdf_bounded_source_dofn],_SDFBoundedSourceRestrictionTracker->[current_progress->[fraction_consumed,RestrictionProgress],__init__->[get_range_tracker],try_claim->[try_claim],current_restriction->[stop_position,start_position],stop_pos->[stop_position],start_pos->[start_position],try_split->[position_at_fraction,stop_pos,start_pos,fraction_consumed,try_split],check_done->[fraction_consumed]],_create_sdf_bounded_source_dofn->[SDFBoundedSourceDoFn->[process->[get_range_tracker,current_restriction,_SDFBoundedSourceRestrictionProvider,read]],SDFBoundedSourceDoFn,estimate_size,get_desired_chunk_size],_infer_output_coder->[default_output_coder]],_WriteKeyedBundleDoFn->[process->[close,write,open_writer]],RestrictionTrackerView->[current_restriction->[current_restriction],defer_remainder->[defer_remainder],try_claim->[try_claim]],Read->[to_runner_api_parameter->[is_bounded],from_runner_api_parameter->[Read],_infer_output_coder->[default_output_coder],expand->[split_source->[split,estimate_size,get_desired_chunk_size],get_range_tracker,is_bounded,read]],_WriteBundleDoFn->[finish_bundle->[close],process->[write,open_writer]],_pre_finalize->[pre_finalize],RestrictionProgress->[with_completed->[RestrictionProgress]],ThreadsafeRestrictionTracker->[current_progress->[current_progress],try_claim->[try_claim],current_restriction->[current_restriction],defer_remainder->[try_split],try_split->[try_split],check_done->[check_done]],WriteImpl->[expand->[split_source->[],initialize_write]]]
Returns a new SourceBundle with the current restriction.
Rather than re-creating it here, how about just storing (and returning) the initial _SDFBoundedSourceRestriction object (which does lazy initialization in range_tracker())?
@@ -62,7 +62,7 @@ public class MsgEchoClient { final NioEventLoopGroup connectGroup = new NioEventLoopGroup(1, connectFactory, NioUdtProvider.MESSAGE_PROVIDER); try { - boot.group(connectGroup) + boot.group(connectGroup).localAddress(0) .channelFactory(NioUdtProvider.MESSAGE_CONNECTOR) .handler(new ChannelInitializer<UdtChannel>() { @Override
[MsgEchoClient->[main->[info,run,enable],run->[initChannel->[addLast,LoggingHandler,MsgEchoClientHandler],UtilThreadFactory,Bootstrap,handler,NioEventLoopGroup,shutdown,sync],getLogger]]
Run the client.
is this really needed ?
@@ -290,8 +290,10 @@ func rulesToProjectRules(rules []engine.Rule) []*api.ProjectRule { Values: r.Values, } } + // TODO: The ProjectRule Type needs to be added to the database rule := &api.ProjectRule{ Conditions: pr, + Type: tags.RuleTypeNodeTag, } return []*api.ProjectRule{rule} }
[ListProjects->[ListProjects],DeleteProject->[DeleteProject],CreateProject->[CreateProject],GetProject->[GetProject],UpdateProject->[UpdateProject]]
returns a list of rules that can be applied to the Condition.
This is hard coded to be of 'node' type. This will later have to be replaced with the type from the data store.
@@ -77,6 +77,10 @@ public class DomainRouterResponse extends BaseResponse implements ControlledView @Param(description = "the Pod ID for the router") private String podId; + @SerializedName(ApiConstants.POD_NAME) + @Param(description = "the Pod name for the router") + private String podName; + @SerializedName(ApiConstants.HOST_ID) @Param(description = "the host ID for the router") private String hostId;
[DomainRouterResponse->[getObjectId->[getId],addNic->[add]]]
The parameters related to a single in the System. The ID of the corresponding link local network.
@davidjumani can we have `since` key here
@@ -59,7 +59,7 @@ func TestRepository_GetWatchers(t *testing.T) { assert.NoError(t, PrepareTestDatabase()) repo := AssertExistsAndLoadBean(t, &Repository{ID: 1}).(*Repository) - watchers, err := repo.GetWatchers(1) + watchers, err := repo.GetWatchers(ListOptions{Page: 1}) assert.NoError(t, err) assert.Len(t, watchers, repo.NumWatches) for _, watcher := range watchers {
[EqualValues,False,NoError,Len,GetWatchers,True]
TestWatchRepo tests that watches and repositories are loaded. TestNotifyWatchers tests that the action has a unique identifier.
You haven't set a page size here...
@@ -6,11 +6,11 @@ package armhelpers import ( "context" - "github.com/Azure/azure-sdk-for-go/services/resources/mgmt/2018-05-01/resources" "github.com/Azure/go-autorest/autorest/to" ) // ListProviders returns all the providers for a given AzureClient -func (az *AzureClient) ListProviders(ctx context.Context) (resources.ProviderListResultPage, error) { - return az.providersClient.List(ctx, to.Int32Ptr(100), "") +func (az *AzureClient) ListProviders(ctx context.Context) (ProviderListResultPage, error) { + page, err := az.providersClient.List(ctx, to.Int32Ptr(100), "") + return &page, err }
[ListProviders->[List,Int32Ptr]]
ListProviders lists all providers.
ditto about breaking change
@@ -501,7 +501,10 @@ crt_hg_class_init(int provider, int idx, hg_class_t **ret_hg_class) if (rc != 0) D_GOTO(out, rc); - init_info.na_init_info.progress_mode = 0; + if (crt_provider_is_block_mode(provider)) + init_info.na_init_info.progress_mode = 0; + else + init_info.na_init_info.progress_mode = NA_NO_BLOCK; if (crt_provider_is_sep(provider)) init_info.na_init_info.max_contexts = crt_gdata.cg_ctx_max_num;
[No CFG could be retrieved]
This function is called by the CRT code to initialize the individual HG objects. This function is called by the constructor of the HG class.
Why is there a macro definition for the value of 0. What does it mean?
@@ -25,6 +25,7 @@ module SamlIdpAuthConcern def add_sp_metadata_to_session session[:sp] = { loa3: loa3_requested?, logo: current_sp_metadata[:logo], + issuer: saml_request.service_provider.identifier, return_url: current_sp_metadata[:return_to_sp_url], name: current_sp_metadata[:friendly_name] || current_sp_metadata[:agency],
[requested_authn_context->[requested_authn_context],build_asserted_attributes->[attribute_asserter]]
add_sp_metadata_to_session adds the SP metadata to the session and.
I duplicated this method a bit in `openid_connect/authorization_controller.rb` -- can you rebase and add this there too? I can try to refactor it after too so we can share more code
@@ -11,8 +11,7 @@ class AddCreatedByToAssets < ActiveRecord::Migration[4.2] tables = [:assets, :checklists, :checklist_items, :comments, :custom_fields, :my_modules, :teams, :projects, - :reports, :results, :sample_groups, :sample_types, :samples, - :steps, :tables, :tags] + :reports, :results, :steps, :tables, :tags] tables.each do |table_name| add_column table_name, :last_modified_by_id, :integer
[AddCreatedByToAssets->[change->[each,add_index,add_column]]]
Add missing columns to the missing_keys table.
Layout/AlignArray: Align the elements of an array literal if they span more than one line.
@@ -1544,6 +1544,8 @@ public class JGroupsTransport implements Transport { nonBlockingExecutor.execute(() -> processMessage(message)); }); + + processMessage(firstMessage.get()); } } }
[JGroupsTransport->[getViewId->[getViewId],invokeCommand->[getAddress],receiveClusterView->[getMembers,getAddress],waitForInitialNodes->[getMembers],invokeCommands->[getAddress],isCoordinator->[isCoordinator],processRequest->[sendResponse,getAddress],ChannelCallbacks->[sitesUp->[updateSitesView],up->[receiveClusterView,siteUnreachable,processMessage],sitesDown->[updateSitesView]],invokeCommandOnAll->[getMembers,getAddress],startJGroupsChannelIfNeeded->[getPhysicalAddresses,getAddress],sendCommand->[getAddress,toJGroupsAddress,setMessageFlags,marshallRequest,send],sendCommandToAll->[setMessageFlags,marshallRequest,send],performAsyncRemoteInvocation->[sendCommand],addRequest->[addRequest],waitForView->[getViewId],getCoordinator->[getCoordinator],getMembers->[getMembers],invokeCommandStaggered->[getAddress],send->[send],withView->[getViewId,withView],getPhysicalAddresses->[getAddress],initChannel->[getAddress],processResponse->[getAddress],sendResponse->[send,getAddress],updateSitesView->[getCoordinator,sendTo,isCoordinator,getSitesView],performSyncRemoteInvocation->[invokeCommandStaggered,invokeCommandOnAll,invokeCommand]]]
Processes all messages in the given batch.
looking at JGroups code, `message` can be `null` in `forEach()'. it is probably worth checking for `null`.
@@ -203,7 +203,7 @@ func TestGenerateHints(t *testing.T) { config: defaultConfig(), logger: logp.NewLogger("hints.builder"), } - cfgs := m.CreateConfig(test.event) + cfgs := m.CreateConfig(test.event, nil) assert.Equal(t, len(cfgs), test.len, test.message) if len(cfgs) != 0 {
[Unpack,Equal,NewLogger,CreateConfig,Nil]
icmp tests for the icmp protocol.
I think you need to remove the `nil` here.
@@ -55,9 +55,7 @@ public class ForceSyncAsyncFlagsTest extends MultipleCacheManagersTest { // check that the replication call was sync cache1.put("k", "v"); - verify(mockTransport) - .invokeCommandOnAll(any(ReplicableCommand.class), any(ResponseCollector.class), any(DeliverOrder.class), - anyLong(), any(TimeUnit.class)); + verify(mockTransport).invokeCommandOnAll(any(), any(), any(), any(), anyLong(), any()); reset(mockTransport);
[ForceSyncAsyncFlagsTest->[testForceSyncFlagUsage->[extractGlobalComponent,createClusteredCaches,setTransport,reset,any,getAdvancedCache,extractComponent,anyLong,getCacheManager,ReplicatedControlledConsistentHashFactory,invokeCommandOnAll,consistentHashFactory,getDefaultClusteredCacheConfig,put,spy,sendToAll],testForceAsyncFlagUsage->[extractGlobalComponent,createClusteredCaches,setTransport,reset,any,getAdvancedCache,extractComponent,anyLong,getCacheManager,ReplicatedControlledConsistentHashFactory,invokeCommandOnAll,consistentHashFactory,getDefaultClusteredCacheConfig,put,spy,sendToAll]]]
testForceAsyncFlagUsage - test if the cache has a blocking flag on SYNC cache.
why have you removed the classes?
@@ -21,6 +21,8 @@ import torch import torch.nn.functional as F from torchvision import datasets +random_state_numpy = np.random.RandomState(0) + class STL10TestCase(datasets_utils.ImageDatasetTestCase): DATASET_CLASS = datasets.STL10
[Flickr30kTestCase->[_create_annotations_file->[_create_captions]],MNISTTestCase->[inject_fake_data->[_create_binary_file]],VOCSegmentationTestCase->[_create_annotation_file->[add_name->[add_child],add_bndbox->[add_child],add_name,add_child,add_bndbox],_create_annotation_files->[_create_annotation_file]],STL10TestCase->[_make_image_file->[_make_binary_file],_make_train_files->[_make_image_file,_make_fold_indices_file,_make_label_file],_make_label_file->[_make_binary_file],_make_test_files->[_make_image_file,_make_label_file],inject_fake_data->[_make_test_files,_make_train_files,_make_class_names_file]],Places365TestCase->[_make_images_archive->[_make_image],_make_categories_txt->[_make_txt],test_classes->[create_dataset],_make_devkit_archive->[_make_categories_txt,_make_file_list_txt],test_images_download_preexisting->[create_dataset],test_class_to_idx->[create_dataset],inject_fake_data->[_make_images_archive,_make_devkit_archive],_make_file_list_txt->[_make_txt]],ImageFolderTestCase->[test_classes->[create_dataset]],CityScapesTestCase->[inject_fake_data->[make_image]],Flickr8kTestCase->[test_captions->[create_dataset],inject_fake_data->[_create_images]],INaturalistTestCase->[test_targets->[create_dataset]],QMNISTTestCase->[_labels_file->[_prefix],test_num_examples_test50k->[create_dataset]],DatasetFolderTestCase->[test_is_valid_file->[create_dataset],test_classes->[create_dataset]],CocoDetectionTestCase->[inject_fake_data->[_create_annotation_file]],SBDatasetTestCase->[inject_fake_data->[_create_split_files]],HMDB51TestCase->[_create_videos->[file_name_fn],inject_fake_data->[_create_videos]],UCF101TestCase->[_create_videos->[file_name_fn],inject_fake_data->[_create_annotation_files],_create_annotation_files->[_create_annotation_file]]]
Imports the given object and imports the object. Create train and unlabeled files for the given n - tuple.
we should create a local RandomState object in each test function instead of having a global one in each file. Otherwise, tests are still dependent on each other withing a single module. Also, no strong opinion on that but `np_rng` would be shorter and still be a descriptive name, so I'd suggest to use that instead
@@ -82,9 +82,12 @@ final class CollectionNormalizer extends AbstractCollectionNormalizer foreach ($object as $obj) { $item = $this->normalizer->normalize($obj, $format, $context); + if (!\is_array($item)) { + throw new UnexpectedValueException('Expected item to be an array'); + } if (!isset($item['data'])) { - throw new InvalidArgumentException('The JSON API document must contain a "data" key.'); + throw new UnexpectedValueException('The JSON API document must contain a "data" key.'); } $data['data'][] = $item['data'];
[CollectionNormalizer->[getItemsData->[normalize],getPaginationData->[getPaginationConfig]]]
Get items data.
Could it be a BC break?
@@ -164,6 +164,11 @@ public class FlinkExecutionEnvironments { ? ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION : ExternalizedCheckpointCleanup.DELETE_ON_CANCELLATION); } + + long minPauseBetweenCheckpoints = options.getMinPauseBetweenCheckpoints(); + flinkStreamEnv + .getCheckpointConfig() + .setMinPauseBetweenCheckpoints(minPauseBetweenCheckpoints); } applyLatencyTrackingInterval(flinkStreamEnv.getConfig(), options);
[FlinkExecutionEnvironments->[applyLatencyTrackingInterval->[setLatencyTrackingInterval,getLatencyTrackingInterval],createBatchExecutionEnvironment->[splitToList,toArray,equals,parseInt,size,matches,warn,info,CollectionEnvironment,getObjectReuse,getParallelism,createLocalEnvironment,getExecutionEnvironment,enableObjectReuse,getFlinkMaster,setParallelism,createRemoteEnvironment,applyLatencyTrackingInterval,get,disableObjectReuse,getConfig],createStreamExecutionEnvironment->[splitToList,getCheckpointingInterval,toArray,equals,parseInt,size,matches,warn,info,enableExternalizedCheckpoints,enableCheckpointing,getCheckpointingMode,IllegalArgumentException,getCheckpointTimeoutMillis,setNumberOfExecutionRetries,setCheckpointTimeout,getExecutionRetryDelay,setStateBackend,getObjectReuse,setExecutionRetryDelay,getParallelism,createLocalEnvironment,getExecutionEnvironment,enableObjectReuse,getStateBackend,getFlinkMaster,setStreamTimeCharacteristic,setParallelism,createRemoteEnvironment,applyLatencyTrackingInterval,isExternalizedCheckpointsEnabled,get,disableObjectReuse,getConfig,getRetainExternalizedCheckpointsOnCancellation,getNumberOfExecutionRetries],getLogger]]
Creates a stream execution environment based on the given options and filesToStage. The last value that can be applied to the environment.
With this, we might always be overriding the Flink default if it changes, or if it becomes configurable in the Flink config in the future. I think we should check whether the value is different from the "Beam default" and only set it then.
@@ -58,10 +58,12 @@ public class AvroConsumer { Map<String, Object> serdeProps = new HashMap<>(); serdeProps.put(KsqlGenericRowAvroSerializer.AVRO_SERDE_SCHEMA_CONFIG, schemaStr); - final Serializer<GenericRow> genericRowSerializer = new KsqlGenericRowAvroSerializer(null); + final Serializer<GenericRow> genericRowSerializer = new KsqlGenericRowAvroSerializer(null, + null, + null); genericRowSerializer.configure(serdeProps, false); - final Deserializer<GenericRow> genericRowDeserializer = new KsqlGenericRowAvroDeserializer(null); + final Deserializer<GenericRow> genericRowDeserializer = new KsqlGenericRowAvroDeserializer(null, new MockSchemaRegistryClient()); genericRowDeserializer.configure(serdeProps, false); genericRowSerde = Serdes.serdeFrom(genericRowSerializer, genericRowDeserializer);
[AvroConsumer->[printGenericRowTopic->[getGenericRowSerde],main->[printGenericRowTopic]]]
get generic row serde.
`null`, `null`, `null`? This will result in an `NPE` at Line 61 of `KsqlGenericRowAvroSerializer` As a side note: Why bother passing the `schema` here? It is never used in `KsqlGenericRowAvroSerializer`
@@ -45,7 +45,7 @@ async def analyze_layout_async(): "./sample_forms/forms/form_selection_mark.png", ) ) - # [START analyze_layout_async] + from azure.core.credentials import AzureKeyCredential from azure.ai.formrecognizer.aio import DocumentAnalysisClient
[analyze_layout_async->[format_bounding_box],main->[analyze_layout_async],main]
Analyzes a layout and prints out any errors that occur. Prints out a list of missing items in the selection box.
do we need to remove unused directives from the sync layout sample too?
@@ -388,7 +388,11 @@ class Diaspora if (!is_object($j_outer_key_bundle)) { logger('Outer Salmon did not verify. Discarding.'); - System::httpExit(400); + if ($no_exit) { + return false; + } else { + System::httpExit(400); + } } $outer_iv = base64_decode($j_outer_key_bundle->iv);
[Diaspora->[decodeRaw->[children,attributes],validPosting->[children,getName],verifyMagicEnvelope->[children,attributes],receiveStatusMessage->[children],message->[getName],decode->[children,attributes],transmit->[get_curl_code,get_curl_headers],dispatch->[getName]]]
Decode a raw XML string into a signed XML object Encode the message.
This should be done in the calling module instead of here. You got the right idea adding a `return false;`, but I think you didn't bring it far enough. There should be no flag in the function parameter, and `System::httpExit()` should be called in the calling modules.
@@ -122,5 +122,11 @@ module Users def otp_rate_limiter @_otp_rate_limited ||= OtpRateLimiter.new(phone: phone_to_deliver_to, user: current_user) end + + def change_phone + mark_user_session_authenticated + bypass_sign_in current_user + redirect_to manage_phone_url + end end end
[TwoFactorAuthenticationController->[reauthn_param->[permit,dig],show->[redirect_to,two_factor_enabled?,totp_enabled?],flash_error_for_exception->[t,code],send_user_otp->[send,to_s,confirmation_context?,direct_otp,create_direct_otp,increment,constantize],handle_valid_otp_delivery_preference->[lock_out_user,redirect_to,login_two_factor_url,exceeded_otp_send_limit?,send_user_otp,reset_count_and_otp_last_sent_at,no_longer_locked_out?,reauthn?],phone_to_deliver_to->[authentication_context?,phone],validate_otp_delivery_preference_and_send_code->[to_h,track_event,handle_valid_otp_delivery_preference,otp_delivery_preference,success?,first,submit],invalid_phone_number->[flash_error_for_exception,track_event,redirect_back,message,code],otp_rate_limiter->[new],otp_delivery_selection_form->[new],send_code->[to_h,track_event,handle_valid_otp_delivery_preference,success?,invalid_phone_number,handle_invalid_otp_delivery_preference,submit],delivery_params->[permit],handle_invalid_otp_delivery_preference->[first,redirect_to,login_two_factor_url,otp_delivery_preference],include]]
Get the next OTP that we should limit for.
Code Climate says this is not tested.
@@ -146,6 +146,14 @@ class SpanBasedF1Measure(Metric): # We don't care about tags we are # told to ignore, so we do nothing. continue + elif bio_tag == "U": + # The U tag is used to indicate a span of length 1, + # so if there's an active tag we end it, and then + # we add a "length 0" tag. + if active_conll_tag: + spans.add(((span_start, span_end), active_conll_tag)) + spans.add(((index, index), conll_tag)) + active_conll_tag = None elif bio_tag == "B": # We are entering a new span; reset indices # and active tag to new span.
[SpanBasedF1Measure->[_compute_metrics->[float],reset->[defaultdict],__init__->[defaultdict,get_index_to_token_vocabulary],__call__->[,remove,size,max,cpu,sequence_gold_label,sequence_prediction,format,isinstance,_extract_spans,range,ones_like,get_lengths_from_binary_sequence_mask,ConfigurationError],_extract_spans->[add,enumerate,set],get_metric->[_compute_metrics,reset,keys,update,sum,set,values]],register]
Extract spans from a sequence of BIO tags. MissingTokens is a part of a bio label.
It'd be nice if the test for this made sure `U` tags were handled correctly - I don't think you updated the test to check for this.
@@ -379,6 +379,7 @@ namespace System.Net.Test.Common public string Domain { get; set; } public string Password { get; set; } public bool IsProxy { get; set; } = false; + public X509Certificate2 Certificate { get; set; } public Options() {
[Http11LoopbackServerFactory->[CreateConnectionAsync->[CreateAsync]],LoopbackServer->[GetSingleChunkHttpResponse->[GetStatusDescription],GetBytePerChunkHttpResponse->[GetStatusDescription],Connection->[Dispose->[Dispose],Task->[GetStatusDescription,GetHttpResponseHeaders]],GetConnectionCloseResponse->[GetStatusDescription],Dispose->[Dispose],GetHttpResponseHeaders->[GetStatusDescription,GetHttpResponseHeaders]]]
Create a response object from a HTTP response. Creates a connection object for the given socket and stream.
Would it make sense to put the certificate in `GenericLoopbackOptions` and make use if it in other loopback servers?
@@ -147,6 +147,7 @@ func initRand() { // instance. // XXX Move this as a *Beat method? func Run(settings Settings, bt beat.Creator) error { + setUmaskWithSettings(settings) name := settings.Name idxPrefix := settings.IndexPrefix version := settings.Version
[launch->[InitWithSettings,createBeater],TestConfig->[InitWithSettings,createBeater],Setup->[InitWithSettings,Setup,createBeater],indexSetupCallback->[Setup],createBeater->[BeatConfig],configure->[BeatConfig],Init->[InitWithSettings]]
Run initializes and runs a Beater implementation. Add additional info to state registry.
Should we log failures? E.g. add an ErrNotImplemented and only log `if err != nil && err != ErrNotImplemented`.
@@ -101,6 +101,7 @@ class OAuthClient(Client): verifier=verifier)) req.sign_request(self.signature_method, consumer, token) return super(OAuthClient, self).get(req.to_url(), HTTP_HOST='api', + HTTP_AUTHORIZATION='OAuth realm=""', **req) def delete(self, url, consumer=None, token=None, callback=False,
[TestAddon->[test_create_version_bad_license->[post,create_addon],test_create_user_altered->[get,make_create_request],test_create_no_license_step->[get,create_no_license],test_my_addons_only->[get],make_create_request->[post],test_xpi_failure->[make_create_request],test_duplicate_guid->[make_create_request,create_addon],test_my_addons_deleted->[get],test_delete_version->[delete,get,activitylog_count,create_addon],test_update->[put,get,activitylog_count,create_addon],test_one_addon->[get],test_update_nonexistant->[put],test_retrieve_versions->[get,create_addon],create_addon->[make_create_request,activitylog_count],test_not_my_addon->[put,get,create_addon],test_fake_license->[make_create_request],test_get_version->[get,create_addon],test_create_version_no_license->[post,get,create_addon],test_update_version_bad_id->[put],test_update_version_bad_license->[put,create_for_update],test_create_no_license_status->[get,create_no_license],test_wrong_guid->[post,get,create_addon],test_my_addons_disabled->[get],test_bad_zip->[make_create_request],create_no_license->[make_create_request],test_create_no_license->[create_no_license],test_create_slug->[make_create_request],test_create_no_license_url->[get,create_no_license],test_delete->[delete,create_addon],test_create_version->[post,get,activitylog_count,create_addon],test_create_status->[get,make_create_request],test_update_version_bad_xpi->[put,get,create_addon],test_create->[get,create_addon],test_bad_appversion->[create_addon],test_my_addons_role->[get],test_update_version_no_license->[put,get,create_for_update],test_no_user->[get],test_update_fail->[put,create_addon],create_for_update->[get,create_addon],test_get_version_statuses->[get,create_addon],test_update_version->[put,get,create_for_update,activitylog_count],test_no_addons->[get],test_create_no_user->[make_create_request]],get_request_token->[get,get_token_from_response],TestPerformanceAPI->[test_not_allowed->[post],test_form_data->[make_create_request,get_data],test_
creates_os_version->[test_form_data],test_creates_app_version->[test_form_data],make_create_request->[post],test_form_fails->[make_create_request],test_no_addon->[make_create_request,get_data],test_gets_os_version->[test_form_data],test_addon->[make_create_request,get_data],test_form_validate->[make_create_request,get_data],test_gets_app_version->[test_form_data],test_form_incomplete->[make_create_request,get_data],test_form_updates->[test_form_data,make_create_request,get_data]],TestBaseOAuth->[test_user->[get],test_request_token_cancelled->[get_request_token],test_request_token_pending->[get_request_token],test_login_nonexistant->[_test_auth],test_login_deleted->[_test_auth],test_request_token_fake->[get],test_user_lookup->[get],test_login_unconfirmed->[_test_auth],test_accepted_callback->[get_request_token],test_forbidden_user_lookup->[get],test_login_works->[_test_auth],test_login_three_legged->[_test_auth],test_failed_user_lookup->[get]],get_access_token->[get,get_token_from_response],OAuthClient->[put->[get_absolute_url,_get_args,data_keys],delete->[get_absolute_url,_get_args],get->[get_absolute_url,_get_args],post->[get_absolute_url,_get_args,data_keys]],BaseOAuth->[setUp->[get]],OAuthClient]
Get a single resource by url.
That line needs to be discussed: it's the only one I had to change in tests to be able to run them against both the Piston and DRF implementations of OAuth. Given that this check doesn't appear to be performed by Piston's implementation, it may break existing clients when we switch to DRF if they don't use that header.
@@ -51,6 +51,11 @@ func expandDistributionConfig(d *schema.ResourceData) *cloudfront.DistributionCo Origins: expandOrigins(d.Get("origin").(*schema.Set)), PriceClass: aws.String(d.Get("price_class").(string)), } + if v, ok := d.GetOk("ordered_cache_behavior"); ok { + distributionConfig.CacheBehaviors = expandCacheBehaviors(v.([]interface{})) + } else { + distributionConfig.CacheBehaviors = expandCacheBehaviorsDeprecated(d.Get("cache_behavior").(*schema.Set)) + } // This sets CallerReference if it's still pending computation (ie: new resource) if v, ok := d.GetOk("caller_reference"); ok == false { distributionConfig.CallerReference = aws.String(time.Now().Format(time.RFC3339Nano))
[StringValueSlice,NewSet,Now,ValueOf,Set,Add,Atoi,Itoa,Field,Format,GetOk,Len,Type,Bool,Sort,CanSet,Int64,Get,StringValue,Sprintf,List,Elem,Flatten,String,Interface,NumField,WriteString]
Less implements the Less method of the StringPtrSlice interface. if - Gets the if from the schema.
Old behavior is kept under a expandCacheBehaviorsDeprecated function, which contains the exact same previous code.
@@ -82,6 +82,13 @@ func (d *Dispatcher) DeleteVCH(conf *config.VirtualContainerHostConfigSpec, cont } } + element, err := d.session.Finder.Element(d.op, vmm.Reference()) + if err != nil { + return err + } + vmm.SetInventoryPath(element.Path) + d.appliance = vmm + if err = d.deleteImages(conf); err != nil { errs = append(errs, err.Error()) }
[DeleteVCHInstances->[getImageDatastore,detachAttachedDisks]]
DeleteVCH deletes all the VMs in the VM. delete a VM from the cluster.
Will need some eyes on this. Before I did a new Element lookup the inventory path was being incorrectly filled. I can likely just refactor the other call. I did this to get around the InventoryPath mysteriously being `""`.
@@ -142,3 +142,8 @@ bool adt_MCLQ::prepareLoadedData() return true; } + +bool adt_MFBO::prepareLoadedData() +{ + return fcc == MFBOMagic.fcc; +} \ No newline at end of file
[prepareLoadedData->[prepareLoadedData],free->[free]]
check if the magic number of the MCLQ magic is correct.
please leave a new line at the end of every file
@@ -293,4 +293,8 @@ defineSuite([ '}'; context.verifyDrawForSpecs(fs); }); + + it('has czm_metersPerPixel', function() { + + }); }, 'WebGL'); \ No newline at end of file
[No CFG could be retrieved]
WebGL can't draw if it's not possible to draw it.
Forget to fill this in? Or was this not meant to be submitted?
@@ -459,7 +459,7 @@ func getOrCreateRepoAndID( config.MakeLogger("").CDebugf( ctx, "Overwriting symlink for repo %s with a new repo", normalizedRepoName) - err = config.KBFSOps().RemoveEntry(ctx, repoDir, normalizedRepoName) + err = config.KBFSOps().RemoveEntry(ctx, repoDir, repoNamePPS) if err != nil { return nil, NullID, err }
[Chtimes,GetCurrentSession,RemoveEntry,ForEachObjectHash,Cause,New,ToKBFolderHandle,ParseInt,MustCompile,Bytes,CountLooseRefs,Time,Lookup,GetCanonicalPath,Split,MakeLogger,OpenFile,Getenv,IsExist,Before,ObjectPacks,Now,Close,Add,MatchString,RecursiveDelete,SyncAll,PutGitMetadata,After,Create,UnixNano,RepoID,ToLower,Symlink,MkdirAll,Write,Prune,WithTimeout,Seek,MaxNameBytes,Chroot,RepackObjects,GitRepoName,Stat,CDebugf,ReadDir,WithValue,KBFSOps,Lock,NewStorage,PackRefs,Equal,Name,CreateDir,Truncate,GetLockNamespace,Sprintf,NewFS,MDServer,String,Open,EncodeToString,ReadAll,Rename,GetOrCreateRootNode,GetCanonicalName,toBytes,ToFavorite,IsNotExist,Errorf,Type,Clock,KBPKI,Join,Unix,SetLockNamespace,Remove,WithStack,ModTime,TrimSuffix,LooseObjectTime]
Find the object in the given repoDir that matches the given repoName. If the user uniqID is the unique ID of the repo in the given repoDir.
I thought we didn't care about obscuring git repo names? Or did you decide to add it because it was easier than anticipated?
@@ -307,7 +307,7 @@ def get_cross_building_settings(settings, self_os=None, self_arch=None): return build_os, build_arch, host_os, host_arch -def get_gnu_triplet(os, arch, compiler=None): +def get_gnu_triplet(os, arch, compiler, output): """ Returns string with <machine>-<vendor>-<op_system> triplet (<vendor> can be omitted in practice)
[get_cross_building_settings->[detected_architecture],OSInfo->[uname->[bash_path],detect_windows_subsystem->[uname],get_win_os_version->[_OSVERSIONINFOEXW]],cpu_count->[cpu_count],OSInfo]
Returns the GNU triplet for the specified OS and architecture. Compute the name of the lease that is not found in the system.
Compiler was optional!!
@@ -477,6 +477,8 @@ public class HoodieWriteClient<T extends HoodieRecordPayload> extends AbstractHo */ @Override public void close() { + AutoCleanerService.shutdownAutoCleaner(autoCleanerService); + autoCleanerService = null; // Stop timeline-server if running super.close(); }
[HoodieWriteClient->[clean->[clean],rollbackInflightCompaction->[rollback],inlineCompact->[compact,scheduleCompaction],startCommitWithTime->[startCommit],startCommit->[startCommit],deleteSavepoint->[deleteSavepoint],close->[close],scheduleCompactionAtInstant->[scheduleCompaction],savepoint->[savepoint],rollbackPendingCommits->[rollback],compact->[rollbackInflightCompaction,compact,completeCompaction],insert->[insert],bulkInsert->[bulkInsert],upsert->[upsert],delete->[delete],rollback->[rollback]]]
Stop timeline-server if it's running.
this kind of resetting is probably needed after each write operation as well? maybe it's fine to just reinitialize the service after waitForCompletion.. food for thought..
@@ -786,9 +786,9 @@ dma_map_one(struct bio_desc *biod, struct bio_iov *biov, void *arg) } /* - * Switch to another idle chunk, if there isn't any idle chunk - * available, grow buffer. - */ + * * Switch to another idle chunk, if there isn't any idle chunk + * * available, grow buffer. + * */ rc = chunk_get_idle(bdb, &chk); if (rc) { if (rc == -DER_AGAIN) {
[No CFG could be retrieved]
finds the next bio_dma_chunk that can be mapped into the current bio Get the next unused chunk from the buffer or NULL if there are no unused blocks.
(style) please, no space before tabs
@@ -2596,7 +2596,7 @@ evt_debug(daos_handle_t toh, int debug_level) /** Common routines */ typedef int (cmp_rect_cb)(struct evt_context *tcx, - const struct evt_rect_df *mbr, + const struct evt_node *nd, const struct evt_rect_df *rt1, const struct evt_rect_df *rt2); static int
[No CFG could be retrieved]
Debug information of tree nodes at level \ a debug_level. evt_node_entry - get the event node entry for the given node.
(style) 'nd' may be misspelled - perhaps 'and'?
@@ -240,14 +240,13 @@ class ReceptiveField(BaseEstimator): X, y = self._check_dimensions(X, y, predict=True) n_times, n_epochs, n_outputs = y.shape y_pred = self.predict(X) - - y_pred = y_pred.reshape(y.shape, order='F') - y_pred = y_pred[self.keep_samples_] - y = y[self.keep_samples_] + y_pred = y_pred[self.valid_samples_] + y = y[self.valid_samples_] # Re-vectorize and call scorer y = y.reshape([-1, n_outputs], order='F') y_pred = y_pred.reshape([-1, n_outputs], order='F') + assert y.shape == y_pred.shape scores = scorer_(y, y_pred, multioutput='raw_values') return scores
[ReceptiveField->[score->[predict],predict->[_delay_and_reshape,predict],fit->[_delay_and_reshape,fit]]]
Score predictions generated with a receptive field. This calls the model predict and then masks Check if a sequence of non - zero values in y is missing.
I think this is where it might break sklearn compatibility. I could be wrong though, since we do the reshaping below
@@ -693,12 +693,12 @@ public class WindowTest implements Serializable { } private static class CustomWindowFn<T> extends WindowFn<T, CustomWindow> { - - @Override public Collection<CustomWindow> assignWindows(AssignContext c) throws Exception { + @Override + public Collection<CustomWindow> assignWindows(AssignContext c) throws Exception { String element; // It loses genericity of type T but this is not a big deal for a test. // And it allows to avoid duplicating CustomWindowFn to support PCollection<KV> - if (c.element() instanceof KV){ + if (c.element() instanceof KV) { element = ((KV<Integer, String>) c.element()).getValue(); } else { element = (String) c.element();
[WindowTest->[testNoWindowFnDoesNotReassignWindows->[WindowOddEvenBuckets,apply],testMergingCustomWindowsKeyedCollection->[apply],CustomWindowCoder->[decode->[CustomWindow,decode],verifyDeterministic->[verifyDeterministic],encode->[encode],CustomWindowCoder],testTimestampCombinerEndOfWindow->[apply],WindowOddEvenBuckets->[verifyCompatibility->[isCompatible]],testMergingCustomWindows->[apply],testTimestampCombinerDefault->[apply],CustomWindow->[hashCode->[hashCode],equals->[equals]],CustomWindowFn->[windowCoder->[of],assignWindows->[CustomWindow,equals]]]]
Assign windows.
Yes, it is a lot better indeed, thanks! As I understand it, comparing the two implementations: the problem with mine was that, in a given merge iteration, if the big window is the last window we encounter while going through the `c.windows()` list, then previously encountered small windows would not have been added to `toBeMerged` and thus not merged. Is that why you said that your implementation is more tolerant to arbitrary order?
@@ -35,9 +35,16 @@ func InitLogging(logToStderr bool, verbose int, logFlow bool) { // this is the only way to control the way glog runs. That includes poking around at flags below. flag.Parse() if logToStderr { - flag.Lookup("logtostderr").Value.Set("true") + err := flag.Lookup("logtostderr").Value.Set("true") + if err != nil { + contract.Assert(err != nil) + } + } if verbose > 0 { - flag.Lookup("v").Value.Set(strconv.Itoa(verbose)) + err := flag.Lookup("v").Value.Set(strconv.Itoa(verbose)) + if err != nil { + contract.Assert(err != nil) + } } }
[Lookup,Itoa,Set,Parse]
This is only used for debugging.
This should just be `contract.Assert(err != nil)`; no need for the `if err != nil`.
@@ -1306,7 +1306,7 @@ public class Functions { private static class ThreadSorterBase { protected Map<Long,String> map = new HashMap<>(); - private ThreadSorterBase() { + public ThreadSorterBase() { ThreadGroup tg = Thread.currentThread().getThreadGroup(); while (tg.getParent() != null) tg = tg.getParent(); Thread[] threads = new Thread[tg.activeCount()*2];
[Functions->[isCollapsed->[isCollapsed],subList->[subList],getSearchURL->[getUrl],doPrintStackTrace->[doPrintStackTrace],xmlEscape->[xmlEscape],getRelativeNameFrom->[getRelativeNameFrom],emptyList->[emptyList],getServerName->[getServerName],getCrumb->[getCrumb],getCurrentDescriptorByNameUrl->[getCurrentDescriptorByNameUrl],setCurrentDescriptorByNameUrl->[getCurrentDescriptorByNameUrl],dumpThreadInfo->[getThreadGroup],encode->[encode],getPasswordValue->[hasPermission],filterDescriptors->[apply],getThreadInfos->[dumpAllThreads],singletonList->[singletonList],Tag->[compareTo->[compareTo]],getUserTimeZonePostfix->[isUserTimeZoneOverride,getUserTimeZone],initPageVariables->[Functions,initPageVariables],urlEncode->[encode],getLoggerNames->[getLoggerNames],runScript->[getCurrentJellyContext],getRelativeLinkTo->[getUrl],reverse->[reverse],hasPermission->[hasPermission,reverse],getSortedDescriptorsForGlobalConfigNoSecurity->[getSortedDescriptorsForGlobalConfig],checkPermission->[reverse,checkPermission],getPageDecorators->[emptyList],ThreadGroupMap->[compare->[compare]],getSortedDescriptorsForGlobalConfig->[getSortedDescriptorsForGlobalConfig],getCheckUrl->[getCheckUrl],getActionUrl->[joinPath],getBuilderDescriptors->[filter],getWin32ErrorMessage->[getWin32ErrorMessage],getUserAvatar->[getAvatar],ThreadSorter->[compare->[compare]],escape->[escape],getSortedDescriptorsForGlobalConfigUnclassified->[getSortedDescriptorsForGlobalConfig],adminCheck->[adminCheck,checkPermission],getCLICommands->[compare->[compareTo]],isAnonymous->[isAnonymous],getRelativeDisplayNameFrom->[getRelativeNameFrom],getNearestAncestorUrl->[getUrl],getCrumbRequestField->[getCrumbRequestField],getPublisherDescriptors->[filter],getCookie->[getCookie]]]
Computes the relative link to the given item. This method returns the relative display name to the given item from the specified group.
out of curiosity: why did you change it?
@@ -162,7 +162,11 @@ def get_path_from_module(mod): # Read the module text = modulecmd('show', mod, output=str, error=str).split('\n') - return get_path_from_module_contents(text, mod) + p = get_path_from_module_contents(text, mod) + if not os.path.exists(p): + tty.warn("Extracted path from module does not exist:" + "\n\tExtracted path: " + p) + return p def get_path_from_module_contents(text, module_name):
[unload_module->[get_module_cmd],get_path_from_module->[get_module_cmd],load_module->[unload_module,get_module_cmd],get_path_from_module_contents->[get_path_arg_from_module_line]]
Inspects a TCL module and returns the absolute path of the path of the library supported Returns the path argument of the module that has the it in text.
@balay It turns out that `get_path_from_module_contents` can return `None` in certain cases (including the unit tests), which trips `os.path.exists`, so I think the test will need to be stricter: `if p and (not os.path.exists(p)):` (apologies for missing that)
@@ -116,11 +116,11 @@ func (d *DockerBuilder) Build() error { if authPresent { glog.V(4).Infof("Authenticating Docker push with user %q", pushAuthConfig.Username) } - glog.Infof("Pushing image %s ...", pushTag) + glog.V(1).Infof("Pushing image %s ...", pushTag) if err := pushImage(d.dockerClient, pushTag, pushAuthConfig); err != nil { return fmt.Errorf("Failed to push image: %v", err) } - glog.Infof("Push successful") + glog.V(1).Infof("Push successful") } return nil }
[addBuildParameters->[Join,ParseDockerImageReference,Stat,Exact,buildInfo,buildLabels,Parse,Open,ParseTreeToDockerfile,DaemonMinimal,WriteFile,Mode],Build->[addBuildParameters,Infof,TempDir,V,dockerBuild,Errorf,GetDockerAuth,NewHelper],copySecrets->[Join,Clean,Output,Infof,V,MkdirAll,Command],setupPullSecret->[Infof,V,GetDockercfgFile,Errorf,Open,NewAuthConfigurations,Getenv],buildLabels->[GenerateLabelsFromSourceInfo,GetInfo,Error,Infof],dockerBuild->[setupPullSecret,Join,copySecrets],NewReader,From,New,Env,Parse,FindAll,InsertInstructions]
Build starts a Docker image from the given build config. tagImage removes the build tag and push the image.
well you've got this push as a v(1) and the other push as a v(0)
@@ -327,7 +327,8 @@ public class ServerGroupManager { validateAccessCredentials(loggedInUser, sg, sg.getName()); validateAdminCredentials(loggedInUser); - SystemManager.addServersToServerGroup(servers, sg); + SystemManager systemManager = new SystemManager(ServerFactory.SINGLETON, ServerGroupFactory.SINGLETON, saltApi); + systemManager.addServersToServerGroup(servers, sg); updatePillarAfterGroupUpdateForServers(servers); }
[ServerGroupManager->[dissociateAdmins->[validateAdminCredentials,remove,validateAccessCredentials],associateAdmins->[validateAdminCredentials,validateAccessCredentials],lookupEntitled->[lookupEntitled],listAdministrators->[listAdministrators,validateAdminCredentials,validateAccessCredentials],create->[validateAdminCredentials,create],listNoAdminGroups->[listNoAdminGroups],addServers->[validateAdminCredentials,validateAccessCredentials],removeServers->[updatePillarAfterGroupUpdateForServers,validateAdminCredentials,removeServers,validateAccessCredentials],remove->[validateAdminCredentials,remove,validateAccessCredentials],processAdminList->[remove],listServers->[listServers],associateOrDissociateAdminsByLoginName->[validateAdminCredentials,validateAccessCredentials],validateAccessCredentials->[canAccess]]]
Adds a collection of servers to a server group.
shouldn't this SystemManager be kept as a class field instead of being created on each method invocation?
@@ -62,12 +62,13 @@ public class TestArrayBlockBuilder assertTrue(arrayBlockBuilder.getRetainedSizeInBytes() >= (expectedEntries * Long.BYTES + ClassLayout.parseClass(LongArrayBlockBuilder.class).instanceSize() + initialRetainedSize)); } - @Test(expectedExceptions = IllegalStateException.class, expectedExceptionsMessageRegExp = "Expected current entry to be closed but was opened") + @Test public void testConcurrentWriting() { BlockBuilder blockBuilder = new ArrayBlockBuilder(BIGINT, null, EXPECTED_ENTRY_COUNT); BlockBuilder elementBlockWriter = blockBuilder.beginBlockEntry(); elementBlockWriter.writeLong(45).closeEntry(); - blockBuilder.appendStructure(new LongArrayBlockBuilder(null, 1).writeLong(123).closeEntry().build()); + assertThatThrownBy(() -> blockBuilder.appendStructure(new LongArrayBlockBuilder(null, 1).writeLong(123).closeEntry().build())) + .isInstanceOf(IllegalStateException.class).hasMessage("Expected current entry to be closed but was opened"); } }
[TestArrayBlockBuilder->[testRetainedSizeInBytes->[assertTrue,beginBlockEntry,instanceSize,closeEntry,writeLong,getRetainedSizeInBytes,ArrayBlockBuilder],testConcurrentWriting->[build,closeEntry,ArrayBlockBuilder,beginBlockEntry,appendStructure],testIsFull->[assertTrue,beginBlockEntry,isFull,getPositionCount,createBlockBuilderStatus,closeEntry,writeLong,isEmpty,ArrayBlockBuilder,appendNull,assertEquals],testArrayBlockIsFull->[PageBuilderStatus,testIsFull]]]
This test method creates a new array block with the specified size and writes the contents of the.
If `blockBuilder.appendStructure(new LongArrayBlockBuilder(null, 1))` does not throw, let's put it before `assertThatThrownBy`. Similarly, if `writeLong(123)` doesn't throw, let's keep it outside as well.
@@ -148,8 +148,8 @@ Server::Server( const std::string &path_world, const SubgameSpec &gamespec, bool simple_singleplayer_mode, - bool ipv6 - ): + bool ipv6, + bool dedicated) : m_path_world(path_world), m_gamespec(gamespec), m_simple_singleplayer_mode(simple_singleplayer_mode),
[No CFG could be retrieved]
Get pos from server Creates a new object manager and all of its child managers.
Hmmm, instead of continuously adding more boolean parameters, why not combine these into a set of "Server Startup" flags?
@@ -4,9 +4,12 @@ from mock import patch from pip._internal.vcs.subversion import Subversion +@patch('pip._internal.vcs.subversion.Subversion.get_remote_call_options') @patch('pip._internal.vcs.call_subprocess') @pytest.mark.network -def test_obtain_should_recognize_auth_info_url(call_subprocess_mock, script): +def test_obtain_should_recognize_auth_info_url( + call_subprocess_mock, get_remote_call_options_mock, script): + get_remote_call_options_mock.return_value = [] url = 'svn+http://username:password@svn.example.com/' svn = Subversion() svn.obtain(script.scratch_path / 'test', url=url)
[test_export_should_recognize_auth_info_url->[Subversion,export],test_obtain_should_recognize_auth_info_url->[Subversion,obtain],patch]
Test to obtain the auth info url from the remote repository and check it is correct.
If you instantiate `Subversion` with `use_interactive=False`, you can feed two birds with one scone and both (1) avoid having to mock `get_remote_call_options()` and (2) test that remote call options are actually used.
@@ -34,6 +34,12 @@ class VoucherType: pgettext_lazy("Voucher: discount for", "Specific categories of products"), ), (SHIPPING, pgettext_lazy("Voucher: discount for", "Shipping")), + ( + SPECIFIC_PRODUCT, + pgettext_lazy( + "Voucher: discount for", "Specific products, collections and categories" + ), + ), ]
[DiscountValueType->[pgettext_lazy],VoucherType->[pgettext_lazy]]
A list of product - category - collection - discounted items.
`collections and categories` -> `collections or categories` ?
@@ -1170,6 +1170,9 @@ GlobOpt::ProcessPropOpInTypeCheckSeq(IR::Instr* instr, IR::PropertySymOpnd *opnd opnd->SetSlotIndex(slotIndex); opnd->SetUsesAuxSlot(auxSlot); + opnd->GetObjTypeSpecInfo()->SetSlotIndex(slotIndex); + opnd->GetObjTypeSpecInfo()->SetUsesAuxSlot(auxSlot); + isSpecialized = true; if (isTypeCheckedOut) {
[No CFG could be retrieved]
Determines if a value is a non - equivalent set. Checks if a type object is found in the type set.
Does it make sense to have the methods on Opnd themselves set the respective fields on its objTypeSpecFldInfo?
@@ -52,7 +52,7 @@ func (r QueryResult) String() string { func withQueryResult(status int, resp []byte, err error) (int, *QueryResult, error) { if err != nil { - return status, nil, err + return status, nil, errors.Errorf("%v. Response: %s", err, resp) } result, err := readQueryResult(resp) return status, result, err
[CreateIndex->[apiCall],SearchURI->[apiCall],DeletePipeline->[apiCall],CountSearchURI->[apiCall],Index->[apiCall],CreatePipeline->[apiCall],Ingest->[apiCall],Delete->[apiCall],String->[Marshal,Warn],IndexExists->[apiCall],Refresh->[apiCall],apiCall->[Request],Unmarshal]
QueryResult is the base type for all of the query results. readCountResult - reads a CountResults from a byte array or returns nil if the object.
This looks like a good candidate for `errors.Wrapf` as opposed to `Errorf`.
@@ -197,7 +197,12 @@ class Stage(object): return os.path.isfile(path) and Stage.is_valid_filename(path) def changed_md5(self): - return self.md5 != self._compute_md5() + for h in self.hash: + checksum = getattr(self, h) + if checksum: + return checksum != self._compute_checksum(h) + + return self._compute_checksum(self.hash[0]) is not None @property def is_callback(self):
[Stage->[_run->[_warn_if_fish,_check_missing_deps,StageCmdFailedError],_check_stage_path->[StagePathOutsideError,StagePathNotFoundError,StagePathNotDirectoryError],_changed_md5->[changed_md5],relpath->[relpath],is_stage_file->[is_valid_filename],remove->[remove_outs],_check_file_exists->[StageFileDoesNotExistError],check_missing_outputs->[MissingDataSource],check_can_commit->[save,changed_md5,StageCommitError,_changed_entries],dump->[relpath,dumpd,_check_dvc_filename],run->[save,commit,_run,remove_outs],status->[changed_md5,_status],is_cached->[_changed_outs],load->[validate,relpath,Stage,_check_file_exists,_check_dvc_filename,_check_isfile,_get_path_tag],_compute_md5->[dumpd],save->[save,_compute_md5],_check_missing_deps->[MissingDep],_check_dvc_filename->[relpath,is_valid_filename,StageFileBadNameError],checkout->[checkout],_check_isfile->[StageFileIsNotDvcFileError],_already_cached->[changed_md5,changed],commit->[commit],create->[unprotect_outs,_stage_fname,_check_stage_path,StageFileBadNameError,Stage,StageFileAlreadyExistsError,remove_outs],changed->[_changed_deps,_changed_outs,_changed_md5],reproduce->[changed],validate->[StageFileFormatError]]]
True if the MD5 of the object has changed.
There is no need to change hash when computing checksum for the stage file itself. md5 is fine for that application.
@@ -477,6 +477,13 @@ static int ValidateRegistryPromiser(char *key, const Promise *pp) strlcpy(root_key, key, CF_MAXVARSIZE ); sp = strchr(root_key, '\\'); + if (sp == NULL) + { + Log(LOG_LEVEL_ERR, "Cannot locate '\\' in '%s'", root_key); + Log(LOG_LEVEL_ERR, "Failed validating registry promiser"); + PromiseRef(LOG_LEVEL_ERR, pp); + return false; + } *sp = '\0'; for (i = 0; valid[i] != NULL; i++)
[int->[QueryTableColumns,CfFetchColumn,ValidateSQLTableName,strlcpy,PromiseRef,CountChar,xmalloc,CreateTableColumns,RlistDestroy,CheckRegistrySanity,CreateDBQuery,cfPS,xstrdup,strcat,RlistLen,NewSQLColumns,strchr,RlistScalarValue,PromiseResultUpdate,RlistFromSplitString,strncpy,ToLowerStrInplace,CheckSQLDataType,CfDeleteQuery,DeleteSQLColumns,strcmp,GetSQLTables,Log,ValidateRegistryPromiser,free,TableExists,CfFetchRow,IntFromString,memset,CfVoidQueryDB,CfNewQueryDB,snprintf],Rlist->[CfFetchColumn,ListTables,CfDeleteQuery,CfFetchRow,CfNewQueryDB,Log,RlistPrepend],PromiseResult->[cfPS,sscanf,strlcpy,PromiseRef,strlen,YieldCurrentLock,AcquireLock,strcmp,strcpy,CfConnectDB,snprintf,strchr,Log,PromiseResultUpdate,CfCloseDB,VerifyTablePromise,VerifyDatabasePromise],VerifyDatabasePromises->[CheckDatabaseSanity,VerifyRegistryPromise,ProgrammingError,strcmp,PromiseBanner,GetDatabaseAttributes,VerifySQLPromise],void->[free,snprintf]]
Validate a registry prefix.
Might as well fix the return type of this `static` function. :wink:
@@ -51,12 +51,12 @@ class ExportPublishedSubmissionsListGridCellProvider extends DataObjectGridCellP if (empty($title)) $title = __('common.untitled'); $authorsInTitle = $publishedSubmission->getShortAuthorString(); $title = $authorsInTitle . '; ' . $title; - import('lib.pkp.controllers.grid.submissions.SubmissionsListGridCellProvider'); + import('lib.pkp.classes.core.ServicesContainer'); return array( new LinkAction( 'itemWorkflow', new RedirectAction( - SubmissionsListGridCellProvider::getUrlByUserRoles($request, $publishedSubmission) + ServicesContainer::instance()->getWorklowUrlByUserRoles($publishedSubmission) ), $title )
[ExportPublishedSubmissionsListGridCellProvider->[getCellActions->[getStatusNames,getById,getIssueIdentification,getDispatcher,getData,getDepositStatusSettingName,getShortAuthorString,url,getStatusActions,getLocalizedTitle,getId,getIssueId,getContextId],getTemplateVarsFromRowColumn->[getStatusNames,getData,getStatusActions,getDepositStatusSettingName,getId]]]
Get the cell actions for a given row and column. This method is used to get the list of actions that should be performed on the issue cell.
which service does getWorklowUrlByUserRoles come from? I can't find it.
@@ -754,13 +754,13 @@ void GetBinaryLocation(char *path, const uint32_t size) return; } - if ((uint32_t)str_len > size - 1) + if ((unsigned)str_len > size - 1) { str_len = (int) size - 1; } path[str_len] = char(0); #elif defined(__APPLE__) - uint32_t path_size = size; + uint32_t path_size = (uint32_t)size; char *tmp = nullptr; int str_len; if (_NSGetExecutablePath(path, &path_size))
[No CFG could be retrieved]
GetBinaryLocation - Get binary location of a module. This function is called from the WScriptJsrt. It checks if the binary is available.
Ew. But okay. Is this really the best option?
@@ -188,6 +188,7 @@ RSpec.describe "StoriesShow", type: :request do it "handles invalid slug characters" do allow(Article).to receive(:find_by).and_raise(ArgumentError) + allow(Article).to receive(:where).and_raise(ArgumentError) get article.path expect(response.status).to be(400)
[create,let,be,describe,slug,ago,join,it,rand,to,and_raise,canonical_url,escapeHTML,require,username,include,have_http_status,update,title,id,redirect_to,path,get,not_to,sign_in,update_column,and_return]
Spec for StoriesShow: verifies that invalid slug characters are handled by raising ArgumentError and responding with HTTP 400.
We used to only do `find_by` but with the `where` clause introduced this keeps the spec working in the same way. Any feedback here to make sure this case is properly covered is appreciated :)
@@ -109,11 +109,10 @@ func (v *uniformVoteWeight) AmIMemberOfCommitee() bool { return false } identity, _ := pubKeyFunc() - everyone := v.DumpParticipants() + everyone := v.Participants() for _, key := range identity.PublicKey { - myVoterID := key.SerializeToHexStr() for i := range everyone { - if everyone[i] == myVoterID { + if everyone[i] == key { return true } }
[AmIMemberOfCommitee->[MyPublicKey,SerializeToHexStr,DumpParticipants],QuorumThreshold->[NewDec,TwoThirdsSignersCount],Award->[NewInt,Div,Address,Sub,Add,Mul],IsRewardThresholdAchieved->[SignersCount,ParticipantsCount],IsQuorumAchieved->[Str,Msg,ParticipantsCount,TwoThirdsSignersCount,String,Int64,SignersCount,Info,Logger],ResetPrepareAndCommitVotes->[reset],MarshalJSON->[Policy,Marshal,DumpParticipants,String],IsQuorumAchievedByMask->[Warn,Msgf,CountOneBits,TwoThirdsSignersCount,Logger,Debug],String->[Marshal],ResetViewChangeVotes->[reset]]
AmIMemberOfCommitee returns true if the current voter's public key is among the committee participants.
This is comparing pointers. We shouldn't compare pointers. it's error prone.
@@ -81,4 +81,15 @@ COLOR_RED_FORMAT = '\033[1;31;31m%s\033[0m' COLOR_GREEN_FORMAT = '\033[1;32;32m%s\033[0m' -COLOR_YELLOW_FORMAT = '\033[1;33;33m%s\033[0m' \ No newline at end of file +COLOR_YELLOW_FORMAT = '\033[1;33;33m%s\033[0m' + +ModuleName = { + 'TPE': 'nni.hyperopt_tuner.hyperopt_tuner', + 'Random': 'nni.hyperopt_tuner.hyperopt_tuner', + 'Anneal': 'nni.hyperopt_tuner.hyperopt_tuner', + 'Evolution': 'nni.evolution_tuner.evolution_tuner', + 'SMAC': 'nni.smac_tuner.smac_tuner', + 'BatchTuner': 'nni.batch_tuner.batch_tuner', + 'GridSearch': 'nni.gridsearch_tuner.gridsearch_tuner', + 'NetworkMorphism': 'nni.networkmorphism_tuner.networkmorphism_tuner' +}
[join]
ANSI color format constants, plus a mapping from tuner names to their Python module paths.
Seems this part has conflict with master now
@@ -561,7 +561,7 @@ function thirdPartyBootstrap(input, outputName, shouldMinify) { // version is not available on the absolute path. var integrationJs = argv.fortesting ? './f.js' - : `https://3p.ampproject.net/${internalRuntimeVersion}/f.js`; + : `https://${hostname3p}/${internalRuntimeVersion}/f.js`; // Convert default relative URL to absolute min URL. var html = fs.readFileSync(input, 'utf8') .replace(/\.\/integration\.js/g, integrationJs);
[No CFG could be retrieved]
Bootstraps the third-party frame by rewriting the relative integration script URL in the input file to an absolute URL.
Doesn't f.js (3p/integration) already use urls defined config.js?