patch
stringlengths
18
160k
callgraph
stringlengths
4
179k
summary
stringlengths
4
947
msg
stringlengths
6
3.42k
@@ -125,10 +125,10 @@ def bbox3(img): return np.array([[-1, -1, -1], [0, 0, 0]]) -def read_nii_header(data_path, series_name, suffix, separate_folder=True): - filename = os.path.join(data_path, series_name, series_name + suffix) +def read_nii_header(data_path, name, suffix='', separate_folder=True): + filename = os.path.join(data_path, name, name + suffix) if not separate_folder: - filename = os.path.join(data_path, series_name + suffix) + filename = os.path.join(data_path, name + suffix) if not os.path.exists(filename): raise AttributeError("File {} is not exist. Please, validate path to input".format(filename)) return nib.load(filename)
[read_image->[read_nii_header,resample_np,normalize,bbox3],main->[resample_np,find_series_name,read_image,parse_arguments],main]
Read NIIs header from NIIs file.
Given that `separate_folder` is always specified as `false`, perhaps it would be better to remove the `suffix` and `separate_folder` parameters entirely? The caller can append a suffix if it needs it.
@@ -50,7 +50,8 @@ import org.jboss.weld.util.reflection.Reflections; public class ResolvableBuilder { - private static final Class<?>[] FACADE_TYPES = new Class<?>[] { Event.class, Instance.class, WeldInstance.class, Provider.class, InterceptionFactory.class }; + private static final Class<?>[] FACADE_TYPES = new Class<?>[] { Event.class, Instance.class, WeldEvent.class, + WeldInstance.class, Provider.class, InterceptionFactory.class }; private static final Class<?>[] METADATA_TYPES = new Class<?>[] { Interceptor.class, Decorator.class, Bean.class }; private static final Set<QualifierInstance> ANY_SINGLETON = Collections.singleton(QualifierInstance.ANY);
[ResolvableBuilder->[addQualifier->[addQualifier],addQualifiers->[addQualifier,addQualifiers],ResolvableImpl->[toString->[getTypes,getQualifiers],equals->[getTypes,equals]]]]
Create a new instance of a bean manager. Create a ResolvableBuilder that will resolve to the given type.
Not really sure we need to change this?
@@ -114,11 +114,12 @@ public class StatementExecutorTest extends EasyMockSupport { Map<CommandId, CommandStatus> statusStore = statementExecutor.getStatuses(); Assert.assertNotNull(statusStore); + Assert.assertEquals(6, statusStore.size()); Assert.assertEquals(CommandStatus.Status.SUCCESS, statusStore.get(topicCommandId).getStatus()); Assert.assertEquals(CommandStatus.Status.SUCCESS, statusStore.get(csCommandId).getStatus()); -// Assert.assertEquals(CommandStatus.Status.SUCCESS, statusStore.get(csasCommandId).getStatus()); -// Assert.assertEquals(CommandStatus.Status.ERROR, statusStore.get(ctasCommandId).getStatus()); -// Assert.assertEquals(CommandStatus.Status.SUCCESS, statusStore.get(terminateCommandId).getStatus()); + Assert.assertEquals(CommandStatus.Status.SUCCESS, statusStore.get(csasCommandId).getStatus()); + Assert.assertEquals(CommandStatus.Status.ERROR, statusStore.get(ctasCommandId).getStatus()); + Assert.assertEquals(CommandStatus.Status.SUCCESS, statusStore.get(terminateCommandId).getStatus()); }
[StatementExecutorTest->[handleIncorrectDDLStatement->[getStatementExecutor],handleCorrectDDLStatement->[getStatementExecutor],handleCSAS_CTASStatement->[getStatementExecutor],handlePriorStatement->[getStatementExecutor]]]
This method is used to handle the CSAS and CTA statements. This method handles the case where a prior command is executed.
you can create a `Map<CommandId, CommandStatus>` call it expected and then do: `assertThat(statusStore, equalTo(expected))`
@@ -162,7 +162,7 @@ SCHEMA = { "endpointurl": str, "access_key_id": str, "secret_access_key": str, - Optional("listobjects", default=False): Bool, + Optional("listobjects", default=False): Bool, # obsoleted Optional("use_ssl", default=True): Bool, "sse": str, "sse_kms_key_id": str,
[merge->[merge],_lower_keys->[_lower_keys],Config->[load->[validate],_load_paths->[resolve->[RelPath]],edit->[load,_save_paths,ConfigError,_save_config,load_one,load_config_to_level,validate],_save_config->[_get_tree],init->[Config],files->[get_dir],load_one->[_load_config],load_config_to_level->[load_one],validate->[ConfigError],_load_config->[_get_tree]],Choices,ByUrl]
XML - Schema of the network network. Return a list of cache entries that can be accessed by remote servers.
Obsoleted since we are using a high-level resource API now, which just does the switch automatically.
@@ -1,5 +1,7 @@ <?php +use Automattic\Jetpack\Sync\Modules\Comments; + /** * Testing CRUD on Comments */
[WP_Test_Jetpack_Sync_Comments->[test_modify_comment_content->[modify_comment_helper,markTestSkipped],test_update_comment->[do_sync,assertEquals,get_comment],test_modify_comment_multiple_attributes->[modify_comment_helper,markTestSkipped],test_post_trashed_comment_handling->[do_sync,assertEquals,comment_count],test_wp_unspam_comment->[do_sync,assertEquals,get_most_recent_event,comment_count],test_trash_comment_trashes_data->[do_sync,assertEquals,comment_count],test_sync_comment_jetpack_sync_prevent_sending_comment_data_filter->[assertTrue,reset,get_most_recent_event,assertFalse,get_comment,do_sync,assertEquals,comment_count],test_add_comment_syncs_event->[assertEqualsObject,assertNotEquals,assertEquals,get_most_recent_event],test_wp_spam_comment->[do_sync,assertEquals,comment_count],test_returns_comment_object_by_id->[get_most_recent_event,get_codec,get_object_by_id,decode,encode,assertEquals],setUp->[do_sync,create,create_post_comments],test_modify_comment_author->[modify_comment_helper,markTestSkipped],test_unapprove_comment_does_not_trigger_content_modified_event->[do_sync,assertFalse,get_most_recent_event],test_modify_comment_author_email->[modify_comment_helper,markTestSkipped],test_wp_trash_comment->[reset,get_most_recent_event,assertFalse,do_sync,assertEquals,comment_count],test_add_comment_syncs_comment_data->[get_comment,assertEqualsObject,assertEquals,comment_count],test_modify_comment_author_url->[modify_comment_helper,markTestSkipped],test_unapprove_comment->[assertTrue,reset,get_most_recent_event,assertFalse,get_comment,do_sync,assertEquals,comment_count],test_delete_comment_deletes_data->[do_sync,assertEquals,comment_count],test_post_untrashed_comment_handling->[do_sync,assertEquals,comment_count],modify_comment_helper->[assertTrue,reset,get_most_recent_event,assertFalse,do_sync,assertEquals],test_wp_untrash_comment->[do_sync,assertEquals,get_most_recent_event,comment_count]]]
CRUD on Comments This method checks if the comment content has not been modified.
we don't seem to be using this class in this file
@@ -28,6 +28,7 @@ namespace Microsoft.Extensions.Hosting /// the lifetime of the long running operation(s) being performed. /// </summary> /// <param name="stoppingToken">Triggered when <see cref="IHostedService.StopAsync(CancellationToken)"/> is called.</param> + /// See <related type="Article" href="/dotnet/core/extensions/workers">Worker Services in .NET</related> for implementation guidelines. /// <returns>A <see cref="Task"/> that represents the long running operations.</returns> protected abstract Task ExecuteAsync(CancellationToken stoppingToken);
[BackgroundService->[Task->[Token,CreateLinkedTokenSource,CompletedTask,IsCompleted,Cancel,ConfigureAwait,ExecuteAsync]]]
Execute the task.
This is incorrect, AFAIK. The `<related>` tag shouldn't have any surrounding text. If you want to have descriptive text like that, I think you need to use a `<see href="url">text</see>` within the summary or remarks.
@@ -159,7 +159,7 @@ class DoctrineListBuilder extends AbstractListBuilder // now select all data $this->queryBuilder = $this->em->createQueryBuilder() - ->from($this->entityName, $this->entityName); + ->from($this->entityName, str_replace(['\\', ':'], '_', $this->entityName)); $this->assignJoins($this->queryBuilder); // Add all select fields
[DoctrineListBuilder->[getNecessaryJoins->[getAllFields,getJoins],getEntityNamesOfFieldDescriptors->[getJoins],createQueryBuilder->[assignGroupBy],assignJoins->[getJoins],createSubQueryBuilder->[getAllFields],getJoins->[getJoins]]]
Execute the query for a single node.
these are illegal characters for the new Doctrine version.
@@ -40,7 +40,7 @@ func (mc *MeshCatalog) repeater() { } func (mc *MeshCatalog) handleAnnouncement(ann announcements.Announcement) { - if ann.Type == announcements.EndpointDeleted { + if ann.Type == announcements.PodDeleted { log.Trace().Msgf("Handling announcement: %+v", ann) // TODO: implement (https://github.com/openservicemesh/osm/issues/1719) }
[getCases->[getAnnouncementChannels,ValueOf],repeater->[Error,broadcastToAllProxies,handleAnnouncement,Msgf,Now,getCases,Select,Interface,Trace,Since,Add],handleAnnouncement->[Trace,Msgf],broadcastToAllProxies->[Unlock,Msgf,GetCommonName,GetAnnouncementsChannel,Lock,Debug]]
handleAnnouncement handles an announcement from the remote endpoint.
What about PodAdded and PodUpdated ?
@@ -150,6 +150,7 @@ class Seq2seqAgent(Agent): self.truncate = opt['truncate'] if opt['truncate'] > 0 else None self.metrics = {'loss': 0.0, 'num_tokens': 0} self.history = {} + self.report_freq = opt['report_freq'] states = {} # check for cuda
[Seq2seqAgent->[add_cmdline_args->[dictionary_class],vectorize->[parse],load->[load],shutdown->[save],save->[save],act->[batch_act],predict->[update_params,zero_grad],batch_act->[predict,vectorize],zero_grad->[zero_grad]]]
Set up model if shared params not set otherwise no work to do. Load the model parameters and initialize the object with the next n - grams. add additional tensors to the model and embed the tokens in the model.
can you make this opt.get('report_freq', 0.001) just to make sure old loaded opts don't freak out?
@@ -355,14 +355,13 @@ public class HivePageSourceProvider for (HiveColumnHandle column : columns) { Optional<HiveType> baseTypeCoercionFrom = tableToPartitionMapping.getCoercion(column.getBaseHiveColumnIndex()); - if (column.getColumnType() == REGULAR) { if (column.isBaseColumn()) { baseColumnHiveIndices.add(column.getBaseHiveColumnIndex()); } checkArgument( - projectionsForColumn.computeIfAbsent(column.getBaseHiveColumnIndex(), HashSet::new).add(column.getHiveColumnProjectionInfo()), + projectionsForColumn.computeIfAbsent(column.getBaseHiveColumnIndex(), index -> new HashSet<>()).add(column.getHiveColumnProjectionInfo()), "duplicate column in columns list"); // Add regular mapping if projection is valid for partition schema, otherwise add an empty mapping
[HivePageSourceProvider->[createHivePageSource->[createPageSource],ColumnMapping->[empty->[ColumnMapping,empty],buildColumnMappings->[interim,regular,prefilled,empty],regular->[ColumnMapping],prefilled->[ColumnMapping],interim->[ColumnMapping]]]]
Build column mappings. Returns a list of column mappings that can be used to determine whether a column should be added.
Good catch! Please make it separate commit (this or separate PR)
@@ -81,6 +81,17 @@ const buildOpenOutlinkAttachmentElement = (element) => </div> </a>`; +/** + * @param {!Element} element + * @return {!Element} + */ +export const buildOpenAttachmentElementLinkIcon = (element) => + htmlFor(element)` + <svg class="i-amphtml-story-page-open-attachment-link-icon" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" xml:space="preserve"> + <path fill-opacity=".1" d="M12 0c6.6 0 12 5.4 12 12s-5.4 12-12 12S0 18.6 0 12 5.4 0 12 0z"/> + <path d="M13.8 14.6c.1.1.2.3.2.5s-.1.3-.2.5L12.3 17c-.7.7-1.7 1.1-2.7 1.1-1 0-1.9-.4-2.7-1.1-.7-.7-1.1-1.7-1.1-2.7 0-1 .4-1.9 1.1-2.7l1.5-1.5c.2 0 .3-.1.5-.1s.3.1.5.2c.1.1.2.3.2.5s-.1.4-.2.5l-1.5 1.5c-.5.5-.7 1.1-.7 1.7 0 .6.3 1.3.7 1.7.5.5 1.1.7 1.7.7s1.3-.3 1.7-.7l1.5-1.5c.3-.3.7-.3 1 0zM17 7c-.7-.7-1.7-1.1-2.7-1.1-1 0-1.9.4-2.7 1.1l-1.5 1.5c0 .1-.1.3-.1.4 0 .2.1.3.2.5.1.1.3.2.5.2s.3-.1.5-.2l1.5-1.5c.5-.5 1.1-.7 1.7-.7.6 0 1.3.3 1.7.7.5.5.7 1.1.7 1.7 0 .6-.3 1.3-.7 1.7l-1.5 1.5c-.1.1-.2.3-.2.5s.1.3.2.5c.1.1.3.2.5.2s.3-.1.5-.2l1.5-1.5c.7-.7 1.1-1.7 1.1-2.7-.1-1-.5-1.9-1.2-2.6zm-7.9 7.2c0 .2.1.3.2.5.1.1.3.2.5.2s.4-.1.5-.2l4.5-4.5c.1-.1.2-.3.2-.5s-.1-.4-.2-.5c-.3-.2-.8-.2-1 .1l-4.5 4.5c-.1.1-.2.3-.2.4z"/> + </svg>`; + /** * Determines which open attachment UI to render. * @param {!Window} win
[No CFG could be retrieved]
Determines which open attachment UI to render. Renders the page attachment UI if the user has clicked on the link.
xml:space might be unnecessary
@@ -508,6 +508,9 @@ class BuildManager(BuildManagerBase): self.modules = {} # type: Dict[str, MypyFile] self.missing_modules = set() # type: Set[str] self.fg_deps_meta = {} # type: Dict[str, FgDepMeta] + # Always convert the plugin to a ChainedPlugin so that it can be manipulated if needed + if not isinstance(plugin, ChainedPlugin): + plugin = ChainedPlugin(options, [plugin]) self.plugin = plugin if options.new_semantic_analyzer: # Set of namespaces (module or class) that are being populated during semantic
[_build->[BuildSourceSet,BuildResult],write_deps_cache->[deps_to_json,getmtime],BuildManager->[all_imported_modules_in_file->[correct_rel_imp,import_priority],report_file->[is_source],get_stat->[maybe_swap_for_shadow_path],getmtime->[getmtime]],generate_deps_for_cache->[invert_deps,load_fine_grained_deps],load_plugins->[plugin_error],State->[parse_file->[parse_file,compute_hash,wrap_context,check_blockers,maybe_swap_for_shadow_path],wrap_context->[check_blockers],type_check_second_pass->[wrap_context,type_checker],load_fine_grained_deps->[load_fine_grained_deps],__init__->[find_cache_meta,parse_file,validate_meta,use_fine_grained_cache],semantic_analysis->[wrap_context],compute_fine_grained_deps->[type_map],write_cache->[delete_cache,mark_interface_stale,write_cache],type_check_first_pass->[wrap_context],generate_unused_ignore_notes->[generate_unused_ignore_notes,verify_dependencies],semantic_analysis_pass1->[wrap_context],semantic_analysis_pass_three->[wrap_context],compute_dependencies->[all_imported_modules_in_file,check_blockers],finish_passes->[report_file,type_map,wrap_context,type_checker],type_map->[type_checker]],in_partial_package->[State],dump_graph->[dumps,NodeInfo],NodeInfo->[dumps->[dumps]],load_graph->[State,find_module_simple,use_fine_grained_cache],read_deps_cache->[getmtime],get_cache_names->[normpath,_cache_dir_prefix],delete_cache->[get_cache_names],find_cache_meta->[get_cache_names,_load_json_file,cache_meta_from_dict],validate_meta->[get_cache_names,normpath,use_fine_grained_cache,get_stat,getmtime],order_ascc->[order_ascc],write_cache->[json_dumps,cache_meta_from_dict,compute_hash,get_cache_names,get_stat,getmtime],dispatch->[write_plugins_snapshot,read_deps_cache,dispatch,write_deps_cache,use_fine_grained_cache,generate_deps_for_cache],strongly_connected_components->[dfs->[dfs],dfs]]
Initialize a new object with the given configuration. Initialize the object with a object.
Can't you do that at an earlier stage? IIRC there's code earlier that creates a ChainedPlugin if there's more than one - you could change that.
@@ -27,7 +27,7 @@ module.exports = class coinmate extends Exchange { }, 'urls': { 'logo': 'https://user-images.githubusercontent.com/1294454/27811229-c1efb510-606c-11e7-9a36-84ba2ce412d8.jpg', - 'api': 'https://coinmate.io/api', + 'api': 'https://coinmate.docs.apiary.io', 'www': 'https://coinmate.io', 'fees': 'https://coinmate.io/fees', 'doc': [
[No CFG could be retrieved]
Provides a description of a CoinMate exchange that is used to retrieve a single radingPairs - > radialPairs.
I think this is a wrong key to edit the doc url )
@@ -349,7 +349,8 @@ namespace DynamoShapeManager throw new MissingMethodException("Method 'DynamoInstallDetective.Utilities.FindProductInstallations' not found"); } - var methodParams = new object[] { "Revit", "ASMAHL*.dll" }; + + var methodParams = new object[] { ProductsWithASM, "ASMAHL*.dll" }; return installationsMethod.Invoke(null, methodParams) as IEnumerable; } }
[Utilities->[GetGeometryFactoryPath->[MapLibGVersionEnumToFullVersion,GetGeometryFactoryPath2],PreloadAsmFromPath->[GetType,RemapOldLibGPathToNewVersionPath,Public,Exists,Format,GetMethod,Invoke,WriteLine,IsNullOrEmpty,Combine,Static,LoadFrom],RemapOldLibGPathToNewVersionPath->[Build,Format,MapLibGVersionEnumToFullVersion,Count,Empty,IsNullOrEmpty,Major,GetDirectoryName,Enum,Minor,GetFileName,Combine,Split],Version->[Build,Any,Exists,Format,getASMInstallsFunc,Count,Empty,IsNullOrEmpty,Item2,Major,FullName,Minor,GetFiles,Item3,Combine,Item1,Key],IEnumerable->[GetType,Public,Invoke,Exists,GetMethod,Combine,Static,LoadFrom],LibraryVersion->[Any,Exists,Format,None,Count,GetAsmInstallations,IsNullOrEmpty,Empty,FullName,GetFiles,Combine,Item1,Key],GetGeometryFactoryPath2->[Build,Exists,Format,Major,IsNullOrEmpty,Minor,Combine]]]
Finds all product installations in the specified assembly.
Could you use ASMFileMask from above to avoid string duplication?
@@ -85,6 +85,8 @@ func (s *statePersister) iteration(ctx context.Context) error { } func (s *statePersister) persist(ctx context.Context) error { + s.persistTotal.Inc() + // Only the replica at position zero should write the state. if s.state.Position() != 0 { return nil
[starting->[WaitReady],persist->[GetFullState,WithTimeout,Position,Log,SetFullState,Debug],iteration->[persist,Log,Error],RegisterFlagsWithPrefix->[DurationVar],New,NewTimerService]
persist is used to persist the state of the user in the state store.
If you track it before `s.state.Position() != 0` then its value doesn't match with `s.persistFailed`, because only the replica at position 0 can fail (think about computing the % of failures). I would track this metric after the check `s.state.Position() != 0`.
@@ -17,7 +17,10 @@ namespace NServiceBus public static Task Publish(IBehaviorContext context, object message, PublishOptions options) { - return Publish(context, message.GetType(), message, options); + var mapper = context.Builder.Build<IMessageMapper>(); + var messageType = mapper.GetMappedTypeFor(message.GetType()); + + return Publish(context, messageType, message, options); } static Task Publish(IBehaviorContext context, Type messageType, object message, PublishOptions options)
[MessageOperations->[Task->[GetType,Context,SendMessage,Invoke,OutgoingHeaders,ReplyMessage,MessageId,AddDeliveryConstraint,Builder,CreateInstance,Publish,Extensions,UserDefinedMessageId,ToString,DelayedDeliveryConstraint,cache]]]
Publish a message with the given options.
could we reuse the mapper instead of building it every time?
@@ -408,13 +408,15 @@ class Flows(ClientModule): class FlowRuns(ClientModule): - def create(self, flow_id, parameters, start_time: None) -> dict: + def create( + self, flow_id: str, parameters: dict, start_time: datetime.datetime = None + ) -> dict: """ Create a flow run Args: - flow_id (str): A unique flow identifier - - parameters (str): Paramaters set on a flow + - parameters (dict): Paramater dictionary to provide for the flow run - start_time (datetime, optional): An optional start time for the flow run Returns:
[Secret->[get->[get]],FlowRuns->[set_state->[_graphql],create->[_graphql],query->[_graphql]],Flows->[delete->[_graphql],create->[_graphql],query->[_graphql]],Client->[graphql->[_post],_request->[request_fn]],ClientModule->[_get->[_get],_graphql->[graphql],_post->[_post],_delete->[_delete]],RunFlow->[run_flow->[_graphql]],TaskRuns->[set_state->[_graphql],query->[_graphql]],Projects->[create->[_graphql]],States->[query_flow_run_version->[_graphql],query_task_run_id_and_version->[_graphql],set_flow_run_from_serialized_state->[_graphql],set_task_run_from_serialized_state->[_graphql]]]
Create a new flow run with the given parameters.
Note for future -- we need to do some json-able tests for parameters
@@ -38,11 +38,11 @@ class Registrable(FromParams): a subclass to load all other subclasses and the abstract class). """ - _registry: Dict[Type, Dict[str, Type]] = defaultdict(dict) + _registry: Dict[Type, Dict[str, Tuple[Type, str]]] = defaultdict(dict) default_implementation: str = None @classmethod - def register(cls: Type[T], name: str, exist_ok=False): + def register(cls: Type[T], name: str, constructor: str = None, exist_ok: bool = False): """ Register a class under a particular name.
[Registrable->[by_name->[split,debug,getattr,_registry,import_module,join,ConfigurationError],register->[add_subclass_to_registry->[info,ConfigurationError]],list_available->[list,ConfigurationError,_registry],defaultdict],getLogger,TypeVar]
Register a class under a particular name.
Now we can have markdown in docstrings, can you add a couple of examples of registering different constructors and their corresponding jsonnet config?
@@ -204,7 +204,6 @@ async function executeTransfer(transfer, opts) { case 'failed': transferStatus = enums.TransferStatuses.Failed eventAction = TRANSFER_FAILED - failureReason = 'Tx failed' break case 'timeout': transferStatus = enums.TransferStatuses.Failed
[No CFG could be retrieved]
Send transaction to transfer the tokens and record the transaction hash in the DB. create an event for a specific .
why removing this? is it because it is redundant with the eventAction and does not add much more info?
@@ -189,6 +189,10 @@ public class DeltaConfig implements Serializable { return Boolean.valueOf(configsMap.getOrDefault(VALIDATE_CLEAN, false).toString()); } + public boolean doEnableRowWriting() { + return Boolean.valueOf(configsMap.getOrDefault(ENABLE_ROW_WRITING, false).toString()); + } + public Map<String, Object> getOtherConfigs() { if (configsMap == null) { return new HashMap<>();
[DeltaConfig->[Config->[Builder->[build->[Config]]]]]
Validate the configuration.
doEnableRowWriting -> doRowWriting or enableRowWriting
@@ -0,0 +1,15 @@ +from decimal import Decimal +from typing import TYPE_CHECKING, Union + +from babel.numbers import get_currency_precision + +if TYPE_CHECKING: + from prices import TaxedMoney, Money, TaxedMoneyRange + + +def quantize_price( + price: Union["TaxedMoney", "Money", Decimal, "TaxedMoneyRange"], currency: str +) -> Union["TaxedMoney", "Money", Decimal, "TaxedMoneyRange"]: + precision = get_currency_precision(currency) + number_places = Decimal(10) ** -precision + return price.quantize(number_places)
[No CFG could be retrieved]
No Summary Found.
What if I run Saleor in some specific currency, for example Dogecoin? Maybe we should have some default precision?
@@ -76,4 +76,14 @@ public class RehashWithL1Test extends MultipleCacheManagersTest { assertEquals(0, cache(2).size()); } + private static class MyBaseControlledConsistentHashFactory extends BaseControlledConsistentHashFactory { + public MyBaseControlledConsistentHashFactory() { + super(1); + } + + @Override + protected List<Address> createOwnersCollection(List<Address> members, int numberOfOwners, int segmentIndex) { + return Collections.singletonList(members.get(members.size() - 1)); + } + } }
[RehashWithL1Test->[testPutWithRehashAndCacheClear->[call->[addClusterEnabledCacheManager,testThreadStarted,withMerge,getCache],fork,size,sleep,get,containsKey,remove,assertFalse,put,cache,assertEquals,Exception],createCacheManagers->[createClusteredCaches,getDefaultClusteredCacheConfig,enable]]]
Put with rehash and cache clear.
I would call this LastMemberIsOwnerCHF to describe the intent.
@@ -740,6 +740,11 @@ public class TerminalSession extends XTermWidget } }); } + + public void showZombieMessage() + { + writeln("[Process completed]"); + } /** * Write to terminal after a terminal has restarted (on the server). We
[TerminalSession->[unregisterHandlers->[unregisterHandlers],cleanupAfterTerminate->[unregisterHandlers],disconnect->[disconnect],terminate->[onResponseReceived->[getHandle],getHandle],onLoad->[onLoad,connect],connect->[onResponseReceived->[connect]],shellSupportsReload->[getShellType],setVisible->[connect,setVisible],receivedInput->[connect],sendUserInput->[onResponseReceived->[sendUserInput]],getHandle->[getHandle],getShellType->[getShellType],onSessionSerialization->[disconnect],writeRestartSequence->[getHandle],getInteractionMode->[getInteractionMode],fetchNextChunk->[execute->[onError->[writeError],onResponseReceived->[fetchNextChunk]],shellSupportsReload],onDetach->[unregisterHandlers,disconnect,onDetach]]]
Fetch next chunk from the console.
Would it be useful to show the exit code here? E.g. `[Process completed]` vs. `[Process exited with error code n]` when a non-zero exit code is encountered?
@@ -1039,12 +1039,16 @@ public abstract class AbstractRemoteFileOutboundGateway<F> extends AbstractReply if (!appending && !tempFile.renameTo(localFile)) { throw new MessagingException("Failed to rename local file"); } - if (this.options.contains(Option.PRESERVE_TIMESTAMP)) { + if (this.options.contains(Option.PRESERVE_TIMESTAMP) + || FileExistsMode.REPLACE_IF_MODIFIED.equals(fileExistsMode)) { localFile.setLastModified(getModified(fileInfo)); } } else if (FileExistsMode.REPLACE_IF_MODIFIED.equals(fileExistsMode)) { logger.debug("Local file '" + localFile + "' has the same modified timestamp, ignored"); + if (this.command.equals(Command.MGET)) { + localFile = null; + } } else if (!FileExistsMode.IGNORE.equals(fileExistsMode)) { throw new MessageHandlingException(message, "Local file " + localFile + " already exists");
[AbstractRemoteFileOutboundGateway->[setTemporaryFileSuffix->[setTemporaryFileSuffix],putLocalDirectory->[putLocalDirectory,doPut],listFilesInRemoteDir->[listFilesInRemoteDir],mGetWithoutRecursion->[buildRemotePath,ls,get],setRenameExpressionString->[setRenameExpression],filterMputFiles->[filterFiles],filterFiles->[filterFiles],doPut->[doPut],setRemoteFileSeparator->[setRemoteFileSeparator],put->[isChmodCapable],doMput->[doPut],mGetWithRecursion->[ls,get],toCommand]]
This method retrieves a file from the remote server. Delete the local file from remote server.
This DEBUG message should be wrapped with the `if (logger.isDebugEnabled()) {` OTOH with the new logic via `if (this.command.equals(Command.MGET)) `, I'd say that this message should go to the else side. WDYT? I can fix on merge. I guess since it is a logic change we can't backport it into `4.3.x`
@@ -106,18 +106,6 @@ namespace Dynamo.ViewModels } } - public DelegateCommand ValidateConnectionsCommand - { - get - { - if(_validateConnectionsCommand == null) - _validateConnectionsCommand = - new DelegateCommand(ValidateConnections, CanValidateConnections); - - return _validateConnectionsCommand; - } - } - public DelegateCommand ToggleIsVisibleCommand { get
[No CFG could be retrieved]
Replies the command that is used to set the state of the node and the command that is Replies the command that sets the model size for the given node.
API Change: `NodeCommands.ValidateConnectionsCommand` has been removed.
@@ -103,7 +103,6 @@ class Jetpack_Plan { 'ecommerce-bundle', 'ecommerce-bundle-monthly', 'ecommerce-bundle-2y', - 'vip', ), 'supports' => array(), ),
[No CFG could be retrieved]
This endpoint provides a list of all possible sites. Updates the site s from the site s sites response.
By moving it over, will that affect sites still on "professional" plans?
@@ -24,6 +24,8 @@ use Friendica\Util\Network; use Friendica\Util\Temporal; use dba; +use \InvalidArgumentException; + require_once 'include/dba.php'; require_once 'mod/proxy.php';
[Profile->[load->[set_template_engine,getCurrentTheme],openWebAuthInit->[get_hostname]]]
Returns a formatted string of the given node ID in the given profile. Displays a profile in the page sidebar.
Code Standards: Please remove leading `\` in `use` statements.
@@ -501,12 +501,13 @@ class Model(base_layer.Layer, version_utils.ModelVersionSelector): categorical crossentropy where shape = `[batch_size, d0, .. dN-1]`. y_pred = predicted values with shape = `[batch_size, d0, .. dN]`. It returns a weighted loss float tensor. If a custom `Loss` instance is - used and reduction is set to NONE, return value has the shape - [batch_size, d0, .. dN-1] ie. per-sample or per-timestep loss values; + used and reduction is set to `None`, return value has the shape + `[batch_size, d0, .. dN-1]` i.e. per-sample or per-timestep loss values; otherwise, it is a scalar. If the model has multiple outputs, you can use a different loss on each output by passing a dictionary or a list of losses. The loss value that will be minimized by the model will - then be the sum of all individual losses. + then be the sum of all individual losses, unless `loss_weights` is + specified. metrics: List of metrics to be evaluated by the model during training and testing. Each of this can be a string (name of a built-in function), function or a `tf.keras.metrics.Metric` instance. See
[concat->[concat],_collective_all_reduce_multi_worker->[_in_multi_worker_mode],write_scalar_summaries->[_is_scalar],_tpu_multi_host_concat->[concat],Model->[predict_generator->[predict],make_predict_function->[step_function->[run_step->[predict_step]],predict_function->[step_function]],make_train_function->[train_function->[step_function],step_function->[run_step->[train_step]]],to_json->[_updated_config],_set_inputs->[_set_save_spec],make_test_function->[test_function->[step_function],step_function->[run_step->[test_step]]],evaluate_generator->[evaluate],predict->[predict_function,make_predict_function],fit_generator->[fit],__new__->[is_functional_model_init_params],test_on_batch->[test_function,make_test_function,reset_metrics],__init__->[inject_functional_model_class,is_functional_model_init_params,__init__],train_on_batch->[train_function,reset_metrics,make_train_function],save_weights->[save],evaluate->[test_function,make_test_function],_get_compile_args->[_assert_compile_was_called],predict_on_batch->[predict_function,make_predict_function],to_yaml->[_updated_config],reset_states->[reset_states],_in_multi_worker_mode->[_in_multi_worker_mode],fit->[train_function,make_train_function]],_multi_worker_concat->[concat],inject_functional_model_class->[inject_functional_model_class]]
Configures the model for training a . Parameters for training and testing a . Creates a base layer for a object.
Add: "in that case it will be the weighted sum"?
@@ -702,3 +702,16 @@ func (pr *PullRequest) GetHeadBranchHTMLURL() string { } return pr.HeadRepo.HTMLURL() + "/src/branch/" + util.PathEscapeSegments(pr.HeadBranch) } + +// UpdateAllowEdits update if PR can be edited from maintainers +func (pr *PullRequest) UpdateAllowEdits(allow bool) error { + return pr.updateAllowEdits(db.GetEngine(db.DefaultContext), allow) +} + +func (pr *PullRequest) updateAllowEdits(e db.Engine, allow bool) error { + pr.AllowEditsFromMaintainers = allow + if _, err := e.ID(pr.ID).Cols("allow_edits_from_maintainers").Update(pr); err != nil { + return err + } + return nil +}
[GetWorkInProgressPrefix->[LoadIssue],LoadAttributes->[loadAttributes],SetMerged->[loadIssue],Update->[Update],updateCommitDivergence->[Update],GetBaseBranchHTMLURL->[LoadBaseRepo],GetDefaultSquashMessage->[LoadIssue,LoadBaseRepo],GetHeadBranchHTMLURL->[LoadHeadRepo],LoadHeadRepo->[loadHeadRepo],UpdateColsIfNotMerged->[Update],UpdateCols->[Update],GetDefaultMergeMessage->[LoadIssue,LoadBaseRepo],IsWorkInProgress->[LoadIssue],loadAttributes,LoadIssue,LoadAttributes]
GetHeadBranchHTMLURL returns the HTML URL of the HEAD branch of the pull request.
I suggest the new methods should be functions if they operate on the database.
@@ -214,6 +214,12 @@ public class Format extends AbstractFormatService implements ApplicationListener metadata = getXmlFromUrl(context, lang, url, request); } Element metadataEl = Xml.loadString(metadata, false); + + if(mdPath != null) { + final List<Namespace> namespaces = context.getBean(SchemaManager.class).getSchema(schema).getNamespaces(); + metadataEl = Xml.selectElement(metadataEl, mdPath, namespaces); + metadataEl.detach(); + } Metadata metadataInfo = new Metadata().setData(metadata).setId(1).setUuid("uuid"); metadataInfo.getDataInfo().setType(MetadataType.METADATA).setRoot(metadataEl.getQualifiedName()).setSchemaId(schema);
[Format->[isDevMode->[isDevMode],FormatMetadata->[call->[FormatMetadata,loadMetadataAndCreateFormatterAndParams]],copyNewerFilesToDataDir->[visitFile->[visitFile]],clear->[clear],getPluginLocResources->[visitFile->[addTranslations,visitFile],getPluginLocResources],createServiceContext->[createServiceContext]]]
Executes a single node in XML format.
What happens if the schema does not exist. It would be better to provide a useful error message rather than a Nullpointer as you would get here.
@@ -158,7 +158,7 @@ func NewUnionInput(input ...*pps.Input) *pps.Input { // NewCronInput returns an input which will trigger based on a timed schedule. // It uses cron syntax to specify the schedule. The input will be exposed to -// jobs as `/pfs/<name>/time` which will contain a timestamp. +// jobs as `/pfs/<name>/<timestamp>`. func NewCronInput(name string, spec string) *pps.Input { return &pps.Input{ Cron: &pps.CronInput{
[RerunPipeline->[RerunPipeline],FlushJob->[FlushJob],GetLogs->[GetLogs],DeleteJob->[DeleteJob],InspectJob->[InspectJob],InspectPipeline->[InspectPipeline],RestartDatum->[RestartDatum],ListPipeline->[ListPipeline],GarbageCollect->[GarbageCollect],InspectJobOutputCommit->[InspectJob],StopJob->[StopJob],CreatePipeline->[CreatePipeline],CreatePipelineService->[CreatePipeline],StartPipeline->[StartPipeline],FlushJobAll->[FlushJob],StopPipeline->[StopPipeline],DeletePipeline->[DeletePipeline],InspectDatum->[InspectDatum],CreateJob->[CreateJob]]
NewPFSInputOpts creates a new PFS input with all options. NewPipelineInput creates a new PipelineInput object and runs a job in PPS.
Would you mind adding a mention of the format we're using for this timestamp in here so people know how to parse it. (We probably should have had that in the old version of the code as well, but alas.)
@@ -607,6 +607,9 @@ type KubeletConfig struct { // +optional // Default: 1m ImagePullProgressDeadline *metav1.Duration `json:"imagePullProgressDeadline,omitempty" protobuf:"bytes,12,opt,name=imagePullProgressDeadline"` + // FailSwapOn makes the Kubelet fail to start if swap is enabled on the node. (default true). + // +optional + FailSwapOn *bool `json:"failSwapOn,omitempty" protobuf:"varint,13,opt,name=failSwapOn"` } // KubeletConfigEviction contains kubelet eviction thresholds supporting either a resource.Quantity or a percentage based value.
[FromInt]
KubeletConfigEviction is a wrapper around the above methods to allow for config parameters that are NodeFSInDiskSpace in the nodefs filesystem.
Should we default it to `true` in the `defaults.go`?
@@ -117,7 +117,7 @@ namespace System.Text.Json.Serialization.Converters } } - ArrayPool<FoundPropertiesAsync>.Shared.Return(state.Current.CtorArgumentState!.FoundPropertiesAsync!, clearArray: true); + ArrayPool<FoundPropertyAsync>.Shared.Return(argumentState.FoundPropertiesAsync!, clearArray: true); } }
[ObjectWithParameterizedConstructorConverter->[BeginRead->[InitializeConstructorArgumentCaches],ReadConstructorArguments->[ReadAndCacheConstructorArgument],HandleConstructorArgumentWithContinuation->[ReadAndCacheConstructorArgument]]]
override this method to handle the case where the object is not found in the JSON. This method is called when a token is not found in the JSON stream. It is called EndRead method for reading a object.
I recommend nulling out `argumentState.FoundPropertiesAsync` after the `Return`. Otherwise a potential bug could write to another user's pooled array.
@@ -190,13 +190,14 @@ def test_apply_mne_inverse_raw(): def test_apply_mne_inverse_fixed_raw(): - """Test MNE with fixed-orientation inverse operator on Raw""" + """Test MNE with fixed-orientation inverse operator on Raw + """ start = 3 stop = 10 _, times = raw[0, start:stop] # create a fixed-orientation inverse operator - fwd = read_forward_solution(fname_fwd, force_fixed=True) + fwd = read_forward_solution(fname_fwd, force_fixed=False, surf_ori=True) inv_op = make_inverse_operator(raw.info, fwd, noise_cov, loose=None, depth=0.8)
[test_io_inverse_operator->[_compare],test_apply_inverse_operator->[_compare],_compare->[_compare]]
Test MNE with fixed - orientation inverse operator on Raw.
do you actually test the fixed orientation case then? how can make_inverse_operator know that you want a fixed orientation inverse operator?
@@ -8,8 +8,9 @@ from django.core.exceptions import ValidationError from graphene import relay from promise import Promise +from saleor.checkout.utils import get_app_shipping_id + from ...account.models import Address -from ...account.utils import requestor_is_staff_member_or_app from ...core.anonymize import obfuscate_address, obfuscate_email from ...core.exceptions import PermissionDenied from ...core.permissions import (
[OrderEvent->[resolve_discount->[get_order_discount_event],resolve_lines->[_resolve_lines->[OrderEventOrderLineObject,get_order_discount_event]]],OrderLine->[resolve_thumbnail->[_get_first_product_image->[_get_image_from_media],_resolve_thumbnail->[_get_first_variant_image,_get_image_from_media]]],Order->[resolve_user_email->[_resolve_user_email]]]
Get a single object from a database. Image - Image ImageError - ImageError - ImageError - ImageError - ImageError -.
Please use relative imports as we use them everywhere
@@ -352,6 +352,10 @@ type ScheduleConfig struct { MergeScheduleLimit uint64 `toml:"merge-schedule-limit,omitempty" json:"merge-schedule-limit"` // TolerantSizeRatio is the ratio of buffer size for balance scheduler. TolerantSizeRatio float64 `toml:"tolerant-size-ratio,omitempty" json:"tolerant-size-ratio"` + // LowSpaceRatio is the lowest available ratio of store which regraded as low space. + LowSpaceRatio float64 `toml:"low-space-ratio,omitempty" json:"low-space-ratio"` + // HighSpaceRatio is the highest available ratio of store which regraded as high space. + HighSpaceRatio float64 `toml:"high-space-ratio,omitempty" json:"high-space-ratio"` // Schedulers support for loding customized schedulers Schedulers SchedulerConfigs `toml:"schedulers,omitempty" json:"schedulers-v2"` // json v2 is for the sake of compatible upgrade }
[Parse->[Parse],adjust->[validate,Parse,adjust],Parse]
clone returns a copy of the schedule config.
Add some description about how does _low space_ or _high space_ affect balance algorithms.
@@ -408,12 +408,13 @@ class GroupsController < ApplicationController def request_membership params.require(:reason) - unless current_user.staff? - RateLimiter.new(current_user, "request_group_membership", 1, 1.day).performed! - end - group = find_group(:id) - group_name = group.name + + begin + GroupRequest.create!(group: group, user: current_user, reason: params[:reason]) + rescue ActiveRecord::RecordNotUnique => e + return render json: failed_json.merge(error: I18n.t("groups.errors.already_requested_membership")), status: 409 + end usernames = [current_user.username].concat( group.users.where('group_users.owner')
[GroupsController->[mentionable->[new],check_name->[new],messageable->[new],handle_membership_request->[new],update->[update],members->[new],users_from_params->[new]]]
request a single that can be found in group membership.
You should add a specific exception you want to catch here. Otherwise the error will occur on things like typos, nil exceptions, etc. Additionally, it should be `create!` if you want to raise an exception on validation error.
@@ -10,4 +10,8 @@ data-turbolinks="false" <% end %> +<% if action_name == "tour" %> <%= render template: "layouts/application" %> +<% else %> +<%= render template: "layouts/application" %> +<% end %>
[No CFG could be retrieved]
View for a single .
@entantoencuanto this isn't necessary anymore
@@ -451,6 +451,15 @@ func (rcc *rotateCertsCmd) rotateKubelet() error { } } } + + for _, host := range rcc.masterNodes { + log.Debugf("Restarting kubelet on node %s", host.Name) + out, err := rcc.sshCommandExecuter("sudo systemctl restart kubelet", rcc.masterFQDN, host.Name, "22", rcc.sshConfig) + if err != nil { + log.Printf("Command `sudo systemctl restart kubelet` output: %s\n", out) + return errors.Wrap(err, "failed to restart kubelet") + } + } return nil }
[updateKubeconfig->[Printf,sshCommandExecuter,Replace,Wrap,GenerateKubeConfig],deleteServiceAccounts->[DeleteServiceAccount,getKubeClient,ListServiceAccounts,Wrap,Debugf],writeArtifacts->[PrettyPrintArmTemplate,Wrapf,WriteTLSArtifacts,BuildAzureParametersFile,Wrap,InitializeTemplateGenerator,GenerateTemplateV2],rebootAllNodes->[ListVirtualMachineScaleSets,RestartVirtualMachine,RestartVirtualMachineScaleSets,Values,Wrap,ListVirtualMachines],rotateApiserver->[Printf,Wrap,sshCommandExecuter,Debugf],getKubeClient->[Wrap,GetKubernetesClient,GenerateKubeConfig,Duration],getClusterNodes->[Wrap,getKubeClient,Contains,ListNodes],rotateEtcd->[Printf,Itoa,Infoln,rebootAllNodes,sshCommandExecuter,Wrap,Debugf],setSSHConfig->[InsecureIgnoreHostKey],run->[deleteServiceAccounts,writeArtifacts,rotateApiserver,validateAuthArgs,setSSHConfig,rotateEtcd,Wrap,deleteAllPods,IsNotExist,Stat,getClient,getClusterNodes,Errorf,Debugf,LoadTranslations,LoadContainerServiceFromFile,Join,Infoln,rotateKubelet,EnsureResourceGroup,updateKubeconfig,WithTimeout,SetDefaultCerts,Background,getAuthArgs],rotateKubelet->[Printf,Wrap,sshCommandExecuter,Debugf],deleteAllPods->[DeletePod,ListAllPods,getKubeClient,Wrap,Debugf],StringVar,ParsePrivateKey,NewClientConn,ReadFile,Dial,Sprintf,getAuthArgs,StringVarP,Close,String,MarkDeprecated,NewClient,PublicKeys,Wrap,NewSession,Run,Flags]
rotateKubelet rotates the certificates on the master nodes to the master node.
It would be nice if this had retry logic
@@ -176,6 +176,13 @@ func HTTP(ctx *context.Context) { ctx.Handle(http.StatusInternalServerError, "GetUserByID", err) return } + } else { + _, err = models.GetTwoFactorByUID(authUser.ID) + + if err == nil { + ctx.HandleText(http.StatusUnauthorized, "Users with two-factor authentication enabled cannot perform HTTP/HTTPS operations via plain username and password. Please create and use a personal access token on the user settings page") + return + } } if !isPublicPull {
[sendFile->[IsNotExist,Header,ModTime,Join,Size,Stat,Format,Sprintf,ServeFile,Set,WriteHeader],setHeaderNoCache->[Header,Set],setHeaderCacheForever->[Header,Unix,Sprintf,Now,Set],Handle,HandleText,ComposeHTTPSCloneURL,GetAccessTokenBySHA,Now,sendFile,Close,Fields,GetRepositoryByName,GetUserByID,HasPrefix,Set,IsErrRepoNotExist,RepoPath,PlainText,IsNotExist,Error,Stat,setHeaderCacheForever,FindStringSubmatch,Run,UserSignIn,MustCompile,FormValue,HasSuffix,BasicAuthDecode,Join,Output,CheckUnitUser,IsErrUserNotExist,Repeat,ToLower,Get,GetUserByName,setHeaderNoCache,Query,Command,Header,IsErrAccessTokenNotExist,HasAccess,Expand,Write,TrimSuffix,NewReader,Sprintf,IsErrAccessTokenEmpty,FormatInt,handler,Environ,Replace,Params,WriteHeader,UpdateAccessToken]
Get an access token from the database. Check if user has access to the given unit type.
You need to also check that `if !models.IsErrTwoFactorNotEnrolled(err)` and do `ctx.Handle(http.StatusInternalServerError, "IsErrTwoFactorNotEnrolled", err) + return` otherwise if there is some kind of unexpected error you will let user in and it will bypass 2fa
@@ -54,7 +54,7 @@ namespace DotNetNuke.Services.FileSystem public string Extension { get; set; } [XmlElement("size")] - public int Size { get; set; } + public long Size { get; set; } [XmlElement("height")] public int Height { get; set; }
[No CFG could be retrieved]
region properties.
This creates backward compatibility issue. Must deprecate and create another one.
@@ -222,11 +222,11 @@ class ArgSplitterTest(unittest.TestCase): def test_passthru_args(self) -> None: self.assert_valid_split( - "./pants test foo/bar -- -t arg", + "./pants test foo/bar -- -t 'this is the arg'", expected_goals=["test"], expected_scope_to_flags={"": [], "test": []}, expected_specs=["foo/bar"], - expected_passthru=["-t", "arg"], + expected_passthru=["-t", "this is the arg"], ) self.assert_valid_split( "./pants -farg --fff=arg compile --gg-gg=arg-arg -g test.junit --iii "
[ArgSplitterTest->[test_basic_arg_splitting->[assert_valid_split],test_distinguish_goals_from_specs->[assert_valid_split,assert_unknown_goal],test_version_request_detection->[assert_version_request],test_descoping_qualified_flags->[assert_valid_split],test_unknown_goal_detection->[assert_unknown_goal],test_passthru_args->[assert_valid_split],test_subsystem_flags->[assert_valid_split],test_hidden_scope_is_unknown_goal->[assert_unknown_goal]]]
Checks that the specified arguments are valid passthru arguments.
FWIW: There were no issues with the `ArgSplitter` class, but this change to the test helped to confirm that.
@@ -73,6 +73,14 @@ func CreateConfigToOCISpec(config *CreateConfig) (*spec.Spec, error) { //nolint g.AddAnnotation(key, val) } g.SetRootReadonly(config.ReadOnlyRootfs) + if config.Hostname == "" { + if config.NetMode.IsHost() { + config.Hostname, err = os.Hostname() + if err != nil { + return nil, errors.Wrap(err, "unable to retrieve hostname") + } + } + } g.SetHostname(config.Hostname) if config.Hostname != "" { g.AddProcessEnv("HOSTNAME", config.Hostname)
[GetVolumesFrom,IsRootless,SetLinuxResourcesCPURealtimePeriod,SetLinuxResourcesMemoryReservation,IsContainer,AddLinuxSysctl,AddLinuxGIDMapping,SetLinuxResourcesPidsLimit,SetLinuxResourcesCPUCpus,Wrap,GetVolumeMounts,SetLinuxRootPropagation,SetLinuxResourcesMemorySwap,SetLinuxResourcesCPUPeriod,SetProcessApparmorProfile,TweakCapabilities,ParseUlimit,IsHost,SetLinuxResourcesCPUShares,ParseTmpfsOptions,New,AddLinuxUIDMapping,Errorf,CreateBlockIO,SetLinuxResourcesMemorySwappiness,AddAnnotation,SetupPrivileged,AddProcessEnv,RemoveMount,SplitN,SetLinuxResourcesCPUQuota,Debug,Wrapf,SetProcessNoNewPrivileges,SetLinuxResourcesCPUMems,IsNone,AddProcessRlimits,SetLinuxResourcesMemoryKernel,SetLinuxResourcesMemoryDisableOOMKiller,AddPrivilegedDevices,ToUpper,SetProcessArgs,AddMount,SetProcessTerminal,SetProcessSelinuxLabel,SetProcessOOMScoreAdj,Split,SetHostname,IsBridge,SetRootReadonly,AddOrReplaceLinuxNamespace,RemoveLinuxNamespace,IsUserDefined,SetProcessCwd,AddLinuxReadonlyPaths,AddLinuxMaskedPaths,SetLinuxResourcesCPURealtimeRuntime,SetLinuxResourcesMemoryLimit,SetLinuxMountLabel]
Initialize a nagios mount. This function is intend to set the linux resources.
I think this is OK as is, but thought I'd ask just in case. If by chance config.Hostname is still equal to "" at this point, do we want to continue on as we do now or raise an "unable to determine hostname" type of error?
@@ -452,10 +452,10 @@ class WPSEO_Upgrade { * @return void */ private function upgrade_63() { - $option_titles = WPSEO_Options::get_option( 'wpseo_titles' ); - unset( $option_titles['noindex-subpages-wpseo'], $option_titles['usemetakeywords'] ); + $this->remove_key_from_option( 'wpseo_titles', array( 'noindex-subpages-wpseo', 'usemetakeywords' ) ); // Remove all the meta keyword template options we've stored. + $option_titles = WPSEO_Options::get_option( 'wpseo_titles' ); foreach ( array_keys( $option_titles ) as $key ) { if ( strpos( $key, 'metakey' ) === 0 ) { unset( $option_titles[ $key ] );
[WPSEO_Upgrade->[upgrade_47->[query,prepare],upgrade_55->[add],upgrade_56->[query,get_table_name],upgrade_49->[get_blog_prefix,prepare,get_results],remove_about_notice->[get_id],upgrade_20->[move_pinterest_option],upgrade_36->[query],upgrade_50->[install,query,manage_notification],upgrade_50_51->[query,get_table_name],upgrade_61->[query,get_table_name],upgrade_40->[get_notification_by_id,remove_notification],__construct->[upgrade_63,upgrade_44,upgrade_15,finish_up,upgrade_22,upgrade_20,upgrade_55,upgrade_56,upgrade_50,upgrade_23,upgrade_47,upgrade_33,upgrade_49,upgrade_36,upgrade_50_51,upgrade_21,upgrade_61,upgrade_30,upgrade_40,set_upgrade_notice]]]
To upgrade to 6. 6.
I think this should be `upgrade_64`
@@ -623,7 +623,8 @@ def clear_pending(): @manager.option( "-p", "--path", dest="path", help="Absolute file path to a Lemur query csv." ) -@manager.option("-r", "--reason", dest="reason", help="Reason to revoke certificate.") +@manager.option("-r", "--reason", dest="reason", default="unspecified", help="CRL Reason as per RFC 5280 section 5.3.1") +@manager.option("-m", "--message", dest="message", help="Message explaining reason for revocation") @manager.option( "-c", "--commit",
[request_rotation_region->[request_rotation],request_reissue->[print_certificate_details],rotate_region->[validate_endpoint,request_rotation,request_rotation_region,validate_certificate],rotate->[validate_endpoint,request_rotation,validate_certificate],reissue->[request_reissue,validate_certificate]]
This worker processes the certificate revocation command. This function is called by the worker thread and checks if the is revoked.
Reused `--reason` for CRL Reason as it matches the RFC guidance
@@ -772,6 +772,7 @@ func TestContainerRuntime(t *testing.T) { mockCS := getMockBaseContainerService("1.10.13") properties := mockCS.Properties properties.OrchestratorProfile.OrchestratorType = Kubernetes + properties.OrchestratorProfile.KubernetesConfig.MobyVersion = "3.0.4" mockCS.setOrchestratorDefaults(true) if properties.OrchestratorProfile.KubernetesConfig.ContainerRuntime != Docker { t.Fatalf("ContainerRuntime did not have the expected value, got %s, expected %s",
[PutUint32,DeepEqual,IsAvailabilitySets,Activate,NewStringResponse,Diff,HasMultipleNodes,IsManagedDisks,setOrchestratorDefaults,SetCustomCloudProfileEnvironment,BoolPtr,Error,RegisterResponder,New,Errorf,HasAvailabilityZones,Logf,IsVirtualMachineScaleSets,Bool,setAgentProfileDefaults,To4,setKubeletConfig,Fatalf,DecodeString,ParseIP,Uint32,setWindowsProfileDefaults,SetPropertiesDefaults,SetAzureStackCloudSpec,Sprintf,SetDefaultCerts,DeactivateAndReset,Fatal,setMasterProfileDefaults,EqualError]
TestContainerRuntime tests that the mock container runtime has the correct values. This method is used to set the default values for the orchestrator profile.
Can we do a for loop with at least two < 3.0.5 versions as input? Just to make sure that we don't accidentally regress the test if the static string actually matches the value of `DefaultMobyVersion`
@@ -23,13 +23,15 @@ class Feature { $x = Config::get('feature_lock', $feature, false); - if ($x === false) { + if (is_null($x)) { $x = PConfig::get($uid, 'feature', $feature, false); } - if ($x === false) { + + if (is_null($x)) { $x = Config::get('feature', $feature, false); } - if ($x === false) { + + if (is_null($x)) { $x = self::getDefault($feature); }
[No CFG could be retrieved]
Check if a feature is enabled.
This is wrong because we defined a default value in the `Config::get()` call, so the return value will never be `null` and this condition will never be true.
@@ -24,3 +24,12 @@ class ConfigurationError(Exception): def log_pytorch_version_info(): import torch logger.info("Pytorch version: %s", torch.__version__) + + +def check_dimensions_match(dimension_1: int, + dimension_2: int, + dim_1_name: str, + dim_2_name: str) -> None: + if dimension_1 != dimension_2: + raise ConfigurationError(f"{dim_1_name} must match {dim_2_name}, but got {dimension_1} " + f"and {dimension_2} instead")
[log_pytorch_version_info->[info],ConfigurationError->[__str__->[repr],__init__->[super]],getLogger]
Log PyTorch version info.
do you want to add an optional `message` field, so that we can include longer explanations when appropriate? (I see that this change currently eliminates some of them)
@@ -0,0 +1,9 @@ +'use strict' +module.exports = { + up: queryInterface => { + return queryInterface.addIndex('discovery_access_token', { + fields: ['nonce', 'eth_address'] + }) + }, + down: () => {} +}
[No CFG could be retrieved]
No Summary Found.
I think we also need another index on auth_token column.
@@ -46,7 +46,7 @@ class BidirectionalEndpointSpanExtractor(SpanExtractor): The method used to combine the ``forward_start_embeddings`` and ``forward_end_embeddings`` for the forward direction of the bidirectional representation. See above for a full description. - backward_combination : str, optional (default = "y-x"). + backward_combination : str, optional (default = "x-y"). The method used to combine the ``backward_start_embeddings`` and ``backward_end_embeddings`` for the backward direction of the bidirectional representation. See above for a full description.
[BidirectionalEndpointSpanExtractor->[get_output_dim->[get_output_dim],from_params->[BidirectionalEndpointSpanExtractor]]]
This method is used to determine the similarity function of a sequence element - wise. This is a helper function to tell us that the exclusive start and end indices are always within.
@DeNeutoy, just a note to look at this, if you haven't already.
@@ -1694,12 +1694,12 @@ function fix_private_photos($s, $uid, $item = null, $cid = 0) { // Only embed locally hosted photos $replace = false; $i = basename($image); - $i = str_replace(array('.jpg','.png','.gif'),array('','',''),$i); + $i = str_replace(array('.jpg','.png','.gif'),array('', '',''), $i); $x = strpos($i,'-'); if ($x) { - $res = substr($i,$x+1); - $i = substr($i,0,$x); + $res = substr($i, $x+1); + $i = substr($i,0, $x); $r = q("SELECT * FROM `photo` WHERE `resource-id` = '%s' AND `scale` = %d AND `uid` = %d", dbesc($i), intval($res),
[new_follower->[get_item_tags],item_add_language_opt->[detect],fix_private_photos->[scaleImage,is_valid,getType,imageString],item_is_remote_self->[get_hostname],subscribe_to_hub->[get_curl_code],item_store->[format,get_hostname]]
fix_private_photos - fix private photos This function is used to fix the private photo link in the response. fix_private_photos fix base64.
Standards: Please add a space after commas.
@@ -53,7 +53,7 @@ func (s *Server) Start(ctx context.Context, cancel context.CancelFunc, port int, } xds_discovery.RegisterAggregatedDiscoveryServiceServer(grpcServer, s) - go utils.GrpcServe(ctx, grpcServer, lis, cancel, ServerType) + go utils.GrpcServe(ctx, grpcServer, lis, cancel, ServerType, nil) s.ready = true return nil
[Start->[Msg,Error,NewGrpc,RegisterAggregatedDiscoveryServiceServer,GetIssuingCA,GrpcServe,GetCertificateChain,Err,GetPrivateKey]]
Start starts the ADS server.
You don't want to be passing a nil channel, writing to it will block.
@@ -118,8 +118,6 @@ define([ */ this.computedTransform = computedTransform; - this._transformDirty = true; - this._boundingVolume = this.createBoundingVolume(header.boundingVolume, computedTransform); this._boundingVolume2D = undefined;
[No CFG could be retrieved]
Creates a new object. The base class for the object.
Removed this since it was only used in point clouds and it was easy to get around.
@@ -217,7 +217,7 @@ void AddSC_redridge_mountains(); void AddSC_silverpine_forest(); void AddSC_stormwind_city(); void AddSC_stranglethorn_vale(); -void AddSC_swamp_of_sorrows(); +//void AddSC_swamp_of_sorrows(); void AddSC_tirisfal_glades(); void AddSC_undercity(); void AddSC_western_plaguelands();
[AddOutdoorPvPScripts->[AddSC_outdoorpvp_ep,AddSC_outdoorpvp_tf,AddSC_outdoorpvp_zm,AddSC_outdoorpvp_hp,AddSC_outdoorpvp_gh,AddSC_outdoorpvp_na,AddSC_outdoorpvp_si],AddCommandScripts->[AddSC_cast_commandscript,AddSC_lookup_commandscript,AddSC_lfg_commandscript,AddSC_mmaps_commandscript,AddSC_npc_commandscript,AddSC_instance_commandscript,AddSC_reset_commandscript,AddSC_event_commandscript,AddSC_ban_commandscript,AddSC_arena_commandscript,AddSC_learn_commandscript,AddSC_titles_commandscript,AddSC_achievement_commandscript,AddSC_ticket_commandscript,AddSC_message_commandscript,AddSC_tele_commandscript,AddSC_guild_commandscript,AddSC_list_commandscript,AddSC_player_commandscript,AddSC_gm_commandscript,AddSC_honor_commandscript,AddSC_character_commandscript,AddSC_misc_commandscript,AddSC_disable_commandscript,AddSC_modify_commandscript,AddSC_cheat_commandscript,AddSC_wp_commandscript,AddSC_account_commandscript,AddSC_spectator_commandscript,AddSC_server_commandscript,AddSC_quest_commandscript,AddSC_deserter_commandscript,AddSC_debug_commandscript,AddSC_gobject_commandscript,AddSC_bf_commandscript,AddSC_go_commandscript,AddSC_reload_commandscript],AddSpellScripts->[AddSC_paladin_spell_scripts,AddSC_warlock_spell_scripts,AddSC_priest_spell_scripts,AddSC_quest_spell_scripts,AddSC_item_spell_scripts,AddSC_deathknight_spell_scripts,AddSC_hunter_spell_scripts,AddSC_rogue_spell_scripts,AddSC_warrior_spell_scripts,AddSC_generic_spell_scripts,AddSC_shaman_spell_scripts,AddSC_druid_spell_scripts,AddSC_mage_spell_scripts],AddEventScripts->[AddSC_event_pilgrims_end_scripts,AddSC_event_winter_veil_scripts,AddSC_event_brewfest_scripts,AddSC_event_childrens_week,AddSC_event_midsummer_scripts,AddSC_event_love_in_the_air,AddSC_event_hallows_end_scripts],AddPetScripts->[AddSC_hunter_pet_scripts,AddSC_mage_pet_scripts,AddSC_deathknight_pet_scripts,AddSC_generic_pet_scripts,AddSC_priest_pet_scripts,AddSC_shaman_pet_scripts],AddOutlandScripts->[AddSC_terokkar_forest,AddSC_boss_warchief_karg
ath_bladefist,AddSC_boss_hydross_the_unstable,AddSC_boss_zereketh_the_unbound,AddSC_boss_gruul,AddSC_instance_serpentshrine_cavern,AddSC_instance_the_slave_pens,AddSC_boss_shade_of_akama,AddSC_boss_exarch_maladaar,AddSC_boss_pandemonius,AddSC_boss_mother_shahraz,AddSC_instance_shadow_labyrinth,AddSC_boss_mechano_lord_capacitus,AddSC_boss_kaelthas,AddSC_boss_void_reaver,AddSC_boss_laj,AddSC_boss_alar,AddSC_boss_lady_vashj,AddSC_netherstorm,AddSC_boss_dalliah_the_doomsayer,AddSC_boss_warp_splinter,AddSC_boss_supremus,AddSC_boss_harbinger_skyriss,AddSC_boss_watchkeeper_gargolmar,AddSC_instance_gruuls_lair,AddSC_hellfire_peninsula,AddSC_boss_mekgineer_steamrigger,AddSC_boss_gatewatcher_iron_hand,AddSC_instance_magtheridons_lair,AddSC_blades_edge_mountains,AddSC_boss_nexusprince_shaffar,AddSC_instance_the_underbog,AddSC_instance_mana_tombs,AddSC_boss_gatewatcher_gyrokill,AddSC_boss_vazruden_the_herald,AddSC_zangarmarsh,AddSC_instance_the_botanica,AddSC_boss_ahune,AddSC_boss_hydromancer_thespia,AddSC_boss_broggok,AddSC_boss_omor_the_unscarred,AddSC_instance_arcatraz,AddSC_boss_najentus,AddSC_boss_grandmaster_vorpil,AddSC_boss_illidan,AddSC_instance_mechanar,AddSC_boss_the_lurker_below,AddSC_boss_doomwalker,AddSC_instance_the_eye,AddSC_instance_blood_furnace,AddSC_boss_talon_king_ikiss,AddSC_boss_nethermancer_sepethrea,AddSC_boss_grand_warlock_nethekurse,AddSC_instance_steam_vault,AddSC_boss_reliquary_of_souls,AddSC_boss_shirrak_the_dead_watcher,AddSC_boss_illidari_council,AddSC_boss_warbringer_omrogg,AddSC_boss_kelidan_the_breaker,AddSC_instance_hellfire_ramparts,AddSC_arcatraz,AddSC_boss_warlord_kalithresh,AddSC_boss_high_astromancer_solarian,AddSC_boss_magtheridon,AddSC_instance_sethekk_halls,AddSC_boss_high_king_maulgar,AddSC_boss_fathomlord_karathress,AddSC_boss_pathaleon_the_calculator,AddSC_boss_teron_gorefiend,AddSC_instance_black_temple,AddSC_shadowmoon_valley,AddSC_boss_doomlordkazzak,AddSC_boss_leotheras_the_blind,AddSC_boss_ambassador_hellmaw,AddSC_boss_the_bla
ck_stalker,AddSC_instance_auchenai_crypts,AddSC_boss_the_maker,AddSC_boss_wrath_scryer_soccothrates,AddSC_nagrand,AddSC_boss_murmur,AddSC_boss_blackheart_the_inciter,AddSC_boss_morogrim_tidewalker,AddSC_boss_commander_sarannis,AddSC_shattrath_city,AddSC_boss_thorngrin_the_tender,AddSC_boss_high_botanist_freywinn,AddSC_instance_shattered_halls,AddSC_boss_gurtogg_bloodboil],AddEasternKingdomsScripts->[AddSC_boss_anubshiah,AddSC_instance_molten_core,AddSC_boss_shadowvosh,AddSC_instance_scarlet_monastery,AddSC_boss_halycon,AddSC_boss_moira_bronzebeard,AddSC_boss_shade_of_aran,AddSC_boss_gizrul_the_slavener,AddSC_boss_kiljaeden,AddSC_boss_sulfuron,AddSC_boss_akilzon,AddSC_boss_kalecgos,AddSC_boss_gehennas,AddSC_boss_chromaggus,AddSC_boss_renataki,AddSC_boss_terestian_illhoof,AddSC_instance_sunwell_plateau,AddSC_boss_felmyst,AddSC_boss_jeklik,AddSC_instance_deadmines,AddSC_isle_of_queldanas,AddSC_undercity,AddSC_instance_zulaman,AddSC_boss_malchezaar,AddSC_boss_ambassador_flamelash,AddSC_boss_gyth,AddSC_instance_scholomance,AddSC_boss_tomb_of_seven,AddSC_boss_grizzle,AddSC_bosses_opera,AddSC_boss_ragnaros,AddSC_boss_broodlord,AddSC_boss_nightbane,AddSC_stormwind_city,AddSC_boss_arlokk,AddSC_instance_stratholme,AddSC_boss_razorgore,AddSC_the_scarlet_enclave_c5,AddSC_instance_sunken_temple,AddSC_boss_halazzi,AddSC_boss_mandokir,AddSC_hinterlands,AddSC_eversong_woods,AddSC_westfall,AddSC_stranglethorn_vale,AddSC_boss_nalorakk,AddSC_boss_jindo,AddSC_boss_firemaw,AddSC_duskwood,AddSC_boss_servant_quarters,AddSC_boss_brutallus,AddSC_boss_hazzarah,AddSC_boss_garr,AddSC_boss_pyroguard_emberseer,AddSC_ghostlands,AddSC_boss_gahzranka,AddSC_boss_felblood_kaelthas,AddSC_boss_marli,AddSC_boss_nefarian,AddSC_instance_gnomeregan,AddSC_boss_urok_doomhowl,AddSC_boss_wushoolay,AddSC_boss_venoxis,AddSC_zulaman,AddSC_boss_shazzrah,AddSC_boss_drekthar,AddSC_arathi_highlands,AddSC_instance_zulgurub,AddSC_instance_blackrock_depths,AddSC_swamp_of_sorrows,AddSC_boss_balinda,AddSC_boss_curator,Add
SC_boss_drakkisath,AddSC_boss_overlordwyrmthalak,AddSC_silverpine_forest,AddSC_boss_general_angerforge,AddSC_instance_uldaman,AddSC_ironforge,AddSC_boss_maiden_of_virtue,AddSC_boss_grilek,AddSC_boss_attumen,AddSC_boss_flamegor,AddSC_boss_kirtonos_the_herald,AddSC_instance_shadowfang_keep,AddSC_boss_janalai,AddSC_tirisfal_glades,AddSC_boss_highlordomokk,AddSC_boss_thekal,AddSC_instance_blackrock_spire,AddSC_boss_quatermasterzigris,AddSC_boss_netherspite,AddSC_boss_warmastervoone,AddSC_boss_baron_geddon,AddSC_boss_rend_blackhand,AddSC_boss_majordomo,AddSC_boss_moroes,AddSC_eastern_plaguelands,AddSC_boss_magmadar,AddSC_boss_thebeast,AddSC_boss_vanndar,AddSC_boss_hakkar,AddSC_karazhan,AddSC_alterac_valley,AddSC_instance_the_stockade,AddSC_boss_magmus,AddSC_the_scarlet_enclave_c2,AddSC_boss_vexallus,AddSC_boss_selin_fireheart,AddSC_blackrock_depths,AddSC_boss_ebonroc,AddSC_instance_karazhan,AddSC_boss_zuljin,AddSC_boss_mr_smite,AddSC_boss_gorosh_the_dervish,AddSC_the_scarlet_enclave,AddSC_boss_high_interrogator_gerstahn,AddSC_boss_lucifron,AddSC_boss_golemagg,AddSC_boss_galvangar,AddSC_western_plaguelands,AddSC_instance_magisters_terrace,AddSC_boss_hex_lord_malacrass,AddSC_instance_blackwing_lair,AddSC_redridge_mountains,AddSC_boss_draganthaurissan,AddSC_boss_priestess_delrissa,AddSC_boss_muru,AddSC_boss_vaelastrasz,AddSC_blasted_lands,AddSC_the_scarlet_enclave_c1,AddSC_boss_eredar_twins],AddNorthrendScripts->[AddSC_boss_koralon,AddSC_instance_drak_tharon_keep,AddSC_boss_skadi,AddSC_boss_ionar,AddSC_dalaran,AddSC_boss_general_zarithrian,AddSC_instance_halls_of_reflection,AddSC_boss_grobbulus,AddSC_boss_professor_putricide,AddSC_boss_skarvald_dalronn,AddSC_boss_falric,AddSC_boss_ingvar_the_plunderer,AddSC_wintergrasp,AddSC_instance_halls_of_stone,AddSC_boss_razuvious,AddSC_instance_eye_of_eternity,AddSC_boss_hodir,AddSC_instance_forge_of_souls,AddSC_boss_jedoga_shadowseeker,AddSC_boss_lady_deathwhisper,AddSC_boss_four_horsemen,AddSC_boss_garfrost,AddSC_boss_xt002,AddSC_bo
ss_anomalus,AddSC_borean_tundra,AddSC_boss_taldaram,AddSC_boss_ormorok,AddSC_boss_anubarak_trial,AddSC_boss_baltharus_the_warborn,AddSC_boss_devourer_of_souls,AddSC_boss_argent_challenge,AddSC_boss_jaraxxus,AddSC_sholazar_basin,AddSC_zuldrak,AddSC_boss_xevozz,AddSC_boss_bjarngrim,AddSC_boss_blood_prince_council,AddSC_brann_bronzebeard,AddSC_boss_gal_darah,AddSC_boss_maiden_of_grief,AddSC_boss_dred,AddSC_boss_sartharion,AddSC_boss_the_lich_king,AddSC_oculus,AddSC_boss_eck,AddSC_instance_gundrak,AddSC_boss_sindragosa,AddSC_boss_noth,AddSC_instance_vault_of_archavon,AddSC_violet_hold,AddSC_instance_utgarde_keep,AddSC_dragonblight,AddSC_boss_tharon_ja,AddSC_boss_volkhan,AddSC_boss_palehoof,AddSC_instance_trial_of_the_crusader,AddSC_boss_mimiron,AddSC_boss_amanitar,AddSC_instance_utgarde_pinnacle,AddSC_boss_deathbringer_saurfang,AddSC_boss_commander_stoutbeard,AddSC_boss_vezax,AddSC_instance_obsidian_sanctum,AddSC_boss_tyrannus,AddSC_boss_archavon,AddSC_boss_faction_champions,AddSC_boss_krik_thir,AddSC_grizzly_hills,AddSC_boss_emalon,AddSC_boss_lord_marrowgar,AddSC_instance_oculus,AddSC_boss_malygos,AddSC_boss_kelthuzad,AddSC_boss_krystallus,AddSC_boss_svala,AddSC_boss_erekem,AddSC_boss_elder_nadox,AddSC_boss_valithria_dreamwalker,AddSC_instance_pit_of_saron,AddSC_boss_hadronox,AddSC_icecrown_citadel_teleport,AddSC_boss_slad_ran,AddSC_boss_thaddius,AddSC_boss_yoggsaron,AddSC_boss_urom,AddSC_instance_nexus,AddSC_instance_ulduar,AddSC_isle_of_conquest,AddSC_boss_moorabi,AddSC_boss_trollgore,AddSC_boss_ichoron,AddSC_boss_black_knight,AddSC_boss_cyanigosa,AddSC_crystalsong_forest,AddSC_boss_gothik,AddSC_trial_of_the_crusader,AddSC_boss_heigan,AddSC_boss_loken,AddSC_instance_ruby_sanctum,AddSC_boss_eregos,AddSC_boss_sapphiron,AddSC_utgarde_keep,AddSC_boss_marwyn,AddSC_boss_toravon,AddSC_boss_loatheb,AddSC_boss_auriaya,AddSC_boss_zuramat,AddSC_boss_assembly_of_iron,AddSC_boss_faerlina,AddSC_boss_halion,AddSC_trial_of_the_champion,AddSC_boss_freya,AddSC_instance_naxxramas,AddSC
_icecrown,AddSC_icecrown_citadel,AddSC_boss_patchwerk,AddSC_boss_moragg,AddSC_boss_grand_champions,AddSC_boss_festergut,AddSC_instance_trial_of_the_champion,AddSC_boss_icecrown_gunship_battle,AddSC_boss_magus_telestra,AddSC_howling_fjord,AddSC_boss_northrend_beasts,AddSC_boss_volazj,AddSC_boss_razorscale,AddSC_boss_anub_arak,AddSC_ulduar,AddSC_instance_icecrown_citadel,AddSC_boss_saviana_ragefire,AddSC_boss_keristrasza,AddSC_boss_maexxna,AddSC_boss_blood_queen_lana_thel,AddSC_boss_keleseth,AddSC_halls_of_reflection,AddSC_instance_ahnkahet,AddSC_storm_peaks,AddSC_boss_bronjahm,AddSC_boss_drakkari_colossus,AddSC_boss_anubrekhan,AddSC_boss_novos,AddSC_pit_of_saron,AddSC_instance_halls_of_lightning,AddSC_boss_ick,AddSC_boss_sjonnir,AddSC_instance_azjol_nerub,AddSC_boss_twin_valkyr,AddSC_boss_thorim,AddSC_boss_lavanthor,AddSC_boss_gluth,AddSC_boss_varos,AddSC_instance_violet_hold,AddSC_boss_flame_leviathan,AddSC_forge_of_souls,AddSC_boss_ymiron,AddSC_boss_rotface,AddSC_boss_ignis,AddSC_boss_kologarn,AddSC_boss_drakos,AddSC_boss_algalon_the_observer],AddWorldScripts->[AddSC_npc_taxi,AddSC_character_creation,AddSC_npc_innkeeper,AddSC_achievement_scripts,AddSC_chat_log,AddSC_areatrigger_scripts,AddSC_guards,AddSC_item_scripts,AddSC_action_ip_logger,AddSC_go_scripts,AddSC_npcs_special,AddSC_generic_creature,AddSC_npc_professions,AddSC_emerald_dragons],AddKalimdorScripts->[AddSC_instance_ragefire_chasm,AddSC_hyjal,AddSC_instance_ruins_of_ahnqiraj,AddSC_durotar,AddSC_boss_rage_winterchill,AddSC_npc_anubisath_sentinel,AddSC_instance_culling_of_stratholme,AddSC_instance_wailing_caverns,AddSC_boss_cthun,AddSC_boss_lieutenant_drake,AddSC_teldrassil,AddSC_boss_azuregos,AddSC_boss_huhuran,AddSC_boss_skeram,AddSC_boss_aeonus,AddSC_the_black_morass,AddSC_hyjal_trash,AddSC_boss_epoch,AddSC_darkshore,AddSC_boss_anetheron,AddSC_thunder_bluff,AddSC_culling_of_stratholme,AddSC_instance_temple_of_ahnqiraj,AddSC_boss_salramm,AddSC_boss_twinemperors,AddSC_ungoro_crater,AddSC_instance_dire_mau
l,AddSC_dustwallow_marsh,AddSC_instance_mount_hyjal,AddSC_bloodmyst_isle,AddSC_instance_onyxias_lair,AddSC_boss_kurinnaxx,AddSC_thousand_needles,AddSC_ashenvale,AddSC_boss_fankriss,AddSC_instance_blackfathom_deeps,AddSC_bug_trio,AddSC_boss_viscidus,AddSC_boss_temporus,AddSC_boss_epoch_hunter,AddSC_boss_ossirian,AddSC_boss_ayamiss,AddSC_mulgore,AddSC_boss_meathook,AddSC_boss_moam,AddSC_desolace,AddSC_the_barrens,AddSC_felwood,AddSC_instance_old_hillsbrad,AddSC_winterspring,AddSC_tanaris,AddSC_boss_rajaxx,AddSC_orgrimmar,AddSC_stonetalon_mountains,AddSC_boss_archimonde,AddSC_boss_chrono_lord_deja,AddSC_boss_kazrogal,AddSC_azuremyst_isle,AddSC_feralas,AddSC_old_hillsbrad,AddSC_instance_zulfarrak,AddSC_razorfen_downs,AddSC_instance_the_black_morass,AddSC_silithus,AddSC_moonglade,AddSC_boss_infinite_corruptor,AddSC_instance_razorfen_downs,AddSC_azshara,AddSC_boss_sartura,AddSC_instance_maraudon,AddSC_boss_azgalor,AddSC_boss_onyxia,AddSC_boss_mal_ganis,AddSC_boss_buru,AddSC_boss_captain_skarloc,AddSC_instance_razorfen_kraul,AddSC_boss_ouro]]
AddSC_instance_zulaman - > AddSC_instance_zul AddSC_boss_boss_boss_morass_hun.
why not just delete it completely?
@@ -357,7 +357,10 @@ class ForwardpropTest(test.TestCase, parameterized.TestCase): def f(x): return math_ops.reduce_prod(math_ops.tanh(x)**2) - _test_gradients(self, f, [constant_op.constant([1.])], order=3) + _test_gradients(self, + f, [constant_op.constant([1.])], + order=3, + recompute=True) def testExceptionInCustomGradientNotSwallowed(self):
[HessianTests->[testHessian1D->[_forward_over_back_hessian]],ControlFlowTests->[testInFunctionWhile->[_fprop_while],testOfFunctionWhile->[_has_loop],testInFunctionCond->[_fprop_cond],testOfFunctionCond->[_has_cond]],ForwardpropTest->[testGradPureForward->[f],testHVPMemory->[_hvp],testVariableWatchedFunction->[compute_jvps,_Model],testPushPopAccumulatorState->[f],testElementwiseNNOps->[_test_gradients],testArgumentUnused->[_f],testJVPManual->[_jvp],testVariableReadInFunction->[f],testExceptionInCustomGradientNotSwallowed->[f],testNumericHigherOrder->[_test_gradients],testFunctionGrad->[_test_gradients],testReusingJVP->[_expected],testCustomGradient->[_test_gradients],testFusedBatchNormGradsInference->[_test_gradients],testCustomGradientRecomputeGrad->[_test_gradients],testFunctionGradInFunctionPureForward->[take_gradients->[f],take_gradients],testHVPCorrectness->[_hvp,fun],testHigherOrderPureForward->[_forwardgrad->[_compute_forwardgrad->[f]],_forwardgrad,f]],_jacfwd->[_jvp],_test_gradients->[_grad,_test_gradients,_jacfwd],_forward_over_back_hessian->[_vectorize_parameters]]
Test for custom gradient recomputation.
Rather than complicating _test_gradients, can you wrap the whole thing in assertRaisesRegexp? Or break it out so it's just running ForwardAccumulator on top of a GradientTape.
@@ -113,11 +113,12 @@ export class AmpFormTextarea { this.unlisteners_.push(listen(root, AmpEvents.DOM_UPDATE, () => { cachedTextareaElements = root.querySelectorAll('textarea'); })); - const throttledResize = throttle(this.win_, e => { - if (e.relayoutAll) { - resizeTextareaElements(cachedTextareaElements); - } - }, MIN_EVENT_INTERVAL_MS); + const throttledResize = throttle( + /** @type {!Window} */ (this.win_), e => { + if (e.relayoutAll) { + resizeTextareaElements(cachedTextareaElements); + } + }, MIN_EVENT_INTERVAL_MS); this.unlisteners_.push(this.viewport_.onResize(throttledResize)); // For now, warn if textareas with initial overflow are present, and
[No CFG could be retrieved]
The DOM event listeners for the AMP - FORM textareas. Determines if an element has overflow.
type should probably be in declaration of the field in constructor instead of usage time
@@ -45,7 +45,11 @@ bootRun { <%_ if (!skipClient) { _%> task webpackBuildDev(type: <%= _.upperFirst(clientPackageManager) %>Task) { + <%_ if (clientPackageManager==='npm') { _%> inputs.file("package-lock.json") + <%_ } else { _%> + inputs.file("yarn.lock") + <%_ } _%> inputs.dir("<%= CLIENT_MAIN_SRC_DIR %>") def webpackDevFiles = fileTree("<%= CLIENT_WEBPACK_DIR %>/")
[No CFG could be retrieved]
Produces a list of all packages that are required by the application. filter for missing dependencies.
For readability I would put spaces here: `clientPackageManager === 'npm'` (and in all other places too).
@@ -2,7 +2,7 @@ class ChatChannel < ApplicationRecord include AlgoliaSearch attr_accessor :current_user, :usernames_string - has_many :messages + has_many :messages, dependent: :destroy has_many :chat_channel_memberships, dependent: :destroy has_many :users, through: :chat_channel_memberships
[ChatChannel->[adjusted_slug->[direct?],pusher_channels->[invite_only?,open?],user_obj->[last_opened_at]]]
A class that represents a single channel in a group. _name _slug _human_names _name _name _name _slug _human.
This is the most important change. Is there a reason why we delete instead of destroy messages? Let me know if I should revert this.
@@ -32,10 +32,10 @@ import ( // returns true for yes, false for no func Confirm(prompt string, def bool) (bool, error) { reader := bufio.NewReader(os.Stdin) - return confirm(reader, prompt, def) + return confirm(reader, os.Stdout, prompt, def) } -func confirm(r io.Reader, prompt string, def bool) (bool, error) { +func confirm(r io.Reader, out io.Writer, prompt string, def bool) (bool, error) { options := " [Y/n]" if !def { options = " [y/N]"
[Println,NewReader,New,Print,ToLower,Scan,NewScanner,Text]
Confirm displays the confirmation text and asks the user to answer the given .
`go test -json` does not always report a test status if the test did write to stdout/stderr directly. I found it easier to modify the code/tests and capture the output as well instead of trying to fix the testing evaluation in gotestsum. As a nice bonus I modified the tests to actually capture and validate the produced output.
@@ -25,7 +25,7 @@ namespace DynamoCLI } var runner = new CommandLineRunner(model); runner.Run(cmdLineArgs); - + } catch (Exception e) {
[Program->[Main->[SetLocale,ASMPath,IsCrashing,Message,WriteLine,IsNullOrEmpty,MakeModel,Parse,StackTrace,Run,TrackException]]]
Main method that runs the DynamoDB query and returns a single object.
Revert this file - only whitespace changes.
@@ -39,6 +39,7 @@ module AccountReset end def destroy_user + Db::DeletedUser::Create.call(user.id) user.destroy! end
[DeleteAccount->[send_push_notifications->[call]]]
Destroy the user if there is a lease and send a push notification.
should we consider doing this in a transaction? so if one step fails we're not left in a half-state?
@@ -15,9 +15,11 @@ */ package io.netty.handler.codec.http.cookie; -import static io.netty.handler.codec.http.cookie.CookieUtil.*; +import static io.netty.handler.codec.http.cookie.CookieUtil.add; +import static io.netty.handler.codec.http.cookie.CookieUtil.addQuoted; +import static io.netty.handler.codec.http.cookie.CookieUtil.stringBuilder; +import static io.netty.handler.codec.http.cookie.CookieUtil.stripTrailingSeparator; import static io.netty.util.internal.ObjectUtil.checkNotNull; - import io.netty.handler.codec.http.HttpHeaderDateFormat; import io.netty.handler.codec.http.HttpRequest;
[ServerCookieEncoder->[encode->[encode],ServerCookieEncoder]]
Provides a cookie encoder to be used in the server side. Strict encoder that validates that name and value chars are in the valid scope of a cookie.
@normanmaurer What's the coding style rule regarding static imports wildcard usage here?
@@ -124,12 +124,7 @@ class ProductVariantTranslatableContent(CountableDjangoObjectType): @staticmethod def resolve_product_variant(root: product_models.ProductVariant, info): - visible_products = product_models.Product.objects.visible_to_user( - info.context.user - ).values_list("pk", flat=True) - return product_models.ProductVariant.objects.filter( - product__id__in=visible_products, pk=root.id - ).first() + return product_models.ProductVariant.objects.filter(pk=root.id).first() class ProductTranslation(BaseTranslationType):
[MenuItemTranslatableContent->[TranslationField,Field],CollectionTranslatableContent->[resolve_collection->[visible_to_user],TranslationField,Field],CategoryTranslatableContent->[TranslationField,Field],SaleTranslatableContent->[permission_required,TranslationField,Field],BaseTranslationType->[resolve_language->[LanguageDisplay,next,str_to_enum],Field],ShippingMethodTranslatableContent->[permission_required,TranslationField,Field],AttributeTranslatableContent->[TranslationField,Field],PageTranslatableContent->[resolve_page->[visible_to_user],TranslationField,Field],AttributeValueTranslatableContent->[TranslationField,Field],ProductTranslatableContent->[resolve_product->[visible_to_user],TranslationField,Field],VoucherTranslatableContent->[permission_required,TranslationField,Field],ProductVariantTranslatableContent->[resolve_product_variant->[filter,visible_to_user],TranslationField,Field]]
Resolve a product variant from a given root product.
If `root` is `ProductVariant` why we use filter and first here? Can't we just return root instance?
@@ -766,9 +766,11 @@ static int ssl_print_extension(BIO *bio, int indent, int server, int extype, } static int ssl_print_extensions(BIO *bio, int indent, int server, - const unsigned char *msg, size_t msglen) + const unsigned char **msgin, size_t *msglenin) { - size_t extslen; + size_t extslen, msglen = *msglenin; + const unsigned char *msg = *msgin; + BIO_indent(bio, indent, 80); if (msglen == 0) { BIO_puts(bio, "No Extensions\n");
[No CFG could be retrieved]
This function prints the extensions of a key exchange. This function is used to print client_hello and client_version.
shouldn't this be msginlen?
@@ -645,7 +645,12 @@ export class ViewportBindingNatural_ { if (doc./*OK*/scrollingElement) { return doc./*OK*/scrollingElement; } - if (doc.body) { + if (doc.body + // Firefox does not support scrollTop on doc.body for default + // scrolling. + // See https://github.com/ampproject/amphtml/issues/1398 + // Unfortunately there is no way to feature detect this. + && !platform.isFirefox()) { return doc.body; } return doc.documentElement;
[No CFG could be retrieved]
Implementation of ViewportBindingDef based on the native window on iOS. The constructor for the window object.
Are you sure this is correct? There are still lots of chromes out there with the old API...
@@ -666,6 +666,12 @@ if ($mode == 'common') // Version print '<td class="center nowrap" width="120px">'; print $versiontrans; + if (!empty($objMod->url_last_version)) { + $newversion = file_get_contents($objMod->url_last_version); + if (version_compare($newversion, $versiontrans) > 0) { + print "&nbsp;<span class='butAction' title='" . $langs->trans('LastStableVersion') . "'>$newversion</span>"; + } + } print "</td>\n"; // Activate/Disable and Setup (2 columns)
[getMessage,alreadyUsed,formconfirm,fetch_object,get_next_link,getVersion,getRemoteData,get_previous_link,get_products,initHooks,getDesc,load,executeHooks,isCoreOrExternalModule,loadLangs,getDescLong,close,transnoentitiesnoconv,query,getName,trans,getPublisher,get_categories,selectarray]
Print a block of code that can be used to display a module -- > Prints missing configuration.
To get external content, you must use the method getURLContent of geturl.lib.php. Also, making external accesses may create problems when showing the module page (when a proxy or firewall disagrees), making the module page very slow or impossible to show. The solution is: - introduce a hidden option to enable such a feature (easy solution). - detect when there is at least one module with the property url_last_version and if yes, add a button "Try to detect and show last version of modules" (more work to do)
@@ -218,6 +218,7 @@ func (i *installCmd) resolveValues() (map[string]interface{}, error) { fmt.Sprintf("OpenServiceMesh.enableEgress=%t", i.enableEgress), fmt.Sprintf("OpenServiceMesh.meshCIDRRanges=%s", strings.Join(i.meshCIDRRanges, " ")), fmt.Sprintf("OpenServiceMesh.deployJaeger=%t", i.deployJaeger), + fmt.Sprintf("OpenServiceMesh.envoyLogLevel=%s", i.envoyLogLevel), } if i.containerRegistrySecret != "" {
[resolveValues->[Sprintf,ParseInto,Join],validateOptions->[EqualFold,TODO,loadOSMChart,AppsV1,List,String,Errorf,Namespace,Set,Deployments],loadOSMChart->[LoadChart,Errorf,Load],run->[validateOptions,Fprintf,resolveValues,NewInstall,Namespace,Run],RESTClientGetter,StringVar,ToRESTConfig,IsValidLabelValue,StringSliceVar,BoolVar,NewForConfig,Replace,Errorf,run,ParseCIDR,Flags,IntVar]
resolveValues returns a map of values that can be used to populate the OpenServiceMesh configuration This function is used to parse the Helm strvals line and merges into a map for.
We should add a cli validation for the allowed values to ensure bad values are caught early on.
@@ -235,7 +235,7 @@ public class LeaseManager<T> { } } catch (InterruptedException e) { // This means a new lease is added to activeLeases. - LOG.error("Execution was interrupted ", e); + LOG.debug("Execution was interrupted ", e); Thread.currentThread().interrupt(); } }
[LeaseManager->[acquire->[acquire],shutdown->[release],LeaseMonitor->[run->[get,release]],start->[start],get->[get]]]
This method is run in a thread. It checks for a lease expiry and if it is.
This should not be related to block deletion. What exception are we seeing here?
@@ -1445,7 +1445,8 @@ def fully_connected(inputs, ValueError: If x has rank less than 2 or if its last dimension is not set. """ if not isinstance(num_outputs, six.integer_types): - raise ValueError('num_outputs should be int or long, got %s.', num_outputs) + raise ValueError( + 'num_outputs should be int or long, got %s.' % (num_outputs,)) layer_variable_getter = _build_variable_getter({'bias': 'biases', 'kernel': 'weights'})
[legacy_fully_connected->[bias_add,stack,_apply_activation],fully_connected->[_build_variable_getter,_add_variable_to_collections],convolution2d_in_plane->[bias_add],softmax->[softmax],batch_norm->[_fused_batch_norm],pool->[pool],separable_convolution2d->[bias_add,_build_variable_getter,_add_variable_to_collections],bias_add->[bias_add],_inner_flatten->[_sparse_inner_flatten,_dense_inner_flatten],_build_variable_getter->[layer_variable_getter->[_model_variable_getter]]]
Adds a fully connected layer to the network. Computes a single non - zero non - zero non - zero non - zero non - zero Missing node - lease output.
There is no need for the brackets and comma here, since there is only one item.
@@ -63,13 +63,13 @@ public class ParseReportStepTest { @Test public void extract_report_from_db_and_browse_components() throws Exception { AnalysisReportDto reportDto = prepareAnalysisReportInDb(); - - IssueComputation issueComputation = mock(IssueComputation.class); DbClient dbClient = new DbClient(dbTester.database(), dbTester.myBatis(), new AnalysisReportDao()); - ParseReportStep step = new ParseReportStep(issueComputation, dbClient, new DefaultTempFolder(temp.newFolder())); + sut = new ParseReportStep(issueComputation, dbClient, new DefaultTempFolder(temp.newFolder())); ComputationContext context = new ComputationContext(reportDto, mock(ComponentDto.class)); - step.execute(context); + context.setProjectSettings(mock(Settings.class, Mockito.RETURNS_DEEP_STUBS)); + + sut.execute(context); // verify that all components are processed (currently only for issues) verify(issueComputation).processComponentIssues(context, "PROJECT_UUID", Collections.<BatchReport.Issue>emptyList());
[ParseReportStepTest->[prepareAnalysisReportInDb->[setSnapshotId,setData,writeComponent,zipDir,insert,openSession,newFile,AnalysisReportDao,AnalysisReportDto,setProjectKey,currentTimeMillis,close,writeMetadata,BatchOutputWriter,FileInputStream,setCreatedAt,build,get,commit,setStatus,newFolder],extract_report_from_db_and_browse_components->[DbClient,AnalysisReportDao,execute,mock,ComputationContext,isEqualTo,ParseReportStep,DefaultTempFolder,emptyList,afterReportProcessing,myBatis,database,processComponentIssues,newFolder,prepareAnalysisReportInDb],setUp->[truncateTables],TemporaryFolder,DbTester]]
Extract report from db and browse components.
With the new logging API, you could check that the INFO log is printed.
@@ -71,6 +71,7 @@ if ( ! function_exists('create_captcha')) 'font_path' => '', 'expiration' => 7200, 'word_length' => 8, + 'font_size' =>16, 'pool' => '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ', 'colors' => array( 'background' => array(255,255,255),
[No CFG could be retrieved]
Create a Captcha This function is used to create a random color from a word Create the rectangle image filled rectangle and spiral pattern image filled rectangle and spiral This function creates the image and border for a word.
Use a tab after the key name and a space after `=>`
@@ -21,3 +21,9 @@ class CuraAPI: # Interface API interface = Interface() + + _account = Account() + + @pyqtProperty(QObject, constant = True) + def account(self) -> Account: + return CuraAPI._account
[CuraAPI->[Interface,Backups]]
Create an instance of the Element object.
I don't think this is a good idea, especially that `Account.__init__()` does way too much (or in other words, has too many dependencies) such as accessing `Application.getInstance().getPreferences()`.
@@ -268,14 +268,15 @@ class ConvectionDiffusionBaseSolver(PythonSolver): # Check and prepare computing model part and import constitutive laws. self._execute_after_reading() - throw_errors = False - KratosMultiphysics.TetrahedralMeshOrientationCheck(self.main_model_part, throw_errors).Execute() + if self.settings["check_mesh_orientation"].GetBool(): + throw_errors = False + KratosMultiphysics.TetrahedralMeshOrientationCheck(self.main_model_part, throw_errors).Execute() KratosMultiphysics.ReplaceElementsAndConditionsProcess(self.main_model_part,self._get_element_condition_replace_settings()).Execute() self._set_and_fill_buffer() - if (self.settings["echo_level"].GetInt() > 0): + if self.settings["echo_level"].GetInt() > 0: self.print_on_rank_zero(self.model) KratosMultiphysics.Logger.PrintInfo("::[ConvectionDiffusionBaseSolver]::", "ModelPart prepared for Solver.")
[ConvectionDiffusionBaseSolver->[_create_convergence_criterion->[_get_convergence_criterion_settings],_create_builder_and_solver->[get_linear_solver],_execute_after_reading->[import_materials],Initialize->[Initialize],_create_line_search_strategy->[GetComputingModelPart,get_solution_scheme,get_builder_and_solver,get_linear_solver,get_convergence_criterion],_create_newton_raphson_strategy->[GetComputingModelPart,get_solution_scheme,get_builder_and_solver,get_linear_solver,get_convergence_criterion],_set_and_fill_buffer->[GetMinimumBufferSize],_create_linear_strategy->[GetComputingModelPart,get_builder_and_solver,get_solution_scheme,get_linear_solver],Solve->[Solve]]]
Prepares the main model part for Solver.
this is potentially dangerous, please add a warning in case the user disables it sth like "WARNING, manually disabling the mesh-orientation-check, which is only recommended for developers / ppl that know what they are doing" or similar, I think you get what I mean :) Also remember to remove this again once you figured out what is the problem with the `TetrahedralMeshOrientationCheck`
@@ -179,7 +179,8 @@ public class ResolverUtils { if (stackedTypesModelProperty.isPresent()) { resolver = stackedTypesModelProperty.get().getValueResolverFactory().getExpressionBasedValueResolver(expression, getType(type)); - //TODO MULE-13518: Add support for stacked value resolvers for @Parameter inside pojos // The following "IFs" should be removed once implemented + // TODO MULE-13518: Add support for stacked value resolvers for @Parameter inside pojos // The following "IFs" should be + // removed once implemented } else if (isTypedValue.getAsBoolean()) { ExpressionTypedValueValueResolver<Object> valueResolver = new ExpressionTypedValueValueResolver<>(expression, getType(type));
[ResolverUtils->[resolveCursor->[resolveCursor],resolveValue->[resolveRecursively],resolveRecursively->[resolveRecursively],getExpressionBasedValueResolver->[getExpressionBasedValueResolver],getDefaultValueResolver->[getExpressionBasedValueResolver]]]
This method creates a ValueResolver based on the given expression.
fix format, put the second `//` in a new line
@@ -212,14 +212,7 @@ namespace System.Windows.Forms { return !(p1 == p2); } - /// <include file='doc\Padding.uex' path='docs/doc[@for="Padding.GetHashCode"]/*' /> - public override int GetHashCode() { - // Padding class should implement GetHashCode for perf - return Left - ^ WindowsFormsUtils.RotateLeft(Top, 8) - ^ WindowsFormsUtils.RotateLeft(Right, 16) - ^ WindowsFormsUtils.RotateLeft(Bottom, 24); - } + public override int GetHashCode() => HashCode.Combine(Left, Top, Right, Bottom); /// <include file='doc\Padding.uex' path='docs/doc[@for="Padding.ToString"]/*' /> public override string ToString() {
[PaddingConverter->[ConvertTo->[ConvertTo,ShouldSerializeAll],ConvertFrom->[ConvertFrom],CanConvertFrom->[CanConvertFrom],CanConvertTo->[CanConvertTo]],Padding->[ToString->[ToString],Debug_SanityCheck->[ShouldSerializeAll]]]
Get hashCode for this node.
Not certain this is the same operation (which may be fine)
@@ -10,14 +10,14 @@ from parlai.core.fbdialog_teacher import FbDialogTeacher from .build import build -def _path(version, opt, exsz=""): +def _path(version, opt, exsz=''): # Build the data if it doesn't exist. build(opt) dt = opt['datatype'].split(':')[0] if exsz: - fname = "%s.%s.txt" % (dt, exsz) + fname = '%s.%s.txt' % (dt, exsz) else: - fname = "%s.txt" % dt + fname = '%s.txt' % dt return os.path.join(opt['datapath'], 'InsuranceQA', version, fname)
[V2Teacher->[__init__->[_path]],V1Teacher->[__init__->[_path]]]
Return the path to the InsuranceQA file.
could also add a default, if len(split) < 3 then use "100"
@@ -405,11 +405,17 @@ def options(func): ), option( "--proportional-fee", - help=("Mediation for as ratio of mediated amount in parts-per-million (10^-6)"), + help=("Mediation fee as ratio of mediated amount in parts-per-million (10^-6)."), default=0, type=int, show_default=True, ), + option( + "--rebalancing-fee/--no-rebalancing-fee", + help="Enables the rebalancing fee (Preview).", + default=False, + show_default=True, + ), ), ]
[smoketest->[print_step,append_report],run->[run]]
Decorator to add options to a command line. Adds options for a specific block of flags. Raiden options. Options for a raiden routing service. Config for logging. Option group that adds missing options to the options list.
If you want to allow a bit more configurability, you could pass in the parabola exponent instead of just an on/off flag.
@@ -59,10 +59,12 @@ public interface Log extends BasicLogger { @LogMessage(level = WARN) @Message(value = "Conditional operation '%s' should be used with transactional caches, otherwise data inconsistency issues could arise under failure situations", id = 6010) + @Once void warnConditionalOperationNonTransactional(String op); @LogMessage(level = WARN) @Message(value = "Operation '%s' forced to return previous value should be used on transactional caches, otherwise data inconsistency issues could arise under failure situations", id = 6011) + @Once void warnForceReturnPreviousNonTransactional(String op); @Message(value = "Listener %s factory '%s' not found in server", id = 6013)
[No CFG could be retrieved]
Warns if a missing cache event is found in the server.
I didn't realise this existed, nice!
@@ -37,6 +37,13 @@ def test_io_set(): _test_raw_reader(read_raw_eeglab, input_fname=raw_fname_onefile, montage=montage) raw = read_raw_eeglab(input_fname=raw_fname_onefile, montage=montage) + # test finding events in continuous data + event_id={'rt':1, 'square':2} + raw = read_raw_eeglab(input_fname=raw_fname_onefile, montage=montage, + event_id=event_id) + Epochs(raw, find_events(raw)) # without event_id + epochs = Epochs(raw, find_events(raw), event_id) # with event_id + assert_equal(epochs["square"].average().nave, 80) raw2 = read_raw_eeglab(input_fname=raw_fname, montage=montage) assert_array_equal(raw[:][0], raw2[:][0]) # one warning per each preload=False or str with raw_fname_onefile
[test_io_set->[assert_array_equal,_TempDir,get_data,read_raw_eeglab,catch_warnings,assert_equal,write_events,len,assert_raises,savemat,read_epochs_eeglab,copyfile,join,loadmat,_test_raw_reader,simplefilter],simplefilter,run_tests_if_main,data_path,join,requires_version]
Test import of the EEGLAB. set file for missing data. read_epochs_eeglab reads a file of data and returns a missing event.
since it's a `mat` file after all. Maybe you can read the events in, modify them and write them to a temp file which you can use for testing if the warnings are raised.
@@ -283,6 +283,11 @@ int ACE_TMAIN(int argc, ACE_TCHAR *argv[]) dcs->wait_for("driver", LOCAL_MANUAL_DATAREADER, "LIVELINESS_LOST_" + OpenDDS::DCPS::to_dds_string(i)); } + + while (automatic_drl_servant->samples_handled() < 1) { + ACE_OS::sleep(0.25); + } + delete writer; pub->delete_contained_entities(); dp->delete_publisher(pub);
[No CFG could be retrieved]
Reads the specified number of components from the DDS and writes the sample to the DDS Check if the local and remote manual dr match.
build warnings are saying this is getting cast to zero on some platforms
@@ -220,10 +220,10 @@ public class PackagePersistence { return getPackage(pkgId); } - public synchronized String getActivePackageId(String name) { - name = name + '-'; + public synchronized String getActivePackageId(String name) throws PackageException { for (Entry<String, PackageState> entry : states.entrySet()) { - if (entry.getKey().startsWith(name) && entry.getValue().isInstalled()) { + if (entry.getKey().startsWith(name) && entry.getValue().isInstalled() + && getPackage(entry.getKey()).getName().equals(name)) { return entry.getKey(); } }
[PackagePersistence->[reset->[writeStates],getActivePackage->[getPackage],getPackages->[getState],updateState->[writeStates],removePackage->[writeStates],addPackageFromDir->[getPackage]]]
getActivePackage - get active package by name.
Please put `entry.getKey()` in a local variable.
@@ -264,6 +264,11 @@ public class BootstrapAppModelResolver implements AppModelResolver { List<AppDependency> fullDeploymentDeps = new ArrayList<>(userDeps); fullDeploymentDeps.addAll(deploymentDeps); + //we need these to have a type of 'jar' + //type is blank when loaded + for (AppArtifactKey i : localProjects) { + appBuilder.addLocalProjectArtifact(new AppArtifactKey(i.getGroupId(), i.getArtifactId(), null, "jar")); + } return appBuilder .addDeploymentDeps(deploymentDeps) .setAppArtifact(appArtifact)
[BootstrapAppModelResolver->[relink->[relink],doResolveModel->[resolve],install->[install],resolveVersionRangeResult->[resolveVersionRangeResult],addRemoteRepositories->[addRemoteRepositories],toAetherDeps->[toAetherArtifact],collect->[collect]]]
Resolve the model. find a that can be used to resolve the dependency graph of the appArtifact find the best match for the requests.
Ideally, we should at least match this `localProjects` to the resolved user dependencies: first, to make sure they are actually present among the resolved deps; second, we can get the actual artifact classifier and types from them.
@@ -44,6 +44,14 @@ func NewSampleRepoTest(c SampleRepoConfig) func() { err := oc.Run("new-app").Args("-f", c.templateURL).Execute() o.Expect(err).NotTo(o.HaveOccurred()) + // sometimes the deployment controller takes a while to start the DB deployment which + // results in the app coming up before the DB is available, which makes the app tests fail + // because they require the DB is available when the app started, not when the http request + // is made (they init the DB when they start, not on each request). So here we force + // an immediate deployment of the DB. It may error because there is already a deployment + // in progress, so do not check for an error. + oc.Run("deploy").Args(c.dbDeploymentConfigName, "--latest").Execute() + // all the templates automatically start a build. buildName := c.buildConfigName + "-1"
[REST,By,SetOutputDir,Expect,HaveOccurred,Duration,It,ShouldNot,WaitForAnEndpoint,KubeREST,Should,Args,ReplicationControllers,Namespace,KubeFramework,ContainSubstring,DumpBuildLogs,Equal,Execute,NewCLI,Output,ServiceAccounts,JustBeforeEach,Builds,WaitForBuilderAccount,NotTo,Template,Describe,WaitForADeploymentToComplete,Sprintf,CheckOpenShiftNamespaceImageStreams,WaitForABuild,GinkgoRecover,FetchURL,Run,KubeConfigPath]
NewSampleRepoTest creates a new sample repo test function that runs a new - app from if - expect the service is available.
By the time you create the database config and since it has a config change trigger, its latestVersion should be immediately incremented to 1. I am wondering if there are any update conflicts for the dc and the initial update is somehow lost. Can you post any server logs from failed runs of this test?
@@ -36,6 +36,7 @@ namespace Dynamo.Graph.Workspaces double cx = 0; double cy = 0; double zoom = 1.0; + double scaleFactor = 1.0; string id = ""; string category = ""; string description = "";
[WorkspaceInfo->[FromXmlDocument->[Value,ToString,TryParse,Attributes,Message,Count,WriteLine,IsNullOrEmpty,Manual,OpenWorkbenchError,Parse,Log,GetElementsByTagName,StackTrace,InvariantCulture,DefaultRunPeriod,Equals],Automatic,DefaultRunPeriod,IsNullOrEmpty]]
This method extracts the information from an XML document. if a dyf and it doesn t have an ID field it will be assigned a random.
@ramramps @QilongTang @sm6srw seems we may need to serialize this?
@@ -126,6 +126,7 @@ class PasswordForm extends Component<Props, State> { autoFocus = { true } className = 'info-password-input' onChange = { this._onEnteredPasswordChange } + placeholder = { placeHolderText } spellCheck = { 'false' } type = 'text' value = { this.state.enteredPassword } />
[No CFG could be retrieved]
Gets the derived state from the props. A component that renders the info box for the user s password.
What do you think about using type = 'number' instead? That would defer the digit checking and even length check to the browser, but then you'd have to add css to hide the number wheel (up/down arrows) which maybe is a bigger pain than checking the entered value.
@@ -262,10 +262,10 @@ class WPSEO_Primary_Term_Admin { $primary_term = $this->get_primary_term( $taxonomy->name ); if ( empty( $primary_term ) ) { - $primary_term = ''; + $primary_term = 1; } - $terms = get_terms( $taxonomy->name ); + $terms = $this->get_taxonomies( $taxonomy->name ); return array( 'title' => $taxonomy->labels->singular_name,
[WPSEO_Primary_Term_Admin->[map_taxonomies_for_js->[get_primary_term,generate_field_id],get_primary_term_taxonomies->[get_current_id],get_primary_term->[get_current_id,get_primary_term]]]
Map a taxonomy to a JS array.
`get_taxonomies` isn't the right name.
@@ -136,16 +136,14 @@ class ReadFromMongoDB(PTransform): Returns: :class:`~apache_beam.transforms.ptransform.PTransform` - """ if extra_client_params is None: extra_client_params = {} if not isinstance(db, str): - raise ValueError('ReadFromMongDB db param must be specified as a string') + raise ValueError("ReadFromMongDB db param must be specified as a string") if not isinstance(coll, str): raise ValueError( - 'ReadFromMongDB coll param must be specified as a ' - 'string') + "ReadFromMongDB coll param must be specified as a string") self._mongo_source = _BoundedMongoSource( uri=uri, db=db,
[_BoundedMongoSource->[_count_id_range->[_merge_id_filter],_get_auto_buckets->[estimate_size,_estimate_average_document_size],_replace_none_positions->[_get_head_document_id]],_ObjectIdHelper->[increment_id->[id_to_int,int_to_id]],_ObjectIdRangeTracker->[position_to_fraction->[id_to_int],fraction_to_position->[increment_id,id_to_int,int_to_id]]]
Initialize a new ReadFromMongoDB object.
What problem does this class solve? It looks almost a duplicate of LexicographicKeyRangeTracker. Couldn't you just update LexicographicKeyRangeTracker if needed?
@@ -115,7 +115,7 @@ public class ImageStoreUploadMonitorImpl extends ManagerBase implements ImageSto public boolean configure(String name, Map<String, Object> params) throws ConfigurationException { _executor = Executors.newScheduledThreadPool(1, new NamedThreadFactory("Upload-Monitor")); _monitoringInterval = UploadMonitoringInterval.value(); - _uploadOperationTimeout = UploadOperationTimeout.value() * 60 * 1000; + _uploadOperationTimeout = UploadOperationTimeout.value() * 60 * 1000L; _nodeId = ManagementServerNode.getManagementServerId(); return true; }
[ImageStoreUploadMonitorImpl->[start->[scheduleWithFixedDelay,UploadStatusCheck],stop->[shutdownNow],getConfigComponentName->[getSimpleName],UploadStatusCheck->[handleTemplateStatusResponse->[doInTransactionWithoutResult->[setState,getMessage,setDownloadState,getPhysicalSize,setInstallPath,isDebugEnabled,getZoneId,getDownloadPercent,setExtractUrl,getDetails,setDownloadPercent,getTime,setPhysicalSize,incrementResourceCount,debug,getState,transitTo,error,update,createForUpdate,currentTimeMillis,sendAlert,getInstallPath,getUuid,findById,getStatus,getVirtualSize,getAccountId,setSize,getId],getStateMachine,TransactionCallbackNoReturn,execute],runInContext->[longValue,getMessage,getManagementServerId,warn,getDataStore,trace,select,sendMessage,getTemplateId,getExtractUrl,UploadStatusAnswer,listByVolumeState,getDataStoreId,isTraceEnabled,getName,handleTemplateStatusResponse,getUuid,findById,getId,UploadStatusCommand,listByTemplateState,getVolumeId,handleVolumeStatusResponse],handleVolumeStatusResponse->[doInTransactionWithoutResult->[setState,getMessage,setDownloadState,getPhysicalSize,setInstallPath,isDebugEnabled,getDataCenterId,getDownloadPercent,getDetails,setDownloadPercent,getTime,setPhysicalSize,incrementResourceCount,debug,getState,transitTo,error,update,createForUpdate,currentTimeMillis,sendAlert,getInstallPath,getUuid,findById,getStatus,getVirtualSize,getAccountId,setSize,getId],getStateMachine,TransactionCallbackNoReturn,execute]],configure->[getManagementServerId,NamedThreadFactory,newScheduledThreadPool,value],getUploadOperationTimeout->[value],getLogger]]
Override this method to configure the upload monitor.
please find or make a definition of 60 seconds
@@ -111,3 +111,18 @@ class ServerDiskAdapter(ServerStorageAdapter): except OSError: break # not empty ref_path = os.path.dirname(ref_path) + + def path_exists(self, path): + return os.path.exists(path) + + def read_file(self, path): + with open(path) as f: + return f.read() + + def write_file(self, path, contents, lock_file): + with fasteners.InterProcessLock(lock_file, logger=None) if lock_file else no_op(): + with open(path, "w") as f: + f.write(contents) + + def base_storage_folder(self): + return self._store_folder
[ServerStorageAdapter->[get_download_urls->[NotImplementedError],delete_empty_dirs->[NotImplementedError],delete_folder->[NotImplementedError],get_snapshot->[NotImplementedError],get_upload_urls->[NotImplementedError]],ServerDiskAdapter->[get_download_urls->[relpath,get_token_for,replace,decode_text,isinstance],delete_empty_dirs->[conan,dirname,rmdir,SimplePaths,exists,range],delete_folder->[NotFoundException,rmdir,path_exists],delete_file->[remove,NotFoundException,path_exists],get_snapshot->[md5sum,NotFoundException,set,join,path_exists,relative_dirs],get_upload_urls->[relpath,get_token_for,replace,decode_text,isinstance,items]]]
Delete empty directories.
Lock to read file too.
@@ -265,18 +265,6 @@ public abstract class DoFn<InputT, OutputT> implements Serializable, HasDisplayD * data has been explicitly requested. See {@link Window} for more information. */ public abstract PaneInfo pane(); - - /** - * Gives the runner a (best-effort) lower bound about the timestamps of future output associated - * with the current element. - * - * <p>If the {@link DoFn} has multiple outputs, the watermark applies to all of them. - * - * <p>Only splittable {@link DoFn DoFns} are allowed to call this method. It is safe to call - * this method from a different thread than the one running {@link ProcessElement}, but all - * calls must finish before {@link ProcessElement} returns. - */ - public abstract void updateWatermark(Instant watermark); } /** Information accessible when running a {@link DoFn.OnTimer} method. */
[DoFn->[ProcessContinuation->[withResumeDelay->[shouldResume]]]]
Abstract method to update the watermark of a pane.
This is a backwards-incompatible change. It probably does not affect many users but we should make sure to announce it, e.g. via the release notes.
@@ -267,6 +267,8 @@ def any_all_helper(builder: IRBuilder, @specialize_function('dataclasses.field') +@specialize_function('attr.ib') +@specialize_function('attr.attrib') @specialize_function('attr.Factory') def translate_dataclasses_field_call( builder: IRBuilder, expr: CallExpr, callee: RefExpr) -> Optional[Value]:
[translate_tuple_from_generator_call->[sequence_from_generator_preallocate_helper,isinstance,len],translate_set_from_generator_call->[translate_set_comprehension,isinstance,len],translate_all_call->[any_all_helper,unary_op,isinstance,len],translate_dataclasses_field_call->[AnyType],translate_next_call->[gen_inner_stmts->[accept,assign,goto],node_type,list,BasicBlock,Unreachable,RaiseStandardError,add,len,comprehension_helper,assign,isinstance,activate_block,zip,Register,accept,goto],translate_dict_setdefault->[call_c,Integer,len,isinstance,accept],translate_str_format->[join_formatted_strings,convert_format_expr_to_str,len,isinstance,tokenizer_format_call,count],translate_list_from_generator_call->[sequence_from_generator_preallocate_helper,isinstance,len],translate_any_call->[any_all_helper,isinstance,len],translate_fstring->[append,join_formatted_strings,isinstance,convert_format_expr_to_str],translate_len->[node_type,builtin_len,Integer,len,isinstance,accept],dict_methods_fast_path->[node_type,call_c,is_dict_rprimitive,len,isinstance,accept],any_all_helper->[gen_inner_stmts->[new_value,assign,activate_block,add_bool_branch,modify,accept,goto],list,initial_value,BasicBlock,goto_and_activate,comprehension_helper,assign,zip,Register],translate_globals->[load_globals_dict,len],translate_isinstance->[AnyType,len,flatten_classes,isinstance,accept,isinstance_helper],specialize_function->[wrapper->[setdefault]],translate_safe_generator_call->[node_type,call_refexpr_with_args,len,gen_method_call,translate_list_comprehension,isinstance,accept],specialize_function]
Translates a `dataclasses.field` or `attr.Factory` call.
Modify the docstring
@@ -62,7 +62,7 @@ class AnchorGenerator(nn.Module): return base_anchors.round() def set_cell_anchors(self, device): - if self.cell_anchors is not None: + if self.cell_anchors is not None and device == self.cell_anchors[0].devcie: return self.cell_anchors cell_anchors = [ self.generate_anchors(
[concat_box_prediction_layers->[permute_and_flatten],AnchorGenerator->[forward->[set_cell_anchors,cached_grid_anchors],set_cell_anchors->[generate_anchors],cached_grid_anchors->[grid_anchors]],RegionProposalNetwork->[forward->[concat_box_prediction_layers,assign_targets_to_anchors,filter_proposals,compute_loss],filter_proposals->[_get_top_n_idx]]]
Set cell anchors.
There is a typo here, and the code is not going to work as is (`devcie` is not an attribute of Tensor).
@@ -43,13 +43,13 @@ static unsigned const char cov_2char[64] = { static int do_passwd(int passed_salt, char **salt_p, char **salt_malloc_p, char *passwd, BIO *out, int quiet, int table, int reverse, size_t pw_maxlen, int usecrypt, int use1, - int useapr1); + int useapr1, int use5, int use6); typedef enum OPTION_choice { OPT_ERR = -1, OPT_EOF = 0, OPT_HELP, OPT_IN, OPT_NOVERIFY, OPT_QUIET, OPT_TABLE, OPT_REVERSE, OPT_APR1, - OPT_1, OPT_CRYPT, OPT_SALT, OPT_STDIN + OPT_1, OPT_5, OPT_6, OPT_CRYPT, OPT_SALT, OPT_STDIN } OPTION_CHOICE; OPTIONS passwd_options[] = {
[No CFG could be retrieved]
The list of possible password identifiers. The passwd command line interface.
i'd really prefer a single 'mode' variable, rather than adding a new flag param, since only one flag can be set at a time ...
@@ -135,8 +135,6 @@ func (e *EthTx) insertEthTx(input models.RunInput, store *strpkg.Store) models.R return models.NewRunOutputError(err) } - store.NotifyNewEthTx.Trigger() - return models.NewRunOutputPendingOutgoingConfirmationsWithData(input.Data()) }
[perform->[FindEthTaskRunTxByTaskRunID,Error,checkForConfirmation,TaskRunID,UUID,NewRunOutputError,Wrap,insertEthTx],checkForConfirmation->[Data,NewRunOutputPendingOutgoingConfirmationsWithData,NewRunOutputError,checkEthTxForReceipt,GetError],Perform->[perform,EnableBulletproofTxManager,legacyPerform],pickFromAddress->[TaskRunID,Hex,Warnf,GetRoundRobinAddress],checkEthTxForReceipt->[Error,NewRunOutputComplete,Data,NewRunOutputPendingOutgoingConfirmationsWithData,Hex,NewRunOutputError,MinRequiredOutgoingConfirmations,MultiAdd,Wrap],legacyPerform->[Status,ConcatBytes,Connected,PendingOutgoingConfirmations,Bytes,NewRunOutputError,Wrap],insertEthTx->[Error,ConcatBytes,Data,TaskRunID,NewRunOutputPendingOutgoingConfirmationsWithData,pickFromAddress,Bytes,IdempotentInsertEthTaskRunTx,NewRunOutputError,Trigger,Wrap,EthGasLimitDefault],Status,Warn,NewRunOutputPendingOutgoingConfirmationsWithData,Wrap,BumpGasUntilSafe,Add,ToInt,IsRecordNotFoundError,EVMTranscodeJSONWithFormat,Error,First,Errorw,New,EVMWordUint64,Joins,HexToHash,NewRunOutputPendingConnection,Bytes,StringFrom,Wrapf,ConcatBytes,Data,Hex,NewRunOutputComplete,CheckAttempt,NewRunOutputError,Where,ReceiptIsUnconfirmed,Get,CreateTxWithGas,Sprintf,Result,PendingOutgoingConfirmations,ResultString,Debugw,Unmarshal,String,JobRunID]
insertEthTx creates a new eth transaction in the database. checkEthTxForReceipt checks the transaction for a receipt, returning pending outgoing confirmations while it is not yet confirmed.
why did you remove this?
@@ -365,7 +365,7 @@ func GetConfiguredMappings() ([]idtools.IDMap, []idtools.IDMap, error) { } mappings, err := idtools.NewIDMappings(username, username) if err != nil { - logrus.Warnf("cannot find mappings for user %s: %v", username, err) + logrus.Errorf("cannot find mappings for user %s: %v", username, err) } else { uids = mappings.UIDs() gids = mappings.GIDs()
[Value,reexec_in_user_namespace,Syscall,Readlink,Fd,Warnf,Close,Setenv,Signal,rootless_gid,LookupId,NewFile,WriteFile,Atoi,Itoa,CombinedOutput,SystemBus,Stat,int,LockOSThread,ReadFile,NewIDMappings,New,Getegid,Notify,GIDs,reexec_userns_join,Errorf,Run,LookPath,Socketpair,reexec_in_user_namespace_wait,Debugf,Wrapf,Join,Create,Output,Geteuid,Do,UnlockOSThread,Name,Kill,ReadLine,Read,ParseUint,CloseQuiet,Command,CString,free,Object,Write,Reset,UIDs,NewReader,Sprintf,ObjectPath,GetProperty,Pointer,String,Replace,Open,Sscanf,Call,Getenv,rootless_uid,Trim]
GetConfiguredMappings returns the additional ID mappings configured for the current user. Returns true if the node has a unique identifier, false if it has no unique identifier.
Would it be a problem for people using a single ID mapped to get always the error?
@@ -698,8 +698,17 @@ public class ActiveMQSession implements QueueSession, TopicSession { */ if (!response.isExists() || !response.getQueueNames().contains(dest.getSimpleAddress())) { if (response.isAutoCreateQueues()) { + SimpleString queueNameToUse = dest.getSimpleAddress(); + SimpleString addressToUse = queueNameToUse; + RoutingType routingTypeToUse = RoutingType.ANYCAST; + if (CompositeAddress.isFullyQualified(queueNameToUse.toString())) { + CompositeAddress compositeAddress = CompositeAddress.getQueueName(queueNameToUse.toString()); + addressToUse = new SimpleString(compositeAddress.getAddress()); + queueNameToUse = new SimpleString(compositeAddress.getQueueName()); + routingTypeToUse = RoutingType.MULTICAST; + } try { - session.createQueue(dest.getSimpleAddress(), RoutingType.ANYCAST, dest.getSimpleAddress(), null, true, true, response.getDefaultMaxConsumers(), response.isDefaultPurgeOnNoConsumers()); + session.createQueue(addressToUse, routingTypeToUse, queueNameToUse, null, true, true, response.getDefaultMaxConsumers(), response.isDefaultPurgeOnNoConsumers()); } catch (ActiveMQQueueExistsException e) { // The queue was created by another client/admin between the query check and send create queue packet }
[ActiveMQSession->[createDurableConsumer->[createDurableConsumer,checkTopic,createConsumer],getDeserializationBlackList->[getDeserializationBlackList],createSharedDurableConsumer->[createSharedDurableConsumer,checkTopic],createSubscriber->[createConsumer],createDurableSubscriber->[createDurableSubscriber,createConsumer],internalCreateSharedConsumer->[createConsumer],deleteQueue->[deleteQueue],createConsumer->[createQueue,createConsumer],getQueueSession->[getSession],close->[close],createSharedConsumer->[createSharedConsumer,checkTopic],internalCreateTopic->[internalCreateTopic],createReceiver->[createConsumer],createTemporaryQueue->[createTemporaryQueue],start->[start],rollback->[rollback],lookupTopic->[createTemporaryTopic,createTopic],createTemporaryTopic->[createTemporaryTopic,createTemporaryQueue],createSender->[createProducer],recover->[rollback],stop->[stop],lookupQueue->[createQueue,createTemporaryQueue],getXAResource->[getXAResource],getDeserializationWhiteList->[getDeserializationWhiteList],internalCreateQueue->[internalCreateQueue],commit->[commit],getTopicSession->[getSession],createProducer->[createProducer],createPublisher->[createProducer],createBrowser->[createQueue,createBrowser]]]
Creates a consumer. Adds a destination to the session and creates a consumer and a queue. This method is called when a consumer is not subscribed to a durable topic.
Is this related to Virtual Topics support for OpenWire? My concern here is, if I request a composite address on a JMS Queue, it should be ANYCAST routing.
@@ -28,7 +28,7 @@ import org.apache.beam.sdk.options.Description; import org.apache.beam.sdk.options.PipelineOptionsFactory; import org.apache.beam.sdk.testing.PAssert; import org.apache.beam.sdk.transforms.Aggregator; -import org.apache.beam.sdk.transforms.OldDoFn; +import org.apache.beam.sdk.transforms.DoFn; import org.apache.beam.sdk.transforms.ParDo; import org.apache.beam.sdk.transforms.Sum; import org.apache.beam.sdk.values.KV;
[No CFG could be retrieved]
Imports a single object. Basic concepts that are used in the MinimalWordCount and WordCount examples.
Differs from non archetypes version: Redundant import ${package}.WordCount, please remove
@@ -235,8 +235,8 @@ class XLATestCase(test.TestCase): 'test_session not supported on XLATestCase, please use session') @contextlib.contextmanager - def test_scope(self): - """Test scope that runs tests on `self.device`. + def device_scope(self): + """Scope that runs tests on `self.device`. Yields: A scope to apply to the operators under test.
[XLATestCase->[__init__->[parse_disabled_manifest]]]
A test scope that runs tests on `self.device`.
Nice, this looks like something we can separate and land (or will that be too inconvenient)?
@@ -860,8 +860,8 @@ func TestCloudFrontStructure_expandGeoRestriction_whitelist(t *testing.T) { if *gr.Quantity != 3 { t.Fatalf("Expected Quantity to be 3, got %v", *gr.Quantity) } - if !reflect.DeepEqual(gr.Items, aws.StringSlice([]string{"CA", "GB", "US"})) { - t.Fatalf("Expected Items be [CA, GB, US], got %v", gr.Items) + if !reflect.DeepEqual(aws.StringValueSlice(gr.Items), []string{"GB", "US", "CA"}) { + t.Fatalf("Expected Items be [CA, GB, US], got %v", aws.StringValueSlice(gr.Items)) } }
[Difference,Equal,NewSet,DeepEqual,StringSlice,List,Fatalf]
TestCloudFrontStructure_expandRestrictions expands the given restrictions into a single object. RestrictionConfNoItems checks that the data has no items.
The previous resource logic sorted the slice, which is no longer the case for TypeSet usage.
@@ -214,6 +214,17 @@ SectionHeader WasmBinaryReader::ReadSectionHeader() return header; } +#if ENABLE_DEBUG_CONFIG_OPTIONS +Js::FunctionBody* WasmBinaryReader::GetFunctionBody() const +{ + if (m_readerState == READER_STATE_FUNCTION) + { + return m_funcState.body; + } + return nullptr; +} +#endif + #if DBG_DUMP void WasmBinaryReader::PrintOps() {
[CheckBytesLeft->[ThrowDecodingError],ReadSectionHeader->[ThrowDecodingError],ReadCustomSection->[ThrowDecodingError],char16->[ThrowDecodingError],ReadFunctionHeaders->[ThrowDecodingError],SeekToFunctionBody->[ThrowDecodingError],ReadStartFunction->[ThrowDecodingError],ReadInitExpr->[ReadExpr,ThrowDecodingError],ReadTableSection->[ThrowDecodingError],ReadSectionLimits->[LEB128,ThrowDecodingError],ReadExportSection->[ThrowDecodingError],ReadMutableValue->[ThrowDecodingError],CallIndirectNode->[ThrowDecodingError],BrTableNode->[ThrowDecodingError],ReadFunctionSignatures->[ThrowDecodingError],ValidateModuleHeader->[ThrowDecodingError],ReadImportSection->[ReadTableSection,ThrowDecodingError,ReadMemorySection],ReadDataSection->[ThrowDecodingError],CallNode->[ThrowDecodingError],ReadSignatureTypeSection->[ThrowDecodingError],ReadNameSection->[ThrowDecodingError],ReadGlobalSection->[ThrowDecodingError],LEB128->[ThrowDecodingError],ReadElementSection->[ThrowDecodingError],ReadExpr->[ThrowDecodingError],ReadMemorySection->[ThrowDecodingError]]
Reads the section header. Prints the binary operation codes and deletes the heap (debug dump).
I wasn't sure if we wanted to keep PrintOps. In the current code base in doesn't do anything, because we print before actually parsing any opcode.