patch
stringlengths
18
160k
callgraph
stringlengths
4
179k
summary
stringlengths
4
947
msg
stringlengths
6
3.42k
@@ -468,7 +468,6 @@ static const ssl_trace_tbl ssl_exts_tbl[] = { {TLSEXT_TYPE_srp, "srp"}, {TLSEXT_TYPE_signature_algorithms, "signature_algorithms"}, {TLSEXT_TYPE_use_srtp, "use_srtp"}, - {TLSEXT_TYPE_heartbeat, "tls_heartbeat"}, {TLSEXT_TYPE_application_layer_protocol_negotiation, "application_layer_protocol_negotiation"}, {TLSEXT_TYPE_signed_certificate_timestamp, "signed_certificate_timestamps"},
[No CFG could be retrieved]
Static constructor for the compression table. Table of available TLSEXT types.
Was it really necessary to remove this trace code ?
@@ -120,7 +120,7 @@ def msvc_build_command(settings, sln_path, targets=None, upgrade_project=True, b @deprecation.deprecated(deprecated_in="1.2", removed_in="2.0", details="Use the MSBuild() build helper instead") def build_sln_command(settings, sln_path, targets=None, upgrade_project=True, build_type=None, - arch=None, parallel=True, toolset=None, platforms=None, output=None): + arch=None, parallel=True, toolset=None, platforms=None, output=None, verbosity=None): """ Use example: build_command = build_sln_command(self.settings, "myfile.sln", targets=["SDL2_image"])
[vcvars->[vcvars_dict],find_windows_10_sdk->[_system_registry_key],vcvars_dict->[relevant_path,vcvars_command],vcvars_command->[find_windows_10_sdk,latest_vs_version_installed,vs_installation_path],latest_visual_studio_version_installed->[_visual_compiler],run_in_windows_bash->[get_path_value->[unix_path],escape_windows_cmd,get_path_value,unix_path],_visual_compiler->[_system_registry_key,_visual_compiler_cygwin],unix_path->[get_cased_path]]
Build command for sln.
As this tool is deprecated I don't see the need of updating it with this new command unless it is needed by the `MSBuild` build helper
@@ -1015,6 +1015,16 @@ public class IndexTask extends AbstractBatchIndexTask implements ChatHandler { super(dataSchema, ioConfig, tuningConfig); + Checks.checkOneNotNullOrEmpty( + ImmutableList.of( + new Property<>("parser", dataSchema.getParserMap()), + new Property<>("inputFormat", ioConfig.getInputFormat()) + ) + ); + if (dataSchema.getParserMap() != null && ioConfig.getInputSource() != null) { + throw new IAE("Cannot use parser and inputSource together. Try use inputFormat instead of parser."); + } + this.dataSchema = dataSchema; this.ioConfig = ioConfig; this.tuningConfig = tuningConfig == null ? new IndexTuningConfig() : tuningConfig;
[IndexTask->[makeGroupId->[makeGroupId],getSegmentGranularity->[getSegmentGranularity],generateAndPublishSegments->[getTaskCompletionReports,createSegmentAllocator],getLiveReports->[doGetRowStats],ShardSpecs->[getShardSpec->[getShardSpec]],IndexTuningConfig->[createDefault->[IndexTuningConfig],withPartitionsSpec->[IndexTuningConfig],getNumShards->[getNumShards],getMaxTotalRows->[getMaxTotalRows],getPartitionDimensions->[getPartitionDimensions],equals->[equals],withBasePersistDirectory->[IndexTuningConfig],getMaxRowsPerSegment->[getMaxRowsPerSegment],getDefaultPartitionsSpec],makeGroupId]]
Gets a map of intervals to shards. Creates an IOConfig object that can be used to create an index.
Typo: Try use inputFormat -> Try using inputFormat
@@ -14,6 +14,15 @@ */ package io.netty.example.http2; +import static io.netty.util.internal.ObjectUtil.checkNotNull; +import io.netty.buffer.ByteBuf; +import io.netty.buffer.Unpooled; +import io.netty.handler.codec.http.QueryStringDecoder; + +import java.io.IOException; +import java.io.InputStream; +import java.util.List; + /** * Utility methods used by the example client and server. */
[No CFG could be retrieved]
This class is used to provide a basic example of the HTTP2 protocol.
Kill this line?
@@ -58,6 +58,7 @@ class Moab(Package): depends_on('mpi') depends_on('hdf5+mpi') depends_on('netcdf+mpi', when='+netcdf') + depends_on('metis') depends_on('parmetis') depends_on('zoltan') depends_on('zoltan~fortran', when='~fortran')
[Moab->[install->[append,make,configure],variant,depends_on,version]]
Installs a new in the system.
Why the added dependency? Does it change the way Moab builds?
@@ -95,7 +95,7 @@ export default class ScanLndInvoice extends React.Component { onCameraReady: function() { alert('onCameraReady'); }, - barCodeTypes: [Camera.Constants.BarCodeType.qr], + barCodeTypes: [BarCodeScanner.Constants.BarCodeType.qr], }); }
[No CFG could be retrieved]
This function is called when the component is mounted and the user has requested to pay a new Renders a single in the UI.
Never seen this alert. This is copypasted from example. Probably better remove?
@@ -249,10 +249,10 @@ class GroupsController < ApplicationController #These actions act on all currently selected students & groups def global_actions @assignment = Assignment.find(params[:assignment_id], - :include => [{ - :groupings => [{ - :student_memberships => :user, - :ta_memberships => :user}, + include: [{ + groupings: [{ + student_memberships: :user, + ta_memberships: :user}, :group]}]) @tas = Ta.all grouping_ids = params[:groupings]
[GroupsController->[remove_member->[remove_member]]]
Checks if there is a global action and if so it can be removed from the group.
Use 2 spaces for indentation in a hash, relative to the start of the line where the left curly brace is.
@@ -39,7 +39,7 @@ class TrackingProgressService end def ratio - (halfway_checked_count / work.episodes.without_deleted.count.to_f rescue 1) * 100 + (halfway_checked_count / work.episodes.only_kept.count.to_f rescue 1) * 100 end private
[TrackingProgressService->[get_all_records_count->[get_all_records_count]]]
ratio of possible number of items that have been checked.
Style/RescueModifier: Avoid using rescue in its modifier form.
@@ -330,17 +330,6 @@ namespace System.Xml _coreWriter.WriteNode(navigator, defattr); } - protected override void Dispose(bool disposing) - { - if (disposing) - { - CheckAsync(); - //since it is protected method, we can't call coreWriter.Dispose(disposing). - //Internal, it is always called to Dispose(true). So call coreWriter.Dispose() is OK. - _coreWriter.Dispose(); - } - } - #endregion #region Async Methods
[XmlAsyncCheckWriter->[Flush->[Flush,CheckAsync],WriteNmToken->[WriteNmToken,CheckAsync],WriteName->[WriteName,CheckAsync],WriteStartAttribute->[WriteStartAttribute,CheckAsync],WriteEndDocument->[CheckAsync,WriteEndDocument],WriteCharEntity->[CheckAsync,WriteCharEntity],WriteComment->[WriteComment,CheckAsync],WriteSurrogateCharEntity->[WriteSurrogateCharEntity,CheckAsync],WriteCData->[CheckAsync,WriteCData],Close->[Close,CheckAsync],Task->[CheckAsync],LookupPrefix->[LookupPrefix,CheckAsync],WriteValue->[WriteValue,CheckAsync],WriteEndElement->[WriteEndElement,CheckAsync],WriteEndAttribute->[WriteEndAttribute,CheckAsync],WriteFullEndElement->[WriteFullEndElement,CheckAsync],WriteAttributes->[CheckAsync,WriteAttributes],WriteDocType->[WriteDocType,CheckAsync],WriteWhitespace->[WriteWhitespace,CheckAsync],WriteProcessingInstruction->[WriteProcessingInstruction,CheckAsync],WriteStartDocument->[WriteStartDocument,CheckAsync],WriteStartElement->[WriteStartElement,CheckAsync],WriteBinHex->[WriteBinHex,CheckAsync],WriteChars->[CheckAsync,WriteChars],WriteString->[CheckAsync,WriteString],WriteNode->[WriteNode,CheckAsync],WriteBase64->[WriteBase64,CheckAsync],WriteQualifiedName->[WriteQualifiedName,CheckAsync],WriteRaw->[WriteRaw,CheckAsync],Dispose->[Dispose,CheckAsync],WriteEntityRef->[CheckAsync,WriteEntityRef],CheckAsync]]
Override method to write a node in the coreWriter.
Why is this being deleted?
@@ -1841,8 +1841,6 @@ void GenericCAO::processMessage(const std::string &data) (void)type; addAttachmentChild(child_id); - } else if (cmd == AO_CMD_OBSOLETE1) { - // Don't do anything and also don't log a warning } else { warningstream << FUNCTION_NAME << ": unknown command or outdated client \""
[updateNametag->[getSceneNode],clearParentAttachment->[setAttachment,v3f],clearChildAttachments->[setAttachment,v3f],step->[getSceneNode,update,removeFromScene,translate,v3f,addToScene,updateNodePos,getParent],updateMeshCulling->[getSceneNode],processInitData->[updateNodePos,processMessage,init], ClientActiveObject->[getType],processMessage->[setAttachment,updateNametag,visualExpiryRequired,clearParentAttachment,update,clearChildAttachments,addAttachmentChild,init,updateTexturePos,updateTextures,v3f,updateAnimation,updateNodePos,getParent,updateAnimationSpeed,updateMarker],removeFromScene->[clearParentAttachment],setNodeLight->[getSceneNode],addToScene->[updateNodePos,getSceneNode,v3f],updateNodePos->[getSceneNode,getParent], removeFromScene->[removeFromScene],updateAttachments->[getAnimatedMeshSceneNode,updateAttachments,getSceneNode,getParent],updateBonePosition->[getParent], ClientActiveObject->[getType],directReportPunch->[updateTextures],updateMarker->[getSceneNode]]
Process a message received from the AO This function is called when a new object is read from the device. This function is called when a node is read from the device. read all the data from the input stream This function is called when a local player is opened.
@rubenwardy I suppose I should keep this too or is it fine?
@@ -45,4 +45,6 @@ class ArrayField(Field[numpy.ndarray]): @overrides def empty_field(self): # pylint: disable=no-self-use - return ArrayField(numpy.array([], dtype="float32")) + # pass the padding_value, so that any outer field, e.g., ` ListField[ArrayField]` uses the + # same padding_value in the padded ArrayFields + return ArrayField(numpy.array([], dtype="float32"), padding_value=self.padding_value)
[ArrayField->[empty_field->[ArrayField,array],get_padding_lengths->[str,enumerate],as_tensor->[list,from_numpy,len,slice,format,ones,Variable,cuda,range]]]
Returns an empty field.
Capitalize this, please. Also, spacing is off around `ListField`.
@@ -25,11 +25,5 @@ class DecoderTrainer(Registrable): def decode(self, initial_state: DecoderState, decode_step: DecoderStep, - targets: torch.Tensor, - target_mask: torch.Tensor) -> Dict[str, torch.Tensor]: + supervision: SupervisionType) -> Dict[str, torch.Tensor]: raise NotImplementedError - - @classmethod - def from_params(cls, params: Params) -> 'DecoderTrainer': - choice = params.pop_choice('type', cls.list_available()) - return cls.by_name(choice).from_params(params)
[DecoderTrainer->[from_params->[by_name,list_available,pop_choice]]]
Decodes a single from the decoder.
A docstring describing at least what `supervision` is would be useful here.
@@ -345,12 +345,13 @@ def get_text_field_mask(text_field_tensors: Dict[str, torch.Tensor], the mask. Most frequently this will be a character id tensor, but it could also be a featurized representation of each token, etc. + TODO(joelgrus): can we change this? NOTE: Our functions for generating masks create torch.LongTensors, because using - torch.ByteTensors inside Variables makes it easy to run into overflow errors + torch.ByteTensors makes it easy to run into overflow errors when doing mask manipulation, such as summing to get the lengths of sequences - see below. >>> mask = torch.ones([260]).byte() >>> mask.sum() # equals 260. - >>> var_mask = torch.autograd.Variable(mask) + >>> var_mask = torch.autograd.V(mask) >>> var_mask.sum() # equals 4, due to 8 bit precision - the sum overflows. """ tensor_dims = [(tensor.dim(), tensor) for tensor in text_field_tensors.values()]
[_get_combination->[_get_combination],flatten_and_batch_shift_indices->[get_device_of],batched_index_select->[flatten_and_batch_shift_indices],last_dim_log_softmax->[_last_dimension_applicator],add_positional_features->[get_range_vector],last_dim_softmax->[_last_dimension_applicator],_get_combination_dim->[_get_combination_dim]]
Returns a mask of the given text field tensors. Returns the number of nanoseconds in the tensor.
spacing is off after deletion.
@@ -297,7 +297,7 @@ namespace System.Net.Http.Functional.Tests using (Http2LoopbackServer server = Http2LoopbackServer.CreateServer()) using (HttpClient client = CreateHttpClient()) { - (_, Http2LoopbackConnection connection) = await EstablishConnectionAndProcessOneRequestAsync(client, server); + (_, Http2LoopbackConnection connection) = await EstablishConnectionAndProcessOneRequestAsync(client, server, 1); Task<HttpResponseMessage> sendTask = client.GetAsync(server.Address); (int streamId1, HttpRequestData requestData1) = await connection.ReadAndParseRequestHeaderAsync(readBody: true);
[HttpClientHandlerTest_Http2->[ValidAndInvalidProtocolErrorsAndBool->[ValidAndInvalidProtocolErrors],DuplexContent->[Task->[Task],WaitForStreamAsync->[Task]],Task->[Fail,EstablishConnectionAndProcessOneRequestAsync,ValidateConnection,ReadToEndOfStream,Task,WaitForStreamAsync,Complete]]]
Asynchronously gets a stream refused request.
I don't understand why this change is necessary. Same question for the identical change in the below test.
@@ -49,13 +49,6 @@ class Configuration(object): # User Agent configuration self.user_agent_policy = None - # HTTP Transport - self.transport = transport - - def get_transport(self, **kwargs): - if self.transport: - return self.transport(configuration=self, **kwargs) - class ConnectionConfiguration(object): """HTTP transport connection configuration settings."""
[ConnectionConfiguration->[__init__->[pop]],Configuration->[get_transport->[transport],__init__->[ConnectionConfiguration]]]
Initialize connection configuration object.
This is a breaking change?
@@ -54,6 +54,14 @@ class ColorizationEvaluator(BaseEvaluator): launcher_settings['device'] = 'CPU' launcher = create_launcher(launcher_settings, delayed_model_loading=True) network_info = config.get('network_info', {}) + colorization_network = network_info.get('colorization_network', {}) + verification_network = network_info.get('verification_network', {}) + colorization_network['net_type'] = 'colorization_network' + verification_network['net_type'] = 'verification_network' + network_info.update({ + 'colorization_network': colorization_network, + 'verification_network': verification_network + }) if not delayed_model_loading: colorization_network = network_info.get('colorization_network', {}) verification_network = network_info.get('verification_network', {})
[ColorizationEvaluator->[release->[release],reset->[reset],register_metric->[register_metric],extract_metrics_results->[compute_metrics],get_metrics_attributes->[get_metrics_attributes],print_metrics_results->[compute_metrics]],ColorizationTestModel->[predict->[postprocessing,data_preparation]],ColorizationCheckModel->[predict->[fit_to_input]],BaseModel->[load_network->[load_network],load_model->[load_network,auto_model_search],__init__->[load_model]]]
Create a Colorization object from a config dict.
why do you need this? they have this info as keys in network info, why is it not enough for you?
@@ -98,10 +98,8 @@ class CommentPanel extends JPanel { save.setMargin(inset); save.setFocusable(false); for (final PlayerID playerId : data.getPlayerList().getPlayers()) { - final CompletableFuture<?> future = CompletableFuture - .supplyAsync(() -> frame.getUiContext().getFlagImageFactory().getSmallFlag(playerId)) - .thenAccept(image -> SwingUtilities.invokeLater(() -> iconMap.put(playerId, new ImageIcon(image)))); - CompletableFutureUtils.logExceptionWhenComplete(future, "Failed to load small flag icon for " + playerId); + Optional.ofNullable(frame.getUiContext().getFlagImageFactory().getSmallFlag(playerId)) + .ifPresent(image -> iconMap.put(playerId, new ImageIcon(image))); } }
[CommentPanel->[setupKeyMap->[getInputMap,getKeyStroke,put],init->[setBold,setupKeyMap,setItalic,setSize,loadHistory,setupListeners,createComponents,layoutComponents],addMessage->[getLength,getModel,getDocument,insertString,getMaximum,getEditDelegate,log,setValue,invokeNowOrLater,addComment],readHistoryTreeEvent->[acquireReadLock,getLength,getLastChild,matches,matcher,toString,invokeNowOrLater,compile,group,getPlayerId,getDocument,log,getRound,insertString,getLastPathComponent,getName,getDescription,releaseReadLock,get,getChildCount,insertIcon,getTitle],loadHistory->[start,getDocument],setupListeners->[treeStructureChanged->[readHistoryTreeEvent],treeNodesInserted->[readHistoryTreeEvent],addTreeModelListener,TreeModelListener],createComponents->[thenAccept,logExceptionWhenComplete,setEditable,JTextPane,setMargin,invokeLater,getPlayers,JTextField,ImageIcon,put,setFocusable,JButton,Insets],layoutComponents->[BorderLayout,add,JPanel,setLayout,JScrollPane],setText,of,init,addMessage,SimpleAttributeSet,getText,length]]
Create the components.
The reason this asynchronous computation was introduced was in order to move this potential "expensive IO operation" off the EDT
@@ -16,7 +16,7 @@ See the License for the specific language governing permissions and limitations under the License. -%> -$header-color: #fff; +header-color: #fff; $header-color-secondary: #bbb; $header-color-hover: darken( $header-color, 20% );
[No CFG could be retrieved]
XML - Element for the given node that contains a list of all possible residue objects. Navbar styles for the .
this is wrong, vars should start with $
@@ -227,6 +227,11 @@ class EpollEventLoop extends SingleThreadEventLoop { this.ioRatio = ioRatio; } + @Override + public int registeredChannels() { + return channels.size(); + } + private int epollWait(boolean oldWakeup) throws IOException { // If a task was submitted when wakenUp value was 1, the task didn't get a chance to produce wakeup event. // So we need to check task queue again before calling epoll_wait. If we don't, the task might be pended
[EpollEventLoop->[epollWaitNow->[epollWait],run->[epollWait,epollBusyWait],remove->[remove],closeAll->[epollWaitNow],processReady->[get],epollWait->[epollWait],epollBusyWait->[epollBusyWait]]]
Sets the ioRatio for the poll.
I think this does not really work as `channels` is not thread-safe.
@@ -111,6 +111,17 @@ public class XsltPayloadTransformerTests { assertEquals("Wrong value from result conversion", returnValue, transformed); } + + @Test + public void testXsltPayloadWithTransformerFactoryClassname() throws Exception { + Integer returnValue = new Integer(13); + transformer = new XsltPayloadTransformer(getXslResource(), new StubResultTransformer(returnValue), + "com.sun.org.apache.xalan.internal.xsltc.trax.TransformerFactoryImpl"); + Object transformed = transformer + .doTransform(buildMessage(new StringSource(docAsString))); + assertEquals("Wrong value from result conversion", returnValue, + transformed); + } @Test(expected = TransformerException.class) public void testNonXmlString() throws Exception {
[XsltPayloadTransformerTests->[getXslResource->[ByteArrayResource,getBytes],testDocumentAsPayload->[assertTrue,assertXMLEqual,docToString,getDocumentForString,buildMessage,doTransform,getClass,isAssignableFrom],stringInDomResultOut->[XsltPayloadTransformer,ClassPathResource,StringResultFactory,buildMessage,getDocumentForString,doTransform,setResultFactory,setAlwaysUseResultFactory,assertEquals,getClass],testStringAsPayload->[assertXMLEqual,buildMessage,doTransform,assertEquals,getClass],testSourceAsPayload->[assertXMLEqual,getNode,StringSource,buildMessage,getDocumentForString,doTransform,assertEquals,getClass],testNonXmlString->[doTransform,buildMessage],testUnsupportedPayloadType->[doTransform,Long,buildMessage],buildMessage->[build],docInStringOut->[StringResultFactory,XsltPayloadTransformer,buildMessage,getDocumentForString,doTransform,setResultFactory,toString,setAlwaysUseResultFactory,assertEquals,getClass,getXslResourceThatOutputsText],testSourceWithResultTransformer->[getXslResource,XsltPayloadTransformer,Integer,StringSource,buildMessage,StubResultTransformer,doTransform,assertEquals],documentInStringResultOut->[XsltPayloadTransformer,ClassPathResource,StringResultFactory,buildMessage,getDocumentForString,doTransform,setResultFactory,setAlwaysUseResultFactory,assertEquals,getClass],setUp->[getXslResource,XsltPayloadTransformer],testXsltWithImports->[XsltPayloadTransformer,ClassPathResource,buildMessage,doTransform,assertEquals,getClass],testStringAsPayloadUseResultFactoryTrue->[assertXMLEqual,getNode,buildMessage,getDocumentForString,doTransform,setAlwaysUseResultFactory,assertEquals,getClass],getXslResourceThatOutputsText->[ByteArrayResource,getBytes]]]
Test source with result transformer.
Need one more 'fail' test, when we provide some bad class name, to be sure, that we correctly use `TransformerFactory.newInstance()`
@@ -139,3 +139,17 @@ def test_add_flow_with_weird_name_is_cleaned(): assert "!" not in loc assert " " not in loc assert "~" not in loc + + +def test_build_healthchecks(): + with tempfile.TemporaryDirectory() as tmpdir: + s = Local(directory=tmpdir) + flow = Flow("TestFlow") + s.add_flow(flow) + assert s.build() + + +def test_build_healthcheck_returns_on_no_flows(): + with tempfile.TemporaryDirectory() as tmpdir: + s = Local(directory=tmpdir) + assert s.build()
[test_add_flow_with_weird_name_is_cleaned->[Flow,add_flow,TemporaryDirectory,Local],test_add_flow_raises_if_name_conflict->[TemporaryDirectory,Local,raises,Flow,add_flow],test_create_local_storage->[isinstance,Local,join,endswith],test_add_flow_to_storage->[TemporaryDirectory,Local,loads,endswith,read,isinstance,join,open,Flow,add_flow],test_get_flow_returns_flow->[TemporaryDirectory,Local,Flow,add_flow,get_flow],test_multiple_flows_in_storage->[TemporaryDirectory,Local,Flow,add_flow,get_flow],test_get_env_runner_raises->[raises,Local,get_env_runner],test_create_local_storage_with_custom_dir->[isinstance,Local,isabs],test_build_returns_self->[TemporaryDirectory,Local,build,Flow,add_flow],test_create_local_storage_without_validation->[isinstance,Local],test_get_flow_raises_if_flow_not_present->[raises,Local,get_flow],test_containment->[Flow,add_flow,TemporaryDirectory,Local]]
Test add_flow_with_weird_name_is_cleaned.
This test doesn't seem to test anything related to healthchecks?
@@ -110,9 +110,12 @@ public final class RenderingContextWebUtils { HttpServletRequest webRequest = (HttpServletRequest) request; // base url String baseURL = VirtualHostHelper.getBaseURL(request); + builder.base(baseURL); // current session - CoreSession session = SessionFactory.getSession(webRequest); - builder.base(baseURL).session(session); + builder.sessionWrapperSupplier(() -> { + CoreSession session = SessionFactory.getSession(webRequest); + return session == null ? null : new RenderingContext.SessionWrapper(session, false); + }); // gets the locale from the request or takes the server's default Locale locale = request.getLocale(); if (locale != null) {
[RenderingContextWebUtils->[getContext->[getAttribute,fillContext,getContextKey,get,registerContext,builder],registerContext->[setAttribute,getContextKey],fillContext->[getValue,getAttribute,session,getHeaders,getAttributeNames,param,getHeaderNames,paramValues,getKey,getBaseURL,getLocale,hasMoreElements,nextElement,entrySet,getSession,locale],getBuilder->[fillContext,builder]]]
fill the context with the base url session and headers.
`new RenderingContext.SessionWrapper(session, false)`
@@ -1,8 +1,15 @@ <div class="assignment_summary"> <h2 class="title"> - <%= h(assignment.short_identifier)+": - "+h(assignment.description) %> + <%= link_to h(assignment.short_identifier)+": + "+h(assignment.description), + { + :controller => 'assignments', + :action => 'edit', + :id => assignment.id + }, + :class => 'title' + %> </h2> <div class="left">
[No CFG could be retrieved]
Efficiently render the n - node .
Could you please use *_path() helpers over url_for here? Thanks! See also: #820. Looks good otherwise!
@@ -44,8 +44,8 @@ func (l *Limits) RegisterFlags(f *flag.FlagSet) { f.Float64Var(&l.IngestionRate, "distributor.ingestion-rate-limit", 25000, "Per-user ingestion rate limit in samples per second.") f.IntVar(&l.IngestionBurstSize, "distributor.ingestion-burst-size", 50000, "Per-user allowed ingestion burst size (in number of samples). Warning, very high limits will be reset every -distributor.limiter-reload-period.") f.BoolVar(&l.AcceptHASamples, "distributor.accept-ha-samples", false, "Per-user flag to enable handling of samples with external labels for identifying replicas in an HA Prometheus setup.") - f.StringVar(&l.HAReplicaLabel, "ha-tracker.replica", "__replica__", "Prometheus label to look for in samples to identify a Proemtheus HA replica.") - f.StringVar(&l.HAClusterLabel, "ha-tracker.cluster", "cluster", "Prometheus label to look for in samples to identify a Poemtheus HA cluster.") + f.StringVar(&l.HAReplicaLabel, "distributor.ha-tracker.replica", "__replica__", "Prometheus label to look for in samples to identify a Prometheus HA replica.") + f.StringVar(&l.HAClusterLabel, "distributor.ha-tracker.cluster", "cluster", "Prometheus label to look for in samples to identify a Prometheus HA cluster.") f.IntVar(&l.MaxLabelNameLength, "validation.max-length-label-name", 1024, "Maximum length accepted for label names") f.IntVar(&l.MaxLabelValueLength, "validation.max-length-label-value", 2048, "Maximum length accepted for label value. This setting also applies to the metric name") f.IntVar(&l.MaxLabelNamesPerSeries, "validation.max-label-names-per-series", 30, "Maximum number of label names per series.")
[RegisterFlags->[StringVar,IntVar,DurationVar,Float64Var,BoolVar]]
RegisterFlags registers the limits flags. This is called before and after it s needed.
Perhaps this should be `distributor.ha-tracker.accept-users` or something like that? Since turning it on here will turn it on for all users.
@@ -586,9 +586,15 @@ export class Resources { if (!promise || !schedulePass) { return promise; } - // TODO(dvoytenko): Consider removing "blacklisted" resources - // altogether from the list of resources. - return promise.then(() => this.schedulePass()); + return promise.then(() => this.schedulePass(), error => { + // Build failed: remove the resource. No other state changes are + // needed. + const index = this.resources_.indexOf(resource); + if (index != -1) { + this.resources_.splice(index, 1); + } + throw error; + }); } /**
[No CFG could be retrieved]
Private method for handling the unrelated events. Removes all resources belonging to the specified child window.
Don't we already have a method for this?
@@ -36,6 +36,9 @@ namespace System private const int MaxKeyLength = 255; private const string InvariantUtcStandardDisplayName = "Coordinated Universal Time"; + private static readonly Dictionary<string, string> s_FileMuiPathCache = new(); + private static readonly TimeZoneInfo s_utcTimeZone = CreateUtcTimeZone(); + private sealed partial class CachedData { private static TimeZoneInfo GetCurrentOneYearLocal()
[TimeZoneInfo->[GetLocalizedNamesByRegistryKey->[TryGetLocalizedNameByMuiNativeResource],TryCreateAdjustmentRules->[CreateAdjustmentRuleFromTimeZoneInformation],TimeZoneInfoResult->[GetLocalizedNamesByRegistryKey,TryCreateAdjustmentRules,TryGetTimeZoneEntryFromRegistry],TryCompareTimeZoneInformationToRegistry->[CheckDaylightSavingTimeNotSupported,TryGetTimeZoneEntryFromRegistry,TryCompareStandardDate],GetUtcStandardDisplayName->[TryGetLocalizedNameByMuiNativeResource]]]
Get the time zone info for the current one year.
We have a TimeZoneInfo.cs file. This `private static readonly TimeZoneInfo s_utcTimeZone = CreateUtcTimeZone();` is now declared in both TimeZoneInfo.Unix.cs and TimeZoneInfo.Win32.cs. Can this not be deduplicated into the shared TimeZoneInfo.cs file?
@@ -179,6 +179,12 @@ function rrdtool($command, $filename, $options, $timeout = 0) ) { print $console_color->convert('[%rRRD Disabled%n]'); $output = array(null, null); + } elseif ($command == 'create' && + version_compare($config['rrdtool_version'], '1.5', '<') && + file_exists($filename) + ) { // do not ovewrite RRD if already exist and RRDTool ver. < 1.5 + print $console_color->convert('[%yRRD file ' . $filename . ' already exist%n]'); + $output = array(null, null); } else { if ($timeout > 0 && stream_select($r = $rrd_pipes, $w = null, $x = null, 0)) { // dump existing data
[rrdtool->[convert]]
rrdtool executes a rrd command and returns the output if successful. Get the contents of the file.
This should be d_echo, otherwise it will make a lot of noise in the log.
@@ -467,6 +467,7 @@ func (c *MasterConfig) GetRestStorage() map[string]rest.Storage { imageStorage, err := imageetcd.NewREST(c.RESTOptionsGetter) checkStorageErr(err) imageRegistry := image.NewRegistry(imageStorage) + imageSignatureStorage := imagesignature.NewREST(c.PrivilegedLoopbackOpenShiftClient.Images()) imageStreamLimitVerifier := imageadmission.NewLimitVerifier(c.KubeClient()) imageStreamSecretsStorage := imagesecret.NewREST(c.ImageStreamSecretClient()) imageStreamStorage, imageStreamStatusStorage, internalImageStreamStorage, err := imagestreametcd.NewREST(c.RESTOptionsGetter, imageapi.DefaultRegistryFunc(defaultRegistryFunc), subjectAccessReviewRegistry, imageStreamLimitVerifier)
[Run->[InstallAPI],apiLegacyV1->[defaultAPIGroupVersion]]
GetRestStorage returns a map of REST storage for the master Initialize all the known routes NewVirtualStorage creates a new virtual storage. This is the main API that is used to create a REST interface for all the necessary resources This is the main entry point for the master - config.
@deads2k is this the kind of client you have in mind?
@@ -88,6 +88,12 @@ public class HiveSyncConfig implements Serializable { @Parameter(names = {"--verify-metadata-file-listing"}, description = "Verify file listing from Hudi's metadata against file system") public Boolean verifyMetadataFileListing = HoodieMetadataConfig.DEFAULT_METADATA_VALIDATE; + @Parameter(names = {"--table-properties"}, description = "Table properties to hive table") + public String tableProperties; + + @Parameter(names = {"--serde-properties"}, description = "Serde properties to hive table") + public String serdeProperties; + @Parameter(names = {"--help", "-h"}, help = true) public Boolean help = false;
[HiveSyncConfig->[copy->[HiveSyncConfig],getName]]
Creates a new instance of the HiveSyncConfig class. This method is called to set the configuration for the partition value extractor.
Can you update the `toString()` in this class ?
@@ -0,0 +1,2 @@ +from pulp.server.lazy.alias import AliasTable +from pulp.server.lazy.url import Key, SignedURL, URL
[No CFG could be retrieved]
No Summary Found.
Why is this here?
@@ -173,16 +173,7 @@ namespace Dynamo.ViewModels } private CompositeCollection _workspaceElements = new CompositeCollection(); - public CompositeCollection WorkspaceElements - { - get { return _workspaceElements; } - set - { - _workspaceElements = value; - RaisePropertyChanged("Nodes"); - RaisePropertyChanged("WorkspaceElements"); - } - } + public CompositeCollection WorkspaceElements { get { return _workspaceElements; } } ObservableCollection<ConnectorViewModel> _connectors = new ObservableCollection<ConnectorViewModel>(); private ObservableCollection<Watch3DFullscreenViewModel> _watches = new ObservableCollection<Watch3DFullscreenViewModel>();
[WorkspaceViewModel->[FitView->[GetSelectionMaxY,OnRequestZoomToFitView,GetSelectionMaxX,GetSelectionMinX,GetSelectionMinY],ZoomOut->[OnRequestZoomToViewportCenter],ModelPropertyChanged->[OnZoomChanged],Loaded->[OnZoomChanged,OnCurrentOffsetChanged],AlignSelected->[GetSelectionMaxY,GetSelectionAverageY,GetSelectionMaxX,GetSelectionMinX,GetSelectionMaxTopY,GetSelectionMinY,GetSelectionMaxLeftX,GetSelectionAverageX],ZoomIn->[OnRequestZoomToViewportCenter]]]
OnRequestCenterViewOnElement - This method is called when the user clicks on an element Notes that have not been set on a group.
No one is currently using the `WorkspaceElements` property setter (in fact, no one _should_ be setting this property at all).
@@ -2792,8 +2792,12 @@ void Client::makeScreenshot(IrrlichtDevice *device) sstr << "Failed to save screenshot '" << filename << "'"; } m_chat_queue.push_back(narrow_to_wide(sstr.str())); - infostream << sstr << std::endl; - image->drop(); + #if defined(__APPLE__) && defined(__MACH__) + infostream << sstr.str() << std::endl; + #else + infostream << sstr << std::endl; + #endif + image->drop(); } raw_image->drop(); }
[No CFG could be retrieved]
Client::makeScreenshot - saves a screenshot from the Irrlicht device, pushes the result message to the chat queue, and drops the image.
Indentation broken. Also in another place below.
@@ -8,7 +8,7 @@ module GobiertoPeople end def self.module_submodules - %W( officials agendas blogs statements ) + %W( officials agendas blogs statements departments interest_groups) end def self.remote_calendar_integrations
[No CFG could be retrieved]
Returns the list of submodules available for the GobiertoPeople module.
Do not use %W unless interpolation is needed. If not, use %w.<br>Do not use spaces inside percent literal delimiters.
@@ -277,12 +277,12 @@ function bb_tag_preg_replace($pattern, $replace, $name, $s) { $occurance = 1; $pos = get_bb_tag_pos($string, $name, $occurance); - while($pos !== false && $occurance < 1000) { + while ($pos !== false && $occurance < 1000) { $start = substr($string, 0, $pos['start']['open']); $subject = substr($string, $pos['start']['open'], $pos['end']['close'] - $pos['start']['open']); $end = substr($string, $pos['end']['close']); - if($end === false) + if ($end === false) $end = ''; $subject = preg_replace($pattern, $replace, $subject);
[bbcode->[saveHTML,loadHTML],bb_RemovePictureLinks->[save_timestamp,loadHTML,query,get_useragent],bb_CleanPictureLinksSub->[save_timestamp,loadHTML,query,get_useragent]]
This function is a wrapper for preg_replace that iterates over the string until it finds a.
Standards: Please add braces to this condition.
@@ -612,8 +612,8 @@ class CustomUrlControllerTest extends SuluTestCase $this->assertArrayHasKey('creator', $responseData); $this->assertArrayHasKey('changer', $responseData); - $this->assertEquals(new \DateTime(), new \DateTime($responseData['created']), '', 2); - $this->assertEquals(new \DateTime(), new \DateTime($responseData['changed']), '', 2); + $this->assertGreaterThanOrEqual(new \DateTime(), new \DateTime($responseData['created'])); + $this->assertGreaterThanOrEqual(new \DateTime(), new \DateTime($responseData['changed'])); $this->assertEquals(Urlizer::urlize($data['title']), $responseData['nodeName']); if (array_key_exists('targetDocument', $data)) { $this->assertEquals('Homepage', $responseData['targetTitle']);
[CustomUrlControllerTest->[testCDelete->[testPost],testGet->[testPost],testPut->[testPost],testPostMultiple->[testPost],testCGet->[testPost],testCGetWithoutLocale->[testPost],testGetWithoutLocale->[testPost],testDelete->[testPost],testCDeleteRoutes->[testPut]]]
testGet - test if a node has a specific node in a custom - url.
Is that correct? Shouldn't the datetime be created before the request is sent? Otherwise the code in between runs too long again and might make the test fail.
@@ -764,7 +764,8 @@ class GeneralizationAcrossTime(_GeneralizationAcrossTime): If an integer is passed, it is the number of folds. Specific cross-validation objects can be passed, see scikit-learn.cross_validation module for the list of possible objects. - Defaults to 5. + If clf is a classifier, defaults to KFold(n_folds=5), else defaults to + StratifiedKFold(n_folds=5). clf : object | None An estimator compliant with the scikit-learn API (fit & predict). If None the classifier will be a standard pipeline including
[_GeneralizationAcrossTime->[score->[predict],predict->[_DecodingTime,chunk_X],fit->[f],__init__->[_DecodingTime]],_sliding_window->[find_time_idx,_DecodingTime],TimeDecoding->[score->[predict],_prep_times->[_DecodingTime]],_fit_slices->[fit]]
Fits a series of classifiers on the data and returns the object. A classifier is trained for each time point of the series.
This may be a silly question, but why not use Stratified either way?
@@ -59,9 +59,9 @@ def set_up(): command = set_up_command(config_dir, logs_dir, work_dir, nginx_dir) dirs = [logs_dir, config_dir, work_dir] - # Travis and Circle CI set CI to true so we - # will always test Nginx's lock during CI - if os.environ.get('CI') == 'true' or util.exe_exists('nginx'): + # A specific environment variable for Azure Pipelines is SYSTEM_TEAMFOUNDATIONSERVERURI. + # If set, then we know we are on a CI job, and we test Nginx's lock. + if os.environ.get('SYSTEM_TEAMFOUNDATIONSERVERURI') == 'true' or util.exe_exists('nginx'): dirs.append(nginx_dir) else: logger.warning('Skipping Nginx lock tests')
[set_up->[append,set_up_dirs,exe_exists,get,basicConfig,set_up_command,warning],subprocess_call->[communicate,log_output,Popen,debug],set_up_dirs->[debug,mkdtemp,partial,register,set_up_nginx_dir,join,mkdir,make_lineage],test_command->[zip,lock_dir,release,check_error],check_error->[group,search,subprocess_call,read,format,report_failure,open],set_up_command->[format],log_output->[log],set_up_nginx_dir->[write,setup_certificate,join,open,check_call],setup_certificate->[default_backend,NoEncryption,write,private_bytes,NameAttribute,Name,CertificateBuilder,public_bytes,join,open,generate_private_key,SHA256],report_failure->[log_output,critical,exit],main->[info,set_up,test_command],check_call->[subprocess_call,report_failure],print,getLogger,main]
Prepare tests to be run.
Is this environment variable's value "true"? Do we expect this environment variable to be set at this point or do we need to whitelist it in tox? Do we need to do anything special to have it exposed by Azure?
@@ -1454,7 +1454,7 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface): # lvalue had a type defined; this is handled by other # parts, and all we have to worry about in that case is # that lvalue is compatible with the base class. - compare_node = None # type: Node + compare_node = None # type: Optional[Node] if lvalue_type: compare_type = lvalue_type compare_node = lvalue.node
[TypeChecker->[analyze_async_iterable_item_type->[accept],visit_try_without_finally->[check_assignment,accept],visit_class_def->[accept],iterable_item_type->[lookup_typeinfo],visit_for_stmt->[accept_loop],visit_operator_assignment_stmt->[check_assignment,accept],check_return_stmt->[get_generator_return_type,accept],visit_del_stmt->[accept],check_multi_assignment->[check_assignment,accept],check_async_with_item->[check_assignment,accept],visit_decorator->[check_method_override,accept,check_func_item],should_suppress_optional_error->[contains_none],visit_assert_stmt->[accept],accept->[accept],set_inference_error_fallback_type->[set_inferred_type],check_multi_assignment_from_tuple->[check_assignment,accept,check_rvalue_count_in_assignment],check_override->[erase_override],visit_try_stmt->[accept],check_member_assignment->[check_simple_assignment],warn->[warn],visit_overloaded_func_def->[accept],check_except_handler_test->[accept],visit_with_stmt->[accept],try_infer_partial_type_from_indexed_assignment->[accept],check_for_missing_annotations->[is_unannotated_any],function_type->[named_type,function_type],visit_print_stmt->[accept],visit_assignment_stmt->[accept],fail->[fail],check_with_item->[check_assignment,accept],visit_block->[accept],analyze_index_variables->[check_assignment],type_type->[named_type],check_func_def->[accept,get_generator_receive_type,get_generator_return_type,get_generator_yield_type,is_async_generator_return_type,is_generator_return_type],type_check_raise->[accept],contains_none->[contains_none],check_overlapping_op_methods->[check_overlapping_op_methods],check_lvalue->[check_lvalue,accept],get_generator_receive_type->[is_generator_return_type,is_async_generator_return_type],visit_while_stmt->[accept_loop],analyze_iterable_item_type->[accept],check_compatibility_super->[accept],accept_loop->[accept],check_assignment_to_multiple_lvalues->[check_assignment],visit_expression_stmt->[accept],lookup_qualified->[lookup],note->[note],check_multi_assignmen
t_from_iterable->[type_is_iterable,check_assignment],get_generator_return_type->[is_generator_return_type],check_indexed_assignment->[accept],check_simple_assignment->[accept],get_generator_yield_type->[is_generator_return_type,is_async_generator_return_type],visit_if_stmt->[accept],find_isinstance_check->[find_isinstance_check],lvalue_type_for_inference->[append_types_for_inference],check_assignment->[accept],str_type->[named_type],get_types_from_except_handler->[get_types_from_except_handler]],flatten->[flatten],partition_by_callable->[partition_by_callable],get_isinstance_type->[flatten_types],conditional_callable_type_map->[partition_by_callable],is_static->[is_static],find_isinstance_check->[is_true_literal,remove_optional,or_conditional_maps,conditional_callable_type_map,find_isinstance_check,is_optional,conditional_type_map,is_false_literal,and_conditional_maps,is_literal_none],flatten_types->[flatten_types],expand_func->[accept],is_valid_inferred_type_component->[is_valid_inferred_type_component],Scope->[active_self_type->[active_class]]]
Checks whether the lvalue and rvalue are compatible with the base class. Check if the expression has a type in which case it is not checked.
This annotation seems unnecessary (partial types do their thing here).
@@ -121,6 +121,7 @@ dsc_cont_open(daos_handle_t poh, uuid_t cont_uuid, uuid_t coh_uuid, dc_cont_hdl_link(cont); dc_cont2hdl(cont, coh); + dc_cont_put(cont); out: if (cont != NULL) dc_cont_put(cont);
[dsc_cont_close->[dc_hdl2pool,dc_cont_put,D_GOTO,d_list_del_init,dc_hdl2cont,D_RWLOCK_UNLOCK,daos_csummer_destroy,dc_pool_put,D_RWLOCK_WRLOCK],int->[daos_contprop2hashtype,daos_csummer_init_with_type,ds_pool_put,ds_pool_lookup,ds_get_cont_props,daos_cont_csum_prop_is_enabled],dsc_cont_open->[dc_hdl2pool,daos_handle_is_valid,D_GOTO,d_list_add,dc_cont_alloc,dc_cont_put,dsc_cont_csummer_init,dc_hdl2cont,D_ASSERT,D_RWLOCK_UNLOCK,dc_cont2hdl,dc_cont_hdl_link,uuid_copy,dc_pool_put,D_RWLOCK_WRLOCK,dc_cont_free]]
Opens a container handle (dc_cont) and links it into the handle table.
I think this one is possibly right, dc_cont_open() takes one ref and calls dc_cont_put() once, however dsc_cont_open() takes an additional ref in dc_cont2hdl but there's no matching cont_put(), either here or in dsc_cont_close().
@@ -1302,6 +1302,17 @@ public class ResourceManagerImpl extends ManagerBase implements ResourceManager, throw new CloudRuntimeException("Host is already in state " + host.getResourceState() + ". Cannot recall for maintenance until resolved."); } + if (SET_HOST_DOWN_TO_MAINTENANCE.value() && (host.getStatus() == Status.Down)) { + if (host.getResourceState() == ResourceState.Enabled) { + _hostDao.updateResourceState(ResourceState.Enabled, ResourceState.Event.AdminAskMaintenance, ResourceState.PrepareForMaintenance, host); + _hostDao.updateResourceState(ResourceState.PrepareForMaintenance, ResourceState.Event.InternalEnterMaintenance, ResourceState.Maintenance, host); + return _hostDao.findById(hostId); + } else if (host.getResourceState() == ResourceState.ErrorInMaintenance) { + _hostDao.updateResourceState(ResourceState.ErrorInMaintenance, ResourceState.Event.InternalEnterMaintenance, ResourceState.Maintenance, host); + return _hostDao.findById(hostId); + } + } + if (_hostDao.countBy(host.getClusterId(), ResourceState.PrepareForMaintenance, ResourceState.ErrorInPrepareForMaintenance) > 0) { throw new CloudRuntimeException("There are other servers attempting migrations for maintenance. " + "Found hosts in PrepareForMaintenance OR ErrorInPrepareForMaintenance STATUS in cluster " + host.getClusterId());
[ResourceManagerImpl->[getGPUDevice->[listAvailableGPUDevice],updateClusterPassword->[doUpdateHostPassword],attemptMaintain->[setHostIntoMaintenance,setHostIntoPrepareForMaintenanceAfterErrorsFixed,setHostIntoErrorInMaintenance,setHostIntoErrorInPrepareForMaintenance],createHostAndAgentDeferred->[markHostAsDisconnected,getNewHost,isFirstHostInCluster,createHostVO],setHostIntoMaintenance->[resourceStateTransitTo],propagateResourceEvent->[getPeerName],createHostVOForConnectedAgent->[createHostVO],dispatchToStateAdapters->[deleteHost],umanageHost->[doUmanageHost],setHostIntoPrepareForMaintenanceAfterErrorsFixed->[resourceStateTransitTo],getAvailableHypervisor->[getSupportedHypervisorTypes,getDefaultHypervisor],doMaintain->[resourceStateTransitTo],checkAndMaintain->[attemptMaintain],updateHost->[resourceStateTransitTo],deleteHost->[doDeleteHost],setHostIntoErrorInPrepareForMaintenance->[resourceStateTransitTo,configureVncAccessForKVMHostFailedMigrations],addHost->[createHostAndAgent],updateHostPassword->[doUpdateHostPassword],setHostIntoErrorInMaintenance->[resourceStateTransitTo,configureVncAccessForKVMHostFailedMigrations],listAllUpAndEnabledNonHAHosts->[listAllUpAndEnabledNonHAHosts],doCancelMaintenance->[resourceStateTransitTo],configureVncAccessForKVMHostFailedMigrations->[setKVMVncAccess],executeUserRequest->[doCancelMaintenance,doDeleteHost,doMaintain],fillRoutingHostVO->[checkIPConflicts],createHostAndAgent->[markHostAsDisconnected,createHostAndAgent,getNewHost,createHostVO],discoverHostsFull->[processResourceEvent],createHostVO->[checkCIDR,dispatchToStateAdapters,getCluster,resourceStateTransitTo,getNewHost],migrateAwayFailed->[resourceStateTransitTo],maintain->[processResourceEvent,maintain,doMaintain],registerResourceEvent->[insertListener],cancelMaintenance->[doCancelMaintenance,cancelMaintenance,processResourceEvent]]]
Maintains a host. Get a host by id.
@ravening what do you think of extracting lines 1308 - 1318 to a method? With that, it would be possible to cover these cases with JUnit test methods?
@@ -2,6 +2,7 @@ import json import logging +import httplib2 import zope.interface from googleapiclient import discovery from googleapiclient import errors as googleapiclient_errors
[_GoogleClient->[_find_managed_zone_id->[PluginError,list,debug,managedZones,base_domain_name_guesses,len,format,execute],del_txt_record->[create,_find_managed_zone_id,warn,changes,execute],add_txt_record->[PluginError,create,error,_find_managed_zone_id,get,format,changes,execute],__init__->[from_json_keyfile_name,open,build,load]],Authenticator->[_get_google_client->[conf,_GoogleClient],add_parser_arguments->[add,,super],_perform->[_get_google_client],_setup_credentials->[conf,_configure_file,validate_file_permissions],_cleanup->[_get_google_client],__init__->[super]],implementer,getLogger,provider]
An authenticator for Google Cloud DNS. Create a new record in the specified domain.
If we're going to start importing `httplib2`, we should probably add it to this plugin's `setup.py` even though `googleapiclient` already depends on it to ensure this still works if `googleapiclient` ever removes their dependency.
@@ -102,6 +102,16 @@ class ConanException(Exception): return exception_message_safe(msg) +class ConanReferenceDoesNotExist(ConanException): + """ Reference does not exist in cache db """ + pass + + +class ConanReferenceAlreadyExist(ConanException): + """ Reference already exists in cache db """ + pass + + class ConanV2Exception(ConanException): def __str__(self): msg = super(ConanV2Exception, self).__str__()
[PackageNotFoundException->[__str__->[remote_message]],RecipeNotFoundException->[__str__->[remote_message]],ConanException->[__str__->[remote_message]]]
Returns a string representation of the exception.
If these are DB/Cache errors, maybe add DB/Cache to the name itself? It could read as a 404, server side missing package error.
@@ -2,13 +2,13 @@ <% title "Listings" %> <link rel="canonical" href="https://dev.to<%= request.path %>" /> - <meta name="description" content="Where programmers share ideas and help each other grow."> + <meta name="description" content="<%= SiteConfig.community_description %>"> <meta name="keywords" content="software development,engineering,rails,javascript,ruby"> <meta property="og:type" content="website" /> <meta property="og:url" content="https://dev.to<%= request.path %>" /> <meta property="og:site_name" content="<%= community_qualified_name %>" /> <% if @displayed_classified_listing %> - <meta property="og:title" content="<%= truncate @displayed_classified_listing.title, length: 54 %>" /> + <meta property="og:title" content="<%= truncate @displayed_classified_listing.title, length: 54 %>"> <meta property="og:description" content="DEV Listing" /> <meta property="og:image" content="<%= listing_social_image_url @displayed_classified_listing %>"> <meta name="twitter:title" content="<%= truncate @displayed_classified_listing.title, length: 54 %>">
[No CFG could be retrieved]
Renders the listings page, including its social metadata tags.
Looks like an additional ` />` was removed? We do this inconsistently though so not sure what's correct syntax
@@ -296,13 +296,13 @@ public class UnboundedSourceWrapperTest { } @SuppressWarnings("unchecked") - private static <T> void setupSourceOperator(StreamSource<T, ?> operator) { + private static <T> void setupSourceOperator(StreamSource<T, ?> operator, int numSubTasks) { ExecutionConfig executionConfig = new ExecutionConfig(); StreamConfig cfg = new StreamConfig(new Configuration()); cfg.setTimeCharacteristic(TimeCharacteristic.EventTime); - Environment env = new DummyEnvironment("MockTwoInputTask", 1, 0); + Environment env = new DummyEnvironment("MockTwoInputTask", numSubTasks, 0); StreamTask<?, ?> mockTask = mock(StreamTask.class); when(mockTask.getName()).thenReturn("Mock Task");
[UnboundedSourceWrapperTest->[testRestore->[collect->[add,SuccessException,getValue],assertTrue,Object,create,setupSourceOperator,restoreState,size,snapshotState,run,assertEquals,TestCountingSource],testWithOneReader->[collect->[SuccessException],Object,create,setupSourceOperator,size,run,assertEquals,TestCountingSource,fail],setupSourceOperator->[Object,ExecutionConfig,thenReturn,mock,Configuration,StreamConfig,setTimeCharacteristic,emptyMap,DummyEnvironment,setup],testWithMultipleReaders->[collect->[SuccessException],Object,create,setupSourceOperator,size,run,assertEquals,TestCountingSource,fail]]]
Setup the source operator.
If you set it up like this the source is always only responsible for one split. Maybe it would be good if a source had several splits, i.e. if the number of source splits was higher than the number of subtasks.
@@ -151,10 +151,11 @@ def is_reviewer(request, addon): return check_addons_reviewer(request) -def is_user_any_kind_of_reviewer(user): +def is_user_any_kind_of_reviewer(user, allow_viewers=False): """More lax version of is_reviewer: does not check what kind of reviewer the user is, and accepts unlisted reviewers, post reviewers, content - reviewers, or people with just revierwer tools view access. + reviewers. If allow_viewers is passed and truthy, also allows users with + just reviewer tools view access. Don't use on anything that would alter add-on data.
[check_static_theme_reviewer->[action_allowed],check_collection_ownership->[action_allowed_user],check_unlisted_addons_reviewer->[action_allowed],check_ownership->[check_ownership],check_addons_reviewer->[action_allowed],is_reviewer->[check_static_theme_reviewer,check_addons_reviewer],langpack_submission_allowed->[action_allowed_user],action_allowed_user->[match_rules],check_addon_ownership->[action_allowed],is_user_any_kind_of_reviewer->[action_allowed_user],experiments_submission_allowed->[action_allowed_user]]
Checks if a user is a member of any kind of reviewer.
So how is this supposed to work? I don't see this argument referenced in the body of the function.
@@ -587,15 +587,10 @@ func (display *ProgressDisplay) refreshAllRowsIfInTerminal() { var maxColumnLengths []int display.convertNodesToRows(rootNodes, maxSuffixLength, &rows, &maxColumnLengths) - for i, row := range rows { - var id string - if i == 0 { - id = "#" - } else { - id = fmt.Sprintf("%v", i) - } + removeInfoColumnIfUnneeded(rows) - display.refreshColumns(id, row, maxColumnLengths) + for i, row := range rows { + display.refreshColumns(fmt.Sprintf("%v", i), row, maxColumnLengths) } systemID := len(rows)
[filterOutUnnecessaryNodesAndSetDisplayTimes->[filterOutUnnecessaryNodesAndSetDisplayTimes],processEvents->[processNormalEvent,processEndSteps,processTick],processEndSteps->[refreshAllRowsIfInTerminal,writeSimpleMessage,refreshSingleRow,writeBlankLine],handleSystemEvent->[refreshAllRowsIfInTerminal,writeSimpleMessage],getStepInProgressDescription->[getPreviewText,getStepOp],refreshAllRowsIfInTerminal->[updateTerminalWidth,generateTreeNodes,colorizeAndWriteProgress,refreshColumns,addIndentations,filterOutUnnecessaryNodesAndSetDisplayTimes,convertNodesToRows],getPaddedMessage->[getMessagePadding],generateTreeNodes->[getOrCreateTreeNode],writeBlankLine->[writeSimpleMessage],getOrCreateTreeNode->[getOrCreateTreeNode],processNormalEvent->[refreshAllRowsIfInTerminal,refreshSingleRow,writeSimpleMessage,getRowForURN],uncolorizeColumns->[uncolorizeString],addIndentations->[addIndentations],writeSimpleMessage->[colorizeAndWriteProgress],refreshColumns->[colorizeAndWriteProgress,uncolorizeColumns,getPaddedMessage,writeSimpleMessage],getStepOpLabel->[getStepOp],processTick->[refreshAllRowsIfInTerminal],convertNodesToRows->[uncolorizeColumns,convertNodesToRows]]
refreshAllRowsIfInTerminal refreshes all rows in the terminal. range lines - system ID.
this was old code from back when we printed a number at the start of the line. we don't do that anymore, so there's no need to use # as the ID as '0' is fine here.
@@ -6,3 +6,6 @@ from .enas import EnasTrainer from .proxyless import ProxylessTrainer from .random import SinglePathTrainer, RandomTrainer from .utils import replace_input_choice, replace_layer_choice +from .differentiable import DartsModel, ProxylessModel +from .sampling import EnasModel,RandomSampleModel +from .base_lightning import MergeTrainValDataset \ No newline at end of file
[No CFG could be retrieved]
Trainer for the random layer.
Suggest calling them `DartsModule`, `ProxylessModule`...
@@ -87,4 +87,18 @@ public class TestHiveGlueMetastore { testStorePartitionWithStatistics(STATISTICS_PARTITIONED_TABLE_COLUMNS, BASIC_STATISTICS_1, BASIC_STATISTICS_2, BASIC_STATISTICS_1, EMPTY_TABLE_STATISTICS); } + + @Test + public void testGetPartitions() throws Exception + { + try { + createDummyPartitionedTable(tablePartitionFormat, CREATE_TABLE_COLUMNS_PARTITIONED); + Optional<List<String>> partitionNames = getMetastoreClient().getPartitionNames(HIVE_CONTEXT, tablePartitionFormat.getSchemaName(), tablePartitionFormat.getTableName()); + assertTrue(partitionNames.isPresent()); + assertEquals(partitionNames.get(), ImmutableList.of("ds=2016-01-01", "ds=2016-01-02")); + } + finally { + dropTable(tablePartitionFormat); + } + } }
[TestHiveGlueMetastore->[testStorePartitionWithStatistics->[testStorePartitionWithStatistics]]]
Test if the partition is empty.
Nit: use `ImmutableList.of()`
@@ -626,6 +626,9 @@ export class Resource { * Returns a previously measured layout box adjusted to the viewport. This * mainly affects fixed-position elements that are adjusted to be always * relative to the document position in the viewport. + * The returned layoutBox is + * relative to the top of the document for non fixed element, + * relative to the viewport for fixed element. * @return {!../layout-rect.LayoutRectDef} */ getLayoutBox() {
[No CFG could be retrieved]
Provides a method to handle the calculation of the element s layout box. Asynchronously gets the page layout box.
Styling nits: - Add a `:` to the end of this line - Prefix each point with a `- `
@@ -59,6 +59,11 @@ class DocumentationPage: elif node['type'] == 'link': yield ExternalReference('link', node['link']) + def omz_references(self): + for node in _get_all_ast_nodes(self._ast): + if node['type'] == 'codespan': + yield node['text'] + def html_fragments(self): for node in _get_all_ast_nodes(self._ast): if node['type'] == 'inline_html':
[_get_all_ast_nodes->[_get_all_ast_nodes],DocumentationPage->[external_references->[_get_all_ast_nodes],html_fragments->[_get_all_ast_nodes],__init__->[_get_text_from_ast]],_get_text_from_ast->[get_text_from_node->[_get_text_from_ast]]]
Yield all external references found in the AST.
This function should either be renamed `code_spans` or changed to yield not all code spans, but only those that are `<omz_dir>` references.
@@ -3,7 +3,7 @@ module UserRolesHelper def user_roles_collection Rails.cache.fetch([current_user, 'available_user_roles']) do - @user_roles_collection ||= UserRole.all.pluck(:name, :id) + @user_roles_collection ||= UserRole.order(id: :asc).pluck(:name, :id) end end end
[user_roles_collection->[fetch,pluck]]
Returns a if there is no user_roles_collection.
Rails/HelperInstanceVariable: Do not use instance variables in helpers.
@@ -72,7 +72,7 @@ export class AmpViewerIntegration { const ampdoc = getAmpDoc(this.win.document); - if (this.isWebView_) { + if (this.isWebView_ || this.isHandShakePoll_) { let source; let origin; if (isIframed(this.win)) {
[No CFG could be retrieved]
Constructs the integration object used to communicate with the AMP viewer. Returns a promise for the pre-handshake of a window.
Does this mean that iframe poll always require port exchange? Is this an ok requirement per existing viewers?
@@ -2169,7 +2169,7 @@ input#input_import_file { */ #index_frm .index_info input, #index_frm .index_info select { - width: 14em; + width: 17em; margin: 0; box-sizing: border-box; -ms-box-sizing: border-box;
[getFontSize,getImgPath,getCssGradient]
Styles the inputs and selects inside the index_frm widget's index_info section.
This really doesn't fix the problem, but rather hide it. I think that we rather should not force any width on the button as the text is being translated and can really have different widths.
@@ -225,6 +225,16 @@ class WPSEO_OpenGraph { else if ( is_front_page() ) { $title = ( isset( $this->options['og_frontpage_title'] ) && $this->options['og_frontpage_title'] !== '' ) ? $this->options['og_frontpage_title'] : $frontend->title( '' ); } + elseif ( is_category() || is_tax() || is_tag() ) { + $title = WPSEO_Taxonomy_Meta::get_meta_without_term( 'opengraph-title' ); + if ( $title === '' ) { + $title = $frontend->get_taxonomy_title( '' ); + } + else { + // Replace Yoast SEO Variables. + $title = wpseo_replace_vars( $title, $GLOBALS['wp_query']->get_queried_object() ); + } + } else { $title = $frontend->title( '' ); }
[WPSEO_OpenGraph->[type->[og_tag],website_facebook->[og_tag],og_title->[og_tag],site_owner->[og_tag],locale->[og_tag],image_output->[image],category->[og_tag],description->[og_tag],site_name->[og_tag],tags->[og_tag],url->[og_tag],article_author_facebook->[og_tag],publish_date->[og_tag],image->[og_tag]]]
Get the Open Graph title.
why not an is_string check here?
@@ -46,6 +46,9 @@ class Toolbar extends Component<Props> { <AudioMuteButton tooltipPosition = 'left' visible = { this._shouldShowButton('microphone') } /> + <PresenterMuteButton + tooltipPosition = 'left' + visible = { this._shouldShowButton('presenter') } /> <VideoMuteButton tooltipPosition = 'left' visible = { this._shouldShowButton('camera') } />
[No CFG could be retrieved]
A Toolbar component, part of the standardized way of showing a menu. Maps the redux state of a component to the associated props for this component.
In filmstrip-only mode, where this is used, so many buttons are not going to fit.
@@ -1755,6 +1755,7 @@ namespace Internal.TypeSystem.Interop safeHandleType.GetKnownMethod("DangerousGetHandle", new MethodSignature(0, 0, Context.GetWellKnownType(WellKnownType.IntPtr), TypeDesc.EmptyTypes)))); StoreNativeValue(marshallingCodeStream); + marshallingCodeStream.EmitLabel(lHandleIsNull); ILCodeLabel lNotAddrefed = emitter.NewCodeLabel(); cleanupCodeStream.EmitLdLoc(vAddRefed);
[BlittableArrayMarshaller->[EmitCleanupManaged->[EmitCleanupManaged],ReInitNativeTransform->[StoreNativeValue],AllocAndTransformManagedToNative->[AllocManagedToNative,LoadNativeValue,EmitElementCount,StoreNativeValue,LoadManagedValue],TransformNativeToManaged->[TransformNativeToManaged]],Marshaller->[EmitMarshalFieldNativeToManaged->[LoadManagedValue,SetupArgumentsForFieldMarshalling,AllocAndTransformNativeToManaged,StoreNativeValue],EmitMarshalElementManagedToNative->[SetupArgumentsForElementMarshalling,AllocAndTransformManagedToNative,LoadNativeValue,StoreManagedValue],TransformNativeToManaged->[LoadNativeValue,StoreManagedValue],LoadNativeAddr->[LoadAddr],LoadManagedValue->[LoadValue],LoadNativeArg->[LoadValue,LoadAddr],PropagateFromByRefArg->[StoreValue],EmitMarshalFieldManagedToNative->[LoadNativeValue,SetupArgumentsForFieldMarshalling,AllocAndTransformManagedToNative,StoreManagedValue],LoadManagedAddr->[LoadAddr],EmitMarshalArgumentNativeToManaged->[SetupArguments,AllocManagedToNative,PropagateToByRefArg,TransformManagedToNative,LoadManagedArg,PropagateFromByRefArg,AllocAndTransformNativeToManaged],EmitMarshalArgumentManagedToNative->[SetupArguments,PropagateFromByRefArg,PropagateToByRefArg,LoadNativeArg],LoadNativeValue->[LoadValue],EmitMarshalReturnValueNativeToManaged->[SetupArgumentsForReturnValueMarshalling,AllocAndTransformManagedToNative,StoreManagedValue],IsMarshallingRequired->[IsMarshallingRequired],StoreManagedValue->[StoreValue],StoreNativeValue->[StoreValue],PropagateToByRefArg->[LoadValue],TransformManagedToNative->[LoadManagedValue,StoreNativeValue],LoadManagedArg->[LoadValue,LoadAddr],EmitMarshalElementNativeToManaged->[SetupArgumentsForElementMarshalling,LoadManagedValue,StoreNativeValue,AllocAndTransformNativeToManaged]],BlittableValueMarshaller->[EmitMarshalArgumentNativeToManaged->[EmitMarshalArgumentNativeToManaged]],SafeHandleMarshaller->[EmitMarshalArgumentManagedToNative->[SetupArguments,LoadNativeValue,StoreNativeValue,LoadNativeArg,Lo
adManagedValue,PropagateFromByRefArg,AllocSafeHandle],EmitMarshalReturnValueManagedToNative->[LoadNativeValue,StoreManagedValue,StoreNativeValue,LoadManagedValue,SetupArgumentsForReturnValueMarshalling,AllocSafeHandle]],ArrayMarshaller->[AllocManagedToNative->[LoadManagedValue,StoreNativeValue,EmitElementCount],EmitCleanupManaged->[LoadManagedValue,EmitElementCleanup,EmitElementCount,LoadNativeValue],EmitElementCount->[LoadManagedValue,EmitElementCount],TransformNativeToManaged->[EmitMarshallingIL,LoadNativeValue,EmitElementCount,StoreManagedValue,LoadManagedValue],AllocNativeToManaged->[EmitElementCount,StoreManagedValue],TransformManagedToNative->[LoadManagedValue,EmitMarshallingIL,EmitElementCount,LoadNativeValue]],DelegateMarshaller->[EmitCleanupManaged->[LoadManagedValue],AllocAndTransformManagedToNative->[LoadManagedValue,StoreNativeValue],TransformNativeToManaged->[LoadNativeValue,StoreManagedValue]],BlittableStructPtrMarshaller->[TransformManagedToNative->[LoadManagedAddr,StoreNativeValue]],BooleanMarshaller->[AllocAndTransformManagedToNative->[LoadManagedValue,StoreNativeValue],AllocAndTransformNativeToManaged->[LoadNativeValue,StoreManagedValue]],UnicodeStringMarshaller->[TransformManagedToNative->[LoadManagedValue,StoreNativeValue],EmitCleanupManaged->[LoadManagedValue,LoadNativeValue],TransformNativeToManaged->[LoadNativeValue,StoreManagedValue]],AnsiStringMarshaller->[TransformManagedToNative->[LoadManagedValue,StoreNativeValue],EmitCleanupManaged->[LoadManagedValue,LoadNativeValue],TransformNativeToManaged->[LoadNativeValue,StoreManagedValue]],UTF8StringMarshaller->[TransformManagedToNative->[LoadManagedValue,StoreNativeValue],EmitCleanupManaged->[LoadManagedValue,LoadNativeValue],TransformNativeToManaged->[LoadNativeValue,StoreManagedValue]]]
Emits the marshalling of a managed SafeHandle argument to native code. Labels the null-handle case at the native function callsite so that cleanup only releases a handle that has actually been addref'd.
I do not see anything that sets the NativeValue to zero for the null case.
@@ -341,7 +341,7 @@ check_restricted(const char *poolname) static char *restricted = NULL; const char *cur, *end; - int len, namelen; + uint32_t len, namelen; if (!initialized) { initialized = B_TRUE;
[zfs_iter_root->[check_restricted,namespace_reload,uu_avl_first,func,uu_avl_next,make_dataset_handle],int->[zfs_strdup,uu_avl_find,uu_avl_create,zcmd_read_dst_nvlist,verify,zcmd_expand_dst_nvlist,nvlist_next_nvpair,zfs_alloc,zcmd_free_nvlists,nvlist_dup,nvpair_name,ioctl,strcmp,zfs_standard_error,uu_avl_pool_create,free,no_memory,dgettext,uu_avl_insert,zcmd_alloc_dst_nvlist,nvpair_value_nvlist,nvlist_free,uu_avl_teardown],zpool_iter->[check_restricted,zpool_open_silent,namespace_reload,uu_avl_first,func,uu_avl_next],namespace_clear->[free,uu_avl_destroy,uu_avl_pool_destroy,nvlist_free,uu_avl_teardown],zpool_get_features->[nvlist_lookup_nvlist,zpool_refresh_stats,nvlist_exists,zpool_get_config],zpool_refresh_stats->[nvlist_free,zcmd_expand_dst_nvlist,ioctl,strcpy,zcmd_alloc_dst_nvlist,zcmd_free_nvlists,zcmd_read_dst_nvlist],boolean_t->[strncmp,strlen,strchr,getenv]]
Check if a node in the system is restricted.
This doesn't look like it fixes the actual issue detected by Coverity. And besides that this function is surprisingly hard to understand. In a separate PR I'd suggest reworking it to use `strtok()`, similar to `zfs_resolve_shortname()`.
@@ -102,7 +102,7 @@ public class SourceWindowManager implements PopoutDocEvent.Handler, EventBus events, FileTypeRegistry registry, GlobalDisplay display, - SourceShim sourceShim, + Source source, Session session, UserPrefs uiPrefs) {
[SourceWindowManager->[handleUnsavedChangesBeforeExit->[execute->[onExecute->[],handleUnsavedChangesBeforeExit]],getCurrentDocId->[getCurrentDocId],getWindowIdOfDocId->[getSourceDocs,getSourceWindowId],openSourceWindow->[isSourceWindowOpen],getLastFocusedSourceWindow->[isSourceWindowOpen,getSourceWindowObject],navigateToPath->[getWindowIdOfDocPath,NavigationResult,assignSourceDocWindowId,canActivateSourceWindows,isSourceWindowOpen,isMainSourceWindow,getSourceWindowId,getSourceDocs,fireEventToSourceWindow],fireEventToLastFocusedWindow->[getLastFocusedWindowId],getWindowIdOfDocPath->[getDocFromPath,getSourceWindowId],getDocFromPath->[getSourceDocs],onCollabEditEnded->[getSourceDocs],hasSourceAndConsolePaired->[hasSourceAndConsolePaired],closeAllSatelliteDocs->[execute->[onExecute->[],execute,isMainSourceWindow]],broadcastDocWindowChanged->[fireEventToSourceWindow,getSourceWindowId,isMainSourceWindow],onPopoutDoc->[execute->[onExecute->[],getDocId],getDocId],maximizeSourcePaneIfNecessary->[isMainSourceWindow],doForAllSourceWindows->[onExecute->[execute],execute->[onExecute->[execute]],execute,doForAllSourceWindows],fireEventToSourceWindow->[isMainSourceWindow],getDocCollabParams->[getSourceDocs],assignSourceDocWindowId->[onResponseReceived->[execute],getSourceDocs,getSourceWindowId],fireEventToAllSourceWindows->[fireEventToSourceWindow],onEditorCommandDispatch->[fireEventToLastFocusedWindow,getType],onDocTabClosed->[getSourceDocs,getDocId],onSourceDocAdded->[getSourceWindowId],onCollabEditStarted->[getSourceDocs],closeSourceWindowDocs->[assignSourceDocWindowId,getSourceWindowId],getSourceDocs->[isMainSourceWindow],ensureVisibleSourcePaneIfNecessary->[isMainSourceWindow],activateLastFocusedSource->[getLastFocusedSourceWindowId,getSourceWindowId],onDocWindowChanged->[getSourceWindowId,getDocId],saveWithPrompt->[getWindowIdOfDocId,saveWithPrompt],onSourceFileSaved->[getDocId],fireEventForDocument->[getWindowIdOfDocId,fireEventToLastFocusedWindow],getCurrentDocPath->[getCu
rrentDocPath],saveUnsavedDocuments->[execute->[onExecute->[],saveUnsavedDocuments]]]]
Creates a new instance of SourceWindowManager. Adds this to the list of handlers for the events.
update file header year
@@ -47,6 +47,7 @@ type DefaultEntryPoints []string // String is the method to format the flag's value, part of the flag.Value interface. // The String method's output will be used in diagnostics. func (dep *DefaultEntryPoints) String() string { + //TODO : The string returned should be formatted in such way that the func Set below could parse it. return fmt.Sprintf("%#v", dep) }
[Set->[Set,SubexpNames,New,MustCompile,Split,FindAllStringSubmatch],Type->[Sprint],String->[Sprintf],AddConfigPath,Printf,AutomaticEnv,AllSettings,SetConfigType,SetConfigName,Decode,ReadInConfig,ConfigFileUsed,SetConfigFile,GetString,StringToTimeDurationHookFunc,Fatalf,NewDecoder,Set,SetEnvPrefix]
String returns a string representation of the entry points.
What is left to do ? :angel: (should be written here, or the `TODO` should go away :stuck_out_tongue_closed_eyes: )
@@ -460,11 +460,9 @@ class InstallRequirement(object): # FIXME: This is a lame hack, entirely for PasteScript which has # a self-provided entry point that causes this awkwardness _run_setup_py = """ -__file__ = __SETUP_PY__ from setuptools.command import egg_info import pkg_resources import os -import tokenize def replacement_run(self): self.mkpath(self.egg_info) installer = self.distribution.fetch_build_egg
[parse_editable->[_build_req_from_url,_strip_postfix,_build_editable_options],InstallRequirement->[from_path->[from_path],from_line->[_strip_extras],get_dist->[egg_info_path],install->[prepend_root],_correct_build_location->[build_location],move_wheel_files->[move_wheel_files],run_egg_info->[_correct_build_location],archive->[pkg_info],pkg_info->[egg_info_data,egg_info_path],assert_source_matches_version->[pkg_info],ensure_has_source_dir->[build_location]]]
This is the main entry point for the egg_info command. It is called by the Provides access to the data and metadata of the given node.
I think this "lame hack" present since c2000d7de68ef (7 years now) could now be removed ?
@@ -654,7 +654,7 @@ class FsyncedZipFile(zipfile.ZipFile): os.fsync(descriptor) os.close(descriptor) - def _extract_member(self, member, targetpath, pwd): + def _extract_member(self, member, targetpath, *args, **kwargs): """Extends `ZipFile._extract_member` to call fsync(). For every extracted file we are ensuring that it's data has been
[RDFExtractor->[apps->[find,get_appversions,uri],find->[uri],parse->[parse]],atomic_lock->[get],get_background_images->[parse_xpi,get,get_filepath,read],SafeZip->[close->[close],extract_to_dest->[extract_info_to_dest],read->[read],initialize_and_validate->[FsyncedZipFile,archive_member_validator]],extract_search->[parse,text],copy_over->[exists],get_sha256->[_get_hash],parse_addon->[parse_xpi,parse_search],extract_translations->[read,get_filepath,namelist],update_version_number->[read],parse_xpi->[get_file,parse],write_crx_as_xpi->[read],_get_hash->[read],FsyncedZipFile->[_extract_member->[_fsync_file,_fsync_dir]],extract_zip->[SafeZip,extract_to_dest],repack->[extract_zip,zip_folder_content],get_all_files->[iterate->[iterate],iterate],extract_xpi->[extract_zip,get_all_files,copy_over],check_xpi_info->[exists,get],ManifestJSONExtractor->[strict_max_version->[get_simple_version,get],strict_min_version->[get_simple_version,get],apps->[get,get_appversions],get->[get],guid->[get],target_locale->[get],parse->[apps,get,target_locale,parse],gecko->[get],is_experiment->[get]],parse_search->[get_file,extract_search]]
Syncs a file and all subdirectories if a is found.
Wow, given the inconsistencies between `TarFile` and `ZipFile`, I'm amazed that this method exists in both! Of course it has a different signature, because eh, it would be too easy...
@@ -84,8 +84,7 @@ def fix_stim_artifact(inst, events=None, event_id=None, tmin=0., window = None if mode == 'window': window = _get_window(s_start, s_end) - ch_names = inst.info['ch_names'] - picks = pick_channels(ch_names, ch_names) + picks = _pick_data_channels(inst.info) if isinstance(inst, BaseRaw): _check_preload(inst)
[fix_stim_artifact->[_check_preload,_get_window,_fix_artifact]]
Fix the stimulation artifacts from a given stimulation instance. Missing nanoseconds.
I forgot to mention this, but I also fixed this. Previously `fix_stim_artifact` was fixing all channels (this line was equivaletn to `picks = np.arange(len(ch_names))`), but now it should only fix the data channels. `whats_new.rst` updated.
@@ -83,6 +83,8 @@ export type BasePopoverPropsT = { onClick?: (e: Event) => mixed, /** Handler for 'Esc' keypress events */ onFocus?: (e: Event) => mixed, + /** Pass FocusOptions to autoFocus */ + autoFocusOptions?: FocusOptions, /** Handler for mouseenter events on trigger element. */ onMouseEnter?: (e: Event) => mixed, /** Number of milliseconds to wait before showing the popover after mouse enters the trigger element (for triggerType `hover`). */
[No CFG could be retrieved]
Analyzes the popover configuration and returns the popover content. All popover elements?.
Would `FocusOptions` be a better name than `autoFocusOptions`? It seems like these options include configs for the entire `FocusLock` component.
@@ -2089,7 +2089,7 @@ class NodeControllerTest extends SuluTestCase public function testRenamePageWithLinkedChild() { $client = $this->createAuthenticatedClient(); - $this->importer->import(__DIR__ . '/../../app/Resources/exports/tree.xml'); + $this->importer->import(__DIR__ . '/../../files/exports/tree.xml'); $document = $this->documentManager->find('585ccd35-a98e-4e41-a62c-e502ca905496', 'en'); $document->setStructureType('internallinks');
[NodeControllerTest->[testGetAnotherTemplate->[setTitle,createAuthenticatedClient,getContent,assertArrayNotHasKey,flush,persist,request,bind,getUuid,setStructureType,createPageDocument,setResourceSegment,assertEquals],testCopyNonExistingSource->[createAuthenticatedClient,getResponse,request,assertHttpStatusCode,import],testPutNotExisting->[getResponse,request,assertHttpStatusCode,createAuthenticatedClient],testGetShadowContent->[setTitle,createAuthenticatedClient,getContent,flush,persist,request,bind,setShadowLocale,getUuid,setStructureType,createPageDocument,setShadowLocaleEnabled,setResourceSegment,assertEquals],testOrderNonExistingSource->[getResponse,request,assertHttpStatusCode,createAuthenticatedClient],testGetWithPermissions->[setTitle,createAuthenticatedClient,getContent,assertArrayHasKey,flush,persist,request,getUuid,setStructureType,createPageDocument,clear,setResourceSegment],testSmallResponse->[createAuthenticatedClient,getContent,assertArrayHasKey,assertArrayNotHasKey,getResponse,request,assertHttpStatusCode,setUpContent],testGetNotExisting->[getResponse,request,assertHttpStatusCode,createAuthenticatedClient],testCopyWithShadow->[assertStringStartsWith,setTitle,find,createAuthenticatedClient,getContent,getResourceSegment,flush,persist,request,bind,setShadowLocale,getUuid,setStructureType,createPageDocument,publish,setShadowLocaleEnabled,setResourceSegment],testMoveNonExistingSource->[createAuthenticatedClient,getResponse,request,assertHttpStatusCode,import],testPostWithExistingResourceLocator->[getResponse,request,assertHttpStatusCode,createAuthenticatedClient],initOrm->[setIdGeneratorType,flush,persist,get,getClassMetaData,setId,purgeDatabase,setName,createNew,setIdGenerator],testOrderWithGhosts->[createAuthenticatedClient,getContent,getResponse,request,assertHttpStatusCode,import,assertEquals],testInternalLinkAutoName->[request,assertEquals,createAuthenticatedClient,getContent],testMoveNonExistingDestination->[createAuthenticatedClient,getResponse,req
uest,assertHttpStatusCode,import],testBreadcrumb->[createAuthenticatedClient,getContent,assertArrayNotHasKey,getResponse,request,assertHttpStatusCode,import,assertEquals],testCopy->[createAuthenticatedClient,getContent,getResponse,request,assertHttpStatusCode,assertFalse,hasNode,assertNull,assertNotEquals,getRootNode,import,assertEquals],testCopyNonExistingDestination->[createAuthenticatedClient,getResponse,request,assertHttpStatusCode,import],testGetFlat->[assertTrue,createAuthenticatedClient,getContent,getResponse,request,assertHttpStatusCode,assertFalse,import,assertEquals],testPutWithValidHash->[createAuthenticatedClient,getContent,getResponse,request,assertHttpStatusCode],testPutAndPublish->[createAuthenticatedClient,getContent,getPropertyValue,getResponse,request,assertHttpStatusCode,assertCount,setUpContent,getString,getTestUserId,assertEquals],testOrderNonExistingPosition->[createAuthenticatedClient,getContent,getResponse,request,assertHttpStatusCode],testNavContexts->[createAuthenticatedClient,getContent,assertArrayHasKey,getResponse,request,assertFalse,assertHttpStatusCode,assertEquals],testPost->[getNode,createAuthenticatedClient,getContent,getPropertyValue,hasProperty,getResponse,request,assertHttpStatusCode,assertFalse,assertCount,getString,getTestUserId,getIdentifier,assertEquals],testRemoveDraftWithoutWebspace->[setTitle,find,getNodeByIdentifier,getPropertyValue,createAuthenticatedClient,flush,persist,request,assertHttpStatusCode,getResponse,getUuid,setStructureType,createPageDocument,publish,assertEquals],testRemoveDraft->[setTitle,find,getNodeByIdentifier,getPropertyValue,createAuthenticatedClient,flush,persist,request,assertHttpStatusCode,getResponse,getContent,getUuid,refresh,setStructureType,createPageDocument,publish,assertEquals],testPutWithTemplateChange->[createAuthenticatedClient,getContent,getResponse,request,assertHttpStatusCode,setUpContent,assertEquals],testGet->[setTitle,createAuthenticatedClient,getContent,flush,persist,request,bind,ge
tUuid,setStructureType,createPageDocument,setResourceSegment,assertEquals],testPutWithInvalidHash->[createAuthenticatedClient,getContent,getResponse,request,assertHttpStatusCode,assertEquals],testPutHomeWithChildren->[createAuthenticatedClient,getContent,getResponse,request,assertHttpStatusCode,assertEquals],setUp->[getEntityManager,get,initOrm,initPhpcr],testPostAndPublish->[createAuthenticatedClient,getContent,getPropertyValue,getResponse,request,assertHttpStatusCode,assertCount,getString,getTestUserId,assertEquals],testCGetWithAllWebspaceNodes->[createAuthenticatedClient,getContent,getResponse,request,assertHttpStatusCode,assertContains,assertCount],testGetNotExistingTree->[getResponse,request,assertHttpStatusCode,createAuthenticatedClient],testDeleteReferencedNode->[setTitle,createAuthenticatedClient,getResponse,flush,persist,request,assertHttpStatusCode,bind,getUuid,setStructureType,createPageDocument,publish,setResourceSegment],testOrder->[createAuthenticatedClient,getContent,getResponse,request,assertHttpStatusCode,import,assertEquals],testCopyMultipleLocales->[createAuthenticatedClient,getContent,getResponse,request,assertHttpStatusCode,assertContains,assertEquals],testCGetWithSingleWebspaceNodes->[createAuthenticatedClient,getContent,getResponse,request,assertHttpStatusCode,assertContains,assertCount],testTreeGetTillSelectedId->[assertTrue,createAuthenticatedClient,getContent,getResponse,request,assertHttpStatusCode,assertFalse,assertEmpty,assertCount,assertNull,import,assertEquals],testGetTree->[assertTrue,createAuthenticatedClient,getContent,getResponse,request,assertHttpStatusCode,assertFalse,import,assertEquals],setUpContent->[request,createAuthenticatedClient,getContent],testPutRemoveShadowWithDifferentTemplate->[setTitle,createAuthenticatedClient,getContent,flush,persist,request,bind,setShadowLocale,getUuid,setStructureType,getStatusCode,createPageDocument,setShadowLocaleEnabled,setResourceSegment,assertEquals],testGetGhostContent->[setTitle,createAut
henticatedClient,getContent,assertArrayNotHasKey,flush,persist,request,bind,getUuid,setStructureType,createPageDocument,setResourceSegment,assertEquals],testUnpublish->[getProperties,getNodeByIdentifier,flush,request,assertHttpStatusCode,publish,getPropertyValue,setStructureType,assertFalse,setResourceSegment,setTitle,createAuthenticatedClient,hasProperty,persist,createPageDocument,assertEquals,getResponse,assertEmpty,getUuid],createPageDocument->[create],testCopyLocale->[createAuthenticatedClient,getContent,getResponse,request,assertHttpStatusCode,assertContains,assertEquals],testDelete->[createAuthenticatedClient,getResponse,request,assertHttpStatusCode,setUpContent],testHistory->[request,assertEquals,createAuthenticatedClient,getContent],testTreeGet->[assertTrue,createAuthenticatedClient,getContent,getResponse,request,assertHttpStatusCode,assertFalse,import,assertEquals],testCGetWithAllWebspaceNodesDifferentLocales->[createAuthenticatedClient,getContent,getResponse,request,assertHttpStatusCode,assertContains,assertCount],testMove->[assertTrue,getNode,createAuthenticatedClient,getContent,getPropertyValue,getResponse,request,assertHttpStatusCode,assertFalse,hasNode,getRootNode,import,assertEquals],testPutShadow->[createAuthenticatedClient,getContent,assertArrayNotHasKey,request,assertEquals],testRenamePageWithLinkedChild->[getIdentifier,getNode,createAuthenticatedClient,find,getContent,getResponse,flush,persist,request,assertHttpStatusCode,bind,get,getUuid,setStructureType,publish,import,clear,assertEquals],testCgetAction->[assertTrue,createAuthenticatedClient,getContent,assertArrayHasKey,getResponse,request,assertHttpStatusCode,assertEmpty,assertFalse,import,assertEquals],testPut->[createAuthenticatedClient,getContent,hasProperty,getResponse,request,assertHttpStatusCode,assertFalse,getTestUserId,assertEquals],testGetInternalLink->[setTitle,createAuthenticatedClient,find,getContent,flush,persist,request,getUuid,setStructureType,createPageDocument,setRedirectType,se
tRedirectTarget,setResourceSegment,assertEquals],testPutWithAlreadyExistingUrl->[createAuthenticatedClient,getContent,getResponse,request,assertHttpStatusCode,assertEquals],testGetExternalLink->[setTitle,createAuthenticatedClient,getContent,setRedirectExternal,flush,persist,request,getUuid,setStructureType,createPageDocument,setRedirectType,setResourceSegment,assertEquals],testTreeGetTillId->[assertTrue,createAuthenticatedClient,getContent,getResponse,request,assertHttpStatusCode,assertFalse,assertEmpty,assertCount,assertNull,import,assertEquals]]]
Rename a page with a linked child. Checks if the node is the same as the current node.
Maybe fixtures instead of files? Or is that a bad idea? :thinking:
@@ -46,6 +46,14 @@ class PageView < ApplicationRecord customRanking ["desc(visited_at_timestamp)"] end + def self.trigger_delayed_job(record, remove) + if remove + record.delay.remove_from_index! + else + record.delay.index! + end + end + private def extract_domain_and_path
[PageView->[belongs_to_pro_user?->[pro?],extract_domain_and_path->[parse,path,domain],article_searchable_text->[body_text],article_searchable_tags->[cached_tag_list],article_tags->[cached_tag_list_array],attributeForDistinct,attributes,reading_time,year,searchableAttributes,attributesForFaceting,name,algoliasearch,distinct,profile_image_90,before_create,attribute,username,tags,include,belongs_to,to_i,user,strftime,customRanking,title,path]]
Extracts the domain and path from the referrer if any.
Currently, we don't use DelayedJob `delay` and use ActiveJobs instead so that the possible migration to another background job queue would be easier. We refactored all the `delay` calls while working on the #3136, the whole issue is also described in #2497. Another problem with this is that it will pass ActiveRecord object instead of an id, so the job could be executed when the record is not in the database already, so the job will fail with a deserialization error. I suppose `Search::RemoveFromIndexJob` and `Search::IndexJob` (added in #5063) can be used here.
@@ -73,9 +73,11 @@ export function getData(event) { * @param {string} eventType * @param {function(!Event)} listener * @param {boolean=} opt_capture + * @param {boolean=} opt_passive * @return {!UnlistenDef} */ -export function listenOnce(element, eventType, listener, opt_capture) { +export function listenOnce(element, eventType, listener, + opt_capture, opt_passive) { let localListener = listener; const unlisten = internalListenImplementation(element, eventType, event => { try {
[No CFG could be retrieved]
Listens for the specified event on the element and removes the listener as soon as the specified listenOncePromise - Function to listen for event on element with optional cancel callback.
could you add to `listenPromise*` as well, for completeness
@@ -147,7 +147,7 @@ </div> <% end %> - <%= form_for(SiteConfig.new, url: admin_config_path, html: { data: { action: "submit->config#configUpdatePrecheck", "config-target": "authSectionForm", testid: "authSectionForm" } }) do |f| %> + <%= form_for(Settings::Authentication.new, url: admin_settings_authentications_path, html: { data: { action: "submit->config#configUpdatePrecheck", "config-target": "authSectionForm", testid: "authSectionForm" } }) do |f| %> <div class="card mt-3"> <%= render partial: "admin/shared/card_header", locals: {
[No CFG could be retrieved]
Displays a hidden field that displays a health check token and video encoder key. Helps to show the crayons checkbox.
Not directly related to this PR, but while doing manual QA I saw a related warning in the console and decided to address it right away.
@@ -342,16 +342,9 @@ def _setup_chpi_fits(info, t_window, t_step_min, method='forward', model += [slope, np.ones(slope.shape)] model = np.concatenate(model, axis=1) inv_model = linalg.pinv(model) - # Set up highpass at half lowest cHPI freq - hp_n = 2 ** (int(np.ceil(np.log2(n_window))) + 1) - freqs = fftpack.rfftfreq(hp_n, 1. / info['sfreq']) - hp_ind = np.where(freqs >= hpi_freqs.min())[0][0] - 2 - hp_window = np.concatenate( - [[0], np.repeat(np.hanning(hp_ind - 1)[:(hp_ind - 1) // 2], - 2)])[np.newaxis] # Set up magnetic dipole fits - picks_meg = pick_types(info, meg=True, eeg=False, exclude=exclude) + meg_picks = pick_types(info, meg=True, eeg=False, exclude=exclude) if len(exclude) > 0: if exclude == 'bads': msg = info['bads']
[filter_chpi->[_setup_chpi_fits],_setup_chpi_fits->[_get_hpi_info],_calculate_chpi_positions->[_time_prefix,_fit_magnetic_dipole,_fit_chpi_pos,_setup_chpi_fits]]
Setup the cHPI fits. Compute the n - window non - zero n - window - HPI - highpass - Exclude missing HPI - related data. coil_head_rrs - coil_head_rrs.
Why get rid of this step?
@@ -925,6 +925,12 @@ exports.extensionBundles = [ latestVersion: '0.1', type: TYPES.MISC, }, + { + name: 'amp-mplayer', + version: '0.1', + latestVersion: '0.1', + type: TYPES.MISC, + }, ]; exports.aliasBundles = [
[No CFG could be retrieved]
Create an object with all the possible bundles for a specific node. Private functions - functions - functions - functions - functions - functions - functions - functions - functions.
`amp-mplayer` is too generic and ambitious as a name. What is the name of the product/company behind this video platform? Could you please add a link to the website for your platform?
@@ -74,7 +74,8 @@ func withTerminalResetter(f func()) { initialTermState, err := term.GetState(osSafeStdin) if err != nil { - logger.Fatal(err) + fmt.Print(err) + os.Exit(1) } c := make(chan os.Signal, 1)
[PasswordPrompt->[Print,ReadPassword,Fatal,Fd],IsTerminal->[IsTerminal,Fd],Prompt->[NewReader,ReadString,Print,Fatal,TrimSpace],Exit,Restore,ErrorIf,Printf,Stop,Fd,Notify,Repeat,Fatal,GetState]
IsTerminal returns true if the current process is executing in a terminal false otherwise.
Maybe use built in `log.Fatal` ?
@@ -654,6 +654,8 @@ namespace Dynamo.Tests //Check whether the geometry node is frozen var node = CurrentDynamoModel.CurrentWorkspace.NodeFromWorkspace("8163332d-21ec-4257-9a5a-0b69462db44f"); Assert.IsTrue(node.IsFrozen); + //Frozen nodes should not get involved in execution. + Assert.IsFalse(node.WasInvolvedInExecution); } [Test]
[NodeExecutionTest->[GetLibrariesToPreload->[GetLibrariesToPreload]]]
Load the graph with freeze nodes test. if (!nul l ) return false ;.
simple test to check whether the frozen node when opening the graph was involved in execution
@@ -3428,4 +3428,13 @@ func getDefaultAddons(version string) []KubernetesAddon { }, }, } + + if common.IsKubernetesVersionGe(version, "1.15.0") { + addons = append(addons, KubernetesAddon{ + Name: common.PodSecurityPolicyAddonName, + Enabled: to.BoolPtr(true), + }) + } + + return addons }
[Itoa,BoolPtr,DeepEqual,Bool,IsEnabled,Parallel,Errorf,Fatalf,setAddonsConfig,Run]
1 ) }.
Yay w/ the new addons UT interfaces we just add default addons in one place and get that coverage everywhere
@@ -112,6 +112,15 @@ func (i *Inspect) Run(cli *cli.Context) error { } executor.InitDiagnosticLogs(vchConfig) + installerVer := version.GetBuild() + + log.Info("") + log.Infof("Installer version: %s", installerVer.ShortVersion()) + log.Infof("VCH version: %s", vchConfig.Version.ShortVersion()) + log.Info("") + log.Info("VCH upgrade status:") + i.upgradeStatusMessage(ctx, vch, installerVer, vchConfig.Version) + if err = executor.InspectVCH(vch, vchConfig); err != nil { executor.CollectDiagnosticLogs() log.Errorf("%s", err)
[Run->[InitDiagnosticLogs,NewDispatcher,Reference,InspectVCH,Error,Args,New,NewVCHFromComputePath,Errorf,Infof,processParams,NewVCHFromID,CollectDiagnosticLogs,GetVCHConfig,WithTimeout,Background,NewValidator,String,SetLevel],processParams->[End,HasCredentials,Begin],Flags->[ComputeFlags,TargetFlags,DebugFlags,IDFlags],NewData]
Run the inspect command vchConfig is the vchConfig object that contains the vchConfig.
Why make these separate calls instead of just doing a `log.Infof("\n...` with all of the output formatted how you want?
@@ -1606,7 +1606,7 @@ def test_exit_positions(mocker, default_conf, limit_buy_order, caplog) -> None: assert n == 0 # Test amount not modified by fee-logic assert not log_has( - 'Applying fee to amount for Trade {} from 90.99181073 to 90.81'.format(trade), caplog + 'Applying fee to amount for Trade {} from 30.0 to 90.81'.format(trade), caplog ) mocker.patch('freqtrade.freqtradebot.FreqtradeBot.get_real_amount', return_value=90.81)
[test_process_exchange_failures->[patch_RPCManager],test_handle_cancel_exit_limit->[patch_RPCManager],test_check_handle_timedout_exception->[patch_RPCManager],test_check_handle_cancelled_sell->[patch_RPCManager],test_create_trade_no_signal->[patch_RPCManager],test_check_available_stake_amount->[patch_RPCManager],test_sell_not_enough_balance->[patch_RPCManager],test_get_valid_price->[patch_RPCManager],test_process_trade_no_whitelist_pair->[patch_RPCManager],test_order_book_depth_of_market->[patch_RPCManager],test_check_handle_timedout_partial_except->[patch_RPCManager],test_close_trade->[patch_RPCManager],test_handle_trade_use_sell_signal->[patch_RPCManager],test_handle_stoploss_on_exchange_trailing->[patch_RPCManager],test_process_trade_creation->[patch_RPCManager],test_execute_trade_exit_market_order->[patch_RPCManager],test_trailing_stop_loss->[patch_RPCManager],test_handle_overlapping_signals->[patch_RPCManager],test_handle_cancel_exit_cancel_exception->[patch_RPCManager],test_check_handle_cancelled_buy->[patch_RPCManager],test_tsl_only_offset_reached->[patch_RPCManager],test_enter_positions_no_pairs_left->[patch_RPCManager],test_handle_cancel_enter_exchanges->[patch_RPCManager],test_check_handle_timedout_partial->[patch_RPCManager],test_handle_stoploss_on_exchange->[patch_RPCManager],test_execute_trade_exit_down_stoploss_on_exchange_dry_run->[patch_RPCManager],test_add_stoploss_on_exchange->[patch_RPCManager],test_execute_trade_exit_up->[patch_RPCManager],test_edge_called_in_process->[patch_RPCManager],test_process_operational_exception->[patch_RPCManager],test_tsl_on_exchange_compatible_with_edge->[patch_RPCManager],test_handle_trade_roi->[patch_RPCManager],test_create_trade_no_stake_amount->[patch_RPCManager],test_check_handle_timedout_sell_usercustom->[patch_RPCManager],test__safe_exit_amount->[patch_RPCManager],test_trailing_stop_loss_offset->[patch_RPCManager],test_order_dict->[patch_RPCManager],test_update_closed_trades_without_assigned_fees->[patch_with_f
ee],test_get_trade_stake_amount->[patch_RPCManager],test_order_book_ask_strategy->[patch_RPCManager],test_create_trades_preopen->[patch_RPCManager],test_check_handle_timedout_buy_usercustom->[patch_RPCManager],test_trailing_stop_loss_positive->[patch_RPCManager],test_create_stoploss_order_invalid_order->[patch_RPCManager],test_create_trade_minimal_amount->[patch_RPCManager],test_enter_positions_global_pairlock->[patch_RPCManager],test_create_trades_multiple_trades->[patch_RPCManager],test_check_handle_timedout_buy_exception->[patch_RPCManager],test_locked_pairs->[patch_RPCManager],test_handle_sle_cancel_cant_recreate->[patch_RPCManager],test_handle_cancel_enter->[patch_RPCManager],test_handle_cancel_enter_corder_empty->[patch_RPCManager],test_check_handle_timedout_buy->[patch_RPCManager],test_execute_trade_exit_with_stoploss_on_exchange->[patch_RPCManager],test_create_trade->[patch_RPCManager],test_total_open_trades_stakes->[patch_RPCManager],test_ignore_roi_if_buy_signal->[patch_RPCManager],test_execute_trade_exit_down->[patch_RPCManager],test_check_handle_timedout_sell->[patch_RPCManager],test_may_execute_trade_exit_after_stoploss_on_exchange_hit->[patch_RPCManager],test_disable_ignore_roi_if_buy_signal->[patch_RPCManager],test_sell_profit_only->[patch_RPCManager],test_process_trade_handling->[patch_RPCManager],test_execute_entry->[patch_RPCManager],test_execute_trade_exit_custom_exit_price->[patch_RPCManager],test_edge_overrides_stake_amount->[patch_RPCManager],test_process_informative_pairs_added->[patch_RPCManager],test_handle_stoploss_on_exchange_custom_stop->[patch_RPCManager],test_handle_trade->[patch_RPCManager],test_check_handle_timedout_partial_fee->[patch_RPCManager],test_refind_lost_order->[reset_open_orders],test_edge_overrides_stoploss->[patch_RPCManager]]
Test exit positions.
What is this test testing? get_real_amount is not relevant for this test (as handle_trade is mocked).
@@ -131,6 +131,12 @@ def get_hashable_destination(destination): return destination +def no_retry_on_invalid_input(exn): + if isinstance(exn, ValueError): + return False + return retry.retry_on_server_errors_and_timeout_filter(exn) + + def parse_table_schema_from_json(schema_string): """Parse the Table Schema provided as string.
[AppendDestinationsFn->[_get_table_fn->[_value_provider_or_static_val]],get_avro_schema_from_table_schema->[get_dict_table_schema],RowAsDictJsonCoder->[decode->[decode]],table_schema_to_dict->[get_table_field->[get_table_field],get_table_field],BigQueryReader->[__iter__->[convert_row_to_dict,run_query],__exit__->[clean_up_temporary_dataset],__enter__->[create_temporary_dataset,BigQueryWrapper,_get_source_location],_get_source_location->[get_table_location,get_query_location]],get_dict_table_schema->[table_schema_to_dict,get_table_schema_from_string],BigQueryWrapper->[get_or_create_table->[_create_table,_delete_table,_is_table_empty,get_table],insert_rows->[_insert_all_rows],_get_temp_table->[parse_table_reference],_insert_copy_job->[_build_job_labels],create_temporary_dataset->[get_or_create_dataset],get_table_location->[get_table],convert_row_to_dict->[_convert_cell_value_to_dict],perform_extract_job->[_build_job_labels],clean_up_temporary_dataset->[_get_temp_table,_delete_dataset],perform_load_job->[_insert_load_job],_insert_load_job->[_build_job_labels],run_query->[_get_query_results,_start_query_job],_start_query_job->[_build_job_labels,_get_temp_table]],parse_table_schema_from_json->[_parse_schema_field->[_parse_schema_field],_parse_schema_field],JsonRowWriter->[tell->[tell],write->[write,encode],__init__->[RowAsDictJsonCoder],writable->[writable],close->[close],flush->[flush]],BigQueryWriter->[__exit__->[_flush_rows_buffer],__enter__->[get_or_create_table,BigQueryWrapper],Write->[_flush_rows_buffer],_flush_rows_buffer->[insert_rows]],AvroRowWriter->[tell->[tell,flush],write->[write],__init__->[writable],writable->[writable],close->[close,flush],flush->[flush]]]
Parse the BigQuery table schema from a JSON string.
I wonder if this should exist under retry.py. Thoughts?
@@ -28,6 +28,7 @@ using DynCmd = Dynamo.ViewModels.DynamoViewModel; using System.Reflection; using Dynamo.Wpf.Properties; using DynamoUtilities; +using Dynamo.Wpf.Views.Gallery; namespace Dynamo.ViewModels {
[DynamoViewModel->[CanZoomOut->[CanZoomOut],SaveAs->[SaveAs],ExportToSTL->[ExportToSTL],ShowOpenDialogAndOpenResult->[Open,CanOpen],ShowSaveDialogIfNeededAndSave->[SaveAs,Save],ImportLibrary->[ImportLibrary],ReportABug->[ReportABug],ClearLog->[ClearLog],Escape->[CancelActiveState],ShowSaveDialogIfNeededAndSaveResult->[CanSave,Save],FitView->[FitView],Undo->[Undo],SelectNeighbors->[SelectNeighbors],CanSelectAll->[CanSelectAll],CanRedo->[CanRedo],ZoomIn->[ZoomIn],ZoomOut->[ZoomOut],CleanUp->[UnsubscribeAllEvents],ShowSaveImageDialogAndSaveResult->[CanSaveImage,SaveImage],GoToWorkspace->[FocusCustomNodeWorkspace],OpenRecent->[Open],ShowElement->[FocusCustomNodeWorkspace],CanUndo->[CanUndo],SelectAll->[SelectAll],ShowSaveDialogAndSaveResult->[SaveAs],Redo->[Redo],CancelActiveState->[CancelActiveState],CanZoomIn->[CanZoomIn]]]
A view model which tracks the model in the workspace and then tracks the model in the view - The object to which the model is being transformed.
If this isn't required, please remove it.
@@ -15,7 +15,7 @@ from acme import messages logging.basicConfig(level=logging.DEBUG) -DIRECTORY_URL = 'https://acme-staging.api.letsencrypt.org/directory' +DIRECTORY_URL = 'https://acme-staging-v02.api.letsencrypt.org/directory' BITS = 2048 # minimum for Boulder DOMAIN = 'example1.com' # example.com is ignored by Boulder
[request_challenges,basicConfig,join,agree_to_tos,info,resource_string,default_backend,poll,load_certificate_request,register,generate_private_key,ComparableX509,debug,print,format,JWKRSA,request_issuance,Identifier,Client]
Example script showing how to use acme client API.
nit: While I doubt anyone is using this file, this won't work because we use `acme.client.Client` below which doesn't work with ACMEv2.
@@ -455,6 +455,7 @@ func (c *RaftCluster) processRegionHeartbeat(region *core.RegionInfo) error { for _, p := range region.GetPeers() { c.updateStoreStatusLocked(p.GetStoreId()) } + regionHeartbeatCounter.WithLabelValues("none", "none", "cache", "update").Inc() } if c.regionStats != nil {
[UpdateStoreLabels->[GetStore],GetReplicaScheduleLimit->[GetReplicaScheduleLimit],DropCacheRegion->[GetRegion],putConfig->[putMetaLocked],GetStoreBalanceRate->[GetStoreBalanceRate],GetMaxMergeRegionSize->[GetMaxMergeRegionSize],GetRegionStores->[GetRegionStores],AllocPeer->[allocID],GetSplitMergeInterval->[GetSplitMergeInterval],GetMetaStores->[GetMetaStores],IsNamespaceRelocationEnabled->[IsNamespaceRelocationEnabled],RandHotRegionFromStore->[RandHotRegionFromStore,GetRegion],BuryStore->[GetStore],BlockStore->[BlockStore],IsRaftLearnerEnabled->[IsFeatureSupported,IsRaftLearnerEnabled],IsMakeUpReplicaEnabled->[IsMakeUpReplicaEnabled],GetAdjacentRegions->[GetAdjacentRegions],putStore->[GetStores,GetStore],RemoveStore->[GetStore],SetStoreState->[SetStoreState,GetStore],GetHotRegionScheduleLimit->[GetHotRegionScheduleLimit],GetStrictlyMatchLabel->[GetStrictlyMatchLabel],GetMetaRegions->[GetMetaRegions],RandPendingRegion->[RandPendingRegion],GetLowSpaceRatio->[GetLowSpaceRatio],IsLocationReplacementEnabled->[IsLocationReplacementEnabled],start->[initCluster],GetMergeScheduleLimit->[GetMergeScheduleLimit],OnStoreVersionChange->[GetStores],GetLocationLabels->[GetLocationLabels],GetHotRegionCacheHitsThreshold->[GetHotRegionCacheHitsThreshold],GetEnableOneWayMerge->[GetEnableOneWayMerge],check->[GetStores,GetStoreRegionCount],GetTolerantSizeRatio->[GetTolerantSizeRatio],IsReplaceOfflineReplicaEnabled->[IsReplaceOfflineReplicaEnabled],GetFollowerStores->[GetFollowerStores],GetMaxReplicas->[GetMaxReplicas],GetMaxMergeRegionKeys->[GetMaxMergeRegionKeys],GetAverageRegionSize->[GetAverageRegionSize],GetMaxPendingPeerCount->[GetMaxPendingPeerCount],stop->[stop],RandLeaderRegion->[RandLeaderRegion],GetRegionStats->[GetRegionStats],checkStores->[GetStores,GetStoreRegionCount,BuryStore],GetRegionStatsByType->[GetRegionStatsByType],RandFollowerRegion->[RandFollowerRegion],GetLeaderScheduleLimit->[GetLeaderScheduleLimit],GetRegion->[GetRegion],GetRegions->[GetRegions],SetStoreWeight->
[GetStore],CheckLabelProperty->[CheckLabelProperty],IsRemoveExtraReplicaEnabled->[IsRemoveExtraReplicaEnabled],GetHighSpaceRatio->[GetHighSpaceRatio],GetPatrolRegionInterval->[GetPatrolRegionInterval],GetLeaderStore->[GetLeaderStore],GetStore->[GetStore],GetMaxStoreDownTime->[GetMaxStoreDownTime],GetStoreRegionCount->[GetStoreRegionCount],IsRegionHot->[IsRegionHot],collectMetrics->[GetStores],IsRemoveDownReplicaEnabled->[IsRemoveDownReplicaEnabled],GetMaxSnapshotCount->[GetMaxSnapshotCount],GetStoreRegions->[GetStoreRegions],GetStores->[GetStores],GetSchedulerMaxWaitingOperator->[GetSchedulerMaxWaitingOperator],AttachOverloadStatus->[AttachOverloadStatus],RemoveTombStoneRecords->[GetStores],GetRegionScheduleLimit->[GetRegionScheduleLimit],UnblockStore->[UnblockStore]]
processRegionHeartbeat processes a heartbeat from a region and saves the region in the storage system. The save path is taken when the region is not yet in the cache; the observe step updates all items in the hotSpotCache.
Do we need a new counter?
@@ -292,9 +292,6 @@ class Model(torch.nn.Module, Registrable): model_params = config.get("model") - training_params = config.get("trainer", Params({})) - opt_level = opt_level or training_params.get("opt_level") - # The experiment config tells us how to _train_ a model, including where to get pre-trained # embeddings from. We're now _loading_ the model, so those embeddings will already be # stored in our weights. We don't need any pretrained weight file anymore, and we don't
[Model->[from_archive->[extend_embedder_vocab],load->[_load]],remove_pretrained_embedding_params->[remove_pretrained_embedding_params]]
Instantiates a model from a given configuration. Load a single missing node model from the model file.
I'm very happy to get rid of all of this logic here.
@@ -2255,7 +2255,7 @@ var NgModelController = ['$scope', '$exceptionHandler', '$attrs', '$element', '$ * * For best practices on using `ngModel`, see: * - * - [https://github.com/angular/angular.js/wiki/Understanding-Scopes] + * - (Understanding Scopes)[https://github.com/angular/angular.js/wiki/Understanding-Scopes] * * For basic examples, how to use `ngModel`, see: *
[No CFG could be retrieved]
A directive that binds an input, select, textarea, or form control to a property (directives: input, select, textarea).
I think this is backwards, `[]` on the left and `()` on the right, although who knows if our docs processor does its own thing
@@ -202,9 +202,11 @@ public class TestPubsub implements TestRule { public List<PubsubMessage> pull(int maxBatchSize) throws IOException { List<PubsubClient.IncomingMessage> messages = pubsub.pull(0, subscriptionPath, maxBatchSize, true); - pubsub.acknowledge( - subscriptionPath, - messages.stream().map(msg -> msg.ackId).collect(ImmutableList.toImmutableList())); + if (!messages.isEmpty()) { + pubsub.acknowledge( + subscriptionPath, + messages.stream().map(msg -> msg.ackId).collect(ImmutableList.toImmutableList())); + } return messages.stream() .map(msg -> new PubsubMessage(msg.elementBytes, msg.attributes, msg.recordId))
[TestPubsub->[pull->[pull],listSubscriptions->[listSubscriptions],assertThatTopicEventuallyReceives->[waitForNMessages],create->[TestPubsub],waitForNMessages->[pull],apply->[evaluate->[evaluate]],checkIfAnySubscriptionExists->[topicPath],publish->[publish]]]
Pulls messages from the queue.
out of curiosity, was this a crasher?
@@ -188,6 +188,10 @@ namespace DSCPython { globalScope = CreateGlobalScope(); } + + // Reset the 'sys.path' value to the default python paths on node evaluation. + code = "import sys" + Environment.NewLine + "sys.path = sys.path[0:3]" + Environment.NewLine + code; + using (PyScope scope = Py.CreateScope()) { ProcessAdditionalBindings(scope, bindingNames, bindingValues);
[DynamoCPythonHandleComparer->[Equals->[Equals],GetHashCode->[GetHashCode]],CPythonEvaluator->[ProcessAdditionalBindings->[Equals],GetTraceBack->[ToString],ToString,Equals],DynamoCPythonHandle->[ToString->[ToString],Dispose->[ToString]]]
Evaluate a Python script with the given name and values.
This should be done in its own scope. Otherwise the line numbers will be moved, which will affect traceback.
@@ -59,7 +59,9 @@ func (b EventBuilder) Build() (common.MapStr, error) { metricsetData := common.MapStr{ "module": b.ModuleName, "name": b.MetricSetName, - "rtt": b.FetchDuration.Nanoseconds() / int64(time.Microsecond), + } + if b.FetchDuration != 0 { + metricsetData["rtt"] = b.FetchDuration.Nanoseconds() / int64(time.Microsecond) } namespace := b.MetricSetName
[Build->[Time,Nanoseconds,Error,Run,Update],Build,Name,Host,Module,Err,Config]
Build builds a new event from the given event This function is called to update the event with the data from the module.
Nice, that means we also remove this value in the system module case
@@ -68,11 +68,13 @@ func cpCmd(c *cliconfig.CpValues) error { } defer runtime.Shutdown(false) - extract := c.Flag("extract").Changed - return copyBetweenHostAndContainer(runtime, args[0], args[1], extract) + copyOpts := copyOptions{ + extract: c.Extract, + } + return copyBetweenHostAndContainer(runtime, args[0], args[1], copyOpts) } -func copyBetweenHostAndContainer(runtime *libpod.Runtime, src string, dest string, extract bool) error { +func copyBetweenHostAndContainer(runtime *libpod.Runtime, src string, dest string, copyOpts copyOptions) error { srcCtr, srcPath := parsePath(runtime, src) destCtr, destPath := parsePath(runtime, dest)
[GetHostIDs,Dir,Hash,WorkingDir,IDMappings,GetUser,SetUsageTemplate,Exit,Digester,Atoi,IsNotExist,MkdirAllAndChownNew,Error,Stat,ReadFile,Cause,BecomeRootInUserNS,GetRuntime,Errorf,State,AddCommand,IsArchivePath,SplitN,IsAbs,HasSuffix,Debugf,Wrapf,Join,Mount,Geteuid,EvalSymlinks,Contains,User,SetSkipStorageSetup,Shutdown,GetAdditionalGroupsForUser,Base,CopyFileWithTarAndChown,Config,MkdirAll,Flag,UntarPathAndChown,IsDir,JoinDirectUserAndMountNS,SetHelpTemplate,CopyWithTarAndChown,LookupContainer,Glob,Unmount,Getwd,Flags,BoolVar]
cpCmd implements the cp command-line interface. FromHostToCtr converts a host container id from the container store to the container.
Why are we only filling one field in here? If we're not using all of `copyOptions`, just pass in Extract to `copyBetweenHostAndContainer` and make a `copyOptions` in there
@@ -238,7 +238,15 @@ def mixed_norm(evoked, forward, noise_cov, alpha, loose=0.2, depth=0.8, for c in gain_info['ch_names']] r = deepcopy(e) r = pick_channels_evoked(r, include=gain_info['ch_names']) - r.data -= np.dot(forward['sol']['data'][sel, :][:, active_set], Xe) + r_tmp = deepcopy(r) + r_tmp.data = np.dot(forward['sol']['data'][sel, :][:, + active_set], Xe) + if e.proj == True: + r_tmp.info['projs'] = deactivate_proj(r_tmp.info['projs'], + copy=False) + r_tmp.proj = False + r_tmp.apply_proj() + r.data -= r_tmp.data residual.append(r) logger.info('[done]')
[tf_mixed_norm->[_window_evoked,_prepare_gain,_make_sparse_stc],mixed_norm->[_prepare_gain,_make_sparse_stc]]
Mixed-norm estimate of a single node. Returns a 1d array of n_sources with the time courses of the n_sources. Finds missing components in a data matrix and returns the index of the last non-zero entry.
I would write a _compute_residual function to avoid the code duplication here too.
@@ -145,7 +145,8 @@ var ngBindTemplateDirective = ['$interpolate', '$compile', function($interpolate * element in a secure way. By default, the innerHTML-ed content will be sanitized using the {@link * ngSanitize.$sanitize $sanitize} service. To utilize this functionality, ensure that `$sanitize` * is available, for example, by including {@link ngSanitize} in your module's dependencies (not in - * core Angular.) You may also bypass sanitization for values you know are safe. To do so, bind to + * core Angular). In order to use {@link ngSanitize} in your module's dependencies, you need to add "angular-sanitize.js" to your index.html. + * You may also bypass sanitization for values you know are safe. To do so, bind to * an explicitly trusted value via {@link ng.$sce#trustAsHtml $sce.trustAsHtml}. See the example * under {@link ng.$sce#Example Strict Contextual Escaping (SCE)}. *
[No CFG could be retrieved]
The ng-bind-html directive renders the bound contents as HTML. Requires the necessary JS files and controller to be included before use.
I don't think this change is adding much --- the specific instructions are only helpful to people under a specific configuration, and the real instructions for this are laid out more clearly in the tutorial
@@ -237,7 +237,7 @@ module GobiertoBudgets end def place_position_in_ranking(options) - id = %w{ine_code year code kind}.map {|f| options[f.to_sym]}.join('/') + id = %w{organization_id year code kind}.map {|f| options[f.to_sym]}.join('/') response = budget_line_query(options.merge(to_rank: true)) buckets = response['hits']['hits'].map{|h| h['_id']}
[all->[functional_codes_for_economic_budget_line],compare->[search],any_data?->[search],budget_line_query->[search,for_ranking],top_differences->[search],search->[search]]
Places the position in ranking for the specified budget line.
%w-literals should be delimited by ( and ).<br>Space between { and | missing.<br>Space missing inside }.<br>Prefer double-quoted strings unless you need single quotes to avoid extra backslashes for escaping.
@@ -164,6 +164,7 @@ namespace Dynamo.PackageManager if (body.name == null || body.version == null) throw new Exception("The header is missing a name or version field."); + // TODO: Add serialization part of hosts specified in package.json var pkg = new Package( Path.GetDirectoryName(headerPath), body.name,
[Package->[Log->[Log],UninstallCore->[MarkForUninstall]]]
Creates a package from a JSON file.
did you verify if this now gets serialized?
@@ -2,6 +2,18 @@ import assert from 'assert'; import { HDLegacyP2PKHWallet, HDSegwitP2SHWallet } from '../../class'; import AOPP from '../../class/aopp'; +import * as BlueElectrum from '../../blue_modules/BlueElectrum'; // so it connects ASAP + +beforeAll(async () => { + // awaiting for Electrum to be connected. For RN Electrum would naturally connect + // while app starts up, but for tests we need to wait for it + await BlueElectrum.waitTillConnected(); +}); + +afterAll(() => { + // after all tests we close socket so the test suite can actually terminate + BlueElectrum.forceDisconnect(); +}); describe('AOPP', () => { it('can validate uri', async () => {
[No CFG could be retrieved]
Describe the type of a specific network object. test that the address is in p2sh p2wpkh.
AOPP doesn't need `BlueElectrum`
@@ -279,7 +279,7 @@ namespace Dynamo.Core /// <param name="model">The model to check against.</param> /// <returns>Returns true if the model has already been recorded in the /// current action group, or false otherwise.</returns> - private bool IsRecordedInActionGroup(XmlElement group, ModelBase model) + private bool IsRecordedInActionGroup(XmlElement group, ModelBase model, UserAction action) { if (null == group) throw new ArgumentNullException("group");
[UndoRedoRecorder->[Clear->[Clear],UndoActionGroup->[DeleteModel,ReloadModel,CreateModel,RecordActionInternal],RecordActionInternal->[IsRecordedInActionGroup,Undo,SetNodeAction],RecordCreationForUndo->[Clear],RecordDeletionForUndo->[Clear],ActionGroupDisposable->[Dispose->[EndActionGroup]],RecordModificationForUndo->[Clear],ModelModificationUndoHelper->[Dispose->[SetNodeAction,RecordCreationForUndo],Undo,RecordModificationForUndo],RedoActionGroup->[CreateModel,ReloadModel,DeleteModel,RecordActionInternal]]]
Checks if a model is recorded in an action group.
We need to add a new param tag here. :)
@@ -81,7 +81,10 @@ class EmailDigestArticleCollector end def last_user_emails - @last_user_emails ||= @user.email_messages.select(:sent_at, - :opened_at).where(mailer: "DigestMailer#digest_email").limit(10) + @last_user_emails ||= @user + .email_messages + .select(:sent_at, :opened_at) + .where(mailer: "DigestMailer#digest_email") + .limit(10) end end
[EmailDigestArticleCollector->[should_receive_email?->[current],open_rate->[present?,count],user_has_followings?->[any?,positive?],cutoff_date->[max,utc],last_email_sent_at->[sent_at],last_user_emails->[limit],articles_to_send->[should_receive_email?,limit,user_has_followings?,length,experience_level],days_until_next_email->[round,max,periodic_email_digest_min,periodic_email_digest_max,tanh]]]
Returns the last 10 email messages opened by the user.
This is a simple reformatting of the method chain
@@ -52,12 +52,11 @@ public class ServerUserSession extends AbstractUserSession { private final DbClient dbClient; private final OrganizationFlags organizationFlags; private final DefaultOrganizationProvider defaultOrganizationProvider; - private final Supplier<List<GroupDto>> groups = Suppliers.memoize(this::loadGroups); + private final Supplier<Collection<GroupDto>> groups = Suppliers.memoize(this::loadGroups); private final Supplier<Boolean> isSystemAdministratorSupplier = Suppliers.memoize(this::loadIsSystemAdministrator); - private SetMultimap<String, String> projectUuidByPermission = HashMultimap.create(); - private SetMultimap<String, String> permissionsByOrganizationUuid; - private Map<String, String> projectUuidByComponentUuid = newHashMap(); - private List<String> projectPermissionsCheckedByUuid = new ArrayList<>(); + private final Map<String, String> projectUuidByComponentUuid = newHashMap(); + private Map<String, Set<String>> permissionsByOrganizationUuid; + private Map<String, Set<String>> permissionsByProjectUuid; ServerUserSession(DbClient dbClient, OrganizationFlags organizationFlags, DefaultOrganizationProvider defaultOrganizationProvider, @Nullable UserDto userDto) {
[ServerUserSession->[loadIsSystemAdministrator->[isRoot],getName->[getName],isRoot->[isRoot],hasProjectUuidPermission->[getUserId],getLogin->[getLogin]]]
Creates a new server user session. Get login.
I think this should be a Set and equals/hashcode should be defined in GroupDto (based on the id) It's important we know the groups are not ordered, but also as important that there is no duplicate. WDYT?
@@ -201,7 +201,15 @@ describe('amp-a4a', () => { const child = element.querySelector('iframe[src]'); expect(child).to.be.ok; expect(child.src).to.have.string(srcUrl); - expect(child.getAttribute('name')).not.to.be.ok; + const nameData = child.getAttribute('name'); + expect(nameData).to.be.ok; + expect(JSON.parse.bind(null, nameData), nameData).not.to.throw(Error); + const attributes = JSON.parse(nameData); + expect(attributes).to.be.ok; + expect(attributes._context).to.be.ok; + if (!attributes._context.amp3pSentinel) { + expect(attributes._context.sentinel).to.be.ok; + } expect(child).to.be.visible; }
[No CFG could be retrieved]
Checks that an element is an amp-ad that is rendered via nameframe. This is the main method that is called when the test page is loaded.
Ah. You're doing essentially the same test here as above. I suggest factoring the `nameData` validation out to a common function that you call from both places.
@@ -194,7 +194,7 @@ namespace System.Diagnostics public abstract partial class TraceFilter { protected TraceFilter() { } - public abstract bool ShouldTrace(System.Diagnostics.TraceEventCache cache, string source, System.Diagnostics.TraceEventType eventType, int id, string formatOrMessage, object[] args, object data1, object[] data); + public abstract bool ShouldTrace(System.Diagnostics.TraceEventCache? cache, string source, System.Diagnostics.TraceEventType eventType, int id, string? formatOrMessage, object[]? args, object? data1, object[]? data); } public enum TraceLevel {
[SwitchAttribute->[Class,Method,Constructor,Property,Event,Assembly],SwitchLevelAttribute->[Class],Advanced]
abstract ShouldTrace implementation.
Same question about these `object[]?`s... should they be `object?[]?`?
@@ -639,8 +639,7 @@ int tls_parse_ctos_supported_groups(SSL *s, PACKET *pkt, X509 *x, return 0; } - if (!s->hit - && !PACKET_memdup(&supported_groups_list, + if (!PACKET_memdup(&supported_groups_list, &s->session->ext.supportedgroups, &s->session->ext.supportedgroups_len)) { *al = SSL_AD_DECODE_ERROR;
[tls_construct_stoc_etm->[SSLerr,WPACKET_put_bytes_u16],tls_parse_ctos_supported_groups->[PACKET_memdup,PACKET_remaining,PACKET_as_length_prefixed_2],tls_parse_ctos_ems->[PACKET_remaining],tls_parse_ctos_status_request->[PACKET_get_1,sk_OCSP_RESPID_new_null,PACKET_get_length_prefixed_2,d2i_OCSP_RESPID,d2i_X509_EXTENSIONS,PACKET_end,sk_X509_EXTENSION_pop_free,sk_OCSP_RESPID_pop_free,PACKET_remaining,sk_OCSP_RESPID_push,PACKET_as_length_prefixed_2,OCSP_RESPID_free,PACKET_data],tls_construct_stoc_renegotiate->[WPACKET_put_bytes_u16,WPACKET_start_sub_packet_u8,WPACKET_memcpy,SSLerr,WPACKET_start_sub_packet_u16,WPACKET_close],tls_parse_ctos_ec_pt_formats->[PACKET_remaining,PACKET_as_length_prefixed_1,PACKET_memdup],tls_parse_ctos_alpn->[PACKET_memdup,PACKET_remaining,PACKET_get_length_prefixed_1,PACKET_as_length_prefixed_2],tls_parse_ctos_use_srtp->[PACKET_get_1,PACKET_forward,SSLerr,PACKET_get_sub_packet,PACKET_get_net_2,sk_SRTP_PROTECTION_PROFILE_num,sk_SRTP_PROTECTION_PROFILE_value,SSL_get_srtp_profiles,PACKET_remaining,STACK_OF],tls_parse_ctos_session_ticket->[PACKET_data,PACKET_remaining,session_ticket_cb],int->[tls_curve_allowed],tls_construct_stoc_use_srtp->[WPACKET_put_bytes_u16,SSLerr,WPACKET_put_bytes_u8,WPACKET_start_sub_packet_u16,WPACKET_close],tls_construct_stoc_key_share->[ssl_derive,ssl_generate_pkey,WPACKET_put_bytes_u16,OPENSSL_free,SSLerr,EVP_PKEY_free,EVP_PKEY_get1_tls_encodedpoint,WPACKET_start_sub_packet_u16,WPACKET_sub_memcpy_u16,WPACKET_close],tls_construct_stoc_session_ticket->[tls_use_ticket,SSLerr,WPACKET_put_bytes_u16],tls_parse_ctos_sig_algs->[tls1_save_sigalgs,PACKET_remaining,PACKET_as_length_prefixed_2],tls_construct_stoc_ems->[SSLerr,WPACKET_put_bytes_u16],tls_construct_stoc_next_proto_neg->[npn_advertised_cb,SSLerr,WPACKET_put_bytes_u16,WPACKET_sub_memcpy_u16],tls_parse_ctos_key_share->[EVP_PKEY_set1_tls_encodedpoint,PACKET_get_length_prefixed_2,tls1_ec_curve_id2nid,EVP_PKEY_new,SSLerr,EVP_PKEY_free,PACKET_get_net_2,EVP_PKEY_CTX_set_ec_p
aramgen_curve_nid,EVP_PKEY_CTX_free,PACKET_remaining,EVP_PKEY_paramgen_init,EVP_PKEY_set_type,PACKET_as_length_prefixed_2,EVP_PKEY_CTX_new_id,tls1_get_curvelist,EVP_PKEY_paramgen,PACKET_data,check_in_list],tls_parse_ctos_renegotiate->[PACKET_get_1,memcmp,PACKET_get_bytes,SSLerr],tls_construct_stoc_cryptopro_bug->[SSLerr,WPACKET_memcpy,SSL_get_options],tls_construct_stoc_server_name->[SSLerr,WPACKET_put_bytes_u16],tls_construct_stoc_alpn->[WPACKET_sub_memcpy_u8,WPACKET_put_bytes_u16,SSLerr,WPACKET_start_sub_packet_u16,WPACKET_close],tls_construct_stoc_status_request->[WPACKET_put_bytes_u16,SSLerr,SSL_IS_TLS13,WPACKET_start_sub_packet_u16,tls_construct_cert_status_body,WPACKET_close],tls_parse_ctos_srp->[PACKET_strndup,PACKET_contains_zero_byte,PACKET_as_length_prefixed_1],tls_parse_ctos_server_name->[PACKET_get_1,PACKET_strndup,strlen,PACKET_remaining,PACKET_as_length_prefixed_2,PACKET_contains_zero_byte,PACKET_equal],tls_construct_stoc_ec_pt_formats->[WPACKET_sub_memcpy_u8,WPACKET_put_bytes_u16,SSLerr,tls1_get_formatlist,WPACKET_start_sub_packet_u16,WPACKET_close]]
Parse the list of supported groups.
Style fix indentation of the two other arguments.
@@ -1114,7 +1114,8 @@ namespace { case AST_Decl::NT_sequence: { AST_Sequence* seq_node = dynamic_cast<AST_Sequence*>(type); AST_Type* base_node = seq_node->base_type(); - // TODO(iguessthislldo): XCDR Stuff? + const ExtensibilityKind exten = be_global->extensibility(type); + idl_max_serialized_size_dheader(encoding, exten, size); size_t bound = seq_node->max_size()->ev()->u.ulval; align(encoding, size, 4); size += 4;
[No CFG could be retrieved]
The maximum serialized size of the type.
Should this move above the `switch` so it doesn't have to be repeated in the various case blocks?
@@ -236,6 +236,14 @@ interface CategoryInterface extends AuditableInterface */ public function setCreator(UserInterface $creator = null); + /** + * Set created. + * Note: This property is set automatically by the TimestampableSubscriber if not set manually. + * + * @return CategoryInterface + */ + public function setCreated(\DateTime $created); + /** * Set changer. * Note: This property is set automatically by the UserBlameSubscriber if not set manually.
[No CFG could be retrieved]
Set the creator of the user.
Do we need to add this to `UPGRADE.md`?
@@ -10731,7 +10731,7 @@ bool Parser::CheckAsmjsModeStrPid(IdentPtr pid) !m_pscan->IsEscapeOnLastTkStrCon() && wcsncmp(pid->Psz(), L"use asm", 10) == 0); - if (isAsmCandidate && m_scriptContext->IsInDebugMode()) + if (isAsmCandidate && m_scriptContext->IsScriptContextInDebugMode()) { // We would like to report this to debugger - they may choose to disable debugging. // TODO : localization of the string?
[No CFG could be retrieved]
Checks if a node in the parse tree is a directive or a binary node, and parses the first node of the parse tree that is not a duplicate of the last node.
So if we have asmjs code in the library file we will emit this error to the debugger. (we were doing it before your change as well). In the future we should not raise this message when the current code is library code - for the user it does not make sense.
@@ -381,12 +381,12 @@ function $SanitizeProvider() { if (node.nodeType === 1) { handler.end(node.nodeName.toLowerCase()); } - nextNode = node.nextSibling; + nextNode = getNonDescendant('nextSibling', node); if (!nextNode) { while (nextNode == null) { - node = node.parentNode; + node = getNonDescendant('parentNode', node); if (node === inertBodyElement) break; - nextNode = node.nextSibling; + nextNode = getNonDescendant('nextSibling', node); if (node.nodeType === 1) { handler.end(node.nodeName.toLowerCase()); }
[No CFG could be retrieved]
This function is used to convert the given HTML into a string. Encodes all potentially dangerous characters in a string into a map of attribute name -> value.
nodeType can be clobberred.
@@ -55,8 +55,14 @@ public class DefaultSchedulerMessageSourceTestCase extends AbstractMuleContextTe schedulerMessageSource.setListener(flow); schedulerMessageSource.setAnnotations(singletonMap(LOCATION_KEY, TEST_CONNECTOR_LOCATION)); + doAnswer(invocationOnMock -> { + CoreEvent inputEvent = invocationOnMock.getArgument(0); + flow.process(inputEvent); + return null; + }).when(sourcePolicy).process(any(CoreEvent.class), any(), any()); + schedulerMessageSource.trigger(); - new PollingProber(RECEIVE_TIMEOUT, 100).check(new Probe() { + new PollingProber(120000, 100).check(new Probe() { @Override public boolean isSatisfied() {
[DefaultSchedulerMessageSourceTestCase->[createMessageSource->[getAppleFlowComponentLocationAnnotations,initialise,createAndRegisterFlow,DefaultSchedulerMessageSource,scheduler,setAnnotations],getStartUpRegistryObjects->[singletonMap],disposeScheduler->[reset,any,start,cpuLightScheduler,anyLong,stop,createMessageSource,scheduleAtFixedRate,dispose,getSchedulerService],scheduler->[FixedFrequencyScheduler,setFrequency],after->[disposeIfNeeded,stopIfNeeded],simplePoll->[trigger,singletonMap,setListener,Probe,createMessageSource,getSensingNullMessageProcessor,check,setAnnotations],getLogger]]
Poll for a single event.
is RECEIVE_TIMEOUT too low?