patch
stringlengths
18
160k
callgraph
stringlengths
4
179k
summary
stringlengths
4
947
msg
stringlengths
6
3.42k
@@ -93,4 +93,4 @@ epochs.plot_image(combine='gfp', group_by='type', sigma=2., cmap="YlGnBu_r") # default, but noisy channels and different channel types can cause the scaling # to be a bit off. Here we define the limits by hand. epochs.plot_topo_image(vmin=-250, vmax=250, title='ERF images', sigma=2., - fig_facecolor='w', font_color='k') +fig_facecolor='w', font_color='k') \ No newline at end of file
[read_events,load_data,plot_events,plot_image,plot,data_path,set_eeg_reference,epochs,read_raw_fif,plot_topo_image,pick_events,join,Epochs]
This function is the default method for plotting the Topo image of the NI - 1.
restore whitespace indentation, and newline at end of file
@@ -66,7 +66,6 @@ import org.rstudio.studio.client.workbench.model.Session; import org.rstudio.studio.client.workbench.prefs.events.UserPrefsChangedEvent; import org.rstudio.studio.client.workbench.prefs.events.UserPrefsChangedHandler; import org.rstudio.studio.client.workbench.prefs.model.UserPrefs; -import org.rstudio.studio.client.workbench.views.source.SourceBuildHelper; import org.rstudio.studio.client.workbench.views.source.events.NotebookRenderFinishedEvent; import com.google.gwt.core.client.JavaScriptObject;
[RmdOutput->[performRenderOperation->[execute],displayOfficeDoc->[execute->[execute],execute],showDownloadPreviewFileDialog->[execute->[execute]],reRenderPreview->[reRenderPreview],displayRenderResult->[displayOfficeDoc],displayHTMLRenderResult->[execute->[displayRenderResult],displayRenderResult]]]
Imports an object from the RMD model. Implementation of RmdOutput.
update file copyright year
@@ -150,10 +150,11 @@ public class FileUtils * }}</pre> * * @param file the file to map + * * @return a {@link MappedByteBufferHandler}, wrapping a read-only buffer reflecting {@code file} - * @throws FileNotFoundException if the {@code file} does not exist - * @throws IOException if an I/O error occurs * + * @throws FileNotFoundException if the {@code file} does not exist + * @throws IOException if an I/O error occurs * @see FileChannel#map(FileChannel.MapMode, long, long) */ public static MappedByteBufferHandler map(File file) throws IOException
[FileUtils->[map->[map],FileCopyResult->[addFiles->[addSizedFiles],addFile->[addFiles]],retryCopy->[retryCopy]]]
Map a file and return a MappedByteBufferHandler.
I wonder if there is no such utility already in Apache Commons IO, Guava, or JDK itself?
@@ -39,6 +39,13 @@ class Test_Admin_Menu extends WP_UnitTestCase { */ public static $domain; + /** + * The customizer default link. + * + * @var string + */ + public static $customize_slug; + /** * Admin menu instance. *
[Test_Admin_Menu->[test_add_tools_menu->[add_tools_menu,assertArrayNotHasKey,assertSame,assertNotContains,assertContains],test_add_my_home_menu->[assertContains,add_my_home_menu,assertArrayNotHasKey,assertSame],test_add_comments_menu->[assertEmpty,create,add_comments_menu,assertSame],test_add_appearance_menu->[assertContains,add_appearance_menu,assertArrayNotHasKey,assertSame],test_add_users_menu->[create,assertEmpty,add_users_menu,assertSame,assertContains],test_add_jetpack_upgrades_menu->[add_upgrades_menu,assertArrayNotHasKey,assertSame],test_add_media_menu->[assertNotContains,assertArrayNotHasKey,add_media_menu,assertSame],test_add_admin_menu_separator->[add_admin_menu_separator,assertSame],wpSetUpBeforeClass->[create,get_site_suffix],test_add_page_menu->[assertEmpty,add_page_menu,assertSame],test_migrate_submenus->[assertArrayHasKey,assertArrayNotHasKey,assertSame,migrate_submenus],test_add_wpcom_upgrades_menu->[add_upgrades_menu,assertContains,assertSame],test_get_instance->[assertInstanceOf,assertSame],test_add_stats_menu->[assertArrayNotHasKey,add_stats_menu,assertSame],test_admin_menu_output->[assertEquals,reregister_menu_items,assertSame],test_add_custom_post_type_menu->[assertContains,assertNotSame,add_custom_post_type_menu,assertSame],test_add_options_menu->[add_options_menu,assertContains,assertNotContains],test_add_posts_menu->[add_posts_menu,assertArrayNotHasKey,assertSame]]]
Creates a class which implements the admin menu test. Test get_instance.
Is this needed?
@@ -70,7 +70,7 @@ class FeatureManagement end def self.fake_banner_mode? - Rails.env.production? && Figaro.env.domain_name != 'secure.login.gov' + Rails.env.production? && ENVS_DO_NOT_DISPLAY_FAKE_BANNER.exclude?(Figaro.env.domain_name) end def self.enable_saml_cert_rotation?
[FeatureManagement->[development_and_identity_pki_disabled?->[identity_pki_disabled?],prefill_otp_codes_allowed_in_production?->[telephony_disabled?],development_and_telephony_disabled?->[telephony_disabled?]]]
missing - banner - mode.
WDYT of checking `LoginGov::Hostdata.env` instead? That way it would be `staging` or `prod` and maybe easier to follow than domain name?
@@ -6,14 +6,10 @@ */ $bookmark_guid = elgg_extract('guid', $vars); -elgg_entity_gatekeeper($bookmark_guid, 'object', 'bookmarks'); +elgg_entity_gatekeeper($bookmark_guid, 'object', 'bookmarks', true); $bookmark = get_entity($bookmark_guid); -if (!$bookmark->canEdit()) { - throw new \Elgg\EntityPermissionsException(elgg_echo('bookmarks:unknown_bookmark')); -} - $title = elgg_echo('edit:object:bookmarks'); elgg_push_entity_breadcrumbs($bookmark);
[canEdit]
Add bookmark page.
i believe the canEdit check can be removed now we have the check in the gatekeeper
@@ -14,7 +14,7 @@ internal static partial class Interop [DllImport(Interop.Libraries.Mswsock, SetLastError = true)] internal static extern unsafe bool TransmitFile( SafeHandle socket, - SafeHandle fileHandle, + SafeHandle? fileHandle, int numberOfBytesToWrite, int numberOfBytesPerSend, NativeOverlapped* overlapped,
[Interop->[Mswsock->[TransmitFileBuffers->[Sequential],TransmitFile->[Mswsock]]]]
Transmit a file if it is not found.
In what situation do we pass null here? #Closed
@@ -390,6 +390,10 @@ func IsTerminal(state pps.JobState) bool { // UpdateJobState performs the operations involved with a job state transition. func UpdateJobState(pipelines col.ReadWriteCollection, jobs col.ReadWriteCollection, jobPtr *pps.EtcdJobInfo, state pps.JobState, reason string) error { + if jobPtr.State == pps.JobState_JOB_FAILURE { + return nil + } + // Update pipeline pipelinePtr := &pps.EtcdPipelineInfo{} if err := pipelines.Get(jobPtr.Pipeline.Name, pipelinePtr); err != nil {
[Decode->[Decode],NextCreatePipelineRequest->[Decode]]
UpdateJobState updates the state of a job in the pipeline. Update job info.
I'm not sure if this is a good idea. It seems like it could mask bugs? I mean, I guess it is good to keep the job in a failed state if it was marked as failed before, but we would not know about the issue we found if this existed. The side effects from what you fixed (uploading a datums object, attempting egress, etc.) would go unnoticed without the job state indicating an issue.
@@ -58,14 +58,12 @@ import org.junit.After; import org.junit.AfterClass; import org.junit.Assert; import org.junit.BeforeClass; -import org.junit.Ignore; import org.junit.Test; import org.yaml.snakeyaml.Yaml; /** * Test cases for mini ozone cluster. */ -@Ignore public class TestMiniOzoneCluster { private MiniOzoneCluster cluster;
[TestMiniOzoneCluster->[testDNstartAfterSCM->[getStorageContainerManager,getState,build,sleep,stop,getValues,getDatanodeStateMachine,waitForClusterToBeReady,restartHddsDatanode,restartStorageContainerManager,assertEquals],cleanup->[shutdown],afterClass->[deleteQuietly],testContainerRandomPort->[assertTrue,start,assertNotEquals,getInt,toString,getAbsolutePath,getIPCPort,DatanodeStateMachine,set,intValue,assertFalse,randomDatanodeDetails,stop,setBoolean,close,getConf,assertEquals,getTestDir,add],createMalformedIDFile->[randomDatanodeDetails,write,writeDatanodeDetailsTo,FileOutputStream,close,getBytes,delete],testStartMultipleDatanodes->[getDatanodeDetails,assertTrue,size,build,connect,getHddsDatanodes,waitForClusterToBeReady,getFirstNode,add,assertEquals,isConnected,XceiverClientGrpc],setup->[OzoneConfiguration,mkdirs,setBoolean,set,toString],testDatanodeIDPersistent->[setCertSerialId,assertTrue,setPort,Yaml,getProtoBufMessage,newPort,FileReader,File,getCertSerialId,load,writeTo,randomDatanodeDetails,nextLong,writeDatanodeDetailsTo,assertEquals,fail,delete,createMalformedIDFile,FileOutputStream,readDatanodeDetailsFrom],File,getTestDir]]
Method to import all packages that are used by mini - ozone clusters. Creates a new instance of the HDSSecuritySystem.
Why was it ignored and how is the problem fixed by this patch? Do we need to run multiple checks to prove that there is no intermittency any more?
@@ -580,8 +580,14 @@ namespace Dynamo.Controls if (previewEnabled == false && !PreviewControl.StaysOpen) { - PreviewControl.TransitionToState(PreviewControl.State.Condensed); - PreviewControl.TransitionToState(PreviewControl.State.Hidden); + if (PreviewControl.IsExpanded) + { + PreviewControl.TransitionToState(PreviewControl.State.Condensed); + PreviewControl.TransitionToState(PreviewControl.State.Hidden); + } else if (PreviewControl.IsCondensed) + { + PreviewControl.TransitionToState(PreviewControl.State.Hidden); + } } }
[NodeView->[ExpandPreviewControl->[IsMouseOver,Expanded,TransitionToState,IsCondensed],NodeViewReady->[OnNodeViewReady],OnPreviewControlMouseLeave->[GetPosition,Condensed,Control,IsInTransition,StaysOpen,IsMouseInsideNodeOrPreview,Captured,TransitionToState,Modifiers],OnNodeViewMouseEnter->[CachedValue,Condensed,IsInTransition,IsHidden,TransitionToState,BindToDataSource,DelayInvoke,IsDataBound],IsMouseInsideNodeOrPreview->[HitTest,Stop],NickNameBlock_OnMouseDown->[Execute,ClickCount,WriteLine,Handled,CanExecute],OnDataContextChanged->[NewValue,Value,Height,RenderSize,HasValue,Width],OnNodeViewMouseMove->[GetPosition,Captured,IsCondensed,IsMouseInsideNodeOrPreview,TransitionToState,Hidden],OnNodeViewUnloaded->[RequestsSelection,RequestShowNodeRename,PropertyChanged,DispatchedToUI,RequestShowNodeHelp],OnNodeViewMouseLeave->[GetPosition,Condensed,IsMouseOver,Captured,StaysOpen,ZIndex,IsCondensed,IsExpanded,IsMouseInsideNodeOrPreview,TransitionToState,Hidden],NodeLogic_DispatchedToUI->[Invoke,ActionToDispatch],TogglePreviewControlAllowance->[Condensed,TransitionToState,StaysOpen,Hidden],topControl_MouseRightButtonDown->[AsDynamoType,Handled,ExecuteCommand,GUID],ViewModel_RequestShowNodeHelp->[Show,Handled,Owner,GetWindow,Model],DisableInteraction->[Clear,Execute,IsEnabled,CanExecute,Children,Dead],PrepareZIndex->[parent,ZIndex,NodeStartZIndex,StaticZIndex],OnPreviewControlStateChanged->[GetPosition,Condensed,IsInTransition,IsTestMode,IsMouseOver,StaysOpen,CurrentState,DelayInvoke,IsMouseInsideNodeOrPreview,Expanded,Captured,TransitionToState,Hidden],OnNodeViewLoaded->[RequestsSelection,RequestShowNodeRename,PropertyChanged,DispatchedToUI,RequestShowNodeHelp],CachedValueChanged->[CachedValue,IsInTransition,IsHidden,BeginInvoke,BindToDataSource,IsDataBound,EnqueueBindToDataSource],OnPreviewMouseLeftButtonDown->[BringToFront],ViewModel_RequestsSelection->[RightShift,Remove,AddUnique,NodeLogic,IsSelected,IsKeyDown,ClearSelection,LeftShift],OnSizeChanged->[ActualHeight,Execu
te,HasValue,CanExecute],BringToFront->[PrepareZIndex,IsMouseOver,IsTestMode,ZIndex,StaticZIndex,MaxValue],EnableInteraction->[Children,Execute,IsEnabled,ContainsKey],ViewModel_RequestShowNodeRename->[BindToProperty,Handled,EditNodeWindowTitle,ShowDialog,Explicit,TwoWay,Owner,GetWindow,DynamoViewModel],NodeLogic_PropertyChanged->[RaiseCanExecuteChanged,CachedValueChanged,PropertyName],OnPreviewControlMouseEnter->[DelayInvoke,IsCondensed],topControl_MouseLeftButtonDown->[Control,ExecuteCommand,AsDynamoType,ClickCount,Execute,OnRequestReturnFocusToView,WpfUtilities,Focus,Handled,CanExecute,GUID,Modifiers],InitializeComponent,SizeChanged,MouseLeave,DataTemplatesDictionary,SetZIndex,StateChanged,MouseEnter,DynamoModernDictionary,DynamoColorsAndBrushesDictionary,DynamoConvertersDictionary,PortsDictionary,Loaded,Add]]
Toggle preview control allowance.
condensed and hidden? Hidden should always make the preview condense and hide. Think about it.
@@ -85,8 +85,12 @@ public class VersionInfo { return info.getProperty("srcChecksum", "Unknown"); } - public String getHadoopProtocVersion() { - return info.getProperty("hadoopProtocVersion", "Unknown"); + public String getHadoopProtoc2Version() { + return info.getProperty("hadoopProtoc2Version", "Unknown"); + } + + public String getHadoopProtoc3Version() { + return info.getProperty("hadoopProtoc3Version", "Unknown"); } public String getGrpcProtocVersion() {
[VersionInfo->[getBuildVersion->[getRevision,getUser,getSrcChecksum,getVersion]]]
get srcChecksum HDFSProtocVersion and gRPCProtocVersion.
This class is marked `Public` and `Stable`, I guess we should keep this method for compatibility.
@@ -575,7 +575,16 @@ exec(compile(open(__file__).read().replace('\\r\\n', '\\n'), __file__, 'exec')) name = name.replace(os.path.sep, '/') return name - def install(self, install_options, global_options=(), root=None): + def _get_setup_command(self, setup_py_path): + cmd = ('import setuptools;' + '__file__ = %r;' + 'setup_py=open(__file__).read();' + 'setup_py=setup_py.replace("\\r\\n", "\\n");' + 'exec(compile(setup_py, __file__, "exec"))') + + return cmd % setup_py_path + + def install(self, install_options, global_options=[], root=None): if self.editable: self.install_editable(install_options, global_options) return
[parse_requirements->[parse_requirements,from_line,from_editable],RequirementSet->[cleanup_files->[remove_temporary_source],install->[remove_temporary_source,rollback_uninstall,install,uninstall,values,commit_uninstall],uninstall->[values,commit_uninstall,uninstall],create_bundle->[_clean_zip_name,bundle_requirements],__init__->[Requirements],prepare_files->[requirements,update_editable,check_if_exists,build_location,run_egg_info,values,InstallRequirement,add_requirement,archive,has_requirement,move_bundle_files,assert_source_matches_version,bundle_requirements],has_requirements->[values],locate_files->[values,build_location,check_if_exists],has_editables->[values],bundle_requirements->[values],__str__->[values]],UninstallPthEntries->[remove->[remove],add->[add]],InstallRequirement->[from_path->[from_path],requirements->[egg_info_lines],egg_info_lines->[egg_info_data],install->[prepend_root],move_wheel_files->[move_wheel_files],dependency_links->[egg_info_lines],run_egg_info->[correct_build_location],installed_version->[pkg_info],pkg_info->[egg_info_data,egg_info_path],bundle_requirements->[InstallRequirement]],Requirements->[__repr__->[keys]],UninstallPathSet->[remove->[_can_uninstall,values,remove,compact,_stash],add_pth->[_permitted,add],add->[_permitted,add],compact->[add],rollback->[_stash,rollback]]]
Clean up a name for a zip file. This function is called when a record file is not found in the install record. It will.
No, there's no explicit override logic. The last option given on the command line is the one that will be used. In our case requiremens.txt options will always override cmd options. Maybe I should reverse that?
@@ -123,6 +123,7 @@ public class MysqlStateStore<T extends State> implements StateStore<T> { private final String UPSERT_JOB_STATE_SQL; private final String SELECT_JOB_STATE_SQL; + private final String SELECT_ALL_JOB_STATE_SQL; private final String SELECT_JOB_STATE_WITH_LIKE_SQL; private final String SELECT_JOB_STATE_EXISTS_SQL; private final String SELECT_JOB_STATE_NAMES_SQL;
[MysqlStateStore->[createAlias->[exists],getAll->[getAll],putAll->[addStateToDataOutputStream]]]
Manages the persistence and retrieval of the state in a MySQL database. Method to create a MysqlStateStore object.
rename variable to SELECT_ALL_JOBS_STATE_SQL?
@@ -297,8 +297,8 @@ func (c *Commit) CommitsCount() (int64, error) { } // CommitsByRange returns the specific page commits before current revision, every page's number default by CommitsRangeSize -func (c *Commit) CommitsByRange(page int) (*list.List, error) { - return c.repo.commitsByRange(c.ID, page) +func (c *Commit) CommitsByRange(page, pageSize int) (*list.List, error) { + return c.repo.commitsByRange(c.ID, page, pageSize) } // CommitsBefore returns all the commits before current revision
[GetSubModule->[GetSubModules],Parent->[ParentID],HasPreviousCommit->[Parent,HasPreviousCommit,ParentCount],ImageInfo->[IsImageFile]]
CommitsByRange returns a list of commits in the given page range.
why not use **ListOptions** there ?
@@ -709,7 +709,9 @@ class Channel(object): partner_known = self.partner_state.balance_proof.is_known(hashlock) if not our_known and not partner_known: - msg = 'Secret doesn\'t correspond to a registered hashlock. hashlock:{} token:{}'.format( + msg = ( + "Secret doesn't correspond to a registered hashlock. hashlock:{} token:{}" + ).format( pex(hashlock), pex(self.token_address), )
[ChannelExternalState->[close->[close],unlock->[unlock],update_transfer->[update_transfer],settle->[settle]],InvalidLocksRoot->[__init__->[__init__]],Channel->[channel_closed->[get_known_unlocks,query_transferred_amount,unlock,update_transfer],blockalarm_for_settle->[_settle->[query_settled,set_settled,settle]],register_transfer_from_to->[is_pending,compute_merkleroot_with,unclaimed_merkletree,InvalidLocksRoot,merkleroot_for_unclaimed,InvalidNonce,register_locked_transfer,distributable,register_direct_transfer,InsufficientBalance],balance->[balance],release_lock->[release_lock,get_lock_by_hashlock,is_known],isopen->[isopen],__init__->[callback_on_settled,callback_on_closed,callback_on_opened],create_refundtransfer_for->[create_lockedtransfer,is_pending],register_secret->[register_secret,get_lock_by_hashlock,is_known],distributable->[distributable],withdraw_lock->[release_lock,get_lock_by_hashlock,is_known],create_lockedtransfer->[compute_merkleroot_with,distributable],create_mediatedtransfer->[create_lockedtransfer],locked->[locked],create_directtransfer->[merkleroot_for_unclaimed,distributable],outstanding->[locked]],BalanceProof->[register_direct_transfer->[InvalidLocksRoot,merkleroot_for_unclaimed],register_secret->[is_pending,is_known],register_locked_transfer->[unclaimed_merkletree,is_known],release_lock_by_secret->[is_unclaimed,is_pending]],ChannelEndState->[compute_merkleroot_with->[unclaimed_merkletree],register_locked_transfer->[register_locked_transfer],release_lock->[release_lock_by_secret],__init__->[BalanceProof],register_secret->[register_secret],distributable->[locked,balance],register_direct_transfer->[register_direct_transfer],locked->[locked]]]
Registers a secret with the node. get lock by hashlock.
~Please enclose the string literal in single quotes. `' '`.~
@@ -54,8 +54,6 @@ class T::Private::Methods::Signature @parameters = parameters @on_failure = on_failure @override_allow_incompatible = override_allow_incompatible - @generated = generated - @ever_failed = false param_names = parameters.map {|_, name| name} declared_param_names = raw_arg_types.keys
[method_desc->[join,source_location],owner->[owner],each_args_value_type->[to_s,empty?,new,is_a?,raise,each,length],kwarg_names->[keys],new_untyped->[to_h,parameters,new,name,untyped],arg_count->[length],initialize->[parameters,empty?,keys,raise,any?,join,zip,length,coerce,name,index,map],attr_reader,freeze]
Initializes the object with the necessary values. Initialize a new object.
Why did `@ever_failed` disappear too?
@@ -55,11 +55,13 @@ func (w *WebHook) ServeHTTP(writer http.ResponseWriter, req *http.Request, ctx k switch err { case webhook.ErrSecretMismatch, webhook.ErrHookNotEnabled: return errors.NewUnauthorized(fmt.Sprintf("the webhook %q for %q did not accept your secret", hookType, name)) - case nil: - return nil - default: + case webhook.MethodNotSupported: + return errors.NewMethodNotSupported(buildapi.Resource("buildconfighook"), req.Method) + } + if _, ok := err.(*errors.StatusError); !ok && err != nil { return errors.NewInternalError(fmt.Errorf("hook failed: %v", err)) } + return err } warning := err
[ServeHTTP->[Instantiate,Sprintf,Resource,Extract,GetBuildConfig,Errorf,NewInternalError,NewBadRequest,Split,NewNotFound,NewUnauthorized],Sprintf,NewWebHook]
ServeHTTP implements the http. Handler interface for the webhook.
is the branch issue resolved by getting rid of the "default" from the switch?
@@ -2399,6 +2399,7 @@ DomainParticipantImpl::LivelinessTimer::handle_timeout( const ACE_Time_Value& tv, const void* /* arg */) { + Thread_Monitor::Green_Light gl("DomainParticipant"); const MonotonicTimePoint now(tv); ACE_GUARD_RETURN(ACE_Thread_Mutex, guard, this->lock_, 0);
[No CFG could be retrieved]
Adds adjust to the given . Automatic liveliness timer.
Does the name passed to the constructor need to be unique (thinking of the case where there are two or more DomainParticipants)?
@@ -25,6 +25,8 @@ import io.confluent.ksql.function.udf.UdfDescription; + " characters with 'x', all digits with 'n', and any other character with '-'.") public class MaskKeepRightKudf { + private final String udfName = this.getClass().getAnnotation(UdfDescription.class).name(); + @Udf(description = "Returns a masked version of the input string. All characters except for the" + " last n will be replaced according to the default masking rules.") public String mask(final String input, final int numChars) {
[MaskKeepRightKudf->[doMask->[validateParams,max,append,mask,substring,toString,StringBuilder,length],validateParams->[KsqlFunctionException],mask->[doMask,getMaskCharacter,Masker]]]
Returns a masked version of the input string. All characters except the last n will be replaced.
This can be `static`.
@@ -755,7 +755,7 @@ describes.realWin( defaultConfig = dict({ 'consents': { 'ABC': { - 'checkConsentHref': 'https://response1', + 'checkConsentHref': 'https://geo-override-check2/', 'promptUI': '123', }, },
[No CFG could be retrieved]
The AMP consent is a special case for the UI and the AMP consent. Provide a function to be called after each element in the DOM has been added to the A.
does this matter? it is a unit test, are we making request to local server?
@@ -211,7 +211,7 @@ void SchedulerRunner::start() { // GLog is not re-entrant, so logs must be flushed in a dedicated thread. if ((i % 3) == 0) { - relayStatusLogs(true); + relayStatusLogs(); } auto loop_step_duration = std::chrono::duration_cast<std::chrono::milliseconds>(
[No CFG could be retrieved]
The main entry point for the scheduler. The time - drift of the scheduler is determined by the time - drift of the scheduler.
This is called on the main scheduler thread (it actually should be moved to and independent thread), so making `relayStatusLogs` synchronous has the potential of delaying this thread.
@@ -27,12 +27,14 @@ p4.quad(x, x - 0.2, y, y - 0.2) paragraph = Paragraph(text="We build up a grid plot manually. Try changing the mode of the plots yourself.") select = Select(title="Sizing mode", value="fixed", options=list(SizingMode), width=300) + +grid = GridBox(children=[(p1, 0, 1), (p2, 1, 0), (p3, 1, 1), (p4, 1, 2)]) +layout = column([paragraph, select, grid]) + select.js_link('value', p1, 'sizing_mode') select.js_link('value', p2, 'sizing_mode') select.js_link('value', p3, 'sizing_mode') select.js_link('value', p4, 'sizing_mode') - -grid = GridBox(children=[(p1, 0, 1), (p2, 1, 0), (p3, 1, 1), (p4, 1, 2)]) -layout = column([paragraph, select, grid]) +select.js_link('value', layout, 'sizing_mode') show(layout)
[quad,list,bezier,show,js_link,dict,GridBox,annular_wedge,Select,sin,column,linspace,Paragraph,figure,line]
Creates a grid box with a select and a grid box with a grid box with a grid.
I would also use `grid([[None, p1, None], [p2, p3, p4]])` instead of the low-level API.
@@ -670,7 +670,7 @@ namespace System.Threading.Tasks if (Task.s_asyncDebuggingEnabled) Task.AddToActiveTasks(t); - if (asyncResult.IsCompleted) + if (asyncResult.IsCompleted || OperatingSystem.IsBrowser()) { try { t.InternalRunSynchronously(scheduler, waitForCompletion: false); } catch (Exception e) { promise.TrySetException(e); } // catch and log any scheduler exceptions
[TaskFactory->[FromAsync->[FromAsyncImpl],ContinueWhenAllImpl->[TaskFactory,CreateCanceledTask],StartNew->[StartNew],ContinueWhenAnyImpl->[CreateCanceledTask],FromAsyncImpl->[FromAsyncCoreLogic]]]
Creates a task from an asynchronous task. if t is not yet scheduled then try to run it asynchronously.
This also might not the solution, but annotating the API with `[UnsupportedOSPlatform("browser")]` would affect many related public APIs
@@ -278,6 +278,8 @@ namespace DynamoShapeManager // Fallback mechanism, look inside libg folders if any of them contains ASM dlls. foreach (var v in versions) { + if (v == null) continue; + var folderName = string.Format("libg_{0}_{1}_{2}", v.Major, v.Minor, v.Build); var dir = new DirectoryInfo(Path.Combine(rootFolder, folderName)); if (!dir.Exists)
[Utilities->[GetGeometryFactoryPath2->[GetLibGPreloaderLocation]]]
Get installed ASM version 2. check if libg - base - v. js is available.
when did this happen?
@@ -141,7 +141,9 @@ class PerforceDriver extends VcsDriver $this->composerInfo = $this->perforce->getComposerInformation('//' . $this->depot . '/' . $identifier); $this->composerInfoIdentifier = $identifier; $result = false; - if (isset($this->composerInfo)) { + return !empty($this->composerInfo); + + if (!empty($this->composerInfo)) { $result = count($this->composerInfo) > 0; }
[PerforceDriver->[getBranches->[getBranches],getComposerInformation->[getComposerInformation],hasComposerFile->[getComposerInformation],getTags->[getTags]]]
Checks if the given composer file exists.
Why is there dead code after this return statement?
@@ -288,9 +288,11 @@ bool process_record_quantum(keyrecord_t *record) { if (record->event.pressed) { switch (keycode) { +#ifndef NO_RESET case RESET: reset_keyboard(); return false; +#endif #ifndef NO_DEBUG case DEBUG: debug_enable ^= 1;
[No CFG could be retrieved]
Check if a key has been pressed and if so enable it. Process a single neccesary action.
XD002 has no physical reset button...
@@ -723,6 +723,13 @@ const ( CRINameContainerD CRIName = "containerd" ) +// ContainerRuntimeName is a type alias for the ContainerRuntime name string. +type ContainerRuntimeName string + +const ( + ContainerRuntimeGVisor ContainerRuntimeName = "gvisor" +) + // ContainerRuntime contains information about worker's available container runtime type ContainerRuntime struct { // Type is the type of the Container Runtime.
[FromInt]
Volume contains information about the type and size of the volume.
Why is this here? I don't think, there should be any known provider names in Gardener's API... Btw, isn't this a clear case for an integration test, that should be part of the respective Extension repo?
@@ -480,14 +480,7 @@ public class RowBasedGrouperHelper @Override public Supplier<Comparable> makeInputRawSupplier(LongColumnSelector selector) { - return new Supplier<Comparable>() - { - @Override - public Comparable get() - { - return selector.get(); - } - }; + return () -> selector.get(); } }
[RowBasedGrouperHelper->[LongInputRawSupplierColumnSelectorStrategy->[makeInputRawSupplier->[get->[get]]],FloatInputRawSupplierColumnSelectorStrategy->[makeInputRawSupplier->[get->[get]]],makeValueConvertFunctions->[apply->[toString],get],makeValueExtractFunction->[apply->[apply],makeTimestampExtractFunction],RowBasedKey->[hashCode->[hashCode],toString->[toString],fromJsonArray->[RowBasedKey],equals->[equals]],RowBasedKeySerde->[makeSerdeHelpersForLimitPushDown->[get],StringRowBasedKeySerdeHelper->[getFromByteBuffer->[get],putToKeyBuffer->[getKey,addToDictionary],compare->[compare]],addToDictionary->[get],fromByteBuffer->[RowBasedKey],LimitPushDownLongRowBasedKeySerdeHelper->[compare->[compare]],LongRowBasedKeySerdeHelper->[putToKeyBuffer->[getKey],compare->[compare]],FloatRowBasedKeySerdeHelper->[putToKeyBuffer->[getKey],compare->[compare]],bufferComparator->[compare->[compare],get],LimitPushDownFloatRowBasedKeySerdeHelper->[compare->[compare]],bufferComparatorWithAggregators->[compare->[get,compare],get,equals],compareDimsInBuffersForNullFudgeTimestampForPushDown->[get,compare],compareDimsInBuffersForNullFudgeTimestamp->[compare],DoubleRowBasedKeySerdeHelper->[putToKeyBuffer->[getKey],compare->[compare]],LimitPushDownDoubleRowBasedKeySerdeHelper->[compare->[compare]],toByteBuffer->[getKey],LimitPushDownStringRowBasedKeySerdeHelper->[compare->[get,compare]]],StringInputRawSupplierColumnSelectorStrategy->[makeInputRawSupplier->[get->[get]]],getValueSuppliersForDimensions->[makeInputRawSupplier],InputRawSupplierColumnSelectorStrategyFactory->[makeColumnSelectorStrategy->[LongInputRawSupplierColumnSelectorStrategy,FloatInputRawSupplierColumnSelectorStrategy,StringInputRawSupplierColumnSelectorStrategy]],RowBasedKeySerdeFactory->[compareDimsInRowsWithAggs->[getKey,toString,get,compare],compareDimsInRows->[getKey],objectComparator->[compare->[getKey,compare]],objectComparatorWithAggs->[compare->[getKey,compare],equals]]]]
Make a Supplier that gets the raw value of a .
Could be `selector::get`
@@ -822,12 +822,12 @@ class RscCompile(ZincCompile): # execution requests. def _runtool_nonhermetic(self, parent_workunit, classpath, main, tool_name, args, distribution): result = self.runjava(classpath=classpath, - main=main, - jvm_options=self.get_options().jvm_options, - args=args, - workunit_name=tool_name, - workunit_labels=[WorkUnitLabel.TOOL], - dist=distribution + main=main, + jvm_options=self.get_options().jvm_options, + args=args, + workunit_name=tool_name, + workunit_labels=[WorkUnitLabel.TOOL], + dist=distribution ) if result != 0: raise TaskError('Running {} failed'.format(tool_name))
[RscCompile->[create_compile_jobs->[CompositeProductAdder->[add_for_target->[add_for_target]],work_for_vts_metacp->[fast_relpath_collection,_is_scala_core_library,ensure_output_dirs_exist,stdout_contents],work_for_vts_rsc->[register_extra_products_from_contexts,fast_relpath_collection,ensure_output_dirs_exist,_paths_from_classpath,is_java_compile_target],_metacpable,_compile_against_rsc_key_for_target,CompositeProductAdder,_rsc_compilable,_rsc_key_for_target,_metacp_key_for_target,_metacp_dep_key_for_target,_only_zinc_compilable],_runtool->[_runtool_hermetic,_runtool_nonhermetic],register_extra_products_from_contexts->[to_classpath_entries->[pathglob_for],confify,to_classpath_entries],_run_metai_tool->[_runtool],create_compile_context->[RscCompileContext],_collect_metai_classpath->[_create_desandboxify_fn,desandboxify],_rsc_key_for_target->[_rsc_compilable,_metacpable,_only_zinc_compilable],_get_jvm_distribution->[HermeticDistribution->[find_libs->[find_libs]],HermeticDistribution],_metacp_dep_key_for_target->[_rsc_compilable,_metacpable,_only_zinc_compilable],_runtool_hermetic->[fast_relpath_collection],select->[_rsc_compilable,_metacpable,_only_zinc_compilable],pre_compile_jobs->[work_for_vts_rsc_jdk->[stdout_contents]]]]
Run a non - hermetic tool.
My editor appears to format this differently than yours, and usually I would expect that if the first argument is indented, the others are indented at the same level. This isn't always followed because some editors require you to manually tab, but if we're trying to reduce whitespace noise in future diffs, I would probably also put the `classpath=` argument on the following line.
@@ -405,6 +405,16 @@ public class DefaultHttp2ConnectionEncoder implements Http2ConnectionEncoder { int queuedData = queue.readableBytes(); if (!endOfStream) { if (queuedData == 0) { + if (queue.isEmpty()) { + // When the queue is empty it means we did clear it because of an error(...) call + // (as otherwise we will have at least 1 entry in there), which will happen either when called + // explicit or when the write itself fails. In this case just set dataSize and padding to 0 + // which will signal back that the whole frame was consumed. + // + // See https://github.com/netty/netty/issues/8707. + padding = dataSize = 0; + return; + } // There's no need to write any data frames because there are only empty data frames in the queue // and it is not end of stream yet. Just complete their promises by getting the buffer corresponding // to 0 bytes and writing it to the channel (to preserve notification order).
[DefaultHttp2ConnectionEncoder->[writeSettings->[writeSettings],writePriority->[writePriority],FlowControlledData->[merge->[size],write->[size,write,writeData]],flowController->[flowController],configuration->[configuration],FlowControlledBase->[operationComplete->[error]],writePushPromise->[writePushPromise],writePing->[writePing],writeFrame->[writeFrame],writeSettingsAck->[writeSettingsAck],close->[close],writeHeaders->[writeHeaders,validateHeadersSentState,flowController],FlowControlledHeaders->[write->[writeHeaders,validateHeadersSentState]]]]
Writes the next available bytes of data to the channel.
nit: you could remove a `return` statement by doing `if {} else {} return;` to remove one jump of control flow.
@@ -3516,10 +3516,7 @@ public abstract class AbstractFlashcardViewer extends NavigationDrawerActivity i // We play sounds through these links when a user taps the sound icon. private boolean filterUrl(String url) { if (url.startsWith("playsound:")) { - // Send a message that will be handled on the UI thread. - Message msg = Message.obtain(); - msg.obj = url.replaceFirst("playsound:", ""); - mHandler.sendMessage(msg); + onCurrentAudioChanged(url); return true; } if (url.startsWith("file") || url.startsWith("data:")) {
[AbstractFlashcardViewer->[MyWebView->[onOverScrolled->[onOverScrolled],onScrollChanged->[onScrollChanged],onTouchEvent->[onTouchEvent],loadDataWithBaseURL->[loadDataWithBaseURL],findScrollParent->[findScrollParent]],LinkDetectingGestureDetector->[executeTouchCommand->[processCardAction],onWebViewCreated->[executeTouchCommand]],undo->[AnswerCardHandler],dispatchKeyEvent->[dispatchKeyEvent],initLayout->[lookUp,clipboardHasText,onTouch,onClick],onCollectionLoaded->[setTitle,onCollectionLoaded],onDestroy->[onDestroy],onCreate->[onCreate],displayMediaUpgradeRequiredSnackbar->[showSnackbar],onSelectedTags->[displayCardQuestion],onKeyDown->[onKeyDown,processCardFunction],displayString->[typeAnsQuestionFilter],fillFlashcard->[updateForNewCard,processCardAction,switchTopBarVisibility],hideEaseButtons->[run],tapOnCurrentCard->[processCardAction],displayAnswerBottomBar->[run],removeFrontSideAudio->[getAnswerFormat],CardViewerWebClient->[filterUrl->[displayCardAnswer,focusAnswerCompletionField,flipOrAnswerCard,getSignalFromUrl,isFullscreen,executeCommand,redrawCard],shouldInterceptRequest->[shouldInterceptRequest],onRenderProcessGone->[destroyWebView,displayCardQuestion,canRecoverFromWebViewRendererCrash,webViewRendererLastCrashedOnCard,recreateWebView,inflateNewView],onReceivedHttpError->[onReceivedHttpError],onPageFinished->[drawMark,drawFlag],onReceivedError->[onReceivedError]],onResume->[setTitle,onResume],onPause->[onPause],blockControls->[typeAnswer],onPageDown->[processCardAction],AnswerCardHandler->[onPreExecute->[onPreExecute]],handleUrlFromJavascript->[filterUrl],loadInitialCard->[AnswerCardHandler],loadUrlInViewer->[processCardAction],initTimer->[getElapsedRealTime],cleanCorrectAnswer->[cleanCorrectAnswer],executeCommand->[undo,suspendCard,showDeleteNoteDialog,editCard,displayCardAnswer,flipOrAnswerCard,buryCard,lookUpOrSelectText,playSounds,suspendNote,buryNote,getRecommendedEase],selectAndCopyText->[processCardAction],answerCard->[AnswerCardHandler,hideLookupButto
n],onFlag->[refreshActionBar,drawFlag],onActivityResult->[onActivityResult,clearClipboard],onKeyUp->[onKeyUp],onBackPressed->[onBackPressed],scrollCurrentCardBy->[processCardAction],performClickWithVisualFeedback->[performClickWithVisualFeedback],displayCardQuestion->[displayCardQuestion,displayString,setInterface,hideEaseButtons],playSounds->[playSounds],updateCard->[isInNightMode,addAnswerSounds,processCardAction],lookUp->[lookUp,clearClipboard],recreateWebView->[createWebView],unblockControls->[typeAnswer],getRecommendedEase->[getAnswerButtonCount],ttsInitialized->[playSounds],dismiss->[blockControls,NextCardHandler],displayCouldNotFindMediaSnackbar->[showSnackbar],MyGestureDetector->[onDoubleTap->[executeCommand],onFling->[onFling,executeCommand],executeTouchCommand->[executeCommand,showLookupButtonIfNeeded]],onMark->[drawMark,refreshActionBar],isControlBlocked->[getControlBlocked],JavaScriptFunction->[ankiIsInFullscreen->[isFullscreen],init->[requireApiVersion,enableJsApi],ankiIsInNightMode->[isInNightMode],ankiIsDisplayingAnswer->[isDisplayingAnswer],ankiGetCardMark->[shouldDisplayMark]],displayCardAnswer->[displayAnswerBottomBar,typeAnsAnswerFilter,actualHideEaseButtons,cleanTypedAnswer,cleanCorrectAnswer],lookUpOrSelectText->[clipboardHasText],showDeveloperContact->[showSnackbar],resumeTimer->[getElapsedRealTime,resumeTimer],updateDeckName->[setTitle],hideLookupButton->[clearClipboard],onConfigurationChanged->[onConfigurationChanged],inflateNewView->[getContentViewAttr],onPageUp->[processCardAction],showSnackbar,SelectEaseHandler]]
This method is called to filter a URL. Check if a specific card is available in the list. Checks if a signal is known and if so updates the keyboard based on the current state. Private method to handle a missing link in the intent hierarchy.
I think it would be nice to extract those three lines into a method in a first commit. Then you can call this method from `controlSound` in a second commit.
@@ -331,7 +331,7 @@ public class Server { } else if (keyStoreType.equals(KsqlRestConfig.SSL_STORE_TYPE_JKS)) { options.setKeyStoreOptions( new JksOptions().setPath(keyStorePath).setPassword(keyStorePassword.value())); - } else if (keyStoreType.equals(KsqlRestConfig.SSL_STORE_TYPE_PKCS12)) { + } else if (keyStoreType.equalsIgnoreCase(KsqlRestConfig.SSL_STORE_TYPE_PKCS12)) { options.setPfxKeyCertOptions( new PfxOptions().setPath(keyStorePath).setPassword(keyStorePassword.value())); }
[Server->[restart->[start,stop],configureTlsCertReload->[start]]]
Sets the TLS options based on the given configuration.
Could you also use equalsIgnoreCase() here? We probably need it too.
@@ -833,6 +833,8 @@ class TupleType(Type): column: int = -1, implicit: bool = False) -> None: self.items = items self.fallback = fallback + # TODO: assert not (isinstance(fallback, Instance) and fallback.type and + # fallback.type.fullname() == 'builtins.tuple' and not fallback.args) self.implicit = implicit self.can_be_true = len(self.items) > 0 self.can_be_false = len(self.items) == 0
[TypeQuery->[visit_star_type->[accept],visit_overloaded->[items],visit_type_type->[accept],query_types->[accept]],TypeList->[serialize->[serialize],deserialize->[deserialize_type,TypeList]],TypeStrVisitor->[visit_typeddict_type->[accept,items],visit_star_type->[accept],visit_instance->[name],visit_tuple_type->[accept],visit_type_type->[accept],visit_overloaded->[accept,items],visit_partial_type->[name],keywords_str->[accept],list_str->[accept]],TupleType->[serialize->[serialize],copy_modified->[TupleType],slice->[TupleType],deserialize->[deserialize_type,TupleType,deserialize]],TypeVarId->[__repr__->[__repr__],new->[TypeVarId]],FunctionLike->[is_concrete_type_obj->[is_type_obj]],TypeVarDef->[serialize->[serialize,is_meta_var],deserialize->[deserialize_type,TypeVarDef],new_unification_variable->[new,TypeVarDef],__init__->[TypeVarId]],true_only->[true_only,UninhabitedType,copy_type,make_simplified_union],callable_type->[CallableType,name,AnyType],UnionType->[make_simplified_union->[AnyType,make_union],has_readable_member->[has_readable_member],serialize->[serialize],deserialize->[deserialize_type,UnionType],make_union->[UninhabitedType,UnionType]],EllipsisType->[deserialize->[EllipsisType]],set_typ_args->[make_simplified_union,copy_modified,Instance],Instance->[serialize->[serialize],copy_modified->[Instance],deserialize->[deserialize_type,Instance]],TypeVisitor->[visit_star_type->[_notimplemented_helper],visit_overloaded->[_notimplemented_helper],visit_erased_type->[_notimplemented_helper],visit_type_list->[_notimplemented_helper],visit_ellipsis_type->[_notimplemented_helper]],true_or_false->[copy_type],strip_type->[strip_type,copy_modified,items,Overloaded],UnboundType->[serialize->[serialize],deserialize->[UnboundType,deserialize_type]],Type->[__repr__->[accept]],UninhabitedType->[deserialize->[UninhabitedType]],AnyType->[deserialize->[AnyType]],TypeVarType->[serialize->[serialize,is_meta_var],deserialize->[deserialize_type,TypeVarType,TypeVarDef]],Overloaded->[ser
ialize->[serialize,items],with_name->[with_name,Overloaded],name->[get_name],deserialize->[deserialize,Overloaded]],CallableType->[type_object->[is_type_obj],is_concrete_type_obj->[is_type_obj],with_name->[copy_modified],serialize->[serialize],deserialize->[CallableType,deserialize,deserialize_type],copy_modified->[CallableType]],NoneTyp->[deserialize->[NoneTyp]],DeletedType->[deserialize->[DeletedType]],TypeType->[serialize->[serialize],deserialize->[deserialize_type,TypeType],__init__->[is_type_obj]],get_type_vars->[get_typ_args,get_type_vars],false_only->[make_simplified_union,UninhabitedType,copy_type,false_only],TypedDictType->[zipall->[items],create_anonymous_fallback->[copy_modified,as_anonymous],serialize->[serialize,items],as_anonymous->[as_anonymous],deserialize->[deserialize_type,TypedDictType,deserialize],zip->[items],copy_modified->[TypedDictType]],TypeTranslator->[visit_union_type->[UnionType],visit_callable_type->[copy_modified,accept],visit_typeddict_type->[accept,TypedDictType,items],visit_star_type->[accept,StarType],translate_types->[accept],visit_instance->[Instance],visit_tuple_type->[TupleType,accept],visit_type_type->[accept,TypeType],visit_overloaded->[accept,Overloaded,items]]]
Initialize an object.
I would prefer to see the TODO items resolved before this is merged.
@@ -65,7 +65,7 @@ func (s *PublicBlockChainAPI) isBeaconShard() error { } func (s *PublicBlockChainAPI) isBlockGreaterThanLatest(blockNum uint64) error { - if blockNum > s.b.CurrentBlock().NumberU64() { + if blockNum > (s.b.CurrentBlock().NumberU64() + 1) { return ErrRequestedBlockTooHigh } return nil
[IsBlockSigner->[GetValidators,isBlockGreaterThanLatest],GetElectedValidatorAddresses->[GetElectedValidatorAddresses,isBeaconShard],GetAllValidatorInformation->[getAllValidatorInformation,isBeaconShard],GetBlockByNumber->[isBlockGreaterThanLatest],GetBalanceByBlockNumber->[isBlockGreaterThanLatest],GetMedianRawStakeSnapshot->[GetMedianRawStakeSnapshot,isBeaconShard],GetValidatorSelfDelegation->[GetValidatorSelfDelegation,isBeaconShard],Call->[BlockNumber],GetDelegationsByDelegatorByBlockNumber->[BlockNumber,isBlockGreaterThanLatest,isBeaconShard],ResendCx->[ResendCx],GetDelegationsByDelegator->[GetDelegationsByDelegator,isBeaconShard],GetShardID->[GetShardID],GetShardingStructure->[GetShardingStructure,GetEpoch],getAllValidatorInformation->[BlockNumber,GetAllValidatorAddresses,GetValidatorInformation],GetValidators->[GetValidators,isBeaconShard],GetValidatorInformationByBlockNumber->[BlockNumber,isBlockGreaterThanLatest,isBeaconShard,GetValidatorInformation],GetDelegationsByValidator->[GetDelegationsByValidator,isBeaconShard],GetAllValidatorInformationByBlockNumber->[getAllValidatorInformation,isBlockGreaterThanLatest,isBeaconShard,BlockNumber],GetCode->[GetCode],GetCurrentUtilityMetrics->[GetCurrentUtilityMetrics,isBeaconShard],GetStakingNetworkInfo->[isBeaconShard,EpochLastBlock,GetTotalStaking,GetTotalSupply,GetMedianRawStakeSnapshot,GetCirculatingSupply,GetEpoch],EpochLastBlock->[EpochLastBlock,isBeaconShard],GetSuperCommittees->[isBeaconShard,GetSuperCommittees],GetTotalStaking->[isBeaconShard],GetAllValidatorAddresses->[GetAllValidatorAddresses,isBeaconShard],GetAccountNonce->[GetAccountNonce],GetValidatorTotalDelegation->[isBeaconShard],GetSignedBlocks->[IsBlockSigner],GetBlockSigners->[GetValidators,isBlockGreaterThanLatest],GetDelegationByDelegatorAndValidator->[GetDelegationsByDelegator,isBeaconShard],GetLastCrossLinks->[GetLastCrossLinks,isBeaconShard],GetBalance->[GetBalance],GetValidatorInformation->[BlockNumber,isBeaconShard,GetValidatorInformation],Ge
tAllDelegationInformation->[GetAllValidatorAddresses,isBeaconShard],IsLastBlock->[IsLastBlock,isBeaconShard],GetCurrentBadBlocks->[GetCurrentBadBlocks]]
isBlockGreaterThanLatest returns ErrRequestedBlockTooHigh if the given block number is greater than the latest block number.
+1 needed? I think no change is needed.
@@ -88,7 +88,7 @@ func (o *orm) InsertFinishedRun(db *gorm.DB, run Run, trrs []TaskRunResult, save if run.Outputs.Val == nil || len(run.Errors) == 0 { return 0, errors.Errorf("run must have both Outputs and Errors, got Outputs: %#v, Errors: %#v", run.Outputs.Val, run.Errors) } - if len(trrs) == 0 && saveSuccessfulTaskRuns { + if len(trrs) == 0 && (saveSuccessfulTaskRuns || run.HasErrors()) { return 0, errors.New("must provide task run results") }
[DeleteRunsOlderThan->[Add,Now,Exec],FindRun->[Preload,First],InsertFinishedRun->[Create,ErrorDB,Join,OutputIndex,Exec,Sprintf,GormTransactionWithoutContext,New,DotID,Errorf,OutputDB,HasErrors,IsZero,Wrap,Type],CreateSpec->[WithStack,Create],Wrapf,First,NewCounterVec,NewGaugeVec,New,String]
InsertFinishedRun inserts a finished run into the database. Execute the NestedQuery with the given strings.
This was a missing check, we also attempt to save the task runs if the run has errors, so you need to provide them
@@ -169,7 +169,7 @@ public class ListAction implements TestsWsAction { List<String> fileUuids = Lists.transform(tests, new TestToFileUuidFunction()); List<ComponentDto> components = dbClient.componentDao().selectByUuids(dbSession, fileUuids); - return Maps.uniqueIndex(components, new ComponentToUuidFunction()); + return Maps.uniqueIndex(components, ComponentDtoFunctions.toUuid()); } private static class TestToFileUuidFunction implements Function<TestDoc, String> {
[ListAction->[define->[setExampleValue,addPagingParams],writeTests->[status,longName,testUuid,endObject,message,endArray,stackTrace,key,coveredFiles,coveredLines,prop,fileUuid,beginArray,name,durationInMs,beginObject],searchTestsByTestFileUuid->[checkComponentUuidPermission,searchByTestFileUuid],searchTestsByTestFileKey->[getByKey,uuid,checkComponentPermission,searchByTestFileUuid],checkComponentUuidPermission->[selectOrFailByUuid,checkProjectUuidPermission,projectUuid],searchTestsBySourceFileUuidAndLineNumber->[searchBySourceFileUuidAndLineNumber,checkComponentUuidPermission],TestToFileUuidFunction->[apply->[fileUuid]],ComponentToUuidFunction->[apply->[uuid]],searchTestsByTestUuid->[fileUuid,searchByTestUuid,checkComponentUuidPermission],coveredLines->[size],searchTests->[IllegalArgumentException,searchTestsByTestFileKey,searchTestsBySourceFileUuidAndLineNumber,searchTestsByTestUuid,searchTestsByTestFileUuid],buildComponentsByTestFileUuid->[ComponentToUuidFunction,TestToFileUuidFunction,selectByUuids,uniqueIndex,transform],handle->[setPage,writeJson,getTotal,writeTests,paramAsInt,param,closeQuietly,getDocs,close,openSession,searchTests,mandatoryParamAsInt,buildComponentsByTestFileUuid,beginObject]]]
Build a map of components by test file uuid.
Should have been done in another commit
@@ -126,6 +126,8 @@ class Boost(Package): depends_on('mpi', when='+mpi') depends_on('bzip2', when='+iostreams') depends_on('zlib', when='+iostreams') + depends_on('zlib', when='%xl_r' ) + depends_on('bzip2', when='%xl_r') # Patch fix from https://svn.boost.org/trac/boost/ticket/11856 patch('boost_11856.patch', when='@1.60.0%gcc@4.4.7')
[Boost->[determine_bootstrap_options->[determine_toolset,bjam_python_line],install->[determine_bootstrap_options,add_buildopt_symlinks,determine_b2_options],determine_b2_options->[determine_toolset]]]
This function is used to determine if a version is a bug in the Boost library. Determine the toolset for a given spec.
it's not obvious why the two dependencies are needed for xl compiler regardless of `iostreams` variant.
@@ -4845,7 +4845,7 @@ bool Blockchain::prepare_handle_incoming_blocks(const std::vector<block_complete { m_blocks_longhash_table.clear(); uint64_t thread_height = height; - tools::threadpool::waiter waiter; + tools::threadpool::waiter waiter(tpool); m_prepare_height = height; m_prepare_nblocks = blocks_entry.size(); m_prepare_blocks = &blocks;
[No CFG could be retrieved]
region Blockchain. prevalidate_block_hashes This function is used to find the next block in the chain.
There doesn't seem to be an effective method to see if this section failed via exception.
@@ -13,6 +13,7 @@ import random import time from typing import Union, Optional, Set, Any, Dict, List import warnings +from parlai.core.opt import Opt from parlai.core.message import Message
[display_messages->[_token_losses_line,_ellipse,clip_text],TimeLogger->[time->[time],log->[reset,time],__init__->[Timer]],PaddingUtils->[pad_text->[valid]],str_to_msg->[tolist->[tostr],convert->[tolist,tostr],convert],maintain_dialog_history->[parse],Timer->[time->[time]],msg_to_str->[add_field->[filter],add_field],NoLock]
A file containing the miscellaneous utility functions and constants. Reads the history of the last n-grams and updates the history of the last n.
ahh yes I am aware this broke previous models, we should not add it back here however, those models should be re-saved if necessary (I have already done this with my old models)
@@ -254,9 +254,6 @@ class TypeFixer(TypeVisitor[None]): for it in ut.items: it.accept(self) - def visit_type_guard_type(self, t: TypeGuardType) -> None: - t.type_guard.accept(self) - def visit_void(self, o: Any) -> None: pass # Nothing to descend into.
[NodeFixer->[visit_symbol_table->[visit_type_info]],missing_info->[lookup_qualified]]
Visit a UnionType.
Even if it's not a proper type, we still need to fixup the deserialized target type. Do we have an incremental test with an import cycle?
@@ -72,7 +72,7 @@ def _fetch( if failed: raise DownloadError(failed) - return downloaded + return downloaded + len(used_run_cache) def _fetch_external(self, repo_url, repo_rev, files, jobs):
[_fetch_external->[exception,format,use_cache,join,external_repo,fetch_external],_fetch->[DownloadError,_fetch_external,pull,used_cache,isinstance,items],getLogger]
Download data items from a cloud and a repository. This function returns the number of downloaded and failed objects.
`fetch` and `push/pull` for run-cache did not add up counts. Was this intentional, @efiop?
@@ -671,5 +671,13 @@ namespace Microsoft.Xna.Framework.Content return this.serviceProvider; } } + + internal byte[] GetScratchBuffer(int size) + { + size = Math.Max(size, 1024 * 1024); + if (scratchBuffer == null || scratchBuffer.Length < size) + scratchBuffer = new byte[size]; + return scratchBuffer; + } } }
[ContentManager->[ReloadAsset->[Dispose],T->[Dispose],Normalize->[Normalize],Unload->[Dispose],Dispose->[RemoveContentManager,Dispose],AddContentManager]]
Get the service provider.
Eventually we could consider making this public for those writing custom content readers. We would have to add docs to it explaining its usage and benefits.
@@ -101,8 +101,11 @@ func (ls *Leadership) Campaign(leaseTimeout int64, leaderData string) error { return err } // The leader key must not exist, so the CreateRevision is 0. + finalCmps := make([]clientv3.Cmp, 0, len(cmps)+1) + finalCmps = append(finalCmps, cmps...) + finalCmps = append(finalCmps, clientv3.Compare(clientv3.CreateRevision(ls.leaderKey), "=", 0)) resp, err := kv.NewSlowLogTxn(ls.client). - If(clientv3.Compare(clientv3.CreateRevision(ls.leaderKey), "=", 0)). + If(finalCmps...). Then(clientv3.OpPut(ls.leaderKey, leaderData, clientv3.WithLease(ls.getLease().ID))). Commit() log.Info("check campaign resp", zap.Any("resp", resp))
[Keep->[getLease],Check->[getLease],Reset->[getLease],DeleteLeader->[LeaderTxn],Campaign->[getLease,setLease],Watch->[Watch]]
Campaign creates a new lease for the leadership.
This line should be moved to line 105.
@@ -13,6 +13,8 @@ const SOURCE_ROOT = path.normalize(path.dirname(__dirname)); const DEPOT_TOOLS = path.resolve(SOURCE_ROOT, '..', 'third_party', 'depot_tools'); const IGNORELIST = new Set([ + ['filenames.libcxx.gni'], + ['filenames.libcxxabi.gni'], ['shell', 'browser', 'resources', 'win', 'resource.h'], ['shell', 'browser', 'notifications', 'mac', 'notification_center_delegate.h'], ['shell', 'browser', 'ui', 'cocoa', 'event_dispatching_window.h'],
[No CFG could be retrieved]
Creates a new child process and spawns a new child process and checks its exit code. The main entry point for cpplint.py.
These shouldn't need to be ignored (E.g. filenames.auto.gni doesn't appear to be ignored)
@@ -99,7 +99,11 @@ def psd_array_welch(x, sfreq, fmin=0, fmax=np.inf, n_fft=256, n_overlap=0, x = x.reshape(-1, n_times) # Prep the PSD - n_fft, n_overlap = _check_nfft(n_times, n_fft, n_overlap) + if n_per_seg is None and n_fft > n_times: + raise ValueError('If n_per_seg is None n_fft is not allowed to be >' + ' n_times. If you want zero-padding, you have to set' + ' n_per_seg to relevant length.') + n_fft, n_per_seg, n_overlap = _check_nfft(n_times, n_fft, n_per_seg, n_overlap) win_size = n_fft / float(sfreq) logger.info("Effective window size : %0.3f (s)" % win_size) freqs = np.arange(n_fft // 2 + 1, dtype=float) * (sfreq / n_fft)
[psd_array_welch->[_check_nfft],psd_welch->[_check_psd_data,psd_array_welch],psd_multitaper->[_check_psd_data]]
Compute power spectral density using Welch's method. Compute the PSD of the windows of the n-1 dimensions of the data.
say what value of n_times and were given compared to n_fft
@@ -140,6 +140,18 @@ public class ImportUtils { DialogHandler.storeMessage(handlerMessage); } + public static boolean isCollectionPackage(String filename) { + return filename != null && (filename.toLowerCase().endsWith(".colpkg") || filename.equals("collection.apkg")); + } + + private static boolean isDeckPackage(String filename) { + return filename != null && filename.toLowerCase().endsWith(".apkg") && !filename.equals("collection.apkg"); + } + + public static boolean isValidPackageName(String filename) { + return isDeckPackage(filename) || isCollectionPackage(filename); + } + /** * Check if the InputStream is to a valid non-empty zip file * @param intent intent from which to get input stream
[ImportUtils->[copyFileToCache->[getData,e,openInputStream,copyFile,close],showImportUnsuccessfulDialog->[show,e,getString],handleFileImport->[sendExceptionReport,copyFileToCache,moveToFirst,equals,getType,d,endsWith,hasValidZipFile,i,getManualUrl,getData,e,sendShowImportFileDialogMsg,w,close,query,RuntimeException,getString,getEncodedPath,getPath],hasValidZipFile->[ZipInputStream,getData,e,openInputStream,getNextEntry,close,d],sendShowImportFileDialogMsg->[Bundle,getName,equals,storeMessage,setData,File,putString,obtain]]]
Send a show import file dialog message.
this was going to be my first question "so what about the transition, will it handle the old full collections with .apkg suffixes"? yes it does, sweet
@@ -208,7 +208,7 @@ class TestDataset: ("config", "kwarg"), [ pytest.param(*(datasets.utils.DatasetConfig(split="test"),) * 2, id="specific"), - pytest.param(make_minimal_dataset_info().default_config, None, id="default"), + pytest.param(datasets.utils.DatasetConfig(split="train"), None, id="default"), ], ) def test_load_config(self, config, kwarg):
[TestDataset->[test_decoder->[DatasetMock],test_name->[DatasetMock,make_minimal_dataset_info],test_default_config->[DatasetMock,make_minimal_dataset_info],test_missing_dependencies->[DatasetMock,make_minimal_dataset_info],DatasetMock->[__init__->[make_minimal_dataset_info]],test_load_config->[DatasetMock],test_resources->[DatasetMock],make_minimal_dataset_info],TestDatasetInfo->[test_repr_optional_info->[make_minimal_dataset_info],info->[make_minimal_dataset_info],test_check_dependencies->[make_minimal_dataset_info],test_default_config_split_train->[make_minimal_dataset_info],test_valid_options_split_but_no_train->[make_minimal_dataset_info]]]
Load a config and assert that it is loaded.
just wondering if the `id="default"` is still accurate?
@@ -1,8 +1,10 @@ <?php use Friendica\App; +use Friendica\Core\Config; use Friendica\Core\System; use Friendica\Core\Worker; +use Friendica\Network\Probe; require_once 'include/contact_widgets.php'; require_once 'include/probe.php';
[dirfind_content->[set_pager_itemspage,set_pager_total]]
Directory find. Get contact details by user.
Can you please check if this include is required after you replaced `probe_url()`?
@@ -31,11 +31,15 @@ class Mozjs(AutotoolsPackage): configure_directory = 'js/src' build_directory = 'js/src/spack-build' - patch('perl-bug.patch') + patch('perl-bug.patch', sha256='9f7d8502d85a4125e975a84cae11a8b34e696172d56f8ebc7ecf6d21fa3c30c9') + patch('Bug-638056-Avoid-The-cacheFlush-support-is-missing-o.patch', + sha256='b1c869a65f5ebc10741d4631cc2e1e166c6ed53035cfa56bede55a4c19b7b118', when='@1.8.5') + patch('fix-811665.patch', + sha256='2b298b8a693865b38e2b0d33277bb5ffe152c6ecf43648e85113fec586aa4752', when='@1.8.5') def configure_args(self): spec = self.spec - return [ + config_args = [ '--enable-readline', # enables readline support in JS shell '--enable-threadsafe', # enables support for multiple threads '--enable-system-ffi',
[Mozjs->[configure_args->[format],depends_on,version,patch]]
Returns a list of command line options for the object.
You do not have to include checksums for patches that are stored in the Spack repository
@@ -213,6 +213,8 @@ class Jetpack_Sync_Defaults { 'idx_page', 'postman_sent_mail', 'rssmi_feed_item', + 'rssap-feed', + 'wp_automatic' ); static $default_post_checksum_columns = array(
[No CFG could be retrieved]
Filters the list of callables that are manageable via the JSON API. This method is used to add the default meta checksum columns to the options.
Could you add a trailing comma to avoid issues later on when adding new items, if one were to forget to add that comma? This will also make WPCS happy :) Thanks!
@@ -328,13 +328,16 @@ def push_python_path(path: PathType) -> ContextManagerFunctionReturnType[None]: sys.path.remove(path) -def import_module_and_submodules(package_name: str) -> None: +def import_module_and_submodules(package_name: str, exclude: Optional[List[str]] = None) -> None: """ Import all submodules under the given package. Primarily useful so that people using AllenNLP as a library can specify their own custom packages and have their custom classes get loaded and registered. """ + if exclude and package_name in exclude: + return + importlib.invalidate_caches() # For some reason, python doesn't always add this by default to your path, but you pretty much
[sanitize->[sanitize],is_global_primary->[is_distributed],import_module_and_submodules->[push_python_path,import_module_and_submodules]]
Import all submodules under the given package and all submodules under the given package.
Looks like `exclude` should be a `Optional[Set[str]]`?
@@ -39,6 +39,7 @@ #include <osquery/core.h> #include <osquery/database.h> +#include <osquery/dispatcher.h> #include <osquery/filesystem.h> #include <osquery/flags.h> #include <osquery/logger.h>
[No CFG could be retrieved]
Creates a new object. The following functions are used to set the alarm timeout for the daemon.
Why all of these new includes?
@@ -31,7 +31,7 @@ def build_packages(targeted_packages, distribution_directory, is_dev_build=False print("Generating Package Using Python {}".format(sys.version)) run_check_call( [ - "python", + sys.executable, build_packing_script_location, "--dest", distribution_directory,
[verify_update_package_requirement->[process_requires,abspath,join],build_packages->[verify_update_package_requirement,print,run_check_call,format],abspath,add_argument,process_glob_string,append,build_packages,ArgumentParser,parse_args,str_to_bool,join]
Build packages using the build_packing_script_location and the distribution_directory.
Discovered when running locally that python calling python doesn't nest properly unless you're using system PATH updates like how the VSAgents do them. Using sys.executable is the proper way to ensure that you're always in the same environment.
@@ -23,8 +23,8 @@ class Analytics } end - def ahoy - @ahoy ||= Rails.env.test? ? FakeAhoyTracker.new : Ahoy::Tracker.new(request: request) + def analytics + @analytics ||= Rails.env.test? ? FakeKeen : PublishAnalyticsJob end def uuid
[Analytics->[track_event->[info,merge!,track,key?],uuid->[uuid],ahoy->[test?,new],request_attributes->[user_agent,remote_ip],attr_reader,freeze]]
Returns an object with the n-th unique identifier for the request.
Rather than leak the `FakeKeen` into "production" code, what if we did a `stub_const("PublishAnalyticsJob", FakeKeen)` in tests?
@@ -33,7 +33,7 @@ public abstract class InsertRetryPolicy implements Serializable { */ public static class Context { // A list of all errors corresponding to an attempted insert of a single record. - TableDataInsertAllResponse.InsertErrors errors; + public final TableDataInsertAllResponse.InsertErrors errors; public Context(TableDataInsertAllResponse.InsertErrors errors) { this.errors = errors;
[InsertRetryPolicy->[retryTransientErrors->[InsertRetryPolicy],alwaysRetry->[InsertRetryPolicy],neverRetry->[InsertRetryPolicy]]]
Creates an InsertRetryPolicy that will retry any failures if the insert fails. Creates a policy that will always retry all failures.
BTW I think Java standards prefer adding a getter here over making the variable public.
@@ -335,7 +335,6 @@ def check_event_date(row): ok_(row_datetime < prev_end_datetime) class EventsViewsTest(test_utils.TestCase): - fixtures = ['devmo_calendar.json'] def setUp(self): self.client = LocalizingClient()
[EventsViewsTest->[test_events->[reverse,pq,eq_,get],setUp->[LocalizingClient,devmo_calendar_reload]],get_datetime_from_string->[fromtimestamp,mktime,strptime],ProfileViewsTest->[test_profile_edit_websites->[find,dict,get,update,post,pq,eq_,items,_create_profile,reverse],test_profile_edit->[find,dict,attr,get,post,pq,eq_,_create_profile,reverse],_break->[ok_,debug,items],test_profile_edit_tags->[all_ns,find,sort,dict,get,post,pq,eq_,replace,join,_create_profile,reverse],setUp->[LocalizingClient],_create_profile->[create_user,DekiUser,save,UserProfile],test_profile_view->[reverse,find,get,pq,eq_,UserDocsActivityFeed,eq,_create_profile,open,range],attr,patch],check_event_date->[debug,find,get_datetime_from_string,ok_,prev,today],dirname]
Set up the client.
Ditto here - I think we need this now.
@@ -255,7 +255,7 @@ { await transaction.Commit() .ConfigureAwait(false); - + log.Debug($"Dropping message '{messageId}' as the specified TimeToBeReceived of '{ttbrString}' expired since sending the message at '{sentTime:O}'"); return; } }
[LearningTransportMessagePump->[Start->[Token,Exists,Start,Delete,EnsureDirectoriesExists,Run,MaxConcurrency,RecoverPendingTransactions],ILearningTransportTransaction->[ToString,None],EnsureDirectoriesExists->[None,CreateDirectory],Task->[UtcNow,IsCancellationRequested,BeginTransaction,FileToProcess,RetryRequired,CurrentCount,Set,SendsAtomicWithReceive,Rollback,InputQueue,onError,criticalError,TryGetValue,ClearPendingOutgoingOperations,CompletedTask,GetCreationTimeUtc,Combine,Release,RequiredTransactionMode,Ignore,GetTransaction,Dispose,Remove,AddOrUpdate,onMessage,EnumerateFiles,Deserialize,WriteLine,Replace,Cancel,ConfigureAwait,ThrowForBadPath,Raise,Parse,TimeToBeReceived,PurgeOnStartup],RecoverPendingTransactions->[None,Delete,Exists,RecoverPartiallyCompletedTransactions],basePath,LogManager]]
ProcessFile - Process a file and process it if it can t be processed. A blocking call that will fail if the action is not required.
I think this should actually be Info instead of Debug. Thoughts?
@@ -127,8 +127,9 @@ public class TestDeleteWithSlowFollower { raftClientConfig.setRpcWatchRequestTimeout(Duration.ofSeconds(10)); conf.setFromObject(raftClientConfig); - conf.setTimeDuration(OzoneConfigKeys.OZONE_BLOCK_DELETING_SERVICE_INTERVAL, - 1, TimeUnit.SECONDS); + ScmConfig scmConfig = conf.getObject(ScmConfig.class); + scmConfig.setBlockDeletionInterval(Duration.ofSeconds(1)); + conf.setFromObject(scmConfig); RatisClientConfig ratisClientConfig = conf.getObject(RatisClientConfig.class);
[TestDeleteWithSlowFollower->[shutdown->[shutdown]]]
Initialize the configuration. This method is used to create a key - value object in the cluster.
We should keep the old config as well since OM is involved in deleting the keys.
@@ -139,12 +139,12 @@ namespace System.Net if (p >= 10) return false; - return IsPredefinedScheme(s.Substring(0, p)); + return IsPredefinedScheme(s.AsSpan(0, p)); } - private static bool IsPredefinedScheme(string scheme) + private static bool IsPredefinedScheme(ReadOnlySpan<char> scheme) { - if (scheme == null || scheme.Length < 3) + if (scheme.Length < 3) return false; char c = scheme[0];
[HttpListenerRequest->[IAsyncResult->[GetClientCertificateCore],FinishInitialization->[MaybeUri]]]
Checks if a string is a URI or not.
A lot of the code below is now incorrect. All of the equality operators are going to do the wrong thing.
@@ -20,13 +20,13 @@ import java.util.concurrent.CancellationException; /** * Special {@link Future} which is writable. */ -public interface Promise<V> extends Future<V> { +public interface Promise<V> { /** * Marks this future as a success and notifies all listeners. * <p> * If it is success or failed already it will throw an {@link IllegalStateException}. */ - Promise<V> setSuccess(V result); + Future<V> setSuccess(V result); /** * Marks this future as a success and notifies all listeners.
[No CFG could be retrieved]
Set the result of the promise to the given result.
This does not look correct. It should return the promise
@@ -83,7 +83,14 @@ func (host *pluginHost) Provider(pkg tokens.Package, version *semver.Version) (p return nil, nil } - prov, err := best.load() + load := best.load + if load == nil { + load = func() (plugin.Provider, error) { + return best.loadWithHost(host) + } + } + + prov, err := load() if err != nil { return nil, err }
[CloseProvider->[Lock,Unlock],Provider->[Unlock,LT,Lock,load,GT],SignalCancellation->[SignalCancellation,Lock,Unlock],Analyzer->[New],Log->[Logf,StreamMessage],LogStatus->[Logf,StreamMessage]]
Provider returns a plugin. Provider that can be used to load a plugin from the specified package.
This code exists only because `NewProviderLoaderWithHost` now exists, and in that case load is nil, right?
@@ -73,6 +73,8 @@ class Following extends BaseApi $params['order'] = ['cid']; } + $accounts = []; + $followers = DBA::select('contact-relation', ['cid'], $condition, $params); while ($follower = DBA::fetch($followers)) { self::setBoundaries($follower['cid']);
[Following->[rawContent->[createFromContactId,UnprocessableEntity,RecordNotFound,getRequest]]]
Get the raw content of a contact This function creates a new account from a contact id and a min id.
Please add this to the corresponding `Followers` call as well.
@@ -248,6 +248,12 @@ func (a storageClient) BatchWrite(ctx context.Context, input chunk.WriteBatch) e unprocessed.TakeReqs(requests, -1) backoff.Wait() continue + }else if ok && awsErr.Code() == "ValidationException"{ + // this write will never work, so the only option is to drop the offending items and continue. + // TODO: add more debug options for capturing data/telemetry about the offending items? + level.Warn(util.Logger).Log("Data lost while flushing to Dynamo: %v",awsErr) + level.Debug(util.Logger).Log("Dropped request details: \n%v",requests) + continue } // All other errors are critical.
[NextPage->[NextPage],PutChunks->[BatchWrite],GetChunks->[Error],getDynamoDBChunks->[Send,Data,Retryable],HasNextPage->[HasNextPage],TakeReqs->[Len],Send->[Send],RegisterFlags->[RegisterFlags],Add]
BatchWrite writes a batch of items to the DynamoDB table. This function is called when a client is unable to write a chunk of data to DynamoDB.
Please run `go fmt`
@@ -2,7 +2,7 @@ module Users module DeleteComments module_function - def call(user, cache_buster = CacheBuster) + def call(user) return unless user.comments.any? user.comments.find_each do |comment|
[call->[call,remove_notifications,remove_from_elasticsearch,any?,bust_user,delete_all,find_each,commentable,delete]]
Remove any comments that have a in the user s comments list.
This refactor is shaping up so nicely, it's great that we can get rid of this argument
@@ -257,7 +257,13 @@ def train_model( check_for_gpu(device_ids) master_addr = distributed_params.pop("master_address", "127.0.0.1") - master_port = distributed_params.pop("master_port", 29500) + if master_addr in ("127.0.0.1", "0.0.0.0", "localhost"): + # If running locally, we can automatically find an open port if one is not specified. + master_port = distributed_params.pop("master_port", common_util.find_open_port()) + else: + # Otherwise we require that the port be specified. + master_port = distributed_params.pop("master_port") + num_procs = len(device_ids) world_size = num_nodes * num_procs
[TrainModel->[finish->[dump_metrics,evaluate,join,items,info],run->[train],from_partial_objects->[index_with,construct,cls,get,is_master,read_all_datasets,values,items,join,info,from_instances,save_to_files,ConfigurationError]],train_model_from_file->[train_model,from_file],_train_worker->[from_params,int,prepare_global_logging,exists,set_device,len,import_plugins,str,finish,prepare_environment,run,info,join,import_module_and_submodules,barrier,archive_model,init_process_group],train_model_from_args->[train_model_from_file],train_model->[to_file,_train_worker,pop,duplicate,len,check_for_gpu,load,isinstance,join,spawn,info,from_files,create_serialization_dir,make_vocab_from_params,archive_model,ConfigurationError],Train->[add_subparser->[add_parser,set_defaults,add_argument]],register,getLogger]
Trains a model based on a given nltk node. Train a single cuda device or process if distributed is not in the config. Load a single node from a file.
This will run `find_open_port()` before it tries to read the port from the config. I think we should not run port discovery at all if we don't have to.
@@ -1,7 +1,7 @@ using System; using Mono.Addins; using Mono.Addins.Description; -[assembly: Addin("MonoGame_IDE_VisualStudioForMac",Namespace = "MonoDevelop",Version = "0.0.0.0")] +[assembly: Addin("MonoGame_IDE_VisualStudioForMac",Namespace = "MonoDevelop",Version = "3.7.0.0")] [assembly: AddinName("MonoGame Extension")] [assembly: AddinCategory("Game Development")] [assembly: AddinDescription("VisualStudio for Mac extension for MonoGame")]
[No CFG could be retrieved]
region MonoGame Extension.
This number is getting set by the build system and should not be modified.
@@ -50,7 +50,7 @@ bool LinuxNetworkConfigMonitor::open() struct { nlmsghdr header; rtgenmsg msg; - } request = {}; + } request; // Request a dump of the links. request.header.nlmsg_len = NLMSG_LENGTH(sizeof(request.msg));
[No CFG could be retrieved]
DDS - related functions Reads the list of network config records and sends them to the server.
does this need a memset or similar?
@@ -25,10 +25,7 @@ public class TripleADisplay implements ITripleADisplay { } @Override - public void initialize(final IDisplayBridge bridge) { - final IDisplayBridge m_displayBridge = bridge; - m_displayBridge.toString(); - } + public void initialize(final IDisplayBridge bridge) {} // TODO: unit_dependents and battleTitle are both likely not used, they have been removed // from BattlePane().showBattle( .. ) already
[TripleADisplay->[changedUnitsNotification->[changedUnitsNotification],casualtyNotification->[casualtyNotification],bombingResults->[bombingResults],notifyRetreat->[notifyRetreat],showBattle->[showBattle],deadUnitNotification->[deadUnitNotification]]]
Method to show a battle.
The only implementation of `IDisplayBridge` is `DefaultDisplayBridge`, and it does not override `Object#toString()`. Therefore, it doesn't seem that any side effects could be caused by calling this method, so it should be safe to remove, and thus remove the `m_displayBridge` local variable in the process.
@@ -65,6 +65,11 @@ namespace PythonNodeModelsWpf editText.TextArea.TextEntering += OnTextAreaTextEntering; editText.TextArea.TextEntered += OnTextAreaTextEntered; + // Initialize editor with global settings for show/hide tabs and spaces + editText.Options = dynamoViewModel.TextOptions; + editText.Options.ShowTabs = dynamoViewModel.ShowTabsAndSpacesInScriptEditor; + editText.Options.ShowSpaces = dynamoViewModel.ShowTabsAndSpacesInScriptEditor; + const string highlighting = "ICSharpCode.PythonBinding.Resources.Python.xshd"; var elem = GetType().Assembly.GetManifestResourceStream( "PythonNodeModelsWpf.Resources." + highlighting);
[ScriptEditorWindow->[OnEngineChanged->[UpdateScript],OnRunClicked->[UpdateScript],OnMigrationAssistantClicked->[UpdateScript]]]
Initialize the object with the specified properties.
I'm confused by this. If we have `TextOptions` in our VM, why do we also need `ShowTabsAndSpacesInScriptEditor` as well? Can't `ShowTabs` and `ShowSpaces` come already assigned in `TextOptions` so that we can avoid exposing `ShowTabsAndSpacesInScriptEditor` too? Also, should this be needed if we are using a binding for `Options`?
@@ -93,6 +93,12 @@ module Email end end + def ensure_valid_date + if @mail.date.nil? + raise InvalidPost, "No post creation date found. Is the e-mail missing a Date: header?" + end + end + def is_blacklisted? return false if SiteSetting.ignore_by_title.blank? Regexp.new(SiteSetting.ignore_by_title, Regexp::IGNORECASE) =~ @mail.subject
[Receiver->[extract_from_zimbra->[to_markdown],find_related_post->[sent_to_mailinglist_mirror?],find_or_create_user!->[find_or_create_user],extract_from_exchange->[to_markdown],attachments->[is_whitelisted_attachment?],extract_from_outlook->[to_markdown],extract_from_apple_mail->[to_markdown],embedded_email_raw->[fix_charset],to_markdown->[to_markdown],should_invite?->[reply_by_email_address_regex],extract_from_newton->[to_markdown],subject->[subject],extract_from_word->[to_markdown],extract_from_protonmail->[to_markdown],subscription_action_for->[sent_to_mailinglist_mirror?],create_post->[is_bounce?,sent_to_mailinglist_mirror?],extract_from_gmail->[to_markdown],create_reply->[notification_level_for,post_action_for],notify_about_rejected_attachment->[subject],add_other_addresses->[find_or_create_user],process_destination->[is_bounce?,sent_to_mailinglist_mirror?],extract_from_mozilla->[to_markdown],select_body->[formats],process_forwarded_email->[parse_from_field,find_or_create_user]]]
Ensures that the addresses in the message are valid and that they are not blacklisted.
This probably should be i18n'ed
@@ -42,6 +42,7 @@ import org.apache.gobblin.metrics.kafka.Pusher; /** * Test {@link KafkaKeyValueProducerPusher}. */ +@Test (groups = {"disabledOnTravis"}) public class KafkaKeyValueProducerPusherTest { public static final String TOPIC = KafkaKeyValueProducerPusherTest.class.getSimpleName();
[KafkaKeyValueProducerPusherTest->[setup->[startServers,KafkaTestBase,provisionTopic],after->[println,close],test->[hasNext,newArrayList,of,sleep,parseMap,message,pushMessages,key,getKafkaServerPort,getIteratorForTopic,String,close,getBytes,assertEquals,next,interrupt],getSimpleName]]
Package for testing. Returns a list of Pairs.
Let's remove this, since this test is not the root cause of travis build failures.
@@ -61,6 +61,8 @@ public abstract class MessagingAttributesExtractor<REQUEST, RESPONSE> set(attributes, SemanticAttributes.MESSAGING_MESSAGE_ID, messageId(request, response)); } + public abstract MessageOperation operation(); + @Nullable protected abstract String system(REQUEST request);
[MessagingAttributesExtractor->[onStart->[destinationKind,system,protocolVersion,protocol,messagePayloadCompressedSize,messagePayloadSize,operation,url,destination,set,operationName,temporaryDestination,conversationId],onEnd->[messageId,set]]]
Method called when the end of a sequence has been reached.
removing the arg
@@ -72,7 +72,7 @@ func (cs *ContainerService) setAddonsConfig(isUpdate bool) { defaultClusterAutoscalerAddonsConfig := KubernetesAddon{ Name: ClusterAutoscalerAddonName, - Enabled: to.BoolPtr(DefaultClusterAutoscalerAddonEnabled), + Enabled: to.BoolPtr(DefaultClusterAutoscalerAddonEnabled && !cs.Properties.IsAzureStackCloud()), Config: map[string]string{ "min-nodes": "1", "max-nodes": "5",
[setAddonsConfig->[Itoa,HasNSeriesSKU,BoolPtr,GetAzureCNICidr,IsKubernetesVersionGe,HasCoreOS,GetCloudSpecConfig,GetNonMasqueradeCIDR],BoolPtr,IsKubernetesVersionGe,GetAddonContainersIndexByName,Bool,IsAzureCNI]
setAddonsConfig sets the addons config Spec for the cluster - spec AddonConfig - A plugin to configure the necessary configuration for a new add - on. This function returns a list of KubernetesContainerSpec objects that can be used to create a new This is the default configuration for the cluster.
Carried this additional default gate over from the "last mile" "concatenate addons string" business logic area.
@@ -110,13 +110,10 @@ public class AsyncQueryForwardingServlet extends HttpServlet query = objectMapper.readValue(req.getInputStream(), Query.class); queryId = query.getId(); if (queryId == null) { - queryId = idProvider.next(query); + queryId = UUID.randomUUID().toString(); query = query.withId(queryId); } - requestLogger.log( - new RequestLogLine(new DateTime(), req.getRemoteAddr(), query) - ); out = resp.getOutputStream(); final OutputStream outputStream = out;
[AsyncQueryForwardingServlet->[doPost->[run->[run]]]]
POST method. This method is called when the request is done.
let's leave at least log.info() in case we never get past this stage
@@ -119,11 +119,12 @@ class Jetpack_Instant_Search extends Jetpack_Search { $prefix = Jetpack_Search_Options::OPTION_PREFIX; $options = array( 'overlayOptions' => array( - 'colorTheme' => get_option( $prefix . 'color_theme', 'light' ), - 'enableInfScroll' => (bool) get_option( $prefix . 'inf_scroll', false ), - 'highlightColor' => get_option( $prefix . 'highlight_color', '#FFC' ), - 'opacity' => (int) get_option( $prefix . 'opacity', 97 ), - 'showPoweredBy' => (bool) get_option( $prefix . 'show_powered_by', true ), + 'colorTheme' => get_option( $prefix . 'color_theme', 'light' ), + 'enableInfScroll' => (bool) get_option( $prefix . 'inf_scroll', false ), + 'enableTransition' => (bool) get_option( $prefix . 'overlay_transition', false ), + 'highlightColor' => get_option( $prefix . 'highlight_color', '#FFC' ), + 'opacity' => (int) get_option( $prefix . 'opacity', 97 ), + 'showPoweredBy' => (bool) get_option( $prefix . 'show_powered_by', true ), ), // core config.
[Jetpack_Instant_Search->[filter__posts_pre_query->[should_handle_query],action__parse_query->[add_aggregations_to_es_query_builder,build_aggregation,instant_api],load_assets->[load_and_initialize_tracks,inject_javascript_options]]]
Inject JS options into the options array This function is used to create a search object with all of the post type options. This function is called to set the options for the missing node - level parameters in Instant Search.
Had to set this to default as `false` to get the class removed when the option was off, but it doesn't make sense to me if it's fetching the option. Could you give this a closer look @jsnmoon ? Thanks in advance.
@@ -182,6 +182,10 @@ def run_yara(results, upload_pk): results as first argument. - `upload_pk` is the FileUpload ID. """ + return _run_yara(results, upload_pk) + + +def _run_yara(results, upload_pk): log.info('Starting yara task for FileUpload %s.', upload_pk) if not results['metadata']['is_webextension']:
[run_customs->[run_scanner],_run_yara_query_rule_on_version->[_run_yara_for_path],run_wat->[run_scanner]]
Run Yara on a FileUpload and store the Yara results.
What does this change (moving all the code into a separate function) do? Does the function where the exception is raised need to be *not* a `@validation_task`?
@@ -203,6 +203,17 @@ func newIngesterMetrics(r prometheus.Registerer, createMetricsConflictingWithTSD }, []string{"user"}) } + if !createMetricsConflictingWithTSDB { + m.numUsersWithBackfillTSDBs = promauto.With(r).NewGauge(prometheus.GaugeOpts{ + Name: "cortex_ingester_tsdb_users_with_backfill_tsdb", + Help: "Total number of users with backfill TSDBs", + }) + m.numBackfillTSDBsPerUser = promauto.With(r).NewGaugeVec(prometheus.GaugeOpts{ + Name: "cortex_ingester_tsdb_backfill_tsdb_per_user", + Help: "Total number of backfill TSDBs per user", + }, []string{"user"}) + } + return m }
[registries->[RUnlock,RLock],setRegistryForUser->[Lock,Unlock],Collect->[BuildMetricFamiliesPerUserFromUserRegistries,SendSumOfCounters,registries,SendSumOfCountersPerUser,SendSumOfHistograms,SendSumOfSummaries,SendSumOfGauges],NewCounterVec,NewGauge,ExponentialBuckets,LinearBuckets,NewHistogram,NewCounter,With,MustRegister,NewDesc]
Metrics collector for flushing dequeued series. PageFlushes - Prometheus metrics for page flushes.
I would suggest `cortex_ingester_backfill_tsdb_tenants`, because: 1. We're trying to move from "user" to "tenant" naming (will take ages) 2. We could prefix the backfill metrics with `cortex_ingester_backfill_tsdb`
@@ -72,8 +72,8 @@ class _MaskedConv(base.Layer): linear activation. use_bias: Boolean, whether the layer uses a bias. kernel_initializer: An initializer for the convolution kernel. - bias_initializer: An initializer for the bias vector. If None, no bias will - be applied. + bias_initializer: An initializer for the bias vector. If None, the default + initializer will be used. kernel_regularizer: Optional regularizer for the convolution kernel. bias_regularizer: Optional regularizer for the bias vector. activity_regularizer: Regularizer function for the output.
[MaskedConv2D->[__init__->[super,zeros_initializer]],_MaskedConv->[build->[add_variable,TensorShape,InputSpec,zeros_initializer,add_to_collection,multiply,ValueError,ones_initializer],_compute_output_shape->[TensorShape,append,len,range,conv_output_length],call->[as_list,upper,activation,convert_data_format,convolution,bias_add,reshape],__init__->[normalize_data_format,InputSpec,zeros_initializer,super,normalize_tuple,normalize_padding]],MaskedFullyConnected->[build->[add_variable,TensorShape,InputSpec,zeros_initializer,add_to_collection,multiply,ValueError,ones_initializer],_compute_output_shape->[input_shape,TensorShape,ValueError,with_rank_at_least],call->[activation,get_shape,len,matmul,bias_add,tensordot,set_shape,convert_to_tensor],__init__->[super,zeros_initializer,InputSpec]]]
Creates a new object for the layer. This function is a wrapper for the above function in the base class.
I'm not sure this would apply to this file.
@@ -274,10 +274,13 @@ module ProtocolsIoHelper description_string += '<br>' elsif json_hash[e].present? new_e = '<strong>' + e.humanize + '</strong>' + + image_tag = Array(nil) + image_tag = Array('img') if allowed_image_attributes.include? e description_string += new_e.to_s + ': ' + pio_eval_prot_desc( - sanitize_input(json_hash[e]), + sanitize_input(json_hash[e], image_tag), e ).html_safe + '<br>' end
[protocols_io_fill_desc->[prepare_for_view],pio_stp_17->[prepare_for_view],pio_stp_1->[prepare_for_view],pio_stp->[fill_attributes],prepare_for_view->[pio_eval_len,string_html_table_remove,not_null],fill_attributes->[prepare_for_view],pio_stp_6->[pio_eval_title_len],protocols_io_fill_step->[pio_stp_17,pio_stp,pio_stp_1,pio_stp_6,protocolsio_string_to_table_element]]
Generates a nicely formatted description of the missing elements in the protocol s io element. Prints a warning if a is encountered.
Here the same, you can use ternary operator.
@@ -354,9 +354,10 @@ class Jetpack_Sync_Module_Themes extends Jetpack_Sync_Module { * * @since 4.9.0 * - * @param array $sidebar, Sidebar id got changed + * @param array $moved_to_inactive, Array of widgets id that moved to inactive id got changed + * @param array $moved_to_inactive, Array of widgets names that moved to inactive id got changed Since 5.0.0 */ - do_action( 'jetpack_widget_moved_to_inactive', $moved_to_inactive ); + do_action( 'jetpack_widget_moved_to_inactive', $moved_to_inactive, array_map( array( $this, 'get_widget_name' ), $moved_to_inactive ) ); } elseif ( empty( $moved_to_sidebar ) && empty( $new_value['wp_inactive_widgets']) && ! empty( $old_value['wp_inactive_widgets'] ) ) {
[Jetpack_Sync_Module_Themes->[sync_sidebar_widgets_actions->[sync_add_widgets_to_sidebar,sync_widgets_reordered,sync_remove_widgets_from_sidebar]]]
Synchronize the sidebar widgets with the new ones. This method is called when a node is removed from the sidebar. It is called when a.
Consider renaming $moved_to_inactive to $moved_to_inactive_ids Consider reducing `array_map( array( $this, 'get_widget_name' ), $moved_to_inactive ) )` to a variable called $moved_to_inactive_names and passing it as an argument to do_action
@@ -46,8 +46,10 @@ c = Scale(a,b); [Category("SmokeTest")] public void T003_BasicImport_ParentPath() { + /* object[] expectedC = { 2.2, 4.4 }; thisTest.Verify("c", expectedC); + */ } [Test]
[TestImport->[T020_MultipleImport_WithSameFunctionName->[RunScriptSource,Verify],T013_BaseImportImperative_Bottom->[RunScriptSource,Throws],T007_BasicImport_TestClassConstructorAndProperties->[RunScriptSource,Verify],T016_BaseImportAssociative->[RunScriptSource,Verify],T002_BasicImport_AbsoluteDirectory->[RunScriptSource,Verify],T010_BaseImportWithVariableClassInstance_top->[RunScriptSource,Verify],T005_BasicImport_RelativePath->[RunScriptSource,Verify],T009_BasicImport_TestClassInstanceMethod->[RunScriptSource,Verify],T014_BasicImport_BeforeImperative->[RunScriptSource,Verify],T022_Defect_1457740->[RunScriptSource,Verify],T015_BasicImport_Middle->[RunScriptSource,Throws],T018_MultipleImport->[RunScriptSource,Verify],T019_MultipleImport_ClashFunctionClassRedifinition->[RunScriptSource,Throws],T017_BaseImportWithVariableClassInstance_Associativity->[RunScriptSource,Verify],T004_BasicImport_CurrentDirectoryWithDotAndSlash->[RunScriptSource,Verify],T008_BasicImport_TestClassConstructorAndProperties_UserDefinedClass->[RunScriptSource,Verify],T001_BasicImport_CurrentDirectory->[RunScriptSource,Verify],T012_BaseImportImperative->[RunScriptSource,Verify],T003_BasicImport_ParentPath->[Verify]]]
T003_BasicImport_ParentPath - T003_BasicImport_CurrentDirectoryWith.
Do we need to keep this test?? T001_BasicImport_CurrentDirectory & T002_BasicImport_AbsoluteDirectory seems to be the same test.
@@ -1365,7 +1365,7 @@ return [ "comment" => "Background tasks queue entries", "fields" => [ "id" => ["type" => "int unsigned", "not null" => "1", "extra" => "auto_increment", "primary" => "1", "comment" => "Auto incremented worker task id"], - "parameter" => ["type" => "mediumblob", "comment" => "Task command"], + "parameter" => ["type" => "mediumtext", "comment" => "Task command"], "priority" => ["type" => "tinyint unsigned", "not null" => "1", "default" => "0", "comment" => "Task priority"], "created" => ["type" => "datetime", "not null" => "1", "default" => DBA::NULL_DATETIME, "comment" => "Creation date"], "pid" => ["type" => "int unsigned", "not null" => "1", "default" => "0", "comment" => "Process id of the worker"],
[No CFG could be retrieved]
The list of fields that can be used to define the state of the thread. The index of the last non - null non - null non - null value of the last.
Why have you changed it to "mediumtext"? The "blob" is done by intention, since it doesn't convert charsets. Since we are storing JSON encoded stuff there, this should never be changed.
@@ -527,6 +527,13 @@ export const adConfig = { renderStartImplemented: true, }, + 'insticator': { + prefetch: 'https://testthisshit.online/amp-embed-lib/insticator.js', + preconnect: [ + 'https://d3lcz8vpax4lo2.cloudfront.net' + ] + }, + 'invibes': { prefetch: 'https://k.r66net.com/GetAmpLink', renderStartImplemented: true,
[No CFG could be retrieved]
Get a list of URLs that should be fetched from the AMP API. The list of urls that should be fetched for a given sequence number.
would you please also implement renderStart API? in your remote loaded `insticator.js`, it should call `context.renderStart` once content is ready to show. This basically gives AMP runtime signal when to hide loading indicator and show your content.
@@ -39,7 +39,7 @@ To report a bug, please create <a href="https://github.com/forem/forem/issues/new/choose">a bug report</a> in our open source repository. </p> <p> - To request a feature, please visit <a href="https://forem.dev">forem.dev</a> and write a post! + To request a feature, please visit <a href="https://forem.dev">forem.dev</a> and Create Post! </p> </div> </div>
[No CFG could be retrieved]
How to report a bug report?.
This should probably still be "write a post"
@@ -45,12 +45,14 @@ class ExplicitMechanicalSolver(MechanicalSolver): super(ExplicitMechanicalSolver, self).AddVariables() self._add_dynamic_variables() self.main_model_part.AddNodalSolutionStepVariable(StructuralMechanicsApplication.MIDDLE_VELOCITY) + self.main_model_part.AddNodalSolutionStepVariable(StructuralMechanicsApplication.FRACTIONAL_ACCELERATION) self.main_model_part.AddNodalSolutionStepVariable(KratosMultiphysics.NODAL_MASS) self.main_model_part.AddNodalSolutionStepVariable(KratosMultiphysics.FORCE_RESIDUAL) self.main_model_part.AddNodalSolutionStepVariable(KratosMultiphysics.RESIDUAL_VECTOR) if (self.settings["rotation_dofs"].GetBool()): self.main_model_part.AddNodalSolutionStepVariable(StructuralMechanicsApplication.MIDDLE_ANGULAR_VELOCITY) + self.main_model_part.AddNodalSolutionStepVariable(StructuralMechanicsApplication.FRACTIONAL_ANGULAR_ACCELERATION) self.main_model_part.AddNodalSolutionStepVariable(StructuralMechanicsApplication.NODAL_INERTIA) self.main_model_part.AddNodalSolutionStepVariable(KratosMultiphysics.MOMENT_RESIDUAL)
[ExplicitMechanicalSolver->[AddDofs->[PrintInfo,super,_add_dynamic_dofs],_create_solution_scheme->[ExplicitCentralDifferencesScheme,settings,Exception],AddVariables->[PrintInfo,super,_add_dynamic_variables,settings,AddNodalSolutionStepVariable],_create_mechanical_solution_strategy->[MechanicalExplicitStrategy,SetRebuildLevel,GetComputingModelPart,get_solution_scheme,settings],ComputeDeltaTime->[GetComputingModelPart,settings,CalculateDeltaTime],Initialize->[CalculateDeltaTime,GetComputingModelPart,super,AddValue,Parameters,settings],GetDefaultSettings->[super,Parameters,AddMissingParameters],__init__->[settings,super,PrintInfo]],CreateSolver->[ExplicitMechanicalSolver]]
Adds variables to the model.
1. Is this needed to be a historical variable? 2. If yes then please add it only if the "multi_stage" scheme is used
@@ -84,6 +84,14 @@ public class SCMContainerMetrics implements MetricsSource { stateCount.get(DELETING.toString())) .addGauge(Interns.info("DeletedContainers", "Number of containers in deleted state"), + stateCount.get(DELETED.toString())) + .addGauge(Interns.info("TotalContainers", + "Number of all containers"), + stateCount.get(OPEN.toString())+ + stateCount.get(CLOSING.toString())+ + stateCount.get(QUASI_CLOSED.toString())+ + stateCount.get(CLOSED.toString())+ + stateCount.get(DELETING.toString())+ stateCount.get(DELETED.toString())); } }
[SCMContainerMetrics->[create->[SCMContainerMetrics]]]
Get metrics for a single sequence of containers.
Nit: Can this be converted to a loop over all states summing individual values? That way if a new state is introduced, no further change in counting totals will be required.
@@ -90,11 +90,12 @@ export class AmpFlyingCarpet extends AMP.BaseElement { } /** @override */ - onLayoutMeasure() { + onMeasureChanged() { const width = this.getLayoutWidth(); - this.getVsync().mutate(() => { + this.mutateElement(() => { setStyle(this.container_, 'width', width, 'px'); }); + this.scheduleLayout(dev().assertElement(this.container_)); } /** @override */
[No CFG could be retrieved]
Creates a fixed - height that can be used to build the container and the child Checks that the elements of the layoutBox are positioned after the 75% of.
This needs to be reverted. It can only schedule layout for the container if it itself has been laid out.
@@ -350,7 +350,7 @@ public class ExchangeCodecTest extends TelnetCodecTest { Channel channel = getCliendSideChannel(url); Response response = new Response(); response.setHeartbeat(true); - response.setId(1001l); + response.setId(1001L); response.setStatus((byte) 20); response.setVersion("11"); Person person = new Person();
[ExchangeCodecTest->[test_Decode_Return_Request_Heartbeat_Object->[getRequestBytes,decode],test_Decode_Error_Response_Object->[getRequestBytes,decode],testInvalidSerializaitonId->[decode],decode->[decode],test_Decode_Error_MagicNum->[assemblyDataProtocol],test_Decode_Return_Response_Person->[getRequestBytes,decode],test_Decode_Check_Payload->[assemblyDataProtocol],test_Decode_Return_Response_Error->[getRequestBytes,decode],test_Encode_Error_Response->[decode],testMessageLengthGreaterThanMessageActualLength->[decode],test_Decode_Return_Request_Event_String->[getRequestBytes,decode],test_Decode_MigicCodec_Contain_ExchangeHeader->[decode],test_Header_Response_NoSerializationFlag->[getRequestBytes,decode],test_Decode_Return_Request_Event_Object->[getRequestBytes,decode],test_Decode_Return_Request_Object->[getRequestBytes,decode],test_Decode_Error_Request_Object->[getRequestBytes,decode],test_Decode_Error_Length->[getRequestBytes,decode],test_Encode_Request->[decode],test_Encode_Response->[decode],test_Header_Response_Heartbeat->[getRequestBytes,decode]]]
Test encode response.
also this line.
@@ -652,7 +652,6 @@ namespace DynamoCoreWpfTests //create the view View = new DynamoView(ViewModel); - View.Show(); SynchronizationContext.SetSynchronizationContext(new SynchronizationContext()); }
[CoreUserInterfaceTests->[WorkspaceContextMenu_TestIfNotOpenOnNodeRightClick->[CreateNodeOnCurrentWorkspace]]]
Restart Dynamo and restart it .
`RestartTestSetup` is only used in one test which requires the test mode to be turned off but does not require the view to be loaded.
@@ -346,7 +346,8 @@ class SubmissionsController < ApplicationController return end else - @grouping = @assignment.groupings.find(params[:grouping_id]) + @grouping = @assignment.groupings.find(params[:grouping_id]) || + @assignment.groupings.joins(student_memberships: :user).where('users.type': 'TestStudent').first end # The files that will be deleted
[SubmissionsController->[downloads->[revision_identifier,find,group_name,accepted_grouping_for,flash_message,send_file,nil?,redirect_back,get_revision,t,count,send_tree_to_zip,access_repo,open,repository_folder,short_identifier,get_latest_revision,student?,files_at_path],update_submissions->[flash_now,set_pr_release_on_results,short_identifier,empty?,find,has_key?,message,log,where,set_release_on_results,update_results_stats,is_peer_review?,head,t,id,update_remark_request_count],download_repo_list->[short_identifier,get_repo_list,send_data,find,allowed_to?],zipped_grouping_file_name->[short_identifier,new,user_name],run_tests->[find,has_key?,host_with_port,flash_message,join,map,flash_now,protocol,is_a?,message,current_submission_used,t,empty?,blank?,job_id,id,short_identifier,perform_later,render,head,authorize!],set_filebrowser_vars->[get_latest_revision,access_repo,files_at_path,join,repository_folder,missing_assignment_files],server_time->[now,render,l],zip_groupings_files->[to_s,perform_later,find,render,zipped_grouping_file_name,where,job_id,ta?,ids,id,map],download_repo_checkout_commands->[short_identifier,send_data,find,allowed_to?,get_repo_checkout_commands,join],uncollect_all_submissions->[perform_later,js,find,respond_to,render,job_id],file_manager->[find,can_collect_now?,is_timed,allow_web_submits,accepted_grouping_for,flash_message,set_filebrowser_vars,nil?,vcs_submit,t,grouping_past_due_date?,l,human_attribute_name,blank?,due_date,id,is_valid?,render,section,head,overtime_message,authorize!],download_zipped_file->[find,basename,redirect_back,zipped_grouping_file_name,flash_message,send_file,t],set_result_marking_state->[flash_now,empty?,key?,transaction,first,save,message,each,raise,where,select,head,t,marking_state],browse->[section_due_dates_type,flash_now,l,nil?,find,new,scanned_exam,calculate_collection_time,past_all_collection_dates?,each,layout,now,ta?,join,name,t,find_each,push],index->[render,find,respond_to,pluck,json,current_submission_data],
get_all_file_data->[path_exists?,compact,sort,anonymize_groups,ta?,join,repository_folder,count],revisions->[server_timestamp,to_s,get_all_revisions,find,path_exists?,timestamp,render,changes_at_path?,access_repo,revision_identifier_ui,each,repository_folder,l,map],update_files->[only_required_files,find,allow_web_submits,accepted_grouping_for,flash_message,set_filebrowser_vars,join,map,reject!,redirect_back,message,concat,add_files,remove_folders,t,commit_transaction,flash_repository_messages,empty?,access_repo,remove_files,get_transaction,blank?,present?,gsub,add_folders,push,is_valid?,user_name,student?,casecmp?,pluck,raise,authorize!],get_feedback_file->[file_content,find,filename,send_data_download,authorize!],collect_submissions->[find,has_key?,current,flash_now,to_set,transform_keys,t,count,empty?,all_grouping_collection_dates,include?,each,present?,job_id,id,short_identifier,perform_later,render,is_collected?,head,scanned_exam?],download->[nil?,get_latest_revision,find,render,send_data_download,access_repo,download_as_string,find_appropriate_grouping,files_at_path,message,get_revision,join,t,repository_folder,id],get_file->[revision_identifier,find,flash_message,get_file_type,is_binary?,is_supported_image?,encode!,nil?,redirect_back,download_as_string,get_revision,t,is_a_reviewer?,access_repo,to_json,filename,grouping,id,is_pdf?,path,render,student?,files_at_path,pr_assignment],manually_collect_and_begin_grading->[redirect_to,nil?,find,edit_assignment_submission_result_path,perform_now,assessment_id,current_submission_used,id],repo_browser->[server_timestamp,nil?,get_all_revisions,find,path_exists?,revision_identifier,to_s,render,changes_at_path?,access_repo,in_time_zone,current_submission_used,each,repository_folder,assignment],populate_file_manager->[get_all_file_data,get_latest_revision,find,render,student?,access_repo,accepted_grouping_for,get_revision,blank?],before_action,include,authorize!]]
Updates a single node in the group. See add_files, add_folders, commit_success, flash_messages.
This should not be necessary; the `params[:grouping_id]` value should still be correct.
@@ -51,12 +51,16 @@ module T::Props accessor_key: Symbol, non_nil_type: T.any(T::Types::Base, T.all(T::Props::CustomType, Module)), klass: T.all(Module, T::Props::ClassMethods), + validate: T.nilable(ValidateProc) ) .returns(SetterProc) end - private_class_method def self.non_nil_proc(prop, accessor_key, non_nil_type, klass) + private_class_method def self.non_nil_proc(prop, accessor_key, non_nil_type, klass, validate) proc do |val| if non_nil_type.valid?(val) + if validate + validate.call(prop, val) + end instance_variable_set(accessor_key, val) else T::Props::Private::SetterFactory.raise_pretty_error(
[raise_pretty_error->[path,find,include?,class,call_validation_error_handler,inspect,name,lineno],non_nil_proc->[raise_pretty_error,proc,instance_variable_set,valid?],build_setter_proc->[fetch,need_nil_write_check?,nil?,non_nil_proc,key?,nilable_proc,get_underlying_type_object],nilable_proc->[nil?,raise_pretty_error,proc,instance_variable_set,valid?],type_alias,checked,extend,sig,returns,void,private_class_method]
Returns a proc that will set the value of the given non-nil property if it is valid.
Is this `if` in the setter path OK? Or would it be better to do something like `if validate; setter_proc = then(validate, setter_proc)`?
@@ -93,7 +93,7 @@ module Idv end def set_idv_form - @idv_form ||= Idv::PhoneForm.new( + @idv_form = Idv::PhoneForm.new( user: current_user, previous_params: idv_session.previous_phone_step_params, allowed_countries: ['US'],
[PhoneController->[set_idv_form->[new],step->[new],async_state_done->[async_state_done]]]
Set the idv form if not set.
Removed memoization because this is a before filter, so it's always called
@@ -252,6 +252,9 @@ public class ProPurchaseOption { } private double calculateLandDistanceFactor(final int enemyDistance) { + if (movement <= 0) { + return 0.1; + } final double distance = Math.max(0, enemyDistance - 1.5); final int moveValue = isLandTransport ? (movement + 1) : movement; // 1, 2, 2.5, 2.75, etc
[ProPurchaseOption->[calculateEfficiency->[calculateEfficiency]]]
Calculate the land distance factor.
What's the rationale for 0.1?
@@ -97,7 +97,8 @@ public class ProDummyDelegateBridge implements IDelegateBridge { } @Override - public void enterDelegateExecution() {} + public void enterDelegateExecution() { + } @Override public void addChange(final Change change) {
[ProDummyDelegateBridge->[getRandom->[getRandom]]]
Add a change to the list of changes that have occurred in the current execution.
Seems like your formatter isn't configured correctly.
@@ -76,13 +76,6 @@ class Artifact(Model): Fields: file (models.FileField): The stored file. - downloaded (models.BooleanField): The associated file has been successfully downloaded. - requested (models.BooleanField): The associated file has been requested by a client at - least once. - relative_path (models.TextField): The artifact's path relative to the associated - :class:`Content`. This path is incorporated in the absolute storage path of the file - and its published path relative to the root publishing directory. At a minimum the path - will contain the file name but may also include sub-directories. size (models.IntegerField): The size of the file in bytes. md5 (models.CharField): The MD5 checksum of the file. sha1 (models.CharField): The SHA-1 checksum of the file.
[Content->[natural_key_digest->[getattr,update,sorted,hexdigest,sha256,isinstance,encode],__str__->[,cast,natural_key,repr,join,zip],natural_key->[tuple,getattr],GenericKeyValueRelation],Artifact->[CharField,ArtifactLocation,ForeignKey,FileField,BooleanField,TextField,IntegerField]]
Returns a string representation of the object. Required fields for the n-lease record.
If the file itself is required, why not require this as well?
@@ -612,7 +612,6 @@ class WavReader(BaseReader): def read_item(self, data_id): return DataRepresentation(*self.read_dispatcher(data_id), identifier=data_id) - class DicomReader(BaseReader): __provider__ = 'dicom_reader'
[ReaderCombiner->[parameters->[DataReaderField],read->[read],configure->[get_value_from_config]],OpenCVFrameReader->[_read_sequence->[read],configure->[get_value_from_config]],RawpyReader->[configure->[get_value_from_config]],NumpyDictReader->[read_item->[DataRepresentation]],NiftiImageReader->[configure->[get_value_from_config]],NumpyBinReader->[configure->[get_value_from_config]],WavReader->[read_item->[DataRepresentation]],NumPyReader->[configure->[get_value_from_config]],AnnotationFeaturesReader->[configure->[get_value_from_config],_read_list->[read]],JSONReader->[configure->[get_value_from_config]],BaseReader->[read_item->[DataRepresentation],get_value_from_config->[parameters],validate_config->[validate,validate_config],configure->[get_value_from_config],_read_list->[read]],OpenCVImageReader->[configure->[get_value_from_config]],PickleReader->[read_item->[DataRepresentation]],DataReaderField->[validate->[validate,DictReaderValidator]]]
Read a single item from the data source.
Please restore the empty line (recommended for readability; classes are usually separated by two blank lines).
@@ -118,7 +118,7 @@ class SemanticRoleLabeler(Model): # Negative log likelihood criterion takes integer labels, not one hot. if tags.dim() == 3: _, tags = tags.max(-1) - loss = self.sequence_loss(reshaped_log_probs, tags.view(-1)) + loss = weighted_cross_entropy_with_logits(logits, tags, mask) output_dict["loss"] = loss return output_dict
[SemanticRoleLabeler->[tag->[forward],from_params->[from_params]]]
Forward computation of the encoder on all registered encoders. This method is called to find the tag and label of a given node in the model.
Remove the `self.sequence_loss` field and associated TODO above.
@@ -80,6 +80,10 @@ def find_usecase(x, y): return x.find(y) +def count_usecase(x, y): + return x.find(y) + + def startswith_usecase(x, y): return x.startswith(y)
[TestUnicodeInTuple->[test_ascii_flag_getitem->[f],test_ascii_flag_join->[f],test_const_unicode_in_tuple->[f],test_ascii_flag_add_mul->[f],test_const_unicode_in_hetero_tuple->[f],test_ascii_flag_unbox->[f]],TestUnicode->[test_basic_lt->[pyfunc],test_pointless_slice->[pyfunc],test_ge->[_check_ordering_op],test_comparison->[pyfunc],test_literal_find->[pyfunc],test_walk_backwards->[pyfunc],test_literal_xyzwith->[pyfunc],test_lt->[_check_ordering_op],test_basic_gt->[pyfunc],test_le->[_check_ordering_op],test_stride_slice->[pyfunc],test_literal_concat->[pyfunc],test_literal_in->[pyfunc],test_gt->[_check_ordering_op],test_literal_comparison->[pyfunc],test_literal_getitem->[pyfunc],test_literal_len->[pyfunc]],TestUnicodeIteration->[test_unicode_stopiteration_iter->[f],test_unicode_literal_stopiteration_iter->[f],test_unicode_iter->[pyfunc],test_unicode_enumerate_iter->[pyfunc],test_unicode_literal_iter->[pyfunc]]]
Check if x starts with y.
Copy-paste error: `find` -> `count`.
@@ -52,7 +52,7 @@ public class MuleRegistryHelper implements MuleRegistry { /** * A reference to Mule's internal registry */ - private Registry registry; + private final Registry registry; /** * We cache transformer searches so that we only search once
[MuleRegistryHelper->[unregisterObject->[unregisterObject],lookupLocalObjects->[lookupLocalObjects],registerObject->[postObjectRegistrationActions,registerObject],lookupObjectsForLifecycle->[lookupObjectsForLifecycle],lookupObjects->[lookupObjects],dispose->[dispose],applyLifecycle->[applyLifecycle],fireLifecycle->[initialise,dispose,fireLifecycle],isSingleton->[isSingleton],lookupObject->[lookupObject],get->[get],lookupByType->[lookupByType],postObjectRegistrationActions->[notifyTransformerResolvers],registerObjects->[postObjectRegistrationActions,registerObjects],unregisterTransformer->[notifyTransformerResolvers,lookupTransformer]]]
Creates a class which implements the MuleRegistry interface and initializes it.
Also this one?
@@ -588,7 +588,7 @@ public class BigtableIO { public PDone expand(PCollection<KV<ByteString, Iterable<Mutation>>> input) { getBigtableConfig().validate(); - input.apply(ParDo.of(new BigtableWriterFn(getBigtableConfig().getTableId(), + input.apply(ParDo.of(new BigtableWriterFn(getBigtableConfig().getTableId().get(), new SerializableFunction<PipelineOptions, BigtableService>() { @Override public BigtableService apply(PipelineOptions options) {
[BigtableIO->[BigtableReader->[getFractionConsumed->[getFractionConsumed],getSplitPointsConsumed->[getSplitPointsConsumed],start->[start,createReader,makeByteKey],close->[close],advance->[makeByteKey,advance],splitAtFraction->[getRange,withStartKey,withEndKey],getRange],Write->[withProjectId->[build,getBigtableConfig],withBigtableOptions->[withBigtableOptions,build,toBuilder,getBigtableConfig],withBigtableService->[build,getBigtableConfig],expand->[validate,apply,getTableId],BigtableWriterFn->[checkForFailures->[toString]],withoutValidation->[build,getBigtableConfig],create->[build],validate->[getBigtableConfig],getBigtableOptions->[getBigtableOptions],populateDisplayData->[populateDisplayData],toString->[toString],withInstanceId->[build,getBigtableConfig],withBigtableOptionsConfigurator->[build,getBigtableConfig],withTableId->[build,getBigtableConfig],enableBulkApiConfigurator->[apply->[apply,build]]],Read->[withProjectId->[build,getBigtableConfig],withBigtableOptions->[withBigtableOptions,build,toBuilder,getBigtableConfig],withBigtableService->[build,getBigtableConfig],expand->[getRowFilter,apply,getKeyRange,getTableId],withRowFilter->[build],withoutValidation->[build,getBigtableConfig],withKeyRange->[build],getTableId->[getTableId],validate->[getBigtableConfig],getBigtableOptions->[getBigtableOptions],populateDisplayData->[getRowFilter,populateDisplayData],toString->[toString],withInstanceId->[build,getBigtableConfig],withBigtableOptionsConfigurator->[build,getBigtableConfig],withTableId->[build,getBigtableConfig]],BigtableSource->[withStartKey->[withStartKey,BigtableSource],getEstimatedSizeBytes->[getSampleRowKeys],withEndKey->[BigtableSource,withEndKey],splitKeyRangeIntoBundleSizedSubranges->[build,split,withEstimatedSizeBytes,withEndKey],split->[getSampleRowKeys],splitBasedOnSamples->[makeByteKey,build,withEndKey],withEstimatedSizeBytes->[BigtableSource],getEstimatedSizeBytesBasedOnSamples->[makeByteKey],createReader->[apply],populateDisplayData->[populateDisp
layData],toString->[toString],getSampleRowKeys->[getSampleRowKeys]],validateTableExists->[getTableId]]]
Expand the BigtableWriter with a sequence of mutations.
This will be resolved at pipeline construction time, effectively making it so that only static value providers can be used.