patch stringlengths 18 160k | callgraph stringlengths 4 179k | summary stringlengths 4 947 | msg stringlengths 6 3.42k |
|---|---|---|---|
@@ -54,7 +54,7 @@ class InviteDialog extends Component {
* @inheritdoc
*/
componentWillUnmount() {
- sendAnalyticsEvent(TOOLBAR_INVITE_CLOSE);
+ sendAnalytics(createInviteDialogClosedEvent);
}
/**
| [No CFG could be retrieved] | InviteDialog component for the invite dialog. Maps the state of the given object to the associated { code InviteDialog s *. | Should be a function invocation? |
@@ -81,6 +81,7 @@ const App = () => (
</>
)
+window.isOriginAdmin = true
export default App
require('react-styl')(`
| [No CFG could be retrieved] | A simple export that exports a single - level hierarchy of components that can be used to create Displays a header of a n - tuple. | I think perhaps we should make the linker opt-int instead of opt-out. Perhaps with window.enableOriginLinker or something? Just thinking of users using a future origin-graphql npm module |
@@ -53,6 +53,12 @@ module Api
render json: { errors: errors }, status: 400
end
+
+ def send_log_to_timber
+ timber.log(:info, :REST_API_REQUEST,
+ oauth_access_token_id: doorkeeper_token.id
+ )
+ end
end
end
end
| [ApplicationController->[prepare_params!->[new,sub,classify,valid?,name],response_params_error->[render,map],current_user->[find,blank?,resource_owner_id],not_found->[render],doorkeeper_authorize!,include,rescue_from,skip_before_action,before_action,attr_reader]] | response_params error_nack. | Layout/MultilineMethodCallBraceLayout: Closing method call brace must be on the same line as the last argument when opening brace is on the same line as the first argument. |
@@ -8,7 +8,7 @@ cases.push({
func: async function (a) { await a; },
expectText: `exports.handler = __f0;
-function __f0() {
+function __f0(__0) {
return (function() {
with({ }) {
| [No CFG could be retrieved] | JS - Private API JS - Closure function captures V8 intrinsic. | note: we never actually consume these. i didn't want to change semantics in some unforseen manner. so in the body, we still reference `arguments`. These params just serve to ensure that `func.length` returns the *declared* param count so that we work with other libraries that check for that. |
@@ -204,11 +204,11 @@ class GradersController < ApplicationController
end
when 'criteria_table'
@assignment = Assignment.find(params[:assignment_id],
- :include => [{:groupings => [:students,
- {:tas => :criterion_ta_associations}, :group]}])
+ include: [{groupings: [:students,
+ {tas: :criterion_ta_associations}, :group]}])
if params[:criteria].nil? or params[:criteria].size == 0
#don't do anything if no criteria
- render :nothing => true
+ render nothing: true
return
end
criteria = criteria_with_assoc(@assignment,
| [GradersController->[render_grouping_modifications->[criteria_with_assoc,groupings_with_assoc]]] | This method checks if there is a specific global action in the current context. If it is if params has a list of graders add them to the criteria otherwise remove them from the. | Align the elements of an array literal if they span more than one line.<br>Space inside } missing.<br>Space inside { missing. |
@@ -39,6 +39,7 @@ class Internal::ArticlesController < Internal::ApplicationController
article.update!(article_params)
Article.where.not(id: article.id).where(live_now: true).update_all(live_now: false) if article.live_now
CacheBuster.bust("/live_articles")
+ Audit::Logger.log(:moderator, current_user, params.dup)
render body: nil
end
| [show->[find],articles_boosted_additional->[per],articles_satellite->[per],articles_chronological->[per],article_params->[permit],update->[to_s,find,to_i,render,email_digest_eligible,approved,boosted_dev_digest_email,featured,bust,user_id,update_all,boosted_additional_articles,update!,live_now],articles_mixed->[per],authorize_admin->[authorize],articles_not_buffered->[per],articles_featured->[order],articles_top->[per],index->[to_f,ago,articles_not_buffered,articles_top,includes],layout] | Updates an existing article in the database. | What if we make all of these a `after_action` at the top of the controllers? This way they don't need to be sprinkled around the code and it's super clear to the reader which actions are audited |
@@ -40,11 +40,11 @@ async function postToDiscordWebhook(url, data) {
}
const icon = eventIcons[data.log.eventName] || ':dromedary_camel: '
- const listing = data.related.listing
+ const listing = data.listing
let discordData = {}
- if (data.related.offer !== undefined) {
+ if (data.offer !== undefined) {
// Offer
discordData = {
embeds: [
| [No CFG could be retrieved] | Posts a to the discord channel via webhook. Triggers on Identity event to add the user s email to the list. | The GCF for IPFS pinner relies on the related field so I think we'll need to update it. |
@@ -112,6 +112,18 @@ class Reporting(Subsystem):
raise ValueError(
"Flags zipkin-trace-id and zipkin-parent-id must both either be set or not set."
)
+ if trace_id and (len(trace_id) != 16 and len(trace_id) != 32 or \
+ any(ch not in set('0123456789abcdefABCDEF') for ch in trace_id)):
+ raise ValueError(
+ "Value of the flag zipkin-trace-id must be a 16-character or 32-character hex string. "
+ + "Got {}.".format(trace_id)
+ )
+ if parent_id and (len(parent_id) != 16 or \
+ any(ch not in set('0123456789abcdefABCDEF') for ch in parent_id)):
+ raise ValueError(
+ "Value of the flag zipkin-parent-id must be a 16-character hex string. "
+ + "Got {}.".format(parent_id)
+ )
if zipkin_endpoint is not None:
zipkin_reporter_settings = ZipkinReporter.Settings(log_level=Report.INFO)
| [Reporting->[update_reporting->[_consume_stringio,_get_invalidation_report],initialize->[initialize]]] | Initialize with the given RunTracker. This function is called when a node is missing a node. It will create a Zipkin. | You can actually use `string.hexdigits` here, rather than writing it down explicitly :) |
@@ -92,7 +92,7 @@ class WP_Test_WPCOM_REST_API_V2_Endpoint_Admin_Menu extends WP_Test_Jetpack_REST
$request = wp_rest_request( Requests::GET, '/wpcom/v2/admin-menu' );
$response = $this->server->dispatch( $request );
- $menu = wp_list_filter( $response->get_data(), array( 'title' => 'Settings' ) );
+ $menu = wp_list_filter( $response->get_data(), array( 'title' => 'Jetpack' ) );
$menu_item = array_pop( $menu );
$this->assertNotEmpty( $menu_item );
| [WP_Test_WPCOM_REST_API_V2_Endpoint_Admin_Menu->[test_get_item->[assertTrue,get_data,get_public_item_schema,dispatch],test_get_item_permissions_check->[dispatch,assertErrorResponse],test_prepare_menu_item_icon->[invokeArgs,getMethod,setAccessible,assertEquals],wpSetUpBeforeClass->[create],test_prepare_menu_item->[invokeArgs,getMethod,setAccessible,assertEquals],test_prepare_menu_item_url->[invokeArgs,getMethod,setAccessible,assertEquals],test_prepare_submenu_item->[invokeArgs,getMethod,setAccessible,assertSame],test_parent_menu_item_always_exists->[get_data,assertNotEmpty,dispatch,assertSame],test_parse_markup_data->[invokeArgs,getMethod,setAccessible,assertSame],test_schema_request->[get_public_item_schema,get_data,assertEquals,dispatch]]] | Test parent menu item always exists. | The Settings menu no longer returns submenu items for Jetpack sites, so I had to update this test to pick another menu with submenus. |
@@ -116,7 +116,8 @@ public class GobblinHelixJobScheduler extends JobScheduler implements StandardMe
this.jobCatalog = jobCatalog;
this.metricContext = Instrumented.getMetricContext(new org.apache.gobblin.configuration.State(properties), this.getClass());
- int metricsWindowSizeInMin = ConfigUtils.getInt(ConfigUtils.propertiesToConfig(this.properties),
+ Config jobConfig = ConfigUtils.propertiesToConfig(this.properties);
+ int metricsWindowSizeInMin = ConfigUtils.getInt(jobConfig,
ConfigurationKeys.METRIC_TIMER_WINDOW_SIZE_IN_MINUTES,
ConfigurationKeys.DEFAULT_METRIC_TIMER_WINDOW_SIZE_IN_MINUTES);
| [GobblinHelixJobScheduler->[startUp->[startUp],scheduleJob->[scheduleJob],handleNewJobConfigArrival->[scheduleJob],NonScheduledJobRunner->[run->[runJob]],scheduleJobImmediately->[cancel->[cancel],get->[get],isDone->[isDone],isCancelled->[isCancelled]],handleUpdateJobConfigArrival->[handleNewJobConfigArrival]]] | Creates a new instance of GobblinHelixJobScheduler. This method is called by the GobblinHelixMetrics class to initialize the list. | Conversion of all the properties to Config looks unnecessary to me. |
@@ -1,6 +1,7 @@
module DocAuth
class Response
- attr_reader :errors, :exception, :extra, :pii_from_doc
+ attr_reader :errors, :exception, :pii_from_doc
+ attr_accessor :extra # so we can chain extra analytics
def initialize(success:, errors: [], exception: nil, extra: {}, pii_from_doc: {})
@success = success
| [Response->[merge->[new,exception,merge,extra,success?,pii_from_doc,errors],to_h->[merge],attr_reader]] | Initialize a new object with all the attributes of the next object. | I'm not sure I like this `attr_accessor` ... I like the idea of immutable response objects --- if we want to combine stuff in, how about we use `#merge` to combine in new ones? Also techincally since `extra` is a hash, which is mutable by default, we could modify the `extra` with just an `attr_reader` still (not that I would recommend it) |
@@ -0,0 +1,11 @@
+package models
+
+import "time"
+
+// SyncEvent represents an event sourcing style event, which is used to sync
+// data upstream with another service
+type SyncEvent struct {
+ ID int `json:"id" gorm:"primary_key"`
+ CreatedAt time.Time `json:"createdAt" gorm:"index"`
+ Body string
+}
| [No CFG could be retrieved] | No Summary Found. | optional: `gorm.Model` makes this as a `uint`, we could follow their lead. |
@@ -98,8 +98,8 @@ import org.slf4j.LoggerFactory;
* <h3>Writing to Kudu</h3>
*
* <p>The Kudu sink executes a set of operations on a single table. It takes as input a {@link
- * PCollection PCollection<T>} and a {@link FormatFunction<T>} which is responsible for
- * converting the input into an idempotent transformation on a row.
+ * PCollection PCollection} and a {@link FormatFunction} which is responsible for converting the
+ * input into an idempotent transformation on a row.
*
* <p>To configure a Kudu sink, you must supply the Kudu master addresses, the table name and a
* {@link FormatFunction} to convert the input records, for example:
| [KuduIO->[Read->[withProjectedColumns->[build],withBatchSize->[build],expand->[inferCoder],withMasterAddresses->[build],inferCoder->[getParseFn,getCoder],withPredicates->[build],withKuduService->[build],withTable->[build],withParseFn->[build],withCoder->[build],validate->[getTable,getMasterAddresses,getParseFn],populateDisplayData->[getTable,populateDisplayData],withFaultTolerent->[build]],Write->[withMasterAddresses->[build],withKuduService->[build],withTable->[build],validate->[masterAddresses,table,formatFn],withFormatFn->[build],populateDisplayData->[populateDisplayData,table],WriteFn->[populateDisplayData->[populateDisplayData,table],processElement->[write]]],KuduSource->[createReader->[createReader],split->[getCoder]]]] | This function is used to provide the type of coder to be used in the type er This class is used to create a Builde - IO object from a given K. | Now there's a spotless violation for this paragraph. |
@@ -31,7 +31,10 @@ class Recola(CMakePackage):
def cmake_args(self):
args = [
- '-DCMAKE_VERBOSE_MAKEFILE=ON',
+ self.define(static, True),
+ self.define(CMAKE_VERBOSE_MAKEFILE, True),
+ self.define(collier_path, self.spec['collier'].prefix.lib.cmake),
+ self.define(modelfile_path, self.spec['recola-sm'].prefix.lib.cmake),
self.define_from_variant("with_python3", 'python'),
]
return args
| [Recola->[cmake_args->[define_from_variant],variant,depends_on,version]] | Return the list of arguments for cmake. | These should all be quoted: they're strings becoming cmake variables, not python variables. How is this not causing the ci pipeline to fail?? |
@@ -8,11 +8,12 @@ require_once dirname( __FILE__ ) . '/class.jetpack-sync-modules.php';
* This class monitors actions and logs them to the queue to be sent
*/
class Jetpack_Sync_Listener {
- const QUEUE_SIZE_CHECK_TRANSIENT = "jetpack_sync_last_checked_queue_size";
- const QUEUE_SIZE_CHECK_TIMEOUT = 300; // 5 minutes
+ const QUEUE_STATE_CHECK_TRANSIENT = "jetpack_sync_last_checked_queue_state";
+ const QUEUE_STATE_CHECK_TIMEOUT = 300; // 5 minutes
private $sync_queue;
- private $sync_queue_limit;
+ private $sync_queue_size_limit;
+ private $sync_queue_lag_limit;
// singleton functions
private static $instance;
| [Jetpack_Sync_Listener->[set_defaults->[set_queue_limit],action_handler->[is_over_queue_limit]]] | Get the instance of the class. | this could use simple quotes :) |
@@ -53,10 +53,14 @@ type Manager struct {
metricsRegistry metrics.Registry
bufferPool httputil.BufferPool
defaultRoundTripper http.RoundTripper
- balancers map[string][]healthcheck.BalancerHandler
- configs map[string]*runtime.ServiceInfo
- api http.Handler
- rest http.Handler
+ // balancers is the map of all Balancers, keyed by service name.
+ // There is one Balancer per service handler, and there is one service handler per reference to a service
+ // (e.g. if 2 routers refer to the same service name, 2 service handlers are created),
+ // which is why there is not just one Balancer per service name.
+ balancers map[string][]healthcheck.Balancer
+ configs map[string]*runtime.ServiceInfo
+ api http.Handler
+ rest http.Handler
}
// BuildHTTP Creates a http.Handler for a service configuration.
| [getLoadBalancerWRRServiceHandler->[BuildHTTP],getLoadBalancerMirrorServiceHandler->[BuildHTTP]] | BuildHTTP creates a new HTTP handler for the given service getLoadBalancerMirrorServiceHandler - get the mirror service handler. | You should use the `healthcheck.Balancers` instead of `[]healthcheck.Balancer`. |
@@ -259,6 +259,11 @@ module.exports = JhipsterServerGenerator.extend({
this.serviceDiscoveryType = false;
}
+ // If the service discovery is not defined, it is disabled by default
+ if (this.apiFirst === undefined) {
+ this.apiFirst = false;
+ }
+
// If translation is not defined, it is enabled by default
if (this.enableTranslation === undefined) {
this.enableTranslation = true;
| [No CFG could be retrieved] | This function checks if the configuration is valid and if not generates a random key. The config options for the current user. | wouldn't this be set already above seems redundant and by making it boolean such defaulting will be unnecessary anyway |
@@ -697,6 +697,17 @@ void stepstag_state::vjdash_map(address_map &map)
}
+void stepstag_state::ymz280b_map(address_map &map)
+{
+ // map.global_mask(0x3fffff);
+ map(0x000000, 0xffffff).ram().share("ymz_ram");
+}
+
+void stepstag_state::ymz280b_map1(address_map &map)
+{
+ map(0x000000, 0xffffff).ram().share("ymz_ram1");
+}
+
/***************************************************************************
| [No CFG could be retrieved] | region Private functions 0x0004 - 0x0002 - 0x0002 - 0x0002 - 0x. | When you have a member for finding the share, you should use it rather than a literal tag in the map. |
@@ -54,7 +54,11 @@ export function parseUrl(url) {
};
// For data URI a.origin is equal to the string 'null' which is not useful.
// We instead return the actual origin which is the full URL.
- info.origin = (a.origin && a.origin != 'null') ? a.origin : getOrigin(info);
+ info.origin = (a.origin && a.origin != 'null')
+ ? a.origin
+ : info.protocol == 'data:' || !info.host
+ ? info.href
+ : info.protocol + '//' + info.host;
user.assert(info.origin, 'Origin must exist');
// Freeze during testing to avoid accidental mutation.
cache[url] = (window.AMP_TEST && Object.freeze) ? Object.freeze(info) : info;
| [No CFG could be retrieved] | Parses a URL into Location - like objects. Appends a query string field and value to a url. | I am a big fan of ternary operators. But after a certain point it becomes hard to read, why not use if an else statements? |
@@ -0,0 +1,15 @@
+package com.baeldung.xmlapplicationcontext;
+
+import org.springframework.boot.SpringApplication;
+import org.springframework.boot.autoconfigure.SpringBootApplication;
+import org.springframework.context.annotation.ImportResource;
+
+@SpringBootApplication
+@ImportResource({"classpath*:application-context.xml"})
+public class XmlBeanApplication {
+
+ public static void main(String[] args) {
+ SpringApplication.run(XmlBeanApplication.class, args);
+ }
+
+}
| [No CFG could be retrieved] | No Summary Found. | Indent with spaces instead of tabs |
@@ -58,9 +58,10 @@ class Resolver(object):
self.use_user_site = use_user_site
self.use_pep517 = use_pep517
- self._discovered_dependencies = defaultdict(list)
+ self._discovered_dependencies = defaultdict(list) # type: DefaultDict[str, List] # noqa: E501
def resolve(self, requirement_set):
+ # type: (RequirementSet) -> None
"""Resolve what operations need to be done
As a side-effect of this method, the packages (and their dependencies)
| [Resolver->[_resolve_one->[add_req,_get_abstract_dist_for],get_installation_order->[schedule->[schedule],schedule],_check_skip_installed->[_set_req_to_reinstall,_is_upgrade_allowed],_get_abstract_dist_for->[_check_skip_installed,_set_req_to_reinstall,_is_upgrade_allowed]]] | Initialize the resolver. This function is called when a link type is not found. | Break this line but don't put a `# noqa: E501`. |
@@ -22,7 +22,7 @@ class FilemapIntegrationTest(PantsRunIntegrationTest):
scan_set = set()
def should_ignore(file):
- return file.endswith('.pyc')
+ return file.endswith('.pyc') or file.endswith('__init__.py')
for root, dirs, files in project_tree.walk(''):
scan_set.update({os.path.join(root, f) for f in files if not should_ignore(f)})
| [FilemapIntegrationTest->[test_exclude_globs->[_extract_exclude_output],test_exclude_strings->[_extract_exclude_output],test_exclude_invalid_string->[_mk_target],setUp->[should_ignore],test_implicit_sources->[_extract_exclude_output],test_exclude_zglobs->[_extract_exclude_output],test_exclude_set->[_extract_exclude_output],test_exclude_list_of_strings->[_extract_exclude_output],test_exclude_composite->[_extract_exclude_output],_extract_exclude_output->[_mk_target],test_exclude_rglobs->[_extract_exclude_output]]] | Checks if all files in the project tree are in the test_exclude_files list. | V2 injects `__init__.py` that isn't normally there. |
@@ -945,11 +945,15 @@ ec_rec_update(struct btr_instance *tins, struct btr_record *rec,
d_iov_t *key, d_iov_t *val)
{
struct ec_rec *r = umem_off2ptr(&tins->ti_umm, rec->rec_off);
+ int rc;
if (val->iov_len != sizeof(r->er_counter))
return -DER_INVAL;
- umem_tx_add_ptr(&tins->ti_umm, r, sizeof(*r));
+ rc = umem_tx_add_ptr(&tins->ti_umm, r, sizeof(*r));
+ if (rc)
+ return rc;
+
r->er_counter = *(uint64_t *)val->iov_buf;
return 0;
}
| [No CFG could be retrieved] | free a record from the table. returns a string of the specified type of record in the tree. | (style) trailing whitespace |
@@ -1,3 +1,5 @@
+<% title t('account.navigation.connected_accounts') %>
+
<h1 class="mt0">
<%= t('headings.account.connected_accounts') %>
</h1>
| [No CFG could be retrieved] | list of connected apps that can be found in the system. | Thinking out loud a bit: It's sensible that the page's title and the page's `h1` are the same, which raises a question whether it's worth having duplicate strings for these. And if we do have duplicate strings (one for the navigation and one for the page title), whether the value we use in `title` should be the same as the page's `h1`, or the same as what's shown in the navigation. |
@@ -31,7 +31,10 @@ import java.util.Collection;
*
* <p>See <a href="http://docs.oracle.com/javase/7/docs/api/java/nio/channels/package-summary.html"
* >Java NIO Channels</a>
+ *
+ * @deprecated This is under redesign, see: https://issues.apache.org/jira/browse/BEAM-59.
*/
+@Deprecated
public interface IOChannelFactory {
/**
| [No CFG could be retrieved] | Provides a read channel for a given specification. Opens a read - only byte channel for a given . | Seems `@Experimental` rather than `@Deprecated` |
@@ -145,6 +145,11 @@ class TypeCheckSuite(Suite):
if incremental and res:
self.verify_cache(module_name, program_name, a, res.manager)
+ if incremental == 2:
+ assert_string_arrays_equal(
+ list(sorted(testcase.expected_stale_modules)),
+ list(sorted(res.stale)),
+ 'Set of stale modules does not match expected set')
def verify_cache(self, module_name: str, program_name: str, a: List[str],
manager: build.BuildManager) -> None:
| [TypeCheckSuite->[verify_cache->[find_error_paths,update,find_module_files,find_missing_cache_files,AssertionFailure],run_test_once->[,verify_cache,walk,normalize_error_messages,write,build,assert_string_arrays_equal,parse_module,endswith,copy,join,testcase_pyversion,BuildSource,open,update_testcase_output,parse_options,find_module_clear_caches],find_error_paths->[add,group,match,set],parse_module->[group,search,read,find_module,open],clear_cache->[exists,rmtree],find_module_files->[walk,splitext,split,endswith,join],find_missing_cache_files->[set,values,find_cache_meta,items],run_test->[run_test_once,lower,clear_cache],cases->[parse_test_cases,join],parse_options->[group,setattr,search,Options]]] | Run a single test only if the testcase is run once. Checks if the cache metadata for each module is valid. | Maybe only do this if expected_stale_modules is set? |
@@ -183,7 +183,7 @@ const WalletsList = () => {
console.log('onSnapToItem', wallets.length === index ? 'NewWallet/Importing card' : index);
if (wallets[index] && (wallets[index].timeToRefreshBalance() || wallets[index].timeToRefreshTransaction())) {
console.log(wallets[index].getLabel(), 'thinks its time to refresh either balance or transactions. refetching both');
- refreshAllWalletTransactions(index, false).finally(() => setIsLoading(false));
+ refreshAllWalletTransactions(false).finally(() => setIsLoading(false));
}
currentWalletIndex.current = index;
} else {
| [No CFG could be retrieved] | Displays the menu of transactions that can be refreshed. Renders the nagios list. | if you want to update all walllets on pull-to-refresh you need to mess with `onRefresh`, not with `onSnapToItem` |
@@ -18,9 +18,10 @@ from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
-from tensorflow.python.layers import utils
from tensorflow.python.platform import test
+from tensorflow.python.layers import utils
+
class ConvUtilsTest(test.TestCase):
| [ConvUtilsTest->[testConvInputLength->[assertEqual,conv_input_length],testConvertDataFormat->[assertRaises,convert_data_format,assertEqual],testNormalizeDataFormat->[assertRaises,assertEqual,normalize_data_format],testConvOutputLength->[assertEqual,conv_output_length],testDeconvOutputLength->[assertEqual,deconv_output_length],testNormalizePadding->[assertRaises,assertEqual,normalize_padding],testNormalizeTuple->[assertRaises,assertEqual,normalize_tuple]],main] | Tests convert data format. | This one the original order was okay? |
@@ -15,6 +15,12 @@
#include "Tempus_StepperFactory.hpp"
#include "Tempus_UnitTest_Utils.hpp"
+#include "Tempus_StepperHHTAlphaModifierBase.hpp"
+#include "Tempus_StepperHHTAlphaModifierXBase.hpp"
+#include "Tempus_StepperHHTAlphaObserverBase.hpp"
+#include "Tempus_StepperHHTAlphaModifierDefault.hpp"
+#include "Tempus_StepperHHTAlphaModifierXDefault.hpp"
+#include "Tempus_StepperHHTAlphaObserverDefault.hpp"
#include "../TestModels/SinCosModel.hpp"
#include "../TestModels/VanDerPolModel.hpp"
| [rcp->[getUseFSALDefault,getZeroInitialGuess,setSolver,isInitialized,setObserver,initialize,testFactoryConstruction,setStartUpStepper,setUseFSAL,setICConsistencyCheck,setICConsistency,rcp,setModel,setZeroInitialGuess,getICConsistencyCheckDefault,getICConsistencyDefault,TEUCHOS_ASSERT,getOrder,setParameterList,TEUCHOS_TEST_FOR_EXCEPT]] | -------------- This file contains the basic basic test of the n - tuple. This function is called when a new BDF2Observer is created. | Do you need this in BDF2? |
@@ -433,7 +433,7 @@ static int kpb_prepare(struct comp_dev *dev)
if (kpb->state == KPB_STATE_RESETTING ||
kpb->state == KPB_STATE_RESET_FINISHING) {
- comp_cl_err(&comp_kpb, "kpb_prepare(): can not prepare KPB due to ongoing reset, state log %x",
+ comp_cl_err(&comp_kpb, "kpb_prepare(): can not prepare KPB due to ongoing reset, state log %llx",
kpb->state_log);
return -EBUSY;
}
| [No CFG could be retrieved] | This function prepares a key phrase buffer. Initialize the private data structures. | Wait, kpb->state_log is a 64-bit value? If so this was really broken before. |
@@ -236,6 +236,7 @@ class ClientBase(object): # pylint: disable=too-many-instance-attributes
raise errors.ClientError(
'Successful revocation must return HTTP OK status')
+
class Client(ClientBase):
"""ACME client for a v1 API.
| [ClientV2->[new_order->[_authzr_from_response,_post],revoke->[_revoke],finalize_order->[_post],poll_authorizations->[_authzr_from_response],_get_v2_account->[_post],new_account->[_regr_from_response,_post]],Client->[revoke->[_revoke],agree_to_tos->[update_registration],request_challenges->[_authzr_from_response,_post],check_cert->[_get_cert],fetch_chain->[_get_cert],refresh->[check_cert],poll_and_request_issuance->[retry_after,poll,request_issuance],request_domain_challenges->[request_challenges],request_issuance->[_post],register->[_regr_from_response,_post]],ClientNetwork->[_get_nonce->[_check_response,_add_nonce,head],_post_once->[_check_response,_wrap_in_jws,_send_request,_add_nonce,_get_nonce],get->[_check_response,_send_request],head->[_send_request]],BackwardsCompatibleClientV2->[new_order->[new_order,request_domain_challenges],revoke->[revoke],finalize_order->[fetch_chain,finalize_order,request_issuance],new_account_and_tos->[agree_to_tos,_assess_tos,new_account,register],__init__->[ClientV2,Client]],ClientBase->[_revoke->[_post],poll->[_authzr_from_response],update_registration->[_send_recv_regr],deactivate_registration->[update_registration],_send_recv_regr->[_regr_from_response],query_registration->[_send_recv_regr],answer_challenge->[_post]]] | Revoke a certificate. | nit: added whitespace |
@@ -53,10 +53,12 @@ public class ClockMonitor extends NodeMonitor {
*/
@Deprecated
@Restricted(NoExternalUse.class)
+ @SuppressFBWarnings(value = "MS_PKGPROTECT", justification = "for backward compatibility")
public static /*almost final*/ AbstractNodeMonitorDescriptor<ClockDifference> DESCRIPTOR;
@Extension @Symbol("clock")
public static class DescriptorImpl extends AbstractAsyncNodeMonitorDescriptor<ClockDifference> {
+ @SuppressFBWarnings(value = "ST_WRITE_TO_STATIC_FROM_INSTANCE_METHOD", justification = "for backward compatibility")
public DescriptorImpl() {
DESCRIPTOR = this;
}
| [ClockMonitor->[DescriptorImpl->[newInstance->[ClockMonitor]]]] | Creates a callable that can be used to perform a clock difference on a node. | At some point we should do a pass through things like this and delete any not actually being used, but certainly out of scope here. |
@@ -90,7 +90,7 @@ int cadence_codec_init(struct comp_dev *dev)
comp_dbg(dev, "cadence_codec_init() start");
- cd = codec_allocate_memory(dev, sizeof(struct cadence_codec_data), 0);
+ cd = comp_devm_alloc(dev, sizeof(struct cadence_codec_data), 0);
if (!cd) {
comp_err(dev, "cadence_codec_init(): failed to allocate memory for cadence codec data");
return -ENOMEM;
| [No CFG could be retrieved] | cadence_codec_init - initialize cadence codec Get the object of a codec from the config file. | I wonder if this shouldn't be better called devm_comp_alloc(dev, ..) similar with its Linux counterpart. |
@@ -71,8 +71,10 @@ class Libxsmm(Package):
makefile.filter('FC = gfortran', 'FC ?= gfortran', **kwargs)
def manual_install(self, prefix):
+ spec = self.spec
install_tree('include', prefix.include)
- install_tree('lib', prefix.lib)
+ if not ('+header-only' in spec and '@1.6.2:' in spec):
+ install_tree('lib', prefix.lib)
doc_path = prefix.share + '/libxsmm/doc'
mkdirp(doc_path)
for doc_file in glob('documentation/*.md'):
| [Libxsmm->[install->[make,manual_install],manual_install->[install,glob,mkdirp,install_tree],patch->[FileFilter,filter],variant,version]] | Install manual documentation. | That is still wrong, u want to install lib if ~header-only. That's it. |
@@ -128,16 +128,8 @@ class GroupByKeyTranslator<K, InputT, OutputT>
return (SystemReduceFn<K, InputT, ?, OutputT, BoundedWindow>)
SystemReduceFn.buffering(kvInputCoder.getValueCoder());
} else if (transform instanceof Combine.PerKey) {
- final CombineFnBase.GlobalCombineFn<? super InputT, ?, OutputT> combineFn;
- try {
- combineFn =
- (CombineFnBase.GlobalCombineFn<? super InputT, ?, OutputT>)
- CombineTranslation.getCombineFn(appliedPTransform)
- .orElseThrow(() -> new IOException("CombineFn not found in node."));
- } catch (IOException e) {
- throw new RuntimeException(e);
- }
-
+ final CombineFnBase.GlobalCombineFn<? super InputT, ?, OutputT> combineFn =
+ ((Combine.PerKey) transform).getFn();
return SystemReduceFn.combining(
kvInputCoder.getKeyCoder(),
AppliedCombineFn.withInputCoder(combineFn, pipeline.getCoderRegistry(), kvInputCoder));
| [GroupByKeyTranslator->[translate->[getValue,getMaxSourceParallelism,windowCoder,getKeyCoder,getType,filter,getCurrentTransform,getWindowingStrategy,map,getValueCoder,getInput,registerMessageStream,of,getOutputTag,getOutput,getCoder,getMessageStream,adapt,getFullName,flatMap,ofElement,getPipeline,getSystemReduceFn],getSystemReduceFn->[getValueCoder,RuntimeException,orElseThrow,buffering,withInputCoder,getKeyCoder,IOException,getCoderRegistry,combining]]] | getSystemReduceFn - creates a SystemReduceFn that combines a key - value pair with. | The samza runner does _not_ expect to support portability as-is, correct? |
@@ -29,9 +29,7 @@ public class DefaultHttpContent extends DefaultHttpObject implements HttpContent
* Creates a new instance with the specified chunk content.
*/
public DefaultHttpContent(ByteBuf content) {
- if (content == null) {
- throw new NullPointerException("content");
- }
+ ObjectUtil.checkNotNull(content, "content");
this.content = content;
}
| [DefaultHttpContent->[copy->[copy],duplicate->[duplicate],release->[release],refCnt->[refCnt],retain->[retain],touch->[touch],retainedDuplicate->[retainedDuplicate],toString->[content],replace->[DefaultHttpContent]]] | Returns the content of the . | nit: you can merge both lines above as `checkNotNull` will return the given argument |
@@ -603,13 +603,13 @@ if ($object->id > 0) {
print '</td>';
}
- print '<td class="right">';
+ print '<td class="right expectedqty" id="id_'.$obj->rowid.'">';
print $obj->qty_stock;
print '</td>';
print '<td class="center">';
if ($object->status == $object::STATUS_VALIDATED) {
$qty_view = GETPOST("id_".$obj->rowid) ? GETPOST("id_".$obj->rowid) : $obj->qty_view;
- print '<input type="text" class="maxwidth75 right" name="id_'.$obj->rowid.'" value="'.$qty_view.'">';
+ print '<input type="text" class="maxwidth75 right" id="id_'.$obj->rowid.'_input" value="'.$qty_view.'">';
print '</td>';
print '<td class="right">';
print '<a class="reposition" href="'.DOL_URL_ROOT.'/product/inventory/inventory.php?id='.$object->id.'&lineid='.$obj->rowid.'&action=deleteline&token='.newToken().'">'.img_delete().'</a>';
| [fetch,create,rollbak,formconfirm,setRecorded,fetch_object,lasterrno,getNomUrl,begin,initHooks,load,executeHooks,loadLangs,update,setCanceled,close,textwithpicto,fetch_optionals,select_produits,query,fetch_name_optionals_label,transnoentitiesnoconv,load_stock,_create,trans,num_rows,selectWarehouses,getOptionalsFromPost,commit] | This function returns a list of all possible Warehouses. Displays the number of unique objects in the system. | You need to add the id for autofill but are you sure you can remove the "name" parameter ? What happen if you then post the form ? |
@@ -259,7 +259,11 @@ func (u *userState) removeSeries(fp model.Fingerprint, metric labelPairs) {
// forSeriesMatching passes all series matching the given matchers to the provided callback.
// Deals with locking and the quirks of zero-length matcher values.
-func (u *userState) forSeriesMatching(ctx context.Context, allMatchers []*labels.Matcher, callback func(context.Context, model.Fingerprint, *memorySeries) error) error {
+// There are 2 callbacks. callback1 is called for each series, where the lock is held.
+// callback2 is called at certain intervals specified by callback2Interval,
+// i.e. callback2() after callback2Interval calls of callback1().
+func (u *userState) forSeriesMatching(ctx context.Context, allMatchers []*labels.Matcher, append func(context.Context, model.Fingerprint, *memorySeries) error,
+ send func(context.Context) error, batchSize int) error {
log, ctx := spanlogger.New(ctx, "forSeriesMatching")
defer log.Finish()
| [forSeriesMatching->[get],getOrCreateSeries->[get],getViaContext->[get],getSeries->[get]] | forSeriesMatching is a helper function that calls the callback for each series in the given list. | names of callbacks need to match the code; also I would be explicit that the lock is not held while the second callback is called |
@@ -19,7 +19,7 @@ namespace System.Windows.Forms
_item = item;
}
- public override ImageList ImageList
+ public override ImageList? ImageList
{
get => _item?.Owner?.ImageList;
set => Debug.Assert(false, "We should never set the image list");
| [ToolStripItem->[ToolStripItemImageIndexer->[Assert]]] | The image indexer for the item. | ditto here - `item` can never been `null`. |
@@ -51,7 +51,7 @@ void FilamentSensorBase::filament_present(const uint8_t extruder) {
uint8_t FilamentSensorEncoder::motion_detected;
#endif
-#if FILAMENT_RUNOUT_DISTANCE_MM > 0
+#ifdef FILAMENT_RUNOUT_DISTANCE_MM
float RunoutResponseDelayed::runout_distance_mm = FILAMENT_RUNOUT_DISTANCE_MM;
volatile float RunoutResponseDelayed::runout_mm_countdown[EXTRUDERS];
#else
| [filament_present->[filament_present],ENABLED] | calls response. | Allowing for a negative or zero `FILAMENT_RUNOUT_DISTANCE_MM`? If not, a sanity check will be needed to support this change. |
@@ -162,7 +162,7 @@ namespace System.Xml
ValidateQName(base.reader.Name);
CheckCharacters(base.reader.Value);
- string str;
+ string? str;
str = base.reader.GetAttribute("SYSTEM");
if (str != null)
{
| [XmlCharCheckingReader->[ReadContentAsBase64Async->[CanReadBinaryContent,CreateOrReset,InReadBinary,ConfigureAwait,Interactive],ReadElementContentAsBinHexAsync->[CanReadBinaryContent,nameof,CreateOrReset,InReadBinary,ConfigureAwait,Interactive,Length],ReadAsync->[Value,BuildCharExceptionArgs,EndElement,Initial,Interactive,ReadState,ProcessingInstruction,Prohibit,Error,NodeType,Empty,DocumentType,Xml_DtdIsProhibitedEx,CheckWhitespace,Prefix,Element,Text,Ignore,MoveToElement,GetAttribute,Name,MoveToFirstAttribute,SignificantWhitespace,InReadBinary,Xml_InvalidCharacter,LocalName,Comment,MoveToNextAttribute,Whitespace,CDATA,Throw,EntityReference,IsPublicId,ConfigureAwait,Fail,CheckCharacters,ValidateQName],ReadContentAsBinHexAsync->[CanReadBinaryContent,CreateOrReset,InReadBinary,ConfigureAwait,Interactive],ReadElementContentAsBase64Async->[CanReadBinaryContent,nameof,CreateOrReset,InReadBinary,ConfigureAwait,Interactive,Length],Task->[Interactive,ConfigureAwait]]] | Reads an element from the stream. Checks if node read is a node of type unknown type and if so checks if it is Checks if node is missing or not whitespace. | NIT: merge into one row maybe? |
@@ -400,3 +400,18 @@ def test_import_url_empty_directory(tmp_dir, dvc, workspace):
empty_dir = tmp_dir / "empty_dir"
assert empty_dir.is_dir()
assert tuple(empty_dir.iterdir()) == ()
+
+
+def test_import_url_to_remote_status(tmp_dir, dvc, local_cloud, local_remote):
+ local_cloud.gen("foo", "foo")
+
+ stage = dvc.imp_url(str(local_cloud / "foo"), to_remote=True)
+ assert stage.md5 is not None
+
+ status = dvc.status()
+ assert status["foo.dvc"] == [{"changed outs": {"foo": "not in cache"}}]
+
+ dvc.pull()
+
+ status = dvc.status()
+ assert len(status) == 0
| [test_import_url_to_remote_invalid_combinations->[raises,imp_url],TestImportFilename->[test->[assertTrue,exists,assertEqual,join,mkdir,remove,main],setUp->[write,mkdtemp,super,join,open]],test_import_url->[,status,imp_url,gen],test_import_url_nonexistent->[raises,imp_url,fspath],test_import_url_to_dir->[isdir,imp_url,join,gen,makedirs,read_text],TestDefaultOutput->[test->[assertTrue,write,mkdtemp,uuid4,assertEqual,str,read,join,exists,open,main]],test_import_url_with_no_exec->[exists,join,imp_url,gen],test_import_url_dir->[,list,status,edit,imp_url,ODBManager,set,gen,listdir],test_should_remove_outs_before_import->[main,spy,gen,fspath],test_import_url_preserve_meta->[,imp_url,gen,dedent],test_import_stage_accompanies_target->[,fspath,imp_url,chdir,gen,join,dvc_gen],test_import_url_to_remote_directory->[,imp_url,len,hash_to_path_info,gen,open,load],test_import_url_to_remote_single_file->[,imp_url,hash_to_path_info,len,gen],test_import_url_empty_directory->[iterdir,is_dir,imp_url,tuple,gen],TestCmdImport->[test->[assertTrue,assertNotEqual,assertEqual,exists,main],test_unsupported->[assertNotEqual,main]],test_import_url_to_remote_absolute->[fspath,make_tmp_dir,imp_url,str,with_suffix,write_text],skipif,param,xfail,lazy_fixture,parametrize] | Test import of empty directory. | It can be used in this way `str(Path)`? In the previous I'm always using `os.fspath(Path)` following other's work. |
@@ -562,9 +562,11 @@ abstract class AbstractPrestoResultSet
// TODO (https://github.com/prestosql/presto/issues/6048) move to convertFromClientRepresentation
return getDate(columnIndex);
case Types.TIME:
+ case Types.TIME_WITH_TIMEZONE:
// TODO (https://github.com/prestosql/presto/issues/6048) move to convertFromClientRepresentation
return getTime(columnIndex);
case Types.TIMESTAMP:
+ case Types.TIMESTAMP_WITH_TIMEZONE:
// TODO (https://github.com/prestosql/presto/issues/6048) move to convertFromClientRepresentation
return getTimestamp(columnIndex);
case Types.ARRAY:
| [AbstractPrestoResultSet->[getLong->[getLong],getTimestamp->[getTimestamp],getBytes->[getBytes],convertFromClientRepresentation->[convertFromClientRepresentation],getInt->[getInt],getDouble->[getDouble],getByte->[getByte],column->[checkOpen,checkValidRow],getObject->[getTimestamp,getBigDecimal,getDate,getObject,getTime],columnInfo->[checkOpen,checkValidRow],getDate->[getDate],getBigDecimal->[getBigDecimal],getArray->[getArray,convertFromClientRepresentation],getFloat->[getFloat],next->[next],getBoolean->[getBoolean],checkOpen->[isClosed],getShort->[getShort],getTime->[getTime],getString->[getString]]] | Get object from database. | `Fix TIME/TIMESTAMP TZ column declaration for result set` is backward incompatible. Do we care? |
@@ -3,6 +3,8 @@ require 'net/https'
module PivCacService
class << self
+ RANDOM_HOSTNAME_BYTES = 2
+
include Rails.application.routes.url_helpers
def decode_token(token)
| [token_decoded->[decode_token_response,decode_test_token,post_form,identity_pki_disabled?,start_with?],decode_token->[token_present,token_decoded],decode_token_response->[parse,body,to_i],piv_cac_available_for_agency?->[piv_cac_agencies,parse,piv_cac_enabled?,include?,blank?],piv_cac_service_link->[to_s,query,development_and_piv_cac_entry_enabled?,escape,piv_cac_service_url],decode_test_token->[development_and_piv_cac_entry_enabled?,parse],token_present->[raise,blank?],piv_cac_verify_token_link->[piv_cac_verify_token_url],require,include,url_helpers] | Decode a token if it is present and decoded otherwise nil. | I'm open to argument but I'd vote to make this bigger, maybe 4? Every time somebody gets confused and hits Esc or something, they increase their probability of a collision. The longer the random string, the uglier the URL, but the lower the probability of a user-hostile collision. |
@@ -63,7 +63,7 @@ func PachdRc(shards uint64, backend backend, hostPath string, version string) *a
// we turn metrics on only if we have a static version this prevents dev
// clusters from reporting metrics
metrics := "true"
- if version == "" {
+ if version == "local" {
metrics = "false"
}
volumes := []api.Volume{
| [Fprintf,Join,Sprintf,NewEncoder,FormatUint,CodecEncodeSelf,MustParse] | PachdRc returns a pachd controller that is responsible for handling the nec returns a string that represents the environment variable that will be set when the is. | Could we have this be a constant that's stored somewhere rather than a magic value. I feel like that would prevent us getting in the same situation as before where we changed the value in one place but not another. Probably locate it in `src/server/pkg/deploy/deploy.go` |
@@ -86,13 +86,13 @@ public class KsqlConfigResolver implements ConfigResolver {
return strict ? Optional.empty() : Optional.of(ConfigItem.unresolved(key));
}
- private static Optional<ConfigItem> resolveKsqlConfig(final String propertyName) {
+ Optional<ConfigItem> resolveKsqlConfig(final String propertyName) {
final Optional<ConfigItem> possibleItem = resolveConfig("", KSQL_CONFIG_DEF, propertyName);
if (possibleItem.isPresent()) {
return possibleItem;
}
- if (propertyName.startsWith(KsqlConfig.KSQ_FUNCTIONS_PROPERTY_PREFIX)) {
+ if (propertyName.startsWith(KsqlConfig.KSQL_FUNCTIONS_PROPERTY_PREFIX)) {
// Functions properties are free form, so can not be resolved / validated:
return Optional.of(ConfigItem.unresolved(propertyName));
}
| [KsqlConfigResolver->[resolve->[resolveKsqlConfig,startsWith,resolveStreamsConfig],getConfigDef->[get,IllegalStateException,getDeclaredField,setAccessible],resolveStreamsConfig->[unresolved,of,isPresent,findFirst,empty,stripPrefix,startsWith],resolveKsqlConfig->[unresolved,of,isPresent,empty,resolveConfig,startsWith],PrefixedConfig->[requireNonNull],resolveConfig->[of,resolved,get,empty,stripPrefix,startsWith],stripPrefix->[substring,startsWith,length],PrefixedConfig,getConfigDef,of,configDef]] | Resolve streams config. | Can stay private and static, right? |
@@ -1184,7 +1184,14 @@ def plot_tfr_topomap(tfr, tmin=None, tmax=None, fmin=None, fmax=None,
if not show_names:
names = None
- data = tfr.data
+ data = tfr.data[picks, :, :]
+
+ # merging grads before rescaling makes ERDs visible
+ if merge_grads:
+ from ..channels.layout import _merge_grad_data
+ data, shape = _merge_grad_data(data), data.shape
+ data = data.reshape((data.shape[0], shape[1], shape[2]))
+
data = rescale(data, tfr.times, baseline, mode, copy=True)
# crop time
| [_init_anim->[set_values,_check_outlines,_hide_frame,_autoshrink,_GridData,_draw_outlines],plot_ica_components->[plot_ica_components,_check_outlines,_prepare_topo_plot,_add_colorbar,plot_topomap,_autoshrink],_plot_topomap->[set_locations,_check_outlines,_show_names,_GridData,_draw_outlines,_plot_sensors],plot_evoked_topomap->[_plot_topomap,_prepare_topo_plot,_autoshrink,_check_outlines],_plot_topomap_multi_cbar->[_add_colorbar,plot_topomap],plot_tfr_topomap->[_add_colorbar,_prepare_topo_plot,plot_topomap],plot_layout->[_draw_outlines,_check_outlines],plot_psds_topomap->[_plot_topomap_multi_cbar],_plot_ica_topomap->[_check_outlines,_add_colorbar,_prepare_topo_plot,plot_topomap,_autoshrink],plot_projs_topomap->[_add_colorbar,_prepare_topo_plot],_onselect->[_check_outlines],_plot_corrmap->[_check_outlines,_prepare_topo_plot,plot_topomap,_hide_frame,_plot_corrmap],_topomap_animation->[_prepare_topo_plot],plot_epochs_psd_topomap->[_prepare_topo_plot],_animate->[_hide_frame],_slider_changed->[plot_topomap,_resize_cbar]] | Plot a topographic map of specific time - frequency intervals of TFR data. Plots a colorbar or a colorbar label if a specific channel type is missing. Plots a single key in the current figure. Plots a colorbar of the critical block. | so `_merge_grad_data` does not retain the shape properly? does it make this 3D array 2D? If so perhaps it should be fixed to make it properly return an array of the same shape (other than along the merged dimension) |
@@ -769,6 +769,9 @@ function ResultCtrl ($scope, $rootScope, $route, $window, $routeParams, $locatio
for (let index in row) {
let stringValue = (row[index]).toString()
if (stringValue.indexOf(delimiter) > -1) {
+ if (stringValue.indexOf('\"')) {
+ stringValue = stringValue.replace('\"', '\"\"')
+ }
dsvRow += '"' + stringValue + '"' + delimiter
} else {
dsvRow += row[index] + delimiter
| [No CFG could be retrieved] | function to export the n - ary data to a dsv file. Helium ---------------- Scope for loading a single application from base64. | I think the way to escape `"` is `\"`? actually according to wikipedia both approaches might work, depending on the preference of the app |
@@ -237,8 +237,9 @@ public class BeamFnDataReadRunner<OutputT> {
}
synchronized (splittingLock) {
- // Don't attempt to split if we haven't started.
- if (!started) {
+ // Don't attempt to split if we are already done since there isn't a meaningful split we can
+ // provide.
+ if (index == stopIndex) {
return;
}
// Since we hold the splittingLock, we guarantee that we will not pass the next element
| [BeamFnDataReadRunner->[trySplit->[round,max,sort,size,getProgress,getEstimatedInputElements,trySplit,get,setFirstResidualElement,isValidSplitPoint,getAllowedSplitPointsList,binarySearch,getFractionOfRemainder],blockTillReadFinishes->[get,awaitCompletion,debug],forwardElementToConsumer->[accept],Factory->[createRunnerForPTransform->[getMultiplexingConsumer,register,getOnlyElement,values]],isValidSplitPoint->[contains,isEmpty],Registrar->[getPTransformRunnerFactories->[of,Factory]],registerInputLocation->[receive,get,data],Object,forComponents,fromProto,of,getPort,build,getLogger,getCoderId,get,accept,getApiServiceDescriptor,StateBackedIterableTranslationContext]] | Tries to split the bundle. The number of elements that we should keep. if nextPoint is less than index then it is the last residual element. | should we also check `index != -1`? |
@@ -232,10 +232,10 @@ void kpython_state::kpython(machine_config &config)
}
#define KPYTHON_BIOS \
- ROM_REGION32_LE(0x200000, "bios", 0) \
- ROM_LOAD( "b22a01.u42", 0x000000, 0x080000, CRC(98de405e) SHA1(4bc268a996825c1bdf6ae277d331fe7bdc0cc00c) ) \
- ROM_REGION(0x8000, "io_mcu", 0) \
- ROM_LOAD( "hd64f3664", 0x0000, 0x8000, NO_DUMP ) // Internal ROM not dumped
+ ROM_REGION32_LE(0x200000, "bios", 0) \
+ ROM_LOAD( "b22a01.u42", 0x000000, 0x080000, CRC(98de405e) SHA1(4bc268a996825c1bdf6ae277d331fe7bdc0cc00c) ) \
+ ROM_REGION(0x8000, "io_mcu", 0) \
+ ROM_LOAD( "hd64f3664", 0x0000, 0x8000, NO_DUMP ) // Internal ROM not dumped
ROM_START( kpython )
KPYTHON_BIOS
| [No CFG could be retrieved] | region machine configuration Internal ROM not available. | Please don't make changes like this - this has two levels of indentation because it's a continuation, not a level of nested scope. |
@@ -342,7 +342,8 @@ class Optimizer(object):
dtype=None,
fill_value=0.0,
shape=None,
- type=None):
+ type=None,
+ force_cpu=False):
"""Utility function to add an accumulator for a parameter
Args:
| [MomentumOptimizer->[_append_optimize_op->[_get_accumulator,_create_param_lr],_create_accumulators->[_add_accumulator]],PipelineOptimizer->[_extract_section_ops->[_is_opt_role_op],_find_input_output->[update],minimize->[minimize,_create_vars,_split_program],_split_program->[_extract_section_ops,_find_persistable_vars,update,_find_input_output,_is_lr_role_op,_find_section_opt],_find_section_opt->[_extract_section_opt_ops]],DecayedAdagradOptimizer->[_append_optimize_op->[_get_accumulator,_create_param_lr],_create_accumulators->[_add_accumulator]],ModelAverage->[_add_average_apply_op->[_get_accumulator],_append_average_accumulate_op->[_add_accumulator]],AdamaxOptimizer->[_append_optimize_op->[_get_accumulator,_create_param_lr],_finish_update->[_get_accumulator],_create_accumulators->[_add_accumulator]],Optimizer->[apply_gradients->[_create_optimization_pass,_process_distribute_lookuptable],apply_optimize->[apply_gradients,_create_optimization_pass],_create_param_lr->[_global_learning_rate],_create_optimization_pass->[_append_optimize_op,_create_global_learning_rate,_finish_update,_create_accumulators],minimize->[apply_optimize,backward],_process_distribute_lookuptable->[_create_global_learning_rate,_create_param_lr],backward->[_append_dgc_ops]],AdamOptimizer->[_append_optimize_op->[_get_accumulator,_create_param_lr],_create_accumulators->[_add_accumulator]],ExponentialMovingAverage->[apply->[restore]],LookaheadOptimizer->[minimize->[minimize]],RMSPropOptimizer->[_append_optimize_op->[_get_accumulator,_create_param_lr],_create_accumulators->[_add_accumulator]],SGDOptimizer->[_append_optimize_op->[_create_param_lr]],FtrlOptimizer->[_append_optimize_op->[_get_accumulator,_create_param_lr],_create_accumulators->[_add_accumulator]],AdagradOptimizer->[_append_optimize_op->[_get_accumulator,_create_param_lr],_create_accumulators->[_add_accumulator]],LambOptimizer->[_append_optimize_op->[_get_accumulator,_create_param_lr]],LarsMomentumOptimizer->[_append_optimize_op->[_get_a
ccumulator,_create_param_lr],_create_accumulators->[_add_accumulator]],RecomputeOptimizer->[apply_gradients->[apply_gradients],minimize->[apply_optimize,backward],apply_optimize->[apply_optimize]],AdadeltaOptimizer->[_append_optimize_op->[_get_accumulator],_create_accumulators->[_add_accumulator]],DGCMomentumOptimizer->[apply_gradients->[_is_use_dgc,_create_optimization_pass,_process_distribute_lookuptable],_append_optimize_op->[_is_use_dgc,_get_accumulator,_create_param_lr],_append_clip_norm->[_clip_by_norm],_append_dgc_ops->[_add_auto_increment_var,_is_use_dgc,_add_nranks_var,_add_accumulator]],DpsgdOptimizer->[_append_optimize_op->[_create_param_lr]]] | Utility function to add an accumulator for a parameter. | @lanxianghit @phlrain force_cpudevice_guard - 1force_cpuforce_cpudevice_guarddevice_guardwarning - 2: force_cpu 12`force_cpu`? |
@@ -61,8 +61,11 @@
#include <Kokkos_SPMV_Inspector.hpp>
#include <CuSparse_SPMV.hpp>
#include <MKL_SPMV.hpp>
+#include <OpenMPStatic_SPMV.hpp>
+#include <OpenMPDynamic_SPMV.hpp>
+#include <OpenMPSmartStatic_SPMV.hpp>
-enum {KOKKOS, MKL, CUSPARSE, KK_KERNELS, KK_INSP};
+enum {KOKKOS, MKL, CUSPARSE, KK_KERNELS, KK_INSP, OMP_STATIC, OMP_DYNAMIC, OMP_INSP};
enum {AUTO, DYNAMIC, STATIC};
typedef Kokkos::DefaultExecutionSpace execution_space;
| [test_crs_matrix_singlevec->[matvec],main->[print_help]] | PUBLIC METHODS For the list of possible non - null n - tuples in the system. - - - - - - - - - - - - - - - - - -. | These includes currently won't build if OpenMP is disabled. Please protect the includes with `#ifdef KOKKOS_HAVE_OPENMP ... #endif`. |
@@ -235,11 +235,11 @@ public class HdfsDataSegmentPusher implements DataSegmentPusher
}
@Override
- public String makeIndexPathName(DataSegment dataSegment, String indexName)
+ public String makeIndexPathName(DataSegment dataSegment, String indexName, boolean useUniquePath)
{
return StringUtils.format(
"./%s/%d_%s",
- this.getStorageDir(dataSegment),
+ this.getStorageDir(dataSegment, useUniquePath),
dataSegment.getShardSpec().getPartitionNum(),
indexName
);
| [HdfsDataSegmentPusher->[makeIndexPathName->[getStorageDir],getPathForHadoop->[getPathForHadoop]]] | This method is called to create the index path for a given data segment. | Please add a check that useUniquePath is always false. |
@@ -38,6 +38,13 @@ func (d *ManualDecoder) IntPtr(base map[string]interface{}, key string, keys ...
if valFloat == float32(valInt) {
return &valInt
}
+ } else if valNumber, ok := val.(json.Number); ok {
+ if valInt, err := valNumber.Int64(); err != nil {
+ d.Err = err
+ } else {
+ i := int(valInt)
+ return &i
+ }
}
d.Err = fetchErr
return nil
| [String->[StringPtr],Int->[IntPtr],TimeRFC3339->[Parse],New] | IntPtr returns a pointer to an int value from a base map that can be nil if. | As this should be the default now, to you mind moving it up to the first `else if`? |
@@ -1684,9 +1684,10 @@ constexpr uint8_t epps = ENCODER_PULSES_PER_STEP;
const PauseMode mode/*=PAUSE_MODE_SAME*/,
const uint8_t extruder/*=active_extruder*/
) {
- if (mode == PAUSE_MODE_SAME)
- return;
+ //if (mode == PAUSE_MODE_SAME) return;
+ SERIAL_ECHOLNPAIR("PauseMsg=", message);
pause_mode = mode;
+ ExtUI::pauseModeStatus = message;
switch (message) {
case PAUSE_MESSAGE_PARKING: ExtUI::onUserConfirmRequired_P(GET_TEXT(MSG_PAUSE_PRINT_PARKING));
case PAUSE_MESSAGE_CHANGING: ExtUI::onUserConfirmRequired_P(GET_TEXT(MSG_FILAMENT_CHANGE_INIT));
| [No CFG could be retrieved] | This function is called by the UI when a message is paused. This function handles the administration of the ExtUI. | Is this for debugging purposes? |
@@ -28,7 +28,7 @@ module Admin
@tag.name = params[:tag][:name].downcase
if @tag.save
- flash[:success] = "Tag has been created!"
+ flash[:success] = "#{@tag.name} has been created!"
redirect_to edit_admin_tag_path(@tag)
else
flash[:danger] = @tag.errors_as_sentence
| [TagsController->[new->[new],create->[new],update->[update]]] | POST - Tag Athors. | We do this in the `Admin::TagsController#update` method and when adding a moderator to a tag (but we display the moderator's name rather than the tag name!), so I updated this for consistency. |
@@ -363,12 +363,13 @@ public class CacheManagerTest extends AbstractInfinispanTest {
public void call() {
EmbeddedCacheManager cm1 = cms[0];
EmbeddedCacheManager cm2 = cms[1];
+ TestingUtil.waitForNoRebalance(cm1.getCache(), cm2.getCache());
Cache<Object, Object> c1 = cm1.getCache();
- cm2.getCache();
GlobalConfiguration globalCfg = cm1.getCacheManagerConfiguration();
Configuration cfg = c1.getCacheConfiguration();
- cm1.stop();
+ TestingUtil.killCacheManagers(cm1);
+ globalCfg = new GlobalConfigurationBuilder().read(globalCfg).build();
withCacheManager(new CacheManagerCallable(
new DefaultCacheManager(globalCfg, cfg)) {
| [CacheManagerTest->[doTestRemoveCacheClustered->[getManagerWithStore],UnreliableCacheStoreConfigurationBuilder->[create->[UnreliableCacheStoreConfiguration,create]],testCacheManagerRestartReusingConfigurations->[call->[stop]],getManagerWithStore->[getManagerWithStore],testRemoveNonExistentCache->[stop],testConcurrentCacheManagerStopAndGetCache->[stop->[stop],stop],testRemoveCacheLocal->[stop]]] | Test if the cache manager restarts and reuses the configuration. | why copying? are you hiding something? :) |
@@ -52,7 +52,7 @@ func (n *node) addChild(child *node) {
type Index struct {
root *node
lookupTable map[string]*node
- m sync.Mutex
+ m sync.RWMutex
}
func NewIndex() *Index {
| [bfsworker->[New,Remove,PushBack,Front,Len],List->[Copy,Lock,Unlock],Delete->[Self,Lock,Unlock,Errorf],Insert->[Copy,addChild,Unlock,Lock,Errorf,Parent,Self,Debugf],Get->[Copy,Lock,Unlock],bfs->[bfsworker,Lock,Unlock],HasChildren->[Lock,Unlock],New] | Insert inserts a copy of the given node into the tree under the given parent. Get returns a copy of the named node. | Mutex hat? This mutex should go at the top of the struct. |
@@ -14,8 +14,8 @@ namespace System.Xml.Serialization
{
private bool _includeInSchema = true;
private bool _anonymousType;
- private string _ns;
- private string _typeName;
+ private string? _ns;
+ private string? _typeName;
/// <devdoc>
/// <para>[To be supplied.]</para>
| [XmlTypeAttribute->[Struct,Class,Empty,Enum,Interface]] | XmlTypeAttribute - Constructs an attribute from the supplied parameters. | On line 31: `typeName` should be nullable. |
@@ -26,6 +26,8 @@ import javax.validation.constraints.Size;
import java.util.List;
import java.util.concurrent.TimeUnit;
+import static java.util.concurrent.TimeUnit.MINUTES;
+
/**
* Configuration read from etc/catalog/kudu.properties
*/
| [KuduClientConfig->[setMasterAddresses->[copyOf,splitToList],omitEmptyStrings,Duration]] | Creates a new instance of a NetworkConfig object that can be used to configure a network network Sets the master addresses. | nit: test for kudu config is missing |
@@ -2586,6 +2586,16 @@ function del_service_from_host(Illuminate\Http\Request $request)
return api_error(500, 'Failed to delete the service');
}
+function search_by_mac(Illuminate\Http\Request $request)
+{
+ $search = $request->route('search');
+ $ports = dbFetchRows('select ports.* from ports_fdb join ports on ports.port_id=ports_fdb.port_id join devices on devices.device_id=ports.device_id where ports_fdb.port_id in (select port_id from ports_fdb where mac_address=?) group by ports_fdb.port_id having count(ports_fdb.port_id)>0 order by count(ports_fdb.port_id) limit 1', $search);
+ if (empty($ports)) {
+ return api_error(404, 'No ports found for ' . $search);
+ }
+
+ return api_success($ports, 'ports');
+}
function edit_service_for_host(Illuminate\Http\Request $request)
{
$service_id = $request->route('id');
| [get_graphs->[route],search_ports->[route,get,isEmpty],edit_location->[route,getContent],update_device->[route,getContent],get_port_stack->[route,get],get_vlans->[route],get_vrf->[route],get_inventory_for_device->[route],get_device->[route],get_bill_history_graph->[route,get],get_devices_by_group->[toArray,route,get,isEmpty,first],list_vrf->[get,hasGlobalRead],get_fdb->[route],list_fdb->[route,get,isEmpty],get_link->[route],api_error->[json],add_device_group->[messages,getContent,fails,toSql,save,sync],get_inventory->[route,get],del_service_from_host->[route],add_components->[route,createComponent],edit_components->[route,getContent,setComponentPrefs],get_components->[all,route,get,has,getComponents],list_vlans->[get,hasGlobalRead],unmute_alert->[route,getContent],api_success->[json],get_oxidized_config->[route],add_location->[getContent],validate_column_list->[getColumns],get_all_ports->[get,hasGlobalRead],get_device_ip_addresses->[route],get_device_groups->[toArray,route,get,groups,isEmpty,count],list_alert_rules->[route],add_edit_rule->[toSql,getContent],add_parents_to_host->[route,getContent,sync],add_service_template_for_device_group->[messages,getContent,fails,toSql,save],delete_rule->[route],show_endpoints->[getName,getPrefix,json,url,getRoutes,uri],get_bill_history_graphdata->[route,get],device_outages->[orderBy,route,get],get_service_templates->[toArray,cannot,get,isEmpty,count],list_available_health_graphs->[route,count],list_links->[route,hasGlobalRead],get_graph_generic_by_hostname->[has,route,get],get_bgp->[route],get_network_ip_addresses->[route],create_edit_bill->[getContent],get_graph_by_port_hostname->[has,route,get],add_port_group->[save,messages,getContent,fails],get_graph_by_portgroup->[has,route,get],get_bill_history->[route],get_bill_graph->[route,get],delete_bill->[route],list_bills->[route,get,hasGlobalRead],get_port_groups->[toArray,get,isEmpty,count],del_location->[route],trigger_device_discovery->[route],list_cbgp->[get,hasGlobalRead],ac
k_alert->[route,getContent],get_port_ip_addresses->[route],edit_service_for_host->[route,getContent],list_bgp->[uncompressed,get],list_sensors->[get,count],get_port_info->[route],add_service_for_host->[route,getContent],list_alerts->[has,route,get],list_devices->[get,hasGlobalRead],list_logs->[route,get,getName],list_ospf->[get],get_port_stats_by_port_hostname->[path,has,route,get],list_ipsec->[route],list_available_wireless_graphs->[route],search_oxidized->[route],list_oxidized->[get,json],rename_device->[route],list_arp->[getNetworkAddress,getNetmask,route,get],get_port_graphs->[route,get],add_device->[getMessage,getContent],delete_components->[deleteComponent,route],device_availability->[orderBy,route,get],del_device->[route],get_bill_graphdata->[route,get],list_services->[has,route,get],del_parents_from_host->[detach,route,getContent]] | Delete a service from host. | Would you mind writing this in eloquent syntax instead? It's more future proof. Not sure why you need to mix in device table either |
@@ -37,6 +37,11 @@ class CategorySerializer < BasicCategorySerializer
true
end
+ def is_special
+ [SiteSetting.lounge_category_id, SiteSetting.meta_category_id, SiteSetting.staff_category_id, SiteSetting.uncategorized_category_id]
+ .include? object.id
+ end
+
def include_can_delete?
scope && scope.can_delete?(object)
end
| [CategorySerializer->[include_cannot_delete_reason->[include_can_delete?]]] | Check if the object can be deleted by this category. | One thing I prefer to do to keep JSON size down is have boolean values return true always, then use `include_is_special?` with the logic. That way the attribute will not be present unless true. |
@@ -41,6 +41,7 @@ import lombok.extern.java.Log;
*
* @param <T> The type of the setting value.
*/
+@SuppressWarnings("StaticInitializerReferencesSubClass")
@Log
public abstract class ClientSetting<T> implements GameSetting<T> {
public static final ClientSetting<Integer> aiPauseDuration =
| [ClientSetting->[resetValue->[setValueAndFlush],equals->[equals],setValueAndFlush->[setValue,getPreferences],hashCode->[hashCode],getValue->[getDefaultValue,getEncodedCurrentValue],flush->[flush]]] | A class that represents a list of settings that can be adjusted and stored with a Client s Client settings for game - specific settings. | I saw this warning too, is it safe to ignore? |
@@ -94,12 +94,12 @@ func updateConfig(config strpkg.Config, debug bool) strpkg.Config {
}
func logNodeBalance(store *strpkg.Store) {
- balance, err := presenters.ShowEthBalance(store)
+ balance, kv, err := presenters.ShowEthBalance(store)
logger.WarnIf(err)
- logger.Infow(balance)
- balance, err = presenters.ShowLinkBalance(store)
+ logger.Infow(balance, kv...)
+ balance, kv, err = presenters.ShowLinkBalance(store)
logger.WarnIf(err)
- logger.Infow(balance)
+ logger.Infow(balance, kv...)
}
func logConfigVariables(config strpkg.Config) {
| [DeleteUser->[SetLogger,GetStore,NewApplication,CreateProductionLogger,DeleteUser,Info],RunNode->[SetLogger,Stop,GetStore,Infow,Authenticate,Initialize,Start,errorOut,CreateProductionLogger,Errorf,NewApplication,String,Run,Info,Bool],ImportKey->[LastIndex,First,Args,Present,KeysDir,New,errorOut],Warn,GetActiveAccount,Close,Readdirnames,Copy,Error,ReadFile,ShowEthBalance,Infow,ShowLinkBalance,Errorf,TrimSpace,Debug,Create,GetLastNonce,NewConfigWhitelist,GetNonce,Open,WarnIf] | DeleteUser deletes the user from the database. ImportKey imports a key from filepath to the chainlink node. | Rather than returning a 3 tuple, could we only return the `kv` map, and place `message` inside of it as a third key? You would then just use the value when needed: `kv["message"]`. The standard 2 tuple followed by an `err` is so idiomatic in golang that I'm reluctant to leave it. Open to other ideas. |
@@ -121,6 +121,7 @@ class PageAdmin extends Admin
new ToolbarAction(
'sulu_admin.type',
[
+ 'sort_by_title' => true,
'disabled_condition' => '(_permissions && !_permissions.edit)',
]
),
| [PageAdmin->[getSecurityContextsWithPlaceholder->[getWebspaceCollection,getSystem,getSecurity],getSecurityContexts->[getWebspaceCollection,getKey,getSystem,getSecurity],configureViews->[getKey,setParent,hasSomeWebspacePermission,getWebspaces,add,setOption,setAttributeDefault],configureNavigationItems->[setPosition,hasSomeWebspacePermission,setView,add,setIcon],hasSomeWebspacePermission->[getKey,hasPermission,getWebspaces],getConfig->[getConfiguration,getWebspaces]]] | Configures views based on the configuration of the admin toolbar. Add the page view for the given webspace. Adds the preview of the page Adds a section of the view collection that is a page view. | Don't you think it would make more sense to name this option `sort_by`? I think it would scale better, because we could use `key` or `title` as a property, and it would still work in the future if we might have other properties to sort by. Could e.g. imagine that there will be a `order` property in the future, so that people can decide completely on their own how they want to order it. |
@@ -134,14 +134,17 @@ public abstract class AbstractMessageProducingHandler extends AbstractMessageHan
this.notPropagatedHeaders.addAll(Arrays.asList(headers));
}
this.selectiveHeaderPropagation = this.notPropagatedHeaders.size() > 0;
+
+ this.shouldCopyRequestHeaders = !this.notPropagatedHeaders.contains("*");
}
/**
- * Get the header names this handler doesn't propagate.
+ * Get the header patterns this handler doesn't propagate.
* @return an immutable {@link java.util.Collection} of headers that will not be
* copied from the inbound message if {@link #shouldCopyRequestHeaders()} is true.
* @since 4.3.10
* @see #setNotPropagatedHeaders(String...)
+ * @see org.springframework.util.PatternMatchUtils
*/
@Override
public Collection<String> getNotPropagatedHeaders() {
| [AbstractMessageProducingHandler->[getOutputChannelFromRoutingSlip->[getOutputChannelFromRoutingSlip],onInit->[onInit],sendOutput->[getOutputChannel],sendErrorMessage->[sendOutput],setSendTimeout->[setSendTimeout],addNotPropagatedHeaders->[updateNotPropagatedHeaders],produceOutput->[getOutputChannel]]] | Update notPropagatedHeaders. | Shouldn't this be ..equals("\*") instead of ..contains("\*") ? |
@@ -662,11 +662,11 @@ class RestAPI: # pragma: no unittest
def connect(
self,
- registry_address: typing.TokenNetworkRegistryAddress,
- token_address: typing.TokenAddress,
- funds: typing.TokenAmount,
- initial_channel_target: int = None,
- joinable_funds_target: float = None,
+ registry_address: TokenNetworkRegistryAddress,
+ token_address: TokenAddress,
+ funds: TokenAmount,
+ initial_channel_target: int = 3,
+ joinable_funds_target: float = 0.4,
):
log.debug(
"Connecting to token network",
| [endpoint_not_found->[api_error],RestAPI->[initiate_payment->[api_error,api_response],connect->[api_error,api_response],get_raiden_events_payment_history_with_timestamps->[api_error,api_response,get_raiden_events_payment_history_with_timestamps],_deposit->[api_error,api_response,get_channel],get_blockchain_events_token_network->[normalize_events_list,get_blockchain_events_token_network,api_error,api_response],get_partners_by_token->[api_error,api_response,get_channel_list],get_pending_transfers->[api_error,api_response,get_pending_transfers],get_token_network_for_token->[api_error,api_response],open->[api_error,api_response],leave->[api_response],register_token->[api_error,api_response],patch_channel->[_close,api_error,_withdraw,get_channel,_deposit],mint_token->[api_error,api_response,mint_token],_withdraw->[api_error,api_response,get_channel],get_channel->[api_error,api_response,get_channel],get_our_address->[api_response],get_blockchain_events_network->[normalize_events_list,get_blockchain_events_network,api_response,api_error],get_blockchain_events_channel->[normalize_events_list,api_error,get_blockchain_events_channel,api_response],_close->[api_error,api_response,get_channel],get_tokens_list->[get_tokens_list,api_response],get_channel_list->[api_response,get_channel_list]],APIServer->[unhandled_exception->[api_error],stop->[stop],__init__->[restapi_setup_type_converters,restapi_setup_urls]],normalize_events_list->[hexbytes_to_str,encode_object_to_str,encode_byte_values]] | Connects to a token network. | You can use `settings.DEFAULT_JOINABLE_FUNDS_TARGET` and `settings.DEFAULT_INITIAL_CHANNEL_TARGET` for these values |
@@ -54,6 +54,7 @@ module.exports = class huobi extends Exchange {
'fetchTradingFee': true,
'fetchTradingLimits': true,
'fetchWithdrawals': true,
+ 'parseFundingRate': true,
'transfer': true,
'withdraw': true,
},
| [No CFG could be retrieved] | Return a dictionary of all supported countries. Return a map of URLs. | You don't need to include the parse methods in the has array; the parse methods are just helper methods, and they're nothing the user should care about.
@@ -432,7 +432,7 @@ namespace System.Xml
/// </summary>
public override void WriteValue(object? value)
{
- WriteString(XmlUntypedConverter.Untyped.ToString(value, this._resolver));
+ WriteString(XmlUntypedConverter.Untyped.ToString(value!, this._resolver));
}
public override void WriteValue(string? value)
| [XmlEventCache->[Flush->[Flush],WriteValue->[WriteString],WriteRaw->[WriteRaw],WriteChars->[WriteString],Close->[Close],Dispose->[Dispose]]] | Write the value of the NestedListField as XML. | Seems `value` is non null, the virtual method and most of the overwrites are throwing, maybe mistaken with `public override void WriteValue(string? value)`? |
@@ -286,7 +286,7 @@ def get_metrics(
the `"loss"` metric is "average loss per batch".
Returns the `"batch_loss"` separately.
"""
- metrics = model.get_metrics(reset=reset)
+ metrics = model.get_metrics(reset=reset, world_size=world_size)
if batch_loss is not None:
metrics["batch_loss"] = batch_loss
metrics["loss"] = float(total_loss / num_batches) if num_batches > 0 else 0.0
| [make_vocab_from_params->[datasets_from_params],evaluate->[get_metrics],get_batch_size->[get_batch_size],get_metrics->[get_metrics]] | Get the metrics of a model. | Needs another input: cuda_device.
@@ -3,6 +3,6 @@
frappe.ui.form.on('Google Settings', {
refresh: function(frm) {
- frm.dashboard.set_headline(__("For more information, {0}.", [`<a href='https://erpnext.com/docs/user/manual/en/erpnext_integration/google_settings'>${__('Click here')</a>`]));
+ frm.dashboard.set_headline(__("For more information, <a href='https://erpnext.com/docs/user/manual/en/erpnext_integration/google_settings'>Click here</a>."));
}
});
| [No CFG could be retrieved] | On Google Settings. | Why? The previous one should work... Are you getting any error for that?
@@ -116,7 +116,7 @@ describe('amp-list', function() {
doc = win.document;
});
- it.configure().skipChromeDev().run(
+ it.configure().run(
'should change to layout container as on bind', function*() {
expect(isExperimentOn(win, 'amp-list-resizable-children')).to.be.true;
| [No CFG could be retrieved] | A list with a single element with a layout attribute. | nit: Same, just make it `it('should change ` |
@@ -340,6 +340,7 @@ class DatabasesController extends AbstractController
$databases[] = [
'name' => $database['SCHEMA_NAME'],
+ 'name_hash' => md5($database['SCHEMA_NAME']),
'collation' => [
'name' => $database['DEFAULT_COLLATION_NAME'],
'description' => Charsets::getCollationDescr(
| [DatabasesController->[createDatabaseAction->[tryQuery,addParam,setRequestStatus,getError,getLowerCaseNames],indexAction->[isSuperuser,render,getStatisticsColumns,getDatabasesFull,setSortDetails,getDatabases,getServerCollation],dropDatabasesAction->[isSuccess,addParam,setRequestStatus],getDatabases->[getStatisticsColumns,isSystemSchema]]] | Get databases and statistics. | Can you use SHA-1 and the hash method?
@@ -14,7 +14,9 @@ class Openstf(MakefilePackage):
homepage = "http://www.e-em.co.jp/OpenSTF/"
url = "http://www.e-em.co.jp/OpenSTF/OpenSTF.zip"
- version('1.3.1', sha256='4c39c81f70e3f8017fcb9cd457436c77d29e016d78bc5337f152f2eb078aa7b6')
+ version('1.7.0', sha256='50a0406dd452e3ec09d29aa362a426ca04c8a586d817ab1295988f578baeac2a')
+ version('1.3.1', sha256='4c39c81f70e3f8017fcb9cd457436c77d29e016d78bc5337f152f2eb078aa7b6',
+ url='http://www.e-em.co.jp/OpenSTF/old/OpenSTF_131.zip')
variant('mpi', default=False, description='Build with MPI Support')
| [Openstf->[build->[working_dir,make],install->[install,mkdirp],edit->[filter_file],variant,depends_on,version]] | Creates a function which can be used to edit an existing object. Build a single object file from the source code. | Is the new version available at a version-specific URL too? If it is, we should define a `url_for_version`.
@@ -68,6 +68,12 @@ namespace IDynamicInterfaceCastableTests
return GetNumberStaticReturnValue;
}
+ public static int GetNumberHelperReturnValue = 4;
+ int GetNumberHelper()
+ {
+ return GetNumberHelperReturnValue;
+ }
+
int ITest.CallImplemented(ImplementationToCall toCall)
{
switch (toCall)
| [Program->[ValidateBasicInterface->[CallImplemented],ValidateGenericInterface->[ReturnArg],ValidateErrorHandling->[GetNumber],Main->[ValidateBasicInterface,ValidateNotImplemented,ValidateGenericInterface,ValidateErrorHandling,ValidateDirectlyImplemented,ValidateOverriddenInterface],ValidateDirectlyImplemented->[ImplementedMethod],ValidateOverriddenInterface->[CallImplemented]],CallImplemented->[ImplementedMethod,GetNumberPrivate,GetNumberStatic,GetNumber],BadDynamicInterfaceCastable->[ITest->[CallImplemented],GetNumber->[UseOther],UseOther->[OtherMethod]]] | This method returns the number of the object that can be called by the dynamic interface. | This appears incorrect. We expect this call to trigger an exception right? This means we should call `Assert.Fail("<DESCRIPTION OF VIOLATION>");` |
@@ -110,7 +110,8 @@ class ArchiveIngestService(BaseService):
superdesk.get_resource_service(ARCHIVE).post([dest_doc])
insert_into_versions(dest_doc.get('guid'))
- push_notification('item:fetch', item=str(ingest_doc.get('_id')))
+ if(kwargs.get('notify', True)):
+ push_notification('item:fetch', fetched=1)
return new_guids
| [ArchiveIngestService->[create->[get_resource_service,generate_guid,badRequestError,create,insert_into_versions,append,dict,get,send_to,utcnow,str,remove_unwanted,generate_unique_id_and_name,notFoundError,is_workflow_state_transition_valid,InvalidStateTransitionError,push_notification,set_original_creator]],ArchiveIngestResource->[rel],workflow_state,workflow_action] | Create a new chunk of data from a list of documents. Add missing values to the dest_doc. | hey it's not javascript ;)
@@ -23,11 +23,11 @@ package io.druid.query;
* This factory is used for DI of custom {@link QueryMetrics} implementations for all query types, which don't (yet)
* need to emit custom dimensions and/or metrics, i. e. they are good with the generic {@link QueryMetrics} interface.
*/
-public interface GenericQueryMetricsFactory
+public interface GenericQueryMetricsFactory<QueryType extends Query<?>>
{
/**
* Creates a {@link QueryMetrics} for query, which doesn't have predefined QueryMetrics subclass. This method must
* call {@link QueryMetrics#query(Query)} with the given query on the created QueryMetrics object before returning.
*/
- QueryMetrics<Query<?>> makeMetrics(Query<?> query);
+ QueryMetrics<QueryType> makeMetrics(QueryType query);
}
| [No CFG could be retrieved] | Make Metrics for a query. | It doesn't make sense, `GenericQueryMetricsFactory` is not a "generic base" for other QueryMetricsFactories, it is a query metrics factory _specifically for "any" queries_. It must be able to accept any query type. |
@@ -63,6 +63,9 @@ def _prepare_topo_plot(inst, ch_type, layout):
if ch_type == 'eeg':
picks = pick_types(info, meg=False, eeg=True, ref_meg=False,
exclude='bads')
+ elif ch_type == 'ref_meg':
+ picks = pick_types(info, meg=False, ref_meg=True,
+ exclude='bads')
else:
picks = pick_types(info, meg=ch_type, ref_meg=False,
exclude='bads')
| [_init_anim->[set_values,_check_outlines,_hide_frame,_autoshrink,_GridData,_draw_outlines],plot_ica_components->[plot_ica_components,_check_outlines,_prepare_topo_plot,_add_colorbar,plot_topomap,_autoshrink],plot_arrowmap->[_trigradient,_prepare_topo_plot,plot_topomap,_check_outlines],_plot_topomap->[set_locations,_check_outlines,_show_names,_GridData,_draw_outlines,_plot_sensors],plot_evoked_topomap->[_plot_topomap,_prepare_topo_plot,_autoshrink,_check_outlines],_plot_topomap_multi_cbar->[_add_colorbar,plot_topomap],plot_tfr_topomap->[_add_colorbar,_prepare_topo_plot,plot_topomap],plot_layout->[_draw_outlines,_check_outlines],plot_psds_topomap->[_plot_topomap_multi_cbar],_plot_ica_topomap->[_check_outlines,_add_colorbar,_prepare_topo_plot,plot_topomap,_autoshrink],plot_projs_topomap->[_add_colorbar,_prepare_topo_plot,_eliminate_zeros],_onselect->[_check_outlines],_GridData->[__init__->[_get_extra_points]],_plot_corrmap->[_check_outlines,_prepare_topo_plot,plot_topomap,_hide_frame,_plot_corrmap],_topomap_animation->[_prepare_topo_plot],plot_epochs_psd_topomap->[_prepare_topo_plot],_animate->[_hide_frame],_slider_changed->[plot_topomap,_resize_cbar]] | Prepare topo plot. Returns the picks merge_grads ch_names and ch_type . | This will presumably change `topo` plots for any system with reference channels. We need to check to see if it still looks okay, and probably provide a way to turn it off here, too, e.g. a `ref_meg=False` kwarg for the `plot_topo`-class of functions. |
@@ -472,9 +472,10 @@ ECPKPARAMETERS *EC_GROUP_get_ecpkparameters(const EC_GROUP *group,
return NULL;
}
} else {
- if (ret->type == 0)
+ if (ret->type == ECPKPARAMETERS_TYPE_NAMED)
ASN1_OBJECT_free(ret->value.named_curve);
- else if (ret->type == 1 && ret->value.parameters)
+ else if (ret->type == ECPKPARAMETERS_TYPE_EXPLICIT
+ && ret->value.parameters != NULL)
ECPARAMETERS_free(ret->value.parameters);
}
| [No CFG could be retrieved] | Get the parameters of an ASN.1 EC group; get the base point and order of the object. | Coding style improvement (well, already on existing code): Either leave out `&& ret->value.parameters` or use `&& ret->value.parameters != NULL`
@@ -202,9 +202,16 @@ class Comment < ApplicationRecord
def shorten_urls!
doc = Nokogiri::HTML.fragment(processed_html)
doc.css("a").each do |anchor|
- unless anchor.to_s.include?("<img") || anchor.to_s.include?("<del") || anchor.attr("class")&.include?("ltag")
- anchor.content = strip_url(anchor.content) unless anchor.to_s.include?("<img") # rubocop:disable Style/SoleNestedConditional
+ anchor_inner_html = anchor.inner_html
+ next if anchor_inner_html.include?("<img")
+
+ anchor_content = anchor.content
+ urls = anchor_content.scan(URI_REGEXP).flatten.compact
+ urls.each do |url|
+ anchor_content.sub!(/#{Regexp.escape(url)}/, strip_url(url))
end
+ anchor_inner_html.sub!(/#{Regexp.escape(anchor.content)}/, anchor_content)
+ anchor.inner_html = anchor_inner_html
end
self.processed_html = doc.to_html.html_safe # rubocop:disable Rails/OutputSafety
end
| [Comment->[update_descendant_notifications->[update_notifications],wrap_timestamps_if_video_present!->[path],update_notifications->[update_notifications],expire_root_fragment->[root_exists?]]] | Shortens urls that are not img or del tags. | I understand the `flatten` but why do we need the `compact`, i.e. what leads to `nil`s in the array? |
@@ -6,14 +6,15 @@ from __future__ import annotations
import os
from dataclasses import dataclass
from pathlib import PurePath
-from typing import Optional, Sequence
+from typing import Sequence
from pants.engine.engine_aware import EngineAwareParameter
from pants.util.dirutil import fast_relpath, longest_dir_prefix
from pants.util.strutil import strip_prefix
-# Currently unused, but reserved for possible future needs.
-BANNED_CHARS_IN_TARGET_NAME = frozenset(r"@!?/\:=")
+# `:` is used as a delimiter already. Others are reserved for possible future needs.
+BANNED_CHARS_IN_TARGET_NAME = frozenset(r":!@?/\=")
+BANNED_CHARS_IN_GENERATED_NAME = frozenset(r":!@?=")
class InvalidSpecPath(ValueError):
| [AddressInput->[file_to_address->[InvalidTargetName],__post_init__->[InvalidTargetName,InvalidSpecPath],parse->[prefix_subproject]],Address->[__init__->[InvalidTargetName,InvalidSpecPath]]] | Creates a new object with the given parameters. Parse a string into an AddressInput. | Will ban `#` in a dedicated followup. |
@@ -107,4 +107,12 @@ public final class ClientAuthenticationHandler extends AbstractPreAndPostProcess
throw new FailedLoginException("Provider did not produce profile for " + clientCredentials);
}
+
+ public boolean isUseTypedId() {
+ return useTypedId;
+ }
+
+ public void setUseTypedId(final boolean useTypedId) {
+ this.useTypedId = useTypedId;
+ }
}
| [ClientAuthenticationHandler->[doAuthentication->[getClientName,debug,findClient,getNativeResponse,getNativeRequest,J2EContext,setUserProfile,isNotBlank,BasicCredentialMetaData,SimplePrincipal,getExternalContext,HandlerResult,getTypedId,getAttributes,getCredentials,FailedLoginException,getUserProfile],supports->[getClass,isAssignableFrom]]] | Checks if a user has a specific profile and if so stores it in the client credential. | Might make a bit more sense if the field was named ~~"typeIdUsed"~~ "typedIdUsed" so the getter would be "isTypeIdUsed". |
@@ -24,3 +24,4 @@ class BanTestCase(UserTestCase):
resp = self.client.get('/')
self.assertTemplateUsed(resp, 'users/user_banned.html')
+ assert parsedate(resp['Expires']) <= gmtime()
| [BanTestCase->[test_ban_middleware->[UserBan,get,login,save,assertTemplateUsed,assertTemplateNotUsed],attr]] | Ban middleware functions correctly. | We can also add `self.assertEqual(resp['cache-control'], 'max-age=0')` |
@@ -20,6 +20,11 @@ class FileVersionMetaRepository extends EntityRepository implements FileVersionM
{
public function findLatestWithoutSecurity()
{
+ $entityIdCondition = 'accessControl.entityId = collection.id';
+ if ('postgresql' === $this->getDatabasePlatformName()) {
+ $entityIdCondition = 'accessControl.entityId = CAST(collection.id AS VARCHAR)';
+ }
+
$queryBuilder = $this->createQueryBuilder('fileVersionMeta')
->addSelect('fileVersion')
->addSelect('file')
| [FileVersionMetaRepository->[findLatestWithoutSecurity->[getResult,andWhere],findByCollectionId->[setParameter,where,getResult],getQueryBuilderWithoutSecurity->[setParameter]]] | Finds the latest fileVersion without security. | Does this cause problems with databases other than Postgres? I think I would not make this dependent on the `DatabasePlatformName` if not necessary — in theory, the repository should only know about Doctrine and not about the specific database. Doctrine should be responsible for abstracting the differences between different platforms.
@@ -36,7 +36,7 @@
return;
}
- var factory = this.Builder.Build<LogicalMessageFactory>();
+ var factory = Builder.Build<LogicalMessageFactory>();
var newLogicalMessage = factory.Create(newInstance);
Message.Metadata = newLogicalMessage.Metadata;
| [IncomingLogicalMessageContext->[UpdateMessageInstance->[Create,nameof,AgainstNull,ReferenceEquals,Builder,Metadata,Instance],MessageId,Set,ReplyToAddress,Headers]] | Updates the message instance. | I know this is just cleanup, but since it's unrelated, should it be done somewhere else? |
@@ -343,13 +343,16 @@ class _BaseEpochs(ProjMixin, ContainsMixin, UpdateChannelsMixin,
assert self._data.shape[-1] == len(self.times)
return self
- def decimate(self, decim, copy=False):
+ def decimate(self, decim, offset=0, copy=False):
"""Decimate the epochs
Parameters
----------
decim : int
The amount to decimate data.
+ offset : int
+ Apply an offset to where the decimation starts.
+ The offset is in samples, at the original sampling rate.
copy : bool
If True, operate on and return a copy of the Epochs object.
| [EpochsArray->[__init__->[_detrend_offset_decim,drop_bad_epochs]],combine_event_ids->[copy],equalize_epoch_counts->[drop_epochs,drop_bad_epochs],_BaseEpochs->[equalize_event_counts->[drop_epochs,copy,_key_match,drop_bad_epochs],plot_drop_log->[plot_drop_log],_get_data->[_get_epoch_from_raw,_detrend_offset_decim,_is_good_epoch,_project_epoch],get_data->[_get_data],resample->[resample],next->[_get_epoch_from_raw,_detrend_offset_decim,_is_good_epoch,_project_epoch],__getitem__->[_key_match],save->[_save_split,drop_bad_epochs],drop_bad_epochs->[_reject_setup]],_minimize_time_diff->[_fix_fill],_concatenate_epochs->[_compare_epochs_infos,get_data],_finish_concat->[_BaseEpochs,drop_bad_epochs],concatenate_epochs->[_finish_concat,_concatenate_epochs],EpochsFIF->[__init__->[_BaseEpochs,_read_one_epoch_file,copy,_RawContainer]],add_channels_epochs->[_check_merge_epochs,get_data],bootstrap->[copy]] | Preloads the data if not already preloaded. This function is called when a new block of data is found in a block of epochs. | I think we need to call it `sample_offset` (to delineate it from a time offset), and it needs to go after `copy` to avoid an API change. |
@@ -0,0 +1,14 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Runtime.InteropServices;
+
+internal static partial class Interop
+{
+ internal static partial class Globalization
+ {
+ [DllImport(Libraries.GlobalizationNative, EntryPoint = "GlobalizationNative_LoadICUData")]
+ internal static extern int LoadICUData(string? path);
+ }
+}
| [No CFG could be retrieved] | No Summary Found. | >? [](start = 53, length = 1) Why is this nullable? The implementation does not seem to expect null values.
@@ -89,11 +89,11 @@ public class Http2ServerUpgradeCodec implements HttpServerUpgradeHandler.Upgrade
}
Http2Settings settings = decodeSettingsHeader(ctx, upgradeHeaders.get(0));
connectionHandler.onHttpServerUpgrade(settings);
- // Everything looks good, no need to modify the response.
- } catch (Throwable e) {
- // Send a failed response back to the client.
- upgradeResponse.setStatus(BAD_REQUEST);
- upgradeResponse.headers().clear();
+ // Everything looks good.
+ return true;
+ } catch (Throwable cause) {
+ logger.info("Error during upgrade to HTTP/2", cause);
+ return false;
}
}
| [Http2ServerUpgradeCodec->[decodeSettingsHeader->[decodeSettings,createSettingsFrame,decode,alloc,encodeString,wrap,release],decodeSettings->[onSettingsRead->[copyFrom],Http2Settings,Http2FrameAdapter,release,readFrame],createSettingsFrame->[readableBytes,Http2Flags,release,buffer,writeBytes,writeFrameHeader],prepareUpgradeResponse->[decodeSettingsHeader,IllegalArgumentException,size,getAll,onHttpServerUpgrade,get,isEmpty,clear,setStatus],upgradeTo->[name,addAfter],singletonList,DefaultHttp2FrameReader,checkNotNull]] | Prepare the response for an upgrade. | Checking if info is enabled here seems useless. Why not just log blindly?
@@ -549,11 +549,7 @@ public class HBase_1_1_2_ClientService extends AbstractControllerService impleme
logger.warn("Connection has not been established, could not create a transit URI. Returning null.");
return null;
}
- try {
- final String masterAddress = connection.getAdmin().getClusterStatus().getMaster().getHostAndPort();
- return "hbase://" + masterAddress + "/" + tableName + (rowKey != null && !rowKey.isEmpty() ? "/" + rowKey : "");
- } catch (IOException e) {
- throw new RuntimeException("Failed to get HBase Admin interface, due to " + e, e);
- }
+ final String transitUriMasterAddress = StringUtils.isEmpty(masterAddress) ? "unknown" : masterAddress;
+ return "hbase://" + transitUriMasterAddress + "/" + tableName + (StringUtils.isEmpty(rowKey) ? "" : "/" + rowKey);
}
}
| [HBase_1_1_2_ClientService->[delete->[delete],toBytes->[toBytes],createConnection->[run->[createConnection],createConnection],toBytesBinary->[toBytesBinary],put->[put],checkAndPut->[checkAndPut]]] | Returns the transit URI for the given table and row key. | We are sending back "unknown" if the master address is empty. Won't that be a problem, if it happens to be empty? I mean when `session.getProvenanceReporter().fetch(handlerFlowFile, transitUri);` gets executed on the HBase processors? |
@@ -383,7 +383,14 @@ public class KeyValueBlobTransientStore implements TransientStoreProvider {
}
}
// get values
- return parameters.stream().collect(Collectors.toMap(identity(), p -> getParameter(key, p)));
+ Map<String, Serializable> map = new HashMap<>();
+ for (String p : parameters) {
+ Serializable value = getParameter(key, p);
+ if (value != null) {
+ map.put(p, value);
+ }
+ }
+ return map;
}
protected void removeParameters(String key) {
| [KeyValueBlobTransientStore->[atomicUpdate->[getKeyValueStore],removeCompleted->[getKeyValueStore],remove->[removeCompleted,removeBlobs,removeParameters],removeAll->[getKeyValueStore,doGC],setReleaseTTL->[jsonToList,getKeyValueStore,jsonToMap],putParameters->[putParameter],getParameters->[jsonToList,getKeyValueStore,getParameter],isCompleted->[getKeyValueStore],putParameter->[toJson,atomicUpdate,jsonToList,getKeyValueStore,markEntryExists],doGC->[computeStorageSize,getBlobProvider],exists->[getKeyValueStore],computeStorageSize->[getKeyValueStore],putBlobs->[toJson,getKeyValueStore,addStorageSize,doGC,getStorageSize,getBlobProvider,markEntryExists],keyStream->[getKeyValueStore],setCompleted->[getKeyValueStore],release->[doGC,getStorageSize],getSize->[getKeyValueStore,jsonToMap],getStorageSize->[getKeyValueStore],markEntryExists->[getKeyValueStore],getBlobs->[getBlobProvider,getKeyValueStore,jsonToMap],getParameter->[getKeyValueStore],getKeyValueStore->[getKeyValueStore],removeBlobs->[addStorageSize,getKeyValueStore,jsonToMap],removeParameters->[jsonToList,getKeyValueStore]]] | This method retrieves the parameters for the given key. | I am not keen on using a type as a variable name, i.e. `map`, but its not a big deal. Perhaps `values` would have been more appropriate? |
@@ -82,6 +82,7 @@ namespace Dynamo.Tests.Engine
//Act
var codeBlockNodeOne = CreateCodeBlockNode();
CurrentDynamoModel.AddNodeToCurrentWorkspace(codeBlockNodeOne, true);
+ codeBlockNodeOne.UpdateValue(new UpdateValueParams("Code", "10;"));
CurrentDynamoModel.EngineController.AstBuilt -= EngineController_AstBuilt;
//Assert
| [EngineControllerTest->[CodeBlockNodeModel->[LibraryServices,IsNotNull,ExecuteCommand],EngineControllerComputeSyncDataTest->[HasPendingGraphSyncData,ComputeSyncData,RunModel,IsFalse,Combine,IsNull],EngineControllerReconcileTraceDataAndNotify->[AddNodeToCurrentWorkspace,CreateCodeBlockNode,AstBuilt,IsTrue],EngineControllerReconcileTraceDataAndNotifyException->[Assert,Dispose,ReconcileTraceDataAndNotify],EngineControllerPreviewGraphSyncDataTest->[First,RunModel,PreviewGraphSyncData,Combine,IsNull,IsNotNull]]] | Reconcile the trace data and notify the EngineController. | I guess this was triggering a run before your changes in `WorkspaceModel.cs`? |
@@ -86,7 +86,9 @@ class BasePublishService(BaseService):
if not is_workflow_state_transition_valid(self.publish_type, original[config.CONTENT_STATE]):
raise InvalidStateTransitionError()
- validate_item = {'act': self.publish_type, 'type': original['type'], 'validate': updates}
+ updated = original.copy()
+ updates.update(updates)
+ validate_item = {'act': self.publish_type, 'type': original['type'], 'validate': updated}
validation_errors = get_resource_service('validate').post([validate_item])
if validation_errors[0]:
| [ArchivePublishService->[get_subscribers->[_get_subscribers_for_previously_sent_items,filter_subscribers]],CorrectPublishService->[get_subscribers->[_get_subscribers_for_previously_sent_items,filter_subscribers]],KillPublishService->[get_subscribers->[_get_subscribers_for_previously_sent_items],_publish_kill_for_takes->[_update_archive,_set_version_last_modified_and_state,publish,update_published_collection]],BasePublishService->[_publish_package_items->[_publish_package_items],update->[update],queue_transmission->[set_state],_update_archive->[update],_publish_takes_package->[update],publish->[update]]] | On update of an item. | @sivakuna-aap Looks like there is a typo here. It is updating the same dict.
@@ -141,6 +141,10 @@ void push_item_definition_full(lua_State *L, const ItemDefinition &i)
lua_setfield(L, -2, "name");
lua_pushstring(L, i.description.c_str());
lua_setfield(L, -2, "description");
+ if (!i.detailed_description.empty()) {
+ lua_pushstring(L, i.detailed_description.c_str());
+ lua_setfield(L, -2, "detailed_description");
+ }
lua_pushstring(L, type.c_str());
lua_setfield(L, -2, "type");
lua_pushstring(L, i.inventory_image.c_str());
| [read_noiseparams->[getflagsfield],read_hud_change->[string_to_enum],push_noiseparams->[push_flags_string],read_items->[read_item],read_content_features->[read_tiledef],read_json_value->[read_json_value],read_hud_element->[getenumfield],read_animation_definition->[getenumfield]] | This method pushes ItemDefinition objects onto the stack. This function is called from the push protocol for all items in liquids_pointable. | Should we do this even if the `detailed_description` is empty?
@@ -731,6 +731,13 @@ describes.realWin(
/(\?|&)is_amp=5(&|$)/
);
});
+ it('does not set ptt parameter by default', () =>
+ expect(impl.getAdUrl()).to.not.eventually.match(/(\?|&)ptt=12(&|$)/));
+ it('sets ptt parameter', () => {
+ forceExperimentBranch(impl.win, 'adsensePttExp', '21068092');
+ return expect(impl.getAdUrl()).to.eventually.match(
+ /(\?|&)ptt=12(&|$)/);
+ });
});
// Not using arrow function here because otherwise the way closure behaves
| [No CFG could be retrieved] | Checks that the element covered in the unit test (unit-analytics.js) is present, and exits the test. | Instead of checking for `&ptt=12` we should check for `&ptt=`
@@ -664,6 +664,11 @@ def verbose(function, *args, **kwargs):
-------
dec : function
The decorated function
+
+ See Also
+ --------
+ set_log_level
+ set_config
"""
arg_names = _get_args(function)
default_level = verbose_level = None
| [set_config->[get_config_path,warn,_load_config],get_config->[get_config_path,_load_config],object_diff->[_sort_keys,object_diff],_load_config->[warn],object_size->[object_size],_get_stim_channel->[get_config],deprecated->[_decorate_class->[deprecation_wrapped->[warn]],_decorate_fun->[deprecation_wrapped->[warn]]],buggy_mkl_svd->[dec->[warn]],_get_ftp->[ProgressBar],_chunk_write->[update_with_increment_value],check_fname->[warn],object_hash->[object_hash,_sort_keys],md5sum->[update],_check_mayavi_version->[check_version],set_log_file->[warn,WrapStdOut],get_config_path->[_get_extra_data_path],requires_nibabel->[has_nibabel],ProgressBar->[update_with_increment_value->[update]],_get_http->[ProgressBar,update_with_increment_value],run_subprocess->[warn],random_permutation->[check_random_state],compute_corr->[_get_fast_dot],catch_logging->[__exit__->[set_log_file]],_TempDir->[__new__->[__new__]],SizeMixin->[_size->[object_size,warn],__hash__->[object_hash]],pformat->[_FormatDict]] | Verbose decorator to allow functions to override log - level. | As mentioned previously, I think it would be good to have an `Examples` section here. You can additionally cross-link to the tutorial, but we shouldn't force people to look there for examples of `call(..., verbose=True)` and `call(..., verbose=False)` |
@@ -0,0 +1,10 @@
+class CreateProfiles < ActiveRecord::Migration[6.0]
+ def change
+ create_table :profiles do |t|
+ t.belongs_to :user, null: false, foreign_key: { on_delete: :cascade }
+ t.jsonb :data, null: false, default: {}
+
+ t.timestamps
+ end
+ end
+end
| [No CFG could be retrieved] | No Summary Found. | Under certain conditions (internal admin tools) we use `delete` instead of `destroy` on users, so apart from `dependent: destroy` on the model layer I also added cascading to the DB. |
@@ -171,7 +171,7 @@ def test_install_local_editable_with_extras(script, data):
@pytest.mark.network
def test_install_collected_dependencies_first(script):
result = script.pip(
- 'install', 'paramiko',
+ 'install', 'paramiko==1.17',
)
text = [line for line in result.stdout.split('\n')
if 'Installing' in line][0]
| [test_package_in_constraints_and_dependencies->[pip,join],test_constraints_not_installed_by_default->[pip,join],test_constraints_constrain_to_local->[pip,join],test_install_unsupported_wheel_file->[pip,len,join,dedent],test_install_distribution_duplicate_extras->[raises,pip_install_local,join],test_schema_check_in_requirements_file->[raises,pip,join],test_install_with_extras_from_constraints->[pip_install_local,join],test_requirements_file->[pip,join,dedent],test_wheel_user_with_prefix_in_pydistutils_cfg->[write,dedent,pip,join,open],test_install_distribution_union_with_constraints->[pip_install_local,join],test_install_local_editable_with_extras->[str,pip,join],test_install_distribution_union_with_versions->[pip_install_local,join],test_constraints_only_causes_error->[pip,join],test_multiple_requirements_files->[pip,local_checkout,join,dedent],test_install_with_extras_editable_joined->[pip_install_local,join],test_install_with_extras_from_install->[pip_install_local,join],test_nowheel_user_with_prefix_in_pydistutils_cfg->[write,dedent,pip,join,open],test_respect_order_in_requirements_file->[pip,split,join,dedent],test_constrained_to_url_install_same_url->[pip,join],test_install_distribution_full_union->[pip_install_local,join],test_install_with_extras_joined->[pip_install_local,join],test_constraints_constrain_to_local_editable->[pip,join],test_install_option_in_requirements_file->[pip,join,dedent],test_install_local_editable_with_subdirectory->[_create_test_package_with_subdirectory,pip,assert_installed],test_install_collected_dependencies_first->[pip,split,endswith],test_install_unsupported_wheel_link_with_marker->[pip,len,join,dedent],test_multiple_constraints_files->[pip,join],test_install_local_with_subdirectory->[_create_test_package_with_subdirectory,pip,assert_installed],test_relative_requirements_file->[dedent,pip,path_to_url,str,join],test_install_distribution_union_conflicting_extras->[pip_install_local,join],test_constraints_local_install_causes_error
->[pip,join],test_double_install_spurious_hash_mismatch->[str,requirements_file,pip,pip_install_local],test_constraints_local_editable_install_causes_error->[pip,join]] | Test install collected dependencies first. | Instead of needing paramiko, we could perform this test without the need for network with `toporequires2` which requires `toporequires` (with ``--no-index`, '-f', data.find_links`) |
@@ -53,6 +53,13 @@ public class DefaultInvitationUserFactory implements InvitationUserFactory {
String login = (String) registrationDoc.getPropertyValue(configuration.getUserInfoUsernameField());
NuxeoPrincipal user = userManager.getPrincipal(login);
if (user == null) {
+
+ if (!isSameTenant(registrationDoc, configuration)) {
+ throw new UserRegistrationException("Can only invite in same tenant");
+ }
+
+ List<String> groups = filterGroups(registrationDoc, configuration);
+
DocumentModel newUserDoc = userManager.getBareUserModel();
newUserDoc.setPropertyValue(UserConfig.USERNAME_COLUMN, login);
newUserDoc.setPropertyValue(UserConfig.PASSWORD_COLUMN,
| [DefaultInvitationUserFactory->[doCreateUser->[equals,getEmail,getPrincipal,getUserInfoTenantIdField,info,getUserInfoLastnameField,getUserInfoGroupsField,getPropertyValue,getBareUserModel,createUser,getContextData,getUserInfoUsernameField,getUserInfoEmailField,getService,getUserInfoFirstnameField,getUserInfoCompanyField,UserRegistrationException,getName,setPropertyValue],getLog]] | This method is called when a user is not found in the registration document. | Please avoid reformatting (or reorganizing imports) for code that will have to be backported. |
@@ -580,10 +580,6 @@ public class HoodieCombineHiveInputFormat<K extends WritableComparable, V extend
protected CombineFileSplit inputSplitShim;
private Map<Path, PartitionDesc> pathToPartitionInfo;
- public CombineHiveInputSplit() throws IOException {
- this(ShimLoader.getHadoopShims().getCombineFileInputFormat().getInputSplitShim());
- }
-
public CombineHiveInputSplit(CombineFileSplit inputSplitShim) throws IOException {
this(inputSplitShim.getJob(), inputSplitShim);
}
| [HoodieCombineHiveInputFormat->[CheckNonCombinablePathCallable->[call->[shouldSkipCombine,getPartitionFromPath]],CombinePathInputFormat->[hashCode->[hashCode],equals->[equals]],processPaths->[getSplits],getRecordReader->[getParquetRealtimeInputFormatClassName,createInputFormatShim,getRecordReader],getCombineSplits->[getParquetRealtimeInputFormatClassName,getParquetInputFormatClassName,createInputFormatShim],getInputPaths->[getInputPaths],getSplits->[getCombineSplits,getNonCombinablePathIndices,getSplits],HoodieCombineFileInputFormatShim->[getInputPathsShim->[getInputPaths],getRecordReader->[toString,getPaths,getRecordReader],getSplits->[createParquetRealtimeInputFormat,getLocations,getSplits,getPaths,getLengths,getStartOffsets],listStatus->[createParquetInputFormat,listStatus,createParquetRealtimeInputFormat],createPool->[createPool]],CombineHiveInputSplit->[readFields->[readFields],getNumPaths->[getNumPaths],write->[getPath,getPartitionFromPath,write],getPath->[getPath],getLocations->[getLocations],getOffset->[getOffset],getPaths->[getPaths],getJob->[getJob],getLength->[getLength],toString->[toString],getLengths->[getLengths],getStartOffsets->[getStartOffsets],getPartitionFromPath],CombineFilter->[toString->[toString],accept->[getPath],addPath->[getPath]]]] | This class encapsulates a single InputSplit in the HiveInputFormat. Get the name of the input file that was not found in the partition info. | this could have been called by serialization code for e.g? |
@@ -20,6 +20,7 @@ namespace Content.Shared.GameObjects.Components.Chemistry
protected Solution _containedSolution = new Solution();
protected int _maxVolume;
private SolutionCaps _capabilities;
+ private int _transferAmount;
/// <summary>
/// Triggered when the solution contents change.
| [SolutionComponent->[RemoveAllSolution->[RemoveAllSolution],HandleComponentState->[HandleComponentState],ExposeData->[ExposeData],Shutdown->[Shutdown],Startup->[Startup]]] | Component which implements the base functionality of the object that is used to create a new object. Overrides the base class to expose the data of an object. | This isn't used. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.