diff
stringlengths
65
26.7k
message
stringlengths
7
9.92k
diff --git a/packages/veritone-widgets/src/widgets/EngineOutputExport/EngineCategoryConfigList.js b/packages/veritone-widgets/src/widgets/EngineOutputExport/EngineCategoryConfigList.js index <HASH>..<HASH> 100644 --- a/packages/veritone-widgets/src/widgets/EngineOutputExport/EngineCategoryConfigList.js +++ b/packages/veritone-widgets/src/widgets/EngineOutputExport/EngineCategoryConfigList.js @@ -38,7 +38,7 @@ export default class EngineCategoryConfigList extends Component { static propTypes = { tdos: arrayOf( shape({ - tdoId: string, + tdoId: string.isRequired, mentionId: string, startOffsetMs: number, stopOffsetMs: number diff --git a/packages/veritone-widgets/src/widgets/EngineOutputExport/index.js b/packages/veritone-widgets/src/widgets/EngineOutputExport/index.js index <HASH>..<HASH> 100644 --- a/packages/veritone-widgets/src/widgets/EngineOutputExport/index.js +++ b/packages/veritone-widgets/src/widgets/EngineOutputExport/index.js @@ -50,7 +50,7 @@ class EngineOutputExport extends Component { static propTypes = { tdos: arrayOf( shape({ - tdoId: string, + tdoId: string.isRequired, mentionId: string, startOffsetMs: number, stopOffsetMs: number
todId is required because the if the user toggles the includeMedia to true then we need to provide a tdoId.
diff --git a/src/server/pfs/server/api_server.go b/src/server/pfs/server/api_server.go index <HASH>..<HASH> 100644 --- a/src/server/pfs/server/api_server.go +++ b/src/server/pfs/server/api_server.go @@ -352,8 +352,14 @@ func putFileLogHelper(request *pfs.PutFileRequest, err error, duration time.Dura func (a *apiServer) putFileObj(objClient obj.Client, request *pfs.PutFileRequest, url *url.URL) (retErr error) { put := func(filePath string, objPath string) (thisRetErr error) { - request.Url = objPath - request.File.Path = filePath + logRequest := &pfs.PutFileRequest{ + FileType: request.FileType, + Delimiter: request.Delimiter, + Url: objPath, + File: request.File, + Recursive: request.Recursive, + } + logRequest.File.Path = filePath protorpclog.Log("pfs.API", "putFileObj", request, nil, nil, 0) defer func(start time.Time) { protorpclog.Log("pfs.API", "putFileObj", request, nil, retErr, time.Since(start))
Use fresh putFileRequest for logging Otherwise there's some contention around this object
diff --git a/cobald/composite/factory.py b/cobald/composite/factory.py index <HASH>..<HASH> 100644 --- a/cobald/composite/factory.py +++ b/cobald/composite/factory.py @@ -54,7 +54,7 @@ class FactoryPool(CompositePool): except ZeroDivisionError: return 1. - def __init__(self, *children: Pool, factory: Callable[..., Pool], interval: float = 30): + def __init__(self, *children: Pool, factory: Callable[[], Pool], interval: float = 30): self._demand = sum(child.demand for child in children) #: children fulfilling our demand self._hatchery = set(children)
type hint for FactoryPool marks factory as taking no parameters
diff --git a/vingd/__init__.py b/vingd/__init__.py index <HASH>..<HASH> 100644 --- a/vingd/__init__.py +++ b/vingd/__init__.py @@ -1,7 +1,7 @@ """Vingd API interface client.""" __title__ = 'vingd-api-python' -__version__ = '0.1.3' +__version__ = '0.1.4' __author__ = 'Radomir Stevanovic' __author_email__ = 'radomir@vingd.com' __copyright__ = 'Copyright 2012 Vingd, Inc.'
bumped to <I>
diff --git a/lib/qunited/server.rb b/lib/qunited/server.rb index <HASH>..<HASH> 100644 --- a/lib/qunited/server.rb +++ b/lib/qunited/server.rb @@ -105,8 +105,9 @@ module QUnited end end - # Compile the CoffeeScript file with the given filename to JavaScript. Returns the full - # path of the compiled JavaScript file. The file is created in a temporary directory. + # Compile the CoffeeScript file with the given filename to JavaScript. Returns the compiled + # code as a string. Returns failing test JavaScript if CoffeeScript support is not installed. + # Also adds a failing test on compilation failure. def compile_coffeescript(file) begin require 'coffee-script' @@ -125,7 +126,18 @@ test('coffee-script gem must be installed to compile this file: #{file}', functi end compiled_js_file = Tempfile.new(["compiled_#{File.basename(file).gsub('.', '_')}", '.js']) - contents = CoffeeScript.compile(File.read(file)) + + begin + contents = CoffeeScript.compile(File.read(file)) + rescue => e + return <<-COMPILATION_ERROR_SCRIPT +module('CoffeeScript'); +test('CoffeeScript compilation error', function() { + ok(false, "#{e.message.gsub('"', '\"')}") +}); + COMPILATION_ERROR_SCRIPT + end + compiled_js_file.write contents compiled_js_file.close
Fail server tests on CoffeeScript compile failure
diff --git a/core/Tracker/GoalManager.php b/core/Tracker/GoalManager.php index <HASH>..<HASH> 100644 --- a/core/Tracker/GoalManager.php +++ b/core/Tracker/GoalManager.php @@ -10,6 +10,7 @@ namespace Piwik\Tracker; use Exception; use Piwik\Common; +use Piwik\Container\StaticContainer; use Piwik\Date; use Piwik\Piwik; use Piwik\Plugin\Dimension\ConversionDimension; @@ -817,7 +818,6 @@ class GoalManager * @param $pattern_type * @param $url * @return bool - * @throws Exception */ protected function isGoalPatternMatchingUrl($goal, $pattern_type, $url) { @@ -852,7 +852,11 @@ class GoalManager $match = ($matched == 0); break; default: - throw new Exception(Piwik::translate('General_ExceptionInvalidGoalPattern', array($pattern_type))); + try { + StaticContainer::get('Psr\Log\LoggerInterface')->warning(Piwik::translate('General_ExceptionInvalidGoalPattern', array($pattern_type))); + } catch (\Exception $e) { + } + $match = false; break; } return $match;
Prevent tracking failures when invalid goal patterns are defined (#<I>) * Log warning instead of throwing an exception * add try/catch
diff --git a/src/ExtendedMySql.php b/src/ExtendedMySql.php index <HASH>..<HASH> 100644 --- a/src/ExtendedMySql.php +++ b/src/ExtendedMySql.php @@ -3,6 +3,28 @@ namespace Codeception\Lib\Driver; -class ExtendedMySql extends MySql{ +class ExtendedMySql extends MySql +{ -} \ No newline at end of file + public function insertOrUpdate($tableName, array &$data) + { + $columns = array_map( + array($this, 'getQuotedName'), + array_keys($data) + ); + + $updateAssignments = array(); + foreach ($data as $key => $value) { + $updateAssignments[] = sprintf('%s=%s', $key, $value); + } + $updateAssignments = implode(', ', $updateAssignments); + + $query = sprintf( + "INSERT INTO %s (%s) VALUES (%s) ON DUPLICATE KEY UPDATE %s", + $this->getQuotedName($tableName), + implode(', ', $columns), + implode(', ', array_fill(0, count($data), '?')), + $updateAssignments + ); + } +} \ No newline at end of file
added the insertOrUpdate method to the ExtendedMySql class
diff --git a/src/logger.js b/src/logger.js index <HASH>..<HASH> 100644 --- a/src/logger.js +++ b/src/logger.js @@ -4,7 +4,7 @@ import { format as fmt } from 'util'; import chalkModule from 'chalk'; const chalk = new chalkModule.constructor({ - enabled: process.stderr.isTTY, + enabled: process.stderr && process.stderr.isTTY, }); // Special chars.
Don't assume `process.stderr` is an object It's typically not set in Browserify.
diff --git a/common/common.go b/common/common.go index <HASH>..<HASH> 100644 --- a/common/common.go +++ b/common/common.go @@ -39,7 +39,7 @@ const ( OverlayPreparedFilename = "overlay-prepared" PrivateUsersPreparedFilename = "private-users-prepared" - MetadataServicePort = 2375 + MetadataServicePort = 18112 MetadataServiceRegSock = "/run/rkt/metadata-svc.sock" DefaultLocalConfigDir = "/etc/rkt"
common: change the port number The previous port number was registered and reserved. Use a random high port number in the non-dynamic range.
diff --git a/scripts/build_pyinstaller.py b/scripts/build_pyinstaller.py index <HASH>..<HASH> 100644 --- a/scripts/build_pyinstaller.py +++ b/scripts/build_pyinstaller.py @@ -130,10 +130,10 @@ def generate_static_css(): os.chdir(server_dir) compass_process = subprocess.Popen(['compass', 'compile']) compass_process.communicate() - # if compass_process.returncode != 0: - # print(script_tab + "ERROR: Compass returned with exit code: %s" % - # compass_process.returncode) - # return False + if compass_process.returncode != 0: + print(script_tab + "ERROR: Compass returned with exit code: %s" % + compass_process.returncode) + return False os.chdir(old_cwd) return True
ensures compass compile succeeds
diff --git a/command/bdist_dumb.py b/command/bdist_dumb.py index <HASH>..<HASH> 100644 --- a/command/bdist_dumb.py +++ b/command/bdist_dumb.py @@ -71,12 +71,11 @@ class bdist_dumb (Command): self.run_command ('build') - install = self.reinitialize_command('install') + install = self.reinitialize_command('install', reinit_subcommands=1) install.root = self.bdist_dir self.announce ("installing to %s" % self.bdist_dir) - install.ensure_finalized() - install.run() + self.run_command('install') # And make an archive relative to the root of the # pseudo-installation tree.
Ensure sub-commands of "install" are reinitialized too. Run "install" the right way, by calling 'run_command()'.
diff --git a/DotNotationPointers.js b/DotNotationPointers.js index <HASH>..<HASH> 100644 --- a/DotNotationPointers.js +++ b/DotNotationPointers.js @@ -83,11 +83,17 @@ Object.defineProperty(DotNotationPointer.prototype, 'val', { } } }, set: function(value) { - if(this.propertyInfo.obj === undefined) { // create the path if it doesn't exist - createProperty(this) - } + if (value === undefined) { + if (this.propertyInfo.obj !== undefined) { + delete this.propertyInfo.obj[this.propertyInfo.last] + } + } else { + if(this.propertyInfo.obj === undefined) { // create the path if it doesn't exist + createProperty(this) + } - this.propertyInfo.obj[this.propertyInfo.last] = value + this.propertyInfo.obj[this.propertyInfo.last] = value + } } })
remove a property from the object if it's new value is set to 'undefined'
diff --git a/ansible_runner/runner.py b/ansible_runner/runner.py index <HASH>..<HASH> 100644 --- a/ansible_runner/runner.py +++ b/ansible_runner/runner.py @@ -178,7 +178,11 @@ class Runner(object): # option expecting should have already been written in ansible_runner.runner_config env_file_host = os.path.join(self.config.artifact_dir, 'env.list') with open(env_file_host, 'w') as f: - f.write('\n'.join(list(self.config.env.keys()))) + f.write( + '\n'.join( + ["{}={}".format(key, value) for key, value in self.config.env.items()] + ) + ) else: cwd = self.config.cwd pexpect_env = self.config.env
Write fully populated envfile to pass to podman Without this, manually reproducing the container's environment is really difficult.
diff --git a/src/main/java/groovy/lang/NumberRange.java b/src/main/java/groovy/lang/NumberRange.java index <HASH>..<HASH> 100644 --- a/src/main/java/groovy/lang/NumberRange.java +++ b/src/main/java/groovy/lang/NumberRange.java @@ -644,7 +644,7 @@ public class NumberRange extends AbstractList<Comparable> implements Range<Compa // make the first fetch lazy too next = isAscending ? range.getFrom() : range.getTo(); if (!range.inclusiveLeft) { - next = next(); + next = isAscending ? increment(next, step) : decrement(next, step); } } else { next = isAscending ? increment(next, step) : decrement(next, step);
GROOVY-<I>: Fixed NumberRange.get not throwing at certain conditions With ranges like 0G<..<1G, the get method would erroneously return 1G instead of throwing an exception. This commit fixes that by directly incrementing the current value in the iterator instead of next() call.
diff --git a/salt/modules/network.py b/salt/modules/network.py index <HASH>..<HASH> 100644 --- a/salt/modules/network.py +++ b/salt/modules/network.py @@ -2106,7 +2106,7 @@ def fqdns(): results = pool.map(_lookup_fqdn, addresses) pool.close() pool.join() - except Exception as exc: + except Exception as exc: # pylint: disable=broad-except log.error("Exception while creating a ThreadPool for resolving FQDNs: %s", exc) for item in results:
Fix pylint issue
diff --git a/lib/virtus/value_object.rb b/lib/virtus/value_object.rb index <HASH>..<HASH> 100644 --- a/lib/virtus/value_object.rb +++ b/lib/virtus/value_object.rb @@ -35,6 +35,7 @@ module Virtus include ::Virtus include InstanceMethods extend ClassMethods + private :attributes= end end diff --git a/spec/integration/virtus/value_object_spec.rb b/spec/integration/virtus/value_object_spec.rb index <HASH>..<HASH> 100644 --- a/spec/integration/virtus/value_object_spec.rb +++ b/spec/integration/virtus/value_object_spec.rb @@ -40,7 +40,7 @@ describe Virtus::ValueObject do it 'writer methods are set to private' do private_methods = class_under_test.private_instance_methods private_methods.map! { |m| m.to_s } - private_methods.should include('latitude=', 'longitude=') + private_methods.should include('latitude=', 'longitude=', 'attributes=') end it 'attempts to call attribute writer methods raises NameError' do
Disallow mutation of ValueObjects via #attributes= * Increases consistency as mutations on value objects should be created via #with(mutations).
diff --git a/lib/endpoints/class-wp-rest-users-controller.php b/lib/endpoints/class-wp-rest-users-controller.php index <HASH>..<HASH> 100755 --- a/lib/endpoints/class-wp-rest-users-controller.php +++ b/lib/endpoints/class-wp-rest-users-controller.php @@ -121,7 +121,7 @@ class WP_REST_Users_Controller extends WP_REST_Controller { $prepared_args['has_published_posts'] = true; } - if ( '' !== $prepared_args['search'] ) { + if ( ! empty( $prepared_args['search'] ) ) { $prepared_args['search'] = '*' . $prepared_args['search'] . '*'; }
Only add asterisks to the user query if there is a query present
diff --git a/amino/algebra.py b/amino/algebra.py index <HASH>..<HASH> 100644 --- a/amino/algebra.py +++ b/amino/algebra.py @@ -3,6 +3,7 @@ from typing import GenericMeta, Any from types import SimpleNamespace from amino import List, Lists +from amino.util.string import ToStr class AlgebraMeta(GenericMeta): @@ -21,4 +22,8 @@ class AlgebraMeta(GenericMeta): cls.sub = sub return super().__new__(cls, name, bases, namespace, **kw) -__all__ = ('AlgebraMeta',) + +class Algebra(ToStr, metaclass=AlgebraMeta): + pass + +__all__ = ('AlgebraMeta', 'Algebra')
convenience base class `Algebra` inheriting `ToStr`
diff --git a/lib/assets/Css.js b/lib/assets/Css.js index <HASH>..<HASH> 100644 --- a/lib/assets/Css.js +++ b/lib/assets/Css.js @@ -69,10 +69,9 @@ extendWithGettersAndSetters(Css.prototype, { // CSSOM gets the @charset declaration mixed up with the first selector: try { this._parseTree = cssom.parse(this.text.replace(/@charset\s*([\'\"])\s*[\w\-]+\s*\1;/, "")); - } catch (e) { - var err = new Error('Parse error in ' + (this.url || 'inline Css' + (this.nonInlineAncestor ? ' in ' + this.nonInlineAncestor.url : '')) + '\n' + e.message); + } catch (err) { + err.message = 'Parse error in ' + (this.url || 'inline Css' + (this.nonInlineAncestor ? ' in ' + this.nonInlineAncestor.url : '')) + '\n' + err.message; if (this.assetGraph) { - err.styleSheet = {cssRules: []}; if ('styleSheet' in err) { err.message += '\nFalling back to using the ' + err.styleSheet.cssRules.length + ' parsed CSS rules'; this._parseTree = err.styleSheet;
asset.Css: Error details are no longer thrown away on caught css parse errors
diff --git a/terms/settings.py b/terms/settings.py index <HASH>..<HASH> 100644 --- a/terms/settings.py +++ b/terms/settings.py @@ -34,15 +34,13 @@ if hasattr(settings, 'TERMS_ADDITIONAL_IGNORED_TAGS'): TERMS_IGNORED_TAGS.extend(settings.TERMS_ADDITIONAL_IGNORED_TAGS) -TERMS_IGNORED_CLASSES = set( - getattr(settings, 'TERMS_IGNORED_CLASSES', - ( - 'cms_reset', - ) - ) +TERMS_IGNORED_CLASSES = getattr(settings, 'TERMS_IGNORED_CLASSES', + [ + 'cms_reset', + ] ) if hasattr(settings, 'TERMS_ADDITIONAL_IGNORED_CLASSES'): - TERMS_IGNORED_CLASSES |= set(settings.TERMS_ADDITIONAL_IGNORED_CLASSES) + TERMS_IGNORED_CLASSES.extend(settings.TERMS_ADDITIONAL_IGNORED_CLASSES) TERMS_IGNORED_IDS = getattr(settings, 'TERMS_IGNORED_IDS',
Defines TERMS_IGNORED_CLASSES as a list instead of a set.
diff --git a/zipline/utils/date_utils.py b/zipline/utils/date_utils.py index <HASH>..<HASH> 100644 --- a/zipline/utils/date_utils.py +++ b/zipline/utils/date_utils.py @@ -44,10 +44,10 @@ def EPOCH(utc_datetime): delta = utc_datetime - UNIX_EPOCH seconds = delta.total_seconds() ms = seconds * 1000 - return ms + return int(ms) def UN_EPOCH(ms_since_epoch): - seconds_since_epoch = ms_since_epoch / 1000 + seconds_since_epoch = float(ms_since_epoch) / 1000.0 delta = timedelta(seconds = seconds_since_epoch) dt = UNIX_EPOCH + delta return dt
encoding epoch as an int, rather than float.
diff --git a/lib/json_api_client/query/builder.rb b/lib/json_api_client/query/builder.rb index <HASH>..<HASH> 100644 --- a/lib/json_api_client/query/builder.rb +++ b/lib/json_api_client/query/builder.rb @@ -81,8 +81,8 @@ module JsonApiClient end end when Array - table.map do - parse_related_links(*table) + table.map do |v| + parse_related_links(*v) end else table
fix handling of array definitions of related included links
diff --git a/lib/swag_dev/project/tools/yardoc/watcher/_bootstrap.rb b/lib/swag_dev/project/tools/yardoc/watcher/_bootstrap.rb index <HASH>..<HASH> 100644 --- a/lib/swag_dev/project/tools/yardoc/watcher/_bootstrap.rb +++ b/lib/swag_dev/project/tools/yardoc/watcher/_bootstrap.rb @@ -8,8 +8,7 @@ require 'listen/record/symlink_detector' # Listen >=2.8 # patch to silence duplicate directory errors. USE AT YOUR OWN RISK module Listen - # rubocop:disable Style/Documentation - # rubocop:disable Style/SignalException + # rubocop:disable all class Record class SymlinkDetector def _fail(_, _) @@ -17,6 +16,5 @@ module Listen end end end - # rubocop:enable Style/SignalException - # rubocop:enable Style/Documentation + # rubocop:enable all end
yardoc/watcher/_bootstrap (tools) disable rubocop
diff --git a/mode/r/r.js b/mode/r/r.js index <HASH>..<HASH> 100644 --- a/mode/r/r.js +++ b/mode/r/r.js @@ -19,7 +19,7 @@ CodeMirror.defineMode("r", function(config) { for (var i = 0; i < words.length; ++i) res[words[i]] = true; return res; } - var commonAtoms = ["NULL", "NA", "Inf", "NaN", "NA_integer_", "NA_real_", "NA_complex_", "NA_character_"]; + var commonAtoms = ["NULL", "NA", "Inf", "NaN", "NA_integer_", "NA_real_", "NA_complex_", "NA_character_", "TRUE", "FALSE"]; var commonBuiltins = ["list", "quote", "bquote", "eval", "return", "call", "parse", "deparse"]; var commonKeywords = ["if", "else", "repeat", "while", "function", "for", "in", "next", "break"]; var commonBlockKeywords = ["if", "else", "repeat", "while", "function", "for"];
[r mode] Highlight TRUE/FALSE as atoms
diff --git a/plenum/server/pool_req_handler.py b/plenum/server/pool_req_handler.py index <HASH>..<HASH> 100644 --- a/plenum/server/pool_req_handler.py +++ b/plenum/server/pool_req_handler.py @@ -25,7 +25,7 @@ class PoolRequestHandler(RequestHandler): self.state = state self.domainState = domainState - def validate(self, req: Request): + def validate(self, req: Request, config = None): typ = req.operation.get(TXN_TYPE) error = None if typ == NODE: diff --git a/plenum/server/req_handler.py b/plenum/server/req_handler.py index <HASH>..<HASH> 100644 --- a/plenum/server/req_handler.py +++ b/plenum/server/req_handler.py @@ -19,7 +19,7 @@ class RequestHandler: self.ledger = ledger self.state = state - def validate(self, req: Request, config): + def validate(self, req: Request, config = None): pass def applyReq(self, req: Request):
add config=None argument to validate method to make sub and super signatures match
diff --git a/discord/client.py b/discord/client.py index <HASH>..<HASH> 100644 --- a/discord/client.py +++ b/discord/client.py @@ -308,8 +308,9 @@ class ConnectionState(object): if server is not None: user_id = data['user']['id'] member = utils.find(lambda m: m.id == user_id, server.members) - server.members.remove(member) - self.dispatch('member_remove', member) + if member in server.members: + server.members.remove(member) + self.dispatch('member_remove', member) def handle_guild_member_update(self, data): server = self._get_server(data.get('guild_id'))
Check if member is in list for GUILD_MEMBER_REMOVE.
diff --git a/testarator.go b/testarator.go index <HASH>..<HASH> 100644 --- a/testarator.go +++ b/testarator.go @@ -77,7 +77,11 @@ func (s *Setup) SpinUp() error { return nil } - opt := &aetest.Options{AppID: "unittest", StronglyConsistentDatastore: true} + opt := &aetest.Options{ + AppID: "unittest", + StronglyConsistentDatastore: true, + SuppressDevAppServerLog: true, + } inst, err := aetest.NewInstance(opt) if err != nil { return err
Set SuppressDevAppServerLog option in aetest The latest appengine supports an option to suppress annoying logs in tests. This patch enables it. <URL>
diff --git a/agent/config/runtime_test.go b/agent/config/runtime_test.go index <HASH>..<HASH> 100644 --- a/agent/config/runtime_test.go +++ b/agent/config/runtime_test.go @@ -8,6 +8,7 @@ import ( "errors" "flag" "fmt" + "github.com/armon/go-metrics/prometheus" "io/ioutil" "net" "os" @@ -7103,9 +7104,11 @@ func TestFullConfig(t *testing.T) { AllowedPrefixes: []string{"oJotS8XJ"}, BlockedPrefixes: []string{"cazlEhGn"}, MetricsPrefix: "ftO6DySn", - PrometheusRetentionTime: 15 * time.Second, StatsdAddr: "drce87cy", StatsiteAddr: "HpFwKB8R", + PrometheusOpts: prometheus.PrometheusOpts{ + Expiration: 15 * time.Second, + }, }, TLSCipherSuites: []uint16{tls.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA, tls.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256}, TLSMinVersion: "pAOWafkR",
update runtime_test to handle PrometheusOpts expiry field change
diff --git a/lib/Ogone/AbstractResponse.php b/lib/Ogone/AbstractResponse.php index <HASH>..<HASH> 100644 --- a/lib/Ogone/AbstractResponse.php +++ b/lib/Ogone/AbstractResponse.php @@ -93,4 +93,13 @@ abstract class AbstractResponse implements Response { return $this->parameters[$key]; } + + /** + * Get all parameters + SHASIGN + * @return array + */ + public function toArray() + { + return $this->parameters + array('SHASIGN' => $this->shaSign); + } }
toArray method vanished from the PaymentResponse
diff --git a/psphere/managedobjects.py b/psphere/managedobjects.py index <HASH>..<HASH> 100644 --- a/psphere/managedobjects.py +++ b/psphere/managedobjects.py @@ -360,6 +360,30 @@ class ManagedEntity(ExtensibleManagedObject): def __init__(self, mo_ref, server): super(ManagedEntity, self).__init__(mo_ref, server) + @classmethod + def find(self, filter=None): + """Find ManagedEntity's of this type using the given filter. + + :param filter: Find ManagedEntity's matching these key/value pairs + :type filter: dict + :returns: A list of ManagedEntity's matching the filter or None + :rtype: list + """ + return self.vim.find_entity_view(view_type=self.__class__.__name__, + filter=filter) + @classmethod + def find_one(self, filter=None): + """Find a ManagedEntity of this type using the given filter. + + If multiple ManagedEntity's are found, only the first is returned. + + :param filter: Find ManagedEntity's matching these key/value pairs + :type filter: dict + :returns: A ManagedEntity's matching the filter or None + :rtype: ManagedEntity + """ + return self.find(filter=filter)[0] + def find_datacenter(self, parent=None): """Find the datacenter which this ManagedEntity belongs to.""" # If the parent hasn't been set, use the parent of the
Provide a find and find_one method for ManagedEntity's.
diff --git a/src/df.js b/src/df.js index <HASH>..<HASH> 100644 --- a/src/df.js +++ b/src/df.js @@ -1,3 +1,4 @@ +import * as LogManager from 'aurelia-logging'; import {I18N} from './i18n'; export class DfValueConverter { @@ -17,7 +18,8 @@ export class DfValueConverter { if (dfOrOptions && (typeof dfOrOptions.format === 'function')) { return dfOrOptions.format(value); } else if (df) { - console.warn('This ValueConverter signature is depcrecated and will be removed in future releases. Please use the signature [dfOrOptions, locale]'); // eslint-disable-line no-console + let i18nLogger = LogManager.getLogger('i18n'); + i18nLogger.warn('This ValueConverter signature is depcrecated and will be removed in future releases. Please use the signature [dfOrOptions, locale]'); } else { df = this.service.df(dfOrOptions, locale || this.service.getLocale()); }
fix(logger): use LogManager instead console uses the aurelia-logging service instead of direct usage of console.warn
diff --git a/ReText/window.py b/ReText/window.py index <HASH>..<HASH> 100644 --- a/ReText/window.py +++ b/ReText/window.py @@ -111,7 +111,7 @@ class ReTextWindow(QMainWindow): self.actionChangeFont = self.act(self.tr('Change default font'), trig=self.changeFont) self.actionSearch = self.act(self.tr('Find text'), 'edit-find', shct=QKeySequence.Find) self.actionSearch.setCheckable(True) - self.actionSearch.triggered.connect(self.searchBar.setVisible) + self.actionSearch.triggered[bool].connect(self.searchBar.setVisible) self.searchBar.visibilityChanged.connect(self.searchBarVisibilityChanged) self.actionPreview = self.act(self.tr('Preview'), shct=Qt.CTRL+Qt.Key_E, trigbool=self.preview) if QIcon.hasThemeIcon('document-preview'): @@ -349,7 +349,7 @@ class ReTextWindow(QMainWindow): action.triggered.connect(trig) elif trigbool: action.setCheckable(True) - action.triggered.connect(trigbool) + action.triggered[bool].connect(trigbool) if shct: action.setShortcut(shct) return action
Fix triggering boolean actions in PySide
diff --git a/src/Codeception/Command/Run.php b/src/Codeception/Command/Run.php index <HASH>..<HASH> 100644 --- a/src/Codeception/Command/Run.php +++ b/src/Codeception/Command/Run.php @@ -398,9 +398,15 @@ class Run extends Command $tokens = explode(' ', $request); foreach ($tokens as $token) { $token = preg_replace('~=.*~', '', $token); // strip = from options + + if (empty($token)) { + continue; + } + if ($token == '--') { break; // there should be no options after ' -- ', only arguments } + if (substr($token, 0, 2) === '--') { $options[] = substr($token, 2); } elseif ($token[0] === '-') {
Fixed uninitialized string offset (#<I>)
diff --git a/core/server/api/canary/utils/serializers/output/products.js b/core/server/api/canary/utils/serializers/output/products.js index <HASH>..<HASH> 100644 --- a/core/server/api/canary/utils/serializers/output/products.js +++ b/core/server/api/canary/utils/serializers/output/products.js @@ -73,6 +73,7 @@ function serializeProduct(product, options, apiType) { name: json.name, description: json.description, slug: json.slug, + active: json.active, type: json.type, created_at: json.created_at, updated_at: json.updated_at, @@ -161,6 +162,8 @@ function createSerializer(debugString, serialize) { * @prop {string} name * @prop {string} slug * @prop {string} description + * @prop {boolean} active + * @prop {string} type * @prop {Date} created_at * @prop {Date} updated_at * @prop {StripePrice[]} [stripe_prices]
Added active flag to products API (#<I>) refs <URL> as active or archived
diff --git a/resources/views/previewLinkPopup.blade.php b/resources/views/previewLinkPopup.blade.php index <HASH>..<HASH> 100644 --- a/resources/views/previewLinkPopup.blade.php +++ b/resources/views/previewLinkPopup.blade.php @@ -1,6 +1,7 @@ <div id="MailPreviewDriverBox" style=" position:absolute; - top:0; + top:10px; + right:10px; z-index:99999; background:#fff; border:solid 1px #ccc; @@ -15,9 +16,19 @@ <a style="text-decoration: underline" href="{{ $previewUrl }}&file_type=eml">Open mail in email client</a> </li> </ul> + <span onclick="closePopup()" id="close" style=" + cursor: pointer; + font-size: smaller; + position: absolute; + top: 2px; + right: 6px; + font-family: monospace;">X</span> </div> <script type="text/javascript"> - setTimeout(function () { + function closePopup() { document.body.removeChild(document.getElementById('MailPreviewDriverBox')); - }, $timeoutInSeconds * 1000); + } + @if($timeoutInSeconds) + setTimeout(closePopup(), {{ $timeoutInSeconds }} * 1000); + @endif </script>
fix: make setTimeoutInSeconds works In previous version, variable $timeoutInSeconds wasn't scaped by double braces, so blade puts it like a string. Now, it is fixed, and as a plus, added a X button to close manually
diff --git a/lib/bibliothecary/parsers/npm.rb b/lib/bibliothecary/parsers/npm.rb index <HASH>..<HASH> 100644 --- a/lib/bibliothecary/parsers/npm.rb +++ b/lib/bibliothecary/parsers/npm.rb @@ -25,7 +25,7 @@ module Bibliothecary }, match_filename("npm-ls.json") => { kind: 'lockfile', - parser: :parse_tree + parser: :parse_ls } } end @@ -80,7 +80,7 @@ module Bibliothecary end end - def self.parse_tree(file_contents) + def self.parse_ls(file_contents) manifest = JSON.parse(file_contents) transform_tree_to_array(manifest.fetch('dependencies', {}))
Also rename method to be more consistent.
diff --git a/tests/Kernel/Http/StreamResponseTest.php b/tests/Kernel/Http/StreamResponseTest.php index <HASH>..<HASH> 100644 --- a/tests/Kernel/Http/StreamResponseTest.php +++ b/tests/Kernel/Http/StreamResponseTest.php @@ -1,12 +1,20 @@ <?php +/* + * This file is part of the overtrue/wechat. + * + * (c) overtrue <i@overtrue.me> + * + * This source file is subject to the MIT license that is bundled + * with this source code in the file LICENSE. + */ + use EasyWeChat\Kernel\Http\StreamResponse; use EasyWeChat\Tests\TestCase; use org\bovigo\vfs\vfsStream; use org\bovigo\vfs\vfsStreamDirectory; use org\bovigo\vfs\vfsStreamWrapper; - class StreamResponseTest extends TestCase { public function setUp()
Apply fixes from StyleCI (#<I>) [ci skip] [skip ci]
diff --git a/cleanse.js b/cleanse.js index <HASH>..<HASH> 100644 --- a/cleanse.js +++ b/cleanse.js @@ -60,8 +60,10 @@ function cleanseHtml(str, options){ str = str.replace(/<head\b[^<]*(?:(?!<\/head>)<[^<]*)*<\/head>/gi,' '); //removes head section entirely if(options.style) str = str.replace(/<style\b[^<]*(?:(?!<\/style>)<[^<]*)*<\/style>/gi,' '); //removes style section entirely - if(options.html) + if(options.html){ + str = str.replace(/='.*?'/gm,'=""'); str = str.replace(/<(?:.|\n)*?>/gm,' '); //remove all remaining tags + } //cleanup str = str.replace(/\s{2,}/g, ' '); //replace more than one space with a single space str = str.replace(/^\s+/,''); //remove lead space
fixed mid attribute '>' usage by completely clearing out attributes first before removing the tag
diff --git a/errors.js b/errors.js index <HASH>..<HASH> 100644 --- a/errors.js +++ b/errors.js @@ -462,6 +462,7 @@ module.exports.classify = function classify(err) { case 'tchannel-thrift-handler.parse-error.head-failed': case 'tchannel.checksum': case 'tchannel.duplicate-header-key': + case 'tchannel.null-key': return 'BadRequest'; case 'tchannel.init.call-request-before-init-request': @@ -480,7 +481,6 @@ module.exports.classify = function classify(err) { case 'tchannel.invalid-error-code': case 'tchannel.invalid-frame-type': case 'tchannel.missing-init-header': - case 'tchannel.null-key': case 'tchannel.protocol.read-failed': case 'tchannel.protocol.write-failed': case 'tchannel.unhandled-frame-type':
errors: move null key to bad request
diff --git a/tests/test.js b/tests/test.js index <HASH>..<HASH> 100644 --- a/tests/test.js +++ b/tests/test.js @@ -55,6 +55,12 @@ describe('lessWatchCompilerUtils Module API', function(){ lessWatchCompilerUtils.config.sourceMap = true; assert.equal("lessc --source-map test testFolder/test.css", lessWatchCompilerUtils.compileCSS("test", true)); }); + it('should run the correct command with minified flag', function(){ + lessWatchCompilerUtils.config.outputFolder = "testFolder"; + lessWatchCompilerUtils.config.minified = true; + lessWatchCompilerUtils.config.sourceMap = false; + assert.equal("lessc -x test testFolder/test.min.css", lessWatchCompilerUtils.compileCSS("test", true)); + }); }) describe('filterFiles()', function(){ it('filterFiles() function should be there', function(){
test: Add test for minified flag
diff --git a/estnltk/storage/postgres/where_clause.py b/estnltk/storage/postgres/where_clause.py index <HASH>..<HASH> 100644 --- a/estnltk/storage/postgres/where_clause.py +++ b/estnltk/storage/postgres/where_clause.py @@ -23,13 +23,16 @@ class WhereClause(Composed): else: super().__init__([]) + # We omit layers inside a Text object. + self._required_layers = sorted(set(layer_query or []) | set(layer_ngram_query or [])) + self.collection = collection + def __bool__(self): return bool(self.seq) - # TODO @property - def required_tables(self): - return self.required_layer_tables + def required_layers(self): + return self._required_layers # TODO @property
added property WhereClause.required_layers
diff --git a/Doctrine/Mapper/MetaInformationFactory.php b/Doctrine/Mapper/MetaInformationFactory.php index <HASH>..<HASH> 100644 --- a/Doctrine/Mapper/MetaInformationFactory.php +++ b/Doctrine/Mapper/MetaInformationFactory.php @@ -55,7 +55,7 @@ class MetaInformationFactory } if (!$this->annotationReader->hasDocumentDeclaration($entity)) { - throw new \RuntimeException(sprintf('no declaration for document found in entity %s', $className)); + return null; } $metaInformation = new MetaInformation();
do not throw exception if an entity has no document annotation
diff --git a/rrecur.js b/rrecur.js index <HASH>..<HASH> 100644 --- a/rrecur.js +++ b/rrecur.js @@ -118,6 +118,11 @@ } if ('UNTIL' === k || 'DTSTART' === k) { + if ('number' === typeof v) { + v = new Date(v).toISOString(); + } else if ('object' === typeof v) { + v = v.toISOString(); + } v = v.replace(/\-|:/g, '').replace(/\.\d+/, ''); }
fix #2 convert date object and integer timestamp to iso string for until and dtstart
diff --git a/h5p.classes.php b/h5p.classes.php index <HASH>..<HASH> 100644 --- a/h5p.classes.php +++ b/h5p.classes.php @@ -1864,7 +1864,7 @@ class H5PCore { 'js/h5p-utils.js', ); - public static $defaultContentWhitelist = 'json png jpg jpeg gif bmp tif tiff svg eot ttf woff woff2 otf webm mp4 ogg mp3 wav txt pdf rtf doc docx xls xlsx ppt pptx odt ods odp xml csv diff patch swf md textile vtt webvtt'; + public static $defaultContentWhitelist = 'json png jpg jpeg gif bmp tif tiff svg eot ttf woff woff2 otf webm mp4 ogg mp3 m4a wav txt pdf rtf doc docx xls xlsx ppt pptx odt ods odp xml csv diff patch swf md textile vtt webvtt'; public static $defaultLibraryWhitelistExtras = 'js css'; public $librariesJsonData, $contentJsonData, $mainJsonData, $h5pF, $fs, $h5pD, $disableFileCheck;
Add m4a extension to content files whitelist
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -2,13 +2,13 @@ from setuptools import setup setup( name="tyoi.OAuth2", - version="0.1.0", + version="0.2.0", author="Ryan Horn", author_email="ryan.horn.web@gmail.com", description=("Implements the client side of the OAuth 2 protocol"), keywords="oauth oauth2 auth authentication", url="https://github.com/ryanhorn/tyoiOAuth2", - packages=["tyoi", "tyoi.oauth2"], + packages=["tyoi", "tyoi.oauth2", "tyoi.oauth2.grants", "tyoi.oauth2.authenticators"], test_suite="tests", tests_require=["mox"] )
Updated setup.py with new version and packages
diff --git a/lib/conceptql/rdbms/postgres.rb b/lib/conceptql/rdbms/postgres.rb index <HASH>..<HASH> 100644 --- a/lib/conceptql/rdbms/postgres.rb +++ b/lib/conceptql/rdbms/postgres.rb @@ -32,6 +32,10 @@ module ConceptQL def explain_temp_tables? ENV["CONCEPTQL_PG_EXPLAIN_TEMP_TABLES"] == "true" end + + def preferred_formatter + SqlFormatters::PgFormat + end end end end
Introduce RDBMS#preferred_formatter sql_format doesn't format our commented PostgreSQL SQL very nicely, so we'll use pg_format when we're formatting PostgreSQL-oriented SQL This is a quick hack to allow an RDBMS to list a preferred formatter and I'd really like to refactor this some day
diff --git a/benchexec/tablegenerator/__init__.py b/benchexec/tablegenerator/__init__.py index <HASH>..<HASH> 100755 --- a/benchexec/tablegenerator/__init__.py +++ b/benchexec/tablegenerator/__init__.py @@ -1159,11 +1159,8 @@ def basename_without_ending(file): name = name[:-4] return name -def main(args=None): - - if args is None: - args = sys.argv +def create_argument_parser(): parser = argparse.ArgumentParser( fromfile_prefix_chars='@', description= @@ -1247,8 +1244,10 @@ def main(args=None): parser.add_argument("--version", action="version", version="%(prog)s " + __version__ ) + return parser - options = parser.parse_args(args[1:]) +def main(args=None): + options = create_argument_parser().parse_args((args or sys.argv)[1:]) logging.basicConfig(format="%(levelname)s: %(message)s", level=logging.WARNING if options.quiet else logging.INFO)
Refactoring: extract method in table-generator
diff --git a/course/jumpto.php b/course/jumpto.php index <HASH>..<HASH> 100644 --- a/course/jumpto.php +++ b/course/jumpto.php @@ -10,6 +10,10 @@ $jump = optional_param('jump', '', PARAM_RAW); + if (!confirm_sesskey()) { + print_error('confirmsesskeybad'); + } + if (strpos($jump, $CFG->wwwroot) === 0) { // Anything on this site redirect(urldecode($jump)); } else if (preg_match('/^[a-z]+\.php\?/', $jump)) { diff --git a/lib/weblib.php b/lib/weblib.php index <HASH>..<HASH> 100644 --- a/lib/weblib.php +++ b/lib/weblib.php @@ -1053,6 +1053,7 @@ function popup_form($common, $options, $formname, $selected='', $nothing='choose } $output .= '</select>'; + $output .= '<input type="hidden" name="sesskey" value="'.sesskey().'" />'; $output .= '<noscript id="noscript'.$formname.'" style="display: inline;">'; $output .= '<input type="submit" value="'.$go.'" /></noscript>'; $output .= '<script type="text/javascript">'.
validate local redirection actions in jumpto.php SC#<I>
diff --git a/notario/decorators.py b/notario/decorators.py index <HASH>..<HASH> 100644 --- a/notario/decorators.py +++ b/notario/decorators.py @@ -58,7 +58,8 @@ def not_empty(_object): @instance_of() def decorated(value): - assert value, "is empty" + name = getattr(value, '__name__', getattr(value.__class__, '__name__')) + assert value, "%s is empty" % name return _validator(value) return decorated assert _object, "is empty"
not_empty has a better failure message
diff --git a/tests/job/validation_test.py b/tests/job/validation_test.py index <HASH>..<HASH> 100644 --- a/tests/job/validation_test.py +++ b/tests/job/validation_test.py @@ -262,7 +262,7 @@ class ClassicalHazardFormTestCase(unittest.TestCase): 'Number of logic tree samples must be >= 0', ], 'poes': [ - 'PoEs for hazard maps must be in the range [0, 1]', + '`poes` values must be in the range [0, 1]', ], 'quantile_hazard_curves': [ 'Quantile hazard curve values must in the range [0, 1]'
tests/job/validation_test: Updated expected error message string in reference to a `poes` param error.
diff --git a/src/Data/ListController.js b/src/Data/ListController.js index <HASH>..<HASH> 100644 --- a/src/Data/ListController.js +++ b/src/Data/ListController.js @@ -1,14 +1,24 @@ "use strict"; +let route; + class ListController { - constructor(app, Module, $routeParams) { + constructor(app, Module, $routeParams, $route) { this.app = app; + route = $route; var elements = Module.retrieve(app); for (let name in elements) { this[name] = elements[name]; } + var setup = {}; + this.Meta.fieldsets.map(fieldset => { + if (fieldset.primary) { + fieldset.fields.map(field => setup[field] = undefined); + } + }); + this.Model.$load(setup); this.items = []; this.page($routeParams); } @@ -16,9 +26,20 @@ class ListController { page(params) { this.Service.list(params).success(items => this.items = items); } + + create() { + this.Meta.$create(this.Model).success(() => { + this.form = false; + route.reload(); + }); + } + + reset() { + this.Model.$load({}); + } }; -ListController.$inject = ['app', 'Module', '$routeParams']; +ListController.$inject = ['app', 'Module', '$routeParams', '$route']; export {ListController};
empty model needs setup; add create method
diff --git a/api/config.go b/api/config.go index <HASH>..<HASH> 100644 --- a/api/config.go +++ b/api/config.go @@ -3,6 +3,7 @@ package api import ( "flag" "net" + "net/http" "net/http/httputil" "net/url" @@ -58,4 +59,11 @@ func ConfigProcess() { log.Fatal(4, "API Cannot parse fallback-graphite-addr: %s", err) } graphiteProxy = httputil.NewSingleHostReverseProxy(u) + // remove these headers from upstream + // we will set our own correct ones (and duplicate headers are illegal) + graphiteProxy.ModifyResponse = func(resp *http.Response) error { + resp.Header.Del("access-control-allow-credentials") + resp.Header.Del("Access-Control-Allow-Origin") + return nil + } }
fix duplicate access control headers leading to browser blocking requests we can now successfully use MT directly from browser, with dynamic proxying!
diff --git a/core/src/main/java/hudson/tasks/MailSender.java b/core/src/main/java/hudson/tasks/MailSender.java index <HASH>..<HASH> 100644 --- a/core/src/main/java/hudson/tasks/MailSender.java +++ b/core/src/main/java/hudson/tasks/MailSender.java @@ -312,7 +312,12 @@ public class MailSender { rcp.addAll(buildCulpritList(listener,b.getCulprits())); } else { // ordinary address - rcp.add(new InternetAddress(address)); + try { + rcp.add(new InternetAddress(address)); + } catch (AddressException e) { + // report bad address, but try to send to other addresses + e.printStackTrace(listener.error(e.getMessage())); + } } }
[FIXED HUDSON-<I>] Send build status email to valid addresses rather than aborting for one invalid address git-svn-id: <URL>
diff --git a/python/mxboard/event_file_writer.py b/python/mxboard/event_file_writer.py index <HASH>..<HASH> 100644 --- a/python/mxboard/event_file_writer.py +++ b/python/mxboard/event_file_writer.py @@ -32,6 +32,8 @@ import six from .proto import event_pb2 from .record_writer import RecordWriter +logging.basicConfig() + class EventsWriter(object): """Writes `Event` protocol buffers to an event file. This class is ported from
Fix logging problem in py<I>
diff --git a/protocols/primary-backup/src/main/java/io/atomix/protocols/backup/proxy/PrimaryBackupProxy.java b/protocols/primary-backup/src/main/java/io/atomix/protocols/backup/proxy/PrimaryBackupProxy.java index <HASH>..<HASH> 100644 --- a/protocols/primary-backup/src/main/java/io/atomix/protocols/backup/proxy/PrimaryBackupProxy.java +++ b/protocols/primary-backup/src/main/java/io/atomix/protocols/backup/proxy/PrimaryBackupProxy.java @@ -229,6 +229,7 @@ public class PrimaryBackupProxy extends AbstractPrimitiveProxy { .whenCompleteAsync((response, error) -> { protocol.unregisterEventListener(sessionId); clusterService.removeListener(clusterEventListener); + future.complete(null); }, threadContext); } else { future.complete(null);
Ensure PrimaryBackupProxy.close future is properly completed.
diff --git a/robjects/tests.py b/robjects/tests.py index <HASH>..<HASH> 100644 --- a/robjects/tests.py +++ b/robjects/tests.py @@ -2,6 +2,8 @@ import unittest import redis from robjects.base import BaseObject +from robjects.objects import JsonObject, HashObject + r = redis.Redis() r.flushdb() @@ -65,5 +67,23 @@ class ObjectTestMixin(object): self.assertEquals(self.CLS.count(), 0) +class TestJsonObject(JsonObject): + redis = r + HASH_KEY = 'testjson' + + +class JsonObjectTest(ObjectTestCase, ObjectTestMixin): + CLS = TestJsonObject + + +class TestHashObject(HashObject): + redis = r + HASH_KEY = 'testhash%s' + + +class HashObjectTest(ObjectTestCase, ObjectTestMixin): + CLS = TestHashObject + + if __name__ == '__main__': unittest.main()
add tests for JsonObject and HashObject
diff --git a/index.js b/index.js index <HASH>..<HASH> 100644 --- a/index.js +++ b/index.js @@ -135,7 +135,7 @@ HtmlWebpackPugPlugin.prototype.injectAssetsIntoSlim = function (file, styles, sc */ HtmlWebpackPugPlugin.prototype.injectAssets = function (file, head, body, assets) { var self = this; - var bodyRegExp = /( *)(%?body)/i; + var bodyRegExp = /^( *)(%?body)\b/im; var match = bodyRegExp.exec(file); if (match) {
Fix adding link tag to head when word 'body' is present in content
diff --git a/crypto/secp256k1/secp256.go b/crypto/secp256k1/secp256.go index <HASH>..<HASH> 100644 --- a/crypto/secp256k1/secp256.go +++ b/crypto/secp256k1/secp256.go @@ -20,11 +20,11 @@ package secp256k1 /* #cgo CFLAGS: -I./libsecp256k1 -#cgo darwin CFLAGS: -I/usr/local/include +#cgo darwin CFLAGS: -I/usr/local/include -I/opt/pkg/include #cgo freebsd CFLAGS: -I/usr/local/include #cgo linux,arm CFLAGS: -I/usr/local/arm/include #cgo LDFLAGS: -lgmp -#cgo darwin LDFLAGS: -L/usr/local/lib +#cgo darwin LDFLAGS: -L/usr/local/lib -L/opt/pkg/lib #cgo freebsd LDFLAGS: -L/usr/local/lib #cgo linux,arm LDFLAGS: -L/usr/local/arm/lib #define USE_NUM_GMP
crypto/secp<I>k1: add C compiler flags for pkgsrc pkgsrc is a cross-platform package manager that also supports OS X.
diff --git a/java/client/test/org/openqa/selenium/RenderedWebElementTest.java b/java/client/test/org/openqa/selenium/RenderedWebElementTest.java index <HASH>..<HASH> 100644 --- a/java/client/test/org/openqa/selenium/RenderedWebElementTest.java +++ b/java/client/test/org/openqa/selenium/RenderedWebElementTest.java @@ -141,6 +141,7 @@ public class RenderedWebElementTest extends AbstractDriverTestCase { assertTrue("The element and the enclosing map should be considered shown.", isShown); } + @Ignore @JavascriptEnabled public void testCanClickOnSuckerFishMenuItem() throws Exception { if (!hasInputDevices()) {
JasonLeyba: @Ignoring test that fails for all configs. r<I>
diff --git a/lxd-agent/exec.go b/lxd-agent/exec.go index <HASH>..<HASH> 100644 --- a/lxd-agent/exec.go +++ b/lxd-agent/exec.go @@ -235,7 +235,7 @@ func (s *execWs) Do(op *operations.Operation) error { stderr = ttys[2] } - controlExit := make(chan bool) + controlExit := make(chan bool, 1) attachedChildIsBorn := make(chan int) attachedChildIsDead := make(chan bool, 1) var wgEOF sync.WaitGroup
lxd-agent/exec: Add buffered channel to prevent deadlock on cmd exit
diff --git a/hanlp/components/sts/transformer_sts.py b/hanlp/components/sts/transformer_sts.py index <HASH>..<HASH> 100644 --- a/hanlp/components/sts/transformer_sts.py +++ b/hanlp/components/sts/transformer_sts.py @@ -161,7 +161,10 @@ class TransformerSemanticTextualSimilarity(TorchComponent): # noinspection PyMethodOverriding def build_model(self, transformer, training=True, **kwargs) -> torch.nn.Module: config = AutoConfig_.from_pretrained(transformer, num_labels=1) - model = AutoModelForSequenceClassification.from_pretrained(transformer, config=config) + if training: + model = AutoModelForSequenceClassification.from_pretrained(transformer, config=config) + else: + model = AutoModelForSequenceClassification.from_config(config) return model def predict(self, data: Union[List[str], List[List[str]]], batch_size: int = None, **kwargs) -> Union[ diff --git a/hanlp/version.py b/hanlp/version.py index <HASH>..<HASH> 100644 --- a/hanlp/version.py +++ b/hanlp/version.py @@ -2,5 +2,5 @@ # Author: hankcs # Date: 2019-12-28 19:26 -__version__ = '2.1.0-alpha.45' +__version__ = '2.1.0-alpha.46' """HanLP version"""
Avoid re-downloading Electra model
diff --git a/src/gitgraph.js b/src/gitgraph.js index <HASH>..<HASH> 100644 --- a/src/gitgraph.js +++ b/src/gitgraph.js @@ -538,11 +538,19 @@ // Add start point if (this.parentBranch) { - this.startPoint = { - x: this.parentBranch.offsetX - this.parent.commitOffsetX + this.template.commit.spacingX, - y: this.parentBranch.offsetY - this.parent.commitOffsetY + this.template.commit.spacingY, - type: "start" - }; + if ( this.parentCommit === this.parentBranch.commits.slice( -1 )[ 0 ] ) { + this.startPoint = { + x: this.parentBranch.offsetX - this.parent.commitOffsetX + this.template.commit.spacingX, + y: this.parentBranch.offsetY - this.parent.commitOffsetY + this.template.commit.spacingY, + type: "start" + }; + } else { + this.startPoint = { + x: this.parentCommit.x, + y: this.parentCommit.y, + type: "start" + }; + } } else { this.startPoint = null; }
Branch from the latest point instead of parent commit when parent commit is the head of parent branch
diff --git a/timepiece/forms.py b/timepiece/forms.py index <HASH>..<HASH> 100644 --- a/timepiece/forms.py +++ b/timepiece/forms.py @@ -77,12 +77,12 @@ class EditPersonForm(auth_forms.UserChangeForm): label=_(u'Repeat Password'), widget=forms.PasswordInput(render_value=False)) - class Meta: - model = auth_models.User - fields = ( - "username", "first_name", "last_name", - "email", "is_active", "is_staff" - ) + def __init__(self, *args, **kwargs): + super(EditPersonForm, self).__init__(*args, **kwargs) + + # In 1.4 this field is created even if it is excluded in Meta. + if 'password' in self.fields: + del(self.fields['password']) def clean_password(self): return self.cleaned_data.get('password_one', None) @@ -106,6 +106,11 @@ class EditPersonForm(auth_forms.UserChangeForm): instance.save() return instance + class Meta: + model = auth_models.User + fields = ('username', 'first_name', 'last_name', 'email', 'is_active', + 'is_staff') + class QuickSearchForm(forms.Form): quick_search = selectable_forms.AutoCompleteSelectField(
refs #<I> - Removed extra password field from Edit Person form
diff --git a/resources/config/default.php b/resources/config/default.php index <HASH>..<HASH> 100644 --- a/resources/config/default.php +++ b/resources/config/default.php @@ -58,7 +58,8 @@ return function (CM_Config_Node $config) { ); $config->CM_Http_Response_View_Abstract->exceptionsToCatch = array( - 'CM_Exception_Nonexistent' => [], + 'CM_Exception_Nonexistent' => ['log' => 'CM_Paging_Log_NotFound'], + 'CM_Exception_InvalidParam' => ['log' => 'CM_Paging_Log_NotFound'], 'CM_Exception_AuthRequired' => [], 'CM_Exception_NotAllowed' => [], 'CM_Exception_Blocked' => [],
Log "nonexistent" and "invalid param" in view responses
diff --git a/tools/upload_website.js b/tools/upload_website.js index <HASH>..<HASH> 100755 --- a/tools/upload_website.js +++ b/tools/upload_website.js @@ -1,4 +1,6 @@ #!/usr/bin/env node -const run = require('./run'); -// pip install aws -run.sh(`aws s3 sync website/ s3://propelml.org --follow-symlinks --delete`); +const { execSync } = require("child_process"); +// pip install awscli +execSync("aws s3 sync website/ s3://propelml.org --follow-symlinks --delete", { + stdio: "inherit" +});
tools: make upload_website work on windows
diff --git a/addon/components/flexberry-layers-attributes-panel.js b/addon/components/flexberry-layers-attributes-panel.js index <HASH>..<HASH> 100644 --- a/addon/components/flexberry-layers-attributes-panel.js +++ b/addon/components/flexberry-layers-attributes-panel.js @@ -667,7 +667,7 @@ export default Ember.Component.extend(LeafletZoomToFeatureMixin, { case 'MultiSurfacePropertyType': case 'PolygonPropertyType': case 'MultiPolygonPropertyType': - return ['circle', 'rectangle', 'polygon']; + return ['rectangle', 'polygon']; } }
Remove 'circle' from available draw tools for polygon layer.
diff --git a/pandas/core/series.py b/pandas/core/series.py index <HASH>..<HASH> 100644 --- a/pandas/core/series.py +++ b/pandas/core/series.py @@ -166,6 +166,8 @@ class Series(base.IndexOpsMixin, generic.NDFrame): Data type for the output Series. If not specified, this will be inferred from `data`. See the :ref:`user guide <basics.dtypes>` for more usages. + name : str, optional + The name to give to the Series. copy : bool, default False Copy input data. """
added names, fastpath parameters explanation to pandas.Series (#<I>)
diff --git a/test/index.js b/test/index.js index <HASH>..<HASH> 100644 --- a/test/index.js +++ b/test/index.js @@ -92,8 +92,20 @@ infos.inbox[1] = infos.tasks[0] infos.inbox[2] = infos.tasks[1] infos.inbox[3] = infos.tasks[1] +console.log(infos) + +assert(infos.inbox[0] === infos.tasks[0]) +assert(infos.inbox[1] === infos.tasks[0]) +assert(infos.inbox[2] === infos.tasks[1]) +assert(infos.inbox[3] === infos.tasks[1]) + var result4 = JSON.stringify(infos, decycle()) console.log(result4) result4 = JSON.parse(result4, retrocycle()) console.log(result4) + +assert(result4.inbox[0] === result4.tasks[0]) +assert(result4.inbox[1] === result4.tasks[0]) +assert(result4.inbox[2] === result4.tasks[1]) +assert(result4.inbox[3] === result4.tasks[1])
add asserts for infos and result4
diff --git a/src/event.js b/src/event.js index <HASH>..<HASH> 100644 --- a/src/event.js +++ b/src/event.js @@ -133,7 +133,7 @@ jQuery.event = { var namespaces = type.split("."); type = namespaces.shift(); var all = !namespaces.length, - namespace = new RegExp("(^|\\.)" + namespaces.slice().sort().join("\\.(?:.*\\.)?") + "(\\.|$)"), + namespace = new RegExp("(^|\\.)" + namespaces.slice(0).sort().join("\\.(?:.*\\.)?") + "(\\.|$)"), special = this.special[ type ] || {}; if ( events[ type ] ) { @@ -291,7 +291,7 @@ jQuery.event = { // Cache this now, all = true means, any handler all = !namespaces.length && !event.exclusive; - var namespace = new RegExp("(^|\\.)" + namespaces.slice().sort().join("\\.(?:.*\\.)?") + "(\\.|$)"); + var namespace = new RegExp("(^|\\.)" + namespaces.slice(0).sort().join("\\.(?:.*\\.)?") + "(\\.|$)"); handlers = ( jQuery.data(this, "events") || {} )[ event.type ];
Re-adding zeros removed from slice calls in last commit.
diff --git a/salt/engines/stalekey.py b/salt/engines/stalekey.py index <HASH>..<HASH> 100644 --- a/salt/engines/stalekey.py +++ b/salt/engines/stalekey.py @@ -51,7 +51,7 @@ def _get_keys(): def start(interval=3600, expire=604800): ck = salt.utils.minions.CkMinions(__opts__) - presence_file = '{0}/minions/presence.p'.format(__opts__['cachedir']) + presence_file = '{0}/presence.p'.format(__opts__['cachedir']) wheel = salt.wheel.WheelClient(__opts__) while True:
Create presence.p directly in cachedir salt-key was stacktracing when finding the presence.p file in /var/cache/salt/master/minions
diff --git a/pydsl/Grammar/Checker.py b/pydsl/Grammar/Checker.py index <HASH>..<HASH> 100644 --- a/pydsl/Grammar/Checker.py +++ b/pydsl/Grammar/Checker.py @@ -65,16 +65,12 @@ class BNFChecker(Checker): def __init__(self, bnf, parser = "auto"): Checker.__init__(self) parser = bnf.options.get("parser",parser) - if parser == "descent": + if parser == "descent" or parser == "auto" or parser == "default": from .Parser.RecursiveDescent import RecursiveDescentParser self.__parser = RecursiveDescentParser(bnf) elif parser == "weighted": self.__parser = WeightedParser(bnf) raise Exception - elif parser == "auto" or parser == "default": - #TODO Guess best parser - from .Parser.Weighted import WeightedParser - self.__parser = WeightedParser(bnf) else: LOG.error("Wrong parser name: " + parser) raise Exception
default parser is recursivedescent
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb index <HASH>..<HASH> 100644 --- a/spec/spec_helper.rb +++ b/spec/spec_helper.rb @@ -17,6 +17,7 @@ Dir[File.join(File.dirname(__FILE__), "support/shared_examples/**/*.rb")].each d end RSpec.configure do |config| + config.color = true config.order = "random" config.disable_monkey_patching! config.filter_run_when_matching :focus
Updated RSpec spec helper to enable color output. - This used to be a global setting that is now configured at the project level. - Provides improved transparency for the project.
diff --git a/spec/api-browser-window-spec.js b/spec/api-browser-window-spec.js index <HASH>..<HASH> 100644 --- a/spec/api-browser-window-spec.js +++ b/spec/api-browser-window-spec.js @@ -1973,12 +1973,12 @@ describe('BrowserWindow module', function () { it('should keep window hidden if already in hidden state', function (done) { w.webContents.once('did-finish-load', function () { - w.setFullScreen(false) - setTimeout(() => { + w.once('leave-full-screen', () => { assert.equal(w.isVisible(), false) assert.equal(w.isFullScreen(), false) done() - }, 1000) + }) + w.setFullScreen(false) }) w.loadURL('about:blank') })
:art: Use leave-full-screen event instead of setTimeout()
diff --git a/lib/qunited/qunit_test_result.rb b/lib/qunited/qunit_test_result.rb index <HASH>..<HASH> 100644 --- a/lib/qunited/qunit_test_result.rb +++ b/lib/qunited/qunit_test_result.rb @@ -74,14 +74,21 @@ module QUnited def self.clean_up_result(test_result) test_result = symbolize_keys(test_result) test_result[:start] = DateTime.parse(test_result[:start]) - test_result[:assertion_data].map! { |data| symbolize_keys data } test_result end - def self.symbolize_keys(hash) - new_hash = {} - hash.keys.each { |key| new_hash[key.to_sym] = hash[key] } - new_hash + def self.symbolize_keys(obj) + case obj + when Hash + obj.inject({}) do |new_hash, (key, value)| + new_hash[key.to_sym] = symbolize_keys(value) + new_hash + end + when Array + obj.map { |x| symbolize_keys(x) } + else + obj + end end end end
Refactor QUnitTestResult.symbolize_keys to change keys in nested hashes & arrays
diff --git a/assess_constraints.py b/assess_constraints.py index <HASH>..<HASH> 100755 --- a/assess_constraints.py +++ b/assess_constraints.py @@ -155,8 +155,7 @@ def assess_instance_type_constraints(client): """Assess deployment with instance-type constraints.""" provider = client.env.config.get('type') if provider not in INSTANCE_TYPES: - raise ValueError('Provider does not implement instance-type ' - 'constraint.') + return for instance_type in INSTANCE_TYPES[provider]: assess_instance_type(client, provider, instance_type) diff --git a/tests/test_assess_constraints.py b/tests/test_assess_constraints.py index <HASH>..<HASH> 100644 --- a/tests/test_assess_constraints.py +++ b/tests/test_assess_constraints.py @@ -124,8 +124,9 @@ class TestAssess(TestCase): def test_instance_type_constraints_missing(self): fake_client = Mock(wraps=fake_juju_client()) - with self.assertRaises(ValueError): + with self.prepare_deploy_mock() as (fake_client, deploy_mock): assess_instance_type_constraints(fake_client) + self.assertFalse(deploy_mock.called) class TestDeploy(TestCase):
Made testing a provider with no instance-types a no-op for assess_instance_type_constraints rather than an error.
diff --git a/cmd/main.js b/cmd/main.js index <HASH>..<HASH> 100755 --- a/cmd/main.js +++ b/cmd/main.js @@ -17,14 +17,15 @@ var req = require('lazreq')({ Installer: '../lib/installer.js', inquirer: 'inquirer', path: 'path', - pipelines: '../lib/pipelines.js' + pipelines: '../lib/pipelines.js', + userHome: 'user-home' }); var Workspace = require("../lib/workspace"); var rc = Workspace.getDappleRC(); if (cli.config || typeof(rc.path) === 'undefined') { - var homeRC = req.path.join(userHome, '.dapplerc'); + var homeRC = req.path.join(req.userHome, '.dapplerc'); var confirmed; var chosen = false; @@ -47,7 +48,7 @@ if (cli.config || typeof(rc.path) === 'undefined') { } else { console.log("No configuration found! Generating..."); - Workspace.writeDappleRC(homeRC, DappleRCPrompter.prompt()); + Workspace.writeDappleRC(homeRC, req.DappleRCPrompter.prompt()); } rc = Workspace.getDappleRC(); }
Fix errors introduced during the lazy-loading rewrite.
diff --git a/src/models/BusinessHourGenerator.js b/src/models/BusinessHourGenerator.js index <HASH>..<HASH> 100644 --- a/src/models/BusinessHourGenerator.js +++ b/src/models/BusinessHourGenerator.js @@ -8,7 +8,7 @@ var BUSINESS_HOUR_EVENT_DEFAULTS = { }; -var BusinessHourGenerator = Class.extend({ +var BusinessHourGenerator = FC.BusinessHourGenerator = Class.extend({ rawComplexDef: null, calendar: null, // for anonymous EventSource
make BusinessHourGenerator public
diff --git a/Classes/Application/FLOW3Distribution.php b/Classes/Application/FLOW3Distribution.php index <HASH>..<HASH> 100644 --- a/Classes/Application/FLOW3Distribution.php +++ b/Classes/Application/FLOW3Distribution.php @@ -123,7 +123,7 @@ class FLOW3Distribution extends \TYPO3\Deploy\Domain\Model\Application { 'Data/*', 'Web/_Resources/*', 'Build/Reports', - 'Cache/', + './Cache', 'Configuration/PackageStates.php' );
[+BUGFIX] Fix inclusion of empty Cache/ directory
diff --git a/tests/QueryBuilderTest.php b/tests/QueryBuilderTest.php index <HASH>..<HASH> 100644 --- a/tests/QueryBuilderTest.php +++ b/tests/QueryBuilderTest.php @@ -361,12 +361,12 @@ class QueryBuilderTest extends \PHPUnit_Extensions_Database_TestCase $this->queryBuilder->select(['value']); - $this->assertEquals(array( - array('value' => 'foo'), - array('value' => 'bar'), - array('value' => 'baz'), - array('value' => 'xyz'), - ), $this->queryBuilder->get_arrays()); + $values = array(); + foreach($testtable as $v){ + $values[] = array('value' => $v['value']); + } + + $this->assertEquals($values, $this->queryBuilder->get_arrays()); $this->queryBuilder->select(['id']);
FIX: unittest for uid as primary key & order values
diff --git a/src/Exscript/protocols/drivers/junos_erx.py b/src/Exscript/protocols/drivers/junos_erx.py index <HASH>..<HASH> 100644 --- a/src/Exscript/protocols/drivers/junos_erx.py +++ b/src/Exscript/protocols/drivers/junos_erx.py @@ -37,7 +37,7 @@ class JunOSERXDriver(Driver): def init_terminal(self, conn): conn.execute('terminal length 0') - conn.execute('terminal width 0') + conn.execute('terminal width 512') def auto_authorize(self, conn, account, flush, bailout): conn.send('enable 15\r')
exscript: Fixed bug: Set terminal width to highest value possible for ERX driver.
diff --git a/lib/api/utils.js b/lib/api/utils.js index <HASH>..<HASH> 100644 --- a/lib/api/utils.js +++ b/lib/api/utils.js @@ -262,7 +262,8 @@ var load = exports.load = function(html, options) { }; var html = exports.html = function(dom) { - if (dom !== undefined) { + if (dom) { + dom = (type(dom) === 'string') ? this(dom) : dom; return $.render(dom); } else if (this._root && this._root.children) { return $.render(this._root.children); @@ -270,7 +271,6 @@ var html = exports.html = function(dom) { return ''; } }; - // TODO: Add me to .html above var tidy = exports.tidy = function(dom) { if (dom !== undefined) {
$.html(selector) now works to select outer html
diff --git a/lib/Models/getAncestors.js b/lib/Models/getAncestors.js index <HASH>..<HASH> 100644 --- a/lib/Models/getAncestors.js +++ b/lib/Models/getAncestors.js @@ -10,10 +10,13 @@ var defined = require('terriajs-cesium/Source/Core/defined'); * @return {CatalogMember[]} The members' ancestors in its parent tree, starting at the top, not including this member. */ function getAncestors(member) { - if (defined(member.parent) && defined(member.parent.parent)) { - return getAncestors(member.parent).concat([member.parent]); + var parent = member.parent; + var ancestors = []; + while (defined(parent) && defined(parent.parent)) { + ancestors = [parent].concat(ancestors); + parent = parent.parent; } - return []; + return ancestors; } module.exports = getAncestors;
replace recursive getAncestors with loop
diff --git a/src/spec/integration/links/export_release_spec.rb b/src/spec/integration/links/export_release_spec.rb index <HASH>..<HASH> 100644 --- a/src/spec/integration/links/export_release_spec.rb +++ b/src/spec/integration/links/export_release_spec.rb @@ -101,7 +101,6 @@ describe 'exporting release with templates that have links', type: :integration expect(out).to match(%r{Compiling packages: pkg_2\/[a-f0-9]+}) expect(out).to match(%r{Compiling packages: pkg_3_depends_on_2\/[a-f0-9]+}) expect(out).to match(%r{copying packages: pkg_1\/[a-f0-9]+}) - expect(out).to match(%r{copying packages: pkg_2\/[a-f0-9]+}) expect(out).to match(%r{copying packages: pkg_3_depends_on_2\/[a-f0-9]+}) expect(out).to match(%r{copying jobs: addon\/[a-f0-9]+}) expect(out).to match(%r{copying jobs: api_server\/[a-f0-9]+})
Update integration tests to match new export-release behavior
diff --git a/lib/table_setter/command.rb b/lib/table_setter/command.rb index <HASH>..<HASH> 100644 --- a/lib/table_setter/command.rb +++ b/lib/table_setter/command.rb @@ -24,7 +24,7 @@ options: def initialize @prefix = "" parse_options - @prefix = "/#{@prefix}/".gsub(/^\/\//, "") + @prefix = "/#{@prefix}/".gsub(/^\/\//, "/") command = ARGV.shift @directory = ARGV.shift || '.' TableSetter.configure @directory @@ -73,8 +73,9 @@ options: end def build_rack + prefix = @prefix Rack::Builder.app do - map "/#{@prefix}" do + map prefix do use Rack::CommonLogger, STDERR use Rack::ShowExceptions use Rack::Lint
prefix for rack works now as well
diff --git a/lib/devise-authy/controllers/helpers.rb b/lib/devise-authy/controllers/helpers.rb index <HASH>..<HASH> 100644 --- a/lib/devise-authy/controllers/helpers.rb +++ b/lib/devise-authy/controllers/helpers.rb @@ -11,7 +11,8 @@ module DeviseAuthy def remember_device cookies.signed[:remember_device] = { :value => Time.now.to_i, - :secure => !(Rails.env.test? || Rails.env.development?) + :secure => !(Rails.env.test? || Rails.env.development?), + :expires => resource_class.authy_remember_device.from_now } end
add expires to cookie remember_device, otherwise the cookie expires when the browser session ends
diff --git a/src/commons/org/codehaus/groovy/grails/commons/spring/DefaultBeanConfiguration.java b/src/commons/org/codehaus/groovy/grails/commons/spring/DefaultBeanConfiguration.java index <HASH>..<HASH> 100644 --- a/src/commons/org/codehaus/groovy/grails/commons/spring/DefaultBeanConfiguration.java +++ b/src/commons/org/codehaus/groovy/grails/commons/spring/DefaultBeanConfiguration.java @@ -202,18 +202,16 @@ public class DefaultBeanConfiguration extends GroovyObjectSupport implements Bea else { bd = new ChildBeanDefinition(parentName,clazz,cav, null); } - bd.setSingleton(singleton); } else { if(parentName == null) { - bd = new RootBeanDefinition(clazz,singleton); + bd = new RootBeanDefinition(clazz); } else { bd = new ChildBeanDefinition(parentName,clazz, null,null); - bd.setSingleton(singleton); } - } + bd.setScope(singleton ? AbstractBeanDefinition.SCOPE_SINGLETON : AbstractBeanDefinition.SCOPE_PROTOTYPE); wrapper = new BeanWrapperImpl(bd); return bd; }
Fix for some deprecation warnings which appeared after upgrade to Spring <I>. git-svn-id: <URL>
diff --git a/lib/bot.js b/lib/bot.js index <HASH>..<HASH> 100644 --- a/lib/bot.js +++ b/lib/bot.js @@ -108,14 +108,22 @@ Bot.prototype = { continueParams = { continue: '' }; let titles, pageids = params.pageids; if ( params.titles ) { - if ( params.titles.length === 0 ) { delete params.titles; } - else { titles = params.titles; } + if ( params.titles.length === 0 ) { + delete params.titles; + } + else { + titles = params.titles; + } } if ( params.pageids ) { - if ( params.pageids.length === 0 ) { delete params.pageids; } + if ( params.pageids.length === 0 ) { + delete params.pageids; + } else { pageids = params.pageids; - if ( titles ) { delete params.pageids; } + if ( titles ) { + delete params.pageids; + } } }
Work on linting issues.
diff --git a/lib/ohm.rb b/lib/ohm.rb index <HASH>..<HASH> 100644 --- a/lib/ohm.rb +++ b/lib/ohm.rb @@ -105,9 +105,10 @@ module Ohm self << model end - def sort(options = {}) + def sort(_options = {}) return [] unless key.exists + options = _options.dup options[:start] ||= 0 options[:limit] = [options[:start], options[:limit]] if options[:limit] @@ -124,9 +125,10 @@ module Ohm # user = User.all.sort_by(:name, :order => "ALPHA").first # user.name == "A" # # => true - def sort_by(att, options = {}) + def sort_by(att, _options = {}) return [] unless key.exists + options = _options.dup options.merge!(:by => model.key["*->#{att}"]) if options[:get] @@ -195,7 +197,8 @@ module Ohm apply(:sdiffstore, key, source, target) end - def first(options = {}) + def first(_options = {}) + options = _options.dup options.merge!(:limit => 1) if options[:by]
Avoid overriding the passed options.
diff --git a/test/dhis2_test.rb b/test/dhis2_test.rb index <HASH>..<HASH> 100644 --- a/test/dhis2_test.rb +++ b/test/dhis2_test.rb @@ -14,8 +14,19 @@ class Dhis2Test < Minitest::Test assert_equal 50, org_units.size end + def test_get_org_units_all_fields + org_units = Dhis2.org_units(fields: [":all"], page_size: 1) + assert_equal 1, org_units.size + org_unit = org_units.first + + refute_nil org_unit.level + refute_nil org_unit.shortName + refute_nil org_unit.shortName + refute_nil org_unit.lastUpdated + end + def test_get_data_elements - data_elements = Dhis2.data_elements(fields: %w(id displayName code), page_size: 1 ) + data_elements = Dhis2.data_elements(fields: %w(id displayName code), page_size: 1) assert_equal 1, data_elements.size data_element = data_elements.first @@ -44,7 +55,6 @@ class Dhis2Test < Minitest::Test refute_nil data_element.display_name refute_nil data_element.id refute_nil data_element.code - refute_nil data_element.shortName end def test_get_org_units_pagination
Add a test requesting all fields
diff --git a/lib/blazing/cli/create.rb b/lib/blazing/cli/create.rb index <HASH>..<HASH> 100644 --- a/lib/blazing/cli/create.rb +++ b/lib/blazing/cli/create.rb @@ -7,7 +7,7 @@ module Blazing include Thor::Actions argument :repository - argument :remote + argument :target def self.source_root File.dirname(__FILE__) @@ -25,4 +25,4 @@ module Blazing end end -end \ No newline at end of file +end
remote has been renamed to target
diff --git a/src/RoundingMode.php b/src/RoundingMode.php index <HASH>..<HASH> 100644 --- a/src/RoundingMode.php +++ b/src/RoundingMode.php @@ -17,6 +17,8 @@ final class RoundingMode { /** * Private constructor. This class is not instantiable. + * + * @codeCoverageIgnore */ private function __construct() {
Ignore code coverage on private constructor for non-instantiable class
diff --git a/pushtx/broadcaster.go b/pushtx/broadcaster.go index <HASH>..<HASH> 100644 --- a/pushtx/broadcaster.go +++ b/pushtx/broadcaster.go @@ -137,7 +137,7 @@ func (b *Broadcaster) broadcastHandler(sub *blockntfns.Subscription) { // new goroutine to exectue a rebroadcast. case <-rebroadcastSem: default: - log.Debugf("Existing rebroadcast still in " + + log.Tracef("Existing rebroadcast still in " + "progress") return }
pushtx: demote existing rebroadcast log to trace It would log on every block, which during initial sync would fill the logs.
diff --git a/molgenis-data/src/main/java/org/molgenis/data/util/UniqueId.java b/molgenis-data/src/main/java/org/molgenis/data/util/UniqueId.java index <HASH>..<HASH> 100644 --- a/molgenis-data/src/main/java/org/molgenis/data/util/UniqueId.java +++ b/molgenis-data/src/main/java/org/molgenis/data/util/UniqueId.java @@ -22,8 +22,6 @@ public class UniqueId { (byte) ((CLOCK_SEQ_AND_NODE >> 8) & 0xff), (byte) ((CLOCK_SEQ_AND_NODE) & 0xff), }; - private final ThreadLocal<ByteBuffer> tlbb = - ThreadLocal.withInitial(() -> ByteBuffer.allocate(16)); private volatile int seq; private volatile long lastTimestamp; @@ -44,8 +42,7 @@ public class UniqueId { seq = 0; } seq++; - ByteBuffer bb = tlbb.get(); - bb.rewind(); + ByteBuffer bb = ByteBuffer.allocate(16); bb.putLong(time); bb.put(NODE); bb.putShort((short) seq);
Fix squid:S<I> ThreadLocal memory leak in UniqueId (#<I>)
diff --git a/src/main/org/codehaus/groovy/reflection/CachedClass.java b/src/main/org/codehaus/groovy/reflection/CachedClass.java index <HASH>..<HASH> 100644 --- a/src/main/org/codehaus/groovy/reflection/CachedClass.java +++ b/src/main/org/codehaus/groovy/reflection/CachedClass.java @@ -281,16 +281,14 @@ public class CachedClass { } public int getSuperClassDistance() { - synchronized (getTheClass()) { - if (distance == -1) { - int distance = 0; - for (Class klazz= getTheClass(); klazz != null; klazz = klazz.getSuperclass()) { - distance++; - } - this.distance = distance; - } - return distance; + if (distance>=0) return distance; + + int distance = 0; + for (Class klazz= getTheClass(); klazz != null; klazz = klazz.getSuperclass()) { + distance++; } + this.distance = distance; + return distance; } public int hashCode() {
remove unneeded synchronization. It is a racy single-check, but ok in this case
diff --git a/test/test_peerassets.py b/test/test_peerassets.py index <HASH>..<HASH> 100644 --- a/test/test_peerassets.py +++ b/test/test_peerassets.py @@ -24,7 +24,6 @@ def test_find_deck(prov): 'network': 'peercoin-testnet', 'number_of_decimals': 2, 'production': True, - 'testnet': True, 'version': 1, 'tx_confirmations': 100 }
test_peerassets::deck does no handle testnet boolean anymore
diff --git a/vendor/k8s.io/kubernetes/plugin/pkg/scheduler/factory/factory.go b/vendor/k8s.io/kubernetes/plugin/pkg/scheduler/factory/factory.go index <HASH>..<HASH> 100644 --- a/vendor/k8s.io/kubernetes/plugin/pkg/scheduler/factory/factory.go +++ b/vendor/k8s.io/kubernetes/plugin/pkg/scheduler/factory/factory.go @@ -635,7 +635,11 @@ func (factory *ConfigFactory) MakeDefaultErrorFunc(backoff *util.PodBackoff, pod if err == core.ErrNoNodesAvailable { glog.V(4).Infof("Unable to schedule %v %v: no nodes are registered to the cluster; waiting", pod.Namespace, pod.Name) } else { - glog.Errorf("Error scheduling %v %v: %v; retrying", pod.Namespace, pod.Name, err) + if _, ok := err.(*core.FitError); ok { + glog.V(4).Infof("Unable to schedule %v %v: no fit: %v; waiting", pod.Namespace, pod.Name, err) + } else { + glog.Errorf("Error scheduling %v %v: %v; retrying", pod.Namespace, pod.Name, err) + } } backoff.Gc() // Retry asynchronously.
UPSTREAM: <I>: scheduler should not log an error when no fit
diff --git a/index.js b/index.js index <HASH>..<HASH> 100644 --- a/index.js +++ b/index.js @@ -123,6 +123,12 @@ function writeAndRunCodeBlocks(codeBlocks) { } }); }); + }) + .then(function(codeBlocks) { + return removeOldDir(dir) + .then(function() {; + return codeBlocks + }); }); }
fix: remove old directory from running markdown This was forgotten in a previous change.
diff --git a/doctr/__main__.py b/doctr/__main__.py index <HASH>..<HASH> 100644 --- a/doctr/__main__.py +++ b/doctr/__main__.py @@ -13,7 +13,7 @@ which you should insert into your .travis.yml. Then, on Travis, for the build where you build your docs, add - - doctr deploy . --built-docs path/to/built/html/ + - ``doctr deploy . --built-docs path/to/built/html/`` to the end of the build to deploy the docs to GitHub pages. This will only run on the master branch, and won't run on pull requests.
Render code in docs properly
diff --git a/settings.js b/settings.js index <HASH>..<HASH> 100644 --- a/settings.js +++ b/settings.js @@ -29,6 +29,8 @@ var fs = require('fs'); var path = require('path'); var extend = require('xtend'); +var chalk = require('chalk'); + var utilities = require('./lib/utilities.js'); var settings = { @@ -192,6 +194,11 @@ settings.transitionSparkProfiles = function() { if (fs.existsSync(sparkDir) && !fs.existsSync(particleDir)) { fs.mkdirSync(particleDir); + console.log(); + console.log(chalk.yellow('!!!'), "I detected a Spark profile directory, and will now migrate your settings."); + console.log(chalk.yellow('!!!'), "This will only happen once, since you previously used our Spark-CLI tools."); + console.log(); + var files = fs.readdirSync(sparkDir); files.forEach(function (filename) { var data = fs.readFileSync(path.join(sparkDir, filename));
Add warning when migrating .spark to .particle
diff --git a/cwltool/docker.py b/cwltool/docker.py index <HASH>..<HASH> 100644 --- a/cwltool/docker.py +++ b/cwltool/docker.py @@ -229,6 +229,8 @@ class DockerCommandLineJob(ContainerCommandLineJob): if host_outdir_tgt: # shortcut, just copy to the output directory # which is already going to be mounted + if not os.path.exists(os.path.dirname(host_outdir_tgt)): + os.makedirs(os.path.dirname(host_outdir_tgt)) shutil.copy(volume.resolved, host_outdir_tgt) else: tmp_dir, tmp_prefix = os.path.split(tmpdir_prefix)
Ensure subdirectory exists for staging (#<I>)