diff
stringlengths
65
26.7k
message
stringlengths
7
9.92k
diff --git a/index.js b/index.js index <HASH>..<HASH> 100644 --- a/index.js +++ b/index.js @@ -75,10 +75,14 @@ module.exports = function(service, dcPath, app) { * last, and that allows us to override everything: */ - addFileIfExists('docker-compose.yml', command); - addFileIfExists('docker-compose.dcr.yml', command) - addFileIfExists('docker-compose.' + service + '.yml', command); - addFileIfExists('docker-compose.' + process.env.DCR_ENVIRONMENT + '.yml', command); + [ + 'docker-compose.yml', + 'docker-compose.dcr.yml', + 'docker-compose.' + service + '.yml', + 'docker-compose.' + process.env.DCR_ENVIRONMENT + '.yml' + ].map(function(fileName) { + addFileIfExists(fileName, command); + }); } /**
[#<I>] use map for list of files to try to load
diff --git a/tests/calculators/hazard/classical/post_processing_test.py b/tests/calculators/hazard/classical/post_processing_test.py index <HASH>..<HASH> 100644 --- a/tests/calculators/hazard/classical/post_processing_test.py +++ b/tests/calculators/hazard/classical/post_processing_test.py @@ -31,8 +31,10 @@ import numpy import random import unittest +from tests.utils import helpers from tests.utils.helpers import random_location_generator +from openquake.db import models from openquake.calculators.hazard.classical.post_processing import ( setup_tasks, mean_curves, quantile_curves, persite_result_decorator, mean_curves_weighted, quantile_curves_weighted, compute_hazard_map) @@ -473,3 +475,10 @@ class HazardMapsTestCase(unittest.TestCase): actual = compute_hazard_maps(curves, imls, poes) aaae(expected, actual) + + def test_do_hazard_map_post_process(self): + cfg = helpers.get_data_path( + 'calculators/hazard/classical/haz_map_test_job.ini') + helpers.run_hazard_job(cfg) + + # TODO: verify hazard maps
tests/calcs/hazard/classical/post_processing_test: Stubbed the basic test run for exercising hazard map creation. The hazard maps are created, but we still test the output values. Former-commit-id: c8b1e<I>fceb4f<I>ca<I>aa4bbb0dd4c<I>c3c
diff --git a/Tone/source/PulseOscillator.js b/Tone/source/PulseOscillator.js index <HASH>..<HASH> 100644 --- a/Tone/source/PulseOscillator.js +++ b/Tone/source/PulseOscillator.js @@ -34,7 +34,7 @@ define(["../core/Tone", "../source/Source", "../source/Oscillator", * @type {Tone.Gain} * @private */ - this._widthGate = new Tone.Gain(); + this._widthGate = new Tone.Gain(0); /** * the sawtooth oscillator
setting gain to 0 initially keeps it from popping
diff --git a/src/Console/Command/Task/PluginTask.php b/src/Console/Command/Task/PluginTask.php index <HASH>..<HASH> 100644 --- a/src/Console/Command/Task/PluginTask.php +++ b/src/Console/Command/Task/PluginTask.php @@ -47,7 +47,7 @@ class PluginTask extends BakeTask { */ public function initialize() { $this->path = current(App::path('Plugin')); - $this->bootstrap = ROOT . 'config' . DS . 'bootstrap.php'; + $this->bootstrap = ROOT . DS . 'config' . DS . 'bootstrap.php'; } /**
Fix bootstrap file path in PluginTask.
diff --git a/src/Post_Meta_Command.php b/src/Post_Meta_Command.php index <HASH>..<HASH> 100644 --- a/src/Post_Meta_Command.php +++ b/src/Post_Meta_Command.php @@ -1,7 +1,7 @@ <?php /** - * Manage post custom fields. + * Adds, updates, deletes, and lists post custom fields. * * ## EXAMPLES *
Use a more descriptive base summary for what actions are possible for post meta subcommands.
diff --git a/lib/resque/errors.rb b/lib/resque/errors.rb index <HASH>..<HASH> 100644 --- a/lib/resque/errors.rb +++ b/lib/resque/errors.rb @@ -4,4 +4,7 @@ module Resque # Raised when trying to create a job without a class class NoClassError < RuntimeError; end + + # Raised when a worker was killed while processing a job. + class DirtyExit < RuntimeError; end end diff --git a/lib/resque/worker.rb b/lib/resque/worker.rb index <HASH>..<HASH> 100644 --- a/lib/resque/worker.rb +++ b/lib/resque/worker.rb @@ -326,6 +326,15 @@ module Resque # Unregisters ourself as a worker. Useful when shutting down. def unregister_worker + # If we're still processing a job, make sure it gets logged as a + # failure. + if job + # Ensure the proper worker is attached to this job, even if + # it's not the precise instance that died. + job.worker = self + job.fail(DirtyExit.new) + end + redis.srem(:workers, self) redis.del("worker:#{self}") redis.del("worker:#{self}:started")
Ensure a terminated worker's job is put in the error queue.
diff --git a/src/Shortcodes/FileLink.php b/src/Shortcodes/FileLink.php index <HASH>..<HASH> 100644 --- a/src/Shortcodes/FileLink.php +++ b/src/Shortcodes/FileLink.php @@ -27,4 +27,12 @@ class FileLink extends DataObject 'Parent' => DataObject::class, 'Linked' => File::class, ]; + + /** + * Don't show this model in campaign admin as part of implicit change sets + * + * @config + * @var bool + */ + private static $hide_in_campaigns = true; }
Do not display records with `hide_in_campaigns` config set to `true` in the `Used On` table of files
diff --git a/holoviews/core/__init__.py b/holoviews/core/__init__.py index <HASH>..<HASH> 100644 --- a/holoviews/core/__init__.py +++ b/holoviews/core/__init__.py @@ -14,6 +14,6 @@ def public(obj): return any([issubclass(obj, bc) for bc in baseclasses]) _public = list(set([_k for _k, _v in locals().items() if public(_v)])) -__all__ = _public + ["boundingregion", "dimension", "holoview", "layer", - "layout", "operation", "options", "sheetcoords", "viewmap"] +__all__ = _public + ["boundingregion", "dimension", "layer", "layout", + "ndmapping", "operation", "options", "sheetcoords", "view"]
Removed import of core.holoview
diff --git a/kerncraft/iaca.py b/kerncraft/iaca.py index <HASH>..<HASH> 100755 --- a/kerncraft/iaca.py +++ b/kerncraft/iaca.py @@ -77,7 +77,7 @@ def find_asm_blocks(asm_lines): m.group('idx'), int(m.group('scale')) if m.group('scale') else 1)) - if re.match(r"^[v]?(mul|add|sub|div)[h]?p[ds]", line): + if re.match(r"^[v]?(mul|add|sub|div|fmadd(132|213)?)[h]?p[ds]", line): if line.startswith('v'): avx_ctr += 1 packed_ctr += 1
added support for FMA in block stats
diff --git a/napalm_base/utils/string_parsers.py b/napalm_base/utils/string_parsers.py index <HASH>..<HASH> 100644 --- a/napalm_base/utils/string_parsers.py +++ b/napalm_base/utils/string_parsers.py @@ -94,7 +94,7 @@ def convert_uptime_string_seconds(uptime): uptime_seconds = 0 for unit, value in uptime_dict.items(): - if value != None: + if value is not None: if unit == 'weeks': uptime_seconds += int(value) * 604800 elif unit == 'days':
Fixing minor pylama issue
diff --git a/spyder/plugins/console/plugin.py b/spyder/plugins/console/plugin.py index <HASH>..<HASH> 100644 --- a/spyder/plugins/console/plugin.py +++ b/spyder/plugins/console/plugin.py @@ -216,6 +216,7 @@ class Console(SpyderPluginWidget): if is_pyls_error: title = "Internal Python Language Server error" self.error_dlg.set_title(title) + self.error_dlg.title.setEnabled(False) self.error_dlg.append_traceback(text) self.error_dlg.show() elif DEV or get_debug_level():
Error dialog: Disable title when reporting PyLS errors
diff --git a/salt/grains/core.py b/salt/grains/core.py index <HASH>..<HASH> 100644 --- a/salt/grains/core.py +++ b/salt/grains/core.py @@ -165,7 +165,7 @@ def _linux_gpu_data(): devs = [] try: - lspci_out = __salt__['cmd.run']('lspci -vmm') + lspci_out = __salt__['cmd.run']('{0} -vmm'.format(lspci)) cur_dev = {} error = False @@ -510,7 +510,7 @@ def _virtual(osdata): if not cmd: continue - cmd = '{0} {1}'.format(command, ' '.join(args)) + cmd = '{0} {1}'.format(cmd, ' '.join(args)) ret = __salt__['cmd.run_all'](cmd)
Use path found by salt.utils.which I ran into this problem running `salt-ssh '*' test.ping` with a XenServer <I> node as the target. Even though the `lspci` and `dmidecode` (after I installed it) commands are found by `salt.utils.which`, because they're not actually in the `$PATH`, they fail to execute.
diff --git a/docs/conf.py b/docs/conf.py index <HASH>..<HASH> 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -79,7 +79,7 @@ master_doc = 'index' # General information about the project. project = u'cookiecutter' -copyright = u'2013-2015, Audrey Roy' +copyright = u'2013-2016, Audrey Roy' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -292,7 +292,7 @@ texinfo_documents = [ epub_title = u'cookiecutter' epub_author = u'Audrey Roy' epub_publisher = u'Audrey Roy' -epub_copyright = u'2013-2015, Audrey Roy' +epub_copyright = u'2013-2016, Audrey Roy' # The language of the text. It defaults to the language option # or en if the language is not set.
Updated copyright on docs from <I>-<I> to <I>-<I> per #<I>.
diff --git a/code/Calendar.php b/code/Calendar.php index <HASH>..<HASH> 100755 --- a/code/Calendar.php +++ b/code/Calendar.php @@ -661,7 +661,8 @@ class Calendar_Controller extends Page_Controller { $xml = trim($xml); HTTP::add_cache_headers(); $this->getResponse()->addHeader('Content-Type', 'application/rss+xml'); - echo $xml; + $this->getResponse()->setBody($xml); + return $this->getResponse(); } public function monthjson(SS_HTTPRequest $r) {
Set headers correctly on RSS feed In order for the headers to be set correctly, the response needs to be returned from the function rather than the raw content.
diff --git a/tools/repl.js b/tools/repl.js index <HASH>..<HASH> 100644 --- a/tools/repl.js +++ b/tools/repl.js @@ -17,6 +17,7 @@ var RapydScript = (typeof create_rapydscript_compiler === 'function') ? create_r function create_ctx(baselib, show_js, console) { var ctx = vm.createContext({'console':console, 'show_js': !!show_js, 'RapydScript':RapydScript, 'require':require}); vm.runInContext(baselib, ctx, {'filename':'baselib-plain-pretty.js'}); + vm.runInContext('var __name__ = "__repl__";', ctx); RapydScript.AST_Node.warn_function = function() {}; return ctx; }
Fix default __repr__ not working in REPL
diff --git a/wffweb/src/main/java/com/webfirmframework/wffweb/server/page/js/WffJsFile.java b/wffweb/src/main/java/com/webfirmframework/wffweb/server/page/js/WffJsFile.java index <HASH>..<HASH> 100644 --- a/wffweb/src/main/java/com/webfirmframework/wffweb/server/page/js/WffJsFile.java +++ b/wffweb/src/main/java/com/webfirmframework/wffweb/server/page/js/WffJsFile.java @@ -80,7 +80,7 @@ public enum WffJsFile { private static volatile int variableId = 0; private static String[][] minifiableParts = { { "else {", "else{" }, - { "} else", "}else" }, { "if (", "if(" }, }; + { "} else", "}else" }, { "if (", "if(" }, { ") {", "){" } }; private static final String HEART_BEAT_JS = "setInterval(function(){try{wffWS.send([]);}catch(e){wffWS.closeSocket();}},\"${HEARTBEAT_INTERVAL}\");";
Minor optimizations -- Improved js minification
diff --git a/lxd/storage/drivers/utils.go b/lxd/storage/drivers/utils.go index <HASH>..<HASH> 100644 --- a/lxd/storage/drivers/utils.go +++ b/lxd/storage/drivers/utils.go @@ -295,17 +295,12 @@ func deleteParentSnapshotDirIfEmpty(poolName string, volType VolumeType, volName // ensureSparseFile creates a sparse empty file at specified location with specified size. // If the path already exists, the file is truncated to the requested size. func ensureSparseFile(filePath string, sizeBytes int64) error { - f, err := os.Create(filePath) + f, err := os.OpenFile(filePath, os.O_RDWR|os.O_CREATE, 0600) if err != nil { return errors.Wrapf(err, "Failed to open %s", filePath) } defer f.Close() - err = f.Chmod(0600) - if err != nil { - return errors.Wrapf(err, "Failed to chmod %s", filePath) - } - err = f.Truncate(sizeBytes) if err != nil { return errors.Wrapf(err, "Failed to create sparse file %s", filePath)
lxd/storage: Fix regression in truncate handling
diff --git a/lib/roo/open_office.rb b/lib/roo/open_office.rb index <HASH>..<HASH> 100644 --- a/lib/roo/open_office.rb +++ b/lib/roo/open_office.rb @@ -13,9 +13,7 @@ class Roo::OpenOffice < Roo::Base def process_zipfile(tmpdir, zip, path='') if zip.file.file? path if path == "content.xml" - open(File.join(tmpdir, 'roo_content.xml'),'wb') {|f| - f << zip.read(path) - } + File.write(File.join(tmpdir, 'roo_content.xml'), zip.read(path), mode: 'wb') end else unless path.empty?
Write the OpenOffice content file in one call.
diff --git a/handler/src/main/java/io/netty/handler/ssl/ReferenceCountedOpenSslEngine.java b/handler/src/main/java/io/netty/handler/ssl/ReferenceCountedOpenSslEngine.java index <HASH>..<HASH> 100644 --- a/handler/src/main/java/io/netty/handler/ssl/ReferenceCountedOpenSslEngine.java +++ b/handler/src/main/java/io/netty/handler/ssl/ReferenceCountedOpenSslEngine.java @@ -235,7 +235,6 @@ public class ReferenceCountedOpenSslEngine extends SSLEngine implements Referenc } finally { readerLock.unlock(); } - ssl = SSL.newSSL(context.ctx, !context.isClient()); try { networkBIO = SSL.bioNewByteBuffer(ssl, context.getBioNonApplicationBufferSize());
Correct merge error from f7b3caeddc5bb1da<I>aaafa4a<I>dec<I>ed<I>d<I>
diff --git a/ara/clients/offline.py b/ara/clients/offline.py index <HASH>..<HASH> 100644 --- a/ara/clients/offline.py +++ b/ara/clients/offline.py @@ -27,10 +27,10 @@ from django.core.management import execute_from_command_line from django.test import Client -class OfflineClient(object): +class AraOfflineClient(object): def __init__(self): - self.client = self.bootstrap_django_client() self.log = logging.getLogger('ara.clients.offline') + self.client = self._bootstrap_django_client() def _bootstrap_django_client(self): self.log.debug('Bootstrapping Django offline client')
Rename offline client class and fix django bootstrap Wrong method name was used when bootstrapping django. Change-Id: Iee<I>e<I>daaa7e<I>fb<I>f4da<I>a8
diff --git a/plexapi/playlist.py b/plexapi/playlist.py index <HASH>..<HASH> 100644 --- a/plexapi/playlist.py +++ b/plexapi/playlist.py @@ -253,15 +253,13 @@ class Playlist(PlexPartialObject, Playable, ArtMixin, PosterMixin): return cls(server, data, initpath=key) def copyToUser(self, user): - """ Copy playlist to another user account. """ - from plexapi.server import PlexServer - myplex = self._server.myPlexAccount() - user = myplex.user(user) - # Get the token for your machine. - token = user.get_token(self._server.machineIdentifier) - # Login to your server using your friends credentials. - user_server = PlexServer(self._server._baseurl, token) - return self.create(user_server, self.title, self.items()) + """ Copy playlist to another user account. + + Parameters: + user (str): Username, email or user id of the user to copy the playlist to. + """ + userServer = self._server.switchUser(user) + return self.create(userServer, self.title, self.items()) def sync(self, videoQuality=None, photoResolution=None, audioBitrate=None, client=None, clientId=None, limit=None, unwatched=False, title=None):
Update Playlist.copyToUser with switchUser
diff --git a/commands/command_pre_push.go b/commands/command_pre_push.go index <HASH>..<HASH> 100644 --- a/commands/command_pre_push.go +++ b/commands/command_pre_push.go @@ -7,6 +7,7 @@ import ( "github.com/github/git-lfs/git" "github.com/github/git-lfs/lfs" + "github.com/rubyist/tracerx" "github.com/spf13/cobra" ) @@ -66,6 +67,8 @@ func prePushCommand(cmd *cobra.Command, args []string) { continue } + tracerx.Printf("pre-push: %s", line) + left, right := decodeRefs(line) if left == prePushDeleteBranch { continue
trace the refs and shas given to pre-push
diff --git a/lib/synvert/core/node_ext.rb b/lib/synvert/core/node_ext.rb index <HASH>..<HASH> 100644 --- a/lib/synvert/core/node_ext.rb +++ b/lib/synvert/core/node_ext.rb @@ -387,7 +387,9 @@ module Parser::AST end when String if Parser::AST::Node === actual - actual.to_source == expected || actual.to_source[1...-1] == expected + actual.to_source == expected || + (actual.to_source[0] == ':' && actual.to_source[1..-1] == expected) || + actual.to_source[1...-1] == expected else actual.to_s == expected end
fix match_value between symbol and string
diff --git a/activesupport/test/core_ext/enumerable_test.rb b/activesupport/test/core_ext/enumerable_test.rb index <HASH>..<HASH> 100644 --- a/activesupport/test/core_ext/enumerable_test.rb +++ b/activesupport/test/core_ext/enumerable_test.rb @@ -24,7 +24,7 @@ class EnumerableTests < ActiveSupport::TestCase def test_group_by names = %w(marcel sam david jeremy) klass = Struct.new(:name) - objects = (1..50).inject([]) do |people,| + objects = (1..50).inject([]) do |people| p = klass.new p.name = names.sort_by { rand }.first people << p
Removed unused comma after loop variable
diff --git a/shinken/modules/livestatus_broker/mapping.py b/shinken/modules/livestatus_broker/mapping.py index <HASH>..<HASH> 100644 --- a/shinken/modules/livestatus_broker/mapping.py +++ b/shinken/modules/livestatus_broker/mapping.py @@ -1557,7 +1557,7 @@ livestatus_attribute_map = { }, 'alias': { 'description': 'An alias of the service group', - 'function': lambda item, req: getattr(item, 'alias', 'will_be_fixed_soon'), # REPAIRME in test_livestatus there is an alias. in multisite there is none (Service has no attr. alias. hahaha liar) + 'function': lambda item, req: item.alias, }, 'members': { 'description': 'A list of all members of the service group as host/service pairs',
Remove the servicegroup alias workaround in livestatus
diff --git a/docs/conf.py b/docs/conf.py index <HASH>..<HASH> 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -31,6 +31,7 @@ MOCK_MODULES = [ 'larcc.MAP', 'PyQt4', 'PyQt4.QtCore', 'PyQt4.QtGui', 'web', 'lar2psm', 'scipy.ndimage.measurements', 'lar', 'extern.lar', 'splines', 'scipy.sparse', 'skimage.filter', 'mapper', 'skelet3d', 'numpy.core', + 'skimage.filters', 'lbpLibrary', 'skimage.exposure', 'PyQt4.QVTKRenderWindowInteractor', 'matplotlib.backends', 'matplotlib.backends.backend_qt4agg', 'numpy.linalg', 'PyQt4.Qt', 'matplotlib.figure', 'skimage.morphology', 'gtk',
mock skimage.filters
diff --git a/lib/sanford/version.rb b/lib/sanford/version.rb index <HASH>..<HASH> 100644 --- a/lib/sanford/version.rb +++ b/lib/sanford/version.rb @@ -1,3 +1,3 @@ module Sanford - VERSION = "0.6.4" + VERSION = "0.6.5" end
<I> * Don't re-daemonize the process when restarting (#<I>)
diff --git a/Resources/Public/JavaScript/Libs/jquery.responsiveimages.js b/Resources/Public/JavaScript/Libs/jquery.responsiveimages.js index <HASH>..<HASH> 100644 --- a/Resources/Public/JavaScript/Libs/jquery.responsiveimages.js +++ b/Resources/Public/JavaScript/Libs/jquery.responsiveimages.js @@ -14,6 +14,7 @@ this.$element = $(element); this.options = $.extend({}, ResponsiveImage.DEFAULTS, options); this.attrib = "src"; + this.loaded = false; this.checkviewport(); }; @@ -26,7 +27,6 @@ 1200: 'bigger' }, attrib: "src", - container: window, skip_invisible: false, preload: false }; @@ -52,8 +52,11 @@ }); if (old_attrib !== attrib) { this.attrib = attrib; + this.loaded = false; } - this.unveil(); + if (!this.loaded){ + this.unveil(); + } }; ResponsiveImage.prototype.boundingbox = function() { @@ -79,6 +82,7 @@ this.$element.attr("src", source); this.$element.css("opacity", 1); $(window).trigger('loaded.bk2k.responsiveimage'); + this.loaded = true; } } };
Update jquery.responsiveimages.js Use loaded flag to call unveil only when realy needed. Fix container option no more needed.
diff --git a/src/remote.js b/src/remote.js index <HASH>..<HASH> 100644 --- a/src/remote.js +++ b/src/remote.js @@ -65,8 +65,8 @@ module.exports = { r.lambda = userResults[0].data.lambda; r.Eg2 = n.rep([firstUserResult.numFeatures], 0); r.EdW = n.rep([firstUserResult.numFeatures], 0); - r.rho = 0.99; // watch out for this and the eps - r.eps = 1e-4; + r.rho = 0.5; // watch out for this and the eps + r.eps = 0.1; r.deltaW = 0; r.iteration = 1; return r;
fix rho/eps for convergence
diff --git a/packages/server/lib/modes/run.js b/packages/server/lib/modes/run.js index <HASH>..<HASH> 100644 --- a/packages/server/lib/modes/run.js +++ b/packages/server/lib/modes/run.js @@ -660,6 +660,7 @@ const getVideoRecordingDelay = function (startedVideoCapture) { const maybeStartVideoRecording = Promise.method(function (options = {}) { const { spec, browser, video, videosFolder } = options + debug(`video recording has been ${video ? 'enabled' : 'disabled'}. video: %s`, video) // bail if we've been told not to capture // a video recording if (!video) {
Add more debug logs around whether they have video recording disabled (#<I>) * Add more debug logs around whether they have video recording disabled * clean up debug log :P
diff --git a/lib/oauth2server.js b/lib/oauth2server.js index <HASH>..<HASH> 100644 --- a/lib/oauth2server.js +++ b/lib/oauth2server.js @@ -48,7 +48,6 @@ function OAuth2Server (config) { this.refreshTokenLifetime = config.refreshTokenLifetime !== undefined ? config.refreshTokenLifetime : 1209600; this.authCodeLifetime = config.authCodeLifetime || 30; - this.now = new Date(); this.regex = {}; this.regex.clientId = config.clientIdRegex || /^[a-z0-9-_]{3,40}$/i; @@ -94,6 +93,7 @@ OAuth2Server.prototype.handler = function () { // Setup request params req.oauth = { internal: false }; + oauth.now = new Date; if (req.path === '/oauth/token') { req.oauth.internal = true;
Fix expiration checking. (follows 9baf<I> in <I>)
diff --git a/src/Auth/Manager.php b/src/Auth/Manager.php index <HASH>..<HASH> 100644 --- a/src/Auth/Manager.php +++ b/src/Auth/Manager.php @@ -33,6 +33,8 @@ class Manager extends Module { * @var \Phalcon\Security */ protected $security; + + protected $tokenElement; # attributes - aparently to be used in Phalcon\Validation\Validator::setOption() these have to be strings ... const ATTR_ENTITY = 'A10'; @@ -219,7 +221,7 @@ class Manager extends Module { } public function getTokenElement($forcenew = false) { - if ($forcenew || !$this->getSession()->has('$PHALCON/CSRF/KEY$')) { + if ($forcenew || is_null($this->tokenElement) || !$this->getSession()->has('$PHALCON/CSRF/KEY$')) { $this->tokenkey = $this->getSecurity()->getTokenKey(); $this->tokenval = $this->getSecurity()->getToken();
fixed minor bug where it was possible to reference an undefined property
diff --git a/src/flask_rq2/cli.py b/src/flask_rq2/cli.py index <HASH>..<HASH> 100644 --- a/src/flask_rq2/cli.py +++ b/src/flask_rq2/cli.py @@ -14,7 +14,7 @@ from rq.cli import cli as rq_cli try: from flask.cli import AppGroup, ScriptInfo -except ImportError: +except ImportError: # pragma: no cover try: from flask_cli import AppGroup, ScriptInfo except ImportError:
Ignore the ImportError of flask.cli in test coverage.
diff --git a/tests/test_file_configuration.py b/tests/test_file_configuration.py index <HASH>..<HASH> 100644 --- a/tests/test_file_configuration.py +++ b/tests/test_file_configuration.py @@ -39,6 +39,24 @@ def test_file_configuration_from_string_local_variables_take_precedence( assert fconf.config['mark'] == 'just a mark' +def test_file_configuration_from_string_cannot_include_global_variables( + global_variables): + + local_variables = { + 'serializer': '__version__ = {{GLOBALS.serializer}}' + } + fconf = fc.FileConfiguration( + 'pkg/__init__.py', + local_variables, + global_variables + ) + + assert fconf.path == 'pkg/__init__.py' + assert fconf.config['serializer'] == \ + '__version__ = {{GLOBALS.serializer}}' + assert fconf.config['mark'] == 'just a mark' + + def test_file_conf_fr_str_path_cannot_be_overridden_by_global_variables( local_variables, global_variables): global_variables['path'] = 'a/new/path'
Added a test to check that GLOBAL variables are no more replaces in file configuration
diff --git a/src/lib/widget/default.js b/src/lib/widget/default.js index <HASH>..<HASH> 100644 --- a/src/lib/widget/default.js +++ b/src/lib/widget/default.js @@ -75,6 +75,10 @@ define(['../util', '../assets', '../i18n'], function(util, assets, i18n) { var widgetOptions = {}; + function escape(s) { + return s.replace(/>/, '&gt;').replace(/</, '&lt;'); + } + function addEvent(element, eventName, handler) { browserEvents.push([element, eventName, handler]); element.addEventListener(eventName, handler); @@ -220,7 +224,8 @@ define(['../util', '../assets', '../i18n'], function(util, assets, i18n) { var trace = cEl('pre'); elements.bubble.appendChild(trace); if(error instanceof Error) { - trace.innerHTML = error.stack; + trace.innerHTML = '<strong>' + escape(error.message) + '</strong>' + + "\n" + escape(error.stack); } else if(typeof(error) === 'object') { trace.innerHTML = JSON.stringify(error, null, 2); } else {
fixed error trace in widget error-state
diff --git a/waterboy/api/model_config.py b/waterboy/api/model_config.py index <HASH>..<HASH> 100644 --- a/waterboy/api/model_config.py +++ b/waterboy/api/model_config.py @@ -76,6 +76,10 @@ class ModelConfig: """ Return data directory for given dataset """ return self.project_config.project_toplevel_dir(*args) + def openai_dir(self) -> str: + """ Return directory for openai output files for this model """ + return self.project_config.project_output_dir('openai', self.run_name) + #################################################################################################################### # NAME UTILITIES @property
Add OpenAI logging directory to the model config.
diff --git a/pylint/checkers/strings.py b/pylint/checkers/strings.py index <HASH>..<HASH> 100644 --- a/pylint/checkers/strings.py +++ b/pylint/checkers/strings.py @@ -341,9 +341,7 @@ class StringFormatChecker(BaseChecker): format_type)): self.add_message('bad-string-format-type', node=node, - args=(arg_type.pytype(), - format_type)) - # TODO: compare type + args=(arg_type.pytype(), format_type)) elif isinstance(args, OTHER_NODES + (astroid.Tuple,)): type_name = type(args).__name__ self.add_message('format-needs-mapping', @@ -381,9 +379,7 @@ class StringFormatChecker(BaseChecker): if (arg_type not in (None, astroid.Uninferable) and not arg_matches_format_type(arg_type, format_type)): self.add_message('bad-string-format-type', - node=node, - args=(arg_type.pytype(), - format_type)) + node=node, args=(arg_type.pytype(), format_type)) @check_messages(*(MSGS.keys()))
Remove extraneous comment and fix the style
diff --git a/sqlg-core/src/main/java/org/umlg/sqlg/step/SqlgVertexStep.java b/sqlg-core/src/main/java/org/umlg/sqlg/step/SqlgVertexStep.java index <HASH>..<HASH> 100644 --- a/sqlg-core/src/main/java/org/umlg/sqlg/step/SqlgVertexStep.java +++ b/sqlg-core/src/main/java/org/umlg/sqlg/step/SqlgVertexStep.java @@ -200,7 +200,6 @@ public class SqlgVertexStep<E extends SqlgElement> extends SqlgAbstractStep impl private void constructQueryPerSchemaTable() { for (SchemaTable schemaTable : this.heads.keySet()) { SchemaTableTree rootSchemaTableTree = parseForStrategy(schemaTable); - this.replacedStepTree.maybeAddLabelToLeafNodes(); //If the order is over multiple tables then the resultSet will be completely loaded into memory and then sorted. if (this.replacedStepTree.hasOrderBy()) { if (isForMultipleQueries() || !replacedStepTree.orderByIsOrder() || this.replacedStepTree.orderByHasSelectOneStepAndForLabelNotInTree()) {
remove not needed maybeAddLabelToLeafNode. It is already added in the VertexStrategy
diff --git a/lib/hippo/api/handlers/tenant.rb b/lib/hippo/api/handlers/tenant.rb index <HASH>..<HASH> 100644 --- a/lib/hippo/api/handlers/tenant.rb +++ b/lib/hippo/api/handlers/tenant.rb @@ -8,7 +8,7 @@ module Hippo::API::Handlers def update tenant = Hippo::Tenant.current - tenant.assign_attributes(data.slice(*PUBLIC_ATTRS)) + tenant.assign_attributes(data.slice(*Hippo::Tenant::PUBLIC_ATTRS)) success = tenant.save if success && tenant.slug_previously_changed? Hippo::Tenant.system.perform do @@ -16,7 +16,7 @@ module Hippo::API::Handlers end end std_api_reply(:update, tenant, - only: PUBLIC_ATTRS, + only: Hippo::Tenant::PUBLIC_ATTRS, success: success) end end
fix path to PUBLIC_ATTRS
diff --git a/lib/elasticsearch/drain/autoscaling.rb b/lib/elasticsearch/drain/autoscaling.rb index <HASH>..<HASH> 100644 --- a/lib/elasticsearch/drain/autoscaling.rb +++ b/lib/elasticsearch/drain/autoscaling.rb @@ -85,7 +85,7 @@ module Elasticsearch auto_scaling_group_name: asg, min_size: count ) - wait_until(0) do + wait_until(count) do min_size end end
Fixes resizing asg min_size This fixes resizing when we are not draining a whole asg.
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -1,5 +1,5 @@ import sys, os -from distutils.core import setup, Extension +from setuptools import setup, Extension from subprocess import Popen, PIPE, check_output def call(*cmd):
setup.py: switch from distutils to setuptools In Python <I>, distutils is deprecated and slated for removal in Python <I>. It also prevents 'setup.py bdist_wheel' from building a wheel.
diff --git a/contractcourt/channel_arbitrator.go b/contractcourt/channel_arbitrator.go index <HASH>..<HASH> 100644 --- a/contractcourt/channel_arbitrator.go +++ b/contractcourt/channel_arbitrator.go @@ -305,8 +305,6 @@ func (c *ChannelArbitrator) Stop() error { close(c.quit) c.wg.Wait() - c.cfg.BlockEpochs.Cancel() - return nil } @@ -1293,7 +1291,10 @@ func (c *ChannelArbitrator) UpdateContractSignals(newSignals *ContractSignals) { func (c *ChannelArbitrator) channelAttendant(bestHeight int32) { // TODO(roasbeef): tell top chain arb we're done - defer c.wg.Done() + defer func() { + c.cfg.BlockEpochs.Cancel() + c.wg.Done() + }() for { select {
contractcourt/channel_arbitrator: stop block epoch on channel attendant exit
diff --git a/influx-line-format.go b/influx-line-format.go index <HASH>..<HASH> 100644 --- a/influx-line-format.go +++ b/influx-line-format.go @@ -1,7 +1,9 @@ package csv import ( + "fmt" "io" + "os" "sort" "strconv" "time" @@ -33,7 +35,9 @@ func (p *InfluxLineFormatProcess) Run(reader Reader, out io.Writer, errCh chan<- } else { maxLen := len(p.Measurement) + count := 1 for data := range reader.C() { + count++ stringTs := data.Get(p.Timestamp) if ts, err := time.ParseInLocation(p.Format, stringTs, location); err != nil { @@ -72,7 +76,9 @@ func (p *InfluxLineFormatProcess) Run(reader Reader, out io.Writer, errCh chan<- buffer = append(buffer, "="...) buffer = append(buffer, v...) } + if appended == 0 { + fmt.Fprintf(os.Stderr, "%d: dropping field-less point\n", count) continue }
be noisy if we drop a point because it has no values.
diff --git a/apollo/client.py b/apollo/client.py index <HASH>..<HASH> 100644 --- a/apollo/client.py +++ b/apollo/client.py @@ -3,12 +3,10 @@ import json import requests import logging - try: from shlex import quote except ImportError: from pipes import quote - log = logging.getLogger()
Fix support for python <I> (2)
diff --git a/util/configv3/config.go b/util/configv3/config.go index <HASH>..<HASH> 100644 --- a/util/configv3/config.go +++ b/util/configv3/config.go @@ -233,15 +233,15 @@ func WriteConfig(c *Config) error { if err != nil { return err } + tempConfigFile.Close() + tempConfigFileName := tempConfigFile.Name() - go catchSignal(sig, tempConfigFile) + go catchSignal(sig, tempConfigFileName) - tempConfigFileName := tempConfigFile.Name() err = ioutil.WriteFile(tempConfigFileName, rawConfig, 0600) if err != nil { return err } - tempConfigFile.Close() return os.Rename(tempConfigFileName, ConfigFilePath()) } @@ -250,11 +250,10 @@ func WriteConfig(c *Config) error { // Interrupt for removing temporarily created config files before the program // ends. Note: we cannot intercept a `kill -9`, so a well-timed `kill -9` // will allow a temp config file to linger. -func catchSignal(sig chan os.Signal, tempConfigFile *os.File) { +func catchSignal(sig chan os.Signal, tempConfigFileName string) { select { case <-sig: - tempConfigFile.Close() - _ = os.Remove(tempConfigFile.Name()) + _ = os.Remove(tempConfigFileName) os.Exit(2) } }
close the file right away to prevent windows race conditions
diff --git a/owner/src/main/java/org/aeonbits/owner/loaders/XMLLoader.java b/owner/src/main/java/org/aeonbits/owner/loaders/XMLLoader.java index <HASH>..<HASH> 100644 --- a/owner/src/main/java/org/aeonbits/owner/loaders/XMLLoader.java +++ b/owner/src/main/java/org/aeonbits/owner/loaders/XMLLoader.java @@ -33,7 +33,7 @@ import java.util.Stack; */ public class XMLLoader implements Loader { - private volatile SAXParserFactory factory = null; + private transient volatile SAXParserFactory factory = null; private SAXParserFactory factory() { if (factory == null) {
fixing serialization issue happening with some jdks
diff --git a/pyfritzhome/errors.py b/pyfritzhome/errors.py index <HASH>..<HASH> 100644 --- a/pyfritzhome/errors.py +++ b/pyfritzhome/errors.py @@ -7,6 +7,3 @@ class LoginError(Exception): class InvalidError(Exception): pass - -class NotImplemented(Exception): - pass
remove redefinition of built-in exception
diff --git a/lib/request.js b/lib/request.js index <HASH>..<HASH> 100644 --- a/lib/request.js +++ b/lib/request.js @@ -3,6 +3,7 @@ const { XMLHttpRequest } = require('xmlhttprequest'); function Response (request, sourceUrl) { this.request = request; + this.status = request.status; this.content = request.responseText; this.json = function () { return JSON.parse(this.content);
Response.status added from request.status XHR obj
diff --git a/vraptor-core/src/main/java/br/com/caelum/vraptor/ioc/cdi/ListProducer.java b/vraptor-core/src/main/java/br/com/caelum/vraptor/ioc/cdi/ListProducer.java index <HASH>..<HASH> 100644 --- a/vraptor-core/src/main/java/br/com/caelum/vraptor/ioc/cdi/ListProducer.java +++ b/vraptor-core/src/main/java/br/com/caelum/vraptor/ioc/cdi/ListProducer.java @@ -11,8 +11,11 @@ import javax.enterprise.inject.spi.BeanManager; import javax.enterprise.inject.spi.CDI; import javax.enterprise.inject.spi.InjectionPoint; +/** + * @deprecated This class will be deleted very soon + */ public class ListProducer { - + @SuppressWarnings({ "rawtypes", "unchecked" }) @Produces public <T> List<T> producesList(InjectionPoint injectionPoint){ @@ -22,9 +25,9 @@ public class ListProducer { BeanManager beanManager = currentCDI.getBeanManager(); Set<Bean<?>> beans = beanManager.getBeans(klass); ArrayList objects = new ArrayList(); - for (Bean<?> bean : beans) { + for (Bean<?> bean : beans) { objects.add(currentCDI.select(bean.getBeanClass()).get()); } return objects; } -} +} \ No newline at end of file
deprecating ListProducer for now
diff --git a/site/assets/js/search.js b/site/assets/js/search.js index <HASH>..<HASH> 100644 --- a/site/assets/js/search.js +++ b/site/assets/js/search.js @@ -40,7 +40,7 @@ // When in production, return the result as is, // otherwise remove our url from it. // eslint-disable-next-line no-negated-condition - hit.url = currentUrl.indexOf(liveUrl) !== -1 ? + hit.url = currentUrl.indexOf(liveUrl) !== -1 ? // lgtm [js/incomplete-url-substring-sanitization] hit.url : hit.url.replace(liveUrl, '')
site/assets/js/search.js: ignore the LGTM alert (#<I>)
diff --git a/packages/lib/test/application/management/PackagedAppManager.test.js b/packages/lib/test/application/management/PackagedAppManager.test.js index <HASH>..<HASH> 100644 --- a/packages/lib/test/application/management/PackagedAppManager.test.js +++ b/packages/lib/test/application/management/PackagedAppManager.test.js @@ -28,6 +28,12 @@ contract('PackagedAppManager', ([_, managerOwner, packageOwner, directoryOwner, this.firstVersionDirectory = await ContractDirectory.new({ from: directoryOwner }) }) + describe('when the package address is null', function () { + it('reverts', async function () { + await assertRevert(PackagedAppManager.new(0, 'dummy', this.factory.address)) + }) + }) + describe('when the given package does not support the required version', function () { it('reverts', async function () { await assertRevert(PackagedAppManager.new(this.package.address, version_0, this.factory.address, { from: managerOwner }))
tests: add test for PackagedAppManager with null address
diff --git a/js/bootstrap-select.js b/js/bootstrap-select.js index <HASH>..<HASH> 100644 --- a/js/bootstrap-select.js +++ b/js/bootstrap-select.js @@ -2548,6 +2548,10 @@ ) ) { that.$button.trigger('click.bs.dropdown.data-api'); + + if (that.options.liveSearch) { + return; + } } if (e.which === keyCodes.ESCAPE && isActive) {
auto-fill live search when button is focused and a character is typed (fixes regression introduced in <I>) (#<I>)
diff --git a/src/core.js b/src/core.js index <HASH>..<HASH> 100644 --- a/src/core.js +++ b/src/core.js @@ -229,7 +229,7 @@ $.fn.powerTip.showHide = function(element, event) { * @param {jQuery.Event=} event jQuery event. */ $.fn.powerTip.show = function(element, event) { - if (event.pageX) { + if (typeof event.pageX === 'number') { // for mouse events, pass event to show (for hover intent and mouse tracking) $.powerTip.show(element, event); } else {
Changed mouse-event check to use typeof. Although unlikely in the real world, it is possible for event.pageX to be 0. Related to pull request #<I>.
diff --git a/salt/cache/__init__.py b/salt/cache/__init__.py index <HASH>..<HASH> 100644 --- a/salt/cache/__init__.py +++ b/salt/cache/__init__.py @@ -2,12 +2,15 @@ ''' Loader mechanism for caching data, with data expirations, etc. -.. versionadded:: carbon +.. versionadded:: Carbon ''' + +# Import Python libs from __future__ import absolute_import -import os -import salt.loader import time + +# Import Salt lobs +import salt.loader from salt.payload import Serial
Clean up imports in salt/cache/__init__.py the 'os' import is not used, and splitting them out into python vs. salt imports makes them easier to read and more consistent with other salt files
diff --git a/werkzeug/http.py b/werkzeug/http.py index <HASH>..<HASH> 100644 --- a/werkzeug/http.py +++ b/werkzeug/http.py @@ -91,6 +91,8 @@ def quote_header_value(value, extra_chars='', allow_token=True): :param allow_token: if this is enabled token values are returned unchanged. """ + if isinstance(value, bytes): + value = bytes_to_wsgi(value) value = str(value) if allow_token: token_chars = _token_chars | set(extra_chars) @@ -819,6 +821,7 @@ def dump_cookie(key, value='', max_age=None, expires=None, path='/', raise TypeError('invalid key %r' % key) if isinstance(value, text_type): value = value.encode(charset) + value = quote_header_value(value) morsel = _ExtendedMorsel(key, value) if isinstance(max_age, timedelta):
ensure cookie dumping works on py3
diff --git a/apigateway.go b/apigateway.go index <HASH>..<HASH> 100644 --- a/apigateway.go +++ b/apigateway.go @@ -758,6 +758,12 @@ func (resource *Resource) NewMethod(httpMethod string, defaultHTTPStatusCode int, possibleHTTPStatusCodeResponses ...int) (*Method, error) { + if OptionsGlobal.Logger != nil && len(possibleHTTPStatusCodeResponses) != 0 { + OptionsGlobal.Logger.WithFields(logrus.Fields{ + "possibleHTTPStatusCodeResponses": possibleHTTPStatusCodeResponses, + }).Debug("The set of all HTTP status codes is no longer required for NewMethod(...). Any valid HTTP status code can be returned starting with v1.8.0.") + } + // http://docs.aws.amazon.com/apigateway/latest/developerguide/how-to-method-settings.html#how-to-method-settings-console keyname := httpMethod existingMethod, exists := resource.Methods[keyname]
Add log message about not needing to supply all the status codes.
diff --git a/lib/onebox/engine/youtube_onebox.rb b/lib/onebox/engine/youtube_onebox.rb index <HASH>..<HASH> 100644 --- a/lib/onebox/engine/youtube_onebox.rb +++ b/lib/onebox/engine/youtube_onebox.rb @@ -28,6 +28,8 @@ module Onebox end nil + rescue + return nil end def placeholder_html @@ -70,6 +72,7 @@ module Onebox elsif params['t'] start = params['t'] elsif uri.fragment && uri.fragment.start_with?('t=') + # referencing uri is safe here because any throws were already caught by video_id returning nil # remove the t= from the start start = uri.fragment[2..-1] end @@ -99,11 +102,11 @@ module Onebox (h * 60 * 60) + (m * 60) + s else - puts 'warning - nil from parse_timestring' nil end end + # Note: May throw! Make sure to recue. def uri @_uri ||= URI(@url) end @@ -123,6 +126,8 @@ module Onebox end params end + rescue + return {} end end
Make sure that no URI parsing exceptions are leaked
diff --git a/de.tudarmstadt.ukp.wikipedia.api/src/test/java/de/tudarmstadt/ukp/wikipedia/api/PageTest.java b/de.tudarmstadt.ukp.wikipedia.api/src/test/java/de/tudarmstadt/ukp/wikipedia/api/PageTest.java index <HASH>..<HASH> 100644 --- a/de.tudarmstadt.ukp.wikipedia.api/src/test/java/de/tudarmstadt/ukp/wikipedia/api/PageTest.java +++ b/de.tudarmstadt.ukp.wikipedia.api/src/test/java/de/tudarmstadt/ukp/wikipedia/api/PageTest.java @@ -86,9 +86,9 @@ public class PageTest { fail("A WikiApiException occured while getting the page " + title); } - String text = "Wikipedia API ist die wichtigste Software überhaupt. Wikipedia API."+LF+ - "Nicht zu übertreffen."+LF+"Unglaublich"+LF+"http://www.ukp.tu-darmstadt.de"+LF+"en:Wikipedia API fi:WikipediaAPI"; + String text = "Wikipedia API ist die wichtigste Software überhaupt. Wikipedia API.\nNicht zu übertreffen.\nUnglaublich\nhttp://www.ukp.tu-darmstadt.de\nen:Wikipedia API fi:WikipediaAPI"; + try{ assertEquals(text, p.getPlainText()); }catch(Exception e){
Fixed testPlainText to work system independently (no line.separator property in assertions)
diff --git a/bakery/management/commands/publish.py b/bakery/management/commands/publish.py index <HASH>..<HASH> 100644 --- a/bakery/management/commands/publish.py +++ b/bakery/management/commands/publish.py @@ -156,7 +156,7 @@ in settings.py or provide it with --aws-bucket-name" return files_list - def sync_s3(self, dirname, names): + def sync_s3(self): """ Walk through our local file list, and match them wtih the list of keys in the S3 bucket. @@ -216,9 +216,7 @@ in settings.py or provide it with --aws-bucket-name" self.local_files = self.build_local_files_list() - # walk through the build directory - # for (dirpath, dirnames, filenames) in os.walk(self.build_dir): - # self.sync_s3(dirpath, filenames) + self.sync_s3() # delete anything that's left in our keys dict for key in self.keys:
great way to delete all files is to never call the sync_s3 command
diff --git a/src/stateManagement/imageIdSpecificStateManager.js b/src/stateManagement/imageIdSpecificStateManager.js index <HASH>..<HASH> 100755 --- a/src/stateManagement/imageIdSpecificStateManager.js +++ b/src/stateManagement/imageIdSpecificStateManager.js @@ -36,12 +36,14 @@ function newImageIdSpecificToolStateManager() { // As modules that restore saved state function addImageIdSpecificToolState(element, toolType, data) { const enabledElement = external.cornerstone.getEnabledElement(element); - // If we don't have any tool state for this imageId, add an empty object - if ( - !enabledElement.image || - toolState.hasOwnProperty(enabledElement.image.imageId) === false - ) { + // If we don't have an image for this element exit early + if (!enabledElement.image) { + return; + } + + // If we don't have any tool state for this imageId, add an empty object + if (toolState.hasOwnProperty(enabledElement.image.imageId) === false) { toolState[enabledElement.image.imageId] = {}; }
re-implements the imageIdSpecificStateManager.add bugfix
diff --git a/tests.py b/tests.py index <HASH>..<HASH> 100644 --- a/tests.py +++ b/tests.py @@ -1292,6 +1292,14 @@ class TestCheckManifest(unittest.TestCase): self.assertEqual(str(cm.exception), "This is not a Python project (no setup.py).") + def test_forgot_to_git_add_anything(self): + from check_manifest import check_manifest, Failure + self._create_repo_with_code(add_to_vcs=False) + with self.assertRaises(Failure) as cm: + check_manifest() + self.assertEqual(str(cm.exception), + "There are no files added to version control!") + def test_all_is_well(self): from check_manifest import check_manifest self._create_repo_with_code()
Test coverage for empty VCS repo case
diff --git a/reana_commons/snakemake.py b/reana_commons/snakemake.py index <HASH>..<HASH> 100644 --- a/reana_commons/snakemake.py +++ b/reana_commons/snakemake.py @@ -73,6 +73,7 @@ def snakemake_load(workflow_file, **kwargs): "environment": rule._container_img.replace("docker://", ""), "inputs": dict(rule._input), "params": dict(rule._params), + "outputs": dict(rule._output), "commands": [rule.shellcmd], } for rule in snakemake_workflow.rules
snakemake: include outputs in workflow representation closes reanahub/reana-client#<I>
diff --git a/tests/test_tokenizer.py b/tests/test_tokenizer.py index <HASH>..<HASH> 100644 --- a/tests/test_tokenizer.py +++ b/tests/test_tokenizer.py @@ -119,6 +119,13 @@ def test_bracket_period(EN): tokens = EN(text) assert tokens[len(tokens) - 1].orth_ == u'.' + +def test_ie(EN): + text = u"It's mediocre i.e. bad." + tokens = EN(text) + assert len(tokens) == 6 + assert tokens[3].orth_ == "i.e." + #def test_cnts7(): # text = 'But then the 6,000-year ice age came...' # tokens = EN.tokenize(text)
* Upd tokenizer with i.e. tests
diff --git a/public/js/utils/source-map-worker.js b/public/js/utils/source-map-worker.js index <HASH>..<HASH> 100644 --- a/public/js/utils/source-map-worker.js +++ b/public/js/utils/source-map-worker.js @@ -82,7 +82,7 @@ function getOriginalTexts(generatedSource, generatedText) { function getOriginalSourcePosition(generatedSource, { column, line }) { const consumer = _getConsumer(generatedSource.id); - // if there is not a consumer, then its a generated source without a map + // if there is not a consumer, then it's a generated source without a map if (!consumer) { return { url: generatedSource.url,
its to it's in source-map-worker.js comment
diff --git a/sos/plugins/logrotate.py b/sos/plugins/logrotate.py index <HASH>..<HASH> 100644 --- a/sos/plugins/logrotate.py +++ b/sos/plugins/logrotate.py @@ -24,6 +24,7 @@ class LogRotate(Plugin, RedHatPlugin, DebianPlugin, UbuntuPlugin): self.add_copy_spec([ "/etc/logrotate*", "/var/lib/logrotate.status", + "/var/lib/logrotate/logrotate.status", self.var_puppet_gen + "/etc/logrotate-crond.conf", self.var_puppet_gen + "/var/spool/cron/root" ])
[logrotate] fix path for logrotate.status in RHEL7/CentOS7 In RHEL7/CentOS7 logrotate.status is placed in /var/lib/logrotate rather than directly in /var/lib. Resolves: #<I>
diff --git a/presto-main/src/main/java/com/facebook/presto/operator/repartition/OptimizedPartitionedOutputOperator.java b/presto-main/src/main/java/com/facebook/presto/operator/repartition/OptimizedPartitionedOutputOperator.java index <HASH>..<HASH> 100644 --- a/presto-main/src/main/java/com/facebook/presto/operator/repartition/OptimizedPartitionedOutputOperator.java +++ b/presto-main/src/main/java/com/facebook/presto/operator/repartition/OptimizedPartitionedOutputOperator.java @@ -618,10 +618,11 @@ public class OptimizedPartitionedOutputOperator { // Create buffers has to be done after seeing the first page. if (blockEncodingBuffers == null) { - blockEncodingBuffers = new BlockEncodingBuffer[channelCount]; + BlockEncodingBuffer[] buffers = new BlockEncodingBuffer[channelCount]; for (int i = 0; i < channelCount; i++) { - blockEncodingBuffers[i] = createBlockEncodingBuffers(decodedBlocks[i]); + buffers[i] = createBlockEncodingBuffers(decodedBlocks[i]); } + blockEncodingBuffers = buffers; } }
Harden blockEncodingBuffers in OptimizedPartitionedOutputOperator
diff --git a/pkg/node/manager.go b/pkg/node/manager.go index <HASH>..<HASH> 100644 --- a/pkg/node/manager.go +++ b/pkg/node/manager.go @@ -25,6 +25,7 @@ import ( "github.com/cilium/cilium/pkg/logging" "github.com/cilium/cilium/pkg/logging/logfields" "github.com/cilium/cilium/pkg/maps/tunnel" + "github.com/cilium/cilium/pkg/mtu" "github.com/sirupsen/logrus" "github.com/vishvananda/netlink" @@ -191,6 +192,12 @@ func replaceNodeRoute(ip *net.IPNet) { } route := netlink.Route{LinkIndex: link.Attrs().Index, Dst: ip, Gw: via, Src: local} + // If the route includes the local address, then the route is for + // local containers and we can use a high MTU for transmit. Otherwise, + // it needs to be able to fit within the MTU of tunnel devices. + if !ip.Contains(local) { + route.MTU = mtu.TunnelMTU + } scopedLog := log.WithField(logfields.Route, route) if err := netlink.RouteReplace(&route); err != nil {
node: Configure route MTUs depending on destination Configure MTU for routes such that routes to local destinations will have MTU <I>, whereas routes to other nodes use the tunnel MTU.
diff --git a/lib/librarian/chef/source/site.rb b/lib/librarian/chef/source/site.rb index <HASH>..<HASH> 100644 --- a/lib/librarian/chef/source/site.rb +++ b/lib/librarian/chef/source/site.rb @@ -207,9 +207,15 @@ module Librarian dependency_cache_path.mkpath metadata_cache_path = metadata_cache_path(dependency) unless metadata_cache_path.exist? - dep_uri = dependency_uri(dependency) + dep_uri = URI.parse(dependency_uri(dependency)) debug { "Caching #{dep_uri}" } - metadata_blob = Net::HTTP.get(URI.parse(dep_uri)) + http = Net::HTTP.new(dep_uri.host, dep_uri.port) + request = Net::HTTP::Get.new(dep_uri.path) + response = http.start{|http| http.request(request)} + unless Net::HTTPSuccess === response + raise Error, "Could not cache #{dependency} from #{dep_uri} because #{response.code} #{response.message}!" + end + metadata_blob = response.body JSON.parse(metadata_blob) # check that it's JSON metadata_cache_path(dependency).open('wb') do |f| f.write(metadata_blob)
A little bit of handling in case the chef site source can't find a dependency.
diff --git a/cloudvolume/datasource/graphene/mesh/sharded.py b/cloudvolume/datasource/graphene/mesh/sharded.py index <HASH>..<HASH> 100644 --- a/cloudvolume/datasource/graphene/mesh/sharded.py +++ b/cloudvolume/datasource/graphene/mesh/sharded.py @@ -149,13 +149,9 @@ class GrapheneShardedMeshSource(GrapheneUnshardedMeshSource): filenames = self.get_fragment_filenames(seg_id, level=level, bbox=bounding_box) lists = self.parse_manifest_filenames(filenames) - files = [] + meshes = [] if lists['dynamic']: - files = CloudFiles(dynamic_cloudpath, green=self.config.green).get(lists['dynamic']) - - meshes = [ - f['content'] for f in files - ] + meshes = CloudFiles(dynamic_cloudpath, green=self.config.green).get(lists['dynamic']) fetches = [] for layer_id, filename, byte_start, size in lists['initial']: @@ -166,7 +162,6 @@ class GrapheneShardedMeshSource(GrapheneUnshardedMeshSource): }) cloudpath = self.meta.join(self.meta.meta.cloudpath, self.meta.mesh_path, 'initial') - raw_binaries = [] initial_meshes = CloudFiles(cloudpath, green=self.config.green).get(fetches) meshes += initial_meshes
fix: incompatibility between inital and dynamic meshes (#<I>)
diff --git a/src/main/java/com/sonymobile/tools/gerrit/gerritevents/dto/events/RefReplicated.java b/src/main/java/com/sonymobile/tools/gerrit/gerritevents/dto/events/RefReplicated.java index <HASH>..<HASH> 100644 --- a/src/main/java/com/sonymobile/tools/gerrit/gerritevents/dto/events/RefReplicated.java +++ b/src/main/java/com/sonymobile/tools/gerrit/gerritevents/dto/events/RefReplicated.java @@ -40,6 +40,15 @@ import com.sonymobile.tools.gerrit.gerritevents.dto.GerritEventType; public class RefReplicated extends GerritTriggeredEvent { /** + * Replication failed status. + */ + public static final String FAILED_STATUS = "failed"; + /** + * Replication succeeded status. + */ + public static final String SUCCEEDED_STATUS = "succeeded"; + + /** * Project path in Gerrit. */ private String project;
Added lost RefReplicated constants
diff --git a/lib/proinsias/version.rb b/lib/proinsias/version.rb index <HASH>..<HASH> 100644 --- a/lib/proinsias/version.rb +++ b/lib/proinsias/version.rb @@ -1,3 +1,3 @@ module Proinsias - VERSION = "0.5.0" + VERSION = "0.6.0" end
Bump proinsias to <I>
diff --git a/benchmark/benchmark.rb b/benchmark/benchmark.rb index <HASH>..<HASH> 100755 --- a/benchmark/benchmark.rb +++ b/benchmark/benchmark.rb @@ -18,7 +18,7 @@ def compare_scrub_methods puts Loofah.scrub_fragment(snip, :escape).to_s puts "--" puts HTMLFilter.new.filter(snip) - puts Loofah::Helpers.sanitize(snip) + puts Loofah.scrub_fragment(snip, :strip).to_s puts end diff --git a/benchmark/helper.rb b/benchmark/helper.rb index <HASH>..<HASH> 100644 --- a/benchmark/helper.rb +++ b/benchmark/helper.rb @@ -9,6 +9,10 @@ require "sanitize" require 'hitimes' require 'htmlfilter' +unless defined?(HTMLFilter) + HTMLFilter = HtmlFilter +end + class RailsSanitize include ActionView::Helpers::SanitizeHelper extend ActionView::Helpers::SanitizeHelper::ClassMethods
fixing trans's commit to work with the current gem class names.
diff --git a/pylast/__init__.py b/pylast/__init__.py index <HASH>..<HASH> 100644 --- a/pylast/__init__.py +++ b/pylast/__init__.py @@ -98,12 +98,11 @@ DOMAIN_RUSSIAN = 9 DOMAIN_JAPANESE = 10 DOMAIN_CHINESE = 11 -# COVER_X is deprecated since 2.1.0 and will be removed in a future version -SIZE_SMALL = COVER_SMALL = 0 -SIZE_MEDIUM = COVER_MEDIUM = 1 -SIZE_LARGE = COVER_LARGE = 2 -SIZE_EXTRA_LARGE = COVER_EXTRA_LARGE = 3 -SIZE_MEGA = COVER_MEGA = 4 +SIZE_SMALL = 0 +SIZE_MEDIUM = 1 +SIZE_LARGE = 2 +SIZE_EXTRA_LARGE = 3 +SIZE_MEGA = 4 IMAGES_ORDER_POPULARITY = "popularity" IMAGES_ORDER_DATE = "dateadded"
Remove deprecated COVER_X constants
diff --git a/src/Graviton/RestBundle/Listener/ValidationRequestListener.php b/src/Graviton/RestBundle/Listener/ValidationRequestListener.php index <HASH>..<HASH> 100644 --- a/src/Graviton/RestBundle/Listener/ValidationRequestListener.php +++ b/src/Graviton/RestBundle/Listener/ValidationRequestListener.php @@ -54,7 +54,8 @@ class ValidationRequestListener // if PATCH is required, refactor the method or do something else $request = $event->getRequest(); - if (empty($request->getContent())) { + $content = $request->getContent(); + if (empty($content)) { $isJson = true; } else { $isJson = strtolower(substr($request->headers->get('content-type'), 0, 16)) == 'application/json'; @@ -63,7 +64,6 @@ class ValidationRequestListener $controller = $event->getController(); // Moved this from RestController to ValidationListener (don't know if necessary) - $content = $request->getContent(); if (is_resource($content)) { throw new \LogicException('unexpected resource in validation'); }
Smallish php<I> refactor This one also makes the code a bit more concise so I'm happy with it. There are probably other cases where I'll be too lazy to touch anything.
diff --git a/blockservice/blockservice.go b/blockservice/blockservice.go index <HASH>..<HASH> 100644 --- a/blockservice/blockservice.go +++ b/blockservice/blockservice.go @@ -78,6 +78,8 @@ func (bs *blockService) Exchange() exchange.Interface { return bs.exchange } +// NewSession creates a bitswap session that allows for controlled exchange of +// wantlists to decrease the bandwidth overhead. func NewSession(ctx context.Context, bs BlockService) *Session { exchange := bs.Exchange() if bswap, ok := exchange.(*bitswap.Bitswap); ok { diff --git a/exchange/bitswap/wantlist/wantlist.go b/exchange/bitswap/wantlist/wantlist.go index <HASH>..<HASH> 100644 --- a/exchange/bitswap/wantlist/wantlist.go +++ b/exchange/bitswap/wantlist/wantlist.go @@ -79,6 +79,7 @@ func (w *ThreadSafe) Add(c *cid.Cid, priority int, ses uint64) bool { return true } +// AddEntry adds given Entry to the wantlist. For more information see Add method. func (w *ThreadSafe) AddEntry(e *Entry, ses uint64) bool { w.lk.Lock() defer w.lk.Unlock()
bitswap: add few method comments License: MIT
diff --git a/config/projects/chefdk.rb b/config/projects/chefdk.rb index <HASH>..<HASH> 100644 --- a/config/projects/chefdk.rb +++ b/config/projects/chefdk.rb @@ -34,6 +34,18 @@ else install_dir "#{default_root}/#{name}" end +# As of 27 October 2014, the newest CA cert bundle does not work with AWS's +# root cert. See: +# * https://github.com/opscode/chef-dk/issues/199 +# * https://blog.mozilla.org/security/2014/09/08/phasing-out-certificates-with-1024-bit-rsa-keys/ +# * https://forums.aws.amazon.com/thread.jspa?threadID=164095 +# * https://github.com/opscode/omnibus-supermarket/commit/89197026af2931de82cfdc13d92ca2230cced3b6 +# +# For now we resolve it by using an older version of the cert. This only works +# if you have this version of the CA bundle stored via S3 caching (which Chef +# Software does). +override :cacerts, version: '2014.08.20' + override :berkshelf, version: "v3.1.5" override :bundler, version: "1.7.2" override :chef, version: "11.16.0"
Use an older version of CA certs to fix S3 cert checks AWS Certificate cannot be verified by the latest version of the CA bundle, lock to an older one that works until either AWS or the CA bundle fixes the issue.
diff --git a/lib/calculated_attributes.rb b/lib/calculated_attributes.rb index <HASH>..<HASH> 100644 --- a/lib/calculated_attributes.rb +++ b/lib/calculated_attributes.rb @@ -6,9 +6,10 @@ require 'calculated_attributes/rails_patches' require 'calculated_attributes/arel_patches' raise "Unsupported ActiveRecord version: #{ActiveRecord::VERSION::MAJOR}" unless [3, 4, 5, 6].include? ActiveRecord::VERSION::MAJOR -# Rails 5.2 has its own patches which are different from 5.0/5.1. In every other +# Rails 5.2 has its own patches which are different from 5.0. In every other # case, just require the patch file for the major version. -if Gem::Version.new(ActiveRecord::VERSION::STRING).canonical_segments.take(2) == [5, 2] +versions = Gem::Version.new(ActiveRecord::VERSION::STRING).canonical_segments.take(2) +if versions == [5, 2] || versions == [5, 1] require 'calculated_attributes/rails_5_2_patches' else require "calculated_attributes/rails_#{ActiveRecord::VERSION::MAJOR}_patches"
Update to support <I> The change was back ported to <I> <URL>
diff --git a/bcloud/CloudPage.py b/bcloud/CloudPage.py index <HASH>..<HASH> 100644 --- a/bcloud/CloudPage.py +++ b/bcloud/CloudPage.py @@ -237,6 +237,7 @@ class CloudPage(Gtk.Box): def add_local_bt_task(self): '''从本地上传种子到服务器, 再创建离线下载任务''' self.check_first() + print('Local BT task not supported right now.') # Open API def add_link_task(self): @@ -268,7 +269,7 @@ class CloudPage(Gtk.Box): if response != Gtk.ResponseType.OK or not len(source_url): return - if source_url.startswith('magent'): + if source_url.startswith('magnet'): self.add_cloud_bt_task(source_url) return diff --git a/bcloud/VCodeDialog.py b/bcloud/VCodeDialog.py index <HASH>..<HASH> 100644 --- a/bcloud/VCodeDialog.py +++ b/bcloud/VCodeDialog.py @@ -3,6 +3,7 @@ # Use of this source code is governed by GPLv3 license that can be found # in http://www.gnu.org/licenses/gpl-3.0.html +import os from gi.repository import Gtk
Show file chooser dialog when downloading magnet files
diff --git a/jquery.geocomplete.js b/jquery.geocomplete.js index <HASH>..<HASH> 100644 --- a/jquery.geocomplete.js +++ b/jquery.geocomplete.js @@ -116,6 +116,12 @@ 'click', $.proxy(this.mapClicked, this) ); + + google.maps.event.addListener( + this.map, + 'zoom_changed', + $.proxy(this.mapZoomed, this) + ); }, // Add a marker with the provided `markerOptions` but only @@ -423,6 +429,10 @@ this.trigger("geocode:click", event.latLng); }, + mapZoomed: function(event) { + this.trigger("geocode:zoom", this.map.getZoom()); + }, + // Restore the old position of the marker to the last now location. resetMarker: function(){ this.marker.setPosition(this.data.location);
zoom_changed event support listen on zoom_changed event and trigger geocoder:zoom with the new zoom level
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -19,7 +19,12 @@ class build_ext(_build_ext): # see http://stackoverflow.com/q/19919905 for explanation def finalize_options(self): _build_ext.finalize_options(self) - __builtins__.__NUMPY_SETUP__ = False + # __builtins__ is a dict when this module isn't __main__ + # see https://docs.python.org/2/reference/executionmodel.html + if isinstance(__builtins__, dict): + __builtins__['__NUMPY_SETUP__'] = False + else: + __builtins__.__NUMPY_SETUP__ = False import numpy as np self.include_dirs.append(np.get_include())
handle setup.py edge case where it's not run as __main__ <URL>
diff --git a/src/test/java/org/jooq/lambda/SeqTest.java b/src/test/java/org/jooq/lambda/SeqTest.java index <HASH>..<HASH> 100644 --- a/src/test/java/org/jooq/lambda/SeqTest.java +++ b/src/test/java/org/jooq/lambda/SeqTest.java @@ -20,8 +20,8 @@ import static java.util.Comparator.comparing; import static java.util.stream.Collectors.counting; import static java.util.stream.Collectors.groupingBy; import static java.util.stream.Collectors.joining; -import static java.util.stream.Collectors.toList; import static java.util.stream.Collectors.mapping; +import static java.util.stream.Collectors.toList; import static org.hamcrest.CoreMatchers.hasItems; import static org.jooq.lambda.Utils.assertThrows; import static org.jooq.lambda.tuple.Tuple.collectors; @@ -79,7 +79,7 @@ public class SeqTest { } @Test - public void testTheSameBehaviorAsGroupBy() throws Exception { + public void testGroupedSameBehaviorAsGroupBy() throws Exception { Random r = new Random(System.nanoTime()); int runs = r.nextInt(127) + 1; for (int i = 0; i < runs; i++) {
[#<I>] Rename test of the "grouped" category
diff --git a/lib/ach/records/file_header.rb b/lib/ach/records/file_header.rb index <HASH>..<HASH> 100644 --- a/lib/ach/records/file_header.rb +++ b/lib/ach/records/file_header.rb @@ -5,7 +5,7 @@ module ACH::Records const_field :record_type, '1' const_field :priority_code, '01' field :immediate_destination, String, lambda { |f| f.rjust(10) }, nil, /\A(\d{9,10}|)\z/ - field :immediate_origin, String, lambda { |f| f.rjust(10) }, nil, /\A[A-Z\d\s]{1}?\d{9}\z/ + field :immediate_origin, String, lambda { |f| f.rjust(10) }, nil, /\A[A-Z\d\s]{1}?\d{9}\s?\z/ field :transmission_datetime, Time, lambda { |f| f.strftime('%y%m%d%H%M')}, lambda { Time.now }
IMS-<I> allow appending space to immediate origin
diff --git a/code/cms/DMSUploadField.php b/code/cms/DMSUploadField.php index <HASH>..<HASH> 100644 --- a/code/cms/DMSUploadField.php +++ b/code/cms/DMSUploadField.php @@ -57,6 +57,15 @@ class DMSUploadField extends UploadField { return true; } + + public function isDisabled() { + return (parent::isDisabled() || !$this->isSaveable()); + } + + public function isSaveable() { + return (!empty($this->getRecord()->ID)); + } + /** * Action to handle upload of a single file *
FIX Don't show the upload field when inappropriate This fix is quite ugly, but in theory, it's fine. By ugly, I mean the interface is kind of weird without it. Because <I> allows uploading when a DataObject doesn't exist, we need to override the new defaults to not allow such if the DO isn't saved yet.
diff --git a/src/Commands/RsyncCommand.php b/src/Commands/RsyncCommand.php index <HASH>..<HASH> 100644 --- a/src/Commands/RsyncCommand.php +++ b/src/Commands/RsyncCommand.php @@ -93,7 +93,7 @@ class RsyncCommand extends TerminusCommand implements SiteAwareInterface } $this->log()->notice('Running {cmd}', ['cmd' => "rsync $rsyncOptionString $src $dest"]); - $this->passthru("rsync $rsyncOptionString --ipv4 --exclude=.git -e 'ssh -p 2222' $src $dest "); + $this->passthru("rsync $rsyncOptionString --ipv4 --exclude=.git -e 'ssh -p 2222' '$src' '$dest' "); } protected function passthru($command)
put paths in quotes to preserve spaces etc
diff --git a/client/views/panels/manager.js b/client/views/panels/manager.js index <HASH>..<HASH> 100644 --- a/client/views/panels/manager.js +++ b/client/views/panels/manager.js @@ -26,6 +26,7 @@ define([ tabMenu: false, newTab: false, draggable: false, + keyboardShortcuts: false, maxTabsPerSection: 1 }, this); this.tabs.$el.appendTo(this.$el); diff --git a/client/views/tabs/base.js b/client/views/tabs/base.js index <HASH>..<HASH> 100644 --- a/client/views/tabs/base.js +++ b/client/views/tabs/base.js @@ -62,6 +62,8 @@ define([ var navs = {}; container = container || this; + if (!this.tab.manager.options.keyboardShortcuts) return; + _.each(navigations, function(method, key) { navs[key] = _.bind(function() { // Trigger only if active tab diff --git a/client/views/tabs/manager.js b/client/views/tabs/manager.js index <HASH>..<HASH> 100644 --- a/client/views/tabs/manager.js +++ b/client/views/tabs/manager.js @@ -43,7 +43,10 @@ define([ maxTabsPerSection: -1, // Tabs are draggable - draggable: true + draggable: true, + + // Enable keyboard shortcuts + keyboardShortcuts: true }, events: {},
Fix keyboard shortcuts in tabs of lateral panels
diff --git a/resource_arm_managed_disk.go b/resource_arm_managed_disk.go index <HASH>..<HASH> 100644 --- a/resource_arm_managed_disk.go +++ b/resource_arm_managed_disk.go @@ -2,12 +2,13 @@ package azurerm import ( "fmt" - "github.com/Azure/azure-sdk-for-go/arm/disk" - "github.com/hashicorp/terraform/helper/schema" - "github.com/hashicorp/terraform/helper/validation" "log" "net/http" "strings" + + "github.com/Azure/azure-sdk-for-go/arm/disk" + "github.com/hashicorp/terraform/helper/schema" + "github.com/hashicorp/terraform/helper/validation" ) func resourceArmManagedDisk() *schema.Resource { @@ -90,9 +91,9 @@ func resourceArmManagedDisk() *schema.Resource { func validateDiskSizeGB(v interface{}, k string) (ws []string, errors []error) { value := v.(int) - if value < 1 || value > 1023 { + if value < 1 || value > 4095 { errors = append(errors, fmt.Errorf( - "The `disk_size_gb` can only be between 1 and 1023")) + "The `disk_size_gb` can only be between 1 and 4095")) } return }
Adding support for 4TB disks
diff --git a/mpldatacursor.py b/mpldatacursor.py index <HASH>..<HASH> 100644 --- a/mpldatacursor.py +++ b/mpldatacursor.py @@ -75,14 +75,15 @@ def datacursor(artists=None, axes=None, tolerance=5, formatter=None, + ax.images return artists - if not cbook.iterable(axes): - axes = [axes] - # If no axes are specified, get all axes. if axes is None: - figs = pylab_helpers.Gcf.figs.values() + managers = pylab_helpers.Gcf.get_all_fig_managers() + figs = [manager.canvas.figure for manager in managers] axes = [ax for fig in figs for ax in fig.axes] + if not cbook.iterable(axes): + axes = [axes] + # If no artists are specified, get all manually plotted artists in all of # the specified axes. if artists is None:
Bugfix for "datacursor" when no artists or axes are specified
diff --git a/modules/clipboard.js b/modules/clipboard.js index <HASH>..<HASH> 100644 --- a/modules/clipboard.js +++ b/modules/clipboard.js @@ -107,8 +107,8 @@ class Clipboard extends Module { this.container.focus(); this.quill.selection.update(Quill.sources.SILENT); setTimeout(() => { - this.quill.scrollingContainer.scrollTop = scrollTop; this.onPaste(e, range); + this.quill.scrollingContainer.scrollTop = scrollTop; this.quill.focus(); this.container.innerHTML = ''; }, 1);
onPaste might shift scrolling, so restore scrollTop after it runs
diff --git a/gremlin-python/src/main/jython/setup.py b/gremlin-python/src/main/jython/setup.py index <HASH>..<HASH> 100644 --- a/gremlin-python/src/main/jython/setup.py +++ b/gremlin-python/src/main/jython/setup.py @@ -51,9 +51,12 @@ install_requires = [ 'isodate>=0.6.0,<1.0.0' ] -if sys.version_info < (3,2): +if sys.version_info < (3, 2): install_requires += ['futures>=3.0.5,<4.0.0'] +if sys.version_info < (3, 5): + install_requires += ['pyparsing>=2.4.6,<3.0.0'] + setup( name='gremlinpython', version=version,
Pinned pyparsing to versions prior to <I>; <I> doesn't seem to support earlier versions of Python anymore. CTR
diff --git a/extended-template-parts.php b/extended-template-parts.php index <HASH>..<HASH> 100644 --- a/extended-template-parts.php +++ b/extended-template-parts.php @@ -40,6 +40,9 @@ */ function get_extended_template_part( $slug, $name = '', array $vars = [], array $args = [] ) { $template = new Extended_Template_Part( $slug, $name, $vars, $args ); + $dir = $template->args['dir']; + /* This action is documented in WordPress core: wp-includes/general-template.php */ + do_action( "get_template_part_{$dir}/{$slug}", "{$dir}/{$slug}", $name ); echo $template->get_output(); // WPCS: XSS ok. }
Fire the same `get_template_part_{$slug}` action that WordPress core does when loading a template part.
diff --git a/modelx/core/system.py b/modelx/core/system.py index <HASH>..<HASH> 100644 --- a/modelx/core/system.py +++ b/modelx/core/system.py @@ -124,14 +124,18 @@ class System: return from ipykernel.kernelapp import IPKernelApp + self.shell = IPKernelApp.instance().shell # None in PyCharm console - self.shell = IPKernelApp.instance().shell + if not self.shell and is_ipython(): + self.shell = get_ipython() - if self.shell: # is set to None in PyCharm + if self.shell: shell_class = type(self.shell) shell_class.default_showtraceback = shell_class.showtraceback shell_class.showtraceback = custom_showtraceback self.is_ipysetup = True + else: + raise RuntimeError("IPython shell not found.") def restore_ipython(self): """Restore default IPython showtraceback"""
FIX: setup_ipython to get shell from get_ipython()
diff --git a/src/main/java/water/exec/ASTOp.java b/src/main/java/water/exec/ASTOp.java index <HASH>..<HASH> 100644 --- a/src/main/java/water/exec/ASTOp.java +++ b/src/main/java/water/exec/ASTOp.java @@ -718,8 +718,8 @@ abstract class ASTBinOp extends ASTOp { if(chks[i].isNA0(r)) { n.addNum(Double.NaN); continue; } rv = chks[i].at0(r); } else { - if (Double.isNaN(df0)) { n.addNum(Double.NaN); continue; } - rv = df0; + if (Double.isNaN(df1)) { n.addNum(Double.NaN); continue; } + rv = df1; } n.addNum(bin.op(lv, rv)); }
missed the RHS double in the ASTBinOp
diff --git a/features/step_definitions/cli_steps.rb b/features/step_definitions/cli_steps.rb index <HASH>..<HASH> 100644 --- a/features/step_definitions/cli_steps.rb +++ b/features/step_definitions/cli_steps.rb @@ -22,16 +22,6 @@ When /^I wait (\d+) seconds?$/ do |arg1| sleep arg1.to_i end -# TODO: Remove after pull request is merged in cucumber.rb from Aruba -When /^I wait for (?:output|stdout) to contain "([^"]*)"$/ do |expected| - Timeout::timeout(exit_timeout) do - loop do - break if assert_partial_output_interactive(expected) - sleep 0.1 - end - end -end - Given /^that I create a valid app under "([^"]*)"$/ do |path| steps %Q{ When I run `ahn create #{path}`
[CS] Remove step definitions duplicated from Aruba
diff --git a/websocket.py b/websocket.py index <HASH>..<HASH> 100644 --- a/websocket.py +++ b/websocket.py @@ -162,8 +162,8 @@ def create_connection(url, timeout=None, **options): If you set "header" dict object, you can set your own custom header. >>> conn = create_connection("ws://echo.websocket.org/", - ... header={"User-Agent: MyProgram", - ... "x-custom: header"}) + ... header=["User-Agent: MyProgram", + ... "x-custom: header"]) timeout: socket timeout time. This value is integer.
header argument is a sequence instead of a dict
diff --git a/dht.go b/dht.go index <HASH>..<HASH> 100644 --- a/dht.go +++ b/dht.go @@ -53,7 +53,6 @@ type IpfsDHT struct { providers *providers.ProviderManager birth time.Time // When this peer started up - diaglock sync.Mutex // lock to make diagnostics work better Validator record.Validator // record validator funcs Selector record.Selector // record selection funcs
Remove diaglock as per #<I>
diff --git a/lib/virtualbox/dvd.rb b/lib/virtualbox/dvd.rb index <HASH>..<HASH> 100644 --- a/lib/virtualbox/dvd.rb +++ b/lib/virtualbox/dvd.rb @@ -7,5 +7,12 @@ module VirtualBox parse_raw(raw) end end + + # Deletes the DVD from VBox managed list, but not actually from + # disk itself. + def destroy + Command.vboxmanage("closemedium dvd #{uuid} --delete") + return $?.to_i == 0 + end end end \ No newline at end of file diff --git a/lib/virtualbox/hard_drive.rb b/lib/virtualbox/hard_drive.rb index <HASH>..<HASH> 100644 --- a/lib/virtualbox/hard_drive.rb +++ b/lib/virtualbox/hard_drive.rb @@ -10,5 +10,10 @@ module VirtualBox parse_raw(raw) end end + + def destroy + Command.vboxmanage("closemedium disk #{uuid} --delete") + return $?.to_i == 0 + end end end \ No newline at end of file
HardDrive and DVD images can now be destroyed
diff --git a/kernel/classes/datatypes/ezdatetime/ezdatetimetype.php b/kernel/classes/datatypes/ezdatetime/ezdatetimetype.php index <HASH>..<HASH> 100644 --- a/kernel/classes/datatypes/ezdatetime/ezdatetimetype.php +++ b/kernel/classes/datatypes/ezdatetime/ezdatetimetype.php @@ -625,7 +625,7 @@ class eZDateTimeType extends eZDataType default: { - $default = null; + return array(); } }
Fix EZP-<I>: ezdatetime with no default value causes class edit to fail According to the other datatypes I have checked, it should return an empty array in this case.
diff --git a/storage/metric/memory.go b/storage/metric/memory.go index <HASH>..<HASH> 100644 --- a/storage/metric/memory.go +++ b/storage/metric/memory.go @@ -348,8 +348,20 @@ func (s memorySeriesStorage) Close() (err error) { return } -func (s memorySeriesStorage) GetAllMetricNames() ([]string, error) { - panic("not implemented") +func (s memorySeriesStorage) GetAllMetricNames() (metrics []string, err error) { + metricSet := map[string]bool{} + for _, series := range s.fingerprintToSeries { + if metricName, ok := series.metric["name"]; !ok { + err = fmt.Errorf("Found timeseries without metric name label: %v", series.metric) + } else { + metricSet[string(metricName)] = true + } + } + for metricName := range metricSet { + metrics = append(metrics, metricName) + } + sort.Strings(metrics) + return } func (s memorySeriesStorage) ForEachSample(builder IteratorsForFingerprintBuilder) (err error) {
Implement GetAllMetricNames() for memory storage.
diff --git a/code/actions/BetterButtonCustomAction.php b/code/actions/BetterButtonCustomAction.php index <HASH>..<HASH> 100755 --- a/code/actions/BetterButtonCustomAction.php +++ b/code/actions/BetterButtonCustomAction.php @@ -42,12 +42,6 @@ class BetterButtonCustomAction extends BetterButtonAction { protected $redirectURL; - /** - * The success message on completion of the action - * @var string - */ - protected $successMessage; - /** * Builds the button @@ -122,27 +116,6 @@ class BetterButtonCustomAction extends BetterButtonAction { /** - * Sets the success message when action complets - * @param string $message - * @return BetterButtonCustomAction - */ - public function setSuccessMessage($message) { - $this->successMessage = $message; - - return $this; - } - - - /** - * Gets the success message - * @return string - */ - public function getSuccessMessage() { - return $this->successMessage; - } - - - /** * Gets the link for the button * @return string */
API CHANGE: Remove successMessage from BetterButtonCustomAction
diff --git a/kie-remote/kie-remote-services/src/test/java/org/kie/remote/services/rest/query/RemoteServicesQueryDataTest.java b/kie-remote/kie-remote-services/src/test/java/org/kie/remote/services/rest/query/RemoteServicesQueryDataTest.java index <HASH>..<HASH> 100644 --- a/kie-remote/kie-remote-services/src/test/java/org/kie/remote/services/rest/query/RemoteServicesQueryDataTest.java +++ b/kie-remote/kie-remote-services/src/test/java/org/kie/remote/services/rest/query/RemoteServicesQueryDataTest.java @@ -1,5 +1,5 @@ /* - * Copyright 2015 JBoss Inc + * Copyright 2015 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License.
Added missing correct license header. Closes #<I>