diff --git a/code/formfields/EventInvitationField.php b/code/formfields/EventInvitationField.php index <HASH>..<HASH> 100644 --- a/code/formfields/EventInvitationField.php +++ b/code/formfields/EventInvitationField.php @@ -216,6 +216,8 @@ class EventInvitationField extends FormField { ); } + Requirements::clear(); + $response = new SS_HTTPResponse(Convert::array2json($result)); $response->addHeader('Content-Type', 'application/json'); return $response; @@ -239,6 +241,8 @@ class EventInvitationField extends FormField { ); } + Requirements::clear(); + $response = new SS_HTTPResponse(Convert::array2json($result)); $response->addHeader('Content-Type', 'application/json'); return $response;
bugfix - ajax requests to loadfromtime() and loadfromgroup() were getting Requirements js, which breaks the invitations popup when in live mode
diff --git a/tests/test_bulk.py b/tests/test_bulk.py index <HASH>..<HASH> 100644 --- a/tests/test_bulk.py +++ b/tests/test_bulk.py @@ -16,27 +16,6 @@ class LocalizedBulkTestCase(TestCase): @staticmethod def test_localized_bulk_insert(): - """Tests that bulk inserts work properly when using - a :see:LocalizedField in the model.""" - - model = get_fake_model( - 'BulkInsertModel', - { - 'name': LocalizedField(), - 'score': models.IntegerField() - } - ) - - objects = model.objects.bulk_create([ - model(name={'en': 'english name 1', 'ro': 'romanian name 1'}, score=1), - model(name={'en': 'english name 2', 'ro': 'romanian name 2'}, score=2), - model(name={'en': 'english name 3', 'ro': 'romanian name 3'}, score=3) - ]) - - assert model.objects.all().count() == 3 - - @staticmethod - def test_localized_slug_bulk_insert(): """Tests whether bulk inserts work properly when using a :see:LocalizedUniqueSlugField in the model."""
Simplify test case for bulk_create
diff --git a/src/ExceptionHandler.php b/src/ExceptionHandler.php index <HASH>..<HASH> 100644 --- a/src/ExceptionHandler.php +++ b/src/ExceptionHandler.php @@ -26,6 +26,20 @@ class ExceptionHandler extends Handler use ExceptionHandlerTrait; /** + * The exception config. + * + * @var array + */ + protected $config; + + /** + * The container instance. + * + * @var \Illuminate\Contracts\Container\Container + */ + protected $container; + + /** * A list of the exception types that should not be reported. * * @var string[]
Added the needed properties to the L5 handler
diff --git a/TYPO3.Flow/Classes/Core/Bootstrap.php b/TYPO3.Flow/Classes/Core/Bootstrap.php index <HASH>..<HASH> 100644 --- a/TYPO3.Flow/Classes/Core/Bootstrap.php +++ b/TYPO3.Flow/Classes/Core/Bootstrap.php @@ -191,7 +191,7 @@ final class Bootstrap { $this->initializeObjectManager(); $this->initializeSystemLogger(); - $this->initializeLockManager(); +# $this->initializeLockManager(); if ($this->siteLocked === TRUE) return; $this->initializePackages();
[-FEATURE] FLOW3 (Core): For now disabled the Lock Manager which caused too much hassle for the developers in Development context. Will enable it again once it is more mature.

Original-Commit-Hash: e9a0f4de<I>a7a<I>c<I>d<I>f<I>e<I>fc0d
diff --git a/packages/core/lib/segments/attributes/aws.js b/packages/core/lib/segments/attributes/aws.js index <HASH>..<HASH> 100644 --- a/packages/core/lib/segments/attributes/aws.js +++ b/packages/core/lib/segments/attributes/aws.js @@ -31,7 +31,7 @@ Aws.prototype.init = function init(res, serviceName) { this.id_2 = res.extendedRequestId; } - this.addData(capturer.capture(serviceName, res)); + this.addData(capturer.capture(serviceName.toLowerCase(), res)); }; Aws.prototype.addData = function addData(data) {
Translate service name to lower case (#<I>)
diff --git a/src/main/java/com/ecwid/consul/v1/catalog/model/CatalogRegistration.java b/src/main/java/com/ecwid/consul/v1/catalog/model/CatalogRegistration.java index <HASH>..<HASH> 100644 --- a/src/main/java/com/ecwid/consul/v1/catalog/model/CatalogRegistration.java +++ b/src/main/java/com/ecwid/consul/v1/catalog/model/CatalogRegistration.java @@ -203,6 +203,9 @@ public class CatalogRegistration { @SerializedName("NodeMeta") private Map<String, String> nodeMeta; + @SerializedName("SkipNodeUpdate") + private boolean skipNodeUpdate; + public String getDatacenter() { return datacenter; } @@ -259,6 +262,15 @@ public class CatalogRegistration { this.nodeMeta = nodeMeta; } + public boolean isSkipNodeUpdate() { + return skipNodeUpdate; + } + + public CatalogRegistration setSkipNodeUpdate(boolean skipNodeUpdate) { + this.skipNodeUpdate = skipNodeUpdate; + return this; + } + @Override public String toString() { return "CatalogRegistration{" + @@ -269,6 +281,7 @@ public class CatalogRegistration { ", check=" + check + ", writeRequest=" + writeRequest + ", nodeMeta=" + nodeMeta + + ", skipNodeUpdate=" + skipNodeUpdate + '}'; } }
#<I> Add SkipNodeUpdate support
diff --git a/hwd/wrapper.py b/hwd/wrapper.py index <HASH>..<HASH> 100644 --- a/hwd/wrapper.py +++ b/hwd/wrapper.py @@ -29,3 +29,38 @@ class Wrapper: @property def system_path(self): return self.device.sys_path + + @property + def devid(self): + d = self.device + vend_id = d.get('ID_VENDOR_ID') + model_id = d.get('ID_MODEL_ID') + return (vend_id, model_id) + + @property + def model(self): + return self.get_first([ + 'ID_MODEL_FROM_DATABASE', + 'ID_MODEL', + 'ID_MODEL_ID']) + + @property + def vendor(self): + return self.get_first([ + 'ID_OUI_FROM_DATABASE', + 'ID_VENDOR_FROM_DATAASE', + 'ID_VENDOR', + 'ID_VENDOR_ID']) + + @property + def node(self): + return self.device.device_node + + def get_first(self, keys, default=None): + """ For given keys, return value for first key that isn't none """ + d = self.device + for k in keys: + v = d.get(k) + if v: + return v + return default
Added properties for accessing common device data
diff --git a/models/classes/task/migration/service/QueueMigrationService.php b/models/classes/task/migration/service/QueueMigrationService.php index <HASH>..<HASH> 100644 --- a/models/classes/task/migration/service/QueueMigrationService.php +++ b/models/classes/task/migration/service/QueueMigrationService.php @@ -50,7 +50,7 @@ class QueueMigrationService extends ConfigurableService } if ($config->isProcessAll()) { - $config = $configFactory->spawn($config, $filter); + $config = $spawnService->spawn($config, $filter); if ($config) { $report->add(
fix broken QueueMigrationService by changing to spawnService
diff --git a/geomdl/operations.py b/geomdl/operations.py index <HASH>..<HASH> 100644 --- a/geomdl/operations.py +++ b/geomdl/operations.py @@ -610,9 +610,10 @@ def split_curve(obj, param, **kwargs): for _ in range(0, temp_obj.degree + 1): curve2_kv.insert(0, param) - # Control points (use private variable due to differences between rational and non-rational curve) - curve1_ctrlpts = temp_obj._control_points[0:ks + r] - curve2_ctrlpts = temp_obj._control_points[ks + r - 1:] + # Control points (use Pw if rational) + cpts = temp_obj.ctrlptsw if obj.rational else temp_obj.ctrlpts + curve1_ctrlpts = cpts[0:ks + r] + curve2_ctrlpts = cpts[ks + r - 1:] # Create a new curve for the first half curve1 = temp_obj.__class__()
Use getters instead of private vars in split curve
diff --git a/lib/declarative/heritage.rb b/lib/declarative/heritage.rb index <HASH>..<HASH> 100644 --- a/lib/declarative/heritage.rb +++ b/lib/declarative/heritage.rb @@ -1,3 +1,5 @@ +require "declarative/deep_dup" + module Declarative class Heritage < Array # Record inheritable assignments for replay in an inheriting class. diff --git a/lib/declarative/schema.rb b/lib/declarative/schema.rb index <HASH>..<HASH> 100644 --- a/lib/declarative/schema.rb +++ b/lib/declarative/schema.rb @@ -1,6 +1,6 @@ -require "declarative" require "declarative/definitions" require "declarative/defaults" +require "declarative/heritage" module Declarative # Include this to maintain inheritable, nested schemas with ::defaults and
Fix warning about circular require

```
declarative-<I>/lib/declarative.rb:5: warning: loading in progress, circular require considered harmful
```
diff --git a/src/index.js b/src/index.js index <HASH>..<HASH> 100644 --- a/src/index.js +++ b/src/index.js @@ -4,9 +4,10 @@ module.exports = function(source) { this.cacheable(false); if (markoCompiler.compileForBrowser) { - return markoCompiler.compileForBrowser(source, this.resourcePath, { + var compiled = markoCompiler.compileForBrowser(source, this.resourcePath, { writeToDisk: false }); + return compiled.code; } else { return markoCompiler.compile(source, this.resourcePath, { writeToDisk: false
Updated to latest compileForBrowser API
diff --git a/src/Leevel/Http/Response.php b/src/Leevel/Http/Response.php index <HASH>..<HASH> 100644 --- a/src/Leevel/Http/Response.php +++ b/src/Leevel/Http/Response.php @@ -107,14 +107,6 @@ class Response extends BaseResponse } /** - * 获取 COOKIE. - */ - public function getCookies(): array - { - return $this->headers->getCookies(); - } - - /** * 取回 JSON 数据. * * @return mixed
refactor(http): refactor response
diff --git a/lib/core/src/client/preview/start.js b/lib/core/src/client/preview/start.js index <HASH>..<HASH> 100644 --- a/lib/core/src/client/preview/start.js +++ b/lib/core/src/client/preview/start.js @@ -316,9 +316,8 @@ export default function start(render, { decorateStory } = {}) { reqs = [loadable]; } - let currentExports; + let currentExports = new Set(); if (reqs) { - currentExports = new Set(); reqs.forEach(req => { req.keys().forEach(filename => { const fileExports = req(filename); @@ -326,7 +325,16 @@ export default function start(render, { decorateStory } = {}) { }); }); } else { - currentExports = new Set(loadable()); + const exported = loadable(); + if (Array.isArray(exported) && !exported.find(obj => !obj.default)) { + currentExports = new Set(exported); + } else if (exported) { + logger.warn( + `Loader function passed to 'configure' should return void or an array of module exports. Received: ${JSON.stringify( + exported + )}` + ); + } } const removed = [...previousExports].filter(exp => !currentExports.has(exp));
Core: Make load by function more strict with warning
diff --git a/client/assets/scripts/services/search.js b/client/assets/scripts/services/search.js index <HASH>..<HASH> 100644 --- a/client/assets/scripts/services/search.js +++ b/client/assets/scripts/services/search.js @@ -146,7 +146,7 @@ module.exports = [ } // Prioritize exact match - if (config.exactMatch) { + if (config.exactMatch && config.searchField.name) { match = _.findIndex(list, function (item) { return self.query.toLowerCase() === item.name.toLowerCase(); });
Exact match should work with "name" field only
diff --git a/pandas/core/tools/datetimes.py b/pandas/core/tools/datetimes.py index <HASH>..<HASH> 100644 --- a/pandas/core/tools/datetimes.py +++ b/pandas/core/tools/datetimes.py @@ -762,7 +762,9 @@ def to_datetime( If parsing succeeded. Return type depends on input: - - list-like: DatetimeIndex + - list-like: + - DatetimeIndex, if timezone naive or aware with the same timezone + - Index of object dtype, if timezone aware with mixed time offsets - Series: Series of datetime64 dtype - scalar: Timestamp
Updated the return type section of to_datetime (#<I>)

* Updated the return type section of to_datetime to include list-like mixed timezone inputs
* updated the Returns section
* updated the Return section as suggested by mroeschke
* removed the extra line in my previous commit
* Update pandas/core/tools/datetimes.py
diff --git a/grimoire/ocean/elastic.py b/grimoire/ocean/elastic.py index <HASH>..<HASH> 100644 --- a/grimoire/ocean/elastic.py +++ b/grimoire/ocean/elastic.py @@ -224,17 +224,20 @@ class ElasticOcean(object): } ''' % (date_field, from_date) + order_field = 'metadata__updated_on' + query = """ { "query": { "bool": { "must": [%s] } - } + }, + "sort": { "%s": { "order": "asc" }} } - """ % (filters) + """ % (filters, order_field) - # logging.debug("%s %s" % (url, query)) + # logging.debug("%s %s", url, query) r = self.requests.post(url, data=query)
[enrich] Get raw items ordered by update_time to enrich them, so that incremental enrichment works when the enrichment process fails and is restarted.
diff --git a/src/mol.js b/src/mol.js index <HASH>..<HASH> 100644 --- a/src/mol.js +++ b/src/mol.js @@ -786,7 +786,7 @@ function shouldIntroduceTraceBreak(aaStretch, prevResidue, thisResidue) { } function addNonEmptyTrace(traces, trace) { - if (trace.length() === 0) { + if (trace.length() < 2) { return; } traces.push(trace);
only add traces longer than one residue
diff --git a/spec/unit/plugins/aix/virtualization_spec.rb b/spec/unit/plugins/aix/virtualization_spec.rb index <HASH>..<HASH> 100644 --- a/spec/unit/plugins/aix/virtualization_spec.rb +++ b/spec/unit/plugins/aix/virtualization_spec.rb @@ -15,7 +15,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -require File.expand_path(File.dirname(__FILE__) + '/../../../spec_helper.rb') +require 'spec_helper' describe Ohai::System, "AIX virtualization plugin" do
Use spec_helper directly rather than long relative pathname
diff --git a/src/main/java/org/nlpcn/es4sql/query/AggregationQuery.java b/src/main/java/org/nlpcn/es4sql/query/AggregationQuery.java index <HASH>..<HASH> 100644 --- a/src/main/java/org/nlpcn/es4sql/query/AggregationQuery.java +++ b/src/main/java/org/nlpcn/es4sql/query/AggregationQuery.java @@ -7,6 +7,7 @@ import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchType; import org.elasticsearch.client.Client; import org.elasticsearch.index.query.BoolFilterBuilder; +import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.aggregations.AbstractAggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilders; @@ -44,8 +45,7 @@ public class AggregationQuery extends Query { if (where != null) { boolFilter = FilterMaker.explan(where); - filter = AggregationBuilders.filter("filter").filter(boolFilter); - request.addAggregation(filter); + request.setQuery(QueryBuilders.filteredQuery(null, boolFilter)); } //
use filtered query instead of aggregation filter when using WHERE and GROUP BY
diff --git a/src/Commands/RunCommand.php b/src/Commands/RunCommand.php index <HASH>..<HASH> 100644 --- a/src/Commands/RunCommand.php +++ b/src/Commands/RunCommand.php @@ -58,14 +58,14 @@ class RunCommand extends Command $options = collect($this->option('option') ?? []) ->mapWithKeys(function ($value, $key) { - list($key, $value) = explode('=', $value); + list($key, $value) = explode('=', $value, 2); return ["--$key" => $value]; }) ->merge($this->option('argument') ?? []) ->mapWithKeys(function ($value, $key) { if (!Str::startsWith($key, '--')) { - list($key, $value) = explode('=', $value); + list($key, $value) = explode('=', $value, 2); } return [$key => $value];
Set a limit on explode() (#<I>)
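The pitfall fixed here is language-agnostic: splitting a key=value pair without a limit truncates any value that itself contains the separator. A minimal Python sketch of the same idea (the sample string is invented):

```python
# Splitting "key=value" pairs on every "=" corrupts values that contain "=".
raw = "connection=host=localhost;port=5432"

# Unlimited split: everything after the second "=" is lost.
key, value = raw.split("=")[:2]
assert (key, value) == ("connection", "host")

# Capping at one split keeps the full value, which is what the
# explode('=', $value, 2) change above does in PHP.
key, value = raw.split("=", 1)
assert (key, value) == ("connection", "host=localhost;port=5432")
```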
diff --git a/dockermap/map/action/simple.py b/dockermap/map/action/simple.py index <HASH>..<HASH> 100644 --- a/dockermap/map/action/simple.py +++ b/dockermap/map/action/simple.py @@ -218,10 +218,11 @@ class SignalActionGenerator(AbstractActionGenerator): class ImagePullActionGenerator(AbstractActionGenerator): + pull_all_images = False pull_insecure_registry = False - policy_options = ['pull_insecure_regsitry'] + policy_options = ['pull_all_images', 'pull_insecure_regsitry'] def get_state_actions(self, state, **kwargs): - if state.config_id.config_type == ItemType.IMAGE and state.base_state == State.ABSENT: + if state.config_id.config_type == ItemType.IMAGE and (self.pull_all_images or state.base_state == State.ABSENT): return [ItemAction(state, ImageAction.PULL, insecure_registry=self.pull_insecure_registry)]
Added option for pulling all images unconditionally.
diff --git a/spyder/plugins/ipythonconsole.py b/spyder/plugins/ipythonconsole.py index <HASH>..<HASH> 100644 --- a/spyder/plugins/ipythonconsole.py +++ b/spyder/plugins/ipythonconsole.py @@ -72,7 +72,7 @@ QTCONSOLE_REQVER = ">=4.2.0" dependencies.add("qtconsole", _("Integrate the IPython console"), required_version=QTCONSOLE_REQVER) -IPYTHON_REQVER = ">=4.0" +IPYTHON_REQVER = ">=4.0;<6.0" if PY2 else ">=4.0" dependencies.add("IPython", _("IPython interactive python environment"), required_version=IPYTHON_REQVER)
Added ipython dependency >=<I>;<<I> for python2
diff --git a/runcommands/config.py b/runcommands/config.py index <HASH>..<HASH> 100644 --- a/runcommands/config.py +++ b/runcommands/config.py @@ -197,15 +197,12 @@ class RawConfig(OrderedDict): def _iter_dotted(self, root=''): for k in self: - v = self[k] + v = super().__getitem__(k) qualified_k = '.'.join((root, k)) if root else k - if not isinstance(v, RawConfig): - yield qualified_k - elif not v: - yield qualified_k + if isinstance(v, RawConfig) and v: + yield from v._iter_dotted(qualified_k) else: - for qualified_k in v._iter_dotted(qualified_k): - yield qualified_k + yield qualified_k def _to_string(self, flat=False, values_only=False, exclude=(), level=0, root=''): out = []
Improve iteration over dotted config keys

- Skip interpolation of values
- Use `yield from` to simplify recursive calls
- Simplify branching and remove some duplication
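As a standalone illustration of the simplification, here is a hedged Python sketch of the same pattern outside runcommands: recursing into nested mappings with `yield from` instead of re-yielding from an explicit inner loop (the config shape is invented):

```python
def iter_dotted(mapping, root=""):
    """Yield dotted key paths for a nested dict, recursing with `yield from`."""
    for key, value in mapping.items():
        qualified = f"{root}.{key}" if root else key
        if isinstance(value, dict) and value:
            # Replaces the manual `for k in inner: yield k` loop.
            yield from iter_dotted(value, qualified)
        else:
            yield qualified

config = {"db": {"host": "localhost", "port": 5432}, "debug": False, "empty": {}}
print(list(iter_dotted(config)))  # ['db.host', 'db.port', 'debug', 'empty']
```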
diff --git a/index.php b/index.php index <HASH>..<HASH> 100644 --- a/index.php +++ b/index.php @@ -1,3 +1,3 @@ <?php -require_once __DIR__ . '/vendor' . '/autoload.php'; \ No newline at end of file +require_once __DIR__ . '/vendor' . '/autoload.php';
add newline at end of index.php
diff --git a/views/default/toolbar.php b/views/default/toolbar.php index <HASH>..<HASH> 100644 --- a/views/default/toolbar.php +++ b/views/default/toolbar.php @@ -23,7 +23,8 @@ if (window.localStorage) { } EOD; -$url = $panels['request']->getUrl(); +$firstPanel = reset($panels); +$url = $firstPanel->getUrl(); ?> <div id="yii-debug-toolbar" class="yii-debug-toolbar-<?= $position ?>"> <div class="yii-debug-toolbar-block">
use the url of the first panel as link, when toolbar is minimized
diff --git a/plan/optimizer.go b/plan/optimizer.go index <HASH>..<HASH> 100644 --- a/plan/optimizer.go +++ b/plan/optimizer.go @@ -30,7 +30,7 @@ func Optimize(ctx context.Context, node ast.Node, sb SubQueryBuilder, is infosch if err := InferType(node); err != nil { return nil, errors.Trace(err) } - if !UseNewPlanner { + if _, ok := node.(*ast.SelectStmt); !ok || !UseNewPlanner { if err := logicOptimize(ctx, node); err != nil { return nil, errors.Trace(err) }
let update/delete do old logicOptimize (#<I>)
diff --git a/py/nupic/encoders/base.py b/py/nupic/encoders/base.py index <HASH>..<HASH> 100644 --- a/py/nupic/encoders/base.py +++ b/py/nupic/encoders/base.py @@ -205,14 +205,21 @@ class Encoder(object): pass ############################################################################ - def _getInputValue(self, obj, fieldname): + def _getInputValue(self, obj, fieldName): """ Gets the value of a given field from the input record """ if isinstance(obj, dict): - return obj[fieldname] + if not fieldName in obj: + knownFields = ",".join([key for key in obj.keys() if key[:1] != "_"]) + raise Exception( + "Unknown field name '%s' in input record. Known fields are '%s'." % ( + fieldName, knownFields + ) + ) + return obj[fieldName] else: - return getattr(obj, fieldname) + return getattr(obj, fieldName) ############################################################################ def getEncoderList(self):
Better error from base encoder for bad fields. If an attempt is made to retrieve a field name that is not within the input fields, a descriptive exception is raised instead of a bare KeyError.
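A self-contained sketch of the improved lookup, with invented field names, showing the more actionable error:

```python
def get_input_value(record, field_name):
    """Fetch a field from a dict-like record, failing with a helpful message."""
    if isinstance(record, dict):
        if field_name not in record:
            known = ", ".join(k for k in record if not k.startswith("_"))
            raise KeyError(f"Unknown field name '{field_name}' in input record. "
                           f"Known fields are '{known}'.")
        return record[field_name]
    return getattr(record, field_name)

record = {"consumption": 3.1, "timestamp": 42, "_reset": False}
try:
    get_input_value(record, "consumtion")  # note the typo
except KeyError as err:
    print(err)  # lists 'consumption, timestamp' as the known fields
```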
diff --git a/allennlp/modules/seq2seq_encoders/intra_sentence_attention.py b/allennlp/modules/seq2seq_encoders/intra_sentence_attention.py index <HASH>..<HASH> 100644 --- a/allennlp/modules/seq2seq_encoders/intra_sentence_attention.py +++ b/allennlp/modules/seq2seq_encoders/intra_sentence_attention.py @@ -141,9 +141,11 @@ class IntraSentenceAttentionEncoder(Seq2SeqEncoder): similarity_function = SimilarityFunction.from_params(params.pop('similarity_function', {})) num_attention_heads = params.pop_int('num_attention_heads', 1) combination = params.pop('combination', '1,2') + output_dim = params.pop_int('output_dim', None) params.assert_empty(cls.__name__) return cls(input_dim=input_dim, projection_dim=projection_dim, similarity_function=similarity_function, num_attention_heads=num_attention_heads, - combination=combination) + combination=combination, + output_dim=output_dim)
fix sentence encoder from params (#<I>)
diff --git a/cordova-lib/src/cordova/prepare.js b/cordova-lib/src/cordova/prepare.js index <HASH>..<HASH> 100644 --- a/cordova-lib/src/cordova/prepare.js +++ b/cordova-lib/src/cordova/prepare.js @@ -21,6 +21,7 @@ var cordova_util = require('./util'), ConfigParser = require('cordova-common').ConfigParser, PlatformJson = require('cordova-common').PlatformJson, PluginInfoProvider = require('cordova-common').PluginInfoProvider, + PlatformMunger = require('cordova-common').ConfigChanges.PlatformMunger, events = require('cordova-common').events, platforms = require('../platforms/platforms'), PlatformApiPoly = require('../platforms/PlatformApiPoly'), @@ -117,6 +118,13 @@ function preparePlatforms (platformList, projectRoot, options) { var browserify = require('../plugman/browserify'); return browserify(project, platformApi); } + }) + .then(function () { + // Handle edit-config in config.xml + var platformRoot = path.join(projectRoot, 'platforms', platform); + var platformJson = PlatformJson.load(platformRoot, platform); + var munger = new PlatformMunger(platform, platformRoot, platformJson); + munger.add_config_changes(project.projectConfig, /*should_increment=*/true).save_all(); }); }); }));
CB-<I> Handle edit-config in config.xml on prepare

This closes #<I>
diff --git a/test/download-dht-torrent.js b/test/download-dht-torrent.js index <HASH>..<HASH> 100644 --- a/test/download-dht-torrent.js +++ b/test/download-dht-torrent.js @@ -77,13 +77,22 @@ test('Download using DHT (via .torrent file)', function (t) { file.getBuffer(function (err, buf) { if (err) throw err t.deepEqual(buf, leavesFile, 'downloaded correct content') + gotBuffer = true + maybeDone() }) }) torrent.once('done', function () { t.pass('client2 downloaded torrent from client1') - cb(null, client2) + torrentDone = true + maybeDone() }) + + var torrentDone = false + var gotBuffer = false + function maybeDone () { + if (torrentDone && gotBuffer) cb(null, client2) + } }) }] }, function (err, r) {
test: don't assume event ordering
diff --git a/jsoncfg/__init__.py b/jsoncfg/__init__.py index <HASH>..<HASH> 100644 --- a/jsoncfg/__init__.py +++ b/jsoncfg/__init__.py @@ -40,7 +40,7 @@ __all__ = [ # case increase only version_info[2]. # version_info[2]: Increase in case of bugfixes. Also use this if you added new features # without modifying the behavior of the previously existing ones. -version_info = (0, 3, 4) +version_info = (0, 4, 0) __version__ = '.'.join(str(n) for n in version_info) __author__ = 'István Pásztor' __license__ = 'MIT'
bumping version to <I>
diff --git a/cas/middleware.py b/cas/middleware.py index <HASH>..<HASH> 100644 --- a/cas/middleware.py +++ b/cas/middleware.py @@ -12,6 +12,10 @@ from django.contrib.auth.views import login, logout from django.core.urlresolvers import reverse from django.http import HttpResponseRedirect, HttpResponseForbidden from django.core.exceptions import ImproperlyConfigured +try: + from django.utils.deprecation import MiddlewareMixin +except ImportError: + MiddlewareMixin = object from cas.exceptions import CasTicketException from cas.views import login as cas_login, logout as cas_logout @@ -19,7 +23,7 @@ from cas.views import login as cas_login, logout as cas_logout __all__ = ['CASMiddleware'] -class CASMiddleware(object): +class CASMiddleware(MiddlewareMixin): """ Middleware that allows CAS authentication on admin pages """ @@ -81,7 +85,7 @@ class CASMiddleware(object): return None -class ProxyMiddleware(object): +class ProxyMiddleware(MiddlewareMixin): # Middleware used to "fake" the django app that it lives at the Proxy Domain def process_request(self, request):
Compatibility with Django <I> Middleware
diff --git a/api/v1/api.go b/api/v1/api.go index <HASH>..<HASH> 100644 --- a/api/v1/api.go +++ b/api/v1/api.go @@ -63,7 +63,11 @@ type getIsiVolumeAttributesResp struct { // Isi PAPI export path JSON struct type ExportPathList struct { - Paths []string `json:"paths"` + Paths []string `json:"paths"` + MapAll struct { + User string `json:"user"` + Groups []string `json:"groups,omitempty"` + } `json:"map_all"` } // Isi PAPI export clients JSON struct @@ -305,6 +309,10 @@ func (papi *PapiConnection) Export(path string) (err error) { } var data = &ExportPathList{Paths: []string{path}} + data.MapAll.User = papi.username + if papi.group != "" { + data.MapAll.Groups = append(data.MapAll.Groups, papi.group) + } headers := map[string]string{"Content-Type": "application/json"} var resp *postIsiExportResp
Fix for volume chown issue. This fixes the issue where ownership of directories created on a mounted volume could not be changed. Added the user in the config file to the map_all list in the export parameters for the volume.
diff --git a/pythran/typing.py b/pythran/typing.py index <HASH>..<HASH> 100644 --- a/pythran/typing.py +++ b/pythran/typing.py @@ -4,6 +4,15 @@ import operator from tables import type_to_str, operator_to_lambda, modules from passes import global_declarations, constant_value +if not "has_path" in nx.__dict__: + def has_path(G,source,target): + try: + sp = nx.shortest_path(G, source, target) + except nx.NetworkXNoPath: + return False + return True + nx.has_path=has_path + class Reorder(ast.NodeVisitor): def __init__(self, typedeps): self.typedeps=typedeps
define has_path in networkx if it is not already defined
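A minimal, runnable version of the same shim (requires networkx; modern releases already ship has_path, so the probe-and-patch pattern is the point here):

```python
import networkx as nx

# Backfill nx.has_path on releases that predate it: probe for a shortest
# path and translate the "no path" exception into False.
if not hasattr(nx, "has_path"):
    def has_path(G, source, target):
        try:
            nx.shortest_path(G, source, target)
        except nx.NetworkXNoPath:
            return False
        return True
    nx.has_path = has_path

G = nx.DiGraph([(1, 2), (2, 3)])
print(nx.has_path(G, 1, 3), nx.has_path(G, 3, 1))  # True False
```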
diff --git a/lib/markaby/builder.rb b/lib/markaby/builder.rb index <HASH>..<HASH> 100644 --- a/lib/markaby/builder.rb +++ b/lib/markaby/builder.rb @@ -79,7 +79,7 @@ module Markaby @streams = [[]] @assigns = assigns.dup @_helper = helper - @elements = {} + @used_ids = {} @@options.each do |k, v| instance_variable_set("@#{k}", @assigns.delete(k) || v) @@ -149,7 +149,7 @@ module Markaby end if atname == :id ele_id = v.to_s - if @elements.has_key? ele_id + if @used_ids.has_key? ele_id raise InvalidXhtmlError, "id `#{ele_id}' already used (id's must be unique)." end end @@ -163,7 +163,7 @@ module Markaby end f = fragment { @builder.method_missing(tag, *args, &block) } - @elements[ele_id] = f if ele_id + @used_ids[ele_id] = f if ele_id f end
Rename @elements to used_ids
diff --git a/test/candidates_test.rb b/test/candidates_test.rb index <HASH>..<HASH> 100644 --- a/test/candidates_test.rb +++ b/test/candidates_test.rb @@ -114,4 +114,30 @@ class CandidatesTest < TestCaseClass end end + test "allows to iterate through candidates without passing block" do + klass = Class.new model_class do + def slug_candidates + :name + end + end + with_instances_of klass do |_, city| + candidates = FriendlyId::Candidates.new(city, city.slug_candidates) + assert_equal candidates.each, ['new-york'] + end + end + + test "iterates through candidates with passed block" do + klass = Class.new model_class do + def slug_candidates + :name + end + end + with_instances_of klass do |_, city| + collected_candidates = [] + candidates = FriendlyId::Candidates.new(city, city.slug_candidates) + candidates.each { |candidate| collected_candidates << candidate } + assert_equal collected_candidates, ['new-york'] + end + end + end
Tests for Candidates#each.
diff --git a/sukhoi.py b/sukhoi.py index <HASH>..<HASH> 100644 --- a/sukhoi.py +++ b/sukhoi.py @@ -20,7 +20,7 @@ class Miner(list): task.start() def __init__(self, url, headers=default_headers, args={}, - method='get', payload={}, auth=(), attempts=5): + method='get', payload=None, auth=None, attempts=5): """ Resource Param: url
Fixing to work with new websnake version.
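Whatever the websnake-specific motivation, moving from `payload={}` to `payload=None` also sidesteps Python's shared-mutable-default pitfall; a quick sketch (the function is an invented stand-in, not Miner itself):

```python
def fetch_bad(url, payload={}):
    # The {} default is created once and shared by every call.
    payload.setdefault("seen", []).append(url)
    return payload

print(fetch_bad("a"))  # {'seen': ['a']}
print(fetch_bad("b"))  # {'seen': ['a', 'b']}  <- state leaked across calls

def fetch_good(url, payload=None):
    payload = {} if payload is None else payload
    payload.setdefault("seen", []).append(url)
    return payload

print(fetch_good("a"))  # {'seen': ['a']}
print(fetch_good("b"))  # {'seen': ['b']}
```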
diff --git a/dipper/sources/FlyBase.py b/dipper/sources/FlyBase.py index <HASH>..<HASH> 100644 --- a/dipper/sources/FlyBase.py +++ b/dipper/sources/FlyBase.py @@ -416,7 +416,11 @@ class FlyBase(PostgreSQLSource): col = self.files[src_key]['columns'] - with gzip.open(raw, 'rt') as tsvfile: + # JR - I've set encoding to latin-1 to fix the UnicodeDecodeError that happens + # when the default encoding (utf-8) is used. This possibly will break if/when + # the encoding of this file upstream at Flybase is changed to utf-8. If so, + # trying setting encoding='utf-8' below + with gzip.open(raw, 'rt', encoding='latin-1') as tsvfile: reader = csv.reader(tsvfile, delimiter='\t') # skip first four lines for _ in range(0, 2):
Cherry picked commits from dataset PR to fix Flybase ingest (set encoding to latin-1 in _fyref_to_pmid() to fix UnicodeDecodeError)
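A self-contained sketch of the fix: reading a gzipped TSV in text mode with an explicit latin-1 encoding, so a lone 0xE9 byte ('é') doesn't trip the utf-8 default (the file content here is invented):

```python
import csv
import gzip

# Create a tiny latin-1 encoded, gzipped TSV so the example stands alone.
with gzip.open("sample.tsv.gz", "wt", encoding="latin-1") as fh:
    fh.write("FBrf0000001\tCafé reference\n")

# The fix: pass encoding= explicitly; the utf-8 default would raise
# UnicodeDecodeError on the 0xE9 byte.
with gzip.open("sample.tsv.gz", "rt", encoding="latin-1") as tsvfile:
    for row in csv.reader(tsvfile, delimiter="\t"):
        print(row)  # ['FBrf0000001', 'Café reference']
```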
diff --git a/src/RecordsTransformer.php b/src/RecordsTransformer.php index <HASH>..<HASH> 100644 --- a/src/RecordsTransformer.php +++ b/src/RecordsTransformer.php @@ -155,6 +155,10 @@ class RecordsTransformer $datatableParameters['DT_RowClass'] = $record->laratablesRowClass(); } + if (method_exists($this->model, 'laratablesRowData')) { + $datatableParameters['DT_RowData'] = $record->laratablesRowData(); + } + return $datatableParameters; } }
Allow setting data attributes for each row of the table
diff --git a/core-test/src/test/java/com/microsoft/windowsazure/RunCukesTest.java b/core-test/src/test/java/com/microsoft/windowsazure/RunCukesTest.java index <HASH>..<HASH> 100644 --- a/core-test/src/test/java/com/microsoft/windowsazure/RunCukesTest.java +++ b/core-test/src/test/java/com/microsoft/windowsazure/RunCukesTest.java @@ -23,6 +23,6 @@ import org.junit.runner.RunWith; @RunWith(Cucumber.class) @CucumberOptions( format = {"html:target/cucumber-html-report", "json:target/cucumber-json-report.json"}, -tags = {"~@ignore"}) +tags = {"@livetest"}) public class RunCukesTest { } \ No newline at end of file
Change cucumber test tag to not run by default
diff --git a/environs/tools/tools.go b/environs/tools/tools.go index <HASH>..<HASH> 100644 --- a/environs/tools/tools.go +++ b/environs/tools/tools.go @@ -121,14 +121,7 @@ func FindTools(env environs.BootstrapEnviron, majorVersion, minorVersion int, st if err != nil { return nil, err } - list, err := FindToolsForCloud(sources, cloudSpec, streams, majorVersion, minorVersion, filter) - if err != nil { - return nil, err - } - for _, tool := range list { - logger.Debugf("Located tool version %s at %s", tool.Version.String(), tool.URL) - } - return list, nil + return FindToolsForCloud(sources, cloudSpec, streams, majorVersion, minorVersion, filter) } // FindToolsForCloud returns a List containing all tools in the given streams, with a given
Remove the debug log

Instead of this debug log, favour the one in bootstrap.
diff --git a/Gruntfile.js b/Gruntfile.js index <HASH>..<HASH> 100644 --- a/Gruntfile.js +++ b/Gruntfile.js @@ -12,6 +12,9 @@ module.exports = function (grunt) { } }, watch: { + options: { + interval: 1000 + }, css: { files: '**/*.scss', tasks: ['sass']
Increase the file watch interval to give my CPU a break.
diff --git a/edisgo/grid/network.py b/edisgo/grid/network.py index <HASH>..<HASH> 100644 --- a/edisgo/grid/network.py +++ b/edisgo/grid/network.py @@ -859,6 +859,38 @@ class Results: self._pfa_v_mag_pu = pypsa @property + def i_res(self): + """ + Current results from power flow analysis in A. + + Holds power flow analysis results for current for the last + iteration step. Index of the DataFrame is a DatetimeIndex indicating + the time period the power flow analysis was conducted for; columns + of the DataFrame are the edges as well as stations of the grid + topology. + ToDo: add unit + + Parameters + ---------- + pypsa: `pandas.DataFrame<dataframe>` + Results time series of current in A from the + `PyPSA network <https://www.pypsa.org/doc/components.html#network>`_ + + Provide this if you want to set values. For retrieval of data do + not pass an argument + + Returns + ------- + :pandas:`pandas.DataFrame<dataframe>` + Current results from power flow analysis + """ + return self._i_res + + @i_res.setter + def i_res(self, pypsa): + self._i_res = pypsa + + @property def equipment_changes(self): """ Tracks changes in the equipment (e.g. replaced or added cable, etc.)
Add getter and setter for i_res
diff --git a/lib/origami/graphics/xobject.rb b/lib/origami/graphics/xobject.rb index <HASH>..<HASH> 100644 --- a/lib/origami/graphics/xobject.rb +++ b/lib/origami/graphics/xobject.rb @@ -659,22 +659,22 @@ module Origami data = fd.read else data = File.binread(File.expand_path(path)) - format ||= File.extname(path) + format ||= File.extname(path)[1..-1] end image = ImageXObject.new raise ArgumentError, "Missing file format" if format.nil? case format.downcase - when '.jpg', 'jpeg', '.jpe', '.jif', '.jfif', '.jfi' + when 'jpg', 'jpeg', 'jpe', 'jif', 'jfif', 'jfi' image.setFilter :DCTDecode image.encoded_data = data - when '.jp2','.jpx','.j2k','.jpf','.jpm','.mj2' + when 'jp2','jpx','j2k','jpf','jpm','mj2' image.setFilter :JPXDecode image.encoded_data = data - when '.jb2', '.jbig', '.jbig2' + when '.b2', 'jbig', 'jbig2' image.setFilter :JBIG2Decode image.encoded_data = data else
graphics/xobject: fix regression in from_image_file
diff --git a/lib/PaymentMethod.php b/lib/PaymentMethod.php index <HASH>..<HASH> 100644 --- a/lib/PaymentMethod.php +++ b/lib/PaymentMethod.php @@ -30,6 +30,7 @@ namespace Stripe; * @property \Stripe\StripeObject $eps * @property \Stripe\StripeObject $fpx * @property \Stripe\StripeObject $giropay + * @property \Stripe\StripeObject $grabpay * @property \Stripe\StripeObject $ideal * @property \Stripe\StripeObject $interac_present * @property bool $livemode Has the value <code>true</code> if the object exists in live mode or the value <code>false</code> if the object exists in test mode.
Codegen for openapi bb9e<I>d
diff --git a/core/src/main/java/hudson/security/LDAPSecurityRealm.java b/core/src/main/java/hudson/security/LDAPSecurityRealm.java index <HASH>..<HASH> 100644 --- a/core/src/main/java/hudson/security/LDAPSecurityRealm.java +++ b/core/src/main/java/hudson/security/LDAPSecurityRealm.java @@ -226,9 +226,9 @@ public class LDAPSecurityRealm extends SecurityRealm { /** * Query to locate an entry that identifies the user, given the user name string. * - * Normally something like "uid={0}" + * Normally "uid={0}" * - * @see FilterBasedLdapUserSearch#searchFilter + * @see FilterBasedLdapUserSearch */ public final String userSearch; @@ -282,7 +282,7 @@ public class LDAPSecurityRealm extends SecurityRealm { this.rootDN = rootDN.trim(); this.userSearchBase = fixNull(userSearchBase).trim(); userSearch = fixEmptyAndTrim(userSearch); - this.userSearch = userSearch!=null ? userSearch : "(| (uid={0}) (mail={0}) (cn={0}))"; + this.userSearch = userSearch!=null ? userSearch : "uid={0}"; this.groupSearchBase = fixEmptyAndTrim(groupSearchBase); }
rolling back the LDAP change based on the discussion: <URL>
diff --git a/coconut/command/util.py b/coconut/command/util.py index <HASH>..<HASH> 100644 --- a/coconut/command/util.py +++ b/coconut/command/util.py @@ -239,7 +239,9 @@ def run_cmd(cmd, show_output=True, raise_errs=True, **kwargs): elif show_output: return subprocess.call(cmd, **kwargs) else: - stdout, stderr, _ = call_output(cmd, **kwargs) + stdout, stderr, retcode = call_output(cmd, **kwargs) + if retcode and raise_errs: + raise subprocess.CalledProcessError(retcode, cmd, stdout, stderr) return "".join(stdout + stderr) diff --git a/tests/main_test.py b/tests/main_test.py index <HASH>..<HASH> 100644 --- a/tests/main_test.py +++ b/tests/main_test.py @@ -315,6 +315,7 @@ class TestShell(unittest.TestCase): def test_jupyter(self): call(["coconut", "--jupyter"], assert_output="Coconut: Successfully installed Coconut Jupyter kernel.") + call(["jupyter", "kernelspec", "list"], assert_output="coconut") class TestCompilation(unittest.TestCase):
Fix jupyter installation error

Resolves #<I>.
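A standalone Python sketch of the run_cmd behavior added here: when output is being captured, a non-zero exit status is surfaced as CalledProcessError instead of being silently folded into the returned text (details are simplified from the coconut original):

```python
import subprocess
import sys

def run_cmd(cmd, raise_errs=True):
    proc = subprocess.run(cmd, capture_output=True, text=True)
    if proc.returncode and raise_errs:
        raise subprocess.CalledProcessError(
            proc.returncode, cmd, proc.stdout, proc.stderr)
    return proc.stdout + proc.stderr

print(run_cmd([sys.executable, "-c", "print('ok')"]))
try:
    run_cmd([sys.executable, "-c", "import sys; sys.exit(3)"])
except subprocess.CalledProcessError as err:
    print("failed with exit code", err.returncode)
```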
diff --git a/pyathena/cursor.py b/pyathena/cursor.py index <HASH>..<HASH> 100644 --- a/pyathena/cursor.py +++ b/pyathena/cursor.py @@ -191,8 +191,7 @@ class Cursor(object): raise DatabaseError('KeyError `Rows`') processed_rows = [] if len(rows) > 0: - offset = 1 if not self._next_token and \ - self._is_first_row_column_labels(rows) else 0 + offset = 1 if not self._next_token and self._is_first_row_column_labels(rows) else 0 processed_rows = [ tuple([self._converter.convert(meta.get('Type', None), row.get('VarCharValue', None))
Fix E<I> continuation line over-indented for hanging indent
diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/commands/meta/manifest.py b/datadog_checks_dev/datadog_checks/dev/tooling/commands/meta/manifest.py index <HASH>..<HASH> 100644 --- a/datadog_checks_dev/datadog_checks/dev/tooling/commands/meta/manifest.py +++ b/datadog_checks_dev/datadog_checks/dev/tooling/commands/meta/manifest.py @@ -2,6 +2,7 @@ # All rights reserved # Licensed under Simplified BSD License (see LICENSE) import json +import uuid import click @@ -154,6 +155,10 @@ def migrate(ctx, integration, to_version): # Explicitly set the manifest_version first so it appears at the top of the manifest migrated_manifest.set_path("/manifest_version", "2.0.0") + # Generate and introduce a uuid + app_uuid = str(uuid.uuid4()) + migrated_manifest.set_path("/app_uuid", app_uuid) + for key, val in V2_TO_V1_MAP.items(): if val == SKIP_IF_FOUND: continue
Add app_uuid to manifest migrator (#<I>)
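The migration step itself is small; a toy equivalent with a plain dict standing in for the JSON manifest object:

```python
import uuid

manifest = {"manifest_version": "2.0.0"}
# Generate and introduce a uuid, as the migrator does for /app_uuid.
manifest["app_uuid"] = str(uuid.uuid4())
print(manifest)
```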
diff --git a/resources/lang/ko-KR/dashboard.php b/resources/lang/ko-KR/dashboard.php index <HASH>..<HASH> 100644 --- a/resources/lang/ko-KR/dashboard.php +++ b/resources/lang/ko-KR/dashboard.php @@ -18,7 +18,7 @@ return [ 'incidents' => [ 'title' => '문제 및 예정', 'incidents' => '문제', - 'logged' => '{0} There are no incidents, good work.|[1]You have logged one incident.|[2,*]You have reported <strong>:count</strong> incidents.', + 'logged' => '{0}There are no incidents, good work.|[1]You have logged one incident.|[2,*]You have reported <strong>:count</strong> incidents.', 'incident-create-template' => '템플릿 생성', 'incident-templates' => '문제 템플릿', 'updates' => [
New translations dashboard.php (Korean)
diff --git a/lib/progressPlugBrowser.js b/lib/progressPlugBrowser.js index <HASH>..<HASH> 100644 --- a/lib/progressPlugBrowser.js +++ b/lib/progressPlugBrowser.js @@ -1,4 +1,4 @@ -import { Plug } from './plug.js'; +import { Plug } from '../plug.js'; function _doXhr({ xhr, body, progressInfo }) { return new Promise((resolve, reject) => { xhr.onreadystatechange = function(e) { diff --git a/lib/progressPlugNode.js b/lib/progressPlugNode.js index <HASH>..<HASH> 100644 --- a/lib/progressPlugNode.js +++ b/lib/progressPlugNode.js @@ -1,2 +1,2 @@ -import { Plug } from './plug.js'; +import { Plug } from '../plug.js'; export class ProgressPlugNode extends Plug {}
Fix import paths in Plug subclasses
diff --git a/src/lib/Tables/InventorySelling.php b/src/lib/Tables/InventorySelling.php index <HASH>..<HASH> 100644 --- a/src/lib/Tables/InventorySelling.php +++ b/src/lib/Tables/InventorySelling.php @@ -37,7 +37,7 @@ class InventorySelling extends Table 'Name' => 'name', 'Duration' => 'duration', 'Details' => null, - 'Time left' => 'time_ended', + 'Time left' => 'end_date', 'Options' => null, ]; @@ -58,7 +58,7 @@ class InventorySelling extends Table * * @var string */ - public $defaultSortKey = 'time_ended'; + public $defaultSortKey = 'end_date'; /** * Default sort order. @@ -73,14 +73,4 @@ class InventorySelling extends Table * @var string */ public $presenter = FoundationFivePresenter::class; - - /** - * Sort by time since item ended. - * - * @return void - */ - public function sortTimeEnded($sortOrder) - { - $this->db->orderBy(DB::raw('unix_timestamp() - cast(`end_date` as signed)'), $sortOrder); - } }
Fixed date sorting in inventory selling table
diff --git a/neovim/util.py b/neovim/util.py index <HASH>..<HASH> 100644 --- a/neovim/util.py +++ b/neovim/util.py @@ -47,6 +47,9 @@ class RemoteMap(object): class Current(object): + def __init__(self, vim): + self._vim = vim + @property def line(self): return self._vim.get_current_line()
Add missing initializer for `Current`
diff --git a/pysis/commands.py b/pysis/commands.py index <HASH>..<HASH> 100644 --- a/pysis/commands.py +++ b/pysis/commands.py @@ -120,3 +120,9 @@ class IsisPool(Isis): self.close() self.join() + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, traceback): + self.close_and_wait() +
Added enter and exit methods for using an isis pool with a `with` statement.
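What the two added dunder methods buy: the pool can be driven by a `with` block that always cleans up, even on error. A hedged sketch with invented stand-in internals (not pysis code):

```python
class Pool:
    def close_and_wait(self):
        print("closing pool and waiting for workers")

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        # Runs on normal exit and on exceptions alike.
        self.close_and_wait()

with Pool() as pool:
    print("queueing work on", pool)
```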
diff --git a/src/Mustache/Compiler.php b/src/Mustache/Compiler.php index <HASH>..<HASH> 100644 --- a/src/Mustache/Compiler.php +++ b/src/Mustache/Compiler.php @@ -211,9 +211,9 @@ class Mustache_Compiler } const BLOCK_VAR = ' - $value = $this->resolveValue($context->%s(%s), $context, $indent); + $value = $this->resolveValue($context->findInBlock(%s), $context, $indent); if($value && !is_array($value) && !is_object($value)) { - $buffer .= %s; + $buffer .= $value; } else { %s } @@ -221,11 +221,9 @@ class Mustache_Compiler private function blockVar($nodes, $id, $start, $end, $otag, $ctag, $level) { - $method = 'findInBlock'; $id_str = var_export($id, true); - $value = $this->getEscape(); - return sprintf($this->prepare(self::BLOCK_VAR, $level), $method, $id_str, $value, $this->walk($nodes, 2)); + return sprintf($this->prepare(self::BLOCK_VAR, $level), $id_str, $this->walk($nodes, 2)); } const BLOCK_ARG = '
Don't escape html from blocks, removing unneeded variable assigns
diff --git a/pinax/teams/admin.py b/pinax/teams/admin.py index <HASH>..<HASH> 100644 --- a/pinax/teams/admin.py +++ b/pinax/teams/admin.py @@ -9,7 +9,9 @@ from .hooks import hookset def members_count(obj): return obj.memberships.count() -members_count.short_description = _("Members Count") # noqa + + +members_count.short_description = _("Members Count") admin.site.register(
Go ahead and fix lint error
diff --git a/modules/reports.php b/modules/reports.php index <HASH>..<HASH> 100755 --- a/modules/reports.php +++ b/modules/reports.php @@ -89,7 +89,7 @@ if ( ! class_exists('Reports') ) { <p><?php _e('These monthly reports are generated from the site\'s HTTP access logs. They show every HTTP request of the site, including traffic from both humans and bots. Requests blocked at the firewall level (for example during a DDOS attack) are not logged. Log files can be accessed also directly on the server at <code>/data/slog/html/goaccess-*.html</code>.', 'seravo'); ?></p> </div> <div class="http-requests_info_loading" style="padding: 0px;"> - <table class="widefat fixed striped" style="width: 100%; border: none;"> + <table class="widefat striped" style="width: 100%; border: none;"> <thead> <tr> <th style="width: 25%;"><?php _e('Month', 'seravo'); ?></th>
Prevent overflow of HTTP stats table by removing fixed table layout
diff --git a/spec/request_pattern_spec.rb b/spec/request_pattern_spec.rb index <HASH>..<HASH> 100644 --- a/spec/request_pattern_spec.rb +++ b/spec/request_pattern_spec.rb @@ -32,7 +32,7 @@ describe RequestPattern do end - class RequestPattern + class WebMock::RequestPattern def match(request_signature) self.matches?(request_signature) end
Fixed RequestPattern spec to work with Ruby <I>
diff --git a/sos/report/plugins/rhui.py b/sos/report/plugins/rhui.py index <HASH>..<HASH> 100644 --- a/sos/report/plugins/rhui.py +++ b/sos/report/plugins/rhui.py @@ -27,6 +27,7 @@ class Rhui(Plugin, RedHatPlugin): "/var/log/rhui-subscription-sync.log", "/var/cache/rhui/*", "/root/.rhui/*", + "/var/log/rhui/*", ]) # skip collecting certificate keys self.add_forbidden_path("/etc/pki/rhui/**/*.key", recursive=True)
[rhui] New log folder

Included new log folder per Bugzilla <I>
diff --git a/spyder/plugins/plots/widgets/figurebrowser.py b/spyder/plugins/plots/widgets/figurebrowser.py index <HASH>..<HASH> 100644 --- a/spyder/plugins/plots/widgets/figurebrowser.py +++ b/spyder/plugins/plots/widgets/figurebrowser.py @@ -536,7 +536,7 @@ class FigureViewer(QScrollArea): def get_scaling(self): """Get the current scaling of the figure in percent.""" - return self._scalestep**self._scalefactor*100 + return self.figcanvas.size().width() / self.figcanvas.fwidth * 100 def reset_original_image(self): """Reset the image to its original size."""
Calculate scaling from canvas size instead

This is to be able to update the value displayed correctly when "Fits plots to window" is checked.
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -from distutils.core import setup +from setuptools import setup import scoop @@ -12,9 +12,9 @@ setup(name='scoop', author_email='scoop-users@googlegroups.com', url='http://scoop.googlecode.com', download_url='http://code.google.com/p/scoop/downloads/list', - requires=['greenlet (>=0.3.4)', - 'pyzmq (>=2.2.0)', - 'argparse (>=1.1)'], + install_requires=['greenlet>=0.3.4', + 'pyzmq>=2.2.0', + 'argparse>=1.1'], packages=['scoop'], platforms=['any'], keywords=['distributed algorithms', 'parallel programming'],
Changed distutils to setuptools and added dependency resolution
diff --git a/mustache.js b/mustache.js index <HASH>..<HASH> 100644 --- a/mustache.js +++ b/mustache.js @@ -138,7 +138,7 @@ var Mustache = function() { translation_mode = { _mode: context['_mode'] }; } - return that.render(_(content, translation_mode), context, partials, true); + return _(content, translation_mode); }); },
don't render during i<I>n step, just do replacements
diff --git a/spec/lib/insert-spec.js b/spec/lib/insert-spec.js index <HASH>..<HASH> 100644 --- a/spec/lib/insert-spec.js +++ b/spec/lib/insert-spec.js @@ -12,6 +12,7 @@ describe('Insert', function() { mongoat.MongoClient.connect('mongodb://localhost:27017/test') .then(function (db) { + db.dropDatabase(); _this.testDb = db; _this.testCol = db.collection('Person'); done();
Drop db on beforeAll tests
diff --git a/tests/test_loading.py b/tests/test_loading.py index <HASH>..<HASH> 100644 --- a/tests/test_loading.py +++ b/tests/test_loading.py @@ -3,6 +3,7 @@ from django.test.utils import override_settings from django_core.utils.loading import get_class_from_settings from django_core.utils.loading import get_class_from_settings_from_apps from django_core.utils.loading import get_class_from_settings_full_path +from django_core.utils.loading import get_function_from_settings from django_core.utils.loading import get_model_from_settings from tests.test_objects.models import TestModel @@ -31,3 +32,7 @@ class LoadingTestCase(TestCase): model = get_class_from_settings_from_apps(settings_key='MY_MODEL_SETTING') self.assertEqual(model, TestModel) + @override_settings(MY_SETTING='django_core.utils.loading.get_function_from_settings') + def test_get_function_from_settings(self): + func = get_function_from_settings(settings_key='MY_SETTING') + self.assertEqual(func, get_function_from_settings)
added test for loading function from settings string.
diff --git a/test/test_fft.py b/test/test_fft.py index <HASH>..<HASH> 100644 --- a/test/test_fft.py +++ b/test/test_fft.py @@ -448,7 +448,7 @@ class _BaseTestFFTClass(unittest.TestCase): # output arrays; just reuse inarr and outexp (values won't # matter, we're just checking exceptions). output_args = {"delta_f": self.delta, "epoch": self.epoch} - _test_raise_excep_ifft(self,inarr,outexp) + _test_raise_excep_ifft(self,inarr,outexp,output_args) def test_rev_real_fs(self): for rev_dtype in [float32,float64]: @@ -473,7 +473,7 @@ class _BaseTestFFTClass(unittest.TestCase): # output arrays; just reuse inarr and outexp (values won't # matter, we're just checking exceptions). output_args = {"delta_t": self.delta, "epoch": self.epoch} - _test_raise_excep_ifft(self,inarr,outexp) + _test_raise_excep_ifft(self,inarr,outexp,output_args) def test_fwd_complex_arr(self): for fwd_dtype in [complex64,complex128]:
Add missing 'output_args' to fn call in test_fft
diff --git a/p2p/server.go b/p2p/server.go index <HASH>..<HASH> 100644 --- a/p2p/server.go +++ b/p2p/server.go @@ -943,9 +943,8 @@ func (srv *Server) setupConn(c *conn, flags connFlag, dialDest *enode.Node) erro } // If dialing, figure out the remote public key. - var dialPubkey *ecdsa.PublicKey if dialDest != nil { - dialPubkey = new(ecdsa.PublicKey) + dialPubkey := new(ecdsa.PublicKey) if err := dialDest.Load((*enode.Secp256k1)(dialPubkey)); err != nil { err = errors.New("dial destination doesn't have a secp256k1 public key") srv.log.Trace("Setting up connection failed", "addr", c.fd.RemoteAddr(), "conn", c.flags, "err", err)
p2p: reduce the scope of variable dialPubkey (#<I>)

dialPubkey isn't used anywhere else after dialDest.Load, so it should be safe to restrict its scope to the if clause.
diff --git a/lib/sprout/remote_file_target.rb b/lib/sprout/remote_file_target.rb index <HASH>..<HASH> 100644 --- a/lib/sprout/remote_file_target.rb +++ b/lib/sprout/remote_file_target.rb @@ -49,10 +49,16 @@ module Sprout def load_unpack_or_ignore_archive if(!unpacked_files_exist?) if(!File.exists?(downloaded_file)) - write_archive download_archive + bytes = download_archive + write_archive bytes end - bytes = File.read downloaded_file + # If we *just* downloaded the file, + # use the bytes directly, otherwise + # read them off disk from a previous + # download attempt: + bytes ||= File.open(downloaded_file, 'r').read + if should_unpack?(bytes, md5) unpack_archive end
Sped up File read code for Windows
diff --git a/lib/components/map/route-viewer-overlay.js b/lib/components/map/route-viewer-overlay.js index <HASH>..<HASH> 100644 --- a/lib/components/map/route-viewer-overlay.js +++ b/lib/components/map/route-viewer-overlay.js @@ -4,6 +4,16 @@ import { FeatureGroup, MapLayer, Polyline } from 'react-leaflet' import polyline from '@mapbox/polyline' +// helper fn to check if geometry has been populated for all patterns in route +const isGeomComplete = routeData => { + return ( + routeData && + routeData.patterns && + Object.values(routeData.patterns) + .every(ptn => typeof ptn.geometry !== 'undefined') + ) +} + class RouteViewerOverlay extends MapLayer { static propTypes = {} @@ -13,18 +23,6 @@ class RouteViewerOverlay extends MapLayer { componentWillUnmount () {} componentWillReceiveProps (nextProps) { - // helper fn to check if geometry has been populated for all patterns in route - const isGeomComplete = routeData => { - return ( - routeData && - routeData.patterns && - Object.values(routeData.patterns).reduce( - (acc, ptn) => acc && typeof ptn.geometry !== 'undefined', - true - ) - ) - } - // if pattern geometry just finished populating, update the map points if ( !isGeomComplete(this.props.routeData) &&
refactor(route-viewer-overlay): use Array.every for geom complete check
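The same readability argument holds in Python, where an and-folding reduce is the spelled-out form of the built-in all(); a tiny sketch with an invented pattern list:

```python
from functools import reduce

patterns = [{"geometry": "abc"}, {"geometry": "def"}, {}]

# reduce form, analogous to the removed code
complete_reduce = reduce(lambda acc, p: acc and "geometry" in p, patterns, True)

# every/all form, analogous to the added code
complete_all = all("geometry" in p for p in patterns)

assert complete_reduce is complete_all is False  # last pattern lacks geometry
```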
diff --git a/index.js b/index.js index <HASH>..<HASH> 100644 --- a/index.js +++ b/index.js @@ -27,7 +27,7 @@ module.exports = timeout /** * Create a new timeout middleware. * - * @param {number|string} time The timeout as a number of milliseconds or a string for `ms` + * @param {number|string} [time=5000] The timeout as a number of milliseconds or a string for `ms` * @param {object} [options] Additional options for middleware * @param {boolean} [options.respond=true] Automatically emit error when timeout reached * @return {function} middleware
docs: fix jsdoc that time is optional
diff --git a/hooks/post_gen_project.py b/hooks/post_gen_project.py index <HASH>..<HASH> 100644 --- a/hooks/post_gen_project.py +++ b/hooks/post_gen_project.py @@ -103,10 +103,15 @@ def remove_task_app(project_directory): def remove_pycharm_dir(project_directory): - """Removes the .idea directory if PyCharm isn't going to be used""" + """ + Removes directories related to PyCharm + if it isn't going to be used + """ idea_dir_location = os.path.join(PROJECT_DIRECTORY, '.idea/') shutil.rmtree(idea_dir_location) + docs_dir_location = os.path.join(PROJECT_DIRECTORY, 'docs/pycharm/') + shutil.rmtree(docs_dir_location) # IN PROGRESS # def copy_doc_files(project_directory):
post_gen hook removes docs for pycharm if it isn't used
diff --git a/hatenablog.rb b/hatenablog.rb index <HASH>..<HASH> 100644 --- a/hatenablog.rb +++ b/hatenablog.rb @@ -159,7 +159,7 @@ XML categories_tag = categories.inject('') do |s, c| s + "<category term=\"#{c}\" />\n" end - xml % [title, @user_id, content, categories_tag, draft] + xml % [title, author_name, content, categories_tag, draft] end end end
Fix the entry XML generating method
diff --git a/builtin/providers/aws/resource_aws_directory_service_directory.go b/builtin/providers/aws/resource_aws_directory_service_directory.go index <HASH>..<HASH> 100644 --- a/builtin/providers/aws/resource_aws_directory_service_directory.go +++ b/builtin/providers/aws/resource_aws_directory_service_directory.go @@ -232,7 +232,7 @@ func resourceAwsDirectoryServiceDirectoryCreate(d *schema.ResourceData, meta int d.Id(), *ds.Stage) return ds, *ds.Stage, nil }, - Timeout: 10 * time.Minute, + Timeout: 30 * time.Minute, } if _, err := stateConf.WaitForState(); err != nil { return fmt.Errorf( @@ -355,7 +355,7 @@ func resourceAwsDirectoryServiceDirectoryDelete(d *schema.ResourceData, meta int d.Id(), *ds.Stage) return ds, *ds.Stage, nil }, - Timeout: 10 * time.Minute, + Timeout: 30 * time.Minute, } if _, err := stateConf.WaitForState(); err != nil { return fmt.Errorf(
Increase aws_directory_service_directory timeouts

According to the AWS docs, creating a MS directory could take up to <I> minutes.
diff --git a/py/selenium/webdriver/chrome/options.py b/py/selenium/webdriver/chrome/options.py index <HASH>..<HASH> 100644 --- a/py/selenium/webdriver/chrome/options.py +++ b/py/selenium/webdriver/chrome/options.py @@ -166,6 +166,6 @@ class Options(object): if self.debugger_address: chrome_options["debuggerAddress"] = self.debugger_address - chrome["chromeOptions"] = chrome_options + chrome["goog:chromeOptions"] = chrome_options return chrome
Update python chromeOptions key for capabilities (#<I>)
diff --git a/scripts/server.js b/scripts/server.js index <HASH>..<HASH> 100644 --- a/scripts/server.js +++ b/scripts/server.js @@ -25,6 +25,7 @@ const responseHeaderObj = { // object containing the name:hex pairs for nearestColor() const rgbColorsArr = []; +// prepare color array colors.forEach((c) => { const rgb = lib.hexToRgb(c.hex); // populates array needed for ClosestVector() @@ -68,6 +69,8 @@ const nameColors = (colorArr) => { distance: closestColor.distance, }; }); + + // closest.clearCache() }; /**
style($server): adds a few comments in server.js
diff --git a/scanpy/tests/test_datasets.py b/scanpy/tests/test_datasets.py index <HASH>..<HASH> 100644 --- a/scanpy/tests/test_datasets.py +++ b/scanpy/tests/test_datasets.py @@ -44,6 +44,16 @@ def test_pbmc3k(tmp_dataset_dir): @pytest.mark.internet +def test_pbmc3k_processed(tmp_dataset_dir): + with pytest.warns(None) as records: + adata = sc.datasets.pbmc3k_processed() + assert adata.shape == (2638, 1838) + assert adata.raw.shape == (2638, 13714) + + assert len(records) == 0 + + +@pytest.mark.internet def test_ebi_expression_atlas(tmp_dataset_dir): adata = sc.datasets.ebi_expression_atlas("E-MTAB-4888") assert adata.shape == (2315, 23852) @@ -70,7 +80,9 @@ def test_toggleswitch(): def test_pbmc68k_reduced(): - sc.datasets.pbmc68k_reduced() + with pytest.warns(None) as records: + sc.datasets.pbmc68k_reduced() + assert len(records) == 0 # Test that loading a dataset does not warn @pytest.mark.internet
Added tests for warnings from datasets
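A dependency-free sketch of the same assertion using only the stdlib: record every warning a loader emits and require the list to be empty (the loader is an invented stand-in for the scanpy datasets):

```python
import warnings

def load_dataset():
    return [1, 2, 3]  # a well-behaved loader emits no warnings

with warnings.catch_warnings(record=True) as records:
    warnings.simplefilter("always")  # make sure nothing is filtered away
    data = load_dataset()

assert data == [1, 2, 3]
assert len(records) == 0, [str(r.message) for r in records]
```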
diff --git a/corelib/class.rb b/corelib/class.rb index <HASH>..<HASH> 100644 --- a/corelib/class.rb +++ b/corelib/class.rb @@ -20,8 +20,7 @@ class Class def new(*args, &block) obj = allocate() - #obj.initialize *args, &block - obj.initialize *args + obj.initialize *args, &block obj end diff --git a/lib/opal/parser/processor.rb b/lib/opal/parser/processor.rb index <HASH>..<HASH> 100644 --- a/lib/opal/parser/processor.rb +++ b/lib/opal/parser/processor.rb @@ -601,7 +601,7 @@ module Opal if @scope.uses_block? scope_name = (@scope.name ||= unique_temp) blk = "var $yield = #{scope_name}.proc || $noproc, $yself = $yield.$S, " - blk += "#{block_name} = #{scope_name}.proc, " if block_name + blk += "#{block_name} = #{scope_name}.proc || nil, " if block_name blk += "$break = $bjump; #{scope_name}.proc = null;" code = blk + code
Re-add block passing to Class#new
diff --git a/components/apps.js b/components/apps.js index <HASH>..<HASH> 100644 --- a/components/apps.js +++ b/components/apps.js @@ -499,7 +499,8 @@ SteamUser.prototype.redeemKey = function(key, callback) { var recipeDetails = BinaryKVParser.parse(body.purchase_receipt_info).MessageObject; if (recipeDetails.LineItemCount > 0) { recipeDetails.lineitems.forEach(function(pkg) { - packageList[pkg.PackageID] = pkg.ItemDescription; + var packageID = pkg.PackageID || pkg.packageID || pkg.packageid; + packageList[packageID] = pkg.ItemDescription; }); }
Fix packageid being undefined sometimes when redeeming keys (fixes #<I>)
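The fix tolerates inconsistent key casing (`PackageID` / `packageID` / `packageid`) in a decoded payload by trying each known spelling. A generic sketch of that fallback, with illustrative data:

```python
# Generic version of the casing-fallback fix in the diff above:
# try a list of known key spellings and take the first one present.
def first_present(mapping, *keys, default=None):
    for key in keys:
        if key in mapping:
            return mapping[key]
    return default

pkg = {"packageid": 12345, "ItemDescription": "Some Game"}
package_id = first_present(pkg, "PackageID", "packageID", "packageid")
assert package_id == 12345
```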
diff --git a/landsat/landsat.py b/landsat/landsat.py index <HASH>..<HASH> 100755 --- a/landsat/landsat.py +++ b/landsat/landsat.py @@ -254,7 +254,7 @@ def main(args): def exit(message): print(message) - sys.exit() + sys.exit(0) def package_installed(package):
Added exit code 0 for successful operations
diff --git a/python/dllib/src/bigdl/dllib/keras/optimizers.py b/python/dllib/src/bigdl/dllib/keras/optimizers.py index <HASH>..<HASH> 100644 --- a/python/dllib/src/bigdl/dllib/keras/optimizers.py +++ b/python/dllib/src/bigdl/dllib/keras/optimizers.py @@ -105,3 +105,20 @@ class AdamWeightDecay(OptimMethod, ZooKerasCreator): epsilon, weight_decay) self.bigdl_type = bigdl_type + + +class PolyEpochDecay(ZooKerasCreator): + """ + A learning rate decay policy, where the effective learning rate + follows a polynomial decay, to be zero by the max_epochs. + Calculation: init_lr * (1 - epoch/max_iteration) ^ (power) + + + :param power: The coefficient of decay. + :param max_epochs: The maximum number of epochs when lr becomes zero. + + >>> poly = PolyEpochDecay(0.5, 5) + creating: createZooKerasPolyEpochDecay + """ + def __init__(self, power, max_epochs, bigdl_type="float"): + JavaValue.__init__(self, None, bigdl_type, power, max_epochs)
Add Polynomial Decay based on epochs (#<I>) * poly on epoch * doc and ut * style
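The docstring in the diff gives the schedule as init_lr * (1 - epoch/max_epochs) ^ power, reaching zero at max_epochs. A standalone worked sketch of that formula:

```python
# Standalone sketch of the polynomial epoch decay documented in the diff:
# lr(epoch) = init_lr * (1 - epoch / max_epochs) ** power,
# reaching zero at max_epochs.
def poly_epoch_decay(init_lr, power, max_epochs, epoch):
    if epoch >= max_epochs:
        return 0.0
    return init_lr * (1.0 - epoch / max_epochs) ** power

if __name__ == "__main__":
    for epoch in range(6):
        print(epoch, round(poly_epoch_decay(0.1, power=0.5, max_epochs=5, epoch=epoch), 5))
    # epoch 0 -> 0.1, epoch 5 -> 0.0
```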
diff --git a/src/main/java/com/amashchenko/maven/plugin/gitflow/GitFlowReleaseFinishMojo.java b/src/main/java/com/amashchenko/maven/plugin/gitflow/GitFlowReleaseFinishMojo.java index <HASH>..<HASH> 100644 --- a/src/main/java/com/amashchenko/maven/plugin/gitflow/GitFlowReleaseFinishMojo.java +++ b/src/main/java/com/amashchenko/maven/plugin/gitflow/GitFlowReleaseFinishMojo.java @@ -195,7 +195,7 @@ public class GitFlowReleaseFinishMojo extends AbstractGitFlowMojo { gitCheckout(gitFlowConfig.getDevelopmentBranch()); gitMerge(releaseBranch, releaseRebase, releaseMergeNoFF, - releaseMergeFFOnly); + false); } // get next snapshot version
releaseMergeFFOnly is not used when merging release to develop. Typically the develop branch moves on while the release is being prepared. Therefore it does not make sense to require the use of fast-forward when merging to the develop branch. On the master branch, however, it is useful to be able to require the use of fast-forward to make sure that the final commit on the release branch ends up in the master branch unmodified.
diff --git a/src/runners/CollectionRunner.js b/src/runners/CollectionRunner.js index <HASH>..<HASH> 100644 --- a/src/runners/CollectionRunner.js +++ b/src/runners/CollectionRunner.js @@ -44,6 +44,7 @@ var CollectionRunner = jsface.Class([AbstractRunner, Options, EventEmitter], { }, this); // Start the runner + RequestRunner.resetIndex(); RequestRunner.setDelay(this.opts.delay); if (!isNaN(this.opts.requestTimeout) && this.opts.requestTimeout % 1 === 0) { diff --git a/src/runners/RequestRunner.js b/src/runners/RequestRunner.js index <HASH>..<HASH> 100644 --- a/src/runners/RequestRunner.js +++ b/src/runners/RequestRunner.js @@ -70,6 +70,10 @@ var RequestRunner = jsface.Class([Queue, EventEmitter], { this.addToQueue(request); }, + resetIndex: function() { + this._currentIndex = -1; + }, + /** * Starts the RequestRunner going to each request in the queue. * @memberOf RequestRunner
Multiple iterations working with setNextRequest
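The fix resets the runner's queue cursor before each collection run, so position state from a previous iteration (e.g. a jump set via setNextRequest) does not leak into the next. A minimal sketch of the same cursor-reset pattern, with illustrative class and request names:

```python
# Sketch of the reset-between-iterations fix: a queue runner keeps a
# cursor, and each new run must rewind it, otherwise state from the
# previous iteration carries over and the next run executes nothing.
class RequestRunner:
    def __init__(self, requests):
        self.requests = requests
        self._current_index = -1

    def reset_index(self):
        self._current_index = -1

    def run(self):
        executed = []
        while self._current_index + 1 < len(self.requests):
            self._current_index += 1
            executed.append(self.requests[self._current_index])
        return executed

runner = RequestRunner(["req-a", "req-b"])
for _ in range(2):                 # two iterations of the same collection
    runner.reset_index()           # without this, the second run is empty
    assert runner.run() == ["req-a", "req-b"]
```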
diff --git a/pipeline/compressors/__init__.py b/pipeline/compressors/__init__.py index <HASH>..<HASH> 100644 --- a/pipeline/compressors/__init__.py +++ b/pipeline/compressors/__init__.py @@ -64,6 +64,8 @@ class Compressor(object): def compile_templates(self, paths): compiled = "" + if not paths: + return compiled namespace = settings.PIPELINE_TEMPLATE_NAMESPACE base_path = self.base_path(paths) for path in paths:
don't output templates when there are no templates
diff --git a/setuptools/tests/test_virtualenv.py b/setuptools/tests/test_virtualenv.py index <HASH>..<HASH> 100644 --- a/setuptools/tests/test_virtualenv.py +++ b/setuptools/tests/test_virtualenv.py @@ -1,3 +1,4 @@ +import distutils.command import glob import os import sys @@ -134,3 +135,14 @@ def test_test_command_install_requirements(bare_virtualenv, tmpdir): 'python setup.py test -s test', )).format(tmpdir=tmpdir)) assert tmpdir.join('success').check() + + +def test_no_missing_dependencies(bare_virtualenv): + """ + Quick and dirty test to ensure all external dependencies are vendored. + """ + for command in ('upload',):#sorted(distutils.command.__all__): + bare_virtualenv.run(' && '.join(( + 'cd {source}', + 'python setup.py {command} -h', + )).format(command=command, source=SOURCE_DIR))
add a test to catch unvendored dependencies
diff --git a/transport_plugins/jlink/iotile_transport_jlink/jlink.py b/transport_plugins/jlink/iotile_transport_jlink/jlink.py index <HASH>..<HASH> 100644 --- a/transport_plugins/jlink/iotile_transport_jlink/jlink.py +++ b/transport_plugins/jlink/iotile_transport_jlink/jlink.py @@ -40,7 +40,8 @@ class JLinkAdapter(DeviceAdapter): self._mux_func = None self._channel = None self._control_thread = None - self.jlink = pylink.JLink() + self.jlink = None + self._parse_port(port) if on_scan is not None: @@ -130,6 +131,7 @@ class JLinkAdapter(DeviceAdapter): raise ArgumentError("Missing device name or alias, specify using device=name in port string or -c device=name in connect_direct or debug command", known_devices=[x for x in viewkeys(DEVICE_ALIASES)]) try: + self.jlink = pylink.JLink() self.jlink.open(serial_no=self._jlink_serial) self.jlink.set_tif(pylink.enums.JLinkInterfaces.SWD) self.jlink.connect(self._device_info.jlink_name)
move pylink declaration for easier test running
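Moving `pylink.JLink()` out of `__init__` and into the connect path means merely constructing the adapter no longer touches hardware drivers, so unit tests can instantiate it freely. A sketch of that lazy-initialization move; `FakeJLink` is a hypothetical stand-in for `pylink.JLink`:

```python
# Sketch of the lazy-initialization move in the diff: constructing the
# adapter no longer instantiates the hardware client, so tests can build
# the adapter (and parse its port string) without a J-Link attached.
# FakeJLink is a hypothetical stand-in for pylink.JLink.
class FakeJLink:
    def open(self, serial_no=None):
        print("opened", serial_no)

class JLinkAdapter:
    def __init__(self, port):
        self.port = port
        self.jlink = None          # deferred: created only on connect

    def connect(self, serial_no=None):
        self.jlink = FakeJLink()   # real code would call pylink.JLink() here
        self.jlink.open(serial_no=serial_no)

adapter = JLinkAdapter("jlink:device=nrf52")   # safe in tests: no hardware touched
assert adapter.jlink is None
adapter.connect(serial_no=1234)
```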
diff --git a/example/src/main/java/com/alibaba/otter/canal/example/kafka/CanalKafkaClientFlatMessageExample.java b/example/src/main/java/com/alibaba/otter/canal/example/kafka/CanalKafkaClientFlatMessageExample.java index <HASH>..<HASH> 100644 --- a/example/src/main/java/com/alibaba/otter/canal/example/kafka/CanalKafkaClientFlatMessageExample.java +++ b/example/src/main/java/com/alibaba/otter/canal/example/kafka/CanalKafkaClientFlatMessageExample.java @@ -117,7 +117,7 @@ public class CanalKafkaClientFlatMessageExample { } for (FlatMessage message : messages) { long batchId = message.getId(); - int size = message.getData().size(); + int size = message.getData() == null ? 0 : message.getData().size(); if (batchId == -1 || size == 0) { // try { // Thread.sleep(1000);
Fix NPE when handling a DDL event (#<I>)
diff --git a/provider/ec2/environ.go b/provider/ec2/environ.go index <HASH>..<HASH> 100644 --- a/provider/ec2/environ.go +++ b/provider/ec2/environ.go @@ -834,6 +834,17 @@ func (e *environ) Subnets(_ instance.Id) ([]network.SubnetInfo, error) { } results := make([]network.SubnetInfo, len(resp.Subnets), len(resp.Subnets)) + for i, subnet := range resp.Subnets { + // No VLANTag available + cidr := subnet.CIDRBlock + allocatableLow := network.DecimalToIP(network.IPToDecimal(start) + 4) + info := network.SubnetInfo{ + CIDR: cidr, + ProviderID: subnet.Id, + AllocatableIPLow: allocatableLow, + } + results[i] = info + } return results, nil }
Calculate AllocatableIPLow for SubnetInfo
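The Go hunk computes the low end of the allocatable range by adding 4 to a base address (EC2 reserves the first four addresses of every subnet). The hunk references a `start` value defined elsewhere in the function; the sketch below assumes it is the subnet's network address and uses Python's `ipaddress` module in place of the juju network helpers:

```python
import ipaddress

# Sketch of the AllocatableIPLow calculation from the diff: EC2 reserves
# the first four addresses of every subnet, so the first allocatable IP
# is assumed here to be the network address plus 4.
def allocatable_ip_low(cidr):
    network = ipaddress.ip_network(cidr)
    return network.network_address + 4

assert str(allocatable_ip_low("10.0.0.0/24")) == "10.0.0.4"
```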
diff --git a/internal/config/config.go b/internal/config/config.go index <HASH>..<HASH> 100644 --- a/internal/config/config.go +++ b/internal/config/config.go @@ -33,7 +33,7 @@ import ( var ( // Default sections - sectionDefaults = []string{"agent", "global_tags", "outputs", + sectionDefaults = []string{"global_tags", "agent", "outputs", "processors", "aggregators", "inputs"} // Default input plugins @@ -536,13 +536,13 @@ func printFilteredOutputs(outputFilters []string, commented bool) { } func printFilteredGlobalSections(sectionFilters []string) { - if sliceContains("agent", sectionFilters) { - fmt.Printf(agentConfig) - } - if sliceContains("global_tags", sectionFilters) { fmt.Printf(globalTagsConfig) } + + if sliceContains("agent", sectionFilters) { + fmt.Printf(agentConfig) + } } type printer interface {
Print global_tags first in sample configuration
diff --git a/lib/coral/machine/fog.rb b/lib/coral/machine/fog.rb index <HASH>..<HASH> 100644 --- a/lib/coral/machine/fog.rb +++ b/lib/coral/machine/fog.rb @@ -236,10 +236,6 @@ class Fog < Plugin::Machine if ssh_results ssh_results.each do |result| - ui.info(result.stdout, { :prefix => false }) - ui.warn(result.stderr, { :prefix => false }) - ui.success(result.status) - results << { :status => result.status, :result => result.stdout.strip,
Removing debug printouts in the fog machine provider.
diff --git a/salt/states/mdadm.py b/salt/states/mdadm.py index <HASH>..<HASH> 100644 --- a/salt/states/mdadm.py +++ b/salt/states/mdadm.py @@ -93,8 +93,8 @@ def present(name, can_assemble[dev] = __salt__['cmd.retcode'](cmd) == 0 if True in can_assemble.values() and False in can_assemble.values(): - in_raid = [x[0] for x in devices.items() if x[1]] - not_in_raid = [x[0] for x in devices.items() if not x[1]] + in_raid = sorted([x[0] for x in can_assemble.items() if x[1]]) + not_in_raid = sorted([x[0] for x in can_assemble.items() if not x[1]]) ret['comment'] = 'Devices are a mix of RAID constituents ({0}) and '\ 'non-RAID-constituents({1}).'.format(in_raid, not_in_raid) ret['result'] = False
Fixed a name reference in raid.assemble. Also, raid.assemble now sorts the devices alphabetically if it needs to report that the RAID and non-RAID devices are mixed.
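Sorting the device lists before embedding them in the message makes the output deterministic regardless of dict iteration order, which also keeps tests stable. A small sketch with illustrative device names:

```python
# Sketch of the determinism fix: sort device names before reporting, so
# the error message is stable regardless of dict iteration order.
can_assemble = {"/dev/sdc": False, "/dev/sda": True, "/dev/sdb": True}
in_raid = sorted(dev for dev, ok in can_assemble.items() if ok)
not_in_raid = sorted(dev for dev, ok in can_assemble.items() if not ok)
comment = ('Devices are a mix of RAID constituents ({0}) and '
           'non-RAID-constituents({1}).'.format(in_raid, not_in_raid))
assert in_raid == ["/dev/sda", "/dev/sdb"]
```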
diff --git a/app/config/bootstrap.php b/app/config/bootstrap.php index <HASH>..<HASH> 100644 --- a/app/config/bootstrap.php +++ b/app/config/bootstrap.php @@ -131,9 +131,17 @@ Libraries::add('app'); // use lithium\g11n\Catalog; // // Catalog::config(array( -// 'runtime' => array('adapter' => 'Memory'), -// 'app' => array('adapter' => 'Gettext', 'path' => LITHIUM_APP_PATH . '/resources/po'), -// 'lithium' => array('adapter' => 'Gettext', 'path' => LITHIUM_LIBRARY_PATH . '/lithium/resources/po') +// 'runtime' => array( +// 'adapter' => 'Memory' +// ), +// 'app' => array( +// 'adapter' => 'Gettext', +// 'path' => LITHIUM_APP_PATH . '/extensions/g11n/data' +// ), +// 'lithium' => array( +// 'adapter' => 'Gettext', +// 'path' => LITHIUM_LIBRARY_PATH . '/lithium/g11n/data' +// ) // )); /**
Updating locations for g<I>n data in app bootstrap.
diff --git a/python/phonenumbers/__init__.py b/python/phonenumbers/__init__.py index <HASH>..<HASH> 100644 --- a/python/phonenumbers/__init__.py +++ b/python/phonenumbers/__init__.py @@ -143,7 +143,7 @@ from .phonenumbermatcher import PhoneNumberMatch, PhoneNumberMatcher, Leniency # Version number is taken from the upstream libphonenumber version # together with an indication of the version of the Python-specific code. -__version__ = "8.5.0" +__version__ = "8.5.1" __all__ = ['PhoneNumber', 'CountryCodeSource', 'FrozenPhoneNumber', 'REGION_CODE_FOR_NON_GEO_ENTITY', 'NumberFormat', 'PhoneNumberDesc', 'PhoneMetadata',
Prep for <I> release
diff --git a/gqltesting/testing.go b/gqltesting/testing.go index <HASH>..<HASH> 100644 --- a/gqltesting/testing.go +++ b/gqltesting/testing.go @@ -43,6 +43,9 @@ func RunTest(t *testing.T, test *Test) { test.Context = context.Background() } result := test.Schema.Exec(test.Context, test.Query, test.OperationName, test.Variables) + + checkErrors(t, test.ExpectedErrors, result.Errors) + // Verify JSON to avoid red herring errors. got, err := formatJSON(result.Data) if err != nil { @@ -53,8 +56,6 @@ func RunTest(t *testing.T, test *Test) { t.Fatalf("want: invalid JSON: %s", err) } - checkErrors(t, test.ExpectedErrors, result.Errors) - if !bytes.Equal(got, want) { t.Logf("got: %s", got) t.Logf("want: %s", want)
Update testing.go to check for errors before comparing the expected response
diff --git a/command/format/state.go b/command/format/state.go index <HASH>..<HASH> 100644 --- a/command/format/state.go +++ b/command/format/state.go @@ -74,7 +74,11 @@ func State(opts *StateOpts) string { for _, k := range ks { v := m.OutputValues[k] p.buf.WriteString(fmt.Sprintf("%s = ", k)) - p.writeValue(v.Value, plans.NoOp, 0) + if v.Sensitive { + p.buf.WriteString("(sensitive value)") + } else { + p.writeValue(v.Value, plans.NoOp, 0) + } p.buf.WriteString("\n") } } diff --git a/command/format/state_test.go b/command/format/state_test.go index <HASH>..<HASH> 100644 --- a/command/format/state_test.go +++ b/command/format/state_test.go @@ -219,7 +219,7 @@ map_var = { "first" = "foo" "second" = "bar" } -sensitive_var = "secret!!!" +sensitive_var = (sensitive value) string_var = "string value"` func basicState(t *testing.T) *states.State {
Hide sensitive outputs in terraform show
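The diff renders a fixed `(sensitive value)` placeholder instead of the real value whenever an output is flagged sensitive. A minimal sketch of that masking pattern, with a hypothetical output table:

```python
# Sketch of the sensitive-output masking in the diff: render a literal
# placeholder instead of the value whenever an output is flagged sensitive.
def render_outputs(outputs):
    lines = []
    for name in sorted(outputs):
        value, sensitive = outputs[name]
        shown = "(sensitive value)" if sensitive else repr(value)
        lines.append("{0} = {1}".format(name, shown))
    return "\n".join(lines)

state = {"string_var": ("string value", False),
         "sensitive_var": ("secret!!!", True)}
print(render_outputs(state))
# sensitive_var = (sensitive value)
# string_var = 'string value'
```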
diff --git a/openquake/server/tests/functional_test.py b/openquake/server/tests/functional_test.py index <HASH>..<HASH> 100644 --- a/openquake/server/tests/functional_test.py +++ b/openquake/server/tests/functional_test.py @@ -117,8 +117,8 @@ class EngineServerTestCase(unittest.TestCase): tmpdb = '%s:%s' % (cls.tmpdb, cls.dbserverport) cls.fd, cls.errfname = tempfile.mkstemp() cls.dbs = subprocess.Popen( - [sys.executable, '-m', 'openquake.server.dbserver', tmpdb], - env=env, stderr=cls.fd) # redirect the server errors + [sys.executable, '-m', 'openquake.server.dbserver', + tmpdb, cls.errfname], env=env, stderr=cls.fd) cls.proc = subprocess.Popen( [sys.executable, '-m', 'openquake.server.manage', 'runserver', cls.hostport, '--noreload', '--nothreading', 'tmpdb=' + tmpdb],
Passed the log file to the dbserver in the functional tests
diff --git a/telemetry/telemetry/core/timeline/model.py b/telemetry/telemetry/core/timeline/model.py index <HASH>..<HASH> 100644 --- a/telemetry/telemetry/core/timeline/model.py +++ b/telemetry/telemetry/core/timeline/model.py @@ -43,10 +43,11 @@ class TimelineModel(object): if self._frozen: raise Exception("Cannot add events once recording is done") self._root_events.append(event) - self._all_events.extend( - event.GetAllChildrenRecursive(include_self=True)) def DidFinishRecording(self): + for event in self._root_events: + self._all_events.extend( + event.GetAllChildrenRecursive(include_self=True)) self._frozen = True def ImportTraces(self, traces, shift_world_to_zero=True):
Fixed generation of TimelineModel._all_events. - Events were added recursively to _all_events during AddEvent for root events - Asynchronous events that were added later on by the importer were not included. - Fixed by moving the recursive population of _all_events to DidFinishRecording BUG=<I> Review URL: <URL>
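The bug was flattening each event's children at AddEvent time, so children attached afterwards (the importer's asynchronous events) never reached `_all_events`; the fix flattens the whole tree once, when recording is frozen. A compact sketch of that deferred-flattening pattern:

```python
# Sketch of the fix: flatten the event tree once, when recording is
# frozen, instead of at add time - children attached after add_event
# (e.g. by an importer) are then included too.
class Event:
    def __init__(self, name):
        self.name = name
        self.children = []

    def all_children_recursive(self):
        events = [self]
        for child in self.children:
            events.extend(child.all_children_recursive())
        return events

class TimelineModel:
    def __init__(self):
        self._root_events = []
        self._all_events = []
        self._frozen = False

    def add_event(self, event):
        if self._frozen:
            raise RuntimeError("Cannot add events once recording is done")
        self._root_events.append(event)

    def did_finish_recording(self):
        for event in self._root_events:
            self._all_events.extend(event.all_children_recursive())
        self._frozen = True

model = TimelineModel()
root = Event("root")
model.add_event(root)
root.children.append(Event("async-child"))  # attached after add_event
model.did_finish_recording()
assert [e.name for e in model._all_events] == ["root", "async-child"]
```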
diff --git a/src/check-types.js b/src/check-types.js index <HASH>..<HASH> 100644 --- a/src/check-types.js +++ b/src/check-types.js @@ -290,7 +290,7 @@ * */ function error (data) { - return Object.prototype.toString.call(data) === '[object Error]'; + return data instanceof Error || Object.prototype.toString.call(data) === '[object Error]'; } /** diff --git a/test/check-types.js b/test/check-types.js index <HASH>..<HASH> 100644 --- a/test/check-types.js +++ b/test/check-types.js @@ -366,6 +366,15 @@ assert.isTrue(check.error(new Error())); }); + test('error with derived error returns true', function () { + function DerivedError () { + Error.call(this); + } + DerivedError.prototype = new Error(); + DerivedError.prototype.constructor = DerivedError; + assert.isTrue(check.error(new DerivedError())); + }); + test('error with object returns false', function () { assert.isFalse(check.error({})); });
Recognise derived error objects.
diff --git a/lib/ruboto/util/xml_element.rb b/lib/ruboto/util/xml_element.rb index <HASH>..<HASH> 100644 --- a/lib/ruboto/util/xml_element.rb +++ b/lib/ruboto/util/xml_element.rb @@ -212,7 +212,7 @@ module Ruboto if_else( "scriptInfo.getCallbackProcs() != null && scriptInfo.getCallbackProcs()[#{constant_string}] != null", [super_string] + ruby_call, - ['String rubyClassName = Script.toCamelCase(scriptInfo.getScriptName());'] + + ['String rubyClassName = scriptInfo.getRubyClassName();'] + if_else( # TODO(uwe): Remove defined?(rubyClassName) if we remove non-class-based class definitions "(Boolean)JRubyAdapter.runScriptlet(\"defined?(\" + rubyClassName + \") == 'constant' && \" + rubyClassName + \".instance_methods(false).any?{|m| m.to_sym == :#{snake_case_attribute}}\")",
Switch to get rubyClassName from scriptInfo
diff --git a/cherrypy/test/test_states.py b/cherrypy/test/test_states.py index <HASH>..<HASH> 100644 --- a/cherrypy/test/test_states.py +++ b/cherrypy/test/test_states.py @@ -101,7 +101,6 @@ class ServerStateTests(helper.CPWebCase): self.assertEqual(len(db_connection.threads), 0) # Test server start - cherrypy.server.quickstart(self.server_class) engine.start() self.assertEqual(engine.state, engine.states.STARTED)
Removing the last call to server.quickstart (which is deprecated).
diff --git a/src/lib/context.js b/src/lib/context.js index <HASH>..<HASH> 100644 --- a/src/lib/context.js +++ b/src/lib/context.js @@ -1,6 +1,8 @@ import { L10nError } from './errors'; import { format } from './resolver'; +const IntlObjects = new WeakMap(); + export class Context { constructor(env, langs, resIds) { this.langs = langs; @@ -105,7 +107,15 @@ export class Context { } _memoizeIntlObject(ctor, {code}, opts) { - return new ctor(code, opts); + const cache = IntlObjects.get(ctor) || {}; + const id = code + JSON.stringify(opts); + + if (!cache[id]) { + cache[id] = new ctor(code, opts); + IntlObjects.set(ctor, cache); + } + + return cache[id]; } }
Naive memoization of Intl formatters This is a very naive implementation of memoization of Intl formatters which assumes opts are always passed in the same order. Doesn't affect node's performance, but I'm seeing an improvement on jsshell: format: mean: <I> (-<I>%) stdev: <I> sample: <I>
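The memoization caches constructed formatter objects keyed by locale code plus the JSON form of the options; as the message notes, this naively assumes the options always arrive in the same key order. A Python sketch of the same scheme, with `Formatter` standing in for an Intl constructor:

```python
import json

# Sketch of the memoization in the diff: cache constructed formatter-like
# objects keyed by locale code plus the JSON form of the options. As in
# the original, this assumes options always arrive in the same key order
# (json.dumps with sort_keys=True would lift that assumption).
_cache = {}

class Formatter:
    def __init__(self, code, opts):
        self.code, self.opts = code, opts

def memoized_formatter(code, opts):
    key = code + json.dumps(opts)
    if key not in _cache:
        _cache[key] = Formatter(code, opts)
    return _cache[key]

a = memoized_formatter("en-US", {"style": "decimal"})
b = memoized_formatter("en-US", {"style": "decimal"})
assert a is b   # the second call reuses the cached instance
```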
diff --git a/lib/easy_upnp/control_point/argument_validator.rb b/lib/easy_upnp/control_point/argument_validator.rb index <HASH>..<HASH> 100644 --- a/lib/easy_upnp/control_point/argument_validator.rb +++ b/lib/easy_upnp/control_point/argument_validator.rb @@ -58,6 +58,7 @@ module EasyUpnp class TypeValidator # Valid UPnP types for each ruby class RUBY_TYPE_TO_UPNP_TYPE = { + Array: %w{list}, Float: %w{r4 r8 number fixed.14.4 float}, Integer: %w{ui1 ui2 ui4 i1 i2 i4 int}, String: %w{char string bin.base64 bin.hex uri uuid}, diff --git a/lib/easy_upnp/control_point/client_wrapper.rb b/lib/easy_upnp/control_point/client_wrapper.rb index <HASH>..<HASH> 100644 --- a/lib/easy_upnp/control_point/client_wrapper.rb +++ b/lib/easy_upnp/control_point/client_wrapper.rb @@ -42,13 +42,13 @@ module EasyUpnp :'xmlns:u' => @urn }, }.merge(@call_options) - + if !@cookies.nil? attrs = attrs.merge( cookies: HTTPI::Cookie.new(@cookies) ) end - + advanced_typecasting = @advanced_typecasting response = @client.call(action_name, attrs) do
Add list as an available UPnP type