diff
stringlengths 65
26.7k
| message
stringlengths 7
9.92k
|
|---|---|
diff --git a/buildbot_travis/runner.py b/buildbot_travis/runner.py
index <HASH>..<HASH> 100644
--- a/buildbot_travis/runner.py
+++ b/buildbot_travis/runner.py
@@ -9,10 +9,10 @@ import readline
from subprocess import PIPE, STDOUT, Popen
from threading import Lock
+import urwid
from twisted.internet import reactor
from twisted.internet.threads import deferToThread
-import urwid
from buildbot_travis.steps.create_steps import SetupVirtualEnv
from buildbot_travis.travisyml import TRAVIS_HOOKS, TravisYml
@@ -255,7 +255,7 @@ def run(args):
vecmd = ve.buildCommand()
if not args.dryrun:
rc, out = runner.run(vecmd)
- _, path = runner.run("echo $PATH")
+ _, path = runner.run("echo -n $PATH")
script += 'export PATH="{}/{}/bin:{}"'.format(
runner.pwd, ve.sandboxname, path)
|
fix runner path
not doing echo -n led to a \n at the end of the PATH, so the new PATH had a wrong last entry (ending in '/bin\n')
|
diff --git a/tests/functional/cli/test_cli_artifacts.py b/tests/functional/cli/test_cli_artifacts.py
index <HASH>..<HASH> 100644
--- a/tests/functional/cli/test_cli_artifacts.py
+++ b/tests/functional/cli/test_cli_artifacts.py
@@ -1,12 +1,9 @@
import subprocess
-import sys
import textwrap
import time
from io import BytesIO
from zipfile import is_zipfile
-import pytest
-
content = textwrap.dedent(
"""\
test-artifact:
@@ -23,11 +20,12 @@ data = {
}
-@pytest.mark.skipif(sys.version_info < (3, 8), reason="I am the walrus")
def test_cli_artifacts(capsysbinary, gitlab_config, gitlab_runner, project):
project.files.create(data)
- while not (jobs := project.jobs.list(scope="success")):
+ jobs = None
+ while not jobs:
+ jobs = project.jobs.list(scope="success")
time.sleep(0.5)
job = project.jobs.get(jobs[0].id)
|
test(cli): replace assignment expression
This is a feature added in <I>; removing it allows the test to run
on lower Python versions.
|
diff --git a/manager/controlapi/service.go b/manager/controlapi/service.go
index <HASH>..<HASH> 100644
--- a/manager/controlapi/service.go
+++ b/manager/controlapi/service.go
@@ -502,7 +502,7 @@ func (s *Server) UpdateService(ctx context.Context, request *api.UpdateServiceRe
}
if !reflect.DeepEqual(requestSpecNetworks, specNetworks) {
- return errNetworkUpdateNotSupported
+ return grpc.Errorf(codes.Unimplemented, errNetworkUpdateNotSupported.Error())
}
// Check to see if all the secrets being added exist as objects
@@ -516,11 +516,11 @@ func (s *Server) UpdateService(ctx context.Context, request *api.UpdateServiceRe
// with service mode change (comparing current config with previous config).
// proper way to change service mode is to delete and re-add.
if reflect.TypeOf(service.Spec.Mode) != reflect.TypeOf(request.Spec.Mode) {
- return errModeChangeNotAllowed
+ return grpc.Errorf(codes.Unimplemented, errModeChangeNotAllowed.Error())
}
if service.Spec.Annotations.Name != request.Spec.Annotations.Name {
- return errRenameNotSupported
+ return grpc.Errorf(codes.Unimplemented, errRenameNotSupported.Error())
}
service.Meta.Version = *request.ServiceVersion
|
SwarmKit: updateService does not return proper gRPC errors for some errors. Also, no error is returned if a service cannot be found.
|
diff --git a/lib/gpgme.rb b/lib/gpgme.rb
index <HASH>..<HASH> 100644
--- a/lib/gpgme.rb
+++ b/lib/gpgme.rb
@@ -1099,6 +1099,8 @@ keylist_mode=#{KEYLIST_MODE_NAMES[keylist_mode]}>"
def get_key(fingerprint, secret = false)
rkey = Array.new
err = GPGME::gpgme_get_key(self, fingerprint, rkey, secret ? 1 : 0)
+ # if the key is not found, we get GPG_ERR_EOF
+ return nil if err == GPG_ERR_EOF
exc = GPGME::error_to_exception(err)
raise exc if exc
rkey[0]
@@ -1501,7 +1503,7 @@ validity=#{VALIDITY_NAMES[validity]}, signatures=#{signatures.inspect}>"
"Signature made from revoked key #{from}"
when GPGME::GPG_ERR_BAD_SIGNATURE
"Bad signature from #{from}"
- when GPGME::GPG_ERR_NO_ERROR
+ when GPGME::GPG_ERR_NO_PUBKEY
"No public key for #{from}"
end
end
|
(GPGME::Signature#to_s): Detect "No public key error" correctly.
(GPGME::Ctx#get_key): Return nil if no key is found.
Patch from Hamish Downer. (Bug#<I>)
|
diff --git a/CHANGES.rst b/CHANGES.rst
index <HASH>..<HASH> 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -1,4 +1,4 @@
-0.3 (unreleased)
+0.3 (2017-10-28)
----------------
- Use long instead of int for x/y sizes and indices
diff --git a/fast_histogram/__init__.py b/fast_histogram/__init__.py
index <HASH>..<HASH> 100644
--- a/fast_histogram/__init__.py
+++ b/fast_histogram/__init__.py
@@ -1,3 +1,3 @@
from .histogram import *
-__version__ = "0.3.dev0"
+__version__ = "0.3"
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -9,7 +9,7 @@ extensions = [Extension("fast_histogram._histogram_core",
include_dirs=[np.get_include()])]
setup(name='fast-histogram',
- version='0.3.dev0',
+ version='0.3',
description='Fast simple 1D and 2D histograms',
long_description=open('README.rst').read(),
install_requires=['numpy'],
|
Preparing release <I>
|
diff --git a/lib/pseudohiki/converter.rb b/lib/pseudohiki/converter.rb
index <HASH>..<HASH> 100755
--- a/lib/pseudohiki/converter.rb
+++ b/lib/pseudohiki/converter.rb
@@ -350,7 +350,7 @@ instead of \"#{given_opt}\"."
Encoding.default_internal = internal if internal and not internal.empty?
end
- def parse_command_line_options
+ def setup_command_line_options
OptionParser.new("USAGE: #{File.basename($0)} [OPTION]... [FILE]...
Convert texts written in a Hiki-like notation into another format.") do |opt|
opt.version = PseudoHiki::VERSION
@@ -459,7 +459,7 @@ inside (default: not specified)") do |template|
end
def set_options_from_command_line
- opt = parse_command_line_options
+ opt = setup_command_line_options
yield opt if block_given?
opt.parse!
check_argv
|
renamed OptionManager#parse_command_line_options to #setup_command_line_options
|
diff --git a/mirror_maker.go b/mirror_maker.go
index <HASH>..<HASH> 100644
--- a/mirror_maker.go
+++ b/mirror_maker.go
@@ -28,10 +28,10 @@ var TimingField = &avro.SchemaField{
Default: "null",
Type: &avro.UnionSchema{
Types: []avro.Schema{
+ &avro.NullSchema{},
&avro.ArraySchema{
Items: &avro.LongSchema{},
},
- &avro.NullSchema{},
},
},
}
|
re #<I> attempt to fix schema
|
diff --git a/lib/Doctrine/ODM/MongoDB/UnitOfWork.php b/lib/Doctrine/ODM/MongoDB/UnitOfWork.php
index <HASH>..<HASH> 100644
--- a/lib/Doctrine/ODM/MongoDB/UnitOfWork.php
+++ b/lib/Doctrine/ODM/MongoDB/UnitOfWork.php
@@ -699,11 +699,13 @@ class UnitOfWork implements PropertyChangedListener
} elseif ($value instanceof PersistentCollection) {
$value = $value->unwrap();
}
+ $count = 0;
foreach ($value as $key => $entry) {
$targetClass = $this->dm->getClassMetadata(get_class($entry));
$state = $this->getDocumentState($entry, self::STATE_NEW);
$oid = spl_object_hash($entry);
- $path = $mapping['type'] === 'many' ? $mapping['name'].'.'.$key : $mapping['name'];
+ $path = $mapping['type'] === 'many' ? $mapping['name'].'.'.$count : $mapping['name'];
+ $count++;
if ($state == self::STATE_NEW) {
if ( ! $targetClass->isEmbeddedDocument && ! $mapping['isCascadePersist']) {
throw new \InvalidArgumentException("A new document was found through a relationship that was not"
|
Need to use a $count instead of $key since removing an element can change the array keys which results in bad mongodb queries.
|
diff --git a/lib/neo4j/mixins/node_mixin.rb b/lib/neo4j/mixins/node_mixin.rb
index <HASH>..<HASH> 100644
--- a/lib/neo4j/mixins/node_mixin.rb
+++ b/lib/neo4j/mixins/node_mixin.rb
@@ -98,6 +98,7 @@ module Neo4j
def init_without_node(props) # :nodoc:
props[:_classname] = self.class.to_s
@_java_node = Neo4j.create_node props
+ update_index if props && !props.empty?
@_java_node._wrapper = self
Neo4j.event_handler.node_created(self)
end
|
fixing indexing in initialization, [#<I> state:resolved]
|
diff --git a/lib/chef/provider/group/solaris.rb b/lib/chef/provider/group/solaris.rb
index <HASH>..<HASH> 100644
--- a/lib/chef/provider/group/solaris.rb
+++ b/lib/chef/provider/group/solaris.rb
@@ -23,7 +23,7 @@ class Chef
class Group
class Solaris < Chef::Provider::Group::Groupadd
- provides :group, os: "solaris2"
+ provides :group, platform: "solaris2"
def load_current_resource
super
diff --git a/lib/chef/provider/group/usermod.rb b/lib/chef/provider/group/usermod.rb
index <HASH>..<HASH> 100644
--- a/lib/chef/provider/group/usermod.rb
+++ b/lib/chef/provider/group/usermod.rb
@@ -76,7 +76,7 @@ class Chef
def append_flags
case node[:platform]
- when "openbsd", "netbsd", "aix", "solaris2", "smartos", "omnios"
+ when "openbsd", "netbsd", "aix", "smartos", "omnios"
"-G"
when "solaris"
[ "-a", "-G" ]
|
Change the provides lines to reflect the new setup.
|
diff --git a/collatex/src/test/java/eu/interedition/collatex2/alignmenttable/TranspositionAlignmentTest.java b/collatex/src/test/java/eu/interedition/collatex2/alignmenttable/TranspositionAlignmentTest.java
index <HASH>..<HASH> 100644
--- a/collatex/src/test/java/eu/interedition/collatex2/alignmenttable/TranspositionAlignmentTest.java
+++ b/collatex/src/test/java/eu/interedition/collatex2/alignmenttable/TranspositionAlignmentTest.java
@@ -1,6 +1,7 @@
package eu.interedition.collatex2.alignmenttable;
import org.junit.Assert;
+import org.junit.Ignore;
import org.junit.Test;
import eu.interedition.collatex2.implementation.CollateXEngine;
@@ -16,6 +17,7 @@ public class TranspositionAlignmentTest {
Assert.assertEquals("A: | |y| \n" + "B: x| |y|z\n" + "C: |z|y| \n", engine.align(a, b, c).toString());
}
+ @Ignore
@Test
public void transposeInTwoPairs() {
CollateXEngine engine = new CollateXEngine();
|
ignore failing test for alignment of transpositions
|
diff --git a/code/filter/cmd.php b/code/filter/cmd.php
index <HASH>..<HASH> 100644
--- a/code/filter/cmd.php
+++ b/code/filter/cmd.php
@@ -10,7 +10,7 @@
/**
* Command Filter
*
- * A 'command' is a string containing only the characters [A-Za-z0-9.-_]. Used for names of views, controllers, etc
+ * A 'command' is a string containing only the characters [A-Za-z0-9.,-_].
*
* @author Johan Janssens <https://github.com/johanjanssens>
* @package Koowa\Library\Filter
@@ -26,7 +26,7 @@ class KFilterCmd extends KFilterAbstract implements KFilterTraversable
public function validate($value)
{
$value = trim($value);
- $pattern = '/^[A-Za-z0-9.\-_]*$/';
+ $pattern = '/^[A-Za-z0-9.,\-_]*$/';
return (is_string($value) && (preg_match($pattern, $value)) == 1);
}
|
re #<I> : Add ',' to the regex for the cmd filter.
|
diff --git a/database-provider/src/main/java/org/jboss/pressgang/ccms/provider/DBProviderFactory.java b/database-provider/src/main/java/org/jboss/pressgang/ccms/provider/DBProviderFactory.java
index <HASH>..<HASH> 100644
--- a/database-provider/src/main/java/org/jboss/pressgang/ccms/provider/DBProviderFactory.java
+++ b/database-provider/src/main/java/org/jboss/pressgang/ccms/provider/DBProviderFactory.java
@@ -67,7 +67,7 @@ public class DBProviderFactory extends DataProviderFactory {
if (transactionManager != null) {
try {
final int status = transactionManager.getStatus();
- if (status != Status.STATUS_ROLLING_BACK && status != Status.STATUS_ROLLEDBACK && status != Status.STATUS_NO_TRANSACTION) {
+ if (status != Status.STATUS_NO_TRANSACTION) {
transactionManager.rollback();
}
} catch (SystemException e) {
|
Fixed some missed rollback functions from BZ#<I>.
|
diff --git a/guacamole/src/main/webapp/app/settings/types/ActiveConnectionWrapper.js b/guacamole/src/main/webapp/app/settings/types/ActiveConnectionWrapper.js
index <HASH>..<HASH> 100644
--- a/guacamole/src/main/webapp/app/settings/types/ActiveConnectionWrapper.js
+++ b/guacamole/src/main/webapp/app/settings/types/ActiveConnectionWrapper.js
@@ -38,7 +38,7 @@ angular.module('settings').factory('ActiveConnectionWrapper', [
var ActiveConnectionWrapper = function ActiveConnectionWrapper(template) {
/**
- * The identifier of the data source associate dwith the
+ * The identifier of the data source associated with the
* ActiveConnection wrapped by this ActiveConnectionWrapper.
*
* @type String
|
GUAC-<I>: Associate dwith -> associated with.
|
diff --git a/slave/buildslave/null.py b/slave/buildslave/null.py
index <HASH>..<HASH> 100644
--- a/slave/buildslave/null.py
+++ b/slave/buildslave/null.py
@@ -22,7 +22,8 @@ class LocalBuildSlave(BuildSlaveBase):
@defer.inlineCallbacks
def startService(self):
# importing here to avoid dependency on buildbot master package
- from buildbot.buildslave.protocols.null import Connection
+ # requires buildot version >= 0.9.0b5
+ from buildbot.worker.protocols.null import Connection
yield BuildSlaveBase.startService(self)
# TODO: This is a workaround for using worker with "slave"-api with
|
use renamed module in slave
it's ok, since protocols.null module was introduced in nine branch
|
diff --git a/master/buildbot/process/metrics.py b/master/buildbot/process/metrics.py
index <HASH>..<HASH> 100644
--- a/master/buildbot/process/metrics.py
+++ b/master/buildbot/process/metrics.py
@@ -326,7 +326,7 @@ class AttachedWorkersWatcher:
def _get_rss():
- if sys.platform == 'linux2':
+ if sys.platform == 'linux':
try:
with open("/proc/%i/statm" % os.getpid()) as f:
return int(f.read().split()[1])
diff --git a/master/buildbot/test/unit/test_process_metrics.py b/master/buildbot/test/unit/test_process_metrics.py
index <HASH>..<HASH> 100644
--- a/master/buildbot/test/unit/test_process_metrics.py
+++ b/master/buildbot/test/unit/test_process_metrics.py
@@ -169,8 +169,8 @@ class TestPeriodicChecks(TestMetricBase):
def testGetRSS(self):
self.assertTrue(metrics._get_rss() > 0)
- if sys.platform != 'linux2':
- testGetRSS.skip = "only available on linux2 platforms"
+ if sys.platform != 'linux':
+ testGetRSS.skip = "only available on linux platforms"
class TestReconfig(TestMetricBase):
|
Enable an incorrectly skipped test
Since Python <I>, sys.platform is 'linux' on Linux [1], so
TestPeriodicChecks.testGetRSS is skipped even on Linux.
[1] <URL>
|
diff --git a/src/ORM/Table.php b/src/ORM/Table.php
index <HASH>..<HASH> 100644
--- a/src/ORM/Table.php
+++ b/src/ORM/Table.php
@@ -2206,7 +2206,7 @@ class Table implements RepositoryInterface, EventListenerInterface, EventDispatc
*
* ```
* $article = $this->Articles->patchEntity($article, $this->request->data(), [
- * 'fieldList' => ['title', 'body', 'tags', 'comments],
+ * 'fieldList' => ['title', 'body', 'tags', 'comments'],
* 'associated' => ['Tags', 'Comments.Users' => ['fieldList' => 'username']]
* ]
* );
|
Add closing ' mark
This stops the code colouration going weird on <URL>
|
diff --git a/ripe/atlas/sagan/version.py b/ripe/atlas/sagan/version.py
index <HASH>..<HASH> 100644
--- a/ripe/atlas/sagan/version.py
+++ b/ripe/atlas/sagan/version.py
@@ -1,2 +1,2 @@
-__version__ = "0.7.1"
+__version__ = "0.8.0"
|
Update version.py
Updated the version number for Iñigo's changes.
|
diff --git a/src/api/ledger/pathfind.js b/src/api/ledger/pathfind.js
index <HASH>..<HASH> 100644
--- a/src/api/ledger/pathfind.js
+++ b/src/api/ledger/pathfind.js
@@ -84,7 +84,8 @@ function formatResponse(pathfind, paths) {
const address = pathfind.source.address;
return parsePathfind(address, pathfind.destination.amount, paths);
}
- if (!_.includes(paths.destination_currencies,
+ if (paths.destination_currencies !== undefined &&
+ !_.includes(paths.destination_currencies,
pathfind.destination.amount.currency)) {
throw new NotFoundError('No paths found. ' +
'The destination_account does not accept ' +
|
Fix: Check for destination_currencies property
Example stack:
TypeError: Cannot read property 'join' of undefined
at formatResponse
(ripple-lib/dist/npm/api/ledger/pathfind.js:<I>:<I>)
|
diff --git a/src/Tasks.php b/src/Tasks.php
index <HASH>..<HASH> 100644
--- a/src/Tasks.php
+++ b/src/Tasks.php
@@ -958,7 +958,7 @@ chmod 755 ' . $default_dir . '/settings.php';
$this->say('If you need the very latest data from a Pantheon site, go create a new backup using either the Pantheon backend, or Terminus.');
$which_database = $this->askDefault(
- 'Which database backup should we load (i.e. local/dev/live)?', $default_database
+ 'Which database backup should we load (i.e. local/develop/multidev/live)?', $default_database
);
$getDB = TRUE;
|
Change messaging in Robo prompt for DB backup to use.
|
diff --git a/lib/autowow/cli.rb b/lib/autowow/cli.rb
index <HASH>..<HASH> 100644
--- a/lib/autowow/cli.rb
+++ b/lib/autowow/cli.rb
@@ -5,7 +5,7 @@ require_relative 'vcs'
module Autowow
class CLI < Thor
- # map %w(bm) => :branch_merged
+ map %w(bm) => :branch_merged
desc "branch_merged", "clean working branch and return to master"
def branch_merged(working_dir = '.')
|
Adds short command `bm` for branch_merged
|
diff --git a/PPI/Test/Bootstrap.php b/PPI/Test/Bootstrap.php
index <HASH>..<HASH> 100644
--- a/PPI/Test/Bootstrap.php
+++ b/PPI/Test/Bootstrap.php
@@ -8,7 +8,7 @@
*/
namespace PPI\Test;
-require_once(__DIR__ . '/../AutoLoad.php');
+require_once(__DIR__ . '/../Autoload.php');
require_once(__DIR__ . '/AutoLoad.php');
\PPI\Autoload::config(array(
|
Fix typo in autoloader
Macs aren't case-sensitive but Linux is! :D
|
diff --git a/test/analytics.js b/test/analytics.js
index <HASH>..<HASH> 100644
--- a/test/analytics.js
+++ b/test/analytics.js
@@ -852,6 +852,22 @@ describe('Analytics.js', function () {
expect(spy.called).to.be(true);
spy.restore();
});
+
+ it('sends a url along', function () {
+ var spy = sinon.spy(Provider.prototype, 'track');
+ analytics.track(test.url);
+ expect(spy.calledWith(test.url)).to.be(true);
+ spy.restore();
+ });
+
+ it('sends a clone of context along', function () {
+ var spy = sinon.spy(Provider.prototype, 'track');
+ analytics.track(test.url,test.context);
+ expect(spy.args[0][1]).not.to.equal(test.context);
+ expect(spy.args[0][1]).to.eql(test.context);
+ spy.restore();
+ });
+
});
|
#<I> Added tests to pageview api for allowing context
|
diff --git a/src/Services/Issues.php b/src/Services/Issues.php
index <HASH>..<HASH> 100644
--- a/src/Services/Issues.php
+++ b/src/Services/Issues.php
@@ -3,6 +3,7 @@
namespace TZK\Taiga\Services;
+use TZK\Taiga\RestClient;
use TZK\Taiga\Service;
class Issues extends Service {
@@ -13,11 +14,11 @@ class Issues extends Service {
*
* @param RestClient $root
*/
- public function __construct($root) {
+ public function __construct(RestClient $root) {
parent::__construct($root, 'issues');
}
- public function getAll(array $param = []) {
+ public function getList(array $param = []) {
return $this->get(null, $param);
}
|
Renaming getAll method to getList
Aim is to remain coherent with the rest of the services
|
diff --git a/app/routes/annotations.rb b/app/routes/annotations.rb
index <HASH>..<HASH> 100644
--- a/app/routes/annotations.rb
+++ b/app/routes/annotations.rb
@@ -20,17 +20,22 @@ module OpenBEL
status 200
end
+ options '/api/annotations/values' do
+ response.headers['Allow'] = 'OPTIONS,GET'
+ status 200
+ end
+
options '/api/annotations/:annotation' do
response.headers['Allow'] = 'OPTIONS,GET'
status 200
end
- options '/api/annotations/values/match-results/:match' do
+ options '/api/annotations/:annotation/values' do
response.headers['Allow'] = 'OPTIONS,GET'
status 200
end
- options '/api/annotations/:annotation/values/match-results/:match' do
+ options '/api/annotations/:annotation/values/:value' do
response.headers['Allow'] = 'OPTIONS,GET'
status 200
end
|
corrected options routes; refs #<I>
|
diff --git a/algoliasearch/src/test/java/com/algolia/search/saas/IndexTest.java b/algoliasearch/src/test/java/com/algolia/search/saas/IndexTest.java
index <HASH>..<HASH> 100644
--- a/algoliasearch/src/test/java/com/algolia/search/saas/IndexTest.java
+++ b/algoliasearch/src/test/java/com/algolia/search/saas/IndexTest.java
@@ -635,4 +635,10 @@ public class IndexTest extends PowerMockTestCase {
index.search(query);
verify(mockClient, times(nbTimes)).postRequestRaw(anyString(), anyString(), anyBoolean());
}
+
+ @Test
+ public void testNullCompletionHandler() throws Exception {
+ // Check that the code does not crash when no completion handler is specified.
+ index.addObjectAsync(new JSONObject("{\"city\": \"New York\"}"), null);
+ }
}
|
Add test case with null completion handler
|
diff --git a/npm/cli.js b/npm/cli.js
index <HASH>..<HASH> 100755
--- a/npm/cli.js
+++ b/npm/cli.js
@@ -8,3 +8,14 @@ var child = proc.spawn(electron, process.argv.slice(2), {stdio: 'inherit'})
child.on('close', function (code) {
process.exit(code)
})
+
+const handleTerminationSignal = function (signal) {
+ process.on(signal, function signalHandler () {
+ if (!child.killed) {
+ child.kill(signal)
+ }
+ })
+}
+
+handleTerminationSignal('SIGINT')
+handleTerminationSignal('SIGTERM')
|
fix: handle SIGINT and SIGTERM from the Electron CLI helper (#<I>)
Fixes #<I>
|
diff --git a/mode/vbscript/vbscript.js b/mode/vbscript/vbscript.js
index <HASH>..<HASH> 100644
--- a/mode/vbscript/vbscript.js
+++ b/mode/vbscript/vbscript.js
@@ -1,5 +1,5 @@
CodeMirror.defineMode("vbscript", function() {
- var regexVBScriptKeyword = /Call|Case|CDate|Clear|CInt|CLng|Const|CStr|Description|Dim|Do|Each|Else|ElseIf|End|Err|Error|Exit|False|For|Function|If|LCase|Loop|LTrim|Next|Nothing|Now|Number|On|Preserve|Quit|ReDim|Resume|RTrim|Select|Set|Sub|Then|To|Trim|True|UBound|UCase|Until|VbCr|VbCrLf|VbLf|VbTab/im;
+ var regexVBScriptKeyword = /^(?:Call|Case|CDate|Clear|CInt|CLng|Const|CStr|Description|Dim|Do|Each|Else|ElseIf|End|Err|Error|Exit|False|For|Function|If|LCase|Loop|LTrim|Next|Nothing|Now|Number|On|Preserve|Quit|ReDim|Resume|RTrim|Select|Set|Sub|Then|To|Trim|True|UBound|UCase|Until|VbCr|VbCrLf|VbLf|VbTab)$/im;
return {
token: function(stream) {
|
[vbscript mode] Add start/end anchors to keyword regexp
Issue #<I>
|
diff --git a/webmap/models.py b/webmap/models.py
index <HASH>..<HASH> 100644
--- a/webmap/models.py
+++ b/webmap/models.py
@@ -113,7 +113,7 @@ class Poi(models.Model):
# Relationships
marker = models.ForeignKey(Marker, limit_choices_to={'status__show_to_mapper': 'True', 'layer__status__show_to_mapper': 'True'}, verbose_name=_(u"marker"), help_text=_("Select icon, that will be shown in map"), related_name="pois")
status = models.ForeignKey(Status, default=config.DEFAULT_STATUS_ID, help_text=_("POI status, determinse if it will be shown in map"), verbose_name=_(u"status"))
- properties = models.ManyToManyField('Property', blank=True, null=True, help_text=_("POI properties"), verbose_name=_("properties"))
+ properties = models.ManyToManyField('Property', blank=True, null=True, help_text=_("POI properties"), verbose_name=_("properties"), limit_choices_to={'status__show_to_mapper': 'True'})
importance = models.SmallIntegerField(default=0, verbose_name=_(u"importance"),
help_text=_(u"""Minimal zoom modificator (use 20+ to show always).<br/>"""))
|
models: limit property choices for POI
|
diff --git a/src/Controller/UserRegistrationController.php b/src/Controller/UserRegistrationController.php
index <HASH>..<HASH> 100644
--- a/src/Controller/UserRegistrationController.php
+++ b/src/Controller/UserRegistrationController.php
@@ -119,7 +119,7 @@ class UserRegistrationController extends AbstractActionController
/**
* Gets userMapper
*/
- public function getUserMapper()
+ protected function getUserMapper()
{
if (!$this->userMapper) {
$this->userMapper = $this->getServiceLocator()->get('zfcuser_user_mapper');
|
getUserMapper method should be private
|
diff --git a/Kwc/Shop/Products/Directory/Controller.php b/Kwc/Shop/Products/Directory/Controller.php
index <HASH>..<HASH> 100644
--- a/Kwc/Shop/Products/Directory/Controller.php
+++ b/Kwc/Shop/Products/Directory/Controller.php
@@ -5,10 +5,6 @@ class Kwc_Shop_Products_Directory_Controller extends Kwc_Directories_Item_Direct
protected $_buttons = array('add', 'delete', 'save');
protected $_position = 'pos';
- protected $_editDialog = array(
- 'width' => 620,
- 'height' => 500
- );
protected function _initColumns()
{
|
shop-controller does not have editDialog, removed because it caused error with tabPanel
|
diff --git a/lib/config.js b/lib/config.js
index <HASH>..<HASH> 100644
--- a/lib/config.js
+++ b/lib/config.js
@@ -4,6 +4,13 @@ var fs = require('fs');
var extend = require('extend');
var path = require('path');
+// Create the cfg dir if it does not exist
+/* istanbul ignore next: Simple directory creation, we know it will work. */
+var cfgDirPath = path.resolve(__dirname, '../cfg');
+if (!fs.existsSync(cfgDirPath)) {
+ fs.mkdirSync(cfgDirPath);
+}
+
var defaultConfig = {
host: 'localhost',
port: 9090,
@@ -63,14 +70,6 @@ var config = null;
var filteredConfig = null;
var defaultConfigCopy = extend(true, {}, defaultConfig);
-// Create the cfg dir if it does not exist
-if (!process.env.browser) {
- var cfgDirPath = path.resolve(__dirname, '..', 'cfg');
- if (!fs.existsSync(cfgDirPath)) {
- fs.mkdirSync(cfgDirPath);
- }
-}
-
// Load user config if it exists, and merge it
if (fs.existsSync('cfg/nodecg.json')) {
var rawUserConfigFile = fs.readFileSync('cfg/nodecg.json', 'utf8');
|
[config] Remove browserify check. We have a separate lib for the browserified config now.
|
diff --git a/etrago/cluster/gasclustering.py b/etrago/cluster/gasclustering.py
index <HASH>..<HASH> 100755
--- a/etrago/cluster/gasclustering.py
+++ b/etrago/cluster/gasclustering.py
@@ -77,6 +77,8 @@ def create_gas_busmap(etrago):
Integer weighting for each ch4_buses.index
"""
+ MAX_WEIGHT = 1e5 # relevant only for foreign nodes with extra high CH4 generation capacity
+
to_neglect = [
"CH4",
"H2_to_CH4",
@@ -115,11 +117,9 @@ def create_gas_busmap(etrago):
rel_links[i] += (
etrago.network.loads_t.p_set.loc[:, loads_.loc[i]].mean().sum()
)
- rel_links[i] = int(rel_links[i])
+ rel_links[i] = min(int(rel_links[i]), MAX_WEIGHT)
weightings = pd.DataFrame.from_dict(rel_links, orient="index")
- # RUSSIA CH4 GENERATION IS SET TO 1E9, why? THIS CRASHES THE LOGIC
- weightings.loc["6116"] = 100000
if save:
weightings.to_csv(save)
|
capped maximum bus weight to 1E5 in order to handle extra high/inf ch4 generator capacities from foreign nodes
|
diff --git a/tests/test_wfgenerator.py b/tests/test_wfgenerator.py
index <HASH>..<HASH> 100644
--- a/tests/test_wfgenerator.py
+++ b/tests/test_wfgenerator.py
@@ -9,4 +9,4 @@ class TestWFGenerator(object):
return WorkflowGenerator()
def test_steps_in_library(self, wf):
- assert len(wf.steps_library) > 0
+ assert len(wf.steps_library.steps) > 0
|
Fix reference to new StepsLibrary object
|
diff --git a/filesystems/tests/test_path.py b/filesystems/tests/test_path.py
index <HASH>..<HASH> 100644
--- a/filesystems/tests/test_path.py
+++ b/filesystems/tests/test_path.py
@@ -90,10 +90,13 @@ class TestPath(TestCase):
self.assertEqual(Path().basename(), "")
def test_dirname(self):
- self.assertEqual(Path("a", "b", "c").dirname(), "/a/b")
+ self.assertEqual(
+ Path("a", "b", "c").dirname(),
+ os.path.join(os.sep, "a", "b"),
+ )
def test_root_dirname(self):
- self.assertEqual(Path().dirname(), "/")
+ self.assertEqual(Path().dirname(), os.sep)
class TestRelativePath(TestCase):
@@ -104,4 +107,6 @@ class TestRelativePath(TestCase):
)
def test_str(self):
- self.assertEqual(str(RelativePath("a", "b", "c")), "a/b/c")
+ self.assertEqual(
+ str(RelativePath("a", "b", "c")), os.path.join("a", "b", "c"),
+ )
|
Start to fix a few tests for Windows.
|
diff --git a/lib/generators/rails/templates/controller.rb b/lib/generators/rails/templates/controller.rb
index <HASH>..<HASH> 100644
--- a/lib/generators/rails/templates/controller.rb
+++ b/lib/generators/rails/templates/controller.rb
@@ -2,6 +2,8 @@
class <%= controller_class_name %>Controller < ApplicationController
<%= controller_before_filter %> :set_<%= file_name %>, only: [:show, :edit, :update, :destroy]
+ respond_to :html
+
<% unless options[:singleton] -%>
def index
@<%= table_name %> = <%= orm_class.all(class_name) %>
|
Add a default `respond_to` for generated scaffolded controllers so they will work out of the box.
This way users using `responders` will get a functional scaffold when using the
Rails generator, without having to define the format by themselves.
Based on plataformatec/devise#<I>.
|
diff --git a/src/main/java/com/codeborne/selenide/WebDriverRunner.java b/src/main/java/com/codeborne/selenide/WebDriverRunner.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/codeborne/selenide/WebDriverRunner.java
+++ b/src/main/java/com/codeborne/selenide/WebDriverRunner.java
@@ -98,6 +98,10 @@ public class WebDriverRunner {
return webdriver != null && webdriver instanceof InternetExplorerDriver;
}
+ public static boolean htmlUnit() {
+ return webdriver != null && webdriver instanceof HtmlUnitDriver;
+ }
+
public static void clearBrowserCache() {
if (webdriver != null) {
webdriver.manage().deleteAllCookies();
|
Added method htmlUnit()
|
diff --git a/webpack.config.js b/webpack.config.js
index <HASH>..<HASH> 100644
--- a/webpack.config.js
+++ b/webpack.config.js
@@ -3,15 +3,21 @@ require('babel-loader');
require('json-loader');
module.exports = {
- entry: {
- keo: ['./src/keo.js']
- },
+ entry: './src/keo.js',
output: {
path: __dirname + '/dist',
- filename: '[name].js',
+ filename: 'keo.js',
library: 'keo',
libraryTarget: 'commonjs2'
},
+ externals: {
+ 'axios': true,
+ 'react-dom': true,
+ 'ramda': true,
+ 'react': true,
+ 'redux': true,
+ 'react-redux': true
+ },
module: {
loaders: [
{
|
Added externals. Relates to #<I>
|
diff --git a/core/src/main/java/org/testcontainers/utility/ResourceReaper.java b/core/src/main/java/org/testcontainers/utility/ResourceReaper.java
index <HASH>..<HASH> 100644
--- a/core/src/main/java/org/testcontainers/utility/ResourceReaper.java
+++ b/core/src/main/java/org/testcontainers/utility/ResourceReaper.java
@@ -74,10 +74,7 @@ public final class ResourceReaper {
binds.add(new Bind("//var/run/docker.sock", new Volume("/var/run/docker.sock")));
String ryukContainerId = client.createContainerCmd(ryukImage)
- .withHostConfig(new HostConfig() {
- @JsonProperty("AutoRemove")
- boolean autoRemove = true;
- })
+ .withHostConfig(new HostConfig().withAutoRemove(true))
.withExposedPorts(new ExposedPort(8080))
.withPublishAllPorts(true)
.withName("testcontainers-ryuk-" + DockerClientFactory.SESSION_ID)
|
Fix AutoRemove of Ryuk container (#<I>)
|
diff --git a/lib/sensu/api.rb b/lib/sensu/api.rb
index <HASH>..<HASH> 100644
--- a/lib/sensu/api.rb
+++ b/lib/sensu/api.rb
@@ -302,9 +302,11 @@ module Sensu
:occurrences => 1
}.to_json).callback do
$redis.set('stash:test/test', '{"key": "value"}').callback do
- Thin::Logging.silent = true
- Thin::Server.start(self, $settings.api.port)
- block.call
+ $redis.sadd('stashes', 'test/test').callback do
+ Thin::Logging.silent = true
+ Thin::Server.start(self, $settings.api.port)
+ block.call
+ end
end
end
end
|
[testing] fix test scaffolding
|
diff --git a/src/password/actions.js b/src/password/actions.js
index <HASH>..<HASH> 100644
--- a/src/password/actions.js
+++ b/src/password/actions.js
@@ -228,7 +228,6 @@ function autoSignInError(id, error) {
// TODO: proper error message
// const errorMessage = l.ui.t(lock, ["error", "signIn", error.error], {cred: cred, __textOnly: true}) || l.ui.t(lock, ["error", "signIn", "lock.request"], {cred: cred, __textOnly: true});
const errorMessage = "An error ocurred when logging in";
- console.log("ufff");
swap(updateEntity, "lock", id, m => {
m = l.setSubmitting(m, false, errorMessage);
m = m.set("signedIn", false);
diff --git a/src/password/index.js b/src/password/index.js
index <HASH>..<HASH> 100644
--- a/src/password/index.js
+++ b/src/password/index.js
@@ -9,7 +9,6 @@ export function initPassword(model, options) {
}
function processPasswordOptions(options) {
- console.log("options", options);
let { activities, connection, loginAfterSignUp, usernameStyle } = options;
if (!connection || typeof connection !== "string") {
|
Remove no longer needed console.log debug calls
|
diff --git a/examples/blesh/main.go b/examples/blesh/main.go
index <HASH>..<HASH> 100644
--- a/examples/blesh/main.go
+++ b/examples/blesh/main.go
@@ -357,6 +357,9 @@ func cmdSub(c *cli.Context) error {
if err := doConnect(c); err != nil {
return err
}
+ if err := doDiscover(c); err != nil {
+ return err
+ }
// NotificationHandler
h := func(req []byte) { fmt.Printf("notified: %x | %q\n", req, req) }
if u := curr.profile.Find(ble.NewCharacteristic(curr.uuid)); u != nil {
|
blesh: discover profile before sub
fix #<I>
|
diff --git a/lib/cancan/controller_additions.rb b/lib/cancan/controller_additions.rb
index <HASH>..<HASH> 100644
--- a/lib/cancan/controller_additions.rb
+++ b/lib/cancan/controller_additions.rb
@@ -294,7 +294,7 @@ module CanCan
#
# class ApplicationController < ActionController::Base
# rescue_from CanCan::AccessDenied do |exception|
- # flash[:error] = exception.message
+ # flash[:alert] = exception.message
# redirect_to root_url
# end
# end
|
changing flash[:error] to flash[:alert] in rdocs - closes #<I>
|
diff --git a/playhouse/sqlite_ext.py b/playhouse/sqlite_ext.py
index <HASH>..<HASH> 100644
--- a/playhouse/sqlite_ext.py
+++ b/playhouse/sqlite_ext.py
@@ -554,6 +554,13 @@ def ClosureTable(model_class, foreign_key=None):
return type(name, (BaseClosureTable,), {'Meta': Meta})
+@Node.extend(clone=False)
+def disqualify(self):
+ # In the where clause, prevent the given node/expression from constraining
+ # an index.
+ return Clause('+', self, glue='')
+
+
class SqliteExtDatabase(SqliteDatabase):
"""
Database class which provides additional Sqlite-specific functionality:
|
Disqualify index operator for sqlite.
|
diff --git a/airflow/www/security.py b/airflow/www/security.py
index <HASH>..<HASH> 100644
--- a/airflow/www/security.py
+++ b/airflow/www/security.py
@@ -21,7 +21,7 @@
from flask import g
from flask_appbuilder.security.sqla import models as sqla_models
from flask_appbuilder.security.sqla.manager import SecurityManager
-from sqlalchemy import or_, and_
+from sqlalchemy import and_, or_
from airflow import models
from airflow.exceptions import AirflowException
|
[AIRFLOW-<I>] Fix isort problem (#<I>)
|
diff --git a/moment.js b/moment.js
index <HASH>..<HASH> 100644
--- a/moment.js
+++ b/moment.js
@@ -1409,7 +1409,9 @@
};
for (i in lists) {
- makeList(lists[i]);
+ if (lists.hasOwnProperty(i)) {
+ makeList(lists[i]);
+ }
}
// for use by developers when extending the library
|
added hasOwnProperty check to lists enumeration
|
diff --git a/gremlin-core/src/main/java/org/apache/tinkerpop/gremlin/structure/util/ElementHelper.java b/gremlin-core/src/main/java/org/apache/tinkerpop/gremlin/structure/util/ElementHelper.java
index <HASH>..<HASH> 100644
--- a/gremlin-core/src/main/java/org/apache/tinkerpop/gremlin/structure/util/ElementHelper.java
+++ b/gremlin-core/src/main/java/org/apache/tinkerpop/gremlin/structure/util/ElementHelper.java
@@ -117,7 +117,7 @@ public final class ElementHelper {
if (!(propertyKeyValues[i] instanceof String) && !(propertyKeyValues[i] instanceof T))
throw Element.Exceptions.providedKeyValuesMustHaveALegalKeyOnEvenIndices();
- if (propertyKeyValues[i + 1] == null) {
+ if (null == propertyKeyValues[i + 1]) {
throw Property.Exceptions.propertyValueCanNotBeNull();
}
}
|
Reversed logic of if to place null first CTR
|
diff --git a/spec/helper.rb b/spec/helper.rb
index <HASH>..<HASH> 100644
--- a/spec/helper.rb
+++ b/spec/helper.rb
@@ -25,10 +25,6 @@ class TimeWithZone
end
end
-def nsjsonserialization_on_other_than_macruby(engine)
- engine == 'nsjsonserialization' && !macruby?
-end
-
def jruby?
defined?(RUBY_ENGINE) && RUBY_ENGINE == 'jruby'
end
diff --git a/spec/multi_json_spec.rb b/spec/multi_json_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/multi_json_spec.rb
+++ b/spec/multi_json_spec.rb
@@ -61,10 +61,8 @@ describe "MultiJson" do
end
%w(json_gem json_pure nsjsonserialization oj ok_json yajl).each do |engine|
- if nsjsonserialization_on_other_than_macruby(engine)
- puts "NSJSONSerialization is exclusively available for MacRuby only."
- next
- end
+ next if !macruby? && engine == 'nsjsonserialization'
+ next if jruby? && engine == 'oj'
context engine do
before do
|
Don't test Oj on JRuby
|
diff --git a/Kwf_js/Menu/Index.js b/Kwf_js/Menu/Index.js
index <HASH>..<HASH> 100644
--- a/Kwf_js/Menu/Index.js
+++ b/Kwf_js/Menu/Index.js
@@ -191,7 +191,7 @@ Kwf.Menu.Index = Ext2.extend(Ext2.Toolbar,
if (result.fullname && result.userSelfControllerUrl) {
this.userToolbar.add({
id: 'currentUser',
- text: result.fullname,
+ text: Ext2.util.Format.htmlEncode(result.fullname),
cls: 'x2-btn-text-icon',
icon: '/assets/silkicons/user.png',
disabled: !result.userId,
|
escape username in backend to prevent xss vulnerability
|
diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -46,7 +46,9 @@ function Shrinkwrap(options) {
? options.mirrors
: false;
- this.registry = new Registry({
+ this.registry = options.registry instanceof Registry
+ ? options.registry
+ : new Registry({
registry: options.registry || Registry.mirrors.nodejitsu,
githulk: options.githulk,
mirrors: options.mirrors
|
[fix] Allow pre-configured npmjs instances to be passed.
|
diff --git a/bambi/tests/test_model.py b/bambi/tests/test_model.py
index <HASH>..<HASH> 100644
--- a/bambi/tests/test_model.py
+++ b/bambi/tests/test_model.py
@@ -1,4 +1,4 @@
-import pytest
+import pytest, re
from bambi.models import Term, Model
from bambi.priors import Prior
from os.path import dirname, join
@@ -383,7 +383,10 @@ def test_cell_means_with_random_intercepts(crossed_data):
assert set(priors0) == set(priors1)
# test summary
- full = set(fitted.summary(exclude_ranefs=False, hide_transformed=False).index)
+ # it looks like some versions of pymc3 add a trailing '_' to transformed vars and
+ # some dont. so here for consistency we strip out any trailing '_' that we find
+ full = fitted.summary(exclude_ranefs=False, hide_transformed=False).index
+ full = set([re.sub(r'_$', r'', x) for x in full])
test_set = set(fitted.summary(exclude_ranefs=False).index)
assert test_set == full.difference(set(['Y_sd_interval','u_subj_sd_log']))
test_set = set(fitted.summary(hide_transformed=False).index)
|
Fixed the test of _filter_names
It looks like some versions of pymc3 add a trailing '_' to transformed
vars and some don't. Mine doesn't but it seems that the travis version
does. So for consistency across pymc3 versions, the test strips off
any trailing '_' found in the var names. Note that this is purely a
test issue and actual user functionality should be fine either way.
|
diff --git a/core/src/main/java/com/orientechnologies/orient/core/sql/OCommandExecutorSQLAbstract.java b/core/src/main/java/com/orientechnologies/orient/core/sql/OCommandExecutorSQLAbstract.java
index <HASH>..<HASH> 100755
--- a/core/src/main/java/com/orientechnologies/orient/core/sql/OCommandExecutorSQLAbstract.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/sql/OCommandExecutorSQLAbstract.java
@@ -155,7 +155,7 @@ public abstract class OCommandExecutorSQLAbstract extends OCommandExecutorAbstra
for (String clazz : iClassNames) {
final OClass cls = ((OMetadataInternal) db.getMetadata()).getImmutableSchemaSnapshot().getClass(clazz);
if (cls != null)
- for (int clId : cls.getClusterIds()) {
+ for (int clId : cls.getPolymorphicClusterIds()) {
// FILTER THE CLUSTER WHERE THE USER HAS THE RIGHT ACCESS
if (clId > -1 && checkClusterAccess(db, db.getClusterNameById(clId)))
clusters.add(db.getClusterNameById(clId).toLowerCase());
|
Fixed issue #<I> about polymorphic cluster selection on distributed execution
|
diff --git a/lib/cm_client.js b/lib/cm_client.js
index <HASH>..<HASH> 100644
--- a/lib/cm_client.js
+++ b/lib/cm_client.js
@@ -1,6 +1,6 @@
var Steam = require('../index.js');
var ByteBuffer = require('bytebuffer');
-var SteamCrypto = require('steam-crypto');
+var SteamCrypto = require('@doctormckay/steam-crypto');
var BufferCRC32 = require('buffer-crc32');
var Zip = require('adm-zip');
diff --git a/lib/tcp_connection.js b/lib/tcp_connection.js
index <HASH>..<HASH> 100644
--- a/lib/tcp_connection.js
+++ b/lib/tcp_connection.js
@@ -1,4 +1,4 @@
-var SteamCrypto = require('steam-crypto');
+var SteamCrypto = require('@doctormckay/steam-crypto');
var Socket = require('net').Socket;
module.exports = TCPConnection;
diff --git a/lib/udp_connection.js b/lib/udp_connection.js
index <HASH>..<HASH> 100644
--- a/lib/udp_connection.js
+++ b/lib/udp_connection.js
@@ -1,4 +1,4 @@
-var SteamCrypto = require('steam-crypto');
+var SteamCrypto = require('@doctormckay/steam-crypto');
var ByteBuffer = require('bytebuffer');
var Dgram = require('dgram');
|
Fixed broken references to steam-crypto
|
diff --git a/pkg/dns/etcd_dns.go b/pkg/dns/etcd_dns.go
index <HASH>..<HASH> 100644
--- a/pkg/dns/etcd_dns.go
+++ b/pkg/dns/etcd_dns.go
@@ -101,6 +101,15 @@ func (c *coreDNS) list(key string) ([]SrvRecord, error) {
}
srvRecord.Key = strings.TrimPrefix(string(n.Key), key)
srvRecord.Key = strings.TrimSuffix(srvRecord.Key, srvRecord.Host)
+
+ // Skip non-bucket entry like for a key
+ // /skydns/net/miniocloud/10.0.0.1 that may exist as
+ // dns entry for the server (rather than the bucket
+ // itself).
+ if srvRecord.Key == "" {
+ continue
+ }
+
// SRV records are stored in the following form
// /skydns/net/miniocloud/bucket1, so this function serves multiple
// purposes basically when we do a Get(bucketName) this function
|
Skip non-bucket dns entry in federated bucket list (#<I>)
|
diff --git a/src/Money.php b/src/Money.php
index <HASH>..<HASH> 100644
--- a/src/Money.php
+++ b/src/Money.php
@@ -354,6 +354,8 @@ final class Money implements JsonSerializable
/**
* @throws InvalidArgumentException if the given $money is zero.
+ *
+ * @psalm-return numeric-string
*/
public function ratioOf(Money $money): string
{
|
Missing numeric-string type in ratioOf
|
diff --git a/src/android/com/adobe/phonegap/push/PushPlugin.java b/src/android/com/adobe/phonegap/push/PushPlugin.java
index <HASH>..<HASH> 100644
--- a/src/android/com/adobe/phonegap/push/PushPlugin.java
+++ b/src/android/com/adobe/phonegap/push/PushPlugin.java
@@ -138,6 +138,7 @@ public class PushPlugin extends CordovaPlugin implements PushConstants {
NotificationChannel mChannel = new NotificationChannel(DEFAULT_CHANNEL_ID, "PhoneGap PushPlugin",
NotificationManager.IMPORTANCE_DEFAULT);
mChannel.enableVibration(options.optBoolean(VIBRATE, true));
+ mChannel.setShowBadge(true);
notificationManager.createNotificationChannel(mChannel);
}
}
|
✨🐧 Issue #<I>: Implement Android Oreo Notification badges
|
diff --git a/spec/unit/transformer_spec.rb b/spec/unit/transformer_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/unit/transformer_spec.rb
+++ b/spec/unit/transformer_spec.rb
@@ -29,6 +29,11 @@ describe Transproc::Transformer do
it { expect(klass.container).to eq(container) }
it { is_expected.to be_a(::Class) }
+ it { expect(klass.ancestors).to include(Transproc::Transformer) }
+
+ it 'does not change super class' do
+ expect(Transproc::Transformer.container).not_to eq(container)
+ end
end
describe '.t' do
|
Add more tests to Transformer.[]
|
diff --git a/test/test-79-npm/nightmare/nightmare.meta.js b/test/test-79-npm/nightmare/nightmare.meta.js
index <HASH>..<HASH> 100644
--- a/test/test-79-npm/nightmare/nightmare.meta.js
+++ b/test/test-79-npm/nightmare/nightmare.meta.js
@@ -1,10 +1,10 @@
'use strict';
module.exports = function (stamp, flags) {
- if (stamp.p === 'win32' && flags.ci) {
+ if (flags.ci) {
return {
allow: false,
- note: 'windows CI seems to hang'
+ note: 'headless CI seems to fail headful electron'
};
}
|
revoke nightmare from all ci platforms
|
diff --git a/tasks/engines/fontforge.js b/tasks/engines/fontforge.js
index <HASH>..<HASH> 100644
--- a/tasks/engines/fontforge.js
+++ b/tasks/engines/fontforge.js
@@ -55,9 +55,6 @@ module.exports = function(o, allDone) {
else {
logger.verbose(chalk.grey('fontforge: ') + line);
}
- if (line.match(version) && success) {
- o.fontforgeVersion.push(line);
- }
});
if (warn.length) {
|
Do not store fontforge version in options object.
|
diff --git a/rah_cache.php b/rah_cache.php
index <HASH>..<HASH> 100644
--- a/rah_cache.php
+++ b/rah_cache.php
@@ -112,11 +112,11 @@ class rah_cache {
*/
public function update_lastmod() {
- global $prefs, $rah_cache;
+ global $rah_cache;
if(!empty($rah_cache['path'])) {
file_put_contents(
- $rah_cache['path'] . '/_lastmod.rah', @strtotime($prefs['lastmod'])
+ $rah_cache['path'] . '/_lastmod.rah', get_pref('lastmod', time(), true)
);
}
}
|
Textpattern doesn't update the lastmod value in memory.
Need to get the up to date value from the database then.
|
diff --git a/routes/http.go b/routes/http.go
index <HASH>..<HASH> 100644
--- a/routes/http.go
+++ b/routes/http.go
@@ -24,6 +24,8 @@ var routes = []route{
{"/", controllers.MainHandler},
{"/openidcallback", login.LoginCallbackHandler},
{"/startLogin", login.LoginHandler},
+ {"/startTwitchLogin", login.TwitchLogin},
+ {"/twitchAuth", login.TwitchAuth},
{"/logout", login.LogoutHandler},
{"/websocket/", controllers.SocketHandler},
|
Add routes for twitch authentication.
|
diff --git a/openquake/risklib/riskinput.py b/openquake/risklib/riskinput.py
index <HASH>..<HASH> 100644
--- a/openquake/risklib/riskinput.py
+++ b/openquake/risklib/riskinput.py
@@ -173,7 +173,8 @@ class EpsilonGetter(object):
If the ``asset_correlation`` is 1 the numbers are the same for
all assets of the same taxonomy.
- >>> epsgetter = EpsilonGetter(42, 1, 5)
+ >>> epsgetter = EpsilonGetter(
+ ... master_seed=42, asset_correlation=1, tot_events=5)
>>> assets = numpy.array([(0, 1), (1, 1), (2, 2)],
... [('ordinal', int), ('taxonomy', int)])
>>> epsgetter.get(assets)
|
Improved doctest [ci skip]
|
diff --git a/prow/gerrit/gerrit.go b/prow/gerrit/gerrit.go
index <HASH>..<HASH> 100644
--- a/prow/gerrit/gerrit.go
+++ b/prow/gerrit/gerrit.go
@@ -212,6 +212,8 @@ func (c *Controller) queryProjectChanges(proj string) ([]gerrit.ChangeInfo, erro
continue
}
+ logrus.Infof("Change %s, last updated %s", change.Number, change.Updated)
+
// process if updated later than last updated
// stop if update was stale
if updated.After(c.lastUpdate) {
|
add some more log entries for gerrit
|
diff --git a/test/basic.js b/test/basic.js
index <HASH>..<HASH> 100644
--- a/test/basic.js
+++ b/test/basic.js
@@ -185,11 +185,13 @@ tape("addon - sample query with an episode", function(t) {
t.ok(resp && !isNaN(resp.availability), "has availability");
//t.ok(resp && !isNaN(resp.uploaders), "has uploaders");
+ /*
var file = resp && resp.map[resp.mapIdx];
t.ok(file, "has selected file");
t.ok(file && file.season && file.episode, "selected file has season/episode");
t.ok(file && file.season==season && file.episode.indexOf(episode)!=-1, "selected file matches query");
-
+ */
+
t.end();
});
});
|
we don't return map now, so fix test
|
diff --git a/plugins/org.eclipse.xtext/src/org/eclipse/xtext/serializer/sequencer/HiddenTokenSequencer.java b/plugins/org.eclipse.xtext/src/org/eclipse/xtext/serializer/sequencer/HiddenTokenSequencer.java
index <HASH>..<HASH> 100644
--- a/plugins/org.eclipse.xtext/src/org/eclipse/xtext/serializer/sequencer/HiddenTokenSequencer.java
+++ b/plugins/org.eclipse.xtext/src/org/eclipse/xtext/serializer/sequencer/HiddenTokenSequencer.java
@@ -242,6 +242,8 @@ public class HiddenTokenSequencer implements IHiddenTokenSequencer, ISyntacticSe
} else if (belongsToDeletedElement(next)) {
handleDeletedElement(out, deletedSemanticElements, next);
ni.prune();
+ } else if (tokenUtil.isToken(next)) {
+ break;
}
}
}
|
fixed a regression introduced by the fix for [<I>]
|
diff --git a/src/resources/messaging.js b/src/resources/messaging.js
index <HASH>..<HASH> 100644
--- a/src/resources/messaging.js
+++ b/src/resources/messaging.js
@@ -122,7 +122,6 @@ class Messaging extends ResourceBase {
this.convs = {}
this.ecies = ecies
this.events = new EventEmitter()
- this.unreadStatus = UNREAD_STATUS
}
onAccount(account_key) {
@@ -776,6 +775,24 @@ class Messaging extends ResourceBase {
return room_id
}
+ // messages supplied by the 'msg' event have status included
+ // this is a convenience method for tracking status on spoofed messages
+ getStatus({ created, hash }) {
+ const messageStatuses = JSON.parse(
+ localStorage.getItem(`${storeKeys.messageStatuses}:${this.account_key}`)
+ )
+ // convert stored timestamp string to date
+ const subscriptionStart = new Date(
+ +localStorage.getItem(`${storeKeys.messageSubscriptionStart}:${this.account_key}`)
+ )
+ const isWatched = created > subscriptionStart
+ const status =
+ isWatched && messageStatuses && messageStatuses[hash] === READ_STATUS
+ ? READ_STATUS
+ : UNREAD_STATUS
+ return status
+ }
+
// we allow the entire message to be passed in (for consistency with other resources + convenience)
// however all we are updating is the status
set({ hash, status }) {
|
Convenience method for read status (#<I>)
|
diff --git a/systems/builder-plugin.js b/systems/builder-plugin.js
index <HASH>..<HASH> 100644
--- a/systems/builder-plugin.js
+++ b/systems/builder-plugin.js
@@ -128,7 +128,7 @@ function builderPlugin(system) {
const buildTask = this.state[buildSym][task.index];
if (buildTask.touched === false) {
- debugBuild(`starting new build task: `, buildTask);
+ debugBuild(`starting new build task: %o`, buildTask);
}
buildTask.touched = true;
|
quiet down builder debug a little by pushing new tasks to only 1 io line
|
diff --git a/src/components/validationMixin.js b/src/components/validationMixin.js
index <HASH>..<HASH> 100755
--- a/src/components/validationMixin.js
+++ b/src/components/validationMixin.js
@@ -116,9 +116,7 @@ export default function validationMixin(strategy) {
clearValidations={this.clearValidations}
handleValidation={this.handleValidation}
{...this.props}
- >
- {this.props.children}
- </WrappedComponent>
+ />
);
}
}
|
Remove redundant forwarding of children property to wrapped component
|
diff --git a/core/server/middleware/serve-favicon.js b/core/server/middleware/serve-favicon.js
index <HASH>..<HASH> 100644
--- a/core/server/middleware/serve-favicon.js
+++ b/core/server/middleware/serve-favicon.js
@@ -45,7 +45,7 @@ function serveFavicon() {
if (settingsCache.get('icon')) {
// depends on the uploaded icon extension
if (originalExtension !== requestedExtension) {
- return res.redirect(302, '/favicon' + originalExtension);
+ return res.redirect(302, utils.url.urlFor({relativeUrl: '/favicon' + originalExtension}));
}
storage.getStorage()
@@ -66,7 +66,7 @@ function serveFavicon() {
// CASE: always redirect to .ico for default icon
if (originalExtension !== requestedExtension) {
- return res.redirect(302, '/favicon.ico');
+ return res.redirect(302, utils.url.urlFor({relativeUrl: '/favicon.ico'}));
}
fs.readFile(filePath, function readFile(err, buf) {
|
🐛 correct favicon redirects with subdirectory (#<I>)
refs #<I>, #<I>
Use our url util `urlFor` to ensure, the redirect includes the subdirectory, if set up.
|
diff --git a/benchmark.js b/benchmark.js
index <HASH>..<HASH> 100644
--- a/benchmark.js
+++ b/benchmark.js
@@ -309,14 +309,24 @@ run(fileNames.map(function(fileName) {
res.on('data', function(chunk) {
response += chunk;
}).on('end', function() {
- // Extract result from <textarea/>
- var start = response.indexOf('>', response.indexOf('<textarea'));
- var end = response.lastIndexOf('</textarea>');
- var result = response.slice(start + 1, end).replace(/<\\\//g, '</');
var info = infos.willpeavy;
- writeText(info.filePath, result, function() {
- readSizes(info, done);
- });
+ if (res.statusCode === 200) {
+ // Extract result from <textarea/>
+ var start = response.indexOf('>', response.indexOf('<textarea'));
+ var end = response.lastIndexOf('</textarea>');
+ var result = response.slice(start + 1, end).replace(/<\\\//g, '</');
+ writeText(info.filePath, result, function() {
+ readSizes(info, done);
+ });
+ }
+ // Site refused to process content
+ else {
+ info.size = 0;
+ info.gzSize = 0;
+ info.lzSize = 0;
+ info.brSize = 0;
+ done();
+ }
});
}).end(querystring.stringify({
html: data
|
handle errors from Will Peavy's HTML Minifier
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -1,3 +1,6 @@
+# encoding: utf-8
+
+import io
import sys
import os.path
import setuptools
@@ -6,10 +9,10 @@ import setuptools
MISC_DIR = "misc"
REQUIREMENT_DIR = "requirements"
-with open("README.rst") as fp:
- long_description = fp.read()
+with io.open("README.rst", encoding="utf8") as f:
+ long_description = f.read()
-with open(os.path.join(MISC_DIR, "summary.txt")) as f:
+with io.open(os.path.join(MISC_DIR, "summary.txt"), encoding="utf8") as f:
summary = f.read()
with open(os.path.join(REQUIREMENT_DIR, "requirements.txt")) as f:
|
[ci skip] Update setup.py
|
diff --git a/tests/test_middleware.py b/tests/test_middleware.py
index <HASH>..<HASH> 100644
--- a/tests/test_middleware.py
+++ b/tests/test_middleware.py
@@ -87,19 +87,21 @@ class BarkMiddlewareTest(unittest2.TestCase):
class BarkFilterTest(unittest2.TestCase):
+ @mock.patch.object(middleware.LOG, 'warn')
@mock.patch('ConfigParser.SafeConfigParser')
@mock.patch('bark.proxy.ProxyConfig')
@mock.patch('bark.format.Format.parse')
@mock.patch('bark.handlers.get_handler')
@mock.patch.object(middleware, 'BarkMiddleware', return_result='mid')
def test_noconf(self, mock_BarkMiddleware, mock_get_handler, mock_parse,
- mock_ProxyConfig, mock_SafeConfigParser):
+ mock_ProxyConfig, mock_SafeConfigParser, mock_warn):
filt = middleware.bark_filter({})
self.assertFalse(mock_SafeConfigParser.called)
self.assertFalse(mock_ProxyConfig.called)
self.assertFalse(mock_parse.called)
self.assertFalse(mock_get_handler.called)
+ self.assertFalse(mock_warn.called)
self.assertFalse(mock_BarkMiddleware.called)
mid = filt('app')
|
Test that LOG.warn() isn't called in the no-config case.
|
diff --git a/src/main/java/com/j256/ormlite/table/TableInfo.java b/src/main/java/com/j256/ormlite/table/TableInfo.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/j256/ormlite/table/TableInfo.java
+++ b/src/main/java/com/j256/ormlite/table/TableInfo.java
@@ -136,11 +136,11 @@ public class TableInfo<T, ID> {
// build our alias map if we need it
Map<String, FieldType> map = new HashMap<String, FieldType>();
for (FieldType fieldType : fieldTypes) {
- map.put(fieldType.getColumnName(), fieldType);
+ map.put(fieldType.getColumnName().toLowerCase(), fieldType);
}
fieldNameMap = map;
}
- FieldType fieldType = fieldNameMap.get(columnName);
+ FieldType fieldType = fieldNameMap.get(columnName.toLowerCase());
// if column name is found, return it
if (fieldType != null) {
return fieldType;
|
Turned this into case insenstive field match.
|
diff --git a/stellar/command.py b/stellar/command.py
index <HASH>..<HASH> 100644
--- a/stellar/command.py
+++ b/stellar/command.py
@@ -4,6 +4,7 @@ import hashlib
import uuid
import os
import sys
+from time import sleep
from sqlalchemy.exc import ProgrammingError
@@ -130,10 +131,14 @@ class CommandApp(object):
Snapshot.project_name == config['project_name']
):
if not snapshot.is_slave_ready:
- print "Slave for %s is not ready" % (
- snapshot.table_name
- )
- sys.exit(1)
+ sys.stdout.write('Waiting for background process to finish')
+ sys.stdout.flush()
+ while not snapshot.is_slave_ready:
+ sys.stdout.write('.')
+ sys.stdout.flush()
+ sleep(1)
+ stellar_db.session.refresh(snapshot)
+ print ''
for snapshot in stellar_db.session.query(Snapshot).filter(
Snapshot.name == name,
|
Wait if backgroud process is still working
|
diff --git a/gulpfile.js b/gulpfile.js
index <HASH>..<HASH> 100644
--- a/gulpfile.js
+++ b/gulpfile.js
@@ -28,6 +28,7 @@ gulp.task('lint', function () {
* Task to run mocha tests
*/
gulp.task('mocha', function () {
+ require('./').settings.ENTITIESPATH = '../../../tests/unit/back/models/';
return gulp.src(paths.mochaSrc, {read: false})
.pipe(mocha({
reporter: 'spec'
diff --git a/src/back/settings.js b/src/back/settings.js
index <HASH>..<HASH> 100644
--- a/src/back/settings.js
+++ b/src/back/settings.js
@@ -13,5 +13,7 @@ module.exports = {};
* always that on of them is referenced in the code.
* @type {string}
* @constant
+ * @example
+ * settings.ENTITIESPATH = '../../../tests/unit/back/models/';
*/
-module.exports.ENTITIESPATH = '../../../tests/unit/back/models/';
+module.exports.ENTITIESPATH = null;
|
improvements on ENTITIESPATH
|
diff --git a/lib/fog/bin.rb b/lib/fog/bin.rb
index <HASH>..<HASH> 100644
--- a/lib/fog/bin.rb
+++ b/lib/fog/bin.rb
@@ -3,7 +3,7 @@ require 'fog/core/credentials'
module Fog
class << self
def available_providers
- @available_providers ||= Fog.providers.values.select {|provider| Kernel.const_get(provider).available?}.sort
+ @available_providers ||= Fog.providers.values.select {|provider| Kernel.const_get(provider).try(:available?)}.sort
end
def registered_providers
|
make available check in bin resilient to nil
|
diff --git a/dropwizard-client/src/test/java/io/dropwizard/client/DropwizardApacheConnectorTest.java b/dropwizard-client/src/test/java/io/dropwizard/client/DropwizardApacheConnectorTest.java
index <HASH>..<HASH> 100644
--- a/dropwizard-client/src/test/java/io/dropwizard/client/DropwizardApacheConnectorTest.java
+++ b/dropwizard-client/src/test/java/io/dropwizard/client/DropwizardApacheConnectorTest.java
@@ -28,7 +28,6 @@ import org.glassfish.jersey.client.JerseyClient;
import org.junit.After;
import org.junit.Before;
import org.junit.ClassRule;
-import org.junit.Ignore;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
@@ -50,7 +49,6 @@ import static org.hamcrest.CoreMatchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
-@Ignore //These tests are consistently failing on travis CI because of network timeouts
public class DropwizardApacheConnectorTest {
private static final int SLEEP_TIME_IN_MILLIS = 1000;
|
Remove @Ignore from DropwizardApacheConnectorTest
|
diff --git a/getmac/getmac.py b/getmac/getmac.py
index <HASH>..<HASH> 100644
--- a/getmac/getmac.py
+++ b/getmac/getmac.py
@@ -8,7 +8,7 @@ try:
except ImportError:
DEVNULL = open(os.devnull, 'wb') # Py2
-__version__ = '0.2.2'
+__version__ = '0.2.3'
DEBUG = False
PY2 = sys.version_info[0] == 2
@@ -187,6 +187,8 @@ def _find_mac(command, args, hw_identifiers, get_index):
def _windows_get_remote_mac_ctypes(host):
+ if not PY2: # Convert to bytes on Python 3+ (Fixes #7)
+ host = host.encode()
try:
inetaddr = ctypes.windll.wsock32.inet_addr(host)
if inetaddr in (0, -1):
|
Fix Windows remote host on Python 3
- In Python 3, the ctypes method for getting the MAC of a remote host would fail. This was caused by the host being implicitly encoded to bytes in Python 2, but not in 3.
Fixes #7
|
diff --git a/lib/drizzlepac/astrodrizzle.py b/lib/drizzlepac/astrodrizzle.py
index <HASH>..<HASH> 100644
--- a/lib/drizzlepac/astrodrizzle.py
+++ b/lib/drizzlepac/astrodrizzle.py
@@ -250,8 +250,8 @@ def run(configobj, wcsmap=None):
image.clean()
image.close()
- del imgObjList
- del outwcs
+ del imgObjList
+ del outwcs
def help(file=None):
|
Fixed a bug in astrodrizzle that affects clean up after a successful run.
git-svn-id: <URL>
|
diff --git a/lib/solargraph/api_map.rb b/lib/solargraph/api_map.rb
index <HASH>..<HASH> 100755
--- a/lib/solargraph/api_map.rb
+++ b/lib/solargraph/api_map.rb
@@ -493,6 +493,11 @@ module Solargraph
meths += yard_map.get_instance_methods('Module')
end
end
+ if namespace == '' and root == ''
+ config.domains.each do |d|
+ meths.concat get_instance_methods(d)
+ end
+ end
strings = meths.map(&:to_s)
live_map.get_methods(namespace, root, 'instance', visibility.include?(:private)).each do |m|
next if strings.include?(m) or !m.match(/^[a-z]/i)
diff --git a/lib/solargraph/code_map.rb b/lib/solargraph/code_map.rb
index <HASH>..<HASH> 100755
--- a/lib/solargraph/code_map.rb
+++ b/lib/solargraph/code_map.rb
@@ -233,8 +233,8 @@ module Solargraph
end
result += api_map.get_constants('')
result += api_map.get_instance_methods('Kernel')
- result += api_map.get_methods('')
- result += api_map.get_instance_methods('')
+ result += api_map.get_methods('', namespace)
+ result += api_map.get_instance_methods('', namespace)
else
result.concat api_map.get_instance_methods(type)
end
|
Infer return types from domain (DSL) methods.
|
diff --git a/examples/java/com/ibm/watson/developer_cloud/document_conversion/v1/DocumentConversionExample.java b/examples/java/com/ibm/watson/developer_cloud/document_conversion/v1/DocumentConversionExample.java
index <HASH>..<HASH> 100644
--- a/examples/java/com/ibm/watson/developer_cloud/document_conversion/v1/DocumentConversionExample.java
+++ b/examples/java/com/ibm/watson/developer_cloud/document_conversion/v1/DocumentConversionExample.java
@@ -105,10 +105,9 @@ public class DocumentConversionExample{
System.out.println("-------------------- Batch Collection ------------------------------");
Map<String, Object> batchListParams = new HashMap<String, Object>();
- batchListParams.put(DocumentConversion.TOKEN, batch.getId());
batchListParams.put(DocumentConversion.LIMIT, 2);
BatchCollection batchCollection = service.getBatchCollection(batchListParams);
- System.out.println("Batch Collection with a token to the next page :\n" + batchCollection);
+ System.out.println("Batch Collection with 2 items in a page :\n" + batchCollection);
// Step 3. Add the document to the batch
String batchId2 = batch.getId();
|
Removes the token from the example since the persistence uses it differently
|
diff --git a/source/rafcon/gui/helpers/state.py b/source/rafcon/gui/helpers/state.py
index <HASH>..<HASH> 100644
--- a/source/rafcon/gui/helpers/state.py
+++ b/source/rafcon/gui/helpers/state.py
@@ -115,6 +115,7 @@ def save_selected_state_as():
storage.save_state_machine_to_path(sm_m.state_machine, base_path=path, save_as=True)
sm_m.store_meta_data()
else:
+ logger.warning("No valid path specified")
return False
# check if state machine is in library path
if library_manager.is_os_path_in_library_paths(path):
diff --git a/source/rafcon/gui/helpers/state_machine.py b/source/rafcon/gui/helpers/state_machine.py
index <HASH>..<HASH> 100644
--- a/source/rafcon/gui/helpers/state_machine.py
+++ b/source/rafcon/gui/helpers/state_machine.py
@@ -154,6 +154,7 @@ def save_state_machine_as(menubar=None, widget=None, data=None, path=None):
path = interface.create_folder_func("Please choose a root folder and a name for the state-machine",
folder_name)
if path is None:
+ logger.warning("No valid path specified")
return False
menubar.model.get_selected_state_machine_model().state_machine.file_system_path = path
|
Add warning if no path is selected in dialog
|
diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -179,13 +179,6 @@ copy.base = function copyBase(src, dest, opts) {
*/
function rewrite(fp, dest, options) {
- // options = options || {};
- // if (options.flatten) {
- // dest = path.basename(dest);
- // }
- // if (options.destBase) {
- // dest = path.join(options.destBase, dest);
- // }
return path.resolve(dest, path.basename(fp));
}
|
clean up before updating rewrite logic
|
diff --git a/src/Klein/Klein.php b/src/Klein/Klein.php
index <HASH>..<HASH> 100644
--- a/src/Klein/Klein.php
+++ b/src/Klein/Klein.php
@@ -457,15 +457,10 @@ class Klein
if ($_route === '*') {
$match = true;
- } elseif ($_route === '404' && !$matched && count($methods_matched) <= 0) {
- // Easily handle 404's
+ } elseif (($_route === '404' && !$matched && count($methods_matched) <= 0)
+ || ($_route === '405' && !$matched && count($methods_matched) > 0)) {
- $this->handleResponseCallback($callback, $matched, $methods_matched);
-
- continue;
-
- } elseif ($_route === '405' && !$matched && count($methods_matched) > 0) {
- // Easily handle 405's
+ // Easily handle 40x's
$this->handleResponseCallback($callback, $matched, $methods_matched);
|
Merging the logic of the <I> and <I> handlers
|
diff --git a/changelog.rb b/changelog.rb
index <HASH>..<HASH> 100755
--- a/changelog.rb
+++ b/changelog.rb
@@ -56,7 +56,7 @@ end
arg_from = args["<from-commit>"]
arg_to = args["<to-commit>"]
-use_markdown = args["--md"] != nil
+use_markdown = args["--md"]
# Find if we're operating on tags
tag_from = tagWithName(repo, arg_from)
|
fix(arguments): arguments for output formats now work correctly
* fix: the output format is not stuck to `md` anymore and defaults to `slack`
|
diff --git a/elki-clustering/src/main/java/elki/clustering/kmeans/spherical/SphericalKMeans.java b/elki-clustering/src/main/java/elki/clustering/kmeans/spherical/SphericalKMeans.java
index <HASH>..<HASH> 100644
--- a/elki-clustering/src/main/java/elki/clustering/kmeans/spherical/SphericalKMeans.java
+++ b/elki-clustering/src/main/java/elki/clustering/kmeans/spherical/SphericalKMeans.java
@@ -136,10 +136,12 @@ public class SphericalKMeans<V extends NumberVector> extends AbstractKMeans<V, K
}
for(DBIDIter iditer = relation.iterDBIDs(); iditer.valid(); iditer.advance()) {
NumberVector fv = relation.get(iditer);
- double maxSim = similarity(fv, means[0]);
+ double maxSim = VectorUtil.dot(fv, means[0]);
+ ++diststat;
int maxIndex = 0;
for(int i = 1; i < k; i++) {
- double sim = similarity(fv, means[i]);
+ double sim = VectorUtil.dot(fv, means[i]);
+ ++diststat;
if(sim > maxSim) {
maxIndex = i;
maxSim = sim;
|
Repair spherical kmeans on unnormalized data.
|
diff --git a/src/bootstrap-table.js b/src/bootstrap-table.js
index <HASH>..<HASH> 100644
--- a/src/bootstrap-table.js
+++ b/src/bootstrap-table.js
@@ -1232,14 +1232,8 @@
success: function (res) {
res = calculateObjectValue(that.options, that.options.responseHandler, [res], res);
- var data = res;
-
- if (that.options.sidePagination === 'server') {
- that.options.totalRows = res.total;
- data = res.rows;
- }
- that.load(data);
- that.trigger('load-success', data);
+ that.load(res);
+ that.trigger('load-success', res);
},
error: function (res) {
that.trigger('load-error', res.status);
@@ -1408,6 +1402,12 @@
};
BootstrapTable.prototype.load = function (data) {
+ // #431: support pagination
+ if (this.options.sidePagination === 'server') {
+ this.options.totalRows = data.total;
+ data = data.rows;
+ }
+
this.initData(data);
this.initSearch();
this.initPagination();
|
Fix #<I>: load method support pagination.
|
diff --git a/pghoard/restore.py b/pghoard/restore.py
index <HASH>..<HASH> 100644
--- a/pghoard/restore.py
+++ b/pghoard/restore.py
@@ -7,6 +7,7 @@ See LICENSE for details
from __future__ import print_function
from .common import lzma_decompressor, lzma_open_read, default_log_format_str
from .errors import Error
+from psycopg2.extensions import adapt
from requests import Session
import argh
import logging
@@ -36,11 +37,13 @@ def create_pgdata_dir(pgdata):
def create_recovery_conf(dirpath, site, primary_conninfo):
content = """# pghoard created recovery.conf
standby_mode = 'on'
-primary_conninfo = {}
-trigger_file = '{}'
-restore_command = 'pghoard_restore get %f %p --site {}'
+primary_conninfo = {primary_conninfo}
+trigger_file = {trigger_file}
+restore_command = 'pghoard_restore get %f %p --site {site}'
recovery_target_timeline = 'latest'
-""".format(primary_conninfo, os.path.join(dirpath, "trigger_file"), site)
+""".format(primary_conninfo=adapt(primary_conninfo),
+ trigger_file=adapt(os.path.join(dirpath, "trigger_file")),
+ site=site)
filepath = os.path.join(dirpath, "recovery.conf")
with open(filepath, "w") as fp:
fp.write(content)
|
create_recovery_conf: properly quote recovery.conf entries
Use psycopg2.extensions.adapt to properly quote the primary_conninfo and
trigger_file entries in the generated recovery.conf. Previously any callers
had to make sure they passed in a quoted form (including enclosing quotes)
of the connection string.
|
diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -15,7 +15,7 @@ module.exports = function renameOverwrite (oldPath, newPath) {
case 'EEXIST':
return rimraf(newPath)
.then(() => rename(oldPath, newPath))
- // weird Windows shit
+ // weird Windows stuff
case 'EPERM':
return timeout(200)
.then(() => rimraf(newPath))
@@ -33,7 +33,7 @@ module.exports.sync = function renameOverwriteSync (oldPath, newPath) {
switch (err.code) {
case 'ENOTEMPTY':
case 'EEXIST':
- case 'EPERM': // weird Windows shit
+ case 'EPERM': // weird Windows stuff
rimrafSync(newPath)
fs.renameSync(oldPath, newPath)
return
|
refactor: better language in comments
|
diff --git a/test/test-server-metadata.js b/test/test-server-metadata.js
index <HASH>..<HASH> 100644
--- a/test/test-server-metadata.js
+++ b/test/test-server-metadata.js
@@ -7,6 +7,9 @@ var exec = require('child_process').exec;
var path = require('path');
var util = require('util');
+// TODO: convert to tap test and use tap@1's --bail option instead of asserts
+// to get early bailout on the first failure.
+
var server = app.listen();
var cpuProfilingSupported = require('semver').gt(process.version, '0.11.0');
@@ -95,7 +98,7 @@ function testCpuStop(cb) {
function testCpuWatchdogStart(cb) {
if (!cpuProfilingSupported) return cb();
- ServiceProcess.findOne({where: { workerId: 1, stopTime: null }},
+ ServiceProcess.findOne({where: { workerId: 1, stopTime: null }},
function(err, proc) {
assert.ifError(err);
assert.equal(proc.isProfiling, true);
|
test: mark test-server-metadata for tap@1
This is the last test that is incompatible with tap@1, mainly due to
the lack of any TAP compliant output and in its place a bunch of
TAP-like noise that is mistaken for unplanned nested tests.
See isaacs/node-tap#<I>
|
diff --git a/internal/terraform/node_resource_abstract.go b/internal/terraform/node_resource_abstract.go
index <HASH>..<HASH> 100644
--- a/internal/terraform/node_resource_abstract.go
+++ b/internal/terraform/node_resource_abstract.go
@@ -143,12 +143,17 @@ func (n *NodeAbstractResource) References() []*addrs.Reference {
refs, _ = lang.ReferencesInExpr(c.ForEach)
result = append(result, refs...)
+ for _, expr := range c.TriggersReplacement {
+ refs, _ = lang.ReferencesInExpr(expr)
+ result = append(result, refs...)
+ }
+
// ReferencesInBlock() requires a schema
if n.Schema != nil {
refs, _ = lang.ReferencesInBlock(c.Config, n.Schema)
+ result = append(result, refs...)
}
- result = append(result, refs...)
if c.Managed != nil {
if c.Managed.Connection != nil {
refs, _ = lang.ReferencesInBlock(c.Managed.Connection.Config, connectionBlockSupersetSchema)
|
collect references from replace_triggered_by
The replace_triggered_by expressions create edges in the graph, so must
be returned in the References method.
|
diff --git a/core/src/test/java/tech/tablesaw/filters/TimeDependentFilteringTest.java b/core/src/test/java/tech/tablesaw/filters/TimeDependentFilteringTest.java
index <HASH>..<HASH> 100644
--- a/core/src/test/java/tech/tablesaw/filters/TimeDependentFilteringTest.java
+++ b/core/src/test/java/tech/tablesaw/filters/TimeDependentFilteringTest.java
@@ -115,8 +115,6 @@ public class TimeDependentFilteringTest {
// iterate an individual table and find the rows where concept matches the target concept
for (Row row : patientTable) {
- StringColumn concepts = patientTable.stringColumn("concept");
- DateColumn dates = patientTable.dateColumn("date");
if (row.getString("concept").equals(conceptZ)) {
eventDates.add(row.getDate("date"));
}
|
Remove unused variables (#<I>)
|
diff --git a/src/main/java/com/github/jasminb/jsonapi/ResourceConverter.java b/src/main/java/com/github/jasminb/jsonapi/ResourceConverter.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/github/jasminb/jsonapi/ResourceConverter.java
+++ b/src/main/java/com/github/jasminb/jsonapi/ResourceConverter.java
@@ -819,8 +819,9 @@ public class ResourceConverter {
for (Field relationshipField : relationshipFields) {
Object relationshipObject = relationshipField.get(object);
+ removeField(attributesNode, relationshipField);
+
if (relationshipObject != null) {
- removeField(attributesNode, relationshipField);
Relationship relationship = configuration.getFieldRelationship(relationshipField);
|
Always removing relationship field (also if it has a null null) as it should never be part of attributes (#<I>)
|
diff --git a/js/okex.js b/js/okex.js
index <HASH>..<HASH> 100644
--- a/js/okex.js
+++ b/js/okex.js
@@ -1314,7 +1314,7 @@ module.exports = class okex extends Exchange {
'datetime': this.iso8601 (timestamp),
});
}
- return rates;
+ return this.sortBy (rates, 'timestamp');
}
async fetchIndexOHLCV (symbol, timeframe = '1m', since = undefined, limit = undefined, params = {}) {
|
okex-sort-fundingRate
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -5,8 +5,12 @@ For full docs visit https://docs.bigchaindb.com
"""
from setuptools import setup, find_packages
+import sys
+if sys.version_info < (3, 6):
+ sys.exit('Please use Python version 3.6 or higher.')
+
# get the version
version = {}
with open('bigchaindb/version.py') as fp:
@@ -107,7 +111,7 @@ setup(
author_email='dev@bigchaindb.com',
license='Apache Software License 2.0',
zip_safe=False,
-
+ python_requires='>=3.6',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
|
Problem: Python version not check before running (#<I>) (#<I>)
Solution: Check python version
|
diff --git a/src/inner-slider.js b/src/inner-slider.js
index <HASH>..<HASH> 100644
--- a/src/inner-slider.js
+++ b/src/inner-slider.js
@@ -368,7 +368,9 @@ export class InnerSlider extends React.Component {
window.ontouchmove = null
}
swipeStart = (e) => {
- this.disableBodyScroll()
+ if (this.props.verticalSwiping) {
+ this.disableBodyScroll()
+ }
let state = swipeStart(e, this.props.swipe, this.props.draggable)
state !== '' && this.setState(state)
}
@@ -400,7 +402,9 @@ export class InnerSlider extends React.Component {
this.setState(state)
if (triggerSlideHandler === undefined) return
this.slideHandler(triggerSlideHandler)
- this.enableBodyScroll()
+ if (this.props.verticalSwiping) {
+ this.enableBodyScroll()
+ }
}
slickPrev = () => {
// this and fellow methods are wrapped in setTimeout
|
fixed bug related to vertical body scroll while swiping
|
diff --git a/ndio/remote/boss/tests/int_test_group.py b/ndio/remote/boss/tests/int_test_group.py
index <HASH>..<HASH> 100644
--- a/ndio/remote/boss/tests/int_test_group.py
+++ b/ndio/remote/boss/tests/int_test_group.py
@@ -59,8 +59,6 @@ class ProjectGroupTest(unittest.TestCase):
self.existing_grp_name = 'int_test_exists'
self.user_name = 'bossadmin'
- self.rmt.group_create(self.existing_grp_name)
-
def cleanup_db(self):
"""Clean up the data model objects used by this test case.
@@ -71,6 +69,7 @@ class ProjectGroupTest(unittest.TestCase):
def setUp(self):
self.initialize()
+ self.rmt.group_create(self.existing_grp_name)
def tearDown(self):
self.cleanup_db()
|
Moved creation of test group.
No need to create test group in setUpClass().
|
diff --git a/spec/api-browser-window-spec.js b/spec/api-browser-window-spec.js
index <HASH>..<HASH> 100644
--- a/spec/api-browser-window-spec.js
+++ b/spec/api-browser-window-spec.js
@@ -352,7 +352,7 @@ describe('browser-window module', function () {
describe('BrowserWindow.setContentBounds(bounds)', function () {
it('sets the content size and position', function (done) {
- var bounds = {x: 60, y: 60, width: 250, height: 250}
+ var bounds = {x: 10, y: 10, width: 250, height: 250}
w.once('resize', function () {
assert.deepEqual(w.getContentBounds(), bounds)
done()
@@ -368,7 +368,7 @@ describe('browser-window module', function () {
width: 300,
height: 300
})
- var bounds = {x: 60, y: 60, width: 250, height: 250}
+ var bounds = {x: 10, y: 10, width: 250, height: 250}
w.once('resize', function () {
assert.deepEqual(w.getContentBounds(), bounds)
done()
|
Use same position as setPosition test
|
diff --git a/src/Strategy/PrivateCacheStrategy.php b/src/Strategy/PrivateCacheStrategy.php
index <HASH>..<HASH> 100644
--- a/src/Strategy/PrivateCacheStrategy.php
+++ b/src/Strategy/PrivateCacheStrategy.php
@@ -11,6 +11,19 @@ use Kevinrob\GuzzleCache\Storage\DoctrineCacheWrapper;
use Psr\Http\Message\RequestInterface;
use Psr\Http\Message\ResponseInterface;
+/**
+ * This strategy represent a "private" HTTP client.
+ * Pay attention to share storage between application with caution!
+ *
+ * For example, a response with cache-control header "private, max-age=60"
+ * will be cached by this strategy.
+ *
+ * The rules applied are from RFC 7234.
+ *
+ * @see https://tools.ietf.org/html/rfc7234
+ *
+ * @package Kevinrob\GuzzleCache\Strategy
+ */
class PrivateCacheStrategy implements CacheStrategyInterface
{
|
Add PHPDoc for PrivateCacheStrategy
|
diff --git a/tunable/tunablemanager.py b/tunable/tunablemanager.py
index <HASH>..<HASH> 100644
--- a/tunable/tunablemanager.py
+++ b/tunable/tunablemanager.py
@@ -349,8 +349,9 @@ class TunableManager(object):
parser.add_argument(*register['save'], type=str, action=SaveTunablesAction)
@classmethod
- def load(cls, tunables):
- cls.init()
+ def load(cls, tunables, reset=True):
+ if reset:
+ cls.init()
for key, value in tunables.items():
cls.set(key, value)
|
made tunable load conditionally reset the state
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.