diff
stringlengths 65
26.7k
| message
stringlengths 7
9.92k
|
|---|---|
diff --git a/Tests/ServiceRepositoryTest.php b/Tests/ServiceRepositoryTest.php
index <HASH>..<HASH> 100644
--- a/Tests/ServiceRepositoryTest.php
+++ b/Tests/ServiceRepositoryTest.php
@@ -48,7 +48,7 @@ class ServiceRepositoryTest extends TestCase
$container->registerExtension($extension);
$extension->load([[
'dbal' => [
- 'driver' => 'pdo_mysql',
+ 'driver' => 'pdo_sqlite',
'charset' => 'UTF8',
],
'orm' => [
|
Changed pdo_mysql dependency to pdo_sqlite
|
diff --git a/nats/js/client.py b/nats/js/client.py
index <HASH>..<HASH> 100644
--- a/nats/js/client.py
+++ b/nats/js/client.py
@@ -922,7 +922,7 @@ class JetStreamContext(JetStreamManager):
req_subject = f"{self._prefix}.STREAM.MSG.GET.{stream_name}"
req = {'last_by_subj': subject}
data = json.dumps(req)
- resp = await self._api_request(req_subject, data.encode())
+ resp = await self._api_request(req_subject, data.encode(), timeout=self._timeout)
raw_msg = api.RawStreamMsg.from_response(resp['message'])
if raw_msg.hdrs:
hdrs = base64.b64decode(raw_msg.hdrs)
|
Fix incorrect timeout propagation
This commit adds timeout propagation on key value get operation.
|
diff --git a/bin/release.py b/bin/release.py
index <HASH>..<HASH> 100755
--- a/bin/release.py
+++ b/bin/release.py
@@ -190,6 +190,7 @@ def checkInMaven2Repo(version, workingDir):
mod_dir = settings[local_mvn_repo_dir_key] + "/" + getModuleName(p)
if not is_in_svn(mod_dir):
newmodules.append(mod_dir)
+ print "New modules added in this release: %s" % newmodules
if len(newmodules) > 0:
client.add(newmodules)
client.add(moduleNames)
|
Added more msgs
|
diff --git a/src/Asana.php b/src/Asana.php
index <HASH>..<HASH> 100644
--- a/src/Asana.php
+++ b/src/Asana.php
@@ -667,9 +667,7 @@ class Asana
*/
public function getCustomFields($workspaceId = null)
{
- if (is_null($workspaceId)) {
- $workspaceId = $this->defaultWorkspaceId;
- }
+ $workspaceId = $workspaceId ?: $this->defaultWorkspaceId;
return $this->curl->get("workspaces/{$workspaceId}/custom_fields");
}
|
use ternary operator instead of if statement
|
diff --git a/projects/samskivert/src/java/com/samskivert/util/SortableArrayList.java b/projects/samskivert/src/java/com/samskivert/util/SortableArrayList.java
index <HASH>..<HASH> 100644
--- a/projects/samskivert/src/java/com/samskivert/util/SortableArrayList.java
+++ b/projects/samskivert/src/java/com/samskivert/util/SortableArrayList.java
@@ -1,5 +1,5 @@
//
-// $Id: SortableArrayList.java,v 1.14 2003/06/16 18:11:30 mdb Exp $
+// $Id: SortableArrayList.java,v 1.15 2003/07/15 00:30:30 ray Exp $
//
// samskivert library - useful routines for java programs
// Copyright (C) 2001 Michael Bayne
@@ -237,6 +237,10 @@ public class SortableArrayList extends AbstractList
// documentation inherited from interface
public int indexOf (Object o)
{
+ if (_elements == null) {
+ return -1;
+ }
+
return ListUtil.indexOfEqual(_elements, o);
}
|
There have been so many bugs caused by _elements being allowed to be null.
Fixed another.
git-svn-id: <URL>
|
diff --git a/src/oj.js b/src/oj.js
index <HASH>..<HASH> 100644
--- a/src/oj.js
+++ b/src/oj.js
@@ -38,7 +38,7 @@
oj.version = '0.2.2'
- oj.isClient = !(typeof process !== "undefined" && process !== null ? process.versions != null ? process.versions.node : 0 : 0)
+ oj.isClient = !(typeof process !== _udf && process !== null ? process.versions != null ? process.versions.node : 0 : 0)
// Detect jQuery globally or in required module
if (typeof $ != _udf)
@@ -1021,7 +1021,7 @@
// Compile to dom if requested
// Add dom element with attributes
- if (options.dom && (typeof document !== "undefined" && document !== null)){
+ if (options.dom && (typeof document !== _udf && document !== null)){
// Create element
el = document.createElement(tag)
|
Replaced two instances of `"undefined"` with `_udf`
|
diff --git a/bases.go b/bases.go
index <HASH>..<HASH> 100644
--- a/bases.go
+++ b/bases.go
@@ -45,6 +45,10 @@ func NewHandler(
}
}
+func (h *Handler) LogTime(logger *logrus.Entry, start time.Time) {
+ LogTime(logger, "Handler building", start)
+}
+
func NewController(
c *viper.Viper,
l *logrus.Entry,
|
LogTime method added on handlers.
|
diff --git a/test/integration/tx/transactions.js b/test/integration/tx/transactions.js
index <HASH>..<HASH> 100644
--- a/test/integration/tx/transactions.js
+++ b/test/integration/tx/transactions.js
@@ -179,6 +179,8 @@ describe('Send Transactions', function () {
this.timeout(config.timeout)
describe('Bitcoin - Ledger', () => {
+ before(async function () { await importBitcoinAddresses(chains.bitcoinWithLedger) })
+ beforeEach(async function () { await fundUnusedBitcoinAddress(chains.bitcoinWithLedger) })
testTransaction(chains.bitcoinWithLedger)
})
@@ -193,6 +195,8 @@ describe('Send Batch Transactions', function () {
this.timeout(config.timeout)
describe('Bitcoin - Ledger', () => {
+ before(async function () { await importBitcoinAddresses(chains.bitcoinWithLedger) })
+ beforeEach(async function () { await fundUnusedBitcoinAddress(chains.bitcoinWithLedger) })
testBatchTransaction(chains.bitcoinWithLedger)
})
|
Import and fund ledger and bitcoinjs addresses in tests
|
diff --git a/project_generator/tool.py b/project_generator/tool.py
index <HASH>..<HASH> 100644
--- a/project_generator/tool.py
+++ b/project_generator/tool.py
@@ -50,7 +50,7 @@ class ToolsSupported:
'toolchain': 'gcc_arm',
'toolnames': ['coide'],
'exporter': Coide,
- 'builder': None,
+ 'builder': Coide,
},
'make_gcc_arm': {
'toolchain': 'gcc_arm',
diff --git a/project_generator/tools/coide.py b/project_generator/tools/coide.py
index <HASH>..<HASH> 100644
--- a/project_generator/tools/coide.py
+++ b/project_generator/tools/coide.py
@@ -19,6 +19,8 @@ import copy
from os.path import basename, join, normpath
from os import getcwd
+
+from .builder import Builder
from .exporter import Exporter
from ..targets import Targets
@@ -43,7 +45,7 @@ class CoIDEdefinitions():
},
}
-class Coide(Exporter):
+class Coide(Exporter, Builder):
source_files_dic = [
'source_files_c', 'source_files_s', 'source_files_cpp', 'source_files_obj', 'source_files_lib']
file_types = {'cpp': 1, 'c': 1, 's': 1, 'obj': 1, 'lib': 1}
|
Coide - bugfix builder - should inherit and not implement it
|
diff --git a/src/main/java/org/minimalj/backend/db/DbBackend.java b/src/main/java/org/minimalj/backend/db/DbBackend.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/minimalj/backend/db/DbBackend.java
+++ b/src/main/java/org/minimalj/backend/db/DbBackend.java
@@ -1,5 +1,6 @@
package org.minimalj.backend.db;
+import java.io.File;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.Serializable;
@@ -24,7 +25,8 @@ public class DbBackend extends Backend {
public DbBackend() {
String databaseFile = System.getProperty("MjBackendDatabaseFile", null);
- this.persistence = new DbPersistence(DbPersistence.embeddedDataSource(databaseFile), Application.getApplication().getEntityClasses());
+ boolean createTables = databaseFile == null || !new File(databaseFile).exists();
+ this.persistence = new DbPersistence(DbPersistence.embeddedDataSource(databaseFile), createTables, Application.getApplication().getEntityClasses());
this.queries = Application.getApplication().getQueries();
}
|
DbBackend: only create tables in derby db if the file not yet exists
|
diff --git a/lib/clash/tests.rb b/lib/clash/tests.rb
index <HASH>..<HASH> 100644
--- a/lib/clash/tests.rb
+++ b/lib/clash/tests.rb
@@ -114,6 +114,7 @@ module Clash
def read_config
# Find the config file (fall back to legacy filename)
if path = config_path || config_path('.clash.yml')
+
read_test_line_numbers(path)
config = SafeYAML.load_file(path)
config = [config] unless config.is_a?(Array)
@@ -129,12 +130,22 @@ module Clash
path = File.join('./', @options[:path])
paths = []
+ # Walk up the directory tree looking for a clash file.
(path.count('/') + 1).times do
paths << File.join(path, file)
path.sub!(/\/[^\/]+$/, '')
end
- paths.find {|p| File.file?(p) }
+ # By default search for clash config in the test directory.
+ paths << "./test/_clash.yml"
+
+ path = paths.find {|p| File.file?(p) }
+
+ if path && path =~ %r{test/_clash.yml} && @options[:path] == '.'
+ @options[:path] = 'test'
+ end
+
+ path
end
def print_results
|
Clash config file defaults to looking in test directory
|
diff --git a/tests/Integration/PersistedDataTest.php b/tests/Integration/PersistedDataTest.php
index <HASH>..<HASH> 100644
--- a/tests/Integration/PersistedDataTest.php
+++ b/tests/Integration/PersistedDataTest.php
@@ -89,7 +89,7 @@ class PersistedDataTest extends IntegrationTestCase
// Assertions
$this->assertTrue($updateResult);
$this->assertInstanceOf(ReferencedUser::class, $result);
- $this->assertSame($expected, $result->toArray());
+ $this->assertEquals($expected, $result->toArray());
}
public function testUpdateData()
@@ -131,7 +131,7 @@ class PersistedDataTest extends IntegrationTestCase
// Assertions
$this->assertTrue($updateResult);
$this->assertInstanceOf(ReferencedUser::class, $result);
- $this->assertSame($expected, $result->toArray());
+ $this->assertEquals($expected, $result->toArray());
}
private function getUser(bool $save = false): ReferencedUser
|
Fix test that was breaking on CI
|
diff --git a/core/common/webpack_rules.js b/core/common/webpack_rules.js
index <HASH>..<HASH> 100644
--- a/core/common/webpack_rules.js
+++ b/core/common/webpack_rules.js
@@ -402,8 +402,8 @@ module.exports = ( config ) => {
include: [
path.resolve( inlineNodeModules, 'ansi-regex'),
path.resolve( inlineNodeModules, 'strip-ansi'),
- path.resolve( nodeModulesPath, 'ansi-regex' ),
- path.resolve( nodeModulesPath, 'strip-ansi' ),
+ path.resolve( yarnModulesPath, 'ansi-regex' ),
+ path.resolve( yarnModulesPath, 'strip-ansi' ),
],
use: [
{
|
fix: android 4x & ie9 + compatibility for yarn
|
diff --git a/src/language/CSSUtils.js b/src/language/CSSUtils.js
index <HASH>..<HASH> 100644
--- a/src/language/CSSUtils.js
+++ b/src/language/CSSUtils.js
@@ -810,7 +810,7 @@ define(function (require, exports, module) {
if (token === "}" && !currentSelector) {
return false;
}
- if (token === ";") {
+ if (token === ";" || (state.state === "prop")) {
currentSelector = "";
} else {
if (!currentSelector) {
@@ -979,12 +979,12 @@ define(function (require, exports, module) {
}
if (selectors[j].level < currentLevel) {
break;
- } else if (selectors[j].declListEndLine !== -1) {
+ }
+ if (selectors[j].declListEndLine !== -1) {
return;
- } else {
- selectors[j].declListEndLine = line;
- selectors[j].declListEndChar = stream.pos - 1; // stream.pos actually points to the char after the }
}
+ selectors[j].declListEndLine = line;
+ selectors[j].declListEndChar = stream.pos - 1; // stream.pos actually points to the char after the }
}
} while (currentLevel > 0 && currentLevel === level);
}
|
Fix the issue of last property with no ';' causing runaway nested rule.
|
diff --git a/tests/EseyeTest.php b/tests/EseyeTest.php
index <HASH>..<HASH> 100644
--- a/tests/EseyeTest.php
+++ b/tests/EseyeTest.php
@@ -57,6 +57,9 @@ class EseyeTest extends PHPUnit_Framework_TestCase
// Remove caching
$configuration->cache = NullCache::class;
+ // Force ESI data-source to be singularity
+ $configuration->datasource = 'singularity';
+
$this->esi = new Eseye;
}
@@ -252,7 +255,7 @@ class EseyeTest extends PHPUnit_Framework_TestCase
$uri = $this->esi->buildDataUri('/{foo}/', ['foo' => 'bar']);
- $this->assertEquals('https://esi.evetech.net/latest/bar/?datasource=test',
+ $this->assertEquals('https://esi.evetech.net/latest/bar/?datasource=singularity',
$uri->__toString());
}
|
tests: switch test data-source to singularity
|
diff --git a/Model/Api.php b/Model/Api.php
index <HASH>..<HASH> 100644
--- a/Model/Api.php
+++ b/Model/Api.php
@@ -428,7 +428,7 @@ class Api
'Accept: application/json'
];
- if($method == \Zend_Http_Client::POST || $method == \Zend_Http_Client::PUT) {
+ if($method == \Zend_Http_Client::PUT) {
array_push($headers, 'Content-Type: application/x-www-form-urlencoded');
}
diff --git a/view/adminhtml/web/js/testconnection.js b/view/adminhtml/web/js/testconnection.js
index <HASH>..<HASH> 100644
--- a/view/adminhtml/web/js/testconnection.js
+++ b/view/adminhtml/web/js/testconnection.js
@@ -34,7 +34,7 @@ define([
}
});
});
- }
+ };
function resetAllMessages() {
successBtnMsg.text();
|
Fixed snippet upload bug when uploading for the 1st time
|
diff --git a/chai-immutable.js b/chai-immutable.js
index <HASH>..<HASH> 100644
--- a/chai-immutable.js
+++ b/chai-immutable.js
@@ -53,9 +53,7 @@
this.assert(
size === 0,
'expected #{this} to be empty but got size #{act}',
- 'expected #{this} to not be empty',
- 0,
- size
+ 'expected #{this} to not be empty'
);
}
else _super.apply(this, arguments);
|
Remove unnecessary assert params to the empty assertion
|
diff --git a/lib/textbringer/buffer.rb b/lib/textbringer/buffer.rb
index <HASH>..<HASH> 100644
--- a/lib/textbringer/buffer.rb
+++ b/lib/textbringer/buffer.rb
@@ -606,7 +606,7 @@ module Textbringer
end_of_line
forward_char
s = @point
- while !end_of_buffer? && byte_after != "\n" &&
+ while !end_of_line? &&
Buffer.display_width(substring(s, @point)) < column
forward_char
end
@@ -627,7 +627,7 @@ module Textbringer
backward_char
beginning_of_line
s = @point
- while !end_of_buffer? && byte_after != "\n" &&
+ while !end_of_line? &&
Buffer.display_width(substring(s, @point)) < column
forward_char
end
@@ -656,7 +656,7 @@ module Textbringer
end
def beginning_of_line
- while !beginning_of_buffer? && byte_before != "\n"
+ while !beginning_of_line?
backward_char
end
@point
@@ -667,8 +667,7 @@ module Textbringer
end
def end_of_line
- while !end_of_buffer? &&
- byte_after(@point) != "\n"
+ while !end_of_line?
forward_char
end
@point
@@ -1049,7 +1048,7 @@ module Textbringer
end
def transpose_chars
- if end_of_buffer? || char_after == "\n"
+ if end_of_line?
backward_char
end
if beginning_of_buffer?
|
Use beginning_of_line? and end_of_line?.
|
diff --git a/test/main.js b/test/main.js
index <HASH>..<HASH> 100644
--- a/test/main.js
+++ b/test/main.js
@@ -46,12 +46,11 @@ describe('gulp-flatten', function () {
});
it('should emit arg error with nonstring option', function (done) {
- var stream = flatten(123);
+ var stream = flatten({newPath: 123});
stream.on('error', function (err) {
should.exist(err);
should.exist(err.message);
- should.ok(err.message === 'Path must be a string. Received undefined'
- || err.message === 'Arguments to path.join must be strings')
+ should.ok(err.message === 'The "path" argument must be of type string')
done();
});
stream.write(fileInstance);
|
tests now passes on node 9.x.x
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100755
--- a/setup.py
+++ b/setup.py
@@ -4,7 +4,7 @@ import sys
from setuptools import setup, find_packages
-install_requires = ['PyMySQL<=0.6.4']
+install_requires = ['PyMySQL<=0.6.6']
PY_VER = sys.version_info
|
bump supported pymysql version
|
diff --git a/greenhouse/__init__.py b/greenhouse/__init__.py
index <HASH>..<HASH> 100644
--- a/greenhouse/__init__.py
+++ b/greenhouse/__init__.py
@@ -4,5 +4,4 @@ from greenhouse.scheduler import *
from greenhouse.utils import *
from greenhouse.pool import *
from greenhouse.io import *
-from greenhouse.scheduler import EXCEPTION_FILE as exception_file
import greenhouse.poller
diff --git a/tests/test_scheduler.py b/tests/test_scheduler.py
index <HASH>..<HASH> 100644
--- a/tests/test_scheduler.py
+++ b/tests/test_scheduler.py
@@ -239,16 +239,6 @@ class PausingTestCase(StateClearingTestCase):
class ExceptionsTestCase(StateClearingTestCase):
class CustomError(Exception): pass
- def setUp(self):
- StateClearingTestCase.setUp(self)
- self._oldprintexc = greenhouse.scheduler.PRINT_EXCEPTIONS
-
- greenhouse.scheduler.PRINT_EXCEPTIONS = False
-
- def tearDown(self):
- greenhouse.scheduler.PRINT_EXCEPTIONS = self._oldprintexc
- StateClearingTestCase.tearDown(self)
-
def test_exceptions_raised_in_grlets(self):
l = [False]
|
clean out references to the old exception machinery in greenhouse/__init__.py and test_scheduler.py
|
diff --git a/juju/jasyncio.py b/juju/jasyncio.py
index <HASH>..<HASH> 100644
--- a/juju/jasyncio.py
+++ b/juju/jasyncio.py
@@ -24,6 +24,9 @@ import asyncio
import signal
import functools
import websockets
+import logging
+
+ROOT_LOGGER = logging.getLogger()
from asyncio import Event, TimeoutError, Queue, ensure_future, \
gather, sleep, wait_for, create_subprocess_exec, subprocess, \
@@ -46,7 +49,7 @@ except ImportError:
return asyncio.ensure_future(coro)
-def create_task_with_handler(coro, task_name, logger):
+def create_task_with_handler(coro, task_name, logger=ROOT_LOGGER):
"""Wrapper around "asyncio.create_task" to make sure the task
exceptions are handled properly.
|
Fix for small bug in task handling
Fixes #<I>
|
diff --git a/lakeside/__init__.py b/lakeside/__init__.py
index <HASH>..<HASH> 100755
--- a/lakeside/__init__.py
+++ b/lakeside/__init__.py
@@ -40,7 +40,7 @@ def get_devices(username, password):
info = r.json()
for item in info['items']:
- devices.append({'address': item['device']['wifi']['lan_ip_addr'], 'code': item['device']['local_code'], 'type': item['device']['product']['product_code']})
+ devices.append({'address': item['device']['wifi']['lan_ip_addr'], 'code': item['device']['local_code'], 'type': item['device']['product']['product_code'], 'name': item['device']['alias_name']})
return devices
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -23,11 +23,11 @@ import warnings
dynamic_requires = []
-version = 0.1
+version = 0.2
setup(
name='lakeside',
- version=0.1,
+ version=0.2,
author='Matthew Garrett',
author_email='mjg59@google.com',
url='http://github.com/google/python-lakeside',
|
Provide the device name and bump to <I>
|
diff --git a/src/nu/validator/servlet/VerifierServletTransaction.java b/src/nu/validator/servlet/VerifierServletTransaction.java
index <HASH>..<HASH> 100644
--- a/src/nu/validator/servlet/VerifierServletTransaction.java
+++ b/src/nu/validator/servlet/VerifierServletTransaction.java
@@ -1226,7 +1226,7 @@ class VerifierServletTransaction implements DocumentModeHandler, SchemaResolver
}
String language = (String) request.getAttribute(
"http://validator.nu/properties/document-language");
- if (!"".equals(language)) {
+ if (!"".equals(language) && language != null) {
String langFieldName = "LANG_" + language.toUpperCase();
if ("zh-hans".equals(language)) {
langFieldName = "LANG_ZH_HANS";
|
Prevent NPE caused by previous commit
|
diff --git a/lib/dialects/http.js b/lib/dialects/http.js
index <HASH>..<HASH> 100644
--- a/lib/dialects/http.js
+++ b/lib/dialects/http.js
@@ -31,9 +31,6 @@ function INIT(options) {
function httpRequest(operation) {
var self = this
return function(req, resp) {
- if (!~ self._allowedJobTypes.indexOf(req.params.jobType))
- return $done('Not a valid job type', 400);
-
var $done = function(err, code, msg) {
if (!msg && !_.isNumber(code)) {
msg = code||'' // `code` is optional
@@ -45,6 +42,9 @@ function httpRequest(operation) {
resp.end(!!err ? ''+err : (''+msg || ''))
}
+ if (!~ self._allowedJobTypes.indexOf(req.params.jobType))
+ return $done('Not a valid job type', 400);
+
return OPS[operation](req, resp, $done)
}
}
|
Fix bug if bad job-type passed to HTTP dialect
|
diff --git a/service/src/main/java/org/ops4j/pax/web/service/internal/HttpServiceRequestWrapper.java b/service/src/main/java/org/ops4j/pax/web/service/internal/HttpServiceRequestWrapper.java
index <HASH>..<HASH> 100644
--- a/service/src/main/java/org/ops4j/pax/web/service/internal/HttpServiceRequestWrapper.java
+++ b/service/src/main/java/org/ops4j/pax/web/service/internal/HttpServiceRequestWrapper.java
@@ -70,7 +70,7 @@ public class HttpServiceRequestWrapper extends HttpServletRequestWrapper {
super.setAttribute(name, value);
}
- private void handleAuthenticationType(final String authenticationType) {
+ private void handleAuthenticationType(final Object authenticationType) {
if (!isJettyRequestAvailable()) {
return;
}
@@ -85,7 +85,7 @@ public class HttpServiceRequestWrapper extends HttpServletRequestWrapper {
m_request.setAuthType((String) authenticationType);
}
- private void handleRemoteUser(final String remoteUser) {
+ private void handleRemoteUser(final Object remoteUser) {
}
private boolean isJettyRequestAvailable() {
|
PAXWEB-<I>
Handle Authentication type in first phase. Remote User will follow.
|
diff --git a/playitagainsam/eventlog.py b/playitagainsam/eventlog.py
index <HASH>..<HASH> 100644
--- a/playitagainsam/eventlog.py
+++ b/playitagainsam/eventlog.py
@@ -52,15 +52,6 @@ class EventLog(object):
# Collapse consecutive writes into a single chunk.
if self.events[-1]["act"] == "WRITE":
self.events[-1]["data"] += event["data"]
- # Collapse DEL and term wiping into a no-op.
- if self.events[-1]["data"] == "\x08\x1b[K":
- if len(self.events) >= 3:
- e_echo, e_del = self.events[-3:-1]
- if e_echo["act"] == "ECHO":
- if e_del["act"] == "READ":
- if e_del["data"] == "\x7f":
- del self.events[-2:]
- e_echo["data"] = e_echo["data"][:-1]
return
# Collapse read/write of same data into an "ECHO".
if self.events[-1]["act"] == "READ":
|
Remove code for collapsing backspaces, it no longer works.
This will have to be redone in a more clever way, possibly as
a separate "filter" pass during write.
|
diff --git a/raiden/network/proxies/secret_registry.py b/raiden/network/proxies/secret_registry.py
index <HASH>..<HASH> 100644
--- a/raiden/network/proxies/secret_registry.py
+++ b/raiden/network/proxies/secret_registry.py
@@ -151,7 +151,6 @@ class SecretRegistry:
estimated_transaction = self.client.estimate_gas(
self.proxy, "registerSecretBatch", log_details, secrets_to_register
)
- receipt = None
transaction_hash = None
msg = None
transaction_mined = None
@@ -186,8 +185,10 @@ class SecretRegistry:
# If the transaction was sent it must not fail. If this happened
# some of our assumptions is broken therefore the error is
# unrecoverable
- if estimated_transaction is not None and receipt is not None:
- if receipt["gasUsed"] == estimated_transaction.estimated_gas:
+ if transaction_mined is not None:
+ receipt = transaction_mined.receipt
+
+ if receipt["gasUsed"] == transaction_mined.startgas:
# The transaction failed and all gas was used. This can
# happen because of:
#
|
bugfix: receit was always none
|
diff --git a/src/index.js b/src/index.js
index <HASH>..<HASH> 100644
--- a/src/index.js
+++ b/src/index.js
@@ -21,7 +21,7 @@ class PublicationServer {
* to.
* @param {Object} server The HTTP server to allow Primus to listen on.
*/
- constructor({authFn, mountPath, errHandler, server} = {}) {
+ constructor({authFn, mountPath, errHandler, server, transformer} = {}) {
assert(authFn, 'Must provide an authorization function');
this._subscriptions = {};
@@ -39,7 +39,7 @@ class PublicationServer {
authorization: this._authFn,
pathname: this._mountPath,
parser: 'EJSON',
- transformer: 'uws',
+ transformer: transformer || 'uws',
pingInterval: false
});
|
Add the option for a configurable transformer
|
diff --git a/container/api/src/main/java/org/wildfly/swarm/container/Container.java b/container/api/src/main/java/org/wildfly/swarm/container/Container.java
index <HASH>..<HASH> 100644
--- a/container/api/src/main/java/org/wildfly/swarm/container/Container.java
+++ b/container/api/src/main/java/org/wildfly/swarm/container/Container.java
@@ -169,8 +169,10 @@ public class Container {
* @return The container.
*/
public Container fraction(Fraction fraction) {
- this.fractions.put(fractionRoot(fraction.getClass()), fraction);
- fraction.initialize(new InitContext());
+ if ( fraction != null ) {
+ this.fractions.put(fractionRoot(fraction.getClass()), fraction);
+ fraction.initialize(new InitContext());
+ }
return this;
}
|
Logstash fraction test but not a super great one.
|
diff --git a/pyemma/coordinates/tests/test_featurereader.py b/pyemma/coordinates/tests/test_featurereader.py
index <HASH>..<HASH> 100644
--- a/pyemma/coordinates/tests/test_featurereader.py
+++ b/pyemma/coordinates/tests/test_featurereader.py
@@ -90,8 +90,8 @@ class TestFeatureReader(unittest.TestCase):
# reproduce outcome
xyz_s = self.xyz.shape
- fake_lagged = np.empty((xyz_s[0]-lag,xyz_s[1],xyz_s[2]))
- fake_lagged = self.xyz[lag:]
+ fake_lagged = np.empty((xyz_s[0]-lag,xyz_s[1]*xyz_s[2]))
+ fake_lagged = self.xyz.reshape((xyz_s[0],-1))[lag:]
self.assertTrue(np.allclose(merged_lagged, fake_lagged))
@@ -128,7 +128,7 @@ class TestFeatureReader(unittest.TestCase):
for _, _, y in reader:
lagged_chunks.append(y)
- coords = self.xyz
+ coords = self.xyz.reshape((self.xyz.shape[0],-1))
for ii, c in enumerate(lagged_chunks[:-1]):
# all despite last chunk shall have chunksize
|
[tests] fixed bug in test_featurereader
|
diff --git a/yaks/lib/yaks.rb b/yaks/lib/yaks.rb
index <HASH>..<HASH> 100644
--- a/yaks/lib/yaks.rb
+++ b/yaks/lib/yaks.rb
@@ -23,8 +23,8 @@ require 'yaks/errors'
require 'yaks/default_policy'
module Yaks
- # A PORO
- Undefined = Object.new
+ Undefined = Module.new.freeze
+
# Set the Root constant as the gems root path
Root = Pathname(__FILE__).join('../..')
|
Make Yaks::Undefined look like "Undefined" when inspected, instead of #<Object...>
|
diff --git a/lib/mail/network/delivery_methods/smtp.rb b/lib/mail/network/delivery_methods/smtp.rb
index <HASH>..<HASH> 100644
--- a/lib/mail/network/delivery_methods/smtp.rb
+++ b/lib/mail/network/delivery_methods/smtp.rb
@@ -88,8 +88,8 @@ module Mail
:openssl_verify_mode => nil,
:ssl => nil,
:tls => nil,
- :open_timeout => nil,
- :read_timeout => nil
+ :open_timeout => 5,
+ :read_timeout => 5
}
def initialize(values)
|
Add default timeouts for SMTP
I get lots of reports of "stuck" Sidekiq jobs due to email delivery. SMTP servers in the wild are notoriously unreliable, e.g. spam honeypots can leave TCP connections lingering. We need network connection timeouts by default.
|
diff --git a/src/DebuggerManager.php b/src/DebuggerManager.php
index <HASH>..<HASH> 100644
--- a/src/DebuggerManager.php
+++ b/src/DebuggerManager.php
@@ -3,9 +3,9 @@
namespace Recca0120\LaravelTracy;
use ErrorException;
-use Exception;
use Illuminate\Contracts\Routing\UrlGenerator;
use Illuminate\Support\Arr;
+use Throwable;
use Tracy\Bar;
use Tracy\BlueScreen;
use Tracy\Debugger;
@@ -224,7 +224,7 @@ class DebuggerManager
* @param \Exception $exception
* @return string
*/
- public function exceptionHandler(Exception $exception)
+ public function exceptionHandler(Throwable $exception)
{
return $this->renderBuffer(function () use ($exception) {
Helpers::improveException($exception);
|
Fixed wrong type hint of DebuggerManager::exceptionHandler()
* this is essential for laravel 7
|
diff --git a/tasks/version_build.js b/tasks/version_build.js
index <HASH>..<HASH> 100644
--- a/tasks/version_build.js
+++ b/tasks/version_build.js
@@ -111,7 +111,6 @@ module.exports = function (grunt) {
// Stage and commit to a branch
function gitCommit () {
- var status = shelljs.exec('git status --porcelain');
var commitMsg = options.commitMsg
.replace(/%sourceName%/g, tokens.name)
.replace(/%sourceCommit%/g, tokens.commit)
@@ -121,7 +120,7 @@ module.exports = function (grunt) {
shelljs.exec('git reset', {silent: true});
// If there are no changes, skip commit
- if (status.output === '') {
+ if (shelljs.exec('git status --porcelain', {silent: true}).output === '') {
grunt.log.writeln('No changes to your branch. Skipping commit.');
return;
}
|
Remove single use var to make code style consistant
|
diff --git a/spec/requests/as_account/channel_spec.rb b/spec/requests/as_account/channel_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/requests/as_account/channel_spec.rb
+++ b/spec/requests/as_account/channel_spec.rb
@@ -79,16 +79,19 @@ describe Yt::Channel, :device_app do
it { expect(channel.videos.where(chart: 'invalid').first).to be_a Yt::Video }
end
- # @note: these tests are slow because they go through multiple pages of
- # results and do so to test that we can overcome YouTube’s limitation of
- # only returning the first 500 results for each query.
- # @note: in principle, the following three counters should match, but in
- # reality +video_count+ and +size+ are only approximations.
context 'with more than 500 videos' do
let(:id) { 'UCsmvakQZlvGsyjyOhmhvOsw' }
+ # @note: in principle, the following three counters should match, but in
+ # reality +video_count+ and +size+ are only approximations.
it { expect(channel.video_count).to be > 500 }
it { expect(channel.videos.size).to be > 500 }
- it { expect(channel.videos.count).to be > 500 }
+ context 'with default order (by date)' do
+ # @note: these tests are slow because they go through multiple pages of
+ # results and do so to test that we can overcome YouTube’s limitation of
+ # only returning the first 500 results when ordered by date.
+ it { expect(channel.videos.count).to be > 500 }
+ it { expect(channel.videos.where(order: 'viewCount').count).to be 500 }
+ end
end
end
|
Add missing spec for b<I>aac<I>
When the order is *not* by date, only the first <I> videos can be
retrieved, since YouTube does not provide a way to paginate beyond
that limit.
|
diff --git a/salt/states/boto_secgroup.py b/salt/states/boto_secgroup.py
index <HASH>..<HASH> 100644
--- a/salt/states/boto_secgroup.py
+++ b/salt/states/boto_secgroup.py
@@ -376,7 +376,7 @@ def _rules_present(
_source_group_name = rule.get('source_group_name', None)
if _source_group_name:
_group_id = __salt__['boto_secgroup.get_group_id'](
- _source_group_name, vpc_id, vpc_id, region, key, keyid, profile
+ _source_group_name, vpc_id, vpc_name, region, key, keyid, profile
)
if not _group_id:
msg = ('source_group_name {0} does not map to a valid'
|
One last bug to squash. Seriously. It's the last one. Ever!
- fixed param vpc_id being passed where vpc_name was intended.
|
diff --git a/src/Sylius/Component/Core/spec/Taxation/OrderShipmentTaxesByZoneApplicatorSpec.php b/src/Sylius/Component/Core/spec/Taxation/OrderShipmentTaxesByZoneApplicatorSpec.php
index <HASH>..<HASH> 100644
--- a/src/Sylius/Component/Core/spec/Taxation/OrderShipmentTaxesByZoneApplicatorSpec.php
+++ b/src/Sylius/Component/Core/spec/Taxation/OrderShipmentTaxesByZoneApplicatorSpec.php
@@ -105,7 +105,7 @@ class OrderShipmentTaxesByZoneApplicatorSpec extends ObjectBehavior
$calculator->calculate(1000, $taxRate)->willReturn(0);
- $adjustmentsFactory->createWithData(Argument::any())->shouldNotBeCalled();
+ $adjustmentsFactory->createWithData(Argument::cetera())->shouldNotBeCalled();
$order->addAdjustment(Argument::any())->shouldNotBeCalled();
$this->apply($order, $zone);
|
Use the Argument::cetera to match every adjustment factory createWithData method call in the OrderShipmentTaxesByZoneApplicatorSpec.
|
diff --git a/shellish/layout/table.py b/shellish/layout/table.py
index <HASH>..<HASH> 100644
--- a/shellish/layout/table.py
+++ b/shellish/layout/table.py
@@ -119,9 +119,13 @@ class Table(object):
False to disable this behavior but be warned the table will not look
good. """
self.title = title
- self.columns_def = columns
- self.accessors_def = accessors
- self.headers = headers
+ # Freeze the table definitions...
+ try:
+ self.columns_def = columns.copy() if columns is not None else None
+ except AttributeError:
+ self.columns_def = tuple(columns)
+ self.accessors_def = tuple(accessors or ())
+ self.headers = tuple(headers or ())
self.width = width
self.flex = flex
self.file = file if file is not None else sys.stdout
@@ -187,7 +191,7 @@ class Table(object):
if not self.accessors_def:
accessors = [operator.itemgetter(i) for i in range(columns)]
else:
- accessors = self.accessors_def[:]
+ accessors = list(self.accessors_def)
for i, x in enumerate(accessors):
if not callable(x):
accessors[i] = operator.itemgetter(x)
|
Freeze and iterate through table configurations.
Grab the headers, columns and accessors definitions at init time and put
them into frozen tuples. This lets us grab Sequence type definitions
and protects us from being corrupted should the user modify these
datastructures after they are passed in to the table code.
|
diff --git a/spec/integration/migration_spec.rb b/spec/integration/migration_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/integration/migration_spec.rb
+++ b/spec/integration/migration_spec.rb
@@ -54,15 +54,20 @@ describe "A Migration" do
end
adapter = DataMapper::Spec.adapter_name
- expected_module = {
- :sqlite => lambda { SQL::Sqlite },
- :mysql => lambda { SQL::Mysql },
+
+ expected_module_lambda = {
+ :sqlite => lambda { SQL::Sqlite },
+ :mysql => lambda { SQL::Mysql },
:postgres => lambda { SQL::Postgres }
- }[adapter.to_sym][]
+ }[adapter.to_sym]
+
+ expected_module = expected_module_lambda ? expected_module_lambda.call : nil
- it "should extend with #{expected_module} when adapter is #{adapter}" do
- migration = DataMapper::Migration.new(1, :"#{adapter}_adapter_test") { }
- (class << migration.adapter; self; end).included_modules.should include(expected_module)
+ if expected_module
+ it "should extend with #{expected_module} when adapter is #{adapter}" do
+ migration = DataMapper::Migration.new(1, :"#{adapter}_adapter_test") { }
+ (class << migration.adapter; self; end).included_modules.should include(expected_module)
+ end
end
end
|
More robust checks before checking module inclusion
Specs for in_memory and yaml still fail, but at
least they're running now. Previously they bailed
out immediately because of calling [] (lambda.call)
on nil.
This (like many other dm-migrations specs) looks
kinda weird and will probably be refactored soonish
|
diff --git a/Services/Navitia.php b/Services/Navitia.php
index <HASH>..<HASH> 100644
--- a/Services/Navitia.php
+++ b/Services/Navitia.php
@@ -199,6 +199,8 @@ class Navitia
public function getRouteStopPoints($perimeter, $externalRouteId)
{
$pathFilter = 'networks/'.$perimeter->getExternalNetworkId().'/routes/'.$externalRouteId;
+ $fromdatetime = new \DateTime('now');
+ $fromdatetime->setTime(4, 0);
$query = array(
'api' => 'coverage',
@@ -206,7 +208,7 @@ class Navitia
'region' => $perimeter->getExternalCoverageId(),
'action' => 'route_schedules',
'path_filter' => $pathFilter,
- 'parameters' => '?depth=0',
+ 'parameters' => '?depth=0&from_datetime='.$fromdatetime->format('Ymd\THis')
),
);
|
added from_datetime to route_schedules call
|
diff --git a/libraries/joomla/utilities/simplecrypt.php b/libraries/joomla/utilities/simplecrypt.php
index <HASH>..<HASH> 100644
--- a/libraries/joomla/utilities/simplecrypt.php
+++ b/libraries/joomla/utilities/simplecrypt.php
@@ -43,10 +43,7 @@ class JSimpleCrypt
}
// Build the JCryptKey object.
- $key = new JCryptKey;
- $key->private = $privateKey;
- $key->public = $privateKey;
- $key->type = 'simple';
+ $key = new JCryptKey('simple', $privateKey, $privateKey);
// Setup the JCrypt object.
$this->_crypt = new JCrypt(new JCryptCipherSimple, $key);
|
Fix minor problem in JSimpleCrypt.
|
diff --git a/sphinxcontrib/swaggerdoc/swaggerv2_doc.py b/sphinxcontrib/swaggerdoc/swaggerv2_doc.py
index <HASH>..<HASH> 100644
--- a/sphinxcontrib/swaggerdoc/swaggerv2_doc.py
+++ b/sphinxcontrib/swaggerdoc/swaggerv2_doc.py
@@ -7,6 +7,7 @@ from past.builtins import basestring
from sphinx.locale import _
+from six.moves.urllib import parse as urlparse # Retain Py2 compatibility for urlparse
import requests
from requests_file import FileAdapter
import json
@@ -28,10 +29,21 @@ class SwaggerV2DocDirective(Directive):
has_content = True
def processSwaggerURL(self, url):
- s = requests.Session()
- s.mount('file://', FileAdapter())
- r = s.get(url)
- return r.json()
+ parsed_url = urlparse.urlparse(url)
+ if not parsed_url.scheme: # Assume file relative to documentation
+ env = self.state.document.settings.env
+ relfn, absfn = env.relfn2path(url)
+ env.note_dependency(relfn)
+
+ with open(absfn) as fd:
+ content = fd.read()
+
+ return json.loads(content)
+ else:
+ s = requests.Session()
+ s.mount('file://', FileAdapter())
+ r = s.get(url)
+ return r.json()
def create_item(self, key, value):
para = nodes.paragraph()
|
Support references to locally referenced swagger documents.
resolves unaguil/sphinx-swaggerdoc#<I>
|
diff --git a/client.py b/client.py
index <HASH>..<HASH> 100644
--- a/client.py
+++ b/client.py
@@ -180,7 +180,8 @@ def get_screenshots(s, job_id, res_dir=None):
_mkdir(output_dir)
else:
new_direcory = os.path.join(output_dir, res_dir)
- _mkdir(new_direcory)
+ output_dir=new_direcory
+ _mkdir(output_dir)
try:
print 'Screenshot job complete. Saving files..'
_purge(output_dir, '.diff', 'stale diff')
|
Modify the output directory for saving screenshots - it now depends on the config file name
|
diff --git a/scripts/python/startSwarm.py b/scripts/python/startSwarm.py
index <HASH>..<HASH> 100755
--- a/scripts/python/startSwarm.py
+++ b/scripts/python/startSwarm.py
@@ -2,10 +2,10 @@
# Start netplugin and netmaster
import api.tnode
-import time
-import sys
-import os
import argparse
+import os
+import re
+import time
# Parse command line args
# Create the parser and sub parser
@@ -45,6 +45,21 @@ if args.swarm == "swarm_mode":
node.runCmdThread(command)
time.sleep(15)
+
+ print "Check netplugin is installed and enabled"
+ out, _, _ = nodes[0].runCmd("docker plugin ls")
+
+ installed = re.search('contiv/v2plugin', out[1])
+
+ if installed == None:
+ print "Make target failed: Contiv plugin is not installed"
+ os._exit(1)
+
+ enabled = re.search('false', out[1])
+ if enabled != None:
+ print "Make target failed: Contiv plugin is installed but disabled"
+ os._exit(1)
+
print "################### Swarm Mode is up #####################"
else:
swarmScript= scriptPath + "/start-swarm.sh"
|
Add extra validation check for v2plugin (#<I>)
* Add extra validation check for v2plugin
Currently, demo-v2plugin doesn't check whether contiv is installed and
enabled.
This patchset checks whether the v2plugin is installed and enabled at the
end of the installation.
|
diff --git a/edx_rest_api_client/client.py b/edx_rest_api_client/client.py
index <HASH>..<HASH> 100644
--- a/edx_rest_api_client/client.py
+++ b/edx_rest_api_client/client.py
@@ -20,7 +20,7 @@ ACCESS_TOKEN_EXPIRED_THRESHOLD_SECONDS = 5
# How long should we wait to connect to the auth service.
# https://requests.readthedocs.io/en/master/user/advanced/#timeouts
REQUEST_CONNECT_TIMEOUT = 3.05
-REQUEST_READ_TIMEOUT = 1
+REQUEST_READ_TIMEOUT = 5
def user_agent():
|
Make sure oauth requests do not timeout during read
The previous default timeout of 1s was causing issues in communication
between services, in particular when the server was under load. This
timeout is configurable by passing a `timeout=...` keyword argument to
the OAuthClient constructor; but that requires patching every IDA. It
makes more sense to define a comfortable timeout that avoids the need to
override for each IDA.
See these conversations:
<URL>
|
diff --git a/test/test_socks.py b/test/test_socks.py
index <HASH>..<HASH> 100644
--- a/test/test_socks.py
+++ b/test/test_socks.py
@@ -738,7 +738,7 @@ class SocksErrorTests(unittest.TestCase):
self.assertEquals(error.message, message)
self.assertEquals(str(error), message)
- def test_socks_error_factory(self):
+ def test_error_factory(self):
for cls in socks.SocksError.__subclasses__():
error = socks._create_socks_error(cls.code)
self._check_error(error, cls, cls.code, cls.message)
|
Make method names of the `SocksErrorTests` consistent
|
diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -41,6 +41,7 @@ module.exports = function(app, dcPath) {
dcFile,
'run',
'--service-ports',
+ '--rm',
app,
cmd
].join(' ');
|
[#3] add the --rm option to remove containers on exit
|
diff --git a/lib/platform.rb b/lib/platform.rb
index <HASH>..<HASH> 100644
--- a/lib/platform.rb
+++ b/lib/platform.rb
@@ -59,52 +59,4 @@ module Ronin
end
end
-
- class Linux < Platform
-
- def initialize(version,arch)
- super('Linux',version,arch)
- end
-
- end
-
- class FreeBSD < Platform
-
- def initialize(version,arch)
- super('FreeBSD',version,arch)
- end
-
- end
-
- class OpenBSD < Platform
-
- def initialize(version,arch)
- super('OpenBSD',version,arch)
- end
-
- end
-
- class NetBSD < Platform
-
- def initialize(version,arch)
- super('NetBSD',version,arch)
- end
-
- end
-
- class Windows < Platform
-
- def initialize(version,arch)
- super('Windows',version,arch)
- end
-
- end
-
- class OSX < Platform
-
- def initialize(version,arch)
- super('OSX',version,arch)
- end
-
- end
end
|
* Removed extra-classes.
|
diff --git a/src/Illuminate/Database/Eloquent/Builder.php b/src/Illuminate/Database/Eloquent/Builder.php
index <HASH>..<HASH> 100755
--- a/src/Illuminate/Database/Eloquent/Builder.php
+++ b/src/Illuminate/Database/Eloquent/Builder.php
@@ -34,8 +34,8 @@ class Builder {
* @var array
*/
protected $passthru = array(
- 'toSql', 'lists', 'insert', 'insertGetId', 'pluck',
- 'count', 'min', 'max', 'avg', 'sum', 'exists',
+ 'toSql', 'lists', 'insert', 'insertGetId', 'pluck', 'count',
+ 'min', 'max', 'avg', 'sum', 'exists', 'getBindings',
);
/**
|
Pass getBindings through to querybuilder
|
diff --git a/lib/yap/shell/version.rb b/lib/yap/shell/version.rb
index <HASH>..<HASH> 100644
--- a/lib/yap/shell/version.rb
+++ b/lib/yap/shell/version.rb
@@ -1,5 +1,5 @@
module Yap
module Shell
- VERSION = "0.4.6"
+ VERSION = "0.4.7"
end
end
|
Bumping version to <I>
|
diff --git a/aviator.js b/aviator.js
index <HASH>..<HASH> 100644
--- a/aviator.js
+++ b/aviator.js
@@ -436,7 +436,7 @@ Navigator.prototype = {
pathname,
uri;
- if (ev.metaKey || ev.ctrlKey) return;
+ if (ev.button === 1 || ev.metaKey || ev.ctrlKey) return;
// Sub optimal. It itererates through all ancestors on every single click :/
while (target) {
|
Rebuild the aviator.js file
|
diff --git a/src/main/java/common/CharsetDetector.java b/src/main/java/common/CharsetDetector.java
index <HASH>..<HASH> 100644
--- a/src/main/java/common/CharsetDetector.java
+++ b/src/main/java/common/CharsetDetector.java
@@ -1,5 +1,26 @@
package common;
+/*-
+ * #%L
+ * FOKProjects Common
+ * %%
+ * Copyright (C) 2016 - 2017 Frederik Kammel
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
|
Added the license header to the new class
|
diff --git a/scripts/run_pylint.py b/scripts/run_pylint.py
index <HASH>..<HASH> 100644
--- a/scripts/run_pylint.py
+++ b/scripts/run_pylint.py
@@ -51,6 +51,8 @@ TEST_DISABLED_MESSAGES = [
'import-error',
'invalid-name',
'missing-docstring',
+ 'missing-raises-doc',
+ 'missing-returns-doc',
'no-init',
'no-self-use',
'superfluous-parens',
|
Update disabled messages from new pylint.
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -42,7 +42,7 @@ setup(
packages=find_packages(),
- install_requires=['aioredis'],
+ install_requires=['aioredis', 'attrs'],
setup_requires=['pytest-runner'],
tests_require=['pytest', 'pytest-asyncio', 'pytest-mock', 'pytest-cov']
)
|
Adding attrs to requirements in setup.py
|
diff --git a/fundingmanager_test.go b/fundingmanager_test.go
index <HASH>..<HASH> 100644
--- a/fundingmanager_test.go
+++ b/fundingmanager_test.go
@@ -166,7 +166,7 @@ func createTestWallet(cdb *channeldb.DB, netParams *chaincfg.Params,
ChainIO: bio,
FeeEstimator: estimator,
NetParams: *netParams,
- DefaultConstraints: defaultChannelConstraints,
+ DefaultConstraints: defaultBtcChannelConstraints,
})
if err != nil {
return nil, err
|
fundingmanager_test: use renamed defaultBtcChannelConstraints const
|
diff --git a/src/test/java/net/bramp/ffmpeg/FFmpegExecutorTest.java b/src/test/java/net/bramp/ffmpeg/FFmpegExecutorTest.java
index <HASH>..<HASH> 100644
--- a/src/test/java/net/bramp/ffmpeg/FFmpegExecutorTest.java
+++ b/src/test/java/net/bramp/ffmpeg/FFmpegExecutorTest.java
@@ -79,15 +79,6 @@ public class FFmpegExecutorTest {
}
protected void runAndWait(FFmpegJob job) throws ExecutionException, InterruptedException {
- Future<?> future = executor.submit(job);
-
- // TODO Why do we loop, and not future.get()?
- while (!future.isDone()) {
- try {
- future.get(100, TimeUnit.MILLISECONDS);
- break;
- } catch (TimeoutException e) {
- }
- }
+ executor.submit(job).get();
}
}
|
Minor change to FFmpegExecutorTest to not loop while waiting.
|
diff --git a/build/build.transforms.js b/build/build.transforms.js
index <HASH>..<HASH> 100644
--- a/build/build.transforms.js
+++ b/build/build.transforms.js
@@ -10,7 +10,7 @@ const
{ writeFile } = require('./build.utils')
function relative (name) {
- return path.relative(root, name)
+ return path.relative(root, name).split('\\').join('/')
}
function getWithoutExtension (filename) {
|
Fix build of imports file to use unix separator (#<I>)
|
diff --git a/Model/Service/CallbackService.php b/Model/Service/CallbackService.php
index <HASH>..<HASH> 100755
--- a/Model/Service/CallbackService.php
+++ b/Model/Service/CallbackService.php
@@ -170,11 +170,12 @@ class CallbackService {
// Update order status
$order->setStatus($this->gatewayConfig->getOrderStatusAuthorized());
- // Send the email
- $this->orderSender->send($order);
-
- // Set email sent
- $order->setEmailSent(1);
+ // Send the email only for Hosted integration
+ // Frames is using the core placeOrder email sender and doesn't need manual action
+ if ($this->gatewayConfig->isHostedIntegration()) {
+ $this->orderSender->send($order);
+ $order->setEmailSent(1);
+ }
// Comments override
if ($overrideComments) {
|
Order email fix
Fixed an issue creating a double confirmation email with Frames.
|
diff --git a/internal/validate/schema_props.go b/internal/validate/schema_props.go
index <HASH>..<HASH> 100644
--- a/internal/validate/schema_props.go
+++ b/internal/validate/schema_props.go
@@ -180,12 +180,3 @@ func (s *schemaPropsValidator) Validate(data interface{}) *Result {
mainResult.Inc()
return mainResult
}
-
-// IsZero returns true when the value is a zero for the type
-func isZero(data reflect.Value) bool {
- if !data.CanInterface() {
- return true
- }
- tpe := data.Type()
- return reflect.DeepEqual(data.Interface(), reflect.Zero(tpe).Interface())
-}
|
fixes #<I> separate listener creation from serving
|
diff --git a/src/components/input/QInput.js b/src/components/input/QInput.js
index <HASH>..<HASH> 100644
--- a/src/components/input/QInput.js
+++ b/src/components/input/QInput.js
@@ -29,11 +29,12 @@ export default {
decimals: Number,
step: Number,
upperCase: Boolean,
- lowerCase: Boolean
+ lowerCase: Boolean,
+ initialShowPassword: Boolean
},
data () {
return {
- showPass: false,
+ showPass: this.initialShowPassword,
showNumber: true,
model: this.value,
watcher: null,
@@ -358,7 +359,7 @@ export default {
[].concat(this.$slots.before).concat([
this.isTextarea ? this.__getTextarea(h) : this.__getInput(h),
- (!this.disable && this.isPassword && !this.noPassToggle && this.length && h(QIcon, {
+ (!this.disable && this.isPassword && !this.noPassToggle && (this.initialShowPassword || this.length) && h(QIcon, {
slot: 'after',
staticClass: 'q-if-control',
props: {
|
Allow password to be shown by default (#<I>)
* Allow password to be shown by default
Usage `<q-input v-model="user.password" type="password" :show-password="true" float-label="Password" />`
this change will allow a password field and show/hide icon to be visible by default.
This might be good for an admin panel where you have to make a user account and send that user the credentials.
* Update QInput.js
|
diff --git a/pipeline/compress/compress.go b/pipeline/compress/compress.go
index <HASH>..<HASH> 100644
--- a/pipeline/compress/compress.go
+++ b/pipeline/compress/compress.go
@@ -44,9 +44,9 @@ func create(system, arch string, config config.ProjectConfig) error {
gw := gzip.NewWriter(file)
tw := tar.NewWriter(gw)
defer func() {
- _ = file.Close()
- _ = gw.Close()
_ = tw.Close()
+ _ = gw.Close()
+ _ = file.Close()
}()
for _, f := range config.Files {
if err := addFile(tw, f, f); err != nil {
|
Reorder `defer`'d closers
Commit <URL> introduced a regression whereby
the tarball was closed before data was completely written, thus breaking the release package
|
diff --git a/go/porcelain/site.go b/go/porcelain/site.go
index <HASH>..<HASH> 100644
--- a/go/porcelain/site.go
+++ b/go/porcelain/site.go
@@ -51,12 +51,15 @@ func (n *Netlify) CreateSite(ctx context.Context, site *models.Site, configureDN
}
// UpdateSite modifies an existent site.
-func (n *Netlify) UpdateSite(ctx context.Context, site *models.Site) error {
+func (n *Netlify) UpdateSite(ctx context.Context, site *models.Site) (*models.Site, error) {
authInfo := context.GetAuthInfo(ctx)
params := operations.NewUpdateSiteParams().WithSite(site).WithSiteID(site.ID)
- _, err := n.Netlify.Operations.UpdateSite(params, authInfo)
- return err
+ resp, err := n.Netlify.Operations.UpdateSite(params, authInfo)
+ if err != nil {
+ return nil, err
+ }
+ return resp.Payload, nil
}
// ConfigureSiteTLS provisions a TLS certificate for a site with a custom domain.
|
Make site update to return the updated site.
So we can show up to date settings.
|
diff --git a/extensions/waitForSelector/waitForSelector.js b/extensions/waitForSelector/waitForSelector.js
index <HASH>..<HASH> 100644
--- a/extensions/waitForSelector/waitForSelector.js
+++ b/extensions/waitForSelector/waitForSelector.js
@@ -4,15 +4,23 @@
/* global document: true */
'use strict';
-exports.version = '0.1';
+exports.version = '0.2';
function checkSelector(phantomas, selector) {
var res = phantomas.evaluate(function(selector) {
- try {
- return document.querySelector(selector) !== null;
- } catch (ex) {
- return ex.toString();
- }
+ (function(phantomas) {
+ try {
+ var result;
+
+ phantomas.spyEnabled(false, 'checking the selector');
+ result = (document.querySelector(selector) !== null);
+ phantomas.spyEnabled(true);
+
+ return result;
+ } catch (ex) {
+ return ex.toString();
+ }
+ }(window.__phantomas));
}, selector);
phantomas.log('Selector: query for "%s" returned %j', selector, res);
|
waitForSelector: use phantomas.spyEnabled()
|
diff --git a/micrometer-core/src/main/java/io/micrometer/core/instrument/binder/tomcat/TomcatMetrics.java b/micrometer-core/src/main/java/io/micrometer/core/instrument/binder/tomcat/TomcatMetrics.java
index <HASH>..<HASH> 100644
--- a/micrometer-core/src/main/java/io/micrometer/core/instrument/binder/tomcat/TomcatMetrics.java
+++ b/micrometer-core/src/main/java/io/micrometer/core/instrument/binder/tomcat/TomcatMetrics.java
@@ -283,6 +283,7 @@ public class TomcatMetrics implements MeterBinder {
* </ul>
*
* @param jmxDomain JMX domain to be used
+ * @since 1.0.11
*/
public void setJmxDomain(String jmxDomain) {
this.jmxDomain = jmxDomain;
|
Add Javadoc since to TomcatMetrics.setJmxDomain() (#<I>)
|
diff --git a/bhmm/estimators/maximum_likelihood.py b/bhmm/estimators/maximum_likelihood.py
index <HASH>..<HASH> 100644
--- a/bhmm/estimators/maximum_likelihood.py
+++ b/bhmm/estimators/maximum_likelihood.py
@@ -278,6 +278,10 @@ class MaximumLikelihoodEstimator(object):
C = np.zeros((self._nstates, self._nstates))
for k in range(len(self._observations)): # update count matrix
C += count_matrices[k]
+
+ # trim counts below machine precision
+ C[C < 1e-16] = 0
+
return C
def _update_model(self, gammas, count_matrices, maxiter=10000000):
|
[estimators.MaximumLikelihoodEstimator] fix count matrix summation for numbers < 1e-<I>
Disconnected 2 state system could not detect disconnected T matrix
but instead returned implied timescales = inf and T matrix off diagonal
entries ~1e-<I>. Cf. <URL>
|
diff --git a/modules/Cockpit/AuthController.php b/modules/Cockpit/AuthController.php
index <HASH>..<HASH> 100755
--- a/modules/Cockpit/AuthController.php
+++ b/modules/Cockpit/AuthController.php
@@ -20,7 +20,7 @@ class AuthController extends \LimeExtra\Controller {
$user = $app->module('cockpit')->getUser();
if (!$user) {
- $app->reroute('/auth/login?to='.$this->app->retrieve('route'));
+ $app->reroute('/auth/login?to='.$app->retrieve('route'));
$app->stop();
}
|
fixed "Call to a member function retrieve() on null" on login page
|
diff --git a/externs/html5.js b/externs/html5.js
index <HASH>..<HASH> 100644
--- a/externs/html5.js
+++ b/externs/html5.js
@@ -594,6 +594,12 @@ Window.prototype.openDatabase = function(name, version, description, size) {};
HTMLImageElement.prototype.complete;
/**
+ * @type {string}
+ * @see http://www.whatwg.org/specs/web-apps/current-work/multipage/embedded-content-1.html#attr-img-crossorigin
+ */
+HTMLImageElement.prototype.crossOrigin;
+
+/**
* The postMessage method (as defined by HTML5 spec and implemented in FF3).
* @param {*} message
* @param {string|Array} targetOrigin The target origin in the 2-argument
|
Adds the HTMLImageElement 'crossOrigin' property to the externs.
This is a new tag being added to the spec to help tighten security regarding
handling of cross-domain resources. More information here:
<URL>
Revision created by MOE tool push_codebase.
MOE_MIGRATION=<I>
git-svn-id: <URL>
|
diff --git a/pep/project.py b/pep/project.py
index <HASH>..<HASH> 100644
--- a/pep/project.py
+++ b/pep/project.py
@@ -1119,8 +1119,9 @@ def check_sample_sheet(sample_file, dtype=str):
missing = set(req) - set(df.columns)
if len(missing) != 0:
raise ValueError(
- "Annotation sheet ('{}') is missing column(s): {}; has: {}".
- format(sample_file, missing, df.columns))
+ "Annotation sheet ('{}') is missing column(s):\n{}\nIt has: {}".
+ format(sample_file, "\n".join(missing),
+ ", ".join(list(df.columns))))
return df
|
better messaging for case of insufficient columns in annotations sheet
|
diff --git a/pybomb/clients/base_client.py b/pybomb/clients/base_client.py
index <HASH>..<HASH> 100644
--- a/pybomb/clients/base_client.py
+++ b/pybomb/clients/base_client.py
@@ -99,7 +99,7 @@ class BaseClient(object):
"""
return ','.join(
['{0}:{1}'.format(key, value) for
- key, value in filter_by.iteritems() if value is not None]
+ key, value in filter_by.items() if value is not None]
)
def _query(self, params):
@@ -133,7 +133,7 @@ class BaseClient(object):
try:
response.raise_for_status()
except requests.exceptions.HTTPError as http_error:
- raise pybomb.exceptions.BadRequestException(http_error.message)
+ raise pybomb.exceptions.BadRequestException(str(http_error))
response_data = response.json()
if response_data['status_code'] != self.RESPONSE_STATUS_OK:
|
Fix base_client to work with python 3
|
diff --git a/hangups/__main__.py b/hangups/__main__.py
index <HASH>..<HASH> 100644
--- a/hangups/__main__.py
+++ b/hangups/__main__.py
@@ -270,6 +270,9 @@ class ConversationWidget(urwid.WidgetWrap):
def _on_return(self, text):
"""Called when the user presses return on the send message widget."""
+ # Ignore if the user hasn't typed a message.
+ if len(text) == 0:
+ return
# XXX: Exception handling here is still a bit broken. Uncaught
# exceptions in _on_message_sent will only be logged.
self._conversation.send_message(text).add_done_callback(
diff --git a/hangups/client.py b/hangups/client.py
index <HASH>..<HASH> 100644
--- a/hangups/client.py
+++ b/hangups/client.py
@@ -189,6 +189,8 @@ class Conversation(object):
def send_message(self, text):
"""Send a message to this conversation.
+ text may not be empty.
+
Raises hangups.NetworkError if the message can not be sent.
"""
yield self._client.sendchatmessage(self._id, text)
@@ -763,6 +765,8 @@ class Client(object):
is_underlined=False):
"""Send a chat message to a conversation.
+ message may not be empty.
+
Raises hangups.NetworkError if the message can not be sent.
"""
client_generated_id = random.randint(0, 2**32)
|
Don't try to send empty chat messages
|
diff --git a/pythran/unparse.py b/pythran/unparse.py
index <HASH>..<HASH> 100644
--- a/pythran/unparse.py
+++ b/pythran/unparse.py
@@ -482,7 +482,7 @@ class Unparser:
binop = {"Add": "+", "Sub": "-", "Mult": "*", "Div": "/", "Mod": "%",
"LShift": "<<", "RShift": ">>", "BitOr": "|", "BitXor": "^",
- "BitAnd": "&", "FloorDiv": "//", "Pow": "**"}
+ "BitAnd": "&", "FloorDiv": "//", "Pow": "**", "MatMult": "@"}
def _BinOp(self, t):
self.write("(")
|
Add support for MatMult in unparse
|
diff --git a/pyfolio/timeseries.py b/pyfolio/timeseries.py
index <HASH>..<HASH> 100644
--- a/pyfolio/timeseries.py
+++ b/pyfolio/timeseries.py
@@ -551,7 +551,7 @@ def rolling_regression(returns, factor_returns,
rolling_window=APPROX_BDAYS_PER_MONTH * 6,
nan_threshold=0.1):
"""
- Computes rolling single factor betas using a multivariate linear regression
+ Computes rolling factor betas using a multivariate linear regression
(separate linear regressions is problematic because the factors may be
confounded).
|
DOC: cleared up confusing regression terminology
|
diff --git a/lib/coach/handler.rb b/lib/coach/handler.rb
index <HASH>..<HASH> 100644
--- a/lib/coach/handler.rb
+++ b/lib/coach/handler.rb
@@ -93,7 +93,11 @@ module Coach
end
def middleware
- @middleware ||= ActiveSupport::Dependencies.constantize(name)
+ @middleware ||= if ActiveSupport::Dependencies.respond_to?(:constantize)
+ ActiveSupport::Dependencies.constantize(name)
+ else
+ name.constantize
+ end
end
# Remove middleware that have been included multiple times with the same
|
Remove reference to ActiveSupport::Dependencies
This is removed in rails 7 due to the classic autoloader being removed -
see <URL>
|
diff --git a/client_test.go b/client_test.go
index <HASH>..<HASH> 100644
--- a/client_test.go
+++ b/client_test.go
@@ -169,7 +169,7 @@ func TestPipelineClientIssue832(t *testing.T) {
}()
select {
- case <-time.After(time.Second):
+ case <-time.After(time.Second * 2):
t.Fatal("PipelineClient did not restart worker")
case <-done:
}
@@ -2580,7 +2580,7 @@ func TestHostClientMaxConnWaitTimeoutSuccess(t *testing.T) {
return ln.Dial()
},
MaxConns: 1,
- MaxConnWaitTimeout: 200 * time.Millisecond,
+ MaxConnWaitTimeout: time.Second,
}
for i := 0; i < 5; i++ {
@@ -2618,7 +2618,7 @@ func TestHostClientMaxConnWaitTimeoutSuccess(t *testing.T) {
}
select {
case <-serverStopCh:
- case <-time.After(time.Second):
+ case <-time.After(time.Second * 5):
t.Fatalf("timeout")
}
|
Increase timeouts for Windows github actions
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -27,9 +27,9 @@ setup_options = dict(
# For these actions, NumPy is not required. We want them to succeed without,
# for example when pip is used to install seqlearn without NumPy present.
-NO_NUMPY_ACTIONS = ('egg_info', '--version', 'clean')
-if len(sys.argv) < 2 or (not sys.argv[1:].startswith('--help')
- and sys.argv[1] not in NO_NUMPY_ACTIONS):
+NO_NUMPY_ACTIONS = ('--help-commands', 'egg_info', '--version', 'clean')
+if not (len(sys.argv) >= 2 and ('--help' in sys.argv[1:]
+ or sys.argv[1] in NO_NUMPY_ACTIONS)):
import numpy
setup_options['include_dirs'] = [numpy.get_include()]
|
FIX setup.py numpy-awareness
XXX [@larsmans]: I screwed up this commit by @kmike,
so cherry-picking it again.
|
diff --git a/PAM.py b/PAM.py
index <HASH>..<HASH> 100644
--- a/PAM.py
+++ b/PAM.py
@@ -163,6 +163,7 @@ PAM_TTY = 3
PAM_USER = 2
PAM_USER_PROMPT = 9
PAM_USER_UNKNOWN = 10
+PAM_XDISPLAY = 11
class error(Exception): # noqa: N801
@@ -282,7 +283,7 @@ class pam(object): # noqa: N801
self.user = item
elif item_type == PAM_SERVICE:
self.service = item
- elif item_type not in (PAM_TTY,):
+ elif item_type not in (PAM_TTY, PAM_XDISPLAY):
raise TypeError("bad parameter")
item = c_char_p(self.__securestring(item))
retval = pam_set_item(self.pamh, int(item_type), cast(item, c_void_p))
diff --git a/pam.py b/pam.py
index <HASH>..<HASH> 100644
--- a/pam.py
+++ b/pam.py
@@ -96,6 +96,7 @@ class pam():
# set the TTY, needed when pam_securetty is used and the username root is used
p.set_item(PAM.PAM_TTY, ctty)
+ p.set_item(PAM.PAM_XDISPLAY, ctty)
try:
p.authenticate()
p.acct_mgmt()
|
Add PAM_XDISPLAY
|
diff --git a/shared/container.go b/shared/container.go
index <HASH>..<HASH> 100644
--- a/shared/container.go
+++ b/shared/container.go
@@ -133,11 +133,10 @@ func IsDeviceID(value string) error {
return nil
}
-// IsRootDiskDevice returns true if the given device representation is
-// configured as root disk for a container. It typically get passed a specific
-// entry of api.Container.Devices.
+// IsRootDiskDevice returns true if the given device representation is configured as root disk for
+// a container. It typically get passed a specific entry of api.Container.Devices.
func IsRootDiskDevice(device map[string]string) bool {
- if device["type"] == "disk" && device["path"] == "/" && device["source"] == "" {
+ if device["type"] == "disk" && device["path"] == "/" && device["source"] == "" && device["pool"] != "" {
return true
}
|
shared/container: Updates IsRootDiskDevice to use same definition of rootfs as container_lxc
|
diff --git a/spec/unit/property_spec.rb b/spec/unit/property_spec.rb
index <HASH>..<HASH> 100755
--- a/spec/unit/property_spec.rb
+++ b/spec/unit/property_spec.rb
@@ -141,17 +141,18 @@ describe Puppet::Property do
end
describe "when shadowing metaparameters" do
- before :each do
- @shadow_class = Class.new(Puppet::Property) do
+ let :shadow_class do
+ shadow_class = Class.new(Puppet::Property) do
@name = :alias
end
- @shadow_class.initvars
+ shadow_class.initvars
+ shadow_class
end
it "should create an instance of the metaparameter at initialization" do
Puppet::Type.metaparamclass(:alias).expects(:new).with(:resource => resource)
- @shadow_class.new :resource => resource
+ shadow_class.new :resource => resource
end
it "should munge values using the shadow's munge method" do
@@ -160,7 +161,7 @@ describe Puppet::Property do
shadow.expects(:munge).with "foo"
- property = @shadow_class.new :resource => resource
+ property = shadow_class.new :resource => resource
property.munge("foo")
end
end
|
Property Spec cleanup: last let method extraction.
This helps eliminate member variables in tests, making it easier to see where
state is or isn't shared between tests.
|
diff --git a/src/main/java/com/github/underscore/U.java b/src/main/java/com/github/underscore/U.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/github/underscore/U.java
+++ b/src/main/java/com/github/underscore/U.java
@@ -3149,6 +3149,10 @@ public class U<T> {
return reference;
}
+ public static boolean nonNull(Object obj) {
+ return obj != null;
+ }
+
public static <T> T defaultTo(T value, T defaultValue) {
if (value == null) {
return defaultValue;
diff --git a/src/test/java/com/github/underscore/UnderscoreTest.java b/src/test/java/com/github/underscore/UnderscoreTest.java
index <HASH>..<HASH> 100644
--- a/src/test/java/com/github/underscore/UnderscoreTest.java
+++ b/src/test/java/com/github/underscore/UnderscoreTest.java
@@ -448,6 +448,12 @@ _.elementAtOrNull(arr, 3) // => null
}
@Test
+ public void nonNull() {
+ assertFalse(U.nonNull(null));
+ assertTrue(U.nonNull(""));
+ }
+
+ @Test
public void defaultTo() {
assertNull(U.defaultTo(null, null));
}
|
Add support for U.nonNull(object).
|
diff --git a/packages/openneuro-server/graphql/schema.js b/packages/openneuro-server/graphql/schema.js
index <HASH>..<HASH> 100644
--- a/packages/openneuro-server/graphql/schema.js
+++ b/packages/openneuro-server/graphql/schema.js
@@ -149,7 +149,7 @@ const typeDefs = `
# File tree
input FileTree {
name: ID! # directory name (or empty string for root)
- path: String! # path to file
+ path: String # path to file
files: [Upload!] # files within the directory
directories: [FileTree] # directories within the directory
}
|
update schema to account for client FileTree type
|
diff --git a/cpu/facts/facts.go b/cpu/facts/facts.go
index <HASH>..<HASH> 100644
--- a/cpu/facts/facts.go
+++ b/cpu/facts/facts.go
@@ -257,7 +257,8 @@ func (prof *Profiler) Get() (facts *Facts, err error) {
}
continue
}
- if v == 'b' { // bogomips
+ // also check 2nd name pos for o as some output also have a bugs line.
+ if v == 'b' && prof.Val[1] == 'o' { // bogomips
f, err := strconv.ParseFloat(string(prof.Val[nameLen:]), 32)
if err != nil {
return nil, &joe.ParseError{Info: string(prof.Val[:nameLen]), Err: err}
|
add check of 2nd char in name for bogomips as some systems also have a "bugs" line
|
diff --git a/ipyrad/assemble/util.py b/ipyrad/assemble/util.py
index <HASH>..<HASH> 100644
--- a/ipyrad/assemble/util.py
+++ b/ipyrad/assemble/util.py
@@ -244,6 +244,10 @@ def merge_pairs(data, files_to_merge, merged_file, merge):
raise IPyradWarningExit(" Attempting to merge file that "\
"doesn't exist - {}.".format(f))
+ ## If it already exists, clean up the old merged file
+ if os.path.exists(merged_file):
+ os.remove(merged_file)
+
## if merge then catch nonmerged in a separate file
if merge:
nonmerged1 = tempfile.NamedTemporaryFile(mode='wb',
|
Fixed a nasty bug in merge_pairs that resulted in constant reconcatenation to the merged outfile
|
diff --git a/gears/asset_attributes.py b/gears/asset_attributes.py
index <HASH>..<HASH> 100644
--- a/gears/asset_attributes.py
+++ b/gears/asset_attributes.py
@@ -19,8 +19,8 @@ class AssetAttributes(object):
@cached_property
def path_without_extensions(self):
- if self.extensions:
- return self.path[:-len(''.join(self.extensions))]
+ if self.suffix:
+ return self.path[:-len(''.join(self.suffix))]
return self.path
@cached_property
diff --git a/tests/test_asset_attributes.py b/tests/test_asset_attributes.py
index <HASH>..<HASH> 100644
--- a/tests/test_asset_attributes.py
+++ b/tests/test_asset_attributes.py
@@ -36,6 +36,9 @@ class AssetAttributesTests(TestCase):
check('js/readme', 'js/readme')
check('js/app.min.js.coffee', 'js/app')
+ self.environment.mimetypes.register('.js', 'application/javascript')
+ check('js/app.min.js.coffee', 'js/app.min')
+
def test_search_paths(self):
def check(path, expected_result):
|
path_without_extensions removes only suffix now
Earlier it removed all extensions from asset path, so result for
'jquery' and 'jquery.min' was the same.
|
diff --git a/system/Commands/Utilities/Routes.php b/system/Commands/Utilities/Routes.php
index <HASH>..<HASH> 100644
--- a/system/Commands/Utilities/Routes.php
+++ b/system/Commands/Utilities/Routes.php
@@ -97,7 +97,7 @@ class Routes extends BaseCommand
$tbody[] = [
strtoupper($method),
$route,
- is_string($handler) ? $handler : 'Closure',
+ is_string($handler) ? $handler : '(Closure)',
];
}
}
diff --git a/tests/system/Commands/CommandTest.php b/tests/system/Commands/CommandTest.php
index <HASH>..<HASH> 100644
--- a/tests/system/Commands/CommandTest.php
+++ b/tests/system/Commands/CommandTest.php
@@ -110,7 +110,7 @@ final class CommandTest extends CIUnitTestCase
{
command('routes');
- $this->assertStringContainsString('| Closure', $this->getBuffer());
+ $this->assertStringContainsString('| (Closure)', $this->getBuffer());
$this->assertStringContainsString('| Route', $this->getBuffer());
$this->assertStringContainsString('| testing', $this->getBuffer());
$this->assertStringContainsString('\\TestController::index', $this->getBuffer());
|
Feature: "spark routes" closure. Decoration.
|
diff --git a/chef/lib/chef/shell_out.rb b/chef/lib/chef/shell_out.rb
index <HASH>..<HASH> 100644
--- a/chef/lib/chef/shell_out.rb
+++ b/chef/lib/chef/shell_out.rb
@@ -288,6 +288,10 @@ class Chef
File.umask(umask) if umask
end
+ def set_cwd
+ Dir.chdir(cwd) if cwd
+ end
+
def initialize_ipc
@stdout_pipe, @stderr_pipe, @process_status_pipe = IO.pipe, IO.pipe, IO.pipe
@process_status_pipe.last.fcntl(Fcntl::F_SETFD, Fcntl::FD_CLOEXEC)
@@ -381,6 +385,7 @@ class Chef
set_group
set_environment
set_umask
+ set_cwd
begin
command.kind_of?(Array) ? exec(*command) : exec(command)
|
CHEF-<I>: chdir to :cwd arg before running a command
|
diff --git a/pyinotify.py b/pyinotify.py
index <HASH>..<HASH> 100755
--- a/pyinotify.py
+++ b/pyinotify.py
@@ -711,6 +711,12 @@ class _SysProcessEvent(_ProcessEvent):
# to provide as additional information to the IN_MOVED_TO event
# the original pathname of the moved file/directory.
to_append['src_pathname'] = mv_[0]
+ elif raw_event.mask & IN_ISDIR and watch_.auto_add:
+ # We got a diretory that's "moved in" from an unknown source and
+ # auto_add is enabled. Manually add watches to the inner subtrees.
+ self._watch_manager.add_watch(dst_path, watch_.mask,
+ proc_fun=watch_.proc_fun,
+ rec=True, auto_add=True)
return self.process_default(raw_event, to_append)
def process_IN_MOVE_SELF(self, raw_event):
@@ -731,7 +737,7 @@ class _SysProcessEvent(_ProcessEvent):
if mv_:
dest_path = mv_[0]
watch_.path = dest_path
- # The next loop renames all watches.
+ # The next loop renames all watches with src_path as base path.
# It seems that IN_MOVE_SELF does not provide IN_ISDIR information
# therefore the next loop is iterated even if raw_event is a file.
for w in self._watch_manager.watches.itervalues():
|
Automatically watch a non watched tree moved to a watched directory
with flag auto_add activated (contributed by John Feuerstein
<EMAIL>).
|
diff --git a/emoji/urls.py b/emoji/urls.py
index <HASH>..<HASH> 100644
--- a/emoji/urls.py
+++ b/emoji/urls.py
@@ -1,7 +1,7 @@
-from django.conf.urls import patterns, url
+from django.conf.urls import url
from .views import EmojiJSONListView
-urlpatterns = patterns('',
+urlpatterns = [
url(r'^all.json$', EmojiJSONListView.as_view(), name='list.json'),
-)
+]
|
Start getting ready for <I>
|
diff --git a/helpers/array.php b/helpers/array.php
index <HASH>..<HASH> 100644
--- a/helpers/array.php
+++ b/helpers/array.php
@@ -89,7 +89,7 @@ if (!function_exists('in_array_multi')) {
if (!function_exists('arrayExtractProperty')) {
function arrayExtractProperty(array $aInput, $sProperty)
{
- return ArrayHelper::arrayExtractProperty($aInput, $sProperty);
+ return ArrayHelper::extract($aInput, $sProperty);
}
}
diff --git a/src/Common/Helper/ArrayHelper.php b/src/Common/Helper/ArrayHelper.php
index <HASH>..<HASH> 100644
--- a/src/Common/Helper/ArrayHelper.php
+++ b/src/Common/Helper/ArrayHelper.php
@@ -277,7 +277,7 @@ class ArrayHelper
*
* @return array
*/
- public static function arrayExtractProperty(array $aInput, $sProperty)
+ public static function extract(array $aInput, $sProperty)
{
$aOutput = [];
foreach ($aInput as $mItem) {
|
Renamed arrayExtractProperty method
|
diff --git a/modules/utils/DateTime.js b/modules/utils/DateTime.js
index <HASH>..<HASH> 100644
--- a/modules/utils/DateTime.js
+++ b/modules/utils/DateTime.js
@@ -568,14 +568,13 @@ DateTime.createFromTime = function(time, guessDate) {
}
}
- // PM: after 12 PM
- else if ( ns >= 43200 ) {
- // Assume early morning times (before 4 AM) are next day
- if ( ts <= 14400 ) {
+ // PM: after 4 PM
+ else if ( ns >= 57600 ) {
+ // Assume early morning times (before 8 AM) are next day
+ if ( ts <= 28800 ) {
delta = +1;
}
}
-
}
// Create the DateTime
|
DateTime: createFromTime guessDate
increase time ranges to guess next day
|
diff --git a/session/manager.go b/session/manager.go
index <HASH>..<HASH> 100644
--- a/session/manager.go
+++ b/session/manager.go
@@ -102,6 +102,11 @@ func (bw *bufferedResponseWriter) Flush() {
}
}
+func (bw *bufferedResponseWriter) Hijack() (net.Conn, *bufio.ReadWriter, error) {
+ hj := bw.ResponseWriter.(http.Hijacker)
+ return hj.Hijack()
+}
+
func defaultErrorFunc(w http.ResponseWriter, r *http.Request, err error) {
log.Output(2, err.Error())
http.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)
|
implement Hijack interface for bufferedResponseWriter
|
diff --git a/mergexml.js b/mergexml.js
index <HASH>..<HASH> 100644
--- a/mergexml.js
+++ b/mergexml.js
@@ -4,7 +4,7 @@
*
* @package MergeXML
* @author Vallo Reima
- * @copyright (C)2014
+ * @copyright (C)2014-2016
*/
/**
|
Update mergexml.js
|
diff --git a/lib/github/kv.rb b/lib/github/kv.rb
index <HASH>..<HASH> 100644
--- a/lib/github/kv.rb
+++ b/lib/github/kv.rb
@@ -136,7 +136,7 @@ module GitHub
validate_expires(expires) if expires
rows = kvs.map { |key, value|
- [key, value, GitHub::SQL::NOW, GitHub::SQL::NOW, expires || GitHub::SQL::NULL]
+ [key, GitHub::SQL::BINARY(value), GitHub::SQL::NOW, GitHub::SQL::NOW, expires || GitHub::SQL::NULL]
}
encapsulate_error do
@@ -225,7 +225,7 @@ module GitHub
DELETE FROM key_values WHERE `key` = :key AND expires_at <= NOW()
SQL
- sql = GitHub::SQL.run(<<-SQL, :key => key, :value => value, :expires => expires || GitHub::SQL::NULL, :connection => connection)
+ sql = GitHub::SQL.run(<<-SQL, :key => key, :value => GitHub::SQL::BINARY(value), :expires => expires || GitHub::SQL::NULL, :connection => connection)
INSERT IGNORE INTO key_values (`key`, value, created_at, updated_at, expires_at)
VALUES (:key, :value, NOW(), NOW(), :expires)
SQL
|
Use binary escaping for value
Since the `value` is a binary blob in the MySQL database, we also need to ensure we escape it as binary. This prevent potential query warnings (or errors if MySQL is configured that way) for when 4 byte UTF-8 characters get sent to a binary field but with string escaping.
|
diff --git a/wallace/sources.py b/wallace/sources.py
index <HASH>..<HASH> 100644
--- a/wallace/sources.py
+++ b/wallace/sources.py
@@ -9,14 +9,14 @@ class Source(Node):
uuid = Column(String(32), ForeignKey("node.uuid"), primary_key=True)
- def create_information(self, what=None, to_whom=None):
+ def create_information(self):
"""Generate new information."""
raise NotImplementedError(
"You need to overwrite the default create_information.")
def transmit(self, what=None, to_whom=None):
- self.create_information(what=what, to_whom=to_whom)
- super(Source, self).transmit(to_whom=to_whom, what=what)
+ info = self.create_information()
+ super(Source, self).transmit(to_whom=to_whom, what=info)
class RandomBinaryStringSource(Source):
@@ -26,11 +26,12 @@ class RandomBinaryStringSource(Source):
__mapper_args__ = {"polymorphic_identity": "random_binary_string_source"}
- def create_information(self, what=None, to_whom=None):
- Info(
+ def create_information(self):
+ info = Info(
origin=self,
origin_uuid=self.uuid,
contents=self._binary_string())
+ return info
def _binary_string(self):
return "".join([str(random.randint(0, 1)) for i in range(2)])
|
Fix bug that arose through grammar tweaking
|
diff --git a/uportal-war/src/main/java/org/jasig/portal/security/provider/PersonImpl.java b/uportal-war/src/main/java/org/jasig/portal/security/provider/PersonImpl.java
index <HASH>..<HASH> 100644
--- a/uportal-war/src/main/java/org/jasig/portal/security/provider/PersonImpl.java
+++ b/uportal-war/src/main/java/org/jasig/portal/security/provider/PersonImpl.java
@@ -249,7 +249,7 @@ public class PersonImpl implements IPerson {
public boolean isGuest() {
boolean isGuest = false; // default
String userName = (String) getAttribute(IPerson.USERNAME);
- if (PersonFactory.GUEST_USERNAME.equals(userName) &&
+ if (PersonFactory.GUEST_USERNAME.equalsIgnoreCase(userName) &&
(m_securityContext == null || !m_securityContext.isAuthenticated())) {
isGuest = true;
}
|
UP-<I>: Perform the guest user name comparison in a case-insensitive way; some LDAP setups (MS AD) have a 'Guest' user account that is making the first letter uppercase and therefore generating a bad result from this method
|
diff --git a/syntax/filetests_test.go b/syntax/filetests_test.go
index <HASH>..<HASH> 100644
--- a/syntax/filetests_test.go
+++ b/syntax/filetests_test.go
@@ -1637,6 +1637,13 @@ var fileTests = []testCase{
},
},
{
+ Strs: []string{`${foo[@]}`},
+ bash: &ParamExp{
+ Param: lit("foo"),
+ Ind: &Index{Expr: litWord("@")},
+ },
+ },
+ {
Strs: []string{`${foo[*]-etc}`},
bash: &ParamExp{
Param: lit("foo"),
diff --git a/syntax/parser.go b/syntax/parser.go
index <HASH>..<HASH> 100644
--- a/syntax/parser.go
+++ b/syntax/parser.go
@@ -949,8 +949,12 @@ func (p *parser) paramExp() *ParamExp {
}
lpos := p.pos
p.quote = paramExpInd
- if p.next(); p.tok == star {
+ p.next()
+ switch p.tok {
+ case star:
p.tok, p.val = _LitWord, "*"
+ case at:
+ p.tok, p.val = _LitWord, "@"
}
pe.Ind = &Index{
Expr: p.arithmExpr(leftBrack, lpos, 0, false, false),
|
syntax: parse ${foo[@]} properly again
This is a regression from <I>d<I>, which introduced support for
${foo@bar}. '@' is now a token in this context, so make sure it's
handled separately like '*'.
|
diff --git a/src/main/java/org/dasein/cloud/google/capabilities/GCEFirewallCapabilities.java b/src/main/java/org/dasein/cloud/google/capabilities/GCEFirewallCapabilities.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/dasein/cloud/google/capabilities/GCEFirewallCapabilities.java
+++ b/src/main/java/org/dasein/cloud/google/capabilities/GCEFirewallCapabilities.java
@@ -114,14 +114,4 @@ public class GCEFirewallCapabilities extends AbstractCapabilities<Google> implem
public boolean supportsFirewallDeletion() throws CloudException, InternalException {
return false;
}
-
- private static volatile Iterable<Protocol> allProtocolTypes;
-
- @Override
- public Iterable<Protocol> listSupportedProtocols( boolean inVlan ) throws InternalException, CloudException {
- if( allProtocolTypes == null ) {
- allProtocolTypes = Collections.unmodifiableList(Arrays.asList(Protocol.UDP, Protocol.TCP, Protocol.ICMP));
- }
- return allProtocolTypes;
- }
}
|
Not ready for this one until core updates...
Revert "Cope with new core method listSupportedProtocols"
This reverts commit <I>c<I>e<I>b<I>a<I>efbf<I>fa1cf<I>.
|
diff --git a/unleash/unleash.py b/unleash/unleash.py
index <HASH>..<HASH> 100644
--- a/unleash/unleash.py
+++ b/unleash/unleash.py
@@ -69,14 +69,6 @@ class Unleash(object):
def __init__(self, plugins=[]):
self.plugins = plugins
- def set_global_opts(self, root, debug=False, opts=None):
- self.opts = opts or {}
- self.root = root
- self.debug = debug
-
- self.repo = Repo(root)
- self.gitconfig = self.repo.get_config_stack()
-
def create_release(self, ref):
try:
opts = self.opts
@@ -154,6 +146,14 @@ class Unleash(object):
def run_user_shell(self, **kwargs):
return subprocess.call(os.environ['SHELL'], env=os.environ, **kwargs)
+ def set_global_opts(self, root, debug=False, opts=None):
+ self.opts = opts or {}
+ self.root = root
+ self.debug = debug
+
+ self.repo = Repo(root)
+ self.gitconfig = self.repo.get_config_stack()
+
def ____():
for obj in objects_to_add:
|
Moved set_global_opts function.
|
diff --git a/java/src/main/java/com/mapd/parser/server/CalciteDirect.java b/java/src/main/java/com/mapd/parser/server/CalciteDirect.java
index <HASH>..<HASH> 100644
--- a/java/src/main/java/com/mapd/parser/server/CalciteDirect.java
+++ b/java/src/main/java/com/mapd/parser/server/CalciteDirect.java
@@ -127,6 +127,10 @@ final static Logger MAPDLOGGER = LoggerFactory.getLogger(CalciteDirect.class);
}
String relAlgebra;
try {
+ if (Thread.currentThread().getContextClassLoader() == null) {
+ ClassLoader cl = ClassLoader.getSystemClassLoader();
+ Thread.currentThread().setContextClassLoader(cl);
+ }
relAlgebra = parser.getRelAlgebra(sqlText, legacySyntax, mapDUser);
MAPDLOGGER.debug("After get relalgebra");
} catch (SqlParseException ex) {
|
Set context class loader in CalciteDirect
Calcite needs it when running through JNI when serializing RexSubQuery nodes
because of Janino.
|
diff --git a/lice/core.py b/lice/core.py
index <HASH>..<HASH> 100644
--- a/lice/core.py
+++ b/lice/core.py
@@ -66,6 +66,7 @@ LANGS = {
"php": "c",
"pl": "perl",
"py": "unix",
+ "ps": "powershell",
"rb": "ruby",
"scm": "lisp",
"sh": "unix",
@@ -84,6 +85,7 @@ LANG_CMT = {
"lua": [u'--[[', u'', u'--]]'],
"ml": [u'(*', u'', u'*)'],
"perl": [u'=item', u'', u'=cut'],
+ "powershell": [u'<#', u'#', u'#>'],
"ruby": [u'=begin', u'', u'=end'],
"text": [u'', u'', u''],
"unix": [u'', u'#', u''],
|
Add PowerShell language (#<I>)
PowerShell is my day to day language and being able to automatically add the license to the header of every single file would be a god-send for me as it takes a lot of work. Please share the implementation with this pull request for all other users. Let's not restrict this to just the core compiled languages that we were all once used to.
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.