diff
stringlengths 65
26.7k
| message
stringlengths 7
9.92k
|
|---|---|
diff --git a/CanvasArray.php b/CanvasArray.php
index <HASH>..<HASH> 100644
--- a/CanvasArray.php
+++ b/CanvasArray.php
@@ -123,13 +123,9 @@ class CanvasArray implements Iterator, ArrayAccess {
**/
private function requestPageNumber($pageNumber, $forceRefresh = false) {
if (!isset($this->data[$this->pageNumberToKey($pageNumber)]) || $forceRefresh) {
- $page = $this->api->get(
- $this->pagination[CanvasPageLink::CURRENT]->getEndpoint(),
- array(
- CanvasPageLink::PARAM_PAGE_NUMBER => $pageNumber,
- CanvasPageLink::PARAM_PER_PAGE => $this->pagination[CanvasPageLink::CURRENT]->getPerPage()
- )
- );
+ $params = $this->pagination[CanvasPageLink::CURRENT]->getParams();
+ $params[CanvasPageLink::PARAM_PAGE_NUMBER] = $pageNumber;
+ $page = $this->api->get($this->pagination[CanvasPageLink::CURRENT]->getEndpoint(), $params);
$this->data = array_replace($this->data, $page->data);
return true;
}
|
Keep parameters
Turns out we were losing the parameters to the query when we were paging through the array. Now we're not.
|
diff --git a/lib/emitter.js b/lib/emitter.js
index <HASH>..<HASH> 100644
--- a/lib/emitter.js
+++ b/lib/emitter.js
@@ -12,6 +12,12 @@ try {
}
/**
+ * Module exports.
+ */
+
+module.exports = Emitter;
+
+/**
* Node-compatible `EventEmitter#removeListener`
*
* @api public
|
emitter: re-export `Emitter`
|
diff --git a/course/enrol.php b/course/enrol.php
index <HASH>..<HASH> 100644
--- a/course/enrol.php
+++ b/course/enrol.php
@@ -18,7 +18,8 @@
if (isguest()) {
add_to_log($course->id, "course", "guest", "view.php?id=$course->id", "$REMOTE_ADDR, $REMOTE_HOST");
- } else {
+ } else if (!record_exists("user_students", "userid", $USER->id, "course", $course->id)) {
+
if (! enrol_student($USER->id, $course->id)) {
error("An error occurred while trying to enrol you.");
}
|
Don't go through enrolment procedure if they are already enrolled.
|
diff --git a/bosh-dev/lib/bosh/dev/sandbox/services/director_service.rb b/bosh-dev/lib/bosh/dev/sandbox/services/director_service.rb
index <HASH>..<HASH> 100644
--- a/bosh-dev/lib/bosh/dev/sandbox/services/director_service.rb
+++ b/bosh-dev/lib/bosh/dev/sandbox/services/director_service.rb
@@ -78,10 +78,12 @@ module Bosh::Dev::Sandbox
@worker_processes.each(&:start)
start_time = Time.now
timeout = 60 * 5
- sleep 0.5 until resque_is_ready? do
+ until resque_is_ready? do
if (Time.now - start_time) > timeout
raise "Resque failed to start workers in #{timeout} seconds"
end
+
+ sleep 0.5
end
end
@@ -89,10 +91,12 @@ module Bosh::Dev::Sandbox
@logger.debug('Waiting for Resque queue to drain...')
start_time = Time.now
timeout = 60
- sleep 0.1 until resque_is_done? do
+ until resque_is_done? do
if (Time.now - start_time) > timeout
- @logger.err("Resque queue failed to drain in #{timeout} seconds")
+ raise "Resque queue failed to drain in #{timeout} seconds"
end
+
+ sleep 0.1
end
@logger.debug('Resque queue drained')
|
Fail if stopping workers fails within timeout
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -43,9 +43,10 @@ requirements = [
]
if not os.getenv("READTHEDOCS"):
- requirements.pop("xarray>=0.14.1")
- requirements.append("git+https://github.com/pydata/xarray@master#egg=xarray")
requirements.append("rtree>=0.9")
+else:
+ requirements.remove("xarray>=0.14.1")
+ requirements.append("git+https://github.com/pydata/xarray@master#egg=xarray")
setup_requirements = ["pytest-runner"]
|
Use remove and ensure that RtD uses xarray@master
|
diff --git a/test/browsers.spec.js b/test/browsers.spec.js
index <HASH>..<HASH> 100644
--- a/test/browsers.spec.js
+++ b/test/browsers.spec.js
@@ -29,7 +29,7 @@ function testFixture(title, browsers, expectThrows){
}
testFixture('Valid: ["last 2 versions"]', ['last 2 versions'], false);
-testFixture('Invalid: ["dummy"]', ['dummy'], true);
+testFixture('Invalid: ["dummy"]', ['dummy'], false);
testFixture('Invalid: []', [], true);
testFixture('Invalid: {}', {}, true);
testFixture('Invalid: 123', 123, true);
|
Updated a test for property `browsers`
`cssnext` used to check contents of "browsers", `postcss-preset-env` doesn't
|
diff --git a/pavement.py b/pavement.py
index <HASH>..<HASH> 100644
--- a/pavement.py
+++ b/pavement.py
@@ -58,4 +58,4 @@ def docs():
@task
def test():
- sh("nosetests --processes=8")
+ sh("nosetests")
|
with the increasing port numbers, multiple processes wind up deadlocking
|
diff --git a/lib/parse-args.js b/lib/parse-args.js
index <HASH>..<HASH> 100644
--- a/lib/parse-args.js
+++ b/lib/parse-args.js
@@ -61,7 +61,6 @@ function buildYargs (withCommands = false) {
type: 'boolean'
})
.option('temp-directory', {
- default: './coverage/tmp',
describe: 'directory V8 coverage data is written to and read from'
})
.option('resolve', {
@@ -85,6 +84,12 @@ function buildYargs (withCommands = false) {
.pkgConf('c8')
.config(config)
.demandCommand(1)
+ .check((argv) => {
+ if (!argv.tempDirectory) {
+ argv.tempDirectory = argv.reportsDir
+ }
+ return true
+ })
.epilog('visit https://git.io/vHysA for list of available reporters')
const checkCoverage = require('./commands/check-coverage')
|
feat!: default temp directory to report directory (#<I>)
BREAKING CHANGE: temp directory now defaults to setting for report directory
|
diff --git a/lib/mshoplib/src/MShop/Locale/Manager/Default.php b/lib/mshoplib/src/MShop/Locale/Manager/Default.php
index <HASH>..<HASH> 100644
--- a/lib/mshoplib/src/MShop/Locale/Manager/Default.php
+++ b/lib/mshoplib/src/MShop/Locale/Manager/Default.php
@@ -361,6 +361,7 @@ class MShop_Locale_Manager_Default
}
+ // Try to find the best matching locale
$search = $this->createSearch( $active );
$expr = array (
diff --git a/lib/mshoplib/src/MShop/Locale/Manager/Site/Default.php b/lib/mshoplib/src/MShop/Locale/Manager/Site/Default.php
index <HASH>..<HASH> 100644
--- a/lib/mshoplib/src/MShop/Locale/Manager/Site/Default.php
+++ b/lib/mshoplib/src/MShop/Locale/Manager/Site/Default.php
@@ -406,6 +406,8 @@ class MShop_Locale_Manager_Site_Default
throw new MShop_Locale_Exception( sprintf( 'Tree root with code "%1$s" in "%2$s" not found', 'default', 'locale.site.code' ) );
}
+ $this->_cache[ $item->getId() ] = $item;
+
return $item;
}
|
Minor improvement in locale managers
|
diff --git a/scripts/bcbio_setup_genome.py b/scripts/bcbio_setup_genome.py
index <HASH>..<HASH> 100755
--- a/scripts/bcbio_setup_genome.py
+++ b/scripts/bcbio_setup_genome.py
@@ -155,6 +155,7 @@ if __name__ == "__main__":
raise ValueError("--mirbase and --srna_gtf both need a value.")
env.hosts = ["localhost"]
+ os.environ["PATH"] += os.pathsep + os.path.dirname(sys.executable)
cbl = get_cloudbiolinux(REMOTES)
sys.path.insert(0, cbl["dir"])
genomemod = __import__("cloudbio.biodata", fromlist=["genomes"])
|
Allow bcbio_setup_genome to use bioconda installed binaries.
|
diff --git a/blueprints/ember-cli-chosen/index.js b/blueprints/ember-cli-chosen/index.js
index <HASH>..<HASH> 100644
--- a/blueprints/ember-cli-chosen/index.js
+++ b/blueprints/ember-cli-chosen/index.js
@@ -3,8 +3,6 @@ module.exports = {
description: 'Include "Chosen" bower package',
afterInstall: function(options) {
- return this.addBowerPackagesToProject([
- { name: 'chosen=https://github.com/harvesthq/chosen/releases/download/v1.3.0/chosen_v1.3.0.zip' }
- ]);
+ return this.addBowerPackageToProject('chosen', 'https://github.com/harvesthq/chosen/releases/download/v1.3.0/chosen_v1.3.0.zip');
}
};
|
Fixing issue with bower package resolution in blueprint
|
diff --git a/tests/test_bugzilla.py b/tests/test_bugzilla.py
index <HASH>..<HASH> 100644
--- a/tests/test_bugzilla.py
+++ b/tests/test_bugzilla.py
@@ -20,6 +20,7 @@ class TestBugzillaService(ServiceTest):
arbitrary_record = {
'component': 'Something',
'priority': 'urgent',
+ 'status': 'NEW',
'summary': 'This is the issue summary',
'id': 1234567,
}
@@ -40,6 +41,7 @@ class TestBugzillaService(ServiceTest):
'priority': issue.PRIORITY_MAP[arbitrary_record['priority']],
'annotations': arbitrary_extra['annotations'],
+ issue.STATUS: arbitrary_record['status'],
issue.URL: arbitrary_extra['url'],
issue.SUMMARY: arbitrary_record['summary'],
issue.BUG_ID: arbitrary_record['id']
|
Ooops, add status field to tests
|
diff --git a/src/Composer/Repository/Vcs/GitHubDriver.php b/src/Composer/Repository/Vcs/GitHubDriver.php
index <HASH>..<HASH> 100755
--- a/src/Composer/Repository/Vcs/GitHubDriver.php
+++ b/src/Composer/Repository/Vcs/GitHubDriver.php
@@ -76,7 +76,7 @@ class GitHubDriver extends VcsDriver
return $this->gitDriver->getUrl();
}
- return $this->url;
+ return 'https://github.com/'.$this->owner.'/'.$this->repository.'.git';
}
/**
|
Normalize github URLs generated by the GitHubDriver, fixes #<I>
|
diff --git a/src/Entity/AbstractEntity.php b/src/Entity/AbstractEntity.php
index <HASH>..<HASH> 100644
--- a/src/Entity/AbstractEntity.php
+++ b/src/Entity/AbstractEntity.php
@@ -441,10 +441,7 @@ abstract class AbstractEntity implements IEntity
}
}
- if (!$metadata->isValid($value)) {
- $class = get_class($this);
- throw new InvalidArgumentException("Value for {$class}::\${$name} property is invalid.");
- }
+ $this->validate($metadata, $name, $value);
$this->data[$name] = $value;
$this->modified[$name] = TRUE;
}
@@ -501,6 +498,22 @@ abstract class AbstractEntity implements IEntity
/**
+ * Validates the value.
+ * @param PropertyMetadata $metadata
+ * @param string $name
+ * @param mixed $value
+ * @throws InvalidArgumentException
+ */
+ protected function validate(PropertyMetadata $metadata, $name, & $value)
+ {
+ if (!$metadata->isValid($value)) {
+ $class = get_class($this);
+ throw new InvalidArgumentException("Value for {$class}::\${$name} property is invalid.");
+ }
+ }
+
+
+ /**
* @param PropertyMetadata $metadata
* @return IProperty $property
*/
|
entity: extracted value validation into own method
|
diff --git a/lib/upnp/control_point/device.rb b/lib/upnp/control_point/device.rb
index <HASH>..<HASH> 100644
--- a/lib/upnp/control_point/device.rb
+++ b/lib/upnp/control_point/device.rb
@@ -255,6 +255,9 @@ module UPnP
def extract_spec_version
"#{@description[:root][:specVersion][:major]}.#{@description[:root][:specVersion][:minor]}"
+ if @description[:root]
+ "#{@description[:root][:specVersion][:major]}.#{@description[:root][:specVersion][:minor]}"
+ end
end
def start_service_extraction
|
Fix for descriptions that don't have specVersion. Relates to gh-5.
|
diff --git a/python_modules/dagster/dagster_tests/general_tests/grpc_tests/test_watch_server.py b/python_modules/dagster/dagster_tests/general_tests/grpc_tests/test_watch_server.py
index <HASH>..<HASH> 100644
--- a/python_modules/dagster/dagster_tests/general_tests/grpc_tests/test_watch_server.py
+++ b/python_modules/dagster/dagster_tests/general_tests/grpc_tests/test_watch_server.py
@@ -2,6 +2,7 @@
import time
+import pytest
from dagster.grpc.client import DagsterGrpcClient
from dagster.grpc.server import open_server_process
from dagster.grpc.server_watcher import create_grpc_watch_thread
@@ -154,6 +155,7 @@ def test_grpc_watch_thread_server_error():
assert called["on_error"]
+@pytest.mark.skip
def test_grpc_watch_thread_server_complex_cycle():
# Server goes down, comes back up as the same server three times, then goes away and comes
# back as a new server
@@ -216,6 +218,7 @@ def test_grpc_watch_thread_server_complex_cycle():
assert events[-1] == "on_updated"
+@pytest.mark.skip
def test_grpc_watch_thread_server_complex_cycle_2():
# Server goes down, comes back up as the same server three times, then goes away and comes
# back as a new server
|
Mark complex cycle grpc server watch tests as skipped
Summary: Title
Test Plan: none
Reviewers: prha
Reviewed By: prha
Differential Revision: <URL>
|
diff --git a/src/main/java/com/voxeo/tropo/ActionResult.java b/src/main/java/com/voxeo/tropo/ActionResult.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/voxeo/tropo/ActionResult.java
+++ b/src/main/java/com/voxeo/tropo/ActionResult.java
@@ -24,6 +24,11 @@ public class ActionResult implements Serializable {
private String xml;
private Integer duration;
private String url;
+
+ /*Depend on upload status after recording completion. "uploadStatus": ["success"|"failed"|"unavailable�], will provide
+ information about the recording status
+ */
+ private String uploadStatus;
public String getName() {
return name;
@@ -91,6 +96,12 @@ public class ActionResult implements Serializable {
public void setUrl(String url) {
this.url = url;
}
+ public String getuploadStatus() {
+ return uploadStatus;
+ }
+ public void setuploadStatus(String uploadStatus) {
+ this.uploadStatus = uploadStatus;
+ }
@Override
public String toString(){
|
[CASE# <I>][Problem Description: During recording Upload failure over
HTTP, Application didn't get the correct result][Solution: If the upload
fails tropo will give a new field "uploadStatus":
["success"|"failed"|"unavailable"]
|
diff --git a/demo.js b/demo.js
index <HASH>..<HASH> 100644
--- a/demo.js
+++ b/demo.js
@@ -25,12 +25,10 @@ const memory = Memory({
start: VIDEO_ADDRESS_OFFSET, // -3280 00iii iiiii
end: VIDEO_ADDRESS_SIZE + VIDEO_ADDRESS_OFFSET, // 29524, end 11111 11111
},
- /* TODO
- input: {
- start: -3,
- end: -1,
+ chargen: {
+ start: -3281, // 0i111 11111,
+ end: -3281,
},
- */
}
});
@@ -48,6 +46,10 @@ memory.map.video.write = (address, value) => {
term.tc.refresh();
};
+memory.map.chargen.write = (address, value) => {
+ term.writeUChar(value);
+ // TODO: write to row,col from another memory address value (no trap needed). -3282, -3283? - for cursor
+};
const cpu = CPU({
memory: memory
@@ -76,6 +78,8 @@ var lines = [
'TAX',
+ 'STA -3281',
+
'HALT_Z'
];
|
Chargen - write trit-text character to terminal
|
diff --git a/ghettoq/backends/pyredis.py b/ghettoq/backends/pyredis.py
index <HASH>..<HASH> 100644
--- a/ghettoq/backends/pyredis.py
+++ b/ghettoq/backends/pyredis.py
@@ -1,6 +1,6 @@
from Queue import Empty
-from redis import Redis as Redis
+from redis import Redis
from ghettoq.backends.base import BaseBackend
DEFAULT_PORT = 6379
@@ -48,4 +48,7 @@ class RedisBackend(BaseBackend):
return item, dest
def purge(self, queue):
- return self.client.delete(queue)
+ size = self.client.llen(queue)
+ self.client.delete(queue)
+ return size
+
|
Redis: purge now returns number of messages deleted.
|
diff --git a/digitalocean/v2/digitalocean/client.go b/digitalocean/v2/digitalocean/client.go
index <HASH>..<HASH> 100644
--- a/digitalocean/v2/digitalocean/client.go
+++ b/digitalocean/v2/digitalocean/client.go
@@ -35,17 +35,19 @@ func (c *Client) loadResponse(path string, i interface{}) error {
return json.Unmarshal(b, &i)
}
+func New(token string) (*Client, error) {
+ if token == "" {
+ return nil, fmt.Errorf("token must be set")
+ }
+ return &Client{Client: &http.Client{Transport: &transport{apiToken: token}}}, nil
+}
+
func NewFromEnv() (*Client, error) {
- cl := &transport{apiToken: os.Getenv("DIGITAL_OCEAN_API_KEY")}
- if cl.apiToken == "" {
+ token := os.Getenv("DIGITAL_OCEAN_API_KEY")
+ if token == "" {
return nil, fmt.Errorf("DIGITAL_OCEAN_API_KEY must be set in env")
}
- return &Client{
- Client: &http.Client{
- Transport: cl,
- },
- },
- nil
+ return New(token)
}
type transport struct {
|
add constructor for initializing do client with a specific token
|
diff --git a/plucky/__init__.py b/plucky/__init__.py
index <HASH>..<HASH> 100644
--- a/plucky/__init__.py
+++ b/plucky/__init__.py
@@ -3,7 +3,7 @@ Plucking (deep) keys/paths safely from python collections has never been easier.
"""
__title__ = 'plucky'
-__version__ = '0.3.2'
+__version__ = '0.3.3'
__author__ = 'Radomir Stevanovic'
__author_email__ = 'radomir.stevanovic@gmail.com'
__copyright__ = 'Copyright 2014 Radomir Stevanovic'
|
bumped to <I>
|
diff --git a/tests/Phinx/Config/ConfigFileTest.php b/tests/Phinx/Config/ConfigFileTest.php
index <HASH>..<HASH> 100644
--- a/tests/Phinx/Config/ConfigFileTest.php
+++ b/tests/Phinx/Config/ConfigFileTest.php
@@ -37,7 +37,9 @@ class ConfigFileTest extends \PHPUnit_Framework_TestCase
public function testWorkingGetConfigFile($input, $dir, $expectedFile)
{
$foundPath = $this->runLocateFile($input, $dir);
- $this->assertEquals($foundPath, $this->baseDir . '/' . $dir . '/' . $expectedFile);
+ $expectedPath = $this->baseDir . DIRECTORY_SEPARATOR . $dir . DIRECTORY_SEPARATOR . $expectedFile;
+
+ $this->assertEquals($foundPath, $expectedPath);
}
/**
@@ -130,4 +132,4 @@ class VoidCommand extends AbstractCommand
return parent::locateConfigFile($input);
}
-}
\ No newline at end of file
+}
|
Use DIRECTORY_SEPARATOR in expected result
Who still uses Windows?
|
diff --git a/cgroups/fs/apply_raw.go b/cgroups/fs/apply_raw.go
index <HASH>..<HASH> 100644
--- a/cgroups/fs/apply_raw.go
+++ b/cgroups/fs/apply_raw.go
@@ -57,12 +57,13 @@ func GetStats(c *cgroups.Cgroup) (*cgroups.Stats, error) {
d, err := getCgroupData(c, 0)
if err != nil {
- return nil, err
+ return nil, fmt.Errorf("getting CgroupData %s", err)
}
- for _, sys := range subsystems {
- if err := sys.GetStats(d, stats); err != nil {
- return nil, err
+ for sysName, sys := range subsystems {
+ // Don't fail if a cgroup hierarchy was not found.
+ if err := sys.GetStats(d, stats); err != nil && err != cgroups.ErrNotFound {
+ return nil, fmt.Errorf("getting stats for system %q %s", sysName, err)
}
}
|
Don't fail getting stats of unknown hierarchies.
Docker-DCO-<I>-
|
diff --git a/core/client/Brocfile.js b/core/client/Brocfile.js
index <HASH>..<HASH> 100644
--- a/core/client/Brocfile.js
+++ b/core/client/Brocfile.js
@@ -29,7 +29,8 @@ app = new EmberApp({
source: './app/styles/app.css',
inputFile: 'app.css',
browsers: 'last 2 versions',
- sourcemap: !mythCompress,
+ // @TODO: enable sourcemaps for development without including them in the release
+ sourcemap: false,
compress: mythCompress,
outputFile: isProduction ? 'ghost.min.css' : 'ghost.css'
},
|
Temporarily disable sourcemaps
- Sourcemaps are adding ~<I>mb to the release zip, which is not cool
- Long term, we need to swap this out for a system that will let us do sourcemaps in dev, and
generate a separate non-minified css file without the sourcemap when doing a release
- Short term, I'm disabling sourcemaps & they'll need to be enabled when needed
|
diff --git a/lib/metasploit/model/version.rb b/lib/metasploit/model/version.rb
index <HASH>..<HASH> 100644
--- a/lib/metasploit/model/version.rb
+++ b/lib/metasploit/model/version.rb
@@ -8,6 +8,8 @@ module Metasploit
MINOR = 24
# The patch number, scoped to the {MINOR} version number.
PATCH = 1
+ # The prerelease version number, scoped to the {PATCH} version number.
+ PRERELEASE = 'metasploit-model-search-operator-and-operation-groups'
# The full version string, including the {MAJOR}, {MINOR}, {PATCH}, and optionally, the {PRERELEASE} in the
# {http://semver.org/spec/v2.0.0.html semantic versioning v2.0.0} format.
|
Change PRERELEASE for branch
MSP-<I>
|
diff --git a/test/mailinSpec.js b/test/mailinSpec.js
index <HASH>..<HASH> 100644
--- a/test/mailinSpec.js
+++ b/test/mailinSpec.js
@@ -104,6 +104,17 @@ describe('Mailin', function () {
// ]
}],
dkim: 'failed',
+ envelopeFrom: [{
+ address: "me@jokund.com",
+ name: ""
+ }],
+ envelopeTo: [{
+ address: "first@jokund.com",
+ name: ""
+ }, {
+ address: "second@jokund.com",
+ name: ""
+ }],
spf: 'failed',
spamScore: 3.3,
language: 'pidgin',
@@ -176,6 +187,17 @@ describe('Mailin', function () {
length: '28'
}],
dkim: 'failed',
+ envelopeFrom: [{
+ address: 'me@jokund.com',
+ name: ''
+ }],
+ envelopeTo: [{
+ address: 'first@jokund.com',
+ name: ''
+ }, {
+ address: 'second@jokund.com',
+ name: ''
+ }],
spf: 'failed',
spamScore: 3.3,
language: 'pidgin',
|
Added envelope to expected data in tests
|
diff --git a/src/mixins/trackHelper.js b/src/mixins/trackHelper.js
index <HASH>..<HASH> 100644
--- a/src/mixins/trackHelper.js
+++ b/src/mixins/trackHelper.js
@@ -15,6 +15,8 @@ export var getTrackCSS = function(spec) {
var trackWidth, trackHeight;
+ const trackChildren = (spec.slideCount + 2 * spec.slidesToShow);
+
if (!spec.vertical) {
if (spec.variableWidth) {
trackWidth = (spec.slideCount + 2*spec.slidesToShow) * spec.slideWidth;
|
Fixed trackChildren is not defined error.
|
diff --git a/stanza/pipeline/tokenize_processor.py b/stanza/pipeline/tokenize_processor.py
index <HASH>..<HASH> 100644
--- a/stanza/pipeline/tokenize_processor.py
+++ b/stanza/pipeline/tokenize_processor.py
@@ -5,7 +5,7 @@ Processor for performing tokenization
import io
import logging
-from stanza.models.tokenization.data import DataLoader
+from stanza.models.tokenization.data import DataLoader, NEWLINE_WHITESPACE_RE
from stanza.models.tokenization.trainer import Trainer
from stanza.models.tokenization.utils import output_predictions
from stanza.pipeline._constants import *
@@ -81,7 +81,7 @@ class TokenizeProcessor(UDProcessor):
# set up batches
if self.config.get('lang') == 'vi':
# special processing is due for Vietnamese
- text = '\n\n'.join([x for x in raw_text.split('\n\n')]).rstrip()
+ text = '\n\n'.join([x.rstrip() for x in NEWLINE_WHITESPACE_RE.split(raw_text)]).rstrip()
dummy_labels = '\n\n'.join(['0' * len(x) for x in text.split('\n\n')])
data = paras_to_chunks(text, dummy_labels)
batches = DataLoader(self.config, input_data=data, vocab=self.vocab, evaluation=True)
|
Fix inconsistency issue between vi and the rest of the languages on how consecutive newlines are handled (#<I>)
|
diff --git a/cslbot/commands/quote.py b/cslbot/commands/quote.py
index <HASH>..<HASH> 100644
--- a/cslbot/commands/quote.py
+++ b/cslbot/commands/quote.py
@@ -148,7 +148,10 @@ def cmd(send, msg, args):
else:
send("You aren't allowed to edit quotes. Please ask a bot admin to do it")
elif cmdargs.search:
- send(search_quote(session, cmdargs.offset, cmdargs.search))
+ if cmdargs.approve or cmdargs.nick:
+ send("Invalid option for --search")
+ else:
+ send(search_quote(session, cmdargs.offset, cmdargs.search))
else:
if msg.isdigit():
send(do_get_quote(session, int(msg)))
|
Don't silently eat --nick
|
diff --git a/service/gcs/bridge/bridge.go b/service/gcs/bridge/bridge.go
index <HASH>..<HASH> 100644
--- a/service/gcs/bridge/bridge.go
+++ b/service/gcs/bridge/bridge.go
@@ -41,6 +41,9 @@ var capabilities = prot.GcsCapabilities{
},
},
RuntimeOsType: prot.OsTypeLinux,
+ GuestDefinedCapabilities: prot.GcsGuestCapabilities{
+ SignalProcessSupported: true,
+ },
}
// UnknownMessage represents the default handler logic for an unmatched request
diff --git a/service/gcs/prot/protocol.go b/service/gcs/prot/protocol.go
index <HASH>..<HASH> 100644
--- a/service/gcs/prot/protocol.go
+++ b/service/gcs/prot/protocol.go
@@ -249,7 +249,14 @@ type GcsCapabilities struct {
// GuestDefinedCapabilities define any JSON object that will be directly
// passed to a client of the HCS. This can be useful to pass runtime
// specific capabilities not tied to the platform itself.
- GuestDefinedCapabilities interface{} `json:",omitempty"`
+ GuestDefinedCapabilities GcsGuestCapabilities `json:",omitempty"`
+}
+
+// GcsGuestCapabilities represents the customized guest capabilities supported
+// by this GCS.
+type GcsGuestCapabilities struct {
+ NamespaceAddRequestSupported bool `json:",omitempty"`
+ SignalProcessSupported bool `json:",omitempty"`
}
// MessageBase is the base type embedded in all messages sent from the HCS to
|
Return GuestDefinedCapabilities with SignalProcess support
|
diff --git a/SimpleAudioIndexer/__init__.py b/SimpleAudioIndexer/__init__.py
index <HASH>..<HASH> 100755
--- a/SimpleAudioIndexer/__init__.py
+++ b/SimpleAudioIndexer/__init__.py
@@ -629,7 +629,7 @@ class SimpleAudioIndexer(object):
seconds_passed = 0
for split_index, splitted_file_timestamp in enumerate(
self.__timestamps[timestamp_basename]):
- total_seconds = self.get_audio_duration_seconds(
+ total_seconds = self._get_audio_duration_seconds(
"{}/staging/{}".format(
self.src_dir,
staged_splitted_file_basenames[split_index]
|
Missed an additional underscore for list audio method
|
diff --git a/config/application.rb b/config/application.rb
index <HASH>..<HASH> 100644
--- a/config/application.rb
+++ b/config/application.rb
@@ -74,6 +74,8 @@ module Peoplefinder
# NOTE: may need to eager load paths instead if lib code is commonly called
config.autoload_paths << Rails.root.join('lib')
+ require Rails.root.join('lib', 'csv_publisher', 'user_behavior_report.rb')
+
config.active_record.sqlite3.represent_boolean_as_integer = true
end
|
CT-<I> Autoloading has changed - require module in lib directly (#<I>)
Generate report on production failed with
NameError (uninitialized constant Admin::ManagementController::CsvPublisher):
app/controllers/admin/management_controller.rb:7:in `generate_user_behavior_report'
|
diff --git a/AlphaTwirl/EventReader/ProgressMonitor.py b/AlphaTwirl/EventReader/ProgressMonitor.py
index <HASH>..<HASH> 100755
--- a/AlphaTwirl/EventReader/ProgressMonitor.py
+++ b/AlphaTwirl/EventReader/ProgressMonitor.py
@@ -55,12 +55,12 @@ class MPProgressMonitor(object):
def monitor(self):
if time.time() - self.lastTime < 0.1: return
self.lastTime = time.time()
- self.present()
+ self._present()
def last(self):
- self.present()
+ self._present()
- def present(self):
+ def _present(self):
while not self.queue.empty():
report = self.queue.get()
self._presentation.present(report)
|
rename present() _present(), indicating private
|
diff --git a/src/test/java/cleanzephyr/rubycollect4j/RubyHashTest.java b/src/test/java/cleanzephyr/rubycollect4j/RubyHashTest.java
index <HASH>..<HASH> 100644
--- a/src/test/java/cleanzephyr/rubycollect4j/RubyHashTest.java
+++ b/src/test/java/cleanzephyr/rubycollect4j/RubyHashTest.java
@@ -188,4 +188,14 @@ public class RubyHashTest {
assertEquals(ra(2, 4, 6), ints);
}
+ @Test
+ public void testEmptyʔ() {
+ rh = rh(1, 2, 3, 4, 5, 6);
+ assertFalse(rh.emptyʔ());
+ rh = rh();
+ assertTrue(rh.emptyʔ());
+ rh = rh(null, null);
+ assertFalse(rh.emptyʔ());
+ }
+
}
|
Add test for RubyHash::empty?
|
diff --git a/tests/appveyor_test_cases.py b/tests/appveyor_test_cases.py
index <HASH>..<HASH> 100644
--- a/tests/appveyor_test_cases.py
+++ b/tests/appveyor_test_cases.py
@@ -57,24 +57,6 @@ class TestAppMethods(unittest.TestCase):
app.close()
lackey.wait(0.9)
- def test_app_title(self):
- """
- App selected by title should capture existing window if open,
- including case-insensitive matches.
- """
- app = lackey.App("notepad.exe")
- app.open()
- lackey.wait(2)
- app2 = lackey.App("Notepad")
- app3 = lackey.App("notepad")
- lackey.wait(1)
-
- self.assertTrue(app2.isRunning())
- self.assertTrue(app3.isRunning())
- self.assertEqual(app2.getName(), app.getName())
- self.assertEqual(app3.getName(), app.getName())
- app.close()
-
class TestScreenMethods(unittest.TestCase):
def setUp(self):
self.primaryScreen = lackey.Screen(0)
|
Removed app title test in AppVeyor
Unsure why this doesn’t work in Appveyor, but it works locally so I’m
assuming it has something to do with the environment.
|
diff --git a/guava/src/com/google/common/collect/ComparisonChain.java b/guava/src/com/google/common/collect/ComparisonChain.java
index <HASH>..<HASH> 100644
--- a/guava/src/com/google/common/collect/ComparisonChain.java
+++ b/guava/src/com/google/common/collect/ComparisonChain.java
@@ -41,6 +41,9 @@ import javax.annotation.Nullable;
* nonzero</i> comparison result in the chain, or will be zero if every
* comparison result was zero.
*
+ * <p><b>Note:</b> {@code ComparisonChain} instances are <b>immutable</b>. For
+ * this utility to work correctly, calls must be chained as illustrated above.
+ *
* <p>Performance note: Even though the {@code ComparisonChain} caller always
* invokes its {@code compare} methods unconditionally, the {@code
* ComparisonChain} implementation stops calling its inputs' {@link
|
Warn about immutability.
-------------
Created by MOE: <URL>
|
diff --git a/src/utils/getPurchaseEstimate.js b/src/utils/getPurchaseEstimate.js
index <HASH>..<HASH> 100644
--- a/src/utils/getPurchaseEstimate.js
+++ b/src/utils/getPurchaseEstimate.js
@@ -48,13 +48,16 @@ export function getPurchaseEstimate({ client, amount, rate, remaining }) {
? new BigNumber(remaining)
.multipliedBy(new BigNumber(rate))
.dividedBy(new BigNumber(client.toWei('1')))
- .decimalPlaces(18)
+ .decimalPlaces(0)
.toString(10)
: null
const excessCoinAmount =
isValidAmount && excedes
- ? weiAmount.minus(usedCoinAmount).toString(10)
+ ? weiAmount
+ .minus(usedCoinAmount)
+ .decimalPlaces(0)
+ .toString(10)
: null
return { expectedMETamount, excedes, usedCoinAmount, excessCoinAmount }
|
Fix issue in purchase estimates when MET price is extremely low
|
diff --git a/src/js/flowchart.js b/src/js/flowchart.js
index <HASH>..<HASH> 100644
--- a/src/js/flowchart.js
+++ b/src/js/flowchart.js
@@ -2724,7 +2724,7 @@
'#{id} .flowchart-relation[data-shape="polyline"] .flowchart-relation-text,',
'#{id} .flowchart-relation[data-shape="bessel"] .flowchart-relation-text {cursor: move}',
- '#{id} .flowchart-element-focused .flowchart-relation-text {pointer-events: auto; border: 1px solid {activeColor}}',
+ '#{id} .flowchart-element-focused .flowchart-relation-text {opacity: 1; pointer-events: auto; border: 1px solid {activeColor}}',
'#{id} .flowchart-svg-canvas .flowchart-relation-line:hover {stroke: {activeColor}!important}',
|
* fix double click not work in relation element without text.
|
diff --git a/openxc-it/src/main/java/com/openxc/remote/sources/trace/TraceVehicleDataSourceTest.java b/openxc-it/src/main/java/com/openxc/remote/sources/trace/TraceVehicleDataSourceTest.java
index <HASH>..<HASH> 100644
--- a/openxc-it/src/main/java/com/openxc/remote/sources/trace/TraceVehicleDataSourceTest.java
+++ b/openxc-it/src/main/java/com/openxc/remote/sources/trace/TraceVehicleDataSourceTest.java
@@ -91,7 +91,7 @@ public class TraceVehicleDataSourceTest extends AndroidTestCase {
startTrace(source);
assertTrue(receivedNumericalCallback);
assertTrue(receivedBooleanCallback);
- assertEquals(receivedBoolean, false);
+ assertEquals(receivedBoolean, true);
}
@SmallTest
|
Update trace test with new included boolean.
|
diff --git a/querydsl-core/src/main/java/com/mysema/query/group/GroupBy.java b/querydsl-core/src/main/java/com/mysema/query/group/GroupBy.java
index <HASH>..<HASH> 100644
--- a/querydsl-core/src/main/java/com/mysema/query/group/GroupBy.java
+++ b/querydsl-core/src/main/java/com/mysema/query/group/GroupBy.java
@@ -248,17 +248,17 @@ public class GroupBy<S> implements ResultTransformer<Map<S, Group>> {
@Override
public <T> T getOne(Expression<T> expr) {
- return get(expr);
+ return this.<T, T>get(expr);
}
@Override
public <T> Set<T> getSet(Expression<T> expr) {
- return get(expr);
+ return this.<T, Set<T>>get(expr);
}
@Override
public <T> List<T> getList(Expression<T> expr) {
- return get(expr);
+ return this.<T, List<T>>get(expr);
}
private <T, R> R get(Expression<T> expr) {
|
Fixed generics that break under some older Java version
|
diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -39,8 +39,8 @@ function _update(current, name, fn) {
var lookupKey = key;
key = lookupIndex(current, key);
if (key === undefined) {
- throw new Error(`no object found by ${lookupKey}. autocreate is not supported`);
- }
+ throw new Error('no object found by ' + lookupKey + '. autocreate is not supported');
+ }
}
if (current[key] === undefined) {
|
fix message interpolation when throwing lookup error
- accidental usage of ES6 interpolation causes problems
when processing code that expected to be ES5-compatible.
this commit replaces interpolation by string concatenation
|
diff --git a/examples/file_uncompress.js b/examples/file_uncompress.js
index <HASH>..<HASH> 100644
--- a/examples/file_uncompress.js
+++ b/examples/file_uncompress.js
@@ -12,7 +12,8 @@ var outputFile = process.argv[3] || path.basename(inputFile, lz4.extension)
var decoder = lz4.createDecoderStream()
-var input = fs.createReadStream( inputFile )
+// Higher buffer size increases performance
+var input = fs.createReadStream( inputFile, { highWaterMark: 4 << 20 } )
var output = fs.createWriteStream( outputFile )
// Timing
|
Added buffer size in uncompress example (throughput is doubled)
|
diff --git a/lib/rest-ftp-daemon/job.rb b/lib/rest-ftp-daemon/job.rb
index <HASH>..<HASH> 100644
--- a/lib/rest-ftp-daemon/job.rb
+++ b/lib/rest-ftp-daemon/job.rb
@@ -271,7 +271,8 @@ module RestFtpDaemon
ftp_connect_and_login
# Connect remote server, login and chdir
- ftp_chdir_or_buildpath @target_url.path
+ path = '/' + Helpers.extract_dirname(@target_url.path).to_s
+ ftp_chdir_or_buildpath path
# Check source files presence and compute total size, they should be there, coming from Dir.glob()
@transfer_total = 0
|
bugfix: a target path without subdirs caused a process crash
|
diff --git a/gobblin-aws/src/main/java/gobblin/aws/GobblinAWSClusterLauncher.java b/gobblin-aws/src/main/java/gobblin/aws/GobblinAWSClusterLauncher.java
index <HASH>..<HASH> 100644
--- a/gobblin-aws/src/main/java/gobblin/aws/GobblinAWSClusterLauncher.java
+++ b/gobblin-aws/src/main/java/gobblin/aws/GobblinAWSClusterLauncher.java
@@ -548,7 +548,7 @@ public class GobblinAWSClusterLauncher {
private String buildClusterWorkerCommand(String memory) {
StringBuilder userDataCmds = new StringBuilder().append("#!/bin/bash").append("\n");
- String clusterWorkerClassName = GobblinAWSClusterMaster.class.getSimpleName();
+ String clusterWorkerClassName = GobblinAWSTaskRunner.class.getSimpleName();
// Connect to NFS server
// TODO: Replace with EFS when available in GA
|
Minor change: Changed the Gobblin Worker log file name prefix
|
diff --git a/devices/philips.js b/devices/philips.js
index <HASH>..<HASH> 100644
--- a/devices/philips.js
+++ b/devices/philips.js
@@ -213,7 +213,7 @@ module.exports = [
vendor: 'Philips',
description: 'Hue white E12',
meta: {turnsOffAtBrightness1: true},
- extend: hueExtend.light_onoff_brightness(),
+ extend: hueExtend.light_onoff_brightness_colortemp({colorTempRange: [153, 454]}),
ota: ota.zigbeeOTA,
},
{
|
Add color temp for philips hue <I> (#<I>)
|
diff --git a/scapy/utils.py b/scapy/utils.py
index <HASH>..<HASH> 100644
--- a/scapy/utils.py
+++ b/scapy/utils.py
@@ -714,6 +714,7 @@ class RawPcapWriter:
try:
p = pkt.next()
except StopIteration:
+ self._write_header("")
return
self._write_header(p)
self._write_packet(p)
|
wrpcap() creates a valid PCAP file when called with an empty list
--HG--
branch : issue-<I>
|
diff --git a/fastlane/lib/fastlane/swift_fastlane_function.rb b/fastlane/lib/fastlane/swift_fastlane_function.rb
index <HASH>..<HASH> 100644
--- a/fastlane/lib/fastlane/swift_fastlane_function.rb
+++ b/fastlane/lib/fastlane/swift_fastlane_function.rb
@@ -232,7 +232,7 @@ module Fastlane
# Adds newlines between each documentation element.
documentation = documentation_elements.flat_map { |element| [element, separator] }.tap(&:pop).join("\n")
- return "/**\n#{documentation}\n*/\n"
+ return "/**\n#{documentation.gsub('/*', '/\\*')}\n*/\n"
end
def swift_parameter_documentation
|
[Fastlane.Swift] Sanitize Swift document comments (#<I>)
|
diff --git a/classes/Collector.php b/classes/Collector.php
index <HASH>..<HASH> 100644
--- a/classes/Collector.php
+++ b/classes/Collector.php
@@ -221,6 +221,10 @@ abstract class QM_Collector {
}
public static function hide_qm() {
+ if ( ! defined( 'QM_HIDE_SELF' ) ) {
+ return false;
+ }
+
if ( null === self::$hide_qm ) {
self::$hide_qm = QM_HIDE_SELF;
}
|
Don't try to access `QM_HIDE_SELF` before it's defined. Fixes #<I>.
|
diff --git a/dist/milsymbol.js b/dist/milsymbol.js
index <HASH>..<HASH> 100644
--- a/dist/milsymbol.js
+++ b/dist/milsymbol.js
@@ -2728,7 +2728,7 @@ function textfields(){
}
}
//Land or letterbased SIDC
- if(isNaN(this.SIDC) || this.properties.dimension == "Ground"){
+ if(isNaN(this.SIDC) || this.properties.baseDimension == "Ground"){
gStrings.L1 = this.dtg;
if(this.altitudeDepth||this.location){
a = [];
|
Think we should use baseDimension here and not dimension
|
diff --git a/lib/arel/algebra/header.rb b/lib/arel/algebra/header.rb
index <HASH>..<HASH> 100644
--- a/lib/arel/algebra/header.rb
+++ b/lib/arel/algebra/header.rb
@@ -4,9 +4,7 @@ module Arel
def initialize(attrs = [])
@attributes = attrs.to_ary
- @names = Hash.new do |h,k|
- h[k] = @attributes.detect { |a| a.named?(k) }
- end
+ @names = {}
end
def each
@@ -55,7 +53,8 @@ module Arel
end
def find_by_name(name)
- @names[name.to_sym]
+ k = name.to_sym
+ @names[k] ||= @attributes.detect { |a| a.named?(k) }
end
def find_by_attribute(attr)
|
PERF: fewer objects, less lambdas, less memory
|
diff --git a/cluster.go b/cluster.go
index <HASH>..<HASH> 100644
--- a/cluster.go
+++ b/cluster.go
@@ -703,12 +703,12 @@ func (c *ClusterClient) WithContext(ctx context.Context) *ClusterClient {
if ctx == nil {
panic("nil context")
}
- c2 := c.copy()
+ c2 := c.clone()
c2.ctx = ctx
return c2
}
-func (c *ClusterClient) copy() *ClusterClient {
+func (c *ClusterClient) clone() *ClusterClient {
cp := *c
cp.init()
return &cp
diff --git a/ring.go b/ring.go
index <HASH>..<HASH> 100644
--- a/ring.go
+++ b/ring.go
@@ -381,12 +381,12 @@ func (c *Ring) WithContext(ctx context.Context) *Ring {
if ctx == nil {
panic("nil context")
}
- c2 := c.copy()
+ c2 := c.clone()
c2.ctx = ctx
return c2
}
-func (c *Ring) copy() *Ring {
+func (c *Ring) clone() *Ring {
cp := *c
return &cp
}
|
Rename copy to clone
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -86,10 +86,10 @@ setup(
packages=find_packages(exclude=['tests', 'tests.*']),
install_requires=[
- 'six>=1.10.0',
- 'graphql-core>=2.0',
- 'graphql-relay>=0.4.5',
- 'promise>=2.1',
+ 'six>=1.10.0,<2',
+ 'graphql-core>=2.0,<3',
+ 'graphql-relay>=0.4.5,<1',
+ 'promise>=2.1,<3',
],
tests_require=tests_require,
extras_require={
|
Prevent requirement breaking changes
I have a project still in <I> thats has been broken in my last release since it used `'graphql-core>=<I>'` in the `install_requires`. Since `graphql-core` has released version <I> with breaking changes and there was no instruction to maintain version 1, it was included as a dependency. This prevents this situation for the future.
|
diff --git a/sexpr/grammar.py b/sexpr/grammar.py
index <HASH>..<HASH> 100644
--- a/sexpr/grammar.py
+++ b/sexpr/grammar.py
@@ -15,7 +15,6 @@ grammar_str_form = \
class Grammar(Matcher):
default_options = {}
- default_parser_options = {}
def __init__(self, source, options = None):
rules = source.get('rules', {})
@@ -27,7 +26,7 @@ class Grammar(Matcher):
try:
self.root = self.options.get('root', list(rules.items())[0][0])
except IndexError:
- self.root = None
+ raise ValueError('Cannot load root node. Grammar is ill-formed.')
def sexpr(self, sexpr):
if isinstance(sexpr, Sexpr):
|
Throw ValueError if loading root rule fails
|
diff --git a/zipline/finance/performance.py b/zipline/finance/performance.py
index <HASH>..<HASH> 100644
--- a/zipline/finance/performance.py
+++ b/zipline/finance/performance.py
@@ -133,8 +133,6 @@ omitted).
"""
import logbook
-import datetime
-import pytz
import math
import numpy as np
@@ -154,7 +152,6 @@ class PerformanceTracker(object):
def __init__(self, sim_params):
self.sim_params = sim_params
- self.started_at = datetime.datetime.utcnow().replace(tzinfo=pytz.utc)
self.period_start = self.sim_params.period_start
self.period_end = self.sim_params.period_end
@@ -230,7 +227,6 @@ class PerformanceTracker(object):
Returns a dict object of the form described in header comments.
"""
return {
- 'started_at': self.started_at,
'period_start': self.period_start,
'period_end': self.period_end,
'progress': self.progress,
|
MAINT: Removes unused started_at member from performance tracker.
|
diff --git a/lib/jitsu/commands/wizard.js b/lib/jitsu/commands/wizard.js
index <HASH>..<HASH> 100644
--- a/lib/jitsu/commands/wizard.js
+++ b/lib/jitsu/commands/wizard.js
@@ -4,19 +4,27 @@
* (C) 2012, Nodejitsu Inc.
*
*/
-
-var fs = require('fs'),
- path = require('path'),
- common = require('../common'),
- cpr = common.cpr,
- rimraf = common.rimraf,
+
+var fs = require('fs'),
+ path = require('path'),
+ common = require('../common'),
+ cpr = common.cpr,
+ rimraf = common.rimraf,
npmModule = require('npm'),
- jitsu = require('../../jitsu');
+ jitsu = require('../../jitsu'),
+ utile = require('utile');
var thisPath = process.cwd();
module.exports = function (callback) {
+ //
+ // Allows arbitrary amount of arguments
+ //
+ if(arguments.length) {
+ callback = utile.args(arguments).callback;
+ }
+
jitsu.plugins.cli.executeCommand(['help'], callback);
};
|
[api][fix] made/added argument currying congruent in functions
|
diff --git a/termgraph.py b/termgraph.py
index <HASH>..<HASH> 100755
--- a/termgraph.py
+++ b/termgraph.py
@@ -32,7 +32,7 @@ def main(args):
# read data
labels, data = read_data(args['filename'])
- chart(labels, data)
+ chart(labels, data, args)
def chart(labels, data, args):
|
Fix args being passed to chart
|
diff --git a/lib/calabash/android/device.rb b/lib/calabash/android/device.rb
index <HASH>..<HASH> 100644
--- a/lib/calabash/android/device.rb
+++ b/lib/calabash/android/device.rb
@@ -46,7 +46,7 @@ module Calabash
def test_server_responding?
begin
- http_client.get(HTTP::Request.new('ping')).body == 'pong'
+ http_client.get(HTTP::Request.new('ping'), retries: 1).body == 'pong'
rescue HTTP::Error => _
false
end
|
Android::Device: #test_server_responding? don't auto retry
|
diff --git a/AcmeClient.php b/AcmeClient.php
index <HASH>..<HASH> 100644
--- a/AcmeClient.php
+++ b/AcmeClient.php
@@ -82,7 +82,27 @@ class AcmeClient implements AcmeClientInterface
$payload['contact'] = ['mailto:'.$email];
}
- return (array) $this->requestResource('POST', ResourcesDirectory::NEW_REGISTRATION, $payload);
+ $response = (array) $this->requestResource('POST', ResourcesDirectory::NEW_REGISTRATION, $payload);
+ $links = $this->httpClient->getLastLinks();
+ foreach ($links as $link) {
+ if ('terms-of-service' === $link['rel']) {
+ $agreement = substr($link[0], 1, -1);
+ $payload = [];
+ $payload['resource'] = ResourcesDirectory::REGISTRATION;
+ $payload['agreement'] = $agreement;
+
+ $this->httpClient->signedRequest(
+ 'POST',
+ $this->httpClient->getLastLocation(),
+ $payload,
+ true
+ );
+
+ break;
+ }
+ }
+
+ return $response;
}
/**
|
Automatically agreed with agrement (#<I>)
|
diff --git a/tasks/titanium.js b/tasks/titanium.js
index <HASH>..<HASH> 100644
--- a/tasks/titanium.js
+++ b/tasks/titanium.js
@@ -67,14 +67,18 @@ module.exports = function(grunt) {
function(callback) { return execCommand('create', createOpts, callback); },
function(callback) {
+ console.log(self.filesSrc);
+ console.log(self.files);
+
// copy all from "files" to destination
self.files.forEach(function(fileObj) {
var base = path.dirname(fileObj.orig.src),
match = base.match(/^([^\*]+)/),
- relPath = match ? match[1] : '.';
+ relPath = match ? match[1] : '.',
+ dest = fileObj.dest || path.join(buildOpts.projectDir, 'Resources');
fileObj.src.forEach(function(file) {
- fs.copySync(file, path.join(fileObj.dest, path.relative(relPath, file)));
+ fs.copySync(file, path.join(dest, path.relative(relPath, file)));
});
});
|
process files/src from grunt config
|
diff --git a/dev/FixtureBlueprint.php b/dev/FixtureBlueprint.php
index <HASH>..<HASH> 100644
--- a/dev/FixtureBlueprint.php
+++ b/dev/FixtureBlueprint.php
@@ -129,8 +129,9 @@ class FixtureBlueprint {
$parsedItems = array();
$items = preg_split('/ *, */',trim($fieldVal));
foreach($items as $item) {
- // Check for correct format: =><relationname>.<identifier>
- if(!preg_match('/^=>[^\.]+\.[^\.]+/', $item)) {
+ // Check for correct format: =><relationname>.<identifier>.
+ // Ignore if the item has already been replaced with a numeric DB identifier
+ if(!is_numeric($item) && !preg_match('/^=>[^\.]+\.[^\.]+/', $item)) {
throw new InvalidArgumentException(sprintf(
'Invalid format for relation "%s" on class "%s" ("%s")',
$fieldName,
|
Don't complain about pre-replaced YAML fixture relations
|
diff --git a/deblur/workflow.py b/deblur/workflow.py
index <HASH>..<HASH> 100644
--- a/deblur/workflow.py
+++ b/deblur/workflow.py
@@ -819,15 +819,11 @@ def launch_workflow(seqs_fp, working_dir, mean_error, error_dist,
# Step 1: Trim sequences to specified length
output_trim_fp = join(working_dir, "%s.trim" % basename(seqs_fp))
- if trim_length > 0:
- with open(output_trim_fp, 'w') as out_f:
- for label, seq in trim_seqs(
- input_seqs=sequence_generator(seqs_fp),
- trim_len=trim_length):
- out_f.write(">%s\n%s\n" % (label, seq))
- else:
- # If trim length is -1, files are already trimmed
- os.symlink(seqs_fp, output_trim_fp)
+ with open(output_trim_fp, 'w') as out_f:
+ for label, seq in trim_seqs(
+ input_seqs=sequence_generator(seqs_fp),
+ trim_len=trim_length):
+ out_f.write(">%s\n%s\n" % (label, seq))
# Step 2: Dereplicate sequences
output_derep_fp = join(working_dir,
"%s.derep" % basename(output_trim_fp))
|
Regression: trimming was possibly passing .gz files to vsearch; not adding a test as this is a removal of code which was added in this past cycle
|
diff --git a/spead2/__init__.py b/spead2/__init__.py
index <HASH>..<HASH> 100644
--- a/spead2/__init__.py
+++ b/spead2/__init__.py
@@ -160,8 +160,9 @@ class Descriptor(object):
class Item(Descriptor):
def __init__(self, *args, **kw):
+ value = kw.pop('value', None)
super(Item, self).__init__(*args, **kw)
- self._value = None
+ self._value = value
self.version = 1
@property
|
Allow initial value to be provided for Item in constructor
|
diff --git a/src/gluonnlp/data/stream.py b/src/gluonnlp/data/stream.py
index <HASH>..<HASH> 100644
--- a/src/gluonnlp/data/stream.py
+++ b/src/gluonnlp/data/stream.py
@@ -103,19 +103,27 @@ class _LazyTransformDataStream(DataStream):
def __iter__(self):
stream_iter = iter(self._stream)
- try:
- item = next(stream_iter)
- except StopIteration:
- return
- istuple = isinstance(item, tuple)
- if istuple:
- yield self._fn(*item)
- for item in stream_iter:
+
+ # Yield must be hidden in closure so that __iter__ is called before
+ # __next__ is called. This is important, as calling iter(self._stream)
+ # may trigger multi-threaded or multi-processing prefetching of the
+ # stream.
+ def _closure():
+ try:
+ item = next(stream_iter)
+ except StopIteration:
+ return
+ istuple = isinstance(item, tuple)
+ if istuple:
yield self._fn(*item)
- else:
- yield self._fn(item)
- for item in stream_iter:
+ for item in stream_iter:
+ yield self._fn(*item)
+ else:
yield self._fn(item)
+ for item in stream_iter:
+ yield self._fn(item)
+
+ return _closure()
class DatasetStream(DataStream):
|
Fix LazyTransformDataStream prefetching (#<I>)
|
diff --git a/lib/lhc/interceptors/throttle.rb b/lib/lhc/interceptors/throttle.rb
index <HASH>..<HASH> 100644
--- a/lib/lhc/interceptors/throttle.rb
+++ b/lib/lhc/interceptors/throttle.rb
@@ -33,7 +33,7 @@ class LHC::Throttle < LHC::Interceptor
def break_when_quota_reached!
options = request.options.dig(:throttle)
track = (self.class.track || {}).dig(options[:provider])
- return unless track
+ return if track.blank? || track[:remaining].blank? || track[:limit].blank?
# avoid floats by multiplying with 100
remaining = track[:remaining] * 100
limit = track[:limit]
diff --git a/spec/interceptors/throttle/main_spec.rb b/spec/interceptors/throttle/main_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/interceptors/throttle/main_spec.rb
+++ b/spec/interceptors/throttle/main_spec.rb
@@ -77,5 +77,14 @@ describe LHC::Throttle do
it 'does not raise an exception' do
LHC.get('http://local.ch', options)
end
+
+ context 'no remaining tracked, but break enabled' do
+ let(:break_option) { '90%' }
+
+ it 'does not fail if a remaining was not tracked yet' do
+ LHC.get('http://local.ch', options)
+ LHC.get('http://local.ch', options)
+ end
+ end
end
end
|
Fix case where tacked reamining is nil (#<I>)
|
diff --git a/scripts/importer/mtasks/berkeley.py b/scripts/importer/mtasks/berkeley.py
index <HASH>..<HASH> 100644
--- a/scripts/importer/mtasks/berkeley.py
+++ b/scripts/importer/mtasks/berkeley.py
@@ -30,12 +30,12 @@ def do_ucb_photo(events, stubs, args, tasks, task_obj, log):
photom = json.loads(jsontxt)
photom = sorted(photom, key=lambda kk: kk['ObjName'])
for phot in pbar(photom, desc=current_task):
- name = phot['ObjName']
- events, name = add_event(tasks, args, events, name, log)
+ oldname = phot['ObjName']
+ events, name = add_event(tasks, args, events, oldname, log)
sec_source = events[name].add_source(srcname=sec_ref, url=sec_refurl, bibcode=sec_refbib,
secondary=True)
- events[name].add_quantity('alias', name, sec_source)
+ events[name].add_quantity('alias', oldname, sec_source)
sources = [sec_source]
if phot['Reference']:
sources += [events[name].add_source(bibcode=phot['Reference'])]
|
MAINT: have 'berkeley' stuff store 'oldname'.
|
diff --git a/src/js/components/Select/Select.js b/src/js/components/Select/Select.js
index <HASH>..<HASH> 100644
--- a/src/js/components/Select/Select.js
+++ b/src/js/components/Select/Select.js
@@ -72,9 +72,11 @@ const Select = forwardRef(
messages,
multiple,
name,
+ onBlur,
onChange,
onClick,
onClose,
+ onFocus,
onKeyDown,
onMore,
onOpen,
@@ -295,6 +297,8 @@ const Select = forwardRef(
open={open}
alignSelf={alignSelf}
focusIndicator={focusIndicator}
+ onFocus={onFocus}
+ onBlur={onBlur}
gridArea={gridArea}
margin={margin}
onOpen={onRequestOpen}
|
Fix: Introducing Select onBlur and onFocus (#<I>)
* Select onBlur and onFocus: introduced both to select component
* Small cleanup
* Small syntax fix
|
diff --git a/graffiti/graph/graph.go b/graffiti/graph/graph.go
index <HASH>..<HASH> 100644
--- a/graffiti/graph/graph.go
+++ b/graffiti/graph/graph.go
@@ -103,14 +103,14 @@ type graphElement struct {
// Node of the graph
type Node struct {
- graphElement
+ graphElement `mapstructure:",squash"`
}
// Edge of the graph linked by a parent and a child
type Edge struct {
- graphElement
- Parent Identifier
- Child Identifier
+ graphElement `mapstructure:",squash"`
+ Parent Identifier
+ Child Identifier
}
// Graph errors
diff --git a/js/runtime.go b/js/runtime.go
index <HASH>..<HASH> 100644
--- a/js/runtime.go
+++ b/js/runtime.go
@@ -392,6 +392,7 @@ func (r *Runtime) Exec(code string) (v otto.Value, err error) {
return v, err
}
+// CallFunction takes the source of a function and evaluate it with the specifed parameters
func (r *Runtime) CallFunction(source string, params ...interface{}) (otto.Value, error) {
result, err := r.Exec("(" + source + ")")
if err != nil {
@@ -431,6 +432,7 @@ func (r *Runtime) Start() {
// Stop the runtime evaluation loop
func (r *Runtime) Stop() {
+ r.stopEventLoop <- true
}
// NewRuntime returns a new JavaScript runtime environment
|
graph: allow to use mapstructure on nodes and edges
|
diff --git a/Kwc/Directories/List/ViewMap/Coordinates/Component.php b/Kwc/Directories/List/ViewMap/Coordinates/Component.php
index <HASH>..<HASH> 100644
--- a/Kwc/Directories/List/ViewMap/Coordinates/Component.php
+++ b/Kwc/Directories/List/ViewMap/Coordinates/Component.php
@@ -21,8 +21,7 @@ class Kwc_Directories_List_ViewMap_Coordinates_Component extends Kwc_Abstract_Aj
->where(new Kwf_Model_Select_Expr_Higher('longitude', $lowestLng))
->where(new Kwf_Model_Select_Expr_Higher('latitude', $lowestLat))
->where(new Kwf_Model_Select_Expr_Lower('longitude', $highestLng))
- ->where(new Kwf_Model_Select_Expr_Lower('latitude', $highestLat))
- ->order('name', 'ASC');
+ ->where(new Kwf_Model_Select_Expr_Lower('latitude', $highestLat));
$parentComponentClass = $this->getData()->parent->componentClass;
$itemDirectory = $this->getData()->parent->parent->getComponent()->getItemDirectory();
|
there is no sorting needed because this component shoews points on a map, sorting by name also causes an error if there is no row called map in table
|
diff --git a/sklearn_porter/Estimator.py b/sklearn_porter/Estimator.py
index <HASH>..<HASH> 100644
--- a/sklearn_porter/Estimator.py
+++ b/sklearn_porter/Estimator.py
@@ -104,8 +104,8 @@ class Estimator(EstimatorApiABC):
import BaseSearchCV # pylint: disable=protected-access
except ImportError:
L.warn('Your installed version of scikit-learn '
- 'v% does not support optimizers in general.',
- sklearn_version)
+ 'v% does not support optimizers in general.',
+ sklearn_version)
else:
if isinstance(est, BaseSearchCV):
L.info('Yes, the estimator is embedded in an optimizer.')
|
feature/oop-api-refactoring: Indent string on multiple lines correctly
|
diff --git a/yotta/init.py b/yotta/init.py
index <HASH>..<HASH> 100644
--- a/yotta/init.py
+++ b/yotta/init.py
@@ -71,4 +71,10 @@ def execCommand(args):
# TODO: more questions ( bugs url,...), check that the name is available in
# the registry...
+ # Create folders while initing
+ folders_to_creat = ["./source", "./test", "./" + c.getName()]
+ for folder_name in folders_to_creat:
+ if not os.path.exists(folder_name):
+ os.mkdir(folder_name)
+
c.writeDescription()
|
Creating folders while initing
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100755
--- a/setup.py
+++ b/setup.py
@@ -15,7 +15,6 @@ setup(
description='User-friendly billing for communal households',
long_description=open('README.rst').read() if exists("README.rst") else "",
install_requires=[
- 'django>=1.8',
'django-hordak>=1.1.0',
'path.py',
'django-model-utils>=2.5.0',
@@ -31,5 +30,6 @@ setup(
'six',
'python-dateutil',
'django-adminlte2>=0.1.4',
+ 'django>=1.8',
],
)
|
Attempt to fix installation error (perhaps similar to <URL>)
|
diff --git a/packages/xod-client-browser/test-func/pageObjects/PromptPopup.js b/packages/xod-client-browser/test-func/pageObjects/PromptPopup.js
index <HASH>..<HASH> 100644
--- a/packages/xod-client-browser/test-func/pageObjects/PromptPopup.js
+++ b/packages/xod-client-browser/test-func/pageObjects/PromptPopup.js
@@ -15,7 +15,7 @@ PromptPopup.findOnPage = async page => {
};
PromptPopup.waitOnPage = async page => {
- await page.waitFor('.PopupPrompt', { timeout: 2000 });
+ await page.waitFor('.PopupPrompt', { timeout: 5000 });
return PromptPopup.findOnPage(page);
};
|
chore(xod-client-browser): enlarge timeout for mostly failed by timeout selector
|
diff --git a/kiali/models.py b/kiali/models.py
index <HASH>..<HASH> 100644
--- a/kiali/models.py
+++ b/kiali/models.py
@@ -3,7 +3,7 @@ from kiali.client import ApiObject
class Data (ApiObject):
__slots__ = [
'id', "source", 'target', 'version', 'text', 'color', 'style', 'rate', 'service', 'group_by',
- "is_root", "is_circuit_breaker", "flag_circuit_breaker"
+ "is_root", "has_CB"
]
def __repr__(self):
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -6,7 +6,7 @@ from distutils.core import setup
setup(name='kiali-client',
packages=['kiali'],
- version='0.3.5',
+ version='0.3.6',
description='Python client to communicate with Kiali server over HTTP(S)',
author='Guilherme Baufaker Rego',
author_email='gbaufake@redhat.com',
|
Updating Client to <I>
|
diff --git a/openquake/calculators/base.py b/openquake/calculators/base.py
index <HASH>..<HASH> 100644
--- a/openquake/calculators/base.py
+++ b/openquake/calculators/base.py
@@ -664,11 +664,11 @@ class HazardCalculator(BaseCalculator):
logging.info('Computing hazard maps for PoEs=%s', oq.poes)
with mon:
N = len(self.sitecol.complete)
- ct = oq.concurrent_tasks
+ ct = oq.concurrent_tasks or 1
if 'hcurves' in self.datastore:
kinds = self.datastore['hcurves']
hmaps_dt = numpy.dtype(
- [('%s-%s' % (imt, poe), float)
+ [('%s-%s' % (imt, poe), F32)
for imt in oq.imtls for poe in oq.poes])
for kind in kinds:
self.datastore.create_dset(
|
Parallelized save_hmaps [skip hazardlib][demos]
Former-commit-id: e<I>c7f9bc4d<I>f<I>e<I>b0b8c3ce9acb8bb<I>c
|
diff --git a/gcloud/storage/connection.py b/gcloud/storage/connection.py
index <HASH>..<HASH> 100644
--- a/gcloud/storage/connection.py
+++ b/gcloud/storage/connection.py
@@ -414,13 +414,7 @@ class Connection(_Base):
if isinstance(bucket, Bucket):
return bucket
- # Support Python 2 and 3.
- try:
- string_type = six.string_types
- except NameError: # pragma: NO COVER PY3k
- string_type = str
-
- if isinstance(bucket, string_type):
+ if isinstance(bucket, six.string_types):
return Bucket(connection=self, name=bucket)
raise TypeError('Invalid bucket: %s' % bucket)
|
No need for 'try: ... except:' when we have 'six'.
|
diff --git a/src/typeahead/typeahead.js b/src/typeahead/typeahead.js
index <HASH>..<HASH> 100644
--- a/src/typeahead/typeahead.js
+++ b/src/typeahead/typeahead.js
@@ -325,7 +325,7 @@ var Typeahead = (function() {
.val('')
.removeData()
.addClass('tt-hint')
- .removeAttr('id name placeholder')
+ .removeAttr('id name placeholder required')
.prop('disabled', true)
.attr({ autocomplete: 'off', spellcheck: 'false' });
|
Remove required attribute from hint. Closes #<I>.
|
diff --git a/app/assets/javascripts/foreman_proxmox/proxmox.js b/app/assets/javascripts/foreman_proxmox/proxmox.js
index <HASH>..<HASH> 100644
--- a/app/assets/javascripts/foreman_proxmox/proxmox.js
+++ b/app/assets/javascripts/foreman_proxmox/proxmox.js
@@ -35,9 +35,7 @@ function cdromSelected(item) {
},
success: function(isos) {
$('#host_compute_attributes_config_attributes_cdrom_iso').empty();
- console.log('isos='+isos);
$.each(isos, function(i,iso){
- console.log('iso='+iso);
$('#host_compute_attributes_config_attributes_cdrom_iso').append($("<option></option>").val(iso.volid).text(iso.volid));
});
}
|
Refactor
Remove unnecessary debug logs
|
diff --git a/lib/rtkit/cr_series.rb b/lib/rtkit/cr_series.rb
index <HASH>..<HASH> 100644
--- a/lib/rtkit/cr_series.rb
+++ b/lib/rtkit/cr_series.rb
@@ -57,7 +57,7 @@ module RTKIT
super(series_uid, 'CR', study, options)
# Default attributes:
@images = Array.new
- @image_uids = Hash.new
+ @associated_images = Hash.new
# Register ourselves with the study:
@study.add_series(self)
end
@@ -94,7 +94,7 @@ module RTKIT
def add_image(image)
raise ArgumentError, "Invalid argument 'image'. Expected Image, got #{image.class}." unless image.is_a?(Image)
@images << image
- @image_uids[image.uid] = image
+ @associated_images[image.uid] = image
end
# Computes a hash code for this object.
@@ -118,7 +118,7 @@ module RTKIT
def image(*args)
raise ArgumentError, "Expected one or none arguments, got #{args.length}." unless [0, 1].include?(args.length)
if args.length == 1
- return @image_uids[args.first]
+ return @associated_images[args.first]
else
# No argument used, therefore we return the first Image instance:
return @images.first
|
Rename a class variable
This makes it equal to that of of the RTImage class.
|
diff --git a/lib/dill/widget.rb b/lib/dill/widget.rb
index <HASH>..<HASH> 100644
--- a/lib/dill/widget.rb
+++ b/lib/dill/widget.rb
@@ -219,7 +219,7 @@ module Dill
# Compares the current widget with +value+, waiting for the comparison
# to return +true+.
def ==(value)
- wait_for { cast_to(value) == value }
+ wait_for { cast_to_type_of(value) == value }
end
# Calls +=~+ on this widget's text content.
@@ -235,7 +235,7 @@ module Dill
# Compares the current widget with +value+, waiting for the comparison
# to return +false+.
def !=(value)
- wait_for { cast_to(value) != value }
+ wait_for { cast_to_type_of(value) != value }
end
def checkpoint(wait_time)
@@ -340,7 +340,7 @@ module Dill
protected
- def cast_to(value)
+ def cast_to_type_of(value)
case value
when Float
to_f
|
[widget] Clarify method name.
|
diff --git a/lib/openscap/version.rb b/lib/openscap/version.rb
index <HASH>..<HASH> 100644
--- a/lib/openscap/version.rb
+++ b/lib/openscap/version.rb
@@ -10,5 +10,5 @@
#
module OpenSCAP
- VERSION = '0.4.0'
+ VERSION = '0.4.1'
end
|
ruby-openscap-<I>
- Rubocop fixes
- Allows for bzip2ed memory to be used as OpenSCAP::Source
- requires OpenSCAP-<I> base package
|
diff --git a/domaintools_async/__init__.py b/domaintools_async/__init__.py
index <HASH>..<HASH> 100644
--- a/domaintools_async/__init__.py
+++ b/domaintools_async/__init__.py
@@ -1,8 +1,6 @@
"""Adds async capabilities to the base product object"""
import asyncio
-import aiohttp
from httpx import AsyncClient
-import ssl
from domaintools.base_results import Results
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100755
--- a/setup.py
+++ b/setup.py
@@ -9,7 +9,7 @@ from os import path
from setuptools import Extension, find_packages, setup
from setuptools.command.test import test as TestCommand
-requires = ['requests', 'aiohttp', 'dateparser']
+requires = ['httpx', 'dateparser']
packages = ['domaintools', 'domaintools_async']
diff --git a/tox.ini b/tox.ini
index <HASH>..<HASH> 100644
--- a/tox.ini
+++ b/tox.ini
@@ -7,7 +7,7 @@ passenv=TEST_USER TEST_KEY
deps=
pytest
pytest-cov
- requests
+ httpx
vcrpy
.
commands=py.test --capture=sys --cov domaintools tests
|
Remove some unused imports and modify dependencies.
|
diff --git a/lib/growl/growl.rb b/lib/growl/growl.rb
index <HASH>..<HASH> 100644
--- a/lib/growl/growl.rb
+++ b/lib/growl/growl.rb
@@ -1,4 +1,3 @@
-
module Growl
@@path = 'growlnotify'
@@ -212,6 +211,7 @@ module Growl
switch :port
switch :auth
switch :crypt
+ switch :url
end
|
Add url switch
Adding the url switch allows for Growl.url to be called. What this
brings is the ability to link a notification to a specific url in the
notification itself.
|
diff --git a/thinc/api.py b/thinc/api.py
index <HASH>..<HASH> 100644
--- a/thinc/api.py
+++ b/thinc/api.py
@@ -12,7 +12,7 @@ from .util import to_categorical, get_width, xp2torch, torch2xp
from .backends import get_ops, set_current_ops, get_current_ops, use_device
from .backends import Ops, CupyOps, NumpyOps
-from .layers import Affine, Dropout, Embed, ExtractWindow, HashEmbed, LayerNorm
+from .layers import Affine, CauchySimilarity, Dropout, Embed, ExtractWindow, HashEmbed, LayerNorm
from .layers import Maxout, Mish, MultiSoftmax, ReLu, Residual, Softmax, BiLSTM, LSTM
from .layers import add, bidirectional, chain, clone, concatenate, foreach, noop
|
add CauchySimilarity also to api
|
diff --git a/lib/interceptor.js b/lib/interceptor.js
index <HASH>..<HASH> 100644
--- a/lib/interceptor.js
+++ b/lib/interceptor.js
@@ -10,7 +10,7 @@ const parserFactory = require('./data-processor-row');
*/
class RowProcessorInterceptor extends Interceptor {
- constructor(endpoint, config) {
+ constructor(config, endpoint) {
super(endpoint, config);
// just validate the config once
|
fix(constructor): Changed the order of config, and endpoint
|
diff --git a/satpy/scene.py b/satpy/scene.py
index <HASH>..<HASH> 100644
--- a/satpy/scene.py
+++ b/satpy/scene.py
@@ -1522,12 +1522,11 @@ def _check_file_protocols_for_dicts(filenames):
def _check_file_protocols(filenames):
local_files, remote_files, fs_files = _sort_files_to_local_remote_and_fsfiles(filenames)
- try:
- new_fs_files = _filenames_to_fsfile(remote_files)
- except ImportError:
- return filenames
- return local_files + fs_files + new_fs_files
+ if remote_files:
+ return local_files + fs_files + _filenames_to_fsfile(remote_files)
+
+ return local_files + fs_files
def _sort_files_to_local_remote_and_fsfiles(filenames):
|
Let import error stop processing when remote files can't be handled
|
diff --git a/activerecord/lib/active_record/associations/has_many_association.rb b/activerecord/lib/active_record/associations/has_many_association.rb
index <HASH>..<HASH> 100644
--- a/activerecord/lib/active_record/associations/has_many_association.rb
+++ b/activerecord/lib/active_record/associations/has_many_association.rb
@@ -41,6 +41,14 @@ module ActiveRecord
end
end
+ def empty?
+ if has_cached_counter?
+ size.zero?
+ else
+ super
+ end
+ end
+
private
# Returns the number of records in this collection.
diff --git a/activerecord/test/cases/associations/has_many_associations_test.rb b/activerecord/test/cases/associations/has_many_associations_test.rb
index <HASH>..<HASH> 100644
--- a/activerecord/test/cases/associations/has_many_associations_test.rb
+++ b/activerecord/test/cases/associations/has_many_associations_test.rb
@@ -807,6 +807,13 @@ class HasManyAssociationsTest < ActiveRecord::TestCase
end
end
+ def test_calling_empty_with_counter_cache
+ post = posts(:welcome)
+ assert_queries(0) do
+ assert_not post.comments.empty?
+ end
+ end
+
def test_custom_named_counter_cache
topic = topics(:first)
|
If a counter_cache exists, use it for #empty?
|
diff --git a/spyder/plugins/editor/plugin.py b/spyder/plugins/editor/plugin.py
index <HASH>..<HASH> 100644
--- a/spyder/plugins/editor/plugin.py
+++ b/spyder/plugins/editor/plugin.py
@@ -888,15 +888,12 @@ class Editor(SpyderPluginWidget):
self.main.debug_toolbar_actions += debug_toolbar_actions
# ---- Source menu/toolbar construction ----
- source_menu_actions = [eol_menu,
- showblanks_action,
+ source_menu_actions = [showblanks_action,
scrollpastend_action,
showindentguides_action,
show_classfunc_dropdown_action,
showcode_analysis_pep8_action,
show_docstring_warnings_action,
- trailingspaces_action,
- fixindentation_action,
MENU_SEPARATOR,
self.todo_list_action,
self.warning_list_action,
@@ -905,7 +902,11 @@ class Editor(SpyderPluginWidget):
MENU_SEPARATOR,
self.previous_edit_cursor_action,
self.previous_cursor_action,
- self.next_cursor_action]
+ self.next_cursor_action,
+ MENU_SEPARATOR,
+ eol_menu,
+ trailingspaces_action,
+ fixindentation_action]
self.main.source_menu_actions += source_menu_actions
source_toolbar_actions = [self.todo_list_action,
|
Editor: Reorganize Source menu a bit
|
diff --git a/recipe/shelf.py b/recipe/shelf.py
index <HASH>..<HASH> 100644
--- a/recipe/shelf.py
+++ b/recipe/shelf.py
@@ -36,11 +36,11 @@ def ingredient_from_validated_dict(ingr_dict, selectable):
return InvalidIngredient(error=error)
except VisitError as e:
# Lark returns the InvalidColumnError wrapped in a VisitError
- if isinstance(e.__context__, InvalidColumnError):
+ if isinstance(e.orig_exc, InvalidColumnError):
# custom exception handling
error = {
"type": "invalid_column",
- "extra": {"column_name": e.__context__.column_name},
+ "extra": {"column_name": e.orig_exc.column_name},
}
return InvalidIngredient(error=error)
else:
|
Use VisitError.orig_exc
|
diff --git a/scripts/reenroller.rb b/scripts/reenroller.rb
index <HASH>..<HASH> 100644
--- a/scripts/reenroller.rb
+++ b/scripts/reenroller.rb
@@ -41,6 +41,7 @@ module RightScale
# === Return
# true:: Always return true
def run(options)
+ check_privileges
AgentConfig.root_dir = AgentConfig.right_link_root_dirs
if RightScale::Platform.windows?
@@ -81,10 +82,6 @@ module RightScale
res = system("/etc/init.d/rightlink #{action} > /dev/null")
end
true
- rescue Errno::EACCES => e
- STDERR.puts e.message
- STDERR.puts "Try elevating privilege (sudo/runas) before invoking this command."
- exit(2)
end
# Create options hash from command line arguments
@@ -149,7 +146,7 @@ module RightScale
rescue Errno::ESRCH
false
end
-
+
# Version information
#
# === Return
|
acu<I> make rs_reenroll check privileges before doing anything
|
diff --git a/pyrogram/__init__.py b/pyrogram/__init__.py
index <HASH>..<HASH> 100644
--- a/pyrogram/__init__.py
+++ b/pyrogram/__init__.py
@@ -16,7 +16,7 @@
# You should have received a copy of the GNU Lesser General Public License
# along with Pyrogram. If not, see <http://www.gnu.org/licenses/>.
-__version__ = "1.0.0b2"
+__version__ = "1.0.0"
__license__ = "GNU Lesser General Public License v3 or later (LGPLv3+)"
__copyright__ = "Copyright (C) 2017-2020 Dan <https://github.com/delivrance>"
|
Update Pyrogram to <I>
|
diff --git a/latinol/latinol.py b/latinol/latinol.py
index <HASH>..<HASH> 100644
--- a/latinol/latinol.py
+++ b/latinol/latinol.py
@@ -13,7 +13,7 @@ RULES = ((ur'cc', ur'ks'),
(ur'qu([eiéí])', ur'k\1'),
(ur'ü', ur'u'),
(ur'q([aouáóú])', ur'k\1'),
- (ur'w', ur'gu'),
+ (ur'w(\S|.|$)', ur'gu\1'),
(ur'y([^aeiouáóúéí]|$)', ur'i\1'))
|
fix capitalization for w => gu
|
diff --git a/lib/core/engine/iteration.js b/lib/core/engine/iteration.js
index <HASH>..<HASH> 100644
--- a/lib/core/engine/iteration.js
+++ b/lib/core/engine/iteration.js
@@ -105,6 +105,8 @@ class Iteration {
if (recordVideo && !combine) {
await setOrangeBackground(browser.getDriver());
await video.record();
+ // Give ffmpeg some time to settle
+ await Promise.delay(400);
}
await browser.loadAndWait(url, options.pageCompleteCheck);
|
Give ffmpeg some extra time before we start
|
diff --git a/salt/client/__init__.py b/salt/client/__init__.py
index <HASH>..<HASH> 100644
--- a/salt/client/__init__.py
+++ b/salt/client/__init__.py
@@ -1054,6 +1054,10 @@ class LocalClient(object):
if raw['data']['return'] == {}:
continue
+ if 'return' in raw['data']['return'] and \
+ raw['data']['return']['return'] == {}:
+ continue
+
# if we didn't originally target the minion, lets add it to the list
if raw['data']['id'] not in minions:
minions.add(raw['data']['id'])
|
Fixing a weird edge case when using salt syndics and targeting via pillar. Without this fix the master of masters ends up in an infinite loop since the data returned from the minions is differently structured than if a syndic was not in use. (#<I>)
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -150,6 +150,7 @@ def CDF_build(self, ppath):
'OS=' + os_name,
'ENV=' + env_name,
'CURSES=no',
+ 'SHARED=no',
'UCOPTIONS=-Dsingle_underscore',
'all',]
cmd2 = ['make',
|
Trying to get build working on Travis CI
|
diff --git a/commands/authenticate.go b/commands/authenticate.go
index <HASH>..<HASH> 100644
--- a/commands/authenticate.go
+++ b/commands/authenticate.go
@@ -100,9 +100,11 @@ func (cmd *Authenticate) Handle(mechanisms map[string]sasl.Server, conn Authenti
return err
}
- scanner.Scan()
- if err := scanner.Err(); err != nil {
- return err
+ if !scanner.Scan() {
+ if err := scanner.Err(); err != nil {
+ return err
+ }
+ return errors.New("unexpected EOF")
}
encoded = scanner.Text()
|
commands: Properly handle EOF in Authenticate.Handle
scanner.Scan returns false but scanner.Err returns nil in case of EOF.
|
diff --git a/datasource/configdrive/configdrive.go b/datasource/configdrive/configdrive.go
index <HASH>..<HASH> 100644
--- a/datasource/configdrive/configdrive.go
+++ b/datasource/configdrive/configdrive.go
@@ -69,7 +69,9 @@ func (cd *configDrive) FetchMetadata() (metadata datasource.Metadata, err error)
metadata.SSHPublicKeys = m.SSHAuthorizedKeyMap
metadata.Hostname = m.Hostname
- metadata.NetworkConfig, err = cd.tryReadFile(path.Join(cd.openstackRoot(), m.NetworkConfig.ContentPath))
+ if m.NetworkConfig.ContentPath != "" {
+ metadata.NetworkConfig, err = cd.tryReadFile(path.Join(cd.openstackRoot(), m.NetworkConfig.ContentPath))
+ }
return
}
|
configdrive: check the network config path
Check to make sure that a network config path has been specified before
trying to read from it. Otherwise, it will end up trying to read a
directory.
|
diff --git a/lib/headerNameSpecialCases.js b/lib/headerNameSpecialCases.js
index <HASH>..<HASH> 100644
--- a/lib/headerNameSpecialCases.js
+++ b/lib/headerNameSpecialCases.js
@@ -70,10 +70,11 @@ module.exports = {
'x-att-deviceid': 'X-ATT-DeviceId',
'x-cdn': 'X-CDN',
'x-csa-complaints': 'x-csa-complaints',
- 'x-ua-compatible': 'X-UA-Compatible',
+ 'x-originating-ip': 'X-Originating-IP',
'x-riferimento-message-id': 'X-Riferimento-Message-ID',
'x-sg-eid': 'X-SG-EID',
'x-tiporicevuta': 'X-TipoRicevuta',
+ 'x-ua-compatible': 'X-UA-Compatible',
'x-verificasicurezza': 'X-VerificaSicurezza',
'x-xss-protection': 'X-XSS-Protection'
};
|
Added special case for X-Originating-IP header name.
|
diff --git a/sendbeacon.js b/sendbeacon.js
index <HASH>..<HASH> 100755
--- a/sendbeacon.js
+++ b/sendbeacon.js
@@ -12,7 +12,7 @@ function polyfill() {
};
function sendBeacon(url, data) {
- const event = this.event && this.event.type;
+ const event = this.event && this.event.type ? this.event.type : this.event;
const sync = event === 'unload' || event === 'beforeunload';
const xhr = ('XMLHttpRequest' in this) ? new XMLHttpRequest() : new ActiveXObject('Microsoft.XMLHTTP');
|
fix: so that sync will not always be false
The old code made sync always false, because the value of event was always true/false rather than 'unload' or 'beforeunload'.
|
diff --git a/src/spec.js b/src/spec.js
index <HASH>..<HASH> 100644
--- a/src/spec.js
+++ b/src/spec.js
@@ -14,5 +14,5 @@
* limitations under the License.
*/
-var testContext = require.context('.', true, /\.spec\.ts/);
+var testContext = require.context('.', true, /\.spec\.ts$/);
testContext.keys().map(testContext);
|
chore: use better regular expression for unit test paths
|
diff --git a/cassandra/connection.py b/cassandra/connection.py
index <HASH>..<HASH> 100644
--- a/cassandra/connection.py
+++ b/cassandra/connection.py
@@ -374,6 +374,8 @@ class Connection(object):
@defunct_on_error
def _handle_options_response(self, options_response):
+ if self.is_defunct:
+ return
log.debug("Received options response on new Connection from %s" % self.host)
self.supported_cql_versions = options_response.cql_versions
self.remote_supported_compressions = options_response.options['COMPRESSION']
@@ -410,6 +412,8 @@ class Connection(object):
@defunct_on_error
def _handle_startup_response(self, startup_response):
+ if self.is_defunct:
+ return
if isinstance(startup_response, ReadyMessage):
log.debug("Got ReadyMessage on new Connection from %s" % self.host)
if self._compressor:
|
Prevent infinite loop when errored on initial connection
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.