diff
stringlengths 139
3.65k
| message
stringlengths 8
627
| diff_languages
stringclasses 1
value |
|---|---|---|
diff --git a/moto/server.py b/moto/server.py
index <HASH>..<HASH> 100644
--- a/moto/server.py
+++ b/moto/server.py
@@ -26,6 +26,8 @@ class RegexConverter(BaseConverter):
def configure_urls(service):
backend = globals()["{}_backend".format(service)]
from werkzeug.routing import Map
+ # Reset view functions to reset the app
+ app.view_functions = {}
app.url_map = Map()
app.url_map.converters['regex'] = RegexConverter
for url_path, handler in backend.flask_paths.iteritems():
|
Need to reset app view functions to prevent Flask assertion error on resetting view functions.
|
py
|
diff --git a/tests/test_main.py b/tests/test_main.py
index <HASH>..<HASH> 100644
--- a/tests/test_main.py
+++ b/tests/test_main.py
@@ -442,3 +442,8 @@ def test_pre_and_post_mutation_hook(single_mutant_filesystem, tmpdir):
assert "post mutation stub" in result.output
assert result.output.index("pre mutation stub") < result.output.index("post mutation stub")
+
+def test_simple_output(filesystem):
+ result = CliRunner().invoke(climain, ['run', '--paths-to-mutate=foo.py', "--simple-output"], catch_exceptions=False)
+ print(repr(result.output))
+ assert '14/14 KILLED 14 TIMEOUT 0 SUSPICIOUS 0 SURVIVED 0 SKIPPED 0' in repr(result.output)
|
adding test for running mutmut with the `--simple-output` option
|
py
|
diff --git a/helpme/client/__init__.py b/helpme/client/__init__.py
index <HASH>..<HASH> 100644
--- a/helpme/client/__init__.py
+++ b/helpme/client/__init__.py
@@ -101,7 +101,6 @@ def main():
# Customize parser
- from helpme.main import Helper
parser = get_parser()
subparsers = get_subparsers(parser)
@@ -112,8 +111,8 @@ def main():
version = helpme.__version__
- bot.custom(message='HelpMe Command Line Tool v%s' %version,
- prefix='\n[%s]' %Helper.name,
+ bot.custom(message='Command Line Tool v%s' %version,
+ prefix='\n[HelpMe] ',
color='CYAN')
parser.print_help()
|
fixing bug that helpme --help asks for token to close #<I>
|
py
|
diff --git a/modules/engine.py b/modules/engine.py
index <HASH>..<HASH> 100644
--- a/modules/engine.py
+++ b/modules/engine.py
@@ -434,6 +434,7 @@ class GameStage (Stage):
self.world.setup()
for actor in self.actors:
+ print actor
actor.setup(self.world)
def update(self, time):
@@ -444,7 +445,7 @@ class GameStage (Stage):
for actor in self.actors:
actor.update(time)
- if not actor.is_finished():
+ if not actor.is_finished(self.world):
still_playing = True
if not still_playing:
@@ -545,8 +546,8 @@ class Actor (object):
def get_messenger(self):
return self.messenger
- def is_finished(self):
- return self.world.has_game_ended()
+ def is_finished(self, world):
+ return world.has_game_ended()
def setup(self, world):
@@ -588,12 +589,12 @@ class RemoteActor (Actor):
message = IdMessage(id)
self.pipe.send(message)
- def is_finished(self):
- return self.pipe.finished() or Actor.is_finished(self)
+ def is_finished(self, world):
+ return self.pipe.finished() or Actor.is_finished(self, world)
- def setup(self):
- serializer = TokenSerializer(self.world)
+ def setup(self, world):
+ serializer = TokenSerializer(world)
self.pipe.push_serializer(serializer)
def update(self, time):
|
Fixed some world bugs in the setup function.
|
py
|
diff --git a/salt/modules/iptables.py b/salt/modules/iptables.py
index <HASH>..<HASH> 100644
--- a/salt/modules/iptables.py
+++ b/salt/modules/iptables.py
@@ -10,7 +10,6 @@ import re
import sys
import uuid
import shlex
-import string
# Import salt libs
import salt.utils
@@ -345,10 +344,7 @@ def build_rule(table=None, chain=None, command=None, position='', full=None, fam
for after_jump_argument in after_jump_arguments:
if after_jump_argument in kwargs:
value = kwargs[after_jump_argument]
- if any(ws_char in str(value) for ws_char in string.whitespace):
- after_jump.append('--{0} "{1}"'.format(after_jump_argument, value))
- else:
- after_jump.append('--{0} {1}'.format(after_jump_argument, value))
+ after_jump.append('--{0} {1}'.format(after_jump_argument, value))
del kwargs[after_jump_argument]
if 'log' in kwargs:
|
fix target rule, remove unneeded quotation mark
|
py
|
diff --git a/yoti_python_sdk/tests/test_document_details.py b/yoti_python_sdk/tests/test_document_details.py
index <HASH>..<HASH> 100644
--- a/yoti_python_sdk/tests/test_document_details.py
+++ b/yoti_python_sdk/tests/test_document_details.py
@@ -85,8 +85,6 @@ def test_expiration_date_is_dash():
def test_invalid_date():
DATA = "PASSPORT GBR 1234abc X016-05-01"
- try:
+ with pytest.raises(ValueError) as exc:
DocumentDetails(DATA)
- except ValueError:
- return
- return False # An exception should have been thrown
+ assert str(exc.value) == "Invalid value for DocumentDetails"
|
SDK-<I>: Add missed opportunity to use pytest.raises
|
py
|
diff --git a/perceval/_version.py b/perceval/_version.py
index <HASH>..<HASH> 100644
--- a/perceval/_version.py
+++ b/perceval/_version.py
@@ -1,2 +1,2 @@
# Versions compliant with PEP 440 https://www.python.org/dev/peps/pep-0440
-__version__ = "0.9.14"
+__version__ = "0.9.15"
|
Update version number to <I>
|
py
|
diff --git a/salt/states/debconfmod.py b/salt/states/debconfmod.py
index <HASH>..<HASH> 100644
--- a/salt/states/debconfmod.py
+++ b/salt/states/debconfmod.py
@@ -36,6 +36,30 @@ set_file
.. note::
Due to how PyYAML imports nested dicts (see :ref:`here <yaml-idiosyncrasies>`),
the values in the ``data`` dict must be indented four spaces instead of two.
+
+If you're setting debconf values that requires `dpkg-reconfigure`, you can use
+the ``onchanges`` requisite to reconfigure your package:
+
+.. code-block:: yaml
+
+ set-default-shell:
+ debconf.set:
+ - name: dash
+ - data:
+ 'dash/sh': {'type': 'boolean', 'value': false}
+
+ reconfigure-dash:
+ cmd.run:
+ - name: dpkg-reconfigure -f noninteractive dash
+ - onchanges:
+ - debconf: set-default-shell
+
+Every time the ``set-default-shell`` state changes, the ``reconfigure-dash``
+state will also run.
+
+.. note::
+ For boolean types, the value should be ``true`` or ``false``, not
+ ``'true'`` or ``'false'``.
'''
from __future__ import absolute_import, print_function, unicode_literals
from salt.ext import six
|
Porting PR #<I> to <I>
|
py
|
diff --git a/salt/thorium/calc.py b/salt/thorium/calc.py
index <HASH>..<HASH> 100644
--- a/salt/thorium/calc.py
+++ b/salt/thorium/calc.py
@@ -10,7 +10,19 @@ values are stored and computed, such as averages etc.
# import python libs
from __future__ import absolute_import
-import statistics
+
+try:
+ import statistics
+ HAS_STATS = True
+except ImportError:
+ HAS_STATS = False
+
+
+def __virtual__():
+ '''
+ The statistics module must be pip installed
+ '''
+ return HAS_STATS
def calc(name, num, oper, ref=None):
|
Gate the statistics module (#<I>)
|
py
|
diff --git a/trailblazer/mip/files.py b/trailblazer/mip/files.py
index <HASH>..<HASH> 100644
--- a/trailblazer/mip/files.py
+++ b/trailblazer/mip/files.py
@@ -55,7 +55,7 @@ def parse_sampleinfo(data: dict) -> dict:
'sv_vcf_binary_file' in data else None),
'research_vcf': (data['sv_vcf_binary_file']['research']['path'] if
'sv_vcf_binary_file' in data else None),
- 'bcf': data.get('sv_bcf_file'),
+ 'bcf': data.get('sv_bcf_file', {}).get('path'),
'merged': (f"{data['program']['svdb']['outdirectory']}/"
f"{data['program']['svdb']['outfile']}"),
},
|
fix issue getting path of SV bcf file
|
py
|
diff --git a/cheroot/test/test_ssl.py b/cheroot/test/test_ssl.py
index <HASH>..<HASH> 100644
--- a/cheroot/test/test_ssl.py
+++ b/cheroot/test/test_ssl.py
@@ -315,6 +315,16 @@ def test_tls_client_auth(
assert resp.text == 'Hello world!'
return
+ # xfail some flaky tests
+ # https://github.com/cherrypy/cheroot/issues/237
+ issue_237 = (
+ IS_MACOS
+ and adapter_type == 'builtin'
+ and tls_verify_mode != ssl.CERT_NONE
+ )
+ if issue_237:
+ pytest.xfail('Test sometimes fails')
+
expected_ssl_errors = (
requests.exceptions.SSLError,
OpenSSL.SSL.Error,
|
Disable some flaky tests. Ref #<I>.
|
py
|
diff --git a/malcolm/modules/xspress3/blocks/__init__.py b/malcolm/modules/xspress3/blocks/__init__.py
index <HASH>..<HASH> 100644
--- a/malcolm/modules/xspress3/blocks/__init__.py
+++ b/malcolm/modules/xspress3/blocks/__init__.py
@@ -1,6 +1,7 @@
from malcolm.yamlutil import check_yaml_names, make_block_creator
xspress3_driver_block = make_block_creator(__file__, "xspress3_driver_block.yaml")
+xspress3_dtc_block = make_block_creator(__file__, "xspress3_dtc_block.yaml")
xspress3_runnable_block = make_block_creator(__file__, "xspress3_runnable_block.yaml")
__all__ = check_yaml_names(globals())
|
added dtc block to __init__.py
|
py
|
diff --git a/pysoundfile.py b/pysoundfile.py
index <HASH>..<HASH> 100644
--- a/pysoundfile.py
+++ b/pysoundfile.py
@@ -222,7 +222,7 @@ class SoundFile(object):
"""
def __init__(self, name, sample_rate=0, channels=0, format=0,
- file_mode=read_write_mode):
+ mode=read_write_mode):
"""Open a new SoundFile.
If a file is only opened in read_mode or in read_write_mode,
@@ -248,7 +248,7 @@ class SoundFile(object):
info.channels = channels
info.format = format
filename = ffi.new('char[]', name.encode())
- self._file_mode = file_mode
+ self._file_mode = mode
self._file = _snd.sf_open(filename, self._file_mode, info)
self._handle_error()
|
changed file_mode to mode for consistency
|
py
|
diff --git a/allegedb/allegedb/tests/test_load.py b/allegedb/allegedb/tests/test_load.py
index <HASH>..<HASH> 100644
--- a/allegedb/allegedb/tests/test_load.py
+++ b/allegedb/allegedb/tests/test_load.py
@@ -4,14 +4,14 @@ from allegedb import ORM
import networkx as nx
-scalefreestart = nx.MultiDiGraph(name='scale_free_graph_2')
+scalefreestart = nx.MultiDiGraph(name='scale_free_graph_5')
scalefreestart.add_edges_from([(0, 1), (1, 2), (2, 0)])
testgraphs = [
nx.chvatal_graph(),
nx.scale_free_graph(5, create_using=scalefreestart),
- nx.chordal_cycle_graph(5, create_using=nx.MultiGraph(name='chordal_cycle_graph_2')),
+ nx.chordal_cycle_graph(5, create_using=nx.MultiGraph(name='chordal_cycle_graph_5')),
]
# have to name it after creation because it clears the create_using
path_graph_9 = nx.path_graph(9)
|
Correct the names of those test graphs
|
py
|
diff --git a/openquake/calculators/risk/general.py b/openquake/calculators/risk/general.py
index <HASH>..<HASH> 100644
--- a/openquake/calculators/risk/general.py
+++ b/openquake/calculators/risk/general.py
@@ -213,7 +213,7 @@ class BaseRiskCalculator(base.CalculatorNext):
# If this was an existing model, it was already parsed and should be in
# the DB.
- if models.ExposureModel.objects.filter(
+ if not self.rc.force_inputs and models.ExposureModel.objects.filter(
input=exposure_model_input).exists():
return exposure_model_input.exposuremodel
@@ -405,6 +405,12 @@ def store_risk_model(rc, input_type):
"""
[vulnerability_input] = models.inputs4rcalc(rc.id, input_type=input_type)
+ # if a vulnerability model already exists for the same input, then
+ # we do not need to create again.
+ if models.VulnerabilityModel.objects.filter(
+ input=vulnerability_input).exists() and not rc.force_inputs:
+ return
+
for record in parsers.VulnerabilityModelParser(
vulnerability_input.path):
vulnerability_model, _ = (
|
honor force_inputs in risk
|
py
|
diff --git a/can/notifier.py b/can/notifier.py
index <HASH>..<HASH> 100644
--- a/can/notifier.py
+++ b/can/notifier.py
@@ -118,9 +118,9 @@ class Notifier:
self.exception = exc
if self._loop is not None:
self._loop.call_soon_threadsafe(self._on_error, exc)
- else:
- self._on_error(exc)
- raise
+ raise
+ elif not self._on_error(exc):
+ raise
def _on_message_available(self, bus: BusABC):
msg = bus.recv(0)
@@ -134,10 +134,15 @@ class Notifier:
# Schedule coroutine
self._loop.create_task(res)
- def _on_error(self, exc: Exception):
- for listener in self.listeners:
- if hasattr(listener, "on_error"):
- listener.on_error(exc)
+ def _on_error(self, exc: Exception) -> bool:
+ listeners_with_on_error = [
+ listener for listener in self.listeners if hasattr(listener, "on_error")
+ ]
+
+ for listener in listeners_with_on_error:
+ listener.on_error(exc)
+
+ return bool(listeners_with_on_error)
def add_listener(self, listener: Listener):
"""Add new Listener to the notification list.
|
Notifier no longer raises handled exceptions in rx_thread (#<I>)
|
py
|
diff --git a/src/cr/cube/cube_slice.py b/src/cr/cube/cube_slice.py
index <HASH>..<HASH> 100644
--- a/src/cr/cube/cube_slice.py
+++ b/src/cr/cube/cube_slice.py
@@ -118,8 +118,10 @@ class CubeSlice(object):
def _call_cube_method(self, method, *args, **kwargs):
kwargs = self._update_args(kwargs)
result = getattr(self._cube, method)(*args, **kwargs)
- if method in ('labels', 'inserted_hs_indices') and not self.ca_as_0th:
- return result[-2:]
+ if method in ('labels', 'inserted_hs_indices'):
+ if not self.ca_as_0th:
+ result = result[-2:]
+ return result
return self._update_result(result)
@property
|
[#<I>]: Fix test failures after rebase
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -22,7 +22,6 @@ import os
from setuptools import setup
install_requires=[
- 'tensorflow',
'numpy',
'six',
'scipy',
|
removed tensorflow from install_requires
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -20,7 +20,7 @@ setup(
author_email='joachim.ungar@gmail.com',
url='https://github.com/ungarj/tilematrix',
license='MIT',
- packages=['tilematrix'],
+ packages=['tilematrix', 'tilematrix.tmx'],
entry_points={
'console_scripts': ['tmx=tilematrix.tmx.main:tmx']
},
|
attempt to fix tmx command when installing tilematrix via pip
|
py
|
diff --git a/km3pipe/io/tests/test_h5.py b/km3pipe/io/tests/test_h5.py
index <HASH>..<HASH> 100644
--- a/km3pipe/io/tests/test_h5.py
+++ b/km3pipe/io/tests/test_h5.py
@@ -199,7 +199,6 @@ class TestH5Sink(TestCase):
assert version == h5file.root._v_attrs.km3pipe.decode()
assert tb.__version__ == h5file.root._v_attrs.pytables.decode()
assert FORMAT_VERSION == h5file.root._v_attrs.format_version
- assert 'None' == h5file.root._v_attrs.jpp.decode()
fobj.close()
|
Don't check for jpp version
|
py
|
diff --git a/lyricsgenius/api/api.py b/lyricsgenius/api/api.py
index <HASH>..<HASH> 100644
--- a/lyricsgenius/api/api.py
+++ b/lyricsgenius/api/api.py
@@ -177,11 +177,9 @@ class API(Sender):
for y in x['annotations'] if y['verified']]
"""
msg = "Must supply `song_id`, `web_page_id`, or `created_by_id`."
- if not any([song_id, web_page_id, created_by_id]):
- raise ValueError(msg)
+ assert any([song_id, web_page_id, created_by_id]), msg
msg = "Pass only one of `song_id` and `web_page_id`, not both."
- if not (bool(song_id) ^ bool(web_page_id)):
- raise ValueError(msg)
+ assert bool(song_id) ^ bool(web_page_id), msg
# Construct the URI
endpoint = "referents?"
|
reverted back to assert for now
|
py
|
diff --git a/vimball/utils.py b/vimball/utils.py
index <HASH>..<HASH> 100644
--- a/vimball/utils.py
+++ b/vimball/utils.py
@@ -6,7 +6,7 @@ def mkdir_p(path):
try:
os.makedirs(path)
except OSError as e:
- if e.errno == errno.EEXIST:
+ if e.errno == errno.EEXIST and os.path.isdir(path):
pass
else:
raise
|
utils: only allow mkdir_p to not raise OSError on existing dirs Trying to make new dirs on top of existing files or other objects should raise an exception.
|
py
|
diff --git a/cwltool/executors.py b/cwltool/executors.py
index <HASH>..<HASH> 100644
--- a/cwltool/executors.py
+++ b/cwltool/executors.py
@@ -179,6 +179,7 @@ class SingleJobExecutor(JobExecutor):
prov_obj = job.prov_obj
if prov_obj:
runtime_context.prov_obj = prov_obj
+ runtime_context.research_obj.fsaccess = runtime_context.make_fs_access('')
prov_obj.evaluate(
process, job, job_order_object,
runtime_context.research_obj)
|
Added fsaccess regeneration for prov object generation
|
py
|
diff --git a/ratcave/console_scripts/arena_scanner.py b/ratcave/console_scripts/arena_scanner.py
index <HASH>..<HASH> 100644
--- a/ratcave/console_scripts/arena_scanner.py
+++ b/ratcave/console_scripts/arena_scanner.py
@@ -390,6 +390,7 @@ if __name__ == '__main__':
ax = plot_3d(points, square_axis=True)
for idx, verts in vertices.items():
vert_loop = np.vstack((verts, verts[0,:])) # so the line reconnects with the first point to show a complete outline
- ax = plot_3d(vert_loop, ax=ax, title='Triangulated Model', line=True, show=True)
+ show = True if idx == len(vertices)-1 else False
+ ax = plot_3d(vert_loop, ax=ax, title='Triangulated Model', line=True, show=show)
|
put imports inside functions to make more portable, for when doing projector calibration script.
|
py
|
diff --git a/src/sentry_plugins/jira/client.py b/src/sentry_plugins/jira/client.py
index <HASH>..<HASH> 100644
--- a/src/sentry_plugins/jira/client.py
+++ b/src/sentry_plugins/jira/client.py
@@ -163,7 +163,7 @@ class JIRAClient(object):
def make_request(self, method, url, payload=None):
if url[:4] != "http":
url = self.instance_url + url
- auth = self.username, self.password
+ auth = self.username.encode('utf8'), self.password.encode('utf8')
session = build_session()
try:
if method == 'get':
|
utf8 encode auth for jira (#<I>)
|
py
|
diff --git a/twittytwister/twitter.py b/twittytwister/twitter.py
index <HASH>..<HASH> 100644
--- a/twittytwister/twitter.py
+++ b/twittytwister/twitter.py
@@ -143,7 +143,7 @@ class Twitter(object):
'Content-Length': str(len(body))
}
- headers = self._makeAuthHeader('POST', url, headers=headers)
+ self._makeAuthHeader('POST', url, headers=headers)
return client.getPage(url, method='POST',
agent=self.agent,
@@ -154,7 +154,7 @@ class Twitter(object):
url = self.base_url + path
- headers = self._makeAuthHeader('POST', url, args, headers)
+ self._makeAuthHeader('POST', url, args, headers)
if self.client_info != None:
headers.update(self.client_info.get_headers())
|
Twitter: don't reassign headers variable when using _makeAuthHeader() _makeAuthHeader() now changes the headers dictionary in-place, so we don't need to reassign the variable.
|
py
|
diff --git a/epc/client.py b/epc/client.py
index <HASH>..<HASH> 100644
--- a/epc/client.py
+++ b/epc/client.py
@@ -34,6 +34,27 @@ class EPCClient(EPCCore):
"""
EPC client class to call remote functions and serve Python functions.
+
+ >>> client = EPCClient()
+ >>> client.connect(('localhost', 9999)) #doctest: +SKIP
+ >>> client.call_sync('echo', [111, 222, 333]) #doctest: +SKIP
+ [111, 222, 333]
+
+ To serve Python functions, you can use :meth:`register_function`.
+
+ >>> client.register_function(str.upper)
+ <method 'upper' of 'str' objects>
+
+ :meth:`register_function` can be used as a decorator.
+
+ >>> @client.register_function
+ ... def add(x, y):
+ ... return x + y
+
+ Also, you can initialize client and connect to the server by one line.
+
+ >>> client = EPCClient(('localhost', 0)) #doctest: +SKIP
+
"""
thread_daemon = True
|
Add doctest to EPCClient
|
py
|
diff --git a/src/saml2/ecp.py b/src/saml2/ecp.py
index <HASH>..<HASH> 100644
--- a/src/saml2/ecp.py
+++ b/src/saml2/ecp.py
@@ -256,29 +256,12 @@ class ECPClient(Saml2Client):
# <samlp:AuthnRequest>
# ----------------------------------------
- spentityid = self._entityid()
location = self._sso_location(entityid)
- service_url = self._service_url()
- my_name = self._my_name()
-
- if log is None:
- log = self.logger
-
- if log:
- log.info("spentityid: %s" % spentityid)
- log.info("location: %s" % location)
- log.info("service_url: %s" % service_url)
- log.info("my_name: %s" % my_name)
-
session_id = sid()
- authen_req = self.authn_request(session_id, location,
- service_url, spentityid, my_name,
- scoping, log, sign)
-
- authn_request = samlp.AuthnRequest()
+ authn_req = self.authn(location, session_id, log=log)
body = soapenv.Body()
- body.extension_elements = [element_to_extension_element(authn_request)]
+ body.extension_elements = [element_to_extension_element(authn_req)]
# ----------------------------------------
# The SOAP envelope
|
Made ECPClient actually produce something usable
|
py
|
diff --git a/schedule/models/events.py b/schedule/models/events.py
index <HASH>..<HASH> 100644
--- a/schedule/models/events.py
+++ b/schedule/models/events.py
@@ -13,8 +13,8 @@ from django.template.defaultfilters import date
from django.urls import reverse
from django.utils import timezone
from django.utils.encoding import python_2_unicode_compatible
-from django.utils.translation import ugettext_lazy as _
from django.utils.translation import ugettext
+from django.utils.translation import ugettext_lazy as _
from schedule.models.calendars import Calendar
from schedule.models.rules import Rule
|
Update imports per new isort release
|
py
|
diff --git a/proton-c/bindings/python/proton/handlers.py b/proton-c/bindings/python/proton/handlers.py
index <HASH>..<HASH> 100644
--- a/proton-c/bindings/python/proton/handlers.py
+++ b/proton-c/bindings/python/proton/handlers.py
@@ -379,7 +379,7 @@ class MessagingHandler(Handler, Acking):
def __init__(self, prefetch=10, auto_accept=True, auto_settle=True, peer_close_is_error=False):
self.handlers = []
if prefetch:
- self.handlers.append(FlowController(prefetch))
+ self.handlers.append(CFlowController(prefetch))
self.handlers.append(EndpointStateHandler(peer_close_is_error, self))
self.handlers.append(IncomingMessageHandler(auto_accept, self))
self.handlers.append(OutgoingMessageHandler(auto_settle, self))
|
Use c-based flowcontroller
|
py
|
diff --git a/porespy/generators/__imgen__.py b/porespy/generators/__imgen__.py
index <HASH>..<HASH> 100644
--- a/porespy/generators/__imgen__.py
+++ b/porespy/generators/__imgen__.py
@@ -815,7 +815,9 @@ def _cylinders(shape: List[int],
n = 0
L = min(H, R)
# Disable tqdm if called from another tqdm to prevent double pbars
- tqdm_settings = {**settings.tqdm, **{'disable': not verbose}}
+ tqdm_settings = settings.tqdm.copy()
+ if not settings.tqdm["disable"]:
+ tqdm_settings = {**settings.tqdm, **{'disable': not verbose}}
with tqdm(ncylinders, **tqdm_settings) as pbar:
while n < ncylinders:
# Choose a random starting point in domain
|
Fixed a subtle bug in _cylinders function regarding how tqdm operates
|
py
|
diff --git a/openpnm/algorithms/ReactiveTransport.py b/openpnm/algorithms/ReactiveTransport.py
index <HASH>..<HASH> 100644
--- a/openpnm/algorithms/ReactiveTransport.py
+++ b/openpnm/algorithms/ReactiveTransport.py
@@ -135,7 +135,7 @@ class ReactiveTransport(GenericTransport):
if conductance:
self.settings['conductance'] = conductance
if nlin_max_iter:
- self.settings['max_iter'] = nlin_max_iter
+ self.settings['nlin_max_iter'] = nlin_max_iter
if relaxation_source:
self.settings['relaxation_source'] = relaxation_source
if relaxation_quantity:
|
Fixed bug in ReactiveTransport: max_iter was set inst. of nlin_max_iter
|
py
|
diff --git a/util.py b/util.py
index <HASH>..<HASH> 100644
--- a/util.py
+++ b/util.py
@@ -54,6 +54,8 @@ def get_platform ():
# fall through to standard osname-release-machine representation
elif osname[:4] == "irix": # could be "irix64"!
return "%s-%s" % (osname, release)
+ elif osname[:3] == "aix":
+ return "%s-%s.%s" % (osname, version, release)
elif osname[:6] == "cygwin":
rel_re = re.compile (r'[\d.]+')
m = rel_re.match(release)
|
Patch #<I>: generate a reasonable platform string for AIX
|
py
|
diff --git a/thinc/initializers.py b/thinc/initializers.py
index <HASH>..<HASH> 100644
--- a/thinc/initializers.py
+++ b/thinc/initializers.py
@@ -52,7 +52,7 @@ def configure_uniform_init(
def normal_init(ops: Ops, shape: Shape, *, fan_in: int = -1) -> FloatsXd:
if fan_in == -1:
fan_in = shape[1]
- scale = ops.xp.sqrt(1.0 / fan_in)
+ scale = float(ops.xp.sqrt(1.0 / fan_in))
size = int(ops.xp.prod(ops.xp.asarray(shape)))
inits = numpy.random.normal(scale=scale, size=size).astype("float32")
inits = ops.reshape_f(inits, shape)
|
Cast scale to float in normal_init() for cupy (#<I>)
|
py
|
diff --git a/pods/datasets.py b/pods/datasets.py
index <HASH>..<HASH> 100644
--- a/pods/datasets.py
+++ b/pods/datasets.py
@@ -16,6 +16,7 @@ import re
from .util import download_url
from .config import *
+from io import open
from functools import reduce
ipython_available=True
|
fix #<I> open() did not provide encoding option in py2.x
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100755
--- a/setup.py
+++ b/setup.py
@@ -23,6 +23,18 @@ import codecs
from setuptools import setup, find_packages
+# These python versions of explicitly not supported
+# by sure. This is nostly because of the incompatiblities
+# with unicode strings. If there is an urgent reason why
+# to support it after all or if you have a quick fix
+# please open an issue on GitHub.
+EXPL_NOT_SUPPORTED_VERSIONS = ((3, 0), (3, 1), (3, 2))
+
+if sys.version_info[0:2] in EXPL_NOT_SUPPORTED_VERSIONS:
+ raise SystemExit("sure does explicitly not support the following python versions "
+ "due to big incompatibilities: {0}".format(EXPL_NOT_SUPPORTED_VERSIONS))
+
+
PROJECT_ROOT = os.path.dirname(__file__)
|
Fail installation for python <I>, <I> and <I>
|
py
|
diff --git a/doc/source/conf.py b/doc/source/conf.py
index <HASH>..<HASH> 100644
--- a/doc/source/conf.py
+++ b/doc/source/conf.py
@@ -63,7 +63,7 @@ copyright = u'2014, CMB-group'
from pyemma import _version
version = _version.get_versions()['version']
# The full version, including alpha/beta/rc tags.
-release = _version.get_versions()['full']
+release = version # _version.get_versions()['full']
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
|
[doc] changed release info to version, since fullversion of versioneer is not convenient
|
py
|
diff --git a/tofu/geom/_comp_optics.py b/tofu/geom/_comp_optics.py
index <HASH>..<HASH> 100644
--- a/tofu/geom/_comp_optics.py
+++ b/tofu/geom/_comp_optics.py
@@ -430,13 +430,13 @@ def _calc_spect1d_from_data2d(ldata, lamb, phi,
# Check / format inputs
if spect1d is None:
spect1d = 'mean'
- P if isinstance(ldata, np.ndarray):
+ if isinstance(ldata, np.ndarray):
ldata = [ldata]
lc = [isinstance(spect1d, tuple) and len(spect1d) == 2,
- [MaP (isinstance(spect1d, list)
+ (isinstance(spect1d, list)
and all([isinstance(ss, tuple) and len(ss) == 2
for ss in spect1d])),
- [MaPP spect1d in ['mean', 'cent']]
+ spect1d in ['mean', 'cent']]
if lc[0]:
spect1d = [spect1d]
elif lc[1]:
|
[#<I>] Resolution of syntax error on comp_optics.py
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -17,7 +17,7 @@ tests_require = open(os.path.join(os.path.dirname(__file__), 'test_requirements.
setup(
name='mocket',
- version='1.3.2',
+ version='1.3.3',
author='Andrea de Marco, Giorgio Salluzzo',
author_email='24erre@gmail.com, giorgio.salluzzo@gmail.com',
url='https://github.com/mocketize/python-mocket',
|
Upgrading version for the next release.
|
py
|
diff --git a/rqalpha/data/data_source.py b/rqalpha/data/data_source.py
index <HASH>..<HASH> 100644
--- a/rqalpha/data/data_source.py
+++ b/rqalpha/data/data_source.py
@@ -88,8 +88,9 @@ class LocalDataSource:
if v.type == 'CS' and any(c in v.concept_names.split('|') for c in concepts)]
def get_trading_dates(self, start_date, end_date):
- left, right = self._trading_dates.searchsorted(start_date), self._trading_dates.searchsorted(end_date)
- return self._trading_dates[left:right + 1]
+ left = self._trading_dates.searchsorted(start_date)
+ right = self._trading_dates.searchsorted(end_date, side='right')
+ return self._trading_dates[left:right]
def get_yield_curve(self, start_date, end_date):
duration = (end_date - start_date).days
|
fix: trading dates should include end_date
|
py
|
diff --git a/colour.py b/colour.py
index <HASH>..<HASH> 100644
--- a/colour.py
+++ b/colour.py
@@ -410,7 +410,14 @@ def rgb2hsl(rgb):
>>> rgb2hsl((0.0, 0.0, 1.0)) # doctest: +ELLIPSIS
(0.66..., 1.0, 0.5)
+ Regression check upon very close values in every component of
+ red, green and blue:
+
+ >>> rgb2hsl((0.9999999999999999, 1.0, 0.9999999999999994))
+ (0.0, 0.0, 0.999...)
+
Of course:
+
>>> rgb2hsl((0.0, 2.0, 0.5)) # doctest: +ELLIPSIS
Traceback (most recent call last):
...
@@ -438,7 +445,7 @@ def rgb2hsl(rgb):
l = vsum / 2
- if diff == 0.0: ## This is a gray, no chroma...
+ if diff < FLOAT_ERROR: ## This is a gray, no chroma...
return (0.0, 0.0, l)
##
|
fix: ``rgb2hsl`` would produce invalid hsl triplet when red, blue, green component would be all very close to ``<I>``. (fixes #<I>) Typically, saturation would shoot out of range <I>. That could then lead to exceptions being casts afterwards when trying to reconvert this HSL triplet to RGB values.
|
py
|
diff --git a/xapian_backend.py b/xapian_backend.py
index <HASH>..<HASH> 100755
--- a/xapian_backend.py
+++ b/xapian_backend.py
@@ -895,7 +895,7 @@ class SearchQuery(BaseSearchQuery):
DOCUMENT_CT_TERM_PREFIX,
model._meta.app_label, model._meta.module_name
)
- ), 0
+ ), 0 # Pure boolean sub-query
) for model in self.models
]
query = xapian.Query(
|
Added a comment explaining the pure boolean subquery line
|
py
|
diff --git a/pycanlib/CAN.py b/pycanlib/CAN.py
index <HASH>..<HASH> 100644
--- a/pycanlib/CAN.py
+++ b/pycanlib/CAN.py
@@ -366,7 +366,7 @@ class Bus(object):
_id_type = ID_TYPE_EXTENDED
else:
_id_type = ID_TYPE_STANDARD
- _rx_msg = Message(arbitration_id=_arb_id.value, data=_data_array[:_dlc.value], dlc=int(_dlc.value), id_type=_id_type, timestamp = (float(_timestamp.value) / 1000000))
+ _rx_msg = Message(arbitration_id=_arb_id.value, data=_data_array[:_dlc.value], dlc=int(_dlc.value), id_type=_id_type, timestamp = (float(_timestamp.value - self.__timer_offset) / 1000000))
_rx_msg.flags = int(_flags.value) & canstat.canMSG_MASK
return _rx_msg
else:
|
Timestamps on CAN messages are now relative to the time the bus object was created, not the time the handle was opened
|
py
|
diff --git a/dragonpy/tests/test_base.py b/dragonpy/tests/test_base.py
index <HASH>..<HASH> 100644
--- a/dragonpy/tests/test_base.py
+++ b/dragonpy/tests/test_base.py
@@ -448,7 +448,12 @@ class Test6809_Dragon32_Base(BaseCPUTestCase):
output = []
existing_OK_count = 0
for op_call_count in xrange(max_ops):
- self.cpu.get_and_call_next_op()
+ try:
+ self.cpu.get_and_call_next_op()
+ except Exception as err:
+ log.critical("Execute Error: %s", err)
+ cycles = self.cpu.cycles - old_cycles
+ return op_call_count, cycles, self.periphery.striped_output()
output_lines = self.periphery.output_lines
if output_lines[-1] == "OK":
|
catch exception in unitest while running CPU
|
py
|
diff --git a/quart/exceptions.py b/quart/exceptions.py
index <HASH>..<HASH> 100644
--- a/quart/exceptions.py
+++ b/quart/exceptions.py
@@ -1,5 +1,5 @@
from http import HTTPStatus
-from typing import Iterable, Optional
+from typing import Iterable, NoReturn, Optional
from .wrappers import Response
@@ -118,7 +118,11 @@ if not please click the link
return headers
-def abort(status_code: int, description: Optional[str] = None, name: Optional[str] = None) -> None:
+def abort(
+ status_code: int,
+ description: Optional[str] = None,
+ name: Optional[str] = None,
+) -> NoReturn:
error_class = all_http_exceptions.get(status_code)
if error_class is None:
raise HTTPException(status_code, description or 'Unknown', name or 'Unknown')
|
quart: use NoReturn for abort method As it currently returns None, mypy think functions that expect a return value never return. This method only raises Exceptions, so it the ideal use case for NoReturn which mypy can use to realise it doesn't have to worry about branches that call this.
|
py
|
diff --git a/spyderlib/plugins/editor.py b/spyderlib/plugins/editor.py
index <HASH>..<HASH> 100644
--- a/spyderlib/plugins/editor.py
+++ b/spyderlib/plugins/editor.py
@@ -1406,16 +1406,11 @@ class Editor(PluginWidget):
if not osp.isfile(self.TEMPFILE_PATH):
# Creating temporary file
default = ['# -*- coding: utf-8 -*-',
- '"""',
- self.tr("Spyder Editor"),
- '',
+ '"""', self.tr("Spyder Editor"), '',
self.tr("This temporary script file is located here:"),
self.TEMPFILE_PATH,
- '"""',
- '',
- '',
- ]
- text = "\r\n".join([unicode(qstr) for qstr in default])
+ '"""', '', '']
+ text = os.linesep.join([unicode(qstr) for qstr in default])
encoding.write(unicode(text), self.TEMPFILE_PATH, 'utf-8')
self.load(self.TEMPFILE_PATH)
|
Editor: fixed Issue <I> - Cannot run scripts in interactive console (Linux only)
|
py
|
diff --git a/falafel/__init__.py b/falafel/__init__.py
index <HASH>..<HASH> 100644
--- a/falafel/__init__.py
+++ b/falafel/__init__.py
@@ -1,7 +1,7 @@
import os
__here__ = os.path.dirname(os.path.abspath(__file__))
-VERSION = "1.0.0"
+VERSION = "1.1.0"
NAME = "falafel"
with open(os.path.join(__here__, "RELEASE")) as f:
|
Bumping version to <I>
|
py
|
diff --git a/proxmoxer/core.py b/proxmoxer/core.py
index <HASH>..<HASH> 100644
--- a/proxmoxer/core.py
+++ b/proxmoxer/core.py
@@ -3,7 +3,7 @@ __copyright__ = '(c) Oleg Butovich 2013-2016'
__licence__ = 'MIT'
import os
-import imp
+import importlib
import posixpath
import logging
@@ -103,8 +103,7 @@ class ProxmoxAPI(ProxmoxResourceBase):
def __init__(self, host, backend='https', **kwargs):
#load backend module
- found_module = imp.find_module(backend, [os.path.join(os.path.dirname(__file__), 'backends')])
- self._backend = imp.load_module(backend, *found_module).Backend(host, **kwargs)
+ self._backend = importlib.import_module('.backends.%s' % backend, 'proxmoxer').Backend(host, **kwargs)
self._store = {
"base_url": self._backend.get_base_url(),
|
replaced imp with importlib imp.load_module causes a reload if the module is already imported. This causes issues with multithreaded applications utilising the ProxmoxAPI.
|
py
|
diff --git a/qtpy/__init__.py b/qtpy/__init__.py
index <HASH>..<HASH> 100644
--- a/qtpy/__init__.py
+++ b/qtpy/__init__.py
@@ -73,6 +73,7 @@ API_NAME = {'pyqt5': 'PyQt5', 'pyqt': 'PyQt4', 'pyqt4': 'PyQt4',
is_old_pyqt = is_pyqt46 = False
PYQT5 = True
+PYQT4 = PYSIDE = False
class PythonQtError(Exception):
@@ -106,6 +107,7 @@ if API in PYQT4_API:
from PyQt4.QtCore import PYQT_VERSION_STR as __version__ # analysis:ignore
from PyQt4 import uic # analysis:ignore
PYQT5 = False
+ PYQT4 = True
except ImportError:
API = os.environ['QT_API'] = 'pyside'
API_NAME = 'PySide'
@@ -122,5 +124,6 @@ if API in PYSIDE_API:
try:
from PySide import __version__ # analysis:ignore
PYQT5 = False
+ PYSIDE = True
except ImportError:
raise PythonQtError('No Qt bindings could be found')
|
Add boolean constants for pyqt4 and pyside - pyqt5 already has one
|
py
|
diff --git a/allauth/account/adapter.py b/allauth/account/adapter.py
index <HASH>..<HASH> 100644
--- a/allauth/account/adapter.py
+++ b/allauth/account/adapter.py
@@ -269,6 +269,8 @@ class DefaultAccountAdapter(object):
def ajax_response(self, request, response, redirect_to=None, form=None):
data = {}
+ status = response.status_code
+
if redirect_to:
status = 200
data['location'] = redirect_to
|
Account adapter: ensure `status` is always set
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -20,13 +20,13 @@ install_requires = [
setup(
name='figgypy',
- version='0.2.2',
+ version='0.3.dev',
description='Simple configuration tool. Get config from yaml, json, or xml.',
long_description=readme,
author='Herkermer Sherwood',
author_email='theherk@gmail.com',
url='https://github.com/theherk/figgypy',
- download_url='https://github.com/theherk/figgypy/archive/0.2.2.zip',
+ download_url='https://github.com/theherk/figgypy/archive/0.3.dev.zip',
packages=find_packages(),
platforms=['all'],
license='MIT',
|
Update setup.py to develop version
|
py
|
diff --git a/onecodex/models/collection.py b/onecodex/models/collection.py
index <HASH>..<HASH> 100644
--- a/onecodex/models/collection.py
+++ b/onecodex/models/collection.py
@@ -160,6 +160,22 @@ class SampleCollection(ResourceList, AnalysisMixin):
new_classifications.append(c)
+ # warn if some of the classifications in this collection are not alike
+ job_names_to_ids = {}
+
+ for obj in new_classifications:
+ try:
+ job_names_to_ids[obj.job.name].append(obj.job.id)
+ except KeyError:
+ job_names_to_ids[obj.job.name] = [obj.job.id]
+
+ if len(job_names_to_ids) > 1:
+ warnings.warn(
+ "SampleCollection contains multiple analysis types: {}".format(
+ ", ".join(job_names_to_ids.keys())
+ )
+ )
+
self._cached["classifications"] = new_classifications
@property
|
Warn if mixing analysis types in SampleCollection
|
py
|
diff --git a/desdeo/method/__init__.py b/desdeo/method/__init__.py
index <HASH>..<HASH> 100755
--- a/desdeo/method/__init__.py
+++ b/desdeo/method/__init__.py
@@ -4,7 +4,7 @@
# Copyright (c) 2016 Vesa Ojalehto
-__all__ = ["NAUTILUSv1", "ENAUTILUS", "NNAUTILUS", "print_iteration"]
+__all__ = ["NAUTILUSv1", "ENAUTILUS", "NNAUTILUS"]
from .method import InteractiveMethod
|
Remove missing identifier from __all__: print_iteration
|
py
|
diff --git a/pythonforandroid/logger.py b/pythonforandroid/logger.py
index <HASH>..<HASH> 100644
--- a/pythonforandroid/logger.py
+++ b/pythonforandroid/logger.py
@@ -146,7 +146,9 @@ def shprint(command, *args, **kwargs):
kwargs["_err_to_out"] = True
kwargs["_bg"] = True
is_critical = kwargs.pop('_critical', False)
- tail_n = kwargs.pop('_tail', 0)
+ tail_n = kwargs.pop('_tail', None)
+ if "P4A_FULL_DEBUG" in os.environ:
+ tail_n = 0
filter_in = kwargs.pop('_filter', None)
filter_out = kwargs.pop('_filterout', None)
if len(logger.handlers) > 1:
@@ -193,7 +195,7 @@ def shprint(command, *args, **kwargs):
stdout.write('{}\r{:>{width}}\r'.format(
Err_Style.RESET_ALL, ' ', width=(columns - 1)))
stdout.flush()
- if tail_n or filter_in or filter_out:
+ if tail_n is not None or filter_in or filter_out:
def printtail(out, name, forecolor, tail_n=0,
re_filter_in=None, re_filter_out=None):
lines = out.splitlines()
|
add a way to force dumping the full log of an android command if it fails: P4A_FULL_DEBUG=1
|
py
|
diff --git a/src/python/pants/releases/packages.py b/src/python/pants/releases/packages.py
index <HASH>..<HASH> 100644
--- a/src/python/pants/releases/packages.py
+++ b/src/python/pants/releases/packages.py
@@ -61,19 +61,13 @@ class Package:
j = json.load(f)
return j["info"]["version"]
- def owners(self,
- html_node_type='a',
- html_node_class='sidebar-section__user-gravatar',
- html_node_attr='aria-label'):
+ def owners(self):
url = "https://pypi.org/pypi/{}/{}".format(self.name, self.latest_version())
url_content = urlopen(url).read()
parser = BeautifulSoup(url_content, 'html.parser')
- owners = [
- item.attrs[html_node_attr]
- for item
- in parser.find_all(html_node_type, class_=html_node_class)
- ]
- return {owner.lower() for owner in owners}
+ owners = {span.find('a', recursive=False).get_text().strip().lower()
+ for span in parser.find_all('span', class_='sidebar-section__maintainer')}
+ return owners
def core_packages():
|
Fix the PyPI project owner scraping heuristic. (#<I>) Apparently PyPI changed their HTML structure recently.
|
py
|
diff --git a/udata/api/oauth2.py b/udata/api/oauth2.py
index <HASH>..<HASH> 100644
--- a/udata/api/oauth2.py
+++ b/udata/api/oauth2.py
@@ -109,6 +109,8 @@ class OAuth2Client(ClientMixin, db.Datetimed, db.Document):
return self.secret == client_secret
def check_token_endpoint_auth_method(self, method):
+ if not self.has_client_secret():
+ return method == 'none'
return method in ('client_secret_post', 'client_secret_basic')
def check_response_type(self, response_type):
|
Allow none auth method for public client
|
py
|
diff --git a/tarbell/cli.py b/tarbell/cli.py
index <HASH>..<HASH> 100644
--- a/tarbell/cli.py
+++ b/tarbell/cli.py
@@ -339,8 +339,6 @@ def tarbell_publish(command, args):
puts(colored.green("http://{0}\n".format(bucket_url)))
except KeyboardInterrupt:
show_error("ctrl-c pressed, bailing out!")
- except KeyError:
- show_error("Credentials for bucket {0} not configured\nRun {1} or add credentials to {2}".format(colored.red(bucket_url), colored.yellow("tarbell configure s3"), colored.yellow("~/.tarbell/settings.yaml")))
finally:
_delete_dir(tempdir)
|
don't catch keyerror if creds are wrong when publishing
|
py
|
diff --git a/stacker/hooks/keypair.py b/stacker/hooks/keypair.py
index <HASH>..<HASH> 100644
--- a/stacker/hooks/keypair.py
+++ b/stacker/hooks/keypair.py
@@ -17,7 +17,7 @@ def ensure_keypair_exists(region, namespace, mappings, parameters, **kwargs):
logger.info('keypair: "%s" not found', keypair_name)
create_or_upload = raw_input(
- 'import or create keypair "%s"? (import/create/cancel) ' % (
+ 'import or create keypair "%s"? (import/create/Cancel) ' % (
keypair_name,
),
)
|
Capitalize Cancel since it's the default action
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100755
--- a/setup.py
+++ b/setup.py
@@ -269,6 +269,7 @@ class CheckSDist(sdist_class):
'pandas/index.pyx',
'pandas/algos.pyx',
'pandas/parser.pyx',
+ 'pandas/src/period.pyx',
'pandas/src/sparse.pyx',
'pandas/src/testing.pyx']
|
Add period.pyx to package
|
py
|
diff --git a/python/sparknlp/annotator.py b/python/sparknlp/annotator.py
index <HASH>..<HASH> 100755
--- a/python/sparknlp/annotator.py
+++ b/python/sparknlp/annotator.py
@@ -89,8 +89,8 @@ class Tokenizer(AnnotatorModel):
name = 'Tokenizer'
@keyword_only
- def __init__(self):
- super(Tokenizer, self).__init__(classname="com.johnsnowlabs.nlp.annotators.Tokenizer")
+ def __init__(self, classname="com.johnsnowlabs.nlp.annotators.Tokenizer", java_model=None):
+ super(Tokenizer, self).__init__(classname=classname, java_model=java_model)
self._setDefault(
targetPattern="\\S+",
|
Fixed pretrained constructor for Tokenizer
|
py
|
diff --git a/icekit/publishing/apps.py b/icekit/publishing/apps.py
index <HASH>..<HASH> 100644
--- a/icekit/publishing/apps.py
+++ b/icekit/publishing/apps.py
@@ -97,15 +97,6 @@ class AppConfig(AppConfig):
Q(publication_end_date__isnull=True) |
Q(publication_end_date__gte=now())
)
-
- # Include publishable items that are published themselves, or are
- # draft copies with a published copy.
- items_to_include_pks = [
- i.pk for i in qs
- if i.is_published or getattr(i, 'has_been_published', False)
- ]
- qs = qs.filter(pk__in=items_to_include_pks)
-
return _exchange_for_published(qs)
# Monkey-patch `UrlNodeQuerySet.get_for_path` to add filtering by
|
Remove redundant processing when filtering/exchanging published items Remove a pre-filtering step that used Python code to find and collect only items that have been published, prior to doing the actual exchange to get just the published copies. This pre-filtering is redundant since the `_exchange_for_published()` method ignores unpublished items anyway, and any potential performance improvement of pre-filtering by PK was swamped by the overhead of iterating twice through the candidates.
|
py
|
diff --git a/conftest.py b/conftest.py
index <HASH>..<HASH> 100644
--- a/conftest.py
+++ b/conftest.py
@@ -11,6 +11,7 @@ def no_progress_bars():
if 'pyemma' in sys.modules:
pyemma = sys.modules['pyemma']
pyemma.config.show_progress_bars = False
+ pyemma.config.coordinates_check_output = True
pyemma.config.use_trajectory_lengths_cache = False
yield
|
[conftest] enable output checking during testing
|
py
|
diff --git a/samcli/lib/bootstrap/bootstrap.py b/samcli/lib/bootstrap/bootstrap.py
index <HASH>..<HASH> 100644
--- a/samcli/lib/bootstrap/bootstrap.py
+++ b/samcli/lib/bootstrap/bootstrap.py
@@ -17,6 +17,7 @@ from samcli import __version__
from samcli.cli.global_config import GlobalConfig
from samcli.commands.exceptions import UserException, CredentialsError, RegionError
+
SAM_CLI_STACK_NAME = "aws-sam-cli-managed-default"
LOG = logging.getLogger(__name__)
@@ -142,9 +143,10 @@ def _get_stack_template():
Fn::Join:
- ""
-
- - "arn:aws:s3:::"
- -
- !Ref SamCliSourceBucket
+ - "arn:"
+ - !Ref AWS::Partition
+ - ":s3:::"
+ - !Ref SamCliSourceBucket
- "/*"
Principal:
Service: serverlessrepo.amazonaws.com
|
fix(deploy): Allow all Partitions for S3 Policy on managed stack (#<I>) Solves #<I>, where creating the managed stack in any partition but aws will fail.
|
py
|
diff --git a/dyndnsc/detector/base.py b/dyndnsc/detector/base.py
index <HASH>..<HASH> 100644
--- a/dyndnsc/detector/base.py
+++ b/dyndnsc/detector/base.py
@@ -83,3 +83,7 @@ class IPDetector(Subject):
warnings.warn("getName() is deprecated; use names() "
"instead", DeprecationWarning, stacklevel=2)
return cls.names()[0]
+
+ @staticmethod
+ def names():
+ raise NotImplementedError("Please implement in subclass")
|
"abstract" method for names()
|
py
|
diff --git a/salt/states/network.py b/salt/states/network.py
index <HASH>..<HASH> 100644
--- a/salt/states/network.py
+++ b/salt/states/network.py
@@ -276,8 +276,8 @@ def routes(name, **kwargs):
kwargs['test'] = __opts__['test']
# Build interface routes
try:
- old = __salt__['ip.get_routes'](**kwargs)
- new = __salt__['ip.build_routes'](**kwargs)
+ old = __salt__['ip.get_routes'](name)
+ new = __salt__['ip.build_routes'](name, **kwargs)
if __opts__['test']:
if old == new:
return ret
@@ -306,7 +306,7 @@ def routes(name, **kwargs):
# Apply interface routes
if apply_net_settings:
try:
- __salt__['ip.apply_network_routes'](**kwargs)
+ __salt__['ip.apply_network_settings'](**kwargs)
except AttributeError as error:
ret['result'] = False
ret['comment'] = error.message
|
Sync compatibility between network state and ip module
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100755
--- a/setup.py
+++ b/setup.py
@@ -126,7 +126,7 @@ setup(
'salmon = multiqc.modules.salmon:MultiqcModule',
'samblaster = multiqc.modules.samblaster:MultiqcModule',
'samtools = multiqc.modules.samtools:MultiqcModule',
- 'sargasso = multiqc.modules.sargasso:MultiqcModule'
+ 'sargasso = multiqc.modules.sargasso:MultiqcModule',
'seqyclean = multiqc.modules.seqyclean:MultiqcModule',
'skewer = multiqc.modules.skewer:MultiqcModule',
'slamdunk = multiqc.modules.slamdunk:MultiqcModule',
|
added a comma to setup.py (accidentally removed)
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -50,6 +50,7 @@ EXTRA_DEPENDENCIES = {
'pep8',
'pylint',
'isort',
+ 'wheel',
'bumpversion'],
}
|
Add wheel as a dev dependency.
|
py
|
diff --git a/digsandpaper/elasticsearch_mapping/generate.py b/digsandpaper/elasticsearch_mapping/generate.py
index <HASH>..<HASH> 100644
--- a/digsandpaper/elasticsearch_mapping/generate.py
+++ b/digsandpaper/elasticsearch_mapping/generate.py
@@ -58,7 +58,12 @@ def generate(default_mapping, semantic_types,
data_type = "string"
knowledge_graph[semantic_type] = kg_to_copy
semantic_type_props = {"high_confidence_keys": {"type": data_type,
- "index": "not_analyzed"}}
+ "index": "not_analyzed"},
+ "key_count": {"index": "no",
+ "type": "long"},
+ "provenance_count": {"index": "no",
+ "type": "long"},
+ }
if data_type in elasticsearch_numeric_types:
semantic_type_props["high_confidence_keys"]["ignore_malformed"] = True
root_props[semantic_type] = {"properties": semantic_type_props}
|
Don't index key_count and provenance_count
|
py
|
diff --git a/src/Exscript/workqueue/WorkQueue.py b/src/Exscript/workqueue/WorkQueue.py
index <HASH>..<HASH> 100644
--- a/src/Exscript/workqueue/WorkQueue.py
+++ b/src/Exscript/workqueue/WorkQueue.py
@@ -20,12 +20,14 @@ class WorkQueue(object):
This class implements the asynchronous workqueue and is the main API
for using the workqueue module.
"""
- def __init__(self, **kwargs):
+ def __init__(self, debug = 0, max_threads = 1):
"""
Constructor.
- @keyword debug: The debug level (default is 0)
- @keyword max_threads: Number of concurrent connections (default is 1).
+ @type debug: int
+ @param debug: The debug level.
+ @type max_threads: int
+ @param max_threads: The maximum number of concurrent threads.
"""
self.job_init_event = Event()
self.job_started_event = Event()
@@ -33,8 +35,8 @@ class WorkQueue(object):
self.job_succeeded_event = Event()
self.job_aborted_event = Event()
self.queue_empty_event = Event()
- self.debug = kwargs.get('debug', 0)
- self.max_threads = kwargs.get('max_threads', 1)
+ self.debug = debug
+ self.max_threads = max_threads
self.main_loop = None
self._init()
|
Exscript.workqueue: replace **kwargs by explicit keywords.
|
py
|
diff --git a/umi_tools/umi_methods.py b/umi_tools/umi_methods.py
index <HASH>..<HASH> 100644
--- a/umi_tools/umi_methods.py
+++ b/umi_tools/umi_methods.py
@@ -1432,7 +1432,11 @@ class get_bundles:
# get the umi +/- cell barcode and update dictionaries
if self.options.ignore_umi:
- umi, cell = "", ""
+ if self.options.per_cell:
+ umi, cell = self.barcode_getter(read)
+ umi = ""
+ else:
+ umi, cell = "", ""
else:
umi, cell = self.barcode_getter(read)
|
deals with the situation where the UMI is ignored but the cell barcode is still used
|
py
|
diff --git a/robot-server/tests/integration/fixtures.py b/robot-server/tests/integration/fixtures.py
index <HASH>..<HASH> 100644
--- a/robot-server/tests/integration/fixtures.py
+++ b/robot-server/tests/integration/fixtures.py
@@ -11,8 +11,8 @@ def check_health_response(response):
'name': 'opentrons-dev',
'api_version': __version__,
'fw_version': 'Virtual Smoothie',
- 'board_revision': response.json().get('board_revision'),
- 'logs': ['/logs/serial.log', '/logs/api.log'],
+ 'board_revision': "2.1",
+ 'logs': ['/logs/serial.log', '/logs/api.log', '/logs/server.log'],
'system_version': config.OT_SYSTEM_VERSION,
'minimum_protocol_api_version': minimum_version,
'maximum_protocol_api_version': maximum_version,
@@ -20,8 +20,9 @@ def check_health_response(response):
'apiLog': '/logs/api.log',
'serialLog': '/logs/serial.log',
'apiSpec': '/openapi.json',
- 'systemTime': '/system/time'
+ 'systemTime': '/system/time',
+ 'serverLog': '/logs/server.log'
}
}
- return response.json() == expected
+ assert response.json() == expected
|
fix(robot-server): health endpoint tavern test didn't actually perform validation of response. (#<I>)
|
py
|
diff --git a/airflow/providers/google/firebase/example_dags/example_firestore.py b/airflow/providers/google/firebase/example_dags/example_firestore.py
index <HASH>..<HASH> 100644
--- a/airflow/providers/google/firebase/example_dags/example_firestore.py
+++ b/airflow/providers/google/firebase/example_dags/example_firestore.py
@@ -17,7 +17,7 @@
# under the License.
"""
-Example Airflow DAG that showss interactions with Google Cloud Firestore.
+Example Airflow DAG that shows interactions with Google Cloud Firestore.
Prerequisites
=============
|
fix typo in firebase/example_filestore DAG (#<I>)
|
py
|
diff --git a/homely/install.py b/homely/install.py
index <HASH>..<HASH> 100644
--- a/homely/install.py
+++ b/homely/install.py
@@ -6,7 +6,7 @@ from homely._utils import haveexecutable, isnecessarypath
from homely._ui import note, allowinteractive, allowpull, system
-def installpkg(name=None, wantcmd=None, **methods):
+def installpkg(name, wantcmd=None, **methods):
for key in methods:
assert key in InstallPackage.METHODS
|
homely.install: installpkg() now requires a name
|
py
|
diff --git a/TestWeatherStation.py b/TestWeatherStation.py
index <HASH>..<HASH> 100755
--- a/TestWeatherStation.py
+++ b/TestWeatherStation.py
@@ -77,6 +77,12 @@ def main(argv=None):
for k in sorted(ws.fixed_format):
if 'unk' in k:
print k, ws.get_fixed_block([k])
+ for k in sorted(ws.fixed_format):
+ if 'settings' in k or 'display' in k or 'alarm' in k:
+ bits = ws.get_fixed_block([k])
+ for b in sorted(bits):
+ if 'bit' in b:
+ print k, b, bits[b]
if history_count > 0:
lo_fix = ws.get_lo_fix_block()
print "Recent history", lo_fix
|
Added various status bits to the 'unknown' option printout.
|
py
|
diff --git a/bokeh/tests/test_bokeh_init.py b/bokeh/tests/test_bokeh_init.py
index <HASH>..<HASH> 100644
--- a/bokeh/tests/test_bokeh_init.py
+++ b/bokeh/tests/test_bokeh_init.py
@@ -21,9 +21,10 @@ class TestPrintVersions(unittest.TestCase):
def test_print(self):
import bokeh
- #remove nodename from the system info
+ #remove nodename and processor from the system info
sysinfo = list(platform.uname())
del sysinfo[1]
+ del sysinfo[-1]
bokeh.print_versions()
#check the correct info is present
for info in sysinfo:
|
print info test does not need to check processor
|
py
|
diff --git a/bdbag/fetch/transports/fetch_ftp.py b/bdbag/fetch/transports/fetch_ftp.py
index <HASH>..<HASH> 100644
--- a/bdbag/fetch/transports/fetch_ftp.py
+++ b/bdbag/fetch/transports/fetch_ftp.py
@@ -60,7 +60,7 @@ def get_file(url, output_path, auth_config, credentials=None):
elapsed = datetime.datetime.now() - start
total = os.path.getsize(output_path)
totalSecs = elapsed.total_seconds()
- totalMBs = total / (1024 * 1024)
+ totalMBs = float(total) / float((1024 * 1024))
throughput = str("%.3f MB/second" % (totalMBs / totalSecs if totalSecs > 0 else 0.001))
logger.info('File [%s] transfer successful. %.3f MB transferred at %s. Elapsed time: %s. ' %
(output_path, totalMBs, throughput, elapsed))
|
Force floating point division for Python <I>.
|
py
|
diff --git a/plenum/server/node.py b/plenum/server/node.py
index <HASH>..<HASH> 100644
--- a/plenum/server/node.py
+++ b/plenum/server/node.py
@@ -1521,7 +1521,7 @@ class Node(HasActionQueue, Motor, Propagator, MessageProcessor, HasFileStorage,
if self.viewNo - view_no > 1:
self.discard(msg, "un-acceptable viewNo {}"
.format(view_no), logMethod=logger.warning)
- elif (view_no > self.viewNo):
+ elif (view_no > self.viewNo) or from_current_state:
if view_no not in self.msgsForFutureViews:
self.msgsForFutureViews[view_no] = deque()
logger.debug('{} stashing a message for a future view: {}'.format(self, msg))
|
[INDY-<I>] process case when CURRENT_STATE with viewNo=0
|
py
|
diff --git a/crabpy_pyramid/utils.py b/crabpy_pyramid/utils.py
index <HASH>..<HASH> 100644
--- a/crabpy_pyramid/utils.py
+++ b/crabpy_pyramid/utils.py
@@ -76,7 +76,7 @@ def list_straten_adapter(obj, request):
return {
'id': obj.id,
'label': obj.label,
- 'status': obj.status
+ 'status': obj.status,
}
def list_huisnummers_adapter(obj, request):
@@ -215,7 +215,7 @@ def item_straat_adapter(obj, request):
'namen': obj.namen,
'status': obj.status,
'taal': obj.taal,
- 'metadata': obj.metadata
+ 'metadata': obj.metadata,
}
def item_huisnummer_adapter(obj, request):
|
added forgotten , at end of line
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -1,9 +1,18 @@
-from setuptools import setup, Extension
-from setuptools.command.build_ext import build_ext
+import sys
+
+try:
+ import numpy
+except ImportError:
+ print "Numpy is needed for running and building of PHOEBE"
+ sys.exit(1)
+
+from numpy.distutils.core import setup, Extension
+from numpy.distutils.command.build_ext import build_ext
+
from distutils.version import LooseVersion, StrictVersion
+
import platform
import os
-import numpy
import re
#
@@ -205,7 +214,6 @@ class build_check(build_ext):
build_ext.build_extensions(self)
else:
- import sys
print("Quitting setup.py of phoebe2.")
sys.exit(1)
#
|
Reverting in setup.py to numpy.distutils.
|
py
|
diff --git a/mistletoe/interactive.py b/mistletoe/interactive.py
index <HASH>..<HASH> 100644
--- a/mistletoe/interactive.py
+++ b/mistletoe/interactive.py
@@ -18,14 +18,13 @@ def interactive():
more = True
else:
prompt = '... '
- sys.stdout.write(prompt)
try:
- line = input() + '\n'
+ line = input(prompt) + '\n'
contents.append(line)
except EOFError:
- sys.stdout.write('\n' + mistletoe.markdown(contents))
+ print('\n' + mistletoe.markdown(contents), end='')
more = False
contents.clear()
except KeyboardInterrupt:
- sys.stdout.write('\nExiting.\n')
+ print('\nExiting.')
break
|
☕️ cleaned up prompts and outputs
|
py
|
diff --git a/eqcorrscan/core/match_filter/template.py b/eqcorrscan/core/match_filter/template.py
index <HASH>..<HASH> 100644
--- a/eqcorrscan/core/match_filter/template.py
+++ b/eqcorrscan/core/match_filter/template.py
@@ -520,7 +520,10 @@ class Template(object):
parallel_process=parallel_process, xcorr_func=xcorr_func,
concurrency=concurrency, cores=cores, ignore_length=ignore_length,
overlap=overlap, full_peaks=full_peaks, **kwargs)
- return party[0]
+ family = party[0]
+ # Remove duplicates
+ family.detections = family._uniq().detections
+ return family
def construct(self, method, name, lowcut, highcut, samp_rate, filt_order,
length, prepick, swin="all", process_len=86400,
|
Ensure duplicates are squashed after template.detect
|
py
|
diff --git a/curdling/services/base.py b/curdling/services/base.py
index <HASH>..<HASH> 100644
--- a/curdling/services/base.py
+++ b/curdling/services/base.py
@@ -87,7 +87,8 @@ class Service(SignalEmitter):
result = self(requester, **sender_data) or {}
self._queue.task_done()
except BaseException as exception:
- self.logger.exception('%s.run(from="%s") failed', name, requester)
+ self.logger.exception('%s.run(from="%s", data="%s") failed',
+ name, requester, sender_data)
self.emit('failed', self.name, exception=exception)
else:
self.logger.debug('%s.run(data="%s"): %s', name, sender_data, result)
|
More data for debugging exceptions in Service._worker()
|
py
|
diff --git a/napalm/ios/ios.py b/napalm/ios/ios.py
index <HASH>..<HASH> 100644
--- a/napalm/ios/ios.py
+++ b/napalm/ios/ios.py
@@ -911,7 +911,6 @@ class IOSDriver(NetworkDriver):
entry["remote_chassis_id"] = napalm.base.helpers.mac(
entry["remote_chassis_id"]
)
- print(entry["remote_chassis_id"])
# Older IOS versions don't have 'Local Intf' defined in LLDP detail.
# We need to get them from the non-detailed command
|
Update napalm/ios/ios.py
|
py
|
diff --git a/nhlib/gsim/akkar_bommer_2010.py b/nhlib/gsim/akkar_bommer_2010.py
index <HASH>..<HASH> 100644
--- a/nhlib/gsim/akkar_bommer_2010.py
+++ b/nhlib/gsim/akkar_bommer_2010.py
@@ -14,7 +14,7 @@
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
-Module exports :class:`AkB_2010_AttenRel`.
+Module exports :class:`AkkarBommer2010`.
"""
from __future__ import division
@@ -25,7 +25,7 @@ from nhlib import const
from nhlib.imt import PGA, PGV, SA
-class AkB_2010_AttenRel(GMPE):
+class AkkarBommer2010(GMPE):
"""
Implements GMPE developed by Sinan Akkar and Julian J. Bommer
and published as "Empirical Equations for the Prediction of PGA, PGV,
|
gsim/akkar_bommer_<I>: Renamed GSIM class `AkB_<I>_AttenRel` to `AkkarBommer<I>` to match the naming convention.
|
py
|
diff --git a/salt/states/boto_apigateway.py b/salt/states/boto_apigateway.py
index <HASH>..<HASH> 100644
--- a/salt/states/boto_apigateway.py
+++ b/salt/states/boto_apigateway.py
@@ -384,8 +384,6 @@ def _name_matches(name, matches):
def _object_reducer(o, names=('id', 'name', 'path', 'httpMethod',
'statusCode', 'Created', 'Deleted',
'Updated', 'Flushed', 'Associated', 'Disassociated')):
- log.info('object_reducer')
- log.info(_dict_to_json_pretty(o))
result = {}
if isinstance(o, dict):
for k, v in o.iteritems():
|
removed a few unnecessary debug printouts
|
py
|
diff --git a/jawa/util/utf.py b/jawa/util/utf.py
index <HASH>..<HASH> 100644
--- a/jawa/util/utf.py
+++ b/jawa/util/utf.py
@@ -18,7 +18,8 @@ def decode_modified_utf8(s):
:returns: A unicode representation of the original string.
"""
s = bytearray(s)
- final_string = unicode()
+ buffer = []
+ buffer_append = buffer.append
ix = 0
while ix < len(s):
x = s[ix]
@@ -47,8 +48,8 @@ def decode_modified_utf8(s):
elif x == 0xC0 and s[ix] == 0x80:
ix += 1
x = 0
- final_string += unichr(x)
- return final_string
+ buffer_append(x)
+ return u''.join(map(unichr, buffer))
def encode_modified_utf8(u):
|
Speed up character processing with list buffer * Characters are appended to a list buffer and converted to a unicode string at the end at once. This speeds up string processing when there is a large number of characters to decode and avoids the creation of many small intermediate strings.
|
py
|
diff --git a/SoftLayer/CLI/licenses/cancel.py b/SoftLayer/CLI/licenses/cancel.py
index <HASH>..<HASH> 100644
--- a/SoftLayer/CLI/licenses/cancel.py
+++ b/SoftLayer/CLI/licenses/cancel.py
@@ -16,4 +16,7 @@ def cli(env, key, immediate):
licenses = SoftLayer.LicensesManager(env.client)
- env.fout(licenses.cancel_item(key, immediate))
+ item = licenses.cancel_item(key, immediate)
+
+ if item:
+ env.fout("License key: {} was cancelled.".format(key))
|
Improved successful response to command - slcli licenses cancel
|
py
|
diff --git a/virtualchain/lib/workpool.py b/virtualchain/lib/workpool.py
index <HASH>..<HASH> 100644
--- a/virtualchain/lib/workpool.py
+++ b/virtualchain/lib/workpool.py
@@ -378,7 +378,7 @@ class Workpool(object):
if 'PYTHONPATH' in worker_env and platform.system().lower() == 'darwin':
# Mac OS X-specific work-around
- self.worker_env['PYTHONPATH'] = worker_env['PYTHONPATH'] + "/python"
+ worker_env['PYTHONPATH'] = worker_env['PYTHONPATH'] + "/python"
# start processes
for i in xrange(0, num_workers):
@@ -386,7 +386,7 @@ class Workpool(object):
try:
p = subprocess.Popen([self.worker_bin_path] + worker_argv, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=sys.stderr, env=worker_env, close_fds=True)
except Exception, e:
- log.error("Failed to start worker process '%s'" self.worker_bin_path)
+ log.error("Failed to start worker process '%s'" % self.worker_bin_path)
log.error("Worker environment:\n%s" % json.dumps(self.worker_env, indent=4, sort_keys=True))
raise
|
fix missing comma and ref to worker_env
|
py
|
diff --git a/icekit/publishing/admin.py b/icekit/publishing/admin.py
index <HASH>..<HASH> 100644
--- a/icekit/publishing/admin.py
+++ b/icekit/publishing/admin.py
@@ -257,9 +257,9 @@ class _PublishingHelpersMixin(object):
try:
if isinstance(obj, PublishingModel):
c['publish_url'] = reverse(
- self.publish_reverse(type(obj)), args=(obj.pk, )),
+ self.publish_reverse(type(obj)), args=(obj.pk, ))
c['unpublish_url'] = reverse(
- self.unpublish_reverse(type(obj)), args=(obj.pk, )),
+ self.unpublish_reverse(type(obj)), args=(obj.pk, ))
except NoReverseMatch:
pass
return t.render(c)
|
Fix publish/unpublish links in publishing status column, re #5 #<I>
|
py
|
diff --git a/Mixtape/param_sweep.py b/Mixtape/param_sweep.py
index <HASH>..<HASH> 100644
--- a/Mixtape/param_sweep.py
+++ b/Mixtape/param_sweep.py
@@ -21,7 +21,7 @@ from __future__ import print_function, division, absolute_import
from sklearn import clone
from sklearn.grid_search import ParameterGrid
import numpy as np
-from joblib import Parallel, delayed
+from sklearn.externals.joblib import Parallel, delayed
def _fit_helper(args):
"""
|
changed joblib to point to sklearn.externals.joblib
|
py
|
diff --git a/cheroot/server.py b/cheroot/server.py
index <HASH>..<HASH> 100644
--- a/cheroot/server.py
+++ b/cheroot/server.py
@@ -1038,10 +1038,8 @@ class HTTPRequest:
# Don't use simple_response here, because it emits headers
# we don't want. See
# https://github.com/cherrypy/cherrypy/issues/951
- msg = b'{} {}'.format(
- self.server.protocol.encode('ascii'),
- b'100 Continue\r\n\r\n',
- )
+ msg = '{} {}'.format(self.server.protocol, '100 Continue\r\n\r\n')
+ .encode('ascii')
try:
self.conn.wfile.write(msg)
except socket.error as ex:
|
encode both operands in ascii rather than b''
|
py
|
diff --git a/test/unit/test_collection.py b/test/unit/test_collection.py
index <HASH>..<HASH> 100644
--- a/test/unit/test_collection.py
+++ b/test/unit/test_collection.py
@@ -24,20 +24,20 @@ def PaddedCollection(**kw):
(
'head',
'diff_head.',
- '^diff_head.(?P<index>(?P<padding>0*)\d+?).tail$',
+ '^diff\\_head\\.(?P<index>(?P<padding>0*)\d+?)\\.tail$',
'diff_head.1.tail'
),
(
'tail',
'.diff_tail',
- '^head.(?P<index>(?P<padding>0*)\d+?).diff_tail$',
+ '^head\\.(?P<index>(?P<padding>0*)\d+?)\\.diff\\_tail$',
'head.1.diff_tail'
),
(
'padding',
4,
- '^head.(?P<index>(?P<padding>0*)\d+?).tail$',
+ '^head\\.(?P<index>(?P<padding>0*)\d+?)\\.tail$',
'head.0001.tail'
)
])
|
[#5] Update tests affected by change in expression.
|
py
|
diff --git a/spyder/plugins/ipythonconsole.py b/spyder/plugins/ipythonconsole.py
index <HASH>..<HASH> 100644
--- a/spyder/plugins/ipythonconsole.py
+++ b/spyder/plugins/ipythonconsole.py
@@ -303,8 +303,8 @@ class IPythonConsoleConfigPage(PluginConfigPage):
_("Ask for confirmation before reseting the IPython "
"console namespace"), 'show_reset_namespace_warning',
tip=_("This option lets you hide the warning message shown\n"
- "when reseting the IPython console namespace from the\n"
- "variable explorer 'reset namespace' button."))
+ "when reseting the IPython console namespace from\n"
+ "the Spyder GUI"))
interface_layout = QVBoxLayout()
interface_layout.addWidget(banner_box)
|
Minor change to the option tip Made the description more general since this option mutes both the message when resetting the IPython console from the toolbar button AND from the ctrl-alt-R shortcut.
|
py
|
diff --git a/past/tests/test_translation.py b/past/tests/test_translation.py
index <HASH>..<HASH> 100644
--- a/past/tests/test_translation.py
+++ b/past/tests/test_translation.py
@@ -123,6 +123,8 @@ class TestTranslate(unittest.TestCase):
"""
module = self.write_and_import(code, 'future_standard_library')
self.assertTrue('configparser' in dir(module))
+ from future import standard_library
+ standard_library.remove_hooks()
def test_old_builtin_functions(self):
code = """
|
Fix past standard_library hook test. The test framework here calls remove_hook() from past, but the test installs hooks from future, so they bleed through.
|
py
|
diff --git a/diagram.py b/diagram.py
index <HASH>..<HASH> 100644
--- a/diagram.py
+++ b/diagram.py
@@ -572,13 +572,11 @@ class VerticalBarGraph(BarGraph):
y = y + 1 if y else y
self.screen[(x, y)] = self.blocks[frac]
else:
- for y in range(full):
- yr = self.screen.size.y - y
- self.screen[(x, yr)] = self.blocks[-1]
+ for y in range(self.size.y, self.size.y - full - 1, -1):
+ self.screen[(x, y)] = self.blocks[-1]
if frac:
- y = y + 1 if y else y
- yr = self.screen.size.y - y
- self.screen[(x, yr)] = self.blocks[frac]
+ y = self.size.y - full - 1
+ self.screen[(x, y)] = self.blocks[frac]
@property
def scale(self):
|
Fixed drawing bug in vertical bar graphs
|
py
|
diff --git a/bayes_opt/bayesian_optimization.py b/bayes_opt/bayesian_optimization.py
index <HASH>..<HASH> 100644
--- a/bayes_opt/bayesian_optimization.py
+++ b/bayes_opt/bayesian_optimization.py
@@ -41,7 +41,7 @@ def acq_max(ac, gp, ymax, restarts, bounds):
x_try = numpy.asarray([numpy.random.uniform(x[0], x[1], size=1) for x in bounds]).T
#Find the minimum of minus the acquisition function
- res = minimize(lambda x: -ac(x, gp=gp, ymax=ymax), x_try, bounds=bounds, method='L-BFGS-B')
+ res = minimize(lambda x: -ac(x.reshape(-1, 1), gp=gp, ymax=ymax), x_try, bounds=bounds, method='L-BFGS-B')
#Store it if better than previous minimum(maximum).
if -res.fun >= ei_max:
|
Fixes deprecation warning Fixes deprecation warning by reshaping array before passing to scipy's minimizing object
|
py
|
diff --git a/drivers/python/rethinkdb/asyncio_net/net_asyncio.py b/drivers/python/rethinkdb/asyncio_net/net_asyncio.py
index <HASH>..<HASH> 100644
--- a/drivers/python/rethinkdb/asyncio_net/net_asyncio.py
+++ b/drivers/python/rethinkdb/asyncio_net/net_asyncio.py
@@ -230,7 +230,8 @@ class ConnectionInstance(object):
cursor._error(err_message)
for query, future in iter(self._user_queries.values()):
- future.set_exception(ReqlDriverError(err_message))
+ if not future.done():
+ future.set_exception(ReqlDriverError(err_message))
self._user_queries = { }
self._cursor_cache = { }
|
Fixes #<I> InvalidStateError after cancelling a RethinkDB operation. If a Python coroutine is cancelled while it is suspended inside a RethinkDB query, the query future is cancelled. When the connection is later closed, the `close()` method tries to set the future result, which throws `InvalidStateError` because the future's was already cancelled. This commit adds a guard condition to avoid this.
|
py
|
diff --git a/memorious/logic/http.py b/memorious/logic/http.py
index <HASH>..<HASH> 100644
--- a/memorious/logic/http.py
+++ b/memorious/logic/http.py
@@ -1,12 +1,12 @@
import cgi
import json
-import pytz
import pickle
from lxml import html, etree
from hashlib import sha1
from banal import hash_data, is_mapping
from urlnormalizer import normalize_url
from normality import guess_file_encoding, stringify
+from normality.encoding import normalize_encoding
from requests import Session, Request
from requests.structures import CaseInsensitiveDict
from datetime import datetime, timedelta
@@ -226,9 +226,7 @@ class ContextHttpResponse(object):
if content_type is not None:
content_type, options = cgi.parse_header(content_type)
charset = options.get('charset', '')
- charset = stringify(charset.lower().strip())
- if charset is not None:
- self._encoding = charset
+ self._encoding = normalize_encoding(charset, None)
if self._encoding is None:
with open(self.file_path, 'rb') as fh:
self._encoding = guess_file_encoding(fh)
|
Normalize encodings from HTTP headers.
|
py
|
diff --git a/buildbot_travis/steps/create_steps.py b/buildbot_travis/steps/create_steps.py
index <HASH>..<HASH> 100644
--- a/buildbot_travis/steps/create_steps.py
+++ b/buildbot_travis/steps/create_steps.py
@@ -17,10 +17,11 @@ import re
import textwrap
import traceback
+from twisted.internet import defer
+
from buildbot.process.buildstep import (SUCCESS, BuildStep, LoggingBuildStep,
ShellMixin)
from buildbot.steps import shell
-from twisted.internet import defer
from ..travisyml import TRAVIS_HOOKS
from .base import ConfigurableStep
@@ -221,6 +222,7 @@ class TravisSetupSteps(ConfigurableStep):
condition = None
shell = "bash"
step = None
+ original_command = command
if isinstance(command, dict):
name = command.get("title")
shell = command.get("shell", shell)
@@ -243,6 +245,11 @@ class TravisSetupSteps(ConfigurableStep):
return
if step is None:
+ if command is None:
+ self.addCompleteLog("bbtravis.yml error",
+ "Neither step nor cmd is defined: %r" %(original_command,))
+ return
+
if not isinstance(command, list):
command = [shell, '-c', command]
step = ShellCommand(
|
error out if neither step of cmd is defined
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -37,7 +37,7 @@ install_requires=[
"cheroot",
"beaker",
"packaging",
- "pymongo"
+ "pymongo",
"pylibmc",
"diskcache"
]
|
Add a comma to list of dependencies in setup.py
|
py
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.