diff
stringlengths 139
3.65k
| message
stringlengths 8
627
| diff_languages
stringclasses 1
value |
|---|---|---|
diff --git a/acestream/request.py b/acestream/request.py
index <HASH>..<HASH> 100644
--- a/acestream/request.py
+++ b/acestream/request.py
@@ -16,7 +16,7 @@ class Response(object):
class Request(object):
def __init__(self, schema='http', host='127.0.0.1', port=6878):
- self.base = self._getapi_base(schema, host, port)
+ self.base = self._geturl_base(schema, host, port)
def get(self, url, **params):
apiurl = self._geturl(url, **params)
@@ -87,7 +87,7 @@ class Request(object):
else:
return Response(error=error)
- def _getapi_base(self, schema, host, port):
+ def _geturl_base(self, schema, host, port):
return '{0}://{1}:{2}'.format(schema, host, port)
def _get_response_key(self, response, key):
|
request: rename base url function
|
py
|
diff --git a/paramiko/sftp_client.py b/paramiko/sftp_client.py
index <HASH>..<HASH> 100644
--- a/paramiko/sftp_client.py
+++ b/paramiko/sftp_client.py
@@ -254,7 +254,7 @@ class SFTPClient (BaseSFTP):
# If we've hit the end of our queued requests, reset nums.
nums = list()
- except EOFError as e:
+ except EOFError:
self._request(CMD_CLOSE, handle)
return
|
Add python <I> support except block to listdir_iter
|
py
|
diff --git a/cherrypy/filters/baseurlfilter.py b/cherrypy/filters/baseurlfilter.py
index <HASH>..<HASH> 100644
--- a/cherrypy/filters/baseurlfilter.py
+++ b/cherrypy/filters/baseurlfilter.py
@@ -13,7 +13,14 @@ class BaseUrlFilter(BaseFilter):
return
request = cherrypy.request
- newBaseUrl = cherrypy.config.get('base_url_filter.base_url', 'http://localhost')
+
+ port = str(cherrypy.config.get('server.socket_port', '80'))
+ if port == "80":
+ defaultUrl = 'http://localhost'
+ else:
+ defaultUrl = 'http://localhost:%s' % port
+ newBaseUrl = cherrypy.config.get('base_url_filter.base_url', defaultUrl)
+
if cherrypy.config.get('base_url_filter.use_x_forwarded_host', True):
newBaseUrl = request.headers.get("X-Forwarded-Host", newBaseUrl)
|
Fix for #<I> (BaseURLFilter doesn't use socket_port).
|
py
|
diff --git a/pytorch_pretrained_bert/modeling_gpt2.py b/pytorch_pretrained_bert/modeling_gpt2.py
index <HASH>..<HASH> 100644
--- a/pytorch_pretrained_bert/modeling_gpt2.py
+++ b/pytorch_pretrained_bert/modeling_gpt2.py
@@ -218,7 +218,7 @@ class Attention(nn.Module):
w = w / math.sqrt(v.size(-1))
nd, ns = w.size(-2), w.size(-1)
b = self.bias[:, :, ns-nd:ns, :ns]
- w = w * b - 1e10 * (1 - b)
+ w = w * b - 1e4 * (1 - b)
w = nn.Softmax(dim=-1)(w)
return torch.matmul(w, v)
|
Fix gradient overflow issue during attention mask This fix is in reference to issue #<I>. GPT2 can now be trained in mixed precision, which I've confirmed with testing. I also tested unconditional generation on multiple seeds before and after changing 1e<I> to 1e4 and there was no difference. Please let me know if there is anything else I can do to make this pull request better. Thanks for all your work!
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -123,6 +123,7 @@ CLASSIFIERS = (
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.1',
'Programming Language :: Python :: 3.2',
+ 'Programming Language :: Python :: Implementation :: PyPy',
)
|
Add PyPy to Trove classifier in setup().
|
py
|
diff --git a/test/test_coordinator.py b/test/test_coordinator.py
index <HASH>..<HASH> 100644
--- a/test/test_coordinator.py
+++ b/test/test_coordinator.py
@@ -55,7 +55,7 @@ def test_autocommit_enable_api_version(client, api_version):
def test_protocol_type(coordinator):
- assert coordinator.protocol_type() is 'consumer'
+ assert coordinator.protocol_type() == 'consumer'
def test_group_protocols(coordinator):
|
Use ==/!= to compare str, bytes, and int literals Identity is not the same thing as equality in Python so use ==/!= to compare str, bytes, and int literals. In Python >= <I>, these instances will raise __SyntaxWarnings__ so it is best to fix them now. <URL>
|
py
|
diff --git a/zounds/learn/preprocess.py b/zounds/learn/preprocess.py
index <HASH>..<HASH> 100644
--- a/zounds/learn/preprocess.py
+++ b/zounds/learn/preprocess.py
@@ -417,9 +417,7 @@ class MeanStdNormalization(Preprocessor):
def _forward_func(self):
def x(d, mean=None, std=None):
import numpy as np
- x = (d - mean) / std
- x[np.isinf(x)] = 0
- return x
+ return np.divide(d - mean, std, where=std != 0)
return x
|
Eliminate warning associated with mean std normalization
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -50,8 +50,11 @@ class install(_install):
def run(self):
_install.run(self)
# Installing the plugins requires write permission to plugins directory
- # (Default: /usr/share/munin/plugins) which is default owned by root
- munin_plugin_dir = os.path.join(self.install_data, PYMUNIN_PLUGIN_DIR)
+ # (/usr/share/munin/plugins) which is default owned by root.
+ if os.environ.has_key('MUNIN_PLUGIN_DIR'):
+ munin_plugin_dir = os.environ.get('MUNIN_PLUGIN_DIR')
+ else:
+ munin_plugin_dir = os.path.join(self.install_data, PYMUNIN_PLUGIN_DIR)
print "Munin Plugin Directory: %s" % munin_plugin_dir
if os.path.exists(munin_plugin_dir):
try:
|
Implement env var for forcing plugin dir in install script.
|
py
|
diff --git a/tests/test_comms.py b/tests/test_comms.py
index <HASH>..<HASH> 100644
--- a/tests/test_comms.py
+++ b/tests/test_comms.py
@@ -45,7 +45,12 @@ class CommsTest(unittest.TestCase):
self.assertFalse(cm.delete_channel(temp_channel, 'FAKE'))
self.assertTrue('channel : temp_input : 0 incoming messages' in str(cm))
self.assertTrue(cm.delete_channel(temp_channel, '12345'))
-
+
+ def test_03_message(self):
+ m = mod_comms.Message('sender', 'receiver', 'title', 'details')
+ self.assertTrue(str(m), 'sender attempting to send message to receiver')
+ self.assertTrue(m.send())
+
if __name__ == '__main__':
unittest.main()
|
basic test for message class in comms
|
py
|
diff --git a/pusherclient/connection.py b/pusherclient/connection.py
index <HASH>..<HASH> 100755
--- a/pusherclient/connection.py
+++ b/pusherclient/connection.py
@@ -234,7 +234,7 @@ class Connection(Thread):
self._start_timers()
def _pong_handler(self, data):
- # self. logger.info("Connection: pong from pusher")
+ self.logger.info("Connection: pong from pusher")
self.pong_received = True
def _pusher_error_handler(self, data):
|
Re-enable pong logging.
|
py
|
diff --git a/plip/basic/config.py b/plip/basic/config.py
index <HASH>..<HASH> 100644
--- a/plip/basic/config.py
+++ b/plip/basic/config.py
@@ -1,4 +1,4 @@
-__version__ = '2.1.6'
+__version__ = '2.1.7'
__maintainer__ = 'PharmAI GmbH (2020) - www.pharm.ai - hello@pharm.ai'
import logging
|
Update config.py Changes version to <I>
|
py
|
diff --git a/container/docker/engine.py b/container/docker/engine.py
index <HASH>..<HASH> 100644
--- a/container/docker/engine.py
+++ b/container/docker/engine.py
@@ -280,8 +280,9 @@ class Engine(BaseEngine):
cap_add=['SYS_ADMIN']
)
- if command == 'build':
- run_kwargs['privileged'] = True
+ # Anytime a playbook is executed, /src is bind mounted to a tmpdir, and that seems to
+ # require privileged=True
+ run_kwargs['privileged'] = True
logger.debug('Docker run:', image=image_id, params=run_kwargs)
try:
|
Always run conductor privileged (#<I>)
|
py
|
diff --git a/bika/lims/browser/analysisrequest/workflow.py b/bika/lims/browser/analysisrequest/workflow.py
index <HASH>..<HASH> 100644
--- a/bika/lims/browser/analysisrequest/workflow.py
+++ b/bika/lims/browser/analysisrequest/workflow.py
@@ -38,6 +38,10 @@ class AnalysisRequestWorkflowAction(WorkflowAction):
form = self.request.form
plone.protect.CheckAuthenticator(form)
action, came_from = WorkflowAction._get_form_workflow_action(self)
+ if type(action) in (list, tuple):
+ action = action[0]
+ if type(came_from) in (list, tuple):
+ came_from = came_from[0]
# Call out to the workflow action method
# Use default bika_listing.py/WorkflowAction for other transitions
method_name = 'workflow_action_' + action
|
Protective request handling sentry.bikalabs.com/bikalims/bikalims/group/<I>/
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100755
--- a/setup.py
+++ b/setup.py
@@ -74,7 +74,7 @@ setup(
long_description=find_longdesc(),
url='http://code.google.com/p/selenium/',
src_root=src_root,
- classifiers=['Development Status :: 4 - Beta',
+ classifiers=['Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX',
|
DavidBurns setting development status to stable r<I>
|
py
|
diff --git a/py/selenium/webdriver/remote/remote_connection.py b/py/selenium/webdriver/remote/remote_connection.py
index <HASH>..<HASH> 100644
--- a/py/selenium/webdriver/remote/remote_connection.py
+++ b/py/selenium/webdriver/remote/remote_connection.py
@@ -471,6 +471,10 @@ class RemoteConnection(object):
request.add_header('Accept', 'application/json')
request.add_header('Content-Type', 'application/json;charset=UTF-8')
+ if parsed_url.username:
+ base64string = base64.b64encode('%s:%s' % (parsed_url.username, parsed_url.password))
+ request.add_header("Authorization", "Basic %s" % base64string)
+
if password_manager:
opener = url_request.build_opener(url_request.HTTPRedirectHandler(),
HttpErrorHandler(),
|
fix python HTTPS encoding for python driver (#<I>) * fix python HTTPS encoding for python driver * adding an if statement here to only do this if there is a username and password passed * fixing if statement * missed a colon
|
py
|
diff --git a/irc/bot.py b/irc/bot.py
index <HASH>..<HASH> 100644
--- a/irc/bot.py
+++ b/irc/bot.py
@@ -43,6 +43,11 @@ class ServerSpec(object):
@six.add_metaclass(abc.ABCMeta)
class ReconnectStrategy(object):
+ """
+ An abstract base class describing the interface used by
+ SingleServerIRCBot for handling reconnect following
+ disconnect events.
+ """
@abc.abstractmethod
def run(self, bot):
"""
@@ -53,13 +58,17 @@ class ReconnectStrategy(object):
class ExponentialBackoff(ReconnectStrategy):
- def __init__(self, min_interval=60, max_interval=300):
- if not min_interval or min_interval < 0:
- min_interval = 2 ** 31
- self.min_interval = min_interval
- if not max_interval or max_interval < min_interval:
- max_interval = min_interval
- self.max_interval = max_interval
+ """
+ A ReconnectStrategy implementing exponential backoff
+ with jitter.
+ """
+
+ min_interval = 60
+ max_interval = 300
+
+ def __init__(self, **attrs):
+ vars(self).update(attrs)
+ assert 0 <= self.min_interval <= self.max_interval
self._check_scheduled = False
self.connection_attempts = 1
|
Simplify construction of ExponentialBackoff to perform simple sanity checks on the bounds. No longer accept None or other False values for the min_interval (or reconnection_interval).
|
py
|
diff --git a/payu/experiment.py b/payu/experiment.py
index <HASH>..<HASH> 100644
--- a/payu/experiment.py
+++ b/payu/experiment.py
@@ -21,6 +21,9 @@ import shutil
import subprocess as sp
import sysconfig
+# Extensions
+import yaml
+
# Local
from payu import envmod
from payu.fsops import mkdir_p, make_symlink, read_config
|
Added missing import to experiment.py
|
py
|
diff --git a/torchvision/transforms/transforms.py b/torchvision/transforms/transforms.py
index <HASH>..<HASH> 100644
--- a/torchvision/transforms/transforms.py
+++ b/torchvision/transforms/transforms.py
@@ -553,8 +553,9 @@ class RandomResizedCrop(object):
tuple: params (i, j, h, w) to be passed to ``crop`` for a random
sized crop.
"""
+ area = img.size[0] * img.size[1]
+
for attempt in range(10):
- area = img.size[0] * img.size[1]
target_area = random.uniform(*scale) * area
aspect_ratio = random.uniform(*ratio)
|
move area calculation out of loop (#<I>) * move area calculation out of loop No need to calculate area <I> times when it doesn't change. * lint
|
py
|
diff --git a/pandas/util/testing.py b/pandas/util/testing.py
index <HASH>..<HASH> 100644
--- a/pandas/util/testing.py
+++ b/pandas/util/testing.py
@@ -983,6 +983,7 @@ _network_error_messages = (
# 'timeout: timed out',
# 'socket.timeout: timed out',
'timed out',
+ 'Server Hangup',
'HTTP Error 503: Service Unavailable',
)
|
BLD: more exception message skipping in @network
|
py
|
diff --git a/leonardo/module/web/widget/application/models.py b/leonardo/module/web/widget/application/models.py
index <HASH>..<HASH> 100644
--- a/leonardo/module/web/widget/application/models.py
+++ b/leonardo/module/web/widget/application/models.py
@@ -2,7 +2,7 @@
from __future__ import absolute_import, unicode_literals
-from django.core.urlresolvers import (Resolver404, resolve)
+from django.core.urlresolvers import Resolver404, resolve
from django.db import models
from django.http import HttpResponse
from django.shortcuts import render_to_response
@@ -12,6 +12,7 @@ from django.utils.functional import curry as partial
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext_lazy as _
from feincms.utils import get_object
+from leonardo.forms import Select2Widget
from leonardo.module.web.models import Widget
from leonardo.module.web.widgets.forms import WidgetUpdateForm
@@ -25,6 +26,10 @@ class ApplicationWidget(Widget, ApplicationContent):
icon = "fa fa-plug"
+ widgets = {
+ 'urlconf_path': Select2Widget
+ }
+
@classmethod
def initialize_type(cls, APPLICATIONS):
for i in APPLICATIONS:
|
Add select2 widget to external app.
|
py
|
diff --git a/tests/test_transport_http.py b/tests/test_transport_http.py
index <HASH>..<HASH> 100644
--- a/tests/test_transport_http.py
+++ b/tests/test_transport_http.py
@@ -280,6 +280,8 @@ def test_sending_using_network_sockets(send_method, monkeypatch):
def __init__(self, mocker):
self.__mocker = mocker
self.mock_reset()
+ def close(self):
+ pass
def connect(self, address):
assert address is self.__mocker.host_address
def makefile(self, *args, **kwargs):
@@ -330,6 +332,10 @@ def test_sending_using_network_sockets(send_method, monkeypatch):
# Socket's sendall() method twice - once for sending the HTTP request
# headers and once for its body.
assert mocker.mock_socket.mock_call_count("sendall") in (1, 2)
+ # Python versions prior to 3.4.2 do not explicitly close their HTTP server
+ # connection socket in case of our custom exceptions, e.g. version 3.4.1.
+ # closes it only on OSError exceptions.
+ assert mocker.mock_socket.mock_call_count("close") in (0, 1)
# With older Python versions, e.g. Python 2.4, Socket class does not
# implement the settimeout() method.
assert mocker.mock_socket.mock_call_count("settimeout") in (0, 1)
|
fix test failure with Python <I> New Python <I> urllib implementation now explicitly closes its HTTP server connection socket in case of any exceptions.
|
py
|
diff --git a/hooks/xkcd.py b/hooks/xkcd.py
index <HASH>..<HASH> 100755
--- a/hooks/xkcd.py
+++ b/hooks/xkcd.py
@@ -30,10 +30,12 @@ substitutions={'keyboard': 'leopard', 'witnesses': 'these dudes I know',
@Hook(types=['pubmsg', 'action'], args=['nick', 'type'])
def handle(send, msg, args):
""" Implements several XKCD comics """
+ subbed=False
for key in substitutions.keys:
if key in msg:
msg = msg.replace(key, substitutions[key])
- if not re.search('[\w]-ass ', msg):
+ subbed=True
+ if not re.search('[\w]-ass ', msg) and not subbed:
return
output = re.sub(r'(.*)(?:-ass )(.*)', r'\1 ass-\2', msg)
if args['type'] == 'pubmsg':
|
Also actuwlly do the change even if -ass isn't in the message
|
py
|
diff --git a/salt/state.py b/salt/state.py
index <HASH>..<HASH> 100644
--- a/salt/state.py
+++ b/salt/state.py
@@ -952,9 +952,10 @@ class State(object):
if name not in high:
errors.append(
'Cannot extend ID {0} in "{1}:{2}". It is not part '
- 'of the high state.'.format(name,
- body['__env__'],
- body['__sls__'])
+ 'of the high state.'.format(
+ name,
+ body.get('__env__', 'base'),
+ body.get('__sls__', 'base'))
)
continue
for state, run in body.items():
|
Fix #<I> This adds a fallback to prevent the env from being missed
|
py
|
diff --git a/python/thunder/rdds/fileio/seriesloader.py b/python/thunder/rdds/fileio/seriesloader.py
index <HASH>..<HASH> 100644
--- a/python/thunder/rdds/fileio/seriesloader.py
+++ b/python/thunder/rdds/fileio/seriesloader.py
@@ -35,13 +35,19 @@ class SeriesLoader(object):
@staticmethod
def __normalizeDatafilePattern(datapath, ext):
- if ext and (not datapath.endswith(ext)):
- if datapath.endswith("*"):
- datapath += ext
- elif datapath.endswith("/"):
- datapath += "*" + ext
- else:
- datapath += "/*" + ext
+ if ext:
+ if not ext.startswith("."):
+ # protect (partly) against case where ext happens to *also* be the name
+ # of a directory. If your directory is named "something.bin", well, you
+ # get what you deserve, I guess.
+ ext = "." + ext
+ if not datapath.endswith(ext):
+ if datapath.endswith("*"):
+ datapath += ext
+ elif datapath.endswith("/"):
+ datapath += "*" + ext
+ else:
+ datapath += "/*" + ext
parseresult = urlparse.urlparse(datapath)
if parseresult.scheme:
|
protect against case where directory name happens to match file extension by prepending . to extension in seriesloader
|
py
|
diff --git a/dev_tools/change-log.py b/dev_tools/change-log.py
index <HASH>..<HASH> 100755
--- a/dev_tools/change-log.py
+++ b/dev_tools/change-log.py
@@ -204,7 +204,7 @@ class Fragment(NamedTuple):
}
if self.issues:
meta_data['issues'] = self.issues
- if self.pulls:
+ if self.pull_requests:
meta_data['pull requests'] = self.pull_requests
if self.version != UNRELEASED.semver:
meta_data['version'] = self.version
|
fixed rename missed during refactoring
|
py
|
diff --git a/carto/sql.py b/carto/sql.py
index <HASH>..<HASH> 100644
--- a/carto/sql.py
+++ b/carto/sql.py
@@ -277,12 +277,10 @@ class CopySQLClient(object):
def _compress_chunks(self, chunk_generator, compression_level):
zlib_mode = 16 + zlib.MAX_WBITS
compressor = zlib.compressobj(compression_level, zlib.DEFLATED, zlib_mode)
- compressed_chunk = ""
for chunk in chunk_generator:
- compressed_chunk += compressor.compress(chunk)
+ compressed_chunk = compressor.compress(chunk)
if len(compressed_chunk) > 0:
yield compressed_chunk
- compressed_chunk = ""
yield compressor.flush()
|
Fix stream compression This makes the code python2/3 compatible and simplifies it at the same time.
|
py
|
diff --git a/safe/impact_statistics/function_options_dialog.py b/safe/impact_statistics/function_options_dialog.py
index <HASH>..<HASH> 100644
--- a/safe/impact_statistics/function_options_dialog.py
+++ b/safe/impact_statistics/function_options_dialog.py
@@ -37,7 +37,9 @@ from collections import OrderedDict
from safe_qgis.ui.function_options_dialog_base import (
Ui_FunctionOptionsDialogBase)
-from safe_qgis.safe_interface import safeTr, get_postprocessor_human_name
+from safe.common.utilities import ugettext as safeTr
+from safe.postprocessors.postprocessor_factory import (
+ get_postprocessor_human_name)
from safe_extras.parameters.qt_widgets.parameter_container import (
ParameterContainer)
|
Get rid of safe_interface usage in function_options_dialog.py.
|
py
|
diff --git a/source/rafcon/mvc/controllers/modification_history.py b/source/rafcon/mvc/controllers/modification_history.py
index <HASH>..<HASH> 100755
--- a/source/rafcon/mvc/controllers/modification_history.py
+++ b/source/rafcon/mvc/controllers/modification_history.py
@@ -120,6 +120,8 @@ class ModificationHistoryTreeController(ExtendedController):
def on_reset_button_clicked(self, widget, event=None):
# logger.debug("do reset")
self._selected_sm_model.history.modifications.reset()
+ logger.debug("release storage_lock")
+ self._selected_sm_model.storage_lock.release()
def on_toggle_mode(self, widget, event=None):
if self.view['branch_checkbox'].get_active():
|
add conservative storage_lock release for modification-history reset by controller
|
py
|
diff --git a/tabledata/normalizer.py b/tabledata/normalizer.py
index <HASH>..<HASH> 100644
--- a/tabledata/normalizer.py
+++ b/tabledata/normalizer.py
@@ -51,10 +51,12 @@ class AbstractTableDataNormalizer(TableDataNormalizerInterface):
:rtype: tabledata.TableData
"""
+ normalize_header_list = self._normalize_header_list()
+
return TableData(
self.__normalize_table_name(),
- self._normalize_header_list(),
- self._normalize_row_list(),
+ normalize_header_list,
+ self._normalize_row_list(normalize_header_list),
dp_extractor=self._tabledata.dp_extractor)
@abc.abstractmethod
@@ -127,7 +129,7 @@ class AbstractTableDataNormalizer(TableDataNormalizerInterface):
:rtype: str
"""
- def _normalize_row_list(self):
+ def _normalize_row_list(self, normalize_header_list):
return self._tabledata.row_list
def _validate_header_list(self):
|
Change to pass normalized headers to _normalize_row_list method
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -25,4 +25,7 @@ setup(
]
},
zip_safe=False,
+ install_requires=(
+ 'decorator>=3.0.1'
+ ),
)
|
Added decorator to required packages in setup.py
|
py
|
diff --git a/holoviews/plotting/sheetplots.py b/holoviews/plotting/sheetplots.py
index <HASH>..<HASH> 100644
--- a/holoviews/plotting/sheetplots.py
+++ b/holoviews/plotting/sheetplots.py
@@ -360,10 +360,10 @@ class MatrixGridPlot(GridPlot, OverlayPlot):
border = param.Number(default=10, doc="""
Aggregate border as a fraction of total plot size.""")
- num_ticks = param.Number(default=5)
-
show_frame = param.Boolean(default=False)
+ show_title = param.Boolean(default=True)
+
style_opts = param.List(default=['alpha', 'cmap', 'interpolation',
'visible', 'filterrad', 'origin'],
constant=True, doc="""
|
Enabled titles by default on MatrixGridPlot
|
py
|
diff --git a/cwltool/process.py b/cwltool/process.py
index <HASH>..<HASH> 100644
--- a/cwltool/process.py
+++ b/cwltool/process.py
@@ -689,8 +689,8 @@ def mergedirs(listing):
for e in listing:
if e["basename"] not in ents:
ents[e["basename"]] = e
- elif e["class"] == "Directory":
- ents[e["basename"]]["listing"].extend(e["listing"])
+ elif e["class"] == "Directory" and e.get("listing"):
+ ents[e["basename"]].setdefault("listing", []).extend(e["listing"])
for e in ents.itervalues():
if e["class"] == "Directory" and "listing" in e:
e["listing"] = mergedirs(e["listing"])
|
Fix mergedirs() so it doesn't fail if a Directory object lacks a listing field. (#<I>)
|
py
|
diff --git a/canary/api.py b/canary/api.py
index <HASH>..<HASH> 100644
--- a/canary/api.py
+++ b/canary/api.py
@@ -251,11 +251,10 @@ class Entry:
self._entry_type = data["entry_type"]
self._start_time = data["start_time"]
self._end_time = data["end_time"]
- self._thumbnails_by_device_uuid = {}
+ self._thumbnails = []
for thumbnail_data in data["thumbnails"]:
- thumbnail = Thumbnail(thumbnail_data)
- self._thumbnails_by_device_uuid[thumbnail.device_uuid] = thumbnail
+ self._thumbnails.append(Thumbnail(thumbnail_data))
@property
def entry_id(self):
@@ -283,11 +282,7 @@ class Entry:
@property
def thumbnails(self):
- return self._thumbnails_by_device_uuid.values()
-
- @property
- def thumbnail(self, device_uuid):
- return self._thumbnails_by_device_uuid[device_uuid]
+ return self._thumbnails
class Thumbnail:
|
Refactored Entry and Thumbnail
|
py
|
diff --git a/gcloud/datastore/query.py b/gcloud/datastore/query.py
index <HASH>..<HASH> 100644
--- a/gcloud/datastore/query.py
+++ b/gcloud/datastore/query.py
@@ -308,3 +308,29 @@ class Query(object):
return [Entity.from_protobuf(entity, dataset=self.dataset())
for entity in entity_pbs]
+
+ def order(self, *properties):
+ """Adds a sort order to the query. If more than one sort order is added,
+ they will be applied in the order specified.
+
+ :type properties: string
+ :param properties: String giving the name of the property on which to sort,
+ optionally preceded by a hyphen (-) to specify descending order.
+ Omitting the hyphen specifies ascending order by default.
+
+ :rtype: :class:`Query`
+ :returns: A Query order by properties.
+ """
+ clone = self._clone()
+
+ for p in properties:
+ property_order = clone._pb.order.add()
+
+ if p.startswith('-'):
+ property_order.property.name = p[1:]
+ property_order.direct = property_order.DESCENDING
+ else:
+ property_order.property.name = p
+ property_order.direction = property_order.ASCENDING
+
+ return clone
|
Add 'order' method to query. Manually apply diff from Tagtoo-master: 5f<I>e<I>a<I>fe1cef<I>cdffe<I>b<I>dc9^.. 4a<I>c<I>fda<I>a<I>e<I>bd0c<I>a0bb2fc<I>c5
|
py
|
diff --git a/pymatbridge/pymatbridge.py b/pymatbridge/pymatbridge.py
index <HASH>..<HASH> 100644
--- a/pymatbridge/pymatbridge.py
+++ b/pymatbridge/pymatbridge.py
@@ -95,7 +95,7 @@ def decode_pymat(dct):
if 'ndarray' in dct and 'data' in dct:
value = decode_arr(dct['data'])
shape = dct['shape']
- if type(dct['shape']) is unicode:
+ if type(dct['shape']) is not list:
shape = decode_arr(dct['shape']).astype(int)
return value.reshape(shape, order='F')
elif 'ndarray' in dct and 'imag' in dct:
|
Made json encode/decode reversible with Python 3 support
|
py
|
diff --git a/paramiko/file.py b/paramiko/file.py
index <HASH>..<HASH> 100644
--- a/paramiko/file.py
+++ b/paramiko/file.py
@@ -121,8 +121,8 @@ class BufferedFile (object):
raise IOError('File not open for reading')
if (size is None) or (size < 0):
# go for broke
- result = self.rbuffer
- self.rbuffer = ''
+ result = self._rbuffer
+ self._rbuffer = ''
self._pos += len(result)
while 1:
try:
@@ -191,7 +191,7 @@ class BufferedFile (object):
if (size is not None) and (size >= 0):
if len(line) >= size:
# truncate line and return
- self.rbuffer = line[size:]
+ self._rbuffer = line[size:]
line = line[:size]
self._pos += len(line)
return line
|
[project @ Arch-1:<EMAIL><I>-public%secsh--dev--<I>--patch-<I>] fix rbuffer -> _rbuffer in 3 places i missed fix 3 places where "rbuffer" hadn't been converted to "_rbuffer". thanks to kevin c. dorff for the bug report.
|
py
|
diff --git a/highton/models/person.py b/highton/models/person.py
index <HASH>..<HASH> 100644
--- a/highton/models/person.py
+++ b/highton/models/person.py
@@ -48,11 +48,11 @@ class Person(
OFFSET = 500
def __init__(self, **kwargs):
- self.company_id = fields.IntegerField(name=HightonConstants.COMPANY_ID)
- self.company_name = fields.StringField(name=HightonConstants.COMPANY_NAME)
- self.first_name = fields.StringField(name=HightonConstants.FIRST_NAME)
- self.last_name = fields.StringField(name=HightonConstants.LAST_NAME)
- self.title = fields.StringField(name=HightonConstants.TITLE)
+ self.company_id = fields.IntegerField(name=HightonConstants.COMPANY_ID, required=True)
+ self.company_name = fields.StringField(name=HightonConstants.COMPANY_NAME, required=True)
+ self.first_name = fields.StringField(name=HightonConstants.FIRST_NAME, required=True)
+ self.last_name = fields.StringField(name=HightonConstants.LAST_NAME, required=True)
+ self.title = fields.StringField(name=HightonConstants.TITLE, required=True)
super().__init__(**kwargs)
|
changed person fields to being required, so they are also send in an request even when they are empty/None
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -58,7 +58,7 @@ requirements = [
'pyqode.qt',
'pyqode.core',
'jedi',
- 'pep8',
+ 'pycodestyle',
'pyflakes',
'docutils'
]
|
Remove pep8 dependency since it is now replaced by pycodestyle.
|
py
|
diff --git a/satpy/readers/clavrx.py b/satpy/readers/clavrx.py
index <HASH>..<HASH> 100644
--- a/satpy/readers/clavrx.py
+++ b/satpy/readers/clavrx.py
@@ -443,7 +443,7 @@ class CLAVRXNetCDFFileHandler(_CLAVRxHelper, BaseFileHandler):
l1b_att, inst_att = (str(self.nc.attrs.get('L1B', None)),
str(self.nc.attrs.get('sensor', None)))
- return (inst_att != 'AHI') or (l1b_att is None)
+ return (inst_att != 'AHI' and 'GOES' not in inst_att) or (l1b_att is None)
def get_area_def(self, key):
"""Get the area definition of the data at hand."""
|
Add the check for GOES in the netCDF file reader as well, so it does not get assigned a polar swath
|
py
|
diff --git a/rest_framework_jwt/views.py b/rest_framework_jwt/views.py
index <HASH>..<HASH> 100644
--- a/rest_framework_jwt/views.py
+++ b/rest_framework_jwt/views.py
@@ -25,7 +25,8 @@ class JSONWebTokenAPIView(APIView):
renderer_classes = (renderers.JSONRenderer,)
def post(self, request):
- serializer = self.serializer_class(data=request.DATA)
+ serializer = self.serializer_class(data=request.DATA,
+ context={'request': request})
if serializer.is_valid():
user = serializer.object.get('user') or request.user
|
Some specialized serializers needs the request in context.
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100755
--- a/setup.py
+++ b/setup.py
@@ -35,7 +35,7 @@ setup(
version = '0.1.0',
url = 'https://github.com/gregmuellegger/django-mobile',
license = 'BSD',
- description = '',
+ description = u'Detect mobile browsers and serve different template flavours to them.',
long_description = long_description,
author = UltraMagicString('Gregor Müllegger'),
author_email = 'gregor@muellegger.de',
|
Adding description to setup.py
|
py
|
diff --git a/OpenPNM/Utilities/IO.py b/OpenPNM/Utilities/IO.py
index <HASH>..<HASH> 100644
--- a/OpenPNM/Utilities/IO.py
+++ b/OpenPNM/Utilities/IO.py
@@ -1056,7 +1056,7 @@ class MARock():
net['pore.coords'] = _sp.array([ni, nj, nk]).T
with open(th2np_file, mode='rb') as f:
- Nt = _sp.fromfile(file=f, count=1, dtype='u4').astype(int)
+ [Nt] = _sp.fromfile(file=f, count=1, dtype='u4')
net['throat.area'] = _sp.ones([Nt, ], dtype=int)*(-1)
for i in range(0, Nt):
ID = _sp.fromfile(file=f, count=1, dtype='u4')
|
Wow, the numpy changes were quite severe...still can't believe it's only 2 errors
|
py
|
diff --git a/vecnet/openmalaria/tests/test_scenario.py b/vecnet/openmalaria/tests/test_scenario.py
index <HASH>..<HASH> 100644
--- a/vecnet/openmalaria/tests/test_scenario.py
+++ b/vecnet/openmalaria/tests/test_scenario.py
@@ -40,7 +40,7 @@ class TestGetSchemaVersion(unittest.TestCase):
self.assertRaises(AttributeError, setattr, scenario, "schemaVersion", 31)
# Check if xml code is correct
- self.assertEqual(len(scenario.xml), 20612)
+ self.assertEqual(len(scenario.xml), 20613)
# Changing attributes
scenario.wuID = 1
|
-Fix: Test scenario xml string's length is now equal to <I> characters.
|
py
|
diff --git a/runfiles.py b/runfiles.py
index <HASH>..<HASH> 100644
--- a/runfiles.py
+++ b/runfiles.py
@@ -122,8 +122,8 @@ def main():
files_or_dirs.append(file + ':' + test)
elif test_framework == PY_TEST_FRAMEWORK:
- file = _NormFile(file)
py_test_accept_filter[file] = tests
+ py_test_accept_filter[_NormFile(file)] = tests
files_or_dirs.append(file)
else:
@@ -139,8 +139,8 @@ def main():
files_or_dirs.append(file + ':' + t)
elif test_framework == PY_TEST_FRAMEWORK:
- file = _NormFile(file)
py_test_accept_filter[file] = configuration.tests
+ py_test_accept_filter[_NormFile(file)] = configuration.tests
files_or_dirs.append(file)
else:
|
Don't normalize the entry point for pytest anymore.
|
py
|
diff --git a/telethon/session.py b/telethon/session.py
index <HASH>..<HASH> 100644
--- a/telethon/session.py
+++ b/telethon/session.py
@@ -325,10 +325,10 @@ class Session:
"""Generates a new unique message ID based on the current
time (in ms) since epoch"""
# Refer to mtproto_plain_sender.py for the original method
- now = time.time()
+ now = time.time() + self.time_offset
nanoseconds = int((now - int(now)) * 1e+9)
# "message identifiers are divisible by 4"
- new_msg_id = ((int(now) + self.time_offset) << 32) | (nanoseconds << 2)
+ new_msg_id = (int(now) << 32) | (nanoseconds << 2)
with self._msg_id_lock:
if self._last_msg_id >= new_msg_id:
@@ -343,6 +343,7 @@ class Session:
now = int(time.time())
correct = correct_msg_id >> 32
self.time_offset = correct - now
+ self._last_msg_id = 0
# Entity processing
|
Fix time offset failing if system time was ahead of time While the offset was working, the last message ID was never reset, so it would always pick an higher message ID (safety check), which completely defeated the purpose of negative time offsets. Should close #<I>.
|
py
|
diff --git a/pymc3/tests/test_models_utils.py b/pymc3/tests/test_models_utils.py
index <HASH>..<HASH> 100644
--- a/pymc3/tests/test_models_utils.py
+++ b/pymc3/tests/test_models_utils.py
@@ -30,6 +30,7 @@ class TestUtils:
m, l = utils.any_to_tensor_and_labels(self.data, labels=['x2', 'x3'])
self.assertMatrixLabels(m, l, lt=['x2', 'x3'])
+ @pytest.mark.xfail
def test_dict_input(self):
m, l = utils.any_to_tensor_and_labels(self.data.to_dict('dict'))
self.assertMatrixLabels(m, l, mt=self.data[l].values, lt=l)
|
Disable test_dict_input while upstream issue is resolved.
|
py
|
diff --git a/ores/features_reverted.py b/ores/features_reverted.py
index <HASH>..<HASH> 100644
--- a/ores/features_reverted.py
+++ b/ores/features_reverted.py
@@ -6,12 +6,12 @@ prints a TSV to stdout of the format:
Usage:
features_reverted -h | --help
- features_reverted <features> --api=<url> [--language=<module>]
+ features_reverted [--features=<features>] --api=<url> [--language=<module>]
[--rev-pages=<path>]
Options:
-h --help Prints out this documentation
- <features> Classpath to a list of features to extract
+ --features=<features> Classpath to a list of features to extract
--api=<url> The url of the API to use to extract features
--language=<module> The Classpath of a language module (required if a
feature depends on 'language')
@@ -61,10 +61,10 @@ def main():
rev_pages = read_rev_pages(sys.stdin)
else:
rev_pages = read_rev_pages(open(args['--rev-pages']))
- if '<features>' in args:
- features = import_from_path(args['<features>'])
- else:
+ if args['--features'] is None:
features = import_from_path("ores.features.generic.generic")
+ else:
+ features = import_from_path(args['--features'])
if args['--language'] is not None:
language = import_from_path(args['--language'])
else:
|
modify features_reverted to make features named, if features is not defined, use generic
|
py
|
diff --git a/recipe-server/normandy/settings.py b/recipe-server/normandy/settings.py
index <HASH>..<HASH> 100644
--- a/recipe-server/normandy/settings.py
+++ b/recipe-server/normandy/settings.py
@@ -383,6 +383,7 @@ class Production(Base):
LOGGING_USE_JSON = values.Value(True)
SECURE_HSTS_SECONDS = values.IntegerValue(31536000) # 1 year
DEFAULT_FILE_STORAGE = values.Value('storages.backends.s3boto3.S3Boto3Storage')
+ AWS_S3_FILE_OVERWRITE = False
class ProductionReadOnly(Production):
|
Fix bug <I>: Do not reuse XPI filenames in prod configurations.
|
py
|
diff --git a/pynos/versions/base/interface.py b/pynos/versions/base/interface.py
index <HASH>..<HASH> 100644
--- a/pynos/versions/base/interface.py
+++ b/pynos/versions/base/interface.py
@@ -2696,11 +2696,9 @@ class Interface(object):
xmlns="urn:brocade.com:mgmt:brocade-interface-ext"
)
if last_vlan_id != '':
- last_received_int = ET.SubElement(request_interface,
+ last_received_int_el = ET.SubElement(request_interface,
"last-rcvd-vlan-id")
- last_vlan_id_el = ET.SubElement(last_received_int,
- "vlan-id")
- last_vlan_id_el.text = last_vlan_id
+ last_received_int_el.text = last_vlan_id
return request_interface
@property
|
NOJIRA:- Fix has more request for vlans Change-Id: I<I>fdbb<I>d<I>b<I>ece2fba8b<I>f<I>
|
py
|
diff --git a/workalendar/usa/__init__.py b/workalendar/usa/__init__.py
index <HASH>..<HASH> 100644
--- a/workalendar/usa/__init__.py
+++ b/workalendar/usa/__init__.py
@@ -56,7 +56,6 @@ from .west_virginia import WestVirginia
from .wisconsin import Wisconsin
from .wyoming import Wyoming
-NONE, NEAREST_WEEKDAY, MONDAY = range(3)
__all__ = [
'UnitedStates', # Generic federal calendar
|
unrelated: cleaned up unused constants.
|
py
|
diff --git a/pypsa/linopf.py b/pypsa/linopf.py
index <HASH>..<HASH> 100644
--- a/pypsa/linopf.py
+++ b/pypsa/linopf.py
@@ -235,7 +235,7 @@ def define_committable_generator_constraints(n, sns):
def define_ramp_limit_constraints(n, sns, c):
"""
- Defines ramp limits for generators and links with valid ramplimit.
+ Defines ramp limits for a given component with valid ramplimit.
"""
test_components = ['Generator', 'Link']
|
generalized the generator and links into components
|
py
|
diff --git a/server/workbench.py b/server/workbench.py
index <HASH>..<HASH> 100644
--- a/server/workbench.py
+++ b/server/workbench.py
@@ -48,7 +48,11 @@ class WorkBench():
self.indexer = els_indexer.ELS_Indexer(**{'hosts': els_hosts} if els_hosts else {})
# Neo4j DB
- self.neo_db = neo_db.NeoDB(**{'uri': neo_uri} if neo_uri else {})
+ try:
+ self.neo_db = neo_db.NeoDB(**{'uri': neo_uri} if neo_uri else {})
+ except RuntimeError:
+ print 'Could not connect to Neo4j DB. Is it running? $ neo4j start'
+ self.neo_db = neo_db.NeoDBStub(**{'uri': neo_uri} if neo_uri else {})
# Create Plugin Grabber
self.plugin_meta = {}
|
trying to capture failed Neo4j connection Former-commit-id: f6db4af<I>ad<I>ee5d0eae<I>d5f<I>b<I>
|
py
|
diff --git a/build.py b/build.py
index <HASH>..<HASH> 100755
--- a/build.py
+++ b/build.py
@@ -676,6 +676,9 @@ def host_examples(t):
t.cp(EXAMPLES, examples_dir)
for example in [path.replace('.html', '.js') for path in EXAMPLES]:
split_example_file(example, examples_dir % vars(variables))
+ for example in [path.replace('.html', '.css') for path in EXAMPLES]:
+ if os.path.isfile(example):
+ t.cp(example, examples_dir)
t.cp_r('examples/data', examples_dir + '/data')
t.cp('bin/loader_hosted_examples.js', examples_dir + '/loader.js')
t.cp('build/ol.js', 'build/ol-debug.js', build_dir)
|
Make sure we copy the example css if it exists
|
py
|
diff --git a/OpenPNM/Network/__GenericNetwork__.py b/OpenPNM/Network/__GenericNetwork__.py
index <HASH>..<HASH> 100644
--- a/OpenPNM/Network/__GenericNetwork__.py
+++ b/OpenPNM/Network/__GenericNetwork__.py
@@ -1088,17 +1088,6 @@ class GenericNetwork(Core):
'''
raise NotImplementedError()
- def isolated_pores(self):
- r'''
- This method checks to see whether any pores are isolated from the network and
- returns a boolean mask
- '''
- isolated = [False]*(self.num_pores())
- for pore in self.pores():
- if pore not in self["throat.conns"]:
- isolated[pore]=True
- return isolated
-
def domain_pore_volume(self):
r'''
'''
|
Removed a method called 'isolated_pores' which I didn't realize was even there...the check_network_health method does this
|
py
|
diff --git a/authapi/permissions.py b/authapi/permissions.py
index <HASH>..<HASH> 100644
--- a/authapi/permissions.py
+++ b/authapi/permissions.py
@@ -182,7 +182,8 @@ class UserPermission(BaseComposedPermision):
only_admins_create_admins = Or(
AllowAdmin,
And(
- ObjAttrTrue(lambda r, _: r.data.get('admin').lower() != 'true'),
+ ObjAttrTrue(
+ lambda r, _: r.data.get('admin').lower() != 'true'),
Or(
AllowPermission('user:create'),
AllowPermission('org:admin')
|
flake8 shorten line that's too long
|
py
|
diff --git a/salt/modules/yumpkg.py b/salt/modules/yumpkg.py
index <HASH>..<HASH> 100644
--- a/salt/modules/yumpkg.py
+++ b/salt/modules/yumpkg.py
@@ -341,9 +341,14 @@ def latest_version(*names, **kwargs):
refresh_db(_get_branch_option(**kwargs), repo_arg, exclude_arg)
# Get updates for specified package(s)
- updates = _repoquery_pkginfo(
- '{0} {1} --pkgnarrow=available {2}'
- .format(repo_arg, exclude_arg, ' '.join(names))
+ # Sort by version number (highest to lowest) for loop below
+ updates = sorted(
+ _repoquery_pkginfo(
+ '{0} {1} --pkgnarrow=available {2}'
+ .format(repo_arg, exclude_arg, ' '.join(names))
+ ),
+ key=lambda pkginfo: _LooseVersion(pkginfo.version),
+ reverse=True
)
for name in names:
|
yumpkg.py: sort updates for latest_package to always actually get latest available
|
py
|
diff --git a/ugali/utils/healpix.py b/ugali/utils/healpix.py
index <HASH>..<HASH> 100644
--- a/ugali/utils/healpix.py
+++ b/ugali/utils/healpix.py
@@ -131,7 +131,8 @@ def query_disc(nside, vec, radius, inclusive=False, fact=4, nest=False):
try:
# New-style call (healpy 1.6.3)
return healpy.query_disc(nside, vec, numpy.radians(radius), inclusive, fact, nest)
- except:
+ except Exception as e:
+ print e
# Old-style call (healpy 0.10.2)
return healpy.query_disc(nside, vec, numpy.radians(radius), nest, deg=False)
|
Verbose about query_disc failure
|
py
|
diff --git a/h2o-py/h2o/frame.py b/h2o-py/h2o/frame.py
index <HASH>..<HASH> 100644
--- a/h2o-py/h2o/frame.py
+++ b/h2o-py/h2o/frame.py
@@ -380,7 +380,8 @@ class H2OFrame(H2OFrameWeakRefMixin):
nrows = min(self.nrow,rows)
ncols = min(self.ncol, cols)
colnames = self.names[:ncols]
- head = self[0:nrows,0:ncols]
+ if nrows == self.nrow and ncols == self.ncol: head = self
+ else: head = self[0:nrows,0:ncols]
res = head.as_data_frame(as_pandas) if as_pandas else head.as_data_frame(as_pandas)[1:]
if show: self._do_show(as_pandas,res,colnames)
return res if as_pandas else head
|
no need to do additional subsetting if frame is small enough
|
py
|
diff --git a/django_airavata/apps/api/views.py b/django_airavata/apps/api/views.py
index <HASH>..<HASH> 100644
--- a/django_airavata/apps/api/views.py
+++ b/django_airavata/apps/api/views.py
@@ -15,7 +15,6 @@ from rest_framework.renderers import JSONRenderer
from rest_framework.response import Response
from rest_framework.views import APIView
-from airavata.api.error.ttypes import ProjectNotFoundException
from airavata.model.appcatalog.computeresource.ttypes import (
CloudJobSubmission,
GlobusJobSubmission,
@@ -505,10 +504,13 @@ class FullExperimentViewSet(mixins.RetrieveModelMixin,
log.exception("Failed to load compute resource for {}".format(
compute_resource_id))
compute_resource = None
- try:
+ if self.request.airavata_client.userHasAccess(
+ self.authz_token,
+ experimentModel.projectId,
+ ResourcePermissionType.READ):
project = self.request.airavata_client.getProject(
self.authz_token, experimentModel.projectId)
- except ProjectNotFoundException as pnfe:
+ else:
# User may not have access to project, only experiment
project = None
job_details = self.request.airavata_client.getJobDetails(
|
AIRAVATA-<I> Check if user has READ access to project
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -9,4 +9,5 @@ setup(
url = 'https://github.com/rhgrant10/Groupy', # use the URL to the github repo
keywords = ['api', 'GroupMe'], # arbitrary keywords
classifiers = [],
+ long_description=open('README.rst', 'r').read()
)
|
Added long description to setup.py
|
py
|
diff --git a/gwpy/utils/tests/test_misc.py b/gwpy/utils/tests/test_misc.py
index <HASH>..<HASH> 100644
--- a/gwpy/utils/tests/test_misc.py
+++ b/gwpy/utils/tests/test_misc.py
@@ -19,7 +19,6 @@
"""Tests for :mod:`gwpy.utils.misc`
"""
-import contextlib
import sys
import pytest
@@ -40,7 +39,8 @@ def test_gprint(capsys):
def test_null_context():
ctx = utils_misc.null_context()
- assert isinstance(ctx, contextlib.GeneratorContextManager)
+ with ctx:
+ print('this should work')
@pytest.mark.parametrize('func, value, out', [
@@ -48,4 +48,4 @@ def test_null_context():
(str, 1, '1'),
])
def test_if_not_none(func, value, out):
- assert utils_misc.if_not_none(func, value) is out
+ assert utils_misc.if_not_none(func, value) == out
|
gwpy.utils: fixed buggy tests on python3
|
py
|
diff --git a/post_office/test_settings.py b/post_office/test_settings.py
index <HASH>..<HASH> 100644
--- a/post_office/test_settings.py
+++ b/post_office/test_settings.py
@@ -14,13 +14,13 @@ DATABASES = {
CACHES = {
'default': {
- 'BACKEND': 'django.core.cache.backends.memcached.PyLibMCCache',
- 'LOCATION': '127.0.0.1:11211',
+ 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'TIMEOUT': 36000,
- 'KEY_PREFIX': 'stamps:',
+ 'KEY_PREFIX': 'post-office',
},
'post_office': {
- 'BACKEND': 'django.core.cache.backends.memcached.PyLibMCCache',
- 'LOCATION': '127.0.0.1:11211',
+ 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
+ 'TIMEOUT': 36000,
+ 'KEY_PREFIX': 'post-office',
}
}
|
Use locmem cache for tests.
|
py
|
diff --git a/hydpy/models/evap/evap_model.py b/hydpy/models/evap/evap_model.py
index <HASH>..<HASH> 100644
--- a/hydpy/models/evap/evap_model.py
+++ b/hydpy/models/evap/evap_model.py
@@ -665,8 +665,7 @@ class Calc_PossibleSunshineDuration_V1(modeltools.Method):
class Calc_ClearSkySolarRadiation_V1(modeltools.Method):
"""Calculate the clear sky solar radiation.
- Basic equation (adjusted to |Calc_GlobalRadiation_V1|, `Allen`_ equation 35
- according to Angstrom-Prescott regression):
+ Basic equation (adjusted to |Calc_GlobalRadiation_V1|, `Allen`_ eq. 35):
:math:`ClearSkySolarRadiation =
ExtraterrestrialRadiation \\cdot (AngstromConstant + AngstromFactor)`
|
Fix an unexpected indentation in method `Calc_ClearSkySolarRadiation_V1` of module `evap_model`.
|
py
|
diff --git a/openid/oidUtil.py b/openid/oidUtil.py
index <HASH>..<HASH> 100644
--- a/openid/oidUtil.py
+++ b/openid/oidUtil.py
@@ -26,8 +26,8 @@ except AttributeError:
except ImportError:
raise RuntimeError('No adequate source of randomness found!')
- # Implementation is like random.SystemRandom in Python >= 2.3
- from binascii import hexlify as _hexlify
+ # Implementation mostly copied from random.SystemRandom in Python 2.4
+ _hexlify = binascii.hexlify
BPF = 53 # Number of bits in a float
RECIP_BPF = 2**-BPF
@@ -37,9 +37,16 @@ except AttributeError:
def __init__(self, pool):
self.pool = pool
- def _notImplemented(self, *args): raise NotImplementedError
+ def _stub(self, *args, **kwds):
+ "Stub method. Not used for a system random number generator."
+ return None
+ seed = jumpahead = _stub
- seed = getstate = setstate = jumpahead = _notImplemented
+ def _notimplemented(self, *args, **kwds):
+ "Method should not be called for a system random number generator."
+ raise NotImplementedError(
+ 'System entropy source does not have state.')
+ getstate = setstate = _notimplemented
def random(self):
if self.pool.entropy < self._bytes_per_call:
|
[project @ Make the PyCrypto-based random generator implementation the same as SystemRandom in Python <I>]
|
py
|
diff --git a/pandas/util/testing.py b/pandas/util/testing.py
index <HASH>..<HASH> 100644
--- a/pandas/util/testing.py
+++ b/pandas/util/testing.py
@@ -1097,7 +1097,9 @@ def network(t, url="http://www.google.com",
try:
return t(*args, **kwargs)
except Exception as e:
- errno = getattr(e,'errno',None)
+ errno = getattr(e, 'errno', None)
+ if not errno and hasattr(errno, "reason"):
+ errno = getattr(e.reason, 'errno', None)
if not isinstance(e, error_classes):
raise
|
BLD: try harder to find errno in @network
|
py
|
diff --git a/datajoint/version.py b/datajoint/version.py
index <HASH>..<HASH> 100644
--- a/datajoint/version.py
+++ b/datajoint/version.py
@@ -1 +1 @@
-__version__ = "0.12.0"
+__version__ = "0.12.dev"
|
change version to <I>.dev
|
py
|
diff --git a/tests/linalg_test.py b/tests/linalg_test.py
index <HASH>..<HASH> 100644
--- a/tests/linalg_test.py
+++ b/tests/linalg_test.py
@@ -1368,6 +1368,8 @@ class ScipyLinalgTest(jtu.JaxTestCase):
if i == len(target_norms) - 1:
# TODO(zhangqiaorjc): Reduce tol to default 1e-5 for norm = 3.0.
tol = {
+ # Note that due to inner_product, float and complex tol are coupled.
+ np.dtype(np.float32): 0.02,
np.dtype(np.complex64): 0.02,
np.dtype(np.float64): 1e-4,
np.dtype(np.complex128): 1e-4,
|
Bump tol of float<I> for complex<I> inner product.
|
py
|
diff --git a/flaky/flaky_nose_plugin.py b/flaky/flaky_nose_plugin.py
index <HASH>..<HASH> 100644
--- a/flaky/flaky_nose_plugin.py
+++ b/flaky/flaky_nose_plugin.py
@@ -32,10 +32,7 @@ class FlakyPlugin(_FlakyPlugin, Plugin):
self.add_report_option(parser.add_option)
def configure(self, options, conf):
- """
- Base class override.
-
- """
+ """Base class override."""
super(FlakyPlugin, self).configure(options, conf)
if not self.enabled:
return
|
Fix extra blank line in docstring.
|
py
|
diff --git a/openstack_dashboard/settings.py b/openstack_dashboard/settings.py
index <HASH>..<HASH> 100644
--- a/openstack_dashboard/settings.py
+++ b/openstack_dashboard/settings.py
@@ -172,6 +172,7 @@ INSTALLED_APPS = [
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
AUTHENTICATION_BACKENDS = ('openstack_auth.backend.KeystoneBackend',)
AUTHENTICATION_URLS = ['openstack_auth.urls']
+AUTH_USER_MODEL = 'openstack_auth.User'
MESSAGE_STORAGE = 'django.contrib.messages.storage.fallback.FallbackStorage'
SESSION_ENGINE = 'django.contrib.sessions.backends.signed_cookies'
|
Use the User model from d-o-a This patch moves us to explicitly using the replacement User model from django-openstack-auth. Change-Id: I<I>b9e0af3dd4c<I>f<I>cb9da<I>ef0bcc<I>e Closes-Bug: <I> Depends-On: I<I>cc5d<I>c<I>e<I>f2ad8c<I>
|
py
|
diff --git a/tests/test_cli.py b/tests/test_cli.py
index <HASH>..<HASH> 100644
--- a/tests/test_cli.py
+++ b/tests/test_cli.py
@@ -15,6 +15,9 @@ from keepassx.main import main
from keepassx.main import CONFIG_FILENAME
+PROJECT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+
+
@contextmanager
def without_config_file():
backup_file = None
@@ -34,15 +37,17 @@ class TestCLI(unittest.TestCase):
# password and keyfiles using relative paths.
def setUp(self):
self._original_dir = os.getcwd()
- misc_dir = os.path.join(
- os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
- 'misc')
+ misc_dir = os.path.join(PROJECT_DIR, 'misc')
os.chdir(misc_dir)
def tearDown(self):
os.chdir(self._original_dir)
def kp_run(self, command):
+ if command.startswith('kp '):
+ # Replace it with the full path to <root>/bin
+ command = command.replace(
+ 'kp', os.path.join(PROJECT_DIR, 'bin', 'kp'))
env = os.environ.copy()
env['KP_INSECURE_PASSWORD'] = 'password'
with without_config_file():
|
Replace kp with full absolute path in tests
|
py
|
diff --git a/metric_learn/itml.py b/metric_learn/itml.py
index <HASH>..<HASH> 100644
--- a/metric_learn/itml.py
+++ b/metric_learn/itml.py
@@ -101,7 +101,8 @@ class ITML(BaseMetricLearner):
_lambda[i] -= alpha
beta = alpha/(1 - alpha*wtw)
pos_bhat[i] = 1./((1 / pos_bhat[i]) + (alpha / gamma))
- A += beta * A.dot(np.outer(v,v)).dot(A)
+ Av = A.dot(v)
+ A += beta * np.outer(Av, Av)
# update negatives
vv = self.X[c] - self.X[d]
@@ -111,7 +112,8 @@ class ITML(BaseMetricLearner):
_lambda[i+num_pos] -= alpha
beta = -alpha/(1 + alpha*wtw)
neg_bhat[i] = 1./((1 / neg_bhat[i]) - (alpha / gamma))
- A += beta * A.dot(np.outer(v,v)).dot(A)
+ Av = A.dot(v)
+ A += beta * np.outer(Av, Av)
normsum = np.linalg.norm(_lambda) + np.linalg.norm(lambdaold)
if normsum == 0:
|
replaced A*(v*v)*A with (A*v)*(A*v), which improves the performance from O(n^3) to O(n^2), as described in the original ITML paper. (#<I>) note that A=A' in all stages of the algorithm since A is a Mahalanobis metrix
|
py
|
diff --git a/asyncpg/connection.py b/asyncpg/connection.py
index <HASH>..<HASH> 100644
--- a/asyncpg/connection.py
+++ b/asyncpg/connection.py
@@ -793,7 +793,7 @@ class Connection(metaclass=ConnectionMeta):
async def set_type_codec(self, typename, *,
schema='public', encoder, decoder,
- binary=None, format='text'):
+ format='text'):
"""Set an encoder/decoder pair for the specified data type.
:param typename:
|
Actually remove the `binary` argument from `set_type_codec` signature. Compatibility support was removed in <I>.
|
py
|
diff --git a/hvac/api/secrets_engines/rabbitmq.py b/hvac/api/secrets_engines/rabbitmq.py
index <HASH>..<HASH> 100644
--- a/hvac/api/secrets_engines/rabbitmq.py
+++ b/hvac/api/secrets_engines/rabbitmq.py
@@ -136,4 +136,3 @@ class RabbitMQ(VaultApiBase):
return self._adapter.get(
url=api_path,
).json()
-
|
removed blank line that upset the linter
|
py
|
diff --git a/tests/test_build_jobs/test_views.py b/tests/test_build_jobs/test_views.py
index <HASH>..<HASH> 100644
--- a/tests/test_build_jobs/test_views.py
+++ b/tests/test_build_jobs/test_views.py
@@ -47,7 +47,6 @@ class TestProjectBuildListViewV1(BaseViewTest):
self.factory_class()
self.queryset = self.model_class.objects.filter(project=self.project)
self.queryset = self.queryset.order_by('-updated_at')
- self.other_object = self.factory_class(project=self.other_project)
def test_get(self):
resp = self.auth_client.get(self.url)
|
Remove redundant object creation in build job tests
|
py
|
diff --git a/AegeanTools/BANE.py b/AegeanTools/BANE.py
index <HASH>..<HASH> 100644
--- a/AegeanTools/BANE.py
+++ b/AegeanTools/BANE.py
@@ -415,7 +415,12 @@ def filter_mc_sharemem(filename, step_size, box_size, cores, shape, dobkg=True):
args.append((filename, region, step_size, box_size, shape, dobkg))
pool = multiprocessing.Pool(processes=cores)
- pool.map(_sf2, args)
+ try:
+ pool.map_async(_sf2, args).get(timeout=10000000)
+ except KeyboardInterrupt:
+ logging.error("Caught keyboard interrupt")
+ pool.close()
+ sys.exit(1)
pool.close()
pool.join()
|
cause BANE to shut down gracefully when ctrl+C'd. resolve #<I>
|
py
|
diff --git a/spyder/widgets/variableexplorer/collectionseditor.py b/spyder/widgets/variableexplorer/collectionseditor.py
index <HASH>..<HASH> 100644
--- a/spyder/widgets/variableexplorer/collectionseditor.py
+++ b/spyder/widgets/variableexplorer/collectionseditor.py
@@ -256,7 +256,7 @@ class ReadOnlyCollectionsModel(QAbstractTableModel):
def get_index_from_key(self, key):
try:
return self.createIndex(self.keys.index(key), 0)
- except ValueError:
+ except (RuntimeError, ValueError):
return QModelIndex()
def get_key(self, index):
|
Variable Explorer: Avoid a RuntimeError when modifying nested objects in the wrong order
|
py
|
diff --git a/mhcflurry/data.py b/mhcflurry/data.py
index <HASH>..<HASH> 100644
--- a/mhcflurry/data.py
+++ b/mhcflurry/data.py
@@ -149,7 +149,6 @@ def load_allele_dicts(
species_column_name="species",
allele_column_name="mhc",
peptide_column_name=None,
- peptide_length_column_name="peptide_length",
ic50_column_name="meas",
only_human=False,
min_allele_size=1):
@@ -165,7 +164,6 @@ def load_allele_dicts(
species_column_name=species_column_name,
allele_column_name=allele_column_name,
peptide_column_name=peptide_column_name,
- peptide_length_column_name=peptide_length_column_name,
ic50_column_name=ic50_column_name,
only_human=only_human)
# map peptides to either the raw IC50 or rescaled log IC50 depending
|
got rid of peptide_length_column_name
|
py
|
diff --git a/loky/backend/spawn.py b/loky/backend/spawn.py
index <HASH>..<HASH> 100644
--- a/loky/backend/spawn.py
+++ b/loky/backend/spawn.py
@@ -160,7 +160,7 @@ def prepare(data):
process.ORIGINAL_DIR = data['orig_dir']
if hasattr(mp, 'set_start_method'):
- mp.set_start_method('loky')
+ mp.set_start_method('loky', force=True)
if 'init_main_from_name' in data:
_fixup_main_from_name(data['init_main_from_name'])
|
FIX force loky context in spawn - This permits to avoid conflict due to importing multiprocessing objects and make sure the default context is loky in subprocess
|
py
|
diff --git a/abilian/web/admin/panels/audit.py b/abilian/web/admin/panels/audit.py
index <HASH>..<HASH> 100644
--- a/abilian/web/admin/panels/audit.py
+++ b/abilian/web/admin/panels/audit.py
@@ -363,7 +363,7 @@ class SecurityEntryPresenter(BaseEntryPresenter):
entity_url = url_for(e.object)
entity = render(
- u'{%- if url %}<a href="{{ url_for(entity) }}">{%- endif %}}'
+ u'{%- if url %}<a href="{{ url }}">{%- endif %}'
u'{{ name }}{%- if url %}</a>{%- endif %}',
url=entity_url,
name=entity_name)
|
admin audit panel: fix security entry presentation
|
py
|
diff --git a/indra/literature/pubmed_client.py b/indra/literature/pubmed_client.py
index <HASH>..<HASH> 100644
--- a/indra/literature/pubmed_client.py
+++ b/indra/literature/pubmed_client.py
@@ -243,10 +243,10 @@ def _get_journal_info(medline_citation, get_issns_from_nlm):
# Add publish date from JournalIssue/PubDate in Journal info
pub_date = {}
- journal_pubDate = journal.find('JournalIssue/PubDate')
- pub_date['year'] = _find_elem_text(journal_pubDate, 'Year')
- pub_date['month'] = _find_elem_text(journal_pubDate, 'Month')
- pub_date['day'] = _find_elem_text(journal_pubDate, 'Day')
+ journal_pub_date = journal.find('JournalIssue/PubDate')
+ pub_date['year'] = _find_elem_text(journal_pub_date, 'Year')
+ pub_date['month'] = _find_elem_text(journal_pub_date, 'Month')
+ pub_date['day'] = _find_elem_text(journal_pub_date, 'Day')
# Add the ISSN from the article record
issn_list = []
|
Change camel case to pep8
|
py
|
diff --git a/salt/returners/local_cache.py b/salt/returners/local_cache.py
index <HASH>..<HASH> 100644
--- a/salt/returners/local_cache.py
+++ b/salt/returners/local_cache.py
@@ -305,7 +305,7 @@ def clean_old_jobs():
# No jid file means corrupted cache entry, scrub it
shutil.rmtree(f_path)
else:
- jid_ctime = os.stat(jid_file).st_ctime()
+ jid_ctime = os.stat(jid_file).st_ctime
hours_difference = (cur - jid_ctime) / 3600.0
if hours_difference > __opts__['keep_jobs']:
shutil.rmtree(f_path)
|
st_ctime is an attribute, not a method
|
py
|
diff --git a/hpICsp/__init__.py b/hpICsp/__init__.py
index <HASH>..<HASH> 100644
--- a/hpICsp/__init__.py
+++ b/hpICsp/__init__.py
@@ -43,6 +43,7 @@ __status__ = 'Development'
import sys
+import warnings
PYTHON_VERSION = sys.version_info[:3]
PY2 = (PYTHON_VERSION[0] == 2)
@@ -50,10 +51,14 @@ PY3 = (PYTHON_VERSION[0] == 3)
if PY2:
if PYTHON_VERSION < (2, 7, 9):
- raise Exception('Must use Python 2.7.9 or later')
+ warning_message = 'Running unsupported Python version: %s, unexpected errors might occur.'
+ warning_message += ' Use of Python v2.7.9+ is advised.'
+ warnings.warn(warning_message % '.'.join(map(str, PYTHON_VERSION)), Warning)
elif PY3:
if PYTHON_VERSION < (3, 4):
- raise Exception('Must use Python 3.4 or later')
+ warning_message = 'Running unsupported Python version> %s, unexpected errors might occur.'
+ warning_message += ' Use of Python v3.4+ is advised.'
+ warnings.warn(warning_message % '.'.join(map(str, PYTHON_VERSION)), Warning)
from hpICsp.common import *
from hpICsp.exceptions import *
|
Swapping out error in case of unsupported python version for a warning.
|
py
|
diff --git a/zipline/assets/assets.py b/zipline/assets/assets.py
index <HASH>..<HASH> 100644
--- a/zipline/assets/assets.py
+++ b/zipline/assets/assets.py
@@ -362,7 +362,10 @@ class AssetFinder(object):
"""
Retrieve the Asset for a given sid.
"""
- return self.retrieve_all((sid,), default_none=default_none)[0]
+ try:
+ return self._asset_cache[sid]
+ except KeyError:
+ return self.retrieve_all((sid,), default_none=default_none)[0]
def retrieve_all(self, sids, default_none=False):
"""
|
PERF: Try cache on scalar asset lookups. This provides a <I>% speedup for an algo that calls `data.current` with <I> every minute.
|
py
|
diff --git a/tests/pipeline/test_computable_term.py b/tests/pipeline/test_computable_term.py
index <HASH>..<HASH> 100644
--- a/tests/pipeline/test_computable_term.py
+++ b/tests/pipeline/test_computable_term.py
@@ -115,7 +115,7 @@ class FillNATestCase(BaseUSEquityPipelineTestCase):
str_fillval = "filled"
str_expected = np.where(null_locs, str_fillval, strs)
- ints = np.arange(num_cells).reshape(shape)
+ ints = np.arange(num_cells, dtype='i8').reshape(shape)
ints[null_locs] = -1
int_fillval = 777
int_expected = np.where(null_locs, int_fillval, ints)
|
TEST: Fix int<I> expected type on windows.
|
py
|
diff --git a/bandicoot/individual.py b/bandicoot/individual.py
index <HASH>..<HASH> 100755
--- a/bandicoot/individual.py
+++ b/bandicoot/individual.py
@@ -431,3 +431,16 @@ def number_of_interactions(records, direction=None):
return len([r for r in records])
else:
return len([r for r in records if r.direction == direction])
+
+
+@grouping(interaction='callandtext')
+def burstiness(records):
+ """
+ Computes the burstiness for an individual, defined as the change in the
+ interevent time distribution between two consecutive events.
+ """
+ stats = interevent_time(records)
+ std = stats.std
+ mean = stats.mean
+
+ return float(std - mean) / (std + mean)
|
Added function to compute new burstiness metric
|
py
|
diff --git a/omgorm/utils.py b/omgorm/utils.py
index <HASH>..<HASH> 100644
--- a/omgorm/utils.py
+++ b/omgorm/utils.py
@@ -17,7 +17,7 @@ class ppartial(functools.partial):
merged_args = (next(iter_args) if a is ... else a
for a in self.args)
merged_keywords = {**self.keywords, **keywords}
- return self.func(*merged_args, **merged_keywords)
+ return self.func(*merged_args, *iter_args, **merged_keywords)
class EnsurePep487Meta(type): # pragma: no cover
|
fix ppartial issue with positional args
|
py
|
diff --git a/master/buildbot/schedulers/timed.py b/master/buildbot/schedulers/timed.py
index <HASH>..<HASH> 100644
--- a/master/buildbot/schedulers/timed.py
+++ b/master/buildbot/schedulers/timed.py
@@ -450,7 +450,7 @@ class NightlyTriggerable(NightlyBase):
@defer.inlineCallbacks
def startBuild(self):
if self._lastTrigger is None:
- defer.returnValue(None)
+ return
(sourcestamps, set_props) = self._lastTrigger
self._lastTrigger = None
|
defer.returnValue(None) is the same as "return"
|
py
|
diff --git a/salt/config/schemas/esxi.py b/salt/config/schemas/esxi.py
index <HASH>..<HASH> 100644
--- a/salt/config/schemas/esxi.py
+++ b/salt/config/schemas/esxi.py
@@ -18,6 +18,7 @@ from salt.utils.schema import (DefinitionsSchema,
ComplexSchemaItem,
ArrayItem,
IntegerItem,
+ BooleanItem,
StringItem)
@@ -57,6 +58,22 @@ class DiskGroupsDiskIdSchema(DefinitionsSchema):
required=True)
+class SimpleHostCacheSchema(Schema):
+ '''
+ Simplified Schema of ESXi host cache
+ '''
+
+ title = 'Simple Host Cache Schema'
+ description = 'Simplified schema of the ESXi host cache'
+ enabled = BooleanItem(
+ title='Enabled',
+ required=True)
+ datastore_name = StringItem(title='Datastore Name',
+ required=True)
+ swap_size_MiB = IntegerItem(title='Host cache swap size in MiB',
+ minimum=1)
+
+
class EsxiProxySchema(Schema):
'''
Schema of the esxi proxy input
|
Added SimpleHostCacheSchema JSON schema
|
py
|
diff --git a/hwt/pyUtils/arrayQuery.py b/hwt/pyUtils/arrayQuery.py
index <HASH>..<HASH> 100755
--- a/hwt/pyUtils/arrayQuery.py
+++ b/hwt/pyUtils/arrayQuery.py
@@ -67,7 +67,7 @@ def where(iterable, fn):
yield i
-def last_iter(iterable):
+def iter_with_last(iterable):
"""
Iterate iterable and yield tuples (isLastFlag, item)
"""
|
rename last_iter to iter_with_last
|
py
|
diff --git a/tabula/section.py b/tabula/section.py
index <HASH>..<HASH> 100644
--- a/tabula/section.py
+++ b/tabula/section.py
@@ -193,7 +193,7 @@ class Section(object):
for col in self._get_col_hdrs():
for row in self._get_row_hdrs():
meta = self._get_meta(row, col)
- for mk, mv in meta.iteritems():
+ for mk, mv in sorted(meta.iteritems()):
if mk in self.meta_funcs.iterkeys():
tmp[col][self.irt[row]] = \
self.meta_funcs[mk](tmp[col][self.irt[row]], mv)
|
sort meta keys Sometimes, the sequence to apply formatters is critical. This change allows callers to define the sequence by carefully choosing meta keys (e.g: alphabetically)
|
py
|
diff --git a/pysparkling/tests/test_rdd.py b/pysparkling/tests/test_rdd.py
index <HASH>..<HASH> 100644
--- a/pysparkling/tests/test_rdd.py
+++ b/pysparkling/tests/test_rdd.py
@@ -183,7 +183,7 @@ class RDDTest(unittest.TestCase):
(1, r[1::3]),
(2, r[2::3]))
- grouped_dict = {k: v for k, v in actual_group}
+ grouped_dict = dict(actual_group)
for k, v in expected_group:
self.assertIn(k, grouped_dict)
@@ -216,7 +216,7 @@ class RDDTest(unittest.TestCase):
(1, IncomparableValueAddable(5)),
(2, IncomparableValueAddable(7)))
- grouped_dict = {k: v for k, v in actual_group}
+ grouped_dict = dict(actual_group)
# Keep this order-agnostic
for k, v in expected_group:
@@ -247,7 +247,7 @@ class RDDTest(unittest.TestCase):
(1, IncomparableValueAddable(5)),
(2, IncomparableValueAddable(7)))
- grouped_dict = {k: v for k, v in actual_group}
+ grouped_dict = dict(actual_group)
# Keep this order-agnostic
for k, v in expected_group:
|
Replace unnecessary comprehensions with dict constructor
|
py
|
diff --git a/billy/web/admin/views.py b/billy/web/admin/views.py
index <HASH>..<HASH> 100644
--- a/billy/web/admin/views.py
+++ b/billy/web/admin/views.py
@@ -699,7 +699,6 @@ def duplicate_versions(request, abbr):
{'metadata': meta, 'report': report})
-@login_required
def _bill_spec(meta, limit):
abbr = meta['abbreviation']
@@ -1232,7 +1231,6 @@ def mom_commit(request, abbr):
})
-@login_required
def _mom_attr_diff(merge, leg1, leg2):
mv_info = {
"1": "Root Legislator",
@@ -1259,7 +1257,6 @@ def _mom_attr_diff(merge, leg1, leg2):
return (mv, mv_info)
-@login_required
def _mom_mangle(attr):
args = {"sort_keys": True, "indent": 4, "cls": JSONDateEncoder}
if isinstance(attr, types.ListType):
|
Removing some required lines from non-view fns
|
py
|
diff --git a/ariba/reference_data.py b/ariba/reference_data.py
index <HASH>..<HASH> 100644
--- a/ariba/reference_data.py
+++ b/ariba/reference_data.py
@@ -294,7 +294,7 @@ class ReferenceData:
def sanity_check(self, outprefix):
variants_only_removed = self._remove_bad_genes(self.seq_dicts['variants_only'], outprefix + '.00.check_fasta_variants_only.log')
presence_absence_removed = self._remove_bad_genes(self.seq_dicts['presence_absence'], outprefix + '.00.check_fasta_presence_absence.log')
- self._filter_bad_variant_data(outprefix + '.01.check_variants', variants_only_removed, presence_absence_removed)
+ self._filter_bad_variant_data(outprefix + '.01.check_variants', presence_absence_removed, variants_only_removed)
@classmethod
|
Bug fix reporting removed presabs/variants only wrong way around
|
py
|
diff --git a/jardin/query_builders.py b/jardin/query_builders.py
index <HASH>..<HASH> 100644
--- a/jardin/query_builders.py
+++ b/jardin/query_builders.py
@@ -281,7 +281,7 @@ class WriteQueryBuilder(PGQueryBuilder):
if isinstance(v, pd.Timestamp) and ((self.scheme == 'mysql' \
and sys.version_info[0] == 3) or self.scheme == 'sqlite'):
v = v.strftime('%Y-%m-%d %H:%M:%S')
- if isinstance(v, pd._libs.tslib.NaTType):
+ if isinstance(v, type(pd.NaT)):
v = None
if isinstance(v, float) and np.isnan(v):
v = None
|
pd.NaT instead of pd._libs.tslib.NaTType
|
py
|
diff --git a/fut/core.py b/fut/core.py
index <HASH>..<HASH> 100644
--- a/fut/core.py
+++ b/fut/core.py
@@ -385,9 +385,11 @@ class Core(object):
self.logger.error("{0} (itemId: {1}) NOT MOVED to {2} Pile. REASON: {3}".format(trade_id, item_id, pile, rc['itemData'][0]['reason']))
return rc['itemData'][0]['success']
- def logout(self):
+ def logout(self, save=True):
"""Logs out nicely (like clicking on logout button)."""
self.r.get('https://www.easports.com/fifa/logout')
+ if save:
+ self.saveSession()
return True
def saveSession(self):
|
core: logout automatically saves cookies to file
|
py
|
diff --git a/test/unit/test_caches.py b/test/unit/test_caches.py
index <HASH>..<HASH> 100644
--- a/test/unit/test_caches.py
+++ b/test/unit/test_caches.py
@@ -117,7 +117,7 @@ VALUES['cache_ids'] = {
'components': {
'partition_name': VALUES['basic']['partition_name'],
'algorithm': Algorithm.AES_256_GCM_IV12_TAG16_HKDF_SHA384_ECDSA_P384,
- 'encrypted_data_keys': set([entry['key'] for entry in VALUES['basic']['encrypted_data_keys']]),
+ 'encrypted_data_keys': {entry['key'] for entry in VALUES['basic']['encrypted_data_keys']},
'encryption_context': VALUES['basic']['encryption_context']['full']['raw']
},
'id': b'+rtwUe38CGnczGmYu12iqGWHIyDyZ44EvYQ4S6ACmsgS8VaEpiw0RTGpDk6Z/7YYN/jVHOAcNKDyCNP8EmstFg=='
|
move from a set casting of a list comprehension to a set comprehension
|
py
|
diff --git a/angr/analyses/cfg_accurate.py b/angr/analyses/cfg_accurate.py
index <HASH>..<HASH> 100644
--- a/angr/analyses/cfg_accurate.py
+++ b/angr/analyses/cfg_accurate.py
@@ -1435,8 +1435,13 @@ class CFGAccurate(ForwardAnalysis, CFGBase): # pylint: disable=abstract-metho
current_function_addr = self._simrun_key_current_func_addr(simrun_key)
if current_function_addr is not None:
current_function = self.kb.functions.function(current_function_addr)
- call_site_addr = self._simrun_key_addr(pe.src_simrun_key)
- current_function._call_sites[call_site_addr] = (func.addr, None)
+ if current_function is not None:
+ call_site_addr = self._simrun_key_addr(pe.src_simrun_key)
+ current_function._call_sites[call_site_addr] = (func.addr, None)
+ else:
+ l.warning('An expected function at %#x is not found. Please report it to Fish.',
+ current_function_addr
+ )
for simrun_key in pending_exits_to_remove:
l.debug('Removing a pending exit to 0x%x since the target function 0x%x does not return',
|
CFGAccurate: robustness improvement
|
py
|
diff --git a/girder/api/v1/resource.py b/girder/api/v1/resource.py
index <HASH>..<HASH> 100644
--- a/girder/api/v1/resource.py
+++ b/girder/api/v1/resource.py
@@ -226,7 +226,7 @@ class Resource(BaseResource):
.errorResponse('Path refers to a resource that does not exist.')
.errorResponse('Read access was denied for the resource.', 403))
- @access.cookie
+ @access.cookie(force=True)
@access.public
def download(self, params):
"""
|
Allow cookie authorization to be used for "POST resource/download"
|
py
|
diff --git a/lib/webuser.py b/lib/webuser.py
index <HASH>..<HASH> 100644
--- a/lib/webuser.py
+++ b/lib/webuser.py
@@ -637,6 +637,9 @@ def auth_apache_user_p(user, password):
Apache password data file. Return 0 in case of failure, 1 in case
of success."""
try:
+ from invenio.config import cfg_apache_password_file
+ if not cfg_apache_password_file.startswith("/"):
+ cfg_apache_password_file = tmpdir + "/" + cfg_apache_password_file
pipe_input, pipe_output = os.popen2(["grep", "^" + user + ":", cfg_apache_password_file], 'r')
line = pipe_output.readlines()[0]
password_apache = string.split(string.strip(line),":")[1]
@@ -652,6 +655,10 @@ def auth_apache_user_in_groups(user):
"""Return list of Apache groups to which Apache user belong."""
out = []
try:
+ from invenio.config import cfg_apache_group_file
+ if not cfg_apache_group_file.startswith("/"):
+ cfg_apache_group_file = tmpdir + "/" + cfg_apache_group_file
+ print cfg_apache_group_file
pipe_input,pipe_output = os.popen2(["grep", user, cfg_apache_group_file], 'r')
for line in pipe_output.readlines():
out.append(string.split(string.strip(line),":")[0])
|
If cfg_apache_password_file and cfg_apache_group_file are not absolute pathnames, try to look for them in tmpdir.
|
py
|
diff --git a/curtsies/escseqparse.py b/curtsies/escseqparse.py
index <HASH>..<HASH> 100644
--- a/curtsies/escseqparse.py
+++ b/curtsies/escseqparse.py
@@ -86,7 +86,9 @@ def token_type(info):
"""
"""
if info['command'] == 'm':
- value, = info['numbers']
+ # The default action for ESC[m is to act like ESC[0m
+ # Ref: https://en.wikipedia.org/wiki/ANSI_escape_code#CSI_codes
+ value, = info['numbers'] if len(info['numbers']) else [0]
if value in FG_NUMBER_TO_COLOR: return {'fg':FG_NUMBER_TO_COLOR[value]}
if value in BG_NUMBER_TO_COLOR: return {'bg':BG_NUMBER_TO_COLOR[value]}
if value in NUMBER_TO_STYLE: return {NUMBER_TO_STYLE[value]:True}
|
Added support for the shorthand ESC[m, which is equivalent to ESC[0m
|
py
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.