diff
stringlengths 139
3.65k
| message
stringlengths 8
627
| diff_languages
stringclasses 1
value |
|---|---|---|
diff --git a/mpdlcd/display_fields.py b/mpdlcd/display_fields.py
index <HASH>..<HASH> 100644
--- a/mpdlcd/display_fields.py
+++ b/mpdlcd/display_fields.py
@@ -261,7 +261,7 @@ class SamplingField(Field):
def __init__(self, **kwargs):
width = len(self._format_sampling())
- super(BitRateField, self).__init__(width=width, **kwargs)
+ super(SamplingField, self).__init__(width=width, **kwargs)
def add_to_screen(self, screen, left, top):
return screen.add_string_widget(self.name,
|
Fix typo in display_fields (See #<I>).
|
py
|
diff --git a/pyqode/core/server.py b/pyqode/core/server.py
index <HASH>..<HASH> 100644
--- a/pyqode/core/server.py
+++ b/pyqode/core/server.py
@@ -111,6 +111,7 @@ class Server(object):
self._workQueue = []
if autoCloseOnQuit:
QtGui.QApplication.instance().aboutToQuit.connect(self.close)
+ self._lock = thread.allocate_lock()
def close(self):
"""
@@ -148,7 +149,6 @@ class Server(object):
logger.info("Connected to Code Completion Server on 127.0.0.1:%d" %
port)
self.__running = True
- self._lock = thread.allocate_lock()
thread.start_new_thread(self._threadFct, ())
except OSError:
logger.exception("Failed to connect to Code Completion Server on "
|
Fix issue with _lock not allocated
|
py
|
diff --git a/cablemap.core/cablemap/core/handler.py b/cablemap.core/cablemap/core/handler.py
index <HASH>..<HASH> 100644
--- a/cablemap.core/cablemap/core/handler.py
+++ b/cablemap.core/cablemap/core/handler.py
@@ -55,7 +55,8 @@ Constant for a subject locator reference.
class CableHandler(object):
"""\
- An instance of this class is capable to process one or more cables.
+ Defines an interface for classes which are capable to process one or more
+ cables.
The first event is `start` and the last event must be `end`.
Between these events one or more `start_cable`/`end_cable` events
@@ -241,6 +242,7 @@ class CableHandler(object):
`datetime`
ISO 8601 formatted datetime
"""
+ pass
def handle_release(self, datetime):
"""\
@@ -249,6 +251,7 @@ class CableHandler(object):
`datetime`
ISO 8601 formatted datetime
"""
+ pass
def handle_partial(self, partial):
"""\
@@ -257,3 +260,4 @@ class CableHandler(object):
`partial`
A boolean value.
"""
+ pass
|
Better docs, added obligatory "pass" statements
|
py
|
diff --git a/paranoid/types/numeric.py b/paranoid/types/numeric.py
index <HASH>..<HASH> 100644
--- a/paranoid/types/numeric.py
+++ b/paranoid/types/numeric.py
@@ -41,7 +41,7 @@ class Numeric(Type):
yield np.uint16(1)
yield np.int0(-1)
yield np.float16(3.141)
- yield np.float128(.01)
+ yield np.float64(.01)
class ExtendedReal(Numeric):
"""Any integer or float, excluding nan."""
@@ -65,7 +65,7 @@ class ExtendedReal(Numeric):
yield np.uint16(1)
yield np.int0(-1)
yield np.float16(3.141)
- yield np.float128(.01)
+ yield np.float64(.01)
class Number(Numeric):
"""Any integer or float, excluding inf, -inf, and nan."""
@@ -86,8 +86,8 @@ class Number(Numeric):
yield np.uint16(1)
yield np.int0(-1)
yield np.float16(3.141)
- yield np.float128(.01)
- yield np.float128(10)
+ yield np.float64(.01)
+ yield np.float64(10)
class Integer(Number):
"""Any integer."""
|
Remove float<I> tests, not supported in Windows
|
py
|
diff --git a/about_time/about_time.py b/about_time/about_time.py
index <HASH>..<HASH> 100644
--- a/about_time/about_time.py
+++ b/about_time/about_time.py
@@ -106,3 +106,33 @@ class HandleResult(Handle):
@property
def result(self):
return self.__result
+
+
+class HandleStats(Handle):
+ def __init__(self, timings, count):
+ super().__init__(timings)
+ self.__count = count
+
+ @property
+ def count(self):
+ return self.__count
+
+ @property
+ def throughput(self):
+ return self.count / self.duration
+
+ @property
+ def throughput_human(self):
+ value = self.throughput
+ spec = (
+ (1. / 60, 60 * 60, 60, '/h'),
+ (1., 60, 60, '/m'),
+ )
+ for top, mult, size, unit in spec:
+ if value < top:
+ result = round(value * mult, ndigits=2)
+ if result < size:
+ return '{}{}'.format(result, unit)
+
+ result = round(value, ndigits=2)
+ return '{}{}'.format(result, '/s')
|
feat(*) new counter and throughput class
|
py
|
diff --git a/progressbar/bar.py b/progressbar/bar.py
index <HASH>..<HASH> 100644
--- a/progressbar/bar.py
+++ b/progressbar/bar.py
@@ -45,10 +45,12 @@ class DefaultFdMixin(ProgressBarMixinBase):
def update(self, *args, **kwargs):
ProgressBarMixinBase.update(self, *args, **kwargs)
self.fd.write('\r' + self._format_line())
+ self.fd.flush()
def finish(self, *args, **kwargs): # pragma: no cover
ProgressBarMixinBase.finish(self, *args, **kwargs)
self.fd.write('\n')
+ self.fd.flush()
class ResizableMixin(ProgressBarMixinBase):
|
made sure to flush after writing to stream
|
py
|
diff --git a/knights/loader.py b/knights/loader.py
index <HASH>..<HASH> 100644
--- a/knights/loader.py
+++ b/knights/loader.py
@@ -26,6 +26,8 @@ def load_template(name, paths=None):
src = fin.read()
return kompile(src)
+ except FileNotFoundError:
+ pass
except:
import traceback
traceback.print_exc()
|
Swallow FileNotFound when looking for templates
|
py
|
diff --git a/photutils/conftest.py b/photutils/conftest.py
index <HASH>..<HASH> 100644
--- a/photutils/conftest.py
+++ b/photutils/conftest.py
@@ -2,13 +2,23 @@
# by importing them here in conftest.py they are discoverable by py.test
# no matter how it is invoked within the source tree.
+import os
from astropy.tests.pytest_plugins import *
+# This is to figure out the photutil version, rather than using Astropy's
+from . import version
+
+try:
+ packagename = os.path.basename(os.path.dirname(__file__))
+ TESTED_VERSIONS[packagename] = version.version
+except NameError:
+ pass
+
# Uncomment the following line to treat all DeprecationWarnings as
# exceptions
enable_deprecations_as_exceptions()
# Add scikit-image to test header information
PYTEST_HEADER_MODULES['scikit-image'] = 'skimage'
-PYTEST_HEADER_MODULES['astropy'] = 'astropy'
+PYTEST_HEADER_MODULES['Astropy'] = 'astropy'
del PYTEST_HEADER_MODULES['h5py']
|
Adding photutils version number to testing header
|
py
|
diff --git a/pirate-get.py b/pirate-get.py
index <HASH>..<HASH> 100755
--- a/pirate-get.py
+++ b/pirate-get.py
@@ -264,7 +264,7 @@ def main():
mirrors = ["http://thepiratebay.se"]
try:
opener = urllib2.build_opener(NoRedirection)
- f = opener.open("http://proxybay.info/list.txt")
+ f = opener.open("https://proxybay.info/list.txt")
if f.getcode() != 200:
raise Exception("The pirate bay responded with an error.")
res = f.read()
|
Fixed issue #<I> wrong url for proxy list
|
py
|
diff --git a/nidmresults/objects/modelfitting.py b/nidmresults/objects/modelfitting.py
index <HASH>..<HASH> 100644
--- a/nidmresults/objects/modelfitting.py
+++ b/nidmresults/objects/modelfitting.py
@@ -292,8 +292,8 @@ class Data(NIDMObject):
(NIDM_GRAND_MEAN_SCALING, self.grand_mean_sc),
(NIDM_TARGET_INTENSITY, self.target_intensity)))
- if nidm_version['major'] < 1 or \
- (nidm_version['major'] == 1 and nidm_version['minor'] < 3):
+ if nidm_version['major'] > 1 or \
+ (nidm_version['major'] == 1 and nidm_version['minor'] > 2):
if self.mri_protocol is not None:
self.add_attributes(
[(NIDM_HAS_MRI_PROTOCOL, self.mri_protocol)])
|
has MRI protocol only for ><I>
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -36,7 +36,7 @@ CLASSIFIERS = [
MAJOR = 0
MINOR = 3
-MICRO = 0
+MICRO = 1
VERSION = "{}.{}.{}".format(MAJOR, MINOR, MICRO)
DEV = False
|
Increment version micro to update pip/conda-forge
|
py
|
diff --git a/seleniumbase/fixtures/base_case.py b/seleniumbase/fixtures/base_case.py
index <HASH>..<HASH> 100755
--- a/seleniumbase/fixtures/base_case.py
+++ b/seleniumbase/fixtures/base_case.py
@@ -695,7 +695,7 @@ class BaseCase(unittest.TestCase):
step_value = float(distance) / total_steps
new_position = scroll_position
for y in xrange(int(total_steps)):
- time.sleep(0.0115)
+ time.sleep(0.0114)
new_position += step_value
scroll_script = "window.scrollTo(0, %s);" % new_position
self.execute_script(scroll_script)
@@ -705,7 +705,7 @@ class BaseCase(unittest.TestCase):
time.sleep(0.01)
if distance > 430 or distance < -300:
# Add small recovery time for long-distance slow-scrolling
- time.sleep(0.165)
+ time.sleep(0.162)
# PyTest-Specific Code #
|
Finding optimal timing for slow-scrolling
|
py
|
diff --git a/fuse.py b/fuse.py
index <HASH>..<HASH> 100644
--- a/fuse.py
+++ b/fuse.py
@@ -19,6 +19,7 @@ except:
from string import join
import sys
+import os
from errno import *
from os import environ
import re
@@ -326,7 +327,7 @@ class FuseOptParse(SubbedOptParse):
def parse_args(self, args=None, values=None):
o, a = SubbedOptParse.parse_args(self, args, values)
if a and self.fetch_mp:
- self.fuse_args.mountpoint = a.pop()
+ self.fuse_args.mountpoint = os.path.realpath(a.pop())
return o, a
def add_option(self, *opts, **attrs):
|
absolutify the mount path when "fetch_mp" is set
|
py
|
diff --git a/dvc/repo/plots/__init__.py b/dvc/repo/plots/__init__.py
index <HASH>..<HASH> 100644
--- a/dvc/repo/plots/__init__.py
+++ b/dvc/repo/plots/__init__.py
@@ -18,13 +18,17 @@ class Plots:
(out,) = self.repo.find_outs_by_path(path)
# This out will become a plot unless it is one already
- if not out.plot:
+ if not isinstance(out.plot, dict):
out.plot = {}
for field in unset or ():
out.plot.pop(field, None)
out.plot.update(props or {})
+ # Empty dict will move it to non-plots
+ if not out.plot:
+ out.plot = True
+
out.verify_metric()
dvcfile = Dvcfile(self.repo, out.stage.path)
|
Fix modifying plot (#<I>) - no error when modifying plot with no props yet - do not turn into non-plot if all props are unset
|
py
|
diff --git a/src/saml2/attributemaps/saml_uri.py b/src/saml2/attributemaps/saml_uri.py
index <HASH>..<HASH> 100644
--- a/src/saml2/attributemaps/saml_uri.py
+++ b/src/saml2/attributemaps/saml_uri.py
@@ -7,7 +7,7 @@ NETSCAPE_LDAP = "urn:oid:2.16.840.1.113730.3.1."
UCL_DIR_PILOT = 'urn:oid:0.9.2342.19200300.100.1.'
PKCS_9 = "urn:oid:1.2.840.113549.1.9.1."
UMICH = "urn:oid:1.3.6.1.4.1.250.1.57."
-SCHAC = "urn:oid:1.3.6.1.4.1.25178.2."
+SCHAC = "urn:oid:1.3.6.1.4.1.25178.1.2."
#urn:oid:1.3.6.1.4.1.1466.115.121.1.26
|
Missed one number in the schac attribute base urn.
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -56,7 +56,7 @@ dev_requirements = [
setup(
- name='ipa',
+ name='python3-ipa',
version='0.4.0',
description="Package for automated testing of cloud images.",
long_description=readme,
|
Fix name in setup to match package name.
|
py
|
diff --git a/src/foremast/configs/prepare_configs.py b/src/foremast/configs/prepare_configs.py
index <HASH>..<HASH> 100644
--- a/src/foremast/configs/prepare_configs.py
+++ b/src/foremast/configs/prepare_configs.py
@@ -23,10 +23,8 @@ from base64 import b64decode
import gitlab
-from ..consts import GIT_URL, GITLAB_TOKEN
+from ..consts import ENVS, GIT_URL, GITLAB_TOKEN
-ENVS = ('build', 'dev', 'stage', 'prod', 'prodp', 'stagepci', 'prods',
- 'stagesox')
LOG = logging.getLogger(__name__)
JSON_ERROR_MSG = '"{0}" appears to be invalid json. Please validate it with http://jsonlint.com.'
|
discovered duplicate hardcoded environments. Removed this
|
py
|
diff --git a/tests/test_parse_metadata.py b/tests/test_parse_metadata.py
index <HASH>..<HASH> 100644
--- a/tests/test_parse_metadata.py
+++ b/tests/test_parse_metadata.py
@@ -1,6 +1,7 @@
# -*- coding: utf-8 -*-
import logging
+import os
import unittest
from pathlib import Path
@@ -89,6 +90,12 @@ class TestParseMetadata(unittest.TestCase):
self.assertIn(HGNC_KEYWORD, self.parser.namespace_dict)
self.assertIn('TextLocation', self.parser.annotations_dict)
+ @unittest.skipUnless('PYBEL_BASE' in os.environ, "Need local files to test local files")
+ def test_squiggly_filepath(self):
+ line = 'DEFINE NAMESPACE {} AS URL "~/dev/pybel/tests/belns/hgnc-human-genes.belns"'.format(HGNC_KEYWORD)
+ self.parser.parseString(line)
+ help_check_hgnc(self, self.parser.namespace_dict)
+
def test_document_metadata_exception(self):
s = 'SET DOCUMENT InvalidKey = "nope"'
with self.assertRaises(InvalidMetadataException):
|
Added testing for local file parsing Closes #<I>
|
py
|
diff --git a/faradayio_cli/faradayio_cli.py b/faradayio_cli/faradayio_cli.py
index <HASH>..<HASH> 100644
--- a/faradayio_cli/faradayio_cli.py
+++ b/faradayio_cli/faradayio_cli.py
@@ -45,7 +45,7 @@ def setupSerialPort(loopback, port):
the command line option. If loopback is True then it overrides physical port
specification.
- Arguments:
+ Args:
loopback: argparse option
port: argparse option
|
Fixed args statement in docstring for #8
|
py
|
diff --git a/src/astral/geocoder.py b/src/astral/geocoder.py
index <HASH>..<HASH> 100644
--- a/src/astral/geocoder.py
+++ b/src/astral/geocoder.py
@@ -18,7 +18,6 @@ from typing import Dict, Generator, List, Tuple, Union
from astral import LocationInfo, latlng_to_float
-
__all__ = ["lookup", "database", "add_locations", "all_locations"]
@@ -159,6 +158,7 @@ Masqat,Oman,Asia/Muscat,23°37'N,58°36'E,8.0
Mbabane,Swaziland,Africa/Mbabane,26°18'S,31°06'E,1243.0
Mecca,Saudi Arabia,Asia/Riyadh,21°26'N,39°49'E,240.0
Medina,Saudi Arabia,Asia/Riyadh,24°28'N,39°36'E,631.0
+Melbourne,Australia,-37°48'S,144°57'E,Australia/Melbourne,31
Mexico,Mexico,America/Mexico_City,19°20'N,99°10'W,2254.0
Minsk,Belarus,Europe/Minsk,53°52'N,27°30'E,231.0
Mogadishu,Somalia,Africa/Mogadishu,02°02'N,45°25'E,9.0
|
Added Melbourne to the list of locations
|
py
|
diff --git a/airtest/utils/version.py b/airtest/utils/version.py
index <HASH>..<HASH> 100644
--- a/airtest/utils/version.py
+++ b/airtest/utils/version.py
@@ -1,4 +1,4 @@
-__version__ = "1.1.9"
+__version__ = "1.1.10"
import os
import sys
|
level up to <I> (cherry picked from commit ca<I>ba<I>c<I>c4cd<I>e3cf9dab4bba<I>cb<I>c<I>)
|
py
|
diff --git a/can/__init__.py b/can/__init__.py
index <HASH>..<HASH> 100644
--- a/can/__init__.py
+++ b/can/__init__.py
@@ -5,7 +5,7 @@ from __future__ import absolute_import
import logging
-__version__ = "2.0.0-rc.1"
+__version__ = "2.0.0"
log = logging.getLogger('can')
|
remove rc<I> from version
|
py
|
diff --git a/neovim/msgpack_rpc/event_loop/asyncio.py b/neovim/msgpack_rpc/event_loop/asyncio.py
index <HASH>..<HASH> 100644
--- a/neovim/msgpack_rpc/event_loop/asyncio.py
+++ b/neovim/msgpack_rpc/event_loop/asyncio.py
@@ -93,6 +93,8 @@ class AsyncioEventLoop(BaseEventLoop, asyncio.Protocol,
self._loop.run_until_complete(coroutine)
def _connect_child(self, argv):
+ self._child_watcher = asyncio.get_child_watcher()
+ self._child_watcher.attach_loop(self._loop)
coroutine = self._loop.subprocess_exec(self._fact, *argv)
self._loop.run_until_complete(coroutine)
|
event_loop/asyncio.py: Attach loop to child watcher (#<I>) Closes #<I>
|
py
|
diff --git a/sos/plugins/ceph.py b/sos/plugins/ceph.py
index <HASH>..<HASH> 100644
--- a/sos/plugins/ceph.py
+++ b/sos/plugins/ceph.py
@@ -67,6 +67,7 @@ class Ceph(Plugin, RedHatPlugin, UbuntuPlugin):
"ceph osd crush show-tunables",
"ceph-disk list",
"ceph versions",
+ "ceph insights",
"ceph osd crush dump",
"ceph -v",
"ceph-volume lvm list"
|
[ceph] Add 'ceph insights' command output This change adds the output of the recently added 'ceph insights' command. Resolves: #<I>
|
py
|
diff --git a/dragnet/models.py b/dragnet/models.py
index <HASH>..<HASH> 100644
--- a/dragnet/models.py
+++ b/dragnet/models.py
@@ -46,6 +46,8 @@ kohlschuetter_weninger_model._blockifier = TagCountNoCSSBlockifier
content_extractor._blockifier = TagCountNoCSSReadabilityBlockifier
content_extractor._block_model = SklearnWrapper(content_extractor._block_model)
content_comments_extractor._blockifier = TagCountNoCSSReadabilityBlockifier
+content_comments_extractor._block_model = SklearnWrapper(
+ content_comments_extractor._block_model)
# finally make the model that returns both main content and content+comments
content_and_content_comments_extractor = ContentCommentsExtractionModel(
|
Add thresholding for content_comments_extractor
|
py
|
diff --git a/esgfpid/assistant/consistency.py b/esgfpid/assistant/consistency.py
index <HASH>..<HASH> 100644
--- a/esgfpid/assistant/consistency.py
+++ b/esgfpid/assistant/consistency.py
@@ -24,8 +24,12 @@ class Checker(object):
self.__will_run_check = True
self.__message_why_not = 'No reason specified.'
- # query solr for previous file handles:
- self.__data_consistency_check_preparation()
+ # Is solr switched off?
+ if self.__coupler.is_solr_switched_off():
+ self.__will_run_check = False
+ else:
+ # query solr for previous file handles:
+ self.__data_consistency_check_preparation()
def can_run_check(self):
|
If solr is switched off, do not even try to ask it for consistency info.
|
py
|
diff --git a/moto/core/access_control.py b/moto/core/access_control.py
index <HASH>..<HASH> 100644
--- a/moto/core/access_control.py
+++ b/moto/core/access_control.py
@@ -1,3 +1,17 @@
+"""
+This implementation is NOT complete, there are many things to improve.
+The following is a list of the most important missing features and inaccuracies.
+
+TODO add support for more principals, apart from IAM users and assumed IAM roles
+TODO add support for the Resource and Condition parts of IAM policies
+TODO add support and create tests for all services in moto (for example, API Gateway is probably not supported currently)
+TODO implement service specific error messages (currently, EC2 and S3 are supported separately, everything else defaults to the errors IAM returns)
+TODO include information about the action's resource in error messages (once the Resource element in IAM policies is supported)
+TODO check all other actions that are performed by the action called by the user (for example, autoscaling:CreateAutoScalingGroup requires permission for iam:CreateServiceLinkedRole too - see https://docs.aws.amazon.com/autoscaling/ec2/userguide/control-access-using-iam.html)
+TODO add support for resource-based policies
+
+"""
+
import json
import logging
import re
@@ -319,8 +333,6 @@ class IAMPolicyStatement(object):
if self._check_element_matches("Action", action):
is_action_concerned = True
- # TODO: check Resource/NotResource and Condition
-
if is_action_concerned:
if self._statement["Effect"] == "Allow":
return PermissionResult.PERMITTED
|
Collected TODOs in the header of the access_control file.
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -15,6 +15,7 @@ setup(
open(os.path.join(here, 'CHANGES')).read(),
license='LPGL, see LICENSE file.',
install_requires=[
+ 'Django',
'geojson == 1.0.1',
'shapely',
],
|
Django should be listed in requirements
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -42,7 +42,7 @@ setup(
download_url='https://github.com/sebotic/WikidataIntegrator/tarball/0.0.325',
packages=find_packages(),
include_package_data=True,
- long_description=read('../README.md'),
+ #long_description=read('README.md'),
classifiers=[
"Programming Language :: Python",
"Programming Language :: Python :: 3",
|
setup.py adapted for pypi
|
py
|
diff --git a/src/python/turicreate/toolkits/object_detector/object_detector.py b/src/python/turicreate/toolkits/object_detector/object_detector.py
index <HASH>..<HASH> 100644
--- a/src/python/turicreate/toolkits/object_detector/object_detector.py
+++ b/src/python/turicreate/toolkits/object_detector/object_detector.py
@@ -1798,6 +1798,7 @@ class ObjectDetector(_Model):
model_fields = [
('Model', 'model'),
('Number of classes', 'num_classes'),
+ ('Input image shape', 'input_image_shape')
]
training_fields = [
('Training time', '_training_time_as_string'),
|
add input_image_shape to model summary. (#<I>)
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -58,6 +58,7 @@ setup(
author_email='tobias.l.gustafsson@gmail.com',
url='http://github.com/tobgu/pyrsistent/',
license='MIT',
+ license_files=['LICENCE.mit'],
py_modules=['_pyrsistent_version'],
classifiers=[
'Intended Audience :: Developers',
|
Include LICENSE file in packaged distribution
|
py
|
diff --git a/salt/states/archive.py b/salt/states/archive.py
index <HASH>..<HASH> 100644
--- a/salt/states/archive.py
+++ b/salt/states/archive.py
@@ -127,7 +127,10 @@ def extracted(name,
cwd=name)
if results['retcode'] != 0:
return results
- files = results['stdout']
+ if __salt__['cmd.retcode']('tar --version | grep bsdtar') == 0:
+ files = results['stderr']
+ else:
+ files = results['stdout']
if len(files) > 0:
ret['result'] = True
ret['changes']['directories_created'] = [name]
|
archive state: make it work with bsdtar (for e.g: on the Mac OS X)
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -26,7 +26,7 @@ setup(name=metadata["__title__"],
url=metadata["__uri__"],
license=metadata["__license__"],
packages=['quarkc'],
- package_data={'': ['*.q', "lib/*.q", "lib/*.py", "lib/*.js",
+ package_data={'': ['*.q', "lib/*.q", "lib/*.py", "lib/*.js", "lib/*.rb",
"lib/io/datawire/quark/netty/*.java",
"lib/io/datawire/quark/runtime/*.java"]},
include_package_data=True,
|
Adding lib/.rb files to setup.py
|
py
|
diff --git a/hamper/commander.py b/hamper/commander.py
index <HASH>..<HASH> 100644
--- a/hamper/commander.py
+++ b/hamper/commander.py
@@ -190,13 +190,18 @@ class CommanderProtocol(irc.IRCClient):
self.factory.loader.runPlugins(category, func, self, *args)
def _hamper_send(self, func, comm, message, encode, tag, vars, kwvars):
+ if type(message) == str:
+ log.warning('Warning, passing message as ascii instead of unicode '
+ 'will cause problems. The message is: {0}'
+ .format(message))
+
format_kwargs = {}
format_kwargs.update(kwvars)
format_kwargs.update(comm)
try:
message = message.format(*vars, **format_kwargs)
- except (ValueError, KeyError, IndexError):
- pass
+ except (ValueError, KeyError, IndexError) as e:
+ log.error('Could not format message: {e}'.format(e=e))
if encode:
message = message.encode('utf8')
|
Add warning and error logging to Commander::_hamper_send (reply)
|
py
|
diff --git a/salt/modules/elasticsearch.py b/salt/modules/elasticsearch.py
index <HASH>..<HASH> 100644
--- a/salt/modules/elasticsearch.py
+++ b/salt/modules/elasticsearch.py
@@ -69,6 +69,7 @@ def _get_instance(hosts=None, profile=None):
'''
Return the elasticsearch instance
'''
+ es = None
if profile:
if isinstance(profile, string_types):
_profile = __salt__['config.option'](profile)
@@ -82,7 +83,14 @@ def _get_instance(hosts=None, profile=None):
hosts = ['127.0.0.1:9200']
if isinstance(hosts, string_types):
hosts = [hosts]
- return elasticsearch.Elasticsearch(hosts)
+ try:
+ es = elasticsearch.Elasticsearch(hosts)
+ if not es.ping():
+ raise CommandExecutionError('Could not connect to Elasticsearch host/ cluster {0}, is it unhealthy?'.format(hosts))
+ log.warn(es.ping())
+ except elasticsearch.exceptions.ConnectionError:
+ raise CommandExecutionError('Could not connect to Elasticsearch host/ cluster {0}'.format(hosts))
+ return es
def alias_create(indices, alias, hosts=None, body=None, profile=None):
|
raise exception in modules.elasticsearch when connection to ES cluster failed
|
py
|
diff --git a/insights/client/constants.py b/insights/client/constants.py
index <HASH>..<HASH> 100644
--- a/insights/client/constants.py
+++ b/insights/client/constants.py
@@ -3,7 +3,7 @@ import os
class InsightsConstants(object):
app_name = 'insights-client'
- version = '3.0.0-5'
+ version = '3.0.2-6'
auth_method = 'BASIC'
package_path = os.path.dirname(
os.path.dirname(os.path.abspath(__file__)))
|
gotta keep em seperated (#<I>) * gotta keep em seperated * actually version <I>
|
py
|
diff --git a/tests/test_mixins.py b/tests/test_mixins.py
index <HASH>..<HASH> 100644
--- a/tests/test_mixins.py
+++ b/tests/test_mixins.py
@@ -128,8 +128,8 @@ def _test_mixins_imageUrl(obj, attr):
url = getattr(obj, attr + 'Url')
if getattr(obj, attr):
assert url.startswith(utils.SERVER_BASEURL)
- assert "/library/metadata/" in url
- assert attr in url
+ assert "/library/metadata/" in url or "/library/collections/" in url
+ assert attr in url or "composite" in url
if attr == 'thumb':
assert getattr(obj, 'posterUrl') == url
else:
|
Fix collections image url mixins test
|
py
|
diff --git a/distutils/tests/support.py b/distutils/tests/support.py
index <HASH>..<HASH> 100644
--- a/distutils/tests/support.py
+++ b/distutils/tests/support.py
@@ -108,8 +108,7 @@ class DummyCommand:
"""Class to store options for retrieval via set_undefined_options()."""
def __init__(self, **kwargs):
- for kw, val in kwargs.items():
- setattr(self, kw, val)
+ vars(self).update(kwargs)
def ensure_finalized(self):
pass
|
In DummyCommand, simplify setting of kwargs.
|
py
|
diff --git a/pmagpy/validate_upload3.py b/pmagpy/validate_upload3.py
index <HASH>..<HASH> 100644
--- a/pmagpy/validate_upload3.py
+++ b/pmagpy/validate_upload3.py
@@ -174,7 +174,7 @@ def cv(row, col_name, arg, current_data_model, df, con):
cell_values = cell_value.split(":")
cell_values = [c.strip() for c in cell_values]
for value in cell_values:
- if str(value).lower() in [str(v.encode('utf-8')).lower() for v in vocabulary[col_name]]:
+ if str(value).lower() in [str(v.encode('utf-8')).lower() if isinstance(v, str) else str(v) for v in vocabulary[col_name]]:
continue
elif value.lower() == "none":
continue
|
prevent validation error with boolean controlled vocabulary type
|
py
|
diff --git a/src/astral.py b/src/astral.py
index <HASH>..<HASH> 100644
--- a/src/astral.py
+++ b/src/astral.py
@@ -57,7 +57,7 @@ try:
except ImportError:
raise ImportError('The astral module requires the pytz module to be available.')
-__all__ = ['City', 'Astral','AstralError']
+__all__ = ['City','Astral','AstralError']
__version__ = "0.3"
__author__ = "Simon Kennedy <python@sffjunkie.co.uk>"
@@ -821,8 +821,7 @@ class City(object):
class Astral(object):
def __init__(self):
- """Initialise the list of cities.
- """
+ """Initialise the list of cities."""
self._cities = {}
self._init_cities()
@@ -832,6 +831,12 @@ class Astral(object):
def __getitem__(self, value):
"""Returns a City object for the specified city.
+ You can supply an optional country name by adding a comma
+ followed by the country name. Where multiple cities have the
+ same name you may need to supply the country name otherwise
+ the first result will be returned which may not be the one
+ you're looking for.
+
Handles city names with spaces and mixed case.
"""
|
Changed __getitem__ docstring Minor cosmetic change
|
py
|
diff --git a/fut/core.py b/fut/core.py
index <HASH>..<HASH> 100644
--- a/fut/core.py
+++ b/fut/core.py
@@ -172,6 +172,9 @@ class Core(object):
self.timeout = timeout
self.delay = delay
self.request_time = 0
+ # db
+ self._players = None
+ self._nations = None
if debug: # save full log to file (fut.log)
self.logger = logger(save=True)
else: # NullHandler
@@ -538,11 +541,12 @@ class Core(object):
self.saveSession()
return True
- # TODO: probably there is no need to refresh on every call?
@property
def players(self):
"""Return all players in dict {id: c, f, l, n, r}."""
- return players()
+ if not self._players:
+ self._players = players()
+ return self._players
@property
def nations(self):
@@ -550,7 +554,9 @@ class Core(object):
:params year: Year.
"""
- return nations()
+ if not self._nations:
+ self._nations = nations()
+ return self._nations
@property
def leagues(self, year=2017):
|
cache players & nations db
|
py
|
diff --git a/molo/commenting/models.py b/molo/commenting/models.py
index <HASH>..<HASH> 100644
--- a/molo/commenting/models.py
+++ b/molo/commenting/models.py
@@ -1,4 +1,5 @@
from django_comments.models import Comment
+from django_comments.models import CommentFlag
from django.dispatch import receiver
from django_comments.signals import comment_was_flagged
from django.conf import settings
@@ -32,6 +33,12 @@ def remove_comment_if_flag_limit(sender, comment, flag, created, **kwargs):
except AttributeError:
return
+ if flag.flag != CommentFlag.SUGGEST_REMOVAL:
+ return
+ # Don't remove comments that have been approved by a moderator
+ if (comment.flags.filter(flag=CommentFlag.MODERATOR_APPROVAL).count() > 0):
+ return
+
if (comment.flags.count() >= threshold_count):
comment.is_removed = True
comment.save()
|
ensure other flags don't result in comment removal and approved comments aren't removed
|
py
|
diff --git a/python_modules/dagstermill/setup.py b/python_modules/dagstermill/setup.py
index <HASH>..<HASH> 100644
--- a/python_modules/dagstermill/setup.py
+++ b/python_modules/dagstermill/setup.py
@@ -41,6 +41,7 @@ def _do_setup(name='dagstermill'):
'dagster-pandas',
'ipykernel>=4.9.0',
'nteract-scrapbook>=0.2.0',
+ 'nbformat<5.0.0',
'papermill>=1.0.0',
'scikit-learn>=0.19.0',
'six',
|
Pin nbformat to less than <I> Summary: Nbformat released a new major rev a couple hours ago which broke our tests. Pinning it for now. Test Plan: BK Reviewers: alangenfeld, prha Reviewed By: prha Differential Revision: <URL>
|
py
|
diff --git a/lib/devpipeline_core/sanitizer.py b/lib/devpipeline_core/sanitizer.py
index <HASH>..<HASH> 100644
--- a/lib/devpipeline_core/sanitizer.py
+++ b/lib/devpipeline_core/sanitizer.py
@@ -8,19 +8,17 @@ import devpipeline_core.plugin
def _sanitize_empty_depends(configuration, error_fn):
- for component_name in configuration.components():
- component = configuration.get(component_name)
+ for name, component in configuration.items():
for dep in component.get_list("depends"):
if not dep:
- error_fn("Empty dependency in {}".format(component_name))
+ error_fn("Empty dependency in {}".format(name))
_IMPLICIT_PATTERN = re.compile(R'\$\{([a-z_\-0-9\.]+):.+\}')
def _sanitize_implicit_depends(configuration, error_fn):
- for component_name in configuration.components():
- component = configuration.get(component_name)
+ for name, component in configuration.items():
component_deps = component.get_list("depends")
for key in component:
val = component.get(key, raw=True)
@@ -30,7 +28,7 @@ def _sanitize_implicit_depends(configuration, error_fn):
if dep not in component_deps:
error_fn(
"{}:{} has an implicit dependency on {}".format(
- component_name, key, dep))
+ name, key, dep))
_SANITIZERS = devpipeline_core.plugin.query_plugins(
|
Fix #5 - Use dictionary-like interface to access component configurations
|
py
|
diff --git a/IPython/html/widgets/__init__.py b/IPython/html/widgets/__init__.py
index <HASH>..<HASH> 100644
--- a/IPython/html/widgets/__init__.py
+++ b/IPython/html/widgets/__init__.py
@@ -1,5 +1,6 @@
from base import Widget, init_widget_js
+from bool import BoolWidget
from container import ContainerWidget
from float_range import FloatRangeWidget
from int_range import IntRangeWidget
|
Added missing import reference for bool widget
|
py
|
diff --git a/superset/db_engine_specs.py b/superset/db_engine_specs.py
index <HASH>..<HASH> 100644
--- a/superset/db_engine_specs.py
+++ b/superset/db_engine_specs.py
@@ -1067,11 +1067,15 @@ class HiveEngineSpec(PrestoEngineSpec):
@classmethod
def fetch_data(cls, cursor, limit):
+ import pyhive
from TCLIService import ttypes
state = cursor.poll()
if state.operationState == ttypes.TOperationState.ERROR_STATE:
raise Exception('Query error', state.errorMessage)
- return super(HiveEngineSpec, cls).fetch_data(cursor, limit)
+ try:
+ return super(HiveEngineSpec, cls).fetch_data(cursor, limit)
+ except pyhive.exc.ProgrammingError:
+ return []
@staticmethod
def create_table_from_csv(form, table):
|
Allow empty results in Hive (from SET, eg) (#<I>) * Allow empty results in Hive (from SET, eg) * Remove patch * Merge heads * Delete merge heads
|
py
|
diff --git a/drivers/python/rethinkdb/ast.py b/drivers/python/rethinkdb/ast.py
index <HASH>..<HASH> 100644
--- a/drivers/python/rethinkdb/ast.py
+++ b/drivers/python/rethinkdb/ast.py
@@ -705,9 +705,9 @@ class ReQLDecoder(py_json.JSONDecoder):
'''
Default JSONDecoder subclass to handle pseudo-type conversion.
'''
- def __init__(self, reql_format_opts={}):
+ def __init__(self, reql_format_opts=None):
py_json.JSONDecoder.__init__(self, object_hook=self.convert_pseudotype)
- self.reql_format_opts = reql_format_opts
+ self.reql_format_opts = reql_format_opts or {}
def convert_time(self, obj):
if 'epoch_time' not in obj:
|
Apply a patch from @sontek for a mutable global object.
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -69,6 +69,8 @@ setup(
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
+ 'Programming Language :: Python :: 2.6',
+ 'Programming Language :: Python :: 2.7',
'Topic :: Communications :: Email',
'Topic :: Utilities',
],
|
According the README, script was successfully tested in Python <I> and <I>.
|
py
|
diff --git a/howdoi/howdoi.py b/howdoi/howdoi.py
index <HASH>..<HASH> 100755
--- a/howdoi/howdoi.py
+++ b/howdoi/howdoi.py
@@ -171,7 +171,7 @@ def _extract_links(html, search_engine):
def _get_search_url(search_engine):
return SEARCH_URLS.get(search_engine, SEARCH_URLS['google'])
-def _detect_block(page):
+def _is_blocked(page):
for indicator in BLOCK_INDICATORS:
if page.find(indicator) != -1:
return True
@@ -183,7 +183,7 @@ def _get_links(query):
search_url = _get_search_url(search_engine)
result = _get_result(search_url.format(URL, url_quote(query)))
- if _detect_block(result):
+ if _is_blocked(result):
_print_err('Unable to find an answer because the search engine temporarily blocked the request. '
'Please wait a few minutes or select a different search engine.')
raise BlockError("Temporary block by search engine")
|
_detect_block -> _is_blocked
|
py
|
diff --git a/apng/__init__.py b/apng/__init__.py
index <HASH>..<HASH> 100644
--- a/apng/__init__.py
+++ b/apng/__init__.py
@@ -30,12 +30,18 @@ def is_png(png):
"""
if isinstance(png, str):
with open(png, "rb") as f:
- png = f.read(8)
+ png_header = f.read(8)
+ elif hasattr(png, "read"):
+ position = png.tell()
+ png_header = png.read(8)
+ png.seek(position)
+ elif isinstance(png, bytes):
+ png_header = png[:8]
+ else:
+ raise TypeError("Muse be file, bytes, or str but get {}"
+ .format(type(png)))
- if hasattr(png, "read"):
- png = png.read(8)
-
- return png[:8] == PNG_SIGN
+ return png_header == PNG_SIGN
def chunks_read(b):
"""Parse PNG bytes into different chunks, yielding (type, data).
@@ -72,9 +78,8 @@ def chunks(png):
if isinstance(png, str):
# file name
with open(png, "rb") as f:
- png = f.read()
-
- if hasattr(png, "read"):
+ png = f.read()
+ elif hasattr(png, "read"):
# file like
png = png.read()
|
Fix: Reset file object position in is_png() (#2) * Reset file object position in is_png() Otherwise chunk() function will miss the first 8 bytes when `png` is file-object-like. * Update __init__.py
|
py
|
diff --git a/napd/nap.py b/napd/nap.py
index <HASH>..<HASH> 100644
--- a/napd/nap.py
+++ b/napd/nap.py
@@ -283,6 +283,9 @@ class Nap:
raise NapInputError("pool specification contain both 'id' and 'key', specify pool id or name")
# name is only unique together with schema, find schema
# check that given schema exists and populate 'schema' with correct id
+ # TODO: can we split this into a separate function or something?
+ # This whole thing looks awefully complex when it really
+ # should not
if 'schema_id' in spec:
if 'schema_name' in spec:
raise NapInputError("schema specification contain both 'id' and 'name', specify schema id or name")
|
Add TODO entry on improving code readability
|
py
|
diff --git a/pymmd/download.py b/pymmd/download.py
index <HASH>..<HASH> 100644
--- a/pymmd/download.py
+++ b/pymmd/download.py
@@ -24,10 +24,10 @@ SHLIB_EXT = {
}
def build_posix():
- return subprocess.call(['make'])
+ return subprocess.call(['make', 'libMultiMarkdownShared'])
def build_ms():
- return subprocess.call(['msbuild', 'ALL_BUILD.vcxproj', '/p:Configuration=Release'])
+ return subprocess.call(['msbuild', 'libMultiMarkdownShared.vcxproj', '/p:Configuration=Release'])
PLATFORM_BUILDS = {
'Linux': build_posix,
|
Change download to only build shared library component
|
py
|
diff --git a/json5/lib.py b/json5/lib.py
index <HASH>..<HASH> 100644
--- a/json5/lib.py
+++ b/json5/lib.py
@@ -269,15 +269,17 @@ def _dump_str(obj, ensure_ascii):
ret.append('\\0')
elif not ensure_ascii:
ret.append(ch)
- elif ord(ch) < 128:
- ret.append(ch)
- elif ord(ch) < 65536:
- ret.append('\u' + '%04x' % ord(ch))
else:
- val = ord(ch) - 0x10000
- high = 0xd800 + val >> 10
- low = 0xdc00 + val & 0x3ff
- ret.append('\\u%04x\\u%04x' % (high, low))
+ o = ord(ch)
+ if o >= 32 and o < 128:
+ ret.append(ch)
+ elif ord(ch) < 65536:
+ ret.append('\u' + '%04x' % ord(ch))
+ else:
+ val = ord(ch) - 0x10000
+ high = 0xd800 + val >> 10
+ low = 0xdc00 + val & 0x3ff
+ ret.append('\\u%04x\\u%04x' % (high, low))
return u''.join(ret) + '"'
|
escape chars < 0x<I> as well
|
py
|
diff --git a/currencies/utils.py b/currencies/utils.py
index <HASH>..<HASH> 100644
--- a/currencies/utils.py
+++ b/currencies/utils.py
@@ -8,7 +8,7 @@ def calculate_price(price, currency):
factor = Currency.objects.get(code__exact=currency).factor
except Currency.DoesNotExist:
if settings.DEBUG:
- raise Currency.DoesNotExist
+ raise
else:
factor = Decimal('0.0')
new_price = Decimal(price) * factor
|
Simplify a raise in debug mode
|
py
|
diff --git a/wake.py b/wake.py
index <HASH>..<HASH> 100644
--- a/wake.py
+++ b/wake.py
@@ -64,7 +64,11 @@ class Vulture(ast.NodeVisitor):
def scan(self, node_string):
self.code = node_string.splitlines()
- node = ast.parse(node_string)
+ try:
+ node = ast.parse(node_string)
+ except SyntaxError as e:
+ print('Error in file', self.file)
+ raise e
self.visit(node)
def _get_modules(self, paths, toplevel=True):
|
print file for SyntaxErrors
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -3,9 +3,9 @@ import sdist_upip
setup(name='picoweb',
- version='1.4.1',
+ version='1.5',
description="A very lightweight, memory-efficient async web framework \
-for MicroPython.org and its uasyncio module.",
+for MicroPython and its uasyncio module.",
long_description=open('README.rst').read(),
url='https://github.com/pfalcon/picoweb',
author='Paul Sokolovsky',
|
setup: Release <I>.
|
py
|
diff --git a/spotify/sync/models.py b/spotify/sync/models.py
index <HASH>..<HASH> 100644
--- a/spotify/sync/models.py
+++ b/spotify/sync/models.py
@@ -25,7 +25,7 @@ class SyncMeta(type):
elif base_name != "Client":
def __init__(self, client, *args, **kwargs):
- super().__init__(client, *args, **kwargs)
+ base.__init__(self, client, *args, **kwargs)
self.__client_thread__ = client.__client_thread__
try:
|
bugfix #<I> bad mro resolution for super call
|
py
|
diff --git a/torf/_torrent.py b/torf/_torrent.py
index <HASH>..<HASH> 100644
--- a/torf/_torrent.py
+++ b/torf/_torrent.py
@@ -1551,12 +1551,7 @@ class Torrent():
raise error.BdecodeError(filepath)
def copy(self):
- """
- Return a new object with the same metainfo
-
- Internally, this simply copies the internal metainfo dictionary with
- :func:`copy.deepcopy` and gives it to the new instance.
- """
+ """Return a new object with the same metainfo"""
from copy import deepcopy
cp = type(self)()
cp._metainfo = deepcopy(self._metainfo)
|
Torrent.copy(): Be less chatty in docstring
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -17,7 +17,7 @@ with open(os.path.join(os.path.dirname(__file__), 'test_requirements.txt')) as f
setup(
name='zappa',
- version='0.17.3',
+ version='0.17.4',
packages=['zappa'],
install_requires=required,
tests_require=test_required,
|
<I> - Version bump to publish a few recent fixes
|
py
|
diff --git a/pysat/utils/_core.py b/pysat/utils/_core.py
index <HASH>..<HASH> 100644
--- a/pysat/utils/_core.py
+++ b/pysat/utils/_core.py
@@ -495,13 +495,15 @@ def fmt_output_in_cols(out_strs, ncols=3, max_num=6, lpad=None):
output += '\n'
# Print out remaining variables one at a time on a single line
- for i in range(sel_len - ncols * num):
- if middle >= 0:
- if i == 0 and num > 0:
- output += "...".center(lpad * ncols) + '\n'
- elif num == 0 and i == nhalf:
- output += "...".center(lpad if lpad > 4 else 4)
- output += out_strs[nsel][i + ncols * num].ljust(lpad)
- output += '\n'
+ extra_cols = sel_len - ncols * num
+ if extra_cols > 0:
+ for i in range(extra_cols):
+ if middle >= 0:
+ if i == 0 and num > 0:
+ output += "...".center(lpad * ncols) + '\n'
+ elif num == 0 and i == nhalf:
+ output += "...".center(lpad if lpad > 4 else 4)
+ output += out_strs[nsel][i + ncols * num].ljust(lpad)
+ output += '\n'
return output
|
MAINT: removed extra newline Removed extra newline printed when there is no extra data.
|
py
|
diff --git a/pypsa/networkclustering.py b/pypsa/networkclustering.py
index <HASH>..<HASH> 100644
--- a/pypsa/networkclustering.py
+++ b/pypsa/networkclustering.py
@@ -78,8 +78,9 @@ def aggregategenerators(network, busmap, with_time=True, carriers=None, custom_s
columns = (set(attrs.index[attrs.static & attrs.status.str.startswith('Input')]) | {'weight'}) & set(generators.columns)
grouper = [generators.bus, generators.carrier]
- generators.weight.fillna(1., inplace=True)
- weighting = generators.weight.groupby(grouper, axis=0).transform(lambda x: x/x.sum() )
+ def normed_or_uniform(x):
+ return x/x.sum() if x.notnull().all() else pd.Series(1./len(x), x.index)
+ weighting = (generators.weight.groupby(grouper, axis=0).transform(normed_or_uniform)
generators['capital_cost'] *= weighting
strategies = {'p_nom_max': np.min, 'weight': np.sum, 'p_nom': np.sum, 'capital_cost': np.sum}
strategies.update(custom_strategies)
|
networkclustering: Improve aggregategenerators w/o weights
|
py
|
diff --git a/pupa/importers/bills.py b/pupa/importers/bills.py
index <HASH>..<HASH> 100644
--- a/pupa/importers/bills.py
+++ b/pupa/importers/bills.py
@@ -69,7 +69,7 @@ class BillImporter(BaseImporter):
sponsor['person_id'])
if 'organization_id' in sponsor:
- sponsor['organization_id'] = self.person_importer.resolve_json_id(
+ sponsor['organization_id'] = self.org_importer.resolve_json_id(
sponsor['organization_id'])
return data
|
Importer for org sponsors should be the org_importer
|
py
|
diff --git a/holoviews/plotting/mpl/plot.py b/holoviews/plotting/mpl/plot.py
index <HASH>..<HASH> 100644
--- a/holoviews/plotting/mpl/plot.py
+++ b/holoviews/plotting/mpl/plot.py
@@ -178,7 +178,7 @@ class MPLPlot(DimensionedPlot):
return self.handles['axis']
else:
fig = self.handles['fig']
- if getattr(self, 'overlaid', True) and self._close_figures:
+ if not getattr(self, 'overlaid', False) and self._close_figures:
plt.close(fig)
return fig
|
Fixed closing of matplotlib figures
|
py
|
diff --git a/bot/action/core/filter.py b/bot/action/core/filter.py
index <HASH>..<HASH> 100644
--- a/bot/action/core/filter.py
+++ b/bot/action/core/filter.py
@@ -43,6 +43,14 @@ class MessageAction(IntermediateAction):
self._continue(event)
+class ChosenInlineResultAction(IntermediateAction):
+ def process(self, event):
+ chosen_inline_result = event.update.chosen_inline_result
+ if chosen_inline_result is not None:
+ event.chosen_result = chosen_inline_result
+ self._continue(event)
+
+
class InlineQueryAction(IntermediateAction):
def process(self, event):
inline_query = event.update.inline_query
|
Add support for ChosenInlineResult with a filter action
|
py
|
diff --git a/qless/profile.py b/qless/profile.py
index <HASH>..<HASH> 100644
--- a/qless/profile.py
+++ b/qless/profile.py
@@ -90,5 +90,3 @@ class Profiler(object):
def __exit__(self, typ, value, trace):
self.stop()
self.display()
- if typ:
- raise typ, value, trace
|
No need to re-raise in context manager exit handlers
|
py
|
diff --git a/taskqueue_cli/taskqueue_cli.py b/taskqueue_cli/taskqueue_cli.py
index <HASH>..<HASH> 100644
--- a/taskqueue_cli/taskqueue_cli.py
+++ b/taskqueue_cli/taskqueue_cli.py
@@ -65,7 +65,7 @@ def release(queuepath):
@main.command()
@click.argument("src")
@click.argument("dest")
-def copy(src, dest):
+def cp(src, dest):
"""
Copy the contents of a queue to another
service or location. Do not run this
|
refactor: rename copy to cp
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -46,7 +46,7 @@ setup(
include_package_data=True,
zip_safe=False,
install_requires=[
- 'pysaml2==0.4.1',
+ 'pysaml2==0.4.3',
'python-memcached==1.48',
],
)
|
Update pysaml2 to <I>
|
py
|
diff --git a/src/scs_core/gas/afe_calib.py b/src/scs_core/gas/afe_calib.py
index <HASH>..<HASH> 100644
--- a/src/scs_core/gas/afe_calib.py
+++ b/src/scs_core/gas/afe_calib.py
@@ -162,27 +162,18 @@ class AFECalib(PersistentJSONable):
def __eq__(self, other):
try:
- print(1)
-
if len(self) != len(other):
return False
- print(2)
-
for i in range(len(self)):
if self.sensor_calib(i) != other.sensor_calib(i):
return False
- print(3)
-
return self.serial_number == other.serial_number and self.afe_type == other.afe_type and \
self.calibrated_on == other.calibrated_on and self.dispatched_on == other.dispatched_on and \
self.pt1000_calib == other.pt1000_calib
except (TypeError, AttributeError):
-
- print(4)
-
return False
|
Added __eq__(..) methods
|
py
|
diff --git a/synapse/lib/net.py b/synapse/lib/net.py
index <HASH>..<HASH> 100644
--- a/synapse/lib/net.py
+++ b/synapse/lib/net.py
@@ -390,12 +390,12 @@ class Chan(Link):
def _tx_real(self, mesg):
+ name = 'data'
if self._chan_txinit:
- self._chan_txinit = True
- self.link.tx(('init', {'chan': self._chan_iden, 'data': mesg}))
- return
+ self._chan_txinit = False
+ name = 'init'
- self.link.tx(('data', {'chan': self._chan_iden, 'data': mesg}))
+ self.link.tx((name, {'chan': self._chan_iden, 'data': mesg}))
def txfini(self, data=None):
self.link.tx(('fini', {'chan': self._chan_iden, 'data': data}))
|
Set _chan_txinit to False after it sends the initial init msg
|
py
|
diff --git a/trelliopg/sql.py b/trelliopg/sql.py
index <HASH>..<HASH> 100755
--- a/trelliopg/sql.py
+++ b/trelliopg/sql.py
@@ -5,6 +5,7 @@ import sys
from asyncpg.transaction import Transaction
from asyncpg.connection import Connection
from asyncpg.pool import Pool, create_pool
+from trelliolibs.utils.helpers import json_response
PY_36 = sys.version_info >= (3, 6)
@@ -65,7 +66,7 @@ def async_atomic(on_exception=None, raise_exception=False, **kwargs):
resp_dict = {}
resp_dict['status'] = type(exc)
resp_dict['message'] = str(exc)
- return resp_dict
+ return json_response(resp_dict)
on_exception = default_on_exception
elif raise_exception and not on_exception:
async def raise_exception(exp_args):
|
async_atomic bad response bug fix
|
py
|
diff --git a/www/src/Lib/browser/webworker.py b/www/src/Lib/browser/webworker.py
index <HASH>..<HASH> 100644
--- a/www/src/Lib/browser/webworker.py
+++ b/www/src/Lib/browser/webworker.py
@@ -9,9 +9,8 @@ from browser import window
import asyncio
import os
import sys
-
-DEFAULT_BRYTHON_OPTIONS = {
-}
+
+DEFAULT_BRYTHON_OPTIONS = getattr(__BRYTHON__, '$options')
CHILD_WORKERS = []
@@ -274,7 +273,8 @@ class WorkerParent(WorkerCommon):
The parameter `brython_options` will be passed to the `brython` function when called from
the webworker. If it contains an `imports` key, this should be a list of javascript files
to be loaded instead of the standard brython files. If it contains the key `import_dist`,
- it will load brython from the `brython_webworker_dist.js` script.
+ it will load brython from the `brython_webworker_dist.js` script. By default it uses the
+ same options passed to the parent `brython` function.
"""
if not _can_launch_workers:
raise WorkerError("Cannot spawn workers (webkit based browsers don't support running webworkers inside webworkers)")
|
Use the parent's brython options as the default for workers
|
py
|
diff --git a/swingtix/bookkeeper/models.py b/swingtix/bookkeeper/models.py
index <HASH>..<HASH> 100644
--- a/swingtix/bookkeeper/models.py
+++ b/swingtix/bookkeeper/models.py
@@ -94,7 +94,8 @@ class Account(models.Model, _AccountApi):
positive_credit = models.BooleanField(
"""credit entries increase the value of this account. Set to False for
- Asset & Expense accounts, True for Liability, Revenue and Equity accounts.""")
+ Asset & Expense accounts, True for Liability, Revenue and Equity accounts.""",
+ default=False)
name = models.TextField() #slugish? Unique?
description = models.TextField(blank=True)
|
Resolution of that part of SwingTix/bookkeeper/#4 The relevant warning message was : "BooleanField does not have a default value". I provided a default of False as that was the default for a BooleanField before you were asked to make an explicit default.
|
py
|
diff --git a/dusty/systems/compose/__init__.py b/dusty/systems/compose/__init__.py
index <HASH>..<HASH> 100644
--- a/dusty/systems/compose/__init__.py
+++ b/dusty/systems/compose/__init__.py
@@ -36,7 +36,7 @@ def _compose_up():
def _compose_stop():
logging.info('Running docker-compose stop')
- check_call_demoted(['docker-compose', '-f', _composefile_path(), '-p', 'dusty', 'stop', '-t', '1'])
+ check_and_log_output_and_error_demoted(['docker-compose', '-f', _composefile_path(), '-p', 'dusty', 'stop', '-t', '1'])
def update_running_containers_from_spec(compose_config):
"""Takes in a Compose spec from the Dusty Compose compiler,
|
near suicide. i hate this pr
|
py
|
diff --git a/spyderlib/widgets/sourcecode/codeeditor.py b/spyderlib/widgets/sourcecode/codeeditor.py
index <HASH>..<HASH> 100644
--- a/spyderlib/widgets/sourcecode/codeeditor.py
+++ b/spyderlib/widgets/sourcecode/codeeditor.py
@@ -2121,8 +2121,11 @@ class CodeEditor(TextEditBaseWidget):
cursor = self.textCursor()
last_three = self.get_text('sol', 'cursor')[-3:]
last_two = self.get_text('sol', 'cursor')[-2:]
-
- if self.has_open_quotes(line_text) and (not last_three == 3*char):
+ trailing_text = self.get_text('cursor', 'eol').strip()
+
+ if len(trailing_text) > 0:
+ self.insert_text(char)
+ elif self.has_open_quotes(line_text) and (not last_three == 3*char):
self.insert_text(char)
# Move to the right if we are between two quotes
elif self.__between_quotes(char):
|
Editor: Don't autoinsert quotes if there is text to the right of the cursor
|
py
|
diff --git a/imhotep/main.py b/imhotep/main.py
index <HASH>..<HASH> 100644
--- a/imhotep/main.py
+++ b/imhotep/main.py
@@ -55,4 +55,4 @@ def main():
if __name__ == '__main__':
- main()
\ No newline at end of file
+ main()
|
W<I> no newline at end of file
|
py
|
diff --git a/dedupe/predicates.py b/dedupe/predicates.py
index <HASH>..<HASH> 100644
--- a/dedupe/predicates.py
+++ b/dedupe/predicates.py
@@ -57,7 +57,7 @@ def initials(field, n=None):
>>> initials("noslice")
('noslice', )
"""
- return (field[:n], ) if not n or len(field) > n-1 else ()
+ return (field[:n], ) if n is not None or len(field) > n-1 else ()
def wholeFieldPredicate(field):
"""return the whole field
|
Caught edge case for initials of n=0 vs. n=None
|
py
|
diff --git a/tests/CLI/modules/securitygroup_tests.py b/tests/CLI/modules/securitygroup_tests.py
index <HASH>..<HASH> 100644
--- a/tests/CLI/modules/securitygroup_tests.py
+++ b/tests/CLI/modules/securitygroup_tests.py
@@ -118,8 +118,11 @@ class SecurityGroupTests(testing.TestCase):
'remoteGroupId': None,
'protocol': None,
'portRangeMin': None,
- 'portRangeMax': None}],
- json.loads(result.output))
+ 'portRangeMax': None,
+ 'createDate': None,
+ 'modifyDate': None
+ }],
+ json.loads(result.output))
def test_securitygroup_rule_add(self):
result = self.run_command(['sg', 'rule-add', '100',
|
NETWORK-<I> - modifying the rule class and test class to have the createDate and modifyDate parameters
|
py
|
diff --git a/ipa/ipa_cloud.py b/ipa/ipa_cloud.py
index <HASH>..<HASH> 100644
--- a/ipa/ipa_cloud.py
+++ b/ipa/ipa_cloud.py
@@ -605,7 +605,7 @@ class IpaCloud(object):
If tests pass and cleanup flag is none, or
cleanup flag is true, terminate instance.
"""
- if status == 0 and self.cleanup is None or self.cleanup:
+ if (status == 0 and self.cleanup is None) or self.cleanup:
self.logger.info(
'Terminating instance %s' % self.running_instance_id
)
|
Wrap first clause in brackets. In cleanup instance method to make the order clear.
|
py
|
diff --git a/metanl/wordlist.py b/metanl/wordlist.py
index <HASH>..<HASH> 100644
--- a/metanl/wordlist.py
+++ b/metanl/wordlist.py
@@ -201,7 +201,7 @@ def get_frequency(word, lang, default_freq=0, scale=1e9):
"but %r contains a space" % word)
lookup = preprocess_text(word).lower()
- return factor * freqs[lookup] or default_freq
+ return factor * freqs[lookup] + default_freq
def multilingual_word_frequency(multiword, default_freq=0):
"""
|
Word frequencies should be monotonic. If you look up a word in the wordlist, and it occurs 0 times, then you get the default_freq back as your result. If it occurs once, previously you would get 1. What you should get is (default_freq + 1), so that it's higher than not being there at all.
|
py
|
diff --git a/graphene_django_extras/converter.py b/graphene_django_extras/converter.py
index <HASH>..<HASH> 100644
--- a/graphene_django_extras/converter.py
+++ b/graphene_django_extras/converter.py
@@ -322,7 +322,8 @@ def convert_field_to_list_or_connection(
def dynamic_type():
if input_flag and not nested_field:
return DjangoListField(
- ID, required=is_required(field) and input_flag == "create"
+ ID, required=is_required(field) and input_flag == "create",
+ description=field.help_text or field.verbose_name,
)
else:
_type = registry.get_type_for_model(model, for_input=input_flag)
@@ -334,11 +335,13 @@ def convert_field_to_list_or_connection(
return DjangoFilterListField(
_type,
required=is_required(field) and input_flag == "create",
+ description=field.help_text or field.verbose_name,
filterset_class=_type._meta.filterset_class,
)
else:
return DjangoListField(
_type, required=is_required(field) and input_flag == "create"
+ description=field.help_text or field.verbose_name,
)
return Dynamic(dynamic_type)
|
Add description on manytomanyfields
|
py
|
diff --git a/pre_commit/languages/python.py b/pre_commit/languages/python.py
index <HASH>..<HASH> 100644
--- a/pre_commit/languages/python.py
+++ b/pre_commit/languages/python.py
@@ -114,11 +114,6 @@ def get_default_version() -> str: # pragma: no cover (platform dependent)
if _find_by_py_launcher(exe):
return exe
- # Give a best-effort try for windows
- default_folder_name = exe.replace('.', '')
- if os.path.exists(fr'C:\{default_folder_name}\python.exe'):
- return exe
-
# We tried!
return C.DEFAULT
@@ -155,12 +150,6 @@ def norm_version(version: str) -> str:
if version_exec and version_exec != version:
return version_exec
- # If it is in the form pythonx.x search in the default
- # place on windows
- if version.startswith('python'):
- default_folder_name = version.replace('.', '')
- return fr'C:\{default_folder_name}\python.exe'
-
# Otherwise assume it is a path
return os.path.expanduser(version)
|
remove hardcoded python location
|
py
|
diff --git a/Lib/fontbakery/profiles/universal.py b/Lib/fontbakery/profiles/universal.py
index <HASH>..<HASH> 100644
--- a/Lib/fontbakery/profiles/universal.py
+++ b/Lib/fontbakery/profiles/universal.py
@@ -1126,20 +1126,13 @@ def com_google_fonts_check_unreachable_glyphs(ttFont):
for ligatures in sub.ligatures.values():
all_glyphs -= set(lig.LigGlyph for lig in ligatures)
- # TODO: Review lookupTypes 5 and 6
- # Either handle them or add a comment
- # explaining why that might not be necessary.
- #
- # if lookup.LookupType in [5, 6]:
- # import ipdb; ipdb.set_trace()
-
- if lookup.LookupType == 5: # Context:
- # Replace one or more glyphs in context
- pass # FIXME
-
- if lookup.LookupType == 6: # Chaining Context
- # Replace one or more glyphs in chained context
- pass # FIXME
+ if lookup.LookupType in [5, 6]:
+ # We do nothing here, because these contextual lookup types don't
+ # generate glyphs directly; they only dispatch to other lookups
+ # stored elsewhere in the lookup list. As we are examining all
+ # lookups in the lookup list, other calls to this function will
+ # deal with the lookups that a contextual lookup references.
+ pass
if lookup.LookupType == 7: # Extension Substitution:
# Extension mechanism for other substitutions
|
Comment about GSUB5/6 lookups in unreachable glyphs check
|
py
|
diff --git a/tests/test_scrubbers.py b/tests/test_scrubbers.py
index <HASH>..<HASH> 100644
--- a/tests/test_scrubbers.py
+++ b/tests/test_scrubbers.py
@@ -66,9 +66,10 @@ class ScrubberTestCase(unittest.TestCase):
filth = list(scrubber.iter_filth('hello jane@example.com'))
self.assertEqual(len(filth), 1)
self.assertEqual(len(filth[0].filths), 3)
- self.assertEqual(filth[0].filths[0].detector_name, 'email')
- self.assertEqual(filth[0].filths[1].detector_name, 'email_two')
- self.assertEqual(filth[0].filths[2].detector_name, 'email_three')
+ self.assertEqual(
+ sorted([f.detector_name for f in filth[0].filths]),
+ sorted(['email', 'email_two', 'email_three'])
+ )
def test_add_non_detector(self):
"""make sure you can't add a detector that is not a Detector"""
|
change test as py<I> and py<I> return different orders
|
py
|
diff --git a/ariadne/validation/query_cost.py b/ariadne/validation/query_cost.py
index <HASH>..<HASH> 100644
--- a/ariadne/validation/query_cost.py
+++ b/ariadne/validation/query_cost.py
@@ -314,11 +314,11 @@ def validate_cost_map(cost_map: Dict[str, Dict[str, Any]], schema: GraphQLSchema
)
-def report_error(context, error: Exception):
+def report_error(context: ValidationContext, error: Exception):
context.report_error(GraphQLError(str(error), original_error=error))
-def cost_analysis_message(maximum_cost, cost):
+def cost_analysis_message(maximum_cost: int, cost: int) -> str:
return "The query exceeds the maximum cost of %d. Actual cost is %d" % (
maximum_cost,
cost,
@@ -326,7 +326,12 @@ def cost_analysis_message(maximum_cost, cost):
def cost_validator(
- maximum_cost, *, default_cost=0, default_complexity=1, variables=None, cost_map=None
+ maximum_cost: int,
+ *,
+ default_cost: int = 0,
+ default_complexity: int = 1,
+ variables: Optional[Dict] = None,
+ cost_map: Optional[Dict[str, Dict[str, Any]]] = None,
) -> ASTValidationRule:
validator = partial(
CostValidator,
|
Add more type annotations to query cost validator
|
py
|
diff --git a/hwrt/datasets/__init__.py b/hwrt/datasets/__init__.py
index <HASH>..<HASH> 100644
--- a/hwrt/datasets/__init__.py
+++ b/hwrt/datasets/__init__.py
@@ -18,7 +18,8 @@ username2id = {}
def formula_to_dbid(formula_str, backslash_fix=False):
- """Convert a LaTeX formula to the database index.
+ """
+ Convert a LaTeX formula to the database index.
Parameters
----------
@@ -68,9 +69,8 @@ def formula_to_dbid(formula_str, backslash_fix=False):
cursor = connection.cursor()
sql = ("INSERT INTO `wm_formula` (`user_id`, `formula_name`, "
"`formula_in_latex`, "
- "`mode`, `package`, "
- "`is_important`) VALUES ("
- "'10', %s, %s, 'bothmodes', NULL, '0');")
+ "`mode`, `package`) VALUES ("
+ "'10', %s, %s, 'bothmodes', NULL);")
if len(formula_str) < 20:
logging.info("Insert formula %s.", formula_str)
cursor.execute(sql, (formula_str, formula_str))
|
Remove 'is_important' from 'wm_raw_draw_data'
|
py
|
diff --git a/cldoc/generators/xml.py b/cldoc/generators/xml.py
index <HASH>..<HASH> 100644
--- a/cldoc/generators/xml.py
+++ b/cldoc/generators/xml.py
@@ -177,6 +177,8 @@ class Xml(Generator):
elem.set('typedef', 'yes')
def struct_to_xml(self, node, elem):
+ self.class_to_xml(node, elem)
+
if not node.typedef is None:
elem.set('typedef', 'yes')
|
Call class_to_xml from struct_to_xml C++ structs are like classes
|
py
|
diff --git a/scheduler/coreos.py b/scheduler/coreos.py
index <HASH>..<HASH> 100644
--- a/scheduler/coreos.py
+++ b/scheduler/coreos.py
@@ -112,7 +112,7 @@ class FleetClient(object):
status = None
for _ in range(60):
status = subprocess.check_output(
- "fleetctl.sh list-units | grep {name}-announce.service | awk '{{print $4}}'".format(**locals()),
+ "fleetctl.sh list-units | grep {name}-announce.service | awk '{{print $5}}'".format(**locals()),
shell=True, env=env).strip('\n')
if status == 'running':
break
|
fix(controller): watch fifth column for state fleetctl <I> introduced a fifth column, so we need to update the location of the state column.
|
py
|
diff --git a/src/djangorecipe/wsgi.py b/src/djangorecipe/wsgi.py
index <HASH>..<HASH> 100644
--- a/src/djangorecipe/wsgi.py
+++ b/src/djangorecipe/wsgi.py
@@ -29,7 +29,7 @@ def main(settings_file, logfile=None):
self.log(data)
def log(self, msg):
- line = '%s - %s\n' %% (
+ line = '%s - %s\n' % (
datetime.datetime.now().strftime('%Y%m%d %H:%M:%S'), msg)
fp = open(self.logfile, 'a')
try:
|
Fixed typo in wsgi.py
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -304,7 +304,6 @@ install_requires = [
'pylint',
'psutil',
'qtawesome',
- 'path.py',
'pickleshare'
]
|
Setup.py: Remove path.py from our list of deps
|
py
|
diff --git a/zipline/transforms/utils.py b/zipline/transforms/utils.py
index <HASH>..<HASH> 100644
--- a/zipline/transforms/utils.py
+++ b/zipline/transforms/utils.py
@@ -24,6 +24,7 @@ from copy import deepcopy
from datetime import datetime
from collections import deque
from abc import ABCMeta, abstractmethod
+from numbers import Integral
import pandas as pd
@@ -394,7 +395,7 @@ class BatchTransform(EventWindow):
self.compute_only_full = compute_only_full
self.sids = sids
- if isinstance(self.sids, (str, int)):
+ if isinstance(self.sids, (basestring, Integral)):
self.sids = [self.sids]
self.field_names = fields
|
MIN: Changed isinstance check to allow more types.
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -34,7 +34,7 @@ setup(
packages=find_packages('src', exclude=['ez_setup']),
package_dir={'': 'src'},
namespace_packages=['calmjs'],
- zip_safe=True,
+ zip_safe=False,
install_requires=[
'setuptools>=12',
],
|
Remove zip_safe - Huge liability. - Makes it impossible for unittest to pick up tests automatically.
|
py
|
diff --git a/src/aks-preview/setup.py b/src/aks-preview/setup.py
index <HASH>..<HASH> 100644
--- a/src/aks-preview/setup.py
+++ b/src/aks-preview/setup.py
@@ -8,7 +8,7 @@
from codecs import open as open1
from setuptools import setup, find_packages
-VERSION = "0.5.24"
+VERSION = "0.5.25"
CLASSIFIERS = [
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
|
update aks-preview version (#<I>)
|
py
|
diff --git a/cassandra/cluster.py b/cassandra/cluster.py
index <HASH>..<HASH> 100644
--- a/cassandra/cluster.py
+++ b/cassandra/cluster.py
@@ -821,6 +821,7 @@ class Cluster(object):
return
log.debug("Preparing all known prepared statements against host %s", host)
+ connection = None
try:
connection = self.connection_factory(host.address)
try:
@@ -857,7 +858,8 @@ class Cluster(object):
except Exception:
log.exception("Error trying to prepare all statements on host %s", host)
finally:
- connection.close()
+ if connection:
+ connection.close()
def prepare_on_all_sessions(self, query_id, prepared_statement, excluded_host):
with self._prepared_statement_lock:
|
Don't reference uninitialized conn in finally block If we failed to get a connection, the finally block would try to reference an uninitialized variable
|
py
|
diff --git a/commands.py b/commands.py
index <HASH>..<HASH> 100644
--- a/commands.py
+++ b/commands.py
@@ -431,7 +431,7 @@ def format_who_when(fields):
def format_property(name, value):
"""Format the name and value (both unicode) of a property as a string."""
utf8_name = name.encode('utf8')
- if value:
+ if value is not None:
utf8_value = value.encode('utf8')
result = "property %s %d %s" % (utf8_name, len(utf8_value), utf8_value)
else:
|
Ensure empty string property values stay as empty strings
|
py
|
diff --git a/floyd/cli/experiment.py b/floyd/cli/experiment.py
index <HASH>..<HASH> 100644
--- a/floyd/cli/experiment.py
+++ b/floyd/cli/experiment.py
@@ -6,7 +6,7 @@ import sys
import floyd
from floyd.cli.utils import (
- get_module_task_instance_id, normalize_job_name, normalize_data_name
+ get_module_task_instance_id, normalize_job_name
)
from floyd.client.experiment import ExperimentClient
from floyd.client.module import ModuleClient
@@ -125,8 +125,9 @@ def info(job_name_or_id):
task_instance_id = get_module_task_instance_id(experiment.task_instances)
task_instance = TaskInstanceClient().get(task_instance_id) if task_instance_id else None
- table = [["Job name", normalize_job_name(experiment.name)],
- ["Output name", normalize_data_name(experiment.name + '/output') if task_instance else None],
+ normalized_job_name = normalize_job_name(experiment.name)
+ table = [["Job name", normalized_job_name],
+ ["Output name", normalized_job_name + '/output' if task_instance else None],
["Created", experiment.created_pretty],
["Status", experiment.state], ["Duration(s)", experiment.duration_rounded],
["Instance", experiment.instance_type_trimmed],
|
fix: do not read data config for info command
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -42,6 +42,7 @@ setup(name='motor',
author_email='jesse@10gen.com',
url='https://github.com/mongodb/motor/',
install_requires=[
+ 'pymongo >= 2.4.2',
'tornado >= 2.4.0',
'greenlet >= 0.4.0',
],
|
PyMongo <I> is out; it's the first release Motor can use
|
py
|
diff --git a/tests/parse_ifconfig.py b/tests/parse_ifconfig.py
index <HASH>..<HASH> 100644
--- a/tests/parse_ifconfig.py
+++ b/tests/parse_ifconfig.py
@@ -73,7 +73,8 @@ class IfConfig:
else:
env = os.environ.copy()
env.update(LANG='C.utf8')
- p = Popen('ifconfig', stdout=PIPE, stderr=PIPE, env=env)
+ p = Popen('ifconfig', stdout=PIPE, stderr=PIPE,
+ env=env, universal_newlines=True)
self.stdout, self.stderr = p.communicate()
if self.stderr != '':
raise ValueError('stderr from ifconfig was nonempty:\n%s' % self.stderr)
|
Use universal_newlines with Popen, get strs, not bytes
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -1,9 +1,13 @@
"""
SQLAlchemy-JSONAPI
--------------
+------------------
JSON API Mixin for SQLAlchemy that aims to meet the full JSON API spec as
published at http://jsonapi.org/format.
+
+Full documentation is available at:
+
+https://github.com/coltonprovias/sqlalchemy-jsonapi
"""
from setuptools import setup
|
Adding link to documentation in setup.py
|
py
|
diff --git a/pyfakefs/fake_filesystem.py b/pyfakefs/fake_filesystem.py
index <HASH>..<HASH> 100644
--- a/pyfakefs/fake_filesystem.py
+++ b/pyfakefs/fake_filesystem.py
@@ -98,7 +98,7 @@ except ImportError:
__pychecker__ = 'no-reimportself'
-__version__ = '2.6'
+__version__ = '2.7'
PERM_READ = 0o400 # Read permission bit.
PERM_WRITE = 0o200 # Write permission bit.
|
Increment version to <I>
|
py
|
diff --git a/apps/align.py b/apps/align.py
index <HASH>..<HASH> 100644
--- a/apps/align.py
+++ b/apps/align.py
@@ -145,7 +145,7 @@ def nucmer(args):
mm = MakeManager()
for i, (r, q) in enumerate(product(reflist, querylist)):
- pf = "{0:03d}".format(i)
+ pf = "{0:04d}".format(i)
cmd = "nucmer -maxmatch"
cmd += " {0}".format(opts.extra)
cmd += " {0} {1} -p {2}".format(r, q, pf)
|
[apps] pad with 4 digits in apps.align.nucmer()
|
py
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.