diff
stringlengths 139
3.65k
| message
stringlengths 8
627
| diff_languages
stringclasses 1
value |
|---|---|---|
diff --git a/anthemav/protocol.py b/anthemav/protocol.py
index <HASH>..<HASH> 100755
--- a/anthemav/protocol.py
+++ b/anthemav/protocol.py
@@ -1,6 +1,5 @@
import asyncio
import logging
-from functools import partial
# In Python 3.4.4, `async` was renamed to `ensure_future`.
try:
@@ -10,8 +9,6 @@ except AttributeError:
# These properties apply even when the AVR is powered off
ATTR_CORE = {'Z1POW', 'IDM'}
-ATTR_MORE = {'Z1POW', 'IDM', 'Z1VOL', 'IDS', 'IDR', 'IDB', 'IDH', 'IDN',
- 'Z1VIR', 'Z1MUT', 'ICN', 'Z1INP', 'FPB'}
LOOKUP = {}
@@ -91,6 +88,11 @@ class AnthemProtocol(asyncio.Protocol):
def connection_made(self, transport):
self.log.info('Connection established to AVR')
self.transport = transport
+
+ self.transport.set_write_buffer_limits(64)
+ limit_low,limit_high = self.transport.get_write_buffer_limits()
+ self.log.debug("Write buffer limits %d to %d" % (limit_low, limit_high))
+
self.refresh_core()
def data_received(self, data):
|
Drop the write buffer limit to <I> bytes to avoid overrunning device
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -1,4 +1,7 @@
+import os
from distutils.core import setup
+from distutils.command.sdist import sdist
+
import skyfield # safe, because __init__.py contains no import statements
extras = {
@@ -8,7 +11,15 @@ extras = {
],
}
+class my_sdist(sdist):
+ def make_distribution(self):
+ # See https://github.com/skyfielders/python-skyfield/issues/378
+ for path in self.filelist.files:
+ os.chmod(path, 0o644)
+ sdist.make_distribution(self)
+
setup(
+ cmdclass={'sdist': my_sdist},
name='skyfield',
version=skyfield.__version__,
description=skyfield.__doc__.split('\n', 1)[0],
|
Fix #<I> by forcing <I> file permissions in sdist
|
py
|
diff --git a/forgetful-bench.py b/forgetful-bench.py
index <HASH>..<HASH> 100755
--- a/forgetful-bench.py
+++ b/forgetful-bench.py
@@ -86,7 +86,7 @@ cpuBefore = client.redis.info()['used_cpu_user'] + client.redis.info()['used_cpu
putTime = -time.time()
# Alright, let's make a bunch of jobs
testing = client.queue('testing')
-jids = [testing.put({'test': 'benchmark', 'count': c, 'stages':args.stages}, retries=args.retries) for c in range(args.numJobs)]
+jids = [testing.put(qless.Job, {'test': 'benchmark', 'count': c, 'stages':args.stages}, retries=args.retries) for c in range(args.numJobs)]
putTime += time.time()
# This is how long it took to run the workers
|
Updated forgetfulbench to include a klass.
|
py
|
diff --git a/scripts/importer/mtasks/nedd.py b/scripts/importer/mtasks/nedd.py
index <HASH>..<HASH> 100644
--- a/scripts/importer/mtasks/nedd.py
+++ b/scripts/importer/mtasks/nedd.py
@@ -10,7 +10,6 @@ from astropy.cosmology import z_at_value
from cdecimal import Decimal
from scripts import PATH
-from .. import Events
from ...utils import is_number, pbar
from ..funcs import (get_sig_digits, host_clean, name_clean, pretty_num,
uniq_cdl)
@@ -50,10 +49,8 @@ def do_nedd(catalog):
if dist:
nedd_dict.setdefault(cleanhost, []).append(Decimal(dist))
if snname and 'HOST' not in snname:
- snname, secondarysource = Events.new_event(snname,
- srcname=reference,
- url=refurl,
- secondary=True)
+ snname, secondarysource = catalog.new_event(
+ snname, srcname=reference, url=refurl, secondary=True)
if bibcode:
source = catalog.events[snname].add_source(bibcode=bibcode)
sources = uniq_cdl([source, secondarysource])
|
BUG: changed old new_event call
|
py
|
diff --git a/Lib/fontmake/font_project.py b/Lib/fontmake/font_project.py
index <HASH>..<HASH> 100644
--- a/Lib/fontmake/font_project.py
+++ b/Lib/fontmake/font_project.py
@@ -103,6 +103,8 @@ class FontProject(object):
# no need to also set the relative 'filename' attribute as that
# will be auto-updated on writing the designspace document
source.path = ufo_path
+ if not os.path.isdir(master_dir):
+ os.makedirs(master_dir)
source.font.save(ufo_path)
if designspace_path is None:
|
make master_dir if it's not already present unlike defcon, ufoLib2 doesn't automatically create one for us (rightly so, I think)
|
py
|
diff --git a/squad/ci/backend/lava.py b/squad/ci/backend/lava.py
index <HASH>..<HASH> 100644
--- a/squad/ci/backend/lava.py
+++ b/squad/ci/backend/lava.py
@@ -77,6 +77,7 @@ class Backend(BaseBackend):
message = self.socket.recv_multipart()
self.log_debug("message received: %r" % message)
(topic, uuid, dt, username, data) = (u(m) for m in message[:])
+ data = json.loads(data)
lava_id = data['job']
if 'sub_id' in data.keys():
lava_id = data['sub_id']
|
ci/lava: fix access to ZMQ message data the `data` field needs to be parsed as JSON before being used.
|
py
|
diff --git a/scripts/merge-pr.py b/scripts/merge-pr.py
index <HASH>..<HASH> 100755
--- a/scripts/merge-pr.py
+++ b/scripts/merge-pr.py
@@ -297,8 +297,8 @@ if not bool(pr["mergeable"]):
continue_maybe(msg)
print("\n=== Pull Request #%s ===" % pr_num)
-print("title\t%s\nsource\t%s\ntarget\t%s\nurl\t%s"
- % (title, pr_repo_desc, target_ref, url))
+print("title\t{title}\nsource\t{source}\ntarget\t{target}\nurl\t{url}".format(
+ title=title, source=pr_repo_desc, target=target_ref, url=url))
|
BLD: edit on merge-pr script
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100755
--- a/setup.py
+++ b/setup.py
@@ -5,7 +5,8 @@ from setuptools import setup, find_packages
setup(name='bandersnatch',
version='1.0dev',
description='Mirroring tool that implements the client (mirror) side of PEP 381',
- long_description=open('README').read(),
+ long_description='\n\n'.join(
+ [open('README').read(), open('CHANGES.txt').read()]),
author='Christian Theune',
author_email='ct@gocept.com',
license = 'Academic Free License, version 3',
|
Include changelog in pypi page
|
py
|
diff --git a/salt/client/__init__.py b/salt/client/__init__.py
index <HASH>..<HASH> 100644
--- a/salt/client/__init__.py
+++ b/salt/client/__init__.py
@@ -1030,7 +1030,7 @@ class LocalClient(object):
syndic_wait = 0
while True:
raw = self.event.get_event(timeout, jid)
- if raw is not None and 'id' in raw:
+ if raw is not None:
if 'minions' in raw.get('data', {}):
minions.update(raw['data']['minions'])
continue
|
Process events even without 'id' in them This was breaking syndic matching. The syndic would return its list of expected minions, but that event does not have 'id' in it. Thus, the master would not process that event and it would either result in huge delays for minion returns or the returns for minions under syndics wouldn't appear at all on the CLI.
|
py
|
diff --git a/pysnow.py b/pysnow.py
index <HASH>..<HASH> 100644
--- a/pysnow.py
+++ b/pysnow.py
@@ -194,6 +194,10 @@ class Request(object):
raise InvalidUsage('Attempted to update a non-existing record')
except MultipleResults:
raise NotImplementedError("Update of multiple records is not supported")
+
+ if not isinstance(payload, dict):
+ raise InvalidUsage("Update payload must be of type dict")
+
response = self.session.put(self._get_url(self.table, sys_id), data=json.dumps(payload))
return self._get_content(response) # @TODO - update to return first key (API breakage)
|
Make sure payload is of type dict() in updates
|
py
|
diff --git a/backtrader/lineseries.py b/backtrader/lineseries.py
index <HASH>..<HASH> 100644
--- a/backtrader/lineseries.py
+++ b/backtrader/lineseries.py
@@ -64,6 +64,13 @@ class Lines(object):
return newcls
+ @classmethod
+ def _getlinealias(cls, i):
+ linealias = cls._getlines()[i]
+ if not isinstance(linealias, basestring):
+ linealias = linealias[0]
+ return linealias
+
def __init__(self):
self.lines = list()
for line, linealias in enumerate(self._getlines()):
|
LineSeries _getlinealias method - Translate backwards from line number to defined alias names
|
py
|
diff --git a/salt/output/virt_query.py b/salt/output/virt_query.py
index <HASH>..<HASH> 100644
--- a/salt/output/virt_query.py
+++ b/salt/output/virt_query.py
@@ -11,4 +11,6 @@ def output(data):
out += ' CPUS: {0}\n'.format(vm_data['cpu'])
if 'mem' in vm_data:
out += ' MEMORY: {0}\n'.format(vm_data['mem'])
+ if 'state' in vm_data:
+ out += ' STATE: {0}\n'.format(vm_data['state'])
return out
|
Add state check to virt_query
|
py
|
diff --git a/Lib/pyhsm/version.py b/Lib/pyhsm/version.py
index <HASH>..<HASH> 100644
--- a/Lib/pyhsm/version.py
+++ b/Lib/pyhsm/version.py
@@ -58,4 +58,4 @@ class YHSM_Version():
"""
This is a key handle permission flag that was introduced in 0.9.9.
"""
- return self.ver > (0, 9, 9,)
+ return self.ver >= (0, 9, 9,)
|
Correct have_YSM_BUFFER_LOAD.
|
py
|
diff --git a/MAVProxy/mavproxy.py b/MAVProxy/mavproxy.py
index <HASH>..<HASH> 100755
--- a/MAVProxy/mavproxy.py
+++ b/MAVProxy/mavproxy.py
@@ -123,6 +123,7 @@ class MPState(object):
('shownoise', int, 1),
('basealt', int, 0),
('wpalt', int, 100),
+ ('flushlogs', int, 0),
('requireexit', int, 0)]
)
@@ -856,8 +857,9 @@ def log_writer():
mpstate.logfile_raw.write(mpstate.logqueue_raw.get())
while not mpstate.logqueue.empty():
mpstate.logfile.write(mpstate.logqueue.get())
- mpstate.logfile.flush()
- mpstate.logfile_raw.flush()
+ if mpstate.settings.flushlogs:
+ mpstate.logfile.flush()
+ mpstate.logfile_raw.flush()
def open_logs():
'''open log files'''
|
don't flush logs unless flushlogs setting is set
|
py
|
diff --git a/tests/utils/helpers.py b/tests/utils/helpers.py
index <HASH>..<HASH> 100644
--- a/tests/utils/helpers.py
+++ b/tests/utils/helpers.py
@@ -960,6 +960,7 @@ def get_hazard_job(cfg, username=None):
def random_location_generator(min_x=-180, max_x=180, min_y=-90, max_y=90):
+ rnd = random.Random()
return shapely.geometry.Point(
- (min_x + random.random() * (max_x - min_x),
- min_y + random.random() * (max_y - min_y)))
+ rnd.randint(min_x, max_x),
+ rnd.randint(min_y, max_y))
|
tests/utils/helpers: Random locations sometimes cause test failures, because WKB is not a good way to compare location. At a certain level of precision, two different numbers will have the same WKB. This random data causes _random_ test failures. This should stabilize it. Former-commit-id: c<I>b<I>fe2b<I>af5b6c<I>bc<I>c5e7e8d<I>a5
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -53,7 +53,6 @@ setup(
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
- 'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6'
|
Remove python <I> from supported versions Refs #<I>
|
py
|
diff --git a/spyder/plugins/editor/utils/editor.py b/spyder/plugins/editor/utils/editor.py
index <HASH>..<HASH> 100644
--- a/spyder/plugins/editor/utils/editor.py
+++ b/spyder/plugins/editor/utils/editor.py
@@ -456,6 +456,7 @@ class TextHelper(object):
text_cursor.endEditBlock()
editor._cleaning = False
+ editor.document_did_change()
def select_whole_line(self, line=None, apply_selection=True):
"""
@@ -645,6 +646,7 @@ class TextHelper(object):
text_cursor.setPosition(s)
text_cursor.setPosition(e, text_cursor.KeepAnchor)
self._editor.setTextCursor(text_cursor)
+ self._editor.document_did_change()
def clear_selection(self):
"""Clears text cursor selection."""
|
Editor: Add document_did_change to TextHelper utility class
|
py
|
diff --git a/refcycle/test/test_refcycle.py b/refcycle/test/test_refcycle.py
index <HASH>..<HASH> 100644
--- a/refcycle/test/test_refcycle.py
+++ b/refcycle/test/test_refcycle.py
@@ -40,9 +40,14 @@ class TestRefcycle(unittest.TestCase):
self.assertEqual(len(object_graph), 4)
self.assertEqual(len(object_graph.references()), 4)
+ # Case where no cycles created.
+ object_graph = cycles_created_by(lambda: None)
+ self.assertEqual(len(object_graph), 0)
+
# Check that we didn't unnecessarily add anything to gc.garbage.
self.assertEqual(len(gc.garbage), original_garbage)
+
def test_snapshot(self):
with disable_gc():
original_objects = snapshot()
|
Add test covering empty case of cycles_created_by.
|
py
|
diff --git a/empyrical/stats.py b/empyrical/stats.py
index <HASH>..<HASH> 100644
--- a/empyrical/stats.py
+++ b/empyrical/stats.py
@@ -654,7 +654,7 @@ def sharpe_ratio(returns,
Daily returns of the strategy, noncumulative.
- See full explanation in :func:`~empyrical.stats.cum_returns`.
risk_free : int, float
- Constant risk-free return throughout the period.
+ Constant daily risk-free return throughout the period.
period : str, optional
Defines the periodicity of the 'returns' data for purposes of
annualizing. Value ignored if `annualization` parameter is specified.
|
DOC Note daily frequency of risk-free rate (#<I>)
|
py
|
diff --git a/tofu/tests/tests01_geom/tests03_core.py b/tofu/tests/tests01_geom/tests03_core.py
index <HASH>..<HASH> 100644
--- a/tofu/tests/tests01_geom/tests03_core.py
+++ b/tofu/tests/tests01_geom/tests03_core.py
@@ -937,6 +937,7 @@ class Test03_Rays(object):
# Just to check the loaded version works fine
obj2.strip(0, verb=verb)
os.remove(pfe)
+
def test15_get_sample_same_res_unit(self):
dmeths = ['rel', 'abs']
qmeths = ['simps', 'romb', 'sum']
|
[Issue<I>] One last print of Config left in unit tests, can't find it
|
py
|
diff --git a/dvc/version.py b/dvc/version.py
index <HASH>..<HASH> 100644
--- a/dvc/version.py
+++ b/dvc/version.py
@@ -7,7 +7,7 @@ import os
import subprocess
-_BASE_VERSION = "0.74.0"
+_BASE_VERSION = "0.75.0"
def _generate_version(base_version):
|
dvc: bump to <I>
|
py
|
diff --git a/simuvex/s_state.py b/simuvex/s_state.py
index <HASH>..<HASH> 100644
--- a/simuvex/s_state.py
+++ b/simuvex/s_state.py
@@ -444,6 +444,14 @@ class SimState(object): # pylint: disable=R0904
return state
#
+ # Other helper methods
+ #
+
+ def set_mode(self, mode):
+ self.mode = mode
+ self.options = set(o.default_options[mode])
+
+ #
# Concretization
#
|
added set_mode to s_state
|
py
|
diff --git a/Qt.py b/Qt.py
index <HASH>..<HASH> 100644
--- a/Qt.py
+++ b/Qt.py
@@ -1399,7 +1399,8 @@ def _install():
setattr(our_submodule, member, their_member)
# Backwards compatibility
- Qt.QtCompat.load_ui = Qt.QtCompat.loadUi
+ if hasattr(Qt.QtCompat, 'loadUi'):
+ Qt.QtCompat.load_ui = Qt.QtCompat.loadUi
_install()
|
Prevent error reported in #<I>
|
py
|
diff --git a/sh.py b/sh.py
index <HASH>..<HASH> 100644
--- a/sh.py
+++ b/sh.py
@@ -3504,7 +3504,11 @@ class SelfWrapper(ModuleType):
# but it seems to be the only way to make reload() behave
# nicely. if i make these attributes dynamic lookups in
# __getattr__, reload sometimes chokes in weird ways...
- for attr in ["__builtins__", "__doc__", "__file__", "__name__", "__package__"]:
+ super(SelfWrapper, self).__init__(
+ name=getattr(self_module, '__name__', None),
+ doc=getattr(self_module, '__doc__', None)
+ )
+ for attr in ["__builtins__", "__file__", "__package__"]:
setattr(self, attr, getattr(self_module, attr, None))
# python 3.2 (2.7 and 3.3 work fine) breaks on osx (not ubuntu)
|
Make sure to call super() in SelfWrapper.__init__
|
py
|
diff --git a/tcex/testing/validate_data.py b/tcex/testing/validate_data.py
index <HASH>..<HASH> 100644
--- a/tcex/testing/validate_data.py
+++ b/tcex/testing/validate_data.py
@@ -133,6 +133,7 @@ class Validator(object):
test_data = json.loads(json.dumps(test_data))
except ValueError:
pass
+
try:
if isinstance(app_data, list) and isinstance(test_data, list):
for index, data in enumerate(app_data):
@@ -152,9 +153,10 @@ class Validator(object):
test_data = self.remove_excludes(test_data, paths)
except AttributeError:
pass
+
# run operator
try:
- ddiff = DeepDiff(app_data, test_data, ignore_order=True, **kwargs)
+ ddiff = DeepDiff(app_data, test_data, **kwargs)
except KeyError:
return False, 'Encountered KeyError when running deepdiff'
except NameError:
|
removed ignore_order param for deepdiff validation
|
py
|
diff --git a/examples/get_images.py b/examples/get_images.py
index <HASH>..<HASH> 100644
--- a/examples/get_images.py
+++ b/examples/get_images.py
@@ -55,6 +55,12 @@ if __name__ == '__main__':
image_instances = ImageInstanceCollection().fetch_with_filter("project", params.id_project)
print(image_instances)
+ if params.download_path:
+ f= open(params.download_path+"images-"+params.id_project+".csv","w+")
+ f.write("ID;Width;Height;Resolution;Magnification;Filename \n")
+ for image in image_instances:
+ f.write("{};{};{};{};{};{}\n".format(image.id,image.width,image.height,image.resolution,image.magnification,image.filename))
+
for image in image_instances:
print("Image ID: {} | Width: {} | Height: {} | Resolution: {} | Magnification: {} | Filename: {}".format(
image.id, image.width, image.height, image.resolution, image.magnification, image.filename
|
Create an images-PROJECTID.csv file with image information
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100755
--- a/setup.py
+++ b/setup.py
@@ -4,7 +4,7 @@ from setuptools import setup
setup(
name = 'pypdb',
packages = ['pypdb'], # same as 'name'
- version = '1.200',
+ version = '1.300',
install_requires=[
'xmltodict',
'beautifulsoup4'
|
increment version for pypi
|
py
|
diff --git a/tests/_Run_Tests.py b/tests/_Run_Tests.py
index <HASH>..<HASH> 100644
--- a/tests/_Run_Tests.py
+++ b/tests/_Run_Tests.py
@@ -22,7 +22,7 @@ def run_tests():
okay = []
for i in os.listdir("."):
if i.find("_test_") > -1 and i.endswith(".py"):
- if 0 != subprocess.call("python " + i, shell=True):
+ if 0 != subprocess.call("python " + i):
fail.append(i)
else:
okay.append(i)
|
Trying to fix broken CI tests.
|
py
|
diff --git a/nodeconductor/structure/serializers.py b/nodeconductor/structure/serializers.py
index <HASH>..<HASH> 100644
--- a/nodeconductor/structure/serializers.py
+++ b/nodeconductor/structure/serializers.py
@@ -535,12 +535,14 @@ class UserSerializer(serializers.HyperlinkedModelSerializer):
'civil_number',
'description',
'is_staff', 'is_active',
+ 'date_joined',
)
read_only_fields = (
'uuid',
'civil_number',
'organization',
'organization_approved',
+ 'date_joined',
)
extra_kwargs = {
'url': {'lookup_field': 'uuid'},
|
Expose date_joined attribute in User serializer - NC-<I>
|
py
|
diff --git a/scarlet/versioning/view_mixins.py b/scarlet/versioning/view_mixins.py
index <HASH>..<HASH> 100644
--- a/scarlet/versioning/view_mixins.py
+++ b/scarlet/versioning/view_mixins.py
@@ -19,6 +19,7 @@ class PreviewableObject(SingleObjectMixin):
"""
schema = manager.get_schema()
+ vid = None
if self.request.GET.get('vid') and self.request.user.is_staff and \
self.request.user.is_active:
try:
@@ -38,9 +39,9 @@ class PreviewableObject(SingleObjectMixin):
slug = self.kwargs.get(self.slug_url_kwarg, None)
if pk is not None:
if vid:
- queryset = queryset.filter(object_id=pk)
+ queryset = queryset.filter(vid=vid)
else:
- queryset = queryset
+ queryset = queryset.filter(object_id=pk)
# Next, try looking up by slug.
elif slug is not None:
|
Fix version id based preview
|
py
|
diff --git a/zinnia/moderator.py b/zinnia/moderator.py
index <HASH>..<HASH> 100644
--- a/zinnia/moderator.py
+++ b/zinnia/moderator.py
@@ -8,12 +8,8 @@ from django.contrib.sites.models import Site
from django.utils.translation import activate
from django.utils.translation import get_language
from django.utils.translation import ugettext_lazy as _
-from django.contrib.comments import signals
-from django.contrib.comments.models import CommentFlag
from django.contrib.comments.moderation import CommentModerator
-from zinnia.flags import SPAM
-from zinnia.flags import get_user_flagger
from zinnia.settings import PROTOCOL
from zinnia.settings import MAIL_COMMENT_REPLY
from zinnia.settings import MAIL_COMMENT_AUTHORS
@@ -125,12 +121,6 @@ class EntryCommentModerator(CommentModerator):
if check_is_spam(comment, content_object, request,
self.spam_checker_backends):
- comment.save()
- flag, created = CommentFlag.objects.get_or_create(
- comment=comment, user=get_user_flagger(), flag=SPAM)
- signals.comment_was_flagged.send(
- sender=comment.__class__, comment=comment,
- flag=flag, created=created, request=request)
return True
return False
|
moderate method does not apply a SPAM flag anymore and become more respectful of the API by removing the save
|
py
|
diff --git a/discord/ext/commands/bot.py b/discord/ext/commands/bot.py
index <HASH>..<HASH> 100644
--- a/discord/ext/commands/bot.py
+++ b/discord/ext/commands/bot.py
@@ -41,7 +41,7 @@ from .formatter import HelpFormatter
def when_mentioned(bot, msg):
"""A callable that implements a command prefix equivalent
to being mentioned, e.g. ``@bot ``."""
- return [bot.user.mention, '<@!%s>' % bot.user.id]
+ return [bot.user.mention + ' ', '<@!%s> ' % bot.user.id]
def when_mentioned_or(*prefixes):
"""A callable that implements when mentioned or other prefixes provided.
|
[commands] Fix lack of space in when_mentioned
|
py
|
diff --git a/splunklib/client.py b/splunklib/client.py
index <HASH>..<HASH> 100644
--- a/splunklib/client.py
+++ b/splunklib/client.py
@@ -933,7 +933,10 @@ class Entity(Endpoint):
def _load_atom_entry(self, response):
elem = _load_atom(response, XNAME_ENTRY)
if isinstance(elem, list):
- raise AmbiguousReferenceException("Fetch from server returned multiple entries for name %s." % self.name)
+ apps = [ele.entry.content.get('eai:appName') for ele in elem]
+
+ raise AmbiguousReferenceException(
+ "Fetch from server returned multiple entries for name '%s' in apps %s." % (elem[0].entry.title, apps))
else:
return elem.entry
|
Identical entity names will cause an infinite loop "RuntimeError: maximum recursion depth exceeded". Give a clear message about which applications have this entity.
|
py
|
diff --git a/tests/test_update_query.py b/tests/test_update_query.py
index <HASH>..<HASH> 100644
--- a/tests/test_update_query.py
+++ b/tests/test_update_query.py
@@ -120,6 +120,10 @@ def test_with_query_list_int():
@pytest.mark.parametrize(
"query,expected",
[
+ pytest.param({"a": []}, "?", id="empty list"),
+ pytest.param({"a": ()}, "?", id="empty tuple"),
+ pytest.param({"a": [1]}, "?a=1", id="single list"),
+ pytest.param({"a": (1,)}, "?a=1", id="single tuple"),
pytest.param({"a": [1, 2]}, "?a=1&a=2", id="list"),
pytest.param({"a": (1, 2)}, "?a=1&a=2", id="tuple"),
pytest.param({"a[]": [1, 2]}, "?a[]=1&a[]=2", id="key with braces"),
|
Add tests for edge cases of quoting lists and tuples
|
py
|
diff --git a/indra/tests/test_pysb_assembler.py b/indra/tests/test_pysb_assembler.py
index <HASH>..<HASH> 100644
--- a/indra/tests/test_pysb_assembler.py
+++ b/indra/tests/test_pysb_assembler.py
@@ -813,10 +813,11 @@ def _check_mod_assembly(mod_class):
assert(len(model.monomers)==2)
def test_modification_assembly():
- for mod_class in Modification.__subclasses__():
+ classes = AddModification.__subclasses__() + \
+ RemoveModification.__subclasses__()
+ for mod_class in classes:
_check_mod_assembly(mod_class)
-
def test_rule_annotation():
a = Agent('A', db_refs={'HGNC': '1234'})
b = Agent('B', db_refs={'HGNC': '5678'})
@@ -834,7 +835,9 @@ def test_rule_annotation():
assert len(obj) == 1
assert obj[0] == 'B'
- for mod_class in Modification.__subclasses__():
+ classes = AddModification.__subclasses__() + \
+ RemoveModification.__subclasses__()
+ for mod_class in classes:
stmt = mod_class(a, b)
check_rule_annotation(stmt, 'one_step')
check_rule_annotation(stmt, 'two_step')
|
Fix PysbAssembler tests for Modification subclasses
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -52,7 +52,7 @@ with VenvLinkDeleted():
name='django-endless-pagination',
version=project.get_version(),
description=project.__doc__,
- long_description=read('README'),
+ long_description=read('README.rst'),
author='Francesco Banconi',
author_email='francesco.banconi@gmail.com',
url='http://code.google.com/p/django-endless-pagination/',
|
Update setup.py Cannot install package with setup.py since README does not exist (README.rst exists instead)
|
py
|
diff --git a/discord/client.py b/discord/client.py
index <HASH>..<HASH> 100644
--- a/discord/client.py
+++ b/discord/client.py
@@ -464,6 +464,24 @@ class Client:
passing status code.
"""
+ if email == "token":
+ log.info('logging in using static token')
+ self.token = password
+ self.headers['authorization'] = 'Bot {}'.format(self.token)
+ resp = yield from self.session.get(endpoints.ME, headers=self.headers)
+ log.debug(request_logging_format.format(method='GET', response=resp))
+
+ if resp.status != 200:
+ yield from resp.release()
+ if resp.status == 400:
+ raise LoginFailure('Improper token has been passed.')
+ else:
+ raise HTTPException(resp, None)
+
+ log.info('token auth returned status code {}'.format(resp.status))
+ self._is_logged_in.set()
+ return
+
# attempt to read the token from cache
if self.cache_auth:
yield from self._login_via_cache(email, password)
|
Add support for token login (for bots)
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -16,7 +16,7 @@ requirements = (
setup(
name='combine',
- version='0.0.18.dev0',
+ version='0.0.18',
description='A helpful, simple static site generator.',
long_description=long_description,
long_description_content_type='text/markdown',
|
Preparing release <I>
|
py
|
diff --git a/tests/simulation/TestSimulation.py b/tests/simulation/TestSimulation.py
index <HASH>..<HASH> 100644
--- a/tests/simulation/TestSimulation.py
+++ b/tests/simulation/TestSimulation.py
@@ -44,7 +44,7 @@ class TestSimulation(BaseTestSimulation):
}
for cycle in tests:
synergy_object_manager = self._getSynergyObjectManagerForCycle(cycles=cycle, main_process=main_process)
- self.assertEqual(tests[cycle], self._getObjectsResume(synergy_object_manager))
+ self.assertEqual(sorted(tests[cycle]), sorted(self._getObjectsResume(synergy_object_manager)))
def _getObjectsResume(self, synergy_object_manager):
resume = []
|
Fix core simulation test: sort results to prevent random order due to process
|
py
|
diff --git a/core_examples/public_timeline.py b/core_examples/public_timeline.py
index <HASH>..<HASH> 100644
--- a/core_examples/public_timeline.py
+++ b/core_examples/public_timeline.py
@@ -5,4 +5,4 @@ twitter = Twython()
public_timeline = twitter.getPublicTimeline()
for tweet in public_timeline:
- print tweet["text"]
+ print tweet["text"]
|
PEP8 Edit: Removed Tab -> added 4 spaces
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -10,6 +10,8 @@ setup(
packages=find_packages(exclude=["*test*"]),
install_requires=open("requirements.txt").readlines(),
description="Simple script for sending emails",
+ long_description=open("README.md").read(),
+ long_description_content_type="text/markdown",
author_email="dariusz.izak@ibb.waw.pl",
url="https://github.com/dizak/headnode_notifier",
license="MIT",
|
FIX: Added missing long_description
|
py
|
diff --git a/mail_deduplicate/deduplicate.py b/mail_deduplicate/deduplicate.py
index <HASH>..<HASH> 100644
--- a/mail_deduplicate/deduplicate.py
+++ b/mail_deduplicate/deduplicate.py
@@ -36,6 +36,7 @@ from . import (
MissingMessageID,
SizeDiffAboveThreshold,
logger,
+ MD_SUBDIRS,
)
from .mail import Mail
@@ -496,6 +497,14 @@ class Deduplicate:
if source_path.is_dir():
logger.info(f"Opening {source_path} as a maildir...")
+
+ # Validates folder is a maildir.
+ for subdir in MD_SUBDIRS:
+ if not source_path.joinpath(subdir).is_dir():
+ raise ValueError(
+ f"{source_path} is not a maildir folder (missing {subdir!r} "
+ "sub-directory).")
+
mail_source = Maildir(source_path, factory=None, create=False)
elif source_path.is_file():
|
Re-introduce maildir validation.
|
py
|
diff --git a/rt.py b/rt.py
index <HASH>..<HASH> 100644
--- a/rt.py
+++ b/rt.py
@@ -14,6 +14,20 @@ def global_loop(queue):
actor.behavior(message)
+def initial_behavior(f):
+ f.initial_behavior = True
+ return f
+
+
+class MetaActor(type):
+
+ def __new__(mcls, name, bases, dict):
+ for meth in list(dict.values()):
+ if getattr(meth, 'initial_behavior', False):
+ dict['behavior'] = meth
+ return type.__new__(mcls, name, bases, dict)
+
+
class EventLoop(object):
loop = None
@@ -36,8 +50,11 @@ class EventLoop(object):
return cls.loop
-class AbstractActor(object):
+class AbstractActor(object, metaclass=MetaActor):
+ def __init__(self, **kwargs):
+ self.__dict__.update(kwargs)
+
def __call__(self, message):
self._put(message)
|
More convenient Actor declaration via metaclasses
|
py
|
diff --git a/master/buildbot/steps/source.py b/master/buildbot/steps/source.py
index <HASH>..<HASH> 100644
--- a/master/buildbot/steps/source.py
+++ b/master/buildbot/steps/source.py
@@ -18,6 +18,7 @@ from warnings import warn
from email.Utils import formatdate
from twisted.python import log
from buildbot.process.buildstep import LoggingBuildStep, LoggedRemoteCommand
+from buildbot.process.properties import WithProperties
from buildbot.interfaces import BuildSlaveTooOldError
from buildbot.status.builder import SKIPPED
@@ -186,7 +187,8 @@ class Source(LoggingBuildStep):
'''
assert not repository or callable(repository) or isinstance(repository, dict) or \
- isinstance(repository, str) or isinstance(repository, unicode)
+ isinstance(repository, str) or isinstance(repository, unicode) or \
+ isinstance(repository, WithProperties)
s = self.build.getSourceStamp()
props = self.build.getProperties()
@@ -199,6 +201,8 @@ class Source(LoggingBuildStep):
return str(props.render(repository(s.repository)))
elif isinstance(repository, dict):
return str(props.render(repository.get(s.repository)))
+ elif isinstance(repository, WithProperties):
+ return str(props.render(repository))
else: # string or unicode
try:
repourl = str(repository % s.repository)
|
Make Source steps conform to docs We state that we allow WithProperties in the repository kwarg, but repository=WithProperties(...) didn't work. Now it does.
|
py
|
diff --git a/zinnia_tinymce/__init__.py b/zinnia_tinymce/__init__.py
index <HASH>..<HASH> 100644
--- a/zinnia_tinymce/__init__.py
+++ b/zinnia_tinymce/__init__.py
@@ -1,5 +1,5 @@
"""TinyMCE for Django-blog-zinnia"""
-__version__ = '1.1'
+__version__ = '1.2'
__license__ = 'BSD License'
__author__ = 'Fantomas42'
|
Bumping to version <I>
|
py
|
diff --git a/salt/config.py b/salt/config.py
index <HASH>..<HASH> 100644
--- a/salt/config.py
+++ b/salt/config.py
@@ -1975,6 +1975,9 @@ def client_config(path, env_var='SALT_CLIENT_CONFIG', defaults=None):
if os.path.isfile(opts['token_file']):
with salt.utils.fopen(opts['token_file']) as fp_:
opts['token'] = fp_.read().strip()
+ # On some platforms, like OpenBSD, 0.0.0.0 won't catch a master running on localhost
+ if opts['interface'] == '0.0.0.0':
+ opts['interface'] = '127.0.0.1'
# Return the client options
_validate_opts(opts)
return opts
|
Possible fix for #<I> * Rewrite a client's interface directive to localhost if it's set to <I>.
|
py
|
diff --git a/moto/route53/responses.py b/moto/route53/responses.py
index <HASH>..<HASH> 100644
--- a/moto/route53/responses.py
+++ b/moto/route53/responses.py
@@ -174,6 +174,7 @@ LIST_HOSTED_ZONES_RESPONSE = """<ListHostedZonesResponse xmlns="https://route53.
</HostedZone>
{% endfor %}
</HostedZones>
+ <IsTruncated>false</IsTruncated>
</ListHostedZonesResponse>"""
CREATE_HEALTH_CHECK_RESPONSE = """<?xml version="1.0" encoding="UTF-8"?>
|
Add IsTruncated set to false in template list_hosted_zones_response in route<I>, because it always has it.
|
py
|
diff --git a/thunder/base.py b/thunder/base.py
index <HASH>..<HASH> 100644
--- a/thunder/base.py
+++ b/thunder/base.py
@@ -1,6 +1,6 @@
from numpy import array, asarray, ndarray, prod, ufunc, add, subtract, \
multiply, divide, isscalar, newaxis, unravel_index, argsort
-from bolt.utils import inshape, tupleize
+from bolt.utils import inshape, tupleize, slicify
from bolt.base import BoltArray
from bolt.spark.array import BoltArraySpark
from bolt.spark.chunk import ChunkedArray
@@ -195,11 +195,11 @@ class Data(Base):
_attributes = Base._attributes + ['labels']
def __getitem__(self, item):
- # handle values
+ # handle values -- convert ints to slices so no dimensions are dropped
if isinstance(item, int):
- item = slice(item, item+1, None)
+ item = tuple([slicify(item, self.shape[0])])
if isinstance(item, tuple):
- item = tuple([slice(i, i+1, None) if isinstance(i, int) else i for i in item])
+ item = tuple([slicify(i, n) if isinstance(i, int) else i for i, n in zip(item, self.shape[:len(item)])])
if isinstance(item, (list, ndarray)):
item = (item,)
new = self._values.__getitem__(item)
|
updated indexing to take advantage of better indexing recently added to Bolt
|
py
|
diff --git a/pymc/distributions/discrete.py b/pymc/distributions/discrete.py
index <HASH>..<HASH> 100644
--- a/pymc/distributions/discrete.py
+++ b/pymc/distributions/discrete.py
@@ -194,6 +194,7 @@ def DiscreteUniform(lower, upper):
- `upper` : Upper limit (upper > lower).
"""
+ lower, upper = lower.astype('int32'), upper.astype('int32')
def logp(value):
|
Ensured upper and lower are ints for DiscreteUniform
|
py
|
diff --git a/docs/convert_notebooks_to_html_partial.py b/docs/convert_notebooks_to_html_partial.py
index <HASH>..<HASH> 100644
--- a/docs/convert_notebooks_to_html_partial.py
+++ b/docs/convert_notebooks_to_html_partial.py
@@ -145,10 +145,6 @@ def _preamble_cell(path):
# HIDDEN
# Clear previously defined variables
%reset -f
-
- # Set directory for data loading to work properly
- import os
- os.chdir(os.path.expanduser('~/docs/{}'))
'''.format(path)
)
return nbformat.v4.new_code_cell(source=code)
|
Remove buggy notebook preamble code
|
py
|
diff --git a/r128gain/__init__.py b/r128gain/__init__.py
index <HASH>..<HASH> 100755
--- a/r128gain/__init__.py
+++ b/r128gain/__init__.py
@@ -117,7 +117,6 @@ def get_r128_loudness(audio_filepaths, *, calc_peak=True, enable_ffmpeg_threadin
sample_fmts="s16",
sample_rates="48000",
channel_layouts="stereo")
- ffmpeg_r128_stream = ffmpeg_r128_stream.filter("afifo") # TODO remove?
ffmpeg_r128_streams.append(ffmpeg_r128_stream)
if len(audio_filepaths) > 1:
|
Remove afifo FFmpeg filter
|
py
|
diff --git a/xigt/codecs/xigtxml.py b/xigt/codecs/xigtxml.py
index <HASH>..<HASH> 100644
--- a/xigt/codecs/xigtxml.py
+++ b/xigt/codecs/xigtxml.py
@@ -272,13 +272,18 @@ def default_encode_meta(meta, indent=2, level=1):
# raise ValueError('Invalid subtype of Meta: {}'
# .format(meta.type))
attrs = encode_attributes(meta, ['id', 'type'])
- cnt = ''.join([escape(meta.text)] + meta.children)
- s = '{}<meta{}{}>'.format(
+ cnt = ''.join([escape(meta.text)] + (meta.children or []))
+ lines = ['{}<meta{}{}>'.format(
' ' * ((level * indent) - 2),
attrs,
- '/' if cnt is None else '>{}</meta>'.format(cnt)
- )
- return s
+ '/' if not cnt else ''
+ )]
+ if cnt:
+ lines.append(cnt.rstrip())
+ if indent:
+ lines.append('\n')
+ lines.append('{}</meta>'.format(' ' * ((level * indent) - 2)))
+ return ''.join(lines)
##############################################################################
|
Fix xigtxml so it doesn't crash when Meta.children is None. Also this fixes spacing issues with the <meta> start and end tags, and prevents an extra > from outputting after </meta>.
|
py
|
diff --git a/tests/test_pipenv.py b/tests/test_pipenv.py
index <HASH>..<HASH> 100644
--- a/tests/test_pipenv.py
+++ b/tests/test_pipenv.py
@@ -459,7 +459,29 @@ requests = {version = "*"}
c = p.pipenv('check --style .')
assert 'requests' in c.out
+ @pytest.mark.extras
+ @pytest.mark.install
+ @pytest.mark.requirements
+ def test_requirements_to_pipfile(self):
+ with PipenvInstance(pipfile=False) as p:
+ # Write a requirements file
+ with open('requirements.txt', 'w') as f:
+ f.write('requests[socks]==2.18.1\n')
+ c = p.pipenv('install')
+ # Assert that the files get converted the requirements
+ assert p.pipfile
+ assert p.lockfile
+ # assert stuff in pipfile
+ assert 'requests' in p.pipfile['packages']
+ assert 'extras' in p.pipfile['packages']['requests']
+
+ # assert stuff in lockfile
+ assert 'requests' in p.lockfile['default']
+ assert 'chardet' in p.lockfile['default']
+ assert 'idna' in p.lockfile['default']
+ assert 'urllib3' in p.lockfile['default']
+ assert 'pysocks' in p.lockfile['default']
\ No newline at end of file
|
added a test for converting requirements to pipfile
|
py
|
diff --git a/thinc/about.py b/thinc/about.py
index <HASH>..<HASH> 100644
--- a/thinc/about.py
+++ b/thinc/about.py
@@ -1,2 +1,2 @@
-__version__ = "8.0.9"
+__version__ = "8.0.10"
__release__ = True
|
Set version to <I> (#<I>)
|
py
|
diff --git a/lhc/file_format/vcf_/index.py b/lhc/file_format/vcf_/index.py
index <HASH>..<HASH> 100644
--- a/lhc/file_format/vcf_/index.py
+++ b/lhc/file_format/vcf_/index.py
@@ -37,3 +37,11 @@ class IndexedVcfFile(object):
start + 1
lines = self.index.fetch(chr, start, stop)
return [self.iterator._parseLine(line) for line in lines]
+
+ def getVariantsAtPosition(self, chr, pos):
+ lines = self.index.fetch(chr, pos, pos + 1)
+ return [self.iterator._parseLine(line) for line in lines]
+
+ def getVariantsInInterval(self, chr, start, stop):
+ line = self.index.fetch(chr, start, stop)
+ return [self.iterator._parseLine(line) for line in lines]
|
added convenience functions to vcf index
|
py
|
diff --git a/scout/server/blueprints/dashboard/controllers.py b/scout/server/blueprints/dashboard/controllers.py
index <HASH>..<HASH> 100644
--- a/scout/server/blueprints/dashboard/controllers.py
+++ b/scout/server/blueprints/dashboard/controllers.py
@@ -1,6 +1,5 @@
import logging
from flask_login import current_user
-from scout.server.utils import user_institutes
LOG = logging.getLogger(__name__)
@@ -157,15 +156,11 @@ def get_general_case_info(adapter, institute_id=None, slice_query=None):
general(dict)
"""
general = {}
- # Fetch information about cases with certain activities
- cases = {}
- # Collect available institute IDs for current_user
- institute_ids = [ inst['_id'] for inst in list(user_institutes(adapter, current_user))]
cases_owner = None
name_query = None
- if institute_id and institute_id in institute_ids: # OK to filter for a given institute
+ if institute_id and institute_id in current_user.institutes: # OK to filter for a given institute
LOG.debug('Dashboard with stats for an institute')
cases_owner = institute_id
if slice_query:
|
a better way to check if institute is available for a user
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -27,7 +27,8 @@ metadata = {'__file__': os.path.join(here, 'plenum', '__metadata__.py')}
with open(metadata['__file__'], 'r') as f:
exec(f.read(), metadata)
-tests_require = ['attrs==19.1.0', 'pytest==3.3.1', 'pytest-xdist==1.22.1', 'python3-indy==1.11.1-dev-1343', 'pytest-asyncio==0.8.0']
+tests_require = ['attrs==19.1.0', 'pytest==3.3.1', 'pytest-xdist==1.22.1', 'pytest-forked==0.2',
+ 'python3-indy==1.11.1-dev-1343', 'pytest-asyncio==0.8.0']
class PyZMQCommand(distutils.cmd.Command):
|
INDY-<I>: fix pytest-forked version for <I>
|
py
|
diff --git a/salt/cloud/clouds/nova.py b/salt/cloud/clouds/nova.py
index <HASH>..<HASH> 100644
--- a/salt/cloud/clouds/nova.py
+++ b/salt/cloud/clouds/nova.py
@@ -386,8 +386,6 @@ def destroy(name, conn=None, call=None):
{'name': name},
transport=__opts__['transport']
)
- print __opts__.get('ssh_interface')
- print str(node)
if __opts__.get('delete_sshkeys', False) is True:
salt.utils.cloud.remove_sshkey(getattr(node, __opts__.get('ssh_interface', 'public_ips'))[0])
if __opts__.get('update_cachedir', False) is True:
|
removed some debugging code accidentally committed earlier
|
py
|
diff --git a/pyasn1/compat/binary.py b/pyasn1/compat/binary.py
index <HASH>..<HASH> 100644
--- a/pyasn1/compat/binary.py
+++ b/pyasn1/compat/binary.py
@@ -10,6 +10,14 @@ if version_info[0:2] < (2, 6):
def bin(value):
bitstring = []
+ if value > 0:
+ prefix = '0b'
+ elif value < 0:
+ prefix = '-0b'
+ value = abs(value)
+ else:
+ prefix = '0b0'
+
while value:
if value & 1 == 1:
bitstring.append('1')
@@ -20,6 +28,6 @@ if version_info[0:2] < (2, 6):
bitstring.reverse()
- return '0b' + ''.join(bitstring)
+ return prefix + ''.join(bitstring)
else:
bin = bin
|
fixed compat bin() for negatives
|
py
|
diff --git a/libraries/botbuilder-core/botbuilder/core/bot_framework_adapter.py b/libraries/botbuilder-core/botbuilder/core/bot_framework_adapter.py
index <HASH>..<HASH> 100644
--- a/libraries/botbuilder-core/botbuilder/core/bot_framework_adapter.py
+++ b/libraries/botbuilder-core/botbuilder/core/bot_framework_adapter.py
@@ -340,6 +340,7 @@ class BotFrameworkAdapter(
If the conversation is established with the specified users, the ID of the activity
will contain the ID of the new conversation.
"""
+
try:
if not service_url:
service_url = reference.service_url
@@ -366,8 +367,10 @@ class BotFrameworkAdapter(
# Mix in the tenant ID if specified. This is required for MS Teams.
if reference.conversation and reference.conversation.tenant_id:
# Putting tenant_id in channel_data is a temporary while we wait for the Teams API to be updated
- parameters.channel_data = {
- "tenant": {"tenantId": reference.conversation.tenant_id}
+ if parameters.channel_data is None:
+ parameters.channel_data = {}
+ parameters.channel_data["tenant"] = {
+ "tenantId": reference.conversation.tenant_id
}
# Permanent solution is to put tenant_id in parameters.tenant_id
|
Fixed issue with channel_data being overwritten in create_conversation (#<I>)
|
py
|
diff --git a/dirutility/__init__.py b/dirutility/__init__.py
index <HASH>..<HASH> 100644
--- a/dirutility/__init__.py
+++ b/dirutility/__init__.py
@@ -1,9 +1,10 @@
from dirutility.move import FlattenTree, CreateTree, move_files_to_folders
from dirutility.walk import DirPaths, DirTree
from dirutility.view import desktop, open_window
-from dirutility.ftp import FTP
from dirutility.backup import ZipBackup
+from dirutility.ftp import FTP
+from dirutility.permissions import Permissions
__all__ = ['FlattenTree', 'CreateTree', 'move_files_to_folders', 'DirTree', 'DirPaths', 'desktop', 'ZipBackup', 'FTP',
- 'open_window']
+ 'open_window', 'Permissions']
|
ADD Permissions class to dirutility __all__ declaration
|
py
|
diff --git a/welly/curve.py b/welly/curve.py
index <HASH>..<HASH> 100644
--- a/welly/curve.py
+++ b/welly/curve.py
@@ -160,9 +160,12 @@ class Curve(np.ndarray):
ax.set(**axkwargs)
+ lw = getattr(d, 'lineweight', None) or getattr(d, 'lw', 1)
+ ls = getattr(d, 'linestyle', None) or getattr(d, 'ls', '-')
+
ax.set_title(self.mnemonic)
ax.set_xlabel(self.units)
- ax.plot(self, self.basis, c=c)
+ ax.plot(self, self.basis, c=c, lw=lw, ls=ls)
ax.set_ylim([self.stop, self.start])
ax.grid('on', color='k', alpha=0.2, lw=0.25, linestyle='-')
|
more decors params for curve plot
|
py
|
diff --git a/django_extensions/management/commands/sync_media_s3.py b/django_extensions/management/commands/sync_media_s3.py
index <HASH>..<HASH> 100644
--- a/django_extensions/management/commands/sync_media_s3.py
+++ b/django_extensions/management/commands/sync_media_s3.py
@@ -162,6 +162,11 @@ class Command(BaseCommand):
if root_dir == dirname:
return # We're in the root media folder
+ # Later we assume the MEDIA_ROOT ends with a trailing slash
+ # TODO: Check if we should check os.path.sep for Windows
+ if not root_dir.endswith('/'):
+ root_dir = root_dir + '/'
+
for file in names:
headers = {}
@@ -172,7 +177,7 @@ class Command(BaseCommand):
if os.path.isdir(filename):
continue # Don't try to upload directories
- file_key = filename[len(root_dir)+1:]
+ file_key = filename[len(root_dir):]
if self.prefix:
file_key = '%s/%s' % (self.prefix, file_key)
|
This update fixes an issue where the file name was losing the first character if the MEDIA_ROOT ended with a trailing slash. I've changed the assumption to be that MEDIA_ROOT does end with a trailing slash and will append one if none exists. Thanks to lukasz.korzybski for the report and debugging.
|
py
|
diff --git a/processors/generic_processor.py b/processors/generic_processor.py
index <HASH>..<HASH> 100644
--- a/processors/generic_processor.py
+++ b/processors/generic_processor.py
@@ -492,6 +492,10 @@ class GenericProcessor(processor.ImportProcessor):
self._revision_count += 1
self.report_progress("(%s)" % cmd.id)
+ if cmd.ref.startswith('refs/tags/'):
+ tag_name = cmd.ref[len('refs/tags/'):]
+ self._set_tag(tag_name, cmd.id)
+
# Check if we should finish up or automatically checkpoint
if (self.max_commits is not None and
self._revision_count >= self.max_commits):
|
Set a tag when touching a refs/tags/ ref with a commit command.
|
py
|
diff --git a/ca/django_ca/tests/base.py b/ca/django_ca/tests/base.py
index <HASH>..<HASH> 100644
--- a/ca/django_ca/tests/base.py
+++ b/ca/django_ca/tests/base.py
@@ -9,6 +9,9 @@ import os
import shutil
import tempfile
+from datetime import datetime
+from datetime import timedelta
+
from OpenSSL import crypto
from mock import patch
@@ -156,6 +159,13 @@ class DjangoCATestCase(TestCase):
self.assertEqual(cert.authorityKeyIdentifier().strip(),
'keyid:%s' % issuer.subjectKeyIdentifier())
+ def expires(self, days, now=None):
+ if now is None:
+ now = datetime.utcnow()
+ now = now.replace(hour=0, minute=0, second=0, microsecond=0)
+
+ return now + timedelta(days + 1)
+
@classmethod
def load_ca(cls, name, x509, enabled=True, parent=None, **kwargs):
"""Load a CA from one of the preloaded files."""
|
add function to get datetime from number of days
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -50,7 +50,7 @@ setup(
packages=find_packages(),
install_requires=[
'pytest >= 2.8.1',
- 'pytest-logging',
+ 'pytest-catchlog',
'pytest-tempdir'
],
setup_requires=[
|
Depend on pytest-catchlog instead of pytest-logging
|
py
|
diff --git a/scrapekit/tasks.py b/scrapekit/tasks.py
index <HASH>..<HASH> 100644
--- a/scrapekit/tasks.py
+++ b/scrapekit/tasks.py
@@ -60,6 +60,8 @@ class TaskManager(object):
Do not call this directly, use Task.queue/Task.run instead.
"""
+ if self.num_threads == 0:
+ return task(*args, **kwargs)
if self.queue is None:
self._spawn()
self.queue.put((task, args, kwargs))
|
Run without threads if num_threads is 0
|
py
|
diff --git a/salt/log/handlers/logstash_mod.py b/salt/log/handlers/logstash_mod.py
index <HASH>..<HASH> 100644
--- a/salt/log/handlers/logstash_mod.py
+++ b/salt/log/handlers/logstash_mod.py
@@ -139,7 +139,7 @@ __virtualname__ = 'logstash'
def __virtual__():
if not any(['logstash_udp_handler' in __opts__,
'logstash_zmq_handler' in __opts__]):
- log.debug(
+ log.trace(
'None of the required configuration sections, '
'\'logstash_udp_handler\' and \'logstash_zmq_handler\', '
'were found the in the configuration. Not loading the Logstash '
|
Quiet the logstash handler loading message @s0undt3ch FYI. If you think this is a bad change, I'd be happy to discuss it with you, but <I>% of salt users don't use that handler (as far as I know) and it's just garbage in the debug log.
|
py
|
diff --git a/tests/test_providers.py b/tests/test_providers.py
index <HASH>..<HASH> 100644
--- a/tests/test_providers.py
+++ b/tests/test_providers.py
@@ -1089,3 +1089,27 @@ class ConfigTests(unittest.TestCase):
'ChildConfig({0}) at {1}>'.format(
repr('.'.join(('category', 'setting'))),
hex(id(category_setting))))
+
+
+class FactoryAsDecoratorTests(unittest.TestCase):
+ """Factory as decorator tests."""
+
+ def test_decoration(self):
+ """Test decoration of some class with Factory provider."""
+ @providers.Factory
+ class AuthService(object):
+ """Auth service."""
+
+ @providers.Factory
+ @injections.inject(auth_service=AuthService)
+ class UsersService(object):
+ """Users service."""
+
+ def __init__(self, auth_service):
+ """Initializer."""
+ self.auth_service = auth_service
+
+ users_service = UsersService()
+
+ self.assertIsInstance(users_service, UsersService.cls)
+ self.assertIsInstance(users_service.auth_service, AuthService.cls)
|
Add FactoryAsDecoratorTests
|
py
|
diff --git a/tornado_pyuv/__init__.py b/tornado_pyuv/__init__.py
index <HASH>..<HASH> 100644
--- a/tornado_pyuv/__init__.py
+++ b/tornado_pyuv/__init__.py
@@ -15,17 +15,11 @@ def install():
_tornado_ioloop = __import__('tornado.ioloop', fromlist=['foobar'])
_IOLoop = _tornado_ioloop.IOLoop
- class FDWrapper(object):
- def __init__(self, fd):
- self.fd = fd
- def fileno(self):
- return self.fd
-
class Waker(object):
def __init__(self, loop):
self._lock = thread.allocate_lock()
self._async = pyuv.Async(loop, self._cb)
- #self._async.unref()
+ self._async.unref()
def _cb(self, handle):
pass
def wake(self):
@@ -99,7 +93,7 @@ def install():
def add_handler(self, fd, handler, events):
self._handlers[fd] = stack_context.wrap(handler)
- poll = pyuv.Poll(self._loop, FDWrapper(fd))
+ poll = pyuv.Poll(self._loop, fd)
poll_events = 0
if (events & IOLoop.READ):
poll_events |= pyuv.UV_READABLE
|
Adapted to API changes in pyuv
|
py
|
diff --git a/test/unit/test_runner_config.py b/test/unit/test_runner_config.py
index <HASH>..<HASH> 100644
--- a/test/unit/test_runner_config.py
+++ b/test/unit/test_runner_config.py
@@ -215,6 +215,7 @@ def test_generate_ansible_command():
assert cmd == ['ansible', '-i', '/inventory', '-m', 'setup', '-a', 'test=string']
rc.module_args = None
+
def test_generate_ansible_command_with_api_extravars():
rc = RunnerConfig(private_data_dir='/', playbook='main.yaml', extravars={"foo":"bar"})
rc.prepare_inventory()
|
add two blank lines so flake8 is happy
|
py
|
diff --git a/pycdlib/rockridge.py b/pycdlib/rockridge.py
index <HASH>..<HASH> 100644
--- a/pycdlib/rockridge.py
+++ b/pycdlib/rockridge.py
@@ -787,6 +787,9 @@ class RRSLRecord(object):
if not cr_flags & (1 << 0):
self.symlink_components.append(name)
name = b''
+ # FIXME: if this is the last component in this SL record,
+ # but the component continues on in the next SL record, we will
+ # fail to record this bit. We should fix that.
cr_offset += len_cp
data_len -= len_cp
|
Add in a FIXME message for dealing with continued SL records.
|
py
|
diff --git a/pyunpack/__init__.py b/pyunpack/__init__.py
index <HASH>..<HASH> 100644
--- a/pyunpack/__init__.py
+++ b/pyunpack/__init__.py
@@ -69,11 +69,7 @@ class Archive(object):
if self.backend == 'auto':
if is_zipfile:
- try:
- self.extractall_zipfile(directory)
- except AttributeError:
- # py25
- self.extractall_patool(directory, patool_path)
+ self.extractall_zipfile(directory)
else:
self.extractall_patool(directory, patool_path)
|
drop py<I> support
|
py
|
diff --git a/pyup/config.py b/pyup/config.py
index <HASH>..<HASH> 100644
--- a/pyup/config.py
+++ b/pyup/config.py
@@ -134,7 +134,7 @@ class Config(object):
Config.UPDATE_INSECURE_TYPO)
def is_valid_schedule(self):
- return SCHEDULE_REGEX.search(self.schedule) is not None
+ return SCHEDULE_REGEX.search(self.schedule) if type(self.schedule) == str else None
def __repr__(self):
return str(self.__dict__)
|
Check for a regex match only if the schedule is a string
|
py
|
diff --git a/safe/impact_functions/inundation/flood_raster_road/impact_function.py b/safe/impact_functions/inundation/flood_raster_road/impact_function.py
index <HASH>..<HASH> 100644
--- a/safe/impact_functions/inundation/flood_raster_road/impact_function.py
+++ b/safe/impact_functions/inundation/flood_raster_road/impact_function.py
@@ -332,7 +332,7 @@ class FloodRasterRoadsFunction(
if threshold_min > threshold_max:
message = tr(
- 'The minimal threshold is greater then the maximal specified '
+ 'The minimal threshold is greater than the maximal specified '
'threshold. Please check the values.')
raise GetDataError(message)
|
Fix #<I> *typo only.
|
py
|
diff --git a/openfisca_core/tracers.py b/openfisca_core/tracers.py
index <HASH>..<HASH> 100644
--- a/openfisca_core/tracers.py
+++ b/openfisca_core/tracers.py
@@ -103,13 +103,12 @@ class Tracer(object):
u"Something went wrong with the simulation tracer: result of '{0}' was expected, got results for '{1}' instead. This does not make sense as the last variable we started computing was '{0}'."
.format(expected_key, key).encode('utf-8')
)
- intermediate_result = result.tolist()
+ intermediate_result = result.tolist() # Cast numpy array into a python list
if isinstance(intermediate_result[0], Enum):
- for item in range(len(intermediate_result)):
- intermediate_result[item] = intermediate_result[item].name
-
- self.trace[key]['value'] = intermediate_result # Cast numpy array into a python list
+ self.trace[key]['value'] = [item.name for item in intermediate_result]
+ else:
+ self.trace[key]['value'] = intermediate_result
def record_calculation_abortion(self, variable_name, period, **parameters):
"""
|
Add new enum calls to tracers
|
py
|
diff --git a/thumbor/detectors/__init__.py b/thumbor/detectors/__init__.py
index <HASH>..<HASH> 100644
--- a/thumbor/detectors/__init__.py
+++ b/thumbor/detectors/__init__.py
@@ -48,10 +48,10 @@ class CascadeLoaderDetector(BaseDetector):
image,
self.__class__.cascade,
cv.CreateMemStorage(0),
- scaleFactor=1.1,
- minNeighbors=3,
+ scale_factor=1.1,
+ min_neighbors=3,
flags=cv.CV_HAAR_DO_CANNY_PRUNING,
- minSize=(20, 20)
+ min_size=(20, 20)
)
return faces
|
Oops, I used the parameter names from the <I> version; now it's correct.
|
py
|
diff --git a/python/phonenumbers/__init__.py b/python/phonenumbers/__init__.py
index <HASH>..<HASH> 100644
--- a/python/phonenumbers/__init__.py
+++ b/python/phonenumbers/__init__.py
@@ -147,7 +147,7 @@ from .phonenumbermatcher import PhoneNumberMatch, PhoneNumberMatcher, Leniency
# Version number is taken from the upstream libphonenumber version
# together with an indication of the version of the Python-specific code.
-__version__ = "8.8.7"
+__version__ = "8.8.8"
__all__ = ['PhoneNumber', 'CountryCodeSource', 'FrozenPhoneNumber',
'REGION_CODE_FOR_NON_GEO_ENTITY', 'NumberFormat', 'PhoneNumberDesc', 'PhoneMetadata',
|
Prep for <I> release
|
py
|
diff --git a/tests/test_fixtures.py b/tests/test_fixtures.py
index <HASH>..<HASH> 100644
--- a/tests/test_fixtures.py
+++ b/tests/test_fixtures.py
@@ -93,6 +93,6 @@ class ZenpyApiTestCase(BetamaxTestCase):
prop_val = getattr(zenpy_object, attr_name)
if prop_val and issubclass(prop_val.__class__, BaseObject):
self.recursively_call_properties(prop_val)
- elif issubclass(prop_val, BaseResultGenerator):
+ elif issubclass(prop_val.__class__, BaseResultGenerator):
for obj in prop_val:
self.recursively_call_properties(obj)
|
Actually pass a class to issubclass...
|
py
|
diff --git a/galpy/potential_src/Potential.py b/galpy/potential_src/Potential.py
index <HASH>..<HASH> 100644
--- a/galpy/potential_src/Potential.py
+++ b/galpy/potential_src/Potential.py
@@ -2646,8 +2646,12 @@ def _check_c(Pot):
2014-02-17 - Written - Bovy (IAS)
"""
+ from galpy.potential_src.SimpleWrapperPotential \
+ import SimpleWrapperPotential
if isinstance(Pot,list):
- return nu.all(nu.array([p.hasC for p in Pot],dtype='bool'))
+ return nu.all(nu.array([_check_c(p) for p in Pot],dtype='bool'))
+ elif isinstance(Pot,SimpleWrapperPotential):
+ return bool(Pot.hasC*_check_c(Pot._pot))
elif isinstance(Pot,Potential):
return Pot.hasC
|
Check whether all elements of a potential have C implementations in Potential._check_c
|
py
|
diff --git a/mapping/util.py b/mapping/util.py
index <HASH>..<HASH> 100644
--- a/mapping/util.py
+++ b/mapping/util.py
@@ -213,10 +213,15 @@ def calc_rets(returns, weights):
# in later indexing of rets even when ret has weight of 0
gnrc_wts = gnrc_wts.loc[gnrc_wts != 0]
root_rets = returns[root]
+ # necessary instead of missing_keys.any() to support MultiIndex
if not gnrc_wts.index.isin(root_rets.index).all():
- raise KeyError("'root_rets.index' labels missing from "
- "'gnrc_wts' for root '{0}', generic '{1}'"
- .format(root, generic))
+ # as list instead of MultiIndex for legibility when stack trace
+ missing_keys = (gnrc_wts.index.difference(root_rets.index)
+ .tolist())
+ raise KeyError("From the [index] of 'gnrc_wts' none of {0} "
+ "are in the [index] of 'root_rets' "
+ "for root '{1}', generic '{2}'"
+ .format(missing_keys, root, generic))
rets = root_rets.loc[gnrc_wts.index]
# groupby time
group_rets = (rets * gnrc_wts).groupby(level=0)
|
Improve KeyError description. When a KeyError is raised, include the missing keys in the error message.
|
py
|
diff --git a/drf_dynamic_fields/__init__.py b/drf_dynamic_fields/__init__.py
index <HASH>..<HASH> 100644
--- a/drf_dynamic_fields/__init__.py
+++ b/drf_dynamic_fields/__init__.py
@@ -29,7 +29,7 @@ class DynamicFieldsMixin(object):
params = getattr(
request, 'query_params', getattr(request, 'GET', None)
)
- if not params:
+ if params is None:
warnings.warn('Request object does not contain query paramters')
try:
|
Warn only if the params are missing from `request` Previously it would warn if they were present but empty.
|
py
|
diff --git a/lib/svtplay_dl/output.py b/lib/svtplay_dl/output.py
index <HASH>..<HASH> 100644
--- a/lib/svtplay_dl/output.py
+++ b/lib/svtplay_dl/output.py
@@ -148,6 +148,8 @@ def output(options, extention="mp4", openfd=True, mode="wb"):
ext = re.search(r"(\.[a-z0-9]+)$", options.output)
if not ext:
options.output = "%s.%s" % (options.output, extention)
+ if options.output_auto and ext:
+ options.output = "%s.%s" % (options.output, extention)
if extention == "srt" and ext:
options.output = "%s.srt" % options.output[:options.output.rfind(ext.group(1))]
log.info("Outfile: %s", options.output)
|
output: workaround for titles ending in .XXX
|
py
|
diff --git a/pygccxml/declarations/calldef.py b/pygccxml/declarations/calldef.py
index <HASH>..<HASH> 100644
--- a/pygccxml/declarations/calldef.py
+++ b/pygccxml/declarations/calldef.py
@@ -431,6 +431,20 @@ class constructor_t( member_calldef_t ):
def __init__( self, *args, **keywords ):
member_calldef_t.__init__( self, *args, **keywords )
+ def __str__(self):
+ # Get the full name of the calldef...
+ name = algorithm.full_name(self)
+ if name[:2]=="::":
+ name = name[2:]
+ # Add the arguments...
+ args = map(lambda a: str(a), self.arguments)
+ res = "%s(%s)"%(name, ", ".join(args))
+ # Append the declaration class
+ cls = 'constructor'
+ if self.is_copy_constructor:
+ cls = 'copy ' + cls
+ return "%s [%s]"%(res, cls)
+
def _get_is_copy_constructor(self):
args = self.arguments
if 1 != len( args ):
|
a group of small improvements to the projects, mainly improving error messages
|
py
|
diff --git a/publib/test/test_functions.py b/publib/test/test_functions.py
index <HASH>..<HASH> 100644
--- a/publib/test/test_functions.py
+++ b/publib/test/test_functions.py
@@ -29,7 +29,6 @@ def test_routines(**kwargs):
import numpy as np
import matplotlib.pyplot as plt
- plt.ion() # force interactive mode (so we're not stuck when run from terminal)
# %% Examples
def example1(title, seed):
@@ -91,6 +90,7 @@ def test_routines(**kwargs):
seed = int(time.time())
mpl.rcdefaults()
+ plt.ion() # force interactive mode (so we're not stuck when run from terminal)
set_style()
example('basic', seed)
|
ensure interactive mode so tests are not stuck
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -50,7 +50,7 @@ setup(
extras_require={"dev": read_requirements("dev.txt")},
include_package_data=True,
classifiers=[
- "Development Status :: 4 - Beta",
+ "Development Status :: 5 - Production/Stable",
"Environment :: Console",
"Environment :: Other Environment",
"Intended Audience :: Developers",
|
fix(classifiers): Mark AWS SAM CLI as Production/Stable (#<I>) * classifiers definition present at: <URL>
|
py
|
diff --git a/mongoctl/mongoctl.py b/mongoctl/mongoctl.py
index <HASH>..<HASH> 100644
--- a/mongoctl/mongoctl.py
+++ b/mongoctl/mongoctl.py
@@ -3539,6 +3539,15 @@ def get_host_ips(host):
ip = elem[4]
if ip not in ips:
ips.append(ip)
+
+ # TODO remove this temp hack that works around the case where
+ # host X has more IPs than X.foo.com.
+ if len(host.split(".")) == 3:
+ try:
+ ips.extend(get_host_ips(host.split(".")[0]))
+ except Exception, ex:
+ pass
+
return ips
except Exception, e:
raise MongoctlException("Invalid host '%s'. Cause: %s" % (host, e))
|
Deal with an edge case for replset conf member resolution when a host has multiple ip addresses
|
py
|
diff --git a/salt/grains/core.py b/salt/grains/core.py
index <HASH>..<HASH> 100644
--- a/salt/grains/core.py
+++ b/salt/grains/core.py
@@ -1332,7 +1332,14 @@ def os_data():
if os.path.exists('/proc/1/cmdline'):
with salt.utils.fopen('/proc/1/cmdline') as fhr:
init_cmdline = fhr.read().replace('\x00', ' ').split()
- init_bin = salt.utils.which(init_cmdline[0])
+ try:
+ init_bin = salt.utils.which(init_cmdline[0])
+ except IndexError:
+ # Emtpy init_cmdline
+ init_bin = None
+ log.warning(
+ "Unable to fetch data from /proc/1/cmdline"
+ )
if init_bin is not None and init_bin.endswith('bin/init'):
supported_inits = (six.b('upstart'), six.b('sysvinit'), six.b('systemd'))
edge_len = max(len(x) for x in supported_inits) - 1
|
Catch a possible error, especially triggered in unit tests
|
py
|
diff --git a/pipenv/core.py b/pipenv/core.py
index <HASH>..<HASH> 100644
--- a/pipenv/core.py
+++ b/pipenv/core.py
@@ -1218,6 +1218,7 @@ def activate_virtualenv(source=True):
# Support for csh shell.
if PIPENV_SHELL and 'csh' in PIPENV_SHELL:
suffix = '.csh'
+ command = 'source'
# Escape any spaces located within the virtualenv path to allow
# for proper activation.
|
Correct csh syntax in activate_virtualenv In csh, the . operator executes a directory, which is the wrong behaviour here. At best, when pipenv is installed system-wide (preferably via the operating system package manager), the result is a permission denied error from the shell. As csh uses the source command, reflect that here.
|
py
|
diff --git a/hgvs/location.py b/hgvs/location.py
index <HASH>..<HASH> 100644
--- a/hgvs/location.py
+++ b/hgvs/location.py
@@ -202,10 +202,10 @@ class BaseOffsetInterval(Interval):
if self.start.datum == CDS_END:
self.end.datum = CDS_END
- #self.validate()
+ self.validate()
def validate(self):
- # check for valid combinations of start and end datums
+ # check for valid combinations of start and end datums
if (self.start.datum, self.end.datum) not in [
(SEQ_START, SEQ_START),
(CDS_START, CDS_START),
|
uncommented validation statement in BaseOffsetInterval, from PR comment; tests pass
|
py
|
diff --git a/chm/chm.py b/chm/chm.py
index <HASH>..<HASH> 100644
--- a/chm/chm.py
+++ b/chm/chm.py
@@ -247,12 +247,10 @@ class CHMFile:
result, ui = chmlib.chm_resolve_object(self.file, '/#SYSTEM')
if (result != chmlib.CHM_RESOLVE_SUCCESS):
- sys.stderr.write('GetArchiveInfo: #SYSTEM does not exist\n')
return 0
size, text = chmlib.chm_retrieve_object(self.file, ui, 4l, ui.length)
if (size == 0):
- sys.stderr.write('GetArchiveInfo: file size = 0\n')
return 0
buff = array.array('B', text)
@@ -333,7 +331,6 @@ class CHMFile:
size, text = chmlib.chm_retrieve_object(self.file, ui, 0l, ui.length)
if (size == 0):
- sys.stderr.write('GetTopicsTree: file size = 0\n')
return None
return text
@@ -352,7 +349,6 @@ class CHMFile:
size, text = chmlib.chm_retrieve_object(self.file, ui, 0l, ui.length)
if (size == 0):
- sys.stderr.write('GetIndex: file size = 0\n')
return None
return text
|
Stop writing diagnostic information to stderr If necessary, these messages may be reinstated later when a suitable logger is passed to the CHMFile constructor
|
py
|
diff --git a/src/you_get/extractors/acfun.py b/src/you_get/extractors/acfun.py
index <HASH>..<HASH> 100644
--- a/src/you_get/extractors/acfun.py
+++ b/src/you_get/extractors/acfun.py
@@ -117,7 +117,9 @@ def acfun_download(url, output_dir='.', merge=True, info_only=False, **kwargs):
vid = r1('data-vid="(\d+)"', html)
up = r1('data-name="([^"]+)"', html)
- title = title + ' - ' + up
+ p_title = r1('active">([^<]+)', html)
+ title = '%s (%s)' % (title, up)
+ if p_title: title = '%s - %s' % (title, p_title)
acfun_download_by_vid(vid, title,
output_dir=output_dir,
merge=merge,
|
[acfun] fix active single-p title
|
py
|
diff --git a/tests/test_bdist_rpm.py b/tests/test_bdist_rpm.py
index <HASH>..<HASH> 100644
--- a/tests/test_bdist_rpm.py
+++ b/tests/test_bdist_rpm.py
@@ -3,9 +3,7 @@
import unittest
import sys
import os
-import tempfile
-import shutil
-from test.support import run_unittest
+from test.support import run_unittest, requires_zlib
from distutils.core import Distribution
from distutils.command.bdist_rpm import bdist_rpm
@@ -48,6 +46,7 @@ class BuildRpmTestCase(support.TempdirManager,
# spurious sdtout/stderr output under Mac OS X
@unittest.skipUnless(sys.platform.startswith('linux'),
'spurious sdtout/stderr output under Mac OS X')
+ @requires_zlib
@unittest.skipIf(find_executable('rpm') is None,
'the rpm command is not found')
@unittest.skipIf(find_executable('rpmbuild') is None,
@@ -90,6 +89,7 @@ class BuildRpmTestCase(support.TempdirManager,
# spurious sdtout/stderr output under Mac OS X
@unittest.skipUnless(sys.platform.startswith('linux'),
'spurious sdtout/stderr output under Mac OS X')
+ @requires_zlib
# http://bugs.python.org/issue1533164
@unittest.skipIf(find_executable('rpm') is None,
'the rpm command is not found')
|
Some distutils tests require zlib for creating tar.gz source distribution.
|
py
|
diff --git a/sos/plugins/processor.py b/sos/plugins/processor.py
index <HASH>..<HASH> 100644
--- a/sos/plugins/processor.py
+++ b/sos/plugins/processor.py
@@ -36,7 +36,7 @@ class Processor(Plugin, RedHatPlugin, UbuntuPlugin, DebianPlugin):
self.add_cmd_output("cpupower idle-info")
self.add_cmd_output("cpupower frequency-info")
- if self.policy().get_arch().endswith("386"):
+ if '86' in self.policy().get_arch():
self.add_cmd_output("x86info -a")
|
Fix x<I> arch detection in processor plugin
|
py
|
diff --git a/lancet/issue_tracker.py b/lancet/issue_tracker.py
index <HASH>..<HASH> 100644
--- a/lancet/issue_tracker.py
+++ b/lancet/issue_tracker.py
@@ -68,11 +68,18 @@ class GitlabTracker(Tracker):
project = self.api.projects.get(project_id, lazy=True)
group = self.api.groups.get(self.group_id, lazy=True)
+ def fromisoformat(datestr):
+ # datetime.date.isoformat is only available on python 3.7+
+ year, month, day = datestr.split("-")
+ return datetime.date(
+ year=int(year), month=int(month), day=int(day)
+ )
+
def is_current(milestone):
return (
- datetime.date.fromisoformat(milestone.start_date)
+ fromisoformat(milestone.start_date)
<= datetime.date.today()
- <= datetime.date.fromisoformat(milestone.due_date)
+ <= fromisoformat(milestone.due_date)
)
if add_to_active_sprint:
|
Restore compatibility with python < <I>
|
py
|
diff --git a/lib/svtplay_dl/output.py b/lib/svtplay_dl/output.py
index <HASH>..<HASH> 100644
--- a/lib/svtplay_dl/output.py
+++ b/lib/svtplay_dl/output.py
@@ -157,12 +157,12 @@ def output(options, extension="mp4", openfd=True, mode="wb", **kwargs):
findexpisode(os.path.dirname(os.path.realpath(options.output)), options.service, os.path.basename(options.output)):
if extension in subtitlefiles:
if not options.force_subtitle:
- if not (options.silent or options.silent_semi):
+ if not options.silent:
log.warn("File (%s) already exists. Use --force-subtitle to overwrite" % options.output)
return None
else:
if not options.force:
- if not (options.silent or options.silent_semi):
+ if not options.silent:
log.warn("File (%s) already exists. Use --force to overwrite" % options.output)
return None
if openfd:
|
Cleanup code for silent, 'File (%s) already exists.'
|
py
|
diff --git a/src/sos/Julia/kernel.py b/src/sos/Julia/kernel.py
index <HASH>..<HASH> 100644
--- a/src/sos/Julia/kernel.py
+++ b/src/sos/Julia/kernel.py
@@ -133,7 +133,7 @@ using Feather
using NamedArrays
using DataFrames
function __s_o_s__julia_py_repr_logical_1(obj)
- obj==true ? "true" : "false"
+ obj==true ? "True" : "False"
end
function __s_o_s__julia_py_repr_integer_1(obj)
return string(obj)
|
fix bug for converting boolean variable from julia to python
|
py
|
diff --git a/spyder/plugins/pylint/plugin.py b/spyder/plugins/pylint/plugin.py
index <HASH>..<HASH> 100644
--- a/spyder/plugins/pylint/plugin.py
+++ b/spyder/plugins/pylint/plugin.py
@@ -15,6 +15,7 @@ import os.path as osp
from qtpy.QtCore import Qt, Signal, Slot
# Local imports
+from spyder.api.exceptions import SpyderAPIError
from spyder.api.plugins import Plugins, SpyderDockablePlugin
from spyder.api.plugin_registration.decorators import (
on_plugin_available, on_plugin_teardown)
@@ -180,9 +181,13 @@ class Pylint(SpyderDockablePlugin):
"""
Set filename without code analysis.
"""
- editor = self.get_plugin(Plugins.Editor)
- if editor:
- self.get_widget().set_filename(editor.get_current_filename())
+ try:
+ editor = self.get_plugin(Plugins.Editor)
+ if editor:
+ self.get_widget().set_filename(editor.get_current_filename())
+ except SpyderAPIError:
+ # Editor was deleted
+ pass
# --- Public API
# ------------------------------------------------------------------------
|
Remove hard reference to editor in pylint
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -23,7 +23,8 @@ setup(
packages=find_packages(exclude=['test']),
install_requires=['click>=4.0', 'dulwich', 'logbook', 'tempdir>=0.6',
'virtualenv>=1.10.1', 'python-dateutil', 'versio',
- 'stuf', 'pluginbase', 'networkx', 'werkzeug', 'pkginfo'],
+ 'stuf', 'pluginbase', 'networkx', 'werkzeug', 'pkginfo',
+ 'shutilwhich'],
entry_points={
'console_scripts': [
'unleash = unleash.cli:main',
|
Added missing dependency: shutilwhich.
|
py
|
diff --git a/networking_cisco/plugins/cisco/db/device_manager/hosting_device_manager_db.py b/networking_cisco/plugins/cisco/db/device_manager/hosting_device_manager_db.py
index <HASH>..<HASH> 100644
--- a/networking_cisco/plugins/cisco/db/device_manager/hosting_device_manager_db.py
+++ b/networking_cisco/plugins/cisco/db/device_manager/hosting_device_manager_db.py
@@ -112,7 +112,9 @@ class HostingDeviceManagerMixin(hosting_devices_db.HostingDeviceDBMixin):
if cls._keystone_session:
return cls._keystone_session
else:
- auth_url = cfg.CONF.keystone_authtoken.auth_url + "/v3"
+ auth_url = cfg.CONF.keystone_authtoken.auth_url
+ if auth_url.rsplit('/', 1)[-1] != 'v3':
+ auth_url += '/v3'
# user = cfg.CONF.keystone_authtoken.admin_user
# pw = cfg.CONF.keystone_authtoken.admin_password
# project_name = cfg.CONF.keystone_authtoken.admin_tenant_name
|
ASR1K plugin: conditionally append v3 to authurl. This change checks whether the keystone_authurl already has "v3" appended prior to appending it with "/v3". Retains the assumption that the URL always needs to be v3. Change-Id: Id<I>c1c1c<I>b8f<I>c5e6fd<I>da3ac<I>da3ed2b
|
py
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.