diff
stringlengths
139
3.65k
message
stringlengths
8
627
diff_languages
stringclasses
1 value
diff --git a/quantecon/models/solow/model.py b/quantecon/models/solow/model.py index <HASH>..<HASH> 100644 --- a/quantecon/models/solow/model.py +++ b/quantecon/models/solow/model.py @@ -75,7 +75,6 @@ References @date : 2014-08-18 TODO: -2. Write more tests! 5. Finish section on solving Solow model in demo notebook. 6. Write code for computing impulse response functions. 7. Write code for plotting impulse response functions.
Fixed some typos in the test_solow.py that were causing fails.
py
diff --git a/examples/ea/ea.py b/examples/ea/ea.py index <HASH>..<HASH> 100644 --- a/examples/ea/ea.py +++ b/examples/ea/ea.py @@ -41,12 +41,14 @@ class EA: return generation def crossover(self, parents) -> Chromosome: - child = Chromosome(np.zeros(parents[0].values.shape)) - - for i, v in enumerate(parents[1].values): - child.values[i] = v if np.random.rand() > 0.5 \ - else parents[0].values[i] - + # child = Chromosome(np.zeros(parents[0].values.shape)) + # + # for i, v in enumerate(parents[1].values): + # child.values[i] = v if np.random.rand() > 0.5 \ + # else parents[0].values[i] + + child = Chromosome(np.where(np.random.random(parents[0].values.shape) > 0.5, + parents[0].values, parents[1].values)) return child def mutate(self, child) -> Chromosome:
made crossover into a one-liner
py
diff --git a/tests/run.py b/tests/run.py index <HASH>..<HASH> 100755 --- a/tests/run.py +++ b/tests/run.py @@ -3,11 +3,6 @@ import sys sys.path.insert(0, '..') -import os import nose -mac_library_path = '/Users/dustin/build/libarchive/build/libarchive' -if os.path.exists(mac_library_path) is True: - os.environ['DYLD_LIBRARY_PATH'] = mac_library_path - nose.run()
Removed environment-specific test logic.
py
diff --git a/wpull/async.py b/wpull/async.py index <HASH>..<HASH> 100644 --- a/wpull/async.py +++ b/wpull/async.py @@ -84,6 +84,7 @@ def wait_future(future, seconds=None): future = yield async_result.get(io_loop.time() + seconds) result = future.result() except toro.Timeout as error: + future.add_done_callback(lambda future: future.result()) raise TimedOut() from error raise tornado.gen.Return(result)
async: Ensure future is consumed after timeout.
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100755 --- a/setup.py +++ b/setup.py @@ -114,7 +114,7 @@ if os.path.exists('CHANGES (links).txt'): else: # but if the release script has not run, fall back to the source file changes_file = open('CHANGES.txt') -long_description = readme_file.read() + changes_file.read() +long_description = readme_file.read() + '\n' + changes_file.read() readme_file.close() changes_file.close()
Make sure there's a blank line between files
py
diff --git a/salt/modules/artifactory.py b/salt/modules/artifactory.py index <HASH>..<HASH> 100644 --- a/salt/modules/artifactory.py +++ b/salt/modules/artifactory.py @@ -468,7 +468,7 @@ def __save_artifact(artifact_url, target_file, headers): try: request = urllib.request.Request(artifact_url, None, headers) f = urllib.request.urlopen(request) - with salt.utils.fopen(target_file, "wb") as local_file: + with salt.utils.files.fopen(target_file, "wb") as local_file: local_file.write(f.read()) result['status'] = True result['comment'] = __append_comment(('Artifact downloaded from URL: {0}'.format(artifact_url)), result['comment'])
Reverted salt.utils.files.fopen change - unsure how that change actually happened
py
diff --git a/tests/test_utils.py b/tests/test_utils.py index <HASH>..<HASH> 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1,5 +1,5 @@ try: from unittest2 import TestCase, SkipTest except ImportError: - from unittest import TestCase SkipTest + from unittest import TestCase, SkipTest
TRIVIAL Fixing import of SkipTest
py
diff --git a/napalm_junos/junos.py b/napalm_junos/junos.py index <HASH>..<HASH> 100644 --- a/napalm_junos/junos.py +++ b/napalm_junos/junos.py @@ -1023,6 +1023,9 @@ class JunOSDriver(NetworkDriver): if isinstance(test_param_value, float): test_results[test_param_name] = test_param_value * 1e-3 # convert from useconds to mseconds test_name = test_results.pop('test_name', '') + source = test_results.pop('source', u'') + if source is None: + test_results['source'] = u'' if probe_name not in probes_results.keys(): probes_results[probe_name] = dict() probes_results[probe_name][test_name] = test_results
False is not True. never
py
diff --git a/py/selenium/webdriver/support/relative_locator.py b/py/selenium/webdriver/support/relative_locator.py index <HASH>..<HASH> 100644 --- a/py/selenium/webdriver/support/relative_locator.py +++ b/py/selenium/webdriver/support/relative_locator.py @@ -27,9 +27,9 @@ def with_tag_name(tag_name): class RelativeBy(object): - def __init__(self, root=None, filters=[]): + def __init__(self, root=None, filters=None): self.root = root - self.filters = filters + self.filters = filters or [] def above(self, element_or_locator=None): if element_or_locator is None:
[py] Fixing relative locator constructor
py
diff --git a/aversion.py b/aversion.py index <HASH>..<HASH> 100644 --- a/aversion.py +++ b/aversion.py @@ -271,8 +271,7 @@ class Result(object): populated. """ - return (self.version is not None and self.ctype is not None and - self.orig_ctype is not None) + return self.version is not None and self.ctype is not None def set_version(self, version): """
Once the ctype is set, the orig_ctype cannot be changed.
py
diff --git a/libaio/__init__.py b/libaio/__init__.py index <HASH>..<HASH> 100644 --- a/libaio/__init__.py +++ b/libaio/__init__.py @@ -37,7 +37,7 @@ class EventFD(object): flags (int) Bit mask of EFD_* constants. """ - self._file = os.fdopen(eventfd.eventfd(initval, flags), 'r+b') + self._file = os.fdopen(eventfd(initval, flags), 'r+b') def close(self): """
Fix EventFD instanciation.
py
diff --git a/cablemap.tm/cablemap/tm/handler.py b/cablemap.tm/cablemap/tm/handler.py index <HASH>..<HASH> 100644 --- a/cablemap.tm/cablemap/tm/handler.py +++ b/cablemap.tm/cablemap/tm/handler.py @@ -175,6 +175,7 @@ class MIOCableHandler(object): route, name, precedence, mcn = recipient.route, recipient.name, recipient.precedence, recipient.mcn if not name: return + h.role(psis.CABLE_TYPE, self._cable) h.role(psis.RECIPIENT_TYPE, psis.station_psi(name, route)) if route: h.role(psis.ROUTE_TYPE, psis.route_psi(route))
Forgot the cable itself in the assoc
py
diff --git a/test/test_onnx.py b/test/test_onnx.py index <HASH>..<HASH> 100644 --- a/test/test_onnx.py +++ b/test/test_onnx.py @@ -299,6 +299,10 @@ class ONNXExporterTester(unittest.TestCase): # This test also compares both paste_masks_in_image and _onnx_paste_masks_in_image # (since jit_trace witll call _onnx_paste_masks_in_image). def test_paste_mask_in_image(self): + # disable profiling + torch._C._jit_set_profiling_executor(False) + torch._C._jit_set_profiling_mode(False) + masks = torch.rand(10, 1, 26, 26) boxes = torch.rand(10, 4) boxes[:, 2:] += torch.rand(10, 2)
Disable Profiling in Failing Test (#<I>) * disable test * disable profiling * Update test_onnx.py
py
diff --git a/estnltk/wordnet/wordnet.py b/estnltk/wordnet/wordnet.py index <HASH>..<HASH> 100644 --- a/estnltk/wordnet/wordnet.py +++ b/estnltk/wordnet/wordnet.py @@ -2,7 +2,7 @@ import sqlite3 import os.path import math import networkx as nx -from typing import Union +from typing import Union, List from estnltk.wordnet.synset import Synset MAX_TAXONOMY_DEPTHS = {'a': 2, 'n': 13, 'r': 0, 'v': 10} @@ -360,6 +360,19 @@ class Wordnet: return (2.0 * lcs_depth) / (self_depth + other_depth) + @property + def all_relation_types(self) -> List[str]: + """ + Finds and returns all relation types used in this Wordnet. + + Returns + ------- + A list of strings: relation types used in this Wordnet. + """ + self.cur.execute("SELECT DISTINCT relation FROM wordnet_relation") + wn_all_relation_types = self.cur.fetchall() + return [r[0] for r in wn_all_relation_types] + def __str__(self): return "Wordnet version {}".format(self.version)
Updated Wordnet: added query for all relation types
py
diff --git a/.travis/complementary/api_file_generation.py b/.travis/complementary/api_file_generation.py index <HASH>..<HASH> 100644 --- a/.travis/complementary/api_file_generation.py +++ b/.travis/complementary/api_file_generation.py @@ -15,7 +15,6 @@ from PyFunceble import DomainAvailabilityChecker from PyFunceble.cli.filesystem.dir_structure.restore import ( DirectoryStructureRestoration, ) -from PyFunceble.cli.threads.file_producer import FileProducerThread from PyFunceble.cli.processes.producer import ProducerProcessesManager # We initiate the coloration. @@ -52,7 +51,9 @@ dir_structure_restoration = DirectoryStructureRestoration( ).restore_from_backup() # We start the producer thread. -producer_process_manager = ProducerProcessesManager() +producer_process_manager = ProducerProcessesManager( + max_worker=1, daemon=True, generate_output_queue=False +) producer_process_manager.start() # We start and configure our availability checker.
Freeze number of workers to generate and block output queue generation.
py
diff --git a/engine/handler.py b/engine/handler.py index <HASH>..<HASH> 100644 --- a/engine/handler.py +++ b/engine/handler.py @@ -60,6 +60,19 @@ class Handler(tornado.web.RequestHandler, dpEngine): self.finish_with_error(404, 'Page Not Found') return False + temp_paths = {} + paths = [] + + for e in path.split('/'): + if e.find('.') != -1: + uniqid = self.helper.random.uuid() + paths.append(uniqid) + temp_paths[uniqid] = e + else: + paths.append(e) + + path = '/'.join(paths) + module_path = '%s.%s' % (self.prefix, path.replace('/', '.')) module_paths = str.split(self.helper.string.to_str(module_path), '.') parameters = [] @@ -118,7 +131,7 @@ class Handler(tornado.web.RequestHandler, dpEngine): parameters.reverse() try: - method(*parameters) + method(*[temp_paths[x] if x in temp_paths else x for x in parameters]) return handler except tornado.web.HTTPError as e:
fixed issue with dot(.) url handling.
py
diff --git a/perf/_utils.py b/perf/_utils.py index <HASH>..<HASH> 100644 --- a/perf/_utils.py +++ b/perf/_utils.py @@ -355,7 +355,10 @@ def python_implementation(): def python_has_jit(): - return (python_implementation() == 'pypy') + if python_implementation() == 'pypy': + return sys.pypy_translation_info["translation.jit"] + + return False def popen_communicate(proc):
Support PyPy compiled without JIT python_has_jit() now supports PyPy compiled without JIT: check sys.pypy_translation_info["translation.jit"].
py
diff --git a/src/foundations/core.py b/src/foundations/core.py index <HASH>..<HASH> 100644 --- a/src/foundations/core.py +++ b/src/foundations/core.py @@ -57,6 +57,7 @@ __all__ = ["THREADS_IDENTIFIERS", "getModule", "getObjectName", "executionTrace", + "memoize", "NestedAttribute", "Structure", "OrderedStructure", @@ -300,6 +301,42 @@ def executionTrace(object): return function +def memoize(cache=None): + """ + | This decorator is used for method / definition memoization. + | Any method / definition decorated will get its return value cached and restored whenever called + with the same arguments. + + :param cache: Alternate cache. ( Dictionary ) + :return: Object. ( Object ) + """ + + if cache is None: + cache = {} + + def wrapper(object): + """ + This decorator is used for object memoization. + + :param object: Object to decorate. ( Object ) + :return: Object. ( Object ) + """ + + @functools.wraps(object) + def function(*args): + """ + This decorator is used for object memoization. + + :param \*args: Arguments. ( \* ) + :return: Object. ( Object ) + """ + + if args not in cache: + cache[args] = object(*args) + return cache[args] + return function + return wrapper + class NestedAttribute(object): """ This class is an helper object providing methods to manipulate nested attributes.
Add "foundations.core.memoize" decorator function.
py
diff --git a/txaws/client/base.py b/txaws/client/base.py index <HASH>..<HASH> 100644 --- a/txaws/client/base.py +++ b/txaws/client/base.py @@ -37,7 +37,7 @@ def error_wrapper(error, errorClass): error.raiseException() try: fallback_error = errorClass( - xml_payload, error.value.status, error.value.message, + xml_payload, error.value.status, str(error.value), error.value.response) except (ParseError, AWSResponseParseError): error_message = http.RESPONSES.get(http_status)
- Minor change to quell warnings on Python <I>.
py
diff --git a/pelix/framework.py b/pelix/framework.py index <HASH>..<HASH> 100644 --- a/pelix/framework.py +++ b/pelix/framework.py @@ -43,7 +43,6 @@ import logging import os import sys import threading -from manifest import Bundle ACTIVATOR = "activator"
Removed an import added by PyDev from manifest import Bundle -> nothing related to Pelix
py
diff --git a/lib/python/voltcli/voltadmin.d/stop.py b/lib/python/voltcli/voltadmin.d/stop.py index <HASH>..<HASH> 100644 --- a/lib/python/voltcli/voltadmin.d/stop.py +++ b/lib/python/voltcli/voltadmin.d/stop.py @@ -94,7 +94,7 @@ def stop(runner): if not proex.isTimeout: raise runner.info(proex.message) - runner.info('This may be caused by a disabled export target; the node shutdown process will proceed.') + runner.info('This may be caused by an export target either disabled or removed from the configuration. No action is required; the stop node process will proceed.') except StatisticsProcedureException as proex: runner.info(stateMessage) runner.error(proex.message)
stop node messages to inform on disabled export targets
py
diff --git a/openpnm/algorithms/GenericTransport.py b/openpnm/algorithms/GenericTransport.py index <HASH>..<HASH> 100644 --- a/openpnm/algorithms/GenericTransport.py +++ b/openpnm/algorithms/GenericTransport.py @@ -21,7 +21,7 @@ def_set = {'phase': None, 'solver_preconditioner': 'jacobi', 'solver_atol': 1e-6, 'solver_rtol': 1e-6, - 'solver_maxiter': 1000, + 'solver_maxiter': 5000, 'gui': {'setup': {'quantity': '', 'conductance': ''}, 'set_rate_BC': {'pores': None,
Increased maxiter to <I> from <I>, necessary for large systems.
py
diff --git a/omego/artifacts.py b/omego/artifacts.py index <HASH>..<HASH> 100644 --- a/omego/artifacts.py +++ b/omego/artifacts.py @@ -52,6 +52,7 @@ class Artifacts(object): unzipped = filename.replace(".zip", "") if os.path.exists(unzipped): + self.create_symlink(unzipped) return unzipped log.info("Checking %s", componenturl)
Create new symlink if already downloaded
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -52,5 +52,5 @@ setup( 'Programming Language :: Python' ], ext_modules=ext_modules, - python_requires='=2.7, =3.4, =3.5, =3.6' + python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*' )
Update python_requires in setup.py
py
diff --git a/salt/modules/cron.py b/salt/modules/cron.py index <HASH>..<HASH> 100644 --- a/salt/modules/cron.py +++ b/salt/modules/cron.py @@ -136,8 +136,10 @@ def raw_cron(user): cmd = 'crontab -l {0}'.format(user) else: cmd = 'crontab -l -u {0}'.format(user) - return __salt__['cmd.run_stdout'](cmd, rstrip=False) - + lines = __salt__['cmd.run_stdout'](cmd, rstrip=False).splitlines() + if len(lines) != 0 and lines[0].startswith('# DO NOT EDIT THIS FILE - edit the master and reinstall.'): + del(lines[0:3]) + return '\n'.join(lines) def list_tab(user): '''
Remove first three lines if starting with "DO NOT EDIT THIS FILE" Patch for #<I>
py
diff --git a/django_mfa/views.py b/django_mfa/views.py index <HASH>..<HASH> 100644 --- a/django_mfa/views.py +++ b/django_mfa/views.py @@ -57,7 +57,7 @@ def enable_mfa(request): secret_key=request.POST['secret_key']) messages.success(request, "You have successfully enabled multi-factor authentication on your account.") response = redirect(settings.LOGIN_REDIRECT_URL) - return update_rmb_cookie(request, response) + return response else: totp_obj = totp.TOTP(base_32_secret) qr_code = totp_obj.provisioning_uri(request.user.email) @@ -77,8 +77,8 @@ def update_rmb_cookie(request, response): if remember_my_browser: # better not to reveal the username. Revealing the number seems harmless cookie_name = MFA_COOKIE_PREFIX + str(request.user.pk) - response.set_signed_cookie(cookie_name, True, salt=cookie_salt, max_age=remember_days*24*3600, secure=True, - httponly=True) + response.set_signed_cookie(cookie_name, True, salt=cookie_salt, max_age=remember_days*24*3600, + secure=(not settings.DEBUG), httponly=True) return response
Don't set the remember-my-browser cookie when MFA is enabled. Make the user enter the code on next login Set cookie secure flag based on DEBUG
py
diff --git a/tests/support/gitfs.py b/tests/support/gitfs.py index <HASH>..<HASH> 100644 --- a/tests/support/gitfs.py +++ b/tests/support/gitfs.py @@ -654,8 +654,9 @@ class GitPillarHTTPTestBase(GitPillarTestBase, WebserverMixin): if proc is not None: try: proc.send_signal(signal.SIGTERM) + time.sleep(1) if proc.is_running(): - proc.send_signal(signal.SIGQUIT) + proc.send_signal(signal.SIGKILL) except psutil.NoSuchProcess: pass shutil.rmtree(cls.root_dir, ignore_errors=True)
Use Sigkill and add time.sleep befor check
py
diff --git a/spyder_notebook/utils/nbopen.py b/spyder_notebook/utils/nbopen.py index <HASH>..<HASH> 100644 --- a/spyder_notebook/utils/nbopen.py +++ b/spyder_notebook/utils/nbopen.py @@ -12,6 +12,7 @@ import atexit import os import os.path as osp import subprocess +import sys import time from notebook import notebookapp @@ -64,7 +65,7 @@ def nbopen(filename): nbdir = osp.dirname(filename) print("Starting new server") - command = ['jupyter', 'notebook', '--no-browser', + command = [sys.executable, '-m', 'notebook', '--no-browser', '--notebook-dir={}'.format(nbdir), '--NotebookApp.password=', "--KernelSpecManager.kernel_spec_class='{}'".format(
Do not call the jupyter executable when opening notebooks This should make it more robust in case the jupyter executable is not in the path.
py
diff --git a/pulsar/client/transport/requests.py b/pulsar/client/transport/requests.py index <HASH>..<HASH> 100644 --- a/pulsar/client/transport/requests.py +++ b/pulsar/client/transport/requests.py @@ -1,7 +1,7 @@ from __future__ import absolute_import try: from galaxy import eggs - eggs.require("requets") + eggs.require("requests") except ImportError: pass
Fix typo Nate caught in Galaxy.
py
diff --git a/src/transformers/models/auto/feature_extraction_auto.py b/src/transformers/models/auto/feature_extraction_auto.py index <HASH>..<HASH> 100644 --- a/src/transformers/models/auto/feature_extraction_auto.py +++ b/src/transformers/models/auto/feature_extraction_auto.py @@ -69,7 +69,7 @@ FEATURE_EXTRACTOR_MAPPING_NAMES = OrderedDict( ("swin", "ViTFeatureExtractor"), ("swinv2", "ViTFeatureExtractor"), ("van", "ConvNextFeatureExtractor"), - ("videomae", "ViTFeatureExtractor"), + ("videomae", "VideoMAEFeatureExtractor"), ("vilt", "ViltFeatureExtractor"), ("vit", "ViTFeatureExtractor"), ("vit_mae", "ViTFeatureExtractor"),
fix a possible typo in auto feature extraction (#<I>)
py
diff --git a/hug/route.py b/hug/route.py index <HASH>..<HASH> 100644 --- a/hug/route.py +++ b/hug/route.py @@ -39,11 +39,8 @@ from hug.routing import URLRouter as http class Object(http): """Defines a router for classes and objects""" - def __init__(self, **route): - if 'requires' in route: - requires = route['requires'] - route['requires'] = (requires, ) if not isinstance(requires, (tuple, list)) else requires - self.route = route + def __init__(self, urls=None, accept=HTTP_METHODS, output=None, **kwargs): + super().__init__(urls=urls, accept=accept, output=output, **kwargs) def __call__(self, method_or_class): if isinstance(method_or_class, (MethodType, FunctionType)):
Fix root object class based route to have same signature
py
diff --git a/lib/git/utils.py b/lib/git/utils.py index <HASH>..<HASH> 100644 --- a/lib/git/utils.py +++ b/lib/git/utils.py @@ -166,7 +166,12 @@ class LockFile(object): """ if not self._has_lock(): return - os.remove(self._lock_file_path()) + + # if someone removed our file beforhand, lets just flag this issue + # instead of failing, to make it more usable. + lfp = self._lock_file_path() + if os.path.isfile(lfp): + os.remove(lfp) self._owns_lock = False
LockFile: release_lock now checks whether the lockfile to be removed still exists. Previously it would just fail
py
diff --git a/git_aggregator/main.py b/git_aggregator/main.py index <HASH>..<HASH> 100644 --- a/git_aggregator/main.py +++ b/git_aggregator/main.py @@ -129,10 +129,12 @@ def get_parser(): nargs='?', default='aggregate', help='aggregate (default): run the aggregation process.\n' - 'show-closed-prs: show pull requests that are not open anymore\n' - ' such pull requests are indentified as having\n' - ' a github.com remote and a\n' - ' refs/pull/NNN/head ref in the merge section.') + 'show-all-prs: show GitHub pull requests in merge sections\n' + ' such pull requests are indentified as having\n' + ' a github.com remote and a\n' + ' refs/pull/NNN/head ref in the merge section.\n' + 'show-closed-prs: show pull requests that are not open anymore.\n' + ) return main_parser @@ -152,7 +154,8 @@ def main(): try: if args.config and \ - args.command in ('aggregate', 'show-closed-prs'): + args.command in \ + ('aggregate', 'show-closed-prs', 'show-all-prs'): run(args) else: parser.print_help()
Finish show-all-prs command
py
diff --git a/twitter_ads/enum.py b/twitter_ads/enum.py index <HASH>..<HASH> 100644 --- a/twitter_ads/enum.py +++ b/twitter_ads/enum.py @@ -147,6 +147,10 @@ CREATIVE_TYPE = enum( BANNER='BANNER', INTERSTITIAL='INTERSTITIAL', PREROLL='PREROLL', - VAST_PREROLL='VAST_PREROLL' - + VAST_PREROLL='VAST_PREROLL', + MEDIUM_RECTANGLE='MEDIUM_RECTANGLE', + BANNER_TABLET='BANNER_TABLET', + INTERSTITIAL_LANDSCAPE='INTERSTITIAL_LANDSCAPE', + INTERSTITIAL_TABLET='INTERSTITIAL_TABLET', + INTERSTITIAL_LANDSCAPE_TABLET='INTERSTITIAL_LANDSCAPE_TABLET' )
additional creative types (#<I>)
py
diff --git a/src/python/dxpy/utils/describe.py b/src/python/dxpy/utils/describe.py index <HASH>..<HASH> 100644 --- a/src/python/dxpy/utils/describe.py +++ b/src/python/dxpy/utils/describe.py @@ -24,6 +24,7 @@ containers, dataobjects, apps, and jobs). import datetime, time, json, math, sys from collections import defaultdict +from copy import copy from dxpy.utils.printing import * @@ -229,13 +230,10 @@ def render_bundleddepends(thing): def render_execdepends(thing): rendered = [] for item in thing: - if len(item) == 1: - rendered.append(item['name']) - elif 'package_manager' in item: - if item['package_manager'] == 'apt': - rendered.append(item['name']) - else: - rendered.append(item['package_manager'] + ":" + item['name']) + dep = copy(item) + dep.setdefault('package_manager', 'apt') + dep['version'] = ' = '+dep['version'] if 'version' in dep else '' + rendered.append("{package_manager}: {name}{version}".format(**dep)) return rendered def print_field(label, value):
[PTFM-<I>] describe execDepends better
py
diff --git a/pgpy/pgp.py b/pgpy/pgp.py index <HASH>..<HASH> 100644 --- a/pgpy/pgp.py +++ b/pgpy/pgp.py @@ -262,6 +262,15 @@ class PGPSignature(Armorable, ParentRef, PGPObject): return self._signature.signer @property + def signer_fingerprint(self): + """ + The fingerprint of the key that generated this signature, if it contained. Otherwise, an empty ``str``. + """ + if 'IssuerFingerprint' in self._signature.subpackets: + return next(iter(self._signature.subpackets['IssuerFingerprint'])).issuer_fingerprint + return '' + + @property def target_signature(self): return NotImplemented
Add .signer_fingerprint property to PGPSignature. - This returns the issuer fingerprint if the IssuerFingerprint subpacket is present, otherwise empty string.
py
diff --git a/fix_yahoo_finance/__init__.py b/fix_yahoo_finance/__init__.py index <HASH>..<HASH> 100644 --- a/fix_yahoo_finance/__init__.py +++ b/fix_yahoo_finance/__init__.py @@ -221,6 +221,7 @@ def download(tickers, start=None, end=None, as_panel=True, print("\nThe following tickers failed to download:\n", ', '.join(_FAILED_)) + _DFS_ = {} return data
Solved issue #<I> Separating data frames on multiple requests by the user.
py
diff --git a/boundary/alarm_update.py b/boundary/alarm_update.py index <HASH>..<HASH> 100644 --- a/boundary/alarm_update.py +++ b/boundary/alarm_update.py @@ -13,6 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # + +import json + from boundary import AlarmModify @@ -46,6 +49,9 @@ class AlarmUpdate(AlarmModify): def get_api_parameters(self): AlarmModify.get_api_parameters(self) self.method = "PUT" + if self._alarm_id is not None: + self._payload['id'] = float(self._alarm_id) + self.data = json.dumps(self._payload, sort_keys=True) self.path = "v1/alarm/{0}".format(self._alarm_id) def get_description(self):
Add required id to data payload when updating an alarm definition
py
diff --git a/spyderlib/utils/introspection/manager.py b/spyderlib/utils/introspection/manager.py index <HASH>..<HASH> 100644 --- a/spyderlib/utils/introspection/manager.py +++ b/spyderlib/utils/introspection/manager.py @@ -48,9 +48,14 @@ class PluginManager(QObject): self.info = None self.request = None self.pending = None + self.pending_request = None + self.waiting = False def send_request(self, info): """Handle an incoming request from the user.""" + if self.waiting: + self.pending_request = info + return debug_print('%s request' % info.name) desired = None self.info = info @@ -119,6 +124,10 @@ class PluginManager(QObject): % (self.info.name, response['plugin_name'], str(response['result'])[:100], delta)) self.introspection_complete.emit(response) + if self.pending_request: + info = self.pending_request + self.pending_request = None + self.send_request(info) def _handle_timeout(self): self.waiting = False
Buffer incoming messages until the previous one finishes
py
diff --git a/tests/test_pulla.py b/tests/test_pulla.py index <HASH>..<HASH> 100644 --- a/tests/test_pulla.py +++ b/tests/test_pulla.py @@ -108,6 +108,16 @@ class test_do_pull_in(unittest.TestCase): mock_get_formatted_status_message.assert_called_once_with(self.directory, 'Fail') +class test_get_git_version(unittest.TestCase): + def setUp(self): + self.GIT_VERSION_RESPONSE = 'git version 2.2.2' + self.puller = Pulla() + + def test_correct_git_version_returned(self): + git_version = self.puller.get_git_version() + + self.assertEqual(git_version, '2.2.2') + if __name__ == '__main__': unittest.main()
Ensure correct splitting is done with version number
py
diff --git a/phono3py/phonon3/imag_self_energy.py b/phono3py/phonon3/imag_self_energy.py index <HASH>..<HASH> 100644 --- a/phono3py/phonon3/imag_self_energy.py +++ b/phono3py/phonon3/imag_self_energy.py @@ -485,6 +485,7 @@ class ImagSelfEnergy(object): def delete_integration_weights(self): self._g = None self._g_zero = None + self._pp_strength = None def _run_with_band_indices(self): if self._g is not None:
Set None to pp_strength to free memory (hopefully).
py
diff --git a/pytds/dbapi.py b/pytds/dbapi.py index <HASH>..<HASH> 100644 --- a/pytds/dbapi.py +++ b/pytds/dbapi.py @@ -15,7 +15,7 @@ import errno import uuid from .tds import ( Error, LoginError, DatabaseError, - InterfaceError, TimeoutError, + InterfaceError, TimeoutError, OperationalError, TDS_PENDING, TDS74, TDS_ENCRYPTION_OFF, TDS_ODBC_ON, SimpleLoadBalancer, IS_TDS7_PLUS, @@ -315,8 +315,14 @@ class _Connection(object): except socket.error as e: if e.errno in (errno.ENETRESET, errno.ECONNRESET): return + raise except ClosedConnectionError: pass + except OperationalError as e: + # ignore ROLLBACK TRANSACTION without BEGIN TRANSACTION + if e.number == 3903: + return + raise def __del__(self): if self._conn is not None:
ignore error 'The ROLLBACK TRANSACTION request has no corresponding BEGIN TRANSACTION.' in rollback method
py
diff --git a/cdflib/epochs.py b/cdflib/epochs.py index <HASH>..<HASH> 100644 --- a/cdflib/epochs.py +++ b/cdflib/epochs.py @@ -413,11 +413,12 @@ class CDFepoch: count = len(new_tt2000) toutcs = np.zeros((count, 9)).astype(int) nansecs = np.zeros((count)).astype(int) + datxs = [CDFepoch._LeapSecondsfromJ2000(x) for x in new_tt2000] for x in range(count): nanoSecSinceJ2000 = new_tt2000[x] t3 = nanoSecSinceJ2000 - datx = CDFepoch._LeapSecondsfromJ2000(nanoSecSinceJ2000) + datx = datxs[x] if (nanoSecSinceJ2000 > 0): secSinceJ2000 = int(nanoSecSinceJ2000/CDFepoch.SECinNanoSecsD) nansec = int(nanoSecSinceJ2000 - secSinceJ2000 *
Pre-caclulate datxs
py
diff --git a/km3pipe/core.py b/km3pipe/core.py index <HASH>..<HASH> 100644 --- a/km3pipe/core.py +++ b/km3pipe/core.py @@ -84,6 +84,7 @@ class Pipeline(object): def drain(self, cycles=None): """Execute _drain while trapping KeyboardInterrupt""" + log.info("Now draining...") signal.signal(signal.SIGINT, self._handle_ctrl_c) try: self._drain(cycles)
Adds log.info to drain()
py
diff --git a/knxip/tests/test_core.py b/knxip/tests/test_core.py index <HASH>..<HASH> 100644 --- a/knxip/tests/test_core.py +++ b/knxip/tests/test_core.py @@ -9,8 +9,8 @@ class KNXIPCoreTestCase(unittest.TestCase): """Does the group address parser work correctly?""" self.assertEquals(parse_group_address("1"), 1) self.assertEquals(parse_group_address("1678"), 1678) - self.assertEquals(parse_group_address("1/1"), 257) - self.assertEquals(parse_group_address("2/2"), 514) + self.assertEquals(parse_group_address("1/1"), 2049) + self.assertEquals(parse_group_address("2/2"), 4098) self.assertEquals(parse_group_address("0/0/1"), 1) self.assertEquals(parse_group_address("1/1/1"), 2305) self.assertEquals(parse_group_address("4/8/45"), 10285)
Bugfix in test: 2-layer group addresses were handled incorrectly
py
diff --git a/pyspot/pyspot.py b/pyspot/pyspot.py index <HASH>..<HASH> 100644 --- a/pyspot/pyspot.py +++ b/pyspot/pyspot.py @@ -133,9 +133,11 @@ class HubSpotLeadsClient(HubSpotClient): def get_lead(self, lead_guid): return self._make_request('list/', {'guids[0]': lead_guid}) - def search_leads(self, term, params): - params['search'] = term + def list_leads(self, params): return self._make_request('list/', params) + + def search_leads(self, term, params): + return self.list_leads(params.update({'search': term}) or params) def update_lead(self, lead_guid, data): return self._make_request(
break out more general listing tool for leads
py
diff --git a/buildbot/scripts/runner.py b/buildbot/scripts/runner.py index <HASH>..<HASH> 100644 --- a/buildbot/scripts/runner.py +++ b/buildbot/scripts/runner.py @@ -765,7 +765,6 @@ class DebugClientOptions(OptionsWithOptionsFile): ["master", "m", None, "Location of the buildmaster's slaveport (host:port)"], ["passwd", "p", None, "Debug password to use"], - ["myoption", "O", "DEF", "My Option!"], ] buildbotOptions = [ [ 'debugMaster', 'passwd' ], @@ -780,10 +779,6 @@ class DebugClientOptions(OptionsWithOptionsFile): if len(args) > 2: raise usage.UsageError("I wasn't expecting so many arguments") - def postOptions(self): - print self['myoption'] - sys.exit(1) - def debugclient(config): from buildbot.clients import debug
(fixes #<I>) fix what appears to be some debug code that was inadvertently left in
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -14,6 +14,12 @@ setup( author_email="admin@analystcollective.org", url="https://github.com/fishtown-analytics/dbt", packages=find_packages(), + package_data={ + 'dbt': [ + 'include/global_project/dbt_project.yml', + 'include/global_project/macros/**/*.sql', + ] + }, test_suite='test', entry_points={ 'console_scripts': [
include global_project via setup.py
py
diff --git a/sphinx-jsonschema/__init__.py b/sphinx-jsonschema/__init__.py index <HASH>..<HASH> 100644 --- a/sphinx-jsonschema/__init__.py +++ b/sphinx-jsonschema/__init__.py @@ -47,7 +47,8 @@ class JsonSchema(Directive): 'lift_definitions': flag, 'auto_reference': flag, 'auto_target': flag, - 'timeout': float} + 'timeout': float, + 'encoding': directives.encoding} def run(self): try: @@ -159,7 +160,7 @@ class JsonSchema(Directive): source = filename try: - with open(source) as file: + with open(source, encoding=self.options.get('encoding')) as file: data = file.read() except IOError as error: raise self.error(u'"%s" directive encountered an IOError while loading file: %s\n%s'
New option "encoding" for parsing schemas from file.
py
diff --git a/eliot/tests/test_output.py b/eliot/tests/test_output.py index <HASH>..<HASH> 100644 --- a/eliot/tests/test_output.py +++ b/eliot/tests/test_output.py @@ -241,7 +241,7 @@ class MemoryLoggerTests(TestCase): L{MemoryLogger.write} can be called from multiple threads concurrently. """ thread_count = 10 - write_count = 1000 + write_count = 10000 logger = MemoryLogger()
Bump up the message count for extra confidence. It's still really, really fast.
py
diff --git a/python/setup.py b/python/setup.py index <HASH>..<HASH> 100644 --- a/python/setup.py +++ b/python/setup.py @@ -135,7 +135,7 @@ write_version_file(version_info) author_info = get_author_info() setuptools.setup( - name='opencc-py', + name='OpenCC', version=version_info, author=author_info[0], author_email=author_info[1],
Rename python binding to 'OpenCC'.
py
diff --git a/spinoff/component/transport/inmem.py b/spinoff/component/transport/inmem.py index <HASH>..<HASH> 100644 --- a/spinoff/component/transport/inmem.py +++ b/spinoff/component/transport/inmem.py @@ -69,7 +69,7 @@ class InMemoryRouting(object): dealer.put(outbox=inbox, message=message) break else: - raise RoutingException("No dealer ID matches the specified routing key") + raise RoutingException("No dealer ID matches the specified routing key (%s)" % routing_key) def assign_server(self, server, inbox, outbox): if self._server:
Clarified the RoutingException when transport.inmem gets a bad routing_key
py
diff --git a/src/rez/resolver.py b/src/rez/resolver.py index <HASH>..<HASH> 100644 --- a/src/rez/resolver.py +++ b/src/rez/resolver.py @@ -223,8 +223,17 @@ class Resolver(object): new_state = variant_states.get(variant) if new_state is None: - repo = variant.resource._repository - new_state = repo.get_variant_state_handle(variant.resource) + try: + repo = variant.resource._repository + new_state = repo.get_variant_state_handle(variant.resource) + except (IOError, OSError) as e: + # if, ie a package file was deleted on disk, then + # an IOError or OSError will be raised when we try to + # read from it - assume that the packages have changed! + self._print("Error loading %r (assuming cached state " + "changed): %s", variant.qualified_name, + e) + return True variant_states[variant] = new_state if old_state != new_state:
invalidate memcache if a package file has been deleted / moved / etc
py
diff --git a/docs/conf.py b/docs/conf.py index <HASH>..<HASH> 100755 --- a/docs/conf.py +++ b/docs/conf.py @@ -31,6 +31,7 @@ MOCK_MODULES = ['numpy', 'parmed.periodic_table', 'scipy', 'scipy.spatial', + 'scipy.constants', 'numpy.linalg'] for mod_name in MOCK_MODULES: sys.modules[mod_name] = mock.Mock()
Add scipy.constants to conf.py
py
diff --git a/pyrogram/client/ext/utils.py b/pyrogram/client/ext/utils.py index <HASH>..<HASH> 100644 --- a/pyrogram/client/ext/utils.py +++ b/pyrogram/client/ext/utils.py @@ -215,7 +215,7 @@ def parse_channel_chat(channel: types.Channel) -> pyrogram_types.Chat: title=channel.title, username=getattr(channel, "username", None), photo=parse_chat_photo(getattr(channel, "photo", None)), - restriction_reason=channel.restriction_reason + restriction_reason=getattr(channel, "restriction_reason") )
The restriction_reason field is not always available
py
diff --git a/tests/tests.py b/tests/tests.py index <HASH>..<HASH> 100644 --- a/tests/tests.py +++ b/tests/tests.py @@ -149,6 +149,30 @@ class MarkupFieldFormTests(TestCase): AdminMarkItUpWidget) +class MarkupFieldFormSaveTests(TestCase): + + def setUp(self): + self.data = {'title': 'example post', 'body': '**markdown**'} + self.form_class = modelform_factory(Post, fields=['title', 'body']) + + def testFormCreate(self): + form = self.form_class(self.data) + form.save() + + actual = Post.objects.get(title=self.data['title']) + self.assertEquals(actual.body.raw, self.data['body']) + + def testFormUpdate(self): + existing = Post.objects.create(title=self.data['title'], body=self.data['body']) + + update = {'title': 'New title', 'body': '**different markdown**'} + form = self.form_class(update, instance=existing) + form.save() + + actual = Post.objects.get(title=update['title']) + self.assertEquals(actual.body.raw, update['body']) + + class HiddenFieldFormTests(TestCase): def setUp(self): self.post = CallableDefault(body='[link](http://example.com) & "text"')
Added tests to reproduce Django <I> issues
py
diff --git a/host/daq/readout_utils.py b/host/daq/readout_utils.py index <HASH>..<HASH> 100644 --- a/host/daq/readout_utils.py +++ b/host/daq/readout_utils.py @@ -34,6 +34,9 @@ def interpret_pixel_data(data, dc, pixel_array, invert=True): address_split = np.array_split(address, np.where(np.diff(address.astype(np.int32)) < 0)[0] + 1) value_split = np.array_split(value, np.where(np.diff(address.astype(np.int32)) < 0)[0] + 1) + if len(address_split) > 5: + raise NotImplementedError('Only the data from one double column can be interpreted at once!') + mask = np.empty_like(pixel_array.data) # BUG in numpy: pixel_array is de-masked if not .data is used mask[:] = len(address_split)
ENH: sanity check added
py
diff --git a/perfscale_controller_stress.py b/perfscale_controller_stress.py index <HASH>..<HASH> 100755 --- a/perfscale_controller_stress.py +++ b/perfscale_controller_stress.py @@ -51,7 +51,7 @@ def deploy_swarm_to_new_model(client, model_name): before_add = datetime.utcnow() new_client = client.add_model(model_name) - new_client.deploy(get_charm_url(), series='trusty') + new_client.deploy(get_charm_url()) new_client.wait_for_started() new_client.wait_for_workloads()
Don't use series for bundle.
py
diff --git a/wily/decorators.py b/wily/decorators.py index <HASH>..<HASH> 100644 --- a/wily/decorators.py +++ b/wily/decorators.py @@ -9,6 +9,12 @@ from wily import __version__ def add_version(f): + """ + Add the version of wily to the help heading. + + :param f: function to decorate + :return: decorated function + """ doc = f.__doc__ f.__doc__ = "Version: " + __version__ + "\n\n" + doc return f
fix: added docstring to public function
py
diff --git a/test/test_vim.py b/test/test_vim.py index <HASH>..<HASH> 100644 --- a/test/test_vim.py +++ b/test/test_vim.py @@ -90,9 +90,9 @@ def test_vars(vim): def test_options(vim): - assert vim.options['listchars'] == 'tab:> ,trail:-,nbsp:+' - vim.options['listchars'] = 'tab:xy' - assert vim.options['listchars'] == 'tab:xy' + assert vim.windows[0].options['listchars'] == 'tab:> ,trail:-,nbsp:+' + vim.windows[0].options['listchars'] = 'tab:xy' + assert vim.windows[0].options['listchars'] == 'tab:xy' def test_buffers(vim):
test: fix test_options IIRC this became more strict at some point: since 'listchars' is window-local, it is not resolved by vim.options (nvim_get_option).
py
diff --git a/commands.py b/commands.py index <HASH>..<HASH> 100644 --- a/commands.py +++ b/commands.py @@ -168,8 +168,8 @@ def release(config, version=None, date=None, tag_name=None, next_version=None, p abort(5, 'Could not find section in change log') printer.info('Version:', version) - printer.info('Release date:', date) printer.info('Tag name:', tag_name) + printer.info('Release date:', date) printer.info('Next version:', next_version) msg = 'Continue with release?: {version} - {date}'.format_map(locals()) yes or confirm(config, msg, abort_on_unconfirmed=True)
In release command, show tag name right after version These are usually the same. Showing them together makes it easier to see when they're different.
py
diff --git a/web/file.py b/web/file.py index <HASH>..<HASH> 100644 --- a/web/file.py +++ b/web/file.py @@ -8,9 +8,12 @@ import web routes = {} class FileHandler(web.HTTPHandler): + filename = None + def __init__(self, request, response, groups): web.HTTPHandler.__init__(self, request, response, groups) - self.filename = self.local + self.groups[0] + if not self.filename: + self.filename = self.local + self.groups[0] def get_body(self): return False
Add support for subclasses specifying a constant filename
py
diff --git a/discord/utils.py b/discord/utils.py index <HASH>..<HASH> 100644 --- a/discord/utils.py +++ b/discord/utils.py @@ -31,6 +31,7 @@ from base64 import b64encode import asyncio import json +DISCORD_EPOCH = 1420070400000 class cached_property: def __init__(self, function): @@ -73,6 +74,12 @@ def parse_time(timestamp): return datetime.datetime(*map(int, re_split(r'[^\d]', timestamp.replace('+00:00', '')))) return None +def snowflake_time(id): + ''' + Returns the creation date of a discord id. + ''' + return datetime.datetime.utcfromtimestamp(((int(id) >> 22) + DISCORD_EPOCH) / 1000) + def find(predicate, seq): """A helper to return the first element found in the sequence that meets the predicate. For example: ::
Add util method to extract creation date from discord ids
py
diff --git a/regions/io/tests/test_ds9_language.py b/regions/io/tests/test_ds9_language.py index <HASH>..<HASH> 100644 --- a/regions/io/tests/test_ds9_language.py +++ b/regions/io/tests/test_ds9_language.py @@ -4,7 +4,14 @@ from ..read_ds9 import read_ds9 from ..write_ds9 import objects_to_ds9_string from astropy.utils.data import get_pkg_data_filename, get_pkg_data_filenames from astropy.tests.helper import pytest +import distutils.version as v +import astropy.version as astrov +_ASTROPY_MINVERSION = v.StrictVersion('1.1') +_ASTROPY_VERSION = v.StrictVersion(astrov.version) + +@pytest.mark.xfail(_ASTROPY_VERSION < _ASTROPY_MINVERSION, + reason='Some coordinates systems not available in older version of astropy') def test_read(): #Check that all test files including reference files are readable files = get_pkg_data_filenames('data')
add xfail to tests
py
diff --git a/internetarchive/files.py b/internetarchive/files.py index <HASH>..<HASH> 100644 --- a/internetarchive/files.py +++ b/internetarchive/files.py @@ -32,6 +32,7 @@ import logging import socket import six.moves.urllib as urllib +import six from requests.exceptions import HTTPError, RetryError, ConnectTimeout, \ ConnectionError, ReadTimeout @@ -109,6 +110,11 @@ class File(BaseFile): :param file_metadata: (optional) a dict of metadata for the given fille. """ + if six.PY2: + try: + name = name.decode('utf-8') + except UnicodeEncodeError: + pass super(File, self).__init__(item.item_metadata, name, file_metadata) self.item = item url_parts = dict(
Fixed bug where get_file in PY2 would not work on unicode names
py
diff --git a/openprovider/models.py b/openprovider/models.py index <HASH>..<HASH> 100644 --- a/openprovider/models.py +++ b/openprovider/models.py @@ -161,7 +161,17 @@ class RegistryMessage(Model): @property def date(self): - return datetime.datetime.strptime(str(self._obj.date), '%Y-%m-%d %H:%M:%S') + date = None + try: + date = self._attrs['date'] + except KeyError: + if self._obj is not None: + try: + date = self._obj['date'] + except (AttributeError, KeyError): + pass + + return datetime.datetime.strptime(str(date), '%Y-%m-%d %H:%M:%S') if date else None class DomainDetails(Model):
Allow date to be overridden through the constructor This makes the following possible: RegistryMessage(date='<I>-<I>-<I> <I>:<I>:<I>')
py
diff --git a/deploy-cluster-aws/run_and_tag.py b/deploy-cluster-aws/run_and_tag.py index <HASH>..<HASH> 100644 --- a/deploy-cluster-aws/run_and_tag.py +++ b/deploy-cluster-aws/run_and_tag.py @@ -37,7 +37,7 @@ for _ in range(nodes): # 0, 1, ..., (nodes-1) = nodes items reservation = conn.run_instances( 'ami-accff2b1', # ubuntu-image #'ami-596b7235', # ubuntu w/ iops storage - key_name='bigchain', + key_name='bigchaindb', # IMPORTANT!!!! - here you change the machine type for the cluster instance_type='m3.2xlarge', #instance_type='c3.8xlarge',
Change AWS key_name to bigchaindb in run_and_tag.py
py
diff --git a/djstripe/models/api.py b/djstripe/models/api.py index <HASH>..<HASH> 100644 --- a/djstripe/models/api.py +++ b/djstripe/models/api.py @@ -60,6 +60,11 @@ class APIKey(StripeModel): def __str__(self): return self.name or self.secret_redacted + def clean(self): + if self.type == APIKeyType.secret and not self.djstripe_owner_account: + self.refresh_account() + return super().clean() + def refresh_account(self): from .account import Account
Clean APIKey on creation by automatically refreshing the account if possible This enables the use case of creating a new APIKey from the Django administration, which will subsequently automatically fetch and set the matching Account object.
py
diff --git a/lucid/optvis/transform.py b/lucid/optvis/transform.py index <HASH>..<HASH> 100644 --- a/lucid/optvis/transform.py +++ b/lucid/optvis/transform.py @@ -137,8 +137,9 @@ def collapse_alpha_random(sd=0.5): def _rand_select(xs, seed=None): - rand_n = tf.random_uniform((), 0, len(xs), "int32", seed=seed) - return tf.constant(xs)[rand_n] + xs_list = list(xs) + rand_n = tf.random_uniform((), 0, len(xs_list), "int32", seed=seed) + return tf.constant(xs_list)[rand_n] def _angle2rads(angle, units): @@ -150,6 +151,15 @@ def _angle2rads(angle, units): return angle +def crop_or_pad_to(height, width): + """Ensures the specified spatial shape by either padding or cropping. + Meant to be used as a last transform for architectures insisting on a specific + spatial shape of their inputs. + """ + def inner(t_image): + return tf.image.resize_image_with_crop_or_pad(t_image, height, width) + return inner + standard_transforms = [ pad(12, mode="constant", constant_value=.5), jitter(8),
Add `crop_or_pad_to(height, width)` to transforms to more easily support models which require precise input shapes
py
diff --git a/docs/conf.py b/docs/conf.py index <HASH>..<HASH> 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -101,6 +101,7 @@ html_theme = 'alabaster' # further. For a list of options available for each theme, see the # documentation. # +# http://alabaster.readthedocs.io/en/latest/customization.html html_theme_options = { 'description': 'A Python-based framework for conducting high-quality empirical studies of software artefacts.', 'fixed_sidebar': True,
added link to alabaster docs in doc conf
py
diff --git a/flange/dbengine.py b/flange/dbengine.py index <HASH>..<HASH> 100644 --- a/flange/dbengine.py +++ b/flange/dbengine.py @@ -16,7 +16,7 @@ dbengine_schema = { } def dbengine_create_func(config): - url_format_string = "{:s}://{:s}:{:s}@{:s}:{:s}/{:s}?charset=utf8" + url_format_string = "{:s}://{:s}:{:s}@{:s}:{:s}/{:s}?" engine = create_engine(url_format_string.format( config['driver'], config['user'],
postgres didn't like the charset param that was hard-coded in url
py
diff --git a/flask_socketio/cli.py b/flask_socketio/cli.py index <HASH>..<HASH> 100644 --- a/flask_socketio/cli.py +++ b/flask_socketio/cli.py @@ -19,8 +19,11 @@ import click @click.option('--eager-loading/--lazy-loader', default=None, help='Enable or disable eager loading. By default eager ' 'loading is enabled if the reloader is disabled.') +@click.option('--with-threads/--without-threads', is_flag=True, + help='These options are only supported for compatibility with ' + 'the original Flask local development server and are ignored.') @pass_script_info -def run(info, host, port, reload, debugger, eager_loading): +def run(info, host, port, reload, debugger, eager_loading, with_threads): """Runs a local development server for the Flask-SocketIO application. The reloader and debugger are by default enabled if the debug flag of
support --with(out)-threads for compatibility (#<I>)
py
diff --git a/lavalink/Client.py b/lavalink/Client.py index <HASH>..<HASH> 100644 --- a/lavalink/Client.py +++ b/lavalink/Client.py @@ -136,16 +136,12 @@ class Client: except asyncio.TimeoutError: raise NoNodesAvailable if guild_id in self.players: - log.debug('Found player in cache.') return self.players[guild_id] - log.debug('Player not in cache') if not create: return None guild = self.bot.get_guild(guild_id) if guild is None: - log.debug('Couldn\'t find the guild.') return self.players.get(guild_id, self.nodes.nodes[0]) - log.debug('Getting new player from geo.') return self.nodes.get_by_region(guild) # Bot Events @@ -167,7 +163,7 @@ class Client: guild_id = int(data['d']['guild_id']) player = self.players[guild_id] if not player: - log.info('Client received an updated for a non-existent player. {}'.format(guild_id)) + log.debug('Client received an update for a non-existent player. {}'.format(guild_id)) return if data['t'] == 'VOICE_SERVER_UPDATE':
hahayes devoxin is a big buli
py
diff --git a/asv/plugins/git.py b/asv/plugins/git.py index <HASH>..<HASH> 100644 --- a/asv/plugins/git.py +++ b/asv/plugins/git.py @@ -153,11 +153,9 @@ class Git(Repo): if not name: return None except util.ProcessError as err: - if err.retcode == 128: - # Nothing found - return None - raise - + # Failed to obtain. + return None + # Return tags without prefix for prefix in ['tags/']: if name.startswith(prefix):
git: don't try to deal with git-name-rev failure types The specific exit codes appear to vary, so it's better to not try to interpret them.
py
diff --git a/rootpy/memory/keepalive.py b/rootpy/memory/keepalive.py index <HASH>..<HASH> 100644 --- a/rootpy/memory/keepalive.py +++ b/rootpy/memory/keepalive.py @@ -24,9 +24,10 @@ def keepalive(nurse, *patients): """ if DISABLED: return - for p in patients: - log.debug("Keeping {0} alive for lifetime of {1}".format(p, nurse)) - if sys.version_info[0] >= 3 and not isinstance(nurse, Hashable): - # PyROOT missing __hash__ for Python 3 - nurse.__class__.__hash__ = object.__hash__ - KEEPALIVE.setdefault(nurse, set()).update(patients) + if isinstance(nurse, Hashable): + for p in patients: + log.debug("Keeping {0} alive for lifetime of {1}".format(p, nurse)) + KEEPALIVE.setdefault(nurse, set()).update(patients) + else: + log.warning("Unable to keep objects alive for lifetime of " + "unhashable type {0}".format(nurse))
fix issues like #<I>: unable use keepalive with unhashable type. Most likely due to mixing bare PyROOT and rootpy
py
diff --git a/test/test_create.py b/test/test_create.py index <HASH>..<HASH> 100644 --- a/test/test_create.py +++ b/test/test_create.py @@ -59,6 +59,7 @@ class Tests(unittest.TestCase): def test_rerooted_tree_with_node_names(self): with tempdir.TempDir() as tmp: with tempdir.TempDir() as tmp2: + cmd1 = "%s create --verbosity 2 --sequences %s --alignment %s --taxonomy %s --rerooted_tree %s --output %s" \ %(path_to_script, os.path.join(path_to_data,'create','homologs.trimmed.unaligned.faa'), @@ -72,6 +73,7 @@ class Tests(unittest.TestCase): "%s.gpkg" % tmp, os.path.join(path_to_data,'create','test.faa'), tmp2+"_") + extern.run(cmd2) def test_min_aligned_percent(self):
merged <I>s_hmm_fix
py
diff --git a/salt/client.py b/salt/client.py index <HASH>..<HASH> 100644 --- a/salt/client.py +++ b/salt/client.py @@ -71,7 +71,7 @@ class LocalClient(object): ''' def __init__(self, c_path='/etc/salt/master', mopts=None): if mopts: - self.opts - mopts + self.opts = mopts else: self.opts = salt.config.client_config(c_path) self.serial = salt.payload.Serial(self.opts)
Fix typo in pre-passing opts to localclient
py
diff --git a/click_shell/version.py b/click_shell/version.py index <HASH>..<HASH> 100644 --- a/click_shell/version.py +++ b/click_shell/version.py @@ -2,6 +2,8 @@ click_shell.version """ +# pylint: disable=redefined-variable-type + import datetime import os import subprocess @@ -64,7 +66,7 @@ def get_git_changeset(): shell=True, cwd=repo_dir, universal_newlines=True) timestamp = git_log.communicate()[0] try: - timestamp = datetime.datetime.utcfromtimestamp(int(timestamp)) # pylint: disable=redefined-variable-type + timestamp = datetime.datetime.utcfromtimestamp(int(timestamp)) return timestamp.strftime('%Y%m%d%H%M%S') except ValueError: return None
pylint should finally be happy
py
diff --git a/buchner/cmdline.py b/buchner/cmdline.py index <HASH>..<HASH> 100644 --- a/buchner/cmdline.py +++ b/buchner/cmdline.py @@ -45,8 +45,8 @@ def perror(s): print s -def create_project(command, argv): - parser = build_parser('%prog createproject <PROJECTNAME>') +def create(command, argv): + parser = build_parser('%prog create <PROJECTNAME>') (options, args) = parser.parse_args() if not argv: @@ -115,7 +115,7 @@ def create_project(command, argv): HANDLERS = ( - ('createproject', create_project, 'Creates a new buchner project.'),) + ('create', create, 'Creates a new buchner project.'),) def cmdline_handler(scriptname, argv):
Change createproject to create "create" is shorter and what else is someone going to create with buchner? A chocolate cake?
py
diff --git a/openquake/commonlib/readinput.py b/openquake/commonlib/readinput.py index <HASH>..<HASH> 100644 --- a/openquake/commonlib/readinput.py +++ b/openquake/commonlib/readinput.py @@ -907,6 +907,7 @@ def get_gmfs(oqparam): if missing_eids: raise InvalidFile('Missing eids in the gmfs.csv file: %s' % missing_eids) + assert expected_eids == found_eids, (expected_eids, found_eids) eidx = {eid: e for e, eid in enumerate(eids)} sitecol = get_site_collection(oqparam) expected_sids = set(sitecol.sids)
Added an assertion [skip hazardlib]
py
diff --git a/graphene_django/views.py b/graphene_django/views.py index <HASH>..<HASH> 100644 --- a/graphene_django/views.py +++ b/graphene_django/views.py @@ -282,6 +282,8 @@ class GraphQLView(View): raise HttpError(HttpResponseBadRequest('Variables are invalid JSON.')) operation_name = request.GET.get('operationName') or data.get('operationName') + if operation_name == "null": + operation_name = None return query, variables, operation_name, id
Fix graphiql refreshing operationName undefined operationName is serialized into the query string as a string. When getting the query value for operationName, we need to convert string "null" into None.
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100755 --- a/setup.py +++ b/setup.py @@ -25,7 +25,7 @@ setup( author_email='miguelgrinberg50@gmail.com', description='Engine.IO server', long_description=long_description, - packages=find_packages(), + packages=["engineio"], zip_safe=False, include_package_data=True, platforms='any',
Remove tests from built package (#<I>) ...to prevent the tests to be included in production code. From "Using find_packages()": For simple projects, it’s usually easy enough to manually add packages to the packages argument of setup(). However, for very large projects (Twisted, PEAK, Zope, Chandler, etc.), it can be a big burden to keep the package list updated. That’s what setuptools.find_packages() is for. Read: <URL>
py
diff --git a/pylint_django/compat.py b/pylint_django/compat.py index <HASH>..<HASH> 100644 --- a/pylint_django/compat.py +++ b/pylint_django/compat.py @@ -22,10 +22,3 @@ except ImportError: except ImportError: from astroid.util import Uninferable -try: - django = __import__("django") - django_version = django.VERSION -except ImportError: - # if not available, will be handled by the django_installed checker - django_version = (1, 5) -
Remove unnecessary django_version compat this is not really used and elsewhere we do: from django import VERSION as django_version
py
diff --git a/tests/test_objects.py b/tests/test_objects.py index <HASH>..<HASH> 100644 --- a/tests/test_objects.py +++ b/tests/test_objects.py @@ -178,7 +178,10 @@ class TestGridFsObjects(unittest.TestCase): """ Drop the default gridfs instance (i.e. ``fs``) associate to this database """ - return defer.gatherResults([db.fs.files.remove({}), db.fs.chunks.remove({})]) + return defer.gatherResults([ + db.drop_collection('fs.files'), + db.drop_collection('fs.chunks') + ]) @defer.inlineCallbacks def test_GridFileObjects(self):
test_objects's drop_gridfs also drop indexes
py
diff --git a/aiotg/bot.py b/aiotg/bot.py index <HASH>..<HASH> 100644 --- a/aiotg/bot.py +++ b/aiotg/bot.py @@ -341,7 +341,7 @@ class Bot: else: err_msg = await response.read() logger.error(err_msg) - raise RuntimeError(err_msg) + raise BotApiError(err_msg, response=response) async def get_me(self): """ @@ -637,3 +637,9 @@ class CallbackQuery: callback_query_id=self.query_id, **options ) + + +class BotApiError(RuntimeError): + def __init__(self, *args, response): + super().__init__(*args) + self.response = response
RuntimeError is too broad kind of exception, replaced with BotApiError (#<I>)
py
diff --git a/src/python/dxpy/utils/config.py b/src/python/dxpy/utils/config.py index <HASH>..<HASH> 100644 --- a/src/python/dxpy/utils/config.py +++ b/src/python/dxpy/utils/config.py @@ -280,7 +280,11 @@ class DXConfig(MutableMapping): try: os.makedirs(conf_dir, 0o700) except OSError: - os.chmod(conf_dir, 0o700) + try: + os.chmod(conf_dir, 0o700) + except OSError as e: + warn(fill("Error while writing configuration data: " + format_exception(e))) + return env_jsonfile_path = os.path.join(conf_dir, "environment.json") # Make sure the file has 600 permissions
Fail gracefully if unable to chmod conf dir
py
diff --git a/drivers/python2/rethinkdb/ast.py b/drivers/python2/rethinkdb/ast.py index <HASH>..<HASH> 100644 --- a/drivers/python2/rethinkdb/ast.py +++ b/drivers/python2/rethinkdb/ast.py @@ -180,7 +180,13 @@ class RDBValue(RDBOp): def order_by(self, *obs): return OrderBy(self, *obs) - def between(self, left_bound=(), right_bound=()): + def between(self, left_bound=None, right_bound=None): + # This is odd and inconsistent with the rest of the API. Blame a + # poorly thought out spec. + if left_bound is None: + left_bound = () + if right_bound is None: + right_bound = () return Between(self, left_bound=left_bound, right_bound=right_bound) def distinct(self):
changes the behavior of betweeen to be inconsistent
py
diff --git a/salt/utils/http.py b/salt/utils/http.py index <HASH>..<HASH> 100644 --- a/salt/utils/http.py +++ b/salt/utils/http.py @@ -126,7 +126,7 @@ def query(url, os.path.join(syspaths.CONFIG_DIR, 'master') ) elif node == 'minion': - opts = salt.config.master_config( + opts = salt.config.minion_config( os.path.join(syspaths.CONFIG_DIR, 'minion') ) else:
minion nodes should parse minion configs Small copy/paste typo there
py
diff --git a/src/foremast/consts.py b/src/foremast/consts.py index <HASH>..<HASH> 100644 --- a/src/foremast/consts.py +++ b/src/foremast/consts.py @@ -9,7 +9,8 @@ LOG = logging.getLogger(__name__) def find_config(): """Look for **foremast.cfg** in config_locations. - If not found, give a fatal error. + Raises: + SystemExit: No configuration file found. Returns: ConfigParser: found configuration file @@ -24,7 +25,7 @@ def find_config(): cfg_file = configurations.read(config_locations) if not cfg_file: - LOG.error('No config found in the following locations: %s\n', config_locations) + raise SystemExit('No configuration found in the following locations:\n\n{0}\n'.format('\n'.join(config_locations))) return configurations
fix: Exit with error when no configuration found
py
diff --git a/api/opentrons/util/calibration_functions.py b/api/opentrons/util/calibration_functions.py index <HASH>..<HASH> 100644 --- a/api/opentrons/util/calibration_functions.py +++ b/api/opentrons/util/calibration_functions.py @@ -65,14 +65,14 @@ def probe_instrument(instrument, robot) -> Point: 'x': [], 'y': [], 'z': [] } + safe_height = center.z + Z_CROSSOVER_CLEARANCE + robot.home() + robot.poses = instrument._move(robot.poses, z=safe_height) for axis, *probing_vector, distance in hot_spots: x, y, z = array(probing_vector) + center - safe_height = center.z + Z_CROSSOVER_CLEARANCE - - robot.poses = instrument._move(robot.poses, z=safe_height) robot.poses = instrument._move(robot.poses, x=x, y=y) robot.poses = instrument._move(robot.poses, z=z)
move to safe probe height before looping through switches
py
diff --git a/openhtf/conf.py b/openhtf/conf.py index <HASH>..<HASH> 100644 --- a/openhtf/conf.py +++ b/openhtf/conf.py @@ -230,7 +230,9 @@ class ConfigModel(object): @threads.Synchronized def ContainsKey(self, name): - return name in self._state + return name in self._state or ( + name in self._declarations and + self._declarations[name].default_value is not None) # pylint: enable=missing-docstring
Make 'in' checks on config True if the key isn't in the config but a default_value is declared
py
diff --git a/django_zappa/handler.py b/django_zappa/handler.py index <HASH>..<HASH> 100644 --- a/django_zappa/handler.py +++ b/django_zappa/handler.py @@ -107,7 +107,7 @@ def lambda_handler(event, context, settings_name="zappa_settings"): exception = (b64_content) # Internal are changed to become relative redirects # so they still work for apps on raw APIGW and on a domain. - elif response.status_code in [301, 302]: + elif response.status_code[0] == 3 and response.has_header('Location'): location = returnme['Location'] location = '/' + location.replace("http://zappa/", "") exception = location
Fixes #<I> -- Adds support for various 3** HTTP respones Adds support for <I> - <I> HTTP responses. <I> and <I> want you to use the same METHOD as the original request. This is something your javascript code doesn't do yet.
py
diff --git a/tests/test_cmd2.py b/tests/test_cmd2.py index <HASH>..<HASH> 100644 --- a/tests/test_cmd2.py +++ b/tests/test_cmd2.py @@ -352,5 +352,9 @@ def test_base_colorize(base_app): # But if we create a fresh Cmd() instance, it will fresh_app = cmd2.Cmd() color_test = fresh_app.colorize('Test', 'red') - assert color_test == '\x1b[31mTest\x1b[39m' + # Actually, colorization only ANSI escape codes is only applied on non-Windows systems + if sys.platform == 'win32': + assert out.startswith('Elapsed: 0:00:00') + else: + assert color_test == '\x1b[31mTest\x1b[39m'
Fix to colorize unit test for Windows
py
diff --git a/test/conftest.py b/test/conftest.py index <HASH>..<HASH> 100644 --- a/test/conftest.py +++ b/test/conftest.py @@ -11,7 +11,6 @@ from nameko import memory memory.patch() - def get_connection(): #conn = Connection('amqp://guest:guest@10.11.105.128:5672//platform') conn = Connection(transport='memory') @@ -27,6 +26,12 @@ def pytest_addoption(parser): def pytest_configure(config): + # monkey patch an encoding attribute onto GreenPipe to + # satisfy a pytest assertion + import py + from eventlet.greenio import GreenPipe + GreenPipe.encoding = py.std.sys.stdout.encoding + if config.option.blocking_detection: from eventlet import debug debug.hub_blocking_detection(True)
patch eventlet so that it behaves with pytest
py
diff --git a/socketIO_client/logs.py b/socketIO_client/logs.py index <HASH>..<HASH> 100644 --- a/socketIO_client/logs.py +++ b/socketIO_client/logs.py @@ -2,7 +2,16 @@ import logging import time logger = logging.getLogger("socketIO-client") -logger.addHandler(logging.NullHandler()) + +try: + logger.addHandler(logging.NullHandler()) +except AttributeError: + # Workaround for Python 2.6 not having NullHandler + # See https://docs.python.org/release/2.6/library/logging.html#configuring-logging-for-a-library + class NullHandler(logging.Handler): + def emit(self, record): + pass + logger.addHandler(NullHandler()) class LoggingMixin(object):
Fixes NullHandler not present in Python <I>
py
diff --git a/py/doc/conftest.py b/py/doc/conftest.py index <HASH>..<HASH> 100644 --- a/py/doc/conftest.py +++ b/py/doc/conftest.py @@ -1,7 +1,6 @@ from __future__ import generators import py from py.__.misc import rest -from py.__.rest import directive Option = py.test.config.Option option = py.test.config.addoptions("documentation check options", @@ -15,11 +14,18 @@ option = py.test.config.addoptions("documentation check options", ) ) +_initialized = False def checkdocutils(): + global _initialized try: import docutils except ImportError: py.test.skip("docutils not importable") + if not _initialized: + from py.__.rest import directive + directive.register_linkrole('api', resolve_linkrole) + directive.register_linkrole('source', resolve_linkrole) + _initialized = True def restcheck(path): localpath = path @@ -251,6 +257,4 @@ def resolve_linkrole(name, text): else: relpath += '.html' return text, '../../apigen/source/%s' % (relpath,) -directive.register_linkrole('api', resolve_linkrole) -directive.register_linkrole('source', resolve_linkrole)
[svn r<I>] fix failures in case of docutils not being installed (related to importing 'py.__.rest.directive') --HG-- branch : trunk
py
diff --git a/indra/sources/reach/reach_api.py b/indra/sources/reach/reach_api.py index <HASH>..<HASH> 100644 --- a/indra/sources/reach/reach_api.py +++ b/indra/sources/reach/reach_api.py @@ -137,7 +137,11 @@ def process_text(text, citation=None, offline=False): logger.error('Could not process text.') logger.error(e) return None + # REACH version < 1.3.5 json_str = result_map.get('resultJson') + if not json_str: + # REACH version >= 1.3.5 + json_str = result_map.get('result') if not isinstance(json_str, bytes): json_str = json_str.encode('utf-8') else:
Get result from new result_map format in REACH
py
diff --git a/tests/test_plot.py b/tests/test_plot.py index <HASH>..<HASH> 100644 --- a/tests/test_plot.py +++ b/tests/test_plot.py @@ -1,3 +1,5 @@ +from more_itertools.recipes import flatten + from svg.charts.plot import Plot @@ -56,3 +58,27 @@ class TestPlot: assert b'Sam' in svg assert b'Dan' in svg + @staticmethod + def get_data(): + yield (1, 0) + yield (2, 1) + + def test_iterable_data_grouped(self): + g = Plot() + spec = dict( + data=self.get_data(), + title='labels', + ) + g.add_data(spec) + svg = g.burn() + assert b'text="(1.00, 0.00)"' in svg + + def test_iterable_data_flat(self): + g = Plot() + spec = dict( + data=flatten(self.get_data()), + title='labels', + ) + g.add_data(spec) + svg = g.burn() + assert b'text="(1.00, 0.00)"' in svg
Add tests capturing failures with iterable data. Ref #<I>.
py
diff --git a/tests/jenkins/pages/treeherder.py b/tests/jenkins/pages/treeherder.py index <HASH>..<HASH> 100644 --- a/tests/jenkins/pages/treeherder.py +++ b/tests/jenkins/pages/treeherder.py @@ -269,9 +269,11 @@ class TreeherderPage(Base): self.find_element(*self._filter_panel_exception_failures_locator).click() def select_mozilla_central_repo(self): - # Fix me: https://github.com/mozilla/treeherder-tests/issues/43 self.open_repos_menu() + # FIXME workaround for https://bugzilla.mozilla.org/show_bug.cgi?id=1411264 + el = self.find_element(By.CSS_SELECTOR, 'body') self.find_element(*self._mozilla_central_repo_locator).click() + self.wait.until(EC.staleness_of(el)) self.wait_for_page_to_load() def select_next_job(self):
Jenkins: Improve test_switch_repo by working around bug <I> (#<I>)
py
diff --git a/spyder/utils/qthelpers.py b/spyder/utils/qthelpers.py index <HASH>..<HASH> 100644 --- a/spyder/utils/qthelpers.py +++ b/spyder/utils/qthelpers.py @@ -86,8 +86,8 @@ def qapplication(translate=True, test_time=3): if translate: install_translator(app) - test_travis = os.environ.get('TEST_CI_WIDGETS', None) - if test_travis is not None: + test_ci = os.environ.get('TEST_CI_WIDGETS', None) + if test_ci is not None: timer_shutdown = QTimer(app) timer_shutdown.timeout.connect(app.quit) timer_shutdown.start(test_time*1000)
Testing: Change the name of a variable
py