diff
stringlengths
139
3.65k
message
stringlengths
8
627
diff_languages
stringclasses
1 value
diff --git a/lago/plugins/vm.py b/lago/plugins/vm.py index <HASH>..<HASH> 100644 --- a/lago/plugins/vm.py +++ b/lago/plugins/vm.py @@ -479,10 +479,21 @@ class VMPlugin(plugins.Plugin): for net in nets.values(): mapping = net.mapping() for hostname, ip in mapping.items(): - if hostname.startswith(self.name()): + # hostname is <hostname>-<ifacename> + if hostname.startswith(self.name() + "-"): ips.append(str(ip)) return ips + def ips_in_net(self, net_name): + ips = [] + net = self.virt_env.get_net(name=net_name) + mapping = net.mapping() + for hostname, ip in mapping.items(): + # hostname is <hostname>-<ifacename> + if hostname.startswith(self.name() + "-"): + ips.append(str(ip)) + return ips + def ssh( self, command,
Add ips_in_net() function - retrieve all IPs in a net Add a helper function to get all IPs a VM has in a specific network. Will be used to ensure in oVirt System Test that NFS goes over the storage network, and not the management network.
py
diff --git a/beetle/builder.py b/beetle/builder.py index <HASH>..<HASH> 100644 --- a/beetle/builder.py +++ b/beetle/builder.py @@ -90,8 +90,12 @@ def give_subpages(site): for page in site['pages']: if 'subpages' not in page: continue - subpage_group = page['subpages']['group'] - page['subpages'] = site['groups'][subpage_group] + if 'group' in page['subpages']: + subpage_group = page['subpages']['group'] + page['subpages'] = site['groups'][subpage_group] + elif 'category' in page['subpages']: + subpage_group = page['subpages']['category'] + page['subpages'] = site['categories'][subpage_group] def make_page(path, page_defaults):
Can now give subpages based on categories.
py
diff --git a/geomdl/construct.py b/geomdl/construct.py index <HASH>..<HASH> 100644 --- a/geomdl/construct.py +++ b/geomdl/construct.py @@ -60,7 +60,7 @@ def construct_surface(direction, *args, **kwargs): if rational: if arg.weights is None: raise GeomdlException("Expecting a rational curve", - data=dict(idx=idx, degree=degree, degree_arg=arg.degree)) + data=dict(idx=idx, rational=rational, rational_arg=arg.rational)) new_weights += list(arg.weights) # Set variables w.r.t. input direction
Fix debugging output for rational checking
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -5,7 +5,7 @@ with open("README.md", "r") as fh: setup( name='py_expression_eval', - version='0.3.13', + version='0.3.14', description='Python Mathematical Expression Evaluator', long_description=long_description, long_description_content_type="text/markdown",
Bumped version for <I> release.
py
diff --git a/nuimo.py b/nuimo.py index <HASH>..<HASH> 100644 --- a/nuimo.py +++ b/nuimo.py @@ -225,8 +225,9 @@ class NuimoController(gattlib.GATTRequester): NuimoGestureEvent.FLY_TOWARD, NuimoGestureEvent.FLY_BACKWARDS, NuimoGestureEvent.FLY_UP_DOWN] fly_direction = ord(received_data[3]) + fly_distance = ord(received_data[4]) event_kind = directions[fly_direction] - event = NuimoGestureEvent(event_kind, fly_direction) + event = NuimoGestureEvent(event_kind, fly_distance) return event @staticmethod
Get the fly distance and put it in the fly_event as value
py
diff --git a/test/unit_test/run/test_acq_manager.py b/test/unit_test/run/test_acq_manager.py index <HASH>..<HASH> 100644 --- a/test/unit_test/run/test_acq_manager.py +++ b/test/unit_test/run/test_acq_manager.py @@ -629,7 +629,7 @@ class TestAcquisitionManager(): def create_acqmodel(self, winsz, acq_rate=None): manager = AcquisitionManager() fname = os.path.join(self.tempfolder, 'testdata' +rand_id()+ '.hdf5') - manager.create_data_file(fname) + manager.load_data_file(fname, 'w-') if acq_rate is None: acq_rate = manager.calibration_genrate() manager.set(aochan=u"PCI-6259/ao0", aichan=u"PCI-6259/ai0",
fixed test for removed funciton
py
diff --git a/quantecon/tests/test_ces.py b/quantecon/tests/test_ces.py index <HASH>..<HASH> 100644 --- a/quantecon/tests/test_ces.py +++ b/quantecon/tests/test_ces.py @@ -9,6 +9,7 @@ import sympy as sp from ..ces import * + class CESTestSuite(unittest.TestCase): """Base class for ces.py module tests.""" @@ -257,6 +258,7 @@ class LeontiefCase(CESTestSuite): alpha, beta, sigma) testing.assert_almost_equal(expected_elasticity, actual_elasticity) + class GeneralCESCase(CESTestSuite): def setUp(self):
Added a couple of extra lines.
py
diff --git a/dvc/version.py b/dvc/version.py index <HASH>..<HASH> 100644 --- a/dvc/version.py +++ b/dvc/version.py @@ -6,7 +6,7 @@ import os import subprocess -_BASE_VERSION = "2.0.12" +_BASE_VERSION = "2.0.13" def _generate_version(base_version):
dvc: bump to <I>
py
diff --git a/buildbot/status/tinderbox.py b/buildbot/status/tinderbox.py index <HASH>..<HASH> 100644 --- a/buildbot/status/tinderbox.py +++ b/buildbot/status/tinderbox.py @@ -172,7 +172,7 @@ class TinderboxMailNotifier(mail.MailNotifier): text += "%s build: %s\n" % (t, self.columnName) elif isinstance(self.columnName, WithProperties): # interpolate the WithProperties instance, use that - text += "%s build: %s\n" % (t, self.columnName.render(build)) + text += "%s build: %s\n" % (t, build.getProperties().render(self.columnName)) else: raise Exception("columnName is an unhandled value") text += "%s errorparser: %s\n" % (t, self.errorparser)
axel-tinderbox-fix.patch Fix by Axel Hecht <l<I><EMAIL>> to a regression in tinderbox
py
diff --git a/OpenSSL/test/test_ssl.py b/OpenSSL/test/test_ssl.py index <HASH>..<HASH> 100644 --- a/OpenSSL/test/test_ssl.py +++ b/OpenSSL/test/test_ssl.py @@ -944,8 +944,9 @@ class ContextTests(TestCase, _LoopbackMixin): ``unicode`` instance and uses the certificates within for verification purposes. """ - cafile = self.mktemp() + NON_ASCII - self._load_verify_cafile(cafile) + self._load_verify_cafile( + self.mktemp().decode(getfilesystemencoding()) + NON_ASCII + ) def test_load_verify_invalid_file(self): @@ -995,7 +996,9 @@ class ContextTests(TestCase, _LoopbackMixin): ``unicode`` instance and uses the certificates within for verification purposes. """ - self._load_verify_directory_locations_capath(self.mktemp() + NON_ASCII) + self._load_verify_directory_locations_capath( + self.mktemp().decode(getfilesystemencoding()) + NON_ASCII + ) def test_load_verify_locations_wrong_args(self):
Fix some accidental bytes/unicode mixing on Python 3.
py
diff --git a/spyderlib/widgets/comboboxes.py b/spyderlib/widgets/comboboxes.py index <HASH>..<HASH> 100644 --- a/spyderlib/widgets/comboboxes.py +++ b/spyderlib/widgets/comboboxes.py @@ -58,7 +58,6 @@ class BaseComboBox(QComboBox): def add_current_text(self): """Add current text to combo box history (convenient method)""" self.add_text(self.currentText()) - self.selected() def keyPressEvent(self, event): """Handle key press events""" @@ -66,6 +65,7 @@ class BaseComboBox(QComboBox): valid = self.is_valid(self.currentText()) if valid or valid is None: self.add_current_text() + self.selected() else: QComboBox.keyPressEvent(self, event)
Find/replace: one item was skipped when pressing F3 since Enter key was added as a shortcut to go to next found item Update Issue <I> Status: Fixed
py
diff --git a/aiohttp/worker.py b/aiohttp/worker.py index <HASH>..<HASH> 100644 --- a/aiohttp/worker.py +++ b/aiohttp/worker.py @@ -55,11 +55,12 @@ class GunicornWebWorker(base.Worker): def make_handler(self, app): if hasattr(self.wsgi, 'make_handler'): + access_log = self.log.access_log if self.cfg.accesslog else None return app.make_handler( logger=self.log, slow_request_timeout=self.cfg.timeout, keepalive_timeout=self.cfg.keepalive, - access_log=self.log.access_log, + access_log=access_log, access_log_format=self._get_valid_log_format( self.cfg.access_log_format)) else:
disable access log if gunicron is not configured
py
diff --git a/openquake/calculators/event_based_risk.py b/openquake/calculators/event_based_risk.py index <HASH>..<HASH> 100644 --- a/openquake/calculators/event_based_risk.py +++ b/openquake/calculators/event_based_risk.py @@ -208,9 +208,10 @@ class EbrPostCalculator(base.RiskCalculator): # build rcurves-stats (sequentially) # this is a fundamental output, being used to compute loss_maps-stats if R > 1: - ss = scientific.SimpleStats(self.datastore['realizations'], - self.oqparam.quantile_loss_curves) - self.datastore['rcurves-stats'] = ss.compute(rcurves) + with self.monitor('computing rcurves-stats'): + ss = scientific.SimpleStats(self.datastore['realizations'], + self.oqparam.quantile_loss_curves) + self.datastore['rcurves-stats'] = ss.compute(rcurves) # build an aggregate loss curve per realization if 'agg_loss_table' in self.datastore:
Monitored rcurves-stats
py
diff --git a/dramatiq/broker.py b/dramatiq/broker.py index <HASH>..<HASH> 100644 --- a/dramatiq/broker.py +++ b/dramatiq/broker.py @@ -5,7 +5,8 @@ global_broker = None def get_broker(): - """Get the global broker instance. + """Get the global broker instance. If no global broker is set, + this initializes a RabbitmqBroker and returns that. """ global global_broker if global_broker is None: @@ -16,6 +17,9 @@ def get_broker(): def set_broker(broker): """Configure the global broker instance. + + Parameters: + broker(Broker) """ global global_broker global_broker = broker @@ -30,7 +34,7 @@ class Broker: """ def __init__(self, middleware=None): - self.logger = logging.getLogger("Broker") + self.logger = logging.getLogger(type(self).__name__) self.middleware = middleware or [] def _emit_before(self, signal, *args, **kwargs):
refactor: name broker logger after its class name
py
diff --git a/wallace/__init__.py b/wallace/__init__.py index <HASH>..<HASH> 100644 --- a/wallace/__init__.py +++ b/wallace/__init__.py @@ -1,4 +1,4 @@ -from . import models from .wallace import Wallace +from . import models, networks, processes, agents -__all__ = ['models', 'Wallace', 'networks', 'processes', 'agents'] +__all__ = ['Wallace', 'models', 'networks', 'processes', 'agents']
Import networks, processes, and agents to wallace
py
diff --git a/scapy.py b/scapy.py index <HASH>..<HASH> 100755 --- a/scapy.py +++ b/scapy.py @@ -21,6 +21,9 @@ # # $Log: scapy.py,v $ +# Revision 0.9.17.29 2005/01/22 21:48:55 pbi +# - fixed need for warning() before it was declared +# # Revision 0.9.17.28 2005/01/22 21:47:11 pbi # - added ARPingResult to handle arping() results # - moved ARPing displaying logic to ARPing object @@ -530,7 +533,7 @@ from __future__ import generators -RCSID="$Id: scapy.py,v 0.9.17.28 2005/01/22 21:47:11 pbi Exp $" +RCSID="$Id: scapy.py,v 0.9.17.29 2005/01/22 21:48:55 pbi Exp $" VERSION = RCSID.split()[2]+"beta" @@ -707,7 +710,7 @@ except ImportError: try: from Crypto.Cipher import ARC4 except ImportError: - warning("Can't find Crypto python lib. Won't be able to decrypt WEP") + print "WARNING: Can't find Crypto python lib. Won't be able to decrypt WEP" # Workarround bug 643005 : https://sourceforge.net/tracker/?func=detail&atid=105470&aid=643005&group_id=5470
- fixed need for warning() before it was declared
py
diff --git a/mmtf/api/__init__.py b/mmtf/api/__init__.py index <HASH>..<HASH> 100644 --- a/mmtf/api/__init__.py +++ b/mmtf/api/__init__.py @@ -1 +1 @@ -from .default_api import parse,parse_gzip,fetch,MMTFDecoder,MMTFEncoder,TemplateEncoder,get_url \ No newline at end of file +from .default_api import parse,parse_gzip,fetch,MMTFDecoder,MMTFEncoder,TemplateEncoder,get_url,write_mmtf,pass_data_on \ No newline at end of file
Expose write_mmtf and pass_data_on to the API
py
diff --git a/test.py b/test.py index <HASH>..<HASH> 100644 --- a/test.py +++ b/test.py @@ -21,7 +21,7 @@ server.login(None, None, gsfId, authSubToken) # SEARCH -apps = server.search('termux', 34, None) +apps = server.search('telegram', 34, None) print('nb_result: 34') print('number of results: %d' % len(apps)) @@ -33,7 +33,7 @@ for a in apps: # DOWNLOAD docid = apps[0]['docId'] version = apps[0]['versionCode'] -print('\nTermux docid is: %s\n' % docid) +print('\nTelegram docid is: %s\n' % docid) print('\nAttempting to download %s\n' % docid) fl = server.download(docid, version, progress_bar=True) with open(docid + '.apk', 'wb') as f:
test.py: download telegram rather than termux since telegram is supported by far more devices than termux
py
diff --git a/docs/conf.py b/docs/conf.py index <HASH>..<HASH> 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -124,9 +124,9 @@ napoleon_include_special_with_doc = True napoleon_use_admonition_for_examples = False napoleon_use_admonition_for_notes = False napoleon_use_admonition_for_references = False -napoleon_use_ivar = True -napoleon_use_param = True -napoleon_use_rtype = True +napoleon_use_ivar = True # overwritten by patch below! +napoleon_use_param = False +napoleon_use_rtype = False # -- Extensions to the Napoleon GoogleDocstring class --------------------- @@ -155,8 +155,6 @@ GoogleDocstring._unpatched_parse = GoogleDocstring._parse GoogleDocstring._parse = patched_parse - - # -- Options for HTML output --------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for
Avoid :param: for Google-style docstrings By settings this options, a "Parameters" section in a Google-style docstrings is formatted exactly like other sections (e.g. "Attributes"). The ':param:' role is no longer used. This improves consistent formatting. Also, we now have more flexibility in specifying parameter types ('list of :obj:`Ket`'). However, we lose auto-linkification.
py
diff --git a/oauth2client/clientsecrets.py b/oauth2client/clientsecrets.py index <HASH>..<HASH> 100644 --- a/oauth2client/clientsecrets.py +++ b/oauth2client/clientsecrets.py @@ -69,8 +69,18 @@ class InvalidClientSecretsError(Error): def _validate_clientsecrets(obj): - if obj is None or len(obj) != 1: - raise InvalidClientSecretsError('Invalid file format.') + _INVALID_FILE_FORMAT_MSG = ( + 'Invalid file format. See ' + 'https://developers.google.com/api-client-library/' + 'python/guide/aaa_client_secrets') + + if obj is None: + raise InvalidClientSecretsError(_INVALID_FILE_FORMAT_MSG) + if len(obj) != 1: + raise InvalidClientSecretsError( + _INVALID_FILE_FORMAT_MSG + ' ' + 'Expected a JSON object with a single property for a "web" or ' + '"installed" application') client_type = tuple(obj)[0] if client_type not in VALID_CLIENT: raise InvalidClientSecretsError('Unknown client type: %s.' % (client_type,))
Modify invalid exception error to be less cryptic and give more context to the developer. This is to fix my personal debugging experience when trying to debug the fact that I had downloaded the wrong client secret JSON type (not web/installed).
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -3,8 +3,15 @@ import sys import subprocess from distutils.spawn import find_executable from setuptools import setup, find_packages +from basesetup import write_version_py + +VERSION = '1.0.0.dev0' +ISRELEASED = False +__version__ = VERSION def main(**kwargs): + write_version_py(VERSION, ISRELEASED, 'osprey/version.py') + classifiers = """\ Development Status :: 3 - Alpha Intended Audience :: Science/Research @@ -28,7 +35,7 @@ def main(**kwargs): platforms=["Windows", "Linux", "Mac OS-X", "Unix"], license='Apache Software License', download_url='https://pypi.python.org/pypi/osprey/', - version='TODO', + version=VERSION, packages=find_packages(), zip_safe=False, package_data={'osprey': ['data/*']},
Modify setup.py to use new version writing
py
diff --git a/pods/datasets.py b/pods/datasets.py index <HASH>..<HASH> 100644 --- a/pods/datasets.py +++ b/pods/datasets.py @@ -559,7 +559,7 @@ def pmlr(volumes='all', data_set='pmlr', refresh_data=False): file = entry['yaml'].split('/')[-1] proto, url = entry['yaml'].split('//') file = os.path.basename(url) - dir = os.path.dirname(url) + dir = '/'.join(url.split('/')[1:]) urln = proto + '//' + url.split('/')[0] data_resources[data_name_full]['files'].append([file]) data_resources[data_name_full]['dirs'].append([dir])
Fix bug from pmlr_volumes
py
diff --git a/python/src/wslink/backends/aiohttp/__init__.py b/python/src/wslink/backends/aiohttp/__init__.py index <HASH>..<HASH> 100644 --- a/python/src/wslink/backends/aiohttp/__init__.py +++ b/python/src/wslink/backends/aiohttp/__init__.py @@ -125,7 +125,9 @@ def create_webserver(server_config): static_routes = server_config["static"] routes = [] - for route, server_path in static_routes.items(): + # Ensure longer path are registered first + for route in sorted(static_routes.keys(), reverse=True): + server_path = static_routes[route] routes.append(aiohttp_web.static(_fix_path(route), server_path)) # Resolve / => /index.html
fix(static): fix routes order definition for static content
py
diff --git a/dvc/version.py b/dvc/version.py index <HASH>..<HASH> 100644 --- a/dvc/version.py +++ b/dvc/version.py @@ -6,7 +6,7 @@ import os import subprocess -_BASE_VERSION = "1.1.3" +_BASE_VERSION = "1.1.4" def _generate_version(base_version):
dvc: bump to <I>
py
diff --git a/fluids/drag.py b/fluids/drag.py index <HASH>..<HASH> 100644 --- a/fluids/drag.py +++ b/fluids/drag.py @@ -1217,7 +1217,8 @@ def integrate_drag_sphere(D, rhop, rho, mu, t, V=0, Method=None, This can be relatively slow as drag correlations can be complex. There are analytical solutions available for the Stokes law regime (Re < - 0.3). They were obtained from Wolfram Alpha. + 0.3). They were obtained from Wolfram Alpha. [1]_ was not used in the + derivation, but also describes the derivation fully. .. math:: V(t) = \frac{\exp(-at) (V_0 a + b(\exp(at) - 1))}{a} @@ -1234,6 +1235,12 @@ def integrate_drag_sphere(D, rhop, rho, mu, t, V=0, Method=None, >>> integrate_drag_sphere(D=0.001, rhop=2200., rho=1.2, mu=1.78E-5, t=0.5, ... V=30, distance=True) (9.686465044053476, 7.8294546436299175) + + References + ---------- + .. [1] Timmerman, Peter, and Jacobus P. van der Weele. "On the Rise and + Fall of a Ball with Linear or Quadratic Drag." American Journal of + Physics 67, no. 6 (June 1999): 538-46. https://doi.org/10.1119/1.19320. ''' if Method == 'Stokes': try:
Found a paper describing the derivation of the laminar particle drag coefficients which were previously added
py
diff --git a/src/diamond/collector.py b/src/diamond/collector.py index <HASH>..<HASH> 100644 --- a/src/diamond/collector.py +++ b/src/diamond/collector.py @@ -159,6 +159,8 @@ class Collector(object): else: self.config['enabled'] = False + self.collect_running = False + def get_default_config_help(self): """ Returns the help text for the configuration options for this collector @@ -357,17 +359,21 @@ class Collector(object): def _run(self): """ - Run the collector + Run the collector unless it's already running """ + if self.collect_running: + return # Log self.log.debug("Collecting data from: %s" % self.__class__.__name__) try: try: start_time = time.time() + self.collect_runnig = True # Collect Data self.collect() + self.collect_runnig = False end_time = time.time() if 'measure_collector_time' in self.config:
make sure per collector only one collect() at a time
py
diff --git a/models.py b/models.py index <HASH>..<HASH> 100644 --- a/models.py +++ b/models.py @@ -3,7 +3,7 @@ import hashlib from django.db import models from django.contrib.contenttypes.models import ContentType -from sumo.models import ModelBase +from sumo.models import ModelBase, LocaleField from sumo.urlresolvers import reverse from sumo.helpers import urlparams @@ -19,7 +19,7 @@ class EventWatch(ModelBase): # an instance. watch_id = models.IntegerField(db_index=True, null=True) event_type = models.CharField(max_length=20, db_index=True) - locale = models.CharField(default='', max_length=7, db_index=True) + locale = LocaleField(default='', db_index=True) email = models.EmailField(db_index=True) hash = models.CharField(max_length=40, null=True, db_index=True)
[bug <I>] Use LocaleField in all models where applicable.
py
diff --git a/pyperclip/__init__.py b/pyperclip/__init__.py index <HASH>..<HASH> 100644 --- a/pyperclip/__init__.py +++ b/pyperclip/__init__.py @@ -60,13 +60,6 @@ def determine_clipboard(): if HAS_DISPLAY: # Determine which command/module is installed, if any. try: - import gtk # check if gtk is installed - except ImportError: - pass - else: - return init_gtk_clipboard() - - try: import PyQt4 # check if PyQt4 is installed except ImportError: pass @@ -79,6 +72,13 @@ def determine_clipboard(): return init_xsel_clipboard() if _executable_exists("klipper") and _executable_exists("qdbus"): return init_klipper_clipboard() + try: # Moved to bottom since currently broken on Ubuntu + import gtk # check if gtk is installed + except ImportError: + pass + else: + return init_gtk_clipboard() + return init_no_clipboard()
Temporary fix for QT4 being broken on Ubuntu.
py
diff --git a/glue/segments.py b/glue/segments.py index <HASH>..<HASH> 100644 --- a/glue/segments.py +++ b/glue/segments.py @@ -1207,8 +1207,8 @@ class segmentlistdict(dict): # ============================================================================= # - -try: - from __segments import * -except ImportError: - pass +# Removed on highmass branch to work with existing segment tools +#try: +# from __segments import * +#except ImportError: +# pass
fixed segments.py on highmass branch to be compatible with LSCsegFind
py
diff --git a/remoto/connection.py b/remoto/connection.py index <HASH>..<HASH> 100644 --- a/remoto/connection.py +++ b/remoto/connection.py @@ -76,6 +76,7 @@ class Connection(object): def import_module(self, module): self.remote_module = ModuleExecute(self.gateway, module, self.logger) + return self.remote_module class ModuleExecute(object):
make import_module return the module as well
py
diff --git a/tests/test_build_ext.py b/tests/test_build_ext.py index <HASH>..<HASH> 100644 --- a/tests/test_build_ext.py +++ b/tests/test_build_ext.py @@ -233,7 +233,7 @@ class BuildExtTestCase(support.TempdirManager, def test_get_outputs(self): tmp_dir = self.mkdtemp() c_file = os.path.join(tmp_dir, 'foo.c') - self.write_file(c_file, '') + self.write_file(c_file, 'void initfoo() {};\n') ext = Extension('foo', [c_file]) dist = Distribution({'name': 'xx', 'ext_modules': [ext]})
Merged revisions <I> via svnmerge from svn+ssh://<EMAIL>/python/trunk ........ r<I> | tarek.ziade | <I>-<I>-<I> <I>:<I>:<I> <I> (Wed, <I> May <I>) | 1 line added an inifoo in the C file, to avoid a warning by the MSVC9 linker ........
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -8,7 +8,7 @@ setup( description="Django LDAP authentication backend", long_description="""This is a Django authentication backend that authenticates against an LDAP service. Configuration can be as simple as a single distinguished name template, but there are many rich configuration options for working with users, groups, and permissions. -This package requires Python 2.3, Django 1.0, and python-ldap. Documentation can be found at <a href="http://packages.python.org/django-auth-ldap/">http://packages.python.org/django-auth-ldap/</a>. +This package requires Python 2.3, Django 1.0, and python-ldap. Documentation can be found at http://packages.python.org/django-auth-ldap/. """, url="http://bitbucket.org/psagers/django-auth-ldap/", author="Peter Sagerson",
Fix url in distutils description.
py
diff --git a/moto/ec2/models.py b/moto/ec2/models.py index <HASH>..<HASH> 100644 --- a/moto/ec2/models.py +++ b/moto/ec2/models.py @@ -2819,7 +2819,7 @@ class Subnet(TaggedEC2Resource): self.vpc_id = vpc_id self.cidr_block = cidr_block self.cidr = ipaddress.IPv4Network(six.text_type(self.cidr_block), strict=False) - self.available_ips = ipaddress.IPv4Network(cidr_block).num_addresses - 5 + self.available_ip_addresses = str(ipaddress.IPv4Network(six.text_type(self.cidr_block)).num_addresses - 5) self._availability_zone = availability_zone self.default_for_az = default_for_az self.map_public_ip_on_launch = map_public_ip_on_launch
available_ip_addresses is now a string
py
diff --git a/iarm/arm_instructions/_meta.py b/iarm/arm_instructions/_meta.py index <HASH>..<HASH> 100644 --- a/iarm/arm_instructions/_meta.py +++ b/iarm/arm_instructions/_meta.py @@ -254,7 +254,7 @@ class _Meta(iarm.cpu.RegisterCpu): :return: """ Rx, other = self.get_parameters(regex_exp, parameters) - if other is not None and not other.strip(): + if other is not None and other.strip(): raise iarm.exceptions.ParsingError("Extra arguments found: {}".format(other)) return Rx @@ -268,7 +268,7 @@ class _Meta(iarm.cpu.RegisterCpu): :return: """ Rx, Ry, other = self.get_parameters(regex_exp, parameters) - if other is not None and not other.strip(): + if other is not None and other.strip(): raise iarm.exceptions.ParsingError("Extra arguments found: {}".format(other)) if Rx and Ry: return Rx, Ry @@ -287,7 +287,7 @@ class _Meta(iarm.cpu.RegisterCpu): :return: """ Rx, Ry, Rz, other = self.get_parameters(regex_exp, parameters) - if other is not None and not other.strip(): + if other is not None and other.strip(): raise iarm.exceptions.ParsingError("Extra arguments found: {}".format(other)) return Rx, Ry, Rz
Needed and, not and not. was trying to call strip on None
py
diff --git a/zipline/gens/tradesimulation.py b/zipline/gens/tradesimulation.py index <HASH>..<HASH> 100644 --- a/zipline/gens/tradesimulation.py +++ b/zipline/gens/tradesimulation.py @@ -123,7 +123,6 @@ class AlgorithmSimulator(object): # Save events to stream through blotter below. events.append(event) - # Update our portfolio. self.algo.set_portfolio( self.algo.perf_tracker.get_portfolio() @@ -153,7 +152,6 @@ class AlgorithmSimulator(object): self.algo.perf_tracker.process_event(event) - # The benchmark is our internal clock. When it # updates, we need to emit a performance message. if bm_updated:
STY: Remove extra lines between statements.
py
diff --git a/packages/aws-cdk/lib/init-templates/sample-app/python/%name.PythonModule%/%name.PythonModule%_stack.template.py b/packages/aws-cdk/lib/init-templates/sample-app/python/%name.PythonModule%/%name.PythonModule%_stack.template.py index <HASH>..<HASH> 100644 --- a/packages/aws-cdk/lib/init-templates/sample-app/python/%name.PythonModule%/%name.PythonModule%_stack.template.py +++ b/packages/aws-cdk/lib/init-templates/sample-app/python/%name.PythonModule%/%name.PythonModule%_stack.template.py @@ -6,6 +6,7 @@ from aws_cdk import ( core ) + class %name.PascalCased%Stack(core.Stack): def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
fix(cli): Python sample app template does not follow PEP8 (#<I>) Classes should be surrounded by 2 blank lines according to PEP8 related PR: <URL>
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -18,7 +18,7 @@ README = open(os.path.join(here, 'README.rst')).read() CHANGES = open(os.path.join(here, 'CHANGES.txt')).read() setup(name='kotti_contactform', - version= '0.1.0b3', + version= '0.1.0b4', description="Simple contact form for Kotti sites", long_description=README + '\n\n' + CHANGES, classifiers=[
Released <I>b4
py
diff --git a/examples/disassemble.py b/examples/disassemble.py index <HASH>..<HASH> 100644 --- a/examples/disassemble.py +++ b/examples/disassemble.py @@ -65,6 +65,7 @@ def main(class_path, classes): OperandTypes.BRANCH: f'J[{operand.value}]', OperandTypes.LITERAL: f'#[{operand.value}]', OperandTypes.LOCAL_INDEX: f'L[{operand.value}]', + OperandTypes.PADDING: 'P' }[operand.op_type]) print(' ' + ' '.join(line))
Add missing support for padding operands to the disassemble example.
py
diff --git a/glances/core/glances_processes.py b/glances/core/glances_processes.py index <HASH>..<HASH> 100644 --- a/glances/core/glances_processes.py +++ b/glances/core/glances_processes.py @@ -360,7 +360,7 @@ class GlancesProcesses(object): if procstat['cpu_percent'] == '' or procstat['memory_percent'] == '': # Do not display process if we cannot get the basic # cpu_percent or memory_percent stats - raise psutil.NoSuchProcess + return None # Process command line (cached with internal cache) try: @@ -540,10 +540,7 @@ class GlancesProcesses(object): procstat = proc.as_dict(attrs=['pid']) if mandatory_stats: - try: - procstat = self.__get_mandatory_stats(proc, procstat) - except psutil.NoSuchProcess: - return None + procstat = self.__get_mandatory_stats(proc, procstat) if standard_stats: procstat = self.__get_standard_stats(proc, procstat)
Correct raise issue on Mac OS X
py
diff --git a/properties/base.py b/properties/base.py index <HASH>..<HASH> 100644 --- a/properties/base.py +++ b/properties/base.py @@ -317,7 +317,7 @@ class Instance(basic.Property): return self.from_json(value) @staticmethod - def as_json(value): + def to_json(value): """Convert instance to JSON""" if isinstance(value, HasProperties): return value.serialize() @@ -472,7 +472,7 @@ class List(basic.Property): return [self.prop.deserialize(val) for val in value] @staticmethod - def as_json(value): + def to_json(value): """Return a copy of the list If the list contains HasProperties instances, they are serialized. @@ -621,7 +621,7 @@ class Union(basic.Property): return prop.serialize(value) except (ValueError, KeyError, TypeError): continue - return self.as_json(value) + return self.to_json(value) def deserialize(self, value): """Return a deserialized value @@ -642,7 +642,7 @@ class Union(basic.Property): return self.from_json(value) @staticmethod - def as_json(value): + def to_json(value): """Return value, serialized if value is a HasProperties instance""" if isinstance(value, HasProperties): return value.serialize()
More renaming as_json -> to_json
py
diff --git a/salt/engines/docker_events.py b/salt/engines/docker_events.py index <HASH>..<HASH> 100644 --- a/salt/engines/docker_events.py +++ b/salt/engines/docker_events.py @@ -74,8 +74,12 @@ def start(docker_url='unix://var/run/docker.sock', else: __salt__['event.send'](tag, msg) - client = docker.Client(base_url=docker_url, - timeout=timeout) + try: + # docker-py 2.0 renamed this client attribute + client = docker.APIClient(base_url=docker_url, timeout=timeout) + except AttributeError: + client = docker.Client(base_url=docker_url, timeout=timeout) + try: events = client.events() for event in events:
Allow docker_events engine to work with newer docker-py The Client attribute was renamed to APIClient in docker-py <I>
py
diff --git a/tests/pygobject/test_structs.py b/tests/pygobject/test_structs.py index <HASH>..<HASH> 100644 --- a/tests/pygobject/test_structs.py +++ b/tests/pygobject/test_structs.py @@ -29,7 +29,14 @@ class StructTest(unittest.TestCase): Gtk.main_iteration() window.destroy() - @unittest.skip("FIXME") + @unittest.skipUnless(is_gi, "FIXME") + def test_struct_out_simple(self): + button = Gtk.Button() + req = button.size_request() + self.assertTrue(req) + self.assertTrue(isinstance(req.height, int)) + + @unittest.skipUnless(is_gi, "FIXME") def test_struct_out(self): model = Gtk.ListStore(int) iter_ = model.insert_with_valuesv(0, [], [])
A failing test for callee allocated out structs
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -6,14 +6,25 @@ setup( url='https://github.com/Duroktar/YuleLog', packages=['yule_log'], package_data = {'yule_log': ['yule_log.ico']}, - license='MIT', + license='MIT License', author='Scott Doucet', author_email='duroktar@gmail.com', description='Terminal based X-Mas Yule Log Fireplace', install_requires=["asciimatics"], + keywords='christmas yule fireplace, + classifiers=[ + 'Development Status :: 3 - Alpha', + 'Intended Audience :: End Users/Desktop', + 'Topic :: Games/Entertainment', + 'License :: OSI Approved :: MIT License', + 'Programming Language :: Python :: 2', + 'Programming Language :: Python :: 2.7', + 'Programming Language :: Python :: 3', + 'Programming Language :: Python :: 3.5', + ], entry_points={ 'console_scripts': [ 'YuleLog = yule_log.__main__:main' ] - } + }, )
Added classifiers and keywords to setup.py
py
diff --git a/elasticmodels/indexes.py b/elasticmodels/indexes.py index <HASH>..<HASH> 100644 --- a/elasticmodels/indexes.py +++ b/elasticmodels/indexes.py @@ -368,7 +368,9 @@ class Index(metaclass=IndexBase): '_index': self.index, '_type': self.doc_type, '_id': model.pk, - '_source': self.prepare(model), + # we don't do all the work of preparing a model when we're deleting + # it + '_source': self.prepare(model) if action != "delete" else None, } for model in thing] # if running in the suspended_updates context, we just save the thing
Don't call self.prepare(model) when deleting
py
diff --git a/tools/pyboard.py b/tools/pyboard.py index <HASH>..<HASH> 100755 --- a/tools/pyboard.py +++ b/tools/pyboard.py @@ -170,7 +170,7 @@ class ProcessPtyToTerminal: import subprocess import re import serial - self.subp = subprocess.Popen(cmd.split(), bufsize=0, shell=True, preexec_fn=os.setsid, + self.subp = subprocess.Popen(cmd.split(), bufsize=0, shell=False, preexec_fn=os.setsid, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) pty_line = self.subp.stderr.readline().decode("utf-8") m = re.search(r"/dev/pts/[0-9]+", pty_line)
tools/pyboard: execpty: Use shell=False to workaround some curdir issues. Without this, Zephyr's port "make test" doesn't work.
py
diff --git a/cosmic_ray/importing.py b/cosmic_ray/importing.py index <HASH>..<HASH> 100644 --- a/cosmic_ray/importing.py +++ b/cosmic_ray/importing.py @@ -29,3 +29,6 @@ class Finder(MetaPathFinder, dict): ASTLoader(self[fullname], fullname)) except KeyError: pass + + def __repr__(self): + return '{}'.format(self.__class__)
Added better repr() to Finder.
py
diff --git a/data/pyinstaller_helper.py b/data/pyinstaller_helper.py index <HASH>..<HASH> 100644 --- a/data/pyinstaller_helper.py +++ b/data/pyinstaller_helper.py @@ -62,10 +62,10 @@ if __name__ == '__main__': p = plistlib.load(f) p["NSHighResolutionCapable"] = True p["NSRequiresAquaSystemAppearance"] = True + p["NSMicrophoneUsageDescription"] = "URH needs access to your microphone to capture signals via Soundcard." with open("pyinstaller/urh.app/Contents/Info.plist", "wb") as f: plistlib.dump(p, f) - else: for cmd in [urh_cmd, cli_cmd, urh_debug_cmd]: run_pyinstaller(cmd)
Fix microphone access for macOS DMG (#<I>)
py
diff --git a/blockstack/lib/nameset/virtualchain_hooks.py b/blockstack/lib/nameset/virtualchain_hooks.py index <HASH>..<HASH> 100644 --- a/blockstack/lib/nameset/virtualchain_hooks.py +++ b/blockstack/lib/nameset/virtualchain_hooks.py @@ -331,6 +331,7 @@ def get_db_state(disposition=DISPOSITION_RO): reload_lock.acquire() + ret = None mtime = None db_filename = virtualchain.get_db_filename() @@ -345,6 +346,9 @@ def get_db_state(disposition=DISPOSITION_RO): if disposition == DISPOSITION_RO: # cache blockstack_db = new_db + ret = blockstack_db + else: + ret = new_db last_check_time = time.time() if mtime is not None:
return the db on read/write
py
diff --git a/internetarchive/files.py b/internetarchive/files.py index <HASH>..<HASH> 100644 --- a/internetarchive/files.py +++ b/internetarchive/files.py @@ -275,7 +275,7 @@ class File(BaseFile): without sending the delete request. """ - cascade_delete = False if not cascade_delete else True + cascade_delete = '0' if not cascade_delete else '1' access_key = self.item.session.access_key if not access_key else access_key secret_key = self.item.session.secret_key if not secret_key else secret_key debug = False if not debug else debug @@ -291,7 +291,7 @@ class File(BaseFile): request = iarequest.S3Request( method='DELETE', url=url, - headers={'x-archive-cascade-delete': int(cascade_delete)}, + headers={'x-archive-cascade-delete': cascade_delete}, access_key=access_key, secret_key=secret_key )
Convert x-archive-cascade-delete header to string. As of version <I> of the requests library, all header values must be strings (i.e. not integers).
py
diff --git a/cairocffi/test_cairo.py b/cairocffi/test_cairo.py index <HASH>..<HASH> 100644 --- a/cairocffi/test_cairo.py +++ b/cairocffi/test_cairo.py @@ -255,12 +255,8 @@ def test_metadata(): assert b'/Creator (creator)' in pdf_bytes assert b'/Author (author)' in pdf_bytes assert b'/Keywords (keywords)' in pdf_bytes - # According to PDF 32000-1:2008, section 7.9.4 ("Dates") PDF date strings - # do not end with a apostrophe even though that format was described in - # the "PDF reference, Sixth Edition". - # See also: https://stackoverflow.com/q/41661477/138526 - # cairo 1.17.4 contains a commit which adds the apostrophe unconditionally: - # https://gitlab.freedesktop.org/cairo/cairo/-/issues/392#note_742384 + # cairo 1.17.4 adds an apostrophe at the end of dates: + # https://gitlab.freedesktop.org/cairo/cairo/-/issues/392 assert b"/CreationDate (20130721234600+01'00" in pdf_bytes assert b'/ModDate (20130721234600Z)' in pdf_bytes
Fix comment related to apostrophes at the end of dates
py
diff --git a/examples/multi_script_CLI.py b/examples/multi_script_CLI.py index <HASH>..<HASH> 100644 --- a/examples/multi_script_CLI.py +++ b/examples/multi_script_CLI.py @@ -284,12 +284,15 @@ def menu_follow(): 1.Insert hashtag 2.Use hashtag database """) + hashtags = [] if "1" in sys.stdin.readline(): - hashtag = input("what?\n").strip() + hashtags = input("Insert hashtags separated by spaces\nExample: cat dog\nwhat hashtags?\n").strip().split(' ') else: - hashtag = random.choice(bot.read_list_from_file(hashtag_file)) - users = bot.get_hashtag_users(hashtag) - bot.follow_users(users) + hashtags = bot.read_list_from_file(hashtag_file) + for hashtag in hashtags: + print("Begin following: " + hashtag) + users = bot.get_hashtag_users(hashtag) + bot.follow_users(users) menu_follow() elif ans == "2":
Change multi_script_CLI.py to allow you to follow multiple users like in likes.
py
diff --git a/gitlab/__init__.py b/gitlab/__init__.py index <HASH>..<HASH> 100644 --- a/gitlab/__init__.py +++ b/gitlab/__init__.py @@ -61,10 +61,10 @@ class Gitlab(object): email (str): The user email or login. password (str): The user password (associated with email). ssl_verify (bool): Whether SSL certificates should be validated. - timeout (float or tuple(float,float)): Timeout to use for requests to - the GitLab server. - http_username: (str): Username for HTTP authentication - http_password: (str): Password for HTTP authentication + timeout (float): Timeout to use for requests to the GitLab server. + http_username (str): Username for HTTP authentication + http_password (str): Password for HTTP authentication + Attributes: user_emails (UserEmailManager): Manager for GitLab users' emails. user_keys (UserKeyManager): Manager for GitLab users' SSH keys.
Fix docstring for http_{username,password}
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100755 --- a/setup.py +++ b/setup.py @@ -104,4 +104,9 @@ setup( ], tests_require=['tox', 'nose', 'scripttest'], cmdclass={'test': Tox}, + entry_points={ + 'console_scripts': [ + 'python-escpos = escpos.cli:main' + ] + }, )
SETUP register cli entry point in setuptools
py
diff --git a/beekeeper/data_handlers.py b/beekeeper/data_handlers.py index <HASH>..<HASH> 100644 --- a/beekeeper/data_handlers.py +++ b/beekeeper/data_handlers.py @@ -67,7 +67,7 @@ MIMETYPES = { } def code(action, data, mimetype, encoding='utf-8'): - if hasattr(data, 'read'): + if action == 'dump' and hasattr(data, 'read'): data = data.read() if action == 'dump' and isinstance(data, bytes): return getattr(Binary, action)(data, encoding)
Making file-like object handling more specific
py
diff --git a/pyemu/utils/os_utils.py b/pyemu/utils/os_utils.py index <HASH>..<HASH> 100644 --- a/pyemu/utils/os_utils.py +++ b/pyemu/utils/os_utils.py @@ -165,7 +165,8 @@ def start_workers( This option is usually not needed unless you are one of those crazy people who spreads files across countless subdirectories. local (`bool`, optional): flag for using "localhost" instead of actual hostname/IP address on - worker command line. Default is True + worker command line. Default is True. `local` can also be passed as an `str`, in which + case `local` is used as the hostname (for example `local="192.168.10.1"`) cleanup (`bool`, optional): flag to remove worker directories once processes exit. Default is True. Set to False for debugging issues master_dir (`str`): name of directory for master instance. If `master_dir`
first attempt at skipping unwanted rows in csv to ins
py
diff --git a/scot/varica.py b/scot/varica.py index <HASH>..<HASH> 100644 --- a/scot/varica.py +++ b/scot/varica.py @@ -5,7 +5,7 @@ import numpy as np from . import config -from .datatools import cat_trials, dot_special +from .datatools import cat_trials, dot_special, atleast_3d from . import xvschema @@ -71,7 +71,7 @@ def mvarica(x, var, cl=None, reducedim=0.99, optimize_var=False, backend=None, v .. [1] G. Gomez-Herrero et al. "Measuring directional coupling between EEG sources", NeuroImage, 2008 """ - x = np.atleast_3d(x) + x = atleast_3d(x) t, m, l = np.shape(x) if backend is None: @@ -198,7 +198,7 @@ def cspvarica(x, var, cl, reducedim=None, optimize_var=False, backend=None, varf .. [1] M. Billinger et al. "SCoT: A Python Toolbox for EEG Source Connectivity", Frontiers in Neuroinformatics, 2014 """ - x = np.atleast_3d(x) + x = atleast_3d(x) t, m, l = np.shape(x) if backend is None:
Data orientation: use SCoT's atleast_3d
py
diff --git a/stravalib/util/limiter.py b/stravalib/util/limiter.py index <HASH>..<HASH> 100644 --- a/stravalib/util/limiter.py +++ b/stravalib/util/limiter.py @@ -24,6 +24,15 @@ from datetime import datetime, timedelta from stravalib import exc + +def total_seconds(td): + """Alternative to datetime.timedelta.total_seconds + total_seconds() only available since Python 2.7 + https://docs.python.org/2/library/datetime.html#datetime.timedelta.total_seconds + """ + return (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10**6) / 10**6 + + class RateLimiter(object): def __init__(self): @@ -68,8 +77,10 @@ class RateLimitRule(object): raise exc.RateLimitExceeded("Rate limit exceeded (can try again in {0})".format(self.timeframe - delta)) else: # Wait the difference between timeframe and the oldest request. - self.log.debug("Rate limit triggered; sleeping for {0}".format(delta)) - time.sleep(self.timeframe - delta) + td = self.timeframe - delta + sleeptime = hasattr(td, 'total_seconds') and td.total_seconds() or total_seconds(td) + self.log.debug("Rate limit triggered; sleeping for {0}".format(sleeptime)) + time.sleep(sleeptime) self.tab.append(datetime.now()) class DefaultRateLimiter(RateLimiter):
time.sleep() expects a float Also handling case where python < <I> (datetime.timedelta.total_seconds is only available since Python >= <I>)
py
diff --git a/telethon/client/dialogs.py b/telethon/client/dialogs.py index <HASH>..<HASH> 100644 --- a/telethon/client/dialogs.py +++ b/telethon/client/dialogs.py @@ -370,7 +370,7 @@ class DialogMethods: await client.edit_folder(dialogs, [0, 1]) # Un-archiving all dialogs - await client.archive(unpack=1) + await client.edit_folder(unpack=1) """ if (entity is None) == (unpack is None): raise ValueError('You can only set either entities or unpack, not both')
Change outdated reference to archive with edit_folder (#<I>)
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -29,6 +29,7 @@ setup(name='latools', package_data={ 'latools': ['latools.cfg', 'resources/*', + 'resources/data_formats/*', 'resources/test_data/*'], }, zip_safe=False)
add resources/data_formats/* to package_data
py
diff --git a/geotiepoints/modisinterpolator.py b/geotiepoints/modisinterpolator.py index <HASH>..<HASH> 100644 --- a/geotiepoints/modisinterpolator.py +++ b/geotiepoints/modisinterpolator.py @@ -109,11 +109,11 @@ def _interpolate( (N rows per scan) and contain the entire scan width. """ - interp = _Interpolator(coarse_resolution, fine_resolution, coarse_scan_width=coarse_scan_width) + interp = _MODISInterpolator(coarse_resolution, fine_resolution, coarse_scan_width=coarse_scan_width) return interp.interpolate(lon1, lat1, satz1) -class _Interpolator: +class _MODISInterpolator: """Helper class for MODIS interpolation. Not intended for public use. Use ``modis_X_to_Y`` functions instead.
Rename _Interpolator to _MODISInterpolator
py
diff --git a/ryu/ofproto/ofproto_v1_0_parser.py b/ryu/ofproto/ofproto_v1_0_parser.py index <HASH>..<HASH> 100644 --- a/ryu/ofproto/ofproto_v1_0_parser.py +++ b/ryu/ofproto/ofproto_v1_0_parser.py @@ -53,7 +53,7 @@ def msg_parser(datapath, version, msg_type, msg_len, xid, buf): # OFP_MSG_REPLY = { # OFPFeaturesRequest: OFPSwitchFeatures, -# OFPBarrierRequest: OFPBarrierReplay, +# OFPBarrierRequest: OFPBarrierReply, # OFPQueueGetConfigRequest: OFPQueueGetConfigReply, # # # ofp_stats_request -> ofp_stats_reply
fix typo in ofproto_v1_0_parser comment
py
diff --git a/tests/test_middleware.py b/tests/test_middleware.py index <HASH>..<HASH> 100644 --- a/tests/test_middleware.py +++ b/tests/test_middleware.py @@ -116,7 +116,11 @@ class TestBasicAuthMiddleware(TestCase): self.assertEqual(result.status_code, basic_auth.basic_challenge().status_code) def test_passes_basic_authentication(self): - pass + request = self.DummyRequest('basic', 'user', 'pass') + result = self.middleware.process_request(request) + self.assertEqual(result, None) def test_falls_through_to_basic_challenge(self): - pass + request = self.DummyRequest('basic', 'other_user', 'other_pass') + result = self.middleware.process_request(request) + self.assertEqual(result.status_code, basic_auth.basic_challenge().status_code)
Fill in the rest of the BasicAuth tests.
py
diff --git a/timepiece/utils.py b/timepiece/utils.py index <HASH>..<HASH> 100644 --- a/timepiece/utils.py +++ b/timepiece/utils.py @@ -303,11 +303,11 @@ def date_filter(func): return inner_decorator -def get_hours(entries): +def get_hours(entries, key='billable'): hours = {'total': 0} for entry in entries: hours['total'] += entry['hours'] - if entry['billable']: + if entry[key]: hours['billable'] = entry['hours'] else: hours['non_billable'] = entry['hours']
[#<I>] Fixed broken get_hours from merge in master
py
diff --git a/graphene/utils/is_graphene_type.py b/graphene/utils/is_graphene_type.py index <HASH>..<HASH> 100644 --- a/graphene/utils/is_graphene_type.py +++ b/graphene/utils/is_graphene_type.py @@ -11,9 +11,7 @@ def is_graphene_type(_type): from ..relay.mutation import ClientIDMutation from ..relay.connection import Connection - if _type in [Interface, InputObjectType, ObjectType, Mutation, ClientIDMutation, Connection]: - return False - return inspect.isclass(_type) and issubclass(_type, ( + return inspect.isclass(_type) and hasattr(_type, '_meta') and issubclass(_type, ( Interface, ObjectType, InputObjectType,
Improved is_graphene_type
py
diff --git a/src/gquery.py b/src/gquery.py index <HASH>..<HASH> 100644 --- a/src/gquery.py +++ b/src/gquery.py @@ -311,9 +311,13 @@ def rewrite_query(query, parameters, get_args): glogger.debug(parameters) requireXSD = False - requiredParams = set(parameters.keys()) + required_params = {} + for k,v in parameters.iteritems(): + if parameters[k]['required']: + required_params[k] = v + requiredParams = set(required_params.keys()) providedParams = set(get_args.keys()) - glogger.debug("Parameters: {} Request args: {}".format(requiredParams, providedParams)) + glogger.debug("Required parameters: {} Request args: {}".format(requiredParams, providedParams)) assert requiredParams == providedParams, 'Provided parameters do not match with required parameters!' for pname, p in list(parameters.items()):
supplied params matching required params only
py
diff --git a/denonavr/denonavr.py b/denonavr/denonavr.py index <HASH>..<HASH> 100644 --- a/denonavr/denonavr.py +++ b/denonavr/denonavr.py @@ -19,7 +19,8 @@ import requests _LOGGER = logging.getLogger("DenonAVR") DEVICEINFO_AVR_X_PATTERN = re.compile( - r"(.*AVR-S.*|.*AVR-X.*|.*SR500[6-9]|.*SR60(07|08|09|10|11|12|13)|.*NR1604)") + r"(.*AVR-S.*|.*AVR-X.*|.*SR500[6-9]|.*SR60(07|08|09|10|11|12|13)| + .*NR1604)") DEVICEINFO_COMMAPI_PATTERN = re.compile(r"(0210|0300)") ReceiverType = namedtuple('ReceiverType', ["type", "port"])
Add Denon AVR-S series support Correct Line too long
py
diff --git a/tests/test_mixins.py b/tests/test_mixins.py index <HASH>..<HASH> 100644 --- a/tests/test_mixins.py +++ b/tests/test_mixins.py @@ -382,6 +382,6 @@ class TestLoggingMixin(APITestCase): self.assertEqual(log.response_ms, 0) def test_custom_log_handler(self): - self.client.get('/custom-log-handler') - self.client.post('/custom-log-handler') - self.assertEqual(APIRequestLog.objects.all().count(), 1) + self.client.get('/custom-log-handler') + self.client.post('/custom-log-handler') + self.assertEqual(APIRequestLog.objects.all().count(), 1)
fix indent for flake8
py
diff --git a/lib/websearchadminlib.py b/lib/websearchadminlib.py index <HASH>..<HASH> 100644 --- a/lib/websearchadminlib.py +++ b/lib/websearchadminlib.py @@ -3505,6 +3505,7 @@ def get_detailed_page_tabs_counts(recID): num_reviews = 0 #num of reviews tabs_counts = {'Citations' : 0, 'References' : -1, + 'Discussions' : 0, 'Comments' : 0, 'Reviews' : 0 } @@ -3527,7 +3528,9 @@ def get_detailed_page_tabs_counts(recID): num_reviews = get_nb_reviews(recID, count_deleted=False) if num_comments: tabs_counts['Comments'] = num_comments + tabs_counts['Discussions'] += num_comments if num_reviews: tabs_counts['Reviews'] = num_reviews + tabs_counts['Discussions'] += num_reviews return tabs_counts
WebSearch: discussions compatibility fix * Reverts counter of discussions for legacy app. (closes #<I>)
py
diff --git a/core/eolearn/core/utilities.py b/core/eolearn/core/utilities.py index <HASH>..<HASH> 100644 --- a/core/eolearn/core/utilities.py +++ b/core/eolearn/core/utilities.py @@ -19,8 +19,6 @@ import numpy as np import geopandas as gpd from geopandas.testing import assert_geodataframe_equal -from sentinelhub import CRS - from .constants import FeatureType LOGGER = logging.getLogger(__name__) @@ -502,7 +500,12 @@ def bgr_to_rgb(bgr): def to_gpd_crs(sh_crs): - """ + """ Transforms sentinelhub CRS object into geopandas CRS comprehension, which is differs between geopandas versions + + :param sh_crs: A sentinelhub CRS definition + :type sh_crs: sentinelhub.CRS + :return: A geopandas CRS object + :rtype: pyproj.CRS or dict """ if gpd.__version__ >= '0.7.0': return sh_crs.pyproj_crs()
added docstrings for to_gpd_crs
py
diff --git a/backtrader/feed.py b/backtrader/feed.py index <HASH>..<HASH> 100644 --- a/backtrader/feed.py +++ b/backtrader/feed.py @@ -70,7 +70,9 @@ class MetaAbstractDataBase(dataseries.OHLCDateTime.__class__): super(MetaAbstractDataBase, cls).dopostinit(_obj, *args, **kwargs) # Either set by subclass or the parameter or use the dataname (ticker) - _obj._name = _obj._name or _obj.p.name or _obj.p.dataname + _obj._name = _obj._name or _obj.p.name + if not _obj._name and isinstance(_obj.p.dataname, string_types): + _obj._name = _obj.p.dataname _obj._compression = _obj.p.compression _obj._timeframe = _obj.p.timeframe
Avoid assigning a non-string dataname to _name
py
diff --git a/montblanc/impl/common/loaders/loaders.py b/montblanc/impl/common/loaders/loaders.py index <HASH>..<HASH> 100644 --- a/montblanc/impl/common/loaders/loaders.py +++ b/montblanc/impl/common/loaders/loaders.py @@ -18,9 +18,11 @@ # You should have received a copy of the GNU General Public License # along with this program; if not, see <http://www.gnu.org/licenses/>. -import pyrap.tables as pt import os +import numpy as np +import pyrap.tables as pt + import montblanc import montblanc.util as mbu @@ -55,7 +57,7 @@ class MeasurementSetLoader(BaseLoader): # Determine the problem dimensions na = self.tables['ant'].nrows() nbl = mbu.nr_of_baselines(na, auto_correlations) - nchan = self.tables['freq'].getcol('CHAN_FREQ').size + nchan = np.asscalar(self.tables['freq'].getcol('NUM_CHAN')) ntime = self.tables['main'].nrows() // nbl return ntime, na, nchan
Query number of channels more efficiently. Instead of T.getcol('CHAN_FREQ'), T.getcol('NUM_CHAN') which is a scalar.
py
diff --git a/salt/cloud/clouds/vmware.py b/salt/cloud/clouds/vmware.py index <HASH>..<HASH> 100644 --- a/salt/cloud/clouds/vmware.py +++ b/salt/cloud/clouds/vmware.py @@ -2422,6 +2422,11 @@ def create(vm_): new_vm_ref = salt.utils.vmware.get_mor_by_property(_get_si(), vim.VirtualMachine, vm_name, container_ref=container_ref) + # Re-configure to make sure all info is correct, without this new network settings specifically + # ip settings and connect on startup may not apply + task = new_vm_ref.ReconfigVM_Task(spec=config_spec) + salt.utils.vmware.wait_for_task(task, vm_name, 'reconfig', 5, 'info') + # Find how to power on in CreateVM_Task (if possible), for now this will do if not clone_type and power: task = new_vm_ref.PowerOn()
Fix a bug when creating a new VM and changing the network info - This commit fixes a small bug where the network adapter won't be connected at startup of the VM and therefore can never be contacted by the salt master, the fix is to just simply apply the same config changes once the clone is completed
py
diff --git a/tools/pyinstaller_hooks/hook-raiden.py b/tools/pyinstaller_hooks/hook-raiden.py index <HASH>..<HASH> 100644 --- a/tools/pyinstaller_hooks/hook-raiden.py +++ b/tools/pyinstaller_hooks/hook-raiden.py @@ -17,10 +17,16 @@ def copy_metadata(package_name): # Add metadata of all required packages to allow pkg_resources.require() to work required_packages = [("raiden", [])] +processed_packages = set() # break out of circular dependencies while required_packages: req_name, req_extras = required_packages.pop() for req in pkg_resources.get_distribution(req_name).requires(req_extras): - required_packages.append((req.project_name, list(req.extras))) + dep_tuple = (req.project_name, tuple(req.extras)) + if dep_tuple in processed_packages: + continue + + required_packages.append(dep_tuple) + processed_packages.add(dep_tuple) try: datas.extend(copy_metadata(req_name)) except AssertionError:
Avoid circular dependencies in pyinstaller hook eth-hash introduced circular deps into raiden between versions <I> and <I>. This threw our pyinstaller hook-raiden.py into an infinite loop. Fixes <URL>
py
diff --git a/rest_api/api.py b/rest_api/api.py index <HASH>..<HASH> 100644 --- a/rest_api/api.py +++ b/rest_api/api.py @@ -355,6 +355,24 @@ def get_ccle_cna(): return res +@route('/databases/cbio/get_mutations_ccle', method=['POST', 'OPTIONS']) +@allow_cors +def get_mutations_ccle(): + """Get CCLE mutations + returns the amino acid changes for a given list of genes and cell lines + """ + if request.method == 'OPTIONS': + return {} + response = request.body.read().decode('utf-8') + body = json.loads(response) + gene_list = body.get('gene_list') + cell_lines = body.get('cell_lines') + mutations = cbio_client.get_mutations_ccle(gene_list, cell_lines) + mutations_str = json.dumps(mutations) + res = {'mutations': mutations_str} + return res + + @route('/preassembly/map_grounding', method=['POST', 'OPTIONS']) @allow_cors def map_grounding():
get_mutations_ccle function from cbio_client exposed in API - mutations can be queried in the API using the function with the same format as in the cbio_client
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100755 --- a/setup.py +++ b/setup.py @@ -44,5 +44,13 @@ setup(name='scales', 'nose', ], test_suite = 'nose.collector', - zip_safe = True + zip_safe = True, + classifiers=[ + 'Programming Language :: Python', + 'Programming Language :: Python :: 2', + 'Programming Language :: Python :: 2.6', + 'Programming Language :: Python :: 2.7', + 'Programming Language :: Python :: 3', + 'Programming Language :: Python :: 3.3', + ], )
Document Python versions supported in setup.py
py
diff --git a/tests/integration/test_install_uri.py b/tests/integration/test_install_uri.py index <HASH>..<HASH> 100644 --- a/tests/integration/test_install_uri.py +++ b/tests/integration/test_install_uri.py @@ -189,9 +189,9 @@ six = "*" def test_install_local_vcs_not_in_lockfile(PipenvInstance, pip_src_dir): with PipenvInstance(chdir=True) as p: # six_path = os.path.join(p.path, "six") - six_path = p._pipfile.get_fixture_path("git/six").as_posix() - requests_uri = p._pipfile.get_fixture_path("git/requests").as_uri() - c = p.pipenv("install -e {0}".format(six_path)) + six_path = p._pipfile.get_fixture_path("git/six/").as_posix() + c = delegator.run("git clone {0} ./six".format(six_path)) + c = p.pipenv("install -e ./six".format(six_path)) assert c.return_code == 0 six_key = list(p.pipfile["packages"].keys())[0] # we don't need the rest of the test anymore, this just works on its own
clone a fresh copy of six before running tests
py
diff --git a/python/vals2colors.py b/python/vals2colors.py index <HASH>..<HASH> 100644 --- a/python/vals2colors.py +++ b/python/vals2colors.py @@ -6,6 +6,7 @@ def vals2colors(vals,cmap='husl',res=100): Args: values (list or list of lists) - list of values to map to colors cmap (str) - color map (default is 'husl') + res (int) - resolution of the color map (default: 100) Returns: list of rgb tuples """
added arg description for res
py
diff --git a/abilian/app.py b/abilian/app.py index <HASH>..<HASH> 100644 --- a/abilian/app.py +++ b/abilian/app.py @@ -88,7 +88,6 @@ class ServiceManager(object): class PluginManager(object): """Mixin that provides support for loading plugins.""" - @deprecated def register_plugin(self, name): """Load and register a plugin given its package name.""" logger.info("Registering plugin: " + name)
remove deprecation marker for now.
py
diff --git a/jp_proxy_widget/proxy_widget.py b/jp_proxy_widget/proxy_widget.py index <HASH>..<HASH> 100644 --- a/jp_proxy_widget/proxy_widget.py +++ b/jp_proxy_widget/proxy_widget.py @@ -146,8 +146,8 @@ class JSProxyWidget(widgets.DOMWidget): _model_name = Unicode('JSProxyModel').tag(sync=True) _view_module = Unicode('jp_proxy_widget').tag(sync=True) _model_module = Unicode('jp_proxy_widget').tag(sync=True) - _view_module_version = Unicode('^1.0.0').tag(sync=True) - _model_module_version = Unicode('^1.0.0').tag(sync=True) + _view_module_version = Unicode('^1.0.1').tag(sync=True) + _model_module_version = Unicode('^1.0.1').tag(sync=True) # traitlet port to use for sending commands to javascript #commands = traitlets.List([], sync=True)
sync version numbers in py to js
py
diff --git a/bcloud/net.py b/bcloud/net.py index <HASH>..<HASH> 100644 --- a/bcloud/net.py +++ b/bcloud/net.py @@ -17,6 +17,7 @@ sys.path.insert(0, os.path.dirname(__file__)) import const RETRIES = 3 +TIMEOUT = 30 default_headers = { 'User-agent': const.USER_AGENT, @@ -73,7 +74,7 @@ def urlopen(url, headers={}, data=None, retries=RETRIES): for _ in range(retries): try: - req = opener.open(url, data=data) + req = opener.open(url, data=data, timeout=TIMEOUT) encoding = req.headers.get('Content-encoding') req.data = req.read() if encoding == 'gzip':
set timeout to <I>s
py
diff --git a/pyowm/constants.py b/pyowm/constants.py index <HASH>..<HASH> 100644 --- a/pyowm/constants.py +++ b/pyowm/constants.py @@ -2,6 +2,6 @@ Constants for the PyOWM library """ -PYOWM_VERSION = '2.3.1' +PYOWM_VERSION = '2.3.2' LATEST_OWM_API_VERSION = '2.5' DEFAULT_API_KEY = 'b1b15e88fa797225412429c1c50c122a'
Bump to version <I>
py
diff --git a/scripts/dccsend.py b/scripts/dccsend.py index <HASH>..<HASH> 100644 --- a/scripts/dccsend.py +++ b/scripts/dccsend.py @@ -21,7 +21,7 @@ class DCCSend(irc.client.SimpleIRCClient): self.receiver = receiver self.filename = filename self.filesize = os.path.getsize(self.filename) - self.file = open(filename) + self.file = open(filename, 'rb') self.sent_bytes = 0 def on_welcome(self, connection, event):
DCC Send should open files in binary mode
py
diff --git a/sos/monitor.py b/sos/monitor.py index <HASH>..<HASH> 100644 --- a/sos/monitor.py +++ b/sos/monitor.py @@ -25,7 +25,7 @@ import threading import time from datetime import datetime import stat -from .utils import env +from .utils import env, expand_time, format_HHMMSS class ProcessMonitor(threading.Thread): def __init__(self, task_id, monitor_interval, resource_monitor_interval, max_walltime=None, max_mem=None, max_procs=None):
Fix importing expand_time in sos.monitor
py
diff --git a/spyder/plugins/__init__.py b/spyder/plugins/__init__.py index <HASH>..<HASH> 100644 --- a/spyder/plugins/__init__.py +++ b/spyder/plugins/__init__.py @@ -278,6 +278,8 @@ class SpyderPluginMixin(object): def initialize_plugin(self): """Initialize plugin: connect signals, setup actions, ...""" + self.create_toggle_view_action() + self.plugin_actions = self.get_plugin_actions() if self.show_message is not None: self.show_message.connect(self.__show_message) if self.update_plugin_title is not None: @@ -285,8 +287,6 @@ class SpyderPluginMixin(object): if self.sig_option_changed is not None: self.sig_option_changed.connect(self.set_option) self.setWindowTitle(self.get_plugin_title()) - self.create_toggle_view_action() - self.plugin_actions = self.get_plugin_actions() def on_first_registration(self): """Action to be performed on first plugin registration"""
Plugins: Leave previous order in which plugin actions are created
py
diff --git a/molo/core/tasks.py b/molo/core/tasks.py index <HASH>..<HASH> 100644 --- a/molo/core/tasks.py +++ b/molo/core/tasks.py @@ -15,7 +15,6 @@ from django.core import management from django.contrib.auth.models import User from django.db.models import Q from django.shortcuts import get_object_or_404 -from django.utils.translation import ugettext_lazy as _ from molo.core.utils import ( create_new_article_relations, copy_translation_pages) @@ -308,10 +307,14 @@ def copy_to_all_task(page_id, user_id, site_pk): copy_translation_pages(page, new_page) create_new_article_relations(page, new_page) revision = new_page.save_revision() - if page.status_string == _('scheduled') and \ - new_page.status_string == _('draft') and \ - new_page.go_live_at is not None: - revision.publish() + # If the original page is scheduled + if not page.live and not page.expired and \ + page.approved_schedule: + # If the new page is in draft + if not new_page.live and not new_page.expired and \ + not new_page.approved_schedule: + if new_page.go_live_at is not None: + revision.publish() else: errors.append(str( page.title + ' already exists in ' + main.title))
Remove reliance on translations when checking to publish the page
py
diff --git a/mike/app_version.py b/mike/app_version.py index <HASH>..<HASH> 100644 --- a/mike/app_version.py +++ b/mike/app_version.py @@ -1 +1 @@ -version = '0.4.0' +version = '0.5.0.dev0'
Update version to <I>.dev0
py
diff --git a/pinax/teams/views.py b/pinax/teams/views.py index <HASH>..<HASH> 100644 --- a/pinax/teams/views.py +++ b/pinax/teams/views.py @@ -118,15 +118,18 @@ def team_manage(request): def team_join(request): team = request.team state = team.state_for(request.user) + if team.manager_access == Team.MEMBER_ACCESS_INVITATION and \ state is None and not request.user.is_staff: raise Http404() if team.can_join(request.user) and request.method == "POST": membership, created = Membership.objects.get_or_create(team=team, user=request.user) - membership.state = Membership.STATE_MEMBER + membership.role = Membership.ROLE_MEMBER + membership.state = Membership.STATE_AUTO_JOINED membership.save() messages.success(request, "Joined team.") + return redirect("team_detail", slug=team.slug)
fixed membership role/state setting in team_join
py
diff --git a/tensorflow_probability/python/bijectors/weibull_cdf_test.py b/tensorflow_probability/python/bijectors/weibull_cdf_test.py index <HASH>..<HASH> 100644 --- a/tensorflow_probability/python/bijectors/weibull_cdf_test.py +++ b/tensorflow_probability/python/bijectors/weibull_cdf_test.py @@ -40,7 +40,7 @@ class WeibullCDFBijectorTest(test_util.TestCase): self.assertStartsWith(bijector.name, 'weibull') x = np.array([[[0.], [1.], [14.], [20.], [100.]]], dtype=np.float32) # Weibull distribution - weibull_dist = stats.frechet_r(c=concentration, scale=scale) + weibull_dist = stats.weibull_min(c=concentration, scale=scale) y = weibull_dist.cdf(x).astype(np.float32) self.assertAllClose(y, self.evaluate(bijector.forward(x))) self.assertAllClose(x, self.evaluate(bijector.inverse(y)))
frechet_r -> weibull_min The old API point has been deprecated for some time, and was removed in newer scipy releases. PiperOrigin-RevId: <I>
py
diff --git a/xtuml/model.py b/xtuml/model.py index <HASH>..<HASH> 100644 --- a/xtuml/model.py +++ b/xtuml/model.py @@ -576,12 +576,12 @@ def _defered_association_operation(inst, end, op): kind = inst.__class__.__name__ l = list() for ass in chain(*inst.__r__.values()): - if kind != ass.source.kind: + if kind != ass.target.kind: continue - elif len(set(end.ids) & set(ass.source.ids)) == 0: + if set(end.ids) & set(ass.target.ids) == 0: continue - nav = navigate_many(inst).nav(ass.target.kind, ass.id, ass.target.phrase) + nav = navigate_many(inst).nav(ass.source.kind, ass.id, ass.source.phrase) for from_inst in nav(): fn = partial(op, from_inst, inst, ass.id, ass.target.phrase) l.append(fn) @@ -686,3 +686,4 @@ def unrelate(from_inst, to_inst, rel_id, phrase=''): defered_unrelate() return updated +
model: fixed deferred association updates
py
diff --git a/proso_models/json_enrich.py b/proso_models/json_enrich.py index <HASH>..<HASH> 100644 --- a/proso_models/json_enrich.py +++ b/proso_models/json_enrich.py @@ -9,14 +9,15 @@ def prediction(request, json_list, nested): object_item_ids = map(lambda x: x['item_id'], json_list) user = get_user_id(request) time = get_time(request) - predictions = _predictive_model().predict_more_items( - _environment(request), - user, - object_item_ids, - time) + predictions = _predictive_model().predict_more_items(_environment(request), user, object_item_ids, time) for object_json, prediction in zip(json_list, predictions): object_json['prediction'] = float("{0:.2f}".format(prediction)) object_json['mastered'] = prediction >= models.MASTERY_TRESHOLD + if "new_user_predictions" in request.GET: + user = -1 + predictions = _predictive_model().predict_more_items(_environment(request), user, object_item_ids, time) + for object_json, prediction in zip(json_list, predictions): + object_json['new_user_prediction'] = float("{0:.2f}".format(prediction)) return json_list
proso_flashcards: add possibility to get prediction of new user - resolve #<I>
py
diff --git a/delphi/AnalysisGraph.py b/delphi/AnalysisGraph.py index <HASH>..<HASH> 100644 --- a/delphi/AnalysisGraph.py +++ b/delphi/AnalysisGraph.py @@ -39,7 +39,6 @@ class AnalysisGraph(nx.DiGraph): self.dateCreated = datetime.now() self.name: str = "Linear Dynamical System with Stochastic Transition Model" self.res: int = 100 - self.data = None # ========================================================================== # Constructors
Removing data attribute from AnalysisGraph class
py
diff --git a/docker/ssladapter/ssladapter.py b/docker/ssladapter/ssladapter.py index <HASH>..<HASH> 100644 --- a/docker/ssladapter/ssladapter.py +++ b/docker/ssladapter/ssladapter.py @@ -46,6 +46,19 @@ class SSLAdapter(HTTPAdapter): self.poolmanager = PoolManager(**kwargs) + def get_connection(self, *args, **kwargs): + """ + Ensure assert_hostname is set correctly on our pool + + We already take care of a normal poolmanager via init_poolmanager + + But we still need to take care of when there is a proxy poolmanager + """ + conn = super(SSLAdapter, self).get_connection(*args, **kwargs) + if conn.assert_hostname != self.assert_hostname: + conn.assert_hostname = self.assert_hostname + return conn + def can_override_ssl_version(self): urllib_ver = urllib3.__version__.split('-')[0] if urllib_ver is None:
Ensure assert_hostname is set on the pool connection If you have assert_hostname turned off and are using a proxy on your computer then assert_hostname wasn't being properly set on the pool connection and it was failing to connect to docker
py
diff --git a/pytds/tds.py b/pytds/tds.py index <HASH>..<HASH> 100644 --- a/pytds/tds.py +++ b/pytds/tds.py @@ -3798,8 +3798,13 @@ class _TdsSession(object): elif marker in (TDS_DONE_TOKEN, TDS_DONEPROC_TOKEN, TDS_DONEINPROC_TOKEN): self.process_end(marker) if self.done_flags & TDS_DONE_MORE_RESULTS: - continue - return False + if self.done_flags & TDS_DONE_COUNT: + return True + else: + # skip results without rowcount + continue + else: + return False else: self.process_token(marker)
find_result_or_done changed as per denisenkom suggestion now assigning select returns empty resultset
py
diff --git a/wikidataintegrator/wdi_helpers/publication.py b/wikidataintegrator/wdi_helpers/publication.py index <HASH>..<HASH> 100644 --- a/wikidataintegrator/wdi_helpers/publication.py +++ b/wikidataintegrator/wdi_helpers/publication.py @@ -297,6 +297,8 @@ class Publication: if self.source == 'arxiv': success = try_write(item, self.ids['arxiv'], PROPS["arxiv id"], login) + elif self.source == 'biorxiv': + success = try_write(item, self.ids['biorxiv'], PROPS["biorxiv id"], login) else: success = try_write(item, self.ids['doi'], PROPS["DOI"], login) return item.wd_item_id, self.warnings, success
Use biorxiv id as primary key
py
diff --git a/tensorflow_probability/examples/vae.py b/tensorflow_probability/examples/vae.py index <HASH>..<HASH> 100644 --- a/tensorflow_probability/examples/vae.py +++ b/tensorflow_probability/examples/vae.py @@ -34,7 +34,7 @@ Here we also compute tighter bounds, the IWAE [Burda et. al. (2015)][2]. These as well as image summaries can be seen in Tensorboard. For help using Tensorboard see -https://www.tensorflow.org/programmers_guide/summaries_and_tensorboard +https://www.tensorflow.org/guide/summaries_and_tensorboard which can be run with `python -m tensorboard.main --logdir=MODEL_DIR`
Rename programmers_guide/ directory to guide/ in tfp. PiperOrigin-RevId: <I>
py
diff --git a/certvalidator/validate.py b/certvalidator/validate.py index <HASH>..<HASH> 100644 --- a/certvalidator/validate.py +++ b/certvalidator/validate.py @@ -420,7 +420,7 @@ def _validate_path(validation_context, path, end_entity_name_override=None): _cert_type(index, last_index, end_entity_name_override, definite=True) )) - if revocation_check_failed: + if not status_good and revocation_check_failed: raise PathValidationError(pretty_message( ''' The path could not be validated because the %s revocation
Allow either CRL or OCSP checks to fail if the other is successful
py
diff --git a/bin/permutation_test.py b/bin/permutation_test.py index <HASH>..<HASH> 100755 --- a/bin/permutation_test.py +++ b/bin/permutation_test.py @@ -318,7 +318,7 @@ def multiprocess_permutation(bed_dict, mut_df, opts): """Handles parallelization of permutations by splitting work by chromosome. """ - chroms = sorted(bed_dict.keys()) + chroms = sorted(bed_dict.keys(), key=lambda x: len(bed_dict[x]), reverse=True) multiprocess_flag = opts['processes']>0 if multiprocess_flag: num_processes = opts['processes']
Do multi-processing of chromosomes in order of chromosomes with most genes to least. This is most likely to group large chromosomes together such that less time is wasted with waiting for other processes to finish.
py
diff --git a/mock/mock.py b/mock/mock.py index <HASH>..<HASH> 100644 --- a/mock/mock.py +++ b/mock/mock.py @@ -2544,7 +2544,7 @@ def mock_open(mock=None, read_data=''): default) then a `MagicMock` will be created for you, with the API limited to methods or attributes available on standard file handles. - `read_data` is a string for the `read` methoddline`, and `readlines` of the + `read_data` is a string for the `read`, `readline` and `readlines` of the file handle to return. This is an empty string by default. """ def _readlines_side_effect(*args, **kwargs):
Fix mock_open docstring to use readline (#<I>) Backports: <I>f<I>a2f<I>f5ec<I>c<I>bce<I>bd<I>bd
py
diff --git a/elifetools/parseJATS.py b/elifetools/parseJATS.py index <HASH>..<HASH> 100644 --- a/elifetools/parseJATS.py +++ b/elifetools/parseJATS.py @@ -1641,9 +1641,9 @@ def body_block_content(tag): # Remove unwanted nested tags unwanted_tag_names = ["table-wrap", "disp-formula", "fig-group", "fig"] tag_copy = duplicate_tag(tag) - tag = remove_tag_from_tag(tag, unwanted_tag_names) + tag_copy = remove_tag_from_tag(tag_copy, unwanted_tag_names) - tag_content["text"] = node_contents_str(tag) + tag_content["text"] = node_contents_str(tag_copy) elif tag.name == "table-wrap": tag_content["type"] = "table"
Tweak to previous commit, rendered the wrong tag.
py
diff --git a/openupgradelib/openupgrade.py b/openupgradelib/openupgrade.py index <HASH>..<HASH> 100644 --- a/openupgradelib/openupgrade.py +++ b/openupgradelib/openupgrade.py @@ -488,7 +488,9 @@ def migrate(no_version=False): """ This is the decorator for the migrate() function in migration scripts. - Return when the 'version' argument is not defined, + Set argument 'no_version' to True if the method as to be taken into account + if the module is installed during a migration. + Return when the 'version' argument is not defined and no_version is False, and log execeptions. Retrieve debug context data from the frame above for logging purposes.
[ADD] explanations of the modifications in the code
py