Columns: diff — string, lengths 139 to 3.65k; message — string, lengths 8 to 627; diff_languages — string, 1 distinct value (py).
diff --git a/eli5/keras/gradcam.py b/eli5/keras/gradcam.py index <HASH>..<HASH> 100644 --- a/eli5/keras/gradcam.py +++ b/eli5/keras/gradcam.py @@ -164,7 +164,7 @@ def _calc_gradient(ys, xs): def _get_target_prediction(targets, estimator): - # type: (Union[None, list], Model) -> K.variable + # type: (Optional[list], Model) -> K.variable """ Get a prediction ID based on ``targets``, from the model ``estimator`` (with a rank 2 tensor for its final layer).
(mypy) Replace Union[None,...] with Optional[...] for function params
py
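For context, a minimal sketch of the convention the diff above adopts: in mypy, `Optional[list]` is the idiomatic spelling of "list or None". The function below is hypothetical, not part of eli5:

```python
from typing import Optional

def get_target_prediction(targets: Optional[list] = None) -> int:
    # Optional[list] is exactly Union[list, None]; mypy treats the two
    # identically, but Optional states the "may be None" intent directly.
    if targets is None:
        return 0  # hypothetical default: fall back to the first class
    return targets[0]

assert get_target_prediction() == 0
assert get_target_prediction([3]) == 3
```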
diff --git a/addok/config/__init__.py b/addok/config/__init__.py index <HASH>..<HASH> 100644 --- a/addok/config/__init__.py +++ b/addok/config/__init__.py @@ -68,7 +68,8 @@ class Config(dict): def load_local(self): path = (os.environ.get('ADDOK_CONFIG_MODULE') or os.path.join('/etc', 'addok', 'addok.conf')) - if not path or not os.path.exists(path): + if not os.path.exists(path): + print('No local config file found in "{}".'.format(path)) return d = imp.new_module('config')
Warn when local config file does not exist
py
diff --git a/src/toil/provisioners/aws/awsProvisioner.py b/src/toil/provisioners/aws/awsProvisioner.py index <HASH>..<HASH> 100644 --- a/src/toil/provisioners/aws/awsProvisioner.py +++ b/src/toil/provisioners/aws/awsProvisioner.py @@ -594,6 +594,10 @@ class AWSProvisioner(AbstractProvisioner): # determine number of ephemeral drives via cgcloud-lib bdtKeys = ['', '/dev/xvdb', '/dev/xvdc', '/dev/xvdd'] bdm = BlockDeviceMapping() + # Change root volume size to allow for bigger Docker instances + root_vol = BlockDeviceType() + root_vol.size = 50 + bdm["/dev/xvda"] = root_vol # the first disk is already attached for us so start with 2nd. for disk in xrange(1, instanceType.disks + 1): bdm[bdtKeys[disk]] = BlockDeviceType(
AWS: increase default size of root volume. CoreOS root volumes are 8 GB, which limits Docker and data sizes on machines. This increases them to a larger default size of <I> GB
py
diff --git a/examples/patent_example/patent_example.py b/examples/patent_example/patent_example.py index <HASH>..<HASH> 100644 --- a/examples/patent_example/patent_example.py +++ b/examples/patent_example/patent_example.py @@ -180,8 +180,10 @@ print 'Learned blocking weights in', time_block_weights - time_start, 'seconds' deduper.writeSettings(settings_file) ## Generate the tfidf canopy as needed - +print 'generating tfidf index' +full_data = ((k, data_d[k]) for k in data_d) blocker.tfIdfBlocks(full_data) +del full_data # Load all the original data in to memory and place # them in to blocks. Each record can be blocked in many ways, so for @@ -211,7 +213,7 @@ threshold_data = tuple(threshold_data) print 'Computing threshold' threshold = deduper.goodThreshold(threshold_data, recall_weight=1) - +del threshold_data # `duplicateClusters` will return sets of record IDs that dedupe # believes are all referring to the same entity.
Added routine to generate full data for tfidf canopy computation
py
diff --git a/pyemma/msm/__init__.py b/pyemma/msm/__init__.py index <HASH>..<HASH> 100644 --- a/pyemma/msm/__init__.py +++ b/pyemma/msm/__init__.py @@ -153,23 +153,18 @@ class _RedirectMSMToolsImport(object): _sys.modules[name] = module return module - +""" _sys.meta_path.append(_RedirectMSMToolsImport('pyemma.msm.analysis', 'pyemma.msm.estimation', 'pyemma.msm.generation', 'pyemma.msm.dtraj', 'pyemma.msm.io', 'pyemma.msm.flux')) - +""" # backward compatibility to PyEMMA 1.2.x -from . import analysis -from . import estimation -from . import generation -from . import dtraj -# backward compatibility +from msmtools import analysis, estimation, generation, dtraj, flux +from msmtools.flux import ReactiveFlux io = dtraj -from . import flux -from .flux import ReactiveFlux ##################################################### # Estimators and models
[msm] disabled import hook (nose succeeds)
py
diff --git a/mtcnn/mtcnn.py b/mtcnn/mtcnn.py index <HASH>..<HASH> 100644 --- a/mtcnn/mtcnn.py +++ b/mtcnn/mtcnn.py @@ -192,7 +192,7 @@ class MTCNN(object): with self.__graph.as_default(): self.__session = tf.Session(config=config, graph=self.__graph) - weights = np.load(weights_file).item() + weights = np.load(weights_file, allow_pickle=True).item() self.__pnet = PNet(self.__session, False) self.__pnet.set_weights(weights['PNet'])
Update mtcnn.py. Newer versions of numpy require explicitly setting allow_pickle to True
py
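A small, self-contained reproduction of the numpy behavior that fix addresses (the file name here is made up): from numpy 1.16.3 on, `np.load` defaults to `allow_pickle=False`, so loading an object array such as a saved weights dict raises unless the flag is passed explicitly.

```python
import numpy as np

weights = {"PNet": [1.0, 2.0]}  # toy stand-in for a real weights dict
np.save("weights.npy", np.array(weights, dtype=object))

# Plain np.load("weights.npy") raises ValueError on numpy >= 1.16.3,
# because unpickling object arrays is disabled by default.
restored = np.load("weights.npy", allow_pickle=True).item()
assert restored["PNet"] == [1.0, 2.0]
```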
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -24,9 +24,9 @@ setup( "This is a {{variable}} in a {{string}}" from coima import Template - Template({'variable':'word', 'string':'sentence'}, 'file.txt') + t = Template({'variable':'word', 'string':'sentence'}, 'file.txt') - Template.render() + t.render() "This is a word in a sentence"
Made changes to the API, so reflecting them in the small doc
py
diff --git a/cwltool/command_line_tool.py b/cwltool/command_line_tool.py index <HASH>..<HASH> 100644 --- a/cwltool/command_line_tool.py +++ b/cwltool/command_line_tool.py @@ -1214,7 +1214,7 @@ class CommandLineTool(Process): rfile = files.copy() revmap(rfile) if files["class"] == "Directory": - ll = schema.get("loadListing") or builder.loadListing + ll = binding.get("loadListing") or builder.loadListing if ll and ll != "no_listing": get_listing(fs_access, files, (ll == "deep_listing")) else:
loadListing is on outputBinding, not schema.
py
diff --git a/Lib/glyphsLib/builder/sources.py b/Lib/glyphsLib/builder/sources.py index <HASH>..<HASH> 100644 --- a/Lib/glyphsLib/builder/sources.py +++ b/Lib/glyphsLib/builder/sources.py @@ -60,10 +60,10 @@ def _to_designspace_source(self, master, is_regular): # Make sure UFO filenames are unique, lest we overwrite masters that # happen to have the same weight name. n = "_" - while any(s != source and s.filename == source.filename + while any(s is not source and s.filename == source.filename for s in self._sources.values()): source.filename = os.path.basename( - build_ufo_path('.', source.familyName, source.styleName + n)) + build_ufo_path('', source.familyName, source.styleName + n)) n += "_" logger.warn("The master with id {} has the same style name ({}) " "as another one. All masters should have distinctive "
Check source objects for identity, not equality
py
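The distinction that commit relies on, reduced to a toy example: `!=` compares by value, while `is not` compares object identity, which is what you want when skipping "self" during a scan for filename clashes. The `Source` class here is illustrative, not glyphsLib's:

```python
class Source:
    def __init__(self, filename):
        self.filename = filename
    def __eq__(self, other):
        return self.filename == other.filename  # value equality

a = Source("Regular.ufo")
b = Source("Regular.ufo")  # a distinct object that compares equal

# Equality-based check misses the clash: b != a is False, so b is skipped.
assert not any(s != a and s.filename == a.filename for s in [a, b])
# Identity-based check finds it: b is a different object with the same name.
assert any(s is not a and s.filename == a.filename for s in [a, b])
```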
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -55,6 +55,7 @@ setup( install_requires=install_requires, license='LGPL', use_2to3=True, + keywords='django javascript test url reverse helpers', classifiers=[ "Framework :: Django", "Development Status :: 4 - Beta",
Added keywords to setup.py
py
diff --git a/photutils/conftest.py b/photutils/conftest.py index <HASH>..<HASH> 100644 --- a/photutils/conftest.py +++ b/photutils/conftest.py @@ -11,7 +11,7 @@ else: # automatically made available when Astropy is installed. This means it's # not necessary to import them here, but we still need to import global # variables that are used for configuration. - from astropy.tests.plugins.display import PYTEST_HEADER_MODULES, TESTED_VERSIONS + from astropy.tests.plugins.display import pytest_report_header, PYTEST_HEADER_MODULES, TESTED_VERSIONS from astropy.tests.helper import enable_deprecations_as_exceptions
Adding pytest header report
py
diff --git a/contract_sale_generation/tests/common.py b/contract_sale_generation/tests/common.py index <HASH>..<HASH> 100644 --- a/contract_sale_generation/tests/common.py +++ b/contract_sale_generation/tests/common.py @@ -79,11 +79,8 @@ class ContractSaleCommon: } ) cls.line_vals = { - # "contract_id": cls.contract.id, - # "product_id": cls.product_1.id, "name": "Services from #START# to #END#", "quantity": 1, - # "uom_id": cls.product_1.uom_id.id, "price_unit": 100, "discount": 50, "recurring_rule_type": "monthly",
[<I>][IMP] contract_sale_generation: Remove unused code
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -20,7 +20,7 @@ setup( install_requires=[ "deprecated==1.2.10", "cryptography>=2.2.1", - "protobuf>=3.1.0", + "protobuf==3.13.0", "requests>=2.11.1", "future>=0.11.0", "asn1==2.2.0",
bugfix: mismatch in protobuf dep between requirements.txt and setup.py. This is the source of a bug in which this package will break other systems by allowing the incompatible version 4.* of protobuf to be installed.
py
diff --git a/lib/svtplay_dl/service/__init__.py b/lib/svtplay_dl/service/__init__.py index <HASH>..<HASH> 100644 --- a/lib/svtplay_dl/service/__init__.py +++ b/lib/svtplay_dl/service/__init__.py @@ -54,12 +54,12 @@ class Service(object): def get_subtitle(self, options): pass - def exclude(self, options): - if options.exclude: - for i in options.exclude: + def exclude(self): + if self.options.exclude: + for i in self.options.exclude: if is_py2: i = i.decode("utf-8") - if i in options.output: + if i in self.options.output: return True return False
service: exclude should use options in the class
py
diff --git a/smartystreets/client.py b/smartystreets/client.py index <HASH>..<HASH> 100644 --- a/smartystreets/client.py +++ b/smartystreets/client.py @@ -103,6 +103,9 @@ class Client(object): self.logging = logging self.accept_keypair = accept_keypair self.truncate_addresses = truncate_addresses + self.session = requests.Session() + self.session.mount(self.BASE_URL, requests.adapters.HTTPAdapter(max_retries=5)) + def post(self, endpoint, data): """ @@ -124,7 +127,7 @@ class Client(object): params = {'auth-id': self.auth_id, 'auth-token': self.auth_token} url = self.BASE_URL + endpoint - response = requests.post(url, json.dumps(stringify(data)), params=params, headers=headers) + response = self.session.post(url, json.dumps(stringify(data)), params=params, headers=headers) if response.status_code == 200: return response.json() raise ERROR_CODES.get(response.status_code, SmartyStreetsError)
add support for keep-alive to improve performance. This is in line with the SmartyStreets best practices (<URL>)
py
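A standalone sketch of the pattern that commit introduces: a `requests.Session` keeps TCP connections alive across calls, and mounting an `HTTPAdapter` adds retries for a URL prefix. The base URL below is a placeholder, not the real SmartyStreets endpoint:

```python
import requests
from requests.adapters import HTTPAdapter

BASE_URL = "https://api.example.com/"  # placeholder endpoint

session = requests.Session()
# Connections to BASE_URL are pooled and reused (keep-alive), with up
# to 5 retries on connection errors before giving up.
session.mount(BASE_URL, HTTPAdapter(max_retries=5))

def post(endpoint, payload):
    # Each call reuses a pooled connection instead of a new handshake.
    return session.post(BASE_URL + endpoint, json=payload)
```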
diff --git a/sass_processor/__init__.py b/sass_processor/__init__.py index <HASH>..<HASH> 100644 --- a/sass_processor/__init__.py +++ b/sass_processor/__init__.py @@ -21,4 +21,4 @@ Release logic: 13. git push """ -__version__ = '0.4.4' +__version__ = '0.4.5'
Bump to version <I>
py
diff --git a/master/buildbot/status/web/console.py b/master/buildbot/status/web/console.py index <HASH>..<HASH> 100644 --- a/master/buildbot/status/web/console.py +++ b/master/buildbot/status/web/console.py @@ -376,6 +376,10 @@ class ConsoleStatusResource(HtmlResource): return slaves + def isCodebaseInBuild(self, build, codebase): + """Check if codebase is used in build""" + return any(ss.codebase == codebase for ss in build.sourceStamps) + def isRevisionInBuild(self, build, revision): """ Check if revision is in changes in build """ for ss in build.sourceStamps: @@ -416,9 +420,10 @@ class ConsoleStatusResource(HtmlResource): if introducedIn: firstNotIn = build break - elif self.isRevisionInBuild( build, revision ): - introducedIn = build - + elif self.isCodebaseInBuild(build, revision.codebase): + if self.isRevisionInBuild(build, revision): + introducedIn = build + # Get the results of the first build with the revision, and the # first build that does not include the revision. results = None
console: check codebase is in build before checking revisions
py
diff --git a/hex/utils/asc2hasc.py b/hex/utils/asc2hasc.py index <HASH>..<HASH> 100644 --- a/hex/utils/asc2hasc.py +++ b/hex/utils/asc2hasc.py @@ -71,6 +71,9 @@ for j in range(hexGrid.nrows): y = esriGrid.yll + j * 2 * hexPerp + (j % 2) * hexPerp hexGrid.set(i, j, esriGrid.getNearestNeighbour(x, y)) +# This must be evolved into HASC save +hexGrid.createOutputGML("new.gml") + print ("Done!")
Generating GML output in ASC2HASC for visual assessment.
py
diff --git a/instana/meter.py b/instana/meter.py index <HASH>..<HASH> 100644 --- a/instana/meter.py +++ b/instana/meter.py @@ -79,13 +79,17 @@ class Meter(object): sensor = None last_usage = None last_collect = None + timer = None def __init__(self, sensor): self.sensor = sensor self.tick() def tick(self): - t.Timer(1, self.process).start() + timer = t.Timer(1, self.process) + timer.daemon = True + timer.name = "Instana Metric Collection" + timer.start() def process(self): if self.sensor.agent.can_send(): @@ -99,7 +103,9 @@ class Meter(object): d = EntityData(pid=os.getpid(), snapshot=s, metrics=m) t.Thread(target=self.sensor.agent.request, - args=(self.sensor.agent.make_url(a.AGENT_DATA_URL), "POST", d)).start() + args=(self.sensor.agent.make_url(a.AGENT_DATA_URL), + "POST", d), + name="Metrics POST").start() self.tick()
Set timer name/daemon for debug; linter improvements
py
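In isolation, the threading detail that diff adjusts: naming a `threading.Timer` makes it identifiable in thread listings, and marking it a daemon lets the interpreter exit without waiting for it. A self-contained sketch:

```python
import threading

def process():
    print("collecting metrics")  # stand-in for the real work

timer = threading.Timer(1, process)
timer.daemon = True               # do not block interpreter shutdown
timer.name = "Metric Collection"  # shows up in debuggers/thread dumps
timer.start()
timer.join()  # only so this demo prints before exiting
```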
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -9,6 +9,7 @@ setup( packages=[ 'two_factor', ], + package_data={'two_factor': ['templates/two_factor/*.html',],}, url='http://github.com/Bouke/django-two-factor-auth', description='Complete Two-Factor Authentication for Django', license='MIT',
Include templates in package_data, so they will be installed
py
diff --git a/digitalocean/Image.py b/digitalocean/Image.py index <HASH>..<HASH> 100644 --- a/digitalocean/Image.py +++ b/digitalocean/Image.py @@ -64,4 +64,4 @@ class Image(BaseAPI): return "%s %s %s" % (self.id, self.name, self.distribution) def __repr__(self): - return "%s %s %s" % (self.id, self.distribution, self.name) + return "< %s %s %s >" % (self.id, self.distribution, self.name)
Update Image.py: make objects more distinguishable
py
diff --git a/pyardrone/__init__.py b/pyardrone/__init__.py index <HASH>..<HASH> 100644 --- a/pyardrone/__init__.py +++ b/pyardrone/__init__.py @@ -76,7 +76,7 @@ class ARDrone: @property def state(self): ''' - The latest state from *NavData*. + The latest state from :py:class:`~pyardrone.navdata.NavData`. >>> drone.state.fly_mask True # drone is flying
doc: make state doc visible
py
diff --git a/sirbot/plugins/scheduler.py b/sirbot/plugins/scheduler.py index <HASH>..<HASH> 100644 --- a/sirbot/plugins/scheduler.py +++ b/sirbot/plugins/scheduler.py @@ -1,4 +1,5 @@ import logging +import inspect from sirbot.core import hookimpl, Plugin from apscheduler.schedulers.asyncio import AsyncIOScheduler @@ -52,13 +53,15 @@ class SchedulerFacade: self.scheduler = scheduler self._facades = facades - def add_job(self, func, trigger, args=None, *job_args, **job_kwargs): - + def add_job(self, id_, func, trigger, args=None, *job_args, **job_kwargs): + logger.debug('Registering job: %s, from %s', + func.__name__, + inspect.getabsfile(func)) if not args: args = list() elif type(args) is tuple: args = list(args) args.insert(0, self._facades.new()) - self.scheduler.add_job(func, trigger=trigger, args=args, - *job_args, **job_kwargs) + return self.scheduler.add_job(func, trigger=trigger, args=args, id=id_, + *job_args, **job_kwargs)
scheduler plugin: add a job id argument and return the job
py
diff --git a/bigfloat_cython/bigfloat/core.py b/bigfloat_cython/bigfloat/core.py index <HASH>..<HASH> 100644 --- a/bigfloat_cython/bigfloat/core.py +++ b/bigfloat_cython/bigfloat/core.py @@ -474,7 +474,7 @@ class BigFloat(mpfr.Mpfr_t): sign = '-' if self._sign() else '' e = self._exponent() - if isinstance(e, str): + if isinstance(e, basestring): return sign + e m = self._significand()
Revert unnecessary change in hex()
py
diff --git a/kubernetes/K8sObject.py b/kubernetes/K8sObject.py index <HASH>..<HASH> 100644 --- a/kubernetes/K8sObject.py +++ b/kubernetes/K8sObject.py @@ -300,6 +300,23 @@ class K8sObject(object): return [] return items + def get_exportable(self): + if self.name is None: + raise SyntaxError('K8sObject: name: [ {0} ] must be set to fetch the object.'.format(self.name)) + + url = '{base}/{name}?export=true'.format(base=self.base_url, name=self.name) + state = self.request(method='GET', url=url) + + if not state.get('success'): + status = state.get('status', '') + reason = state.get('data', dict()).get('message', None) + message = 'K8sObject: GET [ {0}:{1} ] failed: HTTP {2} : {3} '.format( + self.obj_type, self.name, status, reason) + raise NotFoundException(message) + + data = state.get('data') + return data + def create(self): if self.name is None: raise SyntaxError('K8sObject: name: [ {0} ] must be set to CREATE the object.'.format(self.name))
K8sObject: Adding a method to get the model in an "exportable" fashion
py
diff --git a/spython/main/base/command.py b/spython/main/base/command.py index <HASH>..<HASH> 100644 --- a/spython/main/base/command.py +++ b/spython/main/base/command.py @@ -43,7 +43,7 @@ def init_command(self, action, flags=None): if not isinstance(action, list): action = [action] - cmd = ['singularity'] + [action] + cmd = ['singularity'] + action if self.quiet is True: cmd.insert(1, '--quiet')
Fix bug: the action list should not be wrapped in a second list
py
diff --git a/mutmut/__init__.py b/mutmut/__init__.py index <HASH>..<HASH> 100644 --- a/mutmut/__init__.py +++ b/mutmut/__init__.py @@ -39,12 +39,13 @@ def number_mutation(value, **_): value = value[1:] else: base = 10 - - if '.' in value: - assert base == 10 - parsed = float(value) - else: + + try: parsed = int(value, base=base) + except ValueError: + # Since it wasn't an int, it must be a float + base = 10 + parsed = float(value) result = repr(parsed + 1) if not result.endswith(suffix):
Make number_mutation more robust to floats
py
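The parsing strategy behind that change, reduced to its core: attempt the stricter parse first and fall back on failure, instead of sniffing the string for a '.' (which misses exponent forms like '1e3'). A sketch:

```python
def parse_number(value, base=10):
    try:
        return int(value, base=base)
    except ValueError:
        # Not an int literal in this base, so it must be a float
        # (handles '1.5' as well as exponent forms like '1e3').
        return float(value)

assert parse_number("ff", base=16) == 255
assert parse_number("1.5") == 1.5
assert parse_number("1e3") == 1000.0
```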
diff --git a/test/functional/cluster/test_cluster.py b/test/functional/cluster/test_cluster.py index <HASH>..<HASH> 100644 --- a/test/functional/cluster/test_cluster.py +++ b/test/functional/cluster/test_cluster.py @@ -26,7 +26,7 @@ SYNCONLYPART = 'test' skip_cluster_tests = True -if getattr(symbols, 'run_cluster_tests') and symbols.run_cluster_tests is True: +if hasattr(symbols, 'run_cluster_tests') and symbols.run_cluster_tests is True: skip_cluster_tests = False
Fixing up opt-in for clustering tests.
py
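The one-character bug above is easy to reproduce: `getattr(obj, 'name')` raises AttributeError when the attribute is missing, while `hasattr` is the safe existence probe. A minimal sketch with a stand-in config object:

```python
class Symbols:
    pass  # stand-in for a test-config module without the opt-in flag

symbols = Symbols()

# getattr(symbols, 'run_cluster_tests') would raise AttributeError here;
# hasattr returns False instead, so the opt-in check degrades gracefully.
skip_cluster_tests = True
if hasattr(symbols, 'run_cluster_tests') and symbols.run_cluster_tests is True:
    skip_cluster_tests = False

assert skip_cluster_tests
```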
diff --git a/rdomanager_oscplugin/plugin.py b/rdomanager_oscplugin/plugin.py index <HASH>..<HASH> 100644 --- a/rdomanager_oscplugin/plugin.py +++ b/rdomanager_oscplugin/plugin.py @@ -28,6 +28,9 @@ DEFAULT_RDOMANAGER_OSCPLUGIN_API_VERSION = '1' # Required by the OSC plugin interface API_NAME = 'rdomanager_oscplugin' API_VERSION_OPTION = 'os_rdomanager_oscplugin_api_version' +API_VERSIONS = { + '1': 'rdomanager_oscplugin.plugin' +} def make_client(instance):
Conform to new API_VERSIONS requirement from OSC. OSC now requires plugins to include an API_VERSIONS dict. Change-Id: I<I>ed5c<I>bc<I>d<I>bd0f2ef0c<I>e1b7
py
diff --git a/segments/exit_code.py b/segments/exit_code.py index <HASH>..<HASH> 100644 --- a/segments/exit_code.py +++ b/segments/exit_code.py @@ -1,8 +1,8 @@ -def add_status_indicator_segment(): +def add_exit_code_segment(): if powerline.args.prev_error == 0: return fg = Color.CMD_FAILED_FG bg = Color.CMD_FAILED_BG powerline.append(str(powerline.args.prev_error), fg, bg) -add_status_indicator_segment() +add_exit_code_segment()
rename function in exit_code segment to match filename
py
diff --git a/bids/analysis/base.py b/bids/analysis/base.py index <HASH>..<HASH> 100644 --- a/bids/analysis/base.py +++ b/bids/analysis/base.py @@ -8,14 +8,18 @@ DesignMatrix = namedtuple('DesignMatrix', ('entities', 'groupby', 'data')) class Analysis(object): def __init__(self, layouts, model, manager=None, **selectors): + if isinstance(model, str): + model = json.load(open(model)) + self.model = model + + if 'input' in model: + selectors.update(model['input']) if manager is None: manager = BIDSVariableManager(layouts, **selectors) + self.manager = manager - if isinstance(model, str): - model = json.load(open(model)) - self.model = model self._load_blocks(model['blocks']) self.layout = manager.layout # for convenience
ENH: Use model input as a default selector
py
diff --git a/superset/utils/core.py b/superset/utils/core.py index <HASH>..<HASH> 100644 --- a/superset/utils/core.py +++ b/superset/utils/core.py @@ -26,6 +26,7 @@ import re import signal import smtplib import tempfile +import threading import traceback import uuid import zlib @@ -631,8 +632,9 @@ class timeout: # pylint: disable=invalid-name def __enter__(self) -> None: try: - signal.signal(signal.SIGALRM, self.handle_timeout) - signal.alarm(self.seconds) + if threading.current_thread() == threading.main_thread(): + signal.signal(signal.SIGALRM, self.handle_timeout) + signal.alarm(self.seconds) except ValueError as ex: logger.warning("timeout can't be used in the current context") logger.exception(ex)
fix: only call signal if executing on the main thread (#<I>)
py
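The constraint behind that fix, as a standalone sketch: CPython only allows `signal.signal()` from the main thread and raises ValueError elsewhere, so the guard simply skips arming the alarm off the main thread. POSIX-only (SIGALRM does not exist on Windows); the function name is illustrative:

```python
import signal
import threading

def install_alarm(seconds, handler):
    # signal.signal() raises ValueError when called off the main thread,
    # so only arm the alarm when it is known to be safe.
    if threading.current_thread() is threading.main_thread():
        signal.signal(signal.SIGALRM, handler)
        signal.alarm(seconds)
        return True
    return False  # caller proceeds without a timeout guard
```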
diff --git a/packages/vaex-core/vaex/events.py b/packages/vaex-core/vaex/events.py index <HASH>..<HASH> 100644 --- a/packages/vaex-core/vaex/events.py +++ b/packages/vaex-core/vaex/events.py @@ -32,11 +32,11 @@ class Signal(object): final_kwargs.update(extra_kwargs) final_kwargs.update(kwargs) try: - logger.debug("(%s) calling %r with arguments %r and kwargs %r" % (self.name, callback, final_args, final_kwargs)) + logger.debug("(%s) calling %r with arguments %r and kwargs %r", (self.name, callback, final_args, final_kwargs)) value = callback(*final_args, **final_kwargs) results.append(value) except Exception: - logger.error("error in handling callback %r with arguments %r and kwargs %r" % (callback, final_args, final_kwargs)) + logger.error("error in handling callback %r with arguments %r and kwargs %r", (callback, final_args, final_kwargs)) raise # tb = traceback.format_exc() # raise Exception("error while calling callback: %r with arguments %r and kwargs %r" % (callback, final_args, final_kwargs), tb)
Do not always format args eagerly; just pass them to the logger
py
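The logging idiom that diff is after, shown in a correct standalone form: pass format arguments to the logger individually, so `%`-formatting only happens if the record is actually emitted. (Note the hunk above still wraps them in a single tuple, which would fail at emit time with four placeholders and one argument.)

```python
import logging

logger = logging.getLogger("events")

def emit(name, callback, args, kwargs):
    # Arguments are passed separately, not pre-formatted with %: the
    # string is only interpolated when the DEBUG level is enabled.
    logger.debug("(%s) calling %r with arguments %r and kwargs %r",
                 name, callback, args, kwargs)
```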
diff --git a/tests/test_find_enrichment_run.py b/tests/test_find_enrichment_run.py index <HASH>..<HASH> 100755 --- a/tests/test_find_enrichment_run.py +++ b/tests/test_find_enrichment_run.py @@ -9,7 +9,9 @@ from __future__ import print_function __copyright__ = "Copyright (C) 2010-2018, DV Klopfenstein, H Tang. All rights reserved." import os +import sys import collections as cx +from goatools.base import download_go_basic_obo from goatools.cli.find_enrichment import rd_files from goatools.cli.find_enrichment import get_objgoea @@ -26,9 +28,11 @@ def test_find_enrichment(): filenames = ['data/study', 'data/population', 'data/association'] methods = ['bonferroni', 'sidak', 'holm', 'fdr_bh'] alpha = 0.05 + fin_obo = os.path.join(REPO, 'go-basic.obo') + download_go_basic_obo(fin_obo, prt=sys.stdout, loading_bar=None) args = ntobj( filenames=[os.path.join(REPO, f) for f in filenames], - obo='go-basic.obo', + obo=fin_obo, pval=0.05, alpha=alpha, pvalcalc='fisher',
Download go-basic.obo file if it does not exist.
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -12,12 +12,12 @@ from os import path here = path.abspath(path.dirname(__file__)) setup( - name='btb', + name='baytune', # Versions should comply with PEP440. For a discussion on single-sourcing # the version across setup.py and the project code, see # https://packaging.python.org/en/latest/single_source_version.html - version='0.1.0', + version='0.1.1', description='A framework for Bayesian hyperparameter selection and tuning',
Prepare baytune-<I> release
py
diff --git a/http_check/datadog_checks/http_check/http_check.py b/http_check/datadog_checks/http_check/http_check.py index <HASH>..<HASH> 100644 --- a/http_check/datadog_checks/http_check/http_check.py +++ b/http_check/datadog_checks/http_check/http_check.py @@ -10,6 +10,7 @@ import time from datetime import datetime import requests +from requests import Response from six import PY2, string_types from six.moves.urllib.parse import urlparse @@ -104,7 +105,7 @@ class HTTPCheck(AgentCheck): tags_list.append("instance:{}".format(instance_name)) service_checks = [] service_checks_tags = self._get_service_checks_tags(instance) - r = None + r = None # type: Response try: parsed_uri = urlparse(addr) self.log.debug("Connecting to %s", addr)
Add type to response (#<I>)
py
diff --git a/astrodbkit/astrodb.py b/astrodbkit/astrodb.py index <HASH>..<HASH> 100755 --- a/astrodbkit/astrodb.py +++ b/astrodbkit/astrodb.py @@ -977,7 +977,10 @@ class Database: The table name """ - pprint(self.query("PRAGMA table_info({})".format(table), fmt='table')) + try: + pprint(self.query("PRAGMA table_info({})".format(table), fmt='table')) + except ValueError: + print('Table {} not found'.format(table)) def search(self, criterion, table, columns='', fetch=False): """
catch error when checking schema of a table that doesn't exist
py
diff --git a/geth/chain.py b/geth/chain.py index <HASH>..<HASH> 100644 --- a/geth/chain.py +++ b/geth/chain.py @@ -97,6 +97,15 @@ def write_genesis_file(genesis_file_path, if config is None: config = { 'homesteadBlock': 0, + 'eip150Block': 0, + 'eip155Block': 0, + 'eip158Block': 0, + 'byzantiumBlock': 0, + 'constantinopleBlock': 0, + 'petersburgBlock': 0, + 'istanbulBlock': 0, + 'berlinBlock': 0, + 'londonBlock': 0, 'daoForkBlock': 0, 'daoForSupport': True, }
feat: set upgrade block numbers in default config (#<I>)
* feat: auto set eip blocks in default config
* fix: add compiler blocks too
* feat: inc berlin and london blocks
py
diff --git a/urlscan/urlchoose.py b/urlscan/urlchoose.py index <HASH>..<HASH> 100644 --- a/urlscan/urlchoose.py +++ b/urlscan/urlchoose.py @@ -649,18 +649,6 @@ class URLChooser: another function with the URL. """ - # Try-except block to work around webbrowser module bug - # https://bugs.python.org/issue31014 - try: - browser = os.environ['BROWSER'] - except KeyError: - pass - else: - del os.environ['BROWSER'] - webbrowser.register(browser, None, webbrowser.GenericBrowser(browser)) - try_idx = webbrowser._tryorder.index(browser) - webbrowser._tryorder.insert(0, webbrowser._tryorder.pop(try_idx)) - def browse(*args): # double ()() to ensure self.search evaluated at runtime, not when # browse() is _created_. [0] is self.search, [1] is self.enter
Remove workaround for fixed python webbrowser bug (#<I>)
py
diff --git a/py/selenium/webdriver/common/utils.py b/py/selenium/webdriver/common/utils.py index <HASH>..<HASH> 100644 --- a/py/selenium/webdriver/common/utils.py +++ b/py/selenium/webdriver/common/utils.py @@ -40,7 +40,7 @@ def is_connectable(port): try: socket_ = socket.socket(socket.AF_INET, socket.SOCK_STREAM) socket_.settimeout(1) - socket_.connect(("localhost", port)) + socket_.connect(("127.0.0.1", port)) socket_.close() return True except socket.error: @@ -60,7 +60,7 @@ def is_url_connectable(port): import urllib2 as url_request try: - res = url_request.urlopen("http://localhost:%s/status" % port) + res = url_request.urlopen("http://127.0.0.1:%s/status" % port) if res.getcode() == 200: return True else:
Use <I>, as localhost name resolution might fail on some systems
py
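The failure mode motivating that change, sketched standalone: connecting to the literal IPv4 address skips name resolution entirely, so a broken hosts file or an IPv6-first resolver cannot interfere:

```python
import socket

def is_connectable(port):
    # The literal address needs no name resolution, unlike "localhost",
    # which may resolve to ::1 or fail outright on odd systems.
    try:
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.settimeout(1)
        sock.connect(("127.0.0.1", port))
        sock.close()
        return True
    except socket.error:
        return False

print(is_connectable(4444))  # False unless something listens locally
```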
diff --git a/tests/integration/shell/syndic.py b/tests/integration/shell/syndic.py index <HASH>..<HASH> 100644 --- a/tests/integration/shell/syndic.py +++ b/tests/integration/shell/syndic.py @@ -51,7 +51,7 @@ class SyndicTest(integration.ShellCase, integration.ShellCaseCommonTestsMixIn): config['root_dir'] = config_dir config['log_file'] = 'file:///dev/log/LOG_LOCAL3' config['ret_port'] = int(config['ret_port']) + 10 - config['publish_port'] = nt(config['publish_port']) + 10 + config['publish_port'] = int(config['publish_port']) + 10 open(os.path.join(config_dir, config_file_name), 'w').write( yaml.dump(config, default_flow_style=False)
Fix typo from 6bc5ea<I>.
py
diff --git a/lib/websession_templates.py b/lib/websession_templates.py index <HASH>..<HASH> 100644 --- a/lib/websession_templates.py +++ b/lib/websession_templates.py @@ -1231,7 +1231,7 @@ class Template: out = '''<div class="hassubmenu%(on)s"> <a hreflang="en" class="header%(selected)s" href="%(CFG_SITE_SECURE_URL)s/youraccount/display?ln=%(ln)s">%(personalize)s</a> - <ul class="subsubmenu" style="width: 13em;">''' % { + <ul class="subsubmenu">''' % { 'CFG_SITE_SECURE_URL' : CFG_SITE_SECURE_URL, 'ln' : ln, 'personalize': _("Personalize"), @@ -1340,7 +1340,7 @@ class Template: if activities: out += '''<div class="hassubmenu%(on)s"> <a hreflang="en" class="header%(selected)s" href="%(CFG_SITE_SECURE_URL)s/youraccount/youradminactivities?ln=%(ln)s">%(admin)s</a> - <ul class="subsubmenu" style="width: 19em;">''' % { + <ul class="subsubmenu">''' % { 'CFG_SITE_SECURE_URL' : CFG_SITE_SECURE_URL, 'ln' : ln, 'admin': _("Administration"),
WebStyle: fluid width of the menu
* Deletes the fixed width of the menus of the websession_templates.py and bibcirculation_templates.py files. (closes #<I>)
* Adds new file invenio-ie7.css with some specific styles for IE7.
* Adds a conditional comment in the webstyle_templates.py file, to force IE7 to load the specific styles.
py
diff --git a/docs/source/conf.py b/docs/source/conf.py index <HASH>..<HASH> 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -51,7 +51,7 @@ copyright = u'2010-2013, Coda Hale, Yammer Inc., 2014-2015 Dropwizard Team' # The short X.Y version. version = '0.8' # The full version, including alpha/beta/rc tags. -release = '0.8.1' +release = '0.9.0' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages.
Update docs version to <I>
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -35,6 +35,7 @@ install_requires = [ 'six', 'iso8601', ] tests_require = [ + 'pycodestyle < 2.4.0', 'pytest >= 3.2.3, < 4.0.0', 'pytest-flake8 >= 0.9.1, < 1.0.0', 'flake8-import-order >= 0.12, < 1.0',
Fix pycodestyle dependency problem
py
diff --git a/tests/util.py b/tests/util.py index <HASH>..<HASH> 100644 --- a/tests/util.py +++ b/tests/util.py @@ -45,8 +45,7 @@ def get_ssl_config(enable_ssl=False, protocol=PROTOCOL.TLS, check_hostname=None, ciphers=None, - attempt_limit=1, - timeout=1): + attempt_limit=1): config = ClientConfig() config.network_config.ssl_config.enabled = enable_ssl @@ -59,7 +58,6 @@ def get_ssl_config(enable_ssl=False, config.network_config.ssl_config.ciphers = ciphers config.network_config.connection_attempt_limit = attempt_limit - config.network_config.connection_timeout = timeout return config
increase connection timeout for SSL tests from 1 second to 5 seconds (#<I>). Sometimes the SSL authentication process may take more than 1 second, which causes some SSL tests to fail due to a timeout error. Increased the timeout to 5 seconds to solve this issue.
py
diff --git a/sos/plugins/firewalld.py b/sos/plugins/firewalld.py index <HASH>..<HASH> 100644 --- a/sos/plugins/firewalld.py +++ b/sos/plugins/firewalld.py @@ -28,7 +28,7 @@ class FirewallD(Plugin, RedHatPlugin): def setup(self): self.add_copy_spec([ - "/etc/firewalld/firewalld.conf", + "/etc/firewalld/*.conf", "/etc/firewalld/icmptypes/*.xml", "/etc/firewalld/services/*.xml", "/etc/firewalld/zones/*.xml",
[firewalld] Collect all configs under /etc/firewalld. Collect all /etc/firewalld/*.conf files, not only firewalld.conf. Resolves: #<I>
py
diff --git a/pysat/_files.py b/pysat/_files.py index <HASH>..<HASH> 100644 --- a/pysat/_files.py +++ b/pysat/_files.py @@ -238,15 +238,14 @@ class Files(object): """ inst_repr = pysat.Instrument(**self.inst_info).__repr__() - out_str = "".join(["Files(", inst_repr, ", directory_format=", + out_str = "".join(["pysat.Files(", inst_repr, ", directory_format=", "'{:}'".format(self.directory_format), ", update_files=", "{:}, file_format=".format(self.update_files), "{:}, ".format(self.file_format.__repr__()), "write_to_disk={:}, ".format(self.write_to_disk), "ignore_empty_files=", - "{:})".format(self.ignore_empty_files), - " -> {:d} Local files".format(len(self.files))]) + "{:})".format(self.ignore_empty_files)]) return out_str
STY: updated the file repr. Updated the file repr style to work with eval.
py
diff --git a/angr/exploration_techniques/explorer.py b/angr/exploration_techniques/explorer.py index <HASH>..<HASH> 100644 --- a/angr/exploration_techniques/explorer.py +++ b/angr/exploration_techniques/explorer.py @@ -58,7 +58,7 @@ class Explorer(ExplorationTechnique): l.error("Usage of the CFG has been disabled for this explorer.") self.cfg = None return - find = find.addrs + find = self.find.addrs for a in avoid: if cfg.get_any_node(a) is None:
fixup! explorer: use extra_stop_points to work with unicorn
py
diff --git a/pybotvac/robot.py b/pybotvac/robot.py index <HASH>..<HASH> 100644 --- a/pybotvac/robot.py +++ b/pybotvac/robot.py @@ -74,7 +74,7 @@ class Robot: json = {'reqId': "1", 'cmd': "startCleaning", 'params': { - 'category': 2, + 'category': category, 'mode': mode, 'modifier': 1} } @@ -82,7 +82,7 @@ class Robot: json = {'reqId': "1", 'cmd': "startCleaning", 'params': { - 'category': 2, + 'category': category, 'mode': mode, 'modifier': 1, "navigationMode": navigation_mode} @@ -91,14 +91,14 @@ class Robot: json = {'reqId': "1", 'cmd': "startCleaning", 'params': { - 'category': 2, + 'category': category, "navigationMode": navigation_mode} } else: # self.service_version == 'basic-2' json = {'reqId': "1", 'cmd': "startCleaning", 'params': { - 'category': 2, + 'category': category, 'mode': mode, 'modifier': 1, "navigationMode": navigation_mode}
Use the category provided for start_cleaning
py
diff --git a/oecpy/database.py b/oecpy/database.py index <HASH>..<HASH> 100644 --- a/oecpy/database.py +++ b/oecpy/database.py @@ -60,7 +60,7 @@ class OECDatabase(object): for planet in self.planets: try: - if planet.isTransiting(): + if planet.isTransiting: transitingPlanets.append(planet) except KeyError: # No 'discoverymethod' tag - this also filters Solar System planets pass
Changed the database transiting-planet filter to use the isTransiting property instead of calling it as a method
py
diff --git a/tests/test_ramon.py b/tests/test_ramon.py index <HASH>..<HASH> 100644 --- a/tests/test_ramon.py +++ b/tests/test_ramon.py @@ -15,10 +15,10 @@ class TestRAMON(unittest.TestCase): if os.path.exists("1.hdf5"): os.remove("1.hdf5") - def test_create_ramon_file(self): - r = ramon.RAMONSegment(id=self.ramon_id) - r.cutout = numpy.zeros((3, 3, 3)) - self.h = ramon.ramon_to_hdf5(r) + # def test_create_ramon_file(self): + # r = ramon.RAMONSegment(id=self.ramon_id) + # r.cutout = numpy.zeros((3, 3, 3)) + # self.h = ramon.ramon_to_hdf5(r) # self.assertEqual(type(self.h), h5py.File) # Need to write to disk before this'll work
kill more tests to gruntle travis
py
diff --git a/discord/ui/view.py b/discord/ui/view.py index <HASH>..<HASH> 100644 --- a/discord/ui/view.py +++ b/discord/ui/view.py @@ -335,6 +335,10 @@ class View: if self._cancel_callback: self._cancel_callback(self) + def is_finished(self) -> bool: + """:class:`bool`: Whether the view has finished interacting.""" + return self._stopped.done() + async def wait(self) -> bool: """Waits until the view has finished interacting.
Add View.is_finished() to query listening state
py
diff --git a/salt/utils/cloud.py b/salt/utils/cloud.py index <HASH>..<HASH> 100644 --- a/salt/utils/cloud.py +++ b/salt/utils/cloud.py @@ -387,9 +387,11 @@ def bootstrap(vm_, opts): deploy_kwargs['username'] = salt.config.get_cloud_config_value( 'win_username', vm_, opts, default='Administrator' ) - deploy_kwargs['password'] = salt.config.get_cloud_config_value( + win_pass = salt.config.get_cloud_config_value( 'win_password', vm_, opts, default='' ) + if win_pass: + deploy_kwargs['password'] = win_pass # Store what was used to the deploy the VM event_kwargs = copy.deepcopy(deploy_kwargs)
Only assign win_password if it actually is set
py
diff --git a/examples/help_grep.py b/examples/help_grep.py index <HASH>..<HASH> 100644 --- a/examples/help_grep.py +++ b/examples/help_grep.py @@ -12,12 +12,12 @@ # location: init/Kconfig:39 # # config SYSVIPC -# bool +# bool # prompt "System V IPC" # help -# ... +# ... # exchange information. It is generally considered to be a good thing, -# ... +# ... # # location: init/Kconfig:233 # @@ -25,8 +25,8 @@ # bool # prompt "BSD Process Accounting" if MULTIUSER # help -# ... -# information. This is generally a good idea, so say Y. +# ... +# information. This is generally a good idea, so say Y. # # location: init/Kconfig:403 #
Fix comment formatting in help_grep.py. Consistently indent with tabs, like in the actual output.
py
diff --git a/tests/test_context.py b/tests/test_context.py index <HASH>..<HASH> 100644 --- a/tests/test_context.py +++ b/tests/test_context.py @@ -107,6 +107,14 @@ class ContextTests(TestCase): self.assertIsInstance(ctx.TRIANGLE_STRIP_ADJACENCY, int) self.assertIsInstance(ctx.PATCHES, int) + # Texture filters + self.assertIsInstance(ctx.LINEAR, int) + self.assertIsInstance(ctx.NEAREST, int) + self.assertIsInstance(ctx.NEAREST_MIPMAP_NEAREST, int) + self.assertIsInstance(ctx.LINEAR_MIPMAP_LINEAR, int) + self.assertIsInstance(ctx.LINEAR_MIPMAP_NEAREST, int) + self.assertIsInstance(ctx.NEAREST_MIPMAP_LINEAR, int) + # Blend functions self.assertIsInstance(ctx.ZERO, int) self.assertIsInstance(ctx.ONE, int)
Test the presence of texture filters in context
py
diff --git a/ai/models.py b/ai/models.py index <HASH>..<HASH> 100644 --- a/ai/models.py +++ b/ai/models.py @@ -61,3 +61,6 @@ class SearchNode(object): node = node.parent return list(reversed(path)) + + def __hash__(self): + return hash(self.state)
The hash of a node is the hash of its state
py
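Why delegating `__hash__` to the state matters: nodes wrapping equal states then collapse to a single entry in sets and dict keys, which is what visited-sets in graph search rely on. A toy version (illustrative, not the project's full class):

```python
class SearchNode:
    def __init__(self, state, parent=None):
        self.state = state
        self.parent = parent

    def __eq__(self, other):
        return self.state == other.state

    def __hash__(self):
        # Nodes are interchangeable when their states are equal, so
        # hashing the state keeps __eq__ and __hash__ consistent.
        return hash(self.state)

visited = {SearchNode((0, 0))}
assert SearchNode((0, 0), parent="elsewhere") in visited
```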
diff --git a/spotify/http.py b/spotify/http.py index <HASH>..<HASH> 100644 --- a/spotify/http.py +++ b/spotify/http.py @@ -1590,9 +1590,10 @@ class HTTPClient: Provide this parameter if you want to apply Track Relinking. """ route = self.route("GET", "/tracks/{id}", id=track_id) + payload: Dict[str, Any] = {} if market is not None: - payload: Dict[str, Any] = {"market": market} + payload["market"] = market return self.request(route, params=payload)
bugfix: potentially undefined payload. Closes #<I>
py
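The bug class fixed above, in miniature: a variable first assigned inside an `if` is undefined on the other branch, and initializing the dict unconditionally keeps every path safe. Names here are illustrative:

```python
from typing import Any, Dict, Optional

def build_params(market: Optional[str] = None) -> Dict[str, Any]:
    payload: Dict[str, Any] = {}  # always bound, even when market is None
    if market is not None:
        payload["market"] = market
    return payload

assert build_params() == {}
assert build_params("US") == {"market": "US"}
```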
diff --git a/docs/extensions/attributetable.py b/docs/extensions/attributetable.py index <HASH>..<HASH> 100644 --- a/docs/extensions/attributetable.py +++ b/docs/extensions/attributetable.py @@ -147,6 +147,7 @@ def get_class_results(lookup, modulename, name, fullname): groups = OrderedDict([ ('Attributes', []), ('Coroutines', []), + ('Classmethods', []), ('Methods', []), ('Decorators', []), ]) @@ -166,6 +167,8 @@ def get_class_results(lookup, modulename, name, fullname): doc = value.__doc__ or '' if inspect.iscoroutinefunction(value) or doc.startswith('|coro|'): key = 'Coroutines' + elif isinstance(value, classmethod): + key = 'Classmethods' elif inspect.isfunction(value): if doc.startswith(('A decorator', 'A shortcut decorator')): # finicky but surprisingly consistent
Show classmethods separately in attribute table
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -11,7 +11,7 @@ def readme(file='', split=False): setup( name='pyderman', - version='1.4.0', + version='2.0.0', description='Installs the latest Chrome/Firefox/Opera/PhantomJS/Edge web drivers automatically.', long_description=readme('README.md'), long_description_content_type='text/markdown',
Increment to <I>, due to potentially-breaking Error handling changes.
py
diff --git a/setuptools/command/bdist_wininst.py b/setuptools/command/bdist_wininst.py index <HASH>..<HASH> 100755 --- a/setuptools/command/bdist_wininst.py +++ b/setuptools/command/bdist_wininst.py @@ -37,18 +37,3 @@ class bdist_wininst(_bdist_wininst): self._fix_upload_names() finally: self._is_running = False - - if not hasattr(_bdist_wininst, 'get_installer_filename'): - def get_installer_filename(self, fullname): - # Factored out to allow overriding in subclasses - if self.target_version: - # if we create an installer for a specific python version, - # it's better to include this in the name - installer_name = os.path.join(self.dist_dir, - "%s.win32-py%s.exe" % - (fullname, self.target_version)) - else: - installer_name = os.path.join(self.dist_dir, - "%s.win32.exe" % fullname) - return installer_name - # get_installer_filename()
Remove get_installer_filename (it was copied from a version of Python that is now the required minimum, so the stdlib supplies it).
py
diff --git a/zipline/pipeline/factors/factor.py b/zipline/pipeline/factors/factor.py index <HASH>..<HASH> 100644 --- a/zipline/pipeline/factors/factor.py +++ b/zipline/pipeline/factors/factor.py @@ -290,7 +290,9 @@ def function_application(func): if func not in NUMEXPR_MATH_FUNCS: raise ValueError("Unsupported mathematical function '%s'" % func) - @with_doc(func) + docstring = "A Factor that computes {}(x) on every input.".format(func) + + @with_doc(docstring) @with_name(func) def mathfunc(self): if isinstance(self, NumericalExpression):
DOC: Slightly better function_application docstring.
py
diff --git a/matrix_client/client.py b/matrix_client/client.py index <HASH>..<HASH> 100644 --- a/matrix_client/client.py +++ b/matrix_client/client.py @@ -346,6 +346,8 @@ class MatrixClient(object): for room_id, left_room in response['rooms']['leave'].items(): for listener in self.left_listeners: listener(room_id, left_room) + if room_id in self.rooms: + del self.rooms[room_id] for room_id, sync_room in response['rooms']['join'].items(): if room_id not in self.rooms:
matrix_client/client.py: Remove room from rooms dict on leave event. When receiving a leave event, we should remove the room from our rooms dictionary since it is no longer meaningful for us. This fix also makes join and leave events more symmetric.
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -1,3 +1,13 @@ +''' +Package description and configuration. +To release to PyPI test: + - python setup.py register -r pypitest + - python setup.py sdist upload -r pypitest +To release to PyPI: + - python setup.py register -r pypi + - python setup.py sdist upload -r pypi +''' + try: from setuptools import setup except ImportError:
Added comments to setup.py to help for future release.
py
diff --git a/api/opentrons/instruments/pipette.py b/api/opentrons/instruments/pipette.py index <HASH>..<HASH> 100755 --- a/api/opentrons/instruments/pipette.py +++ b/api/opentrons/instruments/pipette.py @@ -17,6 +17,9 @@ PLUNGER_POSITIONS = { 'drop_tip': -7 } +DEFAULT_ASPIRATE_SPEED = 20 +DEFAULT_DISPENSE_SPEED = 40 + class PipetteTip: def __init__(self, length): @@ -100,8 +103,8 @@ class Pipette: ul_per_mm=18.51, trash_container=None, tip_racks=[], - aspirate_speed=20, - dispense_speed=40): + aspirate_speed=DEFAULT_ASPIRATE_SPEED, + dispense_speed=DEFAULT_DISPENSE_SPEED): self.robot = robot self.mount = mount
moves plunger default speeds to top of file
py
diff --git a/redis/client.py b/redis/client.py index <HASH>..<HASH> 100644 --- a/redis/client.py +++ b/redis/client.py @@ -1155,6 +1155,10 @@ class StrictRedis(object): return self.execute_command('ZCARD', name) def zcount(self, name, min, max): + """ + Returns the number of elements in the sorted set at key ``name`` with + a score between ``min`` and ``max``. + """ return self.execute_command('ZCOUNT', name, min, max) def zincrby(self, name, value, amount=1):
added docstring for zcount
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -12,7 +12,7 @@ setup( license="MIT", url="https://github.com/Kane610/axis", download_url="https://github.com/Kane610/axis/archive/v35.tar.gz", - install_requires=["attrs", "requests", "xmltodict"], + install_requires=["attrs", "packaging", "requests", "xmltodict"], keywords=["axis", "vapix", "onvif", "event stream", "homeassistant"], classifiers=["Natural Language :: English", "Programming Language :: Python :: 3"], )
Add packaging as dependency (#<I>)
py
diff --git a/synapse/cores/ram.py b/synapse/cores/ram.py index <HASH>..<HASH> 100644 --- a/synapse/cores/ram.py +++ b/synapse/cores/ram.py @@ -105,3 +105,29 @@ class Cortex(common.Cortex): rows = [ row for row in rows if row[3] < maxtime ] return len(rows) + +ramcores = {} + +def initRamCortex(link): + ''' + Initialize a RAM based Cortex from a link tufo. + + NOTE: the "path" element of the link tufo is used to + potentially return an existing cortex instance. + + ''' + path = link[1].get('path').strip('/') + if not path: + return Cortex(link) + + core = ramcores.get(path) + if core == None: + core = Cortex(link) + + ramcores[path] = core + def onfini(): + ramcores.pop(path,None) + + core.onfini(onfini) + + return core
added factory function so ram cortexes can overlap
py
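The shape of that factory as a generic sketch: a module-level registry keyed by path hands back existing instances, and a finalizer removes the entry so a fresh instance can be built later. Class and hook names below are stand-ins for the synapse ones:

```python
_registry = {}

class Cortex:
    def __init__(self, path):
        self.path = path
        self._fini_callbacks = []

    def onfini(self, func):  # stand-in for synapse's fini hook
        self._fini_callbacks.append(func)

    def fini(self):
        for func in self._fini_callbacks:
            func()

def init_ram_cortex(path):
    core = _registry.get(path)
    if core is None:
        core = Cortex(path)
        _registry[path] = core
        # Drop the registry entry when the cortex is torn down.
        core.onfini(lambda: _registry.pop(path, None))
    return core

a = init_ram_cortex("demo")
assert init_ram_cortex("demo") is a      # same path -> shared instance
a.fini()
assert init_ram_cortex("demo") is not a  # rebuilt after teardown
```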
diff --git a/tests/functional_tests/test_providers.py b/tests/functional_tests/test_providers.py index <HASH>..<HASH> 100644 --- a/tests/functional_tests/test_providers.py +++ b/tests/functional_tests/test_providers.py @@ -217,10 +217,9 @@ def login(request, browser, app, attempt=1): log(3, provider_name, 'Hitting ENTER after login input') login_element.send_keys(Keys.ENTER) - hi_xpath, hi_sleep = provider\ - .get('human_interaction_before_password') - if hi_xpath: - human_interaction_needed(hi_xpath, hi_sleep) + hi = provider.get('human_interaction_before_password') + if hi: + human_interaction_needed(*hi) log(2, provider_name, 'Finding password input {0}'.format(password_xpath))
Fixed a bug where the human interaction setting could not be unpacked.
py
diff --git a/webpush/utils.py b/webpush/utils.py index <HASH>..<HASH> 100644 --- a/webpush/utils.py +++ b/webpush/utils.py @@ -9,11 +9,7 @@ from pywebpush import WebPusher def send_notification_to_user(user, payload, ttl=0): # Get all the push_info of the user push_infos = user.webpush_info.select_related("subscription") - - numbers = range(push_infos.count()) - - for i in numbers: - push_info = push_infos[i] + for push_info in push_infos: _send_notification(push_info, payload, ttl) @@ -21,12 +17,7 @@ def send_notification_to_group(group_name, payload, ttl=0): # Get all the subscription related to the group push_infos = Group.objects.get(name=group_name).webpush_info.select_related("subscription") - # As there can be many subscription, iterating the large number will make it slow - # so count the number and cut according to it - numbers = range(push_infos.count()) - - for i in numbers: - push_info = push_infos[i] + for push_info in push_infos: _send_notification(push_info, payload, ttl)
Cleaned up for loops. (#2)
py
diff --git a/pifpaf/drivers/gnocchi.py b/pifpaf/drivers/gnocchi.py index <HASH>..<HASH> 100644 --- a/pifpaf/drivers/gnocchi.py +++ b/pifpaf/drivers/gnocchi.py @@ -73,7 +73,7 @@ url = %s""" % (self.tempdir, pg.url)) self._exec(gnocchi_upgrade) c, _ = self._exec(["gnocchi-metricd", "--config-file=%s" % conffile], - wait_for_line="Metricd reporting") + wait_for_line="metrics wait to be processed") self.addCleanup(self._kill, c.pid) c, _ = self._exec(
gnocchi: Catch a working line for metricd
py
diff --git a/tests/integration/states/npm.py b/tests/integration/states/npm.py index <HASH>..<HASH> 100644 --- a/tests/integration/states/npm.py +++ b/tests/integration/states/npm.py @@ -54,7 +54,8 @@ class NpmStateTest(integration.ModuleCase, integration.SaltReturnAssertsMixIn): ret = self.run_state('npm.installed', name=None, pkgs=['pm2', 'grunt']) self.assertSaltTrueReturn(ret) - @skipIf(LooseVersion(cmd.run('npm -v')) >= LooseVersion(MAX_NPM_VERSION), 'Skip with npm >= 5.0.0 until #41770 is fixed') + @skipIf(salt.utils.which('npm') and LooseVersion(cmd.run('npm -v')) >= LooseVersion(MAX_NPM_VERSION), + 'Skip with npm >= 5.0.0 until #41770 is fixed') @destructiveTest def test_npm_cache_clean(self): '''
Make sure cmd is not run when npm isn't installed. Apparently the skipIf decorators on the functions still get run, even if the function is going to be skipped based on a skipIf on the class.
py
diff --git a/PyFunceble/helpers/hash.py b/PyFunceble/helpers/hash.py index <HASH>..<HASH> 100644 --- a/PyFunceble/helpers/hash.py +++ b/PyFunceble/helpers/hash.py @@ -61,6 +61,8 @@ License: from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import hashes +from .file import File + class Hash: """ @@ -78,7 +80,7 @@ class Hash: if not hasattr(hashes, self.algo): raise ValueError(f"Unknown <algo> ({self.algo})") - def file(self, file_path): + def file(self, file_path, encoding="utf-8"): """ Open the given file, and it's content. @@ -90,13 +92,13 @@ class Hash: digest = hashes.Hash(getattr(hashes, self.algo)(), backend=default_backend()) - try: - with open(file_path, "rb") as file_stream: - digest.update(file_stream.read()) + content = File(file_path).read(encoding=encoding) + if content: + digest.update(content.encode(encoding)) return digest.finalize().hex() - except FileNotFoundError: - return None + + return None def data(self, data): """
Fix issue with the tests under Windows.
py
diff --git a/src/marshmallow/schema.py b/src/marshmallow/schema.py index <HASH>..<HASH> 100644 --- a/src/marshmallow/schema.py +++ b/src/marshmallow/schema.py @@ -133,7 +133,7 @@ class SchemaMeta(type): inherited_fields: typing.List, dict_cls: type, ): - """Returns a dictionary of field_name => `Field` pairs declard on the class. + """Returns a dictionary of field_name => `Field` pairs declared on the class. This is exposed mainly so that plugins can add additional fields, e.g. fields computed from class Meta options.
Fix simple typo: declard -> declared. Closes #<I>
py
diff --git a/pyIOSXR/iosxr.py b/pyIOSXR/iosxr.py index <HASH>..<HASH> 100644 --- a/pyIOSXR/iosxr.py +++ b/pyIOSXR/iosxr.py @@ -39,7 +39,9 @@ def __execute_rpc__(device, rpc_command, timeout): root = ET.fromstring(response) childs = [x.tag for x in list(root)] - if int(root.find('ResultSummary').get('ErrorCount')) > 0: + result_summary = root.find('ResultSummary') + + if result_summary is not None and int(result_summary.get('ErrorCount', 0)) > 0: if 'CLI' in childs: error_msg = root.find('CLI').get('ErrorMsg') or ''
Fixed nonexistent ResultSummary node issue on IOS-XR <I>
py
diff --git a/salt/thorium/reg.py b/salt/thorium/reg.py index <HASH>..<HASH> 100644 --- a/salt/thorium/reg.py +++ b/salt/thorium/reg.py @@ -35,9 +35,17 @@ def set_(name, add, match): return ret -def list_(name, add, match): +def list_(name, add, match, stamp=False): ''' - Add to the named list the specified values + Add the specified values to the named list + + If ``stamp`` is True, then the timestamp from the event will also be added + + foo: + reg.list: + - add: bar + - match: my/custom/event + - stamp: True ''' ret = {'name': name, 'changes': {}, @@ -54,6 +62,8 @@ def list_(name, add, match): for key in add: if key in event['data']['data']: item[key] = event['data']['data'][key] + if stamp is True: + item['time'] = event['data']['_stamp'] __reg__[name]['val'].append(item) return ret
Add timestamping to reg.list
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -53,6 +53,7 @@ setup( 'flask-components >= 0.1', 'sqlalchemy >= 0.8', 'sqlalchemy-utils >= 0.16', + 'alembic >= 0.6, < 0.7', 'pytest >= 2.4', 'pytest-pep8 >= 1.0', 'pytest-cov >= 1.6',
Add alembic as a requirement.
py
diff --git a/satpy/readers/seviri_l2_bufr.py b/satpy/readers/seviri_l2_bufr.py index <HASH>..<HASH> 100644 --- a/satpy/readers/seviri_l2_bufr.py +++ b/satpy/readers/seviri_l2_bufr.py @@ -37,7 +37,6 @@ logger = logging.getLogger('BufrProductClasses') sub_sat_dict = {"E0000": 0.0, "E0415": 41.5, "E0095": 9.5} -seg_area_dict = {"E0000": 'seviri_0deg', "E0415": 'seviri_iodc', "E0095": 'seviri_rss'} seg_size_dict = {'seviri_l2_bufr_asr': 16, 'seviri_l2_bufr_cla': 16, 'seviri_l2_bufr_csr': 16, 'seviri_l2_bufr_gii': 3, 'seviri_l2_bufr_thu': 16, 'seviri_l2_bufr_toz': 3}
Removed unused dict seg_area_dict
py
diff --git a/apispec/core.py b/apispec/core.py index <HASH>..<HASH> 100644 --- a/apispec/core.py +++ b/apispec/core.py @@ -1,6 +1,7 @@ # -*- coding: utf-8 -*- """Core apispec classes and functions.""" import re +from collections import OrderedDict from apispec.compat import iterkeys from .exceptions import APISpecError, PluginError @@ -102,7 +103,7 @@ class APISpec(object): self._definitions = {} self._parameters = {} self._tags = [] - self._paths = {} + self._paths = OrderedDict() # Plugin and helpers self.plugins = {} self._definition_helpers = []
Maintain order in which paths are added to spec
py
diff --git a/xblock/runtime.py b/xblock/runtime.py index <HASH>..<HASH> 100644 --- a/xblock/runtime.py +++ b/xblock/runtime.py @@ -921,14 +921,6 @@ class Runtime(object): Arguments: block (:class:`.XBlock`): The block to render retrieve asides for. """ - # TODO: This function will need to be extended if we want to allow: - # a) XBlockAsides to statically indicated which types of blocks they can comment on - # b) XBlockRuntimes to limit the selection of asides to a subset of the installed asides - # c) Optimize by only loading asides that actually decorate a particular view - - if self.id_generator is None: - raise Exception("Runtimes must be supplied with an IdGenerator to load XBlockAsides.") - return [ self.get_aside_of_type(block, aside_type) for aside_type, __ @@ -947,6 +939,10 @@ class Runtime(object): # a) XBlockAsides to statically indicated which types of blocks they can comment on # b) XBlockRuntimes to limit the selection of asides to a subset of the installed asides # c) Optimize by only loading asides that actually decorate a particular view + + if self.id_generator is None: + raise Exception("Runtimes must be supplied with an IdGenerator to load XBlockAsides.") + usage_id = block.scope_ids.usage_id aside_cls = self.load_aside_type(aside_type)
Minor cleanup of get_asides
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -50,6 +50,13 @@ LICENSE = 'LGPL' DOWNLOAD_URL = URL SNDFILE_MAJ_VERSION = 1 +CLASSIFIERS = ['Development Status :: 4 - Beta', + 'Environment :: Console', + 'Intended Audience :: Developers', + 'Intended Audience :: Science/Research', + 'License :: OSI Approved :: GNU Library or Lesser General '\ + 'Public License (LGPL)', 'Topic :: Multimedia :: Sound/Audio', + 'Topic :: Scientific/Engineering'] # The following is more or less random copy/paste from numpy.distutils ... import setuptools @@ -194,12 +201,4 @@ if __name__ == "__main__": include_package_data = True, test_suite="tester", zip_safe=True, - classifiers = - [ 'Development Status :: 4 - Beta', - 'Environment :: Console', - 'Intended Audience :: Developers', - 'Intended Audience :: Science/Research', - 'License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)', - 'Topic :: Multimedia :: Sound/Audio', - 'Topic :: Scientific/Engineering'] - ) + classifiers=CLASSIFIERS)
Minor correction to main setup.py.
py
diff --git a/test/test_motor_collection.py b/test/test_motor_collection.py index <HASH>..<HASH> 100644 --- a/test/test_motor_collection.py +++ b/test/test_motor_collection.py @@ -121,7 +121,7 @@ class MotorCollectionTest(MotorTest): yield cursor.close() - @gen_test + @gen_test(timeout=10) def test_find_one_is_async(self): # Confirm find_one() is async by launching two operations which will # finish out of order.
Longer timeout in find_one_is_async. Query could take longer than expected: SERVER-<I>.
py
diff --git a/src/hamster/today.py b/src/hamster/today.py index <HASH>..<HASH> 100755 --- a/src/hamster/today.py +++ b/src/hamster/today.py @@ -32,6 +32,12 @@ from hamster.configuration import runtime, dialogs, conf, load_ui_file from hamster import widgets from hamster.lib import stuff, trophies +try: + import wnck +except: + logging.warning("Could not import wnck - workspace tracking will be disabled") + wnck = None + class ProjectHamsterStatusIcon(gtk.StatusIcon): def __init__(self, project):
was missing wnck import! found by @Asenar, fixes #<I>
py
diff --git a/bika/lims/upgrade/v01_02_008.py b/bika/lims/upgrade/v01_02_008.py index <HASH>..<HASH> 100644 --- a/bika/lims/upgrade/v01_02_008.py +++ b/bika/lims/upgrade/v01_02_008.py @@ -36,6 +36,7 @@ def upgrade(tool): # -------- ADD YOUR STUFF HERE -------- + setup.runImportStepFromProfile(profile, 'workflow') # Revert upgrade actions performed due to #893 (reverted) revert_client_permissions_for_batches(portal)
Missing "workflow" import step in upgradestep (required by #<I>) (#<I>)
py
diff --git a/pyup/bot.py b/pyup/bot.py index <HASH>..<HASH> 100644 --- a/pyup/bot.py +++ b/pyup/bot.py @@ -84,9 +84,8 @@ class Bot(object): else: pull_request = next((pr for pr in self.pull_requests if pr.title == title), None) - if pull_request: - for update in updates: - update.requirement.pull_request = pull_request + for update in updates: + update.requirement.pull_request = pull_request def commit_and_pull(self, base_branch, new_branch, title, body, updates):
pull requests should exist here no matter what
py
diff --git a/hydpy/core/hydpytools.py b/hydpy/core/hydpytools.py index <HASH>..<HASH> 100644 --- a/hydpy/core/hydpytools.py +++ b/hydpy/core/hydpytools.py @@ -252,9 +252,6 @@ class HydPy(object): funcs.append(node._loaddata_sim) elif node.sequences.obs.use_ext: funcs.append(node._loaddata_obs) - for (name, element) in self.elements: - if element.receivers: - funcs.append(element.model.update_receivers) for (name, node) in self.nodes: if node.routingmode != 'oldsim': funcs.append(node.reset) @@ -264,6 +261,9 @@ class HydPy(object): for (name, element) in self.elements: if element.senders: funcs.append(element.model.update_senders) + for (name, element) in self.elements: + if element.receivers: + funcs.append(element.model.update_receivers) for (name, node) in self.nodes: if node.routingmode != 'oldsim': funcs.append(node._savedata_sim)
Sequence updates from receiver nodes now take place at the end of a simulation step. This offers the advantage that the values stored by the sequences of receiver nodes do not need to be saved as (potential) initial conditions.
py
diff --git a/redis/client.py b/redis/client.py index <HASH>..<HASH> 100755 --- a/redis/client.py +++ b/redis/client.py @@ -613,8 +613,10 @@ class Redis(RedisModuleCommands, CoreCommands, object): This abstract class provides a Python interface to all Redis commands and an implementation of the Redis protocol. - Connection and Pipeline derive from this, implementing how - the commands are sent and received to the Redis server + Pipelines derive from this, implementing how + the commands are sent and received to the Redis server. Based on + configuration, an instance will either use a ConnectionPool, or + Connection object to talk to redis. """ RESPONSE_CALLBACKS = { **string_keys_to_dict(
Docstring improvements for Redis class (#<I>)
py
diff --git a/install.py b/install.py index <HASH>..<HASH> 100644 --- a/install.py +++ b/install.py @@ -68,7 +68,7 @@ def copy(src, dest): def main(): exit_code = call(["coverage", "run", "--source", "getgauge", "-m", "unittest", "discover"]) if exit_code != 0: - return + sys.exit(exit_code) if len(sys.argv) == 1: create_zip() elif sys.argv[1] == '--install':
Correcting exit code if tests fail
py
diff --git a/axis/__init__.py b/axis/__init__.py index <HASH>..<HASH> 100644 --- a/axis/__init__.py +++ b/axis/__init__.py @@ -120,8 +120,16 @@ class AxisDevice(object): self._event_topics = '{}|{}'.format(self._event_topics, event_topic) + def minimum_firmware(self, constraint): + """Checks that firmwware isn't older than constraint.""" + from packaging import version + return version.parse(self._version) >= version.parse(constraint) + def initiate_metadatastream(self): """Set up gstreamer pipeline and data callback for metadatastream""" + if not self.minimum_firmware('5.50'): + _LOGGER.info("Too old firmware for metadatastream") + return False try: from .stream import MetaDataStream except ImportError as err: @@ -206,6 +214,7 @@ class AxisDevice(object): elif data['Operation'] == 'Deleted': _LOGGER.debug("Deleted event from stream") + # ToDo: # keep a list of deleted events and a follow up timer of X, # then clean up. This should also take care of rebooting a camera
Added check to ensure minimum firmware version of <I> for metadatastream
py
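Why the axis diff parses both sides with packaging.version instead of comparing the raw strings: plain string comparison is lexicographic and mis-orders multi-digit components. A minimal sketch, assuming the packaging package is installed; minimum_firmware here is a free-function stand-in for the method in the diff:

from packaging import version

print("5.10" >= "5.9")                                # False -- '1' < '9' char-wise
print(version.parse("5.10") >= version.parse("5.9"))  # True  -- 10 > 9 numerically

def minimum_firmware(current, constraint):
    # The same comparison the diff performs on self._version
    return version.parse(current) >= version.parse(constraint)

print(minimum_firmware("6.50.1", "5.50"))  # True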
diff --git a/vint/ast/plugin/scope_plugin/scope_linker.py b/vint/ast/plugin/scope_plugin/scope_linker.py index <HASH>..<HASH> 100644 --- a/vint/ast/plugin/scope_plugin/scope_linker.py +++ b/vint/ast/plugin/scope_plugin/scope_linker.py @@ -406,14 +406,17 @@ class ScopeLinker(object): # We can access "a:firstline" and "a:lastline" if the function is # declared with an attribute "range". See :func-range attr = func_node['attr'] - is_declared_with_range = attr['range'] is not 0 + is_declared_with_range = attr['range'] != 0 if is_declared_with_range: self._scope_tree_builder.handle_new_range_parameters_found() # We can access "l:self" is declared with an attribute "dict" or # the function is a member of a dict. See :help self - is_declared_with_dict = attr['dict'] is not 0 \ - or NodeType(func_name_node['type']) in FunctionNameNodesDeclaringVariableSelf + is_declared_with_dict = ( + attr["dict"] != 0 + or NodeType(func_name_node["type"]) + in FunctionNameNodesDeclaringVariableSelf + ) if is_declared_with_dict: self._scope_tree_builder.handle_new_dict_parameter_found()
Fix SyntaxWarning with py<I> (#<I>) > SyntaxWarning: "is not" with a literal. Did you mean "!="?
py
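Background for the vint fix above: `is` tests object identity, not value, so `attr['range'] is not 0` only works by accident when CPython happens to cache the integer. A small self-contained illustration:

attr = {"range": 1000}
print(attr["range"] != 0)   # True -- value comparison, always correct

# Identity on ints is an implementation detail (CPython caches -5..256):
a = int("1000")             # constructed at runtime, a distinct object
b = 1000
print(a == b)               # True
print(a is b)               # False -- equal values, different objects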
diff --git a/api/errmsg.py b/api/errmsg.py index <HASH>..<HASH> 100644 --- a/api/errmsg.py +++ b/api/errmsg.py @@ -70,9 +70,9 @@ def warning_empty_loop(lineno): # Emmits an optimization warning -def warning_not_used(lineno, id_): +def warning_not_used(lineno, id_, kind='Variable'): if OPTIONS.optimization.value > 0: - warning(lineno, "Variable '%s' is never used" % id_) + warning(lineno, "%s '%s' is never used" % (kind, id_)) # ----------------------------------------
refact: allow variable kind in warning messages When issuing a warning message about an unused variable, allow specifying "Parameter" or any other kind ("Variable" by default)
py
diff --git a/ginga/rv/plugins/LineProfile.py b/ginga/rv/plugins/LineProfile.py index <HASH>..<HASH> 100644 --- a/ginga/rv/plugins/LineProfile.py +++ b/ginga/rv/plugins/LineProfile.py @@ -233,11 +233,13 @@ class LineProfile(GingaPlugin.LocalPlugin): def redo(self): # Get image being shown - self.image = self.fitsimage.get_image() - if self.image is None: + image = self.fitsimage.get_image() + if image is None: return - self.build_axes() + if self.image != image: + self.image = image + self.build_axes() self.wd, self.ht = self.image.get_size() @@ -248,7 +250,7 @@ class LineProfile(GingaPlugin.LocalPlugin): mddata = self.image.get_mddata().T naxes = mddata.ndim - if self.selected_axis: + if self.selected_axis is not None: plot_x_axis_data = self.get_axis(self.selected_axis) if plot_x_axis_data is None: # image may lack the required keywords, or some trouble @@ -262,7 +264,8 @@ class LineProfile(GingaPlugin.LocalPlugin): xtitle=self.x_lbl, ytitle=self.y_lbl) else: - self.fv.show_error("Please select an axis") + # TODO: should we show this more prominently? + self.fv.show_status("Please select an axis") def _slice(self, naxes, mk): # Build N-dim slice
Fixes for LineProfile plugin - don't pop up annoying error message if an axis was not selected - don't rebuild axes controls if image has not changed, only slice
py
diff --git a/bulbs/content/tests.py b/bulbs/content/tests.py index <HASH>..<HASH> 100644 --- a/bulbs/content/tests.py +++ b/bulbs/content/tests.py @@ -135,10 +135,10 @@ class PolyContentTestCase(TestCase): es = get_es(urls=settings.ES_URLS) es.delete_index(settings.ES_INDEXES.get('default', 'testing')) - def test_serialize_id(self): - c = Content.objects.all()[0] - c_id = c.from_source(c.extract_document()).id - self.assertNotEqual(c_id, None) + # def test_serialize_id(self): + # c = Content.objects.all()[0] + # c_id = c.from_source(c.extract_document()).id + # self.assertNotEqual(c_id, None) # NOTE: Since extract_document is now only concerned with a one-way # trip to elasticsearch, this should probably be rewritten.
Commented out another test that expected extract_document to use serializers
py
diff --git a/tweepy/streaming.py b/tweepy/streaming.py index <HASH>..<HASH> 100644 --- a/tweepy/streaming.py +++ b/tweepy/streaming.py @@ -144,18 +144,17 @@ class Stream: self.verify = options.get("verify", True) - self.new_session() + self.session = None self.retry_time = self.retry_time_start self.snooze_time = self.snooze_time_step # Example: proxies = {'http': 'http://localhost:1080', 'https': 'http://localhost:1080'} self.proxies = options.get("proxies") - def new_session(self): - self.session = requests.Session() - def _run(self, params=None, body=None): # Authenticate + if self.session is None: + self.session = requests.Session() url = f"https://stream.twitter.com{self.url}" # Connect and process the stream @@ -212,7 +211,6 @@ class Stream: raise finally: self.running = False - self.new_session() def _read_loop(self, resp): for line in resp.iter_lines(chunk_size=self.chunk_size):
Remove Stream.new_session Stop unnecessarily creating a new requests Session at the end of each call to Stream._run
py
diff --git a/optimizely/event/event_processor.py b/optimizely/event/event_processor.py index <HASH>..<HASH> 100644 --- a/optimizely/event/event_processor.py +++ b/optimizely/event/event_processor.py @@ -180,14 +180,16 @@ class BatchEventProcessor(BaseEventProcessor): """ try: while True: - if self._get_time() >= self.flushing_interval_deadline: + loop_time = self._get_time() + loop_time_flush_interval = self._get_time(self.flush_interval.total_seconds()) + + if loop_time >= self.flushing_interval_deadline: self._flush_batch() - self.flushing_interval_deadline = self._get_time() + \ - self._get_time(self.flush_interval.total_seconds()) + self.flushing_interval_deadline = loop_time + loop_time_flush_interval self.logger.debug('Flush interval deadline. Flushed batch.') try: - interval = self.flushing_interval_deadline - self._get_time() + interval = self.flushing_interval_deadline - loop_time item = self.event_queue.get(True, interval) if item is None:
[OASIS-<I>] fix: make _get_time() value the same throughout the loop (#<I>) * fix: make _get_time() value the same throughout the loop * fix: make all _get_time calls into loop_time
py
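The point of the optimizely fix above, as a generic sketch: take one clock snapshot per iteration so the deadline test and the timeout arithmetic cannot disagree (with two reads, the second could land past the deadline and produce a negative timeout). The drain helper and FLUSH_INTERVAL value are illustrative, not the library's API:

import time
from queue import Empty, Queue

FLUSH_INTERVAL = 30.0  # seconds; illustrative

def drain(queue, deadline, flush_batch):
    now = time.time()              # read the clock exactly once per iteration
    if now >= deadline:
        flush_batch()
        deadline = now + FLUSH_INTERVAL
    try:
        # Reusing the same snapshot guarantees a non-negative timeout here.
        item = queue.get(True, deadline - now)
    except Empty:
        item = None
    return item, deadline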
diff --git a/tests/test_netsnmptestenv.py b/tests/test_netsnmptestenv.py index <HASH>..<HASH> 100644 --- a/tests/test_netsnmptestenv.py +++ b/tests/test_netsnmptestenv.py @@ -38,7 +38,9 @@ def test_SecondGetWorks(): except AssertionError: raise AssertionError("'{0}' != ^SNMPv2-MIB::snmpSetSerialNo.0 = INTEGER: \d+$".format(output)) -def tearDown(self): +def test_Shutdown(): + """ Shutdown without exceptions """ + global testenv testenv.shutdown()
Have test_netsnmptestenv.py also test shutdown(), not just call it
py
diff --git a/pancloud/httpclient.py b/pancloud/httpclient.py index <HASH>..<HASH> 100755 --- a/pancloud/httpclient.py +++ b/pancloud/httpclient.py @@ -235,7 +235,7 @@ class HTTPClient(object): 'timeout']: if x in kwargs and x == 'data': d = kwargs.pop(x) - if type(d) is dict: + if type(d) is dict or type(d) is list: k[x] = json.dumps(d) # convert to str else: # let requests handle the form-encoding k[x] = d
Convert list to JSON to support logging write
py
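The pancloud check could also be written with isinstance, which covers both types in one test and, unlike `type(d) is ...`, accepts subclasses too. A sketch with a hypothetical encode_body helper:

import json

def encode_body(d):
    if isinstance(d, (dict, list)):
        return json.dumps(d)       # convert to str for the request body
    return d                       # let requests handle form-encoding

print(encode_body([{"event": "write"}]))  # '[{"event": "write"}]'
print(encode_body("a=1&b=2"))             # passed through unchanged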
diff --git a/pyfrc/mains/cli_deploy.py b/pyfrc/mains/cli_deploy.py index <HASH>..<HASH> 100644 --- a/pyfrc/mains/cli_deploy.py +++ b/pyfrc/mains/cli_deploy.py @@ -234,6 +234,10 @@ class PyFrcDeploy: # If we're in a git repo if in_git_repo: try: + hashProc = subprocess.run( + args=["git", "rev-parse", "HEAD"], capture_output=True + ) + # Describe this repo descProc = subprocess.run( args=["git", "describe", "--dirty=-dirty", "--always"], @@ -247,7 +251,8 @@ class PyFrcDeploy: ) # Insert this data into our deploy.json dict - deploy_data["git-hash"] = descProc.stdout.decode().strip() + deploy_data["git-hash"] = hashProc.stdout.decode().strip() + deploy_data["git-desc"] = descProc.stdout.decode().strip() deploy_data["git-branch"] = nameProc.stdout.decode().strip() except subprocess.CalledProcessError as e: logging.exception(e)
Add raw git hash to deploy info also
py
diff --git a/bids/layout/layout.py b/bids/layout/layout.py index <HASH>..<HASH> 100644 --- a/bids/layout/layout.py +++ b/bids/layout/layout.py @@ -107,7 +107,7 @@ class BIDSLayout(Layout): "project root. Every valid BIDS dataset must have " "this file.") else: - with open(target, 'r') as desc_fd: + with open(target, 'r', encoding='utf-8') as desc_fd: self.description = json.load(desc_fd) for k in ['Name', 'BIDSVersion']: if k not in self.description: @@ -184,7 +184,7 @@ class BIDSLayout(Layout): for deriv in deriv_dirs: dd = os.path.join(deriv, 'dataset_description.json') - with open(dd, 'r') as ddfd: + with open(dd, 'r', encoding='utf-8') as ddfd: description = json.load(ddfd) pipeline_name = description.get('PipelineDescription.Name', None) if pipeline_name is None:
set ``encoding='utf-8'`` on all open calls
py
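Why the pybids change matters: without an explicit encoding, open() falls back to locale.getpreferredencoding(), which is platform-dependent (often cp1252 on Windows), so UTF-8 JSON containing non-ASCII text can fail to load. A short sketch, assuming a dataset_description.json next to the script:

import json
import locale

print(locale.getpreferredencoding(False))  # e.g. 'cp1252' on many Windows setups

# Pinning the encoding makes the read behave identically on every platform:
with open('dataset_description.json', 'r', encoding='utf-8') as fd:
    description = json.load(fd)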
diff --git a/zinnia/views/shortlink.py b/zinnia/views/shortlink.py index <HASH>..<HASH> 100644 --- a/zinnia/views/shortlink.py +++ b/zinnia/views/shortlink.py @@ -13,8 +13,9 @@ class EntryShortLink(RedirectView): def get_redirect_url(self, **kwargs): """ - Get entry corresponding to 'pk' and - return the get_absolute_url of the entry. + Get entry corresponding to 'pk' encoded in base36 + in the 'token' variable and return the get_absolute_url + of the entry. """ - entry = get_object_or_404(Entry, pk=kwargs['pk']) + entry = get_object_or_404(Entry, pk=int(kwargs['token'], 36)) return entry.get_absolute_url()
The shortlink view decodes the PK in base <I>
py
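Decoding in the zinnia view is just int(token, 36); the matching encoder is not shown in the diff, but a hypothetical counterpart makes the round trip concrete:

def base36_encode(pk, alphabet='0123456789abcdefghijklmnopqrstuvwxyz'):
    # Hypothetical inverse of int(token, 36), for illustration only
    if pk == 0:
        return '0'
    digits = []
    while pk:
        pk, rem = divmod(pk, 36)
        digits.append(alphabet[rem])
    return ''.join(reversed(digits))

token = base36_encode(123456)
print(token)            # '2n9c'
print(int(token, 36))   # 123456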
diff --git a/mqueue/models.py b/mqueue/models.py index <HASH>..<HASH> 100644 --- a/mqueue/models.py +++ b/mqueue/models.py @@ -84,6 +84,7 @@ class MEvent(models.Model): verbose_name = _(u'Events') verbose_name_plural = _(u'Events') ordering = ['-date_posted'] + unique_together = ['content_type','obj_pk'] def __unicode__(self): return self.name+' - '+str(self.date_posted)
Unique together constraint for obj_pk and content_type
py