diff
stringlengths
139
3.65k
message
stringlengths
8
627
diff_languages
stringclasses
1 value
diff --git a/unyt/dimensions.py b/unyt/dimensions.py index <HASH>..<HASH> 100644 --- a/unyt/dimensions.py +++ b/unyt/dimensions.py @@ -248,14 +248,8 @@ def accepts(**arg_units): Decorated function. """ - number_of_args = f.__code__.co_argcount names_of_args = f.__code__.co_varnames - assert len(arg_units) == number_of_args, ( - f"decorator number of arguments not equal with " - f"function number of arguments in '{f.__name__}'" - ) - @wraps(f) def new_f(*args, **kwargs): """The new function being returned from the decorator.
Function might have more args than decorator
py
diff --git a/bigchaindb/config_utils.py b/bigchaindb/config_utils.py index <HASH>..<HASH> 100644 --- a/bigchaindb/config_utils.py +++ b/bigchaindb/config_utils.py @@ -3,8 +3,8 @@ The value of each BigchainDB Server configuration setting is determined according to the following rules: -* If it’s set by an environment variable, then use that value -* Otherwise, if it’s set in a local config file, then use that +* If it's set by an environment variable, then use that value +* Otherwise, if it's set in a local config file, then use that value * Otherwise, use the default value (contained in ``bigchaindb.__init__``)
Changed it’s to it's in 2 places
py
diff --git a/salt/modules/logrotate.py b/salt/modules/logrotate.py index <HASH>..<HASH> 100644 --- a/salt/modules/logrotate.py +++ b/salt/modules/logrotate.py @@ -110,7 +110,9 @@ def set_(key, value, setting=None, conf_file=default_conf): salt '*' logrotate.set rotate 2 Can also be used to set a single value inside a multiline configuration - block. For instance, to change rotate in the following block:: + block. For instance, to change rotate in the following block: + + .. code-block:: text /var/log/wtmp { monthly
Changing python code block to text block
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100755 --- a/setup.py +++ b/setup.py @@ -34,7 +34,8 @@ requirements = [ 'cryptography', 'jwcrypto', 'six', - 'requests' + 'requests', + 'pyasn1 < 0.3', # https://pagure.io/freeipa/issue/7082 ] # extra requirements
Add workaround for pyasn1 <I> issue with FreeIPA pyasn1 <I> broke FreeIPA because ASN1 types no longer take positional arguments in __init__. It broke univ.SequenceOf(char.GeneralString()). <URL>
py
diff --git a/pyvex/IRStmt/__init__.py b/pyvex/IRStmt/__init__.py index <HASH>..<HASH> 100644 --- a/pyvex/IRStmt/__init__.py +++ b/pyvex/IRStmt/__init__.py @@ -72,7 +72,7 @@ class Dirty(IRStmt): class Exit(IRStmt): def __str__(self): - return "if (%s) goto {%s} %s" % (self.guard, self.jumpkind, hex(self.dst.value)) + return "if (%s) { PUT(%d) = %s; %s }" % (self.guard, self.offsIP, hex(self.dst.value), self.jumpkind) class LoadG(IRStmt): def __str__(self):
Fixes #4 - better pretty printing for Fish
py
diff --git a/salt/modules/tomcat.py b/salt/modules/tomcat.py index <HASH>..<HASH> 100644 --- a/salt/modules/tomcat.py +++ b/salt/modules/tomcat.py @@ -42,6 +42,7 @@ Notes: # Import python libs import glob +import hashlib import urllib import urllib2 import tempfile @@ -461,6 +462,37 @@ def deploy_war(war, context, force='no', url='http://localhost:8080/manager', en return res +def passwd(user, passwd, alg='md5', realm=None): + ''' + This function replaces the $CATALINS_HOME/bin/digest.sh script + convert a clear-text password to be hashed in $CATALINA_BASE/conf/tomcat-users.xml file + + CLI Examples:: + + salt '*' tomcat.passwd tomcat secret + salt '*' tomcat.passwd tomcat secret sha1 + salt '*' tomcat.passwd tomcat secret sha1 'Protected Realm' + ''' + + if alg == 'md5': + m = hashlib.md5() + elif alg == 'sha1': + m = hashlib.sha1() + else: + return False + + if realm: + m.update('{0}:{1}:{2}'.format( + user, + realm, + passwd, + )) + else: + m.update(passwd) + + return m.hexdigest() + + # Non-Manager functions def version(): '''
tomcat.passwd function, convert clear-text password to tomcat-users.xml file
py
diff --git a/treeherder/config/whitenoise_custom.py b/treeherder/config/whitenoise_custom.py index <HASH>..<HASH> 100644 --- a/treeherder/config/whitenoise_custom.py +++ b/treeherder/config/whitenoise_custom.py @@ -13,7 +13,7 @@ class CustomWhiteNoise(WhiteNoiseMiddleware): # Matches grunt-cache-bust's style of hash filenames. eg: # index.min-e10ba468ffc8816a.js - IMMUTABLE_FILE_RE = re.compile(r'\.min-[a-f0-9]{16,}\.(js|css)$') + IMMUTABLE_FILE_RE = re.compile(r'\.min\.[a-f0-9]{16,}\.(js|css)$') INDEX_NAME = 'index.html' def update_files_dictionary(self, *args):
Bug <I> - Really fix regex for long max-age static assets The previous change adjusted the hash length, however the filenames also no longer use a hyphen as a separator, eg: index.min.fe5b2cd9a<I>c9d1.css
py
diff --git a/MicroTokenizer/CRF/crf_tokenizer.py b/MicroTokenizer/CRF/crf_tokenizer.py index <HASH>..<HASH> 100644 --- a/MicroTokenizer/CRF/crf_tokenizer.py +++ b/MicroTokenizer/CRF/crf_tokenizer.py @@ -85,7 +85,8 @@ class CRFTokenizer(BaseTokenizer): pickle_file = self.get_char2feature_file(output_dir) with open(pickle_file, 'wb') as fd: - cloudpickle.dump(self.crf_trainer.char2feature_func, fd) + # using protocol=2 to keep compatible with python 2 + cloudpickle.dump(self.crf_trainer.char2feature_func, fd, protocol=2) def assign_from_loader(self, *args, **kwargs): self.crf_tagger = kwargs['crf_tagger']
Bugfix: python pickle protocol version compatible with python 2
py
diff --git a/util/io/fs.py b/util/io/fs.py index <HASH>..<HASH> 100644 --- a/util/io/fs.py +++ b/util/io/fs.py @@ -246,17 +246,6 @@ def remove_recursively(directory, force=False, not_exist_okay=False, exclude_dir topdown=False) -## utility functions - -def flush_and_close(file): - file.flush() - os.fsync(file.fileno()) - file.close() - while not os.path.exists(file.name): - util.logging.warning('File {} is not available after flush and fsync. Waiting.'.format(file.name)) - time.sleep(1) - - ## fd functions def fd_is_file(fd, file, not_exist_okay=False):
API: util.io.fs.flush_and_close removed
py
diff --git a/seqmagick/scripts/cli.py b/seqmagick/scripts/cli.py index <HASH>..<HASH> 100644 --- a/seqmagick/scripts/cli.py +++ b/seqmagick/scripts/cli.py @@ -24,7 +24,7 @@ def main(argv=sys.argv[1:]): logformat = '%(message)s' # set up logging - logging.basicConfig(stream=sys.stdout, format=logformat, level=loglevel) + logging.basicConfig(stream=sys.stderr, format=logformat, level=loglevel) return action(arguments)
Send logging to stderr, not stdout In order to support Unix piping and redirection, any error messages and logging should go to stderr, only data should go to stdout. Should close issue #<I>
py
diff --git a/dvc/project.py b/dvc/project.py index <HASH>..<HASH> 100644 --- a/dvc/project.py +++ b/dvc/project.py @@ -97,7 +97,9 @@ class Project(object): return proj def _ignore(self): - l = [self.link_state.state_file, + l = [self.state.state_file, + self.state._lock_file.lock_file, + self.link_state.state_file, self.link_state._lock_file.lock_file, self.lock.lock_file, self.config.config_local_file,
dvc: don't forget to add state and state.lock to gitignore
py
diff --git a/vespa/stars/trilegal.py b/vespa/stars/trilegal.py index <HASH>..<HASH> 100644 --- a/vespa/stars/trilegal.py +++ b/vespa/stars/trilegal.py @@ -94,7 +94,7 @@ def get_trilegal(filename,ra,dec,folder='.', galactic=False, filterset,maglim,outfile) sp.Popen(cmd,shell=True).wait() if convert_h5: - df = pd.read_table(outfile, sep='\s+', skip_footer=1, engine='python') + df = pd.read_table(outfile, sep='\s+', skipfooter=1, engine='python') df = df.rename(columns={'#Gc':'Gc'}) for col in df.columns: if col not in NONMAG_COLS:
change skip_footer to skipfooter A pandas `FutureWarning` explains that `skip_footer` has been deprecated, and to switch to `skipfooter`, as seen by @elisabethadams in #9.
py
diff --git a/salt/modules/gentoolkit.py b/salt/modules/gentoolkit.py index <HASH>..<HASH> 100644 --- a/salt/modules/gentoolkit.py +++ b/salt/modules/gentoolkit.py @@ -3,6 +3,19 @@ Support for Gentoolkit ''' +def _has_gentoolkit(): + if __salt__['pkg.version']('app-portage/gentoolkit'): + return True + return False + +def __virtual__(): + ''' + Only work on Gentoo systems with gentoolkit installed + ''' + if __grains__['os'] == 'Gentoo' and _has_gentoolkit(): + return 'gentoolkit' + return False + def revdep_rebuild(lib=None): ''' Fix up broken reverse dependencies
Added virtual to gentoolkit
py
diff --git a/jax/core.py b/jax/core.py index <HASH>..<HASH> 100644 --- a/jax/core.py +++ b/jax/core.py @@ -632,7 +632,7 @@ def find_top_trace(args) -> Optional[Tracer]: raise TypeError(f"Argument '{arg}' of type {type(arg)} is not a valid JAX type") return top_so_far - top_trace = reduce(check_arg, args, None) + top_trace = reduce(check_arg, args, None) # type: ignore[wrong-arg-types] if top_trace is not None: return type(top_trace)(top_trace.master, cur_sublevel()) # type: ignore[call-arg] else:
Fix pytype for copybara import (#<I>)
py
diff --git a/salt/state.py b/salt/state.py index <HASH>..<HASH> 100644 --- a/salt/state.py +++ b/salt/state.py @@ -981,6 +981,7 @@ class State(object): elif status == 'change': ret = self.call(low) if not ret['changes']: + low['sfun'] = low['fun'] low['fun'] = 'mod_watch' ret = self.call(low) running[tag] = ret
Pass state function into low data for mod_watch calls
py
diff --git a/genmodel/manager.py b/genmodel/manager.py index <HASH>..<HASH> 100644 --- a/genmodel/manager.py +++ b/genmodel/manager.py @@ -7,6 +7,7 @@ import logging import os import psycopg2 import requests +import shlex import subprocess import tarfile import threading @@ -238,11 +239,11 @@ def run_job(job_description, job_id, job_name, labeled_data_fname, playbook_fnam # set environment variables, create ssh tunnels, start jobs logger.info("installing dependencies and starting jobs on remote droplet(s)") hosts_string = ','.join([str(d_uid) for d_uid in droplet_uids]) - ansible_command = 'ansible-playbook {} -i \ + ansible_command = 'bash ansible-playbook {} -i \ /etc/ansible/digital_ocean.py -e \ hosts_string={} -e job_id={} -e job_name={}'.format( playbook_fname, hosts_string, job_id, job_name) - output = subprocess.check_output(['bash','-c', ansible_command]) + output = subprocess.check_output(shlex.split(ansible_command)) logger.info("droplets working, job {}-{} started successfully".format( job_name, job_id)) logger.info(output)
added shlex for splitting bash ansible command called by python; simplified the command
py
diff --git a/ryu/services/protocols/bgp/operator/commands/show/neighbor.py b/ryu/services/protocols/bgp/operator/commands/show/neighbor.py index <HASH>..<HASH> 100644 --- a/ryu/services/protocols/bgp/operator/commands/show/neighbor.py +++ b/ryu/services/protocols/bgp/operator/commands/show/neighbor.py @@ -102,7 +102,7 @@ class SentRoutes(Command): aspath = path.get('as_path') origin = path.get('origin') if origin: - aspath.append(origin) + aspath = aspath + [origin] next_hop = path.get('nexthop') med = path.get('metric')
bgp/cli: fix internal data destruction due to cli show command
py
diff --git a/python_modules/libraries/dagster-celery/dagster_celery/cli.py b/python_modules/libraries/dagster-celery/dagster_celery/cli.py index <HASH>..<HASH> 100644 --- a/python_modules/libraries/dagster-celery/dagster_celery/cli.py +++ b/python_modules/libraries/dagster-celery/dagster_celery/cli.py @@ -179,7 +179,7 @@ def worker_start_command( env = os.environ.copy() if pythonpath is not None: - env["PYTHONPATH"] = "{existing_pythonpath}{pythonpath}:".format( + env["PYTHONPATH"] = "{existing_pythonpath}:{pythonpath}:".format( existing_pythonpath=env.get("PYTHONPATH", ""), pythonpath=pythonpath )
[dagster_celery] fix bug in pythonpath (#<I>)
py
diff --git a/misc/templatetags/share_buttons.py b/misc/templatetags/share_buttons.py index <HASH>..<HASH> 100644 --- a/misc/templatetags/share_buttons.py +++ b/misc/templatetags/share_buttons.py @@ -89,6 +89,7 @@ like_functions = [tweet_like, facebook_like, vk_like] def group_buttons(url, title, funcs, block_class): url = current_site_url() + url + url = url.encode('utf-8') title = title.encode('utf-8') res = "<div class=\"%s\">" % block_class for f in funcs:
fix issue with unicode urls
py
diff --git a/nupic/algorithms/anomaly_likelihood.py b/nupic/algorithms/anomaly_likelihood.py index <HASH>..<HASH> 100644 --- a/nupic/algorithms/anomaly_likelihood.py +++ b/nupic/algorithms/anomaly_likelihood.py @@ -480,7 +480,7 @@ def _anomalyScoreMovingAverage(anomalyScores, for record in anomalyScores: # Skip (but log) records without correct number of entries - if not isinstance(record, list) or len(record) != 3: + if not isinstance(record, list or tuple) or len(record) != 3: if verbosity >= 1: print "Malformed record:", record continue
accepts anomaly records as both lists and tuples
py
diff --git a/src/python/test/test_dxclient.py b/src/python/test/test_dxclient.py index <HASH>..<HASH> 100755 --- a/src/python/test/test_dxclient.py +++ b/src/python/test/test_dxclient.py @@ -5535,12 +5535,8 @@ class TestDXCp(DXTestCase): fname1 = self.gen_uniq_fname() create_file_in_project(fname1, self.proj_id1) - # The file {proj_id1}:/{f} exists, however, {proj_id1}/{f} does - # not. We want to see an error message that reflects this; it - # should refer to the path /{proj_id1}, which has been perhaps - # unintentionally interpreted as a folder. - expected_err_msg = "ResolutionError: The folder could not be found in {p}".format( - f=self.proj_id1, p=self.project) + # The file {proj_id1}:/{f} exists, however, {proj_id1}/{f} does not + expected_err_msg = "ResolutionError: The folder could not be found in {p}".format(p=self.project) with self.assertSubprocessFailure(stderr_regexp=expected_err_msg, exit_code=3): run("dx cp {p1}/{f} {p2}:/".format(p1=self.proj_id1, f=fname1, p2=self.proj_id2))
Fixing nonexistent folder test based on code review feedback
py
diff --git a/backtrader/feeds/ibdata.py b/backtrader/feeds/ibdata.py index <HASH>..<HASH> 100644 --- a/backtrader/feeds/ibdata.py +++ b/backtrader/feeds/ibdata.py @@ -253,7 +253,7 @@ class IBData(with_metaclass(MetaIBData, DataBase)): def islive(self): '''Returns ``True`` to notify ``Cerebro`` that preloading and runonce should be deactivated''' - return True + return True if not self.p.historical else False def __init__(self, **kwargs): self.ib = self._store(**kwargs)
Mark as live feed only if historical download has not been activated
py
diff --git a/salt/states/kmod.py b/salt/states/kmod.py index <HASH>..<HASH> 100644 --- a/salt/states/kmod.py +++ b/salt/states/kmod.py @@ -120,12 +120,18 @@ def present(name, persist=False, mods=None): # The remaining modules are not loaded and are available for loading available = list(set(not_loaded) - set(unavailable)) loaded = {'yes': [], 'no': [], 'failed': []} + loaded_by_dependency = [] for mod in available: + if mod in loaded_by_dependency: + loaded['yes'].append(mod) + continue load_result = __salt__['kmod.load'](mod, persist) if isinstance(load_result, (list, tuple)): if len(load_result) > 0: for module in load_result: ret['changes'][module] = 'loaded' + if module != mod: + loaded_by_dependency.append(module) loaded['yes'].append(mod) else: ret['result'] = False
update states kmod * track list of loaded by dependency modules
py
diff --git a/tests/backends/sqlalchemy/test_storage.py b/tests/backends/sqlalchemy/test_storage.py index <HASH>..<HASH> 100644 --- a/tests/backends/sqlalchemy/test_storage.py +++ b/tests/backends/sqlalchemy/test_storage.py @@ -111,11 +111,19 @@ class TestCategoryManager(): assert category.equal_fields(db_instance) def test_update_without_pk(self, alchemy_store, alchemy_category_factory): + """Make sure that passing a category without a PK raises an error.""" category = alchemy_category_factory().as_hamster() category.pk = None with pytest.raises(ValueError): alchemy_store.categories._update(category) + def test_update_invalid_pk(self, alchemy_store, alchemy_category_factory): + """Make sure that passing a category with a non existing PK raises an error.""" + category = alchemy_category_factory().as_hamster() + category.pk = category.pk + 10 + with pytest.raises(KeyError): + alchemy_store.categories._update(category) + def test_update_existing_name(self, alchemy_store, alchemy_category_factory): """Make sure that renaming a given alchemy_category to a taken name throws an error.""" category_1, category_2 = (alchemy_category_factory(), alchemy_category_factory())
Add test for SQLAlchemy Category._update Added test that mimics trying to update a non existing ``Category``. That is the passed ``Category`` instance has a PK that is not present in our backend.
py
diff --git a/pydot.py b/pydot.py index <HASH>..<HASH> 100644 --- a/pydot.py +++ b/pydot.py @@ -1606,7 +1606,7 @@ class Cluster(Graph): if obj_dict is None: self.obj_dict['type'] = 'subgraph' - self.obj_dict['name'] = 'cluster_'+graph_name + self.obj_dict['name'] = quote_if_necessary('cluster_'+graph_name) self.create_attribute_methods(CLUSTER_ATTRIBUTES)
API: quote cluster names when necessary It is odd (and can cause problems) that cluster names are not quoted when necessary, though this is done in the methods: - `Node.__init__` - `Edge.__init__` - `Graph.__init__`
py
diff --git a/openquake/calculators/views.py b/openquake/calculators/views.py index <HASH>..<HASH> 100644 --- a/openquake/calculators/views.py +++ b/openquake/calculators/views.py @@ -754,6 +754,16 @@ def view_global_poes(token, dstore): return rst_table(tbl, header=header) +@view.add('global_gmfs') +def view_global_gmfs(token, dstore): + """ + Display GMFs averaged on everything for debugging purposes + """ + imtls = dstore['oqparam'].imtls + row = dstore['gmf_data/data']['gmv'].mean(axis=0) + return rst_table([row], header=imtls) + + @view.add('mean_disagg') def view_mean_disagg(token, dstore): """
Added view global_gmfs [skip CI] Former-commit-id: dee<I>d<I>fbabd4e<I>a8cb<I>d<I>e<I>bbd<I> [formerly 5d<I>e<I>aa6fb<I>fb<I>] Former-commit-id: e0badbacec<I>cd<I>f7b2a3e<I>be<I>
py
diff --git a/project_example/project_example/models.py b/project_example/project_example/models.py index <HASH>..<HASH> 100644 --- a/project_example/project_example/models.py +++ b/project_example/project_example/models.py @@ -22,7 +22,7 @@ class SalmonellaTest(models.Model): blank=True, null=True) rawid_many = models.ManyToManyField('auth.User', - related_name='rawid_many', blank=True, null=True) + related_name='rawid_many', blank=True) rawid_fk_direct_pk = models.ForeignKey(DirectPrimaryKeyModel, related_name='rawid_fk_direct_pk', blank=True, null=True) @@ -36,7 +36,7 @@ class SalmonellaTest(models.Model): blank=True, null=True) salmonella_many = models.ManyToManyField('auth.User', - related_name='salmonella_many', blank=True, null=True) + related_name='salmonella_many', blank=True) salmonella_fk_direct_pk = models.ForeignKey(DirectPrimaryKeyModel, related_name='salmonella_fk_direct_pk', blank=True, null=True)
Removed null from m2m fields.
py
diff --git a/src/Yowsup/connectionmanager.py b/src/Yowsup/connectionmanager.py index <HASH>..<HASH> 100644 --- a/src/Yowsup/connectionmanager.py +++ b/src/Yowsup/connectionmanager.py @@ -436,7 +436,7 @@ class YowsupConnectionManager: def sendChangeStatus(self,status): self._d("updating status to: %s"%(status)) - bodyNode = ProtocolTreeNode("body",None,None,status.encode('utf-8')); + bodyNode = ProtocolTreeNode("body",None,None,status); messageNode = self.getMessageNode("s.us",bodyNode) self._writeNode(messageNode); @@ -446,7 +446,7 @@ class YowsupConnectionManager: @sendMessage def sendText(self,jid, content): - return ProtocolTreeNode("body",None,None,content.encode('utf-8')); + return ProtocolTreeNode("body",None,None,content); @sendMessage @mediaNode @@ -558,7 +558,7 @@ class YowsupConnectionManager: self._writeNode(iqNode) def sendSetGroupSubject(self,gjid,subject): - subject = subject.encode('utf-8') + #subject = subject.encode('utf-8') #self._d("setting group subject of " + gjid + " to " + subject) idx = self.makeId("set_group_subject_") self.readerThread.requests[idx] = self.readerThread.parseGroupSubject
Removed subject and status encoding, any encoding should be at frontend
py
diff --git a/oauth2client/client.py b/oauth2client/client.py index <HASH>..<HASH> 100644 --- a/oauth2client/client.py +++ b/oauth2client/client.py @@ -760,6 +760,8 @@ class OAuth2Credentials(Credentials): d = json.loads(content) if 'error' in d: error_msg = d['error'] + if 'error_description' in d: + error_msg += ': ' + d['error_description'] self.invalid = True if self.store: self.store.locked_put(self)
Added error description to AccessTokenRefreshError The JSON response returned can contain an "error_description" field that contains additional information about the error. If found, appending to the error message.
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -25,5 +25,6 @@ setup( "License :: OSI Approved :: BSD License", ], tests_require=["Django>=1.3", "webtest", "django-webtest"], + include_package_data=True, test_suite='runtests.main', )
Include package files when installing from PyPI
py
diff --git a/werkzeug/routing.py b/werkzeug/routing.py index <HASH>..<HASH> 100644 --- a/werkzeug/routing.py +++ b/werkzeug/routing.py @@ -477,7 +477,7 @@ class Rule(RuleFactory): return -1 elif sw < ow: return 1 - if len(self._weights) < len(other._weights): + if len(self._weights) > len(other._weights): return -1 if not other.arguments and self.arguments: return 1
reverted one of the routing changes from today because it breaks a unittest --HG-- branch : trunk
py
diff --git a/flask_unchained/bundles/api/model_resource.py b/flask_unchained/bundles/api/model_resource.py index <HASH>..<HASH> 100644 --- a/flask_unchained/bundles/api/model_resource.py +++ b/flask_unchained/bundles/api/model_resource.py @@ -1,4 +1,5 @@ import inspect +from typing import * from flask import current_app, make_response, request from flask_unchained import Resource, route, param_converter, unchained, injectable @@ -31,15 +32,15 @@ class ModelResourceMetaclass(ResourceMetaclass): if mcs_args.is_abstract: return cls - routes = {} + routes: Dict[str, List[Route]] = getattr(cls, CONTROLLER_ROUTES_ATTR) include_methods = set(cls.Meta.include_methods) exclude_methods = set(cls.Meta.exclude_methods) for method_name in ALL_RESOURCE_METHODS: if (method_name in exclude_methods or method_name not in include_methods): - continue + routes.pop(method_name, None) - route = getattr(clsdict.get(method_name), FN_ROUTES_ATTR, [None])[0] + route: Route = getattr(clsdict.get(method_name), FN_ROUTES_ATTR, [None])[0] if not route: route = Route(None, mcs_args.getattr(method_name))
do not wipe out non-automatic-resource-view routes on model resources
py
diff --git a/host/pybar/analysis/analyze_raw_data.py b/host/pybar/analysis/analyze_raw_data.py index <HASH>..<HASH> 100644 --- a/host/pybar/analysis/analyze_raw_data.py +++ b/host/pybar/analysis/analyze_raw_data.py @@ -84,7 +84,7 @@ class AnalyzeRawData(object): ---------- raw_data_file : string or tuple, list A string or a list of strings with the raw data file name(s). File ending (.h5) - does not matter. Does not have to be set. + does not not have to be set. analyzed_data_file : string The file name of the output analyzed data file. File ending (.h5) Does not have to be set. @@ -106,7 +106,7 @@ class AnalyzeRawData(object): else: raw_data_files.append(one_raw_data_file) else: - f_list = analysis_utils.get_data_file_names_from_scan_base(raw_data_file, filter_file_words=None, parameter=True) + f_list = analysis_utils.get_data_file_names_from_scan_base(raw_data_file, filter_file_words=['analyzed', 'interpreted'], parameter=True) if f_list: raw_data_files = f_list elif raw_data_file is not None and os.path.splitext(raw_data_file)[1].strip().lower() != ".h5":
ENH: not not use interpreted and analyzed files as raw_data_file
py
diff --git a/pyxmpp/jabber/vcard.py b/pyxmpp/jabber/vcard.py index <HASH>..<HASH> 100644 --- a/pyxmpp/jabber/vcard.py +++ b/pyxmpp/jabber/vcard.py @@ -391,7 +391,7 @@ class VCardImage(VCardField): if (not self.uri and not self.image): raise Empty,"Bad %s value in vcard" % (name,) else: - if rfc2425parameters.get("value").lower()=="uri": + if rfc2425parameters.get("value", "").lower()=="uri": self.uri=value self.type=None else: @@ -1042,8 +1042,8 @@ class VCardSound(VCardField): VCardField.__init__(self,name) if not rfc2425parameters: rfc2425parameters={} + self.uri,self.sound,self.phonetic=[None]*3 if isinstance(value,libxml2.xmlNode): - self.uri,self.sound,self.phonetic=[None]*3 n=value.children vns=get_node_ns(value) while n:
- last fix applied to two other occurrences of the bug (closes #<I>, again)
py
diff --git a/hypergraph/node.py b/hypergraph/node.py index <HASH>..<HASH> 100644 --- a/hypergraph/node.py +++ b/hypergraph/node.py @@ -1,7 +1,7 @@ from copy import deepcopy -class Node: +class Node(object): def __init__(self, name="", nodeId=-1): self._name = name @@ -32,3 +32,11 @@ class Node: def __str__(self): return self._name + + def __eq__(self, other): + return (isinstance(other, self.__class__) + and self._name == other._name + and self._nodeId == other._nodeId) + + def __ne__(self, other): + return not self.__eq__(other)
added __eq__ and __neq__ operators
py
diff --git a/pyiso.py b/pyiso.py index <HASH>..<HASH> 100644 --- a/pyiso.py +++ b/pyiso.py @@ -717,6 +717,8 @@ class DirectoryRecord(object): # FIXME: we need to implement Ecma-119 section 9.3 for the sorting # order. if self.file_ident == '\x00': + if other.file_ident == '\x00': + return False return True if other.file_ident == '\x00': return False
Fix to __lt__ method of directory sorting.
py
diff --git a/fluent_contents/models/fields.py b/fluent_contents/models/fields.py index <HASH>..<HASH> 100644 --- a/fluent_contents/models/fields.py +++ b/fluent_contents/models/fields.py @@ -170,13 +170,13 @@ class PlaceholderField(PlaceholderRelation): # Configure the revere relation if possible. # TODO: make sure reverse queries work properly - #if self.rel.related_name is None: - # # Make unique for model (multiple models can use same slotnane) - # self.rel.related_name = '{app}_{model}_{slot}'.format( - # app=cls._meta.app_label, - # model=cls._meta.object_name.lower(), - # slot=self.slot - # ) + if self.rel.related_name is None: + # Make unique for model (multiple models can use same slotnane) + self.rel.related_name = '{app}_{model}_{slot}_FIXME'.format( + app=cls._meta.app_label, + model=cls._meta.object_name.lower(), + slot=self.slot + ) def value_from_object(self, obj):
Fix example app / PlaceholderField by enforcing related_name. The name is marked as _FIXME because the reverse traversal does not work yet, so no API guarantee is made except for it being a unique name.
py
diff --git a/salt/transport/zeromq.py b/salt/transport/zeromq.py index <HASH>..<HASH> 100644 --- a/salt/transport/zeromq.py +++ b/salt/transport/zeromq.py @@ -702,8 +702,10 @@ class AsyncReqMessageClient(object): def destroy(self): if hasattr(self, 'stream'): # TODO: Optionally call stream.close() on newer pyzmq? It is broken on some. - self.stream.io_loop.remove_handler(self.stream.socket) self.stream.socket.close() + self.stream.io_loop.remove_handler(self.stream.socket) + # set this to None, more hacks for messed up pyzmq + self.stream.socket = None self.socket.close() self.context.term()
Fix #<I> Workaround for pyzmq's inability to unregister sockets sometimes The stack trace @UtahDave was seeing was due to us trying to double unregister the socket from the ioloop. This code block exists solely to work with pyzmq <I>.x (which does it wrong-- and doesn't work). To maintain compatibility the terribleness must continue-- so we'll magically set the socket to None after we close and unregister it.
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -46,10 +46,19 @@ setup( "Development Status :: 3 - Alpha", "Environment :: Web Environment", "Framework :: Django", + "Framework :: Django :: 1.7", + "Framework :: Django :: 1.8", + "Framework :: Django :: 1.9", "License :: OSI Approved :: BSD License", "Operating System :: OS Independent", "Programming Language :: JavaScript", + "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.7", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.2", + "Programming Language :: Python :: 3.3", + "Programming Language :: Python :: 3.4", + "Programming Language :: Python :: 3.5", "Topic :: Internet :: WWW/HTTP", "Topic :: Internet :: WWW/HTTP :: Dynamic Content", "Topic :: Software Development :: Libraries :: Python Modules",
Updated the package classifiers.
py
diff --git a/salt/modules/grains.py b/salt/modules/grains.py index <HASH>..<HASH> 100644 --- a/salt/modules/grains.py +++ b/salt/modules/grains.py @@ -495,6 +495,13 @@ def get_or_set_hash(name, .. code-block:: bash salt '*' grains.get_or_set_hash 'django:SECRET_KEY' 50 + + .. warning:: + + This function could return strings which may contain characters which are reserved + as directives by the YAML parser, such as strings beginning with `%`. To avoid + issues when using the output of this function in an SLS file containing YAML+Jinja, + surround the call with single quotes. ''' ret = get(name, None)
Add warning to get_or_set_hash about reserved chars Refs #<I>
py
diff --git a/shinken/objects/module.py b/shinken/objects/module.py index <HASH>..<HASH> 100644 --- a/shinken/objects/module.py +++ b/shinken/objects/module.py @@ -74,7 +74,8 @@ class Modules(Items): if plug is not None: new_modules.append(plug) else: - logger.error("[module] unknown %s module from %s" % (plug_name, s.get_name())) + err = "[module] unknown %s module from %s" % (plug_name, s.get_name()) + logger.error(err) s.configuration_errors.append(err) s.modules = new_modules
Keep error message for console error reporting #<I>
py
diff --git a/azure-servicemanagement-legacy/azure/servicemanagement/servicemanagementclient.py b/azure-servicemanagement-legacy/azure/servicemanagement/servicemanagementclient.py index <HASH>..<HASH> 100644 --- a/azure-servicemanagement-legacy/azure/servicemanagement/servicemanagementclient.py +++ b/azure-servicemanagement-legacy/azure/servicemanagement/servicemanagementclient.py @@ -397,10 +397,12 @@ class _ServiceManagementClient(object): return None - def _get_path(self, resource, name): + def _get_path(self, resource, name, suffix=None): path = '/' + self.subscription_id + '/' + resource if name is not None: path += '/' + _str(name) + if suffix is not None: + path += '/' + suffix return path def _get_cloud_services_path(self, cloud_service_id, resource=None, name=None):
Extend get_path to handle more granular endpoints Publishing details, for example, are nested below a typical endpoint path. An optional suffix on the path makes more granular endpoints reachable without requiring any change to existing code.
py
diff --git a/ibis/expr/types/temporal.py b/ibis/expr/types/temporal.py index <HASH>..<HASH> 100644 --- a/ibis/expr/types/temporal.py +++ b/ibis/expr/types/temporal.py @@ -622,6 +622,12 @@ class IntervalValue(Value): __neg__ = negate + @staticmethod + def __negate_op__(): + import ibis.expr.operations as ops + + return ops.Negate + @public class IntervalScalar(Scalar, IntervalValue):
chore: add negate to interval expressions
py
diff --git a/cherrypy/_cprequest.py b/cherrypy/_cprequest.py index <HASH>..<HASH> 100644 --- a/cherrypy/_cprequest.py +++ b/cherrypy/_cprequest.py @@ -35,6 +35,7 @@ class Request(object): self.hooks = tools.HookMap(pts) self.hooks.failsafe = ['on_start_resource', 'on_end_resource', 'on_end_request'] + self.redirections = [] def close(self): if not self.closed: @@ -110,6 +111,7 @@ class Request(object): break except cherrypy.InternalRedirect, ir: pi = ir.path + self.redirections.append(pi) except (KeyboardInterrupt, SystemExit): raise except: @@ -144,9 +146,6 @@ class Request(object): self.hooks.run('before_finalize') cherrypy.response.finalize() except (cherrypy.HTTPRedirect, cherrypy.HTTPError), inst: - # For an HTTPRedirect or HTTPError (including NotFound), - # we don't go through the regular mechanism: - # we return the redirect or error page immediately inst.set_response() self.hooks.run('before_finalize') cherrypy.response.finalize()
Might as well keep track of InternalRedirects.
py
diff --git a/Lib/ufo2ft/fontInfoData.py b/Lib/ufo2ft/fontInfoData.py index <HASH>..<HASH> 100644 --- a/Lib/ufo2ft/fontInfoData.py +++ b/Lib/ufo2ft/fontInfoData.py @@ -75,7 +75,7 @@ def openTypeNameVersionFallback(info): """ versionMajor = getAttrWithFallback(info, "versionMajor") versionMinor = getAttrWithFallback(info, "versionMinor") - return "%d.%s" % (versionMajor, str(versionMinor).zfill(3)) + return "Version %d.%s" % (versionMajor, str(versionMinor).zfill(3)) def openTypeNameUniqueIDFallback(info): """
Include "Version" in version name table entry This is in the OpenType spec: <URL>
py
diff --git a/pub/pub.py b/pub/pub.py index <HASH>..<HASH> 100644 --- a/pub/pub.py +++ b/pub/pub.py @@ -5,7 +5,7 @@ def task(f): return f def run(cmd, *args, **kwargs): - out = run(cmd, *args, **kwargs) + out = envoy.run(cmd, *args, **kwargs) if out.status_code > 0: stdout.write("error running command: %s" % cmd) exit(out.status_code)
let's not recurse infinitely, hmmm?
py
diff --git a/txaio/aio.py b/txaio/aio.py index <HASH>..<HASH> 100644 --- a/txaio/aio.py +++ b/txaio/aio.py @@ -55,6 +55,12 @@ except ImportError: from trollius import iscoroutine from trollius import Future +try: + from types import AsyncGeneratorType # python 3.5+ +except ImportError: + class AsyncGeneratorType(object): + pass + def _create_future_of_loop(loop): return loop.create_future() @@ -397,6 +403,13 @@ class _AsyncioApi(object): return res elif iscoroutine(res): return _create_task(res, loop=self._config.loop) + elif isinstance(res, AsyncGeneratorType): + raise RuntimeError( + "as_future() received an async generator function; does " + "'{}' use 'yield' when you meant 'await'?".format( + str(fun) + ) + ) else: return create_future_success(res)
Warn if as_future() call makes an async-generator
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -16,7 +16,7 @@ def read(fname): setup( name="Pebble", - version="4.0.0", + version="4.1.0", author="Matteo Cafasso", author_email="noxdafox@gmail.com", description=("Threading and multiprocessing eye-candy."),
release <I> - Added map function to ProcessPool and ThreadPool - ProcessPool.schedule timeout defaulted to None
py
diff --git a/photutils/detection/findstars.py b/photutils/detection/findstars.py index <HASH>..<HASH> 100644 --- a/photutils/detection/findstars.py +++ b/photutils/detection/findstars.py @@ -273,6 +273,12 @@ def daofind(data, threshold, fwhm, ratio=1.0, theta=0.0, sigma_radius=1.5, ``-2.5 * log10(flux)``. The derivation matches that of `DAOFIND`_ if ``sky`` is 0.0. + Notes + ----- + For the convolution step, this routine sets pixels beyond the image + borders to 0.0. The equivalent parameters in `DAOFIND`_ are + ``boundary='constant'`` and ``constant=0.0``. + References ---------- .. [1] http://iraf.net/irafhelp.php?val=daofind&help=Help+Page @@ -382,6 +388,10 @@ def irafstarfind(data, threshold, fwhm, sigma_radius=1.5, sharplo=0.5, Notes ----- + For the convolution step, this routine sets pixels beyond the image + borders to 0.0. The equivalent parameters in `starfind`_ are + ``boundary='constant'`` and ``constant=0.0``. + IRAF's `starfind`_ uses ``hwhmpsf`` and ``fradius`` as input parameters. The equivalent input values for ``irafstarfind`` are:
Add notes about convolution in findstars
py
diff --git a/salt/states/ddns.py b/salt/states/ddns.py index <HASH>..<HASH> 100644 --- a/salt/states/ddns.py +++ b/salt/states/ddns.py @@ -90,7 +90,7 @@ def absent(name, zone, data=None, rdtype=None, **kwargs): DNS resource type. If omitted, all types will be purged. ``**kwargs`` - Additional arguments the ddns.update function may need (e.g. keyfile). + Additional arguments the ddns.delete function may need (e.g. keyfile). ''' ret = {'name': name, 'changes': {}, @@ -110,7 +110,10 @@ def absent(name, zone, data=None, rdtype=None, **kwargs): elif status: ret['result'] = True ret['comment'] = 'Deleted DNS record(s)' - ret['changes'] = True + ret['changes'] = {'Deleted': {'name': name, + 'zone': zone + } + } else: ret['result'] = False ret['comment'] = 'Failed to delete DNS record(s)'
Fixing ddns.absent docs and fixing the invalid output that ddns.absent returns
py
diff --git a/polymodels/__init__.py b/polymodels/__init__.py index <HASH>..<HASH> 100644 --- a/polymodels/__init__.py +++ b/polymodels/__init__.py @@ -2,6 +2,6 @@ from __future__ import unicode_literals from django.utils.version import get_version -VERSION = (1, 5, 0, 'alpha', 1) +VERSION = (1, 5, 0, 'alpha', 2) __version__ = get_version(VERSION)
Bumped version number to <I>a2.
py
diff --git a/dolo/numeric/decision_rules_states.py b/dolo/numeric/decision_rules_states.py index <HASH>..<HASH> 100644 --- a/dolo/numeric/decision_rules_states.py +++ b/dolo/numeric/decision_rules_states.py @@ -1,4 +1,4 @@ -from numpy import tile, dot +from numpy import tile, dot, atleast_2d from dolo.numeric.tensor import mdot class CDR: @@ -25,6 +25,12 @@ class CDR: def __call__(self,points): + + if points.ndim == 1: + pp = atleast_2d(points).T + res = self.__call__(pp) + return res.ravel() + n_s = points.shape[1] ds = points - tile( self.S_bar, (n_s,1) ).T choice = dot(self.X_s, ds) + tile( self.X_bar, (n_s,1) ).T
Perturbation decision rules can be evaluated on vectors.
py
diff --git a/cleverhans/feat_advs.py b/cleverhans/feat_advs.py index <HASH>..<HASH> 100644 --- a/cleverhans/feat_advs.py +++ b/cleverhans/feat_advs.py @@ -1,3 +1,10 @@ +""" +This module inmplements a fast implementation of Feature Adversaries, an attack +against a target internal representation of a model. +Feature adversaries were originally introduced in (Sabour et al. 2016), +where the optimization was done using LBFGS. +Paper link: https://arxiv.org/abs/1511.05122 +""" import numpy as np import tensorflow as tf @@ -161,10 +168,7 @@ class FastIterativeFeatureAdversaries(Attack): """ This is similar to Basic Iterative Method (Kurakin et al. 2016) but - applied to the internal representations. Feature adversaries were - originally introduced in (Sabour et al. 2016), where the optimization - was done using LBFGS. - Paper link: https://arxiv.org/abs/1511.05122 + applied to the internal representations. """ def __init__(self, model, back='tf', sess=None):
added module-level docstring.
py
diff --git a/internetarchive/cli/ia_upload.py b/internetarchive/cli/ia_upload.py index <HASH>..<HASH> 100755 --- a/internetarchive/cli/ia_upload.py +++ b/internetarchive/cli/ia_upload.py @@ -83,6 +83,7 @@ def _upload_files(item, files, upload_kwargs, prev_identifier=None, archive_sess def main(argv, session): args = docopt(__doc__, argv=argv) + ERRORS = False # Validate args. s = Schema({ @@ -172,7 +173,6 @@ def main(argv, session): session = ArchiveSession() spreadsheet = csv.DictReader(open(args['--spreadsheet'], 'rU')) prev_identifier = None - errors = False for row in spreadsheet: local_file = row['file'] identifier = row['identifier'] @@ -189,8 +189,8 @@ def main(argv, session): r = _upload_files(item, local_file, upload_kwargs, prev_identifier, session) for _r in r: if (not _r) or (not _r.ok): - errors = True + ERRORS = True prev_identifier = identifier - if errors: + if ERRORS: sys.exit(1)
made ERRORS global to address UnboundLocalError bug.
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -30,7 +30,7 @@ class Tox(test_command): install_requires = [ 'six >=1.2.0', 'Django >= 1.8', - 'Pillow >=2.2.2,<5.0', + 'Pillow >=2.2.2,!=5.1.0', 'django-ranged-response == 0.2.0' ] EXTRAS_REQUIRE = {
#<I>: allow pillow <I>, but <I> is toxic.
py
diff --git a/docs/conf.py b/docs/conf.py index <HASH>..<HASH> 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -69,7 +69,7 @@ release = "1.0" # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. -language = None +language = "en" # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files.
Set language to "en" for documentation
py
diff --git a/socketio/asyncio_manager.py b/socketio/asyncio_manager.py index <HASH>..<HASH> 100644 --- a/socketio/asyncio_manager.py +++ b/socketio/asyncio_manager.py @@ -23,7 +23,8 @@ class AsyncManager(BaseManager): id = None tasks.append(self.server._emit_internal(sid, event, data, namespace, id)) - await asyncio.wait(tasks) + if tasks != []: + await asyncio.wait(tasks) async def close_room(self, room, namespace): """Remove all participants from a room.
Handle broadcasts to zero clients Fixes #<I>
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -13,12 +13,12 @@ setup( "beautifulsoup4", "lxml" ], - version='0.16', + version='0.17', description='A python library for accessing online data about Makerspaces, Fab Labs, Hackerspaces, TechShop...', author='Massimo Menichinelli', author_email='info@openp2pdesign.org', url='https://github.com/openp2pdesign/makerlabs', - download_url='https://github.com/openp2pdesign/makerlabs/releases/tag/v0.16', + download_url='https://github.com/openp2pdesign/makerlabs/releases/tag/v0.17', keywords=['Fab Lab', 'Fab Lab', 'Makerspace', 'Hackerspace', 'TechShop', 'Makers'], classifiers=[
Move to <I> to solve a problem with PyPi
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -5,7 +5,7 @@ from setuptools import setup setup( name="threat_intel", - version='0.1.18', + version='0.1.19', provides=['threat_intel'], author="Yelp Security", url='https://github.com/Yelp/threat_intel',
Bumping version to <I>
py
diff --git a/tests/test_asks.py b/tests/test_asks.py index <HASH>..<HASH> 100644 --- a/tests/test_asks.py +++ b/tests/test_asks.py @@ -189,9 +189,8 @@ async def callback_example(chunk): @curio_run async def test_callback(): - img = b'' - r = await asks.get('http://httpbin.org/image/png', - callback=callback_example) + await asks.get('http://httpbin.org/image/png', + callback=callback_example) assert len(callback_data) == 8090
removed unused vars in tests
py
diff --git a/FlowCal/gate.py b/FlowCal/gate.py index <HASH>..<HASH> 100644 --- a/FlowCal/gate.py +++ b/FlowCal/gate.py @@ -39,6 +39,7 @@ def start_end(data, num_start=250, num_end=100, full_output=False): the number of parameters (aka channels). num_start, num_end : int, optional Number of events to gate out from beginning and end of `data`. + Ignored if less than 0. full_output : bool, optional Flag specifying to return additional outputs. If true, the outputs are given as a namedtuple. @@ -58,13 +59,22 @@ def start_end(data, num_start=250, num_end=100, full_output=False): number of events in `data`. """ + + if num_start < 0: + num_start = 0 + if num_end < 0: + num_end = 0 + if data.shape[0] < (num_start + num_end): raise ValueError('Number of events to discard greater than total' + ' number of events.') mask = np.ones(shape=data.shape[0],dtype=bool) mask[:num_start] = False - mask[-num_end:] = False + if num_end > 0: + # catch the edge case where `num_end=0` causes mask[-num_end:] to mask + # off all events + mask[-num_end:] = False gated_data = data[mask] if full_output:
Changed gate.start_end() behavior with num_end=0. Fixed edge case where `num_end=0` causes non-intuitive array slicing. See #<I>.
py
diff --git a/src/naarad/metrics/jmeter_metric.py b/src/naarad/metrics/jmeter_metric.py index <HASH>..<HASH> 100644 --- a/src/naarad/metrics/jmeter_metric.py +++ b/src/naarad/metrics/jmeter_metric.py @@ -20,8 +20,8 @@ import naarad.utils logger = logging.getLogger('naarad.metrics.JmeterMetric') class JmeterMetric(Metric): - def __init__ (self, metric_type, infile, access, output_directory, label, ts_start, ts_end, **other_options): - Metric.__init__(self, metric_type, infile, access, output_directory, label, ts_start, ts_end) + def __init__ (self, metric_type, infile, hostname, output_directory, label, ts_start, ts_end, **other_options): + Metric.__init__(self, metric_type, infile, hostname, output_directory, label, ts_start, ts_end) self.metric_description = { 'lb': 'Transaction Name', 'lt': 'Time to First byte',
Fix Jmeter Metric which was broken after latest merge
py
diff --git a/registration/forms.py b/registration/forms.py index <HASH>..<HASH> 100644 --- a/registration/forms.py +++ b/registration/forms.py @@ -3,7 +3,7 @@ from django.contrib.auth.models import User class RegistrationForm(forms.Form): - username = forms.CharField(max_length=32, required=True) + username = forms.CharField(max_length=30, required=True) password = forms.CharField(widget=forms.PasswordInput(), required=True) password_again = forms.CharField(widget=forms.PasswordInput(), required=True, label="Password Again") email = forms.CharField(max_length=254, required=True) @@ -28,4 +28,4 @@ class RegistrationForm(forms.Form): if self.cleaned_data['email'] != self.cleaned_data['email_again']: raise forms.ValidationError(u'Emails do not match') - return self.cleaned_data \ No newline at end of file + return self.cleaned_data
Corrected length mismatch between registration form and database for username field
py
diff --git a/holoviews/core/spaces.py b/holoviews/core/spaces.py index <HASH>..<HASH> 100644 --- a/holoviews/core/spaces.py +++ b/holoviews/core/spaces.py @@ -463,13 +463,13 @@ class DynamicMap(HoloMap): """) def __init__(self, callback, initial_items=None, **params): + super(DynamicMap, self).__init__(initial_items, callback=callback, **params) # Set source to self if not already specified - for stream in params.get('streams',[]): + for stream in self.streams: if stream.source is None: stream.source = self - super(DynamicMap, self).__init__(initial_items, callback=callback, **params) self.counter = 0 if self.callback is None: raise Exception("A suitable callback must be "
Setting stream sources in DynamicMap after setting the parameters
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -1,9 +1,8 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- from distutils.core import setup setup( - name='facebook-python-sdk', + name='facebook-sdk', version='0.2.0', description='This client library is designed to support the Facebook Graph API and the official Facebook JavaScript SDK, which is the canonical way to implement Facebook authentication.', author='Facebook', @@ -12,3 +11,4 @@ setup( 'facebook', ], ) +
Rename the package so we can push to PyPi
py
diff --git a/spacy/about.py b/spacy/about.py index <HASH>..<HASH> 100644 --- a/spacy/about.py +++ b/spacy/about.py @@ -3,13 +3,13 @@ # https://github.com/pypa/warehouse/blob/master/warehouse/__about__.py __title__ = 'spacy' -__version__ = '2.0.12.dev1' +__version__ = '2.0.12' __summary__ = 'Industrial-strength Natural Language Processing (NLP) with Python and Cython' __uri__ = 'https://spacy.io' __author__ = 'Explosion AI' __email__ = 'contact@explosion.ai' __license__ = 'MIT' -__release__ = False +__release__ = True __download_url__ = 'https://github.com/explosion/spacy-models/releases/download' __compatibility__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/compatibility.json'
Set about to <I> release
py
diff --git a/hearthstone/entities.py b/hearthstone/entities.py index <HASH>..<HASH> 100644 --- a/hearthstone/entities.py +++ b/hearthstone/entities.py @@ -75,6 +75,8 @@ class Game(Entity): self.initial_entities.append(entity) def find_entity_by_id(self, id): + # int() for LazyPlayer mainly... + id = int(id) for entity in self.entities: if entity.id == id: return entity
entities: Always int-ify the argument to Game.find_entity_by_id()
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -47,7 +47,6 @@ setup( "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7",
Remove <I> from supported versions.
py
diff --git a/openquake/commonlib/commands/info.py b/openquake/commonlib/commands/info.py index <HASH>..<HASH> 100644 --- a/openquake/commonlib/commands/info.py +++ b/openquake/commonlib/commands/info.py @@ -50,6 +50,8 @@ def info(name, filtersources=False): oqparam, sitecol, prefilter=filtersources, in_memory=filtersources) assoc = csm.get_rlzs_assoc() print assoc.csm_info + print('See https://github.com/gem/oq-risklib/blob/master/docs/' + 'effective-realizations.rst for an explanation') print assoc if filtersources: # display information about the size of the hazard curve matrices
Added a reference to the documentation
py
diff --git a/spacy/about.py b/spacy/about.py index <HASH>..<HASH> 100644 --- a/spacy/about.py +++ b/spacy/about.py @@ -3,7 +3,7 @@ # https://github.com/pypa/warehouse/blob/master/warehouse/__about__.py __title__ = 'spacy' -__version__ = '2.0.12' +__version__ = '2.0.13.dev0' __summary__ = 'Industrial-strength Natural Language Processing (NLP) with Python and Cython' __uri__ = 'https://spacy.io' __author__ = 'Explosion AI'
Set version to <I>.dev0
py
diff --git a/tenant_schemas/apps.py b/tenant_schemas/apps.py index <HASH>..<HASH> 100644 --- a/tenant_schemas/apps.py +++ b/tenant_schemas/apps.py @@ -1,4 +1,4 @@ -from django.apps import AppConfig +from django.apps import AppConfig, apps from django.conf import settings from django.core.checks import Critical, Error, Warning, register @@ -15,6 +15,9 @@ def best_practice(app_configs, **kwargs): Test for configuration recommendations. These are best practices, they avoid hard to find bugs and unexpected behaviour. """ + if app_configs is None: + app_configs = apps.get_app_configs() + # Take the app_configs and turn them into *old style* application names. # This is what we expect in the SHARED_APPS and TENANT_APPS settings. INSTALLED_APPS = [
Update apps.py Populate `app_configs` if it is `None`.
py
diff --git a/salt/modules/composer.py b/salt/modules/composer.py index <HASH>..<HASH> 100644 --- a/salt/modules/composer.py +++ b/salt/modules/composer.py @@ -146,7 +146,10 @@ def install(dir, if optimize is True: cmd += ' --optimize-autoloader' - result = __salt__['cmd.run_all'](cmd, runas=runas, env={'COMPOSER_HOME': composer_home}) + result = __salt__['cmd.run_all'](cmd, + runas=runas, + env={'COMPOSER_HOME': composer_home}, + python_shell=False) if result['retcode'] != 0: raise CommandExecutionError(result['stderr'])
composer module python_shell=False additon
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -26,7 +26,7 @@ setup( install_requires=[ 'django-modeldict>=1.1.6', 'nexus>=0.2.3', - 'django-jsonfield', + 'django-jsonfield==0.6', ], license='Apache License 2.0', tests_require=tests_require,
Lock json field at <I>
py
diff --git a/flask_appbuilder/models/__init__.py b/flask_appbuilder/models/__init__.py index <HASH>..<HASH> 100644 --- a/flask_appbuilder/models/__init__.py +++ b/flask_appbuilder/models/__init__.py @@ -1,7 +1,7 @@ import logging import re from sqlalchemy.ext.declarative import declared_attr -from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.ext.declarative import as_declarative log = logging.getLogger(__name__) @@ -9,7 +9,8 @@ log = logging.getLogger(__name__) _camelcase_re = re.compile(r'([A-Z]+)(?=[a-z0-9])') -class BaseModel(object): +@as_declarative(name='Model') +class Model(object): """ Use this class has a mixin for your models, it will define your tablenames automatically MyModel will be called my_model on the database. @@ -37,7 +38,6 @@ class BaseModel(object): return _camelcase_re.sub(_join, cls.__name__).lstrip('_') -Model = declarative_base(cls=BaseModel, name='Model') """ This is for retro compatibility """
Model base class improv with decorator
py
diff --git a/pygount/write.py b/pygount/write.py index <HASH>..<HASH> 100644 --- a/pygount/write.py +++ b/pygount/write.py @@ -157,28 +157,18 @@ class SummaryWriter(BaseWriter): for index, language_summary in enumerate(language_summaries, start=1): table.add_row( language_summary.language, - *[ - str(v) - for v in ( - language_summary.file_count, - language_summary.empty_count, - language_summary.documentation_count, - language_summary.code_count, - ) - ], + str(language_summary.file_count), + str(language_summary.empty_count), + str(language_summary.documentation_count), + str(language_summary.code_count), end_section=(index == len(language_summaries)), ) table.add_row( "SUM", - *[ - str(v) - for v in ( - self.project_summary.total_file_count, - self.project_summary.total_empty_count, - self.project_summary.total_documentation_count, - self.project_summary.total_code_count, - ) - ], + str(self.project_summary.total_file_count), + str(self.project_summary.total_empty_count), + str(self.project_summary.total_documentation_count), + str(self.project_summary.total_code_count), ) Console(file=self._target_stream, soft_wrap=True).print(table)
Replace list comprehensions with code duplication
py
diff --git a/scaleiopy/scaleio.py b/scaleiopy/scaleio.py index <HASH>..<HASH> 100644 --- a/scaleiopy/scaleio.py +++ b/scaleiopy/scaleio.py @@ -577,7 +577,8 @@ class ScaleIO(SIO_Generic_Object): self.logger.error('_do_get() - HTTP response error: %s', response.status_code) self.logger.error('_do_get() - HTTP response error, data: %s', response.text) raise RuntimeError("_do_get() - HTTP response error" + response.status_code) - except: + except Exception as e: + self.logger.error("_do_get() - Unhandled Error Occurred: %s" % str(e)) raise RuntimeError("_do_get() - Communication error with ScaleIO gateway") return response @@ -599,7 +600,8 @@ class ScaleIO(SIO_Generic_Object): self.logger.error('_do_post() - HTTP response error: %s', response.status_code) self.logger.error('_do_post() - HTTP response error, data: %s', response.text) raise RuntimeError("_do_post() - HTTP response error" + response.status_code) - except: + except Exception as e: + self.logger.error("_do_post() - Unhandled Error Occurred: %s" % str(e)) raise RuntimeError("_do_post() - Communication error with ScaleIO gateway") return response
add error logs to runtime error in _do_X Its hard to debug whats going on when you always get ```_do_post() - Communication error with ScaleIO gateway``` adding the actual error to the log will help this, came across and SSL / Cert error recently that wasn't caught so I was only getting the above string for an error.
py
diff --git a/validator/outputhandlers/shellcolors.py b/validator/outputhandlers/shellcolors.py index <HASH>..<HASH> 100644 --- a/validator/outputhandlers/shellcolors.py +++ b/validator/outputhandlers/shellcolors.py @@ -1,5 +1,8 @@ import re -import curses +try: + import curses +except ImportError: + curses = None import os import sys @@ -12,7 +15,8 @@ class OutputHandler: output of the application for *nix-based terminals.""" def __init__(self, buffer=sys.stdout, no_color=False): - + if not curses: + no_color = True if not no_color: no_color = isinstance(sys.stdout, StringIO) or \ not sys.stdout.isatty()
Windows: ncurses might not be available Simply skip colors if ncurses is not available
py
diff --git a/cmsplugin_cascade/bootstrap3/carousel.py b/cmsplugin_cascade/bootstrap3/carousel.py index <HASH>..<HASH> 100644 --- a/cmsplugin_cascade/bootstrap3/carousel.py +++ b/cmsplugin_cascade/bootstrap3/carousel.py @@ -153,8 +153,6 @@ class CarouselSlidePlugin(BootstrapPluginBase): def get_css_classes(cls, obj): css_classes = super(CarouselSlidePlugin, cls).get_css_classes(obj) css_classes.append('img-responsive') # always for slides - if obj.get_previous_sibling() is None: - css_classes.append('active') return css_classes @classmethod
CSS class 'active' is controled via template
py
diff --git a/src/pybel/parser/parse_metadata.py b/src/pybel/parser/parse_metadata.py index <HASH>..<HASH> 100644 --- a/src/pybel/parser/parse_metadata.py +++ b/src/pybel/parser/parse_metadata.py @@ -205,6 +205,9 @@ class MetadataParser(BaseParser): def handle_annotation_pattern(self, s, l, tokens): name = tokens['name'] + + return tokens + self.annotations_re[name] = tokens['value'] return tokens
Remove url rewriting and better logging
py
diff --git a/svg/charts/graph.py b/svg/charts/graph.py index <HASH>..<HASH> 100644 --- a/svg/charts/graph.py +++ b/svg/charts/graph.py @@ -15,13 +15,14 @@ import functools import cssutils from lxml import etree -from svg.charts import css # causes the SVG profile to be loaded - try: import zlib except ImportError: zlib = None +# cause the SVG profile to be loaded +__import__('svg.charts.css') + def sort_multiple(arrays): "sort multiple lists (of equal size) using the first list for the sort keys" tuples = zip(*arrays)
Use __import__ for imperative imports
py
diff --git a/ib_insync/util.py b/ib_insync/util.py index <HASH>..<HASH> 100644 --- a/ib_insync/util.py +++ b/ib_insync/util.py @@ -505,7 +505,7 @@ def formatIBDatetime(dt: Union[date, datetime, str, None]) -> str: elif isinstance(dt, datetime): if dt.tzinfo: # convert to local system timezone - dt = dt.astimezone() + dt = dt.astimezone(tz=None) s = dt.strftime('%Y%m%d %H:%M:%S') elif isinstance(dt, date): s = dt.strftime('%Y%m%d 23:59:59')
Add explicit timezone of None to accomodate pandas Timestamp, fixes #<I>
py
diff --git a/mapillary_tools/uploader.py b/mapillary_tools/uploader.py index <HASH>..<HASH> 100644 --- a/mapillary_tools/uploader.py +++ b/mapillary_tools/uploader.py @@ -870,7 +870,7 @@ def send_videos_for_processing(video_import_path, user_name, user_email=None, us for video in tqdm(all_videos, desc="Uploading videos for processing"): print("Preparing video {} for upload".format(os.path.basename(video))) - [gpx_file_path, isStationaryVid] = gpx_from_blackvue(video,use_nmea_stream_timestamp=True) + [gpx_file_path, isStationaryVid] = gpx_from_blackvue(video,use_nmea_stream_timestamp=False) video_start_time = get_video_start_time(video) if isStationaryVid:
fix: disable reading NMEA date since it was causing unnecessary errors
py
diff --git a/tff/io.py b/tff/io.py index <HASH>..<HASH> 100644 --- a/tff/io.py +++ b/tff/io.py @@ -343,6 +343,9 @@ class DefaultPTY(PTY): new[6][termios.VSUSP] = vdisable # Ctrl-Z new[6][termios.VQUIT] = vdisable # Ctrl-\ + VDSUSP = 11 + new[6][VDSUSP] = vdisable # Ctrl-Y + termios.tcsetattr(self.__stdin_fileno, termios.TCSANOW, new) pid, master = pty.fork() if not pid:
Fix VDSUSP (Ctrl-y) issue
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -1,6 +1,13 @@ #!/usr/bin/env python -from distutils.core import setup +from setuptools import setup -setup(name='cloudkey', version='1.0', py_modules=['cloudkey']) +setup(name='cloudkey', + description='Dailymotion Cloud API client library', + author='Dailymotion', + url='http://github.com/dailymotion/cloudkey-py', + version='1.0', + install_requires=['simplejson>=2.1.1', 'pycurl>=7.19.0'], + zip_safe=True, + test_suite='tests')
Add dependancies to setup.py and use setuptools instead of distutuils
py
diff --git a/tests/conftest.py b/tests/conftest.py index <HASH>..<HASH> 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -10,6 +10,7 @@ from libtmux.server import Server from libtmux.test import TEST_SESSION_PREFIX, get_test_session_name, namer logger = logging.getLogger(__name__) +USING_ZSH = "zsh" in os.getenv("SHELL", "") @pytest.fixture(autouse=True, scope="session") @@ -24,6 +25,18 @@ def user_path(home_path: pathlib.Path): return p +@pytest.mark.skipif(USING_ZSH, reason="Using ZSH") +@pytest.fixture(autouse=USING_ZSH, scope="session") +def zshrc(user_path: pathlib.Path): + """This quiets ZSH default message. + + Needs a startup file .zshenv, .zprofile, .zshrc, .zlogin. + """ + p = user_path / ".zshrc" + p.touch() + return p + + @pytest.fixture(autouse=True) def home_path_default(user_path: pathlib.Path): os.environ["HOME"] = str(user_path)
tests: If ZSH is the shell, stub out zshrc to prevent default message
py
diff --git a/PySimpleGUI.py b/PySimpleGUI.py index <HASH>..<HASH> 100644 --- a/PySimpleGUI.py +++ b/PySimpleGUI.py @@ -1,6 +1,6 @@ #!/usr/bin/python3 -version = __version__ = "4.46.0.46 Unreleased\nAdded exception details if have a problem with the wm_overriderediect" +version = __version__ = "4.46.0.47 Unreleased\nAdded exception details if have a problem with the wm_overriderediect. docstring fix." __version__ = version.split()[0] # For PEP 396 and PEP 345 @@ -6139,7 +6139,7 @@ class TabGroup(Element): Not recommended user call. Used to add rows of Elements to the Frame Element. :param *args: The list of elements for this row - :type: *args: List[Element] + :type *args: List[Element] """ NumRows = len(self.Rows) # number of existing rows is our row number @@ -14477,7 +14477,7 @@ def _no_titlebar_setup(window): Calling twice seems to have had better overall results so that's what's currently done. The MAC has been the problem with this feature. It's been a chronic problem on the Mac. :param window: window to turn off the titlebar if indicated in the settings - :type: Window + :type window: Window """ try: if window.NoTitleBar:
Fix for a couple of docstrings
py
diff --git a/versions/models.py b/versions/models.py index <HASH>..<HASH> 100644 --- a/versions/models.py +++ b/versions/models.py @@ -208,6 +208,11 @@ class VersionedQuerySet(QuerySet): item.as_of = self.query_time elif isinstance(item, VersionedQuerySet): item.query_time = self.query_time + elif isinstance(self, ValueQuerySet): + # When we are dealing with a ValueQuerySet there is no point in + # setting the query_time as we are returning an array of values + # instead of a full-fledged model object + pass else: if type_check: raise TypeError("This item is not a Versionable, it's a " + str(type(item)))
Handling ValuesQuerySet ValuesQuerySet are used when the client code is using the .values() function to retrieve a specific set of attributes from the objects. In such case we really can not and don't want to the query_time or do the type check.
py
diff --git a/spyder/app/mainwindow.py b/spyder/app/mainwindow.py index <HASH>..<HASH> 100644 --- a/spyder/app/mainwindow.py +++ b/spyder/app/mainwindow.py @@ -842,7 +842,6 @@ class MainWindow(QMainWindow): plugin_name = plugin_class.NAME # Non-migrated plugins if plugin_name in [ - Plugins.OutlineExplorer, Plugins.Editor, Plugins.IPythonConsole, Plugins.Projects]:
Remove outline explorrer from old plugins
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -6,7 +6,7 @@ from setuptools import setup, find_packages setup( name="django-deployer", - version="0.1.0", + version="0.1.1", description="Django deployment utility for popular PaaS providers", long_description=open('README.rst').read(), author="Nate Aune", @@ -20,12 +20,11 @@ setup( install_requires=[ 'fabric==1.6.0', # formerly 1.4.3 'jinja2==2.6', - 'heroku', - 'dotcloud', - 'gondor', - 'pyyaml', + 'heroku==0.1.2', + 'dotcloud==0.9.4', + 'gondor==1.2.1', + 'pyyaml==3.10', 'sphinx==1.1.3', - 'pyyaml', 'requests==0.14.2', ], classifiers=(
Bump the version since we now have AppEngine support, and pin the versions of the dependencies just to be safe.
py
diff --git a/salt/modules/mysql.py b/salt/modules/mysql.py index <HASH>..<HASH> 100644 --- a/salt/modules/mysql.py +++ b/salt/modules/mysql.py @@ -808,7 +808,7 @@ def version(**connection_args): return '' try: - return cur.fetchone()[0] + return salt.utils.data.decode(cur.fetchone()[0]) except IndexError: return ''
Fixes mysql server version comparison. Issue #<I>.
py
diff --git a/tests/testapp_nogui/main.py b/tests/testapp_nogui/main.py index <HASH>..<HASH> 100644 --- a/tests/testapp_nogui/main.py +++ b/tests/testapp_nogui/main.py @@ -2,19 +2,21 @@ from math import sqrt for i in range(50): - print i, sqrt(i) + print(i, sqrt(i)) -print 'Just printing stuff apparently worked, trying pyjnius' +print('Just printing stuff apparently worked, trying pyjnius') import jnius -print 'Importing jnius worked' +print('Importing jnius worked') -print 'Trying to autoclass activity' +print('Trying to autoclass activity') from jnius import autoclass +print('Imported autoclass') + NewPythonActivity = autoclass('net.inclem.android.NewPythonActivity') -print ':o the autoclass worked!' +print(':o the autoclass worked!')
Added jnius import test to test app
py
diff --git a/semantic_release/__init__.py b/semantic_release/__init__.py index <HASH>..<HASH> 100644 --- a/semantic_release/__init__.py +++ b/semantic_release/__init__.py @@ -1,6 +1,6 @@ """Semantic Release """ -__version__ = "7.2.0" +__version__ = "7.2.1" from .errors import UnknownCommitMessageStyleError # noqa; noqa
<I> Automatically generated by python-semantic-release
py
diff --git a/discord/message.py b/discord/message.py index <HASH>..<HASH> 100644 --- a/discord/message.py +++ b/discord/message.py @@ -720,7 +720,8 @@ class Message: @staticmethod def _emoji_reaction(emoji): if isinstance(emoji, Reaction): - return emoji.emoji + emoji = emoji.emoji + if isinstance(emoji, Emoji): return '%s:%s' % (emoji.name, emoji.id) if isinstance(emoji, PartialEmoji):
Fix behavior of reaction type-conversion
py
diff --git a/beeswarm/honeypot/capabilities/ftp.py b/beeswarm/honeypot/capabilities/ftp.py index <HASH>..<HASH> 100644 --- a/beeswarm/honeypot/capabilities/ftp.py +++ b/beeswarm/honeypot/capabilities/ftp.py @@ -212,6 +212,7 @@ class BeeFTPHandler(object): def stop(self): self.conn.close() + self.session.connected = False class ftp(HandlerBase):
fix disconnect for ftp capability
py
diff --git a/ntfy/backends/linux.py b/ntfy/backends/linux.py index <HASH>..<HASH> 100644 --- a/ntfy/backends/linux.py +++ b/ntfy/backends/linux.py @@ -5,7 +5,7 @@ from ..data import icon def notify(title, message, - icon=icon.png, + icon=path.abspath(icon.png), urgency=None, transient=None, soundfile=None, @@ -61,5 +61,5 @@ def notify(title, hints.update({'sound-file': soundfile}) message = message.replace('&', '&amp;') - dbus_iface.Notify('ntfy', 0, "" if not icon else path.abspath(icon), title, + dbus_iface.Notify('ntfy', 0, "" or icon, title, message, [], hints, int(timeout))
Make theme icons available for the linux backend. The abspath is only called on the default icon.png object. That way we can specify a theme icon, which does not need an absolute path.
py
diff --git a/montblanc/impl/biro/slvr_config.py b/montblanc/impl/biro/slvr_config.py index <HASH>..<HASH> 100644 --- a/montblanc/impl/biro/slvr_config.py +++ b/montblanc/impl/biro/slvr_config.py @@ -45,16 +45,6 @@ class BiroSolverConfigurationOptions(Options): 'Governs the level of discretisation of ' 'the nu (frequency) dimension.') - E_BEAM_CONFIG = 'E_beam_config' - E_BEAM_CONFIG_DESCRIPTION = ( - 'A dictionary used to configure the E Beam. ', - 'e.g. %s : { \'%s\': %s, \'%s\': %s, \'%s\': %s }' % ( - E_BEAM_CONFIG, - E_BEAM_WIDTH, DEFAULT_E_BEAM_WIDTH, - E_BEAM_HEIGHT, DEFAULT_E_BEAM_HEIGHT, - E_BEAM_DEPTH, DEFAULT_E_BEAM_DEPTH)) - - # Should a weight vector (sigma) be used to # when calculating the chi-squared values? WEIGHT_VECTOR = 'weight_vector'
Remove the E_beam_config option, just use the beam dimensions in the base Solver Configuration dictionary.
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -21,7 +21,7 @@ setup(name='openaccess_epub', 'openaccess_epub.ncx', 'openaccess_epub.opf', 'openaccess_epub.ops', 'openaccess_epub.utils'], scripts=['scripts/oaepub', 'scripts/epubzip', 'scripts/oae-quickstart'], - data_files=[], + data_files=['README.md'], classifiers=['Development Status :: 3 - Alpha', 'Environment :: Console', 'Intended Audience :: Science/Research',
adding README.md to the data_files in setup
py
diff --git a/phoebe/units/conversions.py b/phoebe/units/conversions.py index <HASH>..<HASH> 100644 --- a/phoebe/units/conversions.py +++ b/phoebe/units/conversions.py @@ -511,8 +511,11 @@ import datetime #-- optional libraries: WARNING: when these modules are not installed, the # module's use is restricted -try: import ephem -except ImportError: print("Unable to load pyephem, stellar coordinate transformations unavailable") +try: + import ephem +except ImportError: + pass + #print("Unable to load pyephem, stellar coordinate transformations unavailable") #-- from IVS repository from phoebe.units import constants
removed pyephem warning when importing conversions
py
diff --git a/spadespipeline/quality.py b/spadespipeline/quality.py index <HASH>..<HASH> 100755 --- a/spadespipeline/quality.py +++ b/spadespipeline/quality.py @@ -171,7 +171,7 @@ class Quality(object): if self.numreads == 2: # Incorporate read length into the minlength parameter - set it to 50 unless one or more of the # reads has a lower calculated length than 50 - lesser_length = min(sample.run.forwardlength, sample.run.reverselength) + lesser_length = min(int(sample.run.forwardlength), int(sample.run.reverselength)) min_len = 50 if lesser_length >= 50 else lesser_length # Separate system calls for paired and unpaired fastq files # http://seqanswers.com/forums/showthread.php?t=42776
Needed to ensure that variable was treated as an integer
py
diff --git a/pandas/core/index.py b/pandas/core/index.py index <HASH>..<HASH> 100644 --- a/pandas/core/index.py +++ b/pandas/core/index.py @@ -790,7 +790,8 @@ class MultiIndex(Index): Parameters ---------- - level : int, default 0 + level : int or str, default 0 + If a string is given, must be a name of the level ascending : boolean, default True False to sort in descending order @@ -799,8 +800,14 @@ class MultiIndex(Index): sorted_index : MultiIndex """ labels = list(self.labels) + try: + level = self.names.index(level) + except: + raise ValueError("level %s not in index names" % level) + primary = labels.pop(level) + # Lexsort starts from END indexer = np.lexsort(tuple(labels[::-1]) + (primary,))
ENH: Allow to sort on index level by name
py