diff
stringlengths
139
3.65k
message
stringlengths
8
627
diff_languages
stringclasses
1 value
diff --git a/session_security/middleware.py b/session_security/middleware.py index <HASH>..<HASH> 100644 --- a/session_security/middleware.py +++ b/session_security/middleware.py @@ -35,7 +35,7 @@ class SessionSecurityMiddleware(object): self.update_last_activity(request, now) delta = now - get_last_activity(request.session) - if delta.seconds >= EXPIRE_AFTER: + if delta.seconds >= EXPIRE_AFTER and delta.days >= 0: logout(request) elif request.path not in PASSIVE_URLS: set_last_activity(request.session, now)
Ensure that the last activity isn't from the future; prevents issues in situations where clocks are out of sync between machines
py
diff --git a/pyecore/ecore.py b/pyecore/ecore.py index <HASH>..<HASH> 100644 --- a/pyecore/ecore.py +++ b/pyecore/ecore.py @@ -696,10 +696,11 @@ class EDataType(EClassifier): 'java.lang.Character': str} # Must be completed def __init__(self, name=None, eType=None, default_value=None, - from_string=None, to_string=None): + from_string=None, to_string=None, instanceClassName=None): super().__init__(name) self.eType = eType - self._instanceClassName = None + if instanceClassName: + self.instanceClassName = instanceClassName self.default_value = default_value if from_string: self.from_string = from_string @@ -913,6 +914,9 @@ class EClass(EClassifier): elif notif.feature is EClass.eStructuralFeatures: if notif.kind is Kind.ADD: setattr(self.python_class, notif.new.name, notif.new) + elif notif.kind is Kind.ADD_MANY: + for x in notif.new: + setattr(self.python_class, x.name, x) elif notif.kind is Kind.REMOVE: delattr(self.python_class, notif.old.name)
Fix __update issue of EClass when many attributes are added at once
py
diff --git a/searchtweets/_version.py b/searchtweets/_version.py index <HASH>..<HASH> 100644 --- a/searchtweets/_version.py +++ b/searchtweets/_version.py @@ -2,4 +2,4 @@ # Copyright 2018 Twitter, Inc. # Licensed under the MIT License # https://opensource.org/licenses/MIT -VERSION = "1.7.0" +VERSION = "1.7.1"
bump the version for the bugfix to cmd line args (issue #<I>)
py
diff --git a/tensorforce/updater/linear_value_function.py b/tensorforce/updater/linear_value_function.py index <HASH>..<HASH> 100644 --- a/tensorforce/updater/linear_value_function.py +++ b/tensorforce/updater/linear_value_function.py @@ -38,7 +38,7 @@ class LinearValueFunction(object): returns = np.concatenate([path["returns"] for path in paths]) columns = feature_matrix.shape[1] - lamb = 0 #2.0 + lamb = 0.1 #2.0 self.coefficients = np.linalg.lstsq(feature_matrix.T.dot(feature_matrix) + lamb * np.identity(columns), feature_matrix.T.dot(returns))[0]
small damping in least squares fit
py
diff --git a/fundamentals/fmultiprocess.py b/fundamentals/fmultiprocess.py index <HASH>..<HASH> 100644 --- a/fundamentals/fmultiprocess.py +++ b/fundamentals/fmultiprocess.py @@ -46,14 +46,15 @@ def fmultiprocess( log.info('starting the ``multiprocess`` function') # DEFINTE POOL SIZE - NUMBER OF CPU CORES TO USE (BEST = ALL - 1) - if cpu_count() > 1: - poolSize = cpu_count() - 1 - else: - poolSize = 1 + # if cpu_count() > 1: + # poolSize = cpu_count() - 1 + # else: + # poolSize = 1 - if len(inputArray) < poolSize: - poolSize = len(inputArray) - p = Pool(processes=poolSize) + # if len(inputArray) < poolSize: + # poolSize = len(inputArray) + # p = Pool(processes=poolSize) + p = Pool() # MAP-REDUCE THE WORK OVER MULTIPLE CPU CORES try:
removing process limit from multiprocessing
py
diff --git a/tests/config_test.py b/tests/config_test.py index <HASH>..<HASH> 100644 --- a/tests/config_test.py +++ b/tests/config_test.py @@ -1,6 +1,7 @@ import contextlib import gc import mock +import os import tempfile import time from testify import run, assert_equal, TestCase, setup, setup_teardown @@ -434,8 +435,10 @@ class ConfigFacadeAcceptanceTest(TestCase): self.file = tempfile.NamedTemporaryFile() self.write("""one: A""") - def write(self, content): - time.sleep(0.01) + def write(self, content, mtime_seconds=0): + tstamp = time.time() - mtime_seconds + os.utime(self.file.name, (tstamp, tstamp)) + time.sleep(0.03) self.file.file.seek(0) self.file.write(content) self.file.flush() @@ -453,7 +456,7 @@ class ConfigFacadeAcceptanceTest(TestCase): facade.add_callback('one', callback) assert_equal(staticconf.get('one', namespace=self.namespace), "A") - self.write("""one: B""") + self.write("""one: B""", 5) facade.reload_if_changed() assert_equal(staticconf.get('one', namespace=self.namespace), "B") callback.assert_called_with()
Attempt to make an acceptance test more reliable.
py
diff --git a/openid/consumer/consumer.py b/openid/consumer/consumer.py index <HASH>..<HASH> 100644 --- a/openid/consumer/consumer.py +++ b/openid/consumer/consumer.py @@ -743,8 +743,8 @@ class OpenIDConsumer(object): else: server_id = consumer_id - urls = (consumer_id, server_id, server) - return SUCCESS, tuple(map(oidutil.normalizeUrl, urls)) + urls = consumer_id, server_id, server + return SUCCESS, urls def _createAssociateRequest(self, dh, args=None): if args is None:
[project @ Don't attempt to normalize URLs that shouldn't be normalized]
py
diff --git a/fusesoc/capi2/core.py b/fusesoc/capi2/core.py index <HASH>..<HASH> 100644 --- a/fusesoc/capi2/core.py +++ b/fusesoc/capi2/core.py @@ -470,6 +470,7 @@ Tools: members: icarus : Icarus icestorm : Icestorm + ise : Ise modelsim : Modelsim quartus : Quartus verilator : Verilator @@ -491,6 +492,13 @@ Icestorm: lists: arachne_pnr_options : String +Ise: + members: + family : String + device : String + package : String + speed : String + Quartus: members: family : String
Add ISE support to CAPI2
py
diff --git a/settings.py b/settings.py index <HASH>..<HASH> 100644 --- a/settings.py +++ b/settings.py @@ -317,6 +317,9 @@ OIDC = False #Set this to True if you want want OpenID Connect Authentication, #OIDC_OP_TOKEN_ENDPOINT = "<URL of the OIDC OP token endpoint>" #OIDC_OP_USER_ENDPOINT = "<URL of the OIDC OP userinfo endpoint>" + #You may also need the following: +#OIDC_RP_SIGN_ALGO = "RS256" #should be HS256 or RS256 +#OIDC_OP_JWKS_ENDPOINT = "<URL of the OIDC OP JWKS endpoint>" ############################################################################## # DJANGO SETTINGS THAT NEED NOT BE CHANGED (but you may if you want to, do scroll through at least)
added OIDC_RP_SIGN_ALGO and OIDC_OP_JWKS_ENDPOINT to settings template #<I>
py
diff --git a/glances/glances.py b/glances/glances.py index <HASH>..<HASH> 100644 --- a/glances/glances.py +++ b/glances/glances.py @@ -722,7 +722,12 @@ class glancesLimits: return self.__limits_list def getHide(self, stat): - return self.__limits_list[stat] + try: + self.__limits_list[stat] + except KeyError: + return [] + else: + return self.__limits_list[stat] def getCareful(self, stat): return self.__limits_list[stat][0]
Correct a bug when a new client talks with an older one...
py
diff --git a/rocker/container.py b/rocker/container.py index <HASH>..<HASH> 100644 --- a/rocker/container.py +++ b/rocker/container.py @@ -216,13 +216,17 @@ def isCurrent(containerName, imageName, pullImage=True, docker=DockerClient()): if imgInfo == None and pullImage == True: image.pull(imageName, docker) + imgInfo = image.inspect(imageName) + if imgInfo == None: + raise Exception("Missing image: {0}".format(imageName)) print('{0} -- {1}'.format(ctrInfo, imgInfo)) if ctrInfo == None: - # container not found => not using current image + # container not found => we need to build it return False elif imgInfo == None: # image not found => Error raise Exception("Unknown image: {0}", imageName) - return ctrInfo["Image"] == imgInfo["Id"] + # newer versions of an image will get a new Id + return ctrInfo.image == imgInfo.id
fixed container.isCurrent() (was still using old code)
py
diff --git a/plenum/server/node.py b/plenum/server/node.py index <HASH>..<HASH> 100644 --- a/plenum/server/node.py +++ b/plenum/server/node.py @@ -2299,7 +2299,10 @@ class Node(HasActionQueue, Motor, Propagator, MessageProcessor, HasFileStorage, if instance_id == 0: # TODO: 0 should be replaced with configurable constant self.monitor.hasMasterPrimary = self.has_master_primary - if self.lost_primary_at and self.nodestack.isConnectedTo(self.master_primary_name): + if not self.lost_primary_at: + return + if self.nodestack.isConnectedTo(self.master_primary_name) or \ + self.master_primary_name == self.name: self.lost_primary_at = None def propose_view_change(self):
consider the node as a primary as well (#<I>)
py
diff --git a/pyzotero/zotero.py b/pyzotero/zotero.py index <HASH>..<HASH> 100644 --- a/pyzotero/zotero.py +++ b/pyzotero/zotero.py @@ -643,9 +643,9 @@ class Zotero(object): 'charset': mtypes[1]})) # add headers authreq.add_header( - 'Content-Type', - 'application/x-www-form-urlencoded') - authreq.add_header('If-None-Match', '*') + 'Content-Type', 'application/x-www-form-urlencoded') + authreq.add_header( + 'If-None-Match', '*') try: authresp = urllib2.urlopen(authreq) authdata = json.loads(authresp.read())
Messing about w/formatting
py
diff --git a/tests/unit/test_commands.py b/tests/unit/test_commands.py index <HASH>..<HASH> 100644 --- a/tests/unit/test_commands.py +++ b/tests/unit/test_commands.py @@ -457,6 +457,7 @@ class TestCommands(object): """ res = pmxbot.defit(c, e, "#test", "testrunner", "keyboard") print res + assert isinstance(res, unicode) assert res.startswith("Wikipedia says: In computing, a keyboard is an input device, partially modeled after the typewriter keyboard,") def test_define_irc(self): @@ -465,6 +466,7 @@ class TestCommands(object): """ res = pmxbot.defit(c, e, "#test", "testrunner", "irc") print res + assert isinstance(res, unicode) assert res.startswith("Wikipedia says: Internet Relay Chat (IRC) is a form of real-time Internet text messaging (chat) or synchronous conferencing") def test_urb_irc(self):
Adding test to ensure that responses from define are unicode (otherwise they might cause encoding errors on the way out)
py
diff --git a/processors/generic_processor.py b/processors/generic_processor.py index <HASH>..<HASH> 100644 --- a/processors/generic_processor.py +++ b/processors/generic_processor.py @@ -652,17 +652,26 @@ class GenericCommitHandler(processor.CommitHandler): def delete_handler(self, filecmd): path = filecmd.path + fileid = self.bzr_file_id(path) try: - del self.inventory[self.bzr_file_id(path)] + del self.inventory[fileid] except KeyError: - self.warning("ignoring delete of %s as not in inventory", path) + self._warn_unless_in_merges(fileid, path) except errors.NoSuchId: - self.warning("ignoring delete of %s as not in inventory", path) + self._warn_unless_in_merges(fileid, path) try: self.cache_mgr._delete_path(path) except KeyError: pass + def _warn_unless_in_merges(self, fileid, path): + if len(self.parents) <= 1: + return + for parent in self.parents[1:]: + if fileid in self.get_inventory(parent): + return + self.warning("ignoring delete of %s as not in parent inventories", path) + def copy_handler(self, filecmd): raise NotImplementedError(self.copy_handler)
remove warning about delete iff file is in a merge parent
py
diff --git a/LiSE/LiSE/thing.py b/LiSE/LiSE/thing.py index <HASH>..<HASH> 100644 --- a/LiSE/LiSE/thing.py +++ b/LiSE/LiSE/thing.py @@ -295,6 +295,8 @@ class Thing(Node): scheduled to be somewhere else. """ + if len(path) < 2: + raise ValueError("Paths need at least 2 nodes") eng = self.character.engine with eng.plan: prevplace = path.pop(0) @@ -352,8 +354,12 @@ class Thing(Node): """ destn = dest.name if hasattr(dest, 'name') else dest + if destn == self.location.name: + raise ValueError("I'm already at {}".format(destn)) graph = self.character if graph is None else graph path = nx.shortest_path(graph, self["location"], destn, weight) + if len(path) == 1: + return self.go_to_place(destn, weight) return self.follow_path(path, weight) def travel_to_by(self, dest, arrival_tick, weight=None, graph=None):
Handle it better when you try to make a thing travel where it already is
py
diff --git a/isort/settings.py b/isort/settings.py index <HASH>..<HASH> 100644 --- a/isort/settings.py +++ b/isort/settings.py @@ -26,6 +26,7 @@ from __future__ import absolute_import, division, print_function, unicode_litera import fnmatch import os +import posixpath from collections import namedtuple from .pie_slice import itemsview, lru_cache, native_str @@ -216,7 +217,7 @@ def _get_config_data(file_path, sections): def should_skip(filename, config, path='/'): """Returns True if the file should be skipped based on the passed in settings.""" for skip_path in config['skip']: - if os.path.join(path, filename).endswith('/' + skip_path.lstrip('/')): + if posixpath.abspath(posixpath.join(path, filename)) == posixpath.abspath(skip_path.replace('\\', '/')): return True position = os.path.split(filename)
Normalize paths for consistent linux/windows usage
py
diff --git a/linkcheck/checker/httpurl.py b/linkcheck/checker/httpurl.py index <HASH>..<HASH> 100644 --- a/linkcheck/checker/httpurl.py +++ b/linkcheck/checker/httpurl.py @@ -19,6 +19,13 @@ Handle http links. """ import requests +# The validity of SSL certs is ignored to be able +# the check the URL and recurse into it. +# The warning about invalid SSL certs is given to the +# user instead. +import warnings +warnings.simplefilter('ignore', requests.packages.urllib3.exceptions.InsecureRequestWarning) + from cStringIO import StringIO from .. import (log, LOG_CHECK, strformat, mimeutil,
Ignore urllib3 warnings about invalid SSL certs since we check them ourselves.
py
diff --git a/pale/arguments/base.py b/pale/arguments/base.py index <HASH>..<HASH> 100644 --- a/pale/arguments/base.py +++ b/pale/arguments/base.py @@ -167,6 +167,9 @@ class JsonDictArgument(BaseArgument): self._validate_type(item, item_name) + if item is None: + return item + item_keys = item.keys() field_keys = self.field_map.keys() extra_keys = [ k for k in item_keys if k not in field_keys ]
if dict is not present return None
py
diff --git a/msmtools/estimation/dense/transition_matrix.py b/msmtools/estimation/dense/transition_matrix.py index <HASH>..<HASH> 100644 --- a/msmtools/estimation/dense/transition_matrix.py +++ b/msmtools/estimation/dense/transition_matrix.py @@ -62,8 +62,8 @@ def __initX(C): """ Computes an initial guess for a reversible correlation matrix """ - from ..api import tmatrix - from ...analysis import statdist + from msmtools.estimation import tmatrix + from msmtools.analysis import statdist T = tmatrix(C) mu = statdist(T) @@ -138,8 +138,8 @@ def estimate_transition_matrix_reversible(C, Xinit=None, maxiter=1000000, maxerr history of likelihood history. Has the length of the number of iterations needed. Only returned if return_conv = True """ - from ..api import is_connected - from ...estimation import log_likelihood + from msmtools.estimation import is_connected + from msmtools.estimation import log_likelihood # check input if (not is_connected(C)): ValueError('Count matrix is not fully connected. ' +
[estimation/dense/tmatrix] replaced relative imports
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -63,19 +63,9 @@ setup_requires = [ install_requires = [ "flask>=1.1,<2.0", - "flask-resources>=0.7.0,<0.8.0", - "invenio-accounts>=1.4.3", - "invenio-assets>=1.2.2", - "invenio-base>=1.2.3", "invenio-files-rest>=1.2.0", - "invenio-i18n>=1.2.0", - "invenio-indexer>=1.2.0", - "invenio-jsonschemas>=1.1.2", "invenio-mail>=1.0.2", - "invenio-pidstore>=1.2.2", - "invenio-records>=1.5.0a4", - "invenio-rdm-records>=0.31.0", - "uritemplate>=3.0.1", + "invenio-rdm-records>=0.31.0,<0.32.0", ] packages = find_packages()
dependencies: depend only on rdm-records
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -7,7 +7,7 @@ setup( author_email='janne.kuuskeri@gmail.com', url='https://github.com/wuher/devil/', packages=['devil', 'devil.perm', 'devil.mappers'], - install_requires=['simplejson>=2.1.0', 'django>=1.2.0'], + install_requires=['simplejson>=2.1.0', 'django>=1.3.0'], license='MIT', description='Simple REST framework for Django', long_description=open('README.markdown').read(),
updated the django dependency to version <I>
py
diff --git a/gitty/sys_path.py b/gitty/sys_path.py index <HASH>..<HASH> 100644 --- a/gitty/sys_path.py +++ b/gitty/sys_path.py @@ -3,6 +3,7 @@ from . import config, library ORIGINAL_SYS_PATH = sys.path[:] PREFIX = '//git/' +PULL_AUTOMATICALLY = False def load(gitpath, prefix=PREFIX): @@ -21,7 +22,7 @@ def load(gitpath, prefix=PREFIX): e.msg += ('for path ' + gitpath,) raise - lib.load() or lib.pull() + lib.load() or PULL_AUTOMATICALLY and lib.pull() return lib.path
Turn off pulls from repositories.
py
diff --git a/tests/test_bbox.py b/tests/test_bbox.py index <HASH>..<HASH> 100644 --- a/tests/test_bbox.py +++ b/tests/test_bbox.py @@ -188,4 +188,5 @@ def test_random_rotate(): bboxes = [[78, 42, 142, 80]] aug = Rotate(limit = 15, p=1.) transformed = aug(image=image, bboxes=bboxes) - assert len(bboxes) == len(transformed['bboxes']) \ No newline at end of file + assert len(bboxes) == len(transformed['bboxes']) + \ No newline at end of file
Merge remote-tracking branch 'origin/add_bbox_to_rotate' into add_bbox_to_rotate
py
diff --git a/ansible/modules/hashivault/hashivault_auth_ldap.py b/ansible/modules/hashivault/hashivault_auth_ldap.py index <HASH>..<HASH> 100644 --- a/ansible/modules/hashivault/hashivault_auth_ldap.py +++ b/ansible/modules/hashivault/hashivault_auth_ldap.py @@ -221,7 +221,8 @@ def hashivault_auth_ldap(module): # check current config if exists: - result = client.auth.ldap.read_configuration()['data'] + result = client.auth.ldap.read_configuration( + mount_point=desired_state['mount_point'])['data'] # some keys need to be remapped to match desired state (and HVAC implementation) current_state['discover_dn'] = result['discoverdn'] current_state['group_attr'] = result['groupattr'] @@ -253,4 +254,4 @@ def hashivault_auth_ldap(module): return {'changed': changed} if __name__ == '__main__': - main() \ No newline at end of file + main()
Pass mount_point, so the current configuration for a mount point other than "ldap" can be read.
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -93,7 +93,14 @@ def get_data_files(): def get_packages(): """Return package list""" - packages = get_subpackages(LIBNAME) + get_subpackages('spyplugins') + packages = ( + get_subpackages(LIBNAME) + + get_subpackages('spyder_breakpoints') + + get_subpackages('spyder_profiler') + + get_subpackages('spyder_pylint') + + get_subpackages('spyder_io_dcm') + + get_subpackages('spyder_io_hdf5') + ) return packages
Re-include plugins in setup.py packages
py
diff --git a/luminoso_api/cli.py b/luminoso_api/cli.py index <HASH>..<HASH> 100644 --- a/luminoso_api/cli.py +++ b/luminoso_api/cli.py @@ -18,7 +18,7 @@ DESCRIPTION = "Access the luminoso api via the command line." USAGE = """ Supply an http verb and a path, with optional parameters. -Output is returned as json, or an error message. +Output is returned as json, csv, or an error message. Parameters may be specified in one of three ways:
mention csv output in usage
py
diff --git a/mtools/mlaunch/mlaunch.py b/mtools/mlaunch/mlaunch.py index <HASH>..<HASH> 100755 --- a/mtools/mlaunch/mlaunch.py +++ b/mtools/mlaunch/mlaunch.py @@ -1853,19 +1853,12 @@ class MLaunchTool(BaseCmdLineTool): con.close() con = self.client('localhost:%i' % port, replicaSet=set_name, serverSelectionTimeoutMS=10000) - v = ismaster.get('maxWireVersion', 0) - if v >= 7: - # Until drivers have implemented SCRAM-SHA-256, use old mechanism. - opts = {'mechanisms': ['SCRAM-SHA-1']} - else: - opts = {} if database == "$external": password = None try: - con[database].command("createUser", name, pwd=password, roles=roles, - **opts) + con[database].command("createUser", name, pwd=password, roles=roles) except OperationFailure as e: raise e
mlaunch: Use default SASL/SCRAM mechanisms when creating users. (#<I>)
py
diff --git a/src/pyshark/capture/live_capture.py b/src/pyshark/capture/live_capture.py index <HASH>..<HASH> 100644 --- a/src/pyshark/capture/live_capture.py +++ b/src/pyshark/capture/live_capture.py @@ -62,7 +62,7 @@ class LiveCapture(Capture): if proc.poll() is not None: # Process has not terminated yet proc.terminate() - except WindowsError: + except OSError: # If process already terminated somehow. pass @@ -88,9 +88,10 @@ class LiveCapture(Capture): try: if proc.poll() is not None: proc.terminate() - except WindowsError: - # On windows - pass + except OSError: + # On windows, happens on termination. + if 'win' not in sys.platform: + raise def get_parameters(self, packet_count=None): """
Changed WindowsErrors to OSErrors to support non-windows platforms
py
diff --git a/cheroot/test/test_ssl.py b/cheroot/test/test_ssl.py index <HASH>..<HASH> 100644 --- a/cheroot/test/test_ssl.py +++ b/cheroot/test/test_ssl.py @@ -344,7 +344,7 @@ def test_tls_client_auth( expected_substring = 'tlsv1 alert unknown ca' if ( IS_WINDOWS - and tls_verify_mode == ssl.CERT_OPTIONAL + and tls_verify_mode == ssl.CERT_REQUIRED and not is_trusted_cert and tls_client_identity == 'localhost' and adapter_type == 'builtin'
Patch expectation if TLS verify mode is required
py
diff --git a/faq/search_indexes.py b/faq/search_indexes.py index <HASH>..<HASH> 100644 --- a/faq/search_indexes.py +++ b/faq/search_indexes.py @@ -5,19 +5,18 @@ from faq.models import Topic, Question class FAQIndexBase(indexes.SearchIndex): + text = indexes.CharField(document=True, use_template=True) url = indexes.CharField(model_attr='get_absolute_url', indexed=False) class TopicIndex(FAQIndexBase): - title = indexes.CharField(model_attr='title', indexed=True) def get_queryset(self): return Topic.objects.published() class QuestionIndex(FAQIndexBase): - title = indexes.CharField(model_attr='question', indexed=True) def get_queryset(self): return Question.objects.published()
Removed title index field from search indexes. The title is already being indexed in the text document.
py
diff --git a/kombine/sampler.py b/kombine/sampler.py index <HASH>..<HASH> 100644 --- a/kombine/sampler.py +++ b/kombine/sampler.py @@ -921,13 +921,13 @@ class Sampler(object): blob0 = None pbar = self._get_finite_pbar(progress, N) - iter = 0 + iter = self.iterations for results in self.sample(p0, lnpost0, lnprop0, blob0, N, **kwargs): - iter += 1 if pbar is not None: pbar.update(1) pbar.set_postfix_str("| {}/{} Walkers Accepted | Last step Acc Rate: {}".format(np.count_nonzero(self.acceptance[iter]), self.nwalkers, self.acceptance_fraction[-1])) + iter += 1 # Store the results for later continuation and toss out the blob self._last_run_mcmc_result = results[:3]
Finally fixed acceptance in pbar
py
diff --git a/ml-agents/mlagents/trainers/policy/policy.py b/ml-agents/mlagents/trainers/policy/policy.py index <HASH>..<HASH> 100644 --- a/ml-agents/mlagents/trainers/policy/policy.py +++ b/ml-agents/mlagents/trainers/policy/policy.py @@ -35,15 +35,6 @@ class Policy: self.previous_action_dict: Dict[str, np.ndarray] = {} self.memory_dict: Dict[str, np.ndarray] = {} self.normalize = trainer_settings.network_settings.normalize - if self.normalize: - has_vec_obs = False - # Make sure there is at least one vector observation for normalization - for sen_spec in behavior_spec.sensor_specs: - if len(sen_spec.shape) == 1: - has_vec_obs = True - break - if not has_vec_obs: - self.normalize = False self.use_recurrent = self.network_settings.memory is not None self.h_size = self.network_settings.hidden_units num_layers = self.network_settings.num_layers
removing the extra code to set normalize to false if no observation can be normalized
py
diff --git a/nurbs/Grid.py b/nurbs/Grid.py index <HASH>..<HASH> 100644 --- a/nurbs/Grid.py +++ b/nurbs/Grid.py @@ -78,3 +78,18 @@ class Grid: # Update the origin (bottom left corner) self._origin = self._gridpts[0][0] + + def save(self, file_name="grid.txt"): + target = open(file_name, 'w') + target.truncate() + for cols in self._gridpts: + line = "" + col_size = len(cols) + counter = 0 + for rows in cols: + line = line + str(rows[0]) + "," + str(rows[1]) + "," + str(rows[2]) + counter = counter + 1 + if counter != col_size: + line = line + ";" + target.write(line) + target.write("\n")
Added save function for the Grid Generator
py
diff --git a/pmag_basic_dialogs.py b/pmag_basic_dialogs.py index <HASH>..<HASH> 100755 --- a/pmag_basic_dialogs.py +++ b/pmag_basic_dialogs.py @@ -2539,6 +2539,11 @@ class check(wx.Frame): # end that thing col_labels = ['sites', '', 'locations', 'site_class', 'site_lithology', 'site_type', 'site_definition', 'site_lon', 'site_lat'] self.site_grid, self.temp_data['sites'], self.temp_data['locations'] = self.make_table(col_labels, self.sites, self.Data_hierarchy, 'location_of_site') + + # get data_er_* dictionaries into the ErMagic object, if they didn't already exist + if not self.ErMagic.data_er_sites: + self.ErMagic.read_MagIC_info() + self.add_extra_grid_data(self.site_grid, self.sites, col_labels, self.ErMagic.data_er_sites) locations = self.temp_data['locations']
QuickMagic step 3: fix bug where ErMagic.data_er_* dictionaries were not filled in, and so complete grids could not be created
py
diff --git a/doctr/travis.py b/doctr/travis.py index <HASH>..<HASH> 100644 --- a/doctr/travis.py +++ b/doctr/travis.py @@ -284,8 +284,6 @@ def sync_from_log(src, dst, log_file): files = glob.iglob(join(src, '**'), recursive=True) # sorted makes this easier to test for f in sorted(files): - if f == src: - continue new_f = join(dst, f[len(src):]) if isdir(f): os.makedirs(new_f, exist_ok=True)
Don't skip src in sync_from_log() It may be necessary to create the dst directory.
py
diff --git a/fontbakery-nametable-from-filename.py b/fontbakery-nametable-from-filename.py index <HASH>..<HASH> 100755 --- a/fontbakery-nametable-from-filename.py +++ b/fontbakery-nametable-from-filename.py @@ -235,6 +235,13 @@ def nametable_from_filename(filepath): win_ps_name = filename.encode('utf_16_be') new_table.setName(win_ps_name, 6, 3, 1, 1033) + if style_name not in WIN_SAFE_STYLES: + # Preferred Family Name + new_table.setName(family_name.encode('utf_16_be'), 16, 3, 1, 1033) + # Preferred SubfamilyName + win_pref_subfam_name = _mac_subfamily_name(style_name).encode('utf_16_be') + new_table.setName(win_pref_subfam_name, 17, 3, 1, 1033) + # PAD missing fields # ------------------ for field in REQUIRED_FIELDS:
test_nametable-from-filename: add win nameIDs <I> and <I> when style is not Regular, Italic, Bold, Bold Italic. This was discovered in #<I>
py
diff --git a/lizzy_client/version.py b/lizzy_client/version.py index <HASH>..<HASH> 100644 --- a/lizzy_client/version.py +++ b/lizzy_client/version.py @@ -1,4 +1,4 @@ MAJOR_VERSION = 0 MINOR_VERSION = 2 -REVISION = "20160114a" +REVISION = "201601142" VERSION = "{MAJOR_VERSION}.{MINOR_VERSION}.{REVISION}".format_map(locals())
zalando/lizzy#<I> Use a version that evaluates higher than the previous
py
diff --git a/msrestazure/azure_active_directory.py b/msrestazure/azure_active_directory.py index <HASH>..<HASH> 100644 --- a/msrestazure/azure_active_directory.py +++ b/msrestazure/azure_active_directory.py @@ -544,8 +544,7 @@ class AdalAuthentication(Authentication): # pylint: disable=too-few-public-meth raw_token = self._adal_method(*self._args, **self._kwargs) except adal.AdalError as err: # pylint: disable=no-member - if (getattr(err, 'error_response') and ('error_description' in err.error_response) - and ('AADSTS70008:' in err.error_response['error_description'])): + if 'AADSTS70008:' in ((getattr(err, 'error_response', None) or {}).get('error_description') or ''): raise Expired("Credentials have expired due to inactivity.") else: raise AuthenticationError(err)
CR changes after reviewing the comments, I changed the fix so it is more robust
py
diff --git a/anyconfig/backend/configobj.py b/anyconfig/backend/configobj.py index <HASH>..<HASH> 100644 --- a/anyconfig/backend/configobj.py +++ b/anyconfig/backend/configobj.py @@ -25,8 +25,8 @@ """ from __future__ import absolute_import -import configobj import inspect +import configobj import anyconfig.backend.base import anyconfig.mdicts
refactor: correct import order; inspect is in standard lib
py
diff --git a/pathtools/path.py b/pathtools/path.py index <HASH>..<HASH> 100644 --- a/pathtools/path.py +++ b/pathtools/path.py @@ -71,9 +71,9 @@ def get_dir_walker(recursive, topdown=True, followlinks=False): else: def walk(path, topdown=topdown, followlinks=followlinks): try: - yield next(os.walk(path, topdown, followlinks)) + yield next(os.walk(path, topdown=topdown, followlinks=followlinks)) except NameError: - yield os.walk(path, topdown, followlinks).next() #IGNORE:E1101 + yield os.walk(path, topdown=topdown, followlinks=followlinks).next() #IGNORE:E1101 return walk
Changed calls to os.walk for <I> compatibility.
py
diff --git a/pandas/io/sql.py b/pandas/io/sql.py index <HASH>..<HASH> 100644 --- a/pandas/io/sql.py +++ b/pandas/io/sql.py @@ -228,7 +228,11 @@ def _write_sqlite(frame, table, names, cur): wildcards = ','.join(['?'] * len(names)) insert_query = 'INSERT INTO %s (%s) VALUES (%s)' % ( table, col_names, wildcards) - data = [tuple(x) for x in frame.values] + # pandas types are badly handled if there is only 1 column ( Issue #3628 ) + if not len(frame.columns )==1 : + data = [tuple(x) for x in frame.values] + else : + data = [tuple(x) for x in frame.values.tolist()] cur.executemany(insert_query, data) def _write_mysql(frame, table, names, cur):
BUG : issue (#<I>) when writing a dataframe column of integers to sqlite
py
diff --git a/docs/conf.py b/docs/conf.py index <HASH>..<HASH> 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,10 +1,16 @@ +# coding: utf-8 +from __future__ import unicode_literals + import sys sys.path.append('..') source_suffix = '.rst' +source_encoding= 'utf-8' master_doc = 'index' +language='ru' + html_theme = 'alabaster' extensions = [
Try to fix docs encoding
py
diff --git a/contract/migrations/13.0.1.0.0/post-migration.py b/contract/migrations/13.0.1.0.0/post-migration.py index <HASH>..<HASH> 100644 --- a/contract/migrations/13.0.1.0.0/post-migration.py +++ b/contract/migrations/13.0.1.0.0/post-migration.py @@ -6,9 +6,28 @@ from openupgradelib import openupgrade # pylint: disable=W7936 @openupgrade.migrate() def migrate(env, version): + openupgrade.logged_query( + env.cr, + """ + UPDATE account_move am + SET old_contract_id = ai.old_contract_id + FROM account_invoice ai + WHERE ai.id = am.old_invoice_id + AND ai.old_contract_id IS NOT NULL""", + ) + openupgrade.logged_query( + env.cr, + """ + UPDATE account_move_line aml + SET contract_line_id = ail.contract_line_id + FROM account_invoice_line ail + WHERE ail.id = aml.old_invoice_line_id + AND ail.contract_line_id IS NOT NULL""", + ) openupgrade.load_data( env.cr, "contract", "migrations/13.0.1.0.0/noupdate_changes.xml" ) + # Don't alter line recurrence v12 behavior contracts = env["contract.contract"].search([]) contracts.write({"line_recurrence": True})
[OU-FIX] contract: Transfer contract info from invoice
py
diff --git a/pbs.py b/pbs.py index <HASH>..<HASH> 100644 --- a/pbs.py +++ b/pbs.py @@ -362,7 +362,7 @@ def run_repl(env): try: line = raw_input("pbs> ") except (ValueError, EOFError): break - try: exec compile(line, "<dummy>", "single") in env, env + try: exec(compile(line, "<dummy>", "single"), env, env) except SystemExit: break except: print(traceback.format_exc()) @@ -428,7 +428,7 @@ from anywhere other than a stand-alone script. Do a 'from pbs import program' i source = "".join(source) exit_code = 0 - try: exec source in env, env + try: exec(source, env, env) except SystemExit, e: exit_code = e.code except: print(traceback.format_exc())
exec -> py2/3
py
diff --git a/manager/globals/constants.py b/manager/globals/constants.py index <HASH>..<HASH> 100644 --- a/manager/globals/constants.py +++ b/manager/globals/constants.py @@ -65,7 +65,7 @@ class Constants(): :param minorVersion: Package minor version. :type minorVersion: unicode """ - changeVersion = "4" + changeVersion = "5" """ :param changeVersion: Package change version. :type changeVersion: unicode
Raise application version number to <I>.
py
diff --git a/validator/submain.py b/validator/submain.py index <HASH>..<HASH> 100644 --- a/validator/submain.py +++ b/validator/submain.py @@ -102,7 +102,7 @@ def test_webapp(err, package, expectation=0): err.notice(("main", "test_webapp", "confirmed"), - "Web App confirmed.") + "App confirmed.") def test_package(err, file_, name, expectation=PACKAGE_ANY,
reworded Web app to App
py
diff --git a/kafka/version.py b/kafka/version.py index <HASH>..<HASH> 100644 --- a/kafka/version.py +++ b/kafka/version.py @@ -1 +1 @@ -__version__ = '0.9.5' +__version__ = '0.99.0-dev'
Towards version <I>
py
diff --git a/hypercorn/trio/server.py b/hypercorn/trio/server.py index <HASH>..<HASH> 100644 --- a/hypercorn/trio/server.py +++ b/hypercorn/trio/server.py @@ -114,7 +114,9 @@ class Server: if isinstance(event, RawData): async with self.send_lock: try: - await self.stream.send_all(event.data) + with trio.CancelScope() as cancel_scope: + cancel_scope.shield = True + await self.stream.send_all(event.data) except trio.BrokenResourceError: await self.protocol.handle(Closed()) elif isinstance(event, Closed):
Bugfix shield data sending in Trio working This ensures that data fragments aren't sent (say if the sening task is cancelled). Data fragments, such as a partial HTTP/2 frame confuse the client and likely result in a failed/stalled connection. Note the asyncio worker write is synchronous and hence cannot be cancelled whilst writing.
py
diff --git a/nameko/testing/utils.py b/nameko/testing/utils.py index <HASH>..<HASH> 100644 --- a/nameko/testing/utils.py +++ b/nameko/testing/utils.py @@ -168,21 +168,21 @@ class ResourcePipeline(object): def _create(self): while self.running: - obj = self.create() - self.ready.put(obj) + item = self.create() + self.ready.put(item) def _destroy(self): while True: - obj = self.trash.get() - if obj is ResourcePipeline.STOP: + item = self.trash.get() + if item is ResourcePipeline.STOP: break - self.destroy(obj) + self.destroy(item) def get(self): return self.ready.get() - def discard(self, vhost): - self.trash.put(vhost) + def discard(self, item): + self.trash.put(item) def shutdown(self): self.running = False
remove reference to vhost in generic implementation
py
diff --git a/salt/utils/network.py b/salt/utils/network.py index <HASH>..<HASH> 100644 --- a/salt/utils/network.py +++ b/salt/utils/network.py @@ -25,6 +25,10 @@ except ImportError: import salt.utils from salt._compat import subprocess, ipaddress +# inet_pton does not exist in Windows, this is a workaround +if salt.utils.is_windows(): + from salt.ext import win_inet_pton # pylint: disable=unused-import + log = logging.getLogger(__name__) # pylint: disable=C0103
Adds support for inet_pton in Windows to network util
py
diff --git a/tests/test_macro.py b/tests/test_macro.py index <HASH>..<HASH> 100644 --- a/tests/test_macro.py +++ b/tests/test_macro.py @@ -115,7 +115,6 @@ def test_arg_parse_with_escaped_gt_lt_symbols(test, expected, error_msg): error_msg) -@pytest.mark.xfail @unittest.mock.patch('datetime.datetime', FakeDate) @pytest.mark.parametrize("test, expected, error_msg", [ ("Today < is <date format=%m/%y>", "Today < is 01/19", "Phrase with extra < before macro breaks macros"),
Undo xfail for passing tests
py
diff --git a/xclim/testing/tests/test_indices.py b/xclim/testing/tests/test_indices.py index <HASH>..<HASH> 100644 --- a/xclim/testing/tests/test_indices.py +++ b/xclim/testing/tests/test_indices.py @@ -1076,7 +1076,7 @@ class TestJetStreamIndices: da_ua = self.da_ua # Should raise ValueError as longitude is in 0-360 instead of -180.E-180.W with pytest.raises(ValueError): - _ = xci.jetstream_metric_woolings(da_ua) + _ = xci.jetstream_metric_woollings(da_ua) # redefine longitude coordiantes to -180.E-180.W so function runs da_ua = da_ua.cf.assign_coords( {
rename js metric
py
diff --git a/pyresttest/tests.py b/pyresttest/tests.py index <HASH>..<HASH> 100644 --- a/pyresttest/tests.py +++ b/pyresttest/tests.py @@ -237,10 +237,11 @@ class Test(object): elif self.method == u'DELETE': curl.setopt(curl.CUSTOMREQUEST,'DELETE') - headers = list() + if self.headers: #Convert headers dictionary to list of header entries, tested and working - for headername, headervalue in self.headers.items(): - headers.append(str(headername) + ': ' +str(headervalue)) + headers = [str(headername)+':'+str(headervalue) for headername, headervalue in self.headers.items()] + else: + headers = list() headers.append("Expect:") # Fix for expecting 100-continue from server, which not all servers will send! headers.append("Connection: close") curl.setopt(curl.HTTPHEADER, headers)
Clean up headers handling a bit
py
diff --git a/sacn/messages/data_packet.py b/sacn/messages/data_packet.py index <HASH>..<HASH> 100644 --- a/sacn/messages/data_packet.py +++ b/sacn/messages/data_packet.py @@ -136,7 +136,9 @@ class DataPacket(RootLayer): tuple(raw_data[40:44]) != tuple(VECTOR_E131_DATA_PACKET) or \ raw_data[117] != VECTOR_DMP_SET_PROPERTY: # REMEMBER: when slicing: [inclusive:exclusive] raise TypeError('Some of the vectors in the given raw data are not compatible to the E131 Standard!') - + if raw_data[125] != 0x00: + raise TypeError('Not a default Null Start Code for Dimmers per DMX512 & DMX512/1990') + tmpPacket = DataPacket(cid=raw_data[22:38], sourceName=str(raw_data[44:108]), universe=(0xFF * raw_data[113]) + raw_data[114]) # high byte first tmpPacket.priority = raw_data[108]
Raise Error when the start code is not Null When the start code is not Null, the packet will probably not be DMX-A data. A full list of start codes can be found here: <URL>
py
diff --git a/niworkflows/utils/images.py b/niworkflows/utils/images.py index <HASH>..<HASH> 100644 --- a/niworkflows/utils/images.py +++ b/niworkflows/utils/images.py @@ -205,7 +205,6 @@ def resample_by_spacing(in_file, zooms, order=3, clip=True, smooth=False): resampled = map_coordinates( data, ijk[:3, :], - output=hdr.get_data_dtype(), order=order, mode="constant", cval=0,
Update niworkflows/utils/images.py
py
diff --git a/cumulusci/cli/cci.py b/cumulusci/cli/cci.py index <HASH>..<HASH> 100644 --- a/cumulusci/cli/cci.py +++ b/cumulusci/cli/cci.py @@ -62,7 +62,7 @@ def timestamp_file(): try: with open(timestamp_file, "r+") as f: yield f - except OSError: # file does not exist + except IOError: # file does not exist with open(timestamp_file, "w+") as f: yield f
IOError is the best exception for Python 2.
py
diff --git a/cogen/web/wsgi.py b/cogen/web/wsgi.py index <HASH>..<HASH> 100644 --- a/cogen/web/wsgi.py +++ b/cogen/web/wsgi.py @@ -775,7 +775,7 @@ def server_factory(global_conf, host, port, **options): default_priority=int(options.get('sched_default_priority', priority.FIRST)), default_timeout=float(options.get('sched_default_timeout', 0)), proactor_resolution=float(options.get('proactor_resolution', 0.5)), - sockoper_run_first=asbool(options.get('sockoper_run_first', 'true')), + proactor_multiplex_first=asbool(options.get('proactor_multiplex_first', 'true')), proactor_greedy=asbool(options.get('proactor_greedy')), ops_greedy=asbool(options.get('ops_greedy')) )
fixed a paster server factory option
py
diff --git a/tests/sphinx_supp_py2/conf.py b/tests/sphinx_supp_py2/conf.py index <HASH>..<HASH> 100644 --- a/tests/sphinx_supp_py2/conf.py +++ b/tests/sphinx_supp_py2/conf.py @@ -20,7 +20,8 @@ example_gallery_config = { 'example_mpl_test_figure_chosen.ipynb'): 0}, 'supplementary_files': { osp.join(dirname, 'raw_examples', - 'example_hello_world.ipynb'): ['test2.txt']}} + 'example_hello_world.ipynb'): ['test2.txt']}, + 'remove_cell_tags': ['remove_in_docs']} exclude_patterns = ['raw_examples']
Minor fix in sphinx conf
py
diff --git a/solvebio/cli/credentials.py b/solvebio/cli/credentials.py index <HASH>..<HASH> 100644 --- a/solvebio/cli/credentials.py +++ b/solvebio/cli/credentials.py @@ -20,11 +20,21 @@ class netrc(_netrc): """ @staticmethod def path(): + if os.name == 'nt': + # Windows + path = '~\\_solvebio\\credentials' + else: + # *nix + path = '~/.solvebio/credentials' try: - path = os.path.join(os.environ['HOME'], '.solvebio', 'credentials') + path = os.path.expanduser(path) except KeyError: - raise IOError("Could not find credentials file: $HOME is not set") + # os.path.expanduser can fail when $HOME is undefined and + # getpwuid fails. See http://bugs.python.org/issue20164 + raise IOError( + "Could not find any home directory for '{0}'" + .format(path)) if not os.path.isdir(os.path.dirname(path)): os.makedirs(os.path.dirname(path))
use expanduser instead of looking for $HOME manually (fixes #<I>)
py
diff --git a/salt/returners/mysql.py b/salt/returners/mysql.py index <HASH>..<HASH> 100644 --- a/salt/returners/mysql.py +++ b/salt/returners/mysql.py @@ -194,7 +194,7 @@ def returner(ret): cur.execute(sql, (ret['fun'], ret['jid'], json.dumps(ret['return']), ret['id'], - ret['success'], + ret.get('success', False), json.dumps(ret))) except salt.exceptions.SaltMasterError: log.critical('Could not store return with MySQL returner. MySQL server unavailable.')
Set success key if it is not already present
py
diff --git a/torf/_stream.py b/torf/_stream.py index <HASH>..<HASH> 100644 --- a/torf/_stream.py +++ b/torf/_stream.py @@ -465,8 +465,8 @@ class TorrentFileStream: # reached. if skip_bytes: - skipped = fh.read(skip_bytes) - skip_bytes -= len(skipped) + skipped = fh.seek(skip_bytes) + skip_bytes -= skipped def iter_pieces(fh, piece): piece_size = self._torrent.piece_size
TorrentFileStream: Use seek() instead of read() to skip_bytes
py
diff --git a/tohu/v3/derived_generators.py b/tohu/v3/derived_generators.py index <HASH>..<HASH> 100644 --- a/tohu/v3/derived_generators.py +++ b/tohu/v3/derived_generators.py @@ -8,6 +8,7 @@ __all__ = DERIVED_GENERATORS + ['DERIVED_GENERATORS'] class Apply(TohuBaseGenerator): def __init__(self, func, *arg_gens, **kwarg_gens): + super().__init__() self.func = func self.orig_arg_gens = arg_gens self.orig_kwarg_gens = kwarg_gens @@ -21,4 +22,7 @@ class Apply(TohuBaseGenerator): return self.func(*next_args, **next_kwargs) def reset(self, seed=None): - pass \ No newline at end of file + super().reset(seed) + + def spawn(self): + return Apply(self.func, *self.orig_arg_gens, **self.orig_kwarg_gens) \ No newline at end of file
Add spawn method to Apply; initialise clones by calling super().__init__()
py
diff --git a/pandas/core/series.py b/pandas/core/series.py index <HASH>..<HASH> 100644 --- a/pandas/core/series.py +++ b/pandas/core/series.py @@ -9,6 +9,7 @@ from itertools import izip import csv import operator import types +from distutils.version import LooseVersion from numpy import nan, ndarray import numpy as np @@ -34,6 +35,7 @@ from pandas.util.decorators import Appender, Substitution __all__ = ['Series', 'TimeSeries'] _np_version = np.version.short_version +_np_version_under1p6 = LooseVersion(_np_version) < '1.6' #------------------------------------------------------------------------------- # Wrapper function for Series arithmetic methods @@ -72,7 +74,7 @@ def _radd_compat(left, right): try: output = radd(left, right) except TypeError: - cond = (_np_version.startswith('1.5') and + cond = (_np_version_under1p6 and left.dtype == np.object_) if cond: # pragma: no cover output = np.empty_like(left)
BF: special handling for _radd_compat for any numpy under <I> pre-evaluated numpy version condition for "efficiency" ;-)
py
diff --git a/librarypaste/pastebin.py b/librarypaste/pastebin.py index <HASH>..<HASH> 100644 --- a/librarypaste/pastebin.py +++ b/librarypaste/pastebin.py @@ -61,7 +61,7 @@ class Server(object): time=datetime.datetime.now(), makeshort=bool(makeshort), ) - data = file.file is not None and file.file.read() + data = file is not None and file.file is not None and file.file.read() if data: filename = file.filename mime = str(file.content_type)
The input file can be None, so check for it.
py
diff --git a/imbox/__init__.py b/imbox/__init__.py index <HASH>..<HASH> 100644 --- a/imbox/__init__.py +++ b/imbox/__init__.py @@ -58,3 +58,5 @@ class Imbox(object): return self.fetch_list(**kwargs) + def folders(self): + return self.connection.list()
imbox: added folders() method to Imbox I think it would be nice to have the list of folders on the Imbox class along with the message query rather than on the ImapTransport class. However, I have left the existing list_folders() method on the ImapTransport to avoid breaking existing code that is using imbox.
py
diff --git a/datadog_checks_base/datadog_checks/base/__about__.py b/datadog_checks_base/datadog_checks/base/__about__.py index <HASH>..<HASH> 100644 --- a/datadog_checks_base/datadog_checks/base/__about__.py +++ b/datadog_checks_base/datadog_checks/base/__about__.py @@ -1,4 +1,4 @@ # (C) Datadog, Inc. 2018-present # All rights reserved -# Licensed under a 3-clause BSOD style license (see LICENSE) +# Licensed under a 3-clause BSD style license (see LICENSE) __version__ = "11.6.0"
Fix typo (#<I>)
py
diff --git a/virtualenvapi/manage.py b/virtualenvapi/manage.py index <HASH>..<HASH> 100644 --- a/virtualenvapi/manage.py +++ b/virtualenvapi/manage.py @@ -208,8 +208,9 @@ class VirtualEnvironment(object): else: packages.append((name.strip(), description.strip())) except ValueError: - name, description = packages[-1] - packages[-1] = (name, description + six.u(' ') + result.strip()) + if len(packages): + name, description = packages[-1] + packages[-1] = (name, description + six.u(' ') + result.strip()) return packages def search_names(self, term):
Ensure the package list is non empty before referencing a list element
py
diff --git a/pygsp/graphs/nngraphs/nngraph.py b/pygsp/graphs/nngraphs/nngraph.py index <HASH>..<HASH> 100644 --- a/pygsp/graphs/nngraphs/nngraph.py +++ b/pygsp/graphs/nngraphs/nngraph.py @@ -92,6 +92,10 @@ class NNGraph(Graph): N, d = np.shape(self.Xin) Xout = self.Xin + if k >= N: + raise ValueError('The number of neighbors (k={}) must be smaller ' + 'than the number of nodes ({}).'.format(k, N)) + if self.center: Xout = self.Xin - np.kron(np.ones((N, 1)), np.mean(self.Xin, axis=0))
error if #neighbors greater than #nodes
py
diff --git a/graphene_pynamodb/tests/test_query.py b/graphene_pynamodb/tests/test_query.py index <HASH>..<HASH> 100644 --- a/graphene_pynamodb/tests/test_query.py +++ b/graphene_pynamodb/tests/test_query.py @@ -14,10 +14,12 @@ def setup_fixtures(): reporter2 = Reporter(id=2, first_name='ABO', last_name='Y') reporter2.save() if not Article.exists(): + Article.create_table(read_capacity_units=1, write_capacity_units=1, wait=True) article = Article(id=1, headline='Hi!') article.reporter_id = 1 article.save() if not Editor.exists(): + Editor.create_table(read_capacity_units=1, write_capacity_units=1, wait=True) editor = Editor(editor_id=1, name="John") editor.save()
Missed creating tables for articles and editors
py
diff --git a/openquake/calculators/hazard/disagg/core.py b/openquake/calculators/hazard/disagg/core.py index <HASH>..<HASH> 100644 --- a/openquake/calculators/hazard/disagg/core.py +++ b/openquake/calculators/hazard/disagg/core.py @@ -58,11 +58,10 @@ def disagg_task(job_id, block, lt_rlz_id, calc_type): phase; first we must computed all of the hazard curves, then we can compute the disaggregation histograms. """ - result = None if calc_type == 'hazard_curve': - result = classical.compute_hazard_curves(job_id, block, lt_rlz_id) + classical.compute_hazard_curves(job_id, block, lt_rlz_id) elif calc_type == 'disagg': - result = compute_disagg(job_id, block, lt_rlz_id) + compute_disagg(job_id, block, lt_rlz_id) else: msg = ('Invalid calculation type "%s";' ' expected "hazard_curve" or "disagg"') @@ -149,7 +148,6 @@ def compute_disagg(job_id, points, lt_rlz_id): lt_rlz.save() logs.LOG.debug('< done computing disaggregation') - return None def _prepare_sources(hc, lt_rlz_id):
calcs/hazard/disagg/core: Task functions shouldn't return anything, even None.
py
diff --git a/mechanicalsoup/browser.py b/mechanicalsoup/browser.py index <HASH>..<HASH> 100644 --- a/mechanicalsoup/browser.py +++ b/mechanicalsoup/browser.py @@ -48,13 +48,12 @@ class Browser: if not name: continue - if input.get("type") in ("radio", "checkbox") and "checked" not in input.attrs: + if input.get("type") in ("radio", "checkbox") \ + and "checked" not in input.attrs: continue if input.get("type") == "checkbox": - if not name in data: - data[name] = list() - data[name].append(value) + data.setdefault(name, []).append(value) elif input.get("type") == "file": # read http://www.cs.tut.fi/~jkorpela/forms/file.html
simplify some codes in _build_request
py
diff --git a/ariba/tests/samtools_variants_test.py b/ariba/tests/samtools_variants_test.py index <HASH>..<HASH> 100644 --- a/ariba/tests/samtools_variants_test.py +++ b/ariba/tests/samtools_variants_test.py @@ -55,7 +55,16 @@ class TestSamtoolsVariants(unittest.TestCase): got_set = set(got[7].split(';')) self.assertEqual(exp_set, got_set) - self.assertEqual(file2lines(expected_depths), file2lines(sv.read_depths_file)) + + # samtools-1.2 and 1.3 output not xonsistent in final column, so + # ignore those. + expected_lines = file2lines(expected_depths) + got_lines = file2lines(sv.read_depths_file) + self.assertEqual(len(expected_lines), len(got_lines)) + + for i in range(len(expected_lines)): + self.assertEqual(expected_lines[i].split('\t')[:-1], got_lines[i].split('\t')[:-1]) + os.unlink(sv.vcf_file) os.unlink(sv.read_depths_file) os.unlink(sv.read_depths_file + '.tbi')
samtools <I>/3 differ slightly. Ignore the offending column
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -1,7 +1,4 @@ #!/usr/bin/env python -import os -import shutil -import sys from setuptools import setup, find_packages VERSION = '0.1.1'
Remove unused imports in setup.py
py
diff --git a/shapefile.py b/shapefile.py index <HASH>..<HASH> 100644 --- a/shapefile.py +++ b/shapefile.py @@ -15,7 +15,6 @@ import sys import time import array import tempfile -import itertools import io from datetime import date @@ -937,6 +936,9 @@ class Writer(object): numRecs = self.recNum numFields = len(self.fields) headerLength = numFields * 32 + 33 + if headerLength >= 65535: + raise ShapefileException( + "Shapefile dbf header length exceeds maximum length.") recordLength = sum([int(field[2]) for field in self.fields]) + 1 header = pack('<BBBBLHH20x', version, year, month, day, numRecs, headerLength, recordLength) @@ -1257,6 +1259,9 @@ class Writer(object): elif fieldType == "L": size = "1" decimal = 0 + if len(self.fields) >= 2046: + raise ShapefileException( + "Shapefile Writer reached maximum number of fields: 2046.") self.fields.append((name, fieldType, size, decimal)) def saveShp(self, target):
Raise exceptions if too many fields are added (max <I>)
py
diff --git a/astral/test/test_GoogleGeocoder.py b/astral/test/test_GoogleGeocoder.py index <HASH>..<HASH> 100644 --- a/astral/test/test_GoogleGeocoder.py +++ b/astral/test/test_GoogleGeocoder.py @@ -3,7 +3,7 @@ from astral import GoogleGeocoder def test_GoogleLocator(): locator = GoogleGeocoder() l = locator['Eiffel Tower'] - pass + assert l is not None if __name__ == '__main__': test_GoogleLocator()
Replaced pass statement with an assert (which would have exposed the bug in <I>)
py
diff --git a/ccmlib/dse_node.py b/ccmlib/dse_node.py index <HASH>..<HASH> 100644 --- a/ccmlib/dse_node.py +++ b/ccmlib/dse_node.py @@ -501,16 +501,16 @@ class DseNode(Node): agent_dir = os.path.join(self.get_path(), 'datastax-agent') if os.path.exists(agent_dir): pidfile = os.path.join(agent_dir, 'datastax-agent.pid') - if os.path.exists(pidfile): - with open(pidfile, 'r') as f: - pid = int(f.readline().strip()) - f.close() - if pid is not None: - try: - os.kill(pid, signal.SIGKILL) - except OSError: - pass - os.remove(pidfile) + if os.path.exists(pidfile): + with open(pidfile, 'r') as f: + pid = int(f.readline().strip()) + f.close() + if pid is not None: + try: + os.kill(pid, signal.SIGKILL) + except OSError: + pass + os.remove(pidfile) def _write_agent_address_yaml(self, agent_dir): address_yaml = os.path.join(agent_dir, 'conf', 'address.yaml')
Fix issue when DSE nodes are removed/cleaned and ds agent is not installed
py
diff --git a/django_cron/__init__.py b/django_cron/__init__.py index <HASH>..<HASH> 100644 --- a/django_cron/__init__.py +++ b/django_cron/__init__.py @@ -54,8 +54,9 @@ class CronJobManager(object): cron_log = CronJobLog(code=cron_job.code, start_time=datetime.now()) try: - cron_job.do() + msg = cron_job.do() cron_log.is_success = True + cron_log.message = msg or '' except Exception, e: cron_log.is_success = False cron_log.message = traceback.format_exc()[-1000:]
Add ability to attach message on success The message was only saved upon exception. But sometimes we want to report some additional info after running the cron job, like, how many records were processed or something.
py
diff --git a/pycdlib/rockridge.py b/pycdlib/rockridge.py index <HASH>..<HASH> 100644 --- a/pycdlib/rockridge.py +++ b/pycdlib/rockridge.py @@ -2445,7 +2445,6 @@ class RockRidge(RockRidgeBase): for rec in recs: # FIXME: this won't deal with components split across multiple # SL records properly. - # FIXME: this doesn't deal with root component entries properly. outlist.append(rec.name()) return b"/".join(outlist)
Remove an incorrect FIXME statement.
py
diff --git a/pystmark.py b/pystmark.py index <HASH>..<HASH> 100644 --- a/pystmark.py +++ b/pystmark.py @@ -152,7 +152,7 @@ def send_batch_with_templates(messages, :param test: Use the Postmark Test API. Defaults to `False`. :param request_args: Keyword arguments to pass to :func:`requests.request`. - :rtype: :class:`SendResponse` + :rtype: :class:`BatchSendResponse` """ return _default_pyst_batch_template_sender.send(messages=messages, api_key=api_key,
Fix wrong rtype in docs for send_batch_with_templates
py
diff --git a/anom/transaction.py b/anom/transaction.py index <HASH>..<HASH> 100644 --- a/anom/transaction.py +++ b/anom/transaction.py @@ -89,9 +89,8 @@ def transactional(*, adapter=None, retries=3, propagation=Transaction.Propagatio retries(int, optional): The number of times to retry the transaction if it couldn't be committed. propagation(Transaction.Propagation, optional): The propagation - strategy to use. By default, transactions are Transactions are - nested, but you can force certain transactions to always run - independently. + strategy to use. By default, transactions are nested, but you + can force certain transactions to always run independently. Raises: anom.RetriesExceeded: When the decorator runbs out of retries
doc: fix docstring for @transactional
py
diff --git a/khard/carddav_object.py b/khard/carddav_object.py index <HASH>..<HASH> 100644 --- a/khard/carddav_object.py +++ b/khard/carddav_object.py @@ -1121,7 +1121,14 @@ class CarddavObject(VCardWrapper): flags=re.IGNORECASE) return contents - def _process_user_input(self, input): + @staticmethod + def _parse_yaml(input): + """Parse a YAML document into a dictinary and validate the data to some + degree. + + :param str input: the YAML document to parse + :returns dict: the parsed datastructure + """ yaml_parser = YAML(typ='base') # parse user input string try: @@ -1130,7 +1137,7 @@ class CarddavObject(VCardWrapper): ruamel.yaml.scanner.ScannerError) as err: raise ValueError(err) else: - if contact_data is None: + if not contact_data: raise ValueError("Error: Found no contact information") # check for available data @@ -1140,7 +1147,10 @@ class CarddavObject(VCardWrapper): and not contact_data.get("Organisation"): raise ValueError( "Error: You must either enter a name or an organisation") + return contact_data + def _process_user_input(self, input): + contact_data = self._parse_yaml(input) # update rev self._update_revision()
Split yaml parsing method
py
diff --git a/assemblerflow/generator/inspect.py b/assemblerflow/generator/inspect.py index <HASH>..<HASH> 100644 --- a/assemblerflow/generator/inspect.py +++ b/assemblerflow/generator/inspect.py @@ -858,7 +858,6 @@ class NextflowInspector: curses.color_pair(colors[proc["barrier"]]) | txt_fmt) win.clrtoeol() - win.clearok(1) win.refresh(0, self.padding, 0, 0, height-1, width-1) ###################
Fixed inspect curses flickering issue
py
diff --git a/epson_printer/util.py b/epson_printer/util.py index <HASH>..<HASH> 100644 --- a/epson_printer/util.py +++ b/epson_printer/util.py @@ -11,13 +11,17 @@ class BitmapData: @classmethod def fromFileImage(cls, image_path): i = Image.open(image_path) - monochrome = i.convert('1') - data = monochrome.getdata() + (w, h) = i.size + if w > 512: + ratio = int(w / 512) + h = int(h / ratio) + i = i.resize((512, h), Image.ANTIALIAS) + i = i.convert('1') + data = i.getdata() pixels = [] for i in range(len(data)): if data[i] == 255: pixels.append(0) else: pixels.append(1) - (w, h) = monochrome.size return cls(pixels, w, h) \ No newline at end of file
resize image before sending it to the printer
py
diff --git a/precise/train.py b/precise/train.py index <HASH>..<HASH> 100755 --- a/precise/train.py +++ b/precise/train.py @@ -23,10 +23,17 @@ def main(): args = parser.parse_args() inputs, outputs = load_data(args.data_dir) - validation_data = load_data(args.data_dir + '/test') + val_in, val_out = load_data(args.data_dir + '/test') print('Inputs shape:', inputs.shape) print('Outputs shape:', outputs.shape) + print('Test inputs shape:', val_in.shape) + print('Test outputs shape:', val_out.shape) + + if (0 in inputs.shape or 0 in outputs.shape or + 0 in val_in.shape or 0 in val_out.shape): + print('Not enough data to train') + exit(1) model = create_model(args.model, args.load) @@ -37,7 +44,7 @@ def main(): checkpoint = ModelCheckpoint(args.model, monitor='val_acc', save_best_only=args.save_best, mode='max') try: - model.fit(inputs, outputs, 5000, args.epochs, validation_data=validation_data, callbacks=[checkpoint]) + model.fit(inputs, outputs, 5000, args.epochs, validation_data=(val_in, val_out), callbacks=[checkpoint]) except KeyboardInterrupt: print()
Make training fail gracefully if dimensions are invalid
py
diff --git a/tests/auth.py b/tests/auth.py index <HASH>..<HASH> 100644 --- a/tests/auth.py +++ b/tests/auth.py @@ -14,6 +14,15 @@ class MockUser(object): return isinstance(other, self.__class__) and other.name == self.name +class MockTemplateManager(object): + templates = { + 'login': 'please login', + } + def render_to_response(self, template, data): + return web.Response(self.templates[template]) + + + def get_user_identity(env, login, password): if password == '123': return 'user-identity' @@ -112,6 +121,7 @@ class SqlaModelAuthTests(unittest.TestCase): @web.handler def make_env(env, data, nxt): env.root = root + env.template = MockTemplateManager() env.db = db = session_maker('sqlite:///:memory:')() metadata.create_all(db.bind) user = User(login='user name', password=encrypt_password('123'))
fixed failing test of authentication Used mock object for template manager
py
diff --git a/pyana/examples/gp_stack.py b/pyana/examples/gp_stack.py index <HASH>..<HASH> 100644 --- a/pyana/examples/gp_stack.py +++ b/pyana/examples/gp_stack.py @@ -33,7 +33,7 @@ def gp_stack(version): file_url = os.path.join(inDir, file) data_import = np.loadtxt(open(file_url, 'rb')) # following scaling is wrong for y < 0 && shift != 1 - data_import[:, 1:] *= shift[energy] + data_import[:, (1,3,4)] *= shift[energy] if fnmatch(file, 'data*'): data[energy] = data_import elif energy == '19' and version == 'QM12': # cut of cocktail above 1.1 GeV/c^2
gp_stack: omit dx from scaling
py
diff --git a/ginga/misc/Task.py b/ginga/misc/Task.py index <HASH>..<HASH> 100644 --- a/ginga/misc/Task.py +++ b/ginga/misc/Task.py @@ -1106,6 +1106,7 @@ class ThreadPool(object): self.runningcount) self.regcond.wait() + self.status = 'up' self.logger.debug("startall done")
Added thread up status to fix blocking in terminal
py
diff --git a/pluggy/__init__.py b/pluggy/__init__.py index <HASH>..<HASH> 100644 --- a/pluggy/__init__.py +++ b/pluggy/__init__.py @@ -193,8 +193,7 @@ class PluginManager(object): You can register plugin objects (which contain hooks) by calling ``register(plugin)``. The Pluginmanager is initialized with a prefix that is searched for in the names of the dict of registered - plugin objects. An optional excludefunc allows to blacklist names which - are not considered as hooks despite a matching prefix. + plugin objects. For debugging purposes you can call ``enable_tracing()`` which will subsequently send debug information to the trace helper.
remove reference to missing 'excludefunc' arg in PluginManager docstring
py
diff --git a/doctr/local.py b/doctr/local.py index <HASH>..<HASH> 100644 --- a/doctr/local.py +++ b/doctr/local.py @@ -55,6 +55,8 @@ def encrypt_variable(variable, build_repo, *, public_key=None, is_private=False, headersv2 = {**_headers, **APIv2} headersv3 = {**_headers, **APIv3} if is_private: + print("I need to generate a temporary token with GitHub to authenticate with Travis.") + print("It will be deleted immediately. If you still see it after this at https://github.com/settings/tokens after please delete it manually.") # /auth/github doesn't seem to exist in the Travis API v3. tok_dict = generate_GitHub_token(scopes=["read:org", "user:email", "repo"], note="temporary token for doctr to auth against travis (delete me)",
Print a note about the personal access token
py
diff --git a/AegeanTools/wcs_helpers.py b/AegeanTools/wcs_helpers.py index <HASH>..<HASH> 100644 --- a/AegeanTools/wcs_helpers.py +++ b/AegeanTools/wcs_helpers.py @@ -613,10 +613,10 @@ class PSFHelper(WCSHelper): area : float The area of the beam in square pixels. """ - beam = self.get_pixbeam(ra, dec) - if beam is None: - return 0 - return beam.a * beam.b * np.pi + + parea = abs(self.wcshelper.pixscale[0] * self.wcshelper.pixscale[1]) # in deg**2 at reference coords + barea = self.get_beamarea_deg2(ra, dec) + return barea / parea def get_beamarea_deg2(self, ra, dec):
remove calculation of projected beam size - fixes larger-than-expected integrated flux densities
py
diff --git a/spyder/widgets/ipythonconsole/shell.py b/spyder/widgets/ipythonconsole/shell.py index <HASH>..<HASH> 100644 --- a/spyder/widgets/ipythonconsole/shell.py +++ b/spyder/widgets/ipythonconsole/shell.py @@ -82,8 +82,11 @@ class ShellWidget(NamepaceBrowserWidget, HelpWidget, DebuggingWidget): def set_cwd(self, dirname): """Set shell current working directory.""" - return self.silent_execute( - u"get_ipython().kernel.set_cwd(r'{}')".format(dirname)) + code = u"get_ipython().kernel.set_cwd(r'{}')".format(dirname) + if self._reading: + self.kernel_client.input(u'!' + code) + else: + self.silent_execute(code) # --- To handle the banner def long_banner(self):
IPython console: Make setting cwd work while debugging
py
diff --git a/galpy/orbit_src/planarOrbit.py b/galpy/orbit_src/planarOrbit.py index <HASH>..<HASH> 100644 --- a/galpy/orbit_src/planarOrbit.py +++ b/galpy/orbit_src/planarOrbit.py @@ -1,6 +1,7 @@ import numpy as nu from scipy import integrate from Orbit import Orbit +from RZOrbit import RZOrbit from galpy.potential_src.planarPotential import evaluateplanarRforces,\ planarPotential, RZToplanarPotential class planarOrbitTop(Orbit): @@ -39,6 +40,12 @@ class planarROrbit(planarOrbitTop): self.vxvv= vxvv return None + def __add__(self,linOrb): + """ + """ + return RZOrbit(vxvv=[self.vxvv[0],self.vxvv[1],self.vxvv[2], + linOrb.vxvv[0],linOrb.vxvv[1]]) + def integrate(self,t,pot): """ NAME: @@ -75,7 +82,6 @@ class planarOrbit(planarOrbitTop): raise ValueError("You only provided R,vR, & vT, but not phi; you probably want planarROrbit") self.vxvv= vxvv return None - def _integrateROrbit(vxvv,pot,t): """
create RZOrbits by adding a linearOrbit to a planarROrbit
py
diff --git a/pyxmpp/jabber/delay.py b/pyxmpp/jabber/delay.py index <HASH>..<HASH> 100644 --- a/pyxmpp/jabber/delay.py +++ b/pyxmpp/jabber/delay.py @@ -99,7 +99,7 @@ class Delay(StanzaPayloadObject): tm=time.strptime(stamp,"%Y%m%dT%H:%M:%S") tm=tm[0:8]+(0,) self.timestamp=datetime.datetime.fromtimestamp(time.mktime(tm)) - delay_from=xmlnode.prop("from") + delay_from=from_utf8(xmlnode.prop("from")) if delay_from: self.delay_from=JID(delay_from) else:
- convert 'from' from UTF-8 to Unicode before passing to JID constructor
py
diff --git a/skyfield/positionlib.py b/skyfield/positionlib.py index <HASH>..<HASH> 100644 --- a/skyfield/positionlib.py +++ b/skyfield/positionlib.py @@ -273,11 +273,8 @@ class Apparent(ICRS): p = einsum('ij...,j...->i...', self.jd.M, self.position.AU) - pz = dots(p, uz) - pn = dots(p, un) - pw = dots(p, uw) - - position_AU = array([pn, -pw, pz]) + u = array([un, -uw, uz]) + position_AU = einsum('ij...,j...->i...', u, p) r_AU, alt, az = to_polar(position_AU)
Combine unit vectors into array one step earlier
py
diff --git a/dmf_device_ui/plugin.py b/dmf_device_ui/plugin.py index <HASH>..<HASH> 100644 --- a/dmf_device_ui/plugin.py +++ b/dmf_device_ui/plugin.py @@ -132,16 +132,20 @@ class DevicePlugin(Plugin): data = decode_content_data(request) compare_fields = ['device', 'width', 'height', 'name', 'fourcc', 'framerate'] - for i, row in self.parent.video_mode_slave.configs.iterrows(): - if (data['video_config'][compare_fields] == - row[compare_fields]).all(): - break - else: + if data['video_config'] is None: i = None + else: + for i, row in self.parent.video_mode_slave.configs.iterrows(): + if (row[compare_fields] == + data['video_config'][compare_fields]).all(): + break + else: + i = None if i is None: logger.error('Unsupported video config:\n%s', data['video_config']) logger.error('Video configs:\n%s', self.parent.video_mode_slave.configs) + self.parent.video_mode_slave.config_combo.set_active(0) else: logger.error('Set video config (%d):\n%s', i + 1, data['video_config'])
[FIX] Allow 0MQ API to disable video Prior to this commit, passing `None` to the `set_video_config` 0MQ plugin API method would not disable video. As of this commit, this has been corrected.
py
diff --git a/mallory/version.py b/mallory/version.py index <HASH>..<HASH> 100644 --- a/mallory/version.py +++ b/mallory/version.py @@ -1 +1 @@ -Version = "0.1.0" +Version = "0.2.0"
bumps version to <I>
py
diff --git a/js_host/bin.py b/js_host/bin.py index <HASH>..<HASH> 100644 --- a/js_host/bin.py +++ b/js_host/bin.py @@ -67,11 +67,11 @@ def spawn_detached_manager(config_file, status=None): if not manager.is_running(): raise ProcessError('Started {}, but cannot connect'.format(manager.get_name())) - manager.connect() - if settings.VERBOSITY >= verbosity.PROCESS_START: print('Started {}'.format(manager.get_name())) + manager.connect() + return manager
Fixed an issue where manager logs were displayed out of order.
py
diff --git a/txtwitter/tests/test_fake_twitter.py b/txtwitter/tests/test_fake_twitter.py index <HASH>..<HASH> 100644 --- a/txtwitter/tests/test_fake_twitter.py +++ b/txtwitter/tests/test_fake_twitter.py @@ -26,10 +26,6 @@ class TestFakeStream(TestCase): self.assertFalse(stream.accepts('foo', {'bar': 'qux'})) self.assertFalse(stream.accepts('corge', {'grault': 'garply'})) - def _process_stream_response(self, resp, delegate): - protocol = FakeTwitterStreamProtocol(delegate) - resp.deliverBody(protocol) - def test_deliver(self): stream = self._FakeStream()
Remove unused _process_stream_response from FakeStream's test case
py
diff --git a/salt/utils/event.py b/salt/utils/event.py index <HASH>..<HASH> 100644 --- a/salt/utils/event.py +++ b/salt/utils/event.py @@ -188,7 +188,7 @@ class SaltEvent(object): opts['ipc_mode'] = 'tcp' self.puburi, self.pulluri = self.__load_uri(sock_dir, node) if listen: - self.subscribe() + self.connect_pub() self.pending_tags = [] self.pending_rtags = [] self.pending_events = []
connect_pub call replaces subscribe call
py