diff
stringlengths
139
3.65k
message
stringlengths
8
627
diff_languages
stringclasses
1 value
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -1,11 +1,24 @@ """ +============== PiPocketGeiger ------ +============== + Radiation Watch Pocket Geiger Type 5 library for Raspberry Pi. -Links -````` -* `code and documentation <https://github.com/MonsieurV/PiPocketGeiger>`_ +Usage +===== +:: + + from PiPocketGeiger import RadiationWatch + import time + + with RadiationWatch(24, 23) as radiationWatch: + while 1: + print(radiationWatch.status()) + time.sleep(5) + + +See GitHub repository for complete documentation. """ import re import ast @@ -13,7 +26,7 @@ from setuptools import setup setup( name='PiPocketGeiger', - version=0.1, + version='0.1a', url='https://github.com/MonsieurV/PiPocketGeiger', license='MIT', author='Yoan Tournade',
Update pypi description and release new version
py
diff --git a/salt/state.py b/salt/state.py index <HASH>..<HASH> 100644 --- a/salt/state.py +++ b/salt/state.py @@ -576,7 +576,12 @@ class State(object): ''' Execute the aggregation systems to runtime modify the low chunk ''' - if self.functions['config.option']('mod_aggregate') and not low.get('__agg__'): + agg_opt = self.functions['config.option']('mod_aggregate') + if agg_opt is True: + agg_opt = [low['state']] + else: + return low + if low['state'] in agg_opt and not low.get('__agg__'): agg_fun = '{0}.mod_aggregate'.format(low['state']) if agg_fun in self.states: try:
Make mod_aggregate more flexible to configure
py
diff --git a/slither/tools/flattening/flattening.py b/slither/tools/flattening/flattening.py index <HASH>..<HASH> 100644 --- a/slither/tools/flattening/flattening.py +++ b/slither/tools/flattening/flattening.py @@ -151,12 +151,8 @@ class Flattening: if self._convert_library_to_internal and contract.is_library: for f in contract.functions_declared: visibility = "" - if f.visibility == "external": - visibility = f.visibility - elif f.visibility == "public": + if f.visibility in ["external", "public"]: visibility = f.visibility - - if visibility != "": attributes_start = ( f.parameters_src().source_mapping["start"] + f.parameters_src().source_mapping["length"]
Update slither/tools/flattening/flattening.py
py
diff --git a/tests/test_transports/test_periodic_transport.py b/tests/test_transports/test_periodic_transport.py index <HASH>..<HASH> 100644 --- a/tests/test_transports/test_periodic_transport.py +++ b/tests/test_transports/test_periodic_transport.py @@ -16,7 +16,7 @@ class DummyTransport(PeriodicTransportMixin): def __init__(self, delay=0): self.queue = [] self.delay = delay - self.config = ApiConfig(in_cluster=True, timeout=0.001, interval=0) + self.config = ApiConfig(in_cluster=True, timeout=0.0001, interval=0) self._periodic_exceptions = 0 self._periodic_done = 0 @@ -29,7 +29,7 @@ class ExceptionTransport(PeriodicTransportMixin): # pylint:disable=protected-access def __init__(self, delay=0): self.delay = delay - self.config = ApiConfig(in_cluster=True, timeout=0.001, interval=0) + self.config = ApiConfig(in_cluster=True, timeout=0.0001, interval=0) self._periodic_exceptions = 0 self._periodic_done = 0
Update timeout for threaded transport
py
diff --git a/buildbot_travis/travisyml.py b/buildbot_travis/travisyml.py index <HASH>..<HASH> 100644 --- a/buildbot_travis/travisyml.py +++ b/buildbot_travis/travisyml.py @@ -22,7 +22,10 @@ class TravisYml(object): self.branch_blacklist = None def parse(self, config_input): - self.config = safe_load(config_input) + self.parse_dict(safe_load(config_input)) + + def parse_dict(self, config): + self.config = config self.parse_language() self.parse_envs() self.parse_hooks()
Allow recreation of a TravisYml object from JSON data...
py
diff --git a/daemons/startstop/simple.py b/daemons/startstop/simple.py index <HASH>..<HASH> 100644 --- a/daemons/startstop/simple.py +++ b/daemons/startstop/simple.py @@ -16,6 +16,11 @@ from ..interfaces import startstop LOG = logging.getLogger(__name__) +SUCCESS_MSG = "an integer is required" + +if sys.version_info >= (3, 10, 0): + SUCCESS_MSG = "object cannot be interpreted as an integer" + class SimpleStartStopManager(startstop.StartStopManager): @@ -78,7 +83,7 @@ class SimpleStartStopManager(startstop.StartStopManager): except TypeError as err: - if "an integer is required" in str(err): + if SUCCESS_MSG in str(err): LOG.info("Succesfully stopped the process.") return None
Python <I>'s os.kill() throws a TypeError with a different message than older Python versions
py
diff --git a/cwltool/update.py b/cwltool/update.py index <HASH>..<HASH> 100644 --- a/cwltool/update.py +++ b/cwltool/update.py @@ -17,6 +17,15 @@ def findId(doc, frg): return f return None +def fixType(doc): + if isinstance(doc, list): + return [fixType(f) for f in doc] + + if isinstance(doc, basestring): + if doc not in ("null", "boolean", "int", "long", "float", "double", "string", "File", "record", "enum", "array", "Any"): + return "#" + doc + return doc + def fixImport(doc, loader, baseuri): if isinstance(doc, dict): if "import" in doc: @@ -34,6 +43,10 @@ def fixImport(doc, loader, baseuri): if "include" in doc: return loader.fetch_text(urlparse.urljoin(baseuri, doc["include"])) + for t in ("type", "items"): + if t in doc: + doc[t] = fixType(doc[t]) + for a in doc: doc[a] = fixImport(doc[a], loader, baseuri)
Tests pass for cwltool on both draft-2 and draft-3
py
diff --git a/distributions.py b/distributions.py index <HASH>..<HASH> 100644 --- a/distributions.py +++ b/distributions.py @@ -530,6 +530,8 @@ class DiagonalGaussian(GibbsSampling): betas_n = betas_0 + 1/2*sumsq + n*nus_0/(n+nus_0) * 1/2*(xbar - mu_0)**2 mu_n = (n*xbar + nus_0*mu_0)/(n+nus_0) + assert alphas_n.ndim == betas_n.ndim == 1 + return mu_n, nus_n, alphas_n, betas_n else: return mu_0, nus_0, alphas_0, betas_0 @@ -550,10 +552,10 @@ class DiagonalGaussian(GibbsSampling): data = np.reshape(data,(-1,D)) xbar = data.mean(0) centered = data - xbar - sumsq = np.diag(np.dot(centered.T,centered)) + sumsq = np.dot(centered.T,centered) else: xbar = sum(np.reshape(d,(-1,D)).sum(0) for d in data) / n - sumsq = np.diag(sum(((np.reshape(d,(-1,D)) - xbar)**2).sum(0) for d in data)) + sumsq = sum(((np.reshape(d,(-1,D)) - xbar)**2).sum(0) for d in data) else: xbar, sumsq = None, None return n, xbar, sumsq
removed np.diag calls in DiagonalGaussian sumsq (bug fix)
py
diff --git a/keymaker/__init__.py b/keymaker/__init__.py index <HASH>..<HASH> 100644 --- a/keymaker/__init__.py +++ b/keymaker/__init__.py @@ -190,6 +190,7 @@ def get_uid(args): else: iam_resource = boto3.resource("iam") + args.user += config.get('keymaker_linux_user_suffix', default_iam_linux_user_suffix) try: user_id = iam_resource.User(args.user).user_id uid = aws_to_unix_id(user_id)
Fixup for get_uid with user suffix
py
diff --git a/vyper/builtin_interfaces/ERC20Detailed.py b/vyper/builtin_interfaces/ERC20Detailed.py index <HASH>..<HASH> 100644 --- a/vyper/builtin_interfaces/ERC20Detailed.py +++ b/vyper/builtin_interfaces/ERC20Detailed.py @@ -5,14 +5,17 @@ NOTE: interface uses `String[1]` where 1 is the lower bound of the string return """ interface_code = """ +@view @external def name() -> String[1]: pass +@view @external def symbol() -> String[1]: pass +@view @external def decimals() -> uint8: pass
fix: add view modifier to ERC<I>Detailed signatures (#<I>)
py
diff --git a/bcbio/log/__init__.py b/bcbio/log/__init__.py index <HASH>..<HASH> 100644 --- a/bcbio/log/__init__.py +++ b/bcbio/log/__init__.py @@ -102,8 +102,9 @@ def create_base_logger(config=None, parallel=None): from bcbio.log import logbook_zmqpush ips = [ip for ip in socket.gethostbyname_ex(socket.gethostname())[2] if not ip.startswith("127.")] - ips += [(s.connect(('8.8.8.8', 53)), s.getsockname()[0], s.close())[1] for s in - [socket.socket(socket.AF_INET, socket.SOCK_DGRAM)]] + if not ips: + ips += [(s.connect(('8.8.8.8', 53)), s.getsockname()[0], s.close())[1] for s in + [socket.socket(socket.AF_INET, socket.SOCK_DGRAM)]] if not ips: sys.stderr.write("Cannot resolve a local IP address that isn't 127.x.x.x " "Your machines might not have a local IP address "
Logging: limit IP address socket connections Use socket connections for finding logging IP address as a backup only when gethostbyname does not resolve a valid non-local IP. Avoids issues on internet restricted systems where we can't access the network.
py
diff --git a/src/python/turicreate/test/test_object_detector.py b/src/python/turicreate/test/test_object_detector.py index <HASH>..<HASH> 100644 --- a/src/python/turicreate/test/test_object_detector.py +++ b/src/python/turicreate/test/test_object_detector.py @@ -323,12 +323,11 @@ class ObjectDetectorTest(unittest.TestCase): def test_different_grip_shape(self): # Should able to give different input grip shape - shapes = [[1, 1], [5, 5], [13, 13], [26, 26]] + shapes = [[1,1], [5,5], [26,26], [10,20], [7, 19] ] for shape in shapes: model = tc.object_detector.create( - self.sf, max_iterations=1, grid_shape=shape - ) - pred = model.predict(self.sf) + self.sf[:2], max_iterations=1, grid_shape=shape) + pred = model.predict(self.sf[:2]) def test_predict(self): sf = self.sf.head()
Changed test to be far less intensive. (#<I>)
py
diff --git a/salt/returners/local_cache.py b/salt/returners/local_cache.py index <HASH>..<HASH> 100644 --- a/salt/returners/local_cache.py +++ b/salt/returners/local_cache.py @@ -294,9 +294,11 @@ def get_load(jid): if not os.path.exists(jid_dir) or not os.path.exists(load_fn): return {} serial = salt.payload.Serial(__opts__) + ret = {} with salt.utils.files.fopen(os.path.join(jid_dir, LOAD_P), 'rb') as rfh: ret = serial.load(rfh) - + if ret is None: + ret = {} minions_cache = [os.path.join(jid_dir, MINIONS_P)] minions_cache.extend( glob.glob(os.path.join(jid_dir, SYNDIC_MINIONS_P.format('*')))
try to ensure that the ret is never None
py
diff --git a/tornado/httpclient.py b/tornado/httpclient.py index <HASH>..<HASH> 100644 --- a/tornado/httpclient.py +++ b/tornado/httpclient.py @@ -543,6 +543,8 @@ def _curl_setup_request(curl, request, buffer, headers): credentials = '%s:%s' % (request.proxy_username, request.proxy_password) curl.setopt(pycurl.PROXYUSERPWD, credentials) + else: + curl.setopt(pycurl.PROXY, '') # Set the request method through curl's retarded interface which makes # up names for almost every single method
Set proxy to '' if no proxy is explicitly set in the request. If this is not done, curls with proxies set are re-inserted into the empty list, and are then reused. This causes connections to be randomly proxied, if some requests use a proxy, and others don't.
py
diff --git a/tests/test_optimize.py b/tests/test_optimize.py index <HASH>..<HASH> 100644 --- a/tests/test_optimize.py +++ b/tests/test_optimize.py @@ -18,8 +18,13 @@ from pathlib import Path import pytest +import logging + +from PIL import Image from ocrmypdf import optimize as opt +from ocrmypdf.exec.ghostscript import rasterize_pdf +from ocrmypdf.helpers import fspath @pytest.mark.parametrize('pdf', ['multipage.pdf', 'palette.pdf']) @@ -28,3 +33,17 @@ def test_basic(resources, pdf, outpdf): opt.main(infile, outpdf, level=3) assert Path(outpdf).stat().st_size <= Path(infile).stat().st_size + + +def test_mono_not_inverted(resources, outdir): + infile = resources / '2400dpi.pdf' + opt.main(infile, outdir / 'out.pdf', level=3) + + rasterize_pdf( + outdir / 'out.pdf', outdir / 'im.png', + xres=10, yres=10, raster_device='pnggray', + log=logging.getLogger(name='test_mono_flip') + ) + + im = Image.open(fspath(outdir / 'im.png')) + assert im.getpixel((0, 0)) == 255, "Expected white background"
Add test case to ensure mono is not inverted
py
diff --git a/cirq-core/cirq/ops/fsim_gate.py b/cirq-core/cirq/ops/fsim_gate.py index <HASH>..<HASH> 100644 --- a/cirq-core/cirq/ops/fsim_gate.py +++ b/cirq-core/cirq/ops/fsim_gate.py @@ -88,7 +88,7 @@ class FSimGate(gate_features.InterchangeableQubitsGate, raw_types.Gate): iSWAP gate. Maximum strength (full iswap) is at pi/2. phi: Controlled phase angle, in radians. Determines how much the ``|11⟩`` state is phased. Note: uses opposite sign convention to - the CZPowGate. Maximum strength (full cz) is at pi/2. + the CZPowGate. Maximum strength (full cz) is at pi. """ self.theta = _canonicalize(theta) self.phi = _canonicalize(phi)
Fix docstring for fsim gate (#<I>)
py
diff --git a/centinel/primitives/traceroute.py b/centinel/primitives/traceroute.py index <HASH>..<HASH> 100644 --- a/centinel/primitives/traceroute.py +++ b/centinel/primitives/traceroute.py @@ -12,7 +12,7 @@ import time from centinel import command -def traceroute(url, method="icmp", cmd_arguments=[]): +def traceroute(url, method="udp", cmd_arguments=[]): """This function uses centinel.command to issue a traceroute command, wait for it to finish execution and parse the results out to a dictionary.
traceroute should default to UDP (no privilege needed for UDP)
py
diff --git a/paypal/pro/creditcard.py b/paypal/pro/creditcard.py index <HASH>..<HASH> 100644 --- a/paypal/pro/creditcard.py +++ b/paypal/pro/creditcard.py @@ -50,6 +50,9 @@ class CreditCard(object): def is_test(self): """Returns True if number is a test card number.""" + # Note: test numbers cannot be used in the PP Pro sandbox. + # Instead, use the credit card number associated with a + # sandbox account (Test Accounts -> View Details). return self.number in TEST_NUMBERS def get_type(self): @@ -63,4 +66,4 @@ class CreditCard(object): """Returns the card type if valid else None.""" if self.is_number() and not self.is_test() and self.is_mod10(): return self.get_type() - return None \ No newline at end of file + return None
Adding note about test credit cards in PP Pro sandbox.
py
diff --git a/i3pystatus/bitcoin.py b/i3pystatus/bitcoin.py index <HASH>..<HASH> 100644 --- a/i3pystatus/bitcoin.py +++ b/i3pystatus/bitcoin.py @@ -87,7 +87,7 @@ class Bitcoin(IntervalModule): if self._price_prev and fdict["last_price"] > self._price_prev: color = self.color_up fdict["status"] = self.status["price_up"] - elif self._price_prev and fdict["last_price"] > self._price_prev: + elif self._price_prev and fdict["last_price"] < self._price_prev: color = self.color_down fdict["status"] = self.status["price_down"] else:
Fix operator typo in price-comparison
py
diff --git a/ipyvolume/test_all.py b/ipyvolume/test_all.py index <HASH>..<HASH> 100644 --- a/ipyvolume/test_all.py +++ b/ipyvolume/test_all.py @@ -377,9 +377,9 @@ def test_embed(): p3.clear() x, y, z = np.random.random((3, 100)) p3.scatter(x, y, z) - p3.save("tmp/ipyolume_scatter_online.html", offline=False) + p3.save("tmp/ipyolume_scatter_online.html", offline=False, devmode=True) assert os.path.getsize("tmp/ipyolume_scatter_online.html") > 0 - p3.save("tmp/ipyolume_scatter_offline.html", offline=True, scripts_path='js/subdir') + p3.save("tmp/ipyolume_scatter_offline.html", offline=True, scripts_path='js/subdir', devmode=True) assert os.path.getsize("tmp/ipyolume_scatter_offline.html") > 0
test embedding only with devmode (since unreleased dev version are not on unpkg)
py
diff --git a/hvac/adapters.py b/hvac/adapters.py index <HASH>..<HASH> 100644 --- a/hvac/adapters.py +++ b/hvac/adapters.py @@ -9,7 +9,7 @@ import requests import requests.exceptions from hvac import utils -from hvac.constants.client import * +from hvac.constants.client import DEFAULT_BASE_URI class Adapter(object): @@ -17,11 +17,8 @@ class Adapter(object): __metaclass__ = ABCMeta - def __init__(self, base_uri=DEFAULT_BASE_URI, token=None, - cert='\n'.join([VAULT_CLIENT_CERT, VAULT_CLIENT_KEY]) if VAULT_CLIENT_CERT else None, - verify=VAULT_CACERT if VAULT_CACERT else VAULT_CAPATH if VAULT_CAPATH else True, timeout=30, - proxies=None, allow_redirects=True, session=None, namespace=None, ignore_exceptions=False, - strict_http=False): + def __init__(self, base_uri=DEFAULT_BASE_URI, token=None, cert=None, verify=True, timeout=30, proxies=None, + allow_redirects=True, session=None, namespace=None, ignore_exceptions=False, strict_http=False): """Create a new request adapter instance. :param base_uri: Base URL for the Vault instance being addressed.
Move new cert-related logic out of adapters / constants
py
diff --git a/extension/setup.py b/extension/setup.py index <HASH>..<HASH> 100755 --- a/extension/setup.py +++ b/extension/setup.py @@ -57,7 +57,7 @@ setup(name='genepattern-notebook', 'Framework :: IPython', ], install_requires=[ - 'genepattern-python>=1.2.1', + 'genepattern-python>=1.2.2', 'jupyter', 'notebook>=4.2.0', 'ipywidgets>=5.0.0',
Require latest (unreleased) version of genepattern-python for ODF DataFrame support
py
diff --git a/gpapi/googleplay.py b/gpapi/googleplay.py index <HASH>..<HASH> 100644 --- a/gpapi/googleplay.py +++ b/gpapi/googleplay.py @@ -335,7 +335,8 @@ class GooglePlayAPI(object): url = self.FDFE + path response = requests.get(url, headers=headers, verify=ssl_verify, - timeout=60) + timeout=60, + proxies=self.proxies_config) message = googleplay_pb2.UserProfileResponseWrapper.FromString(response.content) if message.commands.displayErrorMessage != "":
Added proxy config to userProfile request
py
diff --git a/measurement/measures/time.py b/measurement/measures/time.py index <HASH>..<HASH> 100644 --- a/measurement/measures/time.py +++ b/measurement/measures/time.py @@ -14,7 +14,7 @@ class Time(AbstractMeasure): functionality for handling intervals of time than this class provides. """ - second = Unit("1", ["s", "sec", "seconds"]) + second = MetricUnit("1", ["s", "sec", "seconds"], ["s"], ["sec"]) minute = Unit("60", ["min", "minutes"]) hour = Unit("3600", ["hr", "h", "hours"]) day = Unit("86400", ["d", "days"])
Update Time to modify Second as a Metric Unit Seconds are an SI unit, so they should allow metric prefixes.
py
diff --git a/indra/statements.py b/indra/statements.py index <HASH>..<HASH> 100644 --- a/indra/statements.py +++ b/indra/statements.py @@ -1079,9 +1079,16 @@ class Statement(object): self.belief = 1 self.uuid = '%s' % uuid.uuid4() + def matches_key(self): + raise NotImplementedError("Method must be implemented in child class.") + def matches(self, other): return self.matches_key() == other.matches_key() + def get_hash(self): + ev_matches_key_list = sorted([ev.matches_key() for ev in self.evidence]) + return hash(self.matches_key() + str(ev_matches_key_list)) + def agent_list_with_bound_condition_agents(self): # Returns the list of agents both directly participating in the # statement and referenced through bound conditions.
Add method to get a hash for statements.
py
diff --git a/spyder/plugins/editor/plugin.py b/spyder/plugins/editor/plugin.py index <HASH>..<HASH> 100644 --- a/spyder/plugins/editor/plugin.py +++ b/spyder/plugins/editor/plugin.py @@ -3471,6 +3471,12 @@ class Editor(SpyderPluginWidget, SpyderConfigurationObserver): self.editorwindows_to_be_created.append( layout_settings) self.set_last_focused_editorstack(self, self.editorstacks[0]) + + # This is necessary to update the statusbar widgets after files + # have been loaded. + editorstack = self.get_current_editorstack() + if editorstack: + self.get_current_editorstack().refresh() else: self.__load_temp_file() self.set_create_new_file_if_empty(True)
Editor: Refresh editorstack after files are opened from session or project
py
diff --git a/pySIR/pySIR.py b/pySIR/pySIR.py index <HASH>..<HASH> 100644 --- a/pySIR/pySIR.py +++ b/pySIR/pySIR.py @@ -77,9 +77,9 @@ class pySIR: r = self._make_call(endpoint, 'GET', params) return r - def put_variables_by_category_and_name(self, q_category, q_name, **params): + def put_variables_by_category_and_name(self, category, name, **params): # r = sir.put_variables_by_category_and_name(development, whatever, content='231', extra_vars='{"asd": "qwe" , "zxc":"poi"}') - endpoint = "/api/v1.0/variables/categories/{}/{}".format(q_category, q_name) + endpoint = "/api/v1.0/variables/categories/{}/{}".format(category, name) r = self._make_call(endpoint, 'PUT', params) return r
variables neeeded for put_variables_by_category_and_name have been changed for consistency
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100755 --- a/setup.py +++ b/setup.py @@ -38,7 +38,7 @@ with codecs.open(readme_md, encoding='utf-8') as f: long_description = f.read() -version = '0.2.8' +version = '0.2.9' class TestCommand(TestClass):
Increase version package to <I>
py
diff --git a/pebble/pool/channel.py b/pebble/pool/channel.py index <HASH>..<HASH> 100644 --- a/pebble/pool/channel.py +++ b/pebble/pool/channel.py @@ -185,4 +185,4 @@ class ChannelMutex: LOCK_TIMEOUT = 60 -READ_ONLY_EVENTMASK = select.POLLIN | select.POLLPRI | select.POLLHUP | select.POLLERR \ No newline at end of file +READ_ONLY_EVENTMASK = select.POLLIN | select.POLLPRI | select.POLLHUP | select.POLLERR
channel.py new line at end of file
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -18,9 +18,9 @@ setup( ], install_requires=[ "astor", - "coreir==2.0.*", + "coreir==2.0.6", "cosa", - "hwtypes==1.0.3" + "hwtypes==1.0.*" ], license='BSD License', url='https://github.com/leonardt/fault',
Try pinning coreir version
py
diff --git a/datacats/environment.py b/datacats/environment.py index <HASH>..<HASH> 100644 --- a/datacats/environment.py +++ b/datacats/environment.py @@ -74,7 +74,10 @@ class Environment(object): if not self.children: # get a list of all of the subdirectories. We'll call this the list # of children. - self.children = listdir(join(self.datadir, 'children')) + try: + self.children = listdir(join(self.datadir, 'children')) + except OSError: + self.children = [] return self.children @@ -306,12 +309,7 @@ class Environment(object): if not used_path: environment._update_saved_project_dir() - try: - environment._load_children() - except OSError: - # We ignore this because this means that the datadir hasn't been - # made yet (i.e. we're using init). - pass + environment._load_children() return environment
Make _load_children more resiliant to the fact that the datadir may not exist or may be corrupted
py
diff --git a/src/flapjack/encoders.py b/src/flapjack/encoders.py index <HASH>..<HASH> 100644 --- a/src/flapjack/encoders.py +++ b/src/flapjack/encoders.py @@ -47,6 +47,14 @@ class Json(transcoders.Json, Encoder): @classmethod def encode(cls, obj=None): + + # test if file + try: +# data = obj.read() + obj = base64.b64encode(obj.read()) + except: + pass + # Is this not a dictionary or an array? if not isinstance(obj, dict) and not isinstance(obj, list): # We need this to be at least a list for valid JSON
the JSON encoder no longer chokes on binary files. base<I> is used.
py
diff --git a/skpy/util.py b/skpy/util.py index <HASH>..<HASH> 100644 --- a/skpy/util.py +++ b/skpy/util.py @@ -18,7 +18,7 @@ class SkypeUtils: Raw object containing emoticons and packs. """ - Status = SkypeEnum("SkypeUtils.Status", ("Offline", "Hidden", "Busy", "Idle", "Online")) + Status = SkypeEnum("SkypeUtils.Status", ("Offline", "Hidden", "Busy", "Away", "Idle", "Online")) """ :class:`.SkypeEnum`: Types of user availability. @@ -29,6 +29,8 @@ class SkypeUtils: User is pretending to be offline. Shows as hidden to the current user, offline to anyone else. Status.Busy: User wishes not to be disturbed. Disables notifications on some clients (e.g. on the desktop). + Status.Away: + User has explicitly marked themselves as away. Alternatively, this may just be an alias for idle. Status.Idle: User is online but not active. Messages will likely be delivered as normal, though may not be read. Status.Online:
Add Away as a known status, independent of Idle As mentioned in comments of #<I>.
py
diff --git a/notedown/notedown.py b/notedown/notedown.py index <HASH>..<HASH> 100644 --- a/notedown/notedown.py +++ b/notedown/notedown.py @@ -402,7 +402,7 @@ class MarkdownWriter(NotebookWriter): """Turn the attribute dict into an attribute string for the code block. """ - if self.strip_outputs: + if self.strip_outputs or not hasattr(cell, 'prompt_number'): return 'python' else: attrlist = ['.python', '.input', 'n={}'.format(cell.prompt_number)]
convert un-executed notebooks
py
diff --git a/jsonrpcserver/dispatcher.py b/jsonrpcserver/dispatcher.py index <HASH>..<HASH> 100644 --- a/jsonrpcserver/dispatcher.py +++ b/jsonrpcserver/dispatcher.py @@ -108,6 +108,10 @@ def dispatch(handler): }) return flask.jsonify(response) else: + response_log.info('', extra={ + 'http_code': 200, + 'http_reason': 'OK', + }) return flask.Response('') # Catch any raised exception (invalid request etc), add the request id
Log responses even if there's no message body.
py
diff --git a/src/_pytest/assertion/rewrite.py b/src/_pytest/assertion/rewrite.py index <HASH>..<HASH> 100644 --- a/src/_pytest/assertion/rewrite.py +++ b/src/_pytest/assertion/rewrite.py @@ -143,10 +143,12 @@ class AssertionRewritingHook(importlib.abc.MetaPathFinder): exec(co, module.__dict__) def _early_rewrite_bailout(self, name, state): - """This is a fast way to get out of rewriting modules. Profiling has - shown that the call to PathFinder.find_spec (inside of the find_spec - from this class) is a major slowdown, so, this method tries to - filter what we're sure won't be rewritten before getting to it. + """This is a fast way to get out of rewriting modules. + + Profiling has shown that the call to PathFinder.find_spec (inside of + the find_spec from this class) is a major slowdown, so, this method + tries to filter what we're sure won't be rewritten before getting to + it. """ if self.session is not None and not self._session_paths_checked: self._session_paths_checked = True
minor: split doc with _early_rewrite_bailout
py
diff --git a/pcef/core/system.py b/pcef/core/system.py index <HASH>..<HASH> 100644 --- a/pcef/core/system.py +++ b/pcef/core/system.py @@ -166,30 +166,6 @@ class JobThread(QtCore.QThread): else: raise Exception("Executing not callable statement") - @property - def jobResults(self): - return self.__jobResults - - @jobResults.setter - def jobResults(self, value): - self.__jobResults = value - - @jobResults.deleter - def jobResults(self, value): - self.__jobResults = value - - -def singleton(class_): - ''' Class implemented to have a unique instance of a JobRunner. - TODO: test in Python 2.X ''' - instances = {} - def getinstance(*args, **kwargs): - if class_ not in instances: - instances[class_] = class_(*args, **kwargs) - instances[class_].__instances = instances - return instances[class_] - return getinstance - class JobRunner: '''Class JobRunner, created to do a job and stop at anytime.
JobRunner: remove singleton and jobResults (not needed)
py
diff --git a/pycbc/events/events.py b/pycbc/events/events.py index <HASH>..<HASH> 100644 --- a/pycbc/events/events.py +++ b/pycbc/events/events.py @@ -244,7 +244,7 @@ class EventManager(object): self.event_dtype.append( (column, coltype) ) self.events = numpy.array([], dtype=self.event_dtype) - self.accumulate = [] + self.accumulate = [self.events] self.template_params = [] self.template_index = -1 self.template_events = numpy.array([], dtype=self.event_dtype)
prime the pump (#<I>)
py
diff --git a/tcex/testing/test_case.py b/tcex/testing/test_case.py index <HASH>..<HASH> 100644 --- a/tcex/testing/test_case.py +++ b/tcex/testing/test_case.py @@ -281,9 +281,6 @@ class TestCase: def input_params(self): """Return install.json params in a dict with name param as key. - Args: - ij (dict, optional): Defaults to None. The install.json contents. - Returns: dict: A dictionary containing the install.json input params with name as key. """ @@ -490,7 +487,11 @@ class TestCase: def teardown_method(self): """Run after each test method runs.""" - if self.enable_update_profile: + if self.enable_update_profile and self.install_json.get('runtimeLevel').lower() not in [ + 'triggerservice', + 'webhooktriggerservice', + ]: + # exit message can not be validated for a Service App self.populate_exit_message() self.log_data('teardown method', 'finished', datetime.now().isoformat()) self.log_data('teardown method', 'elapsed', time.time() - self._timer_class_start)
+ removed populate exit message for service apps.
py
diff --git a/stacker/actions/build.py b/stacker/actions/build.py index <HASH>..<HASH> 100644 --- a/stacker/actions/build.py +++ b/stacker/actions/build.py @@ -27,8 +27,8 @@ def should_update(stack): """ if stack.locked: if not stack.force: - logger.info("Stack %s locked and not in --force list. " - "Refusing to update.", stack.name) + logger.debug("Stack %s locked and not in --force list. " + "Refusing to update.", stack.name) return False else: logger.debug("Stack %s locked, but is in --force "
Make database locked output only debug This quiets regular output down to only showing the things that are going to be updated. After the first run we dump the plan, so you can see that things have been skipped because they are locked.
py
diff --git a/kafka_consumer/check.py b/kafka_consumer/check.py index <HASH>..<HASH> 100644 --- a/kafka_consumer/check.py +++ b/kafka_consumer/check.py @@ -107,7 +107,7 @@ class KafkaCheck(AgentCheck): for consumer_group, topics in consumer_groups.iteritems(): if topics is None: # If topics are't specified, fetch them from ZK - zk_path_topics = zk_path_topic_tmpl.format(consumer_group) + zk_path_topics = zk_path_topic_tmpl.format(consumer_group=consumer_group) topics = {topic: None for topic in self._get_zk_path_children(zk_conn, zk_path_topics, 'topics')}
[kafka_consumer] format missing consumer_group key.
py
diff --git a/afkak/brokerclient.py b/afkak/brokerclient.py index <HASH>..<HASH> 100644 --- a/afkak/brokerclient.py +++ b/afkak/brokerclient.py @@ -65,6 +65,9 @@ class KafkaBrokerClient(ReconnectingClientFactory): # What class protocol instances do we produce? protocol = KafkaProtocol + # Reduce log spam from twisted + noisy = False + def __init__(self, host, port=DefaultKafkaPort, clientId=CLIENT_ID, subscribers=None, maxDelay=MAX_RECONNECT_DELAY_SECONDS, maxRetries=None, @@ -232,7 +235,7 @@ class KafkaBrokerClient(ReconnectingClientFactory): log.debug('%r: Connection Closed:%r:%r', self, connector, reason) notifyReason = None # Not a failure else: - log.error('%r: clientConnectionLost:%r:%r', self, connector, + log.debug('%r: clientConnectionLost:%r:%r', self, connector, reason) notifyReason = reason
Fix for BPSO-<I> - Afkak periodically logs ERROR on disconnect Kafka brokers will disconnect idle clients after <I> minutes of idleness, so we should not log clean disconnects at any higher than DEBUG level.
py
diff --git a/python/flatbuffers/builder.py b/python/flatbuffers/builder.py index <HASH>..<HASH> 100644 --- a/python/flatbuffers/builder.py +++ b/python/flatbuffers/builder.py @@ -93,7 +93,7 @@ class Builder(object): The internal buffer is grown as needed. """ - if not (0 <= initialSize < (2**UOffsetTFlags.bytewidth - 1)): + if not (0 <= initialSize <= self.MaxBufferSize()): msg = "flatbuffers: Cannot create Builder larger than 2 gigabytes." raise BuilderSizeError(msg) @@ -104,6 +104,12 @@ class Builder(object): self.objectEnd = None self.vtables = [] + def MaxBufferSize(self): + """ + Maximum buffer size is 2Gb. + """ + return 2**31 + def Output(self): """ Output returns the portion of the buffer that has been used for @@ -238,7 +244,7 @@ class Builder(object): def growByteBuffer(self): """Doubles the size of the byteslice, and copies the old data towards the end of the new buffer (since we build the buffer backwards).""" - if not len(self.Bytes) <= 2**20: + if not len(self.Bytes) <= self.MaxBufferSize(): msg = "flatbuffers: cannot grow buffer beyond 2 gigabytes" raise BuilderSizeError(msg)
2Gb buffer size checks fixed for Python Builder
py
diff --git a/gns3server/compute/builtin/nodes/cloud.py b/gns3server/compute/builtin/nodes/cloud.py index <HASH>..<HASH> 100644 --- a/gns3server/compute/builtin/nodes/cloud.py +++ b/gns3server/compute/builtin/nodes/cloud.py @@ -162,9 +162,6 @@ class Cloud(BaseNode): rport=nio.rport)) if port_info["type"] in ("ethernet", "tap"): - network_interfaces = [interface["name"] for interface in interfaces()] - if not port_info["interface"] in network_interfaces: - raise NodeError("Interface '{}' could not be found on this system".format(port_info["interface"])) if sys.platform.startswith("win"): windows_interfaces = interfaces() @@ -181,6 +178,10 @@ class Cloud(BaseNode): else: if port_info["type"] == "ethernet": + network_interfaces = [interface["name"] for interface in interfaces()] + if not port_info["interface"] in network_interfaces: + raise NodeError("Interface '{}' could not be found on this system".format(port_info["interface"])) + if sys.platform.startswith("linux"): # use raw sockets on Linux yield from self._ubridge_send('bridge add_nio_linux_raw {name} "{interface}"'.format(name=bridge_name,
Allows the creation of non-existent TAP interfaces when using the cloud.
py
diff --git a/term2048/game.py b/term2048/game.py index <HASH>..<HASH> 100644 --- a/term2048/game.py +++ b/term2048/game.py @@ -6,19 +6,11 @@ import os.path import math from colorama import init, Fore, Style +init(autoreset=True) from term2048 import keypress from term2048.board import Board -# PY3 compat -try: - xrange -except NameError: - xrange = range - - -init(autoreset=True) - class Game(object): """ @@ -199,7 +191,7 @@ class Game(object): return a string representation of the current board. """ b = self.board - rg = xrange(b.size()) + rg = range(b.size()) left = ' '*margins.get('left', 0) s = '\n'.join( [left + ' '.join([self.getCellStr(x, y) for x in rg]) for y in rg])
using 'range' instead of 'xrange' in game.py Since we're using it only once and on a small range, it's cleaner to do that than keeping the try/except hack to use the good function depending of Python version and testing it.
py
diff --git a/woven/linux.py b/woven/linux.py index <HASH>..<HASH> 100644 --- a/woven/linux.py +++ b/woven/linux.py @@ -543,10 +543,11 @@ def upload_etc(): context = {'host_ip':socket.gethostbyname(env.host)} for t in etc_templates: dest = t.replace('woven','',1) - directory = os.path.split(dest)[0] + directory,filename = os.path.split(dest) if directory in ['/etc','/etc/init.d','/etc/init','/etc/logrotate.d','/etc/rsyslog.d']: #must be replacing an existing file - if not exists(dest): continue + package_name = filename.split('.')[0] + if not exists(dest) and package_name not in env.packages: continue elif not exists(directory, use_sudo=True): continue uploaded = upload_template(t,dest,context=context,use_sudo=True, modified_only=True) if uploaded:
make deploying etc templates a little more fine grained
py
diff --git a/djstripe/models/base.py b/djstripe/models/base.py index <HASH>..<HASH> 100644 --- a/djstripe/models/base.py +++ b/djstripe/models/base.py @@ -263,7 +263,7 @@ class StripeModel(StripeBaseModel): return data @classmethod - def _find_owner_account(cls, data): + def _find_owner_account(cls, data, api_key=djstripe_settings.STRIPE_SECRET_KEY): """ Fetches the Stripe Account (djstripe_owner_account model field) linked to the class, cls. @@ -272,13 +272,13 @@ class StripeModel(StripeBaseModel): """ from .account import Account + # try to fetch by stripe_account. Also takes care of Stripe Connected Accounts stripe_account = cls._id_from_data(data.get("account")) if stripe_account: return Account._get_or_retrieve(id=stripe_account) - api_key = data.get("api_key", "") - if api_key: - return Account.get_or_retrieve_for_api_key(api_key) + # try to fetch by the given api_key. + return Account.get_or_retrieve_for_api_key(api_key) @classmethod def _stripe_object_to_record(
The djstripe_owner_account model field will get populated for all models The root cause was that it was assumed the STRIPE API still returned the api_key in its json which no longer happens. Hence Updated the default api_key from "" to the default_api_key retreived from the STRIPE_SECRET_KEY.
py
diff --git a/symbols/number.py b/symbols/number.py index <HASH>..<HASH> 100644 --- a/symbols/number.py +++ b/symbols/number.py @@ -9,10 +9,13 @@ # the GNU General License # ---------------------------------------------------------------------- +import numbers + from api.constants import TYPE from symbol_ import Symbol from type_ import SymbolTYPE from type_ import SymbolBASICTYPE +from type_ import SymbolTYPEREF class SymbolNUMBER(Symbol): @@ -21,6 +24,7 @@ class SymbolNUMBER(Symbol): def __init__(self, value, type_=None, lineno=None): assert lineno is not None assert type_ is None or isinstance(type_, SymbolTYPE) + assert isinstance(value, numbers.Number) Symbol.__init__(self) @@ -54,6 +58,7 @@ class SymbolNUMBER(Symbol): else: self.type_ = SymbolBASICTYPE(None, TYPE.ulong) + self.type_ = SymbolTYPEREF(self.type_, lineno) self.lineno = lineno def __str__(self): @@ -61,3 +66,10 @@ class SymbolNUMBER(Symbol): def __repr__(self): return "%s:%s" % (self.type_, str(self)) + + def __cmp__(self, other): + if isinstance(other, numbers.Number): + return self.value - other + + assert isinstance(other, SymbolNUMBER) + return self.value - other.value
- Number type is encapsulated in a typeref - Assertion checking upon instantiation - __cmp__ method implemented to allow direct number comparison.
py
diff --git a/telemetry/telemetry/page/page_measurement_unittest.py b/telemetry/telemetry/page/page_measurement_unittest.py index <HASH>..<HASH> 100644 --- a/telemetry/telemetry/page/page_measurement_unittest.py +++ b/telemetry/telemetry/page/page_measurement_unittest.py @@ -63,7 +63,8 @@ class PageMeasurementUnitTest( all_results = self.RunMeasurement(measurement, ps, options=self._options) self.assertEquals(0, len(all_results.failures)) - def testGotQueryParams(self): + def disabled_testGotQueryParams(self): + # Disabled due to http://crbug.com/288631 ps = self.CreatePageSet('file:///blank.html?foo=1') measurement = MeasurementQueryParams() ps.pages[-1].query_params = '?foo=1'
Disable PageMeasurementUnitTest.testGotQueryParams as it is failing. TBR=<EMAIL> NOTRY=true BUG=<I> Review URL: <URL>
py
diff --git a/ReText/window.py b/ReText/window.py index <HASH>..<HASH> 100644 --- a/ReText/window.py +++ b/ReText/window.py @@ -415,11 +415,11 @@ class ReTextWindow(QMainWindow): return markupClass(filename=fileName) def docTypeChanged(self): - self.markups[self.ind] = self.getMarkup() oldType = self.highlighters[self.ind].docType markupClass = self.getMarkupClass() newType = markupClass.name if markupClass else '' if oldType != newType: + self.markups[self.ind] = self.getMarkup() self.updatePreviewBox() self.highlighters[self.ind].docType = newType self.highlighters[self.ind].rehighlight()
docTypeChanged: create new markup object only if the class really changed
py
diff --git a/pypiper/ngstk.py b/pypiper/ngstk.py index <HASH>..<HASH> 100755 --- a/pypiper/ngstk.py +++ b/pypiper/ngstk.py @@ -1525,7 +1525,7 @@ class NGSTk(_AttributeDict): cmd += " -c {0}".format(control_bams if type(control_bams) is str else " ".join(control_bams)) if paired: - cmd += "-f BAMPE " + cmd += " -f BAMPE " # Additional settings based on whether the marks is associated with # broad peaks
Update ngstk.py bug: a single space was forgotten before -f option in cmd (line <I>)
py
diff --git a/misc/watcher.py b/misc/watcher.py index <HASH>..<HASH> 100644 --- a/misc/watcher.py +++ b/misc/watcher.py @@ -45,7 +45,7 @@ OCR_JSON_SETTINGS = json.loads(os.getenv('OCR_JSON_SETTINGS', '{}')) POLL_NEW_FILE_SECONDS = int(os.getenv('OCR_POLL_NEW_FILE_SECONDS', '1')) USE_POLLING = bool(os.getenv('OCR_USE_POLLING', '')) LOGLEVEL = os.getenv('OCR_LOGLEVEL', 'INFO').upper() -PATTERNS = ['*.pdf'] +PATTERNS = ['*.pdf', '*.PDF'] log = logging.getLogger('ocrmypdf-watcher')
watcher: include uppercase .PDF too
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -17,7 +17,7 @@ class PyTest(TestCommand): -readme = open('README.txt', 'rb').read() +readme = open('README.txt', 'r').read() setup(name = 'OWSLib', version = owslib.__version__, @@ -26,9 +26,9 @@ setup(name = 'OWSLib', license = 'BSD', keywords = 'gis ogc iso 19115 fgdc dif ows wfs wms sos csw wps wcs capabilities metadata wmts', author = 'Sean Gillies', - author_email = 'sgillies@frii.com', - maintainer = 'Sean Gillies', - maintainer_email = 'sgillies@frii.com', + author_email = 'sean.gillies@gmail.com', + maintainer = 'Tom.Kralidis@ec.gc.ca', + maintainer_email = 'Tom.Kralidis@ec.gc.ca', url = 'https://geopython.github.com/OWSLib', install_requires = ['python-dateutil==2.1', 'pytz==2012j'], packages = find_packages(),
Fix Python 3 bytes/str bug and update maintainer. open('README.txt', 'rb').read() returns type 'bytes' in Python 3, which distutils can't use.
py
diff --git a/pgmpy/readwrite/ProbModelXML.py b/pgmpy/readwrite/ProbModelXML.py index <HASH>..<HASH> 100644 --- a/pgmpy/readwrite/ProbModelXML.py +++ b/pgmpy/readwrite/ProbModelXML.py @@ -1,7 +1,4 @@ """ -ProbModelXML: http://leo.ugr.es/pgm2012/submissions/pgm2012_submission_43.pdf -ProbModelXML: http://www.cisiad.uned.es/techreports/ProbModelXML.pdf - For the student example the ProbModelXML file should be: <?xml version=“1.0” encoding=“UTF-8”?> @@ -247,6 +244,11 @@ class ProbModelXMLWriter: Examples -------- + + Refernces + --------- + [1] http://leo.ugr.es/pgm2012/submissions/pgm2012_submission_43.pdf + [2] http://www.cisiad.uned.es/techreports/ProbModelXML.pdf """ # TODO: add policies, InferenceOptions, Evidence # TODO: add parsing of language and comments and additional properties @@ -441,6 +443,11 @@ class ProbModelXMLReader: } } } + + References + ---------- + [1] http://leo.ugr.es/pgm2012/submissions/pgm2012_submission_43.pdf + [2] http://www.cisiad.uned.es/techreports/ProbModelXML.pdf """ if path is not None: self.xml = etree.ElementTree(file=path)
added references to __init__'s docstring
py
diff --git a/napalm_base/validate.py b/napalm_base/validate.py index <HASH>..<HASH> 100644 --- a/napalm_base/validate.py +++ b/napalm_base/validate.py @@ -153,9 +153,10 @@ def empty_tree(input_list): return True -def compliance_report(cls, validation_file=None): +def compliance_report(cls, validation_file=None, validation_source=None): report = {} - validation_source = _get_validation_file(validation_file) + if validation_file: + validation_source = _get_validation_file(validation_file) for validation_check in validation_source: for getter, expected_results in validation_check.items():
Validate using python object Having the validation format in a single place makes the validation constrained by placing the content only on a single file, on the local disk. There are many applicabilities where we need to test with remote content, or any other source. This is making the validator more flexible
py
diff --git a/pyrogram/client/client.py b/pyrogram/client/client.py index <HASH>..<HASH> 100644 --- a/pyrogram/client/client.py +++ b/pyrogram/client/client.py @@ -1122,7 +1122,7 @@ class Client: def forward_messages(self, chat_id: int or str, from_chat_id: int or str, - message_ids: list, + message_ids: list or int, disable_notification: bool = None): """Use this method to forward messages of any kind. @@ -1139,8 +1139,8 @@ class Client: For a contact that exists in your Telegram address book you can use his phone number (str). For a private channel/supergroup you can use its *t.me/joinchat/* link. - message_ids (``list``): - A list of Message identifiers in the chat specified in *from_chat_id*. + message_ids (``list`` | ``int``): + A list of Message identifiers in the chat specified in *from_chat_id* or a single message id. disable_notification (``bool``, optional): Sends the message silently. @@ -1152,6 +1152,12 @@ class Client: Raises: :class:`Error <pyrogram.Error>` """ + message_ids = ( + message_ids + if isinstance(message_ids, list) + else [message_ids] + ) + return self.send( functions.messages.ForwardMessages( to_peer=self.resolve_peer(chat_id),
Allow passing msg ids as int in forward_messages()
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -1,11 +1,14 @@ import re +import io from setuptools import setup, find_packages +open_as_utf = lambda x: io.open(x, encoding='utf-8') + (__version__, ) = re.findall("__version__.*\s*=\s*[']([^']+)[']", open('dateparser/__init__.py').read()) -readme = re.sub(r':members:.+|..\sautomodule::.+|:class:|:func:', '', open('README.rst').read()) -history = re.sub(r':mod:|:class:|:func:', '', open('HISTORY.rst').read()) +readme = re.sub(r':members:.+|..\sautomodule::.+|:class:|:func:', '', open_as_utf('README.rst').read()) +history = re.sub(r':mod:|:class:|:func:', '', open_as_utf('HISTORY.rst').read()) test_requirements = open('tests/requirements.txt').read().splitlines()
Fixed setup.py to open readme/history files as utf-8
py
diff --git a/src/xworkflows/base.py b/src/xworkflows/base.py index <HASH>..<HASH> 100644 --- a/src/xworkflows/base.py +++ b/src/xworkflows/base.py @@ -664,7 +664,9 @@ class ImplementationList(object): if self.should_collect(value): transition = self.workflow.transitions[value.trname] - if value.trname in self.implementations and value.trname in self.custom_implems: + if (value.trname in self.implementations + and value.trname in self.custom_implems + and name != self.transitions_at[value.trname]): # We already have an implementation registered. other_implem_at = self.transitions_at[value.trname] raise ValueError(
Don't warn when overriding an implementation with the same name.
py
diff --git a/taxi/app.py b/taxi/app.py index <HASH>..<HASH> 100755 --- a/taxi/app.py +++ b/taxi/app.py @@ -305,7 +305,12 @@ def commit(options, args): def _prefill(file, direction): parser = get_parser(file) - cur_date = max([date for (date, entries) in parser.get_entries()]) + entries = parser.get_entries() + + if len(entries) == 0: + return + + cur_date = max([date for (date, entries) in entries]) cur_date += datetime.timedelta(days = 1) while cur_date < datetime.date.today():
Make the edit command not to crash when no entries
py
diff --git a/spillway/models.py b/spillway/models.py index <HASH>..<HASH> 100644 --- a/spillway/models.py +++ b/spillway/models.py @@ -47,7 +47,7 @@ class AbstractRasterStore(models.Model): abstract = True def __unicode__(self): - return self.image.name + return unicode(self.image) def clean_fields(self, *args, **kwargs): # Override this instead of save() so that fields are populated on
Cannot rely on presence of name attr in case of ndarray type
py
diff --git a/pytestsalt/fixtures/config.py b/pytestsalt/fixtures/config.py index <HASH>..<HASH> 100644 --- a/pytestsalt/fixtures/config.py +++ b/pytestsalt/fixtures/config.py @@ -216,6 +216,7 @@ def _master_config(root_dir, 'open_mode': True, 'syndic_master': 'localhost', 'fileserver_list_cache_time': 0, + 'fileserver_backend': ['roots'], 'pillar_opts': False, 'peer': { '.*': [
roots is the default fileserver backend
py
diff --git a/verisure/session.py b/verisure/session.py index <HASH>..<HASH> 100644 --- a/verisure/session.py +++ b/verisure/session.py @@ -88,7 +88,7 @@ class Session(object): self._get_installations() return except Exception: - self._cookies = None + self._request_cookies = None # The login with stored cookies failed, try to get a new one for login_url in ['https://automation01.verisure.com/auth/login', @@ -101,7 +101,7 @@ class Session(object): _validate_response(response) with open(self._cookieFileName, 'wb') as f: pickle.dump(response.cookies, f) - self._request_cookies = {'vid': pickle.load(f)['vid']} + self._request_cookies = {'vid': response.cookies['vid']} self._get_installations() except requests.exceptions.RequestException as ex: raise LoginError(ex)
handle non-existing or corrupt cookie file
py
diff --git a/chassis/util/params.py b/chassis/util/params.py index <HASH>..<HASH> 100644 --- a/chassis/util/params.py +++ b/chassis/util/params.py @@ -78,7 +78,7 @@ def parse(parameters): try: kwargs[key] = _apply_validator_chain( properties.get('validators', []), value, self) - except validators.ValidationError, err: + except validators.ValidationError as err: errors.append(err) else: if properties.get('required', False):
Fix exception catching for Py3K
py
diff --git a/waitinglist/views.py b/waitinglist/views.py index <HASH>..<HASH> 100644 --- a/waitinglist/views.py +++ b/waitinglist/views.py @@ -1,6 +1,5 @@ from django.core.urlresolvers import reverse from django.http import Http404 -from django.template import RequestContext from django.shortcuts import render, redirect, get_object_or_404 from django.contrib.auth.models import User @@ -37,8 +36,7 @@ def cohort_list(request): ctx = { "cohorts": Cohort.objects.order_by("-created") } - ctx = RequestContext(request, ctx) - return render_to_response("cohorts/cohort_list.html", ctx) + return render(request, "cohorts/cohort_list.html", ctx) def cohort_create(request):
Fix missing import bug by using render instead of render_to_response
py
diff --git a/creamas/ds.py b/creamas/ds.py index <HASH>..<HASH> 100644 --- a/creamas/ds.py +++ b/creamas/ds.py @@ -59,6 +59,7 @@ def ssh_exec_in_new_loop(server, cmd): ret = loop.run_until_complete(task) return ret + class DistributedEnvironment(): '''Distributed environment which manages several nodes containing multi-environments.
Ensuring flake8 compatability
py
diff --git a/submit50.py b/submit50.py index <HASH>..<HASH> 100755 --- a/submit50.py +++ b/submit50.py @@ -461,13 +461,21 @@ def submit(org, branch): # authenticate user via SSH try: + + # require ssh assert which("ssh") + + # require GitHub username in ~/.gitconfig username, password = run("git config --global credential.https://github.com/submit50.username", quiet=True), None email = "{}@users.noreply.github.com".format(username) repo = "git@github.com:{}/{}.git".format(org, username) - with open(os.devnull, "w") as DEVNULL: - progress(False) - assert subprocess.call(["ssh", "git@github.com"], stderr=DEVNULL) == 1 # successfully authenticated + progress(False) + + # require ssh-agent + child = pexpect.spawn("ssh git@github.com") + i = child.expect(["Enter passphrase for key", pexpect.EOF]) + child.close() + assert i != 0 # authenticate user via HTTPS except:
checking for ssh-agent
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100755 --- a/setup.py +++ b/setup.py @@ -61,6 +61,9 @@ setup(name='cwltool', 'typing >= 3.5.3', 'six >= 1.8.0', ], + extras_require = { + 'deps': ["galaxy-lib >= 17.09.1"] + }, setup_requires=[] + pytest_runner, test_suite='tests', tests_require=['pytest', 'mock >= 2.0.0',],
galaxy-lib as a setup.py plugin
py
diff --git a/python/jsbeautifier/javascript/beautifier.py b/python/jsbeautifier/javascript/beautifier.py index <HASH>..<HASH> 100644 --- a/python/jsbeautifier/javascript/beautifier.py +++ b/python/jsbeautifier/javascript/beautifier.py @@ -1187,9 +1187,9 @@ class Beautifier: if ( current_token.text[0] == current_token.text[len(current_token.text) - 1] - == '`' + == "`" and self._flags.last_token.type in [TOKEN.WORD, TOKEN.END_EXPR] - and current_token.whitespace_before == '' + and current_token.whitespace_before == "" ): # This conditionial checks backtick strings and makes no changes pass @@ -1448,12 +1448,16 @@ class Beautifier: elif self._flags.last_token.type == TOKEN.OPERATOR: # a++ + ++b # a - -b - space_before = current_token.text in [ - "--", - "-", - "++", - "+", - ] and self._flags.last_token.text in ["--", "-", "++", "+"] + space_before = ( + current_token.text + in [ + "--", + "-", + "++", + "+", + ] + and self._flags.last_token.text in ["--", "-", "++", "+"] + ) # + and - are not unary when preceeded by -- or ++ operator # a-- + b # a * +b
Fix error in Azure DevOps. Refactor code for Linux and MacOS
py
diff --git a/test/test_reader.py b/test/test_reader.py index <HASH>..<HASH> 100644 --- a/test/test_reader.py +++ b/test/test_reader.py @@ -104,7 +104,7 @@ class TestReader(unittest.TestCase, TestResultDataModel): new_nidmres = NIDMResults(nidm_zip=new_name) - self.compare_full_graphs(nidmres.graph, new_nidmres.graph, self.owl, True, True) + self.compare_full_graphs(nidmres.graph, new_nidmres.graph, self.owl, True, True, reconcile=False) # nidm_graph.parse() # # exc_sets = nidm_graph.get_excursion_set_maps()
Do not reconcile for test on read/write loop
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -20,4 +20,5 @@ setup(name = "artist", 'Topic :: Education', 'License :: OSI Approved :: GNU General Public License v3 (GPLv3)'], install_requires = ['jinja2', 'numpy'], + package_data={'artist': ['templates/*.tex']}, )
Fix: include templates in source distribution Thanks to Niek for finding this and Arne for reporting. Fixes #7.
py
diff --git a/tests/scripts/thread-cert/node_cli.py b/tests/scripts/thread-cert/node_cli.py index <HASH>..<HASH> 100644 --- a/tests/scripts/thread-cert/node_cli.py +++ b/tests/scripts/thread-cert/node_cli.py @@ -64,6 +64,8 @@ class otCli: else: self.pexpect.logfile_read = sys.stdout.buffer + self._initialized = True + def __init_sim(self, nodeid, mode): """ Initialize a simulation node. """ if 'OT_CLI_PATH' in os.environ.keys(): @@ -144,7 +146,7 @@ class otCli: self.destroy() def destroy(self): - if not self.pexpect: + if not self._initialized: return if hasattr(self.pexpect, 'proc') and self.pexpect.proc.poll() is None or \ @@ -152,7 +154,8 @@ class otCli: print("%d: exit" % self.nodeid) self.pexpect.send('exit\n') self.pexpect.expect(pexpect.EOF) - self.pexpect = None + self.pexpect.wait() + self._initialized = False def read_cert_messages_in_commissioning_log(self, timeout=-1): """Get the log of the traffic after DTLS handshake.
[test] fix unclosed file (#<I>) This commit fixes python warning "ResourceWarning: unclosed file" when set pexpect to None.
py
diff --git a/tests/test_numpy.py b/tests/test_numpy.py index <HASH>..<HASH> 100644 --- a/tests/test_numpy.py +++ b/tests/test_numpy.py @@ -137,6 +137,13 @@ def test_sum_3(): check_grads(fun, mat) check_grads(d_fun, mat) +def test_sum_with_axis_tuple(): + def fun(x): return to_scalar(np.sum(x, axis=(1,2))) + d_fun = lambda x : to_scalar(grad(fun)(x)) + mat = npr.randn(10, 11, 7) + check_grads(fun, mat) + check_grads(d_fun, mat) + def test_flipud(): def fun(x): return to_scalar(np.flipud(x)) d_fun = lambda x : to_scalar(grad(fun)(x))
add test for np.sum axis tuple support
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -10,8 +10,8 @@ setup(name='django-datatable-view', ' with (http://datatables.net/plug-ins/api). The package consists of ' 'a class-based view, and a small collection of utilities for rendering' ' table data from models.', - author='Tim Valenta', - author_email='tvalenta@pivotalenergysolutions.com', + author='Autumn Valenta', + author_email='avalenta@pivotalenergysolutions.com', url='https://github.com/pivotal-energy-solutions/django-datatable-view', download_url='https://github.com/pivotal-energy-solutions/django-datatable-view/tarball/django-datatable-view-0.9.0-beta.5', license='Apache License (2.0)', @@ -28,5 +28,5 @@ setup(name='django-datatable-view', packages=find_packages(exclude=['tests', 'tests.*']), package_data={'datatableview': ['static/js/*.js', 'templates/datatableview/*.html']}, include_package_data=True, - install_requires=['django>=1.2', 'python-dateutil>=2.1'], -) \ No newline at end of file + install_requires=['django>=1.11', 'python-dateutil>=2.1'], +)
Poke updated info into setup.py
py
diff --git a/tests/conftest.py b/tests/conftest.py index <HASH>..<HASH> 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -5,7 +5,8 @@ def pytest_configure(): DEBUG_PROPAGATE_EXCEPTIONS=True, DATABASES={ 'default': { - 'ENGINE': 'django.db.backends.sqlite3' + 'ENGINE': 'django.db.backends.sqlite3', + 'NAME': 'db.sqlite3' } }, SECRET_KEY='r-4p2y=uc56fmqsncog%3h!7hc=y+g)xtz+9y(prx*1o9dpry0',
Add name to DATABASES setting
py
diff --git a/ryu/services/protocols/bgp/speaker.py b/ryu/services/protocols/bgp/speaker.py index <HASH>..<HASH> 100644 --- a/ryu/services/protocols/bgp/speaker.py +++ b/ryu/services/protocols/bgp/speaker.py @@ -376,6 +376,8 @@ class BgpProtocol(Protocol, Activity): self._sendlock.acquire() try: self._socket.sendall(msg.serialize()) + except socket.error as err: + self.connection_lost('failed to write to socket') finally: self._sendlock.release()
bgp: catch socket error with sendall
py
diff --git a/glitter/tests/settings.py b/glitter/tests/settings.py index <HASH>..<HASH> 100644 --- a/glitter/tests/settings.py +++ b/glitter/tests/settings.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- +from __future__ import unicode_literals import os @@ -55,6 +56,7 @@ INSTALLED_APPS = ( 'glitter', 'glitter.pages', 'glitter.blocks.html', + 'glitter.blocks.redactor', 'glitter.tests.sampleblocks', )
Add redactor block in test settings file
py
diff --git a/py/test/testing/acceptance_test.py b/py/test/testing/acceptance_test.py index <HASH>..<HASH> 100644 --- a/py/test/testing/acceptance_test.py +++ b/py/test/testing/acceptance_test.py @@ -211,7 +211,7 @@ class TestPyTest(AcceptBase): """) result = self.runpytest(p1) assert_lines_contain_lines(result.outlines, [ - ".*test_fail.py F", + "*test_fail.py F", "====* FAILURES *====", "____*____", "", @@ -242,7 +242,7 @@ class TestPyTest(AcceptBase): """) result = self.runpytest(p1) assert_lines_contain_lines(result.outlines, [ - ".*test_one.py .F", + "*test_one.py .F", "====* FAILURES *====", "____*____", "*test_one.py:8: ValueError",
[svn r<I>] bah, forgot that helper uses fnmatch-style matching, not regular expression --HG-- branch : trunk
py
diff --git a/pyethereum/apiserver.py b/pyethereum/apiserver.py index <HASH>..<HASH> 100644 --- a/pyethereum/apiserver.py +++ b/pyethereum/apiserver.py @@ -210,21 +210,20 @@ def trace(txhash): test_blk.state.root_hash = pre_state # collect debug output - tl = TraceLogHandler() - tl.setLevel(logging.DEBUG) - processblock.logger.addHandler(tl) + log = [] + def log_receiver(name, data): + log.append({name:data}) + + processblock.pblogger.listeners.append(log_receiver) # apply tx (thread? we don't want logs from other invocations) processblock.apply_transaction(test_blk, tx) # stop collecting debug output - processblock.logger.removeHandler(tl) + processblock.pblogger.listeners.remove(log_receiver) # format - formatter = logging.Formatter('%(name)s:%(message)s') - res = '\n'.join(formatter.format(l) for l in tl.buffer) - return dict(trace=res) - + return dict(tx=txhash, trace=log) # ######## Accounts ############
use new logging in apiserver/trace
py
diff --git a/salt/cloud/clouds/ec2.py b/salt/cloud/clouds/ec2.py index <HASH>..<HASH> 100644 --- a/salt/cloud/clouds/ec2.py +++ b/salt/cloud/clouds/ec2.py @@ -2605,6 +2605,10 @@ def _get_node(name=None, instance_id=None, location=None): location = get_location() params = {'Action': 'DescribeInstances'} + + if str(name).startswith('i-') and len(name) == 10: + instance_id = name + if instance_id: params['InstanceId.1'] = instance_id else:
Properly handle unnamed/untagged instances
py
diff --git a/tests/func/test_add.py b/tests/func/test_add.py index <HASH>..<HASH> 100644 --- a/tests/func/test_add.py +++ b/tests/func/test_add.py @@ -96,7 +96,7 @@ class TestAddCmdDirectoryRecursive(TestDvc): warning = ( "You are adding a large directory 'large-dir' recursively," " consider tracking it as a whole instead.\n" - "{purple}HINT:{nc} Remove the generated DVC-files and then" + "{purple}HINT:{nc} Remove the generated DVC-file and then" " run {cyan}dvc add large-dir{nc}".format( purple=colorama.Fore.MAGENTA, cyan=colorama.Fore.CYAN,
tests: fix failing test_add (due to extra 's' in word) To close #<I>
py
diff --git a/raven/base.py b/raven/base.py index <HASH>..<HASH> 100644 --- a/raven/base.py +++ b/raven/base.py @@ -123,6 +123,7 @@ class Client(object): else: continue stack.append(frame) + stack.reverse() else: # assume stack was a list of frames stack = get_stack or []
Invert the direction of the auto-log traces so they match those given by Exceptions and culprit determination works properly.
py
diff --git a/manager.py b/manager.py index <HASH>..<HASH> 100644 --- a/manager.py +++ b/manager.py @@ -110,6 +110,7 @@ class ASTNGManager(OptionsProviderMixIn): raise except Exception, ex: if __debug__: + print 'error while building astng for', filepath import traceback traceback.print_exc() msg = 'Unable to load module %s (%s)' % (modname, ex)
tell in which file we have encountered an unexpected error --HG-- branch : _ast_compat
py
diff --git a/zipline/finance/trading.py b/zipline/finance/trading.py index <HASH>..<HASH> 100644 --- a/zipline/finance/trading.py +++ b/zipline/finance/trading.py @@ -214,12 +214,10 @@ class OrderDataSource(qmsg.DataSource): #no more orders, should this be an error condition? if len(rlist) == 0 or len(xlist) > 0: - #no order message means there was a timeout above, - #and the client is done sending orders (but isn't - #telling us himself!). - qutil.LOGGER.warn("signaling orders done on timeout.") - self.signal_done() - return + # no order message means there was a timeout above. + # this is an indeterminant case, we don't know the cause. + # the safest move is to break out of this loop and try again + break order_msg = rlist[0].recv()
fixed logic for timeouts on order. timeouts are not necessarily fatal.
py
diff --git a/tests/test_ytarray.py b/tests/test_ytarray.py index <HASH>..<HASH> 100644 --- a/tests/test_ytarray.py +++ b/tests/test_ytarray.py @@ -469,9 +469,9 @@ def test_unit_conversions(): base_units = {'length':'kpc','time':'Myr','mass':'1.0e14*Msun'} em3_converted = YTQuantity(1.545436840386756e-05, '100000000000000.0*Msun/(Myr**2*kpc)') - yield assert_equal, em3.in_custom(base_units), em3 - yield assert_array_almost_equal, em3.in_custom(base_units), em3_converted - yield assert_equal, str(em3.in_custom(base_units).units), '100000000000000.0*Msun/(Myr**2*kpc)' + yield assert_equal, em3.in_base(base_units=base_units), em3 + yield assert_array_almost_equal, em3.in_base(base_units=base_units), em3_converted + yield assert_equal, str(em3.in_base(base_units=base_units).units), '100000000000000.0*Msun/(Myr**2*kpc)' dimless = YTQuantity(1.0, "") yield assert_equal, dimless.in_cgs(), dimless
Taking account of the simplifications that were made --HG-- branch : yt
py
diff --git a/hearthstone/entities.py b/hearthstone/entities.py index <HASH>..<HASH> 100644 --- a/hearthstone/entities.py +++ b/hearthstone/entities.py @@ -164,8 +164,10 @@ class Player(Entity): # Exclude entity types that cannot be in the deck if not entity.can_be_in_deck: continue - # Exclude choice cards, The Coin, Malchezaar legendaries - if entity.tags.get(GameTag.CREATOR, 0): + + # Allow CREATOR=1 because of monster hunt decks. + # Everything else is likely a false positive. + if entity.tags.get(GameTag.CREATOR, 0) > 1: continue yield entity
entities: Allow CREATOR=1 in initial_deck
py
diff --git a/brome/core/runner/ec2_instance.py b/brome/core/runner/ec2_instance.py index <HASH>..<HASH> 100644 --- a/brome/core/runner/ec2_instance.py +++ b/brome/core/runner/ec2_instance.py @@ -67,10 +67,33 @@ class EC2Instance(BaseInstance): k = paramiko.RSAKey.from_private_key_file(self.browser_config.get('ssh_key_path')) ssh.connect(self.private_ip, username = self.browser_config.get('username'), pkey = k) - stdin, stdout, stderr = ssh.exec_command(command) + sleep_time = 0.1 + output = [] + error_output = [] + + ssh_transport = ssh.get_transport() + channel = ssh_transport.open_session() + channel.setblocking(0) + channel.exec_command(command) + + while True: + + while channel.recv_ready(): + output.append(channel.recv(1000)) + + while channel.recv_stderr_ready(): + error_output.append(channel.recv_stderr(1000)) + + if channel.exit_status_ready(): + break + + time.sleep(sleep_time) + + ret = channel.recv_exit_status() + ssh_transport.close() if read_output: - output = stdout.read() + output = 'stdout: %s ###stderr: %s'%(''.join(output), ''.join(error_output)) else: output = None
Better ssh exec command
py
diff --git a/libextract/html.py b/libextract/html.py index <HASH>..<HASH> 100644 --- a/libextract/html.py +++ b/libextract/html.py @@ -53,7 +53,4 @@ def get_final_text(node): return ' '.join(node.xpath(FILTER_TEXT)) -STRATEGY = [get_etree, - get_pairs, - highest_scoring, - get_final_text] +STRATEGY = (get_etree, get_pairs, highest_scoring, get_final_text)
use tuple to represent strategy
py
diff --git a/blockstack_cli_0.14.1/blockstack_client/proxy.py b/blockstack_cli_0.14.1/blockstack_client/proxy.py index <HASH>..<HASH> 100644 --- a/blockstack_cli_0.14.1/blockstack_client/proxy.py +++ b/blockstack_cli_0.14.1/blockstack_client/proxy.py @@ -350,6 +350,17 @@ def get_consensus_range(block_id_start, block_id_end, proxy=None): return resp +def get_name_blockchain_history(name, start_block, end_block, proxy=None): + """ + Get the name's historical blockchain records + """ + if proxy is None: + proxy = get_default_proxy() + + resp = proxy.get_name_blockchain_history(name, start_block, end_block) + return resp + + def get_nameops_at(block_id, proxy=None): """ Get the set of records as they were at a particular block.
Add get_name_blockchain_history
py
diff --git a/pyflakes/checker.py b/pyflakes/checker.py index <HASH>..<HASH> 100644 --- a/pyflakes/checker.py +++ b/pyflakes/checker.py @@ -139,9 +139,6 @@ class Scope(dict): def __repr__(self): return '<%s at 0x%x %s>' % (self.__class__.__name__, id(self), dict.__repr__(self)) - def __init__(self): - super(Scope, self).__init__() - class ClassScope(Scope): pass
Method Scope.__init__ is not needed.
py
diff --git a/ryu/lib/packet/llc.py b/ryu/lib/packet/llc.py index <HASH>..<HASH> 100644 --- a/ryu/lib/packet/llc.py +++ b/ryu/lib/packet/llc.py @@ -124,6 +124,8 @@ class llc(packet_base.PacketBase): _CTR_TYPES = {} _CTR_PACK_STR = '!2xB' + _MIN_LEN = _PACK_LEN + @staticmethod def register_control_type(register_cls): llc._CTR_TYPES[register_cls.TYPE] = register_cls
packet lib: provide llc._MIN_LEN
py
diff --git a/run_tests.py b/run_tests.py index <HASH>..<HASH> 100644 --- a/run_tests.py +++ b/run_tests.py @@ -17,5 +17,8 @@ for testsuite in tests_suites: runnable.addTest(unittest.makeSuite(test_case)) runner=unittest.TextTestRunner() -exit(runner.run(runnable)) +test_result = runner.run(runnable) + +if test_result.failures or test_result.errors: + exit(1)
run_tests: exit with code error if failures
py
diff --git a/pyrogram/__init__.py b/pyrogram/__init__.py index <HASH>..<HASH> 100644 --- a/pyrogram/__init__.py +++ b/pyrogram/__init__.py @@ -16,7 +16,7 @@ # You should have received a copy of the GNU Lesser General Public License # along with Pyrogram. If not, see <http://www.gnu.org/licenses/>. -__version__ = "1.1.4" +__version__ = "1.1.5" __license__ = "GNU Lesser General Public License v3 or later (LGPLv3+)" __copyright__ = "Copyright (C) 2017-2020 Dan <https://github.com/delivrance>"
Update Pyrogram to <I>
py
diff --git a/knowledge_base/__init__.py b/knowledge_base/__init__.py index <HASH>..<HASH> 100755 --- a/knowledge_base/__init__.py +++ b/knowledge_base/__init__.py @@ -590,9 +590,9 @@ class KnowledgeBase(object): title.ecrm_P139_has_alternative_form = abbr title.update() - urn = self.create_urn(work.subject, urn) + work_urn = self.create_urn(work.subject, urn) work.efrbroo_P102_has_title.append(title) - work.ecrm_P1_is_identified_by.append(urn) + work.ecrm_P1_is_identified_by.append(work_urn) work.update() # create CreationEvent to connect author and work
corrected small bug in kb.add_work()
py
diff --git a/bt/core.py b/bt/core.py index <HASH>..<HASH> 100644 --- a/bt/core.py +++ b/bt/core.py @@ -321,6 +321,14 @@ class StrategyBase(Node): return self._capital @property + def cash(self): + """ + TimeSeries of unallocated capital. + """ + # no stale check needed + return self._cash + + @property def universe(self): """ Data universe available at the current time. @@ -403,11 +411,13 @@ class StrategyBase(Node): # setup internal data self.data = pd.DataFrame(index=funiverse.index, - columns=['price', 'value'], + columns=['price', 'value', 'cash', 'fees'], data=0.0) self._prices = self.data['price'] self._values = self.data['value'] + self._cash = self.data['cash'] + self._fees = self.data['fees'] # setup children as well - use original universe here - don't want to # pollute with potential strategy children in funiverse @@ -494,6 +504,10 @@ class StrategyBase(Node): for c in self._strat_children: self._universe.loc[date, c] = self.children[c].price + # Cash should track the unallocated capital at the end of the day, so + # we should update it every time we call "update" + self._cash[self.now] = self._capital + # update paper trade if necessary if newpt and self._paper_trade: self._paper.update(date)
ENH: unallocated cash is saved in "data" structure of the Strategy
py
diff --git a/pyrogram/client/types/__init__.py b/pyrogram/client/types/__init__.py index <HASH>..<HASH> 100644 --- a/pyrogram/client/types/__init__.py +++ b/pyrogram/client/types/__init__.py @@ -28,13 +28,11 @@ from .input_media import ( InputMediaAudio, InputPhoneContact, InputMediaVideo, InputMediaPhoto, InputMediaDocument, InputMediaAnimation ) -from .media import ( +from .messages_and_media import ( Audio, Contact, Document, Animation, Location, Photo, PhotoSize, - Sticker, Venue, Video, VideoNote, Voice, UserProfilePhotos + Sticker, Venue, Video, VideoNote, Voice, UserProfilePhotos, + Message, Messages, MessageEntity ) -from .message import Message -from .message_entity import MessageEntity -from .messages import Messages from .update import Update from .user_and_chats import ( Chat, ChatMember, ChatMembers, ChatPhoto,
Fix init not having message and media types
py
diff --git a/nameko/standalone/rpc.py b/nameko/standalone/rpc.py index <HASH>..<HASH> 100644 --- a/nameko/standalone/rpc.py +++ b/nameko/standalone/rpc.py @@ -78,11 +78,13 @@ class PollingQueueConsumer(object): if self.consumer is not None: try: self.consumer.cancel() - except socket.error: # pragma: no cover + except (socket.error, IOError): # pragma: no cover # On some systems (e.g. os x) we need to explicitly cancel the # consumer here. However, e.g. on ubuntu 14.04, the # disconnection has already closed the socket. We try to # cancel, and ignore any socket errors. + # If the socket has been closed, an IOError is raised, ignore + # it and assume the consumer is already cancelled. pass channel = self.connection.channel()
Catch IOError in PollingQueueConsumer `py-amqplib` may throw `IOError: socket closed` when it can't read from or write to the socket. When this happens, assume the consumer is already cancelled.
py
diff --git a/MAVProxy/modules/mavproxy_link.py b/MAVProxy/modules/mavproxy_link.py index <HASH>..<HASH> 100644 --- a/MAVProxy/modules/mavproxy_link.py +++ b/MAVProxy/modules/mavproxy_link.py @@ -378,11 +378,11 @@ class LinkModule(mp_module.MPModule): sysid = m.get_srcSystem() if sysid in self.mpstate.sysid_outputs: self.mpstate.sysid_outputs[sysid].write(m.get_msgbuf()) - if m.get_type() == "GLOBAL_POSITION_INT" and self.module('map') is not None: - self.module('map').set_secondary_vehicle_position(m) - mod = self.module('asterix') - if mod: - mod.set_secondary_vehicle_position(m) + if m.get_type() == "GLOBAL_POSITION_INT": + for modname in 'map', 'asterix', 'NMEA', 'NMEA2': + mod = self.module(modname) + if mod is not None: + mod.set_secondary_vehicle_position(m) return if getattr(m, '_timestamp', None) is None:
mavproxy_link: pass sysid-output position through to NMEA module
py
diff --git a/findimports.py b/findimports.py index <HASH>..<HASH> 100755 --- a/findimports.py +++ b/findimports.py @@ -43,6 +43,11 @@ Caching: findimports.py foo.importcache -d -T > graph1.dot findimports.py foo.importcache -d -N -c -p -l 2 > graph2.dot +Bugs: + + findimports doesn't know about scoping rules and may emit false + warnings. + Copyright (c) 2003--2007 Marius Gedminas <marius@pov.lt> This program is free software; you can redistribute it and/or modify it under @@ -152,8 +157,15 @@ class ImportFinderAndNameTracker(ImportFinder): if not imported_as: imported_as = name if imported_as != "*": - self.unused_names[imported_as] = ImportInfo(imported_as, - node.lineno) + if imported_as in self.unused_names: + where = self.unused_names[imported_as].lineno + print >> sys.stderr, ("%s:%s: %s imported again" + " (first imported on line %s)" + % (self.filename, node.lineno, + imported_as, where)) + else: + self.unused_names[imported_as] = ImportInfo(imported_as, + node.lineno) def visitName(self, node): if node.name in self.unused_names:
Make findumports.py -u report duplicate imports too. Originally committed <I>-<I>-<I> <I>:<I>:<I> <I> to a different SVN repository (python-tools) as revision <I>.
py
diff --git a/pycbc/ahope/ahope_utils.py b/pycbc/ahope/ahope_utils.py index <HASH>..<HASH> 100644 --- a/pycbc/ahope/ahope_utils.py +++ b/pycbc/ahope/ahope_utils.py @@ -61,14 +61,15 @@ def make_analysis_dir(path): Make the analysis directory path, any parent directories that don't already exist, and the 'logs' subdirectory of path. """ - makedir(os.path.join(path, 'logs')) + if path is not None: + makedir(os.path.join(path, 'logs')) def makedir(path): """ Make the analysis directory path and any parent directories that don't already exist. Will do nothing if path already exists. """ - if not os.path.exists(path): + if not os.path.exists(path) and path is not None: os.makedirs(path) def is_condor_exec(exe_path):
check that path is not None in direction creation functions
py
diff --git a/lib/bibformat_engine.py b/lib/bibformat_engine.py index <HASH>..<HASH> 100644 --- a/lib/bibformat_engine.py +++ b/lib/bibformat_engine.py @@ -225,7 +225,7 @@ def call_old_bibformat(recID, format="HD", on_the_fly=False, verbose=0): (result_code, result_path) = tempfile.mkstemp() command = "( %s/bibformat otype=%s ) > %s" % (bindir, format, result_path) (xm_code, xm_path) = tempfile.mkstemp() - xm_file.open(xm_path, "w") + xm_file = open(xm_path, "w") xm_file.write(xm_record) xm_file.close() command = command + " <" + xm_path
Fixed bug in code for popen replacement (old BibFormat).
py