diff
stringlengths 139
3.65k
| message
stringlengths 8
627
| diff_languages
stringclasses 1
value |
|---|---|---|
diff --git a/tests/test_tar_hacks.py b/tests/test_tar_hacks.py
index <HASH>..<HASH> 100644
--- a/tests/test_tar_hacks.py
+++ b/tests/test_tar_hacks.py
@@ -2,12 +2,18 @@ from wal_e import tar_partition
import os
-# Test that _fsync_files() syncs all files and also, if possible, all
-# directories passed to it. There is a separate test in test_blackbox
-# that tar_file_extract() actually calls _fsync_files and passes it
-# the expected list of files.
def test_fsync_tar_members(monkeypatch, tmpdir):
+ """Test that _fsync_files() syncs all files and directories
+
+ Syncing directories is a platform specific feature, so it is
+ optional.
+
+ There is a separate test in test_blackbox that tar_file_extract()
+ actually calls _fsync_files and passes it the expected list of
+ files.
+
+ """
dira = tmpdir.join('dira').ensure(dir=True)
dirb = tmpdir.join('dirb').ensure(dir=True)
foo = dira.join('foo').ensure()
|
Use a more idiomatic docstring for test documentation Moves things around a bit is all.
|
py
|
diff --git a/trolly/board.py b/trolly/board.py
index <HASH>..<HASH> 100644
--- a/trolly/board.py
+++ b/trolly/board.py
@@ -83,8 +83,21 @@ class Board(trelloobject.TrelloObject):
return self.create_card(card_json)
+ def get_checklists( self ):
+ """
+ Get the checklists for this board. Returns a list of Checklist objects.
+ """
+ checklists = self.getChecklistsJson( self.base_uri )
+
+ checklists_list = []
+ for checklist_json in checklists:
+ checklists_list.append( self.createChecklist( checklist_json ) )
+
+ return checklists_list
+
+
def get_members(self, **query_params):
- '''
+ """
Get Members attached to this board. Returns a list of Member objects.
Returns:
|
Add a get_checklists() method to the Board class The API exposes this method which can make certain tasks more efficient by avoiding having to enumerate all cards first. Conflicts: trolly/board.py Update to current orthography
|
py
|
diff --git a/src/metpy/calc/thermo.py b/src/metpy/calc/thermo.py
index <HASH>..<HASH> 100644
--- a/src/metpy/calc/thermo.py
+++ b/src/metpy/calc/thermo.py
@@ -1542,10 +1542,10 @@ def mixing_ratio_from_relative_humidity(pressure, temperature, relative_humidity
-----
Formula adapted from [Hobbs1977]_ pg. 74.
- .. math:: w = (relative_humidity)(w_s)
+ .. math:: w = (rh)(w_s)
* :math:`w` is mixing ratio
- * :math:`relative_humidity` is relative humidity as a unitless ratio
+ * :math:`rh` is relative humidity as a unitless ratio
* :math:`w_s` is the saturation mixing ratio
.. versionchanged:: 1.0
@@ -1589,9 +1589,9 @@ def relative_humidity_from_mixing_ratio(pressure, temperature, mixing_ratio):
-----
Formula based on that from [Hobbs1977]_ pg. 74.
- .. math:: relative_humidity = \frac{w}{w_s}
+ .. math:: rh = \frac{w}{w_s}
- * :math:`relative_humidity` is relative humidity as a unitless ratio
+ * :math:`rh` is relative humidity as a unitless ratio
* :math:`w` is mixing ratio
* :math:`w_s` is the saturation mixing ratio
|
DOC: Fix formula rendering in some docstrings Using relative_humidity in a formula makes for some bad latex equations--use rh instead.
|
py
|
diff --git a/linode_api4/linode_client.py b/linode_api4/linode_client.py
index <HASH>..<HASH> 100644
--- a/linode_api4/linode_client.py
+++ b/linode_api4/linode_client.py
@@ -507,7 +507,7 @@ class LKEGroup(Group):
for c in node_pools:
if isinstance(c, dict):
new_pool = {
- "type": c["type"].id if "type" in c and issubclass(c["type"], Base) else c.get("type"),
+ "type": c["type"].id if "type" in c and issubclass(type(c["type"]), Base) else c.get("type"),
"count": c.get("count"),
}
@@ -515,14 +515,14 @@ class LKEGroup(Group):
params = {
"label": label,
- "region": region.id if issubclass(region, Base) else region,
+ "region": region.id if issubclass(type(region), Base) else region,
"node_pools": pools,
}
params.update(kwargs)
result = self.client.post('/lke/clusters', data=params)
- if not 'id' in result:
+ if 'id' not in result:
raise UnexpectedResponseError('Unexpected response when creating LKE cluster!', json=result)
return LKECluster(self.client, result['id'], result)
|
add type conversion - previously the issubclass check would always fail because it is checking the instance of an object instead of the class of the object
|
py
|
diff --git a/wechatpy/client/api/card.py b/wechatpy/client/api/card.py
index <HASH>..<HASH> 100644
--- a/wechatpy/client/api/card.py
+++ b/wechatpy/client/api/card.py
@@ -5,6 +5,8 @@ from wechatpy.client.api.base import BaseWeChatAPI
class WeChatCard(BaseWeChatAPI):
+
+ API_BASE_URL = 'https://api.weixin.qq.com/'
def create(self, card_data):
"""
|
API_BASE_URL change in card api
|
py
|
diff --git a/holoviews/plotting/bokeh/util.py b/holoviews/plotting/bokeh/util.py
index <HASH>..<HASH> 100644
--- a/holoviews/plotting/bokeh/util.py
+++ b/holoviews/plotting/bokeh/util.py
@@ -197,7 +197,7 @@ def compute_static_patch(document, models, json=None):
events.append((priority, event))
update_types[obj['type']].append(key)
- events = [e for _, e in sorted(events)]
+ events = [e for _, e in sorted(events, key=lambda x: x[0])]
value_refs = {ref_id: val for ref_id, val in value_refs.items()
if val['type'] not in IGNORED_MODELS}
return dict(events=events, references=list(value_refs.values()))
|
Fix bokeh event sorting in py3
|
py
|
diff --git a/spyder/utils/programs.py b/spyder/utils/programs.py
index <HASH>..<HASH> 100644
--- a/spyder/utils/programs.py
+++ b/spyder/utils/programs.py
@@ -519,7 +519,7 @@ def open_files_with_application(app_path, fnames):
"""
return_codes = {}
- if os.name == 'nt':
+ if os.name == 'nt':
fnames = [fname.replace('\\', '/') for fname in fnames]
if sys.platform == 'darwin':
|
Fix opening files with spaces
|
py
|
diff --git a/peony/requests.py b/peony/requests.py
index <HASH>..<HASH> 100644
--- a/peony/requests.py
+++ b/peony/requests.py
@@ -81,10 +81,6 @@ class AbstractRequest(ABC, Endpoint):
elif isinstance(value, bool):
params[key] = "true" if value else "false"
- # integers conversion
- elif isinstance(value, int):
- params[key] = str(value)
-
# iterables conversion
elif isinstance(value, iterable):
params[key] = ",".join(map(str, value))
@@ -93,9 +89,10 @@ class AbstractRequest(ABC, Endpoint):
elif value is None:
pass
- # the rest is sent as is
+ # the rest is converted to str
+ # (make sure you don't send something wrong)
else:
- params[key] = value
+ params[key] = str(value)
# dict with other items (+ strip "_" from keys)
kwargs = {key[1:]: value for key, value in kwargs.items()
|
send any kind of data by converting values to str
|
py
|
diff --git a/riak/util.py b/riak/util.py
index <HASH>..<HASH> 100644
--- a/riak/util.py
+++ b/riak/util.py
@@ -1,8 +1,12 @@
-import collections
+try:
+ from collections import Mapping
+except ImportError:
+ # compatibility with Python 2.5
+ Mapping = dict
def quacks_like_dict(object):
"""Check if object is dict-like"""
- return isinstance(object, collections.Mapping)
+ return isinstance(object, Mapping)
def deep_merge(a, b):
"""Merge two deep dicts non-destructively
|
Adjust for compatibility with Python <I>
|
py
|
diff --git a/typedload/dataloader.py b/typedload/dataloader.py
index <HASH>..<HASH> 100644
--- a/typedload/dataloader.py
+++ b/typedload/dataloader.py
@@ -489,9 +489,12 @@ def _namedtupleload(l: Loader, value: Dict[str, Any], type_) -> Any:
raise TypedloadValueError(str(e), value=value, type_=type_)
if hasattr(type_, '__required_keys__') and hasattr(type_, '__optional_keys__'):
- # TypedDict
+ # TypedDict, since 3.9
necessary_fields = type_.__required_keys__
optional_fields = type_.__optional_keys__
+ elif getattr(type_, '__total__', True) == False:
+ # TypedDict, only for 3.8
+ necessary_fields = set()
else:
necessary_fields = fields.difference(optional_fields)
|
Re-introduce old behaviour checking __total__ The __required_keys__ and __optional_keys__ are only present from <I> so in <I> there is no way of achieving the feature. So reintroducing ALSO the older code so that at least total=False will work.
|
py
|
diff --git a/pyrogram/client/client.py b/pyrogram/client/client.py
index <HASH>..<HASH> 100644
--- a/pyrogram/client/client.py
+++ b/pyrogram/client/client.py
@@ -1051,7 +1051,7 @@ class Client(Methods, BaseClient):
bytes=chunk
)
- assert self.send(rpc), "Couldn't upload file"
+ assert session.send(rpc), "Couldn't upload file"
if is_missing_part:
return
|
fixed session mistake use new session when uploading files instead of main session
|
py
|
diff --git a/abydos/bm.py b/abydos/bm.py
index <HASH>..<HASH> 100644
--- a/abydos/bm.py
+++ b/abydos/bm.py
@@ -329,9 +329,10 @@ def _bm_apply_final_rules(phonetic, final_rules, language_arg, strip):
def _bm_phonetic_number(phonetic):
- bracket = phonetic.find('[')
- if bracket != -1:
- return phonetic[:bracket]
+ """Remove bracketed text from the end of a string
+ """
+ if '[' in phonetic:
+ return phonetic[:phonetic.find('[')]
return phonetic # experimental !!!!
|
added docstring to _bm_phonetic_number & made a little more pythonic
|
py
|
diff --git a/gcloud/bigquery/table.py b/gcloud/bigquery/table.py
index <HASH>..<HASH> 100644
--- a/gcloud/bigquery/table.py
+++ b/gcloud/bigquery/table.py
@@ -101,11 +101,8 @@ class Table(object):
:raises: TypeError if 'value' is not a sequence, or ValueError if
any item in the sequence is not a SchemaField
"""
- fields = list(value)
- if len(fields) > 0:
- types = set([type(field) for field in fields])
- if types != set([SchemaField]):
- raise ValueError('Schema items must be fields')
+ if not all(isinstance(field, SchemaField) for field in value):
+ raise ValueError('Schema items must be fields')
self._schema = tuple(value)
@property
|
Simplify test for non-fields, avoid intermediate set/list. Addresses: <URL>
|
py
|
diff --git a/andes/system.py b/andes/system.py
index <HASH>..<HASH> 100644
--- a/andes/system.py
+++ b/andes/system.py
@@ -93,6 +93,7 @@ class System:
def __init__(self,
case: Optional[str] = None,
name: Optional[str] = None,
+ config: Optional[Dict] = None,
config_path: Optional[str] = None,
default_config: Optional[bool] = False,
options: Optional[Dict] = None,
@@ -121,7 +122,7 @@ class System:
self._config_path = None
self._config_object = self.load_config(self._config_path)
- self.config = Config(self.__class__.__name__)
+ self.config = Config(self.__class__.__name__, dct=config)
self.config.load(self._config_object)
# custom configuration for system goes after this line
|
Support providing a configuration to System. This allows us to provide a high-importance configuration, which can be extended from the config file found on the path. This allows greater flexibility rather than depending on inference of finding config files, especially when running multiple Systems in the same process.
|
py
|
diff --git a/tests/core.py b/tests/core.py
index <HASH>..<HASH> 100644
--- a/tests/core.py
+++ b/tests/core.py
@@ -455,7 +455,7 @@ class WebLdapAuthTest(unittest.TestCase):
configuration.conf.set("webserver", "authenticate", "True")
configuration.conf.set("webserver", "auth_backend", "airflow.contrib.auth.backends.ldap_auth")
try:
- configuration.add_section("ldap")
+ configuration.conf.add_section("ldap")
except:
pass
configuration.conf.set("ldap", "uri", "ldap://localhost:3890")
|
add_section is only part of configuration.conf
|
py
|
diff --git a/docs/conf.py b/docs/conf.py
index <HASH>..<HASH> 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -1,3 +1,17 @@
+# Copyright 2018 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
# -*- coding: utf-8 -*-
#
# Configuration file for the Sphinx documentation builder.
|
Add license header to docs/conf.py.
|
py
|
diff --git a/pyoko/manage.py b/pyoko/manage.py
index <HASH>..<HASH> 100644
--- a/pyoko/manage.py
+++ b/pyoko/manage.py
@@ -522,13 +522,13 @@ and .js extensions will be loaded."""},
def save_obj(self, bucket_name, key, val):
key = key or None
- if self.manager.args.update or key is None:
+ if key is None:
data = val.encode('utf-8') if self.typ == self.CSV else val
self.buckets[bucket_name].new(key, data).store()
self.record_counter += 1
else:
obj = self.buckets[bucket_name].get(key)
- if not obj.exists:
+ if not obj.exists or self.manager.args.update:
obj.data = val.encode('utf-8') if self.typ == self.CSV else val
obj.store()
self.record_counter += 1
|
fixed the force insertion (update) mechanism of load_data command
|
py
|
diff --git a/src/fam/tests/sync_gateway/config.py b/src/fam/tests/sync_gateway/config.py
index <HASH>..<HASH> 100755
--- a/src/fam/tests/sync_gateway/config.py
+++ b/src/fam/tests/sync_gateway/config.py
@@ -13,6 +13,8 @@ SYNC_GATEWAY_PORT = "4984"
SYNC_GATEWAY_ADMIN_PORT = "4985"
SYNC_GATEWAY_NAME = "sync_gateway"
+SYNC_GATEWAY_PATH = "/opt/couchbase-sync-gateway/bin/sync_gateway"
+
|
fixing path for sync_gateway
|
py
|
diff --git a/peyotl/nexson_syntax/nexson2nexml.py b/peyotl/nexson_syntax/nexson2nexml.py
index <HASH>..<HASH> 100644
--- a/peyotl/nexson_syntax/nexson2nexml.py
+++ b/peyotl/nexson_syntax/nexson2nexml.py
@@ -36,7 +36,7 @@ def _create_sub_el(doc, parent, tag, attrib, data=None):
el.setAttribute(att_key, att_value)
if parent:
parent.appendChild(el)
- if data:
+ if data is not None:
if data is True:
el.appendChild(doc.createTextNode('true'))
elif data is False:
|
dealing with False in boolean meta data If data evaluates to boolean = false then the NeXML output had been an empty element rather than <meta ...>false</meta>
|
py
|
diff --git a/pngcanvas.py b/pngcanvas.py
index <HASH>..<HASH> 100644
--- a/pngcanvas.py
+++ b/pngcanvas.py
@@ -220,9 +220,16 @@ class PNGCanvas(object):
(width, height, bit_depth, color_type, compression,
filter_type, interlace) = struct.unpack(b"!2I5B", header[1])
- if (bit_depth, color_type, compression,
- filter_type, interlace) != (8, 6, 0, 0, 0):
- raise TypeError('Unsupported PNG format')
+ if bit_depth != 8:
+ raise ValueError('Unsupported PNG format (bit depth={}; must be 8)'.format(bit_depth))
+ if compression != 0:
+ raise ValueError('Unsupported PNG format (compression={}; must be 0)'.format(compression))
+ if filter_type != 0:
+ raise ValueError('Unsupported PNG format (filter_type={}; must be 0)'.format(filter_type))
+ if interlace != 0:
+ raise ValueError('Unsupported PNG format (interlace={}; must be 0)'.format(interlace))
+ if color_type != 6:
+ raise ValueError('Unsupported PNG format (color_type={}; must be 6)'.format(color_type))
self.width = width
self.height = height
|
Provide more detailed error information for load. When attempting to load a PNG file, if that file is unsupported it is useful to provide the developer with a more precise error message indicating the exact cause of the error. Have also changed the type of exception to be a ValueError rather than a TypeError. In this case the type is correct, however the value received is incorrect (or at least unsupported). Probably worth changing to a specific exception in the future.
|
py
|
diff --git a/tests/lax_numpy_indexing_test.py b/tests/lax_numpy_indexing_test.py
index <HASH>..<HASH> 100644
--- a/tests/lax_numpy_indexing_test.py
+++ b/tests/lax_numpy_indexing_test.py
@@ -757,6 +757,11 @@ class IndexingTest(jtu.JaxTestCase):
x = lnp.array([1, 2, 3])
self.assertRaises(TypeError, lambda: x[3.5])
+ def testIndexOutOfBounds(self): # https://github.com/google/jax/issues/2245
+ array = lnp.ones(5)
+ self.assertAllClose(array, array[:10], check_dtypes=True)
+
+
def _broadcastable_shapes(shape):
"""Returns all shapes that broadcast to `shape`."""
def f(rshape):
|
Added test case for indexing out of bounds
|
py
|
diff --git a/flask_cloudy.py b/flask_cloudy.py
index <HASH>..<HASH> 100644
--- a/flask_cloudy.py
+++ b/flask_cloudy.py
@@ -582,7 +582,7 @@ class Object(object):
s2s = "GET\n\n\n{expires}\n/{object_name}"\
.format(expires=expires, object_name=self.path)
- h = hmac.new(self.driver.secret, s2s, hashlib.sha1)
+ h = hmac.new(self.driver.secret.encode('utf-8'), s2s.encode('utf-8'), hashlib.sha1)
s = base64.encodestring(h.digest()).strip()
_keyIdName = "AWSAccessKeyId" if "s3" in driver_name else "GoogleAccessId"
params = {
|
Encode secret and request before hmac
|
py
|
diff --git a/ryu/ofproto/ofproto_v1_4_parser.py b/ryu/ofproto/ofproto_v1_4_parser.py
index <HASH>..<HASH> 100644
--- a/ryu/ofproto/ofproto_v1_4_parser.py
+++ b/ryu/ofproto/ofproto_v1_4_parser.py
@@ -2343,6 +2343,25 @@ class OFPActionCopyTtlOut(OFPAction):
return cls()
+@OFPAction.register_action_type(ofproto.OFPAT_COPY_TTL_IN,
+ ofproto.OFP_ACTION_HEADER_SIZE)
+class OFPActionCopyTtlIn(OFPAction):
+ """
+ Copy TTL In action
+
+ This action copies the TTL from the outermost header with TTL to the
+ next-to-outermost header with TTL.
+ """
+ def __init__(self, type_=None, len_=None):
+ super(OFPActionCopyTtlIn, self).__init__()
+
+ @classmethod
+ def parser(cls, buf, offset):
+ (type_, len_) = struct.unpack_from(
+ ofproto.OFP_ACTION_HEADER_PACK_STR, buf, offset)
+ return cls()
+
+
@OFPAction.register_action_type(ofproto.OFPAT_SET_FIELD,
ofproto.OFP_ACTION_SET_FIELD_SIZE)
class OFPActionSetField(OFPAction):
|
Add OF<I> CopyTtlIn action support
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -46,7 +46,7 @@ setup(
"ruamel.yaml<0.15",
"numpy>=1.13",
"pandas>=0.17.0",
- "optlang>=1.2.5",
+ "optlang>=1.4.2",
"tabulate",
"depinfo"
],
|
bump optlang version req for functional gurobi
|
py
|
diff --git a/mmcv/torchpack/runner/runner.py b/mmcv/torchpack/runner/runner.py
index <HASH>..<HASH> 100644
--- a/mmcv/torchpack/runner/runner.py
+++ b/mmcv/torchpack/runner/runner.py
@@ -108,11 +108,14 @@ class Runner(object):
Args:
optimizer (dict or :obj:`~torch.optim.Optimizer`): Either an
optimizer object or a dict used for constructing the optimizer.
- An example of the dict: ``{'algorithm': 'SGD', 'lr': 0.02,
- 'momentum': 0.9, 'weight_decay': 0.0001}``.
Returns:
:obj:`~torch.optim.Optimizer`: An optimizer object.
+
+ Examples:
+ >>> optimizer = dict(type='SGD', lr=0.01, momentum=0.9)
+ >>> type(runner.init_optimizer(optimizer))
+ <class 'torch.optim.sgd.SGD'>
"""
if isinstance(optimizer, dict):
optimizer = obj_from_dict(
|
fix a typo in docstring
|
py
|
diff --git a/easytrader/httrader.py b/easytrader/httrader.py
index <HASH>..<HASH> 100644
--- a/easytrader/httrader.py
+++ b/easytrader/httrader.py
@@ -134,7 +134,7 @@ class HTTrader(WebTrader):
need_data_index = 0
need_data = search_result.groups()[need_data_index]
bytes_data = base64.b64decode(need_data)
- log.debug('trade info bytes data: ', bytes_data)
+ log.debug('trade info bytes data: %s' % bytes_data)
try:
str_data = bytes_data.decode('gbk')
except UnicodeDecodeError:
|
fix(httrader): fix trade info debug log output
|
py
|
diff --git a/fabfile.py b/fabfile.py
index <HASH>..<HASH> 100644
--- a/fabfile.py
+++ b/fabfile.py
@@ -25,6 +25,7 @@ def build():
local("mkdir -p dist")
local("go clean ./...")
local("go build -a -o dist/gandalf-webserver ./webserver")
+ local("go build -a -o /usr/local/bin/gandalf ./bin")
def clean():
|
fabfile: rebuilding bin and overriding old one
|
py
|
diff --git a/src/doc/conf.py b/src/doc/conf.py
index <HASH>..<HASH> 100644
--- a/src/doc/conf.py
+++ b/src/doc/conf.py
@@ -25,8 +25,8 @@ if MOCK_MODULES and on_rtd:
project = 'Astral'
author = 'Simon Kennedy'
copyright = '2009-2018, %s' % author
-version = '1.5'
-release = '1.5'
+version = '1.6'
+release = '1.6'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
|
Updated version to <I>
|
py
|
diff --git a/telemetry/telemetry/internal/backends/chrome_inspector/native_profiling_backend.py b/telemetry/telemetry/internal/backends/chrome_inspector/native_profiling_backend.py
index <HASH>..<HASH> 100644
--- a/telemetry/telemetry/internal/backends/chrome_inspector/native_profiling_backend.py
+++ b/telemetry/telemetry/internal/backends/chrome_inspector/native_profiling_backend.py
@@ -31,7 +31,7 @@ class NativeProfilingBackend(object):
def DumpProfilingDataOfAllProcesses(self, timeout=120):
"""Causes all profiling data of all Chrome processes to be dumped to disk.
"""
- method = 'NativeProfiling.dumpNativeProfilingDataOfAllProcesses'
+ method = 'NativeProfiling.dumpProfilingDataOfAllProcesses'
request = {'method': method}
try:
response = self._inspector_websocket.SyncRequest(request, timeout)
|
Update NativeProfilingBackend method call The method introduced in crrev/c/<I> was DumpProfilingDataOfAllProcesses. Bug: chromium:<I> Change-Id: I<I>c<I>b<I>d4df<I>e<I>ca<I>ade<I> Reviewed-on: <URL>
|
py
|
diff --git a/ds4drv.py b/ds4drv.py
index <HASH>..<HASH> 100644
--- a/ds4drv.py
+++ b/ds4drv.py
@@ -77,7 +77,9 @@ DS4Report = namedtuple("DS4Report",
"trackpad_touch1_y",
"timestamp",
"battery",
- "charging"])
+ "plug_usb",
+ "plug_audio",
+ "plug_mic"])
class Daemon(object):
@@ -464,8 +466,11 @@ class DS4Device(object):
# Timestamp and battery
buf[10] >> 2,
- buf[33] % 0x10,
- (buf[33] & 0x10) != 0
+ buf[33] % 16,
+
+ # External inputs (usb, audio, mic)
+ (buf[33] & 16) != 0, (buf[33] & 32) != 0,
+ (buf[33] & 64) != 0
)
@property
@@ -640,7 +645,7 @@ def read_device(device, controller):
led_flashing = True
for report in device.reports:
if options.battery_flash:
- if report.battery < 2 and not report.charging:
+ if report.battery < 2 and not report.plug_usb:
if not led_flashing and (time() - led_last_flash) > 60:
device.control(led_red=options.led[0],
led_green=options.led[1],
|
Add some more report parsing.
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -23,7 +23,7 @@ from setuptools import setup
setup(
name='msm',
- version='0.5.13',
+ version='0.5.14',
packages=['msm'],
install_requires=['GitPython', 'typing'],
url='https://github.com/MycroftAI/mycroft-skills-manager',
|
Increment version to <I>
|
py
|
diff --git a/ffpyplayer/__init__.py b/ffpyplayer/__init__.py
index <HASH>..<HASH> 100644
--- a/ffpyplayer/__init__.py
+++ b/ffpyplayer/__init__.py
@@ -33,18 +33,16 @@ It is read only.
_ffmpeg = join(sys.prefix, 'share', 'ffpyplayer', 'ffmpeg', 'bin')
if isdir(_ffmpeg):
+ os.environ["PATH"] += os.pathsep + _ffmpeg
if hasattr(os, 'add_dll_directory'):
os.add_dll_directory(_ffmpeg)
- else:
- os.environ["PATH"] += os.pathsep + _ffmpeg
dep_bins.append(_ffmpeg)
_sdl = join(sys.prefix, 'share', 'ffpyplayer', 'sdl', 'bin')
if isdir(_sdl):
+ os.environ["PATH"] += os.pathsep + _sdl
if hasattr(os, 'add_dll_directory'):
os.add_dll_directory(_sdl)
- else:
- os.environ["PATH"] += os.pathsep + _sdl
dep_bins.append(_sdl)
if 'SDL_AUDIODRIVER' not in os.environ and platform.system() == 'Windows':
|
Always add dlls dir to path.
|
py
|
diff --git a/raiden/transfer/state.py b/raiden/transfer/state.py
index <HASH>..<HASH> 100644
--- a/raiden/transfer/state.py
+++ b/raiden/transfer/state.py
@@ -789,6 +789,8 @@ class BalanceProofUnsignedState(State):
'token_network_identifier': to_checksum_address(self.token_network_identifier),
'channel_identifier': self.channel_identifier,
'chain_id': self.chain_id,
+ # Makes the balance hash available to query
+ 'balance_hash': serialize_bytes(self.balance_hash),
}
@classmethod
|
Bugfix: Unsigned balance serialization The unsigned balance proof also needs to expose the balance hash, otherwise the sent balance proofs are not queryable.
|
py
|
diff --git a/entei.py b/entei.py
index <HASH>..<HASH> 100755
--- a/entei.py
+++ b/entei.py
@@ -125,7 +125,10 @@ def tokenize(template):
# If we might be a standalone and we aren't a tag that can't
# be a standalone
if is_standalone and tag_type not in ['variable', 'no escape']:
- until, template = template.split('\n', 1)
+ try:
+ until, template = template.split('\n', 1)
+ except ValueError:
+ until, template = (template, '')
# If the stuff to the right of us are spaces
if until.isspace() or until == '':
|
Handle no newline after standalone.
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -2,7 +2,7 @@
# coding: utf-8
from setuptools import setup
-import sentry_telegram
+from sentry_telegram import __version__
with open('README.rst', 'r') as f:
@@ -11,7 +11,7 @@ with open('README.rst', 'r') as f:
setup(
name='sentry_telegram',
- version=sentry_telegram.__version__,
+ version=__version__,
packages=['sentry_telegram'],
url='https://github.com/butorov/sentry-telegram',
author='Viacheslav Butorov',
|
Try to automate PyPI deploy with Travis CI. Change import.
|
py
|
diff --git a/dev/merge_spark_pr.py b/dev/merge_spark_pr.py
index <HASH>..<HASH> 100755
--- a/dev/merge_spark_pr.py
+++ b/dev/merge_spark_pr.py
@@ -242,8 +242,8 @@ def resolve_jira_issue(merge_branches, comment, default_jira_id=""):
cur_summary = issue.fields.summary
cur_assignee = issue.fields.assignee
if cur_assignee is None:
- cur_assignee = choose_jira_assignee(issue)
- # Check again, we might not have chose an assignee
+ cur_assignee = choose_jira_assignee(issue, asf_jira)
+ # Check again, we might not have chosen an assignee
if cur_assignee is None:
cur_assignee = "NOT ASSIGNED!!!"
else:
|
[SPARK-<I>][PROJECT-INFRA] Bug fix in jira assigning Small bug fix from last pr, ran a successful merge with this code.
|
py
|
diff --git a/salt/modules/zfs.py b/salt/modules/zfs.py
index <HASH>..<HASH> 100644
--- a/salt/modules/zfs.py
+++ b/salt/modules/zfs.py
@@ -87,7 +87,8 @@ def _make_function( cmd_name ):
ret = { }
# Run the command.
- res = salt_cmd.run_all( "%s %s %s" % ( _check_zfs( ), cmd_name, args ) )
+ #TODO - add arguments into this.
+ res = salt_cmd.run_all( "%s %s" % ( _check_zfs( ), cmd_name ) )
# Make a note of the error in the return object if retcode
# not 0.
|
Isolated the issue - *args.
|
py
|
diff --git a/wily/__main__.py b/wily/__main__.py
index <HASH>..<HASH> 100644
--- a/wily/__main__.py
+++ b/wily/__main__.py
@@ -29,6 +29,7 @@ from wily.operators import resolve_operators
"-p",
"--path",
type=click.Path(resolve_path=True),
+ default=".",
help="Root path to the project folder to scan",
)
@click.pass_context
|
default the path to cwd, which should fix the relative paths bug in builds
|
py
|
diff --git a/polyaxon_cli/cli/dashboard.py b/polyaxon_cli/cli/dashboard.py
index <HASH>..<HASH> 100644
--- a/polyaxon_cli/cli/dashboard.py
+++ b/polyaxon_cli/cli/dashboard.py
@@ -18,7 +18,7 @@ from polyaxon_cli.utils.clients import PolyaxonClients
@clean_outputs
def dashboard(yes, url):
"""Open dashboard in browser."""
- dashboard_url = "{}".format(PolyaxonClients().auth.http_host)
+ dashboard_url = "{}/app".format(PolyaxonClients().auth.http_host)
if url:
click.echo(dashboard_url)
sys.exit(0)
|
Update dashboard command to send directly to dashboard
|
py
|
diff --git a/sfsimodels/output.py b/sfsimodels/output.py
index <HASH>..<HASH> 100644
--- a/sfsimodels/output.py
+++ b/sfsimodels/output.py
@@ -54,7 +54,7 @@ def format_value(value):
def add_table_ends(para, oformat='latex', caption="caption-text", label="table"):
fpara = ""
if oformat == 'latex':
- fpara += "\\begin{table}\n"
+ fpara += "\\begin{table}[H]\n"
fpara += "\\centering\n"
fpara += "\\begin{tabular}{cc}\n"
fpara += "\\toprule\n"
|
locked position of table in latex output.
|
py
|
diff --git a/settings.py b/settings.py
index <HASH>..<HASH> 100644
--- a/settings.py
+++ b/settings.py
@@ -30,4 +30,12 @@ class Settings(object):
#This determines whether the program will erase the pre-zipped output
#directory once it finishes zipping it to ePub.
#It is generally good to leave as True. You can always unzip the ePub.
- self.cleanup = True
\ No newline at end of file
+ self.cleanup = True
+
+ #This determines the location of the base_epub directory, which is the
+ #reference directory copied to instantiate the epub hierarchy
+ self.base_epub = os.path.join('resources', 'base_epub')
+
+ #This determines the location of the base css file, which is copied to
+ #the ePub's css directory
+ self.css_location = os.path.join('resources', 'text.css')
\ No newline at end of file
|
Added new settings, base_epub for the basic ePub Hierarchy location, and css_location for the location of the css file to be used
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -2,7 +2,7 @@ from setuptools import setup, find_packages
setup(
name='XBee',
- version='2.2.3',
+ version='2.2.4',
description='Python tools for working with XBee radios',
long_description=open('README.rst').read(),
url='https://github.com/nioinnovation/python-xbee',
|
Correcting setup.py version to <I>
|
py
|
diff --git a/src/toolchains/cc.py b/src/toolchains/cc.py
index <HASH>..<HASH> 100644
--- a/src/toolchains/cc.py
+++ b/src/toolchains/cc.py
@@ -1,4 +1,4 @@
-import os.path
+import os
from collections import Iterable
from node import Node
@@ -21,6 +21,10 @@ def _strlistify(thing):
return (str(i) for i in _listify(thing))
class CcCompiler(object):
+ def __init__(self):
+ self._cc_name = os.getenv('CC', 'cc')
+ self._cxx_name = os.getenv('CXX', 'c++')
+
def command_name(self, lang):
if not isinstance(lang, basestring):
is_cxx = any(i == 'c++' for i in lang)
@@ -28,9 +32,9 @@ class CcCompiler(object):
is_cxx = lang == 'c++'
if is_cxx:
- return ('c++', 'cxx')
+ return (self._cxx_name, 'cxx')
else:
- return ('cc', 'cc')
+ return (self._cc_name, 'cc')
def compile_command(self, cmd, input, output, dep=None, prevars=None,
postvars=None):
|
Support setting the compiler name via CC/CXX
|
py
|
diff --git a/bananas/admin/api/serializers.py b/bananas/admin/api/serializers.py
index <HASH>..<HASH> 100644
--- a/bananas/admin/api/serializers.py
+++ b/bananas/admin/api/serializers.py
@@ -1,4 +1,4 @@
-from django.contrib.auth import password_validation
+from django.contrib.auth.password_validation import password_validators_help_texts
from django.utils.translation import ugettext_lazy as _
from rest_framework import serializers
@@ -12,7 +12,7 @@ class PasswordChangeSerializer(serializers.Serializer):
old_password = serializers.CharField(label=_("Old password"), write_only=True)
new_password1 = serializers.CharField(
label=_("New password"),
- help_text=password_validation.password_validators_help_text_html(),
+ help_text=password_validators_help_texts(),
write_only=True,
)
new_password2 = serializers.CharField(
|
Use plain password help text instead of html
|
py
|
diff --git a/fontbakery-check-ttf.py b/fontbakery-check-ttf.py
index <HASH>..<HASH> 100755
--- a/fontbakery-check-ttf.py
+++ b/fontbakery-check-ttf.py
@@ -1593,7 +1593,7 @@ def main():
continue
designers.append(row[0].decode('utf-8'))
if family.designer not in designers:
- logging.error(("METADATA.pb: Designer '{}' is not listed in profiles.csv" +\
+ logging.error(("METADATA.pb: Designer '{}' is not listed in profiles.csv" +
" (at '{}')").format(family.designer, PROFILES_GIT_URL))
else:
logging.info("OK: Found designer '{}' at profiles.csv".format(family.designer))
|
fix flake8: E<I> the backslash is redundant between brackets
|
py
|
diff --git a/ssbio/databases/pdb.py b/ssbio/databases/pdb.py
index <HASH>..<HASH> 100644
--- a/ssbio/databases/pdb.py
+++ b/ssbio/databases/pdb.py
@@ -100,8 +100,8 @@ def parse_mmcif_header(infile):
chemicals_filtered = utils.filter_list_by_indices(mmdict['_chem_comp.id'],
utils.not_find(mmdict['_chem_comp.type'],
chemical_types_exclude,
- case_sensitive=False))
- chemicals_fitered = utils.filter_list(chemicals_filtered, chemical_ids_exclude, case_sensitive=False)
+ case_sensitive=True))
+ chemicals_fitered = utils.filter_list(chemicals_filtered, chemical_ids_exclude, case_sensitive=True)
newdict['chemicals'] = chemicals_fitered
else:
log.debug('{}: No chemical composition field'.format(infile))
|
Change chemicals list in mmCIF parser to not ignore case
|
py
|
diff --git a/pylast/__init__.py b/pylast/__init__.py
index <HASH>..<HASH> 100644
--- a/pylast/__init__.py
+++ b/pylast/__init__.py
@@ -1018,11 +1018,11 @@ class SessionKeyGenerator(object):
token = self._get_web_auth_token()
- url = "%(homepage)s/api/auth/?api_key=%(api)s&token=%(token)s" % {
- "homepage": self.network.homepage,
- "api": self.network.api_key,
- "token": token,
- }
+ url = "{homepage}/api/auth/?api_key={api}&token={token}".format(
+ homepage=self.network.homepage,
+ api=self.network.api_key,
+ token=token,
+ )
self.web_auth_tokens[url] = token
|
Upgrade Python syntax with pyupgrade
|
py
|
diff --git a/ivoire/standalone.py b/ivoire/standalone.py
index <HASH>..<HASH> 100644
--- a/ivoire/standalone.py
+++ b/ivoire/standalone.py
@@ -37,6 +37,11 @@ class Example(TestCase):
self.__name = name
def __enter__(self):
+ """
+ Run the example.
+
+ """
+
self.__result.startTest(self)
if self.__before is not None:
@@ -50,6 +55,10 @@ class Example(TestCase):
return self
def __exit__(self, exc_type, exc_value, traceback):
+ """
+ Finish running the example, logging any raised exceptions as results.
+
+ """
if exc_type is None:
self.__result.addSuccess(self)
elif exc_type == KeyboardInterrupt:
@@ -85,6 +94,11 @@ class Example(TestCase):
return self.__group
def skip_if(self, condition, reason):
+ """
+ Skip the example if the condition is set, with the provided reason.
+
+ """
+
if condition:
raise SkipTest(reason)
@@ -104,6 +118,11 @@ class ExampleGroup(object):
self.examples = []
def __enter__(self):
+ """
+ Begin running the group.
+
+ """
+
return self
def __exit__(self, exc_type, exc_value, traceback):
|
Somewhat useless docstrings.
|
py
|
diff --git a/mbuild/tests/test_lammpsdata.py b/mbuild/tests/test_lammpsdata.py
index <HASH>..<HASH> 100755
--- a/mbuild/tests/test_lammpsdata.py
+++ b/mbuild/tests/test_lammpsdata.py
@@ -43,7 +43,7 @@ class TestLammpsData(BaseTest):
assert np.allclose(
np.asarray(line.split(), dtype=float),
[1, 1, 0.066, 3.5])
- line = fi.readline().partition('#')[0]
+ line = fi.readline().partition('#')[0]
assert np.allclose(
np.asarray(line.split(), dtype=float),
[1, 2, 2.1, 1.06907846])
|
Debugging Fixing indent.
|
py
|
diff --git a/iotile_ext_cloud/test/test_utils.py b/iotile_ext_cloud/test/test_utils.py
index <HASH>..<HASH> 100644
--- a/iotile_ext_cloud/test/test_utils.py
+++ b/iotile_ext_cloud/test/test_utils.py
@@ -1,6 +1,6 @@
import pytest
from iotile.core.exceptions import ArgumentError
-from iotile.cloud.utilities import device_slug_to_id
+from iotile.cloud.utilities import device_slug_to_id, device_id_to_slug
def test_device_slug_to_id():
@@ -20,3 +20,16 @@ def test_device_slug_to_id():
with pytest.raises(ArgumentError):
device_slug_to_id(0x100)
+
+def test_device_id_to_slug():
+ assert device_id_to_slug('0x10') == 'd--0000-0000-0000-0010'
+ assert device_id_to_slug('1234 aBcD 5678 Ef90') == 'd--1234-abcd-5678-ef90'
+
+ with pytest.raises(ArgumentError):
+ device_id_to_slug('12345678901234567')
+
+ with pytest.raises(ArgumentError):
+ device_id_to_slug('non hexa chars')
+
+ with pytest.raises(ArgumentError):
+ device_slug_to_id(1234)
|
unit tests for device_id_to_slug
|
py
|
diff --git a/test/test_steps.py b/test/test_steps.py
index <HASH>..<HASH> 100644
--- a/test/test_steps.py
+++ b/test/test_steps.py
@@ -10,11 +10,11 @@ def plot():
y3 = np.array([1, 2, 1, 4, 2])
y4 = np.array([1, 2, 1, 4, 2])
- plt.step(x, y1, 'r-')
- plt.step(x, y2, 'b--', where='pre')
- plt.step(x, y3, 'g-.', where='post')
- plt.step(x, y4, 'y:', where='mid')
- plt.legend(['default', 'pre', 'post', 'mid'])
+ plt.step(x, y1, "r-")
+ plt.step(x, y2, "b--", where="pre")
+ plt.step(x, y3, "g-.", where="post")
+ plt.step(x, y4, "y:", where="mid")
+ plt.legend(["default", "pre", "post", "mid"])
return plt.gcf()
|
used black to change ' to "
|
py
|
diff --git a/aiogram/dispatcher/webhook.py b/aiogram/dispatcher/webhook.py
index <HASH>..<HASH> 100644
--- a/aiogram/dispatcher/webhook.py
+++ b/aiogram/dispatcher/webhook.py
@@ -1,4 +1,6 @@
import asyncio
+import itertools
+
import asyncio.tasks
import datetime
import functools
@@ -165,7 +167,7 @@ class WebhookRequestHandler(web.View):
timeout_handle = loop.call_later(RESPONSE_TIMEOUT, asyncio.tasks._release_waiter, waiter)
cb = functools.partial(asyncio.tasks._release_waiter, waiter)
- fut = asyncio.ensure_future(dispatcher.process_update(update), loop=loop)
+ fut = asyncio.ensure_future(dispatcher.updates_handler.notify(update), loop=loop)
fut.add_done_callback(cb)
try:
@@ -219,7 +221,7 @@ class WebhookRequestHandler(web.View):
"""
if results is None:
return None
- for result in results:
+ for result in itertools.chain.from_iterable(results):
if isinstance(result, BaseResponse):
return result
|
Pass update from webhook to updates handler instead of `Dispatcher.process_update`
|
py
|
diff --git a/src/Exscriptd/Dispatcher.py b/src/Exscriptd/Dispatcher.py
index <HASH>..<HASH> 100644
--- a/src/Exscriptd/Dispatcher.py
+++ b/src/Exscriptd/Dispatcher.py
@@ -163,6 +163,10 @@ class Dispatcher(object):
remaining = self.order_db.count_tasks(order_id = order.id,
closed = None)
if remaining == 0:
+ total = self.order_db.count_tasks(order_id = order.id)
+ if total == 1:
+ task = self.order_db.get_task(order_id = order.id)
+ order.set_description(task.get_name())
order.close()
self.set_order_status(order, 'completed')
for logger in self.loggers.pop(order.get_id(), []):
|
Orders that have only a single task inherit the description of the task.
|
py
|
diff --git a/tofu/tests/tests01_geom/tests03_core.py b/tofu/tests/tests01_geom/tests03_core.py
index <HASH>..<HASH> 100644
--- a/tofu/tests/tests01_geom/tests03_core.py
+++ b/tofu/tests/tests01_geom/tests03_core.py
@@ -842,7 +842,7 @@ class Test03_Rays(object):
if t is not None:
E = E[np.newaxis,:]*t
return E
- def ffT(Pts, t=None, Vect=None):
+ def ffT(Pts, t=None, vect=None):
E = np.exp(-(np.hypot(Pts[0,:],Pts[1,:])-2.4)**2/0.1
- Pts[2,:]**2/0.1)
if Vect is not None:
|
[coordshift] another bug in test core
|
py
|
diff --git a/cmsplugin_zinnia/models.py b/cmsplugin_zinnia/models.py
index <HASH>..<HASH> 100644
--- a/cmsplugin_zinnia/models.py
+++ b/cmsplugin_zinnia/models.py
@@ -6,7 +6,7 @@ from django.db.models.signals import post_delete
from django.utils.translation import ugettext_lazy as _
from tagging.models import Tag
-from cms.models import CMSPlugin
+from cms.models.pluginmodel import CMSPlugin
from menus.menu_pool import menu_pool
from zinnia.models import Entry
|
Updated models.py for Django-CMS <I>+ The location of CMSPlugin changed in <I> and has been updated accordingly.
|
py
|
diff --git a/django_extensions/management/commands/sync_media_s3.py b/django_extensions/management/commands/sync_media_s3.py
index <HASH>..<HASH> 100644
--- a/django_extensions/management/commands/sync_media_s3.py
+++ b/django_extensions/management/commands/sync_media_s3.py
@@ -155,7 +155,7 @@ class Command(BaseCommand):
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
- zbuf = cStringIO.StringIO()
+ zbuf = StringIO()
zfile = gzip.GzipFile(mode='wb', compresslevel=6, fileobj=zbuf)
zfile.write(s)
zfile.close()
|
fix StringIO call, missed removing cStringIO from call after import (thanks jonlesser for reporting this!)
|
py
|
diff --git a/bambi/priors.py b/bambi/priors.py
index <HASH>..<HASH> 100644
--- a/bambi/priors.py
+++ b/bambi/priors.py
@@ -59,6 +59,11 @@ class Prior(object):
Args:
kwargs (dict): Optional keyword arguments to add to prior args.
'''
+ # Backends expect numpy arrays, so make sure all numeric values are
+ # represented as such.
+ for k, v in kwargs.items():
+ kwargs = {k: (np.array(v) if isinstance(v, (int, float)) else v)
+ for k, v in kwargs.items()}
self.args.update(kwargs)
|
fix data type issue that breaks everything when naming custom priors
|
py
|
diff --git a/test/common/http_admin.py b/test/common/http_admin.py
index <HASH>..<HASH> 100644
--- a/test/common/http_admin.py
+++ b/test/common/http_admin.py
@@ -259,7 +259,7 @@ class InternalServer(Server):
def kill(self):
assert self.running
self.instance.send_signal(signal.SIGINT)
- self.instance.wait()
+ self._wait()
self.running = False
def recover(self, join = None):
@@ -274,9 +274,16 @@ class InternalServer(Server):
def __del__(self):
self.instance.send_signal(signal.SIGINT)
- self.instance.wait()
+ self._wait()
shutil.rmtree(self.db_dir)
+ def _wait(self):
+ start_time = time.time()
+ while time.time() - start_time < 15 and self.instance.poll() is None:
+ time.sleep(1)
+ if self.instance.poll() is None:
+ self.instance.terminate()
+
def __str__(self):
return "Internal" + Server.__str__(self) + ", args:" + str(self.args_without_join)
|
Don't lock up if server locks up.
|
py
|
diff --git a/andes/models/distributed.py b/andes/models/distributed.py
index <HASH>..<HASH> 100644
--- a/andes/models/distributed.py
+++ b/andes/models/distributed.py
@@ -539,7 +539,7 @@ class ESD1Model(PVD1Model):
self.LT = LessThan(self.Ipoutcalc_y, 0.0)
# --- Add integrator. Assume that state-of-charge is the initial condition ---
- self.pIG = Integrator(u='-LT_z1*(v * Ipoutcalc_y)*EtaC - LT_z0*(v * Ipoutcalc_y)/EtaD',
+ self.pIG = Integrator(u='-LT_z1*(v * Ipoutcalc_y)*EtaC-LT_z0*(v * Ipoutcalc_y)/EtaD',
T=self.Tf, K='SOCinit - 3600 / En / sys_mva', y0=self.SOCinit,
check_init=False,
)
|
Fixed formatting issue for "make documentation"
|
py
|
diff --git a/cwltool/process.py b/cwltool/process.py
index <HASH>..<HASH> 100644
--- a/cwltool/process.py
+++ b/cwltool/process.py
@@ -70,7 +70,7 @@ class Process(object):
if "type" not in c:
raise validate.ValidationException("Missing `type` in parameter `%s`" % c["name"])
- if "default" in c and "null" not in c["type"]:
+ if "default" in c and isinstance(c["type"], list) and "null" not in c["type"]:
c["type"] = ["null"] + aslist(c["type"])
else:
c["type"] = c["type"]
|
Checking if c is a list
|
py
|
diff --git a/src/transformers/configuration_utils.py b/src/transformers/configuration_utils.py
index <HASH>..<HASH> 100755
--- a/src/transformers/configuration_utils.py
+++ b/src/transformers/configuration_utils.py
@@ -195,7 +195,6 @@ class PretrainedConfig(object):
self.pad_token_id = kwargs.pop("pad_token_id", None)
self.eos_token_id = kwargs.pop("eos_token_id", None)
self.decoder_start_token_id = kwargs.pop("decoder_start_token_id", None)
- self.chunk_size_feed_forward = kwargs.pop("chunk_size_feed_forwar", 0)
# task specific arguments
self.task_specific_params = kwargs.pop("task_specific_params", None)
|
delete reinit (#<I>)
|
py
|
diff --git a/discord/client.py b/discord/client.py
index <HASH>..<HASH> 100644
--- a/discord/client.py
+++ b/discord/client.py
@@ -425,6 +425,8 @@ class Client:
if self.is_closed():
return
+ self._closed.set()
+
for voice in list(self.voice_clients):
try:
yield from voice.disconnect()
@@ -439,7 +441,6 @@ class Client:
yield from self.http.close()
- self._closed.set()
self._ready.clear()
@asyncio.coroutine
|
Set closed state before actually finishing cleaning up.
|
py
|
diff --git a/hotdoc/extensions/gst/gst_extension.py b/hotdoc/extensions/gst/gst_extension.py
index <HASH>..<HASH> 100644
--- a/hotdoc/extensions/gst/gst_extension.py
+++ b/hotdoc/extensions/gst/gst_extension.py
@@ -574,6 +574,8 @@ class GstExtension(Extension):
index.symbol_names.add(sym.unique_name)
if self.unique_feature:
index.comment = self.app.database.get_comment("element-" + self.unique_feature)
+ else:
+ index.comment = self.get_plugin_comment()
return smart_pages
page = smart_pages.get(self.list_plugins_page)
@@ -581,7 +583,6 @@ class GstExtension(Extension):
page.extension_name = self.extension_name
page.symbol_names.add(self.__plugins.unique_name)
- page.comment = self.get_plugin_comment()
self.__plugins.plugins = self.__all_plugins_symbols
return smart_pages
|
gst_extension: fix plugin comments
|
py
|
diff --git a/test/gdb_test.py b/test/gdb_test.py
index <HASH>..<HASH> 100644
--- a/test/gdb_test.py
+++ b/test/gdb_test.py
@@ -27,6 +27,8 @@ import os
import json
import sys
from subprocess import Popen, STDOUT, PIPE
+import argparse
+import logging
from pyOCD.tools.gdb_server import GDBServerTool
from pyOCD.board import MbedBoard
@@ -155,4 +157,9 @@ def test_gdb(board_id=None):
return result
if __name__ == "__main__":
+ parser = argparse.ArgumentParser(description='pyOCD gdb test')
+ parser.add_argument('-d', '--debug', action="store_true", help='Enable debug logging')
+ args = parser.parse_args()
+ level = logging.DEBUG if args.debug else logging.INFO
+ logging.basicConfig(level=level)
test_gdb()
|
Enable logging and -d option when running gdb_test.py directly.
|
py
|
diff --git a/tests/test_client_channels.py b/tests/test_client_channels.py
index <HASH>..<HASH> 100644
--- a/tests/test_client_channels.py
+++ b/tests/test_client_channels.py
@@ -19,3 +19,13 @@ def test_channel_destruction(server, client):
client._create_channel('#pydle')
client._destroy_channel('#pydle')
assert '#pydle' not in client.channels
+
+@with_client()
+def test_channel_user_destruction(server, client):
+ client._create_channel('#pydle')
+ client._create_user('WiZ')
+ client.channels['#pydle']['users'].add('WiZ')
+
+ client._destroy_channel('#pydle')
+ assert '#pydle' not in client.channels
+ assert 'WiZ' not in client.users
|
tests: Add user destruction tests on leaving channel.
|
py
|
diff --git a/c7n/resources/sagemaker.py b/c7n/resources/sagemaker.py
index <HASH>..<HASH> 100644
--- a/c7n/resources/sagemaker.py
+++ b/c7n/resources/sagemaker.py
@@ -9,6 +9,7 @@ from c7n.utils import local_session, type_schema
from c7n.tags import RemoveTag, Tag, TagActionFilter, TagDelayedAction
from c7n.filters.vpc import SubnetFilter, SecurityGroupFilter
from c7n.filters.kms import KmsRelatedFilter
+from c7n.filters.offhours import OffHour, OnHour
@resources.register('sagemaker-notebook')
@@ -43,6 +44,8 @@ class NotebookInstance(QueryResourceManager):
NotebookInstance.filter_registry.register('marked-for-op', TagActionFilter)
+NotebookInstance.filter_registry.register('offhour', OffHour)
+NotebookInstance.filter_registry.register('onhour', OnHour)
@resources.register('sagemaker-job')
|
aws - sagemaker-notebook - add offhour/onhour filter support (#<I>)
|
py
|
diff --git a/src/mimerender.py b/src/mimerender.py
index <HASH>..<HASH> 100644
--- a/src/mimerender.py
+++ b/src/mimerender.py
@@ -330,11 +330,7 @@ try:
del flask.request.environ[key]
def _make_response(self, content, headers, status):
- response = flask.make_response(content)
- response.status = status
- for k, v in headers:
- response.headers[k] = v
- return response
+ return flask.make_response(content, status, headers)
except ImportError:
pass
|
Removes custom response code from Flask implementation Uses Flask's built-in `make_response()` function to return a response object. This fixes a bug that occurred when a Flask view function returns a status code as an integer (like `<I>`) as opposed to a string (like `'<I> OK'`).
|
py
|
diff --git a/pymdstat/pymdstat.py b/pymdstat/pymdstat.py
index <HASH>..<HASH> 100644
--- a/pymdstat/pymdstat.py
+++ b/pymdstat/pymdstat.py
@@ -6,6 +6,7 @@
#
# Copyright (C) 2014 Nicolargo <nicolas@nicolargo.com>
+import sys
from functools import reduce
from re import split
@@ -77,10 +78,14 @@ class MdStat(object):
'''Return a dict of stats'''
ret = {}
- # Read the mdstat file
- with open(self.get_path(), 'r') as f:
- # lines is a list of line (with \n)
- lines = f.readlines()
+ # Read the mdstat file, if it exists, exit otherwise.
+ try:
+ with open(self.get_path(), 'r') as f:
+ # lines is a list of line (with \n)
+ lines = f.readlines()
+ except (OSError, IOError) as err:
+ print("Failed to open '{0}': {1}".format(self.get_path(), err))
+ sys.exit(1)
# First line: get the personalities
# The "Personalities" line tells you what RAID level the kernel currently supports.
|
Handle I/O error if /proc/mdstat doesn't exist
|
py
|
diff --git a/mapi_client.py b/mapi_client.py
index <HASH>..<HASH> 100644
--- a/mapi_client.py
+++ b/mapi_client.py
@@ -2,6 +2,7 @@ from requests import Request, Session
import json
from validation import validate_input
import logging
+import traceback
class mAPIClient(object):
@@ -49,7 +50,10 @@ class mAPIClient(object):
resp = self.session.send(self.session.prepare_request(req))
if resp.status_code / 100 is not 2:
- resp.raise_for_status()
+ try:
+ resp.raise_for_status()
+ except:
+ traceback.print_stack()
return resp
def _depaginate(self, url):
|
make thin Python client show stack trace on exception
|
py
|
diff --git a/src/foremast/configs/outputs.py b/src/foremast/configs/outputs.py
index <HASH>..<HASH> 100644
--- a/src/foremast/configs/outputs.py
+++ b/src/foremast/configs/outputs.py
@@ -88,7 +88,9 @@ def write_variables(app_configs=None, out_file='', git_short=''):
rendered_configs = json.loads(
get_template('configs/configs.json.j2', env=env, app=generated.app_name(), profile=instance_profile))
json_configs[env] = dict(DeepChainMap(configs, rendered_configs))
- for region in json_configs[env]['regions']:
+ region_list = configs['regions']
+ json_configs[env]['regions'] = region_list # removes regions defined in templates but not configs.
+ for region in region_list:
region_config = json_configs[env][region]
json_configs[env][region] = dict(DeepChainMap(region_config, rendered_configs))
else:
|
overrides regions in templates with regions in configs
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -14,6 +14,7 @@ setup(name='fgivenx',
url='https://github.com/williamjameshandley/fgivenx',
packages=['fgivenx'],
install_requires=['numpy','matplotlib','scipy','joblib','tqdm'],
+ tests_require=['pytest'],
license='MIT',
classifiers=[
'Development Status :: 4 - Beta',
|
adding tests_require=['pytest'] requirement
|
py
|
diff --git a/openstack_dashboard/policy.py b/openstack_dashboard/policy.py
index <HASH>..<HASH> 100644
--- a/openstack_dashboard/policy.py
+++ b/openstack_dashboard/policy.py
@@ -90,7 +90,7 @@ def check(actions, request, target={}):
:param target: dictionary representing the object of the action
for object creation this should be a dictionary
representing the location of the object e.g.
- {'tenant_id': object.tenant_id}
+ {'project_id': object.project_id}
:returns: boolean if the user has permission or not for the actions.
"""
user = auth_utils.get_user(request)
|
Updated tenant_id to project_id in policy.py Comments were referring to tenant_id where code says project_id Closes-Bug: #<I> Change-Id: I<I>f6c<I>f<I>fe<I>ca2c<I>b7c<I>b<I>fb4abc8
|
py
|
diff --git a/superset/utils/date_parser.py b/superset/utils/date_parser.py
index <HASH>..<HASH> 100644
--- a/superset/utils/date_parser.py
+++ b/superset/utils/date_parser.py
@@ -32,6 +32,7 @@ from pyparsing import (
Group,
Optional as ppOptional,
ParseException,
+ ParserElement,
ParseResults,
pyparsing_common,
quotedString,
@@ -40,6 +41,8 @@ from pyparsing import (
from .core import memoized
+ParserElement.enablePackrat()
+
logger = logging.getLogger(__name__)
@@ -375,7 +378,7 @@ class EvalHolidayFunc: # pylint: disable=too-few-public-methods
raise ValueError(_("Unable to find such a holiday: [{}]").format(holiday))
-@memoized()
+@memoized
def datetime_parser() -> ParseResults: # pylint: disable=too-many-locals
( # pylint: disable=invalid-name
DATETIME,
|
fix(timepicker): make pyparsing thread safe (#<I>) * fix: make pyparsing thread safe * remove parenthesis for decorator
|
py
|
diff --git a/tests/test_dates.py b/tests/test_dates.py
index <HASH>..<HASH> 100644
--- a/tests/test_dates.py
+++ b/tests/test_dates.py
@@ -15,6 +15,21 @@ def test_date_rounding():
assert chart.date == '1996-08-03'
+def test_previous_next():
+ """Checks that the date, previousDate, and nextDate attributes are parsed
+ from the HTML, not computed. Specifically, we shouldn't assume charts are
+ always published seven days apart, since (as this example demonstrates)
+ this is not true.
+ """
+ chart = billboard.ChartData('hot-100', date='1962-01-06')
+ assert chart.date == '1962-01-06'
+ assert chart.previousDate == '1961-12-25'
+
+ chart = billboard.ChartData('hot-100', date='1961-12-25')
+ assert chart.date == '1961-12-25'
+ assert chart.nextDate == '1962-01-06'
+
+
def test_datetime_date():
"""Checks that ChartData correctly handles datetime objects as the
date parameter.
|
Test that date attributes are parsed, not computed Currently failing. I brought this example up in #<I>.
|
py
|
diff --git a/src/onelogin/saml2/auth.py b/src/onelogin/saml2/auth.py
index <HASH>..<HASH> 100644
--- a/src/onelogin/saml2/auth.py
+++ b/src/onelogin/saml2/auth.py
@@ -433,7 +433,7 @@ class OneLogin_Saml2_Auth(object):
OneLogin_Saml2_Error.SP_CERTS_NOT_FOUND
)
- xmlsec.initialize()
+ xmlsec.initialize('openssl')
dsig_ctx = xmlsec.DSigCtx()
dsig_ctx.signKey = xmlsec.Key.loadMemory(key, xmlsec.KeyDataFormatPem, None)
|
Changing xmlsec.initialize
|
py
|
diff --git a/salt/pillar/__init__.py b/salt/pillar/__init__.py
index <HASH>..<HASH> 100644
--- a/salt/pillar/__init__.py
+++ b/salt/pillar/__init__.py
@@ -257,7 +257,7 @@ class RemotePillar(RemotePillarMixin):
return ret_pillar
def destroy(self):
- if self._closing:
+ if hasattr(self, '_closing') and self._closing:
return
self._closing = True
|
Ensure _closing exists This was raising an ignored exception because _closing was set later in __init__. Not sure if it must be the last line in __init__.
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -35,6 +35,7 @@ setup(
license='MIT',
py_modules=['pyi'],
zip_safe=False,
+ python_requires=">=3.6",
install_requires=['flake8 >= 3.2.1', 'attrs'],
test_suite='tests.test_pyi',
classifiers=[
|
Be explicit in setup.py about Python <I>+
|
py
|
diff --git a/tests/unit/utils/WorkspaceTest.py b/tests/unit/utils/WorkspaceTest.py
index <HASH>..<HASH> 100644
--- a/tests/unit/utils/WorkspaceTest.py
+++ b/tests/unit/utils/WorkspaceTest.py
@@ -31,7 +31,7 @@ class WorkspaceTest:
def test_assign_project(self):
proj = self.ws.new_project()
with pytest.raises(Exception):
- self.ws[proj.name] = proj
+ self.ws.new_project(name=proj.name)
old_name = proj.name
new_name = self.ws._gen_name()
self.ws[new_name] = proj
|
BUG: updated a unit test -> allowing re-assigning a project to Workspace using the old name
|
py
|
diff --git a/prepare.py b/prepare.py
index <HASH>..<HASH> 100644
--- a/prepare.py
+++ b/prepare.py
@@ -1,6 +1,17 @@
#!/usr/bin/env python
import os, sys
+def get_credentials():
+ "Returns login and password stored in secret.txt"
+ while not check_secret():
+ pass
+
+ with open("secret.txt", "r") as f:
+ login = f.readline().strip()
+ password = f.readline().strip()
+
+ return login, password
+
def check_secret():
while True:
if os.path.exists("secret.txt"):
|
add get_credentials function which will check all the preparations and return login and password stored in secter.txt file
|
py
|
diff --git a/mapmyfitness/__init__.py b/mapmyfitness/__init__.py
index <HASH>..<HASH> 100644
--- a/mapmyfitness/__init__.py
+++ b/mapmyfitness/__init__.py
@@ -11,7 +11,7 @@ class MapMyFitness(object):
def __new__(cls, *args, **kwargs):
if not cls._instance:
- cls._instance = super(MapMyFitness, cls).__new__(cls, *args, **kwargs)
+ cls._instance = super(MapMyFitness, cls).__new__(cls)
return cls._instance
|
Trying to make python3 happy with the singleton's use of super.
|
py
|
diff --git a/pyqg/tests/test_particles.py b/pyqg/tests/test_particles.py
index <HASH>..<HASH> 100644
--- a/pyqg/tests/test_particles.py
+++ b/pyqg/tests/test_particles.py
@@ -1,14 +1,20 @@
from __future__ import print_function
from builtins import range
import unittest
+import pytest
import numpy as np
import pyqg
+missing_scipy=False
+try:
+ import scipy.ndimage
+except ImportError:
+ missing_scipy=True
+
def constant_velocity_function(u, v):
"""Return a function that returns a constant velocity field."""
return (lambda x, y: (u, v))
-
def solid_body_rotation_velocity_function(om):
"""Return a function that returns solid body rotation at angular
velocity om."""
@@ -73,8 +79,8 @@ class ParticleTester(unittest.TestCase):
atol=atol
)
+ @pytest.mark.skipif(missing_scipy, reason="requires scipy")
def test_interpolation(self, rtol=1e-14, atol=1e-7):
-
# set up grid
Lx = 10.
Ly = 5.
@@ -123,6 +129,7 @@ class ParticleTester(unittest.TestCase):
atol=1e-1
)
+ @pytest.mark.skipif(missing_scipy, reason="requires scipy")
def test_gridded_integration(self, atol=1e-10):
# set up grid
|
added skips to scipy tests
|
py
|
diff --git a/satpy/scene.py b/satpy/scene.py
index <HASH>..<HASH> 100644
--- a/satpy/scene.py
+++ b/satpy/scene.py
@@ -866,12 +866,12 @@ class Scene(MetadataObject):
new_datasets = {}
datasets = list(new_scn.datasets.values())
max_area = None
- if hasattr(destination_area, 'freeze'):
- try:
- max_area = new_scn.max_area()
- except ValueError:
- raise ValueError("No dataset areas available to freeze "
- "DynamicAreaDefinition.")
+
+ try:
+ max_area = new_scn.max_area()
+ except ValueError:
+ raise ValueError("No dataset areas available to freeze "
+ "DynamicAreaDefinition.")
destination_area = get_frozen_area(destination_area, max_area)
resamplers = {}
|
Fix freezing of areas before resampling even as strings
|
py
|
diff --git a/salt/states/service.py b/salt/states/service.py
index <HASH>..<HASH> 100644
--- a/salt/states/service.py
+++ b/salt/states/service.py
@@ -4,7 +4,21 @@ Starting or restarting of services and daemons
==============================================
Services are defined as system daemons typically started with system init or
-rc scripts. Services can be defined as running or dead.
+rc scripts. The service state uses whichever service module that is loaded on
+the minion with the virtualname of ``service``. Services can be defined as
+running or dead.
+
+If you need to know if your init system is supported, see the list of supported
+:mod:`service modules <salt.modules.service.py>` for your desired init system
+(systemd, sysvinit, launchctl, etc.).
+
+Note that Salt's service execution module, and therefore this service state,
+uses OS grains to ascertain which service module should be loaded and used to
+execute service functions. As existing distributions change init systems or
+new distributions are created, OS detection can sometimes be incomplete.
+If your service states are running into trouble with init system detection,
+please see the :ref:`Overriding Virtual Module Providers <module-provider-override>`
+section of Salt's module documentation to work around possible errors.
.. note::
The current status of a service is determined by the return code of the init/rc
|
Clarify service state opening docs - uses 'service' virtualname (#<I>) * Clarify service state opening docs - uses 'service' virtualname Fixes #<I> * Add a few more clarifications to service state docs And link to service execution modules list.
|
py
|
diff --git a/rednose/rednose.py b/rednose/rednose.py
index <HASH>..<HASH> 100644
--- a/rednose/rednose.py
+++ b/rednose/rednose.py
@@ -218,8 +218,11 @@ class RedNose(nose.plugins.Plugin):
path is returned surrounded by bold xterm escape sequences.
If path is not a child of the working directory, path is returned
"""
- here = os.path.abspath(os.path.realpath(os.getcwd()))
- fullpath = os.path.abspath(os.path.realpath(path))
+ try:
+ here = os.path.abspath(os.path.realpath(os.getcwd()))
+ fullpath = os.path.abspath(os.path.realpath(path))
+ except OSError:
+ return path
if fullpath.startswith(here):
return termstyle.bold(fullpath[len(here)+1:])
return path
|
ignore failures to figure out the current working directory
|
py
|
diff --git a/pytablewriter/style/_theme.py b/pytablewriter/style/_theme.py
index <HASH>..<HASH> 100644
--- a/pytablewriter/style/_theme.py
+++ b/pytablewriter/style/_theme.py
@@ -47,7 +47,12 @@ def load_ptw_plugins() -> Dict[str, Theme]:
logger.debug("discovered_plugins: {}".format(list(discovered_plugins)))
return {
- theme: Theme(plugin.style_filter, plugin.col_separator_style_filter) # type: ignore
+ theme: Theme(
+ plugin.style_filter if hasattr(plugin, "style_filter") else None, # type: ignore
+ plugin.col_separator_style_filter # type: ignore
+ if hasattr(plugin, "col_separator_style_filter")
+ else None,
+ )
for theme, plugin in discovered_plugins.items()
}
|
Fix plugin discovery to avoid errors when some of the functions not implemented
|
py
|
diff --git a/tests/flows/test_oidc-saml.py b/tests/flows/test_oidc-saml.py
index <HASH>..<HASH> 100644
--- a/tests/flows/test_oidc-saml.py
+++ b/tests/flows/test_oidc-saml.py
@@ -32,6 +32,7 @@ def oidc_frontend_config(signing_key_path, mongodb_instance):
"issuer": "https://proxy-op.example.com",
"signing_key_path": signing_key_path,
"provider": {"response_types_supported": ["id_token"]},
+ "client_db_uri": mongodb_instance.get_uri(), # use mongodb for integration testing
"db_uri": mongodb_instance.get_uri() # use mongodb for integration testing
}
}
|
Fix tests by setting client_db_uri
|
py
|
diff --git a/nudibranch/models.py b/nudibranch/models.py
index <HASH>..<HASH> 100644
--- a/nudibranch/models.py
+++ b/nudibranch/models.py
@@ -946,10 +946,19 @@ class User(UserMixin, BasicBase, Base):
.filter(UserToGroup.project == project)).first()
def make_submission(self, project):
- group_assoc = self.fetch_group_assoc(project)
- if not group_assoc:
- group_assoc = UserToGroup(group=Group(project=project),
- project=project, user=self)
+ group_assoc = None
+ while not group_assoc:
+ group_assoc = self.fetch_group_assoc(project)
+ if not group_assoc:
+ sp = transaction.savepoint()
+ try:
+ group_assoc = UserToGroup(group=Group(project=project),
+ project=project, user=self)
+ Session.add(group_assoc)
+ Session.flush()
+ except IntegrityError:
+ group_assoc = None
+ sp.rollback()
return Submission(created_by=self, group=group_assoc.group,
project=project)
|
Handle IntegrityError that may occur when assocaiting a User to a Group.
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -42,7 +42,7 @@ setup(
keywords='color colour terminal text ansi windows crossplatform xplatform',
author='Jonathan Hartley',
author_email='tartley@tartley.com',
- url='https://pypi.python.org/pypi/colorama',
+ url='https://github.com/tartley/colorama',
license='BSD',
packages=[NAME],
# see classifiers http://pypi.python.org/pypi?%3Aaction=list_classifiers
|
setup.py: Change url to github The GitHub URL is more useful than the PyPI URL, because this data is most often seen on PyPI.
|
py
|
diff --git a/apiritif/loadgen.py b/apiritif/loadgen.py
index <HASH>..<HASH> 100644
--- a/apiritif/loadgen.py
+++ b/apiritif/loadgen.py
@@ -408,7 +408,13 @@ class ApiritifPlugin(Plugin):
if not recording:
return samples_processed
- samples = self.apiritif_extractor.parse_recording(recording, sample)
+ try:
+ samples = self.apiritif_extractor.parse_recording(recording, sample)
+ except BaseException as exc:
+ log.debug("Couldn't parse recording: %s", traceback.format_exc())
+ log.warning("Couldn't parse recording: %s", exc)
+ samples = []
+
for sample in samples:
samples_processed += 1
self._process_sample(sample)
|
Fail-safe parse recording
|
py
|
diff --git a/include/base/import_hook.py b/include/base/import_hook.py
index <HASH>..<HASH> 100644
--- a/include/base/import_hook.py
+++ b/include/base/import_hook.py
@@ -81,3 +81,8 @@ class BaseIncludeLoader(object):
return self
else:
return None
+
+ def __repr__(self):
+ return '<%s.%s for path %r at 0x%x>' % (
+ self.__class__.__module__, self.__class__.__name__, self.module_prefix, id(self)
+ )
|
repr of hooks include module prefix
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -14,6 +14,7 @@ setup(
author_email='tigrawap@gmail.com',
long_description=read('README.rst'),
packages=['pybabel_hbs'],
+ url="https://github.com/tigrawap/pybabel-hbs",
install_requires=[
'babel'
],
|
Github url added to setup script
|
py
|
diff --git a/python_modules/dagster/dagster/_core/utils.py b/python_modules/dagster/dagster/_core/utils.py
index <HASH>..<HASH> 100644
--- a/python_modules/dagster/dagster/_core/utils.py
+++ b/python_modules/dagster/dagster/_core/utils.py
@@ -2,7 +2,6 @@ import os
import random
import string
import uuid
-import warnings
from collections import OrderedDict
from typing import Tuple, Union, cast
|
fix lint (#<I>)
|
py
|
diff --git a/tests/test_dataset.py b/tests/test_dataset.py
index <HASH>..<HASH> 100644
--- a/tests/test_dataset.py
+++ b/tests/test_dataset.py
@@ -60,7 +60,8 @@ def test_meta():
ds = qpformat.load_data(path=tf, meta_data={"time": 47})
assert ds.get_time() == 47
- assert tf in ds.get_name()
+ # use `.name` because of short-hand paths on Windows
+ assert pathlib.Path(tf).name in ds.get_name()
def test_meta_none():
|
tests: arrg those short-hand paths on Windows
|
py
|
diff --git a/m.py b/m.py
index <HASH>..<HASH> 100755
--- a/m.py
+++ b/m.py
@@ -303,7 +303,7 @@ optional arguments:
>>> runE("play x.mkv") # doctest: +ELLIPSIS
Playing x.mkv ...
RUN false vlc --fullscreen --play-and-exit -- .../media/x.mkv
-Error: could not play file 'x.mkv': Command ... returned non-zero exit status 1.
+Error: could not play file 'x.mkv': Command ... returned non-zero exit status 1...
>>> VLCCMD[0] = "does-not-exist"
>>> runE("play x.mkv") # doctest: +ELLIPSIS
Playing x.mkv ...
@@ -314,7 +314,7 @@ Error: could not play file 'x.mkv': No such file or directory: 'does-not-exist'
>>> runE("play --mpv x.mkv") # doctest: +ELLIPSIS
Playing x.mkv ...
RUN false mpv --fullscreen -- .../media/x.mkv
-Error: could not play file 'x.mkv': Command ... returned non-zero exit status 1.
+Error: could not play file 'x.mkv': Command ... returned non-zero exit status 1...
>>> MPVCMD[0] = "does-not-exist"
>>> runE("play --mpv x.mkv") # doctest: +ELLIPSIS
Playing x.mkv ...
|
fix test for python<I> and pypy3 (small output difference)
|
py
|
diff --git a/salt/transport/__init__.py b/salt/transport/__init__.py
index <HASH>..<HASH> 100644
--- a/salt/transport/__init__.py
+++ b/salt/transport/__init__.py
@@ -101,7 +101,7 @@ class ZeroMQChannel(Channel):
self.ttype = 'zeromq'
# crypt defaults to 'aes'
- self.crypt = kwargs['crypt'] if 'crypt' in kwargs else 'aes'
+ self.crypt = kwargs.get('crypt', 'aes')
self.serial = salt.payload.Serial(opts)
if self.crypt != 'clear':
|
PEP8/PEP<I>: use kwargs.get instead of one line if/else I was poking around in the transport code thinking about an idea and noticed this simplification.
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -11,7 +11,7 @@ setuptools.setup(
description="Dividers in Python, the easy way! Multiple different divider looks.",
long_description=long_description,
long_description_content_type="text/markdown",
- url="https://rdil.github.io/area4",
+ url="https://github.com/RDIL/area4",
packages=setuptools.find_packages(),
classifiers=[
"Programming Language :: Python :: 3",
@@ -30,7 +30,7 @@ setuptools.setup(
],
project_urls={
"Bug Tracker": "https://github.com/RDIL/area4/issues",
- "Documentation": "https://github.com/RDIL/area4/blob/master/README.md#area4",
+ "Documentation": "https://area4.readthedocs.io/en/latest",
"Source Code": "https://github.com/RDIL/area4",
}
)
|
Update docs link to readthedocs.io
|
py
|
diff --git a/setup/setup.py b/setup/setup.py
index <HASH>..<HASH> 100644
--- a/setup/setup.py
+++ b/setup/setup.py
@@ -10,12 +10,17 @@ if not os.path.exists(os.path.join(os.getcwd(), 'data',
import sys
sys.exit()
+with open('README.rst', encoding='utf-8') as fobj:
+ LONG_DESCRIPTION = fobj.read()
+
setup(
name='brython',
- version='0.0.5',
+ version='0.0.7',
description='Brython is an implementation of Python 3 running in the browser',
+
+ long_description = LONG_DESCRIPTION,
# The project's main homepage.
url='http://brython.info',
|
In setup.py, add a long_description based on README.rst and change version to <I>
|
py
|
diff --git a/ipyrad/core/assembly.py b/ipyrad/core/assembly.py
index <HASH>..<HASH> 100644
--- a/ipyrad/core/assembly.py
+++ b/ipyrad/core/assembly.py
@@ -1181,8 +1181,10 @@ class Assembly(object):
if diff:
msg = """
The following Samples do not appear to have been clustered in step6
- (i.e., they are not in {}). Check for typos in Sample names, or try
- running step6 including the selected samples.
+ (i.e., they are not in {}).
+ Check for typos in Sample names, or try running step6 including the
+ selected samples.
+
Missing: {}
""".format(self.database, ", ".join(list(diff)))
## The the old way that failed unless all samples were
@@ -1201,8 +1203,8 @@ class Assembly(object):
## i.e. only proceed with the samples that are actually
## present in the db
samples = [x for x in samples if x.name not in diff]
- print(" Excluding these samples from final output:\n"\
- + "\n {}\n".format(", ".join(list(diff))))
+ print(" Excluding these samples from final output: {}"\
+ .format(", ".join(list(diff))))
except (IOError, ValueError):
raise IPyradError("""
Database file {} not found. First run step6
|
Cosmetic changes to step 7 interaction if samples are missing from db
|
py
|
diff --git a/holodeck/environments.py b/holodeck/environments.py
index <HASH>..<HASH> 100755
--- a/holodeck/environments.py
+++ b/holodeck/environments.py
@@ -406,6 +406,14 @@ class HolodeckEnvironment(object):
self._world_process.wait(5)
self._client.unlink()
+ # Context manager APIs, allows `with` statement to be used
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ # TODO: Surpress exceptions?
+ self.__on_exit__()
+
def _get_single_state(self):
reward = None
terminal = None
|
HolodeckEnvironment: implement context manager APIs This allows the `with` statement to be used on a HolodeckEnvironment object, eg ``` with holodeck.make("world") as env: env.tick() # env is cleaned up ``` Implements #<I>
|
py
|
diff --git a/gpiozero/input_devices.py b/gpiozero/input_devices.py
index <HASH>..<HASH> 100644
--- a/gpiozero/input_devices.py
+++ b/gpiozero/input_devices.py
@@ -397,15 +397,15 @@ class MotionSensor(SmoothedInputDevice):
:param bool pull_up:
If ``False`` (the default), the GPIO pin will be pulled low by default.
- In this case, connect the other side of the button to 3V3. If
+ In this case, connect the other side of the sensor to 3V3. If
``True``, the GPIO pin will be pulled high by default. In this case,
- connect the other side of the button to ground.
+ connect the other side of the sensor to ground.
"""
def __init__(
self, pin=None, queue_len=1, sample_rate=10, threshold=0.5,
partial=False, pull_up=False):
super(MotionSensor, self).__init__(
- pin, pull_up, threshold=threshold,
+ pin, pull_up=pull_up, threshold=threshold,
queue_len=queue_len, sample_wait=1 / sample_rate, partial=partial
)
try:
|
Fixed docstring and pull_up
|
py
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.