diff
stringlengths 139
3.65k
| message
stringlengths 8
627
| diff_languages
stringclasses 1
value |
|---|---|---|
diff --git a/conf/ldap_schemas.py b/conf/ldap_schemas.py
index <HASH>..<HASH> 100644
--- a/conf/ldap_schemas.py
+++ b/conf/ldap_schemas.py
@@ -45,7 +45,7 @@ class account(
class Meta:
base_dn_setting = "LDAP_ACCOUNT_BASE"
object_classes = set([ 'top' ])
- search_classes = set([ 'posixAccount' ])
+ search_classes = set([ 'person' ])
pk = 'uid'
managed_by = tldap.manager.ManyToOneDescriptor(this_key='manager', linked_cls='karaage.datastores.ldap_schemas.account', linked_key='dn')
|
Detect all accounts. Some accounts are people, for legacy reasons. Make sure we detect them. Required for uprades to work properly, in particularly we need to be able to tell if the account is locked or not. Change-Id: I7e<I>d<I>ee2d<I>db<I>ae<I>
|
py
|
diff --git a/mistletoe/core_tokens.py b/mistletoe/core_tokens.py
index <HASH>..<HASH> 100644
--- a/mistletoe/core_tokens.py
+++ b/mistletoe/core_tokens.py
@@ -199,7 +199,7 @@ def match_link_dest(string, offset):
else:
escaped = False
count = 1
- for i, c in enumerate(string[offset+1:], start=offset+1):
+ for i, c in enumerate(string[offset:], start=offset):
if c == '\\':
escaped = True
elif c in whitespace:
|
fixed: match_link_dest skips char when not in angles
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -3,7 +3,7 @@ from setuptools import find_packages, setup
setup(
name='ActionCableZwei',
- version='0.1.1',
+ version='0.1.2',
license='MIT',
description='Action Cable Client for Python 3',
author='Tobias Feistmantl',
|
Update version number to <I>
|
py
|
diff --git a/steenzout/sphinx/__init__.py b/steenzout/sphinx/__init__.py
index <HASH>..<HASH> 100644
--- a/steenzout/sphinx/__init__.py
+++ b/steenzout/sphinx/__init__.py
@@ -21,6 +21,7 @@ from steenzout.object import Object
class ResourceGenerator(Object):
+ """Class to generate Sphinx resources."""
def __init__(self, organization, package):
self.name = organization
@@ -34,7 +35,7 @@ class ResourceGenerator(Object):
metadata=self.metadata,
package=self.package)
- def generate_makefile(self):
+ def makefile(self):
return self.env.get_template('Makefile.j2').render(
metadata=self.metadata,
package=self.package)
|
added ResourceGenerator docstring.
|
py
|
diff --git a/gcloud/bigtable/test_table.py b/gcloud/bigtable/test_table.py
index <HASH>..<HASH> 100644
--- a/gcloud/bigtable/test_table.py
+++ b/gcloud/bigtable/test_table.py
@@ -281,11 +281,11 @@ class TestTable(unittest2.TestCase):
self.assertEqual(mock_created,
[(table.name, self.ROW_KEY, filter_obj)])
- def test_read_empty_row(self):
+ def test_read_row_miss(self):
chunks = []
self._read_row_helper(chunks, None)
- def test_read_row(self):
+ def test_read_row_complete(self):
from gcloud.bigtable.row_data import Cell
from gcloud.bigtable.row_data import PartialRowData
|
Rename testcases for clarity.
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -58,7 +58,7 @@ data_files=[
('gimmemotifs/score_dists', ['score_dists/total_wic_mean_score_dist.txt']),
('gimmemotifs/genes', ['genes/hg18.bed', 'genes/hg19.bed', 'genes/xenTro2.bed', 'genes/mm9.bed']),
('gimmemotifs/bg', ['bg/hg19.MotifSampler.bg', 'bg/hg18.MotifSampler.bg', 'bg/mm9.MotifSampler.bg', 'bg/xenTro2.MotifSampler.bg']),
- ('gimmemotifs/motif_databases', ['motif_databases/jaspar.pfm']),
+ ('gimmemotifs/motif_databases', ['motif_databases/*.pfm', 'motif_databases/*.pwm']),
('gimmemotifs/doc', ['doc/gimmemotifs_manual.pdf','doc/gimmemotifs_manual.html']),
('gimmemotifs/examples', ['examples/TAp73alpha.bed','examples/TAp73alpha.fa']),
('gimmemotifs/genome_index', ['genome_index/README.txt'])
|
Cop all pwm/pfm files on setup
|
py
|
diff --git a/pywal/colors.py b/pywal/colors.py
index <HASH>..<HASH> 100644
--- a/pywal/colors.py
+++ b/pywal/colors.py
@@ -87,9 +87,11 @@ def cache_fname(img, backend, light, cache_dir, sat=""):
"""Create the cache file name."""
color_type = "light" if light else "dark"
file_name = re.sub("[/|\\|.]", "_", img)
+ file_size = os.path.getsize(img)
- file_parts = [file_name, color_type, backend, sat, __cache_version__]
- return [cache_dir, "schemes", "%s_%s_%s_%s_%s.json" % (*file_parts,)]
+ file_parts = [file_name, color_type, backend,
+ sat, file_size, __cache_version__]
+ return [cache_dir, "schemes", "%s_%s_%s_%s_%s_%s.json" % (*file_parts,)]
def get_backend(backend):
|
general: Add file size to caching to avoid collisions. Closes #<I>
|
py
|
diff --git a/pycm/pycm_util.py b/pycm/pycm_util.py
index <HASH>..<HASH> 100644
--- a/pycm/pycm_util.py
+++ b/pycm/pycm_util.py
@@ -243,11 +243,9 @@ def statistic_recommend(classes, P):
:type P : dict
:return: recommendation_list as list
"""
- recommendation_list = []
if imbalance_check(P):
- recommendation_list.extend(IMBALANCED_RECOMMEND)
+ return IMBALANCED_RECOMMEND
elif binary_check(classes):
- recommendation_list.extend(BINARY_RECOMMEND)
+ return BINARY_RECOMMEND
else:
- recommendation_list.extend(MULTICLASS_RECOMMEND)
- return list(set(recommendation_list))
+ return MULTICLASS_RECOMMEND
|
fix : minor bug in statistic_recommend fixed
|
py
|
diff --git a/nipap-cli/nipap_cli/nipap_cli.py b/nipap-cli/nipap_cli/nipap_cli.py
index <HASH>..<HASH> 100755
--- a/nipap-cli/nipap_cli/nipap_cli.py
+++ b/nipap-cli/nipap_cli/nipap_cli.py
@@ -82,9 +82,9 @@ def get_vrf(arg = None, opts = None, abort = False):
else:
vrf_rt = arg
- if vrf_rt == 'none':
+ if vrf_rt.lower() == 'none':
vrf = VRF()
- elif vrf_rt == 'all':
+ elif vrf_rt.lower() == 'all':
vrf = VRF()
vrf.rt = 'all'
else:
|
Make vrf_rt case-insensitive Hmm, could this be dangerous in that two VRFs could have the "same" name but in upper / lower cases!? Fixes #<I>.
|
py
|
diff --git a/dipper/sources/FlyBase.py b/dipper/sources/FlyBase.py
index <HASH>..<HASH> 100644
--- a/dipper/sources/FlyBase.py
+++ b/dipper/sources/FlyBase.py
@@ -1480,7 +1480,7 @@ class FlyBase(PostgreSQLSource):
if did == feature_id:
continue
dlabel = self.label_hash.get(did)
- if re.search(r'FB(gn|og)', feature_id):
+ if re.search(r'FB(gn|og|pp)', feature_id):
# only want to add equivalences for fly things
if not re.match(r'OMIM', did):
# these are only omim diseases, not genes;
|
we do not seem to like 'FBpp'
|
py
|
diff --git a/lib/svtplay_dl/service/__init__.py b/lib/svtplay_dl/service/__init__.py
index <HASH>..<HASH> 100644
--- a/lib/svtplay_dl/service/__init__.py
+++ b/lib/svtplay_dl/service/__init__.py
@@ -131,6 +131,12 @@ class Generic(object):
for i in sites:
if i.handles(url):
return url, i(url)
+ match = re.search(r"iframe src='(http://www.svtplay[^']*)'", data)
+ if match:
+ url = match.group(1)
+ for i in sites:
+ if i.handles(url):
+ return url, i(url)
return url, stream
|
generic: detect embeded svtplay streams
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -6,7 +6,7 @@ with open('requirements.txt') as f:
setup(
name='microclient',
- version='0.0.3',
+ version='0.1.0',
author=u'Tangent Solutions',
author_email='admin@tangentsolutions.co.za',
packages=['microclient'],
|
Bumps version to <I> (first beta release)
|
py
|
diff --git a/phypno/widgets/traces.py b/phypno/widgets/traces.py
index <HASH>..<HASH> 100644
--- a/phypno/widgets/traces.py
+++ b/phypno/widgets/traces.py
@@ -335,4 +335,6 @@ def _select_channels(data, channels):
chan_list = list(data.axis['chan'][0])
idx_chan = [chan_list.index(i_chan) for i_chan in channels]
data.data[0] = data.data[0][idx_chan, :]
+ data.axis['chan'][0] = asarray(channels)
+
return data
|
same as commit before, but fix the fact that the list of channels was not updated
|
py
|
diff --git a/arguments/__init__.py b/arguments/__init__.py
index <HASH>..<HASH> 100644
--- a/arguments/__init__.py
+++ b/arguments/__init__.py
@@ -30,7 +30,7 @@ import zipfile
from os.path import exists, expanduser
from consoleprinter import console, console_warning, handle_ex, consoledict, get_print_yaml, remove_extra_indentation, snake_case, bar
-DEBUGMODE = False
+DEBUGMODE = True
class SchemaError(Exception):
@@ -455,7 +455,7 @@ def delete_directory(dirpath, excluded_file_names):
fp = os.path.join(rootpath, f)
for exname in excluded_file_names:
- if not fp.endswith(exname):
+ if not os.path.basename(fp)==exname:
if os.path.exists(fp):
os.remove(fp)
@@ -467,10 +467,12 @@ def delete_directory(dirpath, excluded_file_names):
dirpaths.sort(key=lambda x: len(x.split("/")))
dirpaths.reverse()
+
for rootpath in dirpaths:
if dirpath != rootpath:
if os.path.exists(rootpath):
- os.rmdir(rootpath)
+ os.removedirs(rootpath)
+
return len(list(os.walk(dirpath)))
|
foobar Friday <I> March <I> (week:<I> day:<I>), <I>:<I>:<I>
|
py
|
diff --git a/satpy/scene.py b/satpy/scene.py
index <HASH>..<HASH> 100644
--- a/satpy/scene.py
+++ b/satpy/scene.py
@@ -141,16 +141,20 @@ class Scene(InfoObject):
# out
if not self.info.get("sensor"):
self.info["sensor"] = sensors
+ # overwrite the request start/end times with actual loaded data limits
+ if reader_instances:
+ self.info['start_time'] = min(x.start_time for x in self.readers.values())
+ self.info['end_time'] = max(x.end_time for x in self.readers.values())
@property
def start_time(self):
"""Return the start time of the file."""
- return min(x.start_time for x in self.readers.values())
+ return self.info['start_time']
@property
def end_time(self):
"""Return the end time of the file."""
- return max(x.end_time for x in self.readers.values())
+ return self.info['end_time']
def available_dataset_ids(self, reader_name=None, composites=False):
"""Get names of available datasets, globally or just for *reader_name*
|
Fix start_time/end_time properties on Scene object after resampling These properties were dependent on scn.readers which doesn't exist after resampling creates a new "copy" of the original Scene. Now these values are part of the metadata in .info and set on init.
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -14,5 +14,18 @@ setup(
'console_scripts': [
'bofhexcuse=bofhexcuse:main'
]
- }
+ },
+ classifiers=[
+ 'Development Status :: 3 - Alpha',
+ 'Environment :: Console',
+ 'Programming Language:: Python:: 2',
+ 'Programming Language:: Python:: 2.6',
+ 'Programming Language:: Python:: 2.7',
+ 'Programming Language:: Python:: 3',
+ 'Programming Language:: Python:: 3.2',
+ 'Programming Language:: Python:: 3.3',
+ 'Programming Language:: Python:: 3.4',
+ 'Programming Language:: Python:: 3.5',
+ 'Topic :: Games/Entertainment :: Fortune Cookies'
+ ]
)
|
Added classifiers to setup.py
|
py
|
diff --git a/etrago/cluster/disaggregation.py b/etrago/cluster/disaggregation.py
index <HASH>..<HASH> 100644
--- a/etrago/cluster/disaggregation.py
+++ b/etrago/cluster/disaggregation.py
@@ -397,7 +397,8 @@ class UniformDisaggregation(Disaggregation):
loc = ()
index = p_nom_times_p_max_pu.index
- for s in series:
+ # for s in series:
+ for s in ['p']:
for bus_id in index:
# TODO: Check whether series multiplication works as
# expected.
|
Temporarily switch back to only handling 'p' The generalization doesn't work yet. Until I can fix this, this is the easiest way to switch back.
|
py
|
diff --git a/pook/__init__.py b/pook/__init__.py
index <HASH>..<HASH> 100644
--- a/pook/__init__.py
+++ b/pook/__init__.py
@@ -9,4 +9,4 @@ __author__ = 'Tomas Aparicio'
__license__ = 'MIT'
# Current version
-__version__ = '0.1.14'
+__version__ = '0.2.0'
|
feat(version): bump to <I>
|
py
|
diff --git a/openquake/risk/job/probabilistic.py b/openquake/risk/job/probabilistic.py
index <HASH>..<HASH> 100644
--- a/openquake/risk/job/probabilistic.py
+++ b/openquake/risk/job/probabilistic.py
@@ -206,12 +206,13 @@ class ProbabilisticEventMixin(): # pylint: disable=W0232,W0201
point.column, point.row, asset, gmf_slice, loss_ratios)
aggregate_curve.append(loss_ratios * asset["assetValue"])
+ conditional_loss_poes = self._conditional_loss_poes()
- if loss_ratio_curve is not None:
+ if loss_ratio_curve is not None and conditional_loss_poes:
loss_curve = self.compute_loss_curve(
point.column, point.row, loss_ratio_curve, asset)
- for loss_poe in self._conditional_loss_poes():
+ for loss_poe in conditional_loss_poes:
self.compute_conditional_loss(point.column, point.row,
loss_curve, asset, loss_poe)
|
Short-circuit the computation if there are no POEs defined. Former-commit-id: <I>e<I>a<I>f<I>c6c<I>fe<I>c<I>b<I>eb9
|
py
|
diff --git a/convertbng/util.py b/convertbng/util.py
index <HASH>..<HASH> 100644
--- a/convertbng/util.py
+++ b/convertbng/util.py
@@ -39,7 +39,7 @@ else:
ext = "so"
__author__ = u"Stephan Hügel"
-__version__ = "0.1.12"
+__version__ = "0.1.13"
# hacky: http://stackoverflow.com/a/30789980/416626
@@ -86,7 +86,7 @@ class FFIArray(Structure):
def void_array_to_tuple_list(array, _func, _args):
return cast(array.data, POINTER(FFITuple * array.len))[0]
-convert_vec = lib.convert_vec_c
+convert_vec = lib.convert_vec_c_threaded
convert_vec.argtypes = (FFIArray, FFIArray)
convert_vec.restype = FFIArray
convert_vec.errcheck = void_array_to_tuple_list
|
Use threaded version of Rust function
|
py
|
diff --git a/src/sp_test/base.py b/src/sp_test/base.py
index <HASH>..<HASH> 100644
--- a/src/sp_test/base.py
+++ b/src/sp_test/base.py
@@ -315,13 +315,19 @@ class Conversation():
Un-solicited starts with the IDP sending something.
"""
if len(flow) >= 3:
+ logger.info("TEST FLOW: Start by GET-ing the page")
self.wb_send()
+ logger.info("TEST FLOW: Continuing with: %s" % flow[0].__name__)
self.intermit(flow[0]._interaction)
+ logger.info("TEST FLOW: Handling redirect")
self.handle_redirect()
+ logger.info("TEST FLOW: Sending IdP Response with expected request %s and response to be used %s" % flow[1].__name__, flow[2].__name__)
self.send_idp_response(flow[1], flow[2])
if len(flow) == 4:
+ logger.info("TEST FLOW Handling result with HTTP Response check for %s" % flow[3].__name__)
self.handle_result(flow[3])
else:
+ logger.info("TEST FLOW: Handling result (without HTTP Response check)")
self.handle_result()
def do_sequence(self, oper, tests=None):
|
Added logging for each step of the sequence's flow so it's easier to understand where something goes wrong if a test fails.
|
py
|
diff --git a/djangoplugins/models.py b/djangoplugins/models.py
index <HASH>..<HASH> 100644
--- a/djangoplugins/models.py
+++ b/djangoplugins/models.py
@@ -109,8 +109,8 @@ class Plugin(DirtyFieldsMixin, models.Model):
def save(self, *args, **kwargs):
if "status" in self.get_dirty_fields().keys() and self.pk:
if self.status in STATUS_CHOICES_ENABLED:
- django_plugin_enabled.send_safe(plugin=self.get_plugin())
+ django_plugin_enabled.send(sender=self.__class__, plugin=self.get_plugin())
else:
- django_plugin_disabled.send_safe(plugin=self.get_plugin())
+ django_plugin_disabled.send(sender=self.__class__, plugin=self.get_plugin())
return super(Plugin, self).save(*args, **kwargs)
|
Signal.send instead of "send_safe"
|
py
|
diff --git a/analyzers/Fortiguard/urlcategory.py b/analyzers/Fortiguard/urlcategory.py
index <HASH>..<HASH> 100755
--- a/analyzers/Fortiguard/urlcategory.py
+++ b/analyzers/Fortiguard/urlcategory.py
@@ -15,7 +15,7 @@ class URLCategoryAnalyzer(Analyzer):
if 'category' in raw:
r = raw.get('category')
- value = "\"{}\"".format(r)
+ value = "{}".format(r)
if r in self.get_param('config.malicious_categories', []):
level = "malicious"
elif r in self.get_param('config.suspicious_categories', []):
|
<I> #<I> fix/remove " in short report
|
py
|
diff --git a/pymc/gp/cov.py b/pymc/gp/cov.py
index <HASH>..<HASH> 100644
--- a/pymc/gp/cov.py
+++ b/pymc/gp/cov.py
@@ -526,7 +526,7 @@ class Exponential(Stationary):
.. math::
- k(x, x') = \mathrm{exp}\left[ -\frac{||x - x'||}{2\ell^2} \right]
+ k(x, x') = \mathrm{exp}\left[ -\frac{||x - x'||}{2\ell} \right]
"""
def full(self, X, Xs=None):
|
Updated ExpQuad docstring (#<I>) * Updated ExpQuad docstring * Changed fix to correct docstring
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -69,7 +69,6 @@ _CLASSIFIERS = ["Development Status :: 4 - Beta",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
- "Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
|
change: remove python <I> from the supported platform list
|
py
|
diff --git a/salt/states/pkg.py b/salt/states/pkg.py
index <HASH>..<HASH> 100644
--- a/salt/states/pkg.py
+++ b/salt/states/pkg.py
@@ -744,6 +744,8 @@ def installed(
for i in not_modified_hold:
comment.append(i['comment'])
+ result = True
+
if failed:
if sources:
summary = ', '.join(failed)
@@ -752,6 +754,7 @@ def installed(
for x in failed])
comment.insert(0, 'The following packages failed to '
'install/update: {0}.'.format(summary))
+ result = False
if failed_hold:
for i in failed_hold:
@@ -764,7 +767,7 @@ def installed(
else:
return {'name': name,
'changes': changes,
- 'result': True,
+ 'result': result,
'comment': ' '.join(comment)}
|
Don't return 'True' for failing package installations.
|
py
|
diff --git a/looper/models.py b/looper/models.py
index <HASH>..<HASH> 100644
--- a/looper/models.py
+++ b/looper/models.py
@@ -202,7 +202,7 @@ class AttributeDict(MutableMapping):
format(key, self.__dict__[key].keys()))
elif value is not None or \
key not in self.__dict__ or self.__dict__["_force_nulls"]:
- self._LOGGER.debug("Setting '{}' to {}".format(key, value))
+ _LOGGER.debug("Setting '{}' to {}".format(key, value))
self.__dict__[key] = value
else:
self._log_(logging.DEBUG,
|
logger's not an instance attr
|
py
|
diff --git a/build.py b/build.py
index <HASH>..<HASH> 100755
--- a/build.py
+++ b/build.py
@@ -410,8 +410,10 @@ virtual('plovr', PLOVR_JAR)
@target(PLOVR_JAR, clean=False)
def plovr_jar(t):
+ t.info('downloading %r', t.name)
t.download('https://plovr.googlecode.com/files/' +
os.path.basename(PLOVR_JAR), md5=PLOVR_JAR_MD5)
+ t.info('downloaded %r', t.name)
@target('gh-pages', 'host-examples', 'doc', phony=True)
|
Add logging messages to indicate when a download is in progress
|
py
|
diff --git a/PyFunceble/config/loader.py b/PyFunceble/config/loader.py
index <HASH>..<HASH> 100644
--- a/PyFunceble/config/loader.py
+++ b/PyFunceble/config/loader.py
@@ -290,7 +290,7 @@ class ConfigLoader:
config = self.dict_helper.from_yaml_file(self.path_to_config)
else:
- config = copy.deepcopy(PyFunceble.storage.CONFIGURATION)
+ config = copy.deepcopy(PyFunceble.storage.CONFIGURATION.to_dict())
if self.merge_upstream or is_3_x_version(
config
|
Ensure that the configuration is a dictionnary instead of a box. This patch fixes #<I>. Indeed, before this patch, if we are savin Boxes, the generated YAML files becomes incorrect. That leaded to the issue #<I>. This patch fix it by ensuring that the read configuration is in dict format.
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100755
--- a/setup.py
+++ b/setup.py
@@ -147,7 +147,11 @@ def main():
keywords='telegram api chat client library messaging mtproto',
packages=find_packages(exclude=[
'telethon_generator', 'telethon_tests', 'run_tests.py',
- 'try_telethon.py'
+ 'try_telethon.py',
+ 'telethon_generator/parser/__init__.py',
+ 'telethon_generator/parser/source_builder.py',
+ 'telethon_generator/parser/tl_object.py',
+ 'telethon_generator/parser/tl_parser.py',
]),
install_requires=['pyaes', 'rsa'],
extras_require={
|
Fix telethon_generator/ package not being excluded from PyPi
|
py
|
diff --git a/tests/test_kitsune.py b/tests/test_kitsune.py
index <HASH>..<HASH> 100644
--- a/tests/test_kitsune.py
+++ b/tests/test_kitsune.py
@@ -242,7 +242,8 @@ class TestKitsuneBackendArchive(TestCaseBackendArchive):
def setUp(self):
super().setUp()
- self.backend = Kitsune(KITSUNE_SERVER_URL, archive=self.archive)
+ self.backend_write_archive = Kitsune(KITSUNE_SERVER_URL, archive=self.archive)
+ self.backend_read_archive = Kitsune(KITSUNE_SERVER_URL, archive=self.archive)
@httpretty.activate
def test_fetch_from_archive(self):
|
[tests] Modify kitsune tests when fetching from archive This patch adds two different backend objects (one fetches data from remote a data source and the other one from an archive) in order to ensure that backend and method params are initialized in the same way independently from which method is called (fetch or fetch_from_archive)
|
py
|
diff --git a/datanommer.consumer/datanommer/consumer/__init__.py b/datanommer.consumer/datanommer/consumer/__init__.py
index <HASH>..<HASH> 100644
--- a/datanommer.consumer/datanommer/consumer/__init__.py
+++ b/datanommer.consumer/datanommer/consumer/__init__.py
@@ -34,6 +34,12 @@ class Nommer(fedmsg.consumers.FedmsgConsumer):
config_key = 'datanommer.enabled'
def __init__(self, hub):
+ # The superclass __init__() subscribes the hub to the topic specified
+ # by the consumer. If we have a topic we want use instead of "*", it
+ # needs to be set before calling the superclass.
+ if 'datanommer.topic' in hub.config:
+ self.topic = hub.config['datanommer.topic']
+
super(Nommer, self).__init__(hub)
# If fedmsg doesn't think we should be enabled, then we should quit
|
allow overriding the default "*" subscription There are cases where a client may not want to consume all messages (or may not have the permissions to do so). This change allows the default subscription to the "*" topic to be overridden via the config.
|
py
|
diff --git a/wafer/settings.py b/wafer/settings.py
index <HASH>..<HASH> 100644
--- a/wafer/settings.py
+++ b/wafer/settings.py
@@ -121,7 +121,7 @@ TEMPLATES = [
]
-MIDDLEWARE_CLASSES = (
+MIDDLEWARE = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
|
MIDDLEWARE_CLASSES was replaced with MIDDLEWARE
|
py
|
diff --git a/zengine/views/crud.py b/zengine/views/crud.py
index <HASH>..<HASH> 100644
--- a/zengine/views/crud.py
+++ b/zengine/views/crud.py
@@ -547,11 +547,20 @@ class CrudView(BaseView):
"""
actions = []
- if self.Meta.object_actions:
- for perm, action in self.Meta.object_actions.items():
- permission = "%s.%s" % (self.object.__class__.__name__, perm)
- if self.current.has_permission(permission):
- actions.append(action)
+ # If override actions for the model is defined, then only show those actions
+ override_actions = getattr(self.object.Meta, 'crud_override_actions', None)
+ if override_actions is not None:
+ actions = override_actions
+ else:
+ # If override actions is not defined, show the actions defined on the view
+ if self.Meta.object_actions:
+ for perm, action in self.Meta.object_actions.items():
+ permission = "%s.%s" % (self.object.__class__.__name__, perm)
+ if self.current.has_permission(permission):
+ actions.append(action)
+ # If there are extra actions for the model, add them
+ extra_actions = getattr(self.object.Meta, 'crud_extra_actions', [])
+ actions.extend(extra_actions)
result = {'key': obj.key, 'fields': [], 'actions': actions}.copy()
for method in self.FILTER_METHODS:
method(self, obj, result)
|
Make crud list actions overridable/extendable Crud views automatically get delete and edit actions from models, which are shown when listing the objects. This commit makes it possible to override these actions, or add new ones to extend the existing functionality. This is done by adding `crud_override_actions` and `crud_extra_actions` attributes to the Meta class of the models.
|
py
|
diff --git a/thermo/utils.py b/thermo/utils.py
index <HASH>..<HASH> 100644
--- a/thermo/utils.py
+++ b/thermo/utils.py
@@ -2142,9 +2142,6 @@ class TDependentProperty(object):
raise ValueError("The given method is not available for this chemical")
self.T_cached = None
self._method = method
- extrapolation = getattr(self, '_extrapolation', None)
- if extrapolation is not None and method is not None:
- self._load_extrapolation_coeffs(method)
def valid_methods(self, T=None):
r'''Method to obtain a sorted list of methods that have data
@@ -3197,7 +3194,6 @@ class TDependentProperty(object):
self._extrapolation_low = self._extrapolation_high = self.extrapolations = None
return
self.extrapolation_split = '|' in extrapolation
-
if not self.extrapolation_split:
extrapolations = [extrapolation]
self._extrapolation_low = self._extrapolation_high = extrapolation
@@ -3209,8 +3205,6 @@ class TDependentProperty(object):
if extrapolations[0] == extrapolations[1]:
extrapolations.pop()
self.extrapolations = extrapolations
- method = getattr(self, '_method', None)
- if method is not None: self._load_extrapolation_coeffs(method)
def extrapolate(self, T, method, in_range='error'):
r'''Method to perform extrapolation on a given method according to the
|
prevent having to reload extrapolation coefficients each time method changes
|
py
|
diff --git a/salt/modules/boto3_route53.py b/salt/modules/boto3_route53.py
index <HASH>..<HASH> 100644
--- a/salt/modules/boto3_route53.py
+++ b/salt/modules/boto3_route53.py
@@ -128,6 +128,7 @@ def _wait_for_sync(change, conn, tries=10, sleep=20):
log.error('Timed out waiting for Route53 INSYNC status.')
return False
+
def find_hosted_zone(Id=None, Name=None, PrivateZone=None,
region=None, key=None, keyid=None, profile=None):
'''
@@ -719,6 +720,7 @@ def delete_hosted_zone_by_domain(Name, PrivateZone=None, region=None, key=None,
Id = zone[0]['HostedZone']['Id']
return delete_hosted_zone(Id=Id, region=region, key=key, keyid=keyid, profile=profile)
+
def aws_encode(x):
'''
An implementation of the encoding required to suport AWS's domain name
@@ -746,6 +748,7 @@ def aws_encode(x):
log.debug('AWS-encoded result for %s: %s', x, ret)
return ret
+
def _aws_encode_changebatch(o):
'''
helper method to process a change batch & encode the bits which need encoding.
|
Fix PEP8 E<I> lint issues
|
py
|
diff --git a/aldryn_apphook_reload/__init__.py b/aldryn_apphook_reload/__init__.py
index <HASH>..<HASH> 100644
--- a/aldryn_apphook_reload/__init__.py
+++ b/aldryn_apphook_reload/__init__.py
@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import
-__version__ = 'dev'
+__version__ = '0.1a1'
__author__ = 'Stefan Foulis'
__license__ = 'BSD'
__copyright__ = "Copyright 2014, Divio Aldryn Ltd"
|
bumps version to <I>a1
|
py
|
diff --git a/can/bus.py b/can/bus.py
index <HASH>..<HASH> 100644
--- a/can/bus.py
+++ b/can/bus.py
@@ -253,15 +253,15 @@ class BusABC(object):
if self._filters is None:
return True
- for filter in self._filters:
+ for _filter in self._filters:
# check if this filter even applies to the message
- if 'extended' in filter and \
- filter['extended'] != msg.is_extended_id:
+ if 'extended' in _filter and \
+ _filter['extended'] != msg.is_extended_id:
continue
# then check for the mask and id
- can_id = filter['can_id']
- can_mask = filter['can_mask']
+ can_id = _filter['can_id']
+ can_mask = _filter['can_mask']
# basically, we compute `msg.arbitration_id & can_mask == can_id & can_mask`
# by using the shorter, but equivalent from below:
|
Fix shadowing built-in name "filter"
|
py
|
diff --git a/console/beep.py b/console/beep.py
index <HASH>..<HASH> 100644
--- a/console/beep.py
+++ b/console/beep.py
@@ -63,6 +63,8 @@ elif os_name == 'posix': # Tron leotards
if __name__ == '__main__':
import sys
+ from time import sleep
+
from console import fg, fx, defx
if '-d' in sys.argv:
@@ -71,10 +73,11 @@ if __name__ == '__main__':
out.configure(level='debug')
except ImportError:
logging.basicConfig(level='DEBUG',
- format=('%(levelname)s '
+ format=('%(levelname)-7.7s '
f'{fx.dim}%(funcName)s:{fg.green}%(lineno)s{fg.default}{defx.dim}'
' %(message)s'),
)
log.debug('console version: %r', version)
beep()
+ sleep(.5)
|
add delay for -m beep to be heard
|
py
|
diff --git a/spyderlib/spyder.py b/spyderlib/spyder.py
index <HASH>..<HASH> 100644
--- a/spyderlib/spyder.py
+++ b/spyderlib/spyder.py
@@ -2646,6 +2646,11 @@ class MainWindow(QMainWindow):
# To avoid a traceback after closing on Windows
if e.args[0] == eintr:
continue
+ # handle a connection abort on close error
+ enotsock = (errno.WSAENOTSOCK if os.name == 'nt'
+ else errno.ENOTSOCK)
+ if e.args[0] in [errno.ECONNABORTED, enotsock]:
+ return
raise
fname = req.recv(1024)
if not self.light:
|
Handle connection abort error on shutdown Handle a connection abort error on shutdown Revert some ws changes Revert more ws changes Revert one more ws change Revert all ws changes Add shutdown handling on windows Another ws revert Make the error check cross platform Reinstate the Econnaborted check
|
py
|
diff --git a/pygerrit/client.py b/pygerrit/client.py
index <HASH>..<HASH> 100644
--- a/pygerrit/client.py
+++ b/pygerrit/client.py
@@ -76,10 +76,9 @@ class GerritClient(object):
data = decoder.decode(line)
except ValueError, err:
raise GerritError("Query returned invalid data: %s", err)
- if "type" in data:
- if data["type"] == "error":
- raise GerritError("Query error: %s" % data["message"])
- else:
+ if "type" in data and data["type"] == "error":
+ raise GerritError("Query error: %s" % data["message"])
+ elif "project" in data:
results.append(Change(data))
return results
|
Only add query result lines to returned data Only add JSON lines in the results if they contain "project". Otherwise the "rowCount" line, and anything else, will be included in the results as an empty Change object. Change-Id: Ia4de4ed<I>c8f5ba<I>f5e<I>dd<I>ff<I>b<I>b<I>
|
py
|
diff --git a/examples/areas_code.py b/examples/areas_code.py
index <HASH>..<HASH> 100755
--- a/examples/areas_code.py
+++ b/examples/areas_code.py
@@ -30,13 +30,13 @@ import configparser
import re
-import events
-
import certifi
-from enrich import FileType
+from enrich.enrich import FileType, ToUTF8
+
+from events.events import Git
-from filter import FilterRows
+from df_utils.filter import FilterRows
def ESConnection():
@@ -121,7 +121,7 @@ def analyze_git(es_read, es_write, es_read_index, es_write_index):
commits.append(item.to_dict())
if cont % 100 == 0:
- git_events = events.Git(commits)
+ git_events = Git(commits)
events_df = git_events.eventize(2)
# Filter information
|
Fix Areas of Code example to fit new directories structure Changes are focused on the new path for the methods used in both places: imports and in the code.
|
py
|
diff --git a/bigchaindb/common/schema/__init__.py b/bigchaindb/common/schema/__init__.py
index <HASH>..<HASH> 100644
--- a/bigchaindb/common/schema/__init__.py
+++ b/bigchaindb/common/schema/__init__.py
@@ -23,15 +23,20 @@ def _validate_schema(schema, body):
TX_SCHEMA_PATH, TX_SCHEMA = _load_schema('transaction')
-VOTE_SCHEMA_PATH, VOTE_SCHEMA = _load_schema('vote')
-def validate_transaction_schema(tx_body):
+def validate_transaction_schema(tx):
""" Validate a transaction dict """
- _validate_schema(TX_SCHEMA, tx_body)
+ _validate_schema(TX_SCHEMA, tx)
-def validate_vote_schema(tx_body):
- """ Validate a vote dict """
- _validate_schema(VOTE_SCHEMA, tx_body)
+VOTE_SCHEMA_PATH, VOTE_SCHEMA = _load_schema('vote')
+
+def validate_vote_schema(vote):
+ """ Validate a vote dict """
+ # A vote does not have an ID, but the database may add one.
+ if 'id' in vote:
+ vote = dict(vote)
+ del vote['id']
+ _validate_schema(VOTE_SCHEMA, vote)
|
refactor schema __init__ slightly
|
py
|
diff --git a/send_self.py b/send_self.py
index <HASH>..<HASH> 100644
--- a/send_self.py
+++ b/send_self.py
@@ -439,6 +439,7 @@ class WeakGeneratorWrapper(object):
:return bool:
Whether the generator has terminated.
"""
+ # TOCHECK relies on generator.gi_frame
# Equivalent to
# `inspect.getgeneratorstate(self.generator) == inspect.GEN_CLOSED`
gen = self.generator
@@ -450,6 +451,7 @@ class WeakGeneratorWrapper(object):
:return bool:
Whether the generator can be resumed.
"""
+ # TOCHECK relies on generator.gi_frame
# Equivalent to `inspect.getgeneratorstate(self.generator) in
# (inspect.GEN_CREATED, inspect.GEN_SUSPENDED)`,
# which is only available starting 3.2.
|
Mention reliance on undocumented behavior
|
py
|
diff --git a/tornado/httpclient.py b/tornado/httpclient.py
index <HASH>..<HASH> 100644
--- a/tornado/httpclient.py
+++ b/tornado/httpclient.py
@@ -228,12 +228,8 @@ class AsyncHTTPClient(Configurable):
if not isinstance(request, HTTPRequest):
request = HTTPRequest(url=request, **kwargs)
else:
- for k, v in kwargs.items():
- try:
- getattr(request, k)
- except Exception as e:
- raise ValueError('HTTPRequest get an unexcept kwags %s' % k)
- setattr(request, k, v)
+ if kwargs:
+ raise ValueError("kwargs can't be used if request is an HTTPRequest object")
# We may modify this (to add Host, Accept-Encoding, etc),
# so make sure we don't modify the caller's object. This is also
# where normal dicts get converted to HTTPHeaders objects.
|
raise ValueError if both a request object and **kwargs are supplied
|
py
|
diff --git a/cumulus/authentication.py b/cumulus/authentication.py
index <HASH>..<HASH> 100644
--- a/cumulus/authentication.py
+++ b/cumulus/authentication.py
@@ -127,5 +127,7 @@ class Auth(object):
"""
try:
return self.container.get_object(name)
- except pyrax.exceptions.NoSuchObject, swiftclient.exceptions.ClientException:
+ except pyrax.exceptions.NoSuchObject:
+ return None
+ except swiftclient.exceptions.ClientException:
return None
|
handle case when swiftclient is not installed
|
py
|
diff --git a/tests/surface_properties.py b/tests/surface_properties.py
index <HASH>..<HASH> 100755
--- a/tests/surface_properties.py
+++ b/tests/surface_properties.py
@@ -35,7 +35,10 @@ import numpy as np
import ase
import ase.io
-from ase.atoms import string2symbols
+try:
+ from ase.symbols import string2symbols
+except:
+ from ase.atoms import string2symbols
from atomistica import *
from atomistica.tests import test_surface_energies
|
MAINT: string2symbols has moved to ase.symbols
|
py
|
diff --git a/python_modules/dagster/dagster_tests/scheduler_tests/test_scheduler_run.py b/python_modules/dagster/dagster_tests/scheduler_tests/test_scheduler_run.py
index <HASH>..<HASH> 100644
--- a/python_modules/dagster/dagster_tests/scheduler_tests/test_scheduler_run.py
+++ b/python_modules/dagster/dagster_tests/scheduler_tests/test_scheduler_run.py
@@ -15,6 +15,7 @@ from dagster import (
pipeline,
repository,
schedule,
+ seven,
solid,
)
from dagster.core.definitions.job import RunRequest
@@ -1301,6 +1302,7 @@ def test_multi_runs_missing_run_key(external_repo_context, capfd):
)
+@pytest.mark.skipif(seven.IS_WINDOWS, reason="Cron doesn't work on windows")
def test_run_with_hanging_cron_schedules():
# Verify that the system will prompt you to wipe your schedules with the SystemCronScheduler
# before you can switch to DagsterDaemonScheduler
|
[easy] fix windows scheduler tests Summary: Cron is not a thing on windows Test Plan: BK + Azure Reviewers: johann, alangenfeld, prha Reviewed By: johann Differential Revision: <URL>
|
py
|
diff --git a/chess/gaviota.py b/chess/gaviota.py
index <HASH>..<HASH> 100644
--- a/chess/gaviota.py
+++ b/chess/gaviota.py
@@ -44,7 +44,7 @@ except ImportError:
LOGGER = logging.getLogger(__name__)
-NOSQUARE = 0
+NOSQUARE = 64
NOINDEX = -1
MAX_KKINDEX = 462
@@ -1831,7 +1831,7 @@ class PythonTablebases(object):
self.blackSquares, self.blackTypes = zip(*black)
side = 0 if (board.turn == chess.WHITE) else 1
self.realside = side
- self.epsq = board.ep_square if board.ep_square else 0
+ self.epsq = board.ep_square if board.ep_square else NOSQUARE
return self._Probe(side, self.epsq)
|
Set NOSQUARE to <I>
|
py
|
diff --git a/main.py b/main.py
index <HASH>..<HASH> 100644
--- a/main.py
+++ b/main.py
@@ -1,4 +1,11 @@
"""Temp file that lets you run the system
+
+Quick start instructions:
+1. Set up the ReplicaSet.
+2. Connect to the primary
+3. Run main.py
+4. Start adding documents to the primary. Confirm changes via Solr web interface.
+5. Delete docs to confirm deletion.
"""
from mongo_internal import Daemon
@@ -15,4 +22,4 @@ dt.start()
-
\ No newline at end of file
+
|
added brief instructions to main.py
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -11,7 +11,7 @@ from setuptools import setup
setup(
name='opp',
- version='1.2.0',
+ version='1.2.1',
description='Python wrapper for OPP',
author='PAY.ON',
author_email='opp@payon.com',
@@ -30,5 +30,5 @@ setup(
'Topic :: Software Development :: Libraries :: Python Modules',
],
install_requires=install_requires,
- download_url='https://github.com/OpenPaymentPlatform/python/tarball/1.2.0'
+ download_url='https://github.com/OpenPaymentPlatform/python/tarball/1.2.1'
)
|
bump point version in setup.py
|
py
|
diff --git a/tests/test_self.py b/tests/test_self.py
index <HASH>..<HASH> 100644
--- a/tests/test_self.py
+++ b/tests/test_self.py
@@ -563,10 +563,8 @@ class TestRunTC(object):
a.join("b.py").write(b_code)
a.join("c.py").write(c_code)
- curdir = os.getcwd()
- try:
+ with tmpdir.as_cwd():
# why don't we start pylint in a subprocess?
- os.chdir(str(tmpdir))
expected = (
"************* Module a.b\n"
"a/b.py:3:0: E0401: Unable to import 'a.d' (import-error)\n\n"
@@ -592,9 +590,6 @@ class TestRunTC(object):
expected_output=expected,
)
- finally:
- os.chdir(curdir)
-
def test_stdin_syntaxerror(self):
expected_output = (
"************* Module a\n"
|
Cleanup tmpdir-related code in test. py.path.as_cwd is a nice ctx manager that can be used in one test instead of a try/finally block.
|
py
|
diff --git a/umap/tests/test_umap.py b/umap/tests/test_umap.py
index <HASH>..<HASH> 100644
--- a/umap/tests/test_umap.py
+++ b/umap/tests/test_umap.py
@@ -501,7 +501,7 @@ def test_umap_transform_on_iris():
embedding = fitter.transform(new_data)
trust = trustworthiness(new_data, embedding, 10)
- assert_greater_equal(trust, 0.95, 'Insufficiently trustworthy transform for'
+ assert_greater_equal(trust, 0.89, 'Insufficiently trustworthy transform for'
'iris dataset: {}'.format(trust))
def test_multi_component_layout():
|
Leniency in test of transform.
|
py
|
diff --git a/holoviews/core/data.py b/holoviews/core/data.py
index <HASH>..<HASH> 100644
--- a/holoviews/core/data.py
+++ b/holoviews/core/data.py
@@ -244,6 +244,8 @@ class Columns(Element):
and a function to apply or a mapping between the dimensions and
functions to apply along each dimension.
"""
+ if any(dim in self.vdims for dim in dimensions):
+ raise Exception("Reduce cannot be applied to value dimensions")
reduce_dims, reduce_map = self._reduce_map(dimensions, function, reduce_map)
reduced = self
for reduce_fn, group in reduce_map:
@@ -261,6 +263,8 @@ class Columns(Element):
Aggregates over the supplied key dimensions with the defined
function.
"""
+ if function is None:
+ raise ValueError("The aggregate method requires a function to be specified")
if not isinstance(dimensions, list): dimensions = [dimensions]
if not dimensions: dimensions = self.kdims
aggregated = self.interface.aggregate(self, dimensions, function)
|
Improved input validation for reduce and aggregate methods
|
py
|
diff --git a/aiogram/types/chat.py b/aiogram/types/chat.py
index <HASH>..<HASH> 100644
--- a/aiogram/types/chat.py
+++ b/aiogram/types/chat.py
@@ -64,6 +64,19 @@ class Chat(base.TelegramObject):
if as_html:
return markdown.hlink(name, self.user_url)
return markdown.link(name, self.user_url)
+
+ async def get_url(self):
+ """
+ Use this method to get chat link.
+ Private chat returns user link.
+ Other chat types return either username link (if they are public) or invite link (if they are private).
+ :return: link
+ :rtype: :obj:`base.String`
+ """
+ if self.type == ChatType.PRIVATE:
+ return f"tg://user?id={self.id}"
+
+ return f'https://t.me/{self.username}' if self.username else await self.export_invite_link()
async def set_photo(self, photo):
"""
|
Add .get_url() method Use this method to get chat link. Private chat returns user link. Other chat types return either username link (if they are public) or invite link (if they are private).
|
py
|
diff --git a/tilequeue/process.py b/tilequeue/process.py
index <HASH>..<HASH> 100644
--- a/tilequeue/process.py
+++ b/tilequeue/process.py
@@ -81,9 +81,16 @@ def _postprocess_data(feature_layers, post_process_data,
tile_coord, unpadded_bounds, padded_bounds,
config_file_path, cache):
- for step in post_process_data:
+ for step_index, step in enumerate(post_process_data):
fn = loadClassPath(step['fn_name'])
+ # ensure a separate cache for each step, so that different
+ # post-process steps can't tread on each others' caches.
+ step_cache = cache.get(step_index)
+ if step_cache is None:
+ step_cache = dict()
+ cache[step_index] = step_cache
+
ctx = Context(
feature_layers=feature_layers,
tile_coord=tile_coord,
@@ -91,7 +98,7 @@ def _postprocess_data(feature_layers, post_process_data,
padded_bounds=padded_bounds,
config_file_path=config_file_path,
params=step['params'],
- cache=cache)
+ cache=step_cache)
layer = fn(ctx)
feature_layers = ctx.feature_layers
|
Have a different cache for each post-process step.
|
py
|
diff --git a/tensorpack/dataflow/imgaug/paste.py b/tensorpack/dataflow/imgaug/paste.py
index <HASH>..<HASH> 100644
--- a/tensorpack/dataflow/imgaug/paste.py
+++ b/tensorpack/dataflow/imgaug/paste.py
@@ -45,7 +45,7 @@ class ConstantBackgroundFiller(BackgroundFiller):
def _fill(self, background_shape, img):
assert img.ndim in [3, 2]
if img.ndim == 3:
- return_shape = background_shape + (3,)
+ return_shape = background_shape + (img.shape[2],)
else:
return_shape = background_shape
return np.zeros(return_shape) + self.value
|
Fix shape in background filler (fix #<I>)
|
py
|
diff --git a/holoviews/plotting/mpl/chart.py b/holoviews/plotting/mpl/chart.py
index <HASH>..<HASH> 100644
--- a/holoviews/plotting/mpl/chart.py
+++ b/holoviews/plotting/mpl/chart.py
@@ -1059,8 +1059,8 @@ class BoxPlot(ChartPlot):
data.append(group[group.vdims[0]])
labels.append(label)
style['labels'] = labels
- style.pop('zorder', None)
- style.pop('label', None)
+ style = {k: v for k, v in style.items()
+ if k not in ['zorder', 'label']}
style['vert'] = not self.invert_axes
format_kdims = [kd(value_format=None) for kd in element.kdims]
return (data,), style, {'dimensions': [format_kdims,
|
Simplified style filtering on BoxPlot
|
py
|
diff --git a/bin/VV_main.py b/bin/VV_main.py
index <HASH>..<HASH> 100755
--- a/bin/VV_main.py
+++ b/bin/VV_main.py
@@ -307,5 +307,5 @@ file.write('</BODY>\n')
file.write('</HTML>\n')
file.close()
-print "LIVV Completed. Go to " + options.html_link + "/livv/livv_kit_main.html to view results"
+print "LIVV Completed. Go to " + options.html_link + "/livv_kit_main.html to view results"
|
I think this will fix the "two livvs" problem where it's printing the wrong file after LIVV finishes. git-svn-id: <URL>
|
py
|
diff --git a/tests/tools/test_results.py b/tests/tools/test_results.py
index <HASH>..<HASH> 100644
--- a/tests/tools/test_results.py
+++ b/tests/tools/test_results.py
@@ -51,19 +51,6 @@ class TestCalculateStats(object):
check_dtype=False,
check_index_type=False)
- # @pytest.mark.dependency(depends=[
- # "TestCalculateStats::test_calculate_stats_connect_generators"])
- # def test_calculate_stats_set_branch_ids(self, connect_generators):
- # mvgd_stats = calculate_mvgd_stats(connect_generators)
- # mvgd_stats_expected = pd.read_csv(os.path.join(
- # TEST_DATA_PATH,
- # "mvgd_stats_testgrid_after_set_branch_id_expected.csv"),
- # index_col=0)
- # assert_frame_equal(
- # mvgd_stats, mvgd_stats_expected,
- # check_dtype=False,
- # check_index_type=False)
-
@pytest.mark.dependency(depends=[
"TestCalculateStats::test_calculate_stats_connect_generators"])
def test_calculate_stats_set_circuit_breakers(self, connect_generators):
|
rm obsolete pytest dependency
|
py
|
diff --git a/pyipmi/ipmitool.py b/pyipmi/ipmitool.py
index <HASH>..<HASH> 100755
--- a/pyipmi/ipmitool.py
+++ b/pyipmi/ipmitool.py
@@ -556,7 +556,7 @@ COMMAND_HELP = (
CommandHelp('sdr', None,
'Print Sensor Data Repository entries and readings'),
CommandHelp('sdr list', None, 'List all SDRs'),
- CommandHelp('sdr show', '<sdr-id>', 'List all SDRs'),
+ CommandHelp('sdr show', '<sdr-id>', 'Show detail for one SDR'),
CommandHelp('bmc', None,
'Management Controller status and global enables'),
|
ipmitool: Fix description of 'sdr show' command
|
py
|
diff --git a/usb/backend/libusb1.py b/usb/backend/libusb1.py
index <HASH>..<HASH> 100644
--- a/usb/backend/libusb1.py
+++ b/usb/backend/libusb1.py
@@ -600,9 +600,10 @@ class _ConfigDescriptor(object):
# initialize and finalize the library
class _Initializer(object):
def __init__(self):
- _check(_lib.libusb_init(None))
+ self.ctx = c_void_p()
+ _check(_lib.libusb_init(byref(self.ctx)))
def __del__(self):
- _lib.libusb_exit(None)
+ _lib.libusb_exit(self.ctx)
# iterator for libusb devices
|
Use an explicit context to initialize libusb 1.x. closes #<I> When combining PyUSB with another module that makes use of libusb, we may end up with a double free inside libusb_exit, or a application hang. We now use an explicit context object when initializing and exiting the libusb library to avoid this problem.
|
py
|
diff --git a/pymatgen/analysis/defects/core.py b/pymatgen/analysis/defects/core.py
index <HASH>..<HASH> 100644
--- a/pymatgen/analysis/defects/core.py
+++ b/pymatgen/analysis/defects/core.py
@@ -101,6 +101,13 @@ class Defect(six.with_metaclass(ABCMeta, MSONable)):
"""
return
+ def set_charge(self,new_charge=0.):
+ """
+ Sets the overall charge
+ Args:
+ charge (float): new charge to set
+ """
+ self._charge = new_charge
class Vacancy(Defect):
"""
|
add set_charge method for Defect in core
|
py
|
diff --git a/iron_cache.py b/iron_cache.py
index <HASH>..<HASH> 100644
--- a/iron_cache.py
+++ b/iron_cache.py
@@ -118,7 +118,7 @@ class IronCache:
(int, long)):
value = json.dumps(value)
- options["body"] = value
+ options["value"] = value
body = json.dumps(options)
cache = urllib.quote_plus(cache)
|
change put format to {"value": <data>} (from {"body": <data>})
|
py
|
diff --git a/grimoire_elk/elk/askbot.py b/grimoire_elk/elk/askbot.py
index <HASH>..<HASH> 100644
--- a/grimoire_elk/elk/askbot.py
+++ b/grimoire_elk/elk/askbot.py
@@ -87,19 +87,33 @@ class AskbotEnrich(Enrich):
def get_elastic_mappings(self):
+ from grimoire_elk.utils import kibiter_version
+
+ fielddata = ''
+ if kibiter_version == '5':
+ fielddata = ', "fielddata": true'
+
mapping = """
{
"properties": {
"author_badges": {
"type": "string",
"index":"analyzed"
+ %s
+ },
+ "question_tags": {
+ "type": "string",
+ "index":"analyzed"
+ %s
},
"summary": {
"type": "string",
"index":"analyzed"
+ %s
}
}
- } """
+ } """ % (fielddata, fielddata, fielddata)
+
return {"items":mapping}
@metadata
|
[enrich][askbot] Fix aggregatable property in author_badges and question tags
|
py
|
diff --git a/ratcave/shader.py b/ratcave/shader.py
index <HASH>..<HASH> 100644
--- a/ratcave/shader.py
+++ b/ratcave/shader.py
@@ -51,7 +51,12 @@ class UniformCollection(IterableUserDict, object):
shader_id = c_int(0)
gl.glGetIntegerv(gl.GL_CURRENT_PROGRAM, byref(shader_id))
if shader_id.value == 0:
- raise UnboundLocalError("Shader not bound to OpenGL context--uniform cannot be sent.")
+ raise UnboundLocalError("""Shader not bound to OpenGL context--uniform cannot be sent.
+ ------------ Tip -------------
+ with ratcave.default_shader:
+ mesh.draw()
+ ------------------------------
+ """)
# Attach a shader location value to the array, for quick memory lookup. (gl calls are expensive, for some reason)
try:
|
added a more helpful error message to the "shader not bound" situation.
|
py
|
diff --git a/sonnet/__init__.py b/sonnet/__init__.py
index <HASH>..<HASH> 100644
--- a/sonnet/__init__.py
+++ b/sonnet/__init__.py
@@ -59,9 +59,11 @@ from sonnet.python.modules.basic import BatchFlatten
from sonnet.python.modules.basic import BatchReshape
from sonnet.python.modules.basic import FlattenTrailingDimensions
from sonnet.python.modules.basic import Linear
+from sonnet.python.modules.basic import merge_leading_dims
from sonnet.python.modules.basic import MergeDims
from sonnet.python.modules.basic import SelectInput
from sonnet.python.modules.basic import SliceByDim
+from sonnet.python.modules.basic import split_leading_dim
from sonnet.python.modules.basic import TileByDim
from sonnet.python.modules.basic import TrainableVariable
from sonnet.python.modules.basic_rnn import DeepRNN
|
Make merge_leading_dims and split_leading_dim part of the public sonnet API. PiperOrigin-RevId: <I>
|
py
|
diff --git a/test/functional/test_orm_config.py b/test/functional/test_orm_config.py
index <HASH>..<HASH> 100644
--- a/test/functional/test_orm_config.py
+++ b/test/functional/test_orm_config.py
@@ -5,8 +5,8 @@ from sqlalchemy.exc import IntegrityError
from ambry.orm.config import Config
-from test.test_base import TestBase
from test.factories import DatasetFactory
+from test.proto import TestBase
class Test(TestBase):
@@ -81,7 +81,7 @@ class Test(TestBase):
except IntegrityError as exc:
self.assertIn('UNIQUE constraint failed', str(exc))
- @unittest.skip("Credentials need to be fixed")
+ @unittest.skip('Credentials need to be fixed')
def test_config_postgres_unicode(self):
from ambry.orm.database import Database
|
Config tests use proto library. #<I>.
|
py
|
diff --git a/version.py b/version.py
index <HASH>..<HASH> 100644
--- a/version.py
+++ b/version.py
@@ -31,8 +31,8 @@ long_description = """
The STANFORD CNI MRS ANALYSIS LIBRARY (SMAL)
--------------------------------------------
-This library contains implementations
-
+This library contains implementations of analysis of data acquired in
+magnetic resonance spectroscopy experiments (MRS).
Copyright (c) 2013-, Ariel Rokem, Grace Tang.
|
DOC: A bit more detail here. But just a little.
|
py
|
diff --git a/neo/Core/Block.py b/neo/Core/Block.py
index <HASH>..<HASH> 100644
--- a/neo/Core/Block.py
+++ b/neo/Core/Block.py
@@ -25,7 +25,7 @@ class Block(BlockBase, InventoryMixin):
# 该区块的区块头
# < / summary >
- __header = None
+ _header = None
__is_trimmed = False
# < summary >
@@ -106,11 +106,11 @@ class Block(BlockBase, InventoryMixin):
Returns:
neo.Core.Header:
"""
- if not self.__header:
- self.__header = Header(self.PrevHash, self.MerkleRoot, self.Timestamp,
- self.Index, self.ConsensusData, self.NextConsensus, self.Script)
+ if not self._header:
+ self._header = Header(self.PrevHash, self.MerkleRoot, self.Timestamp,
+ self.Index, self.ConsensusData, self.NextConsensus, self.Script)
- return self.__header
+ return self._header
def Size(self):
"""
|
Replace name mangling private with 'regular' private
|
py
|
diff --git a/dohq_artifactory/exception.py b/dohq_artifactory/exception.py
index <HASH>..<HASH> 100644
--- a/dohq_artifactory/exception.py
+++ b/dohq_artifactory/exception.py
@@ -1,5 +1,3 @@
-from json import JSONDecodeError
-
import requests
@@ -25,7 +23,7 @@ def raise_for_status(response):
try:
response_json = exception.response.json()
error_list = response_json.pop("errors", None)
- except JSONDecodeError:
+ except requests.compat.JSONDecodeError:
# not a JSON response
raise ArtifactoryException(str(exception)) from exception
|
Specifically handle JSONDecodeError as returned by requests requests has a compat module that will use simplejson if it is available within the environment. We should catch the exception outlined in this module as it will reflect the json decoder used by requests. Note that requests.exceptions.JSONDecodeError, and subsequently requests.JSONDecodeError is not the same thing, and will not represent the same exception in this case.
|
py
|
diff --git a/cartoframes/context.py b/cartoframes/context.py
index <HASH>..<HASH> 100644
--- a/cartoframes/context.py
+++ b/cartoframes/context.py
@@ -239,9 +239,10 @@ class CartoContext(object):
for t in subtables)
_ = self.sql_client.send(drops)
except CartoException as err:
- raise CartoException('Failed to drop all subtables: '
- '{}'.format(', '.join(subtables)))
- raise Exception('Failed to upload dataframe: {}'.format(err))
+ warn('Failed to drop the following subtables from CARTO '
+ 'account: {}'.format(', '.join(subtables)))
+ finally:
+ raise Exception('Failed to upload dataframe: {}'.format(err))
return table_name
|
clearer erroring when dataframe upload fails
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -58,24 +58,21 @@ try:
from setuptools import setup
params = {
- 'install_requires': ['pyasn1>=0.1.8', 'pysmi'],
+ 'install_requires': ['pyasn1>=0.1.8', 'pysmi', 'pycryptodome'],
'zip_safe': True
}
- if sys.platform.lower()[:3] != 'win':
- params['install_requires'].append('pycryptodome')
except ImportError:
for arg in sys.argv:
if 'egg' in arg:
howto_install_setuptools()
sys.exit(1)
+
from distutils.core import setup
params = {}
if sys.version_info[:2] > (2, 4):
- params['requires'] = ['pyasn1(>=0.1.8)', 'pysmi']
- if sys.platform.lower()[:3] != 'win':
- params['requires'].append('pycryptodome')
+ params['requires'] = ['pyasn1(>=0.1.8)', 'pysmi', 'pycryptodome']
doclines = [x.strip() for x in (__doc__ or '').split('\n') if x]
|
obsolete PyCrypto-on-Windows workaround dropped
|
py
|
diff --git a/OpenTokSDK.py b/OpenTokSDK.py
index <HASH>..<HASH> 100644
--- a/OpenTokSDK.py
+++ b/OpenTokSDK.py
@@ -347,4 +347,4 @@ class OpenTokSDK(object):
raise RequestError("An unexpected error occurred", response.status_code)
def _sign_string(self, string, secret):
- return hmac.new(secret, string.encode('utf-8'), hashlib.sha1).hexdigest()
+ return hmac.new(secret.encode('utf-8'), string.encode('utf-8'), hashlib.sha1).hexdigest()
|
ADD support for secret in unicode format
|
py
|
diff --git a/subdownloader/provider/opensubtitles.py b/subdownloader/provider/opensubtitles.py
index <HASH>..<HASH> 100644
--- a/subdownloader/provider/opensubtitles.py
+++ b/subdownloader/provider/opensubtitles.py
@@ -82,8 +82,8 @@ class OpenSubtitles(SubtitleProvider):
if self.logged_in():
def logout_query():
return self._xmlrpc.LogOut(self._token)
- result = self._safe_exec(logout_query, None)
- self.check_result(result)
+ # Do no check result of this call. Assume connection closed.
+ self._safe_exec(logout_query, None)
self._token = None
def logged_in(self):
|
opensubtitles: do not check result when logging out
|
py
|
diff --git a/src/fuzzfetch/fetch.py b/src/fuzzfetch/fetch.py
index <HASH>..<HASH> 100644
--- a/src/fuzzfetch/fetch.py
+++ b/src/fuzzfetch/fetch.py
@@ -457,12 +457,12 @@ class Fetcher(object):
# If start date is outside the range of the newest/oldest available build, adjust it
if asc:
start = max(start, now - timedelta(days=364))
- end = now
+ end = min(start, now)
else:
- start = min(start, now)
end = now - timedelta(days=364)
+ start = max(min(start, now), end)
- while start < end if asc else start > end:
+ while start <= end if asc else start >= end:
try:
self._task = BuildTask(start.strftime('%Y-%m-%d'), branch, self._flags, self._platform)
break
|
Ensure nearest ranges use the minimal boundary
|
py
|
diff --git a/cqlengine/tests/query/test_updates.py b/cqlengine/tests/query/test_updates.py
index <HASH>..<HASH> 100644
--- a/cqlengine/tests/query/test_updates.py
+++ b/cqlengine/tests/query/test_updates.py
@@ -173,12 +173,13 @@ class QueryUpdateTests(BaseCassEngTestCase):
self.assertEqual(obj.text_list, ["foo", "bar"])
def test_list_prepend_updates(self):
+ """ Prepend two things since order is reversed by default by CQL """
partition = uuid4()
cluster = 1
TestQueryUpdateModel.objects.create(
partition=partition, cluster=cluster, text_list=["foo"])
TestQueryUpdateModel.objects(
partition=partition, cluster=cluster).update(
- text_list__prepend=['bar'])
+ text_list__prepend=['bar', 'baz'])
obj = TestQueryUpdateModel.objects.get(partition=partition, cluster=cluster)
- self.assertEqual(obj.text_list, ["bar", "foo"])
+ self.assertEqual(obj.text_list, ["bar", "baz", "foo"])
|
Updates the prepend to list test to make sure order is preserved when multiple items are prepended
|
py
|
diff --git a/python/sdss_access/path/path.py b/python/sdss_access/path/path.py
index <HASH>..<HASH> 100644
--- a/python/sdss_access/path/path.py
+++ b/python/sdss_access/path/path.py
@@ -1112,6 +1112,26 @@ class Path(BasePath):
subdir = "{:0>4d}".format(designid100) + "XX"
return subdir
+ def healpixgrp(self, filetype, **kwargs):
+ ''' Returns HEALPIX group subdirectory
+
+ Parameters
+ ----------
+ filetype : str
+ File type parameter.
+ healpix : int or str
+ HEALPix number. Will be converted to int internally.
+
+ Returns
+ -------
+ healpixgrp : str
+ HEALPix group directory, HEALPix//100.
+
+ '''
+
+ healpix = int(kwargs['healpix'])
+ subdir = "{:d}".format(healpix//100)
+ return subdir
class AccessError(Exception):
pass
|
Added healpixgrp() to path.py
|
py
|
diff --git a/changes/cli.py b/changes/cli.py
index <HASH>..<HASH> 100644
--- a/changes/cli.py
+++ b/changes/cli.py
@@ -197,13 +197,14 @@ def changelog(arguments):
)
log.info('Added content to CHANGELOG.md')
- execute(
- ['git', 'ci', '-m', '"%s"' % new_version, '%s/__init__.py' % app_name],
- dry_run=dry_run
- )
+ if arguments['--commit-changelog']:
+ execute(
+ ['git', 'ci', '-m', '"%s"' % new_version, '%s/__init__.py' % app_name],
+ dry_run=dry_run
+ )
- execute(['git', 'push'], dry_run=dry_run)
- log.info('Committed changelog update')
+ execute(['git', 'push'], dry_run=dry_run)
+ log.info('Committed changelog update')
def tag(arguments):
dry_run=arguments['--dry-run']
@@ -252,6 +253,8 @@ Options:
--pypi=<pypi> Specify alternative pypi
--dry-run Prints the commands that would have been executed.
+ --commit-changelog Should the automatically generated changelog be
+ committed?
--debug Debug output.
"""
|
Add an option to commit the automatic changelog
|
py
|
diff --git a/arca/exceptions.py b/arca/exceptions.py
index <HASH>..<HASH> 100644
--- a/arca/exceptions.py
+++ b/arca/exceptions.py
@@ -23,9 +23,12 @@ class BuildError(ArcaException):
self.extra_info = extra_info
def __str__(self):
+ extra_info = self.extra_info
+ if isinstance(extra_info, dict) and "traceback" in extra_info:
+ extra_info = extra_info["traceback"]
return "{}\n\n{}".format(
super().__str__(),
- self.extra_info
+ extra_info
)
|
If BuildError extra_info contains a traceback, print only the traceback
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -7,7 +7,7 @@ from setuptools import find_packages, setup
if sys.argv[-1] == 'publish':
- os.system('python setup.py sdist upload')
+ os.system('python setup.py sdist bdist_wheel upload')
sys.exit()
version = __import__('email_confirm_la').get_version()
|
python setup.py sdist bdist_wheel upload
|
py
|
diff --git a/openquake/server/db/upgrade_manager.py b/openquake/server/db/upgrade_manager.py
index <HASH>..<HASH> 100644
--- a/openquake/server/db/upgrade_manager.py
+++ b/openquake/server/db/upgrade_manager.py
@@ -222,7 +222,7 @@ class UpgradeManager(object):
versions = [s['version'] for s in scripts]
if versions:
return ('Your database is not updated. You can update it by '
- 'running oq-engine --upgrade-db which will process the '
+ 'running oq engine --upgrade-db which will process the '
'following new versions: %s' % versions)
def get_db_versions(self, conn):
@@ -237,7 +237,7 @@ class UpgradeManager(object):
curs.execute(query)
return set(version for version, in curs.fetchall())
except:
- raise VersioningNotInstalled('Run oq-engine --upgrade-db')
+ raise VersioningNotInstalled('Run oq engine --upgrade-db')
def parse_script_name(self, script_name):
'''
|
Make 'oq-engine --upgrade-db' consistent with the new oq command Former-commit-id: 3c<I>a4ca<I>b<I>f<I>d4a<I>a<I>fb3f0c5c<I>
|
py
|
diff --git a/phonopy/api_phonopy.py b/phonopy/api_phonopy.py
index <HASH>..<HASH> 100644
--- a/phonopy/api_phonopy.py
+++ b/phonopy/api_phonopy.py
@@ -259,6 +259,7 @@ class Phonopy(object):
def set_nac_params(self, nac_params=None):
self._nac_params = nac_params
+ self._set_dynamical_matrix()
def set_displacement_dataset(self, displacement_dataset):
"""
|
Minor fix to call _set_dynamical_matrix at correct order when with nac_params
|
py
|
diff --git a/gwpy/timeseries/io/gwf/lalframe.py b/gwpy/timeseries/io/gwf/lalframe.py
index <HASH>..<HASH> 100644
--- a/gwpy/timeseries/io/gwf/lalframe.py
+++ b/gwpy/timeseries/io/gwf/lalframe.py
@@ -225,4 +225,4 @@ def write(
add_(frame, lalseries)
# write frame
- lalframe.FrameWrite(frame, outfile)
+ lalframe.FrameWrite(frame, file_path(outfile))
|
gwpy.timeseries.io: fix `Path` compatibility with LALFrame I/O
|
py
|
diff --git a/swiftly/cli.py b/swiftly/cli.py
index <HASH>..<HASH> 100644
--- a/swiftly/cli.py
+++ b/swiftly/cli.py
@@ -397,7 +397,11 @@ Issues a DELETE request of the [path] given.""".strip(),
'instead of 1.')
self._main_parser = _OptionParser(version='%prog 1.0',
- usage='Usage: %prog [options] <command> [command_options] [args]',
+ usage="""
+Usage: %prog [options] <command> [command_options] [args]
+
+NOTE: Be sure any names given are url encoded if necessary. For instance, an
+object named 4&4.txt must be given as 4%264.txt.""".strip(),
stdout=self.stdout, stderr=self.stderr)
self._main_parser.add_option('-A', '--auth-url', dest='auth_url',
default=environ.get('SWIFTLY_AUTH_URL', ''), metavar='URL',
|
Added note about url encoding.
|
py
|
diff --git a/tests/formats/visual.py b/tests/formats/visual.py
index <HASH>..<HASH> 100644
--- a/tests/formats/visual.py
+++ b/tests/formats/visual.py
@@ -196,3 +196,18 @@ class VisualFormatReaderTestCase(unittest.TestCase):
self._drop_header('DELIM')
reader = VisualFormatReader(self.fp)
self.assertEqual(reader.delimiter, str(','))
+
+ def test_obstype(self):
+ """
+ Check that observation type matches data in header.
+ """
+ reader = VisualFormatReader(self.fp)
+ self.assertEqual(reader.obstype, str('Visual'))
+
+ def test_missing_obstype(self):
+ """
+ Check that Visual is assumed obstype when OBSTYPE header is missing.
+ """
+ self._drop_header('OBSTYPE')
+ reader = VisualFormatReader(self.fp)
+ self.assertEqual(reader.obstype, 'Visual')
|
Added tests for OBSTYPE header.
|
py
|
diff --git a/validator/tests/sawtooth_suites/ts_nightly_poet1.py b/validator/tests/sawtooth_suites/ts_nightly_poet1.py
index <HASH>..<HASH> 100644
--- a/validator/tests/sawtooth_suites/ts_nightly_poet1.py
+++ b/validator/tests/sawtooth_suites/ts_nightly_poet1.py
@@ -32,7 +32,8 @@ LOGGER = logging.getLogger(__name__)
class Poet1NightlyTestSuite(SawtoothTestSuite):
def test_suite(self):
- cfg = {"LedgerType": "poet1"}
+ cfg = {"LedgerType": "poet1",
+ "InitialConnectivity": 1}
success = False
try:
|
Set default connectivity in nightly tests.
|
py
|
diff --git a/runners/vr/runners/base.py b/runners/vr/runners/base.py
index <HASH>..<HASH> 100644
--- a/runners/vr/runners/base.py
+++ b/runners/vr/runners/base.py
@@ -44,15 +44,16 @@ class BaseRunner(object):
try:
cmd = self.commands[args.command]
- # Commands that have a lock=False attribute won't try to lock the
- # proc.yaml file. 'uptest' and 'shell' are in this category.
- if getattr(cmd, 'lock', True):
- lock_file(self.file)
- else:
- self.file.close()
except KeyError:
raise SystemExit("Command must be one of: %s" %
', '.join(self.commands.keys()))
+
+ # Commands that have a lock=False attribute won't try to lock the
+ # proc.yaml file. 'uptest' and 'shell' are in this category.
+ if getattr(cmd, 'lock', True):
+ lock_file(self.file)
+ else:
+ self.file.close()
cmd()
def setup(self):
|
Don't feign to trap KeyErrors for file locking operations.
|
py
|
diff --git a/MAVProxy/modules/lib/grapher.py b/MAVProxy/modules/lib/grapher.py
index <HASH>..<HASH> 100755
--- a/MAVProxy/modules/lib/grapher.py
+++ b/MAVProxy/modules/lib/grapher.py
@@ -18,6 +18,8 @@ from pymavlink import mavutil
import threading
import numpy as np
+MAVGRAPH_DEBUG = 'MAVGRAPH_DEBUG' in os.environ
+
colors = [ 'red', 'green', 'blue', 'orange', 'olive', 'black', 'grey', 'yellow', 'brown', 'darkcyan',
'cornflowerblue', 'darkmagenta', 'deeppink', 'darkred']
@@ -441,13 +443,19 @@ class MavGraph(object):
has_instance = True
simple = self.simple_field[i]
+ v = None
if simple is not None and not has_instance:
- v = getattr(vars[simple[0]], simple[1])
- else:
+ try:
+ v = getattr(vars[simple[0]], simple[1])
+ except Exception as ex:
+ if MAVGRAPH_DEBUG:
+ print(ex)
+ if v is None:
try:
v = mavutil.evaluate_expression(f, vars)
- except Exception:
- continue
+ except Exception as ex:
+ if MAVGRAPH_DEBUG:
+ print(ex)
if v is None:
continue
if self.xaxis is None:
|
grapher: fixed handling of bad simple expressions fails on RAD.RSSI as RSSI is a valid message. Also added MAVGRAPH_DEBUG for easier debugging
|
py
|
diff --git a/galpy/orbit_src/linearOrbit.py b/galpy/orbit_src/linearOrbit.py
index <HASH>..<HASH> 100644
--- a/galpy/orbit_src/linearOrbit.py
+++ b/galpy/orbit_src/linearOrbit.py
@@ -39,6 +39,7 @@ class linearOrbit(OrbitTop):
HISTORY:
2010-07-13 - Written - Bovy (NYU)
"""
+ if method == 'leapfrog_c': method= 'odeint'
if hasattr(self,'_orbInterp'): delattr(self,'_orbInterp')
self.t= nu.array(t)
self._pot= pot
|
make sure non-planarOrbts keep working for now
|
py
|
diff --git a/raiden/network/rpc/client.py b/raiden/network/rpc/client.py
index <HASH>..<HASH> 100644
--- a/raiden/network/rpc/client.py
+++ b/raiden/network/rpc/client.py
@@ -30,6 +30,7 @@ from web3.contract import Contract, ContractFunction
from web3.eth import Eth
from web3.exceptions import BlockNotFound, TransactionNotFound
from web3.gas_strategies.rpc import rpc_gas_price_strategy
+from web3.middleware import simple_cache_middleware
from web3.types import (
ABIFunction,
BlockData,
@@ -756,6 +757,7 @@ def monkey_patch_web3(web3: Web3, gas_price_strategy: Callable) -> None:
try:
# install caching middleware
web3.middleware_onion.add(block_hash_cache_middleware)
+ web3.middleware_onion.add(simple_cache_middleware)
# set gas price strategy
web3.eth.setGasPriceStrategy(gas_price_strategy)
|
enabled `simple_cache_middleware`. introducing the same cache as used in the scenario player.
|
py
|
diff --git a/yhy/__init__.py b/yhy/__init__.py
index <HASH>..<HASH> 100644
--- a/yhy/__init__.py
+++ b/yhy/__init__.py
@@ -1 +1 @@
-__version__ = '0.0.4'
+__version__ = '0.0.5'
|
Bump versino to <I>
|
py
|
diff --git a/grandalf/layouts.py b/grandalf/layouts.py
index <HASH>..<HASH> 100644
--- a/grandalf/layouts.py
+++ b/grandalf/layouts.py
@@ -419,6 +419,7 @@ class SugiyamaLayout(object):
try:
self.layers[r].append(v)
except IndexError:
+ assert r==len(self.layers)
self.layers.append(Layer([v]))
def dummyctrl(self,r,ctrl):
|
assert new rank is ok with append
|
py
|
diff --git a/src/transformers/trainer.py b/src/transformers/trainer.py
index <HASH>..<HASH> 100755
--- a/src/transformers/trainer.py
+++ b/src/transformers/trainer.py
@@ -705,7 +705,7 @@ class Trainer:
print(output)
def _prepare_inputs(
- self, inputs: Dict[str, Union[torch.Tensor, Any]], model: nn.Module
+ self, inputs: Dict[str, Union[torch.Tensor, Any]]
) -> Dict[str, Union[torch.Tensor, Any]]:
"""
Prepare :obj:`inputs` before feeding them to the model, converting them to tensors if they are not already and
@@ -746,7 +746,7 @@ class Trainer:
return self._training_step(model, inputs, self.optimizer)
model.train()
- inputs = self._prepare_inputs(inputs, model)
+ inputs = self._prepare_inputs(inputs)
if self.args.fp16 and _use_native_amp:
with autocast():
@@ -1071,7 +1071,7 @@ class Trainer:
"""
has_labels = any(inputs.get(k) is not None for k in ["labels", "lm_labels", "masked_lm_labels"])
- inputs = self._prepare_inputs(inputs, model)
+ inputs = self._prepare_inputs(inputs)
with torch.no_grad():
outputs = model(**inputs)
|
removed redundant arg in prepare_inputs (#<I>) * removed redundant arg in prepare_inputs * made same change in prediction_loop
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -5,7 +5,7 @@ REQUIRES = []
setup(
name='stackinabox',
- version='0.1',
+ version='0.2',
description='OpenStack/Rackspace Service Testing Suite',
license='Apache License 2.0',
url='https://github.com/BenjamenMeyer/stackInABox',
|
Update setup.py Updating version since we made a release
|
py
|
diff --git a/pyspectral/utils.py b/pyspectral/utils.py
index <HASH>..<HASH> 100644
--- a/pyspectral/utils.py
+++ b/pyspectral/utils.py
@@ -433,7 +433,7 @@ def download_luts(**kwargs):
else:
aerosol_types = HTTPS_RAYLEIGH_LUTS.keys()
- chunk_size = 10124
+ chunk_size = 4096
for subname in aerosol_types:
@@ -461,7 +461,7 @@ def download_luts(**kwargs):
if TQDM_LOADED:
with open(filename, "wb") as handle:
for data in tqdm(iterable=response.iter_content(chunk_size=chunk_size),
- total=(total_size / chunk_size), unit='kB'):
+ total=(int(total_size / chunk_size + 0.5)), unit='kB'):
handle.write(data)
else:
with open(filename, "wb") as handle:
|
Adjust ftp download chunk size and adjust the number of expected iterations to be an integer
|
py
|
diff --git a/openquake/engine/calculators/risk/hazard_getters.py b/openquake/engine/calculators/risk/hazard_getters.py
index <HASH>..<HASH> 100644
--- a/openquake/engine/calculators/risk/hazard_getters.py
+++ b/openquake/engine/calculators/risk/hazard_getters.py
@@ -115,7 +115,9 @@ class HazardGetter(object):
def __call__(self, monitor=None):
for hazard in self.hazard_outputs:
h = hazard.output_container
- yield (hazard.id,) + self.get_assets_data(h, monitor)
+ assets, data = self.get_assets_data(h, monitor)
+ if len(assets) > 0:
+ yield hazard.id, assets, data
def weights(self):
ws = []
@@ -215,7 +217,9 @@ class GroundMotionValuesGetter(HazardGetter):
for hazard, seed in zip(self.hazard_outputs, self.seeds):
h = hazard.output_container
numpy.random.seed(seed)
- yield (hazard.id,) + self.get_assets_data(h, monitor)
+ assets, data = self.get_assets_data(h, monitor)
+ if len(assets) > 0:
+ yield hazard.id, assets, data
def assets_gen(self, hazard_output):
"""
|
Fixed a bug when there are no GMVs close to the given assets Former-commit-id: c<I>e<I>dfa1cf<I>a<I>d4be<I>acd<I>ca<I>b5
|
py
|
diff --git a/mdp.py b/mdp.py
index <HASH>..<HASH> 100644
--- a/mdp.py
+++ b/mdp.py
@@ -96,7 +96,7 @@ from math import ceil, log, sqrt
from random import randint, random
from time import time
-from numpy import absolute, array, diag, empty, matrix, mean, mod, multiply
+from numpy import absolute, array, diag, empty, mean, mod, multiply
from numpy import ndarray, ones, zeros
from numpy.random import rand
from scipy.sparse import csr_matrix as sparse
|
remove import of matrix from numpy
|
py
|
diff --git a/rope/refactor/extract.py b/rope/refactor/extract.py
index <HASH>..<HASH> 100644
--- a/rope/refactor/extract.py
+++ b/rope/refactor/extract.py
@@ -657,7 +657,8 @@ class _VariableReadsAndWritesFinder(object):
def _FunctionDef(self, node):
self.written.add(node.name)
visitor = _VariableReadsAndWritesFinder()
- ast.walk(node.code, visitor)
+ for child in ast.get_child_nodes(node):
+ ast.walk(child, visitor)
self.read.update(visitor.read - visitor.written)
def _Class(self, node):
|
extract: fixed finding variable reads in functions
|
py
|
diff --git a/picotui/screen.py b/picotui/screen.py
index <HASH>..<HASH> 100644
--- a/picotui/screen.py
+++ b/picotui/screen.py
@@ -35,6 +35,15 @@ KEY_TAB = b"\t"
KEY_SHIFT_TAB = b"\x1b[Z"
KEY_ESC = 20
KEY_F1 = 30
+KEY_F2 = 31
+KEY_F3 = 32
+KEY_F4 = 33
+KEY_F5 = b'\x1b[15~'
+KEY_F6 = b'\x1b[17~'
+KEY_F7 = b'\x1b[18~'
+KEY_F8 = b'\x1b[19~'
+KEY_F9 = b'\x1b[20~'
+KEY_F10 = b'\x1b[21~'
KEYMAP = {
b"\x1b[A": KEY_UP,
@@ -53,6 +62,9 @@ b"\x7f": KEY_BACKSPACE,
b"\x1b[3~": KEY_DELETE,
b"\x1b": KEY_ESC,
b"\x1bOP": KEY_F1,
+b"\x1bOQ": KEY_F2,
+b"\x1bOR": KEY_F3,
+b"\x1bOS": KEY_F4,
}
class Screen:
|
screen: Add F2-F<I> keys.
|
py
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.