diff
stringlengths 139
3.65k
| message
stringlengths 8
627
| diff_languages
stringclasses 1
value |
|---|---|---|
diff --git a/smartmin/users/views.py b/smartmin/users/views.py
index <HASH>..<HASH> 100644
--- a/smartmin/users/views.py
+++ b/smartmin/users/views.py
@@ -41,24 +41,14 @@ class UserCRUDL(SmartCRUDL):
class List(SmartListView):
search_fields = ('username__icontains','first_name__icontains', 'last_name__icontains')
- fields = ('is_active', 'username', 'name', 'group', 'last_login')
+ fields = ('username', 'name', 'group', 'last_login')
link_fields = ('username', 'name')
default_order = 'username'
add_button = True
- field_config = {
- 'active': dict(label=""),
- }
-
def get_group(self, obj):
return ", ".join([group.name for group in obj.groups.all()])
- def get_is_active(self, obj):
- if obj.is_active:
- return '<div class="active_icon"></div>'
- else:
- return ''
-
def get_queryset(self, **kwargs):
queryset = super(UserCRUDL.List, self).get_queryset(**kwargs)
return queryset.filter(id__gt=0).exclude(is_staff=True).exclude(is_superuser=True)
|
change is active state for users to be inline with tboostrap
|
py
|
diff --git a/src/scs_core/aws/greengrass/aws_deployment_reporter.py b/src/scs_core/aws/greengrass/aws_deployment_reporter.py
index <HASH>..<HASH> 100644
--- a/src/scs_core/aws/greengrass/aws_deployment_reporter.py
+++ b/src/scs_core/aws/greengrass/aws_deployment_reporter.py
@@ -71,7 +71,7 @@ class AWSDeploymentReporter(object):
latest_deployment = deployments[0] if deployments else None
deployment = Deployment.construct_from_aws(group_name, latest_deployment)
- if currency is None or deployment.is_current(currency):
+ if currency is None or not deployment.is_current(currency):
deployments.append(deployment)
return sorted(deployments)
|
Added AWSDeploymentReporter class
|
py
|
diff --git a/spacy/language_data/tag_map.py b/spacy/language_data/tag_map.py
index <HASH>..<HASH> 100644
--- a/spacy/language_data/tag_map.py
+++ b/spacy/language_data/tag_map.py
@@ -20,5 +20,6 @@ TAG_MAP = {
"X": {POS: X},
"CONJ": {POS: CONJ},
"ADJ": {POS: ADJ},
- "VERB": {POS: VERB}
+ "VERB": {POS: VERB},
+ "PART": {POS: PART}
}
|
Add PART to tag map <I> of the <I> PoS tags in the UD tag set is added; PART is missing.
|
py
|
diff --git a/doc/source/conf.py b/doc/source/conf.py
index <HASH>..<HASH> 100644
--- a/doc/source/conf.py
+++ b/doc/source/conf.py
@@ -19,6 +19,7 @@ import subprocess
import sys
import os
+import warnings
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
@@ -148,8 +149,12 @@ html_static_path = ['_static']
#html_last_updated_fmt = '%b %d, %Y'
git_cmd = ["git", "log", "--pretty=format:'%ad, commit %h'", "--date=local",
"-n1"]
-html_last_updated_fmt = subprocess.Popen(
- git_cmd, stdout=subprocess.PIPE).communicate()[0]
+try:
+ html_last_updated_fmt = subprocess.Popen(
+ git_cmd, stdout=subprocess.PIPE).communicate()[0]
+except Exception:
+ warnings.warn('Cannot get last updated time from git repository. '
+ 'Not setting "html_last_updated_fmt".')
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
|
Fix doc build if git is absent When building packages if git is absent, then we should not set html_last_updated_fmt. It can still be set via the -D switch when building with sphinx-build. Change-Id: Ie<I>df7f<I>e<I>b1b<I>b<I>f<I>ce9dd9aaa Closes-Bug: #<I>
|
py
|
diff --git a/lib/elements/bfe_report_numbers.py b/lib/elements/bfe_report_numbers.py
index <HASH>..<HASH> 100644
--- a/lib/elements/bfe_report_numbers.py
+++ b/lib/elements/bfe_report_numbers.py
@@ -23,13 +23,18 @@
__revision__ = ""
-def format(bfo, separator):
+def format(bfo, separator, limit, extension=" etc."):
"""
Prints the report numbers of the record (037__a and 088__a)
@param separator the separator between report numbers.
+ @param limit the max number of report numbers to print
+ @param extension a prefix printed when limit param is reached
"""
numbers = bfo.fields("037__a")
numbers.extend(bfo.fields("088__a"))
- return separator.join(numbers)
+ if limit.isdigit() and int(limit) <= len(numbers):
+ return separator.join(numbers[:limit]) + extension
+ else:
+ return separator.join(numbers)
|
Added 'limit' and 'extension' parameters in order to limit number of printed report numbers.
|
py
|
diff --git a/auto_ml/utils_models.py b/auto_ml/utils_models.py
index <HASH>..<HASH> 100644
--- a/auto_ml/utils_models.py
+++ b/auto_ml/utils_models.py
@@ -536,5 +536,5 @@ def make_deep_learning_classifier(hidden_layers=None, num_cols=None, optimizer='
model.add(Dense(layer_size, init='normal', activation='relu'))
model.add(Dense(1, init='normal', activation=final_activation))
- model.compile(loss='binary_crossentropy', optimizer=optimizer, metrics=['accuracy', 'poisson', 'precision', 'recall', 'fbeta_score'])
+ model.compile(loss='binary_crossentropy', optimizer=optimizer, metrics=['accuracy', 'poisson'])
return model
|
removes metrics that keras removed. they chose to raise errors rather than continue with a warning.
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -16,7 +16,7 @@ with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
setup(
name='sphinxmark',
- version='0.1.2',
+ version='0.1.0',
description='A Sphinx extension that enables watermarks for HTML output.',
long_description=long_description,
url='https://github.com/kallimachos/sphinxmark',
|
Reset version for pypi upload
|
py
|
diff --git a/docs/conf.py b/docs/conf.py
index <HASH>..<HASH> 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -369,4 +369,4 @@ epub_publisher = 'EMVA'
intersphinx_mapping = {'https://docs.python.org/3.4': None}
# List up the module to be mocked.
-autodoc_mock_imports = ['genicam2', 'numpy']
+autodoc_mock_imports = ['genicam2']
|
Remove numpy from the mock import list
|
py
|
diff --git a/salt/utils/docker/__init__.py b/salt/utils/docker/__init__.py
index <HASH>..<HASH> 100644
--- a/salt/utils/docker/__init__.py
+++ b/salt/utils/docker/__init__.py
@@ -225,7 +225,7 @@ def translate_input(**kwargs):
# format {'Type': log_driver, 'Config': log_opt}. So, we need to
# construct this argument to be passed to the API from those two
# arguments.
- if log_driver is not NOTSET and log_opt is not NOTSET:
+ if log_driver is not NOTSET or log_opt is not NOTSET:
kwargs['log_config'] = {
'Type': log_driver if log_driver is not NOTSET else 'none',
'Config': log_opt if log_opt is not NOTSET else {}
|
fix(docker): allow log_driver or log_opt Docker allows either option without the other and the docks make no mention of both options being required.
|
py
|
diff --git a/collectionkit/contrib/work_creator/fields.py b/collectionkit/contrib/work_creator/fields.py
index <HASH>..<HASH> 100644
--- a/collectionkit/contrib/work_creator/fields.py
+++ b/collectionkit/contrib/work_creator/fields.py
@@ -1,11 +1,10 @@
from django.db.models import ImageField
-
class QuietImageField(ImageField):
"""An imagefield that doesn't lose the plot when trying to find the
dimensions of an image that doesn't exist"""
def update_dimension_fields(self, *args, **kwargs):
try:
super(QuietImageField, self).update_dimension_fields(*args, **kwargs)
- except IOError:
+ except:
pass
|
Catch any error trying to update width/height fields.
|
py
|
diff --git a/arcgis_integration/RichDEM_ArcScript.py b/arcgis_integration/RichDEM_ArcScript.py
index <HASH>..<HASH> 100644
--- a/arcgis_integration/RichDEM_ArcScript.py
+++ b/arcgis_integration/RichDEM_ArcScript.py
@@ -1,15 +1,11 @@
-# Script Name: Remove Pits
+# Script Name: RichDEM_ArcScript
#
-# Created By: David Tarboton
-# Date: 9/21/11
+# Created By: Richard Barnes
+# Date: 5/10/2012
-# Import ArcPy site-package and os modules
-#
import arcpy
import os
import sys
-import time
-import string
import subprocess
PROGPATH="richdem"
|
Removed header comments carried over from TauDEM files I used as an example Eliminated unnecessary imports git-svn-id: file:///home/rick/.svn/richdem@<I> c4c8cafc-<I>d-<I>f-9f<I>-3f4f8cd<I>a<I>
|
py
|
diff --git a/easy_thumbnails/files.py b/easy_thumbnails/files.py
index <HASH>..<HASH> 100644
--- a/easy_thumbnails/files.py
+++ b/easy_thumbnails/files.py
@@ -444,8 +444,8 @@ class Thumbnailer(File):
except Exception:
pass
self.thumbnail_storage.save(filename, thumbnail)
- signals.thumbnail_created.send(sender=thumbnail)
self.get_thumbnail_cache(thumbnail.name, create=True, update=True)
+ signals.thumbnail_created.send(sender=thumbnail)
def thumbnail_exists(self, thumbnail_name):
"""
|
Cache the thumbnail info before calling the signal
|
py
|
diff --git a/salt/modules/rbenv.py b/salt/modules/rbenv.py
index <HASH>..<HASH> 100644
--- a/salt/modules/rbenv.py
+++ b/salt/modules/rbenv.py
@@ -245,7 +245,7 @@ def install_ruby(ruby, runas=None):
ret = {}
ret = _rbenv_exec(['install', ruby], env=env, runas=runas, ret=ret)
- if ret['retcode'] == 0:
+ if ret is not False and ret['retcode'] == 0:
rehash(runas=runas)
return ret['stderr']
else:
|
Fix TypeError during rbenv ruby installation when rbenv is not found
|
py
|
diff --git a/salt/modules/aptpkg.py b/salt/modules/aptpkg.py
index <HASH>..<HASH> 100644
--- a/salt/modules/aptpkg.py
+++ b/salt/modules/aptpkg.py
@@ -1415,7 +1415,8 @@ def mod_repo(repo, saltenv='base', **kwargs):
setattr(mod_source, key, kwargs[key])
sources.save()
# on changes, explicitly refresh
- refresh_db()
+ if kwargs.get('refresh_db', True):
+ refresh_db()
return {
repo: {
'architectures': getattr(mod_source, 'architectures', []),
|
Add missing conditional for refresh_db in aptpkg.mod_repo
|
py
|
diff --git a/conftest.py b/conftest.py
index <HASH>..<HASH> 100644
--- a/conftest.py
+++ b/conftest.py
@@ -6,6 +6,8 @@ import sys
collect_ignore = []
if sys.version_info[0] > 2:
collect_ignore.append("tests/contrib/flask")
+ if sys.version_info[1] == 3:
+ collect_ignore.append("tests/handlers/logbook")
INSTALLED_APPS = [
|
logbook doesn't work for python <I> yet, ignore their test
|
py
|
diff --git a/test/testbasics.py b/test/testbasics.py
index <HASH>..<HASH> 100644
--- a/test/testbasics.py
+++ b/test/testbasics.py
@@ -312,11 +312,11 @@ class TestBasics(unittest.TestCase):
p = pipe2py.compile.parse_and_build_pipe(self.context, pipe_def)
#todo: check the data! e.g. pubdate etc.
- count = 0
+ creators = set()
for i in p:
- count += 1
-
- self.assertTrue(count == 1)
+ if i.get('dc:creator') in creators:
+ self.fail()
+ creators.add(i.get('dc:creator'))
def test_describe_input(self):
"""Loads a pipeline but just gets the input requirements
|
Improve unique test to handle changing source data
|
py
|
diff --git a/lib/python/dxpy/bindings/__init__.py b/lib/python/dxpy/bindings/__init__.py
index <HASH>..<HASH> 100644
--- a/lib/python/dxpy/bindings/__init__.py
+++ b/lib/python/dxpy/bindings/__init__.py
@@ -477,4 +477,4 @@ from dxrecord import *
from dxproject import *
from dxjob import *
from dxprogram import *
-#from search import *
+from search import *
|
convenience import - I think this is required for tests to pass, too?
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -45,6 +45,7 @@ install_requires = [
"django-object-actions>=1.1",
"idna>=2.9",
"packaging",
+ "typing-extensions; python_version < '3.8'",
]
package_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "ca")
|
add typing-extensions for older python versions
|
py
|
diff --git a/querybuilder/query.py b/querybuilder/query.py
index <HASH>..<HASH> 100644
--- a/querybuilder/query.py
+++ b/querybuilder/query.py
@@ -211,6 +211,15 @@ class Query(object):
return table_dict
+ def select_fields(self, fields=None):
+ """
+ @return: self
+ """
+ if type(fields) is not list:
+ fields = [fields]
+ self.table['fields'] = fields
+ return self
+
def from_table(self, table=None, fields=['*'], schema=None):
"""
@return: self
@@ -221,6 +230,8 @@ class Query(object):
return self
+ #TODO: parse named arg conditions and convert to string
+ # ex: Account__id__gt=5
def where(self, condition, *args):
"""
@return: self
@@ -246,7 +257,7 @@ class Query(object):
"""
return self.join(table, fields=fields, condition=condition, join_type=join_type, schema=schema)
- def group_by(self, group):
+ def group_by(self, group, *args):
"""
@return: self
"""
@@ -254,6 +265,8 @@ class Query(object):
self.groups.append(group)
elif type(group) is list:
self.groups += group
+ if len(args):
+ self.group += args
return self
def order_by(self, order):
|
* Added method for changing select fields
|
py
|
diff --git a/djangular/forms/angular_model.py b/djangular/forms/angular_model.py
index <HASH>..<HASH> 100644
--- a/djangular/forms/angular_model.py
+++ b/djangular/forms/angular_model.py
@@ -1,4 +1,5 @@
# -*- coding: utf-8 -*-
+from base64 import b64encode
from django.forms.util import ErrorDict
from djangular.forms.angular_base import NgFormBaseMixin
@@ -69,4 +70,5 @@ class NgModelFormMixin(NgFormBaseMixin):
try:
return super(NgModelFormMixin, self).name()
except AttributeError:
- return self.scope_prefix
+ # return a pseudo unique name for this form
+ return b64encode(self.scope_prefix).rstrip('=')
|
Generate a unique string as unset form named, which does not conflict with self.scope_prefix
|
py
|
diff --git a/spinoff/actor/process.py b/spinoff/actor/process.py
index <HASH>..<HASH> 100644
--- a/spinoff/actor/process.py
+++ b/spinoff/actor/process.py
@@ -90,6 +90,7 @@ class Process(Actor):
if self.__pre_start_complete_d:
# dbg("PROCESS: first get")
self.__pre_start_complete_d.callback(None)
+ self.__pre_start_complete_d = None
if self.__queue:
try:
|
Fixed a bug in Process when immediately after the first .get(), there is an exception
|
py
|
diff --git a/pysra/output.py b/pysra/output.py
index <HASH>..<HASH> 100644
--- a/pysra/output.py
+++ b/pysra/output.py
@@ -237,8 +237,8 @@ class Output(object):
def calc_stats(self, as_dataframe=False):
ln_values = np.log(self.values)
- median = np.exp(np.mean(ln_values, axis=1))
- ln_std = np.std(ln_values, axis=1)
+ median = np.exp(np.nanmean(ln_values, axis=1))
+ ln_std = np.nanstd(ln_values, axis=1)
stats = {"ref": self.refs, "median": median, "ln_std": ln_std}
if as_dataframe and pd:
|
Fix:calculate stats with np.nanmean...
|
py
|
diff --git a/tests/test_bukuDb.py b/tests/test_bukuDb.py
index <HASH>..<HASH> 100644
--- a/tests/test_bukuDb.py
+++ b/tests/test_bukuDb.py
@@ -64,7 +64,7 @@ def setup():
os.remove(TEST_TEMP_DBFILE_PATH)
-class PrettySafeLoader(yaml.SafeLoader):
+class PrettySafeLoader(yaml.SafeLoader): # pylint: disable=too-many-ancestors
def construct_python_tuple(self, node):
return tuple(self.construct_sequence(node))
|
fix: test: ignore too many ancestor error
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -4,7 +4,7 @@ from setuptools import setup
setup(
name='scikit-gof',
- version='0.0.1',
+ version='0.0.2',
packages=('skgof',),
install_requires=('numpy>=1.10', 'scipy'),
tests_require=('pytest', 'pytest-flake8', 'pytest-isort', 'pytest-readme',
|
Released <I>, with just some minor test tweaks and comment changes.
|
py
|
diff --git a/nonebot/message.py b/nonebot/message.py
index <HASH>..<HASH> 100644
--- a/nonebot/message.py
+++ b/nonebot/message.py
@@ -80,16 +80,18 @@ async def handle_message(bot: NoneBot, event: CQEvent) -> None:
def _check_at_me(bot: NoneBot, event: CQEvent) -> None:
+ def is_at_me(seg):
+ return seg.type == 'at' and seg.data['qq'] == event.self_id
+
if event.detail_type == 'private':
event['to_me'] = True
else:
# group or discuss
event['to_me'] = False
- at_me_seg = MessageSegment.at(event.self_id)
# check the first segment
first_msg_seg = event.message[0]
- if first_msg_seg == at_me_seg:
+ if is_at_me(first_msg_seg):
event['to_me'] = True
del event.message[0]
@@ -103,7 +105,7 @@ def _check_at_me(bot: NoneBot, event: CQEvent) -> None:
i -= 1
last_msg_seg = event.message[i]
- if last_msg_seg == at_me_seg:
+ if is_at_me(last_msg_seg):
event['to_me'] = True
del event.message[i:]
|
fix check_at_me on node-onebot
|
py
|
diff --git a/sslyze/server_connectivity.py b/sslyze/server_connectivity.py
index <HASH>..<HASH> 100644
--- a/sslyze/server_connectivity.py
+++ b/sslyze/server_connectivity.py
@@ -115,14 +115,9 @@ class ServerConnectivityInfo(object):
self.ip_address = ip_address
if not self.ip_address:
try:
- addr_infos = socket.getaddrinfo(self.hostname, self.port, socket.AF_UNSPEC, socket.IPPROTO_IP)
- family, socktype, proto, canonname, sockaddr = addr_infos[0]
- except (socket.gaierror, IndexError):
+ self.ip_address = socket.gethostbyname(self.hostname)
+ except socket.gaierror:
raise ServerConnectivityError(self.CONNECTIVITY_ERROR_NAME_NOT_RESOLVED.format(hostname=self.hostname))
- else:
- # Works for both IPv4 and IPv6
- self.ip_address = sockaddr[0]
- self.port = sockaddr[1]
# Use the hostname as the default SNI
self.tls_server_name_indication = tls_server_name_indication if tls_server_name_indication else self.hostname
|
Revert pull request #<I> This reverts commit <I>ffd7c<I>e3db<I>fc3d<I>fb4a<I>f6, reversing changes made to <I>ec8d9f<I>c4b1a<I>e<I>c<I>c<I>bb5fb. # Conflicts: # sslyze/server_connectivity.py
|
py
|
diff --git a/fluent_contents/plugins/oembeditem/content_plugins.py b/fluent_contents/plugins/oembeditem/content_plugins.py
index <HASH>..<HASH> 100644
--- a/fluent_contents/plugins/oembeditem/content_plugins.py
+++ b/fluent_contents/plugins/oembeditem/content_plugins.py
@@ -5,6 +5,9 @@ from django.utils.translation import ugettext_lazy as _
from fluent_contents.extensions import ContentPlugin, plugin_pool
from fluent_contents.plugins.oembeditem.forms import OEmbedItemForm
from fluent_contents.plugins.oembeditem.models import OEmbedItem
+import re
+
+re_safe = re.compile(r'[^\w_-]')
@plugin_pool.register
@@ -26,4 +29,8 @@ class OEmbedPlugin(ContentPlugin):
"""
Allow to style the item based on the type.
"""
- return ["fluent_contents/plugins/oembed/{type}.html".format(type=instance.type or 'default'), self.render_template]
+ safe_filename = re_safe.sub('', instance.type or 'default')
+ return [
+ "fluent_contents/plugins/oembed/{type}.html".format(type=safe_filename),
+ self.render_template
+ ]
|
Make sure the OEmbed type can never be used to control filenames. Minor risk, as it's still a template path, but better be safe then sorry.
|
py
|
diff --git a/anyconfig/backend/tests/xml.py b/anyconfig/backend/tests/xml.py
index <HASH>..<HASH> 100644
--- a/anyconfig/backend/tests/xml.py
+++ b/anyconfig/backend/tests/xml.py
@@ -128,6 +128,14 @@ class Test_00(unittest.TestCase):
self.assertTrue(dicts_equal(dic, {"a": {"x": "X", "y": "Y"}}))
self.assertTrue(not subdic)
+ def test_46__process_children_elems__w_merge_attrs(self):
+ elem = TT.ET.XML("<a z='Z'><x>X</x><y>Y</y></a>")
+ dic = {"a": {"@attrs": {"z": "Z"}}}
+ subdic = dic["a"]["@attrs"]
+ TT._process_children_elems(elem, dic, subdic, merge_attrs=True)
+ self.assertTrue(dicts_equal(dic, {"a": {"x": "X", "y": "Y",
+ "z": "Z"}}), dic)
+
def _xml_to_container(snippet, **opts):
return TT.elem_to_container(TT.ET.XML(snippet), to_container=dict, **opts)
|
fix: add missing test case for merge_attrs support in .backend.xml._process_children_elems
|
py
|
diff --git a/tethne/model/corpus/mallet.py b/tethne/model/corpus/mallet.py
index <HASH>..<HASH> 100644
--- a/tethne/model/corpus/mallet.py
+++ b/tethne/model/corpus/mallet.py
@@ -124,7 +124,7 @@ class LDAModel(Model):
def __init__(self, *args, **kwargs):
- self.mallet_bin = os.path.join(mallet_path, "bin", "mallet")
+ self.mallet_bin = os.path.join(self.mallet_path, "bin", "mallet")
if platform.system() == 'Windows':
self.mallet_bin += '.bat'
|
troubleshooting tempfile issues on Windows
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100755
--- a/setup.py
+++ b/setup.py
@@ -45,7 +45,7 @@ except (IOError, ImportError):
long_description = f.read()
-version = '0.1.2'
+version = '0.1.3'
setup(name="perceval-mozilla",
|
Increase version package to <I>
|
py
|
diff --git a/HARK/ConsumptionSaving/ConsRiskyAssetModel.py b/HARK/ConsumptionSaving/ConsRiskyAssetModel.py
index <HASH>..<HASH> 100644
--- a/HARK/ConsumptionSaving/ConsRiskyAssetModel.py
+++ b/HARK/ConsumptionSaving/ConsRiskyAssetModel.py
@@ -899,8 +899,10 @@ class RiskyContribConsumerType(RiskyAssetConsumerType):
-------
None
'''
+
IndShockConsumerType.simBirth(self,which_agents)
self.ShareNow[which_agents] = 0.
+ self.nNrmTildeNow[which_agents] = 0.
self.AdjustNow[which_agents] = False
|
Set nNrmTildeNow = 0 at birth
|
py
|
diff --git a/buildozer/targets/android.py b/buildozer/targets/android.py
index <HASH>..<HASH> 100644
--- a/buildozer/targets/android.py
+++ b/buildozer/targets/android.py
@@ -377,13 +377,12 @@ class TargetAndroid(Target):
# add presplash
presplash = config.getdefault('app', 'presplash.filename', '')
if presplash:
- build_cmd += ' --presplash {}'.format(join(self.buildozer.app_dir,
- presplash))
+ build_cmd += ' --presplash {}'.format(join(self.buildozer.app_dir, '..', '..', '..', presplash))
# add icon
icon = config.getdefault('app', 'icon.filename', '')
if icon:
- build_cmd += ' --icon {}'.format(join(self.buildozer.app_dir, icon))
+ build_cmd += ' --icon {}'.format(join(self.buildozer.app_dir, '..', '..', '..', icon))
# build only in debug right now.
if self.build_mode == 'debug':
|
presplash.filename and icon.filename only worked if source.dir was . For any other value of source.dir presplash and icon were broken.
|
py
|
diff --git a/djstripe/templatetags/djstripe_tags.py b/djstripe/templatetags/djstripe_tags.py
index <HASH>..<HASH> 100644
--- a/djstripe/templatetags/djstripe_tags.py
+++ b/djstripe/templatetags/djstripe_tags.py
@@ -2,7 +2,7 @@
from __future__ import division
from django.template import Library
-
+from django.conf import settings
register = Library()
@@ -22,3 +22,22 @@ def djdiv(value, arg):
except Exception:
return ''
division.is_safe = False
+
+
+@register.filter(name='djstripe_plan_level')
+def djstripe_plan_level(name):
+ """
+ Add support to levels over plans, then you can have different kind of plans with the level same access.
+
+ Use: {% <plan_name>|djstripe_plan_level %}
+
+ Custom settings setup need it, please see the documentation for details.
+ """
+ level = -1
+ hierarchy_plans = settings.DJSTRIPE_HIERARCHY_PLANS
+
+ for config_level in hierarchy_plans.values():
+ if name in config_level["plans"]:
+ level = config_level["level"]
+
+ return level
\ No newline at end of file
|
[issue <I>] support membership levels
|
py
|
diff --git a/ELiDE/app.py b/ELiDE/app.py
index <HASH>..<HASH> 100644
--- a/ELiDE/app.py
+++ b/ELiDE/app.py
@@ -368,8 +368,8 @@ class ELiDELayout(FloatLayout):
self.ids.board._trigger_update()
def set_tick(self, t):
- """``self.tick = t``"""
- self.tick = t
+ """``self.tick = int(t)``"""
+ self.tick = int(t)
def advance(self):
"""Resolve one rule and store the results in a list at
|
let the user set the tick by typing it
|
py
|
diff --git a/docs/conf.py b/docs/conf.py
index <HASH>..<HASH> 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -52,7 +52,7 @@ copyright = u'2013, Brian Hicks'
# The short X.Y version.
version = '0.2.0'
# The full version, including alpha/beta/rc tags.
-release = '0.2.0dev'
+release = '0.2.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
|
remove pre from release in docs
|
py
|
diff --git a/vault12factor/__init__.py b/vault12factor/__init__.py
index <HASH>..<HASH> 100644
--- a/vault12factor/__init__.py
+++ b/vault12factor/__init__.py
@@ -274,6 +274,7 @@ def monkeypatch_django() -> None:
self.connection = None
else:
if test:
+ self._12fv_retries = 0
test.close()
# connection.cursor creates a transaction on postgresql so we commit that transaction here
if hasattr(self.connection, "commit"):
|
reset the retry counter when we successfully reconnect with Vault credentials
|
py
|
diff --git a/lattice_mc/simulation.py b/lattice_mc/simulation.py
index <HASH>..<HASH> 100644
--- a/lattice_mc/simulation.py
+++ b/lattice_mc/simulation.py
@@ -41,9 +41,12 @@ class Simulation:
self.lattice.set_site_energies( site_energies )
def run( self, for_time = None ):
- assert( self.lattice )
- assert( self.atoms )
- assert( self.number_of_jumps )
+ if not self.lattice:
+ raise AttributeError('Running a simulation needs the lattice to be initialised')
+ if not self.atoms:
+ raise AttributeError('Running a simulation needs the atoms to be initialised')
+ if not ( self.number_of_jumps or for_time):
+ raise AttributeError('Running a simulation needs number_of_jumps or for_time to be set')
if self.number_of_equilibration_jumps > 0:
for step in range( self.number_of_equilibration_jumps ):
self.lattice.jump()
|
More explicit errors when calling Simulation.run() on a simulation object that has not been set up yet
|
py
|
diff --git a/khard/khard.py b/khard/khard.py
index <HASH>..<HASH> 100644
--- a/khard/khard.py
+++ b/khard/khard.py
@@ -1451,9 +1451,18 @@ def parse_args(argv):
remainder.insert(0, config.default_action)
logging.debug("updated remainder={}".format(remainder))
+ # Save the last option that needs to be carried from the first parser run
+ # to the second.
+ skip = args.skip_unparsable
+
# Parse the remainder of the command line. All options from the previous
# run have already been processed and are not needed any more.
args = parser.parse_args(remainder)
+
+ # Restore settings that are left from the first parser run.
+ args.skip_unparsable = skip
+
+ # Finish up with a debug report and return the result.
logging.debug("second args={}".format(args))
return args
|
Correctly set skip_unparsable from the command line The value was overwritten by the second parser run.
|
py
|
diff --git a/pyxray/parser/jeol.py b/pyxray/parser/jeol.py
index <HASH>..<HASH> 100644
--- a/pyxray/parser/jeol.py
+++ b/pyxray/parser/jeol.py
@@ -17,6 +17,7 @@ from pyxray.descriptor import \
from pyxray.property import \
(XrayTransitionEnergy, XrayTransitionSetEnergy,
XrayTransitionRelativeWeight, XrayTransitionSetRelativeWeight)
+from pyxray.util import wavelength_to_energy_eV
# Globals and constants variables.
@@ -143,13 +144,10 @@ class JEOLTransitionParser(_Parser):
probability = line[20:23].strip()
if not probability: # skip transition with no probability
continue
-
probability = float(probability) / 100.0
- if probability > 1: # skip sum of transitions
- continue
wavelength = float(line[26:35])
- energy = (4.13566733e-15 * 299792458) / (wavelength * 1e-10)
+ energy = wavelength_to_energy_eV(wavelength * 1e-10)
if siegbahn in _TRANSITION_LOOKUP:
subshells = list(_TRANSITION_LOOKUP[siegbahn])
|
Fix JEOL parser for x-ray transition set.
|
py
|
diff --git a/testing/logging/test_formatter.py b/testing/logging/test_formatter.py
index <HASH>..<HASH> 100644
--- a/testing/logging/test_formatter.py
+++ b/testing/logging/test_formatter.py
@@ -18,10 +18,6 @@ def test_coloredlogformatter() -> None:
exc_info=None,
)
- class ColorConfig:
- class option:
- pass
-
tw = TerminalWriter()
tw.hasmarkup = True
formatter = ColoredLevelFormatter(tw, logfmt)
@@ -49,10 +45,6 @@ def test_coloredlogformatter_with_width_precision() -> None:
exc_info=None,
)
- class ColorConfig:
- class option:
- pass
-
tw = TerminalWriter()
tw.hasmarkup = True
formatter = ColoredLevelFormatter(tw, logfmt)
|
cleanup tests by removal of unused code elements
|
py
|
diff --git a/spyder/plugins/editor/lsp/providers/document.py b/spyder/plugins/editor/lsp/providers/document.py
index <HASH>..<HASH> 100644
--- a/spyder/plugins/editor/lsp/providers/document.py
+++ b/spyder/plugins/editor/lsp/providers/document.py
@@ -207,7 +207,7 @@ class DocumentProvider:
@send_request(method=LSPRequestTypes.DOCUMENT_DID_CLOSE,
requires_response=False)
def document_did_close(self, params):
- file_signal = params['signal']
+ codeeditor = params['codeeditor']
debug_print('[{0}] File: {1}'.format(
LSPRequestTypes.DOCUMENT_DID_CLOSE, params['file']))
filename = path_as_uri(params['file'])
@@ -222,8 +222,8 @@ class DocumentProvider:
else:
editors = self.watched_files[filename]
idx = -1
- for i, signal in enumerate(editors):
- if id(file_signal) == id(signal):
+ for i, editor in enumerate(editors):
+ if id(codeeditor) == id(editor):
idx = i
break
if idx > 0:
|
LSP: Fix error when closing file
|
py
|
diff --git a/glad/lang/common/generator.py b/glad/lang/common/generator.py
index <HASH>..<HASH> 100644
--- a/glad/lang/common/generator.py
+++ b/glad/lang/common/generator.py
@@ -99,8 +99,11 @@ class Generator(object):
)
if self.extension_names is None:
- self. extension_names = list(chain.from_iterable(self.spec.extensions[a]
- for a in self.api))
+ self.extension_names = list(chain.from_iterable(self.spec.extensions[a]
+ for a in self.api))
+
+ # sort and eliminate duplicates
+ self.extension_names = list(sorted(set(self.extension_names)))
e = list(chain.from_iterable(self.spec.extensions[a] for a in self.api))
for ext in self.extension_names:
@@ -149,7 +152,7 @@ class Generator(object):
specification = self.spec.NAME
apis = ', '.join('{}={}'.format(api, '.'.join(map(str, version))) for api, version in self.api.items())
profile = getattr(self.spec, 'profile', '-')
- extensions = ', '.join(self.extension_names)
+ extensions = ',\n '.join(self.extension_names)
online = self.online
if len(online) > 2000:
online = 'Too many extensions'
|
generator: Sort and remove duplicate extensions, format extensions in header, based on #<I> - thanks @rajder.
|
py
|
diff --git a/tests/tests.py b/tests/tests.py
index <HASH>..<HASH> 100644
--- a/tests/tests.py
+++ b/tests/tests.py
@@ -60,6 +60,23 @@ class AuthTestCase(TestCase):
self.client.post(url, {}, format='json')
self.assertEqual(AuthToken.objects.count(), 0)
+ def test_logout_all_deletes_only_targets_keys(self):
+ self.assertEqual(AuthToken.objects.count(), 0)
+ username, password = 'root', 'toor'
+ user = User.objects.create_user(
+ username, 'root@localhost.com', password)
+ user2 = User.objects.create_user(
+ 'user2', 'user2@localhost.com', password)
+ for _ in range(10):
+ token = AuthToken.objects.create(user=user)
+ token2 = AuthToken.objects.create(user=user2)
+ self.assertEqual(AuthToken.objects.count(), 20)
+
+ url = reverse('knox_logoutall')
+ self.client.credentials(HTTP_AUTHORIZATION=('Token %s' % token))
+ self.client.post(url, {}, format='json')
+ self.assertEqual(AuthToken.objects.count(), 10, 'tokens from other users should not be affected by logout all')
+
def test_expired_tokens_login_fails(self):
self.assertEqual(AuthToken.objects.count(), 0)
username, password = 'root', 'toor'
|
Add another test to ensure logging out doesn't affect other users.
|
py
|
diff --git a/airflow/cli/cli_parser.py b/airflow/cli/cli_parser.py
index <HASH>..<HASH> 100644
--- a/airflow/cli/cli_parser.py
+++ b/airflow/cli/cli_parser.py
@@ -991,7 +991,7 @@ DB_COMMANDS = (
ActionCommand(
name="check-migrations",
help="Check if migration have finished (or continually check until timeout)",
- func=lazy_load_command('airflow.cli.commands.db_command.wait_for_migrations'),
+ func=lazy_load_command('airflow.cli.commands.db_command.check_migrations'),
args=(ARG_MIGRATION_TIMEOUT,),
),
ActionCommand(
|
Fixes check_migrations commands (#<I>) Fixes bug where old "wait_for_migrations" function name is left in the lazy loader of the cli_parser
|
py
|
diff --git a/pyairtable/__init__.py b/pyairtable/__init__.py
index <HASH>..<HASH> 100644
--- a/pyairtable/__init__.py
+++ b/pyairtable/__init__.py
@@ -1,3 +1,3 @@
-__version__ = "1.0.0.rc1"
+__version__ = "1.0.0.rc2"
from .api import Api, Base, Table # noqa
|
Publish version <I>.rc2
|
py
|
diff --git a/wechatpy/utils.py b/wechatpy/utils.py
index <HASH>..<HASH> 100644
--- a/wechatpy/utils.py
+++ b/wechatpy/utils.py
@@ -103,7 +103,7 @@ def to_binary(value, encoding='utf-8'):
def timezone(zone):
- """Try get timezone using pytz or python-dateutil
+ """Try to get timezone using pytz or python-dateutil
:param zone: timezone str
:return: timezone tzinfo or None
|
Fix grammer in comment. [ci skip]
|
py
|
diff --git a/ui/timescales.py b/ui/timescales.py
index <HASH>..<HASH> 100644
--- a/ui/timescales.py
+++ b/ui/timescales.py
@@ -176,7 +176,7 @@ class ImpliedTimescales(object):
else:
all_ts = False
maxnits = min(maxnits, len(ts))
- self._its_samples[i,:maxnits,k] = ts
+ self._its_samples[i,:maxnits,k] = ts[:maxnits]
else:
any_ts = False
maxnlags = i
|
[timescales]: corrected indexing error
|
py
|
diff --git a/mutagen/__init__.py b/mutagen/__init__.py
index <HASH>..<HASH> 100644
--- a/mutagen/__init__.py
+++ b/mutagen/__init__.py
@@ -140,8 +140,9 @@ def File(filename, options=None):
from mutagen.oggvorbis import OggVorbis
from mutagen.trueaudio import TrueAudio
from mutagen.wavpack import WavPack
+ from mutagen.m4a import M4A
options = [MP3, TrueAudio, OggTheora, OggSpeex, OggVorbis, OggFLAC,
- FLAC, APEv2File, ID3FileType, WavPack]
+ FLAC, APEv2File, M4A, ID3FileType, WavPack]
if not options:
return None
|
mutagen.File: Add M4A scoring.
|
py
|
diff --git a/mapbox_vector_tile/polygon.py b/mapbox_vector_tile/polygon.py
index <HASH>..<HASH> 100644
--- a/mapbox_vector_tile/polygon.py
+++ b/mapbox_vector_tile/polygon.py
@@ -128,6 +128,10 @@ def _polytree_node_to_shapely(node):
children = []
else:
+ # check expectations: this branch gets executed if this node is not a
+ # hole, and has no contour. in that situation we'd expect that it has
+ # no children, as it would not be possible to subtract children from
+ # an empty outer contour.
assert len(children) == 0
return (polygons, children)
|
Comment to make the reason for the else branch and assertion clearer.
|
py
|
diff --git a/documenteer/stackdocs/doxygen.py b/documenteer/stackdocs/doxygen.py
index <HASH>..<HASH> 100644
--- a/documenteer/stackdocs/doxygen.py
+++ b/documenteer/stackdocs/doxygen.py
@@ -91,7 +91,7 @@ class DoxygenConfiguration:
'doxygen_tag': 'RECURSIVE'
}
)
- """Whether or not directories listed in `inputs` should be searched
+ """Whether or not directories listed in ``inputs`` should be searched
recursively.
"""
@@ -101,7 +101,8 @@ class DoxygenConfiguration:
'doxygen_tag': 'FILE_PATTERNS'
}
)
- """File extensions to include from the directories described by `inputs`.
+ """File extensions to include from the directories described by
+ ``inputs``.
"""
exclude_patterns: List[str] = field(
|
Inputs is not available for cross referencing?
|
py
|
diff --git a/openquake/engine2.py b/openquake/engine2.py
index <HASH>..<HASH> 100644
--- a/openquake/engine2.py
+++ b/openquake/engine2.py
@@ -20,6 +20,7 @@ import ConfigParser
import getpass
import md5
import os
+import sys
from django.core import exceptions
from django.db import close_connection
@@ -28,6 +29,7 @@ from openquake import kvs
from openquake import logs
from openquake.db import models
from openquake.supervising import supervisor
+from openquake.utils import monitor
def prepare_job(user_name="openquake", log_level='progress'):
@@ -344,6 +346,13 @@ def _switch_to_job_phase(job, status):
job.save()
models.JobPhaseStats.objects.create(oq_job=job, job_status=status)
logs.log_progress("%s" % status, 1)
+ if status == "executing":
+ # Record the compute nodes that were available at the beginning of the
+ # execute phase so we can detect failed nodes later.
+ failed_nodes = monitor.count_failed_nodes(job)
+ if failed_nodes == -1:
+ logs.LOG.critical("No live compute nodes, aborting calculation")
+ sys.exit(1)
def _do_run_hazard(job, exports):
|
capture worker node counts at the start of the execute phase
|
py
|
diff --git a/tests/conftest.py b/tests/conftest.py
index <HASH>..<HASH> 100755
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -28,9 +28,10 @@ _SERVER_HANDLER = None
# Functions ===================================================================
-def circuit_breaker_http_retry():
- for i in range(10):
+def circuit_breaker_http_retry(max_retry=10):
+ for i in range(max_retry):
try:
+ print "Connecting to server .. %d/%d" % (i + 1, max_retry)
return requests.get(URL).raise_for_status()
except Exception:
time.sleep(1)
@@ -51,9 +52,7 @@ def _create_alt_settings(path):
}
with tempfile.NamedTemporaryFile(delete=False) as f:
- f.write(
- json.dumps(alt_settings)
- )
+ f.write(json.dumps(alt_settings))
return f.name
|
#<I>: circuit_breaker_http_retry() now has improved text output in console.
|
py
|
diff --git a/picotui/widgets.py b/picotui/widgets.py
index <HASH>..<HASH> 100644
--- a/picotui/widgets.py
+++ b/picotui/widgets.py
@@ -246,13 +246,13 @@ class WRadioButton(ItemSelWidget):
self.attr_color(C_B_BLUE, None)
for t in self.items:
self.goto(self.x, self.y + i)
- self.wr("(*) " if self.selected == i else "( ) ")
+ self.wr("(*) " if self.choice == i else "( ) ")
self.wr(t)
i += 1
self.attr_reset()
def handle_mouse(self, x, y):
- self.selected = y - self.y
+ self.choice = y - self.y
self.redraw()
self.signal("changed")
|
widgets: WRadioButton: Comply with ChoiceWidget interface.
|
py
|
diff --git a/inspire_utils/name.py b/inspire_utils/name.py
index <HASH>..<HASH> 100644
--- a/inspire_utils/name.py
+++ b/inspire_utils/name.py
@@ -83,6 +83,9 @@ class ParsedName(object):
self._parsed_name.last = self._parsed_name.first + self._parsed_name.middle + self._parsed_name.suffix
self._parsed_name.first = ''
+ def __iter__(self):
+ return self._parsed_name
+
def __len__(self):
return len(self._parsed_name)
|
name: add support for iteration in ParsedName
|
py
|
diff --git a/py3-test/tests.py b/py3-test/tests.py
index <HASH>..<HASH> 100644
--- a/py3-test/tests.py
+++ b/py3-test/tests.py
@@ -11,22 +11,24 @@ def test_async_emit():
loop = get_event_loop()
ee = EventEmitter(loop=loop)
- future = Future()
+ should_call = Future(loop=loop)
@ee.on('event')
async def event_handler():
- future.set_result(True)
+ should_call.set_result(True)
async def create_timeout(loop=loop):
await sleep(1, loop=loop)
- future.cancel()
+ if not should_call.done():
+ raise Exception('should_call timed out!')
+ return should_call.cancel()
timeout = create_timeout(loop=loop)
- @future.add_done_callback
+ @should_call.add_done_callback
def _done(result):
nt.assert_true(result)
ee.emit('event')
- loop.run_until_complete(gather(future, timeout))
+ loop.run_until_complete(gather(should_call, timeout))
|
Rename should_call future in test, raise exception on timeout
|
py
|
diff --git a/pymc/database/ram.py b/pymc/database/ram.py
index <HASH>..<HASH> 100644
--- a/pymc/database/ram.py
+++ b/pymc/database/ram.py
@@ -116,7 +116,6 @@ class Trace(base.Trace):
- chain (int): The index of the chain to fetch. If None, return all chains.
- slicing: A slice, overriding burn and thin assignement.
"""
- warnings.warn('Use Sampler.trace method instead.', DeprecationWarning)
if slicing is None:
slicing = slice(burn, None, thin)
if chain is not None:
|
Removed deprecation warning in ram backend
|
py
|
diff --git a/simple_history/admin.py b/simple_history/admin.py
index <HASH>..<HASH> 100644
--- a/simple_history/admin.py
+++ b/simple_history/admin.py
@@ -69,9 +69,6 @@ class SimpleHistoryAdmin(admin.ModelAdmin):
if not self.has_change_permission(request, obj):
raise PermissionDenied
- if request.method == 'POST' and '_saveas_new' in request.POST:
- return self.add_view(request, form_url='../add/')
-
formsets = []
ModelForm = self.get_form(request, obj)
if request.method == 'POST':
|
Remove unused code check for "Save As" button
|
py
|
diff --git a/bigfloat/pympfr.py b/bigfloat/pympfr.py
index <HASH>..<HASH> 100644
--- a/bigfloat/pympfr.py
+++ b/bigfloat/pympfr.py
@@ -54,6 +54,18 @@ if not _mpfr_library_name:
mpfr = ctypes.cdll.LoadLibrary(_mpfr_library_name)
+# get library version, and check that it's new enough
+mpfr.mpfr_get_version.argtypes = []
+mpfr.mpfr_get_version.restype = ctypes.c_char_p
+
+_mpfr_version = mpfr.mpfr_get_version()
+MPFR_VERSION_MAJOR, MPFR_VERSION_MINOR, _ = _mpfr_version.split('.', 2)
+MPFR_VERSION_MAJOR = int(MPFR_VERSION_MAJOR)
+MPFR_VERSION_MINOR = int(MPFR_VERSION_MINOR)
+
+if (MPFR_VERSION_MAJOR, MPFR_VERSION_MINOR) < (2, 3):
+ raise RuntimeError("This module requires MPFR version 2.3 or later; "
+ "found version %s" % _mpfr_version)
################################################################################
# Platform dependent values
|
Add check for version of MPFR library.
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100755
--- a/setup.py
+++ b/setup.py
@@ -4,7 +4,7 @@ from setuptools import setup
setup(
name = 'qless-py',
- version = '0.11.3',
+ version = '0.11.4',
description = 'Redis-based Queue Management',
long_description = '''
Redis-based queue management, with heartbeating, job tracking,
|
Version bump to <I>.
|
py
|
diff --git a/drf_auto_endpoint/utils.py b/drf_auto_endpoint/utils.py
index <HASH>..<HASH> 100644
--- a/drf_auto_endpoint/utils.py
+++ b/drf_auto_endpoint/utils.py
@@ -103,7 +103,7 @@ def get_field_dict(field, serializer, translated_fields=None, fields_annotation=
if model_field:
related_model = model_field.related_model
rv['type'] = settings.WIDGET_MAPPING[model_field.__class__.__name__]
- elif hasattr(field_instance, 'related_model'):
+ elif hasattr(field_instance, 'queryset') and field_instance.queryset is not None:
related_model = field_instance.queryset.model
if model_field and model_field.__class__.__name__ == 'ManyToManyRel':
|
:bug: fix bug introduced in f4a<I>d5
|
py
|
diff --git a/peewee.py b/peewee.py
index <HASH>..<HASH> 100644
--- a/peewee.py
+++ b/peewee.py
@@ -170,6 +170,7 @@ class PostgresqlAdapter(BaseAdapter):
'primary_key': 'SERIAL',
'datetime': 'TIMESTAMP',
'decimal': 'NUMERIC',
+ 'boolean': 'BOOLEAN',
}
def last_insert_id(self, cursor, model):
@@ -1491,9 +1492,7 @@ class BooleanField(IntegerField):
db_field = 'boolean'
def db_value(self, value):
- if value:
- return 1
- return 0
+ return bool(value)
def python_value(self, value):
return bool(value)
|
Use a boolean column type with postgresql backend -- also send boolean value to backend, tested with postgresql and sqlite.
|
py
|
diff --git a/salt/utils/openstack/nova.py b/salt/utils/openstack/nova.py
index <HASH>..<HASH> 100644
--- a/salt/utils/openstack/nova.py
+++ b/salt/utils/openstack/nova.py
@@ -18,6 +18,7 @@ try:
from novaclient.v2 import client
except ImportError:
from novaclient.v1_1 import client
+ from novaclient import client as nclient
from novaclient.shell import OpenStackComputeShell
import novaclient.utils
import novaclient.auth_plugin
@@ -137,7 +138,7 @@ class SaltNova(OpenStackComputeShell):
self.kwargs = kwargs.copy()
if not novaclient.base.Manager._hooks_map:
- self.extensions = self._discover_extensions('1.1')
+ self.extensions = nclient.discover_extensions('1.1')
for extension in self.extensions:
extension.run_hooks('__pre_parse_args__')
self.kwargs['extensions'] = self.extensions
|
use the correct discover_extensions This shouldn't have ever been used, it should have always used the real discover_extension in the main client module. Right now nova is broken with the current novaclient release, which removes _discover_extensions from OpenStackComputeShell
|
py
|
diff --git a/pyemma/_base/logging.py b/pyemma/_base/logging.py
index <HASH>..<HASH> 100644
--- a/pyemma/_base/logging.py
+++ b/pyemma/_base/logging.py
@@ -89,12 +89,3 @@ class Loggable(object):
del d[logger_name]
del Loggable.__refs[logger_id]
return remove_logger
-
- def __getstate__(self):
- # do not pickle the logger instance
- d = dict(self.__dict__)
- try:
- del d['_logger_instance']
- except KeyError:
- pass
- return d
|
remove custom __getstate__ method of Loggable Since we use this protocol for the serializable mixin, we do not want any other class in the mro to win the method resolution race here.
|
py
|
diff --git a/estnltk_core/estnltk_core/base_text.py b/estnltk_core/estnltk_core/base_text.py
index <HASH>..<HASH> 100644
--- a/estnltk_core/estnltk_core/base_text.py
+++ b/estnltk_core/estnltk_core/base_text.py
@@ -42,7 +42,7 @@ class BaseText:
__slots__ = ['text', 'meta', '__dict__', '_shadowed_layers']
def __init__(self, text: str = None) -> None:
- assert text is None or isinstance(text, str), "Text takes string as an argument!"
+ assert text is None or isinstance(text, str), "{} takes string as an argument!".format( self.__class__.__name__ )
# self.text: str
super().__setattr__('text', text)
# self._shadowed_layers: Mapping[str, Layer]
|
Updated BaseText: removed class name hard-coding
|
py
|
diff --git a/semantic_release/settings.py b/semantic_release/settings.py
index <HASH>..<HASH> 100644
--- a/semantic_release/settings.py
+++ b/semantic_release/settings.py
@@ -36,14 +36,14 @@ def _config_from_ini(paths):
parser.read(paths)
flags = {
+ "changelog_capitalize",
"changelog_scope",
"check_build_status",
"commit_version_number",
+ "patch_without_tag",
"remove_dist",
"upload_to_pypi",
"upload_to_release",
- "patch_without_tag",
- "changelog_capitalize",
}
# Iterate through the sections so that default values are applied
|
style(settings): alphabetize boolean settings (#<I>) A few settings were not in alphabetical order.
|
py
|
diff --git a/python/ray/serve/controller.py b/python/ray/serve/controller.py
index <HASH>..<HASH> 100644
--- a/python/ray/serve/controller.py
+++ b/python/ray/serve/controller.py
@@ -181,21 +181,30 @@ class ServeController:
deployment_name, new_deployment_info)
async def run_control_loop(self) -> None:
+ # NOTE(edoakes): we catch all exceptions here and simply log them,
+ # because an unhandled exception would cause the main control loop to
+ # halt, which should *never* happen.
while True:
try:
self.autoscale()
except Exception:
- logger.exception("Exception while autoscaling deployments.")
+ logger.exception("Exception in autoscaling.")
+
async with self.write_lock:
try:
self.http_state.update()
except Exception:
logger.exception("Exception updating HTTP state.")
+
try:
self.deployment_state_manager.update()
except Exception:
logger.exception("Exception updating deployment state.")
- self._put_serve_snapshot()
+
+ try:
+ self._put_serve_snapshot()
+ except Exception:
+ logger.exception("Exception putting serve snapshot.")
await asyncio.sleep(CONTROL_LOOP_PERIOD_S)
def _put_serve_snapshot(self) -> None:
|
[serve] Don't halt main control loop due to exceptions in snapshot logic (#<I>)
|
py
|
diff --git a/openquake/server/dbserver.py b/openquake/server/dbserver.py
index <HASH>..<HASH> 100644
--- a/openquake/server/dbserver.py
+++ b/openquake/server/dbserver.py
@@ -100,7 +100,7 @@ def get_status(address=None):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s = config.dbserver
try:
- err = sock.connect_ex(address or (s.host, s.port))
+ err = sock.connect_ex(address or (s.host, int(s.port)))
finally:
sock.close()
return 'not-running' if err else 'running'
|
Fixed bug with celery [skip CI]
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100755
--- a/setup.py
+++ b/setup.py
@@ -4,8 +4,8 @@ from setuptools import find_packages, setup
import versioneer
install_requires = [
- "astropy>=1.2, <3",
- "lxml>=2.3, <4.0",
+ "astropy>=1.2",
+ "lxml>=2.3",
'iso8601',
'orderedmultidict',
'pytz',
|
Remove "less-than" restrictions on Astropy, LXML. I think I put these in place before I had Travis-CI cron-jobs available. Therefore wanted to avoid future unknowns. Now at least an email gets sent when there's a new release and it breaks something.
|
py
|
diff --git a/tensor2tensor/data_generators/tokenizer.py b/tensor2tensor/data_generators/tokenizer.py
index <HASH>..<HASH> 100644
--- a/tensor2tensor/data_generators/tokenizer.py
+++ b/tensor2tensor/data_generators/tokenizer.py
@@ -101,13 +101,13 @@ def decode(tokens):
Returns:
a unicode string
"""
- ret = u""
token_is_alnum = [t[0] in _ALPHANUMERIC_CHAR_SET for t in tokens]
+ ret = []
for i, token in enumerate(tokens):
if i > 0 and token_is_alnum[i - 1] and token_is_alnum[i]:
- ret += u" "
- ret += token
- return ret
+ ret.append(u" ")
+ ret.append(token)
+ return "".join(ret)
def corpus_token_counts(text_filepattern, corpus_max_lines,
|
Don't repeatedly concatenate strings in a loop. PiperOrigin-RevId: <I>
|
py
|
diff --git a/singularity/build/converter.py b/singularity/build/converter.py
index <HASH>..<HASH> 100644
--- a/singularity/build/converter.py
+++ b/singularity/build/converter.py
@@ -356,7 +356,7 @@ def print_sections(sections,mapping=None):
mapping = get_mapping()
finished_spec = None
- ordering = ['bootstrap',"From","%runscript","%post"]
+ ordering = ['bootstrap',"From","%runscript","%post",'%test']
for section in ordering:
|
modified: singularity/build/converter.py
|
py
|
diff --git a/www/tests/tester.py b/www/tests/tester.py
index <HASH>..<HASH> 100644
--- a/www/tests/tester.py
+++ b/www/tests/tester.py
@@ -118,6 +118,9 @@ class Tester:
def assertIsNone(self, obj, msg=None):
assert obj == None, obj
+ def assertIsNotNone(self, obj, msg=None):
+ assert obj is not None, obj
+
def assertIsNot(self, a, b):
if a is b:
raise AssertionError('%s is %s should be false' %(a,b))
|
Add assertIsNotNone to tester.py
|
py
|
diff --git a/cyphi/subsystem.py b/cyphi/subsystem.py
index <HASH>..<HASH> 100644
--- a/cyphi/subsystem.py
+++ b/cyphi/subsystem.py
@@ -193,7 +193,9 @@ class Subsystem:
# mechanism that are either not in the purview or whose connections
# to this mechanism have not been severed by a subsystem cut.
for node in marginal_inputs:
- conditioned_tpm = marginalize_out(node, conditioned_tpm)
+ conditioned_tpm = (conditioned_tpm.sum(node.index,
+ keepdims=True)
+ / conditioned_tpm.shape[node.index])
# Incorporate this node's CPT into the mechanism's conditional
# joint distribution by taking the product (with singleton
# broadcasting, which spreads the singleton probabilities in the
@@ -295,7 +297,8 @@ class Subsystem:
marginal_inputs = purview_node.inputs & (
(set(self.nodes) - set(mechanism)) | severed_nodes)
for node in marginal_inputs:
- tpm = marginalize_out(node, tpm)
+ tpm = (tpm.sum(node.index, keepdims=True)
+ / tpm.shape[node.index])
# Expand the TPM along the axes corresponding to mechanism
# nodes who's connections to the purview were severed, since
# those will have conditioning indices despite having being
|
Move marginalize-out logic into c/e repertoire to avoid function call
|
py
|
diff --git a/djangular/core/urlresolvers.py b/djangular/core/urlresolvers.py
index <HASH>..<HASH> 100644
--- a/djangular/core/urlresolvers.py
+++ b/djangular/core/urlresolvers.py
@@ -1,4 +1,5 @@
# -*- coding: utf-8 -*-
+from inspect import isclass
from django.utils import six
from django.utils.module_loading import import_by_path
from django.core.urlresolvers import (get_resolver, get_urlconf, get_script_prefix,
@@ -90,7 +91,7 @@ def get_all_remote_methods(resolver=None, ns_prefix=''):
url = reverse(ns_prefix + name)
resmgr = resolve(url)
ViewClass = import_by_path('{0}.{1}'.format(resmgr.func.__module__, resmgr.func.__name__))
- if issubclass(ViewClass, JSONResponseMixin):
+ if isclass(ViewClass) and issubclass(ViewClass, JSONResponseMixin):
result[name] = _get_remote_methods_for(ViewClass, url)
except (NoReverseMatch, ImproperlyConfigured):
pass
|
Fixed: function based views were ignored
|
py
|
diff --git a/bugzilla/rhbugzilla.py b/bugzilla/rhbugzilla.py
index <HASH>..<HASH> 100644
--- a/bugzilla/rhbugzilla.py
+++ b/bugzilla/rhbugzilla.py
@@ -92,29 +92,6 @@ class RHBugzilla(Bugzilla42):
def _getqueryinfo(self):
return self._proxy.bugzilla.getQueryInfo()
- def _get_info(self, product=None):
- '''This is a convenience method that does getqueryinfo, getproducts,
- and (optionally) getcomponents in one big fat multicall. This is a bit
- faster than calling them all separately.
-
- If you're doing interactive stuff you should call this, with the
- appropriate product name, after connecting to Bugzilla. This will
- cache all the info for you and save you an ugly delay later on.'''
- mc = self._multicall()
- mc._getqueryinfo()
- mc._getproducts()
- mc._getbugfields()
- if product:
- mc._getcomponents(product)
- mc._getcomponentsdetails(product)
- r = mc.run()
- (self._querydata, self._querydefaults) = r.pop(0)
- self._products = r.pop(0)
- self._bugfields = r.pop(0)
- if product:
- self._components[product] = r.pop(0)
- self._components_details[product] = r.pop(0)
-
#---- Methods for modifying existing bugs.
# Most of these will probably also be available as Bug methods, e.g.:
|
rhbugzilla: Remove unused _get_info call Much of its benefit is unneeded these days, and its broken with current RHBZ.
|
py
|
diff --git a/speech/google/cloud/speech_v1/helpers.py b/speech/google/cloud/speech_v1/helpers.py
index <HASH>..<HASH> 100644
--- a/speech/google/cloud/speech_v1/helpers.py
+++ b/speech/google/cloud/speech_v1/helpers.py
@@ -55,12 +55,16 @@ class SpeechHelpers(object):
... pass
Args:
- config (:class:`~.types.StreamingRecognitionConfig`): The
- configuration to use for the stream.
- requests (Iterable[:class:`~.types.StreamingRecognizeRequest`]):
- The input objects.
- options (:class:`google.gax.CallOptions`): Overrides the default
- settings for this call, e.g, timeout, retries etc.
+ config (:class:`~.types.StreamingRecognitionConfig`): The
+ configuration to use for the stream.
+ requests (Iterable[:class:`~.types.StreamingRecognizeRequest`]):
+ The input objects.
+ retry (Optional[google.api_core.retry.Retry]): A retry object used
+ to retry requests. If ``None`` is specified, requests will not
+ be retried.
+ timeout (Optional[float]): The amount of time, in seconds, to wait
+ for the request to complete. Note that if ``retry`` is
+ specified, the timeout applies to each individual attempt.
Returns:
Iterable[:class:`~.types.StreamingRecognizeResponse`]
|
Speech: Fix documentation for helpers gapic (#<I>)
|
py
|
diff --git a/example/rcnn/rcnn/io/image.py b/example/rcnn/rcnn/io/image.py
index <HASH>..<HASH> 100644
--- a/example/rcnn/rcnn/io/image.py
+++ b/example/rcnn/rcnn/io/image.py
@@ -37,7 +37,7 @@ def get_image(roidb):
processed_roidb = []
for i in range(num_images):
roi_rec = roidb[i]
- assert os.path.exists(roi_rec['image']), '%s does not exist'.format(roi_rec['image'])
+ assert os.path.exists(roi_rec['image']), '{} does not exist'.format(roi_rec['image'])
im = cv2.imread(roi_rec['image'])
if roidb[i]['flipped']:
im = im[:, ::-1, :]
|
string.format doesn't support %s (#<I>) Change %s to {}
|
py
|
diff --git a/uproot/tree.py b/uproot/tree.py
index <HASH>..<HASH> 100644
--- a/uproot/tree.py
+++ b/uproot/tree.py
@@ -123,7 +123,13 @@ def iterate(path, treepath, branches=None, entrysteps=float("inf"), outputtype=d
awkward.numpy.add(index, globalentrystart, out=index)
elif type(arrays.index).__name__ == "RangeIndex":
- arrays.index = type(arrays.index)(arrays.index.start + globalentrystart, arrays.index.stop + globalentrystart)
+ if hasattr(arrays.index, "start") and hasattr(arrays.index, "stop"):
+ indexstart = arrays.index.start # pandas>=0.25.0
+ indexstop = arrays.index.stop
+ else:
+ indexstart = arrays.index._start # pandas<0.25.0
+ indexstop = arrays.index._stop
+ arrays.index = type(arrays.index)(indexstart + globalentrystart, indexstop + globalentrystart)
else:
if hasattr(arrays.index, "array"):
|
Support RangeIndex before and after Pandas version <I>.
|
py
|
diff --git a/quantecon/markov/mdp.py b/quantecon/markov/mdp.py
index <HASH>..<HASH> 100644
--- a/quantecon/markov/mdp.py
+++ b/quantecon/markov/mdp.py
@@ -596,8 +596,11 @@ class MDP(object):
Solution method.
v_init : array_like(float, ndim=1), optional(default=None)
- Initial value function, of length n. If None, set v_init(s)
- = max_a r(s, a).
+ Initial value function, of length n. If None, `v_init` is
+ set such that v_init(s) = max_a r(s, a) for value iteration
+ and policy iteration; for modified policy iteration,
+ v_init(s) = min_(s', a) r(s', a)/(1 - beta) to guarantee
+ convergence.
epsilon : scalar(float), optional(default=None)
Value for epsilon-optimality. If None, the value stored in
@@ -731,7 +734,7 @@ class MDP(object):
v = np.empty(self.num_states)
if v_init is None:
- self.s_wise_max(self.R, out=v)
+ v[:] = self.R[self.R > -np.inf].min() / (1 - self.beta)
else:
v[:] = v_init
|
For mpi: v^0 set to min_(s, a) r(s, a) / (1-beta) to guarantee convergence
|
py
|
diff --git a/tests/test_main.py b/tests/test_main.py
index <HASH>..<HASH> 100644
--- a/tests/test_main.py
+++ b/tests/test_main.py
@@ -21,6 +21,13 @@ def assert_str_equalish(exp, act):
act = str(act).strip()
assert_equal(exp, act)
+def assert_startswith(s, prefix):
+ s = str(s)
+ prefix = str(prefix)
+ if not s.startswith(prefix):
+ raise AssertionError('"{0}" does not start with "{1}"'
+ .format(s.encode('string_escape'), prefix))
+
class BetterAssertRaisesMixin(object):
def assertRaises2(self, exc_type, func, *args, **kwargs):
@@ -108,7 +115,7 @@ class TestMain(TestCase, BetterAssertRaisesMixin):
_, err = self.run_scuba(['-v'])
- assert_true(err.startswith('scuba '))
+ assert_startswith(err, 'scuba')
ver = err.split()[1]
assert_equal(ver, main.__version__)
|
add assert_startswith() for better assertion failures
|
py
|
diff --git a/bokeh/server/settings.py b/bokeh/server/settings.py
index <HASH>..<HASH> 100644
--- a/bokeh/server/settings.py
+++ b/bokeh/server/settings.py
@@ -93,6 +93,8 @@ class Settings(object):
self.blaze_config = args.blaze_config
if args.script:
self.scripts = [args.script]
+ if args.url_prefix:
+ self.url_prefix = args.url_prefix
def process_settings(self):
if self.url_prefix:
|
Fix url-prefix not detected.
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -3,9 +3,10 @@
from os import path, listdir
from setuptools import setup, find_packages
from setuptools.command.test import test as TestCommand
+from codecs import open
HERE = path.abspath(path.dirname(__file__))
-__version__ = '0.6.1-8'
+__version__ = '0.6.1-9'
__project_name__ = 'common'
def hack_find_packages(include_str):
@@ -73,7 +74,7 @@ class PyTest(TestCommand):
errno = pytest.main(pytest_commands)
exit(errno)
-with open('README.rst', 'r') as f:
+with open('README.rst', 'r', 'utf-8') as f:
readme = f.read()
setup(
|
more setup.py fixes from requests
|
py
|
diff --git a/mike/app_version.py b/mike/app_version.py
index <HASH>..<HASH> 100644
--- a/mike/app_version.py
+++ b/mike/app_version.py
@@ -1 +1 @@
-version = '0.3.2'
+version = '0.4.0.dev0'
|
Update version to <I>.dev0
|
py
|
diff --git a/scriptabit/plugins/trello/trello.py b/scriptabit/plugins/trello/trello.py
index <HASH>..<HASH> 100644
--- a/scriptabit/plugins/trello/trello.py
+++ b/scriptabit/plugins/trello/trello.py
@@ -252,7 +252,7 @@ The default is to only synchronise the task names.''')
stats = sync.synchronise(
clean_orphans=False,
- sync_completed_new_tasks=False)
+ sync_completed_new_tasks=True)
self.__notify(stats)
|
changing completed new tasks to sync rather than skip
|
py
|
diff --git a/spinoff/actor/runner.py b/spinoff/actor/runner.py
index <HASH>..<HASH> 100644
--- a/spinoff/actor/runner.py
+++ b/spinoff/actor/runner.py
@@ -14,6 +14,7 @@ from spinoff.actor import spawn, Actor, set_default_node, Node
from spinoff.actor._actor import _validate_nodeid, _VALID_IP_RE
from spinoff.actor.remoting import Hub, HubWithNoRemoting
from spinoff.util.logging import log, err, panic
+from spinoff.util.async import after
from spinoff.util.pattern_matching import ANY
from spinoff.actor.events import Events, Message
from spinoff.actor.supervision import Stop, Restart, Resume
@@ -107,7 +108,7 @@ class Wrapper(Actor):
elif message == ('terminated', self.actor):
_, actor = message
if self.keep_running:
- self._do_spawn()
+ after(1.0).do(self._do_spawn)
else:
self.stop()
else:
|
If keeprunning is set for startnode, restart the actor after 1 second
|
py
|
diff --git a/discord/state.py b/discord/state.py
index <HASH>..<HASH> 100644
--- a/discord/state.py
+++ b/discord/state.py
@@ -702,19 +702,21 @@ class ConnectionState:
def parse_channel_pins_update(self, data):
channel_id = int(data['channel_id'])
- channel = self.get_channel(channel_id)
+ try:
+ guild = self._get_guild(int(data['guild_id']))
+ except KeyError:
+ guild = None
+ channel = self._get_private_channel(channel_id)
+ else:
+ channel = guild and guild._resolve_channel(channel_id)
+
if channel is None:
log.debug('CHANNEL_PINS_UPDATE referencing an unknown channel ID: %s. Discarding.', channel_id)
return
last_pin = utils.parse_time(data['last_pin_timestamp']) if data['last_pin_timestamp'] else None
- try:
- # I have not imported discord.abc in this file
- # the isinstance check is also 2x slower than just checking this attribute
- # so we're just gonna check it since it's easier and faster and lazier
- channel.guild
- except AttributeError:
+ if guild is None:
self.dispatch('private_channel_pins_update', channel, last_pin)
else:
self.dispatch('guild_channel_pins_update', channel, last_pin)
|
Change CHANNEL_PINS_UPDATE to use guild information from gateway This changes the channel lookup from O(n) to two amortised O(1) lookups
|
py
|
diff --git a/steam/__init__.py b/steam/__init__.py
index <HASH>..<HASH> 100644
--- a/steam/__init__.py
+++ b/steam/__init__.py
@@ -16,6 +16,6 @@ ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""
-__version__ = "3.1"
+__version__ = "3.2"
from base import *
|
Version <I> has awoken!
|
py
|
diff --git a/hdl_toolkit/synthesizer/codeOps.py b/hdl_toolkit/synthesizer/codeOps.py
index <HASH>..<HASH> 100644
--- a/hdl_toolkit/synthesizer/codeOps.py
+++ b/hdl_toolkit/synthesizer/codeOps.py
@@ -1,4 +1,4 @@
-from copy import deepcopy
+from copy import deepcopy, copy
import types
from hdl_toolkit.hdlObjects.specialValues import DIRECTION
@@ -290,7 +290,10 @@ def packedWidth(intf):
intf._loadDeclarations()
elif isinstance(intf, InterfaceBase) and not hasattr(intf, "_interfaces"):
# not loaded interface
- intf = deepcopy(intf)
+ _intf = intf
+
+ intf = _intf.__class__()
+ intf._updateParamsFrom(_intf)
intf._loadDeclarations()
|
copy instead of deepcopy for unloaded interfaces
|
py
|
diff --git a/django_extensions/db/fields/json.py b/django_extensions/db/fields/json.py
index <HASH>..<HASH> 100644
--- a/django_extensions/db/fields/json.py
+++ b/django_extensions/db/fields/json.py
@@ -18,10 +18,10 @@ from django.core.serializers.json import DjangoJSONEncoder
try:
# Django >= 1.7
- import json
+ import json as _json
except ImportError:
# Django <= 1.6 backwards compatibility
- from django.utils import simplejson as json
+ from django.utils import simplejson as _json
def dumps(value):
@@ -29,7 +29,7 @@ def dumps(value):
def loads(txt):
- value = json.loads(
+ value = _json.loads(
txt,
parse_float=Decimal,
encoding=settings.DEFAULT_CHARSET
|
issue #<I> import json as _json
|
py
|
diff --git a/wallace/custom.py b/wallace/custom.py
index <HASH>..<HASH> 100644
--- a/wallace/custom.py
+++ b/wallace/custom.py
@@ -22,6 +22,8 @@ from operator import attrgetter
from sqlalchemy import and_
+import sys
+
# Load the configuration options.
config = PsiturkConfig()
config.load_config()
@@ -42,6 +44,7 @@ verbose = True
def log(text):
if verbose:
print ">>>> {}".format(text)
+ sys.stdout.flush()
# Specify the experiment.
try:
|
Make logging print immediately; this should make sure logs are in order!
|
py
|
diff --git a/katcp/server.py b/katcp/server.py
index <HASH>..<HASH> 100644
--- a/katcp/server.py
+++ b/katcp/server.py
@@ -544,6 +544,10 @@ class KATCPServer(object):
except Exception:
self._logger.error('Error handling message {0!s}'.format(msg),
exc_info=True)
+ # Allow the ioloop to run since we may be starving it if there is buffered
+ # data in the stream, resulting in yield stream.read_until_regex('\n|\r')
+ # never actually yielding control to the ioloop
+ yield gen.moment
except Exception:
self._logger.error('Unexpected exception in read-loop for client {0}:'
.format(client_address))
|
Yield to the ioloop at the end of each message to ensure fairness between clients
|
py
|
diff --git a/cirq/circuits/moment.py b/cirq/circuits/moment.py
index <HASH>..<HASH> 100644
--- a/cirq/circuits/moment.py
+++ b/cirq/circuits/moment.py
@@ -51,7 +51,7 @@ class Moment(object):
self.qubits = frozenset(affected_qubits)
if len(affected_qubits) != len(self.qubits):
raise ValueError(
- 'Overlapping operations: {}'.format(ops))
+ 'Overlapping operations: {}'.format(self.operations))
def operates_on(self, qubits: Iterable[ops.QubitId]) -> bool:
"""Determines if the moment has operations touching the given qubits.
|
Fix overlapping operations error printing the ops module instead of the relevant operations (#<I>)
|
py
|
diff --git a/openquake/calculators/views.py b/openquake/calculators/views.py
index <HASH>..<HASH> 100644
--- a/openquake/calculators/views.py
+++ b/openquake/calculators/views.py
@@ -857,8 +857,16 @@ Source = collections.namedtuple('Source', 'source_id code geom num_ruptures')
def equal(rec1, rec2):
if len(rec1) != len(rec2):
return False
- return all((v1 == v2).all() if isinstance(v1, numpy.ndarray)
- else v1 == v2 for v1, v2 in zip(rec1, rec2))
+ for v1, v2 in zip(rec1, rec2):
+ if isinstance(v1, numpy.ndarray):
+ diff = v1 != v2
+ if diff is True:
+ return False
+ elif diff.any():
+ return False
+ elif v1 != v2:
+ return False
+ return True
def all_equal(records):
@@ -885,6 +893,6 @@ def view_dupl_sources(token, dstore):
if all_equal(sources):
dupl.append(source_id)
sameid.append(source_id)
- msg = 'Found %d source(s) with the same ID and %d true duplicate: %s' % (
+ msg = 'Found %d source(s) with the same ID and %d truly duplicate: %s' % (
len(sameid), len(dupl), dupl)
return msg
|
[skip CI] Former-commit-id: cbb<I>da<I>c3f1dbaecc<I>fb6b0c3d<I>b<I>c<I>
|
py
|
diff --git a/examples/migrate.py b/examples/migrate.py
index <HASH>..<HASH> 100755
--- a/examples/migrate.py
+++ b/examples/migrate.py
@@ -109,7 +109,9 @@ def migrate(src, dst, resource, name_or_id, filter, should_import_rtypes):
rs = getattr(impreq, rtype)
for name in rs:
if args.overwrite or rs[name].existing_id is None:
- rs[name].should_import = should_import = True
+ rs[name].should_import = True
+ if rtype is plural:
+ should_import = rs[name].should_import
if not should_import:
print_verbose('Skipping {} {}'.format(resource, r.id))
|
Don't import associated resources unless primary resource is to be imported as well
|
py
|
diff --git a/nodeconductor/structure/serializers.py b/nodeconductor/structure/serializers.py
index <HASH>..<HASH> 100644
--- a/nodeconductor/structure/serializers.py
+++ b/nodeconductor/structure/serializers.py
@@ -607,10 +607,10 @@ class UserSerializer(serializers.HyperlinkedModelSerializer):
except (KeyError, AttributeError):
return fields
- if not user.is_staff:
+ if not user.is_staff and not user.is_support:
del fields['is_active']
del fields['is_staff']
- fields['description'].read_only = True
+ del fields['description']
if request.method in ('PUT', 'PATCH'):
fields['username'].read_only = True
|
Show user description only for staff and support - wal-<I>
|
py
|
diff --git a/openquake/commonlib/riskmodels.py b/openquake/commonlib/riskmodels.py
index <HASH>..<HASH> 100644
--- a/openquake/commonlib/riskmodels.py
+++ b/openquake/commonlib/riskmodels.py
@@ -17,8 +17,9 @@ class VulnerabilityNode(LiteralNode):
validators = valid.parameters(
vulnerabilitySetID=valid.name,
vulnerabilityFunctionID=valid.name_with_dashes,
- assetCategory=valid.ChoiceCI(
- 'population', 'buildings', 'single_asset'),
+ assetCategory=str,
+ # the assetCategory here has nothing to do with the category
+ # in the exposure model and it is not used by the engine
lossCategory=valid.name,
IML=valid.IML,
lossRatio=valid.positivefloats,
|
Remove the constraint on the assetCategory
|
py
|
diff --git a/ckanutils/__init__.py b/ckanutils/__init__.py
index <HASH>..<HASH> 100755
--- a/ckanutils/__init__.py
+++ b/ckanutils/__init__.py
@@ -21,6 +21,6 @@ __package_name__ = 'ckanutils'
__author__ = 'Reuben Cummings'
__description__ = 'Miscellaneous CKAN utility scripts'
__email__ = 'reubano@gmail.com'
-__version__ = '0.3.1'
+__version__ = '0.3.2'
__license__ = 'MIT'
__copyright__ = 'Copyright 2015 Reuben Cummings'
|
Bump to version <I>
|
py
|
diff --git a/neurom/io/utils.py b/neurom/io/utils.py
index <HASH>..<HASH> 100644
--- a/neurom/io/utils.py
+++ b/neurom/io/utils.py
@@ -151,7 +151,7 @@ def get_morph_files(directory):
'''Get a list of all morphology files in a directory
Returns:
- list with all files with extensions '.swc' or '.h5' (case insensitive)
+ list with all files with extensions '.swc' , 'h5' or '.asc' (case insensitive)
'''
lsdir = [os.path.join(directory, m) for m in os.listdir(directory)]
return [m for m in lsdir
|
Update utils.py: fix the get_morph_files docstring to show that .asc is also an option.
|
py
|
diff --git a/treeherder/settings/base.py b/treeherder/settings/base.py
index <HASH>..<HASH> 100644
--- a/treeherder/settings/base.py
+++ b/treeherder/settings/base.py
@@ -18,7 +18,7 @@ TREEHERDER_DATABASE_PORT = os.environ.get("TREEHERDER_DATABASE_PORT", "")
TREEHERDER_RO_DATABASE_USER = os.environ.get("TREEHERDER_RO_DATABASE_USER", "TREEHERDER_DATABASE_USER")
TREEHERDER_RO_DATABASE_PASSWORD = os.environ.get("TREEHERDER_RO_DATABASE_PASSWORD", "TREEHERDER_DATABASE_PASSWORD")
-TREEHERDER_MEMCACHED = os.environ.get("TREEHERDER_MEMCACHED", "")
+TREEHERDER_MEMCACHED = os.environ.get("TREEHERDER_MEMCACHED", "").strip(',').split(',')
TREEHERDER_MEMCACHED_KEY_PREFIX = os.environ.get("TREEHERDER_MEMCACHED_KEY_PREFIX", "treeherder")
DEBUG = os.environ.get("TREEHERDER_DEBUG", False)
|
Added handling for a comma-separated list of memcached hosts
|
py
|
diff --git a/kubespawner/spawner.py b/kubespawner/spawner.py
index <HASH>..<HASH> 100644
--- a/kubespawner/spawner.py
+++ b/kubespawner/spawner.py
@@ -336,7 +336,7 @@ class KubeSpawner(Spawner):
set this with::
c.KubeSpawner.start_timeout = 60 * 5 # Upto 5 minutes
-
+
"""
)
@@ -781,6 +781,16 @@ class KubeSpawner(Spawner):
"""
)
+ delete_stopped_pods = Bool(
+ True,
+ config=True,
+ help="""
+ Whether to delete pods that have stopped themselves.
+ Set to False to leave stopped pods in the completed state,
+ allowing for easier debugging of why they may have stopped.
+ """
+ )
+
def _expand_user_properties(self, template):
# Make sure username and servername match the restrictions for DNS labels
safe_chars = set(string.ascii_lowercase + string.digits)
@@ -979,6 +989,9 @@ class KubeSpawner(Spawner):
# return exit code if notebook container has terminated
if c.name == 'notebook':
if c.state.terminated:
+ # call self.stop to delete the pod
+ if self.delete_stopped_pods:
+ yield self.stop(now=True)
return c.state.terminated.exit_code
break
# None means pod is running or starting up
|
Delete pods that have stopped: if `.poll()` notices that a pod has stopped, delete the pod. A `.delete_stopped_pods` flag is added to allow disabling this behavior if deployments want to leave stopped pods around, in case they are interested in inspecting the pod to diagnose the reason for stopping.
|
py
|
diff --git a/openquake/export/hazard.py b/openquake/export/hazard.py
index <HASH>..<HASH> 100644
--- a/openquake/export/hazard.py
+++ b/openquake/export/hazard.py
@@ -159,8 +159,6 @@ def export_hazard_curves(output, target_dir):
"""
hc = models.HazardCurve.objects.get(output=output.id)
- # NOTE(LB): Using `values_list` and `iterator` here make this query a bit
- # faster and more lean in terms of memory consumption.
curves = models.HazardCurveData.objects.all_curves_simple(
filter_args=dict(hazard_curve=hc.id)
)
|
export/hazard: Removed a redundant comment.
|
py
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.