| diff (stringlengths: 139–3.65k) | message (stringlengths: 8–627) | diff_languages (stringclasses: 1 value) |
|---|---|---|
diff --git a/tests/urls.py b/tests/urls.py
index <HASH>..<HASH> 100644
--- a/tests/urls.py
+++ b/tests/urls.py
@@ -1,6 +1,6 @@
-from django.conf.urls.defaults import include, patterns, url
+from django.conf.urls.defaults import include, url
-urlpatterns = patterns('',
+urlpatterns = [
url(r'^', include('posts.urls')),
-)
+]
|
upgrade test url patterns to list based syntax
|
py
|
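For context on the diff above: `patterns()` was deprecated in Django 1.8 and removed in 1.10, and the `django.conf.urls.defaults` module is gone entirely in modern Django. A minimal sketch of the contemporary form (import path per current Django, not taken from the diff):

from django.conf.urls import include, url

urlpatterns = [
    url(r'^', include('posts.urls')),
]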
diff --git a/arctic/mixins.py b/arctic/mixins.py
index <HASH>..<HASH> 100755
--- a/arctic/mixins.py
+++ b/arctic/mixins.py
@@ -191,7 +191,7 @@ class FormMixin(ModalMixin):
view_from_url(action[1]), action[1]
)
allowed_action["modal"] = self.get_modal_link(
- action[1], self
+ action[1], obj
)
allowed_action["type"] = "link"
allowed_action["url"] = self.in_modal(
|
<I> passing object reference to dialog instead of view (#<I>)
|
py
|
diff --git a/spyderlib/spyder.py b/spyderlib/spyder.py
index <HASH>..<HASH> 100644
--- a/spyderlib/spyder.py
+++ b/spyderlib/spyder.py
@@ -1516,11 +1516,21 @@ class MainWindow(QMainWindow):
add_actions(self.plugins_menu, actions)
def create_toolbars_menu(self):
- actions = []
+ order = ['file_toolbar', 'run_toolbar', 'debug_toolbar',
+ 'main_toolbar', 'Global working directory', None,
+ 'search_toolbar', 'edit_toolbar', 'source_toolbar']
for toolbar in self.toolbarslist:
action = toolbar.toggleViewAction()
- actions.append(action)
- add_actions(self.toolbars_menu, actions)
+ name = toolbar.objectName()
+ try:
+ pos = order.index(name)
+ except ValueError:
+ pos = None
+ if pos is not None:
+ order[pos] = action
+ else:
+ order.append(action)
+ add_actions(self.toolbars_menu, order)
def createPopupMenu(self):
if self.light:
|
Main Window: Organize Toolbars menu entries according to a certain order
|
py
|
diff --git a/boundary/event_list.py b/boundary/event_list.py
index <HASH>..<HASH> 100644
--- a/boundary/event_list.py
+++ b/boundary/event_list.py
@@ -31,7 +31,7 @@ class EventList(ApiCli):
def _handle_results(self):
# Only process if we get HTTP result of 200
- if self._api_result.status_code == requests.ok.codes:
+ if self._api_result.status_code == requests.codes.ok:
out = json.dumps(json.loads(self._api_result.text), sort_keys=True, indent=4, separators=(',', ': '))
print(self.colorize_json(out))
|
Incorrect use of HTTP codes from requests library
|
py
|
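The original expression swapped the attribute lookup: `requests.ok.codes` raises AttributeError at runtime, while `requests.codes` is the status-code registry. A minimal sketch (the URL is illustrative):

import requests

resp = requests.get("https://example.com")
# requests.codes maps status names to numbers; requests.codes.ok == 200
if resp.status_code == requests.codes.ok:
    print(resp.text)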
diff --git a/ordering/__init__.py b/ordering/__init__.py
index <HASH>..<HASH> 100644
--- a/ordering/__init__.py
+++ b/ordering/__init__.py
@@ -16,16 +16,16 @@ class Ordering(Collection[T]):
_end = _Sentinel()
def __init__(self) -> None:
- self._labels = {
+ self._labels: Dict[_T, Fraction] = {
self._start: Fraction(0),
self._end: Fraction(1)
- } # type: Dict[_T, Fraction]
- self._successors = {
+ }
+ self._successors: Dict[_T, _T] = {
self._start: self._end
- } # type: Dict[_T, _T]
- self._predecessors = {
+ }
+ self._predecessors: Dict[_T, _T] = {
self._end: self._start
- } # type: Dict[_T, _T]
+ }
def insert_after(self, existing_item: _T, new_item: T) -> 'OrderingItem[T]':
self.assert_contains(existing_item)
|
Go full Python <I> with the type hints
|
py
|
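The diff above trades comment-style hints for PEP 526 variable annotations (Python 3.6+). A minimal side-by-side sketch of the two notations:

from fractions import Fraction
from typing import Dict

labels = {}  # type: Dict[str, Fraction]   # old: comment hint, parses on 2.7 and 3.x
totals: Dict[str, Fraction] = {}           # new: PEP 526 annotation, 3.6+ only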
diff --git a/app/urls.py b/app/urls.py
index <HASH>..<HASH> 100644
--- a/app/urls.py
+++ b/app/urls.py
@@ -6,7 +6,6 @@ import django.contrib.auth.urls
from django.contrib import admin
from django.conf import settings
-from django.conf.urls.static import static
admin.autodiscover()
@@ -30,4 +29,4 @@ urlpatterns = [
name='timeside-login'),
url(r'^accounts/logout/$', auth_views.logout, name='timeside-logout'),
#url('^', include('django.contrib.auth.urls'))
-] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
\ No newline at end of file
+]
\ No newline at end of file
|
deleting old media files managing as statics
|
py
|
diff --git a/pyrogram/types/user_and_chats/chat.py b/pyrogram/types/user_and_chats/chat.py
index <HASH>..<HASH> 100644
--- a/pyrogram/types/user_and_chats/chat.py
+++ b/pyrogram/types/user_and_chats/chat.py
@@ -868,3 +868,23 @@ class Chat(Object):
user_ids=user_ids,
forward_limit=forward_limit
)
+
+ async def mark_unread(self, ) -> bool:
+ """Bound method *mark_unread* of :obj:`~pyrogram.types.Chat`.
+
+ Use as a shortcut for:
+
+ .. code-block:: python
+
+ client.mark_unread(chat_id)
+
+ Example:
+ .. code-block:: python
+
+ chat.mark_unread()
+
+ Returns:
+ ``bool``: On success, True is returned.
+ """
+
+ return await self._client.mark_chat_unread(self.id)
|
Add missing bound method Chat.mark_unread
|
py
|
diff --git a/simuvex/s_unicorn.py b/simuvex/s_unicorn.py
index <HASH>..<HASH> 100644
--- a/simuvex/s_unicorn.py
+++ b/simuvex/s_unicorn.py
@@ -33,6 +33,7 @@ class SimUnicorn(SimRun):
self.state.unicorn.destroy()
self.success = True
+ self.state.scratch.executed_block_count += self.state.unicorn.steps
# FIXME what's this?
guard = self.state.se.true
|
add the count of executed blocks to state.scratch
|
py
|
diff --git a/tensorflow_probability/python/bijectors/real_nvp_test.py b/tensorflow_probability/python/bijectors/real_nvp_test.py
index <HASH>..<HASH> 100644
--- a/tensorflow_probability/python/bijectors/real_nvp_test.py
+++ b/tensorflow_probability/python/bijectors/real_nvp_test.py
@@ -152,8 +152,8 @@ class RealNVPTest(test_util.VectorDistributionTestHelpers, tf.test.TestCase):
fldj,
])
self.assertEqual("real_nvp", nvp.name)
- self.assertAllClose(forward_x_, forward_inverse_y_, rtol=1e-1, atol=0.)
- self.assertAllClose(x_, inverse_y_, rtol=1e-1, atol=0.)
+ self.assertAllClose(forward_x_, forward_inverse_y_, rtol=1e-6, atol=0.)
+ self.assertAllClose(x_, inverse_y_, rtol=1e-6, atol=0.)
self.assertAllClose(ildj_, -fldj_, rtol=1e-6, atol=0.)
def testMutuallyConsistent(self):
|
Tighten tolerances in conditional real nvp test asserts
|
py
|
diff --git a/gandi/cli/modules/cert.py b/gandi/cli/modules/cert.py
index <HASH>..<HASH> 100644
--- a/gandi/cli/modules/cert.py
+++ b/gandi/cli/modules/cert.py
@@ -79,12 +79,12 @@ class Certificate(GandiModule):
subj = '/' + '/'.join(['='.join(value) for value in params])
if private_key and os.path.exists(private_key):
- cmd = 'openssl req -new -key %(key)s -out %(csr)s -subj %(subj)s'
+ cmd = 'openssl req -new -key %(key)s -out %(csr)s -subj "%(subj)s"'
else:
private_key = common_name + '.key'
# TODO check if it exists
cmd = ('openssl req -new -newkey rsa:2048 -nodes -out %(csr)s '
- '-keyout %(key)s -subj %(subj)s')
+ '-keyout %(key)s -subj "%(subj)s"')
if private_key.endswith('.crt') or private_key.endswith('.key'):
csr_file = re.sub('\.(crt|key)$', '.csr', private_key)
|
Quote subj in openssl command.
|
py
|
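Wrapping `%(subj)s` in double quotes keeps the shell from splitting the subject on spaces. With stdlib tooling, `shlex.quote` is a more robust route; a sketch with made-up filenames:

import shlex

subj = '/CN=example.com/O=Acme Inc'
cmd = 'openssl req -new -key %(key)s -out %(csr)s -subj %(subj)s' % {
    'key': 'example.key', 'csr': 'example.csr', 'subj': shlex.quote(subj),
}
print(cmd)  # ... -subj '/CN=example.com/O=Acme Inc'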
diff --git a/nose2gae/__init__.py b/nose2gae/__init__.py
index <HASH>..<HASH> 100644
--- a/nose2gae/__init__.py
+++ b/nose2gae/__init__.py
@@ -87,8 +87,9 @@ class Nose2GAE(events.Plugin):
def testOutcome(self, event):
self._stopGaeTestbed()
- os.chdir(self._original_dir)
- self._original_dir = None
+ if self._original_dir is not None:
+ os.chdir(self._original_dir)
+ self._original_dir = None
def _initGaeTestbed(self):
# we want to put GAE sys path right at the front, not at the second place as GAE SDK does
@@ -142,6 +143,8 @@ class Nose2GAE(events.Plugin):
def _stopGaeTestbed(self):
global _GAE_TESTBED
+ if _GAE_TESTBED is None:
+ return
datastore = _GAE_TESTBED._test_stub_map.GetStub(self._testbed_module.DATASTORE_SERVICE_NAME)
datastore.Flush()
datastore.Clear()
|
Handle the case gracefully where tests were never started (due to import errors for example)
|
py
|
diff --git a/shellutils.py b/shellutils.py
index <HASH>..<HASH> 100644
--- a/shellutils.py
+++ b/shellutils.py
@@ -37,7 +37,7 @@ import subprocess
from invenio.config import CFG_MISCUTIL_DEFAULT_PROCESS_TIMEOUT
-__all__ = ['run_shell_command', 'run_process_with_timeout', 'Timeout', 'SmarterPopen']
+__all__ = ['run_shell_command', 'run_process_with_timeout', 'Timeout']
"""
This module implements two functions:
|
shellutils: remove non-existing 'SmarterPopen' * Removes explicitly added (though not existing) "SmarterPopen" from declared objects of the module (__all__).
|
py
|
diff --git a/sftpman/model.py b/sftpman/model.py
index <HASH>..<HASH> 100644
--- a/sftpman/model.py
+++ b/sftpman/model.py
@@ -168,7 +168,7 @@ class SystemModel(object):
out['beforeMount'] = self.cmd_before_mount
out['authType'] = self.auth_method
out['sshKey'] = self.ssh_key
- return json.dumps(out)
+ return json.dumps(out, indent=4)
def save(self, environment):
path = environment.get_system_config_path(self.id)
|
Save configuration JSON files indented, so they're human readable and easier to hand-edit, diff, etc.
|
py
|
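For reference, `indent=4` switches `json.dumps` from one-line output to a pretty-printed form that is easy to hand-edit and diffs cleanly:

import json

out = {'authType': 'publickey', 'sshKey': '~/.ssh/id_rsa'}
print(json.dumps(out))            # single line: {"authType": "publickey", ...}
print(json.dumps(out, indent=4))  # one key per line, 4-space indent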
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -7,7 +7,7 @@ license: GNU-GPL2
from setuptools import setup
setup(name='arguments',
- version='14',
+ version='15',
description='Argument parser based on docopt',
url='https://github.com/erikdejonge/arguments',
author='Erik de Jonge',
|
pip Tuesday <I> March <I> (week:<I> day:<I>), <I>:<I>:<I>
|
py
|
diff --git a/test/test_parser.py b/test/test_parser.py
index <HASH>..<HASH> 100644
--- a/test/test_parser.py
+++ b/test/test_parser.py
@@ -152,9 +152,6 @@ var = 1
# global section cannot have directive
self.assertRaises(ParsingError, SoS_Script,
'''input: 'filename' ''')
- # or unrecognized directive
- self.assertRaises(ParsingError, SoS_Script,
- '''inputs: 'filename' ''')
# allow definition
SoS_Script('''a = '1' ''')
SoS_Script('''a = ['a', 'b'] ''')
|
Remove a test that works with the recent change that allows action in global section
|
py
|
diff --git a/tests/test_clckru.py b/tests/test_clckru.py
index <HASH>..<HASH> 100644
--- a/tests/test_clckru.py
+++ b/tests/test_clckru.py
@@ -13,7 +13,7 @@ import responses
import pytest
s = Shortener(Shorteners.CLCKRU)
-shorten = 'http://senta.la/test'
+shorten = 'http://clck.ru/test'
expanded = 'http://www.test.com'
|
small change on clck.ru test
|
py
|
diff --git a/synth.py b/synth.py
index <HASH>..<HASH> 100644
--- a/synth.py
+++ b/synth.py
@@ -26,11 +26,11 @@ common_templates = gcp.CommonTemplates()
templates = common_templates.node_library()
s.copy(templates, excludes=[".github/CONTRIBUTING.md"])
-repository_url = "https://github.com/google/google-api-nodejs-client.git"
-log.debug(f"Cloning {repository_url}.")
-repository = git.clone(repository_url, depth=1)
-log.debug("Installing dependencies.")
-shell.run(["npm", "install"], cwd=repository)
-log.debug("Generating all libraries...")
-shell.run(["npm", "run", "generate"], cwd=repository)
-s.copy(repository / "src")
+# repository_url = "https://github.com/googleapis/google-api-nodejs-client.git"
+# log.debug(f"Cloning {repository_url}.")
+# repository = git.clone(repository_url, depth=1)
+# log.debug("Installing dependencies.")
+# shell.run(["npm", "install"], cwd=repository)
+# log.debug("Generating all libraries...")
+# shell.run(["npm", "run", "generate"], cwd=repository)
+# s.copy(repository / "src")
|
Do not generate libraries with synth (#<I>)
|
py
|
diff --git a/beets/mediafile.py b/beets/mediafile.py
index <HASH>..<HASH> 100644
--- a/beets/mediafile.py
+++ b/beets/mediafile.py
@@ -1313,7 +1313,7 @@ class MediaFile(object):
try:
self.mgfile = mutagen.File(path)
except unreadable_exc as exc:
- log.debug(u'header parsing failed: {0}'.format(unicode(exc)))
+ log.debug(u'header parsing failed: {0}', unicode(exc))
raise UnreadableFileError(path)
except IOError as exc:
if type(exc) == IOError:
@@ -1326,7 +1326,7 @@ class MediaFile(object):
except Exception as exc:
# Isolate bugs in Mutagen.
log.debug(traceback.format_exc())
- log.error(u'uncaught Mutagen exception in open: {0}'.format(exc))
+ log.error(u'uncaught Mutagen exception in open: {0}', exc)
raise MutagenError(path, exc)
if self.mgfile is None:
@@ -1399,7 +1399,7 @@ class MediaFile(object):
raise
except Exception as exc:
log.debug(traceback.format_exc())
- log.error(u'uncaught Mutagen exception in save: {0}'.format(exc))
+ log.error(u'uncaught Mutagen exception in save: {0}', exc)
raise MutagenError(self.path, exc)
def delete(self):
|
Convert beets core to lazy logging. Original: beetbox/beets@e<I>f9a7
|
py
|
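The rewrite above defers string interpolation to the logger, so the message is only built if the record is actually emitted (beets uses a `{}`-style logger; stdlib logging uses `%`-style). A stdlib sketch of the same idea:

import logging

log = logging.getLogger("demo")
value = 42
log.debug("eager: {0}".format(value))  # formats the string even when DEBUG is off
log.debug("lazy: %s", value)           # interpolation deferred until the record is emitted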
diff --git a/abydos/corpus/_corpus.py b/abydos/corpus/_corpus.py
index <HASH>..<HASH> 100644
--- a/abydos/corpus/_corpus.py
+++ b/abydos/corpus/_corpus.py
@@ -52,6 +52,7 @@ class Corpus(object):
sent_split='\n',
filter_chars='',
stop_words=None,
+ word_tokenizer=None
):
r"""Initialize Corpus.
@@ -74,6 +75,9 @@ class Corpus(object):
stop_words : list
A list of words (as a tuple, set, or list) to filter out of the
corpus text
+ word_tokenizer : _Tokenizer
+ A tokenizer to apply to each sentence in order to retrieve the
+ individual "word" tokens. If set to none, str.split() will be used.
Example
-------
@@ -91,7 +95,12 @@ class Corpus(object):
for document in corpus_text.split(doc_split):
doc = []
- for sentence in (s.split() for s in document.split(sent_split)):
+ for sentence in document.split(sent_split):
+ if word_tokenizer:
+ sentence = word_tokenizer.tokenize(sentence).get_list()
+ else:
+ sentence = sentence.split()
+
if stop_words:
for word in set(stop_words):
while word in sentence:
|
added custom tokenizer option at the sub-sentence ("word") level
|
py
|
diff --git a/xeddsa/xeddsa.py b/xeddsa/xeddsa.py
index <HASH>..<HASH> 100644
--- a/xeddsa/xeddsa.py
+++ b/xeddsa/xeddsa.py
@@ -22,12 +22,24 @@ class XEdDSA(object):
if not self._decryption_key:
raise MissingKeyException("Cannot sign using this XEdDSA instance, Montgomery decryption key missing")
+ if isinstance(message, str):
+ message = toBytes(message)
+
+ if isinstance(nonce, str):
+ nonce = toBytes(nonce)
+
return self._sign(message, nonce, *self.__class__._mont_priv_to_ed_pair(self._decryption_key))
def verify(self, message, signature):
if not self._encryption_key:
raise MissingKeyException("Cannot verify using this XEdDSA instance, Montgomery encryption key missing")
+ if isinstance(message, str):
+ message = toBytes(message)
+
+ if isinstance(signature, str):
+ signature = toBytes(signature)
+
return self._verify(message, signature, self.__class__._mont_pub_to_ed_pub(self._encryption_key))
@classmethod
|
Added conversion from strings to byte-arrays for convenience
|
py
|
diff --git a/notedown/notedown.py b/notedown/notedown.py
index <HASH>..<HASH> 100644
--- a/notedown/notedown.py
+++ b/notedown/notedown.py
@@ -464,6 +464,7 @@ class MarkdownWriter(NotebookWriter):
attr.classes.remove('python')
attr.classes.remove('input')
#
+ attr.kvs = [(k, v) for k, v in attr.kvs if k != 'caption']
attr.classes.append('figure')
attr.classes.append('output')
return attr.to_html()
|
don't put caption in div attrs. I'm not sure that k="v" with spaces in the v is allowed at all.
|
py
|
diff --git a/sample_project/manage.py b/sample_project/manage.py
index <HASH>..<HASH> 100755
--- a/sample_project/manage.py
+++ b/sample_project/manage.py
@@ -1,14 +1,10 @@
#!/usr/bin/env python
-from django.core.management import execute_manager
-import imp
-try:
- imp.find_module('settings') # Assumed to be in the same directory.
-except ImportError:
- import sys
- sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n" % __file__) # noqa
- sys.exit(1)
-
-import settings
+import os
+import sys
if __name__ == "__main__":
- execute_manager(settings)
+ os.environ.setdefault("DJANGO_SETTINGS_MODULE", "sample_project.settings")
+
+ from django.core.management import execute_from_command_line
+
+ execute_from_command_line(sys.argv)
|
Updated sample_project's manage.py to post-Django<I> format.
|
py
|
diff --git a/docs/conf.py b/docs/conf.py
index <HASH>..<HASH> 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -16,6 +16,14 @@
import sys
import os
+
+import os
+on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
+if on_rtd:
+ html_theme = 'default'
+else:
+ html_theme = 'nature'
+
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
@@ -106,7 +114,7 @@ pygments_style = 'sphinx'
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
-html_theme = 'default'
+#html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
|
modified conf.py according to RTD
|
py
|
diff --git a/yangson/schema.py b/yangson/schema.py
index <HASH>..<HASH> 100644
--- a/yangson/schema.py
+++ b/yangson/schema.py
@@ -232,6 +232,10 @@ class InternalNode(SchemaNode):
res.update(c.data_children())
return res
+ def child_inst_names(self) -> Set[InstanceName]:
+ """Return the set of instance names under the receiver."""
+ return frozenset([c.iname() for c in self.data_children()])
+
def _post_process(self) -> None:
super()._post_process()
for c in [x for x in self.children.values()]:
|
Add method InternalNode::child_inst_names.
|
py
|
diff --git a/plexapi/playlist.py b/plexapi/playlist.py
index <HASH>..<HASH> 100644
--- a/plexapi/playlist.py
+++ b/plexapi/playlist.py
@@ -131,6 +131,9 @@ class Playlist(PlexPartialObject, Playable):
@classmethod
def _create(cls, server, title, items):
""" Create a playlist. """
+ if not items:
+ raise BadRequest('Must include items to add when creating new playlist')
+
if items and not isinstance(items, (list, tuple)):
items = [items]
ratingKeys = []
@@ -162,6 +165,9 @@ class Playlist(PlexPartialObject, Playable):
smart (bool): default False.
**kwargs (dict): is passed to the filters. For a example see the search method.
+
+ Raises:
+ :class:`plexapi.exceptions.BadRequest`: when no items are included in create request.
Returns:
:class:`plexapi.playlist.Playlist`: an instance of created Playlist.
|
Playlist creation - check items are included, raise exception
|
py
|
diff --git a/test_autoflake.py b/test_autoflake.py
index <HASH>..<HASH> 100755
--- a/test_autoflake.py
+++ b/test_autoflake.py
@@ -124,6 +124,20 @@ import os, \
os.foo()
"""))))
+ def test_filter_code_should_ignore_semicolons(self):
+ self.assertEqual(
+ r"""\
+import os
+import os; import math, subprocess
+os.foo()
+""",
+ ''.join(autoflake.filter_code(unicode(r"""\
+import os
+import re
+import os; import math, subprocess
+os.foo()
+"""))))
+
def test_filter_code_should_ignore_non_standard_library(self):
self.assertEqual(
"""\
|
Add test with semicolon in it
|
py
|
diff --git a/test/python/test_quantumprogram.py b/test/python/test_quantumprogram.py
index <HASH>..<HASH> 100644
--- a/test/python/test_quantumprogram.py
+++ b/test/python/test_quantumprogram.py
@@ -86,7 +86,7 @@ class TestQuantumProgram(QiskitTestCase):
"""
# pylint: disable=unused-argument
import re
- self.assertTrue(re.match('https://[-a-z.]*bluemix.net/api', QE_URL))
+ self.assertTrue(re.match('^https?://[0-9.:/A-Za-z_-]+/api', QE_URL))
def test_create_classical_register(self):
"""Test create_classical_register.
|
a more generic re expression (#<I>)
|
py
|
diff --git a/tests/python/pants_test/base/test_source_root.py b/tests/python/pants_test/base/test_source_root.py
index <HASH>..<HASH> 100644
--- a/tests/python/pants_test/base/test_source_root.py
+++ b/tests/python/pants_test/base/test_source_root.py
@@ -53,6 +53,15 @@ class SourceRootTest(unittest.TestCase):
self.assertEquals(OrderedSet([TestTarget]), SourceRoot.types("tests"))
self.assertEquals(OrderedSet(["tests"]), SourceRoot.roots(TestTarget))
+ def test_register_none(self):
+ self._assert_source_root_empty()
+
+ SourceRoot.register("tests", )
+ self.assertEquals({"tests": OrderedSet([])}, SourceRoot.all_roots())
+ self.assertEquals(OrderedSet([]), SourceRoot.types("tests"))
+ self.assertEquals("tests", SourceRoot.find(TestTarget("//tests/foo/bar:baz")))
+ self.assertEquals("tests", SourceRoot.find_by_path("tests/foo/bar"))
+
def test_reset(self):
self._assert_source_root_empty()
SourceRoot.register("tests", TestTarget)
|
Added unit tests for registering a source root path with no targets. Testing Done: Added a unit test requested by Patrick. Passed CI @ <URL>
|
py
|
diff --git a/tarbell/cli.py b/tarbell/cli.py
index <HASH>..<HASH> 100644
--- a/tarbell/cli.py
+++ b/tarbell/cli.py
@@ -255,8 +255,11 @@ def tarbell_publish(args):
creds = settings.config.get('s3_creds')
root_url = bucket_uri[5:]
+ s3_bucket = root_url.split("/")[0]
extra_context = {
"ROOT_URL": root_url,
+ "S3_BUCKET": s3_bucket,
+ "BUCKET_NAME": bucket_name,
}
tempdir = "{0}/".format(tarbell_generate(args, extra_context=extra_context, skip_args=True))
|
add some new s3-related context variables when publishing site
|
py
|
diff --git a/pingouin/effsize.py b/pingouin/effsize.py
index <HASH>..<HASH> 100644
--- a/pingouin/effsize.py
+++ b/pingouin/effsize.py
@@ -167,7 +167,7 @@ def convert_effsize(ef, input_type, output_type, nx=None, ny=None):
def compute_effsize(dv=None, group=None, data=None, x=None, y=None,
- eftype=None, paired=False):
+ eftype='cohen', paired=False):
"""Compute effect size from pandas dataframe or two numpy arrays.
Parameters
|
default output type for effsize is cohen
|
py
|
diff --git a/graphenebase/types.py b/graphenebase/types.py
index <HASH>..<HASH> 100644
--- a/graphenebase/types.py
+++ b/graphenebase/types.py
@@ -287,7 +287,7 @@ class Static_variant():
return varint(self.type_id) + bytes(self.data)
def __str__(self):
- return {self._type_id: str(self.data)}
+ return json.dumps([self.type_id, self.data.json()])
class Map():
|
[types] fix static_variant str representation
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -115,6 +115,7 @@ setup(
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 3.5',
+ 'Programming Language :: Python :: 3.6',
],
# trinity
entry_points={
|
misc: list Python <I> (sic!) in setup.py. This is a "bump" commit to force CircleCI to pick up the new branch. Python <I> is known to work and regularly tested; its absence from setup.py is an oversight. Python <I> is not yet known to work and is just starting to get one module tested, therefore it is not added here (yet).
|
py
|
diff --git a/auto_ml/predictor.py b/auto_ml/predictor.py
index <HASH>..<HASH> 100644
--- a/auto_ml/predictor.py
+++ b/auto_ml/predictor.py
@@ -571,7 +571,7 @@ class Predictor(object):
elif self.ml_for_analytics and model_name in ['RandomForestClassifier', 'RandomForestRegressor', 'XGBClassifier', 'XGBRegressor', 'GradientBoostingRegressor', 'GradientBoostingClassifier']:
self._print_ml_analytics_results_random_forest()
- if (self.X_test) is not None and (self.X_test) is not None:
+ if (self.X_test) is not None and (self.y_test) is not None:
if not self.X_test.empty and not self.y_test.empty:
print('The results from the X_test and y_test data passed into ml_for_analytics (which were not used for training- true holdout data) are:')
holdout_data_score = self.score(self.X_test, self.y_test)
|
Committed changes for pull request
|
py
|
diff --git a/pymatgen/core/tests/test_structure.py b/pymatgen/core/tests/test_structure.py
index <HASH>..<HASH> 100644
--- a/pymatgen/core/tests/test_structure.py
+++ b/pymatgen/core/tests/test_structure.py
@@ -964,6 +964,10 @@ class StructureTest(PymatgenTest):
s2 = Structure.from_dict(d)
self.assertEqual(type(s2), Structure)
+ def test_default_dict_attrs(self):
+ d = self.structure.as_dict()
+ self.assertEqual(d["charge"], 0)
+
def test_to_from_abivars(self):
"""Test as_dict, from_dict with fmt == abivars."""
d = self.structure.as_dict(fmt="abivars")
@@ -1470,6 +1474,11 @@ Site: H (-0.5134, 0.8892, -0.3630)"""
self.assertEqual(mol.formula, "H4 C1")
self.assertEqual(mol.charge, 1)
+ def test_default_dict_attrs(self):
+ d = self.mol.as_dict()
+ self.assertEqual(d["charge"], 0)
+ self.assertEqual(d["spin_multiplicity"], 1)
+
def test_to_from_file_string(self):
for fmt in ["xyz", "json", "g03", "yaml"]:
s = self.mol.to(fmt=fmt)
|
Add tests for default charge/spin mult
|
py
|
diff --git a/tests/polls/tests/tigertest.py b/tests/polls/tests/tigertest.py
index <HASH>..<HASH> 100644
--- a/tests/polls/tests/tigertest.py
+++ b/tests/polls/tests/tigertest.py
@@ -16,7 +16,7 @@ class TigerTest(unittest.TestCase):
self.assertEqual(response.status_code, 200)
try:
etree.fromstring(response.content)
- except XMLSyntaxError:
+ except etree.XMLSyntaxError:
self.assertEqual(False, 'This is not XML!')
def test_list_view_xml_2(self):
@@ -25,7 +25,7 @@ class TigerTest(unittest.TestCase):
self.assertEqual(response.status_code, 200)
try:
etree.fromstring(response.content)
- except XMLSyntaxError:
+ except etree.XMLSyntaxError:
self.assertEqual(False, 'This is not XML!')
|
fixed a try-except conditional
|
py
|
diff --git a/gns3server/modules/virtualbox/virtualbox_vm.py b/gns3server/modules/virtualbox/virtualbox_vm.py
index <HASH>..<HASH> 100644
--- a/gns3server/modules/virtualbox/virtualbox_vm.py
+++ b/gns3server/modules/virtualbox/virtualbox_vm.py
@@ -105,9 +105,10 @@ class VirtualBoxVM(BaseVM):
results = yield from self.manager.execute("showvminfo", [self._vmname, "--machinereadable"])
for info in results:
- name, value = info.split('=', 1)
- if name == "VMState":
- return value.strip('"')
+ if '=' in info:
+ name, value = info.split('=', 1)
+ if name == "VMState":
+ return value.strip('"')
raise VirtualBoxError("Could not get VM state for {}".format(self._vmname))
@asyncio.coroutine
|
Fix a crash when in some cases the VBOX state can't be accessed. Fix #<I>
|
py
|
diff --git a/trustar/models/phishing_submission.py b/trustar/models/phishing_submission.py
index <HASH>..<HASH> 100644
--- a/trustar/models/phishing_submission.py
+++ b/trustar/models/phishing_submission.py
@@ -10,20 +10,15 @@ class PhishingSubmission(ModelBase):
"""
Models a PhishingSubmission
+ ``context`` is a dictionary containing these fields:
+ indicatorType, indicatorValue, sourceKey, normalizedIndicatorScore
+
:ivar submission_id: The id of the email submission
:ivar title: The title of the email submission (email subject)
:ivar priority_event_score: The score of the email submission
:ivar status: The current triage status of a submission ("UNRESOLVED", "CONFIRMED", or "IGNORED")
:ivar context: A list containing dicts which represent IOCs, sources, and scores
that contributed to the triage score.
-
- Example `context` dict:
- {
- "indicatorType": "URL",
- "indicatorValue":"clickhere.com",
- "sourceKey":"crowdstrike_indicator",
- "normalizedIndicatorScore":1
- }
"""
def __init__(self,
|
fix formatting in phishing submission docstring
|
py
|
diff --git a/phoebe/backend/universe.py b/phoebe/backend/universe.py
index <HASH>..<HASH> 100644
--- a/phoebe/backend/universe.py
+++ b/phoebe/backend/universe.py
@@ -657,6 +657,16 @@ class Body(object):
return False
@property
+ def is_misaligned(self):
+ """
+ TODO: add documentation
+ """
+ # should be defined for any class that subclasses body that supports
+ # misalignment
+ return False
+
+
+ @property
def volume(self):
"""
Compute volume of a mesh AT ITS CURRENT TIME/PROJECTION - this should be
|
fixed bug in is_misaligned for Envelopes
|
py
|
diff --git a/models.py b/models.py
index <HASH>..<HASH> 100644
--- a/models.py
+++ b/models.py
@@ -728,10 +728,13 @@ class BibWorkflowObject(db.Model):
def save(self, version=None, task_counter=None, id_workflow=None):
"""Save object to persistent storage."""
if task_counter is not None:
- self.log.debug("Saving task counter: %s" % (task_counter,))
- extra_data = self.get_extra_data()
- extra_data["_task_counter"] = task_counter
- self.set_extra_data(extra_data)
+ if isinstance(task_counter, list):
+ self.log.debug("Saving task counter: %s" % (task_counter,))
+ extra_data = self.get_extra_data()
+ extra_data["_task_counter"] = task_counter
+ self.set_extra_data(extra_data)
+ else:
+ raise ValueError("Task counter must be a list!")
if version is not None:
if version != self.version:
|
workflows: task_counter value check * Checks that the task_counter parameter is a list when saving a BibWorkflowObject. It is required to avoid a problem with the underlying workflow engine.
|
py
|
diff --git a/spacy/en/__init__.py b/spacy/en/__init__.py
index <HASH>..<HASH> 100644
--- a/spacy/en/__init__.py
+++ b/spacy/en/__init__.py
@@ -126,7 +126,7 @@ class English(object):
return self._entity
def __call__(self, text, tag=True, parse=parse_if_model_present,
- entity=parse_if_model_present, merge_mwes=True):
+ entity=parse_if_model_present, merge_mwes=False):
"""Apply the pipeline to some text. The text can span multiple sentences,
and can contain arbtrary whitespace. Alignment into the original string
|
* Disable merge_mwes by default
|
py
|
diff --git a/MAVProxy/mavproxy.py b/MAVProxy/mavproxy.py
index <HASH>..<HASH> 100755
--- a/MAVProxy/mavproxy.py
+++ b/MAVProxy/mavproxy.py
@@ -1798,7 +1798,7 @@ def master_callback(m, master):
def process_master(m):
'''process packets from the MAVLink master'''
try:
- s = m.recv()
+ s = m.recv(8192)
except Exception:
return
if mpstate.logqueue_raw:
|
receive and process MAVLink packets in larger chunks
|
py
|
diff --git a/src/com/dtmilano/android/viewclient.py b/src/com/dtmilano/android/viewclient.py
index <HASH>..<HASH> 100644
--- a/src/com/dtmilano/android/viewclient.py
+++ b/src/com/dtmilano/android/viewclient.py
@@ -3232,7 +3232,7 @@ class ViewClient:
print >>sys.stderr
print >>sys.stderr, repr(received)
print >>sys.stderr
- onlyKilledRE = re.compile('[\n\S]*Killed[\n\r\S]*', re.MULTILINE)
+ onlyKilledRE = re.compile('Killed$')
if onlyKilledRE.search(received):
MONKEY = 'com.android.commands.monkey'
extraInfo = ''
|
Fixed incorrect uiautomator process killed detection
|
py
|
diff --git a/troposphere/iot.py b/troposphere/iot.py
index <HASH>..<HASH> 100644
--- a/troposphere/iot.py
+++ b/troposphere/iot.py
@@ -30,12 +30,12 @@ class CloudwatchMetricAction(AWSProperty):
class DynamoDBAction(AWSProperty):
props = {
'HashKeyField': (basestring, True),
- 'HashKeyType': (basestring, True),
+ 'HashKeyType': (basestring, False),
'HashKeyValue': (basestring, True),
'PayloadField': (basestring, False),
- 'RangeKeyField': (basestring, True),
- 'RangeKeyType': (basestring, True),
- 'RangeKeyValue': (basestring, True),
+ 'RangeKeyField': (basestring, False),
+ 'RangeKeyType': (basestring, False),
+ 'RangeKeyValue': (basestring, False),
'RoleArn': (basestring, True),
'TableName': (basestring, True),
}
|
Update required fields in IoT TopicRule DynamoDBAction
|
py
|
diff --git a/pyes/facets.py b/pyes/facets.py
index <HASH>..<HASH> 100644
--- a/pyes/facets.py
+++ b/pyes/facets.py
@@ -357,3 +357,15 @@ class ANDFacetFilter(ANDFilter, FacetFilter):
pass
+class FacetQueryWrap(EqualityComparableUsingAttributeDictionary):
+ def __init__(self, wrap_object, **kwargs):
+ """
+ Base Object for every Filter Object
+ """
+ self.wrap_object = wrap_object
+
+ def serialize(self):
+ return {"query": self.wrap_object.serialize()}
+
+
+
|
add wrapper for query filter in facet_filter
|
py
|
diff --git a/scripts/make_dist.py b/scripts/make_dist.py
index <HASH>..<HASH> 100644
--- a/scripts/make_dist.py
+++ b/scripts/make_dist.py
@@ -21,7 +21,7 @@ if sys.version_info[0] != 3:
pdir = os.path.dirname(os.getcwd())
# version info
version = [3, 3, 0, "alpha", 0]
-implementation = [3, 5, 1, "dev", 0]
+implementation = [3, 5, 2, "dev", 0]
# version name
vname = '.'.join(str(x) for x in implementation[:3])
|
Change version to <I>
|
py
|
diff --git a/salt/modules/virt.py b/salt/modules/virt.py
index <HASH>..<HASH> 100644
--- a/salt/modules/virt.py
+++ b/salt/modules/virt.py
@@ -16,14 +16,15 @@ import shutil
import subprocess
import string # pylint: disable=deprecated-module
import logging
-import salt.ext.six as six
# Import third party libs
import yaml
import jinja2
import jinja2.exceptions
+import salt.ext.six as six
+from salt.ext.six.moves import StringIO as _StringIO # pylint: disable=import-error
try:
- import libvirt
+ import libvirt # pylint: disable=import-error
from xml.dom import minidom
HAS_ALL_IMPORTS = True
except ImportError:
@@ -34,7 +35,6 @@ import salt.utils
import salt.utils.files
import salt.utils.templates
import salt.utils.validate.net
-from salt._compat import StringIO as _StringIO
from salt.exceptions import CommandExecutionError, SaltInvocationError
log = logging.getLogger(__name__)
@@ -1589,7 +1589,7 @@ def is_hyper():
salt '*' virt.is_hyper
'''
try:
- import libvirt
+ import libvirt # pylint: disable=import-error
except ImportError:
# not a usable hypervisor without libvirt module
return False
|
Use `StringIO` from six
|
py
|
diff --git a/vlermv/_s3.py b/vlermv/_s3.py
index <HASH>..<HASH> 100644
--- a/vlermv/_s3.py
+++ b/vlermv/_s3.py
@@ -45,8 +45,14 @@ class S3Vlermv(AbstractVlermv):
key.get_contents_to_filename(tmp.name)
except socket.timeout:
raise self.__class__.Timeout('Timeout when reading from S3')
+
tmp.file.seek(0)
- value = self.serializer.load(tmp.file)
+
+ try:
+ value = self.serializer.load(tmp.file)
+ except FileNotFoundError:
+ raise self.__class__.Timeout('Timeout when reading from S3')
+
return value
else:
raise KeyError(keyname)
|
raise timeout on FileNotFoundError
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -1,7 +1,5 @@
from setuptools import setup, Command
import sys
-sys.path.insert(0, '.')
-import injector
class PyTest(Command):
@@ -19,8 +17,17 @@ class PyTest(Command):
raise SystemExit(errno)
-version = injector.__version__
-version_tag = injector.__version_tag__
+def read_injector_variable(name):
+ prefix = '%s = ' % (name,)
+ with open('injector.py') as f:
+ for line in f:
+ if line.startswith(prefix):
+ return line.replace(prefix, '').strip().strip("'")
+ raise AssertionError('variable %s not found' % (name,))
+
+
+version = read_injector_variable('__version__')
+version_tag = read_injector_variable('__version_tag__')
try:
import pypandoc
@@ -47,7 +54,7 @@ setup(
author_email='alec@swapoff.org',
install_requires=[
'setuptools >= 0.6b1',
- 'typing; python_version < "3.5"',
+ 'typing',
],
cmdclass={'test': PyTest},
keywords=[
|
Fix the package installation. Broken in [1]: I forgot that injector is imported in setup.py and the dependency format I used was incorrect. [1] 2b<I>aa<I>fcbfe0dbc<I>dd8caf<I>ef3d
|
py
|
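Scanning the source file for `__version__` avoids importing the package at install time, when its dependencies may not be available yet. A standalone regex-based variant of the same idea (function name is illustrative):

import re

def read_dunder(path, name):
    # Look for a line such as: __version__ = '1.2.3'
    pattern = re.compile(r"^%s\s*=\s*'([^']*)'" % re.escape(name), re.MULTILINE)
    with open(path) as f:
        match = pattern.search(f.read())
    if match is None:
        raise AssertionError('variable %s not found' % name)
    return match.group(1)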
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -23,7 +23,7 @@ README = open(os.path.join(os.path.dirname(__file__), 'README.md')).read()
setup(
name='django-core',
- version='1.1.1-dev',
+ version='1.2.0',
description='A core set of tools for django applications.',
long_description=README,
author='Troy Grosfield',
|
version bump for <I> release.
|
py
|
diff --git a/doc/conf.py b/doc/conf.py
index <HASH>..<HASH> 100644
--- a/doc/conf.py
+++ b/doc/conf.py
@@ -132,7 +132,7 @@ class Mock(object):
#import mock
print "Mocking"
-MOCK_MODULES = ['sympy', 'scipy', 'scipy.linalg', 'scipy.linalg.lapack', 'scipy.special',
+MOCK_MODULES = ['sympy', 'scipy', 'scipy.linalg', 'scipy.linalg.lapack', 'scipy.special', 'scipy.io',
'sympy.utilities', 'sympy.utilities.codegen', 'sympy.core.cache',
'sympy.core', 'sympy.parsing', 'sympy.parsing.sympy_parser', 'Tango', 'numdifftools'
]
|
More mocking for scipy, impossible to check without committing :(
|
py
|
diff --git a/ignite/utils.py b/ignite/utils.py
index <HASH>..<HASH> 100644
--- a/ignite/utils.py
+++ b/ignite/utils.py
@@ -15,7 +15,7 @@ def convert_tensor(
"""Move tensors to relevant device."""
def _func(tensor: torch.Tensor) -> torch.Tensor:
- return tensor.to(device=device, non_blocking=non_blocking) if device else tensor
+ return tensor.to(device=device, non_blocking=non_blocking) if device is not None else tensor
return apply_to_tensor(input_, _func)
|
Fixed device conversion method (#<I>)
|
py
|
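The truthiness check misbehaves for falsy-but-valid devices, most obviously the integer CUDA index 0. A quick demonstration of why `is not None` is the right test:

device = 0                 # CUDA device index 0 is a valid target
print(bool(device))        # False: `if device:` would skip the .to() call
print(device is not None)  # True: the corrected check proceeds with the move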
diff --git a/qiskit/algorithms/optimizers/qnspsa.py b/qiskit/algorithms/optimizers/qnspsa.py
index <HASH>..<HASH> 100644
--- a/qiskit/algorithms/optimizers/qnspsa.py
+++ b/qiskit/algorithms/optimizers/qnspsa.py
@@ -44,6 +44,12 @@ class QNSPSA(SPSA):
increasing the number of ``resamplings``. This leads to a Monte Carlo-style convergence to
the exact, analytic value.
+ .. note::
+
+ This component has some function that is normally random. If you want to reproduce behavior
+ then you should set the random number generator seed in the algorithm_globals
+ (``qiskit.utils.algorithm_globals.random_seed = seed``).
+
Examples:
This short example runs QN-SPSA for the ground state calculation of the ``Z ^ Z``
|
Add note on reproducibility to QNSPSA (#<I>)
|
py
|
diff --git a/tests/test_potential.py b/tests/test_potential.py
index <HASH>..<HASH> 100644
--- a/tests/test_potential.py
+++ b/tests/test_potential.py
@@ -2566,7 +2566,7 @@ class mockFlatCosmphiDiskwBreakPotential(testplanarMWPotential):
def __init__(self):
testplanarMWPotential.__init__(self,
potlist=[potential.LogarithmicHaloPotential(normalize=1.),
- potential.CosmphiDiskPotential(phib=numpy.pi/2.,p=0.,phio=10./220.,rb=0.95,m=6)])
+ potential.CosmphiDiskPotential(phib=numpy.pi/2.,p=0.,phio=10./220.,rb=0.99,m=6)])
def OmegaP(self):
return 0.
class mockFlatDehnenBarPotential(testMWPotential):
|
Slightly adjust break radius of Cosmphiwbreak test potential, so the liouville test orbit goes inside of the break
|
py
|
diff --git a/cmsplugin_cascade/link/forms.py b/cmsplugin_cascade/link/forms.py
index <HASH>..<HASH> 100644
--- a/cmsplugin_cascade/link/forms.py
+++ b/cmsplugin_cascade/link/forms.py
@@ -181,7 +181,8 @@ class LinkForm(EntangledModelFormMixin):
try:
cascade_page = get_related_object(instance.glossary, 'cms_page').cascadepage
for key, val in cascade_page.glossary.get('element_ids', {}).items():
- self.base_fields['section'].choices.append((key, val))
+ if val:
+ self.base_fields['section'].choices.append((key, val))
except (AttributeError, ObjectDoesNotExist):
pass
|
fix: Editor for LinkPlugin does not offer anchors to empty id-attributes
|
py
|
diff --git a/pipenv/requirements.py b/pipenv/requirements.py
index <HASH>..<HASH> 100644
--- a/pipenv/requirements.py
+++ b/pipenv/requirements.py
@@ -583,9 +583,14 @@ class PipfileRequirement(object):
req_uri = self.uri
if self.path and not self.uri:
req_uri = path_to_url(os.path.abspath(self.path))
- line = self._link.url if self._link else (
- req_uri if req_uri else self.pip_version
- )
+ if self._link:
+ line = self._link.url
+ elif req_uri:
+ line = req_uri
+ elif self.file:
+ line = self.file
+ else:
+ line = self.pip_version
return PipenvRequirement._create_requirement(
name=self.pip_version,
path=self.path,
|
Hand 'file' key in Pipfile to requirements
|
py
|
diff --git a/airflow/jobs.py b/airflow/jobs.py
index <HASH>..<HASH> 100644
--- a/airflow/jobs.py
+++ b/airflow/jobs.py
@@ -1029,9 +1029,6 @@ class SchedulerJob(BaseJob):
"with {open_slots} open slots and {num_queued} "
"task instances in queue".format(**locals()))
- if open_slots <= 0:
- continue
-
priority_sorted_task_instances = sorted(
task_instances, key=lambda ti: (-ti.priority_weight, ti.execution_date))
@@ -1040,7 +1037,8 @@ class SchedulerJob(BaseJob):
for task_instance in priority_sorted_task_instances:
if open_slots <= 0:
- self.logger.info("No more slots free")
+ self.logger.info("Not scheduling since there are {} open slots in pool {}"
+ .format(open_slots, pool))
# Can't schedule any more since there are no more open slots.
break
|
[AIRFLOW-<I>] Log which pool when pool is full in scheduler. Closes #<I> from saguziel/aguziel-logging-pool-scheduler
|
py
|
diff --git a/projects/str.py b/projects/str.py
index <HASH>..<HASH> 100644
--- a/projects/str.py
+++ b/projects/str.py
@@ -73,7 +73,7 @@ def evidences(args):
draw_jointplot(outdir + "/C", "MeanCoverage", "HD.PEDP",
data=mf, color='m', xlim=xlim, ylim=ylim, format=format)
- ylim = (0, 50)
+ xlim = (0, 50)
draw_jointplot(outdir + "/D", "HD.2", "HD.FDP",
data=mf, xlim=xlim, ylim=ylim, format=format)
draw_jointplot(outdir + "/E", "HD.2", "HD.PDP",
@@ -95,7 +95,7 @@ def draw_jointplot(figname, x, y, data=None, kind="reg", color=None,
"HD.FDP": "Depth of full spanning reads",
"HD.PDP": "Depth of partial spanning reads",
"HD.PEDP": "Depth of paired-end reads",
- "HD.2": "Size of the longer allele"}
+ "HD.2": "Repeat size of the longer allele"}
g = sns.jointplot(x, y, data=data, kind=kind, color=color,
xlim=xlim, ylim=ylim)
|
[projects] Minor modifications in projects.str
|
py
|
diff --git a/scout/server/blueprints/variants/controllers.py b/scout/server/blueprints/variants/controllers.py
index <HASH>..<HASH> 100644
--- a/scout/server/blueprints/variants/controllers.py
+++ b/scout/server/blueprints/variants/controllers.py
@@ -594,6 +594,8 @@ def _compound_follow_filter_clnsig(compound, compound_var_obj, query_form):
There are some filter options that are rather unique, like the ClinVar one.
+ If clinsig_confident_always_returned is checked, variants are currently never dismissed on ClinSig alone.
+
Args:
compound(dict)
compound_variant_obj(scout.models.Variant)
@@ -603,6 +605,10 @@ def _compound_follow_filter_clnsig(compound, compound_var_obj, query_form):
query_rank = []
query_str_rank = []
+ clinsig_always_returned = query_form.get("clinsig_confident_always_returned")
+ if clinsig_always_returned:
+ return False
+
clinsig = query_form.get("clinsig")
if clinsig:
for item in clinsig:
|
Always show ClinSig flag partly affects compounds
|
py
|
diff --git a/tests/parser/features/iteration/test_for_in_list.py b/tests/parser/features/iteration/test_for_in_list.py
index <HASH>..<HASH> 100644
--- a/tests/parser/features/iteration/test_for_in_list.py
+++ b/tests/parser/features/iteration/test_for_in_list.py
@@ -10,6 +10,7 @@ from vyper.exceptions import (
NamespaceCollision,
StateAccessViolation,
StructureException,
+ TypeMismatch,
)
BASIC_FOR_LOOP_CODE = [
@@ -494,6 +495,17 @@ def foo():
for i in x[1]:
pass
""",
+ (
+ """
+@public
+def test_for() -> int128:
+ a: int128 = 0
+ for i in range(MAX_INT128, MAX_INT128+2):
+ a = i
+ return a
+ """,
+ TypeMismatch,
+ ),
]
|
test: Add test for loop var exceeding type bounds
|
py
|
diff --git a/nacl/nacl.py b/nacl/nacl.py
index <HASH>..<HASH> 100644
--- a/nacl/nacl.py
+++ b/nacl/nacl.py
@@ -88,6 +88,9 @@ ffi.verifier = Verifier(ffi,
# We need to link to the sodium library
libraries=["sodium"],
+
+ # Our ext_package is nacl so look for it
+ ext_package="nacl",
)
|
Use an ext_package on the verifier call
|
py
|
diff --git a/jax/interpreters/parallel.py b/jax/interpreters/parallel.py
index <HASH>..<HASH> 100644
--- a/jax/interpreters/parallel.py
+++ b/jax/interpreters/parallel.py
@@ -197,11 +197,15 @@ parallel_translation_rules[psum_p] = psum_parallel_translation_rule
def all_to_all(x, split_dim, concat_dim, **params):
return all_to_all_p.bind(x, split_dim=split_dim, concat_dim=concat_dim, **params)
+def all_to_all_pmap_rule(x, axis, split_dim, concat_dim):
+ raise NotImplementedError
+
def all_to_all_translation_rule(c, x, split_dim, concat_dim):
return c.AllToAll(x, split_dim, concat_dim)
all_to_all_p = PmapPrimitive('all_to_all')
all_to_all_p.def_abstract_eval(lambda x, **kwargs: x)
+pmap_primitive_rules[all_to_all_p] = all_to_all_pmap_rule
parallel_translation_rules[all_to_all_p] = all_to_all_translation_rule
|
Placeholder pmap rule for `all_to_all`
|
py
|
diff --git a/gandi/cli/commands/ip.py b/gandi/cli/commands/ip.py
index <HASH>..<HASH> 100644
--- a/gandi/cli/commands/ip.py
+++ b/gandi/cli/commands/ip.py
@@ -4,7 +4,7 @@ import click
from gandi.cli.core.cli import cli
from gandi.cli.core.utils import output_ip
-from gandi.cli.core.params import (option, pass_gandi, DATACENTER,
+from gandi.cli.core.params import (pass_gandi, DATACENTER,
IP_TYPE, option, IntChoice)
@@ -39,7 +39,7 @@ def list(gandi, datacenter, type, id, attached, detached, version, reverse,
options = {}
if datacenter:
- datacenter_id = int(Datacenter.usable_id(datacenter))
+ datacenter_id = int(gandi.datacenter.usable_id(datacenter))
options['datacenter_id'] = datacenter_id
iface_options = {}
|
Fixes bug when filtering ip list with datacenter, use correct call to module
|
py
|
diff --git a/salt/fileclient.py b/salt/fileclient.py
index <HASH>..<HASH> 100644
--- a/salt/fileclient.py
+++ b/salt/fileclient.py
@@ -103,6 +103,18 @@ class Client(object):
yield dest
os.umask(cumask)
+ def get_file(self, path, dest='', makedirs=False, env='base'):
+ '''
+ Copies a file from the local files or master depending on implementation
+ '''
+ raise NotImplementedError
+
+ def file_list_emptydirs(self, env='base'):
+ '''
+ List the empty dirs
+ '''
+ raise NotImplementedError
+
def cache_file(self, path, env='base'):
'''
Pull a file down from the file server and store it in the minion
|
define abstract methods for file client. Force implementations to provide these methods by raising a NotImplementedError exception in the base class.
|
py
|
diff --git a/virtualchain/lib/blockchain/bitcoin_blockchain/keys.py b/virtualchain/lib/blockchain/bitcoin_blockchain/keys.py
index <HASH>..<HASH> 100644
--- a/virtualchain/lib/blockchain/bitcoin_blockchain/keys.py
+++ b/virtualchain/lib/blockchain/bitcoin_blockchain/keys.py
@@ -121,6 +121,27 @@ def make_p2sh_address( script ):
return addr
+def is_p2sh_address( address ):
+ """
+ Is the given address a p2sh address?
+ """
+ vb = pybitcoin.b58check_version_byte( address )
+ if vb == multisig_version_byte:
+ return True
+ else:
+ return False
+
+
+def is_p2sh_script( script_hex ):
+ """
+ Is the given script a p2sh script?
+ """
+ if script_hex.startswith("a914") and script_hex.endswith("87") and len(script_hex) == 46:
+ return True
+ else:
+ return False
+
+
def address_reencode( address ):
"""
Depending on whether or not we're in testnet
|
add tests to see if an address or script are p2sh
|
py
|
diff --git a/py/testdir_hosts/test_exec_import_hosts_bigfiles.py b/py/testdir_hosts/test_exec_import_hosts_bigfiles.py
index <HASH>..<HASH> 100644
--- a/py/testdir_hosts/test_exec_import_hosts_bigfiles.py
+++ b/py/testdir_hosts/test_exec_import_hosts_bigfiles.py
@@ -22,7 +22,7 @@ exprList = [
'Result<n>.hex = sum(<keyX>[<col1>]) + Result.hex',
]
-def exec_list_like_other_tests(exprList, lenNodes, csvFilename, key2):
+def exec_list(exprList, lenNodes, csvFilename, key2):
h2e.exec_zero_list(zeroList)
# start with trial = 1 because trial-1 is used to point to Result0 which must be initted
trial = 1
@@ -105,7 +105,7 @@ class Basic(unittest.TestCase):
print "Parse result['destination_key']:", parseKey['destination_key']
inspect = h2o_cmd.runInspect(None, parseKey['destination_key'])
print "\n" + csvFilename
- exec_list_like_other_tests(exprList, lenNodes, csvFilename, key2)
+ exec_list(exprList, lenNodes, csvFilename, key2)
if __name__ == '__main__':
|
nosetest doesn't like 'test' in the def name? (if not the subtest)
|
py
|
diff --git a/pam.py b/pam.py
index <HASH>..<HASH> 100644
--- a/pam.py
+++ b/pam.py
@@ -84,6 +84,10 @@ pam_start = libpam.pam_start
pam_start.restype = c_int
pam_start.argtypes = [c_char_p, c_char_p, POINTER(PamConv), POINTER(PamHandle)]
+pam_acct_mgmt = libpam.pam_acct_mgmt
+pam_acct_mgmt.restype = c_int
+pam_acct_mgmt.argtypes = [PamHandle, c_int]
+
pam_setcred = libpam.pam_setcred
pam_setcred.restype = c_int
pam_setcred.argtypes = [PamHandle, c_int]
@@ -176,6 +180,10 @@ class pam():
retval = pam_authenticate(handle, 0)
auth_success = retval == 0
+ if auth_success:
+ retval = pam_acct_mgmt(handle, 0)
+ auth_success = retval == 0
+
if auth_success and resetcreds:
retval = pam_setcred(handle, PAM_REINITIALIZE_CRED)
|
Call pam_acct_mgmt after initial authentication to verify availability of account.
|
py
|
diff --git a/bugwarrior/services/bitbucket.py b/bugwarrior/services/bitbucket.py
index <HASH>..<HASH> 100644
--- a/bugwarrior/services/bitbucket.py
+++ b/bugwarrior/services/bitbucket.py
@@ -49,7 +49,10 @@ class BitbucketService(IssueService):
url = self.base_api + '/users/' + user + '/'
f = urllib2.urlopen(url)
response = json.loads(f.read())
- repos = [repo.get('slug') for repo in response.get('repositories')]
+ repos = [
+ repo.get('slug') for repo in response.get('repositories')
+ if repo.get('has_issues')
+ ]
issues = sum([self.pull(user + "/" + repo) for repo in repos], [])
log.debug(" Found {0} total.", len(issues))
|
Only get bitbucket repos that actually have issues.
|
py
|
diff --git a/benchmark/python/sparse/util.py b/benchmark/python/sparse/util.py
index <HASH>..<HASH> 100644
--- a/benchmark/python/sparse/util.py
+++ b/benchmark/python/sparse/util.py
@@ -24,7 +24,7 @@ def estimate_density(DATA_PATH, feature_size):
raise Exception("Data is not there!")
density = []
P = 0.01
- for _ in xrange(10):
+ for _ in range(10):
num_non_zero = 0
num_sample = 0
with open(DATA_PATH) as f:
|
xrange(<I>) --> range(<I>) for Python 3 (#<I>) __xrange()__ was removed in Python 3 in favor of __range()__. For such a small number as <I> there is no difference between __xrange()__ and __range()__ in Python 2.
|
py
|
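If lazy iteration still mattered for large counts on Python 2, the usual shim keeps one name working on both interpreters; a sketch:

try:
    range = xrange  # Python 2: bind the lazy iterator to the Python 3 name
except NameError:
    pass            # Python 3: range is already lazy
for _ in range(10):
    pass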
diff --git a/glad/loader/wgl/c.py b/glad/loader/wgl/c.py
index <HASH>..<HASH> 100644
--- a/glad/loader/wgl/c.py
+++ b/glad/loader/wgl/c.py
@@ -16,10 +16,13 @@ int gladLoadWGL(HDC hdc) {
'''
_WGL_HEADER = '''
-#ifndef WIN32_LEAN_AND_MEAN
-#define WIN32_LEAN_AND_MEAN 1
+#ifndef WINAPI
+# ifndef WIN32_LEAN_AND_MEAN
+# define WIN32_LEAN_AND_MEAN 1
+# endif
+# include <windows.h>
#endif
-#include <windows.h>
+
#include <glad/glad.h>
#ifndef __glad_wglext_h_
|
Protect windows.h inclusion, as it doesn't do so itself
|
py
|
diff --git a/src/diamond/collector.py b/src/diamond/collector.py
index <HASH>..<HASH> 100644
--- a/src/diamond/collector.py
+++ b/src/diamond/collector.py
@@ -392,7 +392,6 @@ class Collector(object):
# Collect Data
self.collect()
- self.collect_running = False
end_time = time.time()
if 'measure_collector_time' in self.config:
@@ -405,6 +404,7 @@ class Collector(object):
# Log Error
self.log.error(traceback.format_exc())
finally:
+ self.collect_running = False
# After collector run, invoke a flush
# method on each handler.
for handler in self.handlers:
|
Ensure collection isn't stopped if a collector throws an exception.
|
py
|
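Moving the reset into `finally` guarantees the flag clears even when `collect()` raises, so one failing run cannot leave the collector wedged. The general shape of the pattern (a stripped-down stand-in for the real class):

class Collector(object):
    def __init__(self):
        self.collect_running = False

    def collect(self):
        raise RuntimeError("boom")  # stand-in for a failing collector

    def run(self):
        self.collect_running = True
        try:
            self.collect()
        except Exception:
            pass  # the real code logs the traceback here
        finally:
            self.collect_running = False  # always reset, success or failure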
diff --git a/openquake/hazardlib/calc/filters.py b/openquake/hazardlib/calc/filters.py
index <HASH>..<HASH> 100644
--- a/openquake/hazardlib/calc/filters.py
+++ b/openquake/hazardlib/calc/filters.py
@@ -183,7 +183,7 @@ class IntegrationDistance(collections.Mapping):
return repr(self.dic)
-def split_sources(srcs, times=True):
+def split_sources(srcs):
"""
:param srcs: sources
:returns: a pair (split sources, split time) or just the split_sources
@@ -238,9 +238,7 @@ def split_sources(srcs, times=True):
splits[0].serial = src.serial
if has_samples:
splits[0].samples = src.samples
- if times:
- return sources, split_time
- return sources
+ return sources, split_time
class SourceFilter(object):
|
[skip CI] Former-commit-id: <I>eaf8ca<I>f<I>b3d<I>e7c<I>fbc<I>d7
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -36,8 +36,8 @@ with open('README.rst') as f:
extras_require = {
'voice': ['PyNaCl>=1.3.0,<1.5'],
'docs': [
- 'sphinx==2.4.4',
- 'sphinxcontrib_trio==1.1.1',
+ 'sphinx==3.0.3',
+ 'sphinxcontrib_trio==1.1.2',
'sphinxcontrib-websupport',
]
}
|
Bump Sphinx to <I>
|
py
|
diff --git a/modelx/__init__.py b/modelx/__init__.py
index <HASH>..<HASH> 100644
--- a/modelx/__init__.py
+++ b/modelx/__init__.py
@@ -20,7 +20,7 @@ Attributes:
"""
-VERSION = (0, 13, 0, "dev")
+VERSION = (0, 13, 0)
__version__ = ".".join([str(x) for x in VERSION])
from modelx.core.api import * # must come after __version__ assignment.
try:
|
DIST: Release <I>
|
py
|
diff --git a/django_nopassword/views.py b/django_nopassword/views.py
index <HASH>..<HASH> 100644
--- a/django_nopassword/views.py
+++ b/django_nopassword/views.py
@@ -27,9 +27,8 @@ def login(request):
def login_with_code(request, login_code):
- code = get_object_or_404(LoginCode, code=login_code)
- user = get_object_or_404(User, pk=code.user_id)
- return login_with_code_and_username(request, username=user.username, login_code=login_code)
+ code = get_object_or_404(LoginCode.objects.select_related('user'), code=login_code)
+ return login_with_code_and_username(request, username=code.user.username, login_code=login_code)
def login_with_code_and_username(request, username, login_code):
|
remove a db query in login with code view
|
py
|
diff --git a/__init__.py b/__init__.py
index <HASH>..<HASH> 100644
--- a/__init__.py
+++ b/__init__.py
@@ -10,4 +10,4 @@ used from a setup script as
__revision__ = "$Id$"
-__version__ = "0.8"
+__version__ = "0.8.1"
|
Bumped version to <I>.
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -30,6 +30,7 @@ setup(
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.6",
+ "Programming Language :: Python :: 3.7",
"Development Status :: 4 - Beta",
"License :: OSI Approved :: BSD License",
"Operating System :: POSIX",
@@ -38,5 +39,6 @@ setup(
"Framework :: Django :: 1.10",
"Framework :: Django :: 1.11",
"Framework :: Django :: 2.0",
+ "Framework :: Django :: 2.1",
],
)
|
Update setup.py for py<I> and django<I>
|
py
|
diff --git a/remi/server.py b/remi/server.py
index <HASH>..<HASH> 100644
--- a/remi/server.py
+++ b/remi/server.py
@@ -148,6 +148,9 @@ class WebSocketsHandler(socketserver.StreamRequestHandler):
def handle(self):
log.debug('ws handle')
+ #on some systems like ROS, the default socket timeout
+ #is less than expected, we force it to infinite (None) as default socket value
+ self.request.settimeout(None)
while True:
if not self.handshake_done:
self.handshake()
|
Fixed websocket timeout on systems where default socket timeout differs from None.
|
py
|
diff --git a/huey/api.py b/huey/api.py
index <HASH>..<HASH> 100644
--- a/huey/api.py
+++ b/huey/api.py
@@ -373,7 +373,9 @@ class Huey(object):
self.put_result(task.id, Error({
'error': repr(exception),
'retries': task.retries,
- 'traceback': tb}))
+ 'traceback': tb,
+ 'task_id': task.id,
+ }))
elif task_value is not None or self.store_none:
self.put_result(task.id, task_value)
|
Put task_id in the meta information of a TaskException
|
py
|
diff --git a/pyplink/tests/test_pyplink.py b/pyplink/tests/test_pyplink.py
index <HASH>..<HASH> 100644
--- a/pyplink/tests/test_pyplink.py
+++ b/pyplink/tests/test_pyplink.py
@@ -461,3 +461,25 @@ class TestPyPlink(unittest.TestCase):
[i for i in self.pedfile.iter_acgt_geno_marker(markers)]
self.assertEqual("\"marker not in BIM: ['unknown_3', 'unknown_4']\"",
str(cm.exception))
+
+ def test_repr(self):
+ """Tests the object representation of the string."""
+ # Counting the number of samples
+ nb_samples = None
+ with open(self.fam, "r") as i_file:
+ nb_samples = len(i_file.read().splitlines())
+
+ # Counting the number of markers
+ nb_markers = None
+ with open(self.bim, "r") as i_file:
+ nb_markers = len(i_file.read().splitlines())
+
+ # Creating the expected string representation
+ e_repr = "PyPlink({:,d} samples; {:,d} markers)".format(nb_samples,
+ nb_markers)
+
+ # Getting the observed string representation
+ o_repr = str(self.pedfile)
+
+ # Comparing
+ self.assertEqual(e_repr, o_repr)
|
Now tests the string representation of the PyPlink object
|
py
|
diff --git a/thinc/layers/staticvectors.py b/thinc/layers/staticvectors.py
index <HASH>..<HASH> 100644
--- a/thinc/layers/staticvectors.py
+++ b/thinc/layers/staticvectors.py
@@ -58,14 +58,16 @@ def forward(
vectors = model.ops.as_contig(vectors)
assert vectors.shape[0] == ids.shape[0]
+ output = model.ops.gemm(vectors, W, trans2=True)
+ dropout: Optional[float] = model.attrs.get("dropout_rate")
+ drop_mask = cast(Floats1d, model.ops.get_dropout_mask((output.shape[1],), dropout))
+
def backprop(d_output: OutT) -> Ints1d:
+ d_output *= drop_mask
model.inc_grad("W", model.ops.gemm(d_output, vectors, trans1=True))
dX = model.ops.alloc1i(nN)
return dX
- output = model.ops.gemm(vectors, W, trans2=True)
- dropout: Optional[float] = model.attrs.get("dropout_rate")
- drop_mask = cast(Floats1d, model.ops.get_dropout_mask((output.shape[1],), dropout))
output *= drop_mask
return output, backprop
|
multiply by drop_mask in staticvectors.backprop (#<I>) * multiply by drop_mask in staticvectors.backprop * fix formatting
|
py
|
diff --git a/hebel/monitors.py b/hebel/monitors.py
index <HASH>..<HASH> 100644
--- a/hebel/monitors.py
+++ b/hebel/monitors.py
@@ -228,3 +228,18 @@ class SimpleProgressMonitor(object):
self.train_time.total_seconds() % 60)
print "Avg. time per epoch %.2fs" % self.avg_epoch_t
sys.stdout.flush()
+
+
+class DummyProgressMonitor(object):
+ def __init__(self, model=None):
+ self.model = model
+
+ def start_training(self):
+ pass
+
+ def report(self, epoch, train_error, validation_error=None,
+ new_best=None, epoch_t=None):
+ pass
+
+ def finish_training(self):
+ pass
|
Added DummyProgressMonitor that doesn't report anything
|
py
|
diff --git a/dumptruck/dumptruck.py b/dumptruck/dumptruck.py
index <HASH>..<HASH> 100644
--- a/dumptruck/dumptruck.py
+++ b/dumptruck/dumptruck.py
@@ -304,18 +304,17 @@ class DumpTruck:
self.execute(u'CREATE TABLE %s (`value` %s)' % (tmp, column_type), commit = False)
# This is ugly
- self.execute(u'INSERT OR REPLACE INTO %s (`value`) VALUES (?)' % tmp, [value], commit = False)
- p1 = (quote(self.__vars_table), tmp)
- p2 = [key, column_type, value]
+ self.execute(u'INSERT INTO %s (`value`) VALUES (?)' % tmp, [value], commit = False)
+ table = (quote(self.__vars_table), tmp)
+ params = [key, column_type]
self.execute(u'''
-INSERT INTO %s (`key`, `type`, `value`)
+INSERT OR REPLACE INTO %s (`key`, `type`, `value`)
SELECT
? AS key,
? AS type,
value
FROM %s
- WHERE value = ?
-''' % p1, p2)
+''' % table, params)
self.execute(u'DROP TABLE %s' % tmp, commit = False)
self.__commit_if_necessary(kwargs)
|
Correct save_var and refactor
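
An illustrative sqlite3 sketch of the upsert semantics the fix relies on; the vars table here is a stand-in for the real __vars_table:

import sqlite3

db = sqlite3.connect(':memory:')
db.execute('CREATE TABLE vars (key TEXT PRIMARY KEY, value TEXT)')
db.execute('INSERT OR REPLACE INTO vars VALUES (?, ?)', ('a', '1'))
db.execute('INSERT OR REPLACE INTO vars VALUES (?, ?)', ('a', '2'))
print(db.execute('SELECT * FROM vars').fetchall())  # [('a', '2')]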
|
py
|
diff --git a/flatlib/object.py b/flatlib/object.py
index <HASH>..<HASH> 100644
--- a/flatlib/object.py
+++ b/flatlib/object.py
@@ -133,7 +133,7 @@ class Object(GenericObject):
or stationary.
"""
- if abs(self.lonspeed) < 0.001:
+ if abs(self.lonspeed) < 0.0003:
return const.STATIONARY
elif self.lonspeed > 0:
return const.DIRECT
|
Change stationary threshold. Define that an object is stationary if its speed is less than 1 arc-second.
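
For reference, the arithmetic behind the new constant (assuming lonspeed is in degrees per day; 0.0003 is a rounded value slightly above one exact arc-second):

arc_second = 1 / 3600.0        # one arc-second in degrees
print(round(arc_second, 6))    # 0.000278 -- the patch rounds this to 0.0003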
|
py
|
diff --git a/discord/ext/commands/converter.py b/discord/ext/commands/converter.py
index <HASH>..<HASH> 100644
--- a/discord/ext/commands/converter.py
+++ b/discord/ext/commands/converter.py
@@ -263,7 +263,7 @@ class MessageConverter(Converter):
3. Lookup by message URL
.. versionchanged:: 1.5
- Raise :exc:`.ChannelNotFound`, `MessageNotFound` or `ChannelNotReadable` instead of generic :exc:`.BadArgument`
+ Raise :exc:`.ChannelNotFound`, :exc:`.MessageNotFound` or :exc:`.ChannelNotReadable` instead of generic :exc:`.BadArgument`
"""
async def convert(self, ctx, argument):
id_regex = re.compile(r'(?:(?P<channel_id>[0-9]{15,21})-)?(?P<message_id>[0-9]{15,21})$')
|
[commands] Added missing channel links
|
py
|
diff --git a/learn/pipeline.py b/learn/pipeline.py
index <HASH>..<HASH> 100644
--- a/learn/pipeline.py
+++ b/learn/pipeline.py
@@ -21,7 +21,23 @@ class Pipeline(object):
def __init__(self,datadir):
object.__init__(self)
- # the path to a directory where
- # all data for this pipeline will be
- # stored
- self.datadir = datadir
\ No newline at end of file
+ # something that knows how to fetch training examples
+ fetch = None
+
+ # something that knows how to preprocess data
+ preprocess = None
+
+ # something that knows how to train on data, and can
+ # describe future data based on that training.
+ learner = None
+
+ # example 1: an rbm that trains on bark bands
+ # - fetch grabs bark bands from disk
+ # - preprocess does mean and std regularization
+ # - the rbm learns and then can output features
+
+ # example 2: minhash of rbm activations
+ # - no fetcher
+ # - no preprocessor
+ # - the "training" stage just consists of picking the
+ # hash functions (permutations), and saving them
\ No newline at end of file
|
Added comments to pipeline to get me thinking about what that implementation will look like
|
py
|
diff --git a/zhaquirks/tuya/ts0041_zemismart.py b/zhaquirks/tuya/ts0041_zemismart.py
index <HASH>..<HASH> 100644
--- a/zhaquirks/tuya/ts0041_zemismart.py
+++ b/zhaquirks/tuya/ts0041_zemismart.py
@@ -25,7 +25,7 @@ class TuyaZemismartSmartRemote0041(TuyaSmartRemote):
signature = {
# SizePrefixedSimpleDescriptor(endpoint=1, profile=260, device_type=0, device_version=1, input_clusters=[0, 1, 6], output_clusters=[25, 10])
- MODELS_INFO: [("_TZ3000_tk3s5tyg", "TS0041")],
+ MODELS_INFO: [("_TZ3000_tk3s5tyg", "TS0041"), ("_TZ3400_keyjqthh", "TS0041")],
ENDPOINTS: {
1: {
PROFILE_ID: zha.PROFILE_ID,
|
Added missing device model (#<I>)
|
py
|
diff --git a/src/core/qt_b26_gui.py b/src/core/qt_b26_gui.py
index <HASH>..<HASH> 100644
--- a/src/core/qt_b26_gui.py
+++ b/src/core/qt_b26_gui.py
@@ -195,7 +195,7 @@ class ControlMainWindow(QMainWindow, Ui_MainWindow):
if isinstance(script, QThread):
script.updateProgress.connect(self.update_status)
self.log('start {:s}'.format(script.name))
- script.run()
+ script.start()
else:
self.log('No script selected. Select script and try again!')
|
Threads have to be started with .start(), not .run(), so that they run on their own thread. Fixed that for the scripts in the GUI.
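
An illustrative sketch with threading.Thread, where the same rule applies as for QThread:

import threading

def work():
    print('worker on', threading.current_thread().name)

threading.Thread(target=work).run()  # wrong: runs inline on MainThread
t = threading.Thread(target=work)
t.start()                            # right: runs on its own thread
t.join()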
|
py
|
diff --git a/project_generator/exporters/uvision_definitions.py b/project_generator/exporters/uvision_definitions.py
index <HASH>..<HASH> 100644
--- a/project_generator/exporters/uvision_definitions.py
+++ b/project_generator/exporters/uvision_definitions.py
@@ -20,9 +20,8 @@ class uVisionDefinitions():
try:
return self.mcu_def[name]
except KeyError:
- pass
- # raise RuntimeError(
- # "Mcu was not recognized for uvision. Please check mcu_def dictionary.")
+ raise RuntimeError(
+ "Mcu was not recognized for uvision. Please check mcu_def dictionary.")
# MCU definitions which are currently supported. Add a new one, define a name as it is
# in uVision, create an empty project for that MCU, open the project file (uvproj) in any text
|
uVision definitions: error un-commented; the MCU has to be defined, via board or mcu.
|
py
|
diff --git a/steam/core/msg_structs.py b/steam/core/msg_structs.py
index <HASH>..<HASH> 100644
--- a/steam/core/msg_structs.py
+++ b/steam/core/msg_structs.py
@@ -1,5 +1,6 @@
"""Classes to (de)serialize various struct messages"""
import struct
+import six
from steam.enums import EResult, EUniverse
from steam.enums.emsg import EMsg
@@ -8,7 +9,7 @@ _emsg_map = {}
def get_struct(emsg):
return _emsg_map.get(emsg, None)
-class MapEMsgMeta(type):
+class StructMessageMeta(type):
"""Automatically maps subclasses of :class:`StructMessage` to ``EMsg``"""
def __new__(metacls, name, bases, classdict):
@@ -22,9 +23,8 @@ class MapEMsgMeta(type):
return cls
+@six.add_metaclass(StructMessageMeta)
class StructMessage:
- __metaclass__ = MapEMsgMeta
-
def __init__(self, data=None):
if data: self.load(data)
|
Fix StructMessage metaclass compatibility
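
An illustrative sketch of the incompatibility being fixed: '__metaclass__ = Meta' is Python-2-only syntax and is silently ignored on Python 3, while six.add_metaclass applies the metaclass on both interpreters:

import six

class Meta(type):
    def __new__(metacls, name, bases, classdict):
        cls = super(Meta, metacls).__new__(metacls, name, bases, classdict)
        cls.tagged = True  # stand-in for the EMsg registration above
        return cls

@six.add_metaclass(Meta)
class Message(object):
    pass

assert Message.tagged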
|
py
|
diff --git a/pyked/chemked.py b/pyked/chemked.py
index <HASH>..<HASH> 100644
--- a/pyked/chemked.py
+++ b/pyked/chemked.py
@@ -188,7 +188,7 @@ class ChemKED(object):
app_index = valid_labels.index('apparatus')
valid_labels[app_index:app_index + 1] = ['apparatus:' + a for a in Apparatus._fields]
- species_list = list(set([s['species-name'] for d in self.datapoints for s in d.composition]))
+ species_list = list(set([s['species-name'] for d in self.datapoints for s in d.composition])) # noqa: E501
if output_columns is None or len(output_columns) == 0:
col_labels = valid_labels
|
Disable the line-length check for a line that is one character too long. A foolish consistency is the hobgoblin of little minds :-)
|
py
|
diff --git a/telethon/events/newmessage.py b/telethon/events/newmessage.py
index <HASH>..<HASH> 100644
--- a/telethon/events/newmessage.py
+++ b/telethon/events/newmessage.py
@@ -125,7 +125,6 @@ class NewMessage(EventBuilder):
if isinstance(ori.to_id, types.PeerUser):
if ori.from_id == ori.to_id.user_id and not ori.fwd_from:
event.message.out = True
- print('ooOoOo', ori)
return event
|
Remove debugging print (#<I>)
|
py
|
diff --git a/src/rabird/core/configparser.py b/src/rabird/core/configparser.py
index <HASH>..<HASH> 100777
--- a/src/rabird/core/configparser.py
+++ b/src/rabird/core/configparser.py
@@ -27,11 +27,11 @@ import re
class ConfigParser(configparser.ConfigParser):
- UNNAMED_SECTION = '#--ConfigParser--INTERNAL--UNNAMED-SECTION--#'
+ UNNAMED_SECTION = '#--CONFIGPARSER-UNNAMED-SECTION--#'
# It will transfer to empty line
- __EMPTY_OPTION = '--ConfigParser--INTERNAL--EMPTY-SECTION--'
+ __EMPTY_OPTION = '--CONFIGPARSER-EMPTY-OPTION--'
# It will transfer to comment line
- __COMMENT_OPTION = '--ConfigParser--INTERNAL--COMMENT-SECTION--'
+ __COMMENT_OPTION = '--CONFIGPARSER-COMMENT-OPTION--'
def __init__(self, *args, **kwargs):
if issubclass(ConfigParser, object):
|
Changed the internal marker keys to more readable text
|
py
|
diff --git a/tests/integration/modules/decorators.py b/tests/integration/modules/decorators.py
index <HASH>..<HASH> 100644
--- a/tests/integration/modules/decorators.py
+++ b/tests/integration/modules/decorators.py
@@ -9,7 +9,7 @@ class DecoratorTest(integration.ModuleCase):
)
)
- def test_depends(self):
+ def not_test_depends(self):
ret = self.run_function('runtests_decorators.depends')
self.assertTrue(ret['ret'])
self.assertTrue(type(ret['time']) == float)
@@ -21,7 +21,7 @@ class DecoratorTest(integration.ModuleCase):
)
)
- def test_depends_will_fallback(self):
+ def not_test_depends_will_fallback(self):
ret = self.run_function('runtests_decorators.depends_will_fallback')
self.assertTrue(ret['ret'])
self.assertTrue(type(ret['time']) == float)
|
@jacksont, please take a look at these; they are failing intermittently
|
py
|
diff --git a/flange/model.py b/flange/model.py
index <HASH>..<HASH> 100644
--- a/flange/model.py
+++ b/flange/model.py
@@ -1,4 +1,4 @@
-import url_scheme_python as pyurl
+from . import url_scheme_python as pyurl
import jsonschema
import datetime
import six
|
Use a relative import to fix the test in another project
|
py
|
diff --git a/taskw/warrior.py b/taskw/warrior.py
index <HASH>..<HASH> 100644
--- a/taskw/warrior.py
+++ b/taskw/warrior.py
@@ -465,6 +465,10 @@ class TaskWarriorExperimental(TaskWarriorBase):
return tasks['completed'][-1]
def task_update(self, task):
+
+ if 'uuid' not in task:
+ return None, dict()
+
id, _task = self.get_task(uuid=task['uuid'])
if 'id' in task:
|
If the task does not have a uuid, don't proceed with the update
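
A hedged sketch of the guard-clause pattern used above; update_record is hypothetical, but it returns the same (id, dict) shape so callers need no special casing:

def update_record(record):
    if 'uuid' not in record:
        return None, dict()  # nothing to update; mirror the normal shape
    return record['uuid'], {'status': 'updated'}

print(update_record({}))               # (None, {})
print(update_record({'uuid': 'abc'}))  # ('abc', {'status': 'updated'})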
|
py
|
diff --git a/taskforce/task.py b/taskforce/task.py
index <HASH>..<HASH> 100644
--- a/taskforce/task.py
+++ b/taskforce/task.py
@@ -1269,7 +1269,7 @@ Params are:
# Register with legion
self._legion.task_add(self, periodic=self._task_periodic)
- def __del__(self):
+ def close(self):
if self._legion:
try: self._event_deregister()
except: pass
@@ -1794,7 +1794,7 @@ Params are:
if self._stopped:
if self._dnr:
log.info("%s Task %s stopped and will now be deleted", my(self), self._name)
- self.__del__()
+ self.close()
return False
elif once:
log.debug("%s 'once' task %s exited %s ago",
|
#6. Removed __del__(). This class holds no resources directly, and we can depend on the underlying objects that do to clean up. Otherwise there is a race in Python 3 between the task cleanup and the watch_files cleanup.
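
An illustrative sketch of the design choice (the Task/registry names are hypothetical): __del__ runs at an unpredictable point during garbage collection, so cleanup that touches other objects can race; an explicit close() makes teardown deterministic:

class Task(object):
    def __init__(self, registry):
        self.registry = registry
        self.registry.append(self)

    def close(self):
        # caller-controlled, deterministic teardown
        if self in self.registry:
            self.registry.remove(self)

registry = []
task = Task(registry)
task.close()
assert registry == []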
|
py
|
diff --git a/views.py b/views.py
index <HASH>..<HASH> 100644
--- a/views.py
+++ b/views.py
@@ -711,7 +711,8 @@ def switchArtist(request):
@login_required
def helpMe(request):
msg = request.POST.get('message')
- send_mail('Frog Help', msg, request.user.email, MANAGERS)
+ toAddr = [m[1] for m in MANAGERS]
+ send_mail('Frog Help', msg, request.user.email, toAddr)
return HttpResponse()
|
Fixed bug with helpMe and email addresses
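
A minimal sketch of the bug, assuming Django's convention that MANAGERS is a list of ('Name', 'email') tuples while send_mail() expects a flat list of addresses:

MANAGERS = [('Alice', 'alice@example.com'), ('Bob', 'bob@example.com')]
to_addrs = [m[1] for m in MANAGERS]
print(to_addrs)  # ['alice@example.com', 'bob@example.com']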
|
py
|
diff --git a/python/dllib/src/bigdl/dllib/utils/spark.py b/python/dllib/src/bigdl/dllib/utils/spark.py
index <HASH>..<HASH> 100644
--- a/python/dllib/src/bigdl/dllib/utils/spark.py
+++ b/python/dllib/src/bigdl/dllib/utils/spark.py
@@ -18,13 +18,11 @@ import os
import glob
from pyspark import SparkContext
-
from zoo.common.nncontext import init_spark_conf
-
from zoo import init_nncontext
-class SparkRunner():
+class SparkRunner:
def __init__(self,
spark_log_level="WARN",
redirect_spark_log=True):
|
Expose driver core in RayContext; polish ray docs (#<I>) * update doc * style
|
py
|
diff --git a/eng/versioning/set_versions.py b/eng/versioning/set_versions.py
index <HASH>..<HASH> 100644
--- a/eng/versioning/set_versions.py
+++ b/eng/versioning/set_versions.py
@@ -184,6 +184,10 @@ def update_versions_file_for_nightly_devops(build_type, build_qualifier, artifac
else:
module.dependency += '-' + unreleased_build_qualifier
+ # The final unreleased dependency version needs to be of the form
+ # [1.0.0-dev.YYYYMMDD.,] <-- note the ., this is the version range for Maven
+ module.dependency = '[{},]'.format(module.dependency)
+
print(f'updating unreleased/beta dependency {module.name} to use dependency version range: "{module.dependency}"')
version_map[module.name] = module
|
Version range needs the ',' at the end (#<I>) * Version range needs the ',' at the end * version range needs to be in braces. Used override because nothing in this PR affects a core-ci run. The change had to be verified on a run that sets the dev version in a pipeline that has unreleased dependencies.
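
An illustrative sketch of the formatting step; the version string is hypothetical, and '[X,]' is Maven range syntax meaning "version X or anything later":

dependency = '1.0.0-dev.20201105.1'      # hypothetical dev version
dependency = '[{},]'.format(dependency)
print(dependency)  # [1.0.0-dev.20201105.1,]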
|
py
|
diff --git a/pastml/ml.py b/pastml/ml.py
index <HASH>..<HASH> 100755
--- a/pastml/ml.py
+++ b/pastml/ml.py
@@ -897,7 +897,7 @@ def optimise_likelihood(forest, avg_br_len, tree_len, num_edges, character, stat
optimise_kappa=optimise_kappa, avg_br_len=avg_br_len,
tree_len=tree_len, num_edges=num_edges, model=model,
observed_frequencies=observed_frequencies,
- tau=tau, optimise_tau=optimise_tau)
+ tau=tau, optimise_tau=False)
if np.any(np.isnan(likelihood) or likelihood == -np.inf):
raise PastMLLikelihoodError('Failed to calculate the likelihood for your tree, '
'please check that you do not have contradicting {} states specified '
|
Preoptimise tau with sf (frequencies fixed), then fix it for further optimisations.
|
py
|