diff
stringlengths 139
3.65k
| message
stringlengths 8
627
| diff_languages
stringclasses 1
value |
|---|---|---|
diff --git a/salt/states/user.py b/salt/states/user.py
index <HASH>..<HASH> 100644
--- a/salt/states/user.py
+++ b/salt/states/user.py
@@ -26,7 +26,7 @@ def present(
uid=None,
gid=None,
groups=None,
- home=False,
+ home=True,
password=None,
enforce_password=True,
shell=None,
|
change user to create homedir by default
|
py
|
diff --git a/indra/tools/reading/readers/core.py b/indra/tools/reading/readers/core.py
index <HASH>..<HASH> 100644
--- a/indra/tools/reading/readers/core.py
+++ b/indra/tools/reading/readers/core.py
@@ -75,6 +75,24 @@ class ReadingData(object):
return self._statements[:]
+ def to_json(self):
+ return {'content_id': self.content_id,
+ 'reader_name': self.reader_class.name,
+ 'reader_version': self.reader_version,
+ 'reading_format': self.format,
+ 'reading': self.reading}
+
+ @classmethod
+ def from_json(cls, jd):
+ jd['reader_class'] = get_reader_class(jd.pop('reader_name'))
+ stored_version = jd['reader_version']
+ current_version = jd['reader_class'].get_version()
+ if stored_version != current_version:
+ logger.debug("Current reader version does not match stored "
+ "version: %s (current) vs %s (stored)"
+ % (current_version, stored_version))
+ return cls(**jd)
+
class Reader(object):
"""This abstract object defines and some general methods for readers."""
|
Implement JSON (de)serialization.
|
py
|
diff --git a/scriptworker/cot/verify.py b/scriptworker/cot/verify.py
index <HASH>..<HASH> 100644
--- a/scriptworker/cot/verify.py
+++ b/scriptworker/cot/verify.py
@@ -904,8 +904,8 @@ def verify_cot_signatures(chain):
"""
for link in chain.links:
- unsigned_path = link.get_artifact_full_path('chain-of-trust.json')
- ed25519_signature_path = link.get_artifact_full_path('chain-of-trust.json.sig')
+ unsigned_path = link.get_artifact_full_path('public/chain-of-trust.json')
+ ed25519_signature_path = link.get_artifact_full_path('public/chain-of-trust.json.sig')
try:
verify_link_ed25519_cot_signature(chain, link, unsigned_path, ed25519_signature_path)
except Exception as exc:
|
cot artifacts are in public/
|
py
|
diff --git a/src/sagemaker/local/image.py b/src/sagemaker/local/image.py
index <HASH>..<HASH> 100644
--- a/src/sagemaker/local/image.py
+++ b/src/sagemaker/local/image.py
@@ -489,12 +489,9 @@ class _SageMakerContainer(object):
os.path.join(self.container_root, DOCKER_COMPOSE_FILENAME),
"up",
"--build",
- "--abort-on-container-exit",
+ "--abort-on-container-exit" if not detached else "--detach", # mutually exclusive
]
- if detached:
- command.append("-d")
-
logger.info("docker command: %s", " ".join(command))
return command
|
fix: Fix local _SageMakerContainer detached mode (aws#<I>) (#<I>)
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -1,18 +1,19 @@
from distutils.core import setup
+def long_description():
+ with open('README.md', 'r') as readme:
+ readme_text = readme.read()
+ return(readme_text)
+
setup(name='PyUserInput',
- version='0.1',
+ version='0.1.1',
description='A simple, cross-platform module for mouse and keyboard control',
- long_description='''PyUserInput provides cross-platform tools for the
- programmatic fabrication of user input through the mouse and keyboard. Through
- the PyKeyboardEvent and PyMouseEvent classes one can also define event
- handling for detection of user keyboard and mouse input. Mouse support exists
- for Mac, Linux, and Windows. At this time, keyboard support consists of Linux
- and Windows.''',
+ long_description=long_description(),
author='Paul Barton',
#Original author of PyMouse: Pepijn de Vos
author_email='pablo.barton@gmail.com',
url='https://github.com/SavinaRoja/PyUserInput',
packages = ['pykeyboard', 'pymouse'],
- license='LICENSE.txt',
+ license='http://www.gnu.org/licenses/gpl-3.0.html',
+ keywords='mouse keyboard user input event'
)
|
added keywords and long_description now is README
|
py
|
diff --git a/tgbot/pluginbase.py b/tgbot/pluginbase.py
index <HASH>..<HASH> 100644
--- a/tgbot/pluginbase.py
+++ b/tgbot/pluginbase.py
@@ -53,7 +53,8 @@ class TGPluginBase(object):
if in_message.text is None:
return
- m = self.bot.models.Message.create(
+ # TODO: revisit this once duplicate incoming messages are dealt with (in issue #25)
+ m, _ = self.bot.models.Message.create_or_get(
id=in_message.message_id,
group_chat=chat,
sender=sender,
|
fix for #<I> - do not re-create incoming message on need_reply
|
py
|
diff --git a/buildozer/targets/android.py b/buildozer/targets/android.py
index <HASH>..<HASH> 100644
--- a/buildozer/targets/android.py
+++ b/buildozer/targets/android.py
@@ -15,7 +15,7 @@ ANDROID_API = '14'
ANDROID_MINAPI = '8'
ANDROID_SDK_VERSION = '21'
ANDROID_NDK_VERSION = '9c'
-APACHE_ANT_VERSION = '1.8.4'
+APACHE_ANT_VERSION = '1.9.4'
import traceback
import os
|
upgrade ant tool, as ant < <I> cannot handle java 8
|
py
|
diff --git a/easymode/admin/abstract.py b/easymode/admin/abstract.py
index <HASH>..<HASH> 100644
--- a/easymode/admin/abstract.py
+++ b/easymode/admin/abstract.py
@@ -4,13 +4,8 @@ class AbstractOrderedModel(models.Model):
"""
This model includes functionality for ordering.
"""
- order = models.PositiveIntegerField()
-
- def save(self, force_insert=False, force_update=False):
- # this is the last model in the list except if there are 10000000 models.
- order = 99999999
- super(AbstractOrderedModel, self).save(False, False)
-
+ order = models.AutoField()
+
class Meta:
abstract = True
ordering = ('order',)
|
Improved drag and drop admin model base.
|
py
|
diff --git a/parsedmarc/cli.py b/parsedmarc/cli.py
index <HASH>..<HASH> 100644
--- a/parsedmarc/cli.py
+++ b/parsedmarc/cli.py
@@ -223,6 +223,8 @@ def _main():
help="only print errors and warnings")
arg_parser.add_argument("--debug", action="store_true",
help="print debugging information")
+ arg_parser.add_argument("--log-file", default=None,
+ help="redirect logging to file")
arg_parser.add_argument("-v", "--version", action="version",
version=__version__)
@@ -237,6 +239,11 @@ def _main():
if args.debug:
logging.basicConfig(level=logging.DEBUG)
logger.setLevel(logging.DEBUG)
+ if args.log_file:
+ fh = logging.FileHandler(args.log_file)
+ formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
+ fh.setFormatter(formatter)
+ logger.addHandler(fh)
if args.host is None and len(args.file_path) == 0:
arg_parser.print_help()
exit(1)
|
added option to redirect log into a specified file
|
py
|
diff --git a/discord/state.py b/discord/state.py
index <HASH>..<HASH> 100644
--- a/discord/state.py
+++ b/discord/state.py
@@ -194,7 +194,7 @@ class ConnectionState:
def parse_guild_member_add(self, data):
server = self._get_server(data.get('guild_id'))
- member = Member(server=server, deaf=False, mute=False, **data)
+ member = Member(server=server, **data)
member.roles.append(server.default_role)
server._add_member(member)
self.dispatch('member_join', member)
|
Guild member add will include deaf and mute now, do not provide default values
|
py
|
diff --git a/ella/photos/models.py b/ella/photos/models.py
index <HASH>..<HASH> 100644
--- a/ella/photos/models.py
+++ b/ella/photos/models.py
@@ -99,7 +99,7 @@ class Photo(models.Model):
thumbUrl = self.thumb_url()
if not thumbUrl:
return mark_safe("""<strong>%s</strong>""" % ugettext('Thumbnail not available'))
- return mark_safe("""<a href="%s" class="thickbox"><img src="%s" alt="Thumbnail %s" /></a>""" % (self.image.url, thumbUrl, self.title))
+ return mark_safe("""<a href="%s" class="thickbox" title="%s"><img src="%s" alt="Thumbnail %s" /></a>""" % (self.image.url, self.title, thumbUrl, self.title))
thumb.allow_tags = True
def get_thumbnail_path(self, image_name=None):
|
Added photo title to thickbox overlay.
|
py
|
diff --git a/dirutility/_version.py b/dirutility/_version.py
index <HASH>..<HASH> 100644
--- a/dirutility/_version.py
+++ b/dirutility/_version.py
@@ -1 +1 @@
-__version__ = '0.3.6'
+__version__ = '0.3.7'
|
Updated to version <I>
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -8,8 +8,9 @@ if os.path.exists("README.rst"):
setup(name="xcat",
- version="0.7",
+ version="0.7.1",
author="Tom Forbes",
+ license="MIT",
author_email="tom@tomforb.es",
package_dir = {'xcat': 'xcat'},
packages = ["xcat"] + ["xcat." + p for p in find_packages("xcat")],
@@ -26,5 +27,6 @@ setup(name="xcat",
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.3',
- 'Programming Language :: Python :: 3.2'
+ 'Programming Language :: Python :: 3.2',
+ 'License :: OSI Approved :: MIT License',
])
\ No newline at end of file
|
Added license to setup.py
|
py
|
diff --git a/tests/main/test_http.py b/tests/main/test_http.py
index <HASH>..<HASH> 100644
--- a/tests/main/test_http.py
+++ b/tests/main/test_http.py
@@ -320,7 +320,7 @@ class HttpEntryTestCase(HttpTestCase):
Entry.single_register(Entry.POST, url, status=201)
file_obj = open("tests/fluidicon.png", "rb")
files = {"content": file_obj}
- r = requests.post(url, files=files, data={}, verify=False)
+ r = requests.post(url, files=files, data={})
self.assertEqual(r.status_code, 201)
@mocketize
|
Small refactor. (#<I>)
|
py
|
diff --git a/GPy/util/datasets.py b/GPy/util/datasets.py
index <HASH>..<HASH> 100644
--- a/GPy/util/datasets.py
+++ b/GPy/util/datasets.py
@@ -140,7 +140,7 @@ def download_url(url, store_directory, save_name=None, messages=True, suffix='')
meta = response.info()
content_length_str = meta.get("Content-Length")
if content_length_str:
- file_size = int(content_length_str[0])
+ file_size = int(content_length_str)
else:
file_size = None
status = ""
|
fix: Bug in dataset (in fn download_url) which wrongly interprets the Content-Length meta data, and just takes first character.
|
py
|
diff --git a/source/rafcon/utils/gui_functions.py b/source/rafcon/utils/gui_functions.py
index <HASH>..<HASH> 100644
--- a/source/rafcon/utils/gui_functions.py
+++ b/source/rafcon/utils/gui_functions.py
@@ -46,7 +46,7 @@ def call_gui_callback(callback, *args, **kwargs):
global exception_info, result
result = None
try:
- result = callback(*args)
+ result = callback(*args, **kwargs)
except:
# Exception within this asynchronously called function won't reach pytest. This is why we have to store
# the information about the exception to re-raise it at the end of the synchronous call.
@@ -57,7 +57,7 @@ def call_gui_callback(callback, *args, **kwargs):
condition.release()
if "priority" in kwargs:
- priority = kwargs["priority"]
+ priority = kwargs.pop("priority")
else:
priority = GLib.PRIORITY_LOW
|
feat(gui_functions): pass kwargs to callback Why hasn't this been done before? Why was that not needed before??
|
py
|
diff --git a/pub/pub.py b/pub/pub.py
index <HASH>..<HASH> 100644
--- a/pub/pub.py
+++ b/pub/pub.py
@@ -54,6 +54,16 @@ def get_tasks(do_tasks, dep_graph):
"""Given a list of tasks to perform and a dependency graph, return the tasks
that must be performed, in the correct order"""
task_order = DiGraph()
+ dep_graph = dep_graph.copy()
+
+ #first add dependencies between the tasks to perform,
+ #to make sure that we run the tasks in the order given
+ #on the cmd line. "pub install clean" needs a dep
+ #clean -> install. This dep is ephemeral, so we operate
+ #on a copy of the dep graph
+ for i, task in enumerate(do_tasks):
+ if i+1 < len(do_tasks):
+ dep_graph.add_edge(do_tasks[i+1], task)
for task in do_tasks:
task_order.add_node(task)
|
add dependencies between tasks when multiple tasks are given on the command line
|
py
|
diff --git a/tests/test_function_name.py b/tests/test_function_name.py
index <HASH>..<HASH> 100644
--- a/tests/test_function_name.py
+++ b/tests/test_function_name.py
@@ -8,7 +8,7 @@ def test_stacktrace():
view = View.from_json(min_map)
stacktrace = [
- (0, 63, 'e', 'onFailure'),
+ (0, 63, u'e', 'onFailure'),
(0, 135, 'r', 'invoke'),
(0, 182, 'i', 'test'),
(0, 244, 'nonexisting', None),
|
Change one test to pass unicode function names
|
py
|
diff --git a/dataproperty/_factory.py b/dataproperty/_factory.py
index <HASH>..<HASH> 100644
--- a/dataproperty/_factory.py
+++ b/dataproperty/_factory.py
@@ -13,7 +13,6 @@ from .converter import NopConverterCreator
from .converter import IntegerConverterCreator
from .converter import FloatConverterCreator
from .converter import DateTimeConverterCreator
-from .converter import InfinityConverterCreator
from ._type_checker_creator import NoneTypeCheckerCreator
from ._type_checker_creator import IntegerTypeCheckerCreator
from ._type_checker_creator import FloatTypeCheckerCreator
@@ -88,4 +87,4 @@ class InfinityTypeFactory(TypeConverterFactoryInterface):
@property
def value_converter_factory(self):
- return InfinityConverterCreator()
+ return FloatConverterCreator()
|
Replace InfinityConverterCreator to FloatConverterCreator Since these classes are the same behavior.
|
py
|
diff --git a/pyOCD/gdbserver/gdbserver.py b/pyOCD/gdbserver/gdbserver.py
index <HASH>..<HASH> 100644
--- a/pyOCD/gdbserver/gdbserver.py
+++ b/pyOCD/gdbserver/gdbserver.py
@@ -105,7 +105,7 @@ class GDBServer(threading.Thread):
def run(self):
while True:
new_command = False
- data = []
+ data = ""
logging.info('GDB server started')
self.shutdown_event.clear()
@@ -141,7 +141,7 @@ class GDBServer(threading.Thread):
if self.shutdown_event.isSet() or self.detach_event.isSet():
break
self.abstract_socket.setBlocking(0)
- data = self.abstract_socket.read()
+ data += self.abstract_socket.read()
if data.index("$") >= 0 and data.index("#") >= 0:
break
except (ValueError, socket.error):
|
Fix socket race condition which can cause bytes to be dropped Rather than discarding data if it is not a complete packet, keep appending until a complete packet is formed.
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -17,9 +17,9 @@ DESC = " ".join(__import__('django_schemata').__doc__.splitlines()).strip()
setup(
name = "django-schemata",
version = get_git_version(),
- url = '',
+ url = 'https://github.com/tuttle/django-schemata',
author = 'Vlada Macek',
- author_email = '',
+ author_email = 'macek@sandbox.cz',
description = DESC,
long_description = read_file('README'),
packages = find_packages(),
|
Thank you, Erik. Now added some contacts.
|
py
|
diff --git a/src/python/test/test_dxclient.py b/src/python/test/test_dxclient.py
index <HASH>..<HASH> 100755
--- a/src/python/test/test_dxclient.py
+++ b/src/python/test/test_dxclient.py
@@ -447,7 +447,7 @@ class TestDXBuildReportHtml(unittest.TestCase):
desc = json.loads(run(u"dx describe {record} --details --json".format(record=report["recordId"])))
self.assertEquals(desc["types"], [u"Report", u"HTMLReport"])
self.assertEquals(desc["name"], u"html_report")
- self.assertEquals(desc["details"]["files"][0]["$dnanexus_link"]["id"], fileId)
+ self.assertEquals(desc["details"]["files"][0]["$dnanexus_link"], fileId)
self.assertEquals(desc["details"]["width"], "47")
self.assertEquals(desc["details"]["height"], "63")
desc = json.loads(run(u"dx describe {file} --details --json".format(file=fileId)))
|
Fixing test broken by changing the on reports
|
py
|
diff --git a/plop/collector.py b/plop/collector.py
index <HASH>..<HASH> 100644
--- a/plop/collector.py
+++ b/plop/collector.py
@@ -22,6 +22,7 @@ class Collector(object):
assert mode in Collector.MODES
timer, sig = Collector.MODES[self.mode]
signal.signal(sig, self.handler)
+ signal.siginterrupt(sig, False)
self.reset()
def reset(self):
|
collector: don't interrupt syscalls
|
py
|
diff --git a/pycbc/inference/sampler/dynesty.py b/pycbc/inference/sampler/dynesty.py
index <HASH>..<HASH> 100644
--- a/pycbc/inference/sampler/dynesty.py
+++ b/pycbc/inference/sampler/dynesty.py
@@ -164,7 +164,8 @@ class DynestySampler(BaseSampler):
'_UPDATE', '_PROPOSE',
'evolve_point', 'use_pool', 'queue_size',
'use_pool_ptform', 'use_pool_logl',
- 'use_pool_evolve']
+ 'use_pool_evolve', 'use_pool_update',
+ 'pool', 'M']
def run(self):
diff_niter = 1
|
fix pool properties upon resume for dynesty <I> (#<I>)
|
py
|
diff --git a/tests/test_pdf.py b/tests/test_pdf.py
index <HASH>..<HASH> 100644
--- a/tests/test_pdf.py
+++ b/tests/test_pdf.py
@@ -42,5 +42,6 @@ class PDFTests(unittest.TestCase):
trusted_cert_pems = (fh.read(),)
with open(fname, 'rb') as fh:
data = fh.read()
- (hashok, signatureok, certok) = pdf.verify(data, trusted_cert_pems)
- assert signatureok and hashok and certok
+ results = pdf.verify(data, trusted_cert_pems)
+ for (hashok, signatureok, certok) in results:
+ assert signatureok and hashok and certok
|
#<I> - verification of signatures of a multiple-signed file
|
py
|
diff --git a/python/ray/ml/config.py b/python/ray/ml/config.py
index <HASH>..<HASH> 100644
--- a/python/ray/ml/config.py
+++ b/python/ray/ml/config.py
@@ -169,4 +169,4 @@ class RunConfig:
stop: Optional[Union[Mapping, "Stopper", Callable[[str, Mapping], bool]]] = None
failure: Optional[FailureConfig] = None
sync_config: Optional[SyncConfig] = None
- verbose: Union[int, Verbosity] = Verbosity.V2_TRIAL_NORM
+ verbose: Union[int, Verbosity] = Verbosity.V3_TRIAL_DETAILS
|
[air] Update to use more verbose default config for trainers. (#<I>) Internal user feedback showing that more detailed logging is preferred: <URL>
|
py
|
diff --git a/djs_playground/settings.py b/djs_playground/settings.py
index <HASH>..<HASH> 100644
--- a/djs_playground/settings.py
+++ b/djs_playground/settings.py
@@ -118,7 +118,6 @@ USE_TZ = True
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'djs_playground/static/')
-# STATICFILES_STORAGE = 'django.contrib.staticfiles.storage.ManifestStaticFilesStorage'
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'djs_playground/media/')
|
Remove unnecessary comment (#<I>)
|
py
|
diff --git a/src/arcrest/manageorg/_content.py b/src/arcrest/manageorg/_content.py
index <HASH>..<HASH> 100644
--- a/src/arcrest/manageorg/_content.py
+++ b/src/arcrest/manageorg/_content.py
@@ -6,7 +6,7 @@ import urlparse
import json
import os
import mmap
-from urlparse import urlparse
+
from os.path import splitext, basename
########################################################################
@@ -652,7 +652,7 @@ class Item(BaseAGOLClass):
if self._thumbnail is not None:
imgUrl = self._baseUrl + "/" + self._itemId + "/info/" + self._thumbnail
- disassembled = urlparse(imgUrl)
+ disassembled = urlparse.urlparse(imgUrl)
onlineFileName, file_ext = splitext(basename(disassembled.path))
fileNameSafe = "".join(x for x in fileName if x.isalnum()) + file_ext
result = self._download_file(self._baseUrl + "/" + self._itemId + "/info/" + self._thumbnail,
|
fix issues with urlparse
|
py
|
diff --git a/ipython_widgets/widgets/widget.py b/ipython_widgets/widgets/widget.py
index <HASH>..<HASH> 100644
--- a/ipython_widgets/widgets/widget.py
+++ b/ipython_widgets/widgets/widget.py
@@ -132,7 +132,7 @@ class Widget(LoggingConfigurable):
@staticmethod
def handle_comm_opened(comm, msg):
"""Static method, called when a widget is constructed."""
- widget_class = import_item(msg['content']['data']['widget_class'])
+ widget_class = import_item(str(msg['content']['data']['widget_class']))
widget = widget_class(comm=comm)
|
Make sure class name is a string.
|
py
|
diff --git a/src/OpenAccess_EPUB/opf/opf.py b/src/OpenAccess_EPUB/opf/opf.py
index <HASH>..<HASH> 100644
--- a/src/OpenAccess_EPUB/opf/opf.py
+++ b/src/OpenAccess_EPUB/opf/opf.py
@@ -105,7 +105,7 @@ class MetaOPF(object):
mimetypes = {'jpg': 'image/jpeg', 'jpeg': 'image/jpeg', 'xml':
'application/xhtml+xml', 'png': 'image/png', 'css':
'text/css', 'ncx': 'application/x-dtbncx+xml', 'gif':
- 'image/gif', 'tif': 'image/tif'}
+ 'image/gif', 'tif': 'image/tif', 'pdf': 'application/pdf'}
current_dir = os.getcwd()
os.chdir(self.location)
for path, _subname, filenames in os.walk('OPS'):
|
added pdf as a mimetype
|
py
|
diff --git a/taxtastic/taxonomy.py b/taxtastic/taxonomy.py
index <HASH>..<HASH> 100644
--- a/taxtastic/taxonomy.py
+++ b/taxtastic/taxonomy.py
@@ -92,7 +92,6 @@ class Taxonomy(object):
self.ranks.insert(self.ranks.index(parent_rank) + 1, rank)
self.rankset = set(self.ranks)
-
def _node(self, tax_id):
"""
Returns parent_id, rank
@@ -235,10 +234,10 @@ class Taxonomy(object):
def is_below(self, lower, upper):
return lower in self.ranks_below(upper)
- def ranks_below(self, rank):
+ def ranks_below(self, rank, depth=None):
below = []
try:
- below = self.ranks[self.ranks.index(rank):]
+ below = self.ranks[self.ranks.index(rank):depth]
except ValueError as err:
log.error(err)
return below
|
added a depth option to a subset of below ranks
|
py
|
diff --git a/sharepoint/__init__.py b/sharepoint/__init__.py
index <HASH>..<HASH> 100644
--- a/sharepoint/__init__.py
+++ b/sharepoint/__init__.py
@@ -1,4 +1,4 @@
from .auth import basic_auth_opener
from .site import SharePointSite
-__version__ = '0.2.1'
+__version__ = '0.3'
|
Bump to version <I>.
|
py
|
diff --git a/photutils/aperture/attributes.py b/photutils/aperture/attributes.py
index <HASH>..<HASH> 100644
--- a/photutils/aperture/attributes.py
+++ b/photutils/aperture/attributes.py
@@ -59,7 +59,7 @@ class PixelPositions(ApertureAttribute):
if isinstance(value, zip):
value = tuple(value)
- value = np.atleast_2d(value) # np.ndarray
+ value = np.atleast_2d(value).astype(float) # np.ndarray
self._validate(value)
if isinstance(value, u.Quantity):
@@ -78,6 +78,10 @@ class PixelPositions(ApertureAttribute):
raise TypeError(f'{self.name} must be an (x, y) pixel position '
'or a list or array of (x, y) pixel positions.')
+ if np.any(~np.isfinite(value)):
+ raise ValueError(f'{self.name} must not contain any non-finite '
+ '(e.g. NaN or inf) positions')
+
class SkyCoordPositions(ApertureAttribute):
"""
|
Add check for non-finite positions
|
py
|
diff --git a/multiqc/modules/ivar/ivar.py b/multiqc/modules/ivar/ivar.py
index <HASH>..<HASH> 100644
--- a/multiqc/modules/ivar/ivar.py
+++ b/multiqc/modules/ivar/ivar.py
@@ -101,8 +101,8 @@ class MultiqcModule(BaseMultiqcModule):
for l in f['f']:
match = re.search(regex, l)
if match:
- primer = matches.group(1)
- counts = int(matches.group(2))
+ primer = match.group(1)
+ counts = int(match.group(2))
primers[primer] = counts
return primers
|
Still not working :-( )
|
py
|
diff --git a/src/rabird/core/cstring.py b/src/rabird/core/cstring.py
index <HASH>..<HASH> 100644
--- a/src/rabird/core/cstring.py
+++ b/src/rabird/core/cstring.py
@@ -6,9 +6,14 @@ Provided C-style string process methods.
@author Hong-She Liang <starofrainnight@gmail.com>
'''
+import six
+
def escape(text):
- return text.encode('unicode-escape').replace('"', '\\"').replace("'", "\\'")
+ if six.PY2:
+ return text.encode('unicode-escape').replace('"', '\\"').replace("'", "\\'")
+ else:
+ return text.encode('unicode-escape').decode().replace('"', '\\"').replace("'", "\\'")
def unescape(text):
- return text.decode('unicode-escape')
+ return six.b(text).decode('unicode-escape')
|
Fixed compatible with python 2
|
py
|
diff --git a/docker/version.py b/docker/version.py
index <HASH>..<HASH> 100644
--- a/docker/version.py
+++ b/docker/version.py
@@ -1 +1 @@
-version = "0.7.3-dev"
+version = "1.0.0"
|
Bumped version to <I>
|
py
|
diff --git a/openquake/engine/engine.py b/openquake/engine/engine.py
index <HASH>..<HASH> 100644
--- a/openquake/engine/engine.py
+++ b/openquake/engine/engine.py
@@ -353,8 +353,7 @@ def run_job(cfg_file, log_level, log_file, exports='', hazard_output_id=None,
edir = job.get_param('export_dir')
log_file = os.path.join(edir, 'calc_%d.log' % job.id)
logging.root.addHandler(logs.LogStreamHandler(job))
- if log_file:
- touch_log_file(log_file) # check if writeable
+ touch_log_file(log_file) # check if writeable
# instantiate the calculator and run the calculation
t0 = time.time()
|
touch_log_file must be done always
|
py
|
diff --git a/pydle/async.py b/pydle/async.py
index <HASH>..<HASH> 100644
--- a/pydle/async.py
+++ b/pydle/async.py
@@ -192,9 +192,9 @@ class EventLoop:
def _do_schedule_periodically(self, interval, callback, args, kwargs):
# Use a wrapper function
- return self.io_loop.add_timeout(interval, functools.partial(self._periodical_handler, interval, callback, args, kwargs))
+ return self.io_loop.add_timeout(interval, functools.partial(self._periodic_handler, interval, callback, args, kwargs))
- def _periodical_handler(self, interval, callback, args, kwargs):
+ def _periodic_handler(self, interval, callback, args, kwargs):
# Call callback, and schedule again if it doesn't return False.
handle = self._do_schedule_in(interval, callback, args, kwargs)
if callback(*args, **kwargs) == False:
|
Replace _periodical_handler with something that reminds less of menstrual cycles.
|
py
|
diff --git a/phylotoast/biom_calc.py b/phylotoast/biom_calc.py
index <HASH>..<HASH> 100644
--- a/phylotoast/biom_calc.py
+++ b/phylotoast/biom_calc.py
@@ -141,3 +141,14 @@ def transform_raw_abundance(biom, fn=math.log10,
"""
totals = raw_abundance(biom, sampleIDs, sample_abd)
return {sid: fn(abd) for sid, abd in totals.items()}
+
+
+def arcsine_sqrt_transform(rel_abd):
+ """
+ Takes the proportion data from relative_abundance() and applies the variance
+ stabilizing arcsine square root transformation:
+
+ X = sin^{-1} \sqrt p
+ """
+ arcsint = lambda p: math.asin(math.sqrt(p))
+ return {col_id: {row_id: arcsint(rel_abd[col_id][row_id]) for row_id in rel_abd[col_id]} for col_id in rel_abd}
|
Adds method to apply the Arcsine Square Root transform to relative abundance data
|
py
|
diff --git a/bddrest/specification/call.py b/bddrest/specification/call.py
index <HASH>..<HASH> 100644
--- a/bddrest/specification/call.py
+++ b/bddrest/specification/call.py
@@ -88,6 +88,9 @@ class Call(metaclass=ABCMeta):
query = None
parsedurl = urlparse(url)
+ if url is None:
+ return None, None, None
+
# Parsing the querystrings if available
if parsedurl.query:
query = normalize_query_string(parsedurl.query)
|
Fix a bug when url is None
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100755
--- a/setup.py
+++ b/setup.py
@@ -72,6 +72,7 @@ setup(name="grimoire-elk",
'perceval-opnfv>=0.1.2',
'perceval-puppet>=0.1.4',
'perceval-finos>=0.1.0',
+ 'perceval-weblate>=0.1.0',
'cereslib>=0.1.0',
'grimoirelab-toolkit>=0.1.4',
'sortinghat>=0.6.2',
|
Update setup.py This commit updates the setup.py with the latest dependencies required for perceval. It is necessary as it is responsible for failures in the builds of other repos.
|
py
|
diff --git a/perfdump/plugin.py b/perfdump/plugin.py
index <HASH>..<HASH> 100644
--- a/perfdump/plugin.py
+++ b/perfdump/plugin.py
@@ -121,6 +121,12 @@ class PerfDumpPlugin(Plugin):
row['file']))
stream.writeln()
row = cur.fetchone()
+
+ cur.execute('SELECT SUM(elapsed) FROM times')
+ row = cur.fetchone()
+ stream.writeln('-'*10)
+ stream.writeln()
+ stream.writeln('Total time: {}s'.format(row['SUM(elapsed)']))
stream.writeln()
|
Add Aggregate Time - Add total time spent in tests to report output
|
py
|
diff --git a/timeside/server/admin.py b/timeside/server/admin.py
index <HASH>..<HASH> 100644
--- a/timeside/server/admin.py
+++ b/timeside/server/admin.py
@@ -47,4 +47,4 @@ admin.site.register(Task, TaskAdmin)
admin.site.register(Analysis)
admin.site.register(AnalysisTrack)
admin.site.register(Annotation)
-admin.site.regsiter(AnnotationTrack)
+admin.site.register(AnnotationTrack)
|
Server: Add more models in Admin - fix
|
py
|
diff --git a/coreir/lib.py b/coreir/lib.py
index <HASH>..<HASH> 100644
--- a/coreir/lib.py
+++ b/coreir/lib.py
@@ -1,6 +1,7 @@
from ctypes import cdll
import platform
import os
+import subprocess
def is_binary(path):
@@ -17,8 +18,10 @@ def is_binary(path):
# see if a coreir binary exists in the user's path
COREIR_BINARY_PATH = None
-with os.popen("which -a coreir") as process:
- for line in process.read().splitlines():
+with subprocess.Popen(["which", "-a", "coreir"],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.DEVNULL) as process:
+ for line in process.stdout.read().splitlines():
if is_binary(line):
COREIR_BINARY_PATH = line
break
|
Update "which" logic to avoid dumping to stderr Before, stderr would dump by default to stderr which would cause a "false" error to be reported to the user's terminal. This also moves from os.popen to subprocess.Popen to keep up with the latest Python convention
|
py
|
diff --git a/src/ocrmypdf/exec/__init__.py b/src/ocrmypdf/exec/__init__.py
index <HASH>..<HASH> 100644
--- a/src/ocrmypdf/exec/__init__.py
+++ b/src/ocrmypdf/exec/__init__.py
@@ -278,7 +278,6 @@ def check_external_program(
need_version,
required_for=None,
recommended=False,
- **kwargs, # To consume log parameter
):
try:
found_version = version_checker()
|
Remove **kwargs from check_external_program; deprecated
|
py
|
diff --git a/sos/report/plugins/__init__.py b/sos/report/plugins/__init__.py
index <HASH>..<HASH> 100644
--- a/sos/report/plugins/__init__.py
+++ b/sos/report/plugins/__init__.py
@@ -335,7 +335,7 @@ class SoSPredicate(object):
]
return " ".join(msg).lstrip()
- def __nonzero__(self):
+ def __bool__(self):
"""Predicate evaluation hook.
"""
@@ -349,11 +349,6 @@ class SoSPredicate(object):
self._eval_arch())
and not self.dry_run)
- def __bool__(self):
- # Py3 evaluation ends in a __bool__() call where py2 ends in a call
- # to __nonzero__(). Wrap the latter here, to support both versions
- return self.__nonzero__()
-
def __init__(self, owner, dry_run=False, kmods=[], services=[],
packages=[], cmd_outputs=[], arch=[], required={}):
"""Initialise a new SoSPredicate object
|
[plugins] remove py2/3 relict in __nonzero__ / __bool__ Resolves: #<I>
|
py
|
diff --git a/reana_commons/tasks.py b/reana_commons/tasks.py
index <HASH>..<HASH> 100644
--- a/reana_commons/tasks.py
+++ b/reana_commons/tasks.py
@@ -8,6 +8,7 @@
"""REANA common tasks."""
import importlib
+import json
import logging
from kubernetes.client.rest import ApiException
@@ -37,13 +38,17 @@ def reana_ready():
def check_predefined_conditions():
"""Check k8s predefined conditions for the nodes."""
try:
- node_info = current_k8s_corev1_api_client.list_node()
- for node in node_info.items:
+ node_info = json.loads(
+ current_k8s_corev1_api_client.list_node(
+ _preload_content=False
+ ).data.decode()
+ )
+ for node in node_info["items"]:
# check based on the predefined conditions about the
# node status: MemoryPressure, OutOfDisk, KubeletReady
# DiskPressure, PIDPressure,
- for condition in node.status.conditions:
- if not condition.status:
+ for condition in node.get("status", {}).get("conditions", {}):
+ if not condition.get("status"):
return False
except ApiException as e:
log.error("Something went wrong while getting node information.")
|
tasks: skip client side node_list validation * Gets raw data from Kubernetes and avoids OpenAPI deserialization which fails due to <URL> exists, the Python Kubernetes library is still not compatible with it. More information at <URL>
|
py
|
diff --git a/telebot/types.py b/telebot/types.py
index <HASH>..<HASH> 100644
--- a/telebot/types.py
+++ b/telebot/types.py
@@ -296,12 +296,13 @@ class Message(JsonDeserializable):
if 'audio' in obj:
opts['audio'] = Audio.de_json(obj['audio'])
content_type = 'audio'
- if 'animation' in obj:
- opts['animation'] = Animation.de_json(obj['animation'])
- content_type = 'animation'
if 'document' in obj:
opts['document'] = Document.de_json(obj['document'])
content_type = 'document'
+ if 'animation' in obj:
+ # Document content type accompanies "animation", so "animation" should be checked below "document" to override it
+ opts['animation'] = Animation.de_json(obj['animation'])
+ content_type = 'animation'
if 'game' in obj:
opts['game'] = Game.de_json(obj['game'])
content_type = 'game'
|
Animation content_type "When you send gif telegram gives you animation and document at same time in update and when you parse that first if is animation and second is document because of this the content_type set document not animation"
|
py
|
diff --git a/helper-scripts/wsgi-loader.py b/helper-scripts/wsgi-loader.py
index <HASH>..<HASH> 100644
--- a/helper-scripts/wsgi-loader.py
+++ b/helper-scripts/wsgi-loader.py
@@ -22,7 +22,7 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
-import sys, os, re, imp, traceback, socket, select, struct
+import sys, os, re, imp, traceback, socket, select, struct, logging
from socket import _fileobject
options = {}
@@ -99,8 +99,7 @@ class RequestHandler:
except KeyboardInterrupt:
done = True
except Exception, e:
- traceback.print_tb(sys.exc_info()[2])
- sys.stderr.write(str(e.__class__) + ": " + str(e) + "\n")
+ logging.exception("WSGI application raised an exception!")
finally:
try:
client.close()
@@ -209,6 +208,9 @@ class RequestHandler:
if __name__ == "__main__":
+ logging.basicConfig(
+ level = logging.WARNING,
+ format = "[ pid=" + str(os.getpid()) + ", time=%(asctime)s ]: %(message)s")
handshake_and_read_startup_request()
app_module = load_app()
socket_filename, server_socket = create_server_socket()
|
Improve exception handling in the WSGI loader.
|
py
|
diff --git a/media_tree/admin/change_list.py b/media_tree/admin/change_list.py
index <HASH>..<HASH> 100644
--- a/media_tree/admin/change_list.py
+++ b/media_tree/admin/change_list.py
@@ -1,3 +1,4 @@
+import django
from media_tree.models import FileNode
from media_tree.admin.utils import get_current_request, is_search_request, \
get_request_attr
|
import django - got an error that django was undefined.
|
py
|
diff --git a/tests/SpiffWorkflow/bpmn2/test_workflows.py b/tests/SpiffWorkflow/bpmn2/test_workflows.py
index <HASH>..<HASH> 100644
--- a/tests/SpiffWorkflow/bpmn2/test_workflows.py
+++ b/tests/SpiffWorkflow/bpmn2/test_workflows.py
@@ -27,6 +27,26 @@ class MessagesTest(WorkflowTest):
self.assertEquals('Test Message', self.workflow.get_tasks(Task.READY)[0].task_spec.description)
+ def testRunThroughSaveAndRestore(self):
+
+ self.workflow = BpmnWorkflow(self.spec)
+ self.do_next_exclusive_step('Select Test', choice='Messages')
+ self.workflow.do_engine_steps()
+
+ self.save_restore()
+
+ self.assertEquals([], self.workflow.get_tasks(Task.READY))
+ self.assertEquals(1, len(self.workflow.get_tasks(Task.WAITING)))
+ self.workflow.accept_message('Wrong Message')
+ self.assertEquals([], self.workflow.get_tasks(Task.READY))
+ self.workflow.accept_message('Test Message')
+
+ self.save_restore()
+
+ self.assertEquals(1, len(self.workflow.get_tasks(Task.READY)))
+
+ self.assertEquals('Test Message', self.workflow.get_tasks(Task.READY)[0].task_spec.description)
+
|
Add save and restore test for WAITING for message. This fails currently.
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -57,7 +57,7 @@ setup(license="Apache License 2.0 (http://www.apache.org/licenses/LICENSE-2.0)",
"bioutils>=0.4.0,<1.0",
"configparser>=3.3.0",
"enum34",
- "ipython<6", # for hgvs-shell; >=6 for Py3 only
+ "ipython",
"parsley",
"psycopg2-binary",
"six",
|
Update setup.py (#<I>) Remove python2 compatibility version enforcement on ipython. Python2 support is being dropped in 1 month and ipython should not be locked at <<I>
|
py
|
diff --git a/pabot/pabot.py b/pabot/pabot.py
index <HASH>..<HASH> 100755
--- a/pabot/pabot.py
+++ b/pabot/pabot.py
@@ -494,7 +494,7 @@ def _print_elapsed(start, end):
if elapsed_hours > 0:
elapsed_string += '%d hours ' % elapsed_hours
elapsed_string += '%d minutes %d.%d seconds' % (minutes, seconds, millis)
- print('Elapsed time: ' + elapsed_string)
+ _write('Elapsed time: ' + elapsed_string)
def keyboard_interrupt(*args):
|
Try fix by using _write instead
|
py
|
diff --git a/tests/test_pip.py b/tests/test_pip.py
index <HASH>..<HASH> 100644
--- a/tests/test_pip.py
+++ b/tests/test_pip.py
@@ -1,8 +1,16 @@
from climber import Climber
+import json
-c = Climber({"images" : 1})
+c = Climber({"images" : True})
results = c.climb("wolf")
+results = json.loads(results)
print "Results for search of wolf."
-print results
+for bolt in results['data']:
+ print "-----------------------\n"
+ print "Contexts: ",
+ print bolt['contexts']
+ print "Text: ",
+ print bolt['text']
+ print "-----------------------\n"
|
text case built out now to fix the readme
|
py
|
diff --git a/parler/admin.py b/parler/admin.py
index <HASH>..<HASH> 100644
--- a/parler/admin.py
+++ b/parler/admin.py
@@ -42,7 +42,7 @@ from django.conf import settings
from django.conf.urls import url
from django.contrib import admin
from django.contrib.admin.options import csrf_protect_m, BaseModelAdmin, InlineModelAdmin
-from django.contrib.admin.utils import get_deleted_objects, unquote
+from django.contrib.admin.utils import get_deleted_objects, quote, unquote
from django.contrib.admin.templatetags.admin_urls import add_preserved_filters
from django.core.exceptions import PermissionDenied, ImproperlyConfigured
from django.db import router, transaction
@@ -254,7 +254,7 @@ class TranslatableAdmin(BaseTranslatableAdmin, admin.ModelAdmin):
classes.append('current')
info = _get_model_meta(opts)
- admin_url = reverse('admin:{0}_{1}_change'.format(*info), args=(object.pk,), current_app=self.admin_site.name)
+ admin_url = reverse('admin:{0}_{1}_change'.format(*info), args=(quote(object.pk),), current_app=self.admin_site.name)
buttons.append('<a class="{classes}" href="{href}?language={language_code}">{title}</a>'.format(
language_code=code,
classes=' '.join(classes),
|
quote object id when constructing the language column
|
py
|
diff --git a/patroni/zookeeper.py b/patroni/zookeeper.py
index <HASH>..<HASH> 100644
--- a/patroni/zookeeper.py
+++ b/patroni/zookeeper.py
@@ -228,13 +228,16 @@ class ZooKeeper(AbstractDCS):
if isinstance(self.cluster, Cluster) and self.cluster.leader.name == self._name:
self.client.delete(self.leader_path, version=self.cluster.leader.index)
- def cancel_initialization(self):
+ def _cancel_initialization(self):
node = self.get_node(self.initialize_path)
if node and node[0] == self._name:
- try:
- self.client.retry(self.client.delete, self.initialize_path, version=node[1].mzxid)
- except KazooException:
- logger.exception("Unable to delete initialize key")
+ self.client.delete(self.initialize_path, version=node[1].mzxid)
+
+ def cancel_initialization(self):
+ try:
+ self.client.retry(self._cancel_initialization)
+ except:
+ logger.exception("Unable to delete initialize key")
def watch(self, timeout):
self.cluster_event.wait(timeout)
|
Run cancel_initialization with retry
|
py
|
diff --git a/src/pydocstyle/checker.py b/src/pydocstyle/checker.py
index <HASH>..<HASH> 100644
--- a/src/pydocstyle/checker.py
+++ b/src/pydocstyle/checker.py
@@ -60,7 +60,7 @@ class ConventionChecker(object):
'Attributes',
'Methods']
- def check_source(self, source, filename, ignore_decorators):
+ def check_source(self, source, filename, ignore_decorators=None):
module = parse(StringIO(source), filename)
for definition in module:
for this_check in self.checks:
|
Default value for breaking API change. Some tools like prospector tried to import a v1 pydocstyle or a v2 pydocstyle and uses them the same way, BUT there has been a breaking change introduced here and so `ignore_decorators` must be provided. This breaks at least prospectors behavior. May we set just a default=None value to have the same signature for the v1 and v2 function here.
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -27,7 +27,7 @@ def extras_require():
def main():
setup(
name='straitlets',
- version='0.2.3',
+ version='0.2.4',
description="Serializable IPython Traitlets",
author="Quantopian Team",
author_email="opensource@quantopian.com",
|
BUILD: PyPI commit.
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -26,7 +26,7 @@ setup(
packages=['location_field'],
package_data={'location_field': ['static/location_field/js/*.js',],},
classifiers=[
- "Development Status :: 3 - Alpha",
+ "Development Status :: 5 - Production/Stable",
"Environment :: Web Environment",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
|
bump development status (setup.py)
|
py
|
diff --git a/devassistant/command_runners.py b/devassistant/command_runners.py
index <HASH>..<HASH> 100644
--- a/devassistant/command_runners.py
+++ b/devassistant/command_runners.py
@@ -729,11 +729,9 @@ class DockerCommandRunner(object):
logger.warning('docker-py not installed, cannot execute docker command.')
return [False, '']
- args = c.format_deep()
-
if c.comm_type == 'docker_b':
# TODO: allow providing another argument - a repository name/tag for the built image
- return cls._docker_build(args)
+ return cls._docker_build(c.input_res)
else:
raise exceptions.CommandException('Unknown command type {ct}.'.format(ct=c.comm_type))
|
Fix DockerCommandRunner with new Yaml syntax
|
py
|
diff --git a/test_tableone.py b/test_tableone.py
index <HASH>..<HASH> 100644
--- a/test_tableone.py
+++ b/test_tableone.py
@@ -78,6 +78,16 @@ class TestTableOne(object):
assert x != y
@with_setup(setup, teardown)
+ def test_examples_used_in_the_readme_run_without_raising_error(self):
+
+ convars = ['time','age','bili','chol','albumin','copper','alk.phos','ast','trig','platelet','protime']
+ catvars = ['status', 'ascites', 'hepato', 'spiders', 'edema','stage', 'sex']
+ strat = 'trt'
+ nonnormal = ['bili']
+ mytable = TableOne(self.data_pbc, convars, catvars, strat, nonnormal, pval=False)
+ mytable = TableOne(self.data_pbc, convars, catvars, strat, nonnormal, pval=True)
+
+ @with_setup(setup, teardown)
def test_overall_mean_and_std_as_expected_for_cont_variable(self):
continuous=['normal','nonnormal','height']
|
add tests for example used in readme
|
py
|
diff --git a/pyathena/async_cursor.py b/pyathena/async_cursor.py
index <HASH>..<HASH> 100644
--- a/pyathena/async_cursor.py
+++ b/pyathena/async_cursor.py
@@ -109,7 +109,7 @@ class AsyncCursor(BaseCursor):
)
return query_id, self._executor.submit(self._collect_result_set, query_id)
- def executemany(self, operation: str, seq_of_parameters: Dict[str, Any]):
+ def executemany(self, operation: str, seq_of_parameters: List[Dict[str, Any]]):
raise NotSupportedError
def cancel(self, query_id: str) -> Future:
|
Fix Argument 2 of "executemany" is incompatible with supertype "BaseCursor"
|
py
|
diff --git a/tests/integration_test.py b/tests/integration_test.py
index <HASH>..<HASH> 100644
--- a/tests/integration_test.py
+++ b/tests/integration_test.py
@@ -141,6 +141,17 @@ def test_option_unreleased(test_repo, runner, open_changelog):
@pytest.mark.parametrize(
"commands", [["git init -q", "touch file", "git add file", "git commit -m 'feat: Add file #1' -q"]]
)
+def test_option_skipping_unreleased(test_repo, runner, open_changelog):
+ result = runner.invoke(main)
+ assert result.exit_code == 0, result.stderr
+ assert result.output == ""
+ changelog = open_changelog().read()
+ assert "## Unreleased" not in changelog
+
+
+@pytest.mark.parametrize(
+ "commands", [["git init -q", "touch file", "git add file", "git commit -m 'feat: Add file #1' -q"]]
+)
def test_option_issue_url(test_repo, runner, open_changelog):
result = runner.invoke(main, ["--issue-url", "issues.custom.com/{id}", "--unreleased"])
assert result.exit_code == 0, result.stderr
|
test: Add skipping unreleased integration test #<I>
|
py
|
diff --git a/flink-python/setup.py b/flink-python/setup.py
index <HASH>..<HASH> 100644
--- a/flink-python/setup.py
+++ b/flink-python/setup.py
@@ -312,6 +312,7 @@ try:
'cloudpickle==1.2.2', 'avro-python3>=1.8.1,!=1.9.2,<1.10.0',
'pandas>=1.0,<1.2.0', 'pyarrow>=0.15.1,<3.0.0',
'pytz>=2018.3', 'numpy>=1.14.3,<1.20', 'fastavro>=0.21.4,<0.24',
+ 'requests>=2.26.0',
apache_flink_libraries_dependency],
cmdclass={'build_ext': build_ext},
tests_require=['pytest==4.4.1'],
|
[FLINK-<I>][python] Limits the version requests to <I> or above This closes #<I>.
|
py
|
diff --git a/test/unit/model/v2/test_driver_section.py b/test/unit/model/v2/test_driver_section.py
index <HASH>..<HASH> 100644
--- a/test/unit/model/v2/test_driver_section.py
+++ b/test/unit/model/v2/test_driver_section.py
@@ -134,13 +134,7 @@ def _model_driver_provider_name_not_nullable_when_vagrant_section_data():
['_model_driver_provider_name_not_nullable_when_vagrant_section_data'],
indirect=True)
def test_driver_provider_name_not_nullable_when_vagrant_driver(_config):
- x = {
- 'driver': [{
- 'provider': [{
- 'name': ['unallowed value None', 'null value not allowed']
- }]
- }]
- }
+ x = {'driver': [{'provider': [{'name': ['null value not allowed']}]}]}
assert x == schema_v2.validate(_config)
|
Corrected missed test from #<I>
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -5,7 +5,7 @@ with open("README.md", "r") as fh:
setup(
name="fluxions",
- version="0.0.2",
+ version="0.0.4",
author="Harvard CS207 Final Project Group 10",
description="A package for Automatic Differentiation",
long_description=long_description,
@@ -13,7 +13,7 @@ setup(
url="https://github.com/CS207-Final-Project-Group-10/cs207-FinalProject",
tests_require=["pytest"],
packages=['fluxions'],
- install_requires=['numpy==1.15.2'],
+ install_requires=[],
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
|
Removed numpy from setup.py
|
py
|
diff --git a/pyroSAR/tests/test_ancillary.py b/pyroSAR/tests/test_ancillary.py
index <HASH>..<HASH> 100644
--- a/pyroSAR/tests/test_ancillary.py
+++ b/pyroSAR/tests/test_ancillary.py
@@ -40,7 +40,10 @@ def test_run(tmpdir, testdata):
def test_which():
- assert os.path.isfile(anc.which('gdalinfo'))
+ program = anc.which('gdalinfo')
+ assert os.path.isfile(program)
+ assert anc.which(program) == program
+ assert anc.which('foobar') is None
def test_multicore():
|
[test_ancillary] additional tests for function which
|
py
|
diff --git a/host/analysis/analyze_raw_data.py b/host/analysis/analyze_raw_data.py
index <HASH>..<HASH> 100644
--- a/host/analysis/analyze_raw_data.py
+++ b/host/analysis/analyze_raw_data.py
@@ -148,7 +148,7 @@ class AnalyzeRawData(object):
self.create_cluster_table = False
self.create_cluster_size_hist = False
self.create_cluster_tot_hist = False
- self._n_injection = 100
+ self.n_injections = 100
self.n_bcid = 16
self.max_tot_value = 13
|
MAINT: use property to set the number of injections
|
py
|
diff --git a/holoviews/core/ndmapping.py b/holoviews/core/ndmapping.py
index <HASH>..<HASH> 100644
--- a/holoviews/core/ndmapping.py
+++ b/holoviews/core/ndmapping.py
@@ -432,10 +432,10 @@ class MultiDimensionalMapping(Dimensioned):
return default
- def pop(self, *args):
- if len(args) > 0 and not isinstance(args[0], tuple):
- args[0] = (args[0],)
- return self.data.pop(*args)
+ def pop(self, key, default=None):
+ "Standard pop semantics for all mapping types"
+ if not isinstance(key, tuple): key = (key,)
+ return self.data.pop(key, default)
def __getitem__(self, key):
|
Fixed pop method on MultiDimensionalMapping
|
py
|
diff --git a/discord/calls.py b/discord/calls.py
index <HASH>..<HASH> 100644
--- a/discord/calls.py
+++ b/discord/calls.py
@@ -117,7 +117,8 @@ class GroupCall:
if data['channel_id'] is None:
self._voice_states.pop(user_id, None)
else:
- self._voice_states[user_id] = VoiceState(**data, voice_channel=self.channel)
+ data['voice_channel'] = self.channel
+ self._voice_states[user_id] = VoiceState(**data)
@property
def connected(self):
|
Fix SyntaxError in constructing VoiceState for Python <I> Generalised unpacking is <I> only.
|
py
|
diff --git a/test-code.py b/test-code.py
index <HASH>..<HASH> 100644
--- a/test-code.py
+++ b/test-code.py
@@ -24,10 +24,11 @@ _dir: str
d = None
# the commit message:
c_message = os.getenv("TRAVIS_COMMIT_MESSAGE")
+if c_message is None:
+ raise EnvironmentError("No commit name detected!")
# if extra tests should be run:
extra: bool = False
-
# make sure this is being run directly and
# not from another python module
if not __name__ == "__main__":
|
added a check to see if no commit message exists
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -81,8 +81,7 @@ setup(
"Pillow>=3.2.0,<4.0.0", # Pillow 4.0.0 drops python 2.6 support
"python-magic",
"Flask",
- "ConfigArgParse>=0.13.0",
- "argparse" # So we have it for 2.6
+ "ConfigArgParse>=0.13.0"
],
test_suite="tests",
tests_require=[
|
Don't need to import argparse
|
py
|
diff --git a/sos/cleaner/parsers/username_parser.py b/sos/cleaner/parsers/username_parser.py
index <HASH>..<HASH> 100644
--- a/sos/cleaner/parsers/username_parser.py
+++ b/sos/cleaner/parsers/username_parser.py
@@ -28,6 +28,7 @@ class SoSUsernameParser(SoSCleanerParser):
prep_map_file = 'sos_commands/login/lastlog_-u_1000-60000'
regex_patterns = []
skip_list = [
+ 'core',
'nobody',
'nfsnobody',
'root',
|
[cleaner] Don't obfuscate default 'core' user The 'core' user is a common default user on containerized hosts, and obfuscation of it is not advantageous, much like the default 'ubuntu' user for that distribution.
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100755
--- a/setup.py
+++ b/setup.py
@@ -9,7 +9,7 @@ except ImportError:
setup(
name='dweepy',
- version='0.2.0',
+ version='0.3.0',
description='Dweepy is a Python client for dweet.io',
long_description=open('README.rst').read(),
author='Patrick Carey',
|
version bump after <I> release
|
py
|
diff --git a/scss/__init__.py b/scss/__init__.py
index <HASH>..<HASH> 100644
--- a/scss/__init__.py
+++ b/scss/__init__.py
@@ -619,7 +619,9 @@ class Scss(object):
if not block.argument:
raise SyntaxError("%s requires a function name (%s)" % (block.directive, rule.file_and_line))
- funct, lpar, argstr = block.argument.strip().partition('(')
+ funct, lpar, argstr = block.argument.partition('(')
+ funct = normalize_var(funct.strip())
+ argstr = argstr.strip()
defaults = {}
new_params = []
|
Function name stripped for mixins and functions
|
py
|
diff --git a/statik/config.py b/statik/config.py
index <HASH>..<HASH> 100644
--- a/statik/config.py
+++ b/statik/config.py
@@ -19,6 +19,7 @@ class StatikConfig(YamlLoadable):
super().__init__(*args, **kwargs)
self.project_name = self.vars.get('project-name', 'Untitled project')
self.base_path = self.vars.get('base-path', '/')
+ self.encoding = self.vars.get('encoding')
# relative to the output folder
self.assets_src_path = self.assets_dest_path = 'assets'
if 'assets' in self.vars and isinstance(self.vars['assets'], dict):
@@ -43,10 +44,11 @@ class StatikConfig(YamlLoadable):
def __repr__(self):
return ("<StatikConfig project_name=%s\n" +
" base_path=%s\n" +
+ " encoding=%s\n" +
" assets_src_path=%s\n" +
" assets_dest_path=%s\n" +
" context_static=%s\n" +
" context_dynamic=%s>") % (
- self.project_name, self.base_path, self.assets_src_path,
+ self.project_name, self.base_path, self.encoding, self.assets_src_path,
self.assets_dest_path, self.context_static, self.context_dynamic
)
|
added 'encoding' option to config.yml
|
py
|
diff --git a/gns3server/controller/compute.py b/gns3server/controller/compute.py
index <HASH>..<HASH> 100644
--- a/gns3server/controller/compute.py
+++ b/gns3server/controller/compute.py
@@ -448,11 +448,11 @@ class Compute:
log.error("Error received on compute WebSocket '{}': {}".format(ws_url, ws.exception()))
elif response.type == aiohttp.WSMsgType.CLOSED:
pass
- self._connected = False
break
except aiohttp.client_exceptions.ClientResponseError as e:
log.error("Client response error received on compute WebSocket '{}': {}".format(ws_url,e))
finally:
+ self._connected = False
log.info("Connection closed to compute WebSocket '{}'".format(ws_url))
# Try to reconnect after 1 second if server unavailable only if not during tests (otherwise we create a ressources usage bomb)
|
Allow controller to reconnect to compute if communication is lost. Ref #<I>
|
py
|
diff --git a/linguist/cache.py b/linguist/cache.py
index <HASH>..<HASH> 100644
--- a/linguist/cache.py
+++ b/linguist/cache.py
@@ -63,7 +63,8 @@ class CachedTranslation(object):
Returns lookup for get() and filter() methods.
"""
lookup = dict((k, getattr(self, k)) for k in self.fields)
- lookup.pop('field_value')
+ for field_name in ['field_value', 'updated_at']:
+ lookup.pop(field_name)
return lookup
@classmethod
|
updated_at should not be in lookup (not in the unique_together fields)
|
py
|
diff --git a/visidata/clipboard.py b/visidata/clipboard.py
index <HASH>..<HASH> 100644
--- a/visidata/clipboard.py
+++ b/visidata/clipboard.py
@@ -58,7 +58,8 @@ class _Clipboard:
p = subprocess.Popen(
self._command,
- stdin=open(temp.name, 'r', encoding=options.encoding))
+ stdin=open(temp.name, 'r', encoding=options.encoding),
+ stdout=subprocess.DEVNULL)
p.communicate()
def save(self, vs, filetype):
@@ -73,6 +74,7 @@ class _Clipboard:
p = subprocess.Popen(
self._command,
stdin=open(tempfn, 'r', encoding=options.encoding),
+ stdout=subprocess.DEVNULL,
close_fds=True)
p.communicate()
|
discard the clipboard command's stdout this prevents it being displayed in the UI
|
py
|
diff --git a/datapackage/datapackage.py b/datapackage/datapackage.py
index <HASH>..<HASH> 100644
--- a/datapackage/datapackage.py
+++ b/datapackage/datapackage.py
@@ -677,7 +677,7 @@ class DataPackage(Specification):
try:
resource_file = self.open_resource(resource_path)
except Exception as x:
- warnings.warn("Error opening resource {}={}: {}".format(location_type, resource_path, x))
+ warnings.warn("Error opening resource {0}={1}: {2}".format(location_type, resource_path, x))
continue # Try next location_type
else:
break
|
Add field names to format string to support older version Older versions of Python (Python <I>) require the fields in a format string to have names or ids. So instead of: 'Example: {}'.format(value) it has to be: 'Example: {0}.format(value) or: 'Example: {value}.format(value=value)
|
py
|
diff --git a/taskforce/watch_modules.py b/taskforce/watch_modules.py
index <HASH>..<HASH> 100644
--- a/taskforce/watch_modules.py
+++ b/taskforce/watch_modules.py
@@ -22,6 +22,7 @@ import modulefinder
from . import watch_files
from . import utils
from .utils import get_caller as my
+from .utils import ses
class watch(object):
"""
@@ -132,7 +133,7 @@ class watch(object):
else:
log.warning("%s Path '%s' had no matching watch entry", my(self), path)
names = list(changes)
- log.debug("%s Change was to %d name%s", my(self), len(names), '' if len(names) == 1 else 's')
+ log.debug("%s Change was to %d name%s", my(self), len(names), ses(len(names)))
names.sort()
resp = []
for name in names:
|
#6. register previous change -- Use list() instead of .keys() to snapshot dict key list against python3 issue. Also added ses() call
|
py
|
diff --git a/wtframework/wtf/web/tests/test_page_utils.py b/wtframework/wtf/web/tests/test_page_utils.py
index <HASH>..<HASH> 100644
--- a/wtframework/wtf/web/tests/test_page_utils.py
+++ b/wtframework/wtf/web/tests/test_page_utils.py
@@ -43,7 +43,7 @@ class TestPageUtils(unittest.TestCase):
def __load_google_later(self):
print "load google later thread started."
- time.sleep(10)
+ time.sleep(30)
self.driver.get("http://www.google.com")
print "load google later thread now loading google."
@@ -56,14 +56,14 @@ class TestPageUtils(unittest.TestCase):
t = threading.Thread(target=self.__load_google_later())
t.start()
- self.page_obj = page.PageUtils.wait_until_page_loaded(GoogleSearch, self.driver, 60)
+ self.page_obj = page.PageUtils.wait_until_page_loaded(GoogleSearch, self.driver, 120)
end_time = datetime.now()
# check we get a page object pack.
self.assertTrue(isinstance(self.page_obj, GoogleSearch))
# check that the instantiation happened later when the page was loaded.
- self.assertGreater(end_time - start_time, timedelta(seconds=10))
+ self.assertGreater(end_time - start_time, timedelta(seconds=30))
def test_wait_for_page_loads_times_out_on_bad_page(self):
|
adjust timing on the PageUtils test to account for lag time when running tests over a public grid.
|
py
|
diff --git a/pydux/compose.py b/pydux/compose.py
index <HASH>..<HASH> 100644
--- a/pydux/compose.py
+++ b/pydux/compose.py
@@ -1,3 +1,5 @@
+from functools import reduce
+
def compose(*funcs):
"""
chained function composition wrapper
|
Adapt to Guido's functional hate
|
py
|
diff --git a/wandb/data_types.py b/wandb/data_types.py
index <HASH>..<HASH> 100644
--- a/wandb/data_types.py
+++ b/wandb/data_types.py
@@ -1559,6 +1559,16 @@ class Image(BatchableMedia):
width, height = images[0]._image.size
format = jsons[0]["format"]
+ def size_equals_image(image):
+ img_width, img_height = image._image.size
+ return img_width == width and img_height == height
+
+ sizes_match = all(size_equals_image(img) for img in images)
+ if not sizes_match:
+ logging.warning(
+ "Images sizes do not match. This will causes images to be display incorrectly in the UI."
+ )
+
meta = {
"_type": "images/separated",
"width": width,
|
Warning message when logging a sequence of images and they don't match in size (#<I>) * Warning message when image sizes dont match
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -12,7 +12,7 @@ and expectation values from all of the samples simultaneously.
setup(
name="pymbar",
- author="Levi N. Naden and Jaime Rodríguez-Guerra and Michael R. Shirts and John D. Chodera",
+ author="Levi N. Naden and Jaime Rodriguez-Guerra and Michael R. Shirts and John D. Chodera",
author_email="levi.naden@choderalab.org, jaime.rodriguez-guerra@choderalab.org, michael.shirts@virginia.edu, john.chodera@choderalab.org",
description="Python implementation of the multistate Bennett acceptance ratio (MBAR) method",
license="MIT",
|
remove special character for now. (#<I>)
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -14,7 +14,7 @@ def read(fname):
setup(
name="Pebble",
- version="3.1.2",
+ version="3.1.3",
author="Matteo Cafasso",
author_email="noxdafox@gmail.com",
description=("Threading and multiprocessing eye-candy."),
|
release <I>: fixed bug in thread Pool
|
py
|
diff --git a/samples/dumpit.py b/samples/dumpit.py
index <HASH>..<HASH> 100755
--- a/samples/dumpit.py
+++ b/samples/dumpit.py
@@ -271,3 +271,4 @@ for s in slots:
except PyKCS11.PyKCS11Error as e:
print("Error:", e)
+ raise
|
Sample dumpit: re-raise the caught exception If a generic PyKCS<I>Error exception is caught then we re-raise it so that a backtrace is displayed to help debug the problem.
|
py
|
diff --git a/py2pack/__init__.py b/py2pack/__init__.py
index <HASH>..<HASH> 100644
--- a/py2pack/__init__.py
+++ b/py2pack/__init__.py
@@ -1,3 +1,3 @@
__doc__ = 'Generate distribution packages from Python packages on PyPI'
__author__ = 'Sascha Peilicke <saschpe@gmx.de>'
-__version__ = '0.2.2'
+__version__ = '0.2.3'
|
Bumped version number to '<I>'.
|
py
|
diff --git a/sprinter/formulas/ssh.py b/sprinter/formulas/ssh.py
index <HASH>..<HASH> 100644
--- a/sprinter/formulas/ssh.py
+++ b/sprinter/formulas/ssh.py
@@ -29,7 +29,7 @@ class SSHFormula(FormulaBase):
def install(self, feature_name, config):
ssh_path = self.__generate_key(feature_name, config)
self.__install_ssh_config(config, ssh_path)
- super(FormulaBase, self).install(feature_name, config)
+ super(SSHFormula, self).install(feature_name, config)
def update(self, feature_name, source_config, target_config):
ssh_path = self.__generate_key(feature_name, target_config)
|
Fixing typo in ssh
|
py
|
diff --git a/m.py b/m.py
index <HASH>..<HASH> 100755
--- a/m.py
+++ b/m.py
@@ -202,8 +202,8 @@ def db_load(dirname):
# TODO: use flock? backup?
def db_update(dirname, files): # {{{1
(HOME / CFG).mkdir(exist_ok = True)
- fs = db_load(dirname)["files"]
- fs_ = { k:v for k,v in {**fs, **files}.items() if v != False }
+ fs = db_load(dirname)["files"]; fs.update(**files)
+ fs_ = { k:v for k,v in fs.items() if v != False }
db = _db_check(dirname, dict(dir = str(dirname), files = fs_))
with db_dir_file(dirname).open("w") as f:
json.dump(db, f, indent = 2, sort_keys = True)
|
support <I> <= python
|
py
|
diff --git a/tests/test_api.py b/tests/test_api.py
index <HASH>..<HASH> 100644
--- a/tests/test_api.py
+++ b/tests/test_api.py
@@ -85,6 +85,15 @@ class NoConfig(unittest.TestCase):
assert str(excinfo.value) == 'You have to provide either repo_token in .coveralls.mock, or launch via Travis ' \
'or CircleCI'
+ @patch.dict(os.environ, {'CIRCLECI': 'True',
+ 'CIRCLE_BUILD_NUM': '888',
+ 'CI_PULL_REQUEST': 'https://github.com/org/repo/pull/9999'}, clear=True)
+ def test_circleci_no_config(self):
+ cover = Coveralls()
+ assert cover.config['service_name'] == 'circle-ci'
+ assert cover.config['service_job_id'] == '888'
+ assert cover.config['service_pull_request'] == '9999'
+
class Git(GitBasedTest):
|
Added no_config tests for CircleCI
|
py
|
diff --git a/tests/commands/test_check_command.py b/tests/commands/test_check_command.py
index <HASH>..<HASH> 100644
--- a/tests/commands/test_check_command.py
+++ b/tests/commands/test_check_command.py
@@ -10,6 +10,7 @@ from commitizen.exceptions import (
InvalidCommitMessageError,
NoCommitsFoundError,
)
+from tests.utils import create_file_and_commit
COMMIT_LOG = [
"refactor: A code change that neither fixes a bug nor adds a feature",
@@ -217,7 +218,12 @@ def test_check_command_with_invalid_argument(config):
)
+@pytest.mark.usefixtures("tmp_commitizen_project")
def test_check_command_with_empty_range(config, mocker):
+
+ # must initialize git with a commit
+ create_file_and_commit("feat: initial")
+
check_cmd = commands.Check(config=config, arguments={"rev_range": "master..master"})
with pytest.raises(NoCommitsFoundError) as excinfo:
check_cmd()
|
test(check): fixes logic issue made evident by the latest fix(git) commit git was failing with "fatal: ambiguous argument 'master..master': unknown revision or path not in the working tree". git `master` branch doesn't exist unless there is at least one "initial" commit or when only the PR branch has been cloned (e.g. CI).
|
py
|
diff --git a/openupgradelib/openupgrade.py b/openupgradelib/openupgrade.py
index <HASH>..<HASH> 100644
--- a/openupgradelib/openupgrade.py
+++ b/openupgradelib/openupgrade.py
@@ -646,6 +646,12 @@ def rename_models(cr, model_spec):
'UPDATE mail_followers SET res_model=%s '
'where res_model=%s', (new, old),
)
+ if table_exists(cr, 'mail_activity'):
+ logged_query(
+ cr,
+ 'UPDATE mail_activity SET res_model=%s '
+ 'where res_model=%s', (new, old),
+ )
# TODO: signal where the model occurs in references to ir_model
|
[IMP] rename_models In <I>, they introduced the mail activities.
|
py
|
diff --git a/gpustat.py b/gpustat.py
index <HASH>..<HASH> 100755
--- a/gpustat.py
+++ b/gpustat.py
@@ -20,7 +20,7 @@ import locale
import platform
import json
-__version__ = '0.3.1'
+__version__ = '0.4.0.dev'
class ANSIColors:
|
Bump to next development version: <I>.dev
|
py
|
diff --git a/websocket/_app.py b/websocket/_app.py
index <HASH>..<HASH> 100644
--- a/websocket/_app.py
+++ b/websocket/_app.py
@@ -252,8 +252,8 @@ class WebSocketApp(object):
if ping_timeout is not None and ping_timeout <= 0:
ping_timeout = None
- if ping_timeout and ping_interval and ping_interval <= ping_timeout:
- raise WebSocketException("Ensure ping_interval > ping_timeout")
+ if ping_timeout and ping_interval and ping_interval >= ping_timeout:
+ raise WebSocketException("Ensure ping_interval < ping_timeout")
if not sockopt:
sockopt = []
if not sslopt:
|
'ping_interval' should be less than 'ping_timeout' (#<I>)
|
py
|
diff --git a/openquake/hazardlib/sourceconverter.py b/openquake/hazardlib/sourceconverter.py
index <HASH>..<HASH> 100644
--- a/openquake/hazardlib/sourceconverter.py
+++ b/openquake/hazardlib/sourceconverter.py
@@ -249,7 +249,6 @@ def area_to_point_sources(area_src):
nodal_plane_distribution=area_src.nodal_plane_distribution,
hypocenter_distribution=area_src.hypocenter_distribution,
temporal_occurrence_model=area_src.temporal_occurrence_model)
- pt.src_group_id = area_src.src_group_id
pt.num_ruptures = pt.count_ruptures()
yield pt
@@ -310,13 +309,16 @@ def split_source(src):
"""
if hasattr(src, '__iter__'): # multipoint source
for s in src:
+ s.src_group_id = src.src_group_id
yield s
if isinstance(src, source.AreaSource):
for s in area_to_point_sources(src):
+ s.src_group_id = src.src_group_id
yield s
elif isinstance(
src, (source.SimpleFaultSource, source.ComplexFaultSource)):
for s in split_fault_source(src):
+ s.src_group_id = src.src_group_id
yield s
else:
# characteristic and nonparametric sources are not split
|
Set src_group_id correctly on the split sources [skip CI]
|
py
|
diff --git a/mt940/__about__.py b/mt940/__about__.py
index <HASH>..<HASH> 100644
--- a/mt940/__about__.py
+++ b/mt940/__about__.py
@@ -6,7 +6,7 @@ A library to parse MT940 files and returns smart Python collections for
statistics and manipulation.
'''.strip().split())
__email__ = 'wolph@wol.ph'
-__version__ = '4.7'
+__version__ = '4.8.0'
__license__ = 'BSD'
__copyright__ = 'Copyright 2015 Rick van Hattem (wolph)'
__url__ = 'https://github.com/WoLpH/mt940'
|
Incrementing version to <I>
|
py
|
diff --git a/luigi/worker.py b/luigi/worker.py
index <HASH>..<HASH> 100644
--- a/luigi/worker.py
+++ b/luigi/worker.py
@@ -271,7 +271,10 @@ class DequeQueue(collections.deque):
return self.append(obj)
def get(self, block=None, timeout=None):
- return self.pop()
+ try:
+ return self.pop()
+ except IndexError:
+ raise Queue.Empty
class AsyncCompletionException(Exception):
|
Raise Queue.Empty for get() on empty queue Worker._handle_next_task() expects get() on an empty queue to raise Queue.Empty, but the dummy implementation DequeQueue raises IndexError instead. It probaly only shows up if you create custom workers that derive from Worker, but it is still inconsistent with multiprocessing.Queue.
|
py
|
diff --git a/importpathsdecomposer.py b/importpathsdecomposer.py
index <HASH>..<HASH> 100644
--- a/importpathsdecomposer.py
+++ b/importpathsdecomposer.py
@@ -9,6 +9,7 @@ class ImportPathsDecomposer:
return self.classes
def decompose(self, importpaths):
+ self.classes = {}
for path in importpaths:
try:
self.ipparser.parse(path)
|
import paths decomposer: if run more than once reset classes
|
py
|
diff --git a/glue/pipeline.py b/glue/pipeline.py
index <HASH>..<HASH> 100644
--- a/glue/pipeline.py
+++ b/glue/pipeline.py
@@ -81,6 +81,12 @@ class CondorJob:
"""
return self.__executable
+ def set_executable(self, executable):
+ """
+ Set the name of the executable for this job.
+ """
+ self.__executable = executable
+
def add_condor_cmd(self, cmd, value):
"""
Add a Condor command to the submit file (e.g. a class add or evironment).
@@ -405,7 +411,7 @@ class CondorDAGManJob:
"""
Write a submit file for this Condor job.
"""
- command = "condor_submit_dag -no_submit -usedagdir "
+ command = "condor_submit_dag -f -no_submit -usedagdir "
if self.__options.keys():
for c in self.__options.keys():
|
added set+executable method to condorJob class
|
py
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.