Dataset columns:
diff: string, length 139 to 3.65k characters
message: string, length 8 to 627 characters
diff_languages: string, 1 class ("py" for every sample below)
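Each record below pairs one unified diff with the commit message written for it, plus a language tag. As a rough illustration of how such records might be consumed, here is a minimal sketch that assumes the samples are serialized as JSON lines; the file name commit_diffs.jsonl and that storage format are assumptions, not something stated above.

```python
# Sketch only: assumes each line of commit_diffs.jsonl is a JSON object with
# the three fields listed above; the file name and layout are hypothetical.
import json

with open("commit_diffs.jsonl", encoding="utf-8") as fh:
    for line in fh:
        record = json.loads(line)
        diff = record["diff"]                # unified diff, 139-3.65k characters
        message = record["message"]          # commit message, 8-627 characters
        language = record["diff_languages"]  # "py" for all samples shown here
        # e.g. print the message title alongside the size of its diff
        print(message.splitlines()[0], len(diff))
```

The raw samples follow in that same field order: diff, then message, then diff_languages.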
diff --git a/src/infi/clickhouse_orm/fields.py b/src/infi/clickhouse_orm/fields.py index <HASH>..<HASH> 100644 --- a/src/infi/clickhouse_orm/fields.py +++ b/src/infi/clickhouse_orm/fields.py @@ -79,6 +79,8 @@ class DateField(Field): def to_python(self, value): if isinstance(value, datetime.date): return value + if isinstance(value, datetime.datetime): + return value.date() if isinstance(value, int): return DateField.class_default + datetime.timedelta(days=value) if isinstance(value, string_types):
Accept datetime values for date fields (by Zloool)
py
diff --git a/cfgrib/messages.py b/cfgrib/messages.py index <HASH>..<HASH> 100644 --- a/cfgrib/messages.py +++ b/cfgrib/messages.py @@ -271,8 +271,8 @@ class FileIndex(collections.abc.Mapping): value = 'undef' if isinstance(value, (np.ndarray, list)): value = tuple(value) - # NOTE: the following ensures that values of the same type that evaluate equal - # are exactly the same object. The optimisation is especially useful for strings and + # NOTE: the following ensures that values of the same type that evaluate equal are + # exactly the same object. The optimisation is especially useful for strings and # it also reduces the on-disk size of the index in a backward compatible way. value = header_values_cache.setdefault((value, type(value)), value) header_values.append(value)
Stop codestyle from complaining about a comment
py
diff --git a/djangochannelsrestframework/observer/model_observer.py b/djangochannelsrestframework/observer/model_observer.py index <HASH>..<HASH> 100644 --- a/djangochannelsrestframework/observer/model_observer.py +++ b/djangochannelsrestframework/observer/model_observer.py @@ -121,7 +121,10 @@ class ModelObserver(BaseObserver): Triggers the old_binding to possibly send to its group. """ - old_group_names = self.get_observer_state(instance).current_groups + if action == Action.CREATE: + old_group_names = set() + else: + old_group_names = self.get_observer_state(instance).current_groups if action == Action.DELETE: new_group_names = set()
* set the "old group names" to an empty set in the post_change_receiver (model observer) if the performed action is a create
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -4,12 +4,17 @@ except ImportError: from distutils.core import setup import sys +import os +import atexit sys.path.insert(0, '.') version = __import__('voluptuous').__version__ try: import pypandoc long_description = pypandoc.convert('README.md', 'rst') + with open('README.rst', 'w') as f: + f.write(long_description) + atexit.register(lambda: os.unlink('README.rst')) except ImportError: print('WARNING: Could not locate pandoc, using Markdown long_description.') long_description = open('README.md').read()
Also include README.rst in distribution.
py
diff --git a/salt/utils/parsers.py b/salt/utils/parsers.py index <HASH>..<HASH> 100644 --- a/salt/utils/parsers.py +++ b/salt/utils/parsers.py @@ -688,8 +688,11 @@ class SyndicOptionParser(OptionParser, ConfigDirMixIn, MergeConfigMixIn, def setup_config(self): opts = config.master_config(self.get_config_file_path('master')) + user = opts.get('user', 'root') opts['_minion_conf_file'] = opts['conf_file'] opts.update(config.minion_config(self.get_config_file_path('minion'))) + # Over ride the user from the master config file + opts['user'] = user if 'syndic_master' not in opts: self.error(
Fix parser for syndic to use master user
py
diff --git a/source/rafcon/core/state_elements/state_element.py b/source/rafcon/core/state_elements/state_element.py index <HASH>..<HASH> 100644 --- a/source/rafcon/core/state_elements/state_element.py +++ b/source/rafcon/core/state_elements/state_element.py @@ -111,7 +111,7 @@ class StateElement(Observable, YAMLObject, JSONObject, Hashable): # In case of just the data type is wrong raise an Exception but keep the data flow if "not have matching data types" in message: do_delete_item = False - self._parent = parent + self._parent = ref(parent) raise RecoveryModeException("{0} invalid within state \"{1}\" (id {2}): {3}".format( class_name, parent.name, parent.state_id, message), do_delete_item=do_delete_item) else:
fix(state element): for invalid element _parent always weakref or None
py
diff --git a/indra/tools/assemble_corpus.py b/indra/tools/assemble_corpus.py index <HASH>..<HASH> 100644 --- a/indra/tools/assemble_corpus.py +++ b/indra/tools/assemble_corpus.py @@ -497,11 +497,7 @@ def filter_genes_only(stmts_in, **kwargs): stmts_out : list[indra.statements.Statement] A list of filtered statements. """ - - if 'remove_bound' in kwargs and kwargs['remove_bound']: - remove_bound = True - else: - remove_bound = False + remove_bound = 'remove_bound' in kwargs and kwargs['remove_bound'] specific_only = kwargs.get('specific_only') logger.info('Filtering %d statements for ones containing genes only...' % @@ -555,10 +551,9 @@ def filter_belief(stmts_in, belief_cutoff, **kwargs): stmts_out = [] # Now we eliminate supports/supported-by for stmt in stmts_in: - if stmt.belief >= belief_cutoff: - stmts_out.append(stmt) - else: + if stmt.belief < belief_cutoff: continue + stmts_out.append(stmt) supp_by = [] supp = [] for st in stmt.supports:
Streamline assemble_corpus code with update logic 1. collapsed the logic of the if statement directly to the assignment 2. Switched order of if statement to make the continue happen faster and decrease code indentation
py
diff --git a/test/test_transactions.py b/test/test_transactions.py index <HASH>..<HASH> 100644 --- a/test/test_transactions.py +++ b/test/test_transactions.py @@ -71,12 +71,13 @@ def test_query_inside_transaction(): def test_set_connection_works(): - assert APerson(name='New guy').save() + assert APerson(name='New guy 1').save() from socket import gaierror old_url = db.url with raises(ValueError): db.set_connection('bolt://user:password@6.6.6.6.6.6.6.6:7687') + APerson(name='New guy 2').save() db.set_connection(old_url) # set connection back - assert APerson(name='New guy2').save() + assert APerson(name='New guy 3').save()
fix connection test after update (#<I>)
py
diff --git a/src/_pytest/tmpdir.py b/src/_pytest/tmpdir.py index <HASH>..<HASH> 100644 --- a/src/_pytest/tmpdir.py +++ b/src/_pytest/tmpdir.py @@ -10,6 +10,7 @@ import warnings import attr import py +import six import pytest from .pathlib import ensure_reset_dir @@ -28,8 +29,11 @@ class TempPathFactory(object): _given_basetemp = attr.ib( # using os.path.abspath() to get absolute path instead of resolve() as it - # does not work the same in all platforms - convert=attr.converters.optional(lambda p: Path(os.path.abspath(p))) + # does not work the same in all platforms; there's Path.absolute(), but it is not + # public (see https://bugs.python.org/issue25012) + convert=attr.converters.optional( + lambda p: Path(os.path.abspath(six.text_type(p))) + ) ) _trace = attr.ib() _basetemp = attr.ib(default=None)
Fix call to os.path.abspath: the argument might already be a Path instance There's Path.absolute(), but it is not public, see <URL>
py
diff --git a/traffic/data/samples/__init__.py b/traffic/data/samples/__init__.py index <HASH>..<HASH> 100644 --- a/traffic/data/samples/__init__.py +++ b/traffic/data/samples/__init__.py @@ -67,6 +67,7 @@ def __getattr__(name: str) -> Union[Flight, Traffic]: airbus_tree = cast(Flight, __getattr__("airbus_tree")) belevingsvlucht = cast(Flight, __getattr__("belevingsvlucht")) +texas_longhorn = cast(Flight, __getattr__("texas_longhorn")) quickstart = cast(Traffic, __getattr__("quickstart")) switzerland = cast(Traffic, __getattr__("switzerland"))
additional sample flight by default in py<I>, need for demo
py
diff --git a/irclib.py b/irclib.py index <HASH>..<HASH> 100644 --- a/irclib.py +++ b/irclib.py @@ -601,6 +601,13 @@ class ServerConnection(Connection): """ apply(self.irclibobj.add_global_handler, args) + def remove_global_handler(self, *args): + """Remove global handler. + + See documentation for IRC.remove_global_handler. + """ + apply(self.irclibobj.remove_global_handler, args) + def action(self, target, action): """Send a CTCP ACTION command.""" self.ctcp("ACTION", target, action)
Added Connection.remove_global_handler method (patch by Brandon Beck).
py
diff --git a/phy/plot/features.py b/phy/plot/features.py index <HASH>..<HASH> 100644 --- a/phy/plot/features.py +++ b/phy/plot/features.py @@ -630,6 +630,30 @@ class FeatureView(BaseSpikeCanvas): @_wrap_vispy def plot_features(features, **kwargs): + """Plot features. + + Parameters + ---------- + + features : ndarray + The features to plot. A `(n_spikes, n_channels, n_features)` array. + spike_clusters : ndarray (optional) + A `(n_spikes,)` int array with the spike clusters. + masks : ndarray (optional) + A `(n_spikes, n_channels)` float array with the spike masks. + n_rows : int + Number of rows (= number of columns) in the grid view. + x_dimensions : list + List of dimensions for the x axis. + y_dimensions : list + List of dimensions for the yœ axis. + extra_features : dict + A dictionary `{feature_name: array}` where `array` has + `n_spikes` elements. + background_features : ndarray + The background features. A `(n_spikes, n_channels, n_features)` array. + + """ c = FeatureView(keys='interactive') c.set_data(features, **kwargs) return c
plot_features() docstring.
py
diff --git a/ayrton/utils.py b/ayrton/utils.py index <HASH>..<HASH> 100644 --- a/ayrton/utils.py +++ b/ayrton/utils.py @@ -47,3 +47,16 @@ def patch_logging (): logging.Logger.debug3= debug3 patch_logging () + +def dump_dict (d, level=0): + strings= [] + + strings.append ("%s%r: {\n" % ( ' '*level, k)) + for k, v in d.items (): + if type (v)!=dict: + strings.append ("%s%r: %r,\n" % ( ' '*(level+1), k, v )) + else: + strings.extend (dump_dict (v, level+1)) + strings.append ("%s},\n" % ( ' '*level, )) + + return ''.join (strings)
[+] dump_dict() pretty prints dicts.
py
diff --git a/test/base_test.py b/test/base_test.py index <HASH>..<HASH> 100644 --- a/test/base_test.py +++ b/test/base_test.py @@ -41,7 +41,7 @@ class BaseTest(TestCase): def test_activate_session_with_one_session_then_clearing_and_activating_with_another_session_shoul_request_to_correct_shop(self): shopify.ShopifyResource.activate_session(self.session1) - shopify.ShopifyResource.clear_session + shopify.ShopifyResource.clear_session() shopify.ShopifyResource.activate_session(self.session2) self.assertIsNone(ActiveResource.site)
Add missing parentheses to method call.
py
diff --git a/onecodex/models/sample.py b/onecodex/models/sample.py index <HASH>..<HASH> 100644 --- a/onecodex/models/sample.py +++ b/onecodex/models/sample.py @@ -1,4 +1,3 @@ -import json from requests.exceptions import HTTPError from six import string_types import warnings @@ -194,19 +193,6 @@ class Samples(OneCodexBase, ResourceDownloadMixin): class Metadata(OneCodexBase): _resource_path = "/api/v1/metadata" - def __setattr__(self, key, value): - # At some point we should validate that these match the schema - if key == "custom": - try: - import pandas as pd - - if isinstance(value, pd.Series): - value = json.loads(value.to_json()) - except ImportError: - pass - - super(Metadata, self).__setattr__(key, value) - def save(self): if self.id is None: super(Metadata, self).save() # Create
Remove coercion of pd.Series objects for Metadata.custom field
py
diff --git a/anonymizer/management/commands/anonymize_data.py b/anonymizer/management/commands/anonymize_data.py index <HASH>..<HASH> 100644 --- a/anonymizer/management/commands/anonymize_data.py +++ b/anonymizer/management/commands/anonymize_data.py @@ -2,6 +2,11 @@ anonymize_data command """ +from __future__ import print_function + +import sys +import time + from django.core.management.base import AppCommand from anonymizer.utils import get_anonymizers @@ -27,4 +32,10 @@ class Command(AppCommand): instances.append(instance) for instance in instances: + print('Running %s.%s... ' % (instance.__class__.__module__, + instance.__class__.__name__), end='') + sys.stdout.flush() + start = time.time() instance.run(chunksize=chunksize, parallel=parallel) + duration = time.time() - start + print('took %0.2f seconds' % duration)
print each running anonymizer and the time it takes
py
diff --git a/salt/modules/dpkg.py b/salt/modules/dpkg.py index <HASH>..<HASH> 100644 --- a/salt/modules/dpkg.py +++ b/salt/modules/dpkg.py @@ -364,7 +364,8 @@ def info(*packages): pkg[pkg_ext_k] = pkg_ext_v # Remove "technical" keys for t_key in ['installed_size', 'depends', 'recommends', - 'conflicts', 'bugs', 'description-md5']: + 'provides', 'replaces', 'conflicts', 'bugs', + 'description-md5', 'task']: if t_key in pkg: del pkg[t_key] ret[pkg['package']] = pkg
Enhance filter for the "technical" fields that are not generally needed as package information for the CMDB
py
diff --git a/plexapi/base.py b/plexapi/base.py index <HASH>..<HASH> 100644 --- a/plexapi/base.py +++ b/plexapi/base.py @@ -566,15 +566,18 @@ class Playable(object): self._server.query(key) self.reload() - def updateTimeline(self, time, state='stopped'): + def updateTimeline(self, time, state='stopped', duration=None): """ Set the timeline progress for this video. Parameters: time (int): milliseconds watched state (string): state of the video, default 'stopped' """ - key = '/:/timeline?ratingKey=%s&key=%s&identifier=com.plexapp.plugins.library&time=%d&state=%s' % (self.ratingKey, self.key, - time, state) + durationStr = '' + if duration != None: + durationStr = '&duration=' + str(duration) + key = '/:/timeline?ratingKey=%s&key=%s&identifier=com.plexapp.plugins.library&time=%d&state=%s%s' % (self.ratingKey, self.key, + time, state, durationStr) self._server.query(key) self.reload()
updateTimeline with optional duration. Without the duration option, the video progress will not be updated.
py
diff --git a/master/buildbot/process/remotecommand.py b/master/buildbot/process/remotecommand.py index <HASH>..<HASH> 100644 --- a/master/buildbot/process/remotecommand.py +++ b/master/buildbot/process/remotecommand.py @@ -141,18 +141,13 @@ class RemoteCommand(base.RemoteCommandImpl): while self.rc is None and timeout > 0: yield util.asyncSleep(.1) timeout -= 1 - # call .remoteComplete. If it raises an exception, or returns the - # Failure that we gave it, our self.deferred will be errbacked. If - # it does not (either it ate the Failure or there the step finished - # normally and it didn't raise a new exception), self.deferred will - # be callbacked. - d = defer.maybeDeferred(self.remoteComplete, failure) - # arrange for the callback to get this RemoteCommand instance - # instead of just None - d.addCallback(lambda r: self) - # this fires the original deferred we returned from .run(), - # with self as the result, or a failure - d.addBoth(self.deferred.callback) + + try: + yield self.remoteComplete(failure) + # this fires the original deferred we returned from .run(), + self.deferred.callback(self) + except Exception as e: + self.deferred.errback(e) @defer.inlineCallbacks def interrupt(self, why):
process: Simplify RemoteCommand._finish() We can wait on self.remoteComplete() to finish, because _finished() is only ever called without waiting for results
py
diff --git a/PyMI/src/wmi/__init__.py b/PyMI/src/wmi/__init__.py index <HASH>..<HASH> 100644 --- a/PyMI/src/wmi/__init__.py +++ b/PyMI/src/wmi/__init__.py @@ -272,8 +272,16 @@ class _Connection(object): instance._instance, six.text_type(method_name), params) as op: l = [] r = op.get_next_instance() + elements = [] for i in six.moves.range(0, len(r)): - l.append(_wrap_element(self, *r.get_element(i))) + elements.append(r.get_element(i)) + + # Sort the output params by name before returning their values. + # The WINRM and WMIDCOM protocols behave differently in how + # returned elements are ordered. This hack aligns with the WMIDCOM + # behaviour to retain compatibility with the wmi.py module. + for element in sorted(elements, key=lambda element: element[0]): + l.append(_wrap_element(self, *element)) return tuple(l) def new_instance_from_class(self, cls):
Sorts method call return params by name
py
diff --git a/telethon/utils.py b/telethon/utils.py index <HASH>..<HASH> 100644 --- a/telethon/utils.py +++ b/telethon/utils.py @@ -922,7 +922,8 @@ def resolve_bot_file_id(file_id): dc_id=dc_id, volume_id=volume_id, secret=secret, - local_id=local_id + local_id=local_id, + file_reference=b'' ), w=0, h=0, size=0) ], file_reference=b'', date=None)
Add yet another missing file_reference
py
diff --git a/marshmallow_sqlalchemy/fields.py b/marshmallow_sqlalchemy/fields.py index <HASH>..<HASH> 100644 --- a/marshmallow_sqlalchemy/fields.py +++ b/marshmallow_sqlalchemy/fields.py @@ -53,7 +53,8 @@ class Related(fields.Field): @property def session(self): - return self.parent.session + schema = get_schema_for_field(self) + return schema.session def _serialize(self, value, attr, obj): ret = {
Fix accessing Schema's session
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100755 --- a/setup.py +++ b/setup.py @@ -19,7 +19,7 @@ setup( name='awss', packages=['awss'], entry_points={'console_scripts': ['awss=awss:main']}, - version='0.9.4', + version='0.9.5', author="Robert Peteuil", author_email="robert.s.peteuil@gmail.com", url='https://github.com/robertpeteuil/aws-shortcuts',
setup.py for <I> release
py
diff --git a/metaseq/test/examples/ctcf_peaks_settings.py b/metaseq/test/examples/ctcf_peaks_settings.py index <HASH>..<HASH> 100644 --- a/metaseq/test/examples/ctcf_peaks_settings.py +++ b/metaseq/test/examples/ctcf_peaks_settings.py @@ -9,7 +9,7 @@ DOWNSTREAM = 1000 BINS = 100 FRAGMENT_SIZE = 200 GENOME = 'hg19' -CHROMS = ['chr19'] +CHROMS = ['chr1', 'chr2'] gtfdb = metaseq.example_filename('Homo_sapiens.GRCh37.66.cleaned.gtf.db') G = gffutils.FeatureDB(gtfdb)
use chr1 and 2 instead of <I> for examples
py
diff --git a/synapse/__init__.py b/synapse/__init__.py index <HASH>..<HASH> 100644 --- a/synapse/__init__.py +++ b/synapse/__init__.py @@ -3,8 +3,8 @@ The synapse distributed key-value hypergraph analysis framework. ''' import sys -if (sys.version_info.major, sys.version_info.minor) < (3, 6): # pragma: no cover - raise Exception('synapse is not supported on Python versions < 3.6') +if (sys.version_info.major, sys.version_info.minor) < (3, 7): # pragma: no cover + raise Exception('synapse is not supported on Python versions >= 3.7') # checking maximum *signed* integer size to determine the interpreter arch if sys.maxsize < 9223372036854775807: # pragma: no cover @@ -12,7 +12,7 @@ if sys.maxsize < 9223372036854775807: # pragma: no cover import lmdb if tuple([int(x) for x in lmdb.__version__.split('.')]) < (0, 94): # pragma: no cover - raise Exception('synapse is only supported on version 0.94 of the lmdb python module') + raise Exception('synapse is only supported on version >= 0.94 of the lmdb python module') import multiprocessing
Bump required python version. Also improved error description
py
diff --git a/packages/vaex-viz/vaex/viz/mpl.py b/packages/vaex-viz/vaex/viz/mpl.py index <HASH>..<HASH> 100644 --- a/packages/vaex-viz/vaex/viz/mpl.py +++ b/packages/vaex-viz/vaex/viz/mpl.py @@ -843,7 +843,7 @@ def plot(self, x=None, y=None, z=None, what="count(*)", vwhat=None, reduce=["col if show: pylab.show() if return_extra: - return im, grid, fgrid, ngrid, rgrid, rgba8 + return im, grid, fgrid, ngrid, rgrid else: return im # colorbar = None
Removed unused, undefined variable in the `plot` method.
py
diff --git a/icekit/plugins/child_pages/tests.py b/icekit/plugins/child_pages/tests.py index <HASH>..<HASH> 100644 --- a/icekit/plugins/child_pages/tests.py +++ b/icekit/plugins/child_pages/tests.py @@ -77,9 +77,9 @@ class ChildPagesTestCase(WebTest): self.assertEqual(len(self.child_pages_1.get_child_pages()), 0) self.assertEqual(len(self.child_pages_2.get_child_pages()), 2) expected_children = [self.page_3, self.page_4] + # test page ids, because the pages are boobytraps for child in expected_children: - self.assertIn(child, self.child_pages_2.get_child_pages()) - + self.assertIn(child.pk, [x.pk for x in self.child_pages_2.get_child_pages()]) def test_get_child_pages_published(self): pcp1 = self.page_1.get_published().contentitem_set.all()[0]
Test ids instead of object equality on draft pages
py
diff --git a/test_elasticsearch/test_server/test_common.py b/test_elasticsearch/test_server/test_common.py index <HASH>..<HASH> 100644 --- a/test_elasticsearch/test_server/test_common.py +++ b/test_elasticsearch/test_server/test_common.py @@ -152,15 +152,19 @@ class YamlTestCase(ElasticsearchTestCase): def run_skip(self, skip): if 'features' in skip: - if skip['features'] in IMPLEMENTED_FEATURES: - return - elif skip['features'] == 'requires_replica': - if self._get_data_nodes() > 1: - return - elif skip['features'] == 'benchmark': - if self._get_benchmark_nodes(): - return - raise SkipTest(skip.get('reason', 'Feature %s is not supported' % skip['features'])) + features = skip['features'] + if not isinstance(features, (tuple, list)): + features = [features] + for feature in features: + if feature in IMPLEMENTED_FEATURES: + continue + elif feature == 'requires_replica': + if self._get_data_nodes() > 1: + continue + elif feature == 'benchmark': + if self._get_benchmark_nodes(): + continue + raise SkipTest(skip.get('reason', 'Feature %s is not supported' % feature)) if 'version' in skip: version, reason = skip['version'], skip['reason']
Skip in yaml tests can have multiple values
py
diff --git a/pybar/ViTablesPlugin/__init__.py b/pybar/ViTablesPlugin/__init__.py index <HASH>..<HASH> 100644 --- a/pybar/ViTablesPlugin/__init__.py +++ b/pybar/ViTablesPlugin/__init__.py @@ -1 +0,0 @@ -__all__ = ['pybar_vitables_plugin.py'] # only these modules will be visible in ViTables
BUG: addressing #<I>
py
diff --git a/salt/minion.py b/salt/minion.py index <HASH>..<HASH> 100644 --- a/salt/minion.py +++ b/salt/minion.py @@ -546,7 +546,7 @@ class Minion(object): last = time.time() while True: try: - socks = dict(poller.poll(self.opts['sub_timeout'])) + socks = dict(poller.poll(self.opts['sub_timeout'] * 1000)) if socket in socks and socks[socket] == zmq.POLLIN: payload = self.serial.loads(socket.recv()) self._handle_payload(payload) @@ -585,7 +585,7 @@ class Minion(object): else: while True: try: - socks = dict(poller.poll(60)) + socks = dict(poller.poll(60000)) if socket in socks and socks[socket] == zmq.POLLIN: payload = self.serial.loads(socket.recv()) self._handle_payload(payload)
Change the poll wait times in tune in. This makes the minion more responsive and less resource hungry.
py
diff --git a/src/fonduer/utils/data_model_utils/tabular.py b/src/fonduer/utils/data_model_utils/tabular.py index <HASH>..<HASH> 100644 --- a/src/fonduer/utils/data_model_utils/tabular.py +++ b/src/fonduer/utils/data_model_utils/tabular.py @@ -5,7 +5,7 @@ from builtins import range from collections import defaultdict from functools import lru_cache from itertools import chain -from typing import DefaultDict, Iterator, List, Optional, Tuple, Union +from typing import DefaultDict, Iterator, List, Optional, Set, Tuple, Union from fonduer.candidates.models import Candidate, Mention from fonduer.candidates.models.span_mention import TemporarySpanMention @@ -457,7 +457,7 @@ def get_head_ngrams( :rtype: a *generator* of ngrams """ spans = _to_spans(mention) - axes = (axis,) if axis else ("row", "col") # type: ignore + axes: Set[str] = (axis,) if axis else ("row", "col") # type: ignore for span in spans: if span.sentence.is_tabular(): for axis in axes:
Annotate type to axes
py
diff --git a/nudibranch/models.py b/nudibranch/models.py index <HASH>..<HASH> 100644 --- a/nudibranch/models.py +++ b/nudibranch/models.py @@ -355,6 +355,9 @@ class Submission(BasicBase, Base): retval[testable] = testable_result return retval + def testables_with_statuses(self): + return self.all_testables() - self.testables_waiting_to_run() + def testable_statuses(self): warn_err = self.verification_warnings_errors() with_build_errors = self.testables_with_build_errors() @@ -363,7 +366,7 @@ class Submission(BasicBase, Base): to_testable_result.get(testable), warn_err, testable in with_build_errors) - for testable in self.all_testables()] + for testable in self.testables_with_statuses()] @staticmethod def get_or_empty(item, if_not_none, empty={}):
Only testables that aren't pending have statuses.
py
diff --git a/inverse_covariance/statistical_power.py b/inverse_covariance/statistical_power.py index <HASH>..<HASH> 100644 --- a/inverse_covariance/statistical_power.py +++ b/inverse_covariance/statistical_power.py @@ -65,7 +65,7 @@ class StatisticalPower(object): self.results = np.zeros((self.n_grid_points, self.n_grid_points)) grid = np.linspace(0.1, 1, self.n_grid_points) - self.alphas = np.linspace(0.1, 1, self.n_grid_points) + self.alphas = 10 * np.linspace(0.1, 1, self.n_grid_points) for aidx, alpha in enumerate(self.alphas): if self.verbose: print 'At alpha {} ({}/{})'.format(
Amp up alpha just to test
py
diff --git a/test/test_process_pool_fork.py b/test/test_process_pool_fork.py index <HASH>..<HASH> 100644 --- a/test/test_process_pool_fork.py +++ b/test/test_process_pool_fork.py @@ -73,7 +73,9 @@ def long_function(value=1): def pid_function(): time.sleep(0.1) - return os.getpid() + pid = os.getpid() + print(pid) + return pid def sigterm_function(): @@ -105,7 +107,8 @@ def pool_function(): def pebble_function(): with ProcessPool(max_workers=1) as pool: f = pool.schedule(function, args=[1]) - return f.result() + + return f.result() @unittest.skipIf(not supported, "Start method is not supported")
travis: MAC OS test failure investigations
py
diff --git a/monitor/report/app.py b/monitor/report/app.py index <HASH>..<HASH> 100644 --- a/monitor/report/app.py +++ b/monitor/report/app.py @@ -31,5 +31,9 @@ def zonedata(key, zone): ret[ts] = [{'date': k, 'value': v} for k,v in dd.iteritems()] return jsonify(ret) +@app.route("/histogram/<zone>") +def histogram(zone): + return jsonify({'data': data['disaggregate_histograms'][zone]}) + if __name__=='__main__': app.run(debug=True)
give histogram data as json uri
py
diff --git a/openquake/calculators/scenario_risk.py b/openquake/calculators/scenario_risk.py index <HASH>..<HASH> 100644 --- a/openquake/calculators/scenario_risk.py +++ b/openquake/calculators/scenario_risk.py @@ -48,12 +48,12 @@ def scenario_risk(riskinputs, riskmodel, rlzs_assoc, monitor): :class:`openquake.baselib.performance.PerformanceMonitor` instance :returns: a dictionary { - 'agg': array of shape (E, R, 2), - 'avg': list of tuples (rlz_ids, asset_idx, statistics) + 'agg': array of shape (E, L, R, 2), + 'avg': list of tuples (lt_idx, rlz_idx, asset_idx, statistics) } - where E is the number of simulated events, R the number of realizations - and statistics is an array of shape (n, R, 4), being n the number of - assets in the current riskinput object. + where E is the number of simulated events, L the number of loss types, + R the number of realizations and statistics is an array of shape + (n, R, 4), with n the number of assets in the current riskinput object """ E = monitor.oqparam.number_of_ground_motion_fields logging.info('Process %d, considering %d risk input(s) of weight %d',
Fixed docstring (again)
py
diff --git a/workshift/views.py b/workshift/views.py index <HASH>..<HASH> 100644 --- a/workshift/views.py +++ b/workshift/views.py @@ -851,9 +851,10 @@ def adjust_hours_view(request, semester): pool_hour_forms = [] for workshifter in workshifters: + forms_list = [] for pool in pools: hours = workshifter.pool_hours.get(pool=pool) - pool_hour_forms.append(( + forms_list.append(( AdjustHoursForm( request.POST or None, prefix="pool_hours-{}".format(hours.pk), @@ -861,6 +862,7 @@ def adjust_hours_view(request, semester): ), hours, )) + pool_hour_forms.apend(forms_list) if all( form.is_valid()
Fixed adjust_hours view
py
diff --git a/telemetry/telemetry/android_browser_finder.py b/telemetry/telemetry/android_browser_finder.py index <HASH>..<HASH> 100644 --- a/telemetry/telemetry/android_browser_finder.py +++ b/telemetry/telemetry/android_browser_finder.py @@ -7,6 +7,7 @@ import os import logging as real_logging import re import subprocess +import sys from telemetry import adb_commands from telemetry import android_browser_backend @@ -80,9 +81,15 @@ def FindAllAvailableBrowsers(options, logging=real_logging): logging.warn(' adb kill-server') logging.warn(' sudo `which adb` devices\n\n') except OSError: - logging.info('No adb command found. ' + - 'Will not try searching for Android browsers.') - return [] + if sys.platform.startswith('linux'): + os.environ['PATH'] = os.pathsep.join([ + os.path.join(os.path.dirname(__file__), + '../../../third_party/android_tools/sdk/platform-tools'), + os.environ['PATH']]) + else: + logging.info('No adb command found. ' + + 'Will not try searching for Android browsers.') + return [] device = None if options.android_device:
[Telemetry] Put adb on the path if it is not already there. There is a linux adb binary checked in to platform-tools. We use it if on linux and the adb command isn't found. BUG=None TEST=tools/perf/run_multipage_benchmarks --browser=list -v Review URL: <URL>
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -30,14 +30,14 @@ setup( setup_requires=pytest_runner + wheel + ["setuptools_scm"], tests_require=["pytest>=2.8"], install_requires=[ - "fonttools[ufo]>=4.11.0", - "cu2qu>=1.6.6", + "fonttools[ufo]>=4.17.1", + "cu2qu>=1.6.7", "compreffor>=0.4.6", "booleanOperations>=0.9.0", ], extras_require={ - "pathops": ["skia-pathops>=0.2.0"], - "cffsubr": ["cffsubr>=0.2.6"], + "pathops": ["skia-pathops>=0.5.1"], + "cffsubr": ["cffsubr>=0.2.7"], }, python_requires=">=3.6", classifiers=[
setup.py: bump minimum requirements
py
diff --git a/tests/test_command__devenv.py b/tests/test_command__devenv.py index <HASH>..<HASH> 100644 --- a/tests/test_command__devenv.py +++ b/tests/test_command__devenv.py @@ -40,12 +40,6 @@ class DevEnvTestCase(TestCase): else: self.projector("devenv pack") - def test_build__no_bootstrap(self): - with self.temporary_directory_context(): - self.projector("repository init a.b.c none short long") - remove("bootstrap.py") - self.projector("devenv build --clean") - def test_build_after_init__use_isolated_python(self): with self.temporary_directory_context(): self.projector("repository init a.b.c none short long")
HOSTDEV-<I> removing unnecessary test now that we are not using bootstrap.py
py
diff --git a/src/toil_lib/tools/aligners.py b/src/toil_lib/tools/aligners.py index <HASH>..<HASH> 100644 --- a/src/toil_lib/tools/aligners.py +++ b/src/toil_lib/tools/aligners.py @@ -59,11 +59,12 @@ def run_star(job, r1_id, r2_id, star_index_url, wiggle=False): # Write to fileStore transcriptome_id = job.fileStore.writeGlobalFile(os.path.join(work_dir, 'rnaAligned.toTranscriptome.out.bam')) sorted_id = job.fileStore.writeGlobalFile(os.path.join(work_dir, 'rnaAligned.sortedByCoord.out.bam')) + log_id = job.fileStore.writeGlobalFile(os.path.join(work_dir, 'rnaLog.final.out')) if wiggle: wiggle_id = job.fileStore.writeGlobalFile(os.path.join(work_dir, 'rnaSignal.UniqueMultiple.str1.out.bg')) - return transcriptome_id, sorted_id, wiggle_id + return transcriptome_id, sorted_id, wiggle_id, log_id else: - return transcriptome_id, sorted_id + return transcriptome_id, sorted_id, log_id def run_bwakit(job, config, sort=True, trim=False):
Return log.final.out from STAR (resolves #<I>) Needed by CKCC for QC. This breaks backwards compatibility but only toil-rnaseq uses this function as of now.
py
diff --git a/tests/parser/test_parse_inreach.py b/tests/parser/test_parse_inreach.py index <HASH>..<HASH> 100644 --- a/tests/parser/test_parse_inreach.py +++ b/tests/parser/test_parse_inreach.py @@ -1,6 +1,7 @@ import unittest from ogn.parser.aprs_comment.inreach_parser import InreachParser + class TestStringMethods(unittest.TestCase): def test_position_comment(self): message = InreachParser().parse_position("id300434060496190 inReac True")
Add a blank line in the test case for CI?
py
diff --git a/ncdjango/interfaces/arcgis/views.py b/ncdjango/interfaces/arcgis/views.py index <HASH>..<HASH> 100644 --- a/ncdjango/interfaces/arcgis/views.py +++ b/ncdjango/interfaces/arcgis/views.py @@ -374,9 +374,9 @@ class LegendView(ArcGisMapServerMixin, LegendViewBase): bottom_image.paste(full_image.crop((0, 40, 20, 60))) elements = [ - LegendElement(top_image, [1], [labels[0]]), + LegendElement(top_image, [1], [labels[-1]]), LegendElement(middle_image, [.5], ['']), - LegendElement(bottom_image, [0], [labels[-1]]) + LegendElement(bottom_image, [0], [labels[0]]) ] return [
Fixed bug with ordering of legend labels relative to legend image for stretched renderer
py
diff --git a/workshift/models.py b/workshift/models.py index <HASH>..<HASH> 100644 --- a/workshift/models.py +++ b/workshift/models.py @@ -49,9 +49,9 @@ class Semester(models.Model): blank=True, help_text="Workshift rate for this semester.", ) - self_sign_out = models.BooleanField( + self_sign_off = models.BooleanField( default=False, - help_text="Whether members may sign themselves out of a workshift." + help_text="Whether members may sign themselves off for a workshift." ) policy = models.URLField( max_length=255,
Renamed self_sign_out to self_sign_off
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -21,8 +21,8 @@ setup( author="UW-IT AXDD", author_email="aca-it@uw.edu", include_package_data=True, - install_requires=['UW-RestClients-Core>0.8,<1.0', - 'UW-RestClients-PWS<1.0', + install_requires=['UW-RestClients-Core>=0.8.9,<1.0', + 'UW-RestClients-PWS>=0.5,<1.0', 'python-dateutil' ], license='Apache License, Version 2.0',
update restclient, pws version
py
diff --git a/gruvi/endpoints.py b/gruvi/endpoints.py index <HASH>..<HASH> 100644 --- a/gruvi/endpoints.py +++ b/gruvi/endpoints.py @@ -372,9 +372,6 @@ class Server(Endpoint): callback = functools.partial(self._on_new_connection, ssl, ssl_args) handle.listen(callback, backlog) addr = handle.getsockname() - # pyuv issue #152 - if isinstance(addr, bytes) and six.PY3: - addr = addr.decode(sys.getfilesystemencoding()) self._log.debug('listen on {}', saddr(addr)) addresses.append(addr) self._handles += handles
Server: no need to work around pyuv #<I> anymore
py
diff --git a/iota/commands/extended/send_trytes.py b/iota/commands/extended/send_trytes.py index <HASH>..<HASH> 100644 --- a/iota/commands/extended/send_trytes.py +++ b/iota/commands/extended/send_trytes.py @@ -2,7 +2,7 @@ from __future__ import absolute_import, division, print_function, \ unicode_literals -from typing import List +from typing import List, Optional import filters as f from iota import TransactionTrytes, TryteString, TransactionHash
Added missing import for type hint.
py
diff --git a/svg/charts/graph.py b/svg/charts/graph.py index <HASH>..<HASH> 100644 --- a/svg/charts/graph.py +++ b/svg/charts/graph.py @@ -586,12 +586,14 @@ class Graph(object): return styles = self.parse_css() - for node in xpath.Evaluate('//*[@class]', self.root): - cl = node.getAttribute('class') + for node in self.root.xpath('//*[@class]'): + cl = '.' + node.attrib['class'] + if not cl in styles: + continue style = styles[cl] - if node.hasAttribute('style'): - style += node.getAttribute('style') - node.setAttribute('style', style) + if 'style' in node.attrib: + style += node.attrib['style'] + node.attrib['style'] = style def parse_css(self): """
Updated Graph.render_inline_styles, correcting errors such that now tests pass.
py
diff --git a/everest/missions/k2/k2.py b/everest/missions/k2/k2.py index <HASH>..<HASH> 100755 --- a/everest/missions/k2/k2.py +++ b/everest/missions/k2/k2.py @@ -446,9 +446,8 @@ def GetData(EPIC, season = None, cadence = 'lc', clobber = False, delete_raw = F elif (i-2 >= 0) and aperture[i-2,j] == 1: aperture[i,j] = 2 ext += 1 - if ext: log.info("Extended saturated columns by %d pixel(s)." % ext) - - import pdb; pdb.set_trace() + if ext: + log.info("Extended saturated columns by %d pixel(s)." % ext) for j in range(aperture.shape[1]): if np.any(f97[:,j] > satflx):
Extended saturated columns to conserve flux; working awesomely
py
diff --git a/doc2dash/parsers/sphinx/parser.py b/doc2dash/parsers/sphinx/parser.py index <HASH>..<HASH> 100644 --- a/doc2dash/parsers/sphinx/parser.py +++ b/doc2dash/parsers/sphinx/parser.py @@ -91,8 +91,8 @@ def _get_type(text): _IN_MODULE = '_in_module' TYPE_MAPPING = [ - (re.compile(r'(.*)\(\S+ method\)$'), types.METHOD), - (re.compile(r'(.*)\(.*function\)$'), types.FUNCTION), + (re.compile(r'([^ (]*)(?:\(\))? ?\(\S+ method\)$'), types.METHOD), + (re.compile(r'([^ (]*)(?:\(\))? ?\(.*function\)$'), types.FUNCTION), (re.compile(r'(.*)\(\S+ attribute\)$'), types.ATTRIBUTE), (re.compile(r'(.*)\(\S+ member\)$'), types.ATTRIBUTE), (re.compile(r'(.*)\(class in \S+\)$'), types.CLASS),
Don't collect () as part of func and method names Complying with dash's default style.
py
diff --git a/src/anyconfig/processors.py b/src/anyconfig/processors.py index <HASH>..<HASH> 100644 --- a/src/anyconfig/processors.py +++ b/src/anyconfig/processors.py @@ -10,6 +10,9 @@ r"""Abstract processor module. - Add to abstract processors such like Parsers (loaders and dumpers). """ import operator +import typing +import warnings + import pkg_resources import anyconfig.ioinfo @@ -21,21 +24,22 @@ from anyconfig.globals import ( ) -def load_plugins(pgroup: str, safe: bool = True): +ProcT = typing.TypeVar('ProcT', bound=anyconfig.models.processor.Processor) +ProcClsT = typing.Type[ProcT] + + +def load_plugins(pgroup: str) -> typing.Iterator[ProcClsT]: """ - Generator to yield a :class:`anyconfig.models.processor.Processor` object. + A generator function to yield an instance of + :class:`anyconfig.models.processor.Processor`. :param pgroup: A string represents plugin type, e.g. anyconfig_backends - :param safe: Do not raise ImportError during load if True - :raises: ImportError """ for res in pkg_resources.iter_entry_points(pgroup): try: yield res.load() - except ImportError: - if safe: - continue - raise + except ImportError as exc: + warnings.warn(f'Failed to load plugin, exc={exc!s}') def sort_by_prio(prs):
change: ensure anyconfig.processors.load_plugins not raise exceptions Ensure anyconfig.processors.load_plugins not raise exceptions and ignore errors during load of plugins with warnings to protect from broken plugin modules.
py
diff --git a/openquake/calculators/event_based.py b/openquake/calculators/event_based.py index <HASH>..<HASH> 100644 --- a/openquake/calculators/event_based.py +++ b/openquake/calculators/event_based.py @@ -19,6 +19,7 @@ import os.path import logging import collections +import itertools import operator import numpy @@ -261,11 +262,14 @@ class EventBasedCalculator(base.HazardCalculator): rgetters = self.gen_rupture_getters() # build the associations eid -> rlz sequentially or in parallel - distribute = 'no' if len(events) < 1E5 else None - smap = parallel.Starmap(RuptureGetter.get_eid_rlz, - ((rgetter,) for rgetter in rgetters), - progress=logging.debug, distribute=distribute, - hdf5path=self.datastore.filename) + if len(events) < 1E5: + smap = itertools.starmap(RuptureGetter.get_eid_rlz, + ((rgetter,) for rgetter in rgetters)) + else: + smap = parallel.Starmap(RuptureGetter.get_eid_rlz, + ((rgetter,) for rgetter in rgetters), + progress=logging.debug, + hdf5path=self.datastore.filename) i = 0 for eid_rlz in smap: # 30 million of events associated in 1 minute! for er in eid_rlz:
Avoided zmq for few events [skip hazardlib]
py
diff --git a/progressbar/bar.py b/progressbar/bar.py index <HASH>..<HASH> 100644 --- a/progressbar/bar.py +++ b/progressbar/bar.py @@ -347,7 +347,7 @@ class ProgressBar(StdRedirectMixin, ResizableMixin, ProgressBarBase): if max_value is None: try: self.max_value = len(iterable) - except: + except TypeError: if self.max_value is None: self.max_value = UnknownLength else:
Added correct exception type to ProgressBar.__call__
py
diff --git a/maildir_deduplicate/tests/test_deduplicate.py b/maildir_deduplicate/tests/test_deduplicate.py index <HASH>..<HASH> 100644 --- a/maildir_deduplicate/tests/test_deduplicate.py +++ b/maildir_deduplicate/tests/test_deduplicate.py @@ -290,9 +290,9 @@ class TestDateStrategy(TestDeduplicate): maildir_path = './strategy_date' newest_date = arrow.utcnow() - newer_date = newest_date.replace(minutes=-1) - older_date = newest_date.replace(minutes=-2) - oldest_date = newest_date.replace(minutes=-3) + newer_date = newest_date.shift(minutes=-1) + older_date = newest_date.shift(minutes=-2) + oldest_date = newest_date.shift(minutes=-3) newest_mail = MailFactory(date=newest_date) newer_mail = MailFactory(date=newer_date)
Fix call to deprecated Arrow time shifting methods.
py
diff --git a/puput/models.py b/puput/models.py index <HASH>..<HASH> 100644 --- a/puput/models.py +++ b/puput/models.py @@ -140,6 +140,19 @@ class EntryPage(Entry, Page): parent_page_types = ['puput.BlogPage'] subpage_types = [] + def get_sitemap_urls(self, request=None): + from .urls import get_entry_url + root_url = self.get_url_parts()[1] + entry_url = get_entry_url(self, self.blog_page.page_ptr, root_url) + return [ + { + 'location': root_url + entry_url, + # fall back on latest_revision_created_at if last_published_at is null + # (for backwards compatibility from before last_published_at was added) + 'lastmod': (self.last_published_at or self.latest_revision_created_at), + } + ] + @property def blog_page(self): return self.get_parent().specific
Duplicate pages #<I>: Define a get_sitemap_urls method to include the year/month/day prefix (#<I>)
py
diff --git a/abydos/distance/_meta_levenshtein.py b/abydos/distance/_meta_levenshtein.py index <HASH>..<HASH> 100644 --- a/abydos/distance/_meta_levenshtein.py +++ b/abydos/distance/_meta_levenshtein.py @@ -240,7 +240,16 @@ class MetaLevenshtein(_Distance): if src == tar: return 0.0 - return self.dist_abs(src, tar) / self._normalizer([len(src), len(tar)]) + return self.dist_abs(src, tar) / ( + self._normalizer( + [ + self.dist_abs(src, ' ' * len(tar)), + self.dist_abs(src, ' ' * len(src)), + ] + ) + if self._corpus + else self._normalizer([len(src), len(tar)]) + ) if __name__ == '__main__':
adjusted normalization if a corpus is available
py
diff --git a/pysat/_params.py b/pysat/_params.py index <HASH>..<HASH> 100644 --- a/pysat/_params.py +++ b/pysat/_params.py @@ -164,7 +164,7 @@ class Parameters(object): """ dir_path = os.path.split(self.file_path)[0] - out_str = ''.join(('pysat._params.Parameters(path="', dir_path, '")')) + out_str = ''.join(('pysat._params.Parameters(path=r"', dir_path, '")')) return out_str def __str__(self, long_str=True):
BUG: Ensure path string is a raw string
py
diff --git a/test/file_test.py b/test/file_test.py index <HASH>..<HASH> 100644 --- a/test/file_test.py +++ b/test/file_test.py @@ -177,8 +177,8 @@ class LocalTargetTest(unittest.TestCase, FileSystemTargetTestMixin): bt=['', 'b', 't'], plus=['', '+']): p = itertools.product(rwax, plus, bt) - return set( [''.join(c) for c in - list(itertools.chain.from_iterable( + return set([''.join(c) for c in + list(itertools.chain.from_iterable( [itertools.permutations(m) for m in p]))]) def valid_io_modes(self, *a, **kw): @@ -207,7 +207,7 @@ class LocalTargetTest(unittest.TestCase, FileSystemTargetTestMixin): def invalid_io_modes_for_luigi(self): return self.valid_io_modes().difference( - self.valid_write_io_modes_for_luigi(), + self.valid_write_io_modes_for_luigi(), self.valid_read_io_modes_for_luigi()) def test_open_modes(self):
next time, I'll read pep8
py
diff --git a/src/satellitelink.py b/src/satellitelink.py index <HASH>..<HASH> 100644 --- a/src/satellitelink.py +++ b/src/satellitelink.py @@ -49,6 +49,8 @@ class SatelliteLink(Item): def create_connexion(self): self.uri = "PYROLOC://"+self.address+":"+str(self.port)+"/ForArbiter" self.con = Pyro.core.getProxyForURI(self.uri) + #Ok, set timeout to 5 sec + self.con._setTimeout(5) def put_conf(self, conf):
Add a timeout for satellite and scheduler connections. Set to 5 seconds.
py
diff --git a/graphics/canvas.py b/graphics/canvas.py index <HASH>..<HASH> 100644 --- a/graphics/canvas.py +++ b/graphics/canvas.py @@ -9,9 +9,11 @@ from . import shapes class Canvas: """ size = (int width, int height) + fullscreen = bool background = char center = bool border = bool + wrap = bool """ def __init__(self, size = (40, 30), @@ -39,10 +41,10 @@ class Canvas: def __repr__(self): return ('Canvas(size={!r}, fullscreen={!r}, background={!r}, ' - 'center={!r}, border={!r})' + 'center={!r}, border={!r}, wrap={!r})' ).format(self.size, self.fullscreen, self.background, self.center, - self.border) + self.border, self.wrap) def __str__(self): """
Updated docstring and repr
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100755 --- a/setup.py +++ b/setup.py @@ -51,7 +51,6 @@ setup(name='icetea', "icetea=icetea_lib:icetea_main" ] }, - dependency_links=["git+https://github.com/ARMmbed/mbed-flasher@v0.6.3#egg=mbed-flasher"], install_requires=[ "prettytable", "requests", @@ -62,7 +61,7 @@ setup(name='icetea', "psutil", "mbed-ls==1.3.6", "semver", - "mbed-flasher", + "mbed-flasher==0.6.3", "six" ] )
Removed dependency link to mbed-flasher since that is no longer needed. (#<I>)
py
diff --git a/projects/ninux/ninux/wsgi.py b/projects/ninux/ninux/wsgi.py index <HASH>..<HASH> 100755 --- a/projects/ninux/ninux/wsgi.py +++ b/projects/ninux/ninux/wsgi.py @@ -28,7 +28,7 @@ application = get_wsgi_application() # Apply WSGI middleware here. # from helloworld.wsgi import HelloWorldApplication # application = HelloWorldApplication(application) -#from ninux.settings import DEBUG -#if DEBUG: -# from ninux import monitor -# monitor.start(interval=1.0) +from ninux.settings import DEBUG +if DEBUG: + from ninux import monitor + monitor.start(interval=1.0)
Enabled code change monitor for development environment (DEBUG = True)
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -178,6 +178,22 @@ if install: path_out = prep(hydpy.docs.rst.__path__[0], filename) source2target(path_in, path_out) + # Make all additional data files available. + print_('\nCopy data files:') + import hydpy.data + dir_input = os.path.join('hydpy', 'data') + dir_output = hydpy.data.__path__[0] + for subdir_input, dirs, files in os.walk(dir_input): + subdir_output = subdir_input.replace(dir_input, dir_output, 1) + if not os.path.exists(subdir_output): + os.makedirs(subdir_output) + for file_ in files: + filepath_input = os.path.join(subdir_input, file_) + filepath_output = os.path.join(subdir_output, file_) + if os.path.exists(filepath_output): + os.remove(filepath_output) + shutil.copy(filepath_input, subdir_output) + # Make all additional figures available. print_('\nCopy figures:') import hydpy.docs.figs
Let `setup.py` copy all contents of subpackage `data` into the site-package folder.
py
diff --git a/tests/test_commands.py b/tests/test_commands.py index <HASH>..<HASH> 100644 --- a/tests/test_commands.py +++ b/tests/test_commands.py @@ -1,5 +1,5 @@ import unittest -import os, shutil, tempfile +import os, shutil, tempfile, types import pdb from fragman.__main__ import ExecutionError, init, stat, add @@ -65,6 +65,13 @@ class TestInitCommand(CommandBase): class PostInitCommandMixIn(object): + def setUp(self): + super(CommandBase, self).setUp() + + def test_command_attribute_set_properly(self): + self.assertTrue(isinstance(self.command, types.FunctionType), + "%s.command attribute must be a staticmethod." % type(self).__name__) + def test_command_raises_error_before_init(self): self.assertRaises(ConfigurationDirectoryNotFound, self.command)
make sure command attribute is set properly to avoid mysterious test failures (self will be passed as first argument to command)
py
diff --git a/nodeconductor/structure/handlers.py b/nodeconductor/structure/handlers.py index <HASH>..<HASH> 100644 --- a/nodeconductor/structure/handlers.py +++ b/nodeconductor/structure/handlers.py @@ -59,17 +59,17 @@ def log_project_save(sender, instance, created=False, **kwargs): if created: event_logger.info( 'Project %s has been created.', instance.name, - extra={'project': instance, 'event_type': 'project_created'} + extra={'project': instance, 'event_type': 'project_creation_succeeded'} ) else: event_logger.info( 'Project %s has been updated.', instance.name, - extra={'project': instance, 'event_type': 'project_updated'} + extra={'project': instance, 'event_type': 'project_update_succeeded'} ) def log_project_delete(sender, instance, **kwargs): event_logger.info( 'Project %s has been deleted.', instance.name, - extra={'project': instance, 'event_type': 'project_deleted'} + extra={'project': instance, 'event_type': 'project_deletion_succeeded'} )
Change event types names NC-<I>
py
diff --git a/test/test_mail_v2.py b/test/test_mail_v2.py index <HASH>..<HASH> 100644 --- a/test/test_mail_v2.py +++ b/test/test_mail_v2.py @@ -98,7 +98,10 @@ class TestSendGrid(unittest.TestCase): } ''')) - self.assertEqual(url, test_url) + try: + self.assertItemsEqual(url, test_url) + except: # Python 3+ + self.assertCountEqual(url, test_url) @unittest.skipUnless(sys.version_info < (3, 0), 'only for python2') def test__build_body_unicode(self):
We don't care about the order of the results, only that they match.
py
diff --git a/woven/project.py b/woven/project.py index <HASH>..<HASH> 100644 --- a/woven/project.py +++ b/woven/project.py @@ -74,7 +74,7 @@ def deploy_project(): if env.verbosity: print env.host,"DEPLOYING project", env.project_fullname #Exclude a few things that we don't want deployed as part of the project folder - rsync_exclude = ['*.pyc','*.log','.*','/build','/dist','/media*','/static*','/www','/public','/templates'] + rsync_exclude = ['local_settings*','*.pyc','*.log','.*','/build','/dist','/media*','/static*','/www','/public','/templates'] #make site local settings if they don't already exist _make_local_sitesettings()
deploy_project now excludes local_settings
py
diff --git a/astrobase/checkplot.py b/astrobase/checkplot.py index <HASH>..<HASH> 100644 --- a/astrobase/checkplot.py +++ b/astrobase/checkplot.py @@ -1433,16 +1433,16 @@ def _pkl_finder_objectinfo(objectinfo, if ((300.0 - annotatex) > 50.0): offx = annotatex + 25.0 - xha = 'left' + xha = 'center' else: offx = annotatex - 25.0 - xha = 'right' + xha = 'center' if ((300.0 - annotatey) > 50.0): offy = annotatey - 25.0 - yha = 'bottom' + yha = 'center' else: offy = annotatey + 25.0 - yha = 'top' + yha = 'center' plt.annotate('N%s' % nbrind, (annotatex, annotatey),
lcproc: add neighbor stuff to parallel_cp workers and driver
py
diff --git a/pifpaf/drivers/__init__.py b/pifpaf/drivers/__init__.py index <HASH>..<HASH> 100644 --- a/pifpaf/drivers/__init__.py +++ b/pifpaf/drivers/__init__.py @@ -154,9 +154,9 @@ class Driver(fixtures.Fixture): stdout_fd = subprocess.DEVNULL if stdin: - stdout_fd = subprocess.PIPE + stdin_fd = subprocess.PIPE else: - stdout_fd = subprocess.DEVNULL + stdin_fd = subprocess.DEVNULL if path or env: complete_env = dict(os.environ)
fix: typo in stdout handling
py
diff --git a/django_q/models.py b/django_q/models.py index <HASH>..<HASH> 100644 --- a/django_q/models.py +++ b/django_q/models.py @@ -141,6 +141,7 @@ class Schedule(models.Model): return self.func success.boolean = True + last_run.allow_tags = True class Meta: app_label = 'django_q'
fixed regression of task link in schedule admin
py
diff --git a/andes/plot.py b/andes/plot.py index <HASH>..<HASH> 100644 --- a/andes/plot.py +++ b/andes/plot.py @@ -717,6 +717,7 @@ def set_latex(enable=True): if has_dvipng and enable: mpl.rc('text', usetex=True) + logger.info('Using LaTeX for rendering. If it takes too long, use option `-d` to disable it.') return True else: mpl.rc('text', usetex=False)
Added info for LaTeX rendering
py
diff --git a/src/MQTTLibrary/version.py b/src/MQTTLibrary/version.py index <HASH>..<HASH> 100644 --- a/src/MQTTLibrary/version.py +++ b/src/MQTTLibrary/version.py @@ -1 +1 @@ -VERSION = '0.1.0.dev1' +VERSION = '0.2.0.dev1'
Update version to publish to pypi
py
diff --git a/models.py b/models.py index <HASH>..<HASH> 100644 --- a/models.py +++ b/models.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # ## This file is part of Invenio. -## Copyright (C) 2011, 2012, 2013, 2014 CERN. +## Copyright (C) 2011, 2012, 2013, 2014, 2015 CERN. ## ## Invenio is free software; you can redistribute it and/or ## modify it under the terms of the GNU General Public License as @@ -331,6 +331,8 @@ class UserEXT(db.Model): id_user = db.Column(db.Integer(15, unsigned=True), db.ForeignKey(User.id), nullable=False) + user = db.relationship(User, backref="external_identifiers") + __table_args__ = (db.Index('id_user', id_user, method, unique=True), db.Model.__table_args__)
oauthclient: local account discovery improvement * Uses userEXT table to do a lookup of the external identifier in order to find the local account corresponding to the user that logs in using oauth. (closes #<I>)
py
diff --git a/lib/svtplay_dl/utils/output.py b/lib/svtplay_dl/utils/output.py index <HASH>..<HASH> 100644 --- a/lib/svtplay_dl/utils/output.py +++ b/lib/svtplay_dl/utils/output.py @@ -135,10 +135,10 @@ def formatname(output, config, extension): if key == "title" and output[key]: name = name.replace("{title}", filenamify(output[key])) if key == "season" and output[key]: - number = "{1:02d}".format(output[key]) + number = "{0:02d}".format(int(output[key])) name = name.replace("{season}", number) if key == "episode" and output[key]: - number = "{1:02d}".format(output[key]) + number = "{0:02d}".format(int(output[key])) name = name.replace("{episode}", number) if key == "episodename" and output[key]: name = name.replace("{episodename}", filenamify(output[key]))
output.formatname: this should be ints
py
diff --git a/openquake/hazardlib/nrml.py b/openquake/hazardlib/nrml.py index <HASH>..<HASH> 100644 --- a/openquake/hazardlib/nrml.py +++ b/openquake/hazardlib/nrml.py @@ -74,13 +74,12 @@ supplemented by a dictionary of validators. import io import re import sys -import logging import operator import collections.abc import numpy -from openquake.baselib import hdf5, performance +from openquake.baselib import hdf5 from openquake.baselib.general import CallableDict, groupby from openquake.baselib.node import ( node_to_xml, Node, striptag, ValidatingXmlParser, floatformat)
Removed unused imports [skip CI]
py
diff --git a/salt/modules/cmd.py b/salt/modules/cmd.py index <HASH>..<HASH> 100644 --- a/salt/modules/cmd.py +++ b/salt/modules/cmd.py @@ -127,7 +127,7 @@ def has_exec(cmd): return True return False -def exec_code(lang, code): +def exec_code(lang, code, cwd=DEFAULT_CWD): ''' Pass in two strings, the first naming the executable language, aka - python2, python3, ruby, perl, lua, etc. the second string containing @@ -136,7 +136,7 @@ def exec_code(lang, code): CLI Example: salt '*' cmd.exec_code ruby 'puts "cheese"' ''' - cfn = tempfile.mkstemp() + fd, cfn = tempfile.mkstemp() open(cfn, 'w+').write(code) return subprocess.Popen(lang + ' ' + cfn, shell=True,
Fixed two syntax issues with cmd.exec_code
py
diff --git a/pycbc/workflow/jobsetup.py b/pycbc/workflow/jobsetup.py index <HASH>..<HASH> 100644 --- a/pycbc/workflow/jobsetup.py +++ b/pycbc/workflow/jobsetup.py @@ -635,6 +635,7 @@ class PyCBCInspiralExecutable(Executable): constant_psd_segs = int(self.get_opt('psd-recalculate-segments')) if constant_psd_segs is None: constant_psd_segs = min_analysis_segs + max_analysis_segs = min_analysis_segs if min_analysis_segs % constant_psd_segs != 0: raise ValueError('Constant PSD segments does not evenly divide the '
use min analysis segs when constant segs not given in inspiral job
py
diff --git a/tests/unit/test_ticketed_features.py b/tests/unit/test_ticketed_features.py index <HASH>..<HASH> 100644 --- a/tests/unit/test_ticketed_features.py +++ b/tests/unit/test_ticketed_features.py @@ -869,7 +869,10 @@ def test_api_to_allow_custom_diff_and_output_stream_1583(capsys, tmpdir): assert not error isort_diff.seek(0) - assert "+import a\n import b\n-import a\n" in isort_diff.read() - + isort_diff_content = isort_diff.read() + assert "+import a" in isort_diff_content + assert " import b" in isort_diff_content + assert "-import a" in isort_diff_content + isort_output.seek(0) assert isort_output.read() == "import a\nimport b\n"
Update test_ticketed_features.py Update test to be OS newline agnostic for diff
py
diff --git a/nanoplot/NanoPlot.py b/nanoplot/NanoPlot.py index <HASH>..<HASH> 100755 --- a/nanoplot/NanoPlot.py +++ b/nanoplot/NanoPlot.py @@ -76,6 +76,7 @@ def main(): plots.extend( make_plots(dfbarc, settings) ) + settings["path"] = path.join(args.outdir, args.prefix) else: plots = make_plots(datadf, settings) make_report(plots, settings["path"], logfile)
reset path after processing all barcodes
py
diff --git a/wright/stage/c.py b/wright/stage/c.py index <HASH>..<HASH> 100644 --- a/wright/stage/c.py +++ b/wright/stage/c.py @@ -59,10 +59,12 @@ int main() { } ''' - def __call__(self, name, args=()): + def __call__(self, name, headers=()): source = self.source % (name,) + for header in headers: + source = '#include <{}>\n'.format(header) with TempFile('define', '.c', content=source) as temp: - return super(CheckDefine, self).__call__(temp.filename, args, run=True) + return super(CheckDefine, self).__call__(temp.filename, run=True) class CheckFeature(CheckCompile):
c: pass headers in CheckDefine
py
diff --git a/datasette/utils.py b/datasette/utils.py index <HASH>..<HASH> 100644 --- a/datasette/utils.py +++ b/datasette/utils.py @@ -221,8 +221,11 @@ def detect_fts_sql(table): return r''' select name from sqlite_master where rootpage = 0 - and sql like '%VIRTUAL TABLE%USING FTS%content="{}"%'; - '''.format(table) + and ( + sql like '%VIRTUAL TABLE%USING FTS%content="{table}"%' + or tbl_name = "{table}" + ) + '''.format(table=table) class Filter:
?_search=x now works directly against fts virtual table Closes #<I>
py
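A self-contained sketch of the query the widened helper now builds, using a hypothetical table name "docs":

template = '''
select name from sqlite_master
    where rootpage = 0
    and (
        sql like '%VIRTUAL TABLE%USING FTS%content="{table}"%'
        or tbl_name = "{table}"
    )
'''
print(template.format(table="docs"))  # matches both a content="docs" reference and a direct tbl_name hit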
diff --git a/teneto/neuroimagingtools/fmriutils.py b/teneto/neuroimagingtools/fmriutils.py index <HASH>..<HASH> 100644 --- a/teneto/neuroimagingtools/fmriutils.py +++ b/teneto/neuroimagingtools/fmriutils.py @@ -67,7 +67,7 @@ def make_parcellation(data_path, atlas, template='MNI152NLin2009cAsym', atlas_de data = region.fit_transform(data_path).transpose() data = pd.DataFrame(data=data) meta_info = tf.get(template=template, atlas=atlas, - desc=atlas_desc, extension='tsv') + desc=atlas_desc, extension='.tsv') if len(str(meta_info)) > 0: meta_info = load_tabular_file(str(meta_info)) data.index = meta_info['name'].values
update extension in make_parcellation
py
diff --git a/lp.py b/lp.py index <HASH>..<HASH> 100644 --- a/lp.py +++ b/lp.py @@ -89,7 +89,7 @@ class TokenStreamer(object): @staticmethod def tokenize(line): - token_iter = (m.group(0) for m in re.finditer(r'[-+*/(){}=%]|[A-Za-z]+|\d+', line)) + token_iter = (m.group(0) for m in re.finditer(r'[-+*/(){}=%]|[A-Za-z][A-Za-z0-9]+|\d+', line)) return list(token_iter) def has_nxt_line(self):
Fixed regex to support alphanumeric variable names.
py
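A quick check of the new pattern against an invented line shows alphanumeric names now tokenize as single units:

import re

pattern = r'[-+*/(){}=%]|[A-Za-z][A-Za-z0-9]+|\d+'
line = "total2 = count1 * 10"
print([m.group(0) for m in re.finditer(pattern, line)])
# ['total2', '=', 'count1', '*', '10']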
diff --git a/charmhelpers/contrib/hahelpers/cluster.py b/charmhelpers/contrib/hahelpers/cluster.py index <HASH>..<HASH> 100644 --- a/charmhelpers/contrib/hahelpers/cluster.py +++ b/charmhelpers/contrib/hahelpers/cluster.py @@ -12,11 +12,13 @@ import os from socket import gethostname as get_unit_hostname from charmhelpers.core.hookenv import ( - log as juju_log, + log, relation_ids, related_units as relation_list, relation_get, config as config_get, + INFO, + ERROR, ) @@ -71,12 +73,12 @@ def oldest_peer(peers): def eligible_leader(resource): if is_clustered(): if not is_leader(resource): - juju_log('INFO', 'Deferring action to CRM leader.') + log('Deferring action to CRM leader.', level=INFO) return False else: peers = peer_units() if peers and not oldest_peer(peers): - juju_log('INFO', 'Deferring action to oldest service unit.') + log('Deferring action to oldest service unit.', level=INFO) return False return True @@ -153,7 +155,7 @@ def get_hacluster_config(): missing = [] [missing.append(s) for s, v in conf.iteritems() if v is None] if missing: - juju_log('Insufficient config data to configure hacluster.') + log('Insufficient config data to configure hacluster.', level=ERROR) raise HAIncompleteConfig return conf
Fix log vs juju_log usage.
py
diff --git a/pyemu/utils/helpers.py b/pyemu/utils/helpers.py index <HASH>..<HASH> 100644 --- a/pyemu/utils/helpers.py +++ b/pyemu/utils/helpers.py @@ -4098,7 +4098,7 @@ class PstFrom(object): self.logger.statement("all done") - + def setup_mult_dirs(self):
a little more on the refactor
py
diff --git a/spacy/language.py b/spacy/language.py index <HASH>..<HASH> 100644 --- a/spacy/language.py +++ b/spacy/language.py @@ -1,4 +1,5 @@ -from typing import Optional, Any, Dict, Callable, Iterable, Union, List, Pattern +from typing import Iterator, Optional, Any, Dict, Callable, Iterable, TypeVar +from typing import Union, List, Pattern, overload from typing import Tuple from dataclasses import dataclass import random @@ -1431,6 +1432,21 @@ class Language: except StopIteration: pass + _AnyContext = TypeVar("_AnyContext") + + @overload + def pipe( + self, + texts: Iterable[Tuple[str, _AnyContext]], + *, + as_tuples: bool = ..., + batch_size: Optional[int] = ..., + disable: Iterable[str] = ..., + component_cfg: Optional[Dict[str, Dict[str, Any]]] = ..., + n_process: int = ..., + ) -> Iterator[Tuple[Doc, _AnyContext]]: + ... + def pipe( self, texts: Iterable[str], @@ -1440,7 +1456,7 @@ class Language: disable: Iterable[str] = SimpleFrozenList(), component_cfg: Optional[Dict[str, Dict[str, Any]]] = None, n_process: int = 1, - ): + ) -> Iterator[Doc]: """Process texts as a stream, and yield `Doc` objects in order. texts (Iterable[str]): A sequence of texts to process.
Add the right return type for Language.pipe and an overload for the as_tuples case (#<I>) * Add the right return type for Language.pipe and an overload for the as_tuples version * Reformat, tidy up
py
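A sketch of the call pattern the new overload is meant to type-check; the model name and texts are placeholders, and the model must be installed:

import spacy

nlp = spacy.load("en_core_web_sm")
pairs = [("First text", {"id": 1}), ("Second text", {"id": 2})]
for doc, context in nlp.pipe(pairs, as_tuples=True):  # now typed as Iterator[Tuple[Doc, _AnyContext]]
    print(doc.text, context["id"])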
diff --git a/billy/bin/update.py b/billy/bin/update.py index <HASH>..<HASH> 100755 --- a/billy/bin/update.py +++ b/billy/bin/update.py @@ -306,7 +306,7 @@ def main(): args.types.append('events') if 'speeches' in metadata['feature_flags']: - args.types.append('events') + args.types.append('speeches') plan = """billy-update abbr=%s actions=%s
typo - events --> speeches
py
diff --git a/salt/cli/salt.py b/salt/cli/salt.py index <HASH>..<HASH> 100644 --- a/salt/cli/salt.py +++ b/salt/cli/salt.py @@ -253,6 +253,8 @@ class SaltCMD(parsers.SaltCMDOptionParser): not_connected_minions = [] for each_minion in ret: minion_ret = ret[each_minion] + if isinstance(minion_ret, dict) and 'ret' in minion_ret: + minion_ret = ret[each_minion].get('ret') if ( isinstance(minion_ret, string_types) and minion_ret.startswith("Minion did not return")
fix salt --summary to count non-responding minions correctly (#<I>) In case a minion is not responding, a dict is returned instead of a string.
py
diff --git a/dipper/config.py b/dipper/config.py index <HASH>..<HASH> 100644 --- a/dipper/config.py +++ b/dipper/config.py @@ -12,16 +12,17 @@ conf = {} ''' Load the configuration file 'conf.json', if it exists. it isn't always required, but may be for some sources. + conf.json may contain sensitive info and should not live in a public repo ''' if os.path.exists(os.path.join(os.path.dirname(__file__), 'conf.json')): with open( - os.path.join(os.path.dirname(__file__), - 'conf.json')) as json_file: + os.path.join( + os.path.dirname(__file__), 'conf.json')) as json_file: conf = json.load(json_file) - logger.debug("Finished loading config") + logger.debug("Finished loading dipper/config.json") else: - logger.warning("'conf.json' not found in '%s'", os.path.dirname(__file__)) + logger.warning("'dipper/conf.json' not found in '%s'", os.path.dirname(__file__)) logger.warning("Sources that depend on 'conf.json' will fail")
be specific in comments about what 'config' is
py
diff --git a/treebeard/templatetags/admin_tree.py b/treebeard/templatetags/admin_tree.py index <HASH>..<HASH> 100644 --- a/treebeard/templatetags/admin_tree.py +++ b/treebeard/templatetags/admin_tree.py @@ -12,7 +12,10 @@ from django.db import models from django.conf import settings from django.contrib.admin.templatetags.admin_list import ( result_headers, result_hidden_fields) -from django.contrib.admin.util import lookup_field, display_for_field +try: + from django.contrib.admin.utils import lookup_field, display_for_field +except ImportError: # < Django 1.8 + from django.contrib.admin.util import lookup_field, display_for_field from django.contrib.admin.views.main import EMPTY_CHANGELIST_VALUE from django.core.exceptions import ObjectDoesNotExist from django.template import Library
Fixed ImportError in current Django `master` because `django.contrib.admin.util` became `django.contrib.admin.utils`
py
diff --git a/spyder/plugins/editor/widgets/base.py b/spyder/plugins/editor/widgets/base.py index <HASH>..<HASH> 100644 --- a/spyder/plugins/editor/widgets/base.py +++ b/spyder/plugins/editor/widgets/base.py @@ -43,7 +43,7 @@ if is_dark_interface(): MAIN_BG_COLOR = '#19232D' MAIN_DEFAULT_FG_COLOR = '#ffffff' MAIN_ERROR_FG_COLOR = '#FF0000' - MAIN_TB_FG_COLOR = '#0000FF' + MAIN_TB_FG_COLOR = '#2980b9' MAIN_PROMPT_FG_COLOR = '#00AA00' else: MAIN_BG_COLOR = 'white'
Internal console: Change color of traceback links in dark mode
py
diff --git a/dev/prepare-distribution.py b/dev/prepare-distribution.py index <HASH>..<HASH> 100755 --- a/dev/prepare-distribution.py +++ b/dev/prepare-distribution.py @@ -58,7 +58,7 @@ def main(): # Create virtualenv. subprocess.check_call( - ["virtualenv", "--python", base_python_path, venv_path], cwd=proj_path + [base_python_path, "-m", "venv", venv_path], cwd=proj_path ) pip_path = os.path.join(venv_path, 'bin', 'pip')
Adjusted prepare-distribution.py.
py
diff --git a/rows/utils.py b/rows/utils.py index <HASH>..<HASH> 100644 --- a/rows/utils.py +++ b/rows/utils.py @@ -143,6 +143,12 @@ class ProgressBar: ) self.started = False + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, traceback): + self.close() + @property def description(self): return self.progress.desc @@ -161,7 +167,7 @@ class ProgressBar: self.progress.total = value self.progress.refresh() - def update(self, last_done=None, total_done=None): + def update(self, last_done=1, total_done=None): if not last_done and not total_done: raise ValueError('Either last_done or total_done must be specified') @@ -563,6 +569,12 @@ class CsvLazyDictWriter: self.encoding = encoding self._fobj = None + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, traceback): + self.close() + @property def fobj(self): if self._fobj is None:
Make CsvLazyDictWriter and ProgressBar context managers
py
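A sketch of the usage these __enter__/__exit__ additions enable; the constructor argument and writerow call are assumptions about the surrounding class, not shown in the diff:

from rows.utils import CsvLazyDictWriter

with CsvLazyDictWriter("output.csv") as writer:  # assumed: first argument is the output filename
    writer.writerow({"name": "alice", "age": 30})  # assumed: dict rows are written lazily
# close() runs automatically when the block exits, even on error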
diff --git a/holoviews/plotting/mpl/element.py b/holoviews/plotting/mpl/element.py index <HASH>..<HASH> 100644 --- a/holoviews/plotting/mpl/element.py +++ b/holoviews/plotting/mpl/element.py @@ -174,9 +174,6 @@ class ElementPlot(GenericElementPlot, MPLPlot): if dimensions: self._set_labels(axis, dimensions, xlabel, ylabel, zlabel) - # Set axes limits - self._set_axis_limits(axis, element, subplots, ranges) - if not subplots: legend = axis.get_legend() if legend: @@ -199,6 +196,9 @@ class ElementPlot(GenericElementPlot, MPLPlot): if self.apply_ticks: self._finalize_ticks(axis, dimensions, xticks, yticks, zticks) + # Set axes limits + self._set_axis_limits(axis, element, subplots, ranges) + # Apply aspects self._set_aspect(axis, self.aspect)
Set matplotlib axis ranges after ticks
py
diff --git a/monolithe/generators/sdk/cli.py b/monolithe/generators/sdk/cli.py index <HASH>..<HASH> 100755 --- a/monolithe/generators/sdk/cli.py +++ b/monolithe/generators/sdk/cli.py @@ -76,8 +76,7 @@ def main(argv=sys.argv): metavar="branches", help="The branches of the specifications to use to generate the documentation (examples: \"master 3.2\")", nargs="*", - type=str, - required=True) + type=str) parser.add_argument("-p", "--path", dest="repository_path", @@ -138,7 +137,6 @@ def main(argv=sys.argv): generator = SDKGenerator(monolithe_config=monolithe_config) - if args.folder: generator.initialize_folder_manager(folder=args.folder) if not monolithe_config: @@ -146,6 +144,10 @@ def main(argv=sys.argv): generator.generate_from_folder() else: + if not args.branches: + print "You must provide the --branches options. Use --help for help." + sys.exit(1) + # Use environment variable if necessary if not args.api_url and "MONOLITHE_GITHUB_API_URL" in os.environ: args.api_url = os.environ["MONOLITHE_GITHUB_API_URL"]
-b is not required for folder generation
py
diff --git a/programs/pmag_gui.py b/programs/pmag_gui.py index <HASH>..<HASH> 100755 --- a/programs/pmag_gui.py +++ b/programs/pmag_gui.py @@ -233,7 +233,7 @@ class MagMainFrame(wx.Frame): bSizer2.AddSpacer(20) #---sizer 3 ---- - bSizer3 = wx.StaticBoxSizer(wx.StaticBox(self.panel, wx.ID_ANY, "Upload to MagIC database"), wx.HORIZONTAL) + bSizer3 = wx.StaticBoxSizer(wx.StaticBox(self.panel, wx.ID_ANY, "Create file for upload to MagIC database"), wx.HORIZONTAL) text = "Create MagIC txt file for upload" self.btn_upload = buttons.GenButton(self.panel, id=-1, label=text, size=(300, 50))
clarify wording about uploads (creates a MagIC file, does not yet directly connect to database)
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -31,6 +31,7 @@ setup( , author = "Stephen Moore" , author_email = "delfick755@gmail.com" , description = "Python library to discover commit times of all files under a git repository" + , long_description = open("README.rst").read() , license = "MIT" , keywords = "git,commit,mtime" )
Show readme in pypi
py
diff --git a/tests/risk_job_unittest.py b/tests/risk_job_unittest.py index <HASH>..<HASH> 100644 --- a/tests/risk_job_unittest.py +++ b/tests/risk_job_unittest.py @@ -17,6 +17,7 @@ # version 3 along with OpenQuake. If not, see # <http://www.gnu.org/licenses/lgpl-3.0.txt> for a copy of the LGPLv3 License. +import numpy import os import unittest @@ -298,7 +299,8 @@ class RiskMixinTestCase(unittest.TestCase): loss_key = kvs.tokens.loss_key(job_id, row, col, asset["assetID"], loss_poe) - self.assertAlmostEqual(expected_result, float(kvs.get(loss_key)), 4) + self.assertTrue(numpy.allclose(expected_result, + float(kvs.get(loss_key)), atol=4)) def test_asset_losses_per_site(self): with patch('openquake.kvs.get') as get_mock:
using allclose to test AlmostEquality
py
diff --git a/tests/test_get.py b/tests/test_get.py index <HASH>..<HASH> 100644 --- a/tests/test_get.py +++ b/tests/test_get.py @@ -99,14 +99,8 @@ class GetTest(unittest.TestCase): self.assertEqual(o['get'], qs) def test_timeout(self): - try: - r = None + with self.assertRaises(socket.timeout) as tm: r = urlfetch.get('http://127.0.0.1:8800/sleep/1', timeout=0.5) - except Exception as e: - self.assertIsInstance(e, socket.timeout) - - self.assertIs(r, None) - if __name__ == '__main__':
test timeout: assertRaises
py
diff --git a/docker/api/service.py b/docker/api/service.py index <HASH>..<HASH> 100644 --- a/docker/api/service.py +++ b/docker/api/service.py @@ -197,7 +197,8 @@ class ServiceApiMixin(object): into the service inspect output. Returns: - ``True`` if successful. + (dict): A dictionary of the server-side representation of the + service, including all relevant properties. Raises: :py:class:`docker.errors.APIError`
Fix incorrect return info for inspect_service
py