diff
stringlengths 139
3.65k
| message
stringlengths 8
627
| diff_languages
stringclasses 1
value |
|---|---|---|
diff --git a/pypot/creatures/abstractcreature.py b/pypot/creatures/abstractcreature.py
index <HASH>..<HASH> 100644
--- a/pypot/creatures/abstractcreature.py
+++ b/pypot/creatures/abstractcreature.py
@@ -87,7 +87,7 @@ class AbstractPoppyCreature(Robot):
(not os.path.exists(os.path.join(scene_path, scene)))):
raise ValueError('Could not find the scene "{}"!'.format(scene))
- scene = os.path.join(scene_path, scene)
+ scene = os.path.join(scene_path, scene)
# TODO: use the id so we can have multiple poppy creatures
# inside a single vrep scene
|
Fix scene path import error due to wrong indentation
|
py
|
diff --git a/tests/test_backends.py b/tests/test_backends.py
index <HASH>..<HASH> 100644
--- a/tests/test_backends.py
+++ b/tests/test_backends.py
@@ -92,7 +92,7 @@ class TestBackends(unittest.TestCase):
items_pack = []
- for item in items:
+ for item in items.fetch():
item = self.__ocean_item(item)
if len(items_pack) >= enrich_backend.elastic.max_items_bulk:
logging.info("Adding %i (%i done) enriched items to %s",
|
[tests] Fix test to extract items from ocean using the generator instead of the iterator
|
py
|
diff --git a/saunter/ConfigWrapper.py b/saunter/ConfigWrapper.py
index <HASH>..<HASH> 100644
--- a/saunter/ConfigWrapper.py
+++ b/saunter/ConfigWrapper.py
@@ -19,6 +19,7 @@ ConfigWrapper
import ConfigParser
import os
import os.path
+import sys
class ConfigWrapper(object):
"""
@@ -31,9 +32,13 @@ class ConfigWrapper(object):
cls._instance = super(ConfigWrapper, cls).__new__(cls, *args, **kwargs)
return cls._instance
- def configure(self, config = "saunter.ini"):
- self.config = ConfigParser.SafeConfigParser()
- self.config.readfp(open(os.path.join("conf", config)))
+ def configure(self, config = "saunter.yaml"):
+ try:
+ self.config = ConfigParser.SafeConfigParser()
+ self.config.readfp(open(os.path.join("conf", config)))
+ except IOError:
+ print("Could not find %s; are you sure you remembered to create one?" % os.path.join("conf", config))
+ sys.exit(1)
# initialize the singleton
try:
|
actually behave nice when the config file is missing
|
py
|
diff --git a/shell/log.py b/shell/log.py
index <HASH>..<HASH> 100644
--- a/shell/log.py
+++ b/shell/log.py
@@ -1,10 +1,21 @@
# -*- coding: utf-8 -*-
+
+from shell.utils import _print
import logging
-logger = logging.getLogger('Evolux Console')
-hdlr = logging.FileHandler('newshell.log')
+
+class ConsoleLogHandler(logging.StreamHandler):
+ def emit(self, record):
+ _print(self.format(record))
+
+ def flush(self):
+ pass
+
+
+logger = logging.getLogger('console app')
+hdlr = ConsoleLogHandler()
formatter = logging.Formatter('%(asctime)s %(levelname)s(%(lineno)s) %(message)s')
hdlr.setFormatter(formatter)
logger.addHandler(hdlr)
-logger.setLevel(logging.DEBUG)
+logger.setLevel(logging.INFO)
|
Making logger write messages to the console instead of file, reducing level to INFO
|
py
|
diff --git a/skitai/server/http_server.py b/skitai/server/http_server.py
index <HASH>..<HASH> 100644
--- a/skitai/server/http_server.py
+++ b/skitai/server/http_server.py
@@ -345,12 +345,13 @@ class http_server (asyncore.dispatcher):
ACTIVE_WORKERS += 1
signal.signal(signal.SIGHUP, hHUPMASTER)
signal.signal(signal.SIGTERM, hTERMMASTER)
+ signal.signal(signal.SIGINT, hTERMMASTER)
signal.signal(signal.SIGQUIT, hQUITMASTER)
signal.signal (signal.SIGCHLD, hCHLD)
time.sleep (1)
except KeyboardInterrupt:
- EXITCODE = 0
+ pass
if self.worker_ident == "master":
return EXITCODE
|
handle KeyboardInterrupt on posix
|
py
|
diff --git a/pyani/pyani_graphics.py b/pyani/pyani_graphics.py
index <HASH>..<HASH> 100644
--- a/pyani/pyani_graphics.py
+++ b/pyani/pyani_graphics.py
@@ -233,23 +233,23 @@ def add_mpl_colorbar(dfr, fig, dend, params, orientation='row'):
cblist.append(classdict[params.classes[name]])
except KeyError:
cblist.append(classdict[name])
- cbar = pd.Series(cblist)
+ colbar = pd.Series(cblist)
# Create colourbar axis - could capture if needed
if orientation == 'row':
cbaxes = fig.add_subplot(dend['gridspec'][0, 1])
- cbaxes.imshow([[bar] for bar in cbar.values],
+ cbaxes.imshow([[cbar] for cbar in colbar.values],
cmap=plt.get_cmap(pyani_config.MPL_CBAR),
interpolation='nearest', aspect='auto',
origin='lower')
else:
cbaxes = fig.add_subplot(dend['gridspec'][1, 0])
- cbaxes.imshow([cbar],
+ cbaxes.imshow([colbar],
cmap=plt.get_cmap(pyani_config.MPL_CBAR),
interpolation='nearest', aspect='auto',
origin='lower')
clean_axis(cbaxes)
- return cbar
+ return colbar
# Add labels to the heatmap axes
|
make minor code clean for pylint
|
py
|
diff --git a/daapserver/revision.py b/daapserver/revision.py
index <HASH>..<HASH> 100644
--- a/daapserver/revision.py
+++ b/daapserver/revision.py
@@ -42,6 +42,7 @@ class TreeRevisionStorage(object):
for set operations, since two sequential edits don't increment.
"""
self.last_operation = constants.NOOP
+ self.revision += 1
def clean(self, up_to_revision=None):
"""
|
Commit now raises revision. Fix for iTunes <I>
|
py
|
diff --git a/runtests.py b/runtests.py
index <HASH>..<HASH> 100755
--- a/runtests.py
+++ b/runtests.py
@@ -19,6 +19,13 @@ if not settings.configured:
'NAME': ':memory:'
}
},
+ CACHES = {
+ # By explicit since many tests also need the caching support
+ 'default': {
+ 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
+ 'LOCATION': 'unique-snowflake',
+ }
+ },
TEMPLATE_LOADERS = (
'django.template.loaders.app_directories.Loader',
'django.template.loaders.filesystem.Loader',
|
runtests: add CACHES setting
|
py
|
diff --git a/master/buildbot/worker/latent.py b/master/buildbot/worker/latent.py
index <HASH>..<HASH> 100644
--- a/master/buildbot/worker/latent.py
+++ b/master/buildbot/worker/latent.py
@@ -312,7 +312,7 @@ class AbstractLatentWorker(AbstractWorker):
return
if self.state == self.STATE_INSUBSTANTIATING:
- # TODO: wait until stop_instance completes just like when substantiating
+ yield self._insubstantiation_notifier.wait()
return
notify_cancel = self.state == self.STATE_SUBSTANTIATING
|
latent: Wait for ongoing insubstantiation to complete on duplicate call
|
py
|
diff --git a/doc/source/conf.py b/doc/source/conf.py
index <HASH>..<HASH> 100644
--- a/doc/source/conf.py
+++ b/doc/source/conf.py
@@ -59,7 +59,7 @@ html_favicon = "./images/logo-geco.ico"
# General information about the project.
project = 'PyGMQL'
copyright = '2017, Luca Nanni'
-author = 'Luca Nanni'
+author = 'Luca Nanni, Pietro Pinoli and Stefano Ceri'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
|
added other authors to the docs
|
py
|
diff --git a/python/test_gumath.py b/python/test_gumath.py
index <HASH>..<HASH> 100644
--- a/python/test_gumath.py
+++ b/python/test_gumath.py
@@ -59,8 +59,6 @@ class TestCall(unittest.TestCase):
end = time.time()
sys.stderr.write("\ngumath: time=%s\n" % (end-start))
- continue
-
if np is not None:
a = np.array(lst, dtype="float64")
|
Re-enable numpy tests.
|
py
|
diff --git a/hybridLFPy/population.py b/hybridLFPy/population.py
index <HASH>..<HASH> 100644
--- a/hybridLFPy/population.py
+++ b/hybridLFPy/population.py
@@ -1305,8 +1305,6 @@ class Population(PopulationSuper):
synParams = synParams,
idx = self.synIdx[cellindex][X][j],
SpCell = self.SpCells[cellindex][X][j],
- #SpTimes = os.path.join(self.savefolder,
- # self.networkSim.dbname),
synDelays = synDelays)
@@ -1332,8 +1330,6 @@ class Population(PopulationSuper):
Postsynaptic compartment indices.
SpCell : numpy.ndarray
Presynaptic spiking cells.
- #SpTimes : str
- # ':memory:' or path to on-disk spike time database.
synDelays : numpy.ndarray
Per connection specific delays.
@@ -1351,10 +1347,6 @@ class Population(PopulationSuper):
#Insert synapses in an iterative fashion
if hasattr(self.networkSim, 'db'):
spikes = self.networkSim.db.select(SpCell[:idx.size])
- #else:
- # db = GDF(SpTimes, new_db=False)
- # spikes = db.select(SpCell[:idx.size])
- # db.close()
#apply synaptic delays
if synDelays is not None and idx.size > 0:
|
removed SpTimes arg to insert_synapses function call
|
py
|
diff --git a/ella/core/box.py b/ella/core/box.py
index <HASH>..<HASH> 100644
--- a/ella/core/box.py
+++ b/ella/core/box.py
@@ -1,4 +1,4 @@
-from django.template import loader, Context
+from django.template import loader
from django.utils.datastructures import MultiValueDict
BOX_INFO = 'ella.core.box.BOX_INFO'
@@ -80,7 +80,11 @@ class Box(object):
media['js'] = media['js'].union(my_media['js'])
media['css'] = media['css'].union(my_media['css'])
- return loader.render_to_string(t_list, self.get_context())
+ t = loader.select_template(t_list)
+ self._context.update(self.get_context())
+ resp = t.render(self._context)
+ self._context.pop()
+ return resp
def get_media(self):
"""
|
Minor change to boxes to allow access to full context from within a box. git-svn-id: <URL>
|
py
|
diff --git a/vyper/exceptions.py b/vyper/exceptions.py
index <HASH>..<HASH> 100644
--- a/vyper/exceptions.py
+++ b/vyper/exceptions.py
@@ -88,13 +88,17 @@ class VyperException(Exception):
msg = f"{self.message}\n"
for node in self.nodes:
- source_annotation = annotate_source_code(
- self.source_code,
- node.lineno,
- node.col_offset,
- context_lines=VYPER_ERROR_CONTEXT_LINES,
- line_numbers=VYPER_ERROR_LINE_NUMBERS,
- )
+ try:
+ source_annotation = annotate_source_code(
+ self.source_code,
+ node.lineno,
+ node.col_offset,
+ context_lines=VYPER_ERROR_CONTEXT_LINES,
+ line_numbers=VYPER_ERROR_LINE_NUMBERS,
+ )
+ except Exception:
+ # necessary for certian types of syntax exceptions
+ return msg
if isinstance(node, vy_ast.VyperNode):
fn_node = node.get_ancestor(vy_ast.FunctionDef)
@@ -116,6 +120,7 @@ class SyntaxException(VyperException):
item.lineno = lineno
item.col_offset = col_offset
item.full_source_code = source_code
+ print("got this far")
super().__init__(message, item)
|
fix: handle source that cannot be annotated during exceptions
|
py
|
diff --git a/nap/url.py b/nap/url.py
index <HASH>..<HASH> 100644
--- a/nap/url.py
+++ b/nap/url.py
@@ -138,11 +138,17 @@ class Url(object):
def _remove_leading_slash(self, text):
return text[1:] if text.startswith('/') else text
+
+ def _ensure_trailing_slash(self, text):
+ return text if text.endswith('/') else text + '/'
def _new_url(self, relative_url):
"""Create new Url which points to new url."""
return Url(
- urljoin(self._base_url, relative_url),
+ urljoin(
+ self._ensure_trailing_slash(self._base_url),
+ self._remove_leading_slash(relative_url)
+ ),
**self._default_kwargs
)
|
Fix URL joining with Python <I> Possibly something's changed with `urllib.parse`, but definitely the URL joining tests where the baseurl does _not_ end in a slash were failing on my Ubuntu <I> Python <I>. That is to say, that version works like this: ```py urljoin("<URL>) == "<URL> == "<URL> == "<URL> == "<URL>
|
py
|
diff --git a/bitshares/market.py b/bitshares/market.py
index <HASH>..<HASH> 100644
--- a/bitshares/market.py
+++ b/bitshares/market.py
@@ -77,6 +77,9 @@ class Market(dict):
return (
self["quote"]["symbol"] == quote_symbol and
self["base"]["symbol"] == base_symbol
+ ) or (
+ self["quote"]["symbol"] == base_symbol and
+ self["base"]["symbol"] == quote_symbol
)
elif isinstance(other, Market):
return (
|
[market] Allow to compare market string with market using overloaded operation
|
py
|
diff --git a/vagrant/__init__.py b/vagrant/__init__.py
index <HASH>..<HASH> 100644
--- a/vagrant/__init__.py
+++ b/vagrant/__init__.py
@@ -803,7 +803,7 @@ class Vagrant(object):
# target is the VM name
# type is the type of data, e.g. 'provider-name', 'box-version'
# data is a (possibly comma separated) type-specific value, e.g. 'virtualbox', '0'
- parsed_lines = [line.split(',', 3) for line in output.splitlines() if line.strip()]
+ parsed_lines = [line.split(',', 4) for line in output.splitlines() if line.strip()]
# vagrant 1.8 adds additional fields that aren't required,
# and will break parsing if included in the status lines.
# filter them out pending future implementation.
|
Increases max splits in machine-readable output parsing Previously was limited to 3, now bumping to 4 to accommodate for the extra fields in the "metadata" and "ui" lines introduced in Vagrant <I>. Currently the fields are filtered out, pending future implementation.
|
py
|
diff --git a/git/remote.py b/git/remote.py
index <HASH>..<HASH> 100644
--- a/git/remote.py
+++ b/git/remote.py
@@ -537,7 +537,10 @@ class Remote(LazyMixin, Iterable):
fetch_head_info = fp.readlines()
fp.close()
- assert len(fetch_info_lines) == len(fetch_head_info), "len(%s) != len(%s)" % (fetch_head_info, fetch_info_lines)
+ # NOTE: HACK Just disabling this line will make github repositories work much better.
+ # I simply couldn't stand it anymore, so here is the quick and dirty fix ... .
+ # This project needs a lot of work !
+ # assert len(fetch_info_lines) == len(fetch_head_info), "len(%s) != len(%s)" % (fetch_head_info, fetch_info_lines)
output.extend(FetchInfo._from_line(self.repo, err_line, fetch_line)
for err_line,fetch_line in zip(fetch_info_lines, fetch_head_info))
|
HACK: Removed assertion just to be a bit less annoyed by constant fail
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -12,7 +12,7 @@ from setuptools import setup, find_packages
setup(
name='quite',
- version='0.0.2',
+ version='0.0.3',
description='QT UI Extension',
url='https://github.com/sf-zhou/quite',
@@ -33,10 +33,10 @@ setup(
keywords='qt ui',
packages=find_packages(exclude=['docs', 'tests']),
- data_files=[('pyside-rcc resources', [
+ package_data={ 'quite': [
'./quite/tools/bin/pyside-rcc.exe',
'./quite/tools/bin/QtCore4.dll',
'./quite/tools/bin/QtXml4.dll'
- ])],
+ ]},
install_requires=['st', 'prett']
)
|
fixed previous error: regard execution as package data
|
py
|
diff --git a/src/analysis/galaxy_prior.py b/src/analysis/galaxy_prior.py
index <HASH>..<HASH> 100644
--- a/src/analysis/galaxy_prior.py
+++ b/src/analysis/galaxy_prior.py
@@ -125,6 +125,11 @@ class GalaxyPrior(model_mapper.AbstractPriorModel):
def priors(self):
return [prior for prior_model in self.prior_models for prior in prior_model.priors]
+ @property
+ def prior_class_dict(self):
+ return {prior: cls for prior_model in self.prior_models for prior, cls in
+ prior_model.prior_class_dict.items()}
+
def instance_for_arguments(self, arguments):
"""
Create an instance of the associated class for a set of arguments
|
prior class dict property of galaxy prior
|
py
|
diff --git a/tilequeue/query/fixture.py b/tilequeue/query/fixture.py
index <HASH>..<HASH> 100644
--- a/tilequeue/query/fixture.py
+++ b/tilequeue/query/fixture.py
@@ -124,6 +124,17 @@ class DataFetcher(object):
if zoom < 16 and (zoom + 1) <= min_zoom:
continue
+ # UGLY HACK: match the query for "max zoom" for NE places.
+ # this removes larger cities at low zooms, and smaller cities
+ # as the zoom increases and as the OSM cities start to "fade
+ # in".
+ if props.get('source') == 'naturalearthdata.com':
+ pop_max = int(props.get('pop_max', '0'))
+ if ((zoom >= 8 and zoom < 10 and pop_max > 50000) or
+ (zoom >= 10 and zoom < 11 and pop_max > 20000) or
+ (zoom >= 11 and pop_max > 5000)):
+ continue
+
# if the feature exists in any label placement layer, then we
# should consider generating a centroid
label_layers = self.label_placement_layers.get(
|
Add hack to match 'max zoom' functionality of the NE places SQL query.
|
py
|
diff --git a/animal/models.py b/animal/models.py
index <HASH>..<HASH> 100644
--- a/animal/models.py
+++ b/animal/models.py
@@ -95,7 +95,7 @@ If a eartag is present then the string reads some_strain-Eartag #some_number. If
self.Alive = False
super(Animal, self).save()
class Meta:
- ordering = ['MouseID']
+ ordering = ['Strain', 'MouseID']
class Breeding(models.Model):
"""This data model stores information about a particular breeding set
|
Modified animal/models.py to order animals first by strain then by MouseID
|
py
|
diff --git a/block_if.py b/block_if.py
index <HASH>..<HASH> 100644
--- a/block_if.py
+++ b/block_if.py
@@ -11,7 +11,7 @@ from .constants import (
class BlockIf:
RE_IF = re.compile(
- '^\s*#(if|elif)\s+@([{VAR_DOTS}]+)(\s*\(\s*(@[{VAR_DOTS}]+\s*(,\s*' +
+ '^\s*#(if|elif)\s+@([{VAR_DOTS}]+)(\s*\(\s*(@[{VAR_DOTS}]+\s*(,\s*'
'@[{VAR_DOTS}]+\s*)*)?\))?\s*:\s*$'
.format(VAR_DOTS=VAR_DOTS),
re.UNICODE)
|
Fixed wrong break in block if
|
py
|
diff --git a/sphinx_nbexamples/__init__.py b/sphinx_nbexamples/__init__.py
index <HASH>..<HASH> 100644
--- a/sphinx_nbexamples/__init__.py
+++ b/sphinx_nbexamples/__init__.py
@@ -380,7 +380,8 @@ logging.getLogger('py.warnings').setLevel(logging.ERROR)
def create_rst(self, nb, in_dir, odir):
"""Create the rst file from the notebook node"""
- raw_rst, resources = nbconvert.export_by_name('rst', nb)
+ exporter = nbconvert.RSTExporter()
+ raw_rst, resources = exporter.from_notebook_node(nb)
# remove ipython magics
rst_content = ''
i0 = 0
|
replaced export_by_name since it has been removed in <URL>
|
py
|
diff --git a/simpleyapsy/plugin_manager.py b/simpleyapsy/plugin_manager.py
index <HASH>..<HASH> 100644
--- a/simpleyapsy/plugin_manager.py
+++ b/simpleyapsy/plugin_manager.py
@@ -25,11 +25,3 @@ class PluginManager(object):
for plugin in plugins:
if plugin not in self.blacklisted_plugins:
self.plugins.append(plugin)
-
- def deactivate_all_plugins(self):
- for plugin in self.plugins:
- plugin.deactivate()
-
- def activate_all_plugins(self):
- for plugin in self.plugins:
- plugin.activate()
|
Removed activate and deactivate methods from plugin manager
|
py
|
diff --git a/uptick/wallet.py b/uptick/wallet.py
index <HASH>..<HASH> 100644
--- a/uptick/wallet.py
+++ b/uptick/wallet.py
@@ -64,14 +64,15 @@ def addkey(ctx, key):
installedKeys = ctx.bitshares.wallet.getPublicKeys()
if len(installedKeys) == 1:
name = ctx.bitshares.wallet.getAccountFromPublicKey(installedKeys[0])
- account = Account(name, bitshares_instance=ctx.bitshares)
- click.echo("=" * 30)
- click.echo("Setting new default user: %s" % account["name"])
- click.echo()
- click.echo("You can change these settings with:")
- click.echo(" uptick set default_account <account>")
- click.echo("=" * 30)
- config["default_account"] = account["name"]
+ if name: # only if a name to the key was found
+ account = Account(name, bitshares_instance=ctx.bitshares)
+ click.echo("=" * 30)
+ click.echo("Setting new default user: %s" % account["name"])
+ click.echo()
+ click.echo("You can change these settings with:")
+ click.echo(" uptick set default_account <account>")
+ click.echo("=" * 30)
+ config["default_account"] = account["name"]
@main.command()
|
[addkey] do not set a default_account if no name can be found to the key
|
py
|
diff --git a/bingo/views.py b/bingo/views.py
index <HASH>..<HASH> 100644
--- a/bingo/views.py
+++ b/bingo/views.py
@@ -258,13 +258,14 @@ def thumbnail(request, board_id, marked=False, voted=False):
game__site=get_current_site(request))
# check if the board is from an expired game
- game_expired_cachename = "game_expired__board={0:d}"
+ game_expired_cachename = "game_expired__board={0:d}".format(
+ int(bingo_board.id))
game_expired = cache.get(
- game_expired_cachename.format(int(board_id)))
+ game_expired_cachename)
if game_expired is None:
game_expired = bingo_board.game.is_expired()
cache.set(
- game_expired_cachename.format(int(board_id)),
+ game_expired_cachename,
game_expired, 60 * 60)
# when the game of the board is expired,
|
fix: use board.id instead of board.board_id for caching "game_expired" board.board_id is not unique across different sites, so bingo_board.id is used.
|
py
|
diff --git a/holoviews/core/options.py b/holoviews/core/options.py
index <HASH>..<HASH> 100644
--- a/holoviews/core/options.py
+++ b/holoviews/core/options.py
@@ -35,6 +35,7 @@ Store:
import param
from .tree import AttrTree
+from .util import valid_identifier
class OptionError(Exception):
@@ -247,6 +248,26 @@ class OptionTree(AttrTree):
return super(OptionTree, self).__getitem__(item)
+ def __getattr__(self, identifier):
+ """
+ Allows creating sub OptionTree instances using attribute
+ access, inheriting the group options.
+ """
+ try:
+ return super(AttrTree, self).__getattr__(identifier)
+ except AttributeError: pass
+
+ if identifier.startswith('_'): raise AttributeError(str(identifier))
+ elif self.fixed==True: raise AttributeError(self._fixed_error % identifier)
+ identifier = valid_identifier(identifier)
+
+ if identifier in self.children:
+ return self.__dict__[identifier]
+
+ self.__setattr__(identifier, self.groups)
+ return self[identifier]
+
+
def __setattr__(self, identifier, val):
new_groups = {}
if isinstance(val, dict):
|
Added support for multi-level OptionTree setattr
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -10,11 +10,11 @@ Intended Audience :: Developers
License :: OSI Approved :: MIT License
Programming Language :: Python :: 3.4
Topic :: Software Development :: Testing
-Development Status :: 3 - Alpha
+Development Status :: 4 - Beta
"""
setup(name="hy_coverage_plugin",
- version="0.0.4",
+ version="0.1.0",
description="coverage.py plugin for the Hy language",
long_description=readme(),
url="https://github.com/timmartin/hy-coverage",
@@ -28,4 +28,3 @@ setup(name="hy_coverage_plugin",
],
classifiers=classifiers.splitlines()
)
-
|
Updated the version number to <I>
|
py
|
diff --git a/dimod/sampleset.py b/dimod/sampleset.py
index <HASH>..<HASH> 100644
--- a/dimod/sampleset.py
+++ b/dimod/sampleset.py
@@ -14,6 +14,7 @@
#
# ================================================================================================
import itertools
+import numbers
try:
import collections.abc as abc
@@ -122,6 +123,12 @@ def as_samples(samples_like, dtype=None, copy=False, order='C'):
if isinstance(samples_like, abc.Mapping):
return as_samples(([samples_like], labels), dtype=dtype)
+ if (isinstance(samples_like, list) and samples_like and
+ isinstance(samples_like[0], numbers.Number)):
+ # this is not actually necessary but it speeds up the
+ # samples_like = [1, 0, 1,...] case significantly
+ return as_samples(([samples_like], labels), dtype=dtype)
+
if not isinstance(samples_like, np.ndarray):
if any(isinstance(sample, abc.Mapping) for sample in samples_like):
# go through samples-like, turning the dicts into lists
|
Speed up single sample list case for as_samples
|
py
|
diff --git a/bakery/tests/__init__.py b/bakery/tests/__init__.py
index <HASH>..<HASH> 100644
--- a/bakery/tests/__init__.py
+++ b/bakery/tests/__init__.py
@@ -287,8 +287,8 @@ class BakeryTest(TestCase):
# Some save overrides tests
obj = AutoMockObject.objects.all()[0]
obj.save(publish=False)
- # obj.is_published = True
- # obj.save()
+ obj.is_published = True
+ obj.save()
obj.delete(unpublish=False)
def test_static_views(self):
|
try again with the task run in the tests for #<I>
|
py
|
diff --git a/wsgiservice/application.py b/wsgiservice/application.py
index <HASH>..<HASH> 100644
--- a/wsgiservice/application.py
+++ b/wsgiservice/application.py
@@ -36,7 +36,7 @@ class Application(object):
_resources = None
#: :class:`wsgiservice.routing.Router` instance. Set by the constructor.
- _resources = None
+ _urlmap = None
def __init__(self, resources):
"""Constructor.
|
application.py: Fix initialization of the _urlmap string.
|
py
|
diff --git a/pyoko/db/queryset.py b/pyoko/db/queryset.py
index <HASH>..<HASH> 100644
--- a/pyoko/db/queryset.py
+++ b/pyoko/db/queryset.py
@@ -236,6 +236,7 @@ class QuerySet(object):
obj = self.bucket.get(model.key)
obj.data = clean_value
obj.store()
+ model.just_created = new_obj
if settings.DEBUG:
if new_obj:
sys.PYOKO_STAT_COUNTER['save'] += 1
|
added just_created property ref #<I> ref GH-<I>
|
py
|
diff --git a/pyrogram/__init__.py b/pyrogram/__init__.py
index <HASH>..<HASH> 100644
--- a/pyrogram/__init__.py
+++ b/pyrogram/__init__.py
@@ -16,7 +16,7 @@
# You should have received a copy of the GNU Lesser General Public License
# along with Pyrogram. If not, see <http://www.gnu.org/licenses/>.
-__version__ = "1.1.12"
+__version__ = "1.1.13"
__license__ = "GNU Lesser General Public License v3 or later (LGPLv3+)"
__copyright__ = "Copyright (C) 2017-2021 Dan <https://github.com/delivrance>"
|
Update Pyrogram to <I>
|
py
|
diff --git a/openquake/baselib/zeromq.py b/openquake/baselib/zeromq.py
index <HASH>..<HASH> 100644
--- a/openquake/baselib/zeromq.py
+++ b/openquake/baselib/zeromq.py
@@ -1,3 +1,4 @@
+import os
import zmq
context = zmq.Context()
@@ -19,7 +20,11 @@ class ReplySocket(object):
self.zsocket.bind(self.end_point)
with self.zsocket:
while True:
- args = self.zsocket.recv_pyobj()
+ try:
+ args = self.zsocket.recv_pyobj()
+ except (KeyboardInterrupt, zmq.error.ZMQError):
+ # sending SIGTERM raises ZMQError
+ break
if args[0] == 'stop':
self.reply((None, None, None))
break
@@ -40,3 +45,9 @@ def request(end_point, *args):
with zsocket:
zsocket.send_pyobj(args)
return zsocket.recv_pyobj()
+
+if __name__ == '__main__':
+ print('started echo server, pid=%d' % os.getpid())
+ sock = ReplySocket('tcp://127.0.0.1:9000')
+ for args in sock: # echo server for testing purposes
+ sock.reply(args)
|
Managed SIGINT, SIGTERM
|
py
|
diff --git a/src/satosa/routing.py b/src/satosa/routing.py
index <HASH>..<HASH> 100644
--- a/src/satosa/routing.py
+++ b/src/satosa/routing.py
@@ -47,7 +47,7 @@ class ModuleRouter(object):
if not frontends and not backends:
raise ValueError("Need at least one frontend and one backend")
- self.frontends = {name: {"instance": instance, "endpoints": instance.register_endpoints(backends.keys())}
+ self.frontends = {name: {"instance": instance, "endpoints": instance.register_endpoints(list(backends.keys()))}
for name, instance in frontends.items()}
self.backends = {name: {"instance": instance, "endpoints": instance.register_endpoints()}
for name, instance in backends.items()}
|
Correct interface of FrontendModule.register_endpoints. Python 3 dict_keys is not indexable, so ensure a list is passed to avoid an exception.
|
py
|
diff --git a/GPy/testing/misc_tests.py b/GPy/testing/misc_tests.py
index <HASH>..<HASH> 100644
--- a/GPy/testing/misc_tests.py
+++ b/GPy/testing/misc_tests.py
@@ -1,6 +1,7 @@
import numpy as np
import scipy as sp
import GPy
+import warnings
class MiscTests(np.testing.TestCase):
"""
@@ -11,8 +12,12 @@ class MiscTests(np.testing.TestCase):
self._lim_val_exp = np.log(self._lim_val)
def test_safe_exp_upper(self):
- assert np.exp(self._lim_val_exp + 1) == np.inf
- assert GPy.util.misc.safe_exp(self._lim_val_exp + 1) < np.inf
+ with warnings.catch_warnings(record=True) as w:
+ assert np.isfinite(np.exp(self._lim_val_exp))
+ assert np.isinf(np.exp(self._lim_val_exp + 1))
+ assert np.isfinite(GPy.util.misc.safe_exp(self._lim_val_exp + 1))
+
+ assert len(w)==1 # should have one overflow warning
def test_safe_exp_lower(self):
assert GPy.util.misc.safe_exp(1e-10) < np.inf
|
caught warnings in misc_tests
|
py
|
diff --git a/squad/run/worker.py b/squad/run/worker.py
index <HASH>..<HASH> 100644
--- a/squad/run/worker.py
+++ b/squad/run/worker.py
@@ -1,15 +1,17 @@
+from squad.settings import CELERY_TASK_ROUTES
import os
import sys
def main():
+ queues = set([conf['queue'] for _, conf in CELERY_TASK_ROUTES.items()])
argv = [
sys.executable, '-m', 'celery',
# default celery args:
'-A', 'squad',
'worker',
+ '--queues=celery,' + ','.join(queues),
'--concurrency=1',
- '--queues=celery,reporting_queue',
'--max-tasks-per-child=5000',
'--max-memory-per-child=1500000',
'--loglevel=INFO'
|
squad.run.worker: listen on all configured queues by default This way one does not need to explicitly pass the queue names in the command line
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -19,15 +19,15 @@ except Exception as e:
tests_require = [
'coverage==4.2',
- 'flake8==2.5.4',
+ 'flake8==3.2.0',
'hypothesis==3.6.0',
'hypothesis-pytest==0.19.0',
'py==1.4.31',
- 'pydocstyle==1.0.0',
+ 'pydocstyle==1.1.1',
'pytest==3.0.4',
'pytest-benchmark==3.0.0',
- 'pytest-cov==2.2.1',
- 'Sphinx==1.4.4',
+ 'pytest-cov==2.4.0',
+ 'Sphinx==1.4.8',
]
setup(
|
upgrade to latest flake8, pydocstyle, pytest-cov, and sphinx dependencies
|
py
|
diff --git a/PySimpleGUIWeb/PySimpleGUIWeb.py b/PySimpleGUIWeb/PySimpleGUIWeb.py
index <HASH>..<HASH> 100644
--- a/PySimpleGUIWeb/PySimpleGUIWeb.py
+++ b/PySimpleGUIWeb/PySimpleGUIWeb.py
@@ -1305,6 +1305,7 @@ class Output(Element):
def Update(self, value=None, disabled=None, append=False, background_color=None, text_color=None, font=None, visible=None):
if value is not None and not append:
self.Widget.set_value(str(value))
+ self.CurrentValue = str(value)
elif value is not None and append:
self.CurrentValue = self.CurrentValue + '\n' + str(value)
self.Widget.set_value(self.CurrentValue)
|
Fixed clearing of Output Element using Update method. Wasn't setting "CurrentValue" variable, only the widget
|
py
|
diff --git a/examples/window/pyglet/window.py b/examples/window/pyglet/window.py
index <HASH>..<HASH> 100644
--- a/examples/window/pyglet/window.py
+++ b/examples/window/pyglet/window.py
@@ -41,6 +41,13 @@ class Window(BaseWindow):
config.sample_buffers = 1 if self.samples > 1 else 0
config.samples = self.samples
+ # Obtain the default destop screen's resolution
+ if self.fullscreen:
+ platform = pyglet.window.get_platform()
+ display = platform.get_default_display()
+ screen = display.get_default_screen()
+ self.width, self.height = screen.width, screen.height
+
# Create window wrapper
self.window = PygletWrapper(
width=self.width, height=self.height,
|
Pyglet window: Use the current desktop resolution in fullscreen mode This will make the pyglet window a lot more pleasant to work with in fullscreen. On modern monitors using HDMI the resolution change can drive you crazy over time.
|
py
|
diff --git a/ansi2html/converter.py b/ansi2html/converter.py
index <HASH>..<HASH> 100755
--- a/ansi2html/converter.py
+++ b/ansi2html/converter.py
@@ -128,7 +128,8 @@ class _State(object):
css_class = 'ansi%d' % value
output.append(css_class)
- def append_color_unless_default(output, (value, parameter), default, negative, neg_css_class):
+ def append_color_unless_default(output, color, default, negative, neg_css_class):
+ value, parameter = color
if value != default:
prefix = 'inv' if negative else 'ansi'
css_class_index = str(value) \
|
Tweak for py3 support.
|
py
|
diff --git a/proso_models/models.py b/proso_models/models.py
index <HASH>..<HASH> 100644
--- a/proso_models/models.py
+++ b/proso_models/models.py
@@ -519,6 +519,7 @@ class ItemManager(models.Manager):
dict: identifier -> item id
"""
result = {}
+ identifiers = set(identifiers)
item_types = ItemType.objects.get_all_types()
for item_type_id, type_identifiers in proso.list.group_by(identifiers, by=lambda identifier: self.get_item_type_id_from_identifier(identifier, item_types)).items():
to_find = {}
|
drop duplicated identifiers issues: #<I>
|
py
|
diff --git a/src/hamster/overview.py b/src/hamster/overview.py
index <HASH>..<HASH> 100755
--- a/src/hamster/overview.py
+++ b/src/hamster/overview.py
@@ -225,13 +225,15 @@ class HorizontalBarChart(graphics.Sprite):
self.layout.set_markup(markup)
label_w, label_h = self.layout.get_pixel_size()
+ bar_start_x = 150 # pixels
+ margin = 10 # pixels
y = int(i * label_h * 1.5)
- g.move_to(100 - label_w, y)
+ g.move_to(bar_start_x - margin - label_w, y)
pangocairo.show_layout(context, self.layout)
- w = (self.alloc_w - 110) * value.total_seconds() / self._max.total_seconds()
+ w = (self.alloc_w - bar_start_x) * value.total_seconds() / self._max.total_seconds()
w = max(1, int(round(w)))
- g.rectangle(110, y, int(w), int(label_h))
+ g.rectangle(bar_start_x, y, int(w), int(label_h))
g.fill("#999")
g.restore_context()
|
reduce bar size => more space for labels
|
py
|
diff --git a/dharma/core/dharma.py b/dharma/core/dharma.py
index <HASH>..<HASH> 100644
--- a/dharma/core/dharma.py
+++ b/dharma/core/dharma.py
@@ -22,7 +22,7 @@ class GenState:
class String:
- """Generator class basic strings which need no further evaluation."""
+ """Generator class for basic strings which need no further evaluation."""
def __init__(self, value, parent):
self.parent = parent
|
Update dharma.py small typo
|
py
|
diff --git a/src/python/setup.py b/src/python/setup.py
index <HASH>..<HASH> 100644
--- a/src/python/setup.py
+++ b/src/python/setup.py
@@ -22,7 +22,12 @@ setup(
platforms=['any'],
# Our modules to package
- packages=find_packages(exclude=["*.test", "*.test.*", "test.*", "test"]),
+ packages=find_packages(exclude=['*.test', '*.test.*', 'test.*', 'test']),
+
+ # Essential dependencies
+ install_requires=[
+ 'future >= 0.16.0'
+ ],
# Project classification:
# https://pypi.python.org/pypi?%3Aaction=list_classifiers
@@ -40,8 +45,6 @@ setup(
# Root of the test suite
test_suite = 'test',
- install_requires=[],
-
zip_safe=False,
)
|
- Setup now has future as a dependent module
|
py
|
diff --git a/dallinger/docker/tools.py b/dallinger/docker/tools.py
index <HASH>..<HASH> 100644
--- a/dallinger/docker/tools.py
+++ b/dallinger/docker/tools.py
@@ -113,7 +113,7 @@ class DockerComposeWrapper(object):
def start(self):
self.copy_docker_compse_files()
env = {"DOCKER_BUILDKIT": "1"}
- build_arg = ""
+ build_arg = "--progress=plain"
if self.needs_chrome:
build_arg = (
"--build-arg DALLINGER_DOCKER_IMAGE=dallingerimages/dallinger-bot"
|
Use plain progress indicator when building experiment docker images
|
py
|
diff --git a/python/setup.py b/python/setup.py
index <HASH>..<HASH> 100644
--- a/python/setup.py
+++ b/python/setup.py
@@ -180,7 +180,6 @@ requires = [
"packaging",
"pytest",
"pyyaml",
- "jsonschema",
"redis>=3.3.2",
# NOTE: Don't upgrade the version of six! Doing so causes installation
# problems. See https://github.com/ray-project/ray/issues/4169.
|
Remove duplicate jsonschema from setup.py (#<I>)
|
py
|
diff --git a/simple_history/tests/tests.py b/simple_history/tests/tests.py
index <HASH>..<HASH> 100644
--- a/simple_history/tests/tests.py
+++ b/simple_history/tests/tests.py
@@ -330,6 +330,8 @@ class AdminSiteTest(WebTest):
return form.submit()
def test_history_list(self):
+ if VERSION >= (1, 5):
+ self.assertEqual(self.user._meta.module_name, 'customuser')
self.login()
poll = Poll(question="why?", pub_date=today)
poll._history_user = self.user
|
Ensure custom user model is used if supported
|
py
|
diff --git a/pyemma/thermo/api.py b/pyemma/thermo/api.py
index <HASH>..<HASH> 100644
--- a/pyemma/thermo/api.py
+++ b/pyemma/thermo/api.py
@@ -328,7 +328,7 @@ def tram(
assert len(ttraj) == len(dtraj)
assert len(ttraj) == btraj.shape[0]
# check lag time(s)
- lags = _np.asarray(lag, dtype=_np.intc).tolist()
+ lags = _np.asarray(lag, dtype=_np.intc).reshape((-1,)).tolist()
# build TRAM and run estimation
from pyemma.thermo import TRAM as _TRAM
tram_estimators = [
@@ -426,7 +426,7 @@ def dtram(
for ttraj, dtraj in zip(ttrajs, dtrajs):
assert len(ttraj) == len(dtraj)
# check lag time(s)
- lags = _np.asarray(lag, dtype=_np.intc).tolist()
+ lags = _np.asarray(lag, dtype=_np.intc).reshape((-1,)).tolist()
# build DTRAM and run estimation
from pyemma.thermo import DTRAM
dtram_estimators = [
|
[THERMO] bugfix in dtram() + tram() API functions
|
py
|
diff --git a/pyphi/models/fmt.py b/pyphi/models/fmt.py
index <HASH>..<HASH> 100644
--- a/pyphi/models/fmt.py
+++ b/pyphi/models/fmt.py
@@ -303,8 +303,8 @@ def fmt_concept(concept):
return ''
return box(indent(fmt_mip(x.mip, verbose=False), amount=1))
- cause = header('Cause', fmt_cause_or_effect(concept.cause))
- effect = header('Effect', fmt_cause_or_effect(concept.effect))
+ cause = header('Maximally-irreducible cause', fmt_cause_or_effect(concept.cause))
+ effect = header('Maximally-irreducible effect', fmt_cause_or_effect(concept.effect))
ce = side_by_side(cause, effect)
mechanism = fmt_mechanism(concept.mechanism, concept.subsystem)
|
Print MIC and MIE with full names Update the header of the MICE objects to read “Maximally-irreducible cause” and “Maximally-irreducible effect” instead of “Cause” and “Effect”.
|
py
|
diff --git a/salt/grains/core.py b/salt/grains/core.py
index <HASH>..<HASH> 100644
--- a/salt/grains/core.py
+++ b/salt/grains/core.py
@@ -1503,9 +1503,11 @@ def id_():
_REPLACE_LINUX_RE = re.compile(r"\W(?:gnu/)?linux", re.IGNORECASE)
# This maps (at most) the first ten characters (no spaces, lowercased) of
-# 'osfullname' to the 'os' grain that Salt traditionally uses.
-# Please see os_data() and _supported_dists.
-# If your system is not detecting properly it likely needs an entry here.
+# 'osfullname' to the 'os' grain that Salt traditionally uses, and is used by
+# the os_data() function to create the "os" grain.
+#
+# If your system is not detecting the "os" grain properly, it likely needs an
+# entry in this dictionary.
_OS_NAME_MAP = {
"redhatente": "RedHat",
"gentoobase": "Gentoo",
|
Remove comment referencing _supported_dists from salt/grains/core.py As this has been removed in favor of `distro.linux_distribution()`, the comment is no longer accurate.
|
py
|
diff --git a/pyciss/pipeline.py b/pyciss/pipeline.py
index <HASH>..<HASH> 100644
--- a/pyciss/pipeline.py
+++ b/pyciss/pipeline.py
@@ -1,21 +1,26 @@
+""" Note that the calibration starts from the LBL files, not the IMG !!! """
from __future__ import division, print_function
-from pysis.isis import ciss2isis, cisscal, spiceinit, ringscam2map, getkey,\
- editlab, dstripe, isis2std
-from pysis.util import file_variations
-from pysis import IsisPool
+
+import os
+from os.path import join as pjoin
+
import gdal
import numpy as np
-from os.path import join as pjoin
-import os
from pyciss import plotting
from pyciss.io import dataroot
-from . import io
+from pysis import IsisPool
+from pysis.isis import (ciss2isis, cisscal, dstripe, editlab, getkey, isis2std,
+ ringscam2map, spiceinit)
+from pysis.util import file_variations
+from . import io
ISISDATA = os.environ['ISIS3DATA']
def calibrate_ciss(img_name, name_only=False):
+
+ img_name = str(img_name)
(cub_name,
cal_name,
dst_name,
|
Clean up imports. Cast img_name to string.
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -26,7 +26,7 @@ install_requires = [
test_requires = install_requires
setup(name='dictalchemy',
- version='0.1a4',
+ version='0.1b1',
description="Contains asdict and fromdict methods for SQL-Alchemy "
"declarative models",
long_description=__doc__,
@@ -35,9 +35,15 @@ setup(name='dictalchemy',
author='Daniel Holmstrom',
author_email='holmstrom.daniel@gmail.com',
platforms='any',
- classifiers=['Development Status :: 2 - Pre-Alpha',
+ classifiers=['Development Status :: 4 - Beta',
+ 'License :: OSI Approved :: MIT License'
+ 'Environment :: Web Environment',
'Intended Audience :: Developers',
- 'License :: OSI Approved :: MIT License'],
+ 'Operating System :: OS Independent',
+ 'Programming Language :: Python',
+ 'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
+ 'Topic :: Software Development :: '
+ 'Libraries :: Python Modules'],
packages=find_packages(),
include_package_data=True,
zip_safe=False,
|
Changed status to beta, added classifiers
|
py
|
diff --git a/analyzers/LastInfoSec/LastInfoSec.py b/analyzers/LastInfoSec/LastInfoSec.py
index <HASH>..<HASH> 100755
--- a/analyzers/LastInfoSec/LastInfoSec.py
+++ b/analyzers/LastInfoSec/LastInfoSec.py
@@ -4,7 +4,7 @@ from cortexutils.analyzer import Analyzer
import requests
-class LIS_GetReport(Analyzer):
+class LastInfoSec(Analyzer):
def __init__(self):
Analyzer.__init__(self)
self.api_key = self.get_param(
@@ -117,4 +117,4 @@ class LIS_GetReport(Analyzer):
if __name__ == "__main__":
- LIS_GetReport().run()
+ LastInfoSec().run()
|
rename LIS_GetReport to LastInfoSec
|
py
|
diff --git a/hearthstone/hslog/parser.py b/hearthstone/hslog/parser.py
index <HASH>..<HASH> 100644
--- a/hearthstone/hslog/parser.py
+++ b/hearthstone/hslog/parser.py
@@ -212,6 +212,7 @@ class LogWatcher(LogBroadcastMixin):
def action_start(self, ts, entity, type, index, target):
entity = self.parse_entity(entity)
type = parse_enum(enums.PowSubType, type)
+ target = self.parse_entity(target)
action = Action(entity, type, index, target)
action.parent = self.current_action
if self.current_action:
|
hslog: Parse the action target as an entity
|
py
|
diff --git a/spyder/widgets/sourcecode/base.py b/spyder/widgets/sourcecode/base.py
index <HASH>..<HASH> 100644
--- a/spyder/widgets/sourcecode/base.py
+++ b/spyder/widgets/sourcecode/base.py
@@ -319,15 +319,6 @@ class TextEditBaseWidget(QPlainTextEdit, BaseEditMixin):
#------Extra selections
- def extra_selection_length(self, key):
- selection = self.get_extra_selections(key)
- if selection:
- cursor = self.extra_selections_dict[key][0].cursor
- selection_length = cursor.selectionEnd() - cursor.selectionStart()
- return selection_length
- else:
- return 0
-
def get_extra_selections(self, key):
"""Return editor extra selections.
|
Remove no longer used TextEditBaseWidget.extra_selection_length method. (this logic was moved to DecorationManager._order_decorations)
|
py
|
diff --git a/tests/functional/test_subunit_output.py b/tests/functional/test_subunit_output.py
index <HASH>..<HASH> 100644
--- a/tests/functional/test_subunit_output.py
+++ b/tests/functional/test_subunit_output.py
@@ -33,8 +33,6 @@ class Includes(object):
self.d = d
def __eq__(self, a):
- # for k, v in self.d.iteritems():
- # assert_equal(v, a[k])
return all((v == a[k] for k, v in self.d.iteritems()))
def __repr__(self):
|
Remove debugging comment that is no longer needed
|
py
|
diff --git a/src/ai/backend/common/types.py b/src/ai/backend/common/types.py
index <HASH>..<HASH> 100644
--- a/src/ai/backend/common/types.py
+++ b/src/ai/backend/common/types.py
@@ -5,6 +5,8 @@ from typing import Hashable, Mapping, Iterable, Sequence, Set, NewType, Tuple, U
import attr
+from . import etcd
+
DeviceId = NewType('DeviceId', Hashable)
@@ -181,7 +183,7 @@ class ImageRef:
raise ValueError('Invalid image tag')
self._update_tag_set()
- async def resolve(self, etcd: 'ai.backend.common.etcd.AsyncEtcd'):
+ async def resolve(self, etcd: 'etcd.AsyncEtcd'):
'''
Resolve the tag using etcd so that the current instance indicates
a concrete, latest image.
|
ci, types: Avoid potential circular ref but also fix undefined name error * The types module would be referenced from other common modules in the future as expanded. * GvR in PEP-<I> suggests to use module imports to resolve such cases. ref) <URL>
|
py
|
diff --git a/neo/SmartContract/ContractParameterContext.py b/neo/SmartContract/ContractParameterContext.py
index <HASH>..<HASH> 100755
--- a/neo/SmartContract/ContractParameterContext.py
+++ b/neo/SmartContract/ContractParameterContext.py
@@ -22,11 +22,16 @@ class ContractParamater():
Value = None
def __init__(self, type):
- self.Type = type
+ if isinstance(type, ContractParameterType):
+ self.Type = type
+ elif isinstance(type, int):
+ self.Type = ContractParameterType(type)
+ else:
+ raise Exception("Invalid Contract Parameter Type %s. Must be ContractParameterType or int" % type)
def ToJson(self):
jsn = {}
- jsn['type'] = ToName(self.Type)
+ jsn['type'] = self.Type.name
return jsn
|
Bugfix for neo/SmartContract/ContractParameterContext Based on code by @localhuman
|
py
|
diff --git a/salt/modules/dockerng.py b/salt/modules/dockerng.py
index <HASH>..<HASH> 100644
--- a/salt/modules/dockerng.py
+++ b/salt/modules/dockerng.py
@@ -5802,4 +5802,3 @@ def sls_build(name, base='fedora', mods=None, saltenv='base',
__salt__['dockerng.stop'](id_)
return __salt__['dockerng.commit'](id_, name)
-
|
Quick lint of dockerng
|
py
|
diff --git a/enoslib/service/monitoring/monitoring.py b/enoslib/service/monitoring/monitoring.py
index <HASH>..<HASH> 100644
--- a/enoslib/service/monitoring/monitoring.py
+++ b/enoslib/service/monitoring/monitoring.py
@@ -118,7 +118,7 @@ class Monitoring(Service):
volumes = [
"/telegraf.conf:/etc/telegraf/telegraf.conf",
- "sys:/rootfs/sys:ro",
+ "/sys:/rootfs/sys:ro",
"/proc:/rootfs/proc:ro",
"/var/run/docker.sock:/var/run/docker.sock:ro",
]
|
service/monitoring: fix typo
|
py
|
diff --git a/fabfile.py b/fabfile.py
index <HASH>..<HASH> 100644
--- a/fabfile.py
+++ b/fabfile.py
@@ -5,7 +5,7 @@
# license that can be found in the LICENSE file.
import os
-from fabric.api import abort, cd, env, local, put, run
+from fabric.api import abort, cd, env, local, put, run, sudo
current_dir = os.path.abspath(os.path.dirname(__file__))
env.user = 'ubuntu'
@@ -48,3 +48,16 @@ def deploy(flags="", tags=""):
send()
restart()
clean()
+
+
+def deploy_hooks(path, user="git", group="git"):
+ run("mkdir -p /tmp/git-hooks")
+ put("misc/git-hooks/*", "/tmp/git-hooks")
+ sudo("chown -R %s:%s /tmp/git-hooks" % (user, group))
+ sudo("chmod 755 /tmp/git-hooks/*")
+ out = run("find %s -name \*.git -type d" % path)
+ paths = [p.strip() for p in out.split("\n")]
+ for path in paths:
+ sudo("cp -p /tmp/git-hooks/* %s/hooks" % path)
+ sudo("rm /tmp/git-hooks/*")
+ sudo("rmdir /tmp/git-hooks")
|
fabfile: added command to deploy hooks
|
py
|
diff --git a/pysmi/codegen/pysnmp.py b/pysmi/codegen/pysnmp.py
index <HASH>..<HASH> 100644
--- a/pysmi/codegen/pysnmp.py
+++ b/pysmi/codegen/pysnmp.py
@@ -75,8 +75,8 @@ class PySnmpCodeGen(AbstractCodeGen):
# - or import base ASN.1 types from implementation-specific MIBs
fakeMibs = ('ASN1',
'ASN1-ENUMERATION',
- 'ASN1-REFINEMENT',
- 'SNMP-FRAMEWORK-MIB',
+ 'ASN1-REFINEMENT')
+ baseMibs = ('SNMP-FRAMEWORK-MIB',
'SNMP-TARGET-MIB',
'TRANSPORT-ADDRESS-MIB') + AbstractCodeGen.baseMibs
|
fix to fake MIBs classifier
|
py
|
diff --git a/tethne/readers/zotero.py b/tethne/readers/zotero.py
index <HASH>..<HASH> 100644
--- a/tethne/readers/zotero.py
+++ b/tethne/readers/zotero.py
@@ -386,7 +386,7 @@ class ZoteroParser(RDFParser):
self.full_text[fset_name][ident] = structuredfeature
-def read(path, corpus=True, index_by='uri', follow_links=True, **kwargs):
+def read(path, corpus=True, index_by='uri', follow_links=False, **kwargs):
"""
Read bibliographic data from Zotero RDF.
@@ -420,7 +420,7 @@ def read(path, corpus=True, index_by='uri', follow_links=True, **kwargs):
title and author names.
follow_links : bool
If ``True``, attempts to load full-text content from attached files
- (e.g. PDFs with embedded text).
+ (e.g. PDFs with embedded text). Default: False.
kwargs : kwargs
Passed to the :class:`.Corpus` constructor.
|
in Zotero reader, follow_links=False by default
|
py
|
diff --git a/octodns/source/tinydns.py b/octodns/source/tinydns.py
index <HASH>..<HASH> 100755
--- a/octodns/source/tinydns.py
+++ b/octodns/source/tinydns.py
@@ -144,7 +144,7 @@ class TinyDnsBaseSource(BaseSource):
'3': 'AAAA',
'6': 'AAAA',
}
- name_re = re.compile(r'((?P<name>.+)\.)?{}$'.format(zone.name[:-1]))
+ name_re = re.compile(fr'((?P<name>.+)\.)?{zone.name[:-1]}$')
data = defaultdict(lambda: defaultdict(list))
for line in self._lines():
@@ -180,7 +180,7 @@ class TinyDnsBaseSource(BaseSource):
'record=%s', record)
def _populate_in_addr_arpa(self, zone, lenient):
- name_re = re.compile(r'(?P<name>.+)\.{}$'.format(zone.name[:-1]))
+ name_re = re.compile(fr'(?P<name>.+)\.{zone.name[:-1]}$')
for line in self._lines():
_type = line[0]
|
f-string some regexes in sources
|
py
|
diff --git a/twarc/decorators2.py b/twarc/decorators2.py
index <HASH>..<HASH> 100644
--- a/twarc/decorators2.py
+++ b/twarc/decorators2.py
@@ -202,9 +202,10 @@ class FileSizeProgressBar(tqdm):
self, result, field="id", error_resource_type=None, error_parameter="ids"
):
try:
- for item in result["data"]:
- # Use the length of the id / name and a newline to match original file
- self.update(len(item[field]) + len("\n"))
+ if "data" in result:
+ for item in result["data"]:
+ # Use the length of the id / name and a newline to match original file
+ self.update(len(item[field]) + len("\n"))
if error_resource_type and "errors" in result:
for error in result["errors"]:
# Account for deleted data
|
fix FileSizeProgressBar when there's no data in response
|
py
|
diff --git a/mobly/controllers/android_device_lib/snippet_client.py b/mobly/controllers/android_device_lib/snippet_client.py
index <HASH>..<HASH> 100644
--- a/mobly/controllers/android_device_lib/snippet_client.py
+++ b/mobly/controllers/android_device_lib/snippet_client.py
@@ -96,7 +96,9 @@ class SnippetClient(jsonrpc_client_base.JsonRpcClientBase):
line = self._read_protocol_line()
# Forward the device port to a new host port, and connect to that port
self.host_port = utils.get_available_host_port()
- if line == 'INSTRUMENTATION_RESULT: shortMsg=Process crashed.':
+ if line in ('INSTRUMENTATION_RESULT: shortMsg=Process crashed.',
+ 'INSTRUMENTATION_RESULT: shortMsg='
+ 'java.lang.IllegalArgumentException'):
self.log.warning('Snippet %s crashed on startup. This might be an '
'actual error or a snippet using deprecated v0 '
'start protocol. Retrying as a v0 snippet.',
|
Add missing protocol line patterns to start_app_and_connect() (#<I>)
|
py
|
diff --git a/tacacs_plus/packet.py b/tacacs_plus/packet.py
index <HASH>..<HASH> 100644
--- a/tacacs_plus/packet.py
+++ b/tacacs_plus/packet.py
@@ -157,12 +157,18 @@ class TACACSHeader(object):
# B = unsigned char
# !I = network-order (big-endian) unsigned int
raw = six.BytesIO(raw)
- version, type, seq_no, flags = struct.unpack(
- 'BBBB',
- raw.read(4)
- )
- session_id, length = struct.unpack('!II', raw.read(8))
- return cls(version, type, session_id, length, seq_no, flags)
+ raw_chars = raw.read(4)
+ if raw_chars:
+ version, type, seq_no, flags = struct.unpack(
+ 'BBBB',
+ raw_chars
+ )
+ session_id, length = struct.unpack('!II', raw.read(8))
+ return cls(version, type, session_id, length, seq_no, flags)
+ else:
+ raise ValueError(
+ "Unable to extract data from header. Likely the TACACS+ key does not match between server and client"
+ )
def __str__(self):
return ', '.join([
|
Fix #<I>. Packet header becomes not analyzable if the client key missmatch the server's one
|
py
|
diff --git a/instaloader/structures.py b/instaloader/structures.py
index <HASH>..<HASH> 100644
--- a/instaloader/structures.py
+++ b/instaloader/structures.py
@@ -655,6 +655,14 @@ class Profile:
return self._metadata('external_url')
@property
+ def is_business_account(self) -> bool:
+ return self._metadata('is_business_account')
+
+ @property
+ def business_category_name(self) -> str:
+ return self._metadata('business_category_name')
+
+ @property
def biography(self) -> str:
return self._metadata('biography')
|
[Issue #<I>] Add is_business_account and business_category_name properties to Profile structure
|
py
|
diff --git a/bluetooth/bluez.py b/bluetooth/bluez.py
index <HASH>..<HASH> 100644
--- a/bluetooth/bluez.py
+++ b/bluetooth/bluez.py
@@ -1,23 +1,15 @@
+import array
+import fcntl
import sys
import struct
from errno import (EADDRINUSE, EBUSY, EINVAL)
-if sys.version_info.major < 3:
- from .btcommon import *
- import _bluetooth as _bt
- get_byte = ord
-else:
- from bluetooth.btcommon import *
- import bluetooth._bluetooth as _bt
- get_byte = int
-import array
-import fcntl
-_constants = [ 'HCI', 'RFCOMM', 'L2CAP', 'SCO', 'SOL_L2CAP', 'SOL_RFCOMM',\
- 'L2CAP_OPTIONS' ]
-for _c in _constants:
- command_ = "{C} = _bt.{C1}".format(C=_c, C1=_c)
- exec(command_)
-del _constants
+from bluetooth.btcommon import *
+import bluetooth._bluetooth as _bt
+from bluetooth._bluetooth import HCI, RFCOMM, L2CAP, SCO, SOL_L2CAP, \
+ SOL_RFCOMM, L2CAP_OPTIONS
+
+get_byte = ord if sys.version_info.major < 3 else int
# ============== SDP service registration and unregistration ============
|
bluez.py: simplify imports between Python 2 & 3
|
py
|
diff --git a/subprocess2/__init__.py b/subprocess2/__init__.py
index <HASH>..<HASH> 100644
--- a/subprocess2/__init__.py
+++ b/subprocess2/__init__.py
@@ -156,7 +156,7 @@ def runInBackground(self, pollInterval=.1, encoding=False):
The object returned is a "BackgroundTaskInfo" object, and represents the state of the process. It is updated automatically as the program runs,
and if stdout or stderr are streams, they are automatically read from and populated into this object.
- @see BackgroundTaskInfo for more info or https://htmlpreview.github.io/?https://raw.githubusercontent.com/kata198/python-subprocess2/master/doc/subprocess2.BackgroundTask.html
+ @see BackgroundTaskInfo for more info or http://pythonhosted.org/python-subprocess2/subprocess2.BackgroundTask.html
@param pollInterval - Amount of idle time between polling
@param encoding - Default False. If provided, data will be decoded using the value of this field as the codec name (e.x. "utf-8"). Otherwise, data will be stored as bytes.
|
Update url to pythonhosted.org
|
py
|
diff --git a/mockito_test/empty_mocks_test.py b/mockito_test/empty_mocks_test.py
index <HASH>..<HASH> 100644
--- a/mockito_test/empty_mocks_test.py
+++ b/mockito_test/empty_mocks_test.py
@@ -29,3 +29,16 @@ class TestEmptyMocks:
verify(dummy).__call__(1, 2)
+
+class Action(object):
+ def __call__(self, task):
+ return task
+
+
+class TestAction:
+ def testA(self):
+ when(Action).__call__(Ellipsis).thenReturn('Done')
+
+ action = Action()
+ assert action('work') == 'Done'
+
|
Ensure we can stub __call__`s
|
py
|
diff --git a/script/upload.py b/script/upload.py
index <HASH>..<HASH> 100755
--- a/script/upload.py
+++ b/script/upload.py
@@ -93,9 +93,7 @@ def main():
upload_electron(github, release, os.path.join(DIST_DIR, mksnapshot),
args.upload_to_s3)
- # TODO: make s3 compatible
-
- if PLATFORM == 'win32' and not tag_exists:
+ if PLATFORM == 'win32' and not tag_exists and not args.upload_to_s3:
# Upload PDBs to Windows symbol server.
run_python_script('upload-windows-pdb.py')
|
skip headers and pdbs for fake release builds
|
py
|
diff --git a/furious/_pkg_meta.py b/furious/_pkg_meta.py
index <HASH>..<HASH> 100644
--- a/furious/_pkg_meta.py
+++ b/furious/_pkg_meta.py
@@ -1,2 +1,2 @@
-version_info = (0, 9, 5)
+version_info = (1, 0, 0)
version = '.'.join(map(str, version_info))
|
Update version to <I> Update the Furious version to <I> as a preparation for the <I> release.
|
py
|
diff --git a/lenstronomy/ImSim/MultiBand/multi_data_base.py b/lenstronomy/ImSim/MultiBand/multi_data_base.py
index <HASH>..<HASH> 100644
--- a/lenstronomy/ImSim/MultiBand/multi_data_base.py
+++ b/lenstronomy/ImSim/MultiBand/multi_data_base.py
@@ -17,10 +17,10 @@ class MultiDataBase(object):
self._num_response_list = []
for imageModel in imageModel_list:
self._num_response_list.append(imageModel.num_data_evaluate)
- #self.LensModel = self._imageModel_list[0].LensModel
- #self.SourceModel = self._imageModel_list[0].SourceModel
- #self.LensLightModel = self._imageModel_list[0].LensLightModel
- #self.PointSource = self._imageModel_list[0].PointSource
+ self.LensModel = self._imageModel_list[0].LensModel
+ self.SourceModel = self._imageModel_list[0].SourceModel
+ self.LensLightModel = self._imageModel_list[0].LensLightModel
+ self.PointSource = self._imageModel_list[0].PointSource
@property
def num_bands(self):
|
base functions available through MultiDataBase
|
py
|
diff --git a/tests/python/pants_test/test_utf8_header.py b/tests/python/pants_test/test_utf8_header.py
index <HASH>..<HASH> 100644
--- a/tests/python/pants_test/test_utf8_header.py
+++ b/tests/python/pants_test/test_utf8_header.py
@@ -6,13 +6,7 @@ from __future__ import (nested_scopes, generators, division, absolute_import, wi
print_function, unicode_literals)
import os
-
-from twitter.common.lang import Compatibility
-
-if Compatibility.PY3:
- import unittest
-else:
- import unittest2 as unittest
+import unittest2 as unittest
from pants.base.build_environment import get_buildroot
@@ -39,4 +33,5 @@ class Utf8HeaderTest(unittest.TestCase):
nonconforming_files.append(path)
if len(nonconforming_files) > 0:
- self.fail('Expected these files to contain first line "# coding=utf8": ' + str(nonconforming_files))
+ self.fail('Expected these files to contain first line "# coding=utf8": '
+ + str(nonconforming_files))
|
(Accidentally omitted last change from review) Added a test to show that all files have 'coding=utf8' on them This is a followon to <URL>
|
py
|
diff --git a/DataPanel.py b/DataPanel.py
index <HASH>..<HASH> 100644
--- a/DataPanel.py
+++ b/DataPanel.py
@@ -169,7 +169,7 @@ class DataPanel(Panel.Panel):
self.__update_item_count(container)
def item_key_press(self, text, modifiers, index, parent_row, parent_id):
- if len(text) == 1 and ord(text[0]) == 127:
+ if len(text) == 1 and (ord(text[0]) == 127 or ord(text[0]) == 8):
data_group = self.itemValue("data_group", None, self.itemId(index, parent_id))
if data_group:
parent_item = self.itemFromId(self._parent_id)
@@ -396,7 +396,7 @@ class DataPanel(Panel.Panel):
def itemKeyPress(self, index, text, raw_modifiers):
data_item = self.__get_data_items_flat()[index] if index >= 0 else None
if data_item:
- if len(text) == 1 and ord(text[0]) == 127:
+ if len(text) == 1 and (ord(text[0]) == 127 or ord(text[0]) == 8):
container = self.__get_data_item_container(self.data_group, data_item)
assert data_item in container.data_items
container.data_items.remove(data_item)
|
More robust delete key handling. svn r<I>
|
py
|
diff --git a/core.py b/core.py
index <HASH>..<HASH> 100644
--- a/core.py
+++ b/core.py
@@ -260,12 +260,13 @@ class Orchestrator(Module):
continue
self.playing_media = media
self.satisfied_request = req
- self.history.record(self.playing_media,
- self.satisfied_request,
- datetime.datetime.now())
+ startTime = datetime.datetime.now()
self.on_playing_changed()
self.player.play(media)
-
+ self.history.record(self.playing_media,
+ self.satisfied_request,
+ startTime)
+
def wait_for_media(self):
self.l.info("Randomqueue couldn't return media -- collection "+
"is assumed to be empty -- waiting for media.")
|
core: Orchestrator: record history afterwards
|
py
|
diff --git a/mbuild/tests/test_compound.py b/mbuild/tests/test_compound.py
index <HASH>..<HASH> 100755
--- a/mbuild/tests/test_compound.py
+++ b/mbuild/tests/test_compound.py
@@ -164,6 +164,11 @@ class TestCompound(BaseTest):
xyz = ch3.xyz_with_ports
assert xyz.shape == (12, 3)
+ def test_xyz_setter_bad_shape(self, ch3):
+ single_compound = mb.Compound()
+ with pytest.raises(ValueError):
+ single_compound.xyz = np.zeros(shape=(4, 10))
+
def test_particles_by_name(self, ethane):
assert sum(1 for _ in ethane.particles()) == 8
|
Add a test for trying to set a single coord with multiple values
|
py
|
diff --git a/command/install_lib.py b/command/install_lib.py
index <HASH>..<HASH> 100644
--- a/command/install_lib.py
+++ b/command/install_lib.py
@@ -53,7 +53,12 @@ class install_lib (Command):
# Install everything: simply dump the entire contents of the build
# directory to the installation directory (that's the beauty of
# having a build directory!)
- outfiles = self.copy_tree (self.build_dir, self.install_dir)
+ if os.path.isdir(self.build_dir):
+ outfiles = self.copy_tree (self.build_dir, self.install_dir)
+ else:
+ self.warn("'%s' does not exist -- no Python modules to install" %
+ self.build_dir)
+ return
# (Optionally) compile .py to .pyc
# XXX hey! we can't control whether we optimize or not; that's up
|
Check if the claimed build directory doesn't exist, and warn that we don't have any Python modules to install (rather than bomb when we try to copy a non-existent directory).
|
py
|
diff --git a/tasks.py b/tasks.py
index <HASH>..<HASH> 100644
--- a/tasks.py
+++ b/tasks.py
@@ -107,8 +107,12 @@ def update_ftp(ctx, file):
execute_upload_pickle(file)
-@task
+@task()
def gen_test_df(ctx):
+ """
+ Generate small dataframes that represents the
+ real dataframes used in metrics training.
+ """
print('Generating test dataframes...\n')
print('Generating test planilha orcamentaria...')
@@ -177,6 +181,9 @@ def gen_test_df(ctx):
@task()
def test_metrics(ctx):
+ """
+ Train metrics with test dataframes.
+ """
raw_dir = './data/raw/'
original = os.listdir(raw_dir)
for fname in original:
|
Add new inv task to README and document new tasks
|
py
|
diff --git a/nion/swift/Application.py b/nion/swift/Application.py
index <HASH>..<HASH> 100644
--- a/nion/swift/Application.py
+++ b/nion/swift/Application.py
@@ -601,8 +601,15 @@ class Application(UIApplication.BaseApplication):
if result and profile:
try:
new_project_reference = profile.upgrade(project_reference)
- except Exception:
+ except FileExistsError:
+ message = _("Upgraded project already exists.")
+ self.show_ok_dialog(_("Error Upgrading Project"), f"{message}\n{project_reference.path}")
+ logging.info(f"Project already exists: {project_reference.path}")
+ new_project_reference = None
+ except Exception as e:
self.show_ok_dialog(_("Error Upgrading Project"), _("Unable to upgrade project."))
+ import traceback
+ traceback.print_exc()
new_project_reference = None
if new_project_reference:
self.switch_project_reference(new_project_reference)
|
Improve error message when upgraded project already exists.
|
py
|
diff --git a/python_modules/libraries/dagster-aws/dagster_aws_tests/ecs_tests/launcher_tests/conftest.py b/python_modules/libraries/dagster-aws/dagster_aws_tests/ecs_tests/launcher_tests/conftest.py
index <HASH>..<HASH> 100644
--- a/python_modules/libraries/dagster-aws/dagster_aws_tests/ecs_tests/launcher_tests/conftest.py
+++ b/python_modules/libraries/dagster-aws/dagster_aws_tests/ecs_tests/launcher_tests/conftest.py
@@ -1,8 +1,10 @@
# pylint: disable=redefined-outer-name, unused-argument
+import warnings
from contextlib import contextmanager
import boto3
import pytest
+from dagster import ExperimentalWarning
from dagster.core.definitions.reconstructable import ReconstructableRepository
from dagster.core.host_representation.origin import InProcessRepositoryLocationOrigin
from dagster.core.test_utils import instance_for_test
@@ -10,6 +12,13 @@ from dagster.core.test_utils import instance_for_test
from . import repo
+@pytest.fixture(autouse=True)
+def ignore_experimental_warning():
+ with warnings.catch_warnings():
+ warnings.simplefilter("ignore", category=ExperimentalWarning)
+ yield
+
+
@pytest.fixture
def image():
return "dagster:latest"
|
Ignore dagster.ExperimentalWarning Summary: Because of the frequency with which we initialize EcsRunLauncher, we end up seeing this warning a bunch during tests. Let's ignore it to make the test output more readable. Depends on D<I> Test Plan: unit Reviewers: dgibson, alangenfeld, johann Reviewed By: dgibson Differential Revision: <URL>
|
py
|
diff --git a/bang/deployers/cloud.py b/bang/deployers/cloud.py
index <HASH>..<HASH> 100644
--- a/bang/deployers/cloud.py
+++ b/bang/deployers/cloud.py
@@ -23,14 +23,19 @@ class BaseDeployer(Deployer):
"""Base class for all cloud resource deployers"""
def __init__(self, stack, config, consul):
super(BaseDeployer, self).__init__(stack, config)
- self.consul = consul
+ self._consul = consul
+
+ @property
+ def consul(self):
+ return self._consul
class RegionedDeployer(BaseDeployer):
"""Deployer that automatically sets its region"""
- def __init__(self, *args, **kwargs):
- super(RegionedDeployer, self).__init__(*args, **kwargs)
- self.consul.set_region(self.region_name)
+ @property
+ def consul(self):
+ self._consul.set_region(self.region_name)
+ return self._consul
class ServerDeployer(RegionedDeployer):
|
Set region in child process. RegionedDeployer used to set the region for its consul object in its constructor. Since consuls are shared between objects, setting the region at this point meant that all deployer objects would share the region of the latest instantiated deployer. This change pushes the region setting to a later time in the deployment once the deployers have all been forked into their child processes. This allows each deployer to set its own region.
|
py
|
diff --git a/peyotl/phylesystem/git_actions.py b/peyotl/phylesystem/git_actions.py
index <HASH>..<HASH> 100644
--- a/peyotl/phylesystem/git_actions.py
+++ b/peyotl/phylesystem/git_actions.py
@@ -62,7 +62,7 @@ class GitAction(object):
git(self.gitdir, self.gitwd, "checkout","master")
dirs = []
# first we look for studies already in our master branch
- for f in os.listdir("study/"):
+ for f in os.listdir(os.path.join(repo,"study/")):
if os.path.isdir("study/%s" % f):
# ignore alphabetic prefix, o = created by opentree API
if f[0].isalpha():
|
Fixed typo in newest_study_id
|
py
|
diff --git a/fluo/urls.py b/fluo/urls.py
index <HASH>..<HASH> 100644
--- a/fluo/urls.py
+++ b/fluo/urls.py
@@ -21,7 +21,7 @@
# THE SOFTWARE.
from __future__ import absolute_import, division, print_function, unicode_literals
-from django.conf.urls import handler400, handler403, handler404, handler500, include, patterns, url
+from django.conf.urls import handler400, handler403, handler404, handler500, include, url
from django.urls import (
NoReverseMatch, RegexURLPattern, RegexURLResolver, ResolverMatch, Resolver404, get_script_prefix, reverse as django_reverse, resolve
)
@@ -31,7 +31,7 @@ from django.utils.functional import lazy
__all__ = [
'handler400', 'handler403', 'handler404', 'handler500',
- 'url', 'include', 'patterns',
+ 'url', 'include',
'NoReverseMatch', 'RegexURLPattern', 'RegexURLResolver', 'ResolverMatch', 'Resolver404', 'get_script_prefix',
'reverse', 'reverse_lazy', 'resolve',
'UrlsMixin',
|
don't import deprecated patterns in fluo.urls
|
py
|
diff --git a/src/libtcod.py b/src/libtcod.py
index <HASH>..<HASH> 100644
--- a/src/libtcod.py
+++ b/src/libtcod.py
@@ -50,6 +50,7 @@ if 'darwin' in _sys.platform:
_os.environ['DYLD_LIBRARY_PATH'] += ':' + _os.path.realpath(_os.path.join(__path__[0], _get_lib_path_crossplatform()))
else:
_os.environ['DYLD_LIBRARY_PATH'] = _os.path.realpath(_os.path.join(__path__[0], _get_lib_path_crossplatform()))
+ _ctypes.CDLL(_os.path.join(__path__[0], 'Frameworks/SDL.framework/Versions/A/SDL'))
from . import _libtcod
|
force load SDL with ctypes
|
py
|
diff --git a/dbaas_zabbix/database_providers.py b/dbaas_zabbix/database_providers.py
index <HASH>..<HASH> 100644
--- a/dbaas_zabbix/database_providers.py
+++ b/dbaas_zabbix/database_providers.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-from provider import ZabbixProvider
+from dbaas_zabbix.provider import ZabbixProvider
import logging
LOG = logging.getLogger(__name__)
|
Fix import to work with python <I>
|
py
|
diff --git a/tarbell/cli.py b/tarbell/cli.py
index <HASH>..<HASH> 100644
--- a/tarbell/cli.py
+++ b/tarbell/cli.py
@@ -286,6 +286,8 @@ def tarbell_publish(command, args):
puts(colored.green("http://{0}\n".format(bucket_url)))
except KeyboardInterrupt:
show_error("ctrl-c pressed, bailing out!")
+ except KeyError:
+ show_error("Credentials for bucket {0} not configured -- run {1} or add credentials to {2}".format(colored.red(bucket_url), colored.yellow("tarbell configure s3"), colored.yellow("~/.tarbell/settings.yaml")))
finally:
_delete_dir(tempdir)
|
catch missing bucket cred error, effectively closes #<I>
|
py
|
diff --git a/phoebe/atmospheres/limbdark.py b/phoebe/atmospheres/limbdark.py
index <HASH>..<HASH> 100644
--- a/phoebe/atmospheres/limbdark.py
+++ b/phoebe/atmospheres/limbdark.py
@@ -3795,6 +3795,14 @@ def download_atm(atm=None):
destin_folder = get_paths()[0]
+ # Does the directory exist?
+ if not os.path.isdir(destin_folder):
+ direcs = os.sep.split(destin_folder)
+ level1 = os.path.join(direcs[:-1])
+ if not os.path.isdir(level1):
+ os.mkdir(level1)
+ os.mkdir(destin_folder)
+
# Perhaps we need to be sudo?
print("Copying to destination folder {}".format(destin_folder))
if not os.access(destin_folder, os.W_OK):
|
fixed bug in plotting lcobs if no errorbars are available
|
py
|
diff --git a/django_performance_recorder/__init__.py b/django_performance_recorder/__init__.py
index <HASH>..<HASH> 100644
--- a/django_performance_recorder/__init__.py
+++ b/django_performance_recorder/__init__.py
@@ -1,6 +1,22 @@
# -*- coding:utf-8 -*-
+"""
+isort:skip_file
+"""
from __future__ import absolute_import, division, print_function, unicode_literals
+import six
+
+try:
+ import pytest
+except ImportError:
+ pytest = None
+
+if pytest is not None:
+ if six.PY2:
+ pytest.register_assert_rewrite(b'django_performance_recorder.api')
+ else:
+ pytest.register_assert_rewrite('django_performance_recorder.api')
+
from .api import record # noqa: F401
__version__ = '1.0.0'
|
Make assert statement rich in pytest
|
py
|
diff --git a/cauldron/test/runner/test_printing.py b/cauldron/test/runner/test_printing.py
index <HASH>..<HASH> 100644
--- a/cauldron/test/runner/test_printing.py
+++ b/cauldron/test/runner/test_printing.py
@@ -1,4 +1,3 @@
-import time
import string
import cauldron
@@ -34,18 +33,15 @@ class TestPrinting(scaffolds.ResultsTest):
response.thread.join(1)
dom = step.dumps()
self.assertEqual(dom.count('BAT'), 1)
- self.assertEqual(dom.count('SAT'), 0)
response.thread.join(1)
dom = step.dumps()
self.assertEqual(dom.count('BAT'), 1)
- self.assertEqual(dom.count('SAT'), 0)
response.thread.join()
- time.sleep(1)
dom = step.dumps()
self.assertEqual(dom.count('BAT'), 1)
- self.assertEqual(dom.count('SAT'), 1)
+ self.assertLess(dom.count('SAT'), 2)
support.run_command('close')
|
Slow Printing Test Fix slow printing test to avoid race conditions.
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -21,7 +21,7 @@ setup(name='nested_formset',
packages=find_packages('src'),
package_dir={'': 'src'},
include_package_data=True,
- zip_safe=True,
+ zip_safe=False,
install_requires=[
'Django>=1.5',
'django-discover-runner',
|
Mark the package not zipsafe so test discovery finds directories.
|
py
|
diff --git a/ecell4/util/decorator.py b/ecell4/util/decorator.py
index <HASH>..<HASH> 100644
--- a/ecell4/util/decorator.py
+++ b/ecell4/util/decorator.py
@@ -77,8 +77,7 @@ def generate_ReactionRule(lhs, rhs, k=None):
'parameter must be given as a number; "%s" given' % str(k))
def traverse_ParseObj(obj, keys):
- reserved_vars = ['pi']
- # reserved_vars = ['_t', 'pi']
+ reserved_vars = ['_t', 'pi']
reserved_funcs = ['exp', 'log', 'sin', 'cos', 'tan', 'asin', 'acos', 'atan']
if isinstance(obj, parseobj.AnyCallable):
@@ -128,6 +127,7 @@ def generate_ratelaw(obj, rr):
rr.add_reactant(ecell4.core.Species(key), 1)
rr.add_product(ecell4.core.Species(key), 1)
exp = exp.format(*names)
+ # print(exp)
import math
f = eval("lambda _r, _p, _v, _t, _rr: {0}".format(exp))
f.__globals__['exp'] = math.exp
|
Calculate a sensitivity against time in jacobi_func
|
py
|
diff --git a/isort/settings.py b/isort/settings.py
index <HASH>..<HASH> 100644
--- a/isort/settings.py
+++ b/isort/settings.py
@@ -235,7 +235,8 @@ class _Config:
)
if self.multi_line_output == WrapModes.VERTICAL_GRID_GROUPED_NO_COMMA: # type: ignore
- object.__setattr__(self, "multi_line_output", WrapModes.VERTICAL_GRID_GROUPED) # type: ignore
+ vertical_grid_grouped = WrapModes.VERTICAL_GRID_GROUPED # type: ignore
+ object.__setattr__(self, "multi_line_output", vertical_grid_grouped)
if self.force_alphabetical_sort:
object.__setattr__(self, "force_alphabetical_sort_within_sections", True)
object.__setattr__(self, "no_sections", True)
|
squash! fixup! Respect line_length in vertical grid modes and deprecate mode 6 Linting work-around
|
py
|
diff --git a/tests/test_constantq.py b/tests/test_constantq.py
index <HASH>..<HASH> 100644
--- a/tests/test_constantq.py
+++ b/tests/test_constantq.py
@@ -52,8 +52,8 @@ def test_cqt():
# incorrect hop length for a 6-octave analysis
- # num_octaves = 6, 2**6 = 64 > 32
- for hop_length in [-1, 0, 32, 63, 65]:
+ # num_octaves = 6, 2**(6-1) = 32 > 16
+ for hop_length in [-1, 0, 16, 63, 65]:
yield (raises(librosa.ParameterError)(__test_cqt_size), y, sr, hop_length, None, 72,
12, 0.0, 2, None, 1, 0.01)
|
cqt-test: only need to downsample 5 times for 6 octaves
|
py
|
diff --git a/ledger/__metadata__.py b/ledger/__metadata__.py
index <HASH>..<HASH> 100644
--- a/ledger/__metadata__.py
+++ b/ledger/__metadata__.py
@@ -1,7 +1,7 @@
"""
Ledger package metadata
"""
-__version_info__ = (0, 0, 29)
+__version_info__ = (0, 0, 30)
__version__ = '{}.{}.{}'.format(*__version_info__)
__author__ = "Evernym, Inc."
__license__ = "Apache 2.0"
|
advance version to push to pypi
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -20,7 +20,6 @@ setup(
author_email='juha.yrjola@iki.fi',
install_requires=[
'Django',
- 'djangorestframework',
'requests',
'requests_cache',
'django_mptt',
|
Remove dependency to djangorestframework
|
py
|
diff --git a/src/ai/backend/client/__init__.py b/src/ai/backend/client/__init__.py
index <HASH>..<HASH> 100644
--- a/src/ai/backend/client/__init__.py
+++ b/src/ai/backend/client/__init__.py
@@ -6,7 +6,7 @@ __all__ = (
*session.__all__,
)
-__version__ = '20.03.0'
+__version__ = '20.09.0a1.dev0'
def get_user_agent():
|
repo: Prepare for next dev cycle
|
py
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.