diff
stringlengths
139
3.65k
message
stringlengths
8
627
diff_languages
stringclasses
1 value
diff --git a/cassandra/cqlengine/columns.py b/cassandra/cqlengine/columns.py index <HASH>..<HASH> 100644 --- a/cassandra/cqlengine/columns.py +++ b/cassandra/cqlengine/columns.py @@ -328,7 +328,9 @@ class Text(Column): Defaults to 1 if this is a ``required`` column. Otherwise, None. :param int max_length: Sets the maximum length of this string, for validation purposes. """ - self.min_length = min_length or (1 if kwargs.get('required', False) else None) + self.min_length = ( + 1 if not min_length and kwargs.get('required', False) + else min_length) self.max_length = max_length if self.min_length is not None:
Only enforce minimal length requirement if min_length is unset.
py
diff --git a/fireplace/actions.py b/fireplace/actions.py index <HASH>..<HASH> 100644 --- a/fireplace/actions.py +++ b/fireplace/actions.py @@ -365,6 +365,12 @@ class Play(GameAction): args = ("card", "target", "choose") type = PowSubType.PLAY + def _broadcast(self, entity, source, game, at, *args): + # Prevent cards from triggering off their own play + if entity is self.card: + return + return super()._broadcast(entity, source, game, at, *args) + def get_args(self, source, game): return (source, ) + self._args @@ -737,6 +743,12 @@ class Summon(TargetedAction): """ args = ("targets", "card") + def _broadcast(self, entity, source, game, at, *args): + # Prevent cards from triggering off their own summon + if entity is args[1]: + return + return super()._broadcast(entity, source, game, at, *args) + def get_args(self, source, game, target): cards = _eval_card(source, game, self.card) return (target, cards)
Prevent entities from triggering off their own summon/play
py
diff --git a/rejected/controller.py b/rejected/controller.py index <HASH>..<HASH> 100644 --- a/rejected/controller.py +++ b/rejected/controller.py @@ -7,6 +7,7 @@ import logging import signal import sys +from rejected import common from rejected import mcp from rejected import __version__ @@ -41,6 +42,7 @@ class Controller(clihelper.Controller): def _setup(self): """Continue the run process blocking on MasterControlProgram.run""" # If the app was invoked to specified to prepend the path, do so now + common.add_null_handler() if self._options.prepend_path: self._prepend_python_path(self._options.prepend_path)
Add the null handler to the root logger to prevent Tornado from doing logging.basicConfig
py
diff --git a/lib/stsci/tools/check_files.py b/lib/stsci/tools/check_files.py index <HASH>..<HASH> 100644 --- a/lib/stsci/tools/check_files.py +++ b/lib/stsci/tools/check_files.py @@ -336,9 +336,9 @@ def stisExt2PrimKw(stisfiles): if isinstance(sfile, str): sfile = fits.open(sfile, mode='udpate') toclose = True - d = {} + #d = {} for k in kw_list: - d[0].header[k] = d[1].header[k] + sfile[0].header[k] = sfile[1].header[k] if toclose: sfile.close()
Fix code to move keywords from SCI to PRIMARY
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -46,7 +46,7 @@ setup( author=meta['author'], author_email='mail@honzajavorek.cz', url='https://github.com/honzajavorek/redis-collections', - license=io.open('LICENSE', encoding='utf-8').read(), + license='ISC', packages=find_packages(exclude=['tests']), include_package_data=True, install_requires=['redis>=2.7.2', 'six>=1.10.0'],
Use license type in setup.py
py
diff --git a/trainerdex/leaderboard.py b/trainerdex/leaderboard.py index <HASH>..<HASH> 100644 --- a/trainerdex/leaderboard.py +++ b/trainerdex/leaderboard.py @@ -131,6 +131,7 @@ class WorldwideLeaderboard(BaseLeaderboard): class DiscordLeaderboard(BaseLeaderboard): def __init__(self, data: List[dict], client: HTTPClient = HTTPClient()): + self.client = client self.time = dateutil.parser.parse(data.get('generated')) self.title = data.get('title') self._data = data.get('leaderboard')
Fixes "`DiscordLeaderboard` object has no attribute `client`"
py
diff --git a/examples/appengine/example.py b/examples/appengine/example.py index <HASH>..<HASH> 100644 --- a/examples/appengine/example.py +++ b/examples/appengine/example.py @@ -109,8 +109,10 @@ class HomeHandler(BaseHandler): def post(self): url = self.request.get('url') file = urllib2.urlopen(url) - graph = facebook.GraphAPI(self.current_user.access_token) - graph.put_photo(file, "Test Image") + graph = facebook.GraphAPI(self.current_user['access_token']) + response = graph.put_photo(file, "Test Image") + photo_url = "http://www.facebook.com/photo.php?fbid={0}".format(response['id']) + self.redirect(str(photo_url)) class LogoutHandler(BaseHandler): def get(self):
changed implementation of upload photo in the way it gets the access token and to redirect to photo after upload is done
py
diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_file_client.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_file_client.py index <HASH>..<HASH> 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_file_client.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_file_client.py @@ -684,9 +684,9 @@ class ShareFileClient(StorageAccountHostsMixin): def download_file( self, offset=None, # type: Optional[int] length=None, # type: Optional[int] - **kwargs + **kwargs # type: Any ): - # type: (Optional[int], Optional[int], Any) -> StorageStreamDownloader + # type: (...) -> StorageStreamDownloader """Downloads a file to the StorageStreamDownloader. The readall() method must be used to read all the content or readinto() must be used to download the file into a stream. Using chunks() returns an iterator which allows the user to iterate over the content in chunks.
fix function has duplicate type signatures mypy error (#<I>)
py
diff --git a/salt/modules/win_system.py b/salt/modules/win_system.py index <HASH>..<HASH> 100644 --- a/salt/modules/win_system.py +++ b/salt/modules/win_system.py @@ -448,10 +448,9 @@ def get_hostname(): salt 'minion-id' system.get_hostname ''' - cmd = 'wmic computersystem get name' + cmd = 'hostname' ret = __salt__['cmd.run'](cmd=cmd) - _, hostname = ret.split("\n") - return hostname + return ret def set_hostname(hostname):
Fix get_hostname to handle longer computer names
py
diff --git a/tests/base.py b/tests/base.py index <HASH>..<HASH> 100644 --- a/tests/base.py +++ b/tests/base.py @@ -9,6 +9,7 @@ from is_element_present import IsElementPresentTest from iframes import IFrameElementsTest from async_finder import AsyncFinderTests from within_elements import WithinElementsTest +from status_code_404 import StatusCode404Test class BaseBrowserTests(FindElementsTest, FormElementsTest, ClickElementsTest, WithinElementsTest): @@ -56,7 +57,7 @@ class BaseBrowserTests(FindElementsTest, FormElementsTest, ClickElementsTest, Wi element = self.browser.find_by_id("firstheader").first assert_equals(element.parent, self.browser) -class WebDriverTests(BaseBrowserTests, IFrameElementsTest, ElementDoestNotExistTest, IsElementPresentTest, AsyncFinderTests): +class WebDriverTests(BaseBrowserTests, IFrameElementsTest, ElementDoestNotExistTest, IsElementPresentTest, AsyncFinderTests, StatusCode404Test): def test_should_reload_a_page(self): "should reload a page"
Added recently created test in the list of inherit test classes
py
diff --git a/pyspectral/rayleigh.py b/pyspectral/rayleigh.py index <HASH>..<HASH> 100644 --- a/pyspectral/rayleigh.py +++ b/pyspectral/rayleigh.py @@ -166,7 +166,7 @@ class Rayleigh(object): wvl = self.get_effective_wavelength(bandname) * 1000.0 coeff, wvl_coord, azid_coord = self.get_poly_coeff() - if wvl > wvl_coord.max() or wvl < wvl_coord.min(): + if not(wvl_coord.min() < wvl < wvl_coord.max()): LOG.warning( "Effective wavelength for band %s outside 400-800 nm range!", str(bandname)) LOG.info(
Simplify out of bounds check
py
diff --git a/src/openaccess_epub/ncx/ncx.py b/src/openaccess_epub/ncx/ncx.py index <HASH>..<HASH> 100644 --- a/src/openaccess_epub/ncx/ncx.py +++ b/src/openaccess_epub/ncx/ncx.py @@ -191,11 +191,13 @@ e try: child_title = child.getChildrenByTagName('title')[0] except IndexError: - label = 'Title Not Found!' + #label = 'Title Not Found!' + continue else: label = utils.serialize_text(child_title) if not label: - label = 'Blank Title Found!' + #label = 'Blank Title Found!' + continue source = 'main.{0}.xml#{1}'.format(self.article_doi, source_id) if tagname == 'sec': play_order = self.pull_play_order()
Changed behavior for sec to navPoint conversion. If they lack titles they will now not become navPoints
py
diff --git a/lib/stsci/tools/logutil.py b/lib/stsci/tools/logutil.py index <HASH>..<HASH> 100644 --- a/lib/stsci/tools/logutil.py +++ b/lib/stsci/tools/logutil.py @@ -178,8 +178,30 @@ class StreamTeeLogger(logging.Logger): self.set_stream(stream) self.addHandler(_LogTeeHandler()) - self.errors = 'strict' - self.encoding = 'utf8' + #self.errors = 'strict' + #self.encoding = 'utf8' + + @property + def encoding(self): + if self.stream: + try: + return self.stream.encoding + except AttributeError: + pass + + # Default value + return 'utf-8' + + @property + def errors(self): + if self.stream: + try: + return self.stream.errors + except AttributeError: + pass + + # Default value + return 'strict' def set_stream(self, stream): """
Replaced hard-coded attributes for 'errors' and 'encoding' in logutil.StreamTeeLogger with properties, as provided by Erik. This should make the code more robust. git-svn-id: <URL>
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -22,7 +22,7 @@ setup( author_email='dev@aaren.me', license='BSD 2-Clause', url='http://github.com/aaren/notedown', - install_requires=['ipython >= 3.0', 'jinja2', 'pandoc-attributes'], + install_requires=['ipython[nbconvert] >= 3.0', 'pandoc-attributes'], entry_points={'console_scripts': ['notedown = notedown:cli', ]}, package_dir={'notedown': 'notedown'}, package_data={'notedown': ['templates/markdown.tpl']},
fix requirements: now need nbconvert
py
diff --git a/bitshares/account.py b/bitshares/account.py index <HASH>..<HASH> 100644 --- a/bitshares/account.py +++ b/bitshares/account.py @@ -57,8 +57,12 @@ class Account(BlockchainObject): self.cache(account["name"]) if self.full: - account = self.blockchain.rpc.get_full_accounts( - [account["id"]], False)[0][1] + accounts = self.blockchain.rpc.get_full_accounts( + [account["id"]], False) + if accounts and isinstance(accounts, list): + account = accounts[0][1] + else: + raise AccountDoesNotExistsException(self.identifier) super(Account, self).__init__( account["account"], blockchain_instance=self.blockchain
Issue #<I> Added bad API response for accounts list handling
py
diff --git a/nose2gae/__init__.py b/nose2gae/__init__.py index <HASH>..<HASH> 100644 --- a/nose2gae/__init__.py +++ b/nose2gae/__init__.py @@ -33,7 +33,7 @@ class Nose2GAE(events.Plugin): configSection = 'nose2-gae' commandLineSwitch = (None, 'with-gae', 'Run tests inside the Google Appengine sandbox') - def __init__(self): + def handleArgs(self, event): self._gae_path = os.path.abspath( self.config.as_str('lib-root', '/usr/local/google_appengine')) appserver_py = os.path.join(self._gae_path, 'dev_appserver.py')
Don't prematurely parse arguments - could fail if the plugin is loaded but not activated
py
diff --git a/test/test_prechecks.py b/test/test_prechecks.py index <HASH>..<HASH> 100644 --- a/test/test_prechecks.py +++ b/test/test_prechecks.py @@ -26,6 +26,7 @@ class PrecheckTest(BrokerTestCase): }) self.assertEqual(response.status_code, http.HTTPStatus.PRECONDITION_FAILED) + print(response.data) self.assertEqual(response.json, dict(description="Service broker requires version 2.13+.")) def test_returns_400_if_request_not_contains_version_header(self):
Add some output to test, to engage ci error
py
diff --git a/arq/worker.py b/arq/worker.py index <HASH>..<HASH> 100644 --- a/arq/worker.py +++ b/arq/worker.py @@ -151,7 +151,7 @@ class Worker: def __init__( self, - functions: Sequence[Function] = (), + functions: Sequence[Union[Function, Callable]] = (), *, queue_name: str = default_queue_name, cron_jobs: Optional[Sequence[CronJob]] = None,
fix Worker typing (#<I>)
py
diff --git a/zipline/algorithm.py b/zipline/algorithm.py index <HASH>..<HASH> 100644 --- a/zipline/algorithm.py +++ b/zipline/algorithm.py @@ -499,6 +499,7 @@ class TradingAlgorithm(object): perf['daily_perf'].update( perf['daily_perf'].pop('recorded_vars') ) + perf['daily_perf'].update(perf['cumulative_risk_metrics']) daily_perfs.append(perf['daily_perf']) else: self.risk_report = perf
added cumulative risk measures to the datapanel produced from perf frames.
py
diff --git a/test/test_tram.py b/test/test_tram.py index <HASH>..<HASH> 100644 --- a/test/test_tram.py +++ b/test/test_tram.py @@ -71,10 +71,17 @@ class TestTRAM(unittest.TestCase): state_sequence = Markov_state_sequence log_R_K_i = np.log(R) + log_R_K_i_compare = np.zeros_like(log_R_K_i) + new_biased_conf_energies = np.zeros_like(biased_conf_energies) + thermotools.tram.update_biased_conf_energies(log_lagrangian_mult, biased_conf_energies, count_matrices, bias_energy_sequence, state_sequence, + state_counts, log_R_K_i_compare, scratch_M, scratch_T, new_biased_conf_energies) + + assert np.allclose(log_R_K_i, log_R_K_i_compare) + compare = thermotools.tram.log_likelihood(log_lagrangian_mult, biased_conf_energies, count_matrices, bias_energy_sequence, state_sequence, - state_counts, log_R_K_i, scratch_M, scratch_T) + state_counts, log_R_K_i_compare, scratch_M, scratch_T) - assert abs(reference-compare)<1.E-3 + assert np.allclose(reference, compare) if __name__ == "__main__": unittest.main()
[TRAM] completed logL unit test
py
diff --git a/ci/utils.py b/ci/utils.py index <HASH>..<HASH> 100644 --- a/ci/utils.py +++ b/ci/utils.py @@ -2,6 +2,8 @@ import time from datetime import timedelta from django.utils import timezone +from six.moves.urllib.parse import urlparse, parse_qs +from six import string_types import attr @@ -101,3 +103,14 @@ def get_ranged_data_from_timeseries(timeseries, dt, range_type='week'): end = get_timestamp(boundries.end) sent_data = transform_timeseries_data(timeseries, start, end) return right_pad_list(sent_data, length=padding, value=0) + + +def extract_query_params(url): + """ + Takes a URL as a string, and returns a dictionary representing the query + parameters present in the URL. + """ + if isinstance(url, string_types): + return parse_qs(urlparse(url).query) + else: + return {}
Add util to extract query params from a URL
py
diff --git a/intake/catalog/entry.py b/intake/catalog/entry.py index <HASH>..<HASH> 100644 --- a/intake/catalog/entry.py +++ b/intake/catalog/entry.py @@ -133,7 +133,7 @@ class CatalogEntry(DictSerialiseMixin): 'application/json': contents, 'text/plain': pretty_describe(contents) }, metadata={ - 'application/json': { 'root': self.name } + 'application/json': { 'root': self._name } }, raw=True) if warning: display(warning) # noqa: F821
Use underscored `_name` to avoid initialization.
py
diff --git a/qiskit/visualization/dag_visualization.py b/qiskit/visualization/dag_visualization.py index <HASH>..<HASH> 100644 --- a/qiskit/visualization/dag_visualization.py +++ b/qiskit/visualization/dag_visualization.py @@ -170,7 +170,8 @@ def dag_drawer(dag, scale=0.7, filename=None, style='color'): with tempfile.TemporaryDirectory() as tmpdirname: tmp_path = os.path.join(tmpdirname, 'dag.png') dot.write_png(tmp_path) - image = Image.open(tmp_path) + with Image.open(tmp_path) as test_image: + image = test_image.copy() os.remove(tmp_path) return image else:
fix error when `dag_drawer` removes a temporary file that is still open (#<I>) * fix error when removing an image file that is still open * autopep
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -10,5 +10,7 @@ setup( packages=find_packages(), include_package_data=True, install_requires=['pyhamcrest'], + tests_requires=['pyhamcrest'], + test_suite="tests", zip_safe=False, )
re #<I> Updating setup.py for missing test deps. Updating setup.py because tests missing dependencies. Using this way, now `python setup.py test` can be run
py
diff --git a/tabledata/_core.py b/tabledata/_core.py index <HASH>..<HASH> 100644 --- a/tabledata/_core.py +++ b/tabledata/_core.py @@ -2,6 +2,7 @@ .. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com> """ +import copy import re from collections import OrderedDict, namedtuple from typing import Any, Dict, List, Optional, Sequence @@ -44,7 +45,7 @@ class TableData: self.__rows = [] if dp_extractor: - self.__dp_extractor = dp_extractor + self.__dp_extractor = copy.deepcopy(dp_extractor) else: self.__dp_extractor = dp.DataPropertyExtractor()
Change to copy DataPropertyExtractor argument instead of assign
py
diff --git a/vent/core/network_tap/ncontrol/rest/create.py b/vent/core/network_tap/ncontrol/rest/create.py index <HASH>..<HASH> 100644 --- a/vent/core/network_tap/ncontrol/rest/create.py +++ b/vent/core/network_tap/ncontrol/rest/create.py @@ -60,7 +60,10 @@ class CreateR: except Exception as e: # pragma: no cover return (False, 'unable to connect to redis because: ' + str(e)) if r: - r.hmset(payload['id'], ast.literal_eval(payload['metadata'])) + try: + r.hmset(payload['id'], payload['metadata']) + except Exception as e: # pragma: no cover + return (False, 'unable to store contents of the payload in redis because: ' + str(e)) # connect to docker c = None
fix string to dict into redis issue
py
diff --git a/LiSE/LiSE/engine.py b/LiSE/LiSE/engine.py index <HASH>..<HASH> 100644 --- a/LiSE/LiSE/engine.py +++ b/LiSE/LiSE/engine.py @@ -1570,6 +1570,8 @@ class Engine(AbstractEngine, gORM): self.add_character(name, data, **kwargs) return self.character[name] + new_graph = new_character + def add_character(self, name: Keyable, data: Graph = None, **kwargs): """Create a new character.
Add new_graph as an alias of new_character
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -22,4 +22,8 @@ setup( license='MIT', packages=find_packages(exclude=['tests']), install_requires=['six', 'decorator', 'funcsigs'], + classifiers=[ + 'Programming Language :: Python :: 2', + 'Programming Language :: Python :: 3', + ] )
Added Py2/3 classifiers.
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -90,10 +90,10 @@ setup( ], data_files = [ ## this does not appear to actually put anything into the egg... + ('examples', recursive_glob('src/examples', '*.py')), + ('configs', recursive_glob('configs', '*.yaml')), ('data/john-smith', recursive_glob('data/john-smith', '*.sc')), ('data/john-smith', recursive_glob('data/john-smith', '*.sc.xz')), ('data/john-smith/original', recursive_glob('data/john-smith/original', '*')), - ('examples', recursive_glob('src/examples/', '*.py$')), - ('configs', recursive_glob('configs/', '*.yaml$')), ], )
fixing examples and configs in the egg
py
diff --git a/scss/__init__.py b/scss/__init__.py index <HASH>..<HASH> 100644 --- a/scss/__init__.py +++ b/scss/__init__.py @@ -36,7 +36,7 @@ xCSS: """ -from scss_meta import BUILD_INFO, PROJECT, VERSION, AUTHOR, AUTHOR_EMAIL, LICENSE +from scss_meta import BUILD_INFO, PROJECT, VERSION, REVISION, URL, AUTHOR, AUTHOR_EMAIL, LICENSE __project__ = PROJECT __version__ = VERSION @@ -294,6 +294,15 @@ _default_scss_files = {} # Files to be compiled ({file: content, ...}) _default_scss_index = {0: '<unknown>:0'} _default_scss_vars = { + '$BUILD_INFO': BUILD_INFO, + '$PROJECT': PROJECT, + '$VERSION': VERSION, + '$REVISION': REVISION, + '$URL': URL, + '$AUTHOR': AUTHOR, + '$AUTHOR_EMAIL': AUTHOR_EMAIL, + '$LICENSE': LICENSE, + # unsafe chars will be hidden as vars '$__doubleslash': '//', '$__bigcopen': '/*',
Added pyScss version information variables
py
diff --git a/gnupg/gnupg.py b/gnupg/gnupg.py index <HASH>..<HASH> 100644 --- a/gnupg/gnupg.py +++ b/gnupg/gnupg.py @@ -171,11 +171,6 @@ def _make_binary_stream(s, encoding): rv = StringIO(s) return rv -def _today(): - """Get the current date as a string in the form %Y-%m-%d.""" - now_string = datetime.now().__str__() - return now_string.split(' ', 1)[0] - def _threaded_copy_data(instream, outstream): wr = threading.Thread(target=_copy_data, args=(instream, outstream)) wr.setDaemon(True)
Remove function _today() from gnupg.py.
py
diff --git a/tests/refresh_token/test_models.py b/tests/refresh_token/test_models.py index <HASH>..<HASH> 100644 --- a/tests/refresh_token/test_models.py +++ b/tests/refresh_token/test_models.py @@ -18,10 +18,21 @@ class AbstractRefreshTokenTests(UserTestCase): def test_generate_token(self): token = self.refresh_token.generate_token() + n_bytes = jwt_settings.JWT_REFRESH_TOKEN_N_BYTES - self.assertEqual( - len(token), - jwt_settings.JWT_REFRESH_TOKEN_N_BYTES * 2) + self.assertEqual(len(token), n_bytes * 2) + + def test_get_token(self): + self.refresh_token.token = 'hashed' + token = self.refresh_token.get_token() + + self.assertEqual(self.refresh_token._cached_token, token) + self.assertNotEqual(self.refresh_token.token, token) + + del self.refresh_token._cached_token + token = self.refresh_token.get_token() + + self.assertEqual(self.refresh_token.token, token) def test_is_expired(self): with refresh_expired():
Added test_get_token
py
diff --git a/src/landslide/macro.py b/src/landslide/macro.py index <HASH>..<HASH> 100644 --- a/src/landslide/macro.py +++ b/src/landslide/macro.py @@ -50,7 +50,7 @@ class CodeHighlightingMacro(Macro): Pygments. """ code_blocks_re = re.compile( - r'(<pre.+?>(<code>)?\s?!(\w+?)\n(.*?)(</code>)?</pre>)', + r'(<pre.+?>(<code>)?\s?!(\S+?)\n(.*?)(</code>)?</pre>)', re.UNICODE | re.MULTILINE | re.DOTALL) html_entity_re = re.compile('&(\w+?);')
Code blocks regex now captures all pygments short-names
py
diff --git a/src/ossos-pipeline/ossos/plant.py b/src/ossos-pipeline/ossos/plant.py index <HASH>..<HASH> 100755 --- a/src/ossos-pipeline/ossos/plant.py +++ b/src/ossos-pipeline/ossos/plant.py @@ -1,6 +1,6 @@ import fcntl import os -import random +from numpy import random from astropy.table import Table import numpy import storage @@ -124,6 +124,13 @@ class KBOGenerator(object): 'id': self.id} @classmethod + def _step(cls, mag): + low = mag * 0 + 0.3 + high = mag * 0 + 0.7 + g = [mag < 23.3] * low + [mag >= 23.3] * high + return g[0] + + @classmethod def get_kbos(cls, n, rate, angle, mag, x, y, filename=None): kbos = Table(names=('x', 'y', 'mag', 'sky_rate', 'angle', 'id')) @@ -132,7 +139,7 @@ class KBOGenerator(object): for kbo in cls(n, rate=Range(rate, func=lambda value: value**0.25), angle=Range(angle), - mag=Range(mag), + mag=Range(mag, func=cls._step), x=Range(x), y=Range(y)): kbos.add_row(kbo)
Made the mag distribution a step-function with <I> coming from mag<<I> and <I>% from mag>=<I> The flux distributoin of planted sources needs to be solid enough near the transition to 0 completeness such that we can measure the shape. Also, we don't want too many bright sources in planted images that the operator looks at since that will create a distraction. The solution is to have a step at around <I> where we transition from full completeness to fractional completelness.
py
diff --git a/notifications/templatetags/notifications_tags.py b/notifications/templatetags/notifications_tags.py index <HASH>..<HASH> 100644 --- a/notifications/templatetags/notifications_tags.py +++ b/notifications/templatetags/notifications_tags.py @@ -9,8 +9,10 @@ register = Library() def notifications_unread(context): if 'user' not in context: return '' - - user = context['user'] + + request = context['request'] + user = request.user if user.is_anonymous(): return '' - return user.notifications.unread().count() \ No newline at end of file + return user.notifications.unread().count() +
bugfix for wrong user unread notification count
py
diff --git a/dvc/repo/install.py b/dvc/repo/install.py index <HASH>..<HASH> 100644 --- a/dvc/repo/install.py +++ b/dvc/repo/install.py @@ -17,7 +17,7 @@ def pre_commit_install(scm: "Git") -> None: with modify_yaml(config_path) as config: entry = { "repo": "https://github.com/iterative/dvc", - "rev": "master", + "rev": "main", "hooks": [ { "id": "dvc-pre-commit",
fix: hooks install alongside pre-commit tool
py
diff --git a/bonsai/py/model.py b/bonsai/py/model.py index <HASH>..<HASH> 100644 --- a/bonsai/py/model.py +++ b/bonsai/py/model.py @@ -176,6 +176,12 @@ class PyComprehension(PyExpression): self.expr = expr self.iters = iters + def _children(self): + yield self.expr + + for iter in self.iters: + yield iter + def pretty_str(self, indent=0): parens = parentheses[self.name[0:-self.name_suffix_length]] iters = '\n'.join( @@ -198,6 +204,13 @@ class PyComprehensionIterator(PyExpression): self.iter = iter self.filters = filters + def _children(self): + yield self.target + yield self.iter + + for filter in self.filters: + yield filter + def pretty_str(self, indent=0): indent = ' ' * indent @@ -219,6 +232,13 @@ class PyKeyValue(PyExpression): PyExpression.__init__(self, scope, parent, name, result, False) self.value = value + def _children(self): + if isinstance(self.name, CodeEntity): + yield self.name + + if isinstance(self.value, CodeEntity): + yield self.value + def pretty_str(self, indent=0): return '{}{}: {}'.format(' ' * indent, pretty_str(self.name), pretty_str(self.value))
Adding some _children() methods
py
diff --git a/launch_control/utils/json/pod.py b/launch_control/utils/json/pod.py index <HASH>..<HASH> 100644 --- a/launch_control/utils/json/pod.py +++ b/launch_control/utils/json/pod.py @@ -86,10 +86,6 @@ class PlainOldData(IComplexJSONType): def get_json_class_name(cls): return cls.__name__ - @classmethod - def get_json_attr_types(cls): - return {} - def to_json(self): """ Convert an instance to a JSON-compatible document.
Remove empty implementation of PlainOldData.get_json_attr_types() This is expected as this method is really not implemented in this class
py
diff --git a/python/tensorflow/ner/create_models.py b/python/tensorflow/ner/create_models.py index <HASH>..<HASH> 100644 --- a/python/tensorflow/ner/create_models.py +++ b/python/tensorflow/ner/create_models.py @@ -7,9 +7,9 @@ import argparse def create_graph(output_path, number_of_tags, embeddings_dimension, number_of_chars, lstm_size=128): if sys.version_info[0] != 3 or sys.version_info[1] >= 7: - raise Exception('Python 3.7 or above not supported by tensorflow') + raise Exception('Python 3.7 or above not supported by TensorFlow') if tf.__version__ != '1.15.0': - return Exception('Spark NLP is compiled with TensorFlow 1.15.0. Please use such version.') + raise Exception('Spark NLP is compiled with TensorFlow 1.15.0. Please use such version.') tf.reset_default_graph() name_prefix = 'blstm' model_name = name_prefix+'_{}_{}_{}_{}'.format(number_of_tags, embeddings_dimension, lstm_size, number_of_chars)
Fixed return and wrote raise for the tensorflow version exception
py
diff --git a/api/db.py b/api/db.py index <HASH>..<HASH> 100644 --- a/api/db.py +++ b/api/db.py @@ -11,14 +11,5 @@ from . import app from mongoengine import connect from flask.ext.mongoengine import MongoEngine -connect(app.config['MONGODB_DB'], host=app.config['MONGODB_URI']) +connect(app.config['API_DB_NAME'], host=app.config['API_DB_URI']) db = MongoEngine(app) - -# MongoDB database for register queue, utxo index, etc. -from pymongo import MongoClient -from .settings import INDEX_DB_URI - -namecoin_index = MongoClient(INDEX_DB_URI)['namecoin_index'] -utxo_index = namecoin_index.utxo -address_to_utxo = namecoin_index.address_to_utxo -address_to_keys = namecoin_index.address_to_keys_new
removed namecoin db indexes and updated name of API DB
py
diff --git a/gtts/tests/test_token.py b/gtts/tests/test_token.py index <HASH>..<HASH> 100644 --- a/gtts/tests/test_token.py +++ b/gtts/tests/test_token.py @@ -1,3 +1,5 @@ +# coding=UTF-8 + import unittest from gtts import gToken @@ -22,17 +24,17 @@ class TestToken(unittest.TestCase): def test_token_accentuated(self): lang = 'en' - text = 'Hé' + text = u'Hé' self.assertEqual('63792.446860', self.tokenizer.calculate_token(text, seed=403644)) def test_token_special_char(self): lang = 'en' - text = '€Hé' + text = u'€Hé' self.assertEqual('535990.918794', self.tokenizer.calculate_token(text, seed=403644)) def test_token_very_special_char(self): lang = 'en' - text = "◐" + text = u"◐" self.assertEqual('457487.54195', self.tokenizer.calculate_token(text, seed=403644)) if __name__ == '__main__':
Make tests compatible with Python <I> string handling.
py
diff --git a/custodia/httpd/server.py b/custodia/httpd/server.py index <HASH>..<HASH> 100644 --- a/custodia/httpd/server.py +++ b/custodia/httpd/server.py @@ -9,6 +9,7 @@ import socket import ssl import struct import sys +import warnings import six @@ -25,10 +26,21 @@ except ImportError: from urllib.parse import urlparse, parse_qs, unquote try: - # pylint: disable=import-error - from systemd import daemon as sd + from systemd import daemon as sd # pylint: disable=import-error except ImportError: sd = None + if 'NOTIFY_SOCKET' in os.environ: + warnings.warn( + "NOTIFY_SOCKET env var is set but python-systemd bindings are " + "not available!", + category=RuntimeWarning + ) + if 'LISTEN_FDS' in os.environ: + warnings.warn( + "LISTEN_FDS env var is set, but python-systemd bindings are" + "not available!", + category=RuntimeWarning + ) from custodia import log
Warn users about missing systemd bindings Python systemd bindings are an optional component. Custodia used to silently ignore systemd features like sd_notify and sd_listen_fds, when the bindings could not be imported. Now Custodia prints a warning when it detects NOTIFY_SOCKET or LISTEN_FDS env vars.
py
diff --git a/src/googleclouddebugger/version.py b/src/googleclouddebugger/version.py index <HASH>..<HASH> 100644 --- a/src/googleclouddebugger/version.py +++ b/src/googleclouddebugger/version.py @@ -4,4 +4,4 @@ # The major version should only change on breaking changes. Minor version # changes go between regular updates. Instances running debuggers with # different major versions will show up as two different debuggees. -__version__ = '2.1' +__version__ = '2.2'
Increment python agent minor version to <I> ------------- Created by MOE: <URL>
py
diff --git a/datacats/environment.py b/datacats/environment.py index <HASH>..<HASH> 100644 --- a/datacats/environment.py +++ b/datacats/environment.py @@ -58,6 +58,7 @@ class Environment(object): self.extension_dir = extension_dir self.ckan_version = ckan_version self.port = int(port if port else self._choose_port()) + self.host = None self.deploy_target = deploy_target self.site_url = site_url self.always_prod = always_prod @@ -518,6 +519,8 @@ class Environment(object): self._create_run_ini(port, production) try: self._run_web_container(port, command, host) + if not is_boot2docker(): + self.host = host except PortAllocatedError: port = self._next_port(port) continue @@ -627,6 +630,7 @@ class Environment(object): """ Stop and remove the web container """ + self.host = None remove_container('datacats_web_' + self.name, force=True) def _current_web_port(self): @@ -663,9 +667,10 @@ class Environment(object): Return the url of the web server or None if not running """ port = self._current_web_port() - if port is None: + host = self.host + if port is None or host is None: return None - return 'http://{0}:{1}/'.format(docker_host(), port) + return 'http://{0}:{1}/'.format(host if host else docker_host(), port) def create_admin_set_password(self, password): """
Report proper address when on Linux and using --host command.
py
diff --git a/mir_eval/util.py b/mir_eval/util.py index <HASH>..<HASH> 100644 --- a/mir_eval/util.py +++ b/mir_eval/util.py @@ -639,30 +639,6 @@ def validate_events(events, max_time=30000.): raise ValueError('Events should be in increasing order.') -def filter_labeled_intervals(intervals, labels): - r'''Remove all invalid intervals (start >= end) and corresponding labels. - - :parameters: - - intervals : np.ndarray - Array of interval times (seconds) - - - labels : list - List of labels - - :returns: - - filtered_intervals : np.ndarray - Valid interval times. - - filtered_labels : list - Corresponding filtered labels - ''' - filt_intervals, filt_labels = [], [] - for interval, label in zip(intervals, labels): - if interval[0] < interval[1]: - filt_intervals.append(interval) - filt_labels.append(label) - return np.array(filt_intervals), filt_labels - - def filter_kwargs(function, *args, **kwargs): ''' Given a function and args and keyword args to pass to it, call the function
This function is no longer used in mir_eval
py
diff --git a/openquake/commonlib/readinput.py b/openquake/commonlib/readinput.py index <HASH>..<HASH> 100644 --- a/openquake/commonlib/readinput.py +++ b/openquake/commonlib/readinput.py @@ -555,11 +555,14 @@ def get_composite_source_model(oqparam, in_memory=True): csm = source.CompositeSourceModel(gsim_lt, source_model_lt, smodels, oqparam.optimize_same_id_sources) for sm in csm.source_models: - srcs = [] + counter = collections.Counter() for sg in sm.src_groups: - srcs.extend(map(getid, sg)) - if len(set(srcs)) < len(srcs): - raise nrml.DuplicatedID('Found duplicated source IDs in %s' % sm) + for srcid in map(getid, sg): + counter[srcid] += 1 + dupl = [srcid for srcid in counter if counter[srcid] > 1] + if dupl: + raise nrml.DuplicatedID('Found duplicated source IDs in %s: %s' + % (sm, dupl)) return csm
Better check on duplicated sources [skip CI]
py
diff --git a/src/python/pants/backend/jvm/subsystems/shader.py b/src/python/pants/backend/jvm/subsystems/shader.py index <HASH>..<HASH> 100644 --- a/src/python/pants/backend/jvm/subsystems/shader.py +++ b/src/python/pants/backend/jvm/subsystems/shader.py @@ -237,7 +237,7 @@ class Shader(object): cls.register_jvm_tool(register, 'jarjar', classpath=[ - JarDependency(org='org.pantsbuild', name='jarjar', rev='1.6.2') + JarDependency(org='org.pantsbuild', name='jarjar', rev='1.6.4') ]) @classmethod
Upgrade default jarjar to <I>. (#<I>) This picks up asm fixes from and closes <URL>
py
diff --git a/cartoframes/_version.py b/cartoframes/_version.py index <HASH>..<HASH> 100644 --- a/cartoframes/_version.py +++ b/cartoframes/_version.py @@ -1 +1 @@ -__version__ = '1.0b7' +__version__ = '1.0rc1.dev'
Bump to rc1.dev
py
diff --git a/tests/test_repository.py b/tests/test_repository.py index <HASH>..<HASH> 100644 --- a/tests/test_repository.py +++ b/tests/test_repository.py @@ -82,10 +82,7 @@ def test_tag_pattern(tags, tag_prefix, tag_pattern, expected_tags): for selected_tag_ref in selected_tag_ref_list: selected_tags.append(selected_tag_ref.name) - if len(selected_tags) == 0: - assert len(expected_tags) == 0 - else: - assert all([a == b for a, b in zip(selected_tags, expected_tags)]) + assert selected_tags == expected_tags @pytest.mark.parametrize(
test: refactor assert condition to make it simpler
py
diff --git a/opinel/utils.py b/opinel/utils.py index <HASH>..<HASH> 100644 --- a/opinel/utils.py +++ b/opinel/utils.py @@ -98,18 +98,11 @@ def add_common_argument(parser, default_args, argument_name): default=[], nargs='+', help='Name of regions to run the tool in, defaults to all') - elif argument_name == 'with-gov': - parser.add_argument('--with-gov', - dest='with_gov', - default=False, - action='store_true', - help='Include the Government regions') - elif argument_name == 'with-cn': - parser.add_argument('--with-cn', - dest='with_cn', - default=False, - action='store_true', - help='Include the China regions') + elif argument_name == 'partition-name': + parser.add_argument('--partition-name', + dest='partition_name', + default='aws', + help='Switch out of the public AWS partition (e.g. US gov or China)') elif argument_name == 'vpc': parser.add_argument('--vpc', dest='vpc',
Follow up to issue #<I> -- deprecate with_gov and with_cn arguments in favor of partition_name
py
diff --git a/modelx/core/space.py b/modelx/core/space.py index <HASH>..<HASH> 100644 --- a/modelx/core/space.py +++ b/modelx/core/space.py @@ -1105,8 +1105,7 @@ class StaticSpaceImpl(BaseSpaceImpl, EditableSpaceContainerImpl): name, formula=None, refs=None, - source=None, - arguments=None, + source=None ): BaseSpaceImpl.__init__(self, parent, name, formula, refs, source)
FAC: Remove redundant param from StaticSpaceImpl
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -30,7 +30,7 @@ rootdir = os.path.abspath(os.path.dirname(__file__)) long_description = open(os.path.join(rootdir, 'README')).read() setup(name='Spartacus', - version='2.62', + version='2.63', description='Generic database wrapper', long_description=long_description, url='http://github.com/wind39/spartacus', @@ -38,7 +38,7 @@ setup(name='Spartacus', author_email='william.ivanski@gmail.com', license='MIT', packages=['Spartacus', 'Spartacus.pgspecial', 'Spartacus.pgspecial.help'], - install_requires=['pyscrypt', 'pyaes', 'openpyxl', 'click', 'sqlparse'], + install_requires=['pyscrypt', 'pyaes', 'openpyxl', 'click', 'sqlparse', 'formulas'], extras_require={ 'postgresql': ['psycopg2-binary', 'psqlparse'], 'mysql': ['PyMySQL'],
<I>: Packaging to pip
py
diff --git a/productmd/composeinfo.py b/productmd/composeinfo.py index <HASH>..<HASH> 100644 --- a/productmd/composeinfo.py +++ b/productmd/composeinfo.py @@ -41,6 +41,11 @@ from productmd.common import Header import six +if six.PY3: + def cmp(a, b): + return (a > b) - (a < b) + + # order matters - used in __cmp__ # least important come first #: supported compose types @@ -248,6 +253,7 @@ class Compose(productmd.common.MetadataBase): self._assert_type("id", list(six.string_types)) self._assert_not_blank("id") self._assert_matches_re("id", [r".*\d{8}(\.nightly|\.n|\.test|\.t)?(\.\d+)?"]) + def _validate_date(self): self._assert_type("date", list(six.string_types)) self._assert_matches_re("date", [r"^\d{8}$"])
Fix flake8 errors in composeinfo.py.
py
diff --git a/pytmx/pytmx.py b/pytmx/pytmx.py index <HASH>..<HASH> 100644 --- a/pytmx/pytmx.py +++ b/pytmx/pytmx.py @@ -105,7 +105,7 @@ types = defaultdict(lambda: six.u) _str = six.u types.update({ - "version": float, + "version": str, "orientation": _str, "width": float, "height": float,
change version from float to str new versions of tiled use a x.y.z format, so casting as a float no longer works.
py
diff --git a/opencage/geocoder.py b/opencage/geocoder.py index <HASH>..<HASH> 100644 --- a/opencage/geocoder.py +++ b/opencage/geocoder.py @@ -1,19 +1,19 @@ -import requests, json +import requests class OpenCageGeocode: - url = 'http://prototype.opencagedata.com/geocode/v1/json' - key = '' + url = 'http://prototype.opencagedata.com/geocode/v1/json' + key = '' - def __init__(self, key): - self.key = key + def __init__(self, key): + self.key = key - def geocode(self, query): - return self.getJSON(query) + def geocode(self, query): + return self.getJSON(query) - def getJSON(self, query): - data = { - 'q': query, - 'key': self.key - } - url = self.url - return requests.get(url, params=data) + def getJSON(self, query): + data = { + 'q': query, + 'key': self.key + } + url = self.url + return requests.get(url, params=data)
here we go replace tabs with spaces
py
diff --git a/confindr/confindr.py b/confindr/confindr.py index <HASH>..<HASH> 100644 --- a/confindr/confindr.py +++ b/confindr/confindr.py @@ -669,8 +669,9 @@ if __name__ == '__main__': help='Number of threads to run analysis with.') parser.add_argument('-n', '--number_subsamples', type=int, - default=3, - help='Number of times to subsample.') + default=5, + help='Number of times to subsample. Default is 5. Any less than that can cause variation' + ' between runs.') parser.add_argument('-k', '--kmer-size', type=int, default=31,
Upped default number of subsamples to 5. In silico testing was showing too much variation with only 3
py
diff --git a/pybliometrics/scopus/abstract_retrieval.py b/pybliometrics/scopus/abstract_retrieval.py index <HASH>..<HASH> 100644 --- a/pybliometrics/scopus/abstract_retrieval.py +++ b/pybliometrics/scopus/abstract_retrieval.py @@ -310,7 +310,9 @@ class AbstractRetrieval(Retrieval): @property def idxterms(self): - """List of index terms.""" + """List of index terms (these are just one category of those + Scopus provides in the web version) + .""" try: terms = listify(self._json.get("idxterms", {}).get('mainterm', [])) except AttributeError: # idxterms is empty
Better explain idxterms
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -57,7 +57,7 @@ except ImportError: print "Proper 'develop' support unavailable." setup( - name="pyvex", version="3.12", description="A Python interface to libVEX and VEX IR.", + name="pyvex", version='4.5.9.9', description="A Python interface to libVEX and VEX IR.", packages=['pyvex', 'pyvex.IRConst', 'pyvex.IRExpr', 'pyvex.IRStmt'], data_files=[ ('lib', (os.path.join('pyvex_c', library_file),),),
ticked version number to <I>
py
diff --git a/gnupg.py b/gnupg.py index <HASH>..<HASH> 100644 --- a/gnupg.py +++ b/gnupg.py @@ -429,7 +429,7 @@ def _is_allowed(input): assert _allowed.issubset(_possible), \ '_allowed is not subset of known options, difference: %s' \ % _allowed.difference(_possible) - except AssertionError as ae: ## 'as' syntax requires python>=2.6 + except AssertionError as ae: logger.debug("gnupg._is_allowed(): %s" % ae.message) raise UsageError(ae.message)
Remove a comment about pre-Python<I> compatibility, since we don't care.
py
diff --git a/master/buildbot/libvirtbuildslave.py b/master/buildbot/libvirtbuildslave.py index <HASH>..<HASH> 100644 --- a/master/buildbot/libvirtbuildslave.py +++ b/master/buildbot/libvirtbuildslave.py @@ -160,6 +160,38 @@ class LibVirtSlave(AbstractLatentBuildSlave): self.domain = None + self.ready = False + self._find_existing_instance() + + @defer.inlineCallbacks + def _find_existing_instance(self): + """ + I find existing VMs that are already running that might be orphaned instances of this slave. + """ + if not self.connection: + defer.returnValue(None) + + domains = yield self.connection.all() + for d in domains: + name = yield d.name() + if name.startswith(self.name): + self.domain = d + self.substantiated = True + break + + self.ready = True + + def canStartBuild(self): + if not self.ready: + log.msg("Not accepting builds as existing domains not iterated") + return False + + if self.domain and not self.isConnected(): + log.msg("Not accepting builds as existing domain but slave not connected") + return False + + return AbstractLatentBuildSlave.canStartBuild(self) + def _prepare_base_image(self): """ I am a private method for creating (possibly cheap) copies of a
Cope with orphaned build slaves When insubstantiate_after_build is False you can get orphan slaves that are duplicated when you restart buildbot. This prevents that by enumerating existing VM's and attempting to match them to defined slaves.
py
diff --git a/cobra/io/sbml.py b/cobra/io/sbml.py index <HASH>..<HASH> 100644 --- a/cobra/io/sbml.py +++ b/cobra/io/sbml.py @@ -148,8 +148,7 @@ def create_cobra_model_from_sbml_file(sbml_filename, old_sbml=False, legacy_meta except: warn("charge of %s is not a number (%s)" % (tmp_metabolite.id, str(note_charge))) else: - if tmp_metabolite.charge == 0 or tmp_metabolite.charge == note_charge: # get_charge() when unspecified is 0 - tmp_metabolite.charge = note_charge + if tmp_metabolite.charge is None or tmp_metabolite.charge == note_charge: tmp_metabolite.notes.pop("CHARGE") else: # tmp_metabolite.charge != note_charge msg = "different charges specified for %s (%d and %d)"
sbml bugfix for detecting conflicting charges Should check for 0, not None, now that getCharge is only read if isSet is True
py
diff --git a/peri/conf.py b/peri/conf.py index <HASH>..<HASH> 100644 --- a/peri/conf.py +++ b/peri/conf.py @@ -5,8 +5,8 @@ import copy CONF_FILE = os.path.join(os.path.expanduser("~"), ".peri.json") default_conf = { - "fftw_threads": -1, - "fftw_wisdom": os.path.join(os.path.expanduser("~"), ".fftw_wisdom.pkl"), + "fftw-threads": -1, + "fftw-wisdom": os.path.join(os.path.expanduser("~"), ".peri-wisdom.pkl"), "log-filename": os.path.join(os.path.expanduser("~"), '.peri.log'), "log-to-file": False, "log-colors": False, @@ -47,7 +47,7 @@ def load_conf(): def get_wisdom(): conf = load_conf() - return conf['fftw_wisdom'] + return conf['fftw-wisdom'] def get_logfile(): conf = load_conf()
chaning some conf keys around while breaking compatibility
py
diff --git a/src/discoursegraphs/readwrite/rst/rs3/rs3tree.py b/src/discoursegraphs/readwrite/rst/rs3/rs3tree.py index <HASH>..<HASH> 100644 --- a/src/discoursegraphs/readwrite/rst/rs3/rs3tree.py +++ b/src/discoursegraphs/readwrite/rst/rs3/rs3tree.py @@ -113,8 +113,7 @@ class RSTTree(object): # This happens if there's one EDU not to connected to the rest # of the tree (e.g. a headline). We will just make all 'root' # nodes part of a multinuc relation called 'virtual-root'. - loglevel = logging.WARN if num_roots > 2 else logging.INFO - logging.log(loglevel, + logging.log(logging.INFO, "File '{}' has {} roots!".format( os.path.basename(self.filepath), num_roots)) @@ -263,7 +262,7 @@ class RSTTree(object): if elem.get('reltype') in ('span', '', None): if elem['nuclearity'] != 'root': logging.log( - logging.WARN, + logging.INFO, "Segment '{}' in file '{}' is a non-root nucleus without children".format( elem_id, os.path.basename(self.filepath))) return tree
use logging.INFO to inform about corpus errors
py
diff --git a/angr/engines/light/engine.py b/angr/engines/light/engine.py index <HASH>..<HASH> 100644 --- a/angr/engines/light/engine.py +++ b/angr/engines/light/engine.py @@ -146,8 +146,10 @@ class SimEngineLightVEX(SimEngineLight): def _handle_Unop(self, expr): handler = None - simop = vex_operations[expr.op] - if simop.op_attrs['conversion']: + + # All conversions are handled by the Conversion handler + simop = vex_operations.get(expr.op) + if simop is not None and simop.op_attrs['conversion']: handler = '_handle_Conversion' # Notice order of "Not" comparisons elif expr.op == 'Iop_Not1': @@ -159,8 +161,7 @@ class SimEngineLightVEX(SimEngineLight): return getattr(self, handler)(expr) else: self.l.error('Unsupported Unop %s.', expr.op) - - return None + return None def _handle_Binop(self, expr): handler = None
Fix an exception thrown upon unsupported VEX unops. Closes #<I>. (#<I>) Thanks @mephi<I> for reporting this bug.
py
diff --git a/salt/states/apt.py b/salt/states/apt.py index <HASH>..<HASH> 100644 --- a/salt/states/apt.py +++ b/salt/states/apt.py @@ -1,5 +1,6 @@ ''' Package management operations specific to APT- and DEB-based systems +==================================================================== ''' # Import python libs
Change to header Apparently a header is required to have it included in the "Full list of builtin state modules".
py
diff --git a/widgets.py b/widgets.py index <HASH>..<HASH> 100644 --- a/widgets.py +++ b/widgets.py @@ -359,6 +359,12 @@ class WTextEntry(Editor): return super().handle_edit_key(key) + def handle_mouse(self, x, y): + if self.just_started: + self.just_started = False + self.redraw() + super().handle_mouse(x, y) + def show_line(self, l, i): if self.just_started: fg = COLOR_WHITE
widgets: WTextEntry: Reset just_started status on mouse click.
py
diff --git a/securesystemslib/gpg/constants.py b/securesystemslib/gpg/constants.py index <HASH>..<HASH> 100644 --- a/securesystemslib/gpg/constants.py +++ b/securesystemslib/gpg/constants.py @@ -16,6 +16,7 @@ handling """ import logging +import os import securesystemslib.gpg.rsa as rsa import securesystemslib.gpg.dsa as dsa @@ -25,9 +26,10 @@ import securesystemslib.process as process log = logging.getLogger(__name__) -# By default, we assume and test that gpg2 exists. Otherwise, we assume gpg +# By default, we allow providing GPG client through the environment +# assuming gpg2 as default value and test if exists. Otherwise, we assume gpg # exists. -GPG_COMMAND = "gpg2" +GPG_COMMAND = os.environ.get('GNUPG', "gpg2") GPG_VERSION_COMMAND = GPG_COMMAND + " --version" FULLY_SUPPORTED_MIN_VERSION = "2.1.0"
Allow to provide GPG client through env
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -28,7 +28,7 @@ from wheel.bdist_wheel import bdist_wheel here = path.abspath(path.dirname(__file__)) root = 'xlmhg' description = 'XL-mHG: A Semiparametric Test for Enrichment' -version = '2.4.5' +version = '2.4.6' long_description = '' with io.open(path.join(here, 'README.rst'), encoding='UTF-8') as fh: @@ -117,7 +117,7 @@ class CustomBdistWheel(bdist_wheel): # print('I\'m running!!! Tag is "%s"' % str(tag)) if platform == 'darwin': repl = 'macosx_10_6_x86_64.macosx_10_9_x86_64.macosx_10_10_x86_64' - if tag[2] == 'macosx_10_6_x86_64': + if tag[2] in ['macosx_10_6_x86_64', 'macosx_10_7_x86_64']: tag = (tag[0], tag[1], repl) return tag
fix wheel tag bug affecting mac Python <I> builds
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -1,11 +1,14 @@ try: from setuptools import setup, Extension + # Required for compatibility with pip (issue #177) + from setuptools.command.install import install except ImportError: from distutils.core import setup, Extension + from distutils.command.install import install + from distutils.command.build import build from distutils.command.build_ext import build_ext -from distutils.command.install import install from distutils.command.clean import clean from distutils import log from distutils.dir_util import remove_tree
Fix #<I>: make setup.py compatible with `pip install`
py
diff --git a/cdrouter/devices.py b/cdrouter/devices.py index <HASH>..<HASH> 100644 --- a/cdrouter/devices.py +++ b/cdrouter/devices.py @@ -195,7 +195,9 @@ class DevicesService(object): schema = DeviceSchema(exclude=('attachments_dir', 'default_ip', 'default_login', 'default_password', 'location', 'device_category', 'manufacturer', 'manufacturer_oui', 'model_name', 'model_number', 'product_class', 'serial_number', - 'hardware_version', 'software_version', 'provisioning_code', 'note')) + 'hardware_version', 'software_version', 'provisioning_code', 'note', + 'insecure_mgmt_url', 'mgmt_url', 'add_mgmt_addr', 'mgmt_interface', + 'mgmt_addr', 'power_on_cmd', 'power_off_cmd')) resp = self.service.list(self.base, filter, type, sort, limit, page) ds, l = self.service.decode(schema, resp, many=True, links=True) return Page(ds, l)
Exclude device connect/power fields from list output
py
diff --git a/tests/test_mbar.py b/tests/test_mbar.py index <HASH>..<HASH> 100644 --- a/tests/test_mbar.py +++ b/tests/test_mbar.py @@ -110,3 +110,15 @@ def test_general_expectations(): u_n = u_kn[:2, :] state_list = np.array([[0, 0], [1, 0], [2, 0], [2, 1]],int) [A_i, d2A_ij] = mbar.computeGeneralExpectations(A_in, u_n, state_list) + + +def test_2_states(): + u_kn = np.random.normal(size=(2, 10)) + N_k = np.array([5, 5]) + mbar = MBAR(u_kn, N_k) + +def test_1_nonempty_state(): + u_kn = np.random.normal(size=(3, 10)) + N_k = np.array([0, 10, 0]) + mbar = MBAR(u_kn, N_k) +
Added extra tests for two state checks.
py
diff --git a/astroid/rebuilder.py b/astroid/rebuilder.py index <HASH>..<HASH> 100644 --- a/astroid/rebuilder.py +++ b/astroid/rebuilder.py @@ -387,10 +387,7 @@ class TreeRebuilder(object): # parent is a astroid.nodes.Function node newnode = new.Decorators() _lineno_parent(node, newnode, parent) - if 'decorators' in node._fields: # py < 2.6, i.e. 2.5 - decorators = node.decorators - else: - decorators = node.decorator_list + decorators = node.decorator_list newnode.nodes = [self.visit(child, newnode) for child in decorators] return newnode @@ -501,11 +498,7 @@ class TreeRebuilder(object): _init_set_doc(node, newnode) newnode.args = self.visit(node.args, newnode) newnode.body = [self.visit(child, newnode) for child in node.body] - if 'decorators' in node._fields: # py < 2.6 - attr = 'decorators' - else: - attr = 'decorator_list' - decorators = getattr(node, attr) + decorators = node.decorator_list if decorators: newnode.decorators = self.visit_decorators(node, newnode) if PY3K and node.returns:
Remove decorator support for Python < <I>, since it's dead code anyway.
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100755 --- a/setup.py +++ b/setup.py @@ -14,7 +14,7 @@ setup( 'requests_oauthlib>=0.3.3', 'tlslite>=0.4.4'], setup_requires=['sphinx', 'requests_oauthlib'], - tests_require=['unittest-xml-reporting', 'requests>=1.2.3'], + tests_require=['xmlrunner', 'requests>=1.2.3'], extras_require={ 'magic': ['filemagic>=1.6'], 'shell': ['ipython>=0.13'],
switched to the renamed xmlrunner, which is patched and under our control.
py
diff --git a/sem/runner.py b/sem/runner.py index <HASH>..<HASH> 100644 --- a/sem/runner.py +++ b/sem/runner.py @@ -179,8 +179,9 @@ class SimulationRunner(object): smoothing=0, bar_format=bar_format) with pbar as progress_bar: - for _, total in line_iterator: - progress_bar.update(1) + for current, total in line_iterator: + progress_bar.n = current + progress_bar.update(0) progress_bar.n = progress_bar.total except (StopIteration): if pbar is not None:
Fix progress bar with ns3 build script
py
diff --git a/pyrsistent/_pmap.py b/pyrsistent/_pmap.py index <HASH>..<HASH> 100644 --- a/pyrsistent/_pmap.py +++ b/pyrsistent/_pmap.py @@ -402,9 +402,6 @@ class PMap(object): self.set(key, val) def set(self, key, val): - if len(self._buckets_evolver) < 0.67 * self._size: - self._reallocate(2 * len(self._buckets_evolver)) - kv = (key, val) index, bucket = PMap._get_bucket(self._buckets_evolver, key) if bucket: @@ -421,6 +418,9 @@ class PMap(object): self._buckets_evolver[index] = new_bucket self._size += 1 else: + if len(self._buckets_evolver) < 0.67 * self._size: + self._reallocate(2 * len(self._buckets_evolver)) + self._buckets_evolver[index] = [kv] self._size += 1
Fix #<I>, fix performance bug when setting elements in maps and adding elements to sets
py
diff --git a/manage.py b/manage.py index <HASH>..<HASH> 100755 --- a/manage.py +++ b/manage.py @@ -1,6 +1,23 @@ #!/usr/bin/env python -import os -os.environ['DJANGO_SETTINGS_MODULE'] = 'mailer.test_settings' -from django.core import management +DEFAULT_SETTINGS = dict( + INSTALLED_APPS=[ + "django.contrib.auth", + "django.contrib.contenttypes", + "django.contrib.sites", + "mailer", + ], + DATABASES={ + "default": { + "ENGINE": "django.db.backends.sqlite3", + "NAME": ":memory:", + } + }, + SITE_ID=1, + SECRET_KEY="notasecret", +) + if __name__ == "__main__": + from django.conf import settings + from django.core import management + settings.configure(**DEFAULT_SETTINGS) management.execute_from_command_line()
Fixed manage.py so that it can be used again
py
diff --git a/barf/barf/arch/arm/armtranslator.py b/barf/barf/arch/arm/armtranslator.py index <HASH>..<HASH> 100644 --- a/barf/barf/arch/arm/armtranslator.py +++ b/barf/barf/arch/arm/armtranslator.py @@ -801,11 +801,13 @@ class ArmTranslator(object): arm_operand = instruction.operands[0] if isinstance(arm_operand, ArmImmediateOperand): - target = ReilImmediateOperand(tb.read(arm_operand).immediate << 8, self._pc.size) + target = ReilImmediateOperand(tb.read(arm_operand).immediate << 8, self._pc.size * 2) # (* 2) to store a shifted address elif isinstance(arm_operand, ArmRegisterOperand): target = ReilRegisterOperand(arm_operand.name, arm_operand.size) target = tb._and_regs(target, ReilImmediateOperand(0xFFFFFFFE, target.size)) - target = tb._shift_reg(target, ReilImmediateOperand(8, target.size)) + tmp = tb.temporal(target.size * 2) # (* 2) to store a shifted address + tb.add(self._builder.gen_bsh(target, ReilImmediateOperand(8, target.size), tmp)) + target = tmp else: raise NotImplementedError("Instruction Not Implemented: Unknown operand for branch operation.")
ARM: Branch: Expanded target's address size to store the original address shifted by 8.
py
diff --git a/pymc/gp/step_methods.py b/pymc/gp/step_methods.py index <HASH>..<HASH> 100644 --- a/pymc/gp/step_methods.py +++ b/pymc/gp/step_methods.py @@ -173,11 +173,11 @@ class GPEvaluationGibbs(pm.Metropolis): self.children_no_data = copy.copy(self.children) if isinstance(eps_p_f, pm.Variable): - self.children_no_data.discard(eps_p_f) + self.children_no_data.remove(eps_p_f) self.eps_p_f = eps_p_f else: for epf in eps_p_f: - self.children_no_data.discard(epf) + self.children_no_data.remove(epf) self.eps_p_f = pm.Lambda('eps_p_f', lambda e=eps_p_f: np.hstack(e), trace=False) self.V = pm.Lambda('%s_vect'%V.__name__, lambda V=V: np.resize(V, len(submod.mesh)))
Guarding against gotcha in GPEvaluationGibbs
py
diff --git a/openquake/calculators/tests/__init__.py b/openquake/calculators/tests/__init__.py index <HASH>..<HASH> 100644 --- a/openquake/calculators/tests/__init__.py +++ b/openquake/calculators/tests/__init__.py @@ -63,7 +63,7 @@ def columns(line): class CalculatorTestCase(unittest.TestCase): - OVERWRITE_EXPECTED = True + OVERWRITE_EXPECTED = False edir = None # will be set to a temporary directory def get_calc(self, testfile, job_ini, **kw):
Restored False [skip CI]
py
diff --git a/source/rafcon/mvc/views/state_machines_editor.py b/source/rafcon/mvc/views/state_machines_editor.py index <HASH>..<HASH> 100644 --- a/source/rafcon/mvc/views/state_machines_editor.py +++ b/source/rafcon/mvc/views/state_machines_editor.py @@ -69,7 +69,7 @@ class PlusAddNotebook(gtk.Notebook): if pb_x <= event.x <= pb_x + pb_width and pb_y <= event.y <= pb_y + pb_height \ and self.add_visible and event.state & gtk.gdk.BUTTON1_MASK: self.emit("add_state_machine") - return + return True for i in range(0, self.get_n_pages()): alloc = self.get_tab_label(self.get_nth_page(i)).get_allocation() @@ -79,7 +79,7 @@ class PlusAddNotebook(gtk.Notebook): if alloc.x < mouse_x < alloc.x + alloc.width and alloc.y < mouse_y < alloc.y + alloc.height and \ event.state & gtk.gdk.BUTTON2_MASK: self.emit("close_state_machine", i) - return + return True def do_expose_event(self, event):
Fix issue #<I> Do not propagate middle mouse click event after handling it by closing the state machine
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -173,6 +173,7 @@ console or advanced editor, in your own software. """, download_url=__website_url__ + "#fh5co-download", author="The Spyder Project Contributors", + author_email="ccordoba12@gmail.com", url=__website_url__, license='MIT', keywords='PyQt5 editor console widgets IDE science data analysis IPython',
Add author email to setup args
py
diff --git a/pyeapi/client.py b/pyeapi/client.py index <HASH>..<HASH> 100644 --- a/pyeapi/client.py +++ b/pyeapi/client.py @@ -581,10 +581,15 @@ class Node(object): raise TypeError('config mode commands not supported') results = list() + # IMPORTANT: There are two keys (response, result) that both + # return the same value. 'response' was originally placed + # there in error and both are now present to avoid breaking + # existing scripts. 'response' will be removed in a future release. if strict: responses = self.run_commands(commands, encoding) for index, response in enumerate(responses): results.append(dict(command=commands[index], + result=response, response=response, encoding=encoding)) else:
Update enable return dict - Fix #<I> - Note - the result key was adding to the dict when strict equals True. The response key will be removed in a later version. Please update any scripts that are using response.
py
diff --git a/tests/server/blueprints/cases/test_cases_views.py b/tests/server/blueprints/cases/test_cases_views.py index <HASH>..<HASH> 100644 --- a/tests/server/blueprints/cases/test_cases_views.py +++ b/tests/server/blueprints/cases/test_cases_views.py @@ -128,7 +128,7 @@ def test_rerun_monitor(app, institute_obj, mocker, mock_redirect): # AND an unmonitor event should be created rerun_event = store.event_collection.find_one() - assert rerun_event.get("verb") == "monitor" + assert rerun_event.get("verb") == "rerun_monitor" def test_research(app, institute_obj, case_obj, mocker, mock_redirect):
Update tests/server/blueprints/cases/test_cases_views.py
py
diff --git a/dolo/algos/dtcscc/gssa.py b/dolo/algos/dtcscc/gssa.py index <HASH>..<HASH> 100644 --- a/dolo/algos/dtcscc/gssa.py +++ b/dolo/algos/dtcscc/gssa.py @@ -145,6 +145,8 @@ def gssa(model, maxit=100, tol=1e-8, initial_dr=None, verbose=False, if verbose: print(err) + it += 1 + return coefs @@ -155,6 +157,6 @@ if __name__ == '__main__': model = yaml_import("../../../examples/models/rbc_full.yaml") - gssa(model, deg=5, verbose=True, damp=0.5) + gssa(model, deg=5, verbose=True, damp=0.1) # TODO: time and check the returned coefficients
increment iteration counter in gssa
py
diff --git a/salt/modules/debian_service.py b/salt/modules/debian_service.py index <HASH>..<HASH> 100644 --- a/salt/modules/debian_service.py +++ b/salt/modules/debian_service.py @@ -100,6 +100,7 @@ def restart(name): cmd = 'service {0} restart'.format(name) return not __salt__['cmd.retcode'](cmd) + def reload(name): ''' Reload the named service @@ -111,19 +112,21 @@ def reload(name): cmd = 'service {0} reload'.format(name) return not __salt__['cmd.retcode'](cmd) + def status(name, sig=None): ''' - Return the status for a service, returns the PID or an empty string if the - service is running or not, pass a signature to use to find the service via - ps + Return the status for a service, pass a signature to use to find + the service via ps CLI Example:: - salt '*' service.status <service name> [service signature] + salt '*' service.status <service name> ''' - sig = sig or name - cmd = 'pgrep {0}'.format(sig) - return __salt__['cmd.run'](cmd).strip() + if sig: + return bool(__salt__['status.pid'](sig)) + cmd = 'service {0} status'.format(name) + return not __salt__['cmd.retcode'](cmd) + def enable(name): '''
Fixed debian_service.status method Changes based on discussion from pull-req #<I> (<URL>) - if signature argument is set, use it to do a process lookup - if signature argument is not set, use the startup script with "status" argument It's base on the rh_service.status method behavior.
py
diff --git a/extras.py b/extras.py index <HASH>..<HASH> 100644 --- a/extras.py +++ b/extras.py @@ -3,6 +3,12 @@ import requests import os +api_root = 'http://ws.audioscrobbler.com/2.0/' + def user_tracks(user_name): - resp = requests.get('http://ws.audioscrobbler.com/2.0/?method=user.getrecenttracks&user=' + user_name + '&api_key=' + os.environ['LAST_FM_API'] + '&format=json') + resp = requests.get(api_root + '?method=user.getrecenttracks&user=' + user_name + '&api_key=' + os.environ['LAST_FM_API'] + '&format=json') + print resp.text + +def user_weekly_tracks(user_name): + resp = requests.get(api_root + '?method=user.getweeklytrackchart&user=' + user_name + '&api_key=' + os.environ['LAST_FM_API'] + '&format=json') print resp.text
Separate api root and add user_weekly_tracks method
py
diff --git a/aioxmpp/muc/service.py b/aioxmpp/muc/service.py index <HASH>..<HASH> 100644 --- a/aioxmpp/muc/service.py +++ b/aioxmpp/muc/service.py @@ -37,6 +37,7 @@ import aioxmpp.im.conversation import aioxmpp.im.dispatcher import aioxmpp.im.p2p import aioxmpp.im.service +import aioxmpp.utils from aioxmpp.utils import namespaces @@ -977,6 +978,26 @@ class Room(aioxmpp.im.conversation.AbstractConversation): aioxmpp.im.conversation.ConversationFeature.INVITE_DIRECT, } + muc_soft_timeout = aioxmpp.utils.proxy_property( + "_monitor", + "soft_timeout", + ) + + muc_hard_timeout = aioxmpp.utils.proxy_property( + "_monitor", + "hard_timeout", + ) + + muc_ping_timeout = aioxmpp.utils.proxy_property( + "_monitor", + "ping_timeout", + ) + + muc_ping_interval = aioxmpp.utils.proxy_property( + "_monitor", + "ping_interval", + ) + def _enter_active_state(self): self._state = RoomState.ACTIVE self._history_replay_occupants.clear()
muc: add proxy properties for timeout controls
py
diff --git a/law/parameter.py b/law/parameter.py index <HASH>..<HASH> 100644 --- a/law/parameter.py +++ b/law/parameter.py @@ -14,8 +14,8 @@ import luigi from law.notification import notify_mail -# globally make luigi's BoolParameter parsing explicit, https://github.com/spotify/luigi/pull/2427 -luigi.BoolParameter.parsing = "explicit" +# make luigi's BoolParameter parsing explicit globally, https://github.com/spotify/luigi/pull/2427 +luigi.BoolParameter.parsing = getattr(luigi.BoolParameter, "EXPLICIT_PARSING", "explicit") #: String value denoting an empty parameter.
Better way to set explicit bool parsing.
py
diff --git a/examples/mnist-autoencoder.py b/examples/mnist-autoencoder.py index <HASH>..<HASH> 100644 --- a/examples/mnist-autoencoder.py +++ b/examples/mnist-autoencoder.py @@ -27,6 +27,6 @@ read = lambda s: cPickle.load(gzip.open(s)) net = lmj.tnn.main( lmj.tnn.Autoencoder, - lambda *_: [(x, y.astype('int32')) for x, y in read(DATASET)]) + lambda *_: [(x, ) for x, _ in read(DATASET)]) net.save('net.pkl.gz')
Do not pass labels into autoencoder.
py
diff --git a/fretboard/settings.py b/fretboard/settings.py index <HASH>..<HASH> 100644 --- a/fretboard/settings.py +++ b/fretboard/settings.py @@ -1,7 +1,7 @@ from django.conf import settings from django.contrib.sites.models import Site -def current_site(): +def get_current_site(): """ Helper function to play nice with appconfig loading. """ @@ -9,5 +9,5 @@ def current_site(): PAGINATE_BY = getattr(settings, "PAGINATE_BY", 25) -FORUM_BASE_NAME = getattr(settings, 'FORUM_BASE_NAME', current_site.name) +FORUM_BASE_NAME = getattr(settings, 'FORUM_BASE_NAME', get_current_site().name) COMMENT_PLACEHOLDER = getattr(settings, "COMMENT_PLACEHOLDER", "Be nice.")
function. it's a function
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -69,7 +69,6 @@ setup( "test": [ "pytest>=5.2.2,<6.1.0", "pytest-asyncio>=0.10,<0.15", - "aiohttp~=3.6.2", "beautifulsoup4>=4.8.1,<4.10.0", "asgiref~=3.2.3", "black~=19.10b0",
Removed aiohttp from test dependencies It wasn't being used.
py
diff --git a/blockstack/blockstackd.py b/blockstack/blockstackd.py index <HASH>..<HASH> 100644 --- a/blockstack/blockstackd.py +++ b/blockstack/blockstackd.py @@ -2915,11 +2915,16 @@ def run_blockstackd(): if args.num_required: num_required = int(args.num_required) + print "Synchronizing from snapshot. This will take about 10-15 minutes." + rc = fast_sync_import(working_dir, url, public_keys=public_keys, num_required=num_required) if not rc: print 'fast_sync failed' sys.exit(1) + print "Node synchronized! Node state written to {}".format(working_dir) + print "Start your node with `blockstack-core start`" + print "Pass `--debug` for extra output." if __name__ == '__main__':
give helpful pre- and post-sync feedback
py
diff --git a/bdata/__init__.py b/bdata/__init__.py index <HASH>..<HASH> 100644 --- a/bdata/__init__.py +++ b/bdata/__init__.py @@ -1,2 +1,6 @@ from bdata.bdata import bdata from bdata import mudpy + +__all__ = ['bdata','mudpy'] +__version__ = '1.1.2' +__author__ = 'Derek Fujimoto'
added version number to __init__
py
diff --git a/vstutils/utils.py b/vstutils/utils.py index <HASH>..<HASH> 100644 --- a/vstutils/utils.py +++ b/vstutils/utils.py @@ -216,6 +216,9 @@ class Lock(KVExchanger): time.sleep(0.01) raise self.AcquireLockException(err_msg) + def get(self): # nocv + return self.cache.get(self.key) + def __enter__(self): return self
Fix bug with celery beat prolongation
py
diff --git a/tests/utils/helpers.py b/tests/utils/helpers.py index <HASH>..<HASH> 100644 --- a/tests/utils/helpers.py +++ b/tests/utils/helpers.py @@ -907,6 +907,15 @@ def get_fake_risk_job(risk_cfg, hazard_cfg, output_type="curve", hazard_job, "Test Hazard output", "gmf"), lt_realization=rlz) + # this is needed because the AggregateLossCurve is only generated if + # there are GmfSets; the problem is in + # risk/event_based/core.py:EventBasedRiskCalculator.post_process, line + # gmf_sets = hazard_output.gmfcollection.gmfset_set.all() + models.GmfSet.objects.create( + gmf_collection=hazard_output, + investigation_time=hc.investigation_time, + ses_ordinal=1) + for point in ["POINT(15.310 38.225)", "POINT(15.71 37.225)", "POINT(15.48 38.091)", "POINT(15.565 38.17)", "POINT(15.481 38.25)"]:
Fixed a very subtle initialization bug breaking risk/event_based/core_test.py
py
diff --git a/overpy/__init__.py b/overpy/__init__.py index <HASH>..<HASH> 100644 --- a/overpy/__init__.py +++ b/overpy/__init__.py @@ -1164,13 +1164,33 @@ class RelationMember(object): role = data.get("role") attributes = {} - ignore = ["type", "ref", "role"] + ignore = ["geometry", "type", "ref", "role"] for n, v in data.items(): if n in ignore: continue attributes[n] = v - return cls(attributes=attributes, ref=ref, role=role, result=result) + geometry = data.get("geometry") + if isinstance(geometry, list): + geometry_orig = geometry + geometry = [] + for v in geometry_orig: + geometry.append( + RelationWayGeometryValue( + lat=v.get("lat"), + lon=v.get("lon") + ) + ) + else: + geometry = None + + return cls( + attributes=attributes, + geometry=geometry, + ref=ref, + role=role, + result=result + ) @classmethod def from_xml(cls, child, result=None):
src - Parse relation member with geometry from json
py
diff --git a/fireplace/cards/blackrock/collectible.py b/fireplace/cards/blackrock/collectible.py index <HASH>..<HASH> 100644 --- a/fireplace/cards/blackrock/collectible.py +++ b/fireplace/cards/blackrock/collectible.py @@ -29,6 +29,20 @@ class BRM_009: return value - self.game.minionsKilledThisTurn +# Axe Flinger +class BRM_016: + events = [ + SELF_DAMAGE.on(Hit(ENEMY_HERO, 2)) + ] + + +# Dragon Egg +class BRM_022: + events = [ + SELF_DAMAGE.on(Summon(CONTROLLER, "BRM_022t")) + ] + + ## # Spells
Implement Axe Flinger and Dragon Egg
py
diff --git a/src/setup.py b/src/setup.py index <HASH>..<HASH> 100644 --- a/src/setup.py +++ b/src/setup.py @@ -25,7 +25,7 @@ setup( author_email = 'team@projexsoftware.com', maintainer = 'Projex Software', maintainer_email = 'team@projexsoftware.com', - description = '''''', + description = 'Bindings for the pyramid webframework and the ORB database ORM library.', license = 'LGPL', keywords = '', url = 'http://www.projexsoftware.com', @@ -35,4 +35,4 @@ setup( tests_require = REQUIREMENTS, long_description= README, classifiers=[], -) \ No newline at end of file +)
Update setup.py Adding description string to setup.py so that project is not listed as `UNKNOWN` in `pypi`
py
diff --git a/src/toil/job.py b/src/toil/job.py index <HASH>..<HASH> 100644 --- a/src/toil/job.py +++ b/src/toil/job.py @@ -802,7 +802,7 @@ class Job(object): memory=float(config.defaultMemory) if self.memory is None else self.memory, cores=float(config.defaultCores) if self.cores is None else self.cores, disk=float(config.defaultDisk) if self.disk is None else self.disk, - preemptable=float(config.defaultPreemptable) if self.preemptable is None else self.preemptable) + preemptable=config.defaultPreemptable if self.preemptable is None else self.preemptable) return requirements def _makeJobWrappers(self, jobWrapper, jobStore):
Fix: `preemptable` requirement wrongly cast to float
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -15,7 +15,7 @@ REQUIREMENTS = [ 'django-revproxy[diazo]>=0.9.5', # Async Signals - 'celery[redis]>=3.1', + 'celery[redis]>=3.1.2', ### Move out of colab (as plugins): @@ -42,7 +42,7 @@ EXCLUDE_FROM_PACKAGES = [] setup( name='colab', - version='1.11', + version='1.11.0', url='https://github.com/colab-community/colab', author='Sergio Oliveira', author_email='sergio@tracy.com.br',
Bump both Celery and Colab version
py