diff
stringlengths
139
3.65k
message
stringlengths
8
627
diff_languages
stringclasses
1 value
diff --git a/werkzeug/security.py b/werkzeug/security.py index <HASH>..<HASH> 100644 --- a/werkzeug/security.py +++ b/werkzeug/security.py @@ -202,8 +202,8 @@ def _hash_internal(method, salt, password): def generate_password_hash(password, method='pbkdf2:sha1', salt_length=8): - """Hash a password with the given method and salt with with a string of - the given length. The format of the string returned includes the method + """Hash a password with the given method and salt with a string of + the given length. The format of the string returned includes the method that was used so that :func:`check_password_hash` can check the hash. The format for the hashed string looks like this:: @@ -211,8 +211,8 @@ def generate_password_hash(password, method='pbkdf2:sha1', salt_length=8): method$salt$hash This method can **not** generate unsalted passwords but it is possible - to set the method to plain to enforce plaintext passwords. If a salt - is used, hmac is used internally to salt the password. + to set param method='plain' in order to enforce plaintext passwords. + If a salt is used, hmac is used internally to salt the password. If PBKDF2 is wanted it can be enabled by setting the method to ``pbkdf2:method:iterations`` where iterations is optional::
Update generate_password_hash Updating generate_password_hash method documentation. Hopefully this will be a bit clearer.
py
diff --git a/openquake/calculators/hazard/classical/__init__.py b/openquake/calculators/hazard/classical/__init__.py index <HASH>..<HASH> 100644 --- a/openquake/calculators/hazard/classical/__init__.py +++ b/openquake/calculators/hazard/classical/__init__.py @@ -64,6 +64,12 @@ key elements: * **Spectral Acceleration (SA) Period** - Optional; used only if the IMT is SA. * **Spectral Acceleration (SA) Damping** - Optional; used only if the IMT is SA. +* **Source Model Logic Tree Path (SMLT Path)** - The path taken through the + calculation's source model logic tree. Does not apply to statistical curves, + since these aggregate are computed over multiple logic tree realizations. +* **GSIM (Ground Shaking Intensity Model) Logic Tree Path (GSIMLT Path)** - The + path take through the calculation's GSIM logic tree. As with the SMLT Path, + this does not apply to statistical curves. For a given calculation, hazard curves are computed for each logic tree realization, each IMT/IML definition, and each geographical point of interest. @@ -82,6 +88,9 @@ where * ``P`` is the number of geographical points of interest * ``I`` is the number of IMT/IML definitions +Hazard curves are grouped by IMT and realization (1 group per IMT per +realization). Each group includes 1 curve for each point of interest. + Statistical Curves ------------------
calcs/hazard/classical/__init__: Added notes about SMLT and GSIMLT paths. Also added a brief explanation of hazard curve grouping.
py
diff --git a/docs/conf.py b/docs/conf.py index <HASH>..<HASH> 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,10 +1,7 @@ #!/usr/bin/env python3 # -*- coding: utf-8 -*- -import hgtools.managers - -# use hgtools to get the version -hg_mgr = hgtools.managers.RepoManager.get_first_valid_manager() +import setuptools_scm extensions = [ 'sphinx.ext.autodoc', @@ -15,7 +12,7 @@ project = 'pytest-runner' copyright = '2015 Jason R. Coombs' # The short X.Y version. -version = hg_mgr.get_current_version() +version = setuptools_scm.get_version() # The full version, including alpha/beta/rc tags. release = version
Use setuptools_scm in docs generation.
py
diff --git a/h11/_util.py b/h11/_util.py index <HASH>..<HASH> 100644 --- a/h11/_util.py +++ b/h11/_util.py @@ -1,4 +1,5 @@ import sys +import re __all__ = ["ProtocolError", "LocalProtocolError", "RemoteProtocolError", "validate", "make_sentinel", "bytesify"] @@ -78,12 +79,14 @@ class LocalProtocolError(ProtocolError): class RemoteProtocolError(ProtocolError): pass -# Equivalent to python 3.4's regex.fullmatch(data) -def _fullmatch(regex, data): # version specific: Python < 3.4 - match = regex.match(data) - if match and match.end() != len(data): - match = None - return match +try: + _fullmatch = type(re.compile('')).fullmatch +except AttributeError: + def _fullmatch(regex, data): # version specific: Python < 3.4 + match = regex.match(data) + if match and match.end() != len(data): + match = None + return match def validate(regex, data, msg="malformed data", msgargs=()): match = _fullmatch(regex, data)
Use regex.fullmatch when available (since Python <I>) It is faster than the hand-rolled version. Running python<I> benchmarks/benchmarks.py: Before: <I> requests/sec After: <I> requests/sec
py
diff --git a/src/hamster/lib/stuff.py b/src/hamster/lib/stuff.py index <HASH>..<HASH> 100644 --- a/src/hamster/lib/stuff.py +++ b/src/hamster/lib/stuff.py @@ -294,8 +294,8 @@ class Fact(object): activity, self.description = activity.split(",", 1) self.description = self.description.strip() - if "#" in self.description: - self.description, self.tags = self.description.split("#", 1) + if " #" in self.description: + self.description, self.tags = self.description.split(" #", 1) self.tags = [tag.strip(", ") for tag in self.tags.split("#") if tag.strip(", ")] if activity.find("@") > 0: @@ -315,8 +315,8 @@ class Fact(object): tags = [tag.strip() for tag in tags.split(",") if tag.strip()] # override implicit with explicit - self.category = category.replace("#", "").replace(",", "") or self.category or None - self.description = (description or "").replace("#", "") or self.description or None + self.category = category.replace(",", "") or self.category or None + self.description = (description or "").replace(" #", " ") or self.description or None self.tags = tags or self.tags or [] self.start_time = start_time or self.start_time or None self.end_time = end_time or self.end_time or None
be more careful on sanitizing - strip out only hashes that are preceded by a whitespace, don't strip out hashes out of categories. should fix Bug <I>
py
diff --git a/certvalidator/validate.py b/certvalidator/validate.py index <HASH>..<HASH> 100644 --- a/certvalidator/validate.py +++ b/certvalidator/validate.py @@ -1904,7 +1904,7 @@ class PolicyTreeRoot(): A generator yielding PolicyTreeNode objects """ - for child in self.children.copy(): + for child in list(self.children): if depth == 0: yield child else: @@ -1924,7 +1924,7 @@ class PolicyTreeRoot(): A generator yielding PolicyTreeNode objects """ - for child in self.children.copy(): + for child in list(self.children): if depth != 0: for grandchild in child.walk_up(depth - 1): yield grandchild
Fix Python 2 support for new certificate policies functionality
py
diff --git a/salt/modules/kmod.py b/salt/modules/kmod.py index <HASH>..<HASH> 100644 --- a/salt/modules/kmod.py +++ b/salt/modules/kmod.py @@ -81,6 +81,8 @@ def _set_persistent_module(mod): commented uncomment it. ''' conf = _get_modules_conf() + if not os.path.exists(conf): + __salt__['file.touch'](conf) mod_name = _strip_module_name(mod) if not mod_name or mod_name in mod_list(True) or mod_name not in available(): return set() @@ -184,12 +186,14 @@ def mod_list(only_persist=False): ''' mods = set() if only_persist: - with salt.utils.fopen(_get_modules_conf(), 'r') as modules_file: - for line in modules_file: - line = line.strip() - mod_name = _strip_module_name(line) - if not line.startswith('#') and mod_name: - mods.add(mod_name) + conf = _get_modules_conf() + if os.path.exists(conf): + with salt.utils.fopen(conf, 'r') as modules_file: + for line in modules_file: + line = line.strip() + mod_name = _strip_module_name(line) + if not line.startswith('#') and mod_name: + mods.add(mod_name) else: for mod in lsmod(): mods.add(mod['module'])
Do not fail reading/writing persistent modules when _get_modules_conf() file doesn't exist. For instance on Arch, /etc/modules-load.d/salt_managed.conf will not exist at first and trying to apply a state such as microcode: kmod.present: - persist: True will fail with the error: IOError: [Errno 2] No such file or directory: '/etc/modules-load.d/salt_managed.conf'
py
diff --git a/edtf/parser/tests.py b/edtf/parser/tests.py index <HASH>..<HASH> 100644 --- a/edtf/parser/tests.py +++ b/edtf/parser/tests.py @@ -247,8 +247,10 @@ class TestParsing(unittest.TestCase): self.assertEqual(f.lower_fuzzy().isoformat(), expected_lower_fuzzy) self.assertEqual(f.upper_fuzzy().isoformat(), expected_upper_fuzzy) except Exception as x: - print x - import pdb; pdb.set_trace() + # Write to stdout for manual debugging, I guess + sys.stdout.write(unicode(x)) + # Re-raise exception so unit tests work for non-manual usage + raise def test_comparisons(self): d1 = parse("1979-08~")
Fix parser unit test to work properly on exceptions Don't swallow exception with a `pdb` invocation that will not work in any setting except for manual test runs with no output capturing.
py
diff --git a/python/cmsis_svd/parser.py b/python/cmsis_svd/parser.py index <HASH>..<HASH> 100644 --- a/python/cmsis_svd/parser.py +++ b/python/cmsis_svd/parser.py @@ -71,7 +71,9 @@ class SVDParser(object): expand_arrays_of_registers = 0 @classmethod - def for_xml_file(cls, path): + def for_xml_file(cls, path, remove_reserved = 0, expand_arrays_of_registers = 0): + cls.remove_reserved = remove_reserved + cls.expand_arrays_of_registers = expand_arrays_of_registers return cls(ET.parse(path)) @classmethod
added flags to for_xml_file
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -3,7 +3,6 @@ from setuptools import setup, find_packages REQUIRES = [ - 'aiohttp', 'PyYAML' ]
removed dependency to aiohttp which is no longer needed
py
diff --git a/synapse/lib/remcycle.py b/synapse/lib/remcycle.py index <HASH>..<HASH> 100644 --- a/synapse/lib/remcycle.py +++ b/synapse/lib/remcycle.py @@ -340,8 +340,6 @@ class Hypnos(s_config.Config): *args, **kwargs): s_config.Config.__init__(self) - for optname, optconf in HYPNOS_BASE_DEFS: - self.addConfDef(optname, **optconf) # Runtime-settable options self.onConfOptSet(CACHE_ENABLED, self._onSetWebCache) self.onConfOptSet(CACHE_TIMEOUT, self._onSetWebCacheTimeout) @@ -430,6 +428,11 @@ class Hypnos(s_config.Config): # Stop the web cache self.web_cache.fini() + @staticmethod + @s_config.confdef() + def _getHyposBaseDefs(): + return HYPNOS_BASE_DEFS + def _onSetWebCache(self, valu): ''' Enable or disable caching of results from fireWebApi.
Use the confdef() decorator for loading configable options into remcycle.
py
diff --git a/zipline/data/us_equity_pricing.py b/zipline/data/us_equity_pricing.py index <HASH>..<HASH> 100644 --- a/zipline/data/us_equity_pricing.py +++ b/zipline/data/us_equity_pricing.py @@ -46,6 +46,7 @@ from pandas.tslib import iNaT from six import ( iteritems, viewkeys, + string_types, ) from zipline.data.session_bars import SessionBarReader @@ -856,7 +857,7 @@ class SQLiteAdjustmentWriter(object): overwrite=False): if isinstance(conn_or_path, sqlite3.Connection): self.conn = conn_or_path - elif isinstance(conn_or_path, str): + elif isinstance(conn_or_path, string_types): if overwrite: try: remove(conn_or_path)
BUG: Fixes zipline ingest with non-default bundle on python 2
py
diff --git a/source/rafcon/core/states/execution_state.py b/source/rafcon/core/states/execution_state.py index <HASH>..<HASH> 100644 --- a/source/rafcon/core/states/execution_state.py +++ b/source/rafcon/core/states/execution_state.py @@ -71,7 +71,8 @@ class ExecutionState(State): outcomes = {elem_id: copy(elem) for elem_id, elem in list(self._outcomes.items())} state = self.__class__(self.name, self.state_id, input_data_ports, output_data_ports, income, outcomes, None, safe_init=False) - state._script_text = deepcopy(self.script_text) + # use setter here! the acutal value, which is changed is self._script.script! + state.script_text = deepcopy(self.script_text) state._description = deepcopy(self.description) state._semantic_data = deepcopy(self.semantic_data) state._file_system_path = self.file_system_path
fix(execution_state): fix copy, use setter for script
py
diff --git a/pycanlib/__init__.py b/pycanlib/__init__.py index <HASH>..<HASH> 100644 --- a/pycanlib/__init__.py +++ b/pycanlib/__init__.py @@ -1,3 +1,6 @@ from CAN import Bus, BufferedReader, Message, MessageList from CAN import Log, Listener, TimestampMessage from CAN import MachineInfo, ChannelInfo + +class CANLIBError(Exception): + pass \ No newline at end of file
Add a generic exception we can throw instead of ctypes errors. (Don't actually throw them yet - more to keep compatibality with kvaser implementation)
py
diff --git a/tcex/tcex.py b/tcex/tcex.py index <HASH>..<HASH> 100644 --- a/tcex/tcex.py +++ b/tcex/tcex.py @@ -790,7 +790,7 @@ class TcEx(object): from .tcex_request import TcExRequest r = TcExRequest(self, session) - if self.default_args.tc_proxy_external: + if session is None and self.default_args.tc_proxy_external: self.log.info( 'Using proxy server for external request {}:{}.'.format( self.default_args.tc_proxy_host, self.default_args.tc_proxy_port
+ update for proxy issue with batch.
py
diff --git a/asv/runner.py b/asv/runner.py index <HASH>..<HASH> 100644 --- a/asv/runner.py +++ b/asv/runner.py @@ -187,13 +187,13 @@ def run_benchmarks(benchmarks, env, results=None, # Interleave benchmark runs, in setup_cache order def iter_run_items(): - for run_round in range(max_processes): + for run_round in range(max_processes, 0, -1): for setup_cache_key, benchmark_set in six.iteritems(benchmark_order): for name, benchmark in benchmark_set: processes = get_processes(benchmark) - if run_round >= processes: + if run_round > processes: continue - is_final = (run_round + 1 >= processes) + is_final = (run_round == 1) yield name, benchmark, setup_cache_key, is_final # Run benchmarks in order
runner: swap process run order so that results are printed in better order
py
diff --git a/pandas/util/testing.py b/pandas/util/testing.py index <HASH>..<HASH> 100644 --- a/pandas/util/testing.py +++ b/pandas/util/testing.py @@ -990,6 +990,12 @@ def assert_series_equal(left, right, check_dtype=True, Specify comparison precision. Only used when check_exact is False. 5 digits (False) or 3 digits (True) after decimal points are compared. If int, then specify the digits to compare. + + When comparing two numbers, if the first number has magnitude less + than 1e-5, we compare the two numbers directly and check whether + they are equivalent within the specified precision. Otherwise, we + compare the **ratio** of the second number to the first number and + check whether it is equivalent to 1 within the specified precision. check_names : bool, default True Whether to check the Series and Index names attribute. check_exact : bool, default False @@ -1131,6 +1137,12 @@ def assert_frame_equal(left, right, check_dtype=True, Specify comparison precision. Only used when check_exact is False. 5 digits (False) or 3 digits (True) after decimal points are compared. If int, then specify the digits to compare. + + When comparing two numbers, if the first number has magnitude less + than 1e-5, we compare the two numbers directly and check whether + they are equivalent within the specified precision. Otherwise, we + compare the **ratio** of the second number to the first number and + check whether it is equivalent to 1 within the specified precision. check_names : bool, default True Whether to check that the `names` attribute for both the `index` and `column` attributes of the DataFrame is identical, i.e.
Improve documentation for assert_frame|series_equal #<I> (#<I>)
py
diff --git a/wandb/apis/internal.py b/wandb/apis/internal.py index <HASH>..<HASH> 100644 --- a/wandb/apis/internal.py +++ b/wandb/apis/internal.py @@ -906,11 +906,23 @@ class Api(object): } } ''') + + # don't retry on validation errors + # TODO(jhr): generalize error handling routines + def no_retry_400(e): + if not isinstance(e, requests.HTTPError): + return True + if e.response.status_code != 400: + return True + body = json.loads(e.response.content) + raise UsageError(body['errors'][0]['message']) + response = self.gql(mutation, variable_values={ 'config': yaml.dump(config), 'description': config.get("description"), 'entityName': self.settings("entity"), - 'projectName': self.settings("project")}) + 'projectName': self.settings("project")}, + check_retry_fn=no_retry_400) return response['upsertSweep']['sweep']['name'] def file_current(self, fname, md5):
when creating sweeps, validation of sweep config will return a <I> on error. no need to retry.
py
diff --git a/epab/cmd/appveyor.py b/epab/cmd/appveyor.py index <HASH>..<HASH> 100644 --- a/epab/cmd/appveyor.py +++ b/epab/cmd/appveyor.py @@ -9,6 +9,7 @@ import click from epab import __version__ from epab.utils import _info, do, repo_get_latest_tag +from .test_runner import pytest from .release import release @@ -48,7 +49,7 @@ def appveyor(ctx: click.Context): do(ctx, ['pipenv', 'install', '-d', '.']) _info('Running tests') - do(ctx, ['pipenv', 'run', 'pytest', 'test']) + ctx.invoke(pytest) _info('Uploading coverage info') do(ctx, ['pip', 'install', '--upgrade', 'codacy-coverage'])
fix: run test suite from EPAB to generate coverage
py
diff --git a/src/saml2/server.py b/src/saml2/server.py index <HASH>..<HASH> 100644 --- a/src/saml2/server.py +++ b/src/saml2/server.py @@ -476,7 +476,7 @@ class Server(Entity): if not encrypt_assertion: if sign_assertion: assertion.signature = pre_signature_part(assertion.id, - self.sec.my_cert, 1, + self.sec.my_cert, 2, sign_alg=sign_alg, digest_alg=digest_alg) to_sign.append((class_name(assertion), assertion.id))
The ID of each Signature element must be unique If the assertion and response both are signed, both Signatures have an ID of `Signature1`. This creates invalid xml as xs:ID must be unique. This fixes the issue when integrating with onelogin's python3-saml client: Element '{<URL>
py
diff --git a/salt/modules/slsutil.py b/salt/modules/slsutil.py index <HASH>..<HASH> 100644 --- a/salt/modules/slsutil.py +++ b/salt/modules/slsutil.py @@ -53,6 +53,26 @@ def merge(obj_a, obj_b, strategy='smart', renderer='yaml', merge_lists=False): return salt.utils.dictupdate.merge(obj_a, obj_b, strategy, renderer, merge_lists) +def merge_all(lst, strategy='smart', renderer='yaml', merge_lists=False): + ''' + Merge a list of objects into each other in order + + CLI Example: + + .. code-block:: shell + + > salt '*' slsutil.merge_all '[{foo: Foo}, {foo: Bar}]' + local: {u'foo': u'Bar'} + ''' + + ret = {} + for obj in lst: + ret = salt.utils.dictupdate.merge( + ret, obj, strategy, renderer, merge_lists + ) + + return ret + def renderer(path=None, string=None, default_renderer='jinja|yaml', **kwargs): '''
Adding a merge_all function to slsutil The merge_all function merges a list of objects in order. This will make it easier to merge more than two objects.
py
diff --git a/satpy/readers/agri_l1.py b/satpy/readers/agri_l1.py index <HASH>..<HASH> 100644 --- a/satpy/readers/agri_l1.py +++ b/satpy/readers/agri_l1.py @@ -69,8 +69,7 @@ class HDF_AGRI_L1(HDF5FileHandler): data.attrs['units'] = ds_info['units'] ds_info['valid_range'] = data.attrs['valid_range'] return data - - elif calibration in ['reflectance', 'radiance']: + if calibration in ['reflectance', 'radiance']: logger.debug("Calibrating to reflectances") # using the corresponding SCALE and OFFSET cal_coef = 'CALIBRATION_COEF(SCALE+OFFSET)' @@ -90,7 +89,6 @@ class HDF_AGRI_L1(HDF5FileHandler): ds_info['valid_range'] = (data.attrs['valid_range'] * slope + offset) * 100 else: ds_info['valid_range'] = (data.attrs['valid_range'] * slope + offset) - elif calibration == 'brightness_temperature': logger.debug("Calibrating to brightness_temperature") # the value of dn is the index of brightness_temperature
Update AGRI reader to keep codefactor happy.
py
diff --git a/mapclassify.py b/mapclassify.py index <HASH>..<HASH> 100644 --- a/mapclassify.py +++ b/mapclassify.py @@ -766,6 +766,7 @@ class Map_Classifier(object): x = np.asarray(x).flatten() uptos = [np.where(value < self.bins)[0] for value in x] bins = [x.min() if x.size > 0 else len(self.bins)-1 for x in uptos] #bail upwards + bins = np.asrrray(bins) if len(bins) == 1: return bins[0] else:
find_bin should return the same type as yb
py
diff --git a/tools/get_system_info.py b/tools/get_system_info.py index <HASH>..<HASH> 100644 --- a/tools/get_system_info.py +++ b/tools/get_system_info.py @@ -35,14 +35,14 @@ PY2 = sys.version_info[0] == 2 def run_and_get_stdout(command, pipe_command=None): if not pipe_command: - p1 = subprocess.Popen(command, stdout=subprocess.PIPE) + p1 = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE) output = p1.communicate()[0] if not PY2: output = output.decode(encoding='UTF-8') return p1.returncode, output else: - p1 = subprocess.Popen(command, stdout=subprocess.PIPE) - p2 = subprocess.Popen(pipe_command, stdin=p1.stdout, stdout=subprocess.PIPE) + p1 = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + p2 = subprocess.Popen(pipe_command, stdin=p1.stdout, stdout=subprocess.PIPE, stderr=subprocess.PIPE) p1.stdout.close() output = p2.communicate()[0] if not PY2: @@ -104,3 +104,7 @@ if program_paths('kstat'): + + + +
Fixed issues with subprocess output being printed to console.
py
diff --git a/metpy/plots/_mpl.py b/metpy/plots/_mpl.py index <HASH>..<HASH> 100644 --- a/metpy/plots/_mpl.py +++ b/metpy/plots/_mpl.py @@ -2,6 +2,7 @@ # Distributed under the terms of the BSD 3-Clause License. # SPDX-License-Identifier: BSD-3-Clause """Functionality that we have upstreamed or will upstream into matplotlib.""" +from __future__ import division # See if we should monkey-patch Barbs for better pivot import matplotlib
BUG: Enforce new-style division. (Fixes #<I>) This was causing differences in tests between Python 2 and 3.
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -9,7 +9,7 @@ setup( url='http://github.com/LuminosoInsight/python-ftfy', platforms=["any"], description="Fixes some problems with Unicode text after the fact", - packages=['ftfy'], + packages=['ftfy', 'ftfy.bad_codecs'], package_data={'ftfy': ['char_classes.dat']}, classifiers=[ "Programming Language :: Python :: 2",
List 'ftfy.bad_codecs' as a package
py
diff --git a/airflow/models.py b/airflow/models.py index <HASH>..<HASH> 100644 --- a/airflow/models.py +++ b/airflow/models.py @@ -388,6 +388,8 @@ class Connection(Base): return hooks.JdbcHook(jdbc_conn_id=self.conn_id) elif self.conn_type == 'mssql': return hooks.MsSqlHook(mssql_conn_id=self.conn_id) + elif self.conn_type == 'oracle': + return hooks.OracleHook(oracle_conn_id=self.conn_id) except: return None
Update models.py add Oracle SQL support through the OracleHook
py
diff --git a/insteonplm/aldb.py b/insteonplm/aldb.py index <HASH>..<HASH> 100644 --- a/insteonplm/aldb.py +++ b/insteonplm/aldb.py @@ -83,7 +83,7 @@ class ALDB(object): return Device.create(plm, addr, cat, subcat, product_key) - def has_override(cls, addr): + def has_override(self, addr): if self._overrides.get(addr, None) is not None: return True else:
Removed classmethod decorator from has_override
py
diff --git a/src/nupic/algorithms/anomaly_likelihood.py b/src/nupic/algorithms/anomaly_likelihood.py index <HASH>..<HASH> 100644 --- a/src/nupic/algorithms/anomaly_likelihood.py +++ b/src/nupic/algorithms/anomaly_likelihood.py @@ -213,13 +213,6 @@ class AnomalyLikelihood(object): likelihood = 1.0 - likelihoods[0] - # Mitigate the impact of not updating the distribution at every iteration: - # if we have a very high anomaly likelihood, then we need it to be - # accurate, so force an update. (this should have minimal performance - # impact as it only occurs about 1% of the time) - if likelihood > 0.99: - self._distribution = None - # Before we exit update historical scores and iteration self._historicalScores.append(dataPoint) self._iteration += 1
Remove greedy reestimation of distribution Turns out this trick can hurt when you have anomalies early on. It does not seem to help in any situation. Tested and verified with NAB.
py
diff --git a/hamster/charting.py b/hamster/charting.py index <HASH>..<HASH> 100644 --- a/hamster/charting.py +++ b/hamster/charting.py @@ -120,8 +120,7 @@ class Integrator(object): if there is any action needed. returns velocity, which is synonym from delta. Use it to determine when animation is done (experiment to find value that fits you!""" - if self.targeting: - self.force += self.attraction * (self.target_value - self.current_value) + self.force += self.attraction * (self.target_value - self.current_value) self.accel = self.force / self.mass self.vel = (self.vel + self.accel) * self.damping
woops, one variable too much - it's getting late! svn path=/trunk/; revision=<I>
py
diff --git a/rinoh/structure.py b/rinoh/structure.py index <HASH>..<HASH> 100644 --- a/rinoh/structure.py +++ b/rinoh/structure.py @@ -142,17 +142,22 @@ class ListItemNumber(Paragraph): class ListItem(Flowable): def __init__(self, number, separator, flowables, style=None, parent=None): super().__init__(style=style, parent=parent) - tab_stop = TabStop(self.get_style('item_indent'), align=RIGHT) - marker_style = ParagraphStyle(base=style, tab_stops=[tab_stop]) - self.marker = ListItemNumber([Tab() + number + separator], - style=marker_style, parent=self) + self.number = number + self.separator = separator self.flowables = flowables def render(self, container, last_descender, state=None): if not state: try: maybe_container = MaybeContainer(container) - height, _ = self.marker.flow(maybe_container, last_descender) + tab_stop = TabStop(self.get_style('item_indent', + container.document), + align=RIGHT) + marker_style = ParagraphStyle(base=self.style, + tab_stops=[tab_stop]) + marker = ListItemNumber([Tab() + self.number + self.separator], + style=marker_style, parent=self) + height, _ = marker.flow(maybe_container, last_descender) except EndOfContainer: raise EndOfContainer(state) try:
Delay creating the ListItem marker until rendering (need Document)
py
diff --git a/angr/simos.py b/angr/simos.py index <HASH>..<HASH> 100644 --- a/angr/simos.py +++ b/angr/simos.py @@ -220,7 +220,8 @@ class SimOS(object): state = SimState(**kwargs) stack_end = state.arch.initial_sp - state.memory.mem._preapproved_stack = IRange(stack_end - stack_size, stack_end) + if o.ABSTRACT_MEMORY not in state.options: + state.memory.mem._preapproved_stack = IRange(stack_end - stack_size, stack_end) if o.INITIALIZE_ZERO_REGISTERS in state.options: for r in self.arch.registers: @@ -652,7 +653,8 @@ class SimCGC(SimOS): # Special stack base for CGC binaries to work with Shellphish CRS s.regs.sp = 0xbaaaaffc - s.memory.mem._preapproved_stack = IRange(0xbaaab000 - 1024*1024*8, 0xbaaab000) + if o.ABSTRACT_MEMORY not in s.options: + s.memory.mem._preapproved_stack = IRange(0xbaaab000 - 1024*1024*8, 0xbaaab000) # 'main' gets called with the magic page address as the first fast arg s.regs.ecx = 0x4347c000
Don't try to pre-approve memory during VSA
py
diff --git a/tools/gen.py b/tools/gen.py index <HASH>..<HASH> 100644 --- a/tools/gen.py +++ b/tools/gen.py @@ -10,8 +10,8 @@ from slimit.visitors import nodevisitor from slimit.visitors.ecmavisitor import ECMAVisitor from slimit import ast -aws_url = -"https://awsiamconsole.s3.amazonaws.com/iam/assets/js/bundles/policies.js" +aws_url = \ + "https://awsiamconsole.s3.amazonaws.com/iam/assets/js/bundles/policies.js" header = """\ # Copyright (c) 2012-2013, Mark Peek <mark@peek.org>
Fix indentation/line continuation problem introduced with style fix
py
diff --git a/satpy/resample.py b/satpy/resample.py index <HASH>..<HASH> 100644 --- a/satpy/resample.py +++ b/satpy/resample.py @@ -1014,7 +1014,7 @@ class NativeResampler(BaseResampler): raise ValueError("Expand factor must be a whole number") d_arr = da.repeat(d_arr, int(factor), axis=axis) return d_arr - elif all(x <= 1 for x in repeats.values()): + if all(x <= 1 for x in repeats.values()): # reduce y_size = 1. / repeats[0] x_size = 1. / repeats[1]
Remove unnecessary elif in resample.py
py
diff --git a/src/codemod.py b/src/codemod.py index <HASH>..<HASH> 100755 --- a/src/codemod.py +++ b/src/codemod.py @@ -98,9 +98,10 @@ def path_filter(extensions=None, exclude_paths=[]): if extensions: if not any(path.endswith('.' + extension) for extension in extensions): return False - for excluded in exclude_paths: - if path.startswith(excluded) or path.startswith('./' + excluded): - return False + if exclude_paths: + for excluded in exclude_paths: + if path.startswith(excluded) or path.startswith('./' + excluded): + return False return True return the_filter @@ -797,7 +798,8 @@ def _parse_command_line(): query_options['path_filter'] = ( path_filter(extensions=opts['--extensions'].split(',') \ if '--extensions' in opts else None, - exclude_paths=opts.get('--exclude_paths', '').split(','))) + exclude_paths=opts.get['--exclude_paths'].split(',') \ + if '--exclude_paths' in opts else None)) options = {} options['query'] = Query(**query_options)
Fix bug in my new exclude paths feature
py
diff --git a/libextract/prototypes/prototype.py b/libextract/prototypes/prototype.py index <HASH>..<HASH> 100644 --- a/libextract/prototypes/prototype.py +++ b/libextract/prototypes/prototype.py @@ -1,9 +1,7 @@ from functools import wraps from statscounter import stats -from ..formatters import table_json - - +from libextract.formatters import table_json def processes(*tags): tags = set(tag for tag in tags) @@ -53,14 +51,15 @@ def td_counts(node): def td_list_per_tr(node): return get_td(node) -@processes('table') +@processes('table', 'tbody') def convert_table(node): table = table_json(node) if not table: - from libextract.formatters import get_table_rows, chunks + mode = stats.mode(td_counts(node)) rows = [tds for tds in td_list_per_tr(node) if len(tds) == mode] - table = {str(col): [' '.join(row[col].text_content().split()) for row in rows] + table = {str(col): [' '.join(row[col].text_content().split()) + for row in rows] for col in range(mode)} return table
added tbody to table formatter's resolution
py
diff --git a/toytree/TreeStyle.py b/toytree/TreeStyle.py index <HASH>..<HASH> 100644 --- a/toytree/TreeStyle.py +++ b/toytree/TreeStyle.py @@ -100,6 +100,10 @@ STYLES = { "node_hover": False, "tip_labels": False, "scalebar": True, + "node_style": { + "stroke": "#262626", + "stroke-width": 1, + }, }, 'd': { @@ -111,7 +115,7 @@ STYLES = { 'stroke': COLORS[0], }, }, - + 'm': { 'edge_type': 'c', 'layout': 'r',
added stroke to 'c' style
py
diff --git a/flasgger/utils.py b/flasgger/utils.py index <HASH>..<HASH> 100644 --- a/flasgger/utils.py +++ b/flasgger/utils.py @@ -88,11 +88,11 @@ def get_specs(rules, ignore_verbs, optional_fields, sanitizer): klass = method.__dict__.get('view_class', None) if not is_mv and klass and hasattr(klass, 'verb'): - method = klass.__dict__.get('verb') + method = getattr(klass, 'verb', None) elif klass and hasattr(klass, 'dispatch_request'): - method = klass.__dict__.get('dispatch_request') + method = getattr(klass, 'dispatch_request', None) if method is None: # for MethodView - method = klass.__dict__.get(verb) + method = getattr(klass, verb, None) if method is None: if is_mv: # #76 Empty MethodViews
fix: should use getattr
py
diff --git a/ppb/engine.py b/ppb/engine.py index <HASH>..<HASH> 100644 --- a/ppb/engine.py +++ b/ppb/engine.py @@ -1,3 +1,4 @@ +from collections import defaultdict from collections import deque from contextlib import ExitStack from itertools import chain @@ -32,6 +33,7 @@ class GameEngine(Engine, EventMixin): # Engine State self.scenes = [] self.events = deque() + self.event_extensions = defaultdict(dict) self.running = False self.entered = False @@ -108,6 +110,8 @@ class GameEngine(Engine, EventMixin): def publish(self): event = self.events.popleft() event.scene = self.current_scene + for attr_name, attr_value in self.event_extensions[type(event)].items(): + setattr(event, attr_name, attr_value) for entity in chain((self,), self.systems, (self.current_scene,), self.current_scene): entity.__event__(event, self.signal) @@ -123,3 +127,6 @@ class GameEngine(Engine, EventMixin): def on_quit(self, quit_event: 'Quit', signal: Callable): #TODO: Look up syntax for Callable typing. self.running = False + + def register(self, event_type, attribute, value): + self.event_extensions[event_type][attribute] = value
Subsystems can now extend other system's events.
py
diff --git a/override_settings/__init__.py b/override_settings/__init__.py index <HASH>..<HASH> 100644 --- a/override_settings/__init__.py +++ b/override_settings/__init__.py @@ -2,6 +2,12 @@ from __future__ import with_statement from django.conf import settings, UserSettingsHolder from django.utils.functional import wraps +class DeletedSettingDescriptor(object): + def __get__(self, instance, owner): + raise AttributeError("attribute not set") + +SETTING_DELETED = DeletedSettingDescriptor() + # Backported from Django trunk (r16377) class override_settings(object): """
Add SETTING_DELETED to test for non-existent settings
py
diff --git a/inginious/frontend/pages/course_admin/settings.py b/inginious/frontend/pages/course_admin/settings.py index <HASH>..<HASH> 100644 --- a/inginious/frontend/pages/course_admin/settings.py +++ b/inginious/frontend/pages/course_admin/settings.py @@ -32,10 +32,10 @@ class CourseSettings(INGIniousAdminPage): if course_content['name'] == "": errors.append(_('Invalid name')) course_content['description'] = data['description'] - course_content['admins'] = list(map(str.strip, data['admins'].split(','))) + course_content['admins'] = list(map(str.strip, data['admins'].split(','))) if data['admins'].strip() else [] if not self.user_manager.user_is_superadmin() and self.user_manager.session_username() not in course_content['admins']: errors.append(_('You cannot remove yourself from the administrators of this course')) - course_content['tutors'] = list(map(str.strip, data['tutors'].split(','))) + course_content['tutors'] = list(map(str.strip, data['tutors'].split(','))) if data['tutors'].strip() else [] if len(course_content['tutors']) == 1 and course_content['tutors'][0].strip() == "": course_content['tutors'] = []
Fix (an orthogonal, preexisting) bug that added an admin/tutor with login "()" when no admin/tutor is given.
py
diff --git a/phonenumber_field/modelfields.py b/phonenumber_field/modelfields.py index <HASH>..<HASH> 100644 --- a/phonenumber_field/modelfields.py +++ b/phonenumber_field/modelfields.py @@ -60,7 +60,9 @@ class PhoneNumberField(models.Field): elif self.blank: return to_python(self.default) or '' - value = to_python(value) + if value != '': + value = to_python(value) + if isinstance(value, string_types): # it is an invalid phone number return value @@ -87,4 +89,4 @@ try: ), ], ["^phonenumber_field\.modelfields\.PhoneNumberField"]) except ImportError: - pass \ No newline at end of file + pass
quick and dirty fix for Django <I> migrations bug The function check if value can be None at the beginning but if it is an empty string to_python will return None and get_prep_value will try to execute value.as_e<I> with value as None type.
py
diff --git a/sos/plugins/monit.py b/sos/plugins/monit.py index <HASH>..<HASH> 100644 --- a/sos/plugins/monit.py +++ b/sos/plugins/monit.py @@ -23,7 +23,7 @@ class Monit(Plugin, RedHatPlugin): """Monit monitoring daemon """ packages = ('monit',) - profiles = ('system') + profiles = ('system',) plugin_name = 'monit' # Define configuration files
[monit] Correct the monit plugin profile list. Correct profiles = ('system',) Closes:#<I>
py
diff --git a/vcs/utils/diffs.py b/vcs/utils/diffs.py index <HASH>..<HASH> 100644 --- a/vcs/utils/diffs.py +++ b/vcs/utils/diffs.py @@ -137,17 +137,21 @@ class DiffProcessor(object): """ Extract the filename and revision hint from a line. """ + try: if line1.startswith('--- ') and line2.startswith('+++ '): l1 = line1[4:].split(None, 1) - old_filename = l1[0] if len(l1) >= 1 else None + old_filename = l1[0].lstrip('a/') if len(l1) >= 1 else None old_rev = l1[1] if len(l1) == 2 else 'old' - l2 = line1[4:].split(None, 1) - #new_filename = l2[0] if len(l2) >= 1 else None + l2 = line2[4:].split(None, 1) + new_filename = l2[0].lstrip('b/') if len(l1) >= 1 else None new_rev = l2[1] if len(l2) == 2 else 'new' - return old_filename, new_rev, old_rev + filename = old_filename if (old_filename != + 'dev/null') else new_filename + + return filename, new_rev, old_rev except (ValueError, IndexError): pass
fixed issue with diff filepaths, for new nodes there was /dev/null returned as filename
py
diff --git a/paypal/standard/ipn/admin.py b/paypal/standard/ipn/admin.py index <HASH>..<HASH> 100644 --- a/paypal/standard/ipn/admin.py +++ b/paypal/standard/ipn/admin.py @@ -67,6 +67,16 @@ class PayPalIPNAdmin(admin.ModelAdmin): "next_payment_date" ] }), + ("Subscription", { + "description": "Information about recurring Subscptions.", + "classes": ("collapse",), + "fields": [ + "subscr_date", "subscr_effective", "period1", "period2", + "period3", "amount1", "amount2", "amount3", "mc_amount1", + "mc_amount2", "mc_amount3", "recurring", "reattempt", + "retry_at", "recur_times", "username", "password", "subscr_id" + ] + }), ("Admin", { "description": "Additional Info.", "classes": ('collapse',),
Display the subscription related fields in the admin
py
diff --git a/saltcloud/clouds/joyent.py b/saltcloud/clouds/joyent.py index <HASH>..<HASH> 100644 --- a/saltcloud/clouds/joyent.py +++ b/saltcloud/clouds/joyent.py @@ -18,8 +18,6 @@ Using the old cloud configuration syntax, it requires that the ``username`` and # the Datacenter location associated with the new VMS JOYENT.location: us-east-1 - - Using the new format, set up the cloud configuration at ``/etc/salt/cloud.providers`` or ``/etc/salt/cloud.providers.d/joyent.conf``: @@ -151,7 +149,6 @@ def create(vm_): log.info('Creating Cloud VM {0} in {1}'.format(vm_['name'],vm_['location'])) saltcloud.utils.check_name(vm_['name'], 'a-zA-Z0-9-') - conn = get_conn() kwargs = { 'name': vm_['name'], 'image': get_image(conn, vm_), @@ -248,7 +245,6 @@ def stop(name, call=None): 'This action must be called with -a or --action.' ) - conn = get_conn() node = get_node(conn, name) try: data = conn.ex_stop_node(node=node)
#<I> - Added support for joyent locations added location to get_conn method moved get_conn call to __virtual__() added JOYENT_LOCATIONS and get_location and avail_locations methods
py
diff --git a/tests/test_container.py b/tests/test_container.py index <HASH>..<HASH> 100644 --- a/tests/test_container.py +++ b/tests/test_container.py @@ -56,6 +56,10 @@ class ContainerTest(TestCase): newCfg = Container.fromRockerConfig("abc", dict(cfg)).toRockerFile() + # sort links (we don't care about their order and Container might have reordered them) + cfg['links'].sort() + newCfg['links'].sort() + self.assertEqual(cfg, newCfg) finally: Container._mkdirs = originalMkdirs
fixed a link sorting issue in test_container.py
py
diff --git a/peri/initializers.py b/peri/initializers.py index <HASH>..<HASH> 100644 --- a/peri/initializers.py +++ b/peri/initializers.py @@ -86,7 +86,7 @@ def local_max_featuring(im, radius=10, smooth=4, masscut=None): pos = np.array(nd.measurements.center_of_mass(e==g, lbl, ind)) if masscut is not None: m = nd.convolve(im, footprint, mode='reflect') - mass = np.array(map(lambda x: m[x[0],x[1],x[2]], pos)) + mass = np.array(map(lambda x: m[x[0],x[1],x[2]], pos.astype('int'))) good = mass > masscut return pos[good].copy(), e, mass[good].copy() else:
VisibleDeprecationWarning in peri.initializers.local_max_featuring.
py
diff --git a/examples/bme280_simpletest_pico.py b/examples/bme280_simpletest_pico.py index <HASH>..<HASH> 100644 --- a/examples/bme280_simpletest_pico.py +++ b/examples/bme280_simpletest_pico.py @@ -7,7 +7,7 @@ import busio import adafruit_bme280 # Create sensor object, using the board's default I2C bus. -i2c = busio.I2C(board.GP1, board.GP0) # SCL, SDA +i2c = busio.I2C(board.GP1, board.GP0) # SCL, SDA bme280 = adafruit_bme280.Adafruit_BME280_I2C(i2c) # OR create sensor object, using the board's default SPI bus.
Re-ran pre-commit hooks
py
diff --git a/astrobase/checkplot.py b/astrobase/checkplot.py index <HASH>..<HASH> 100644 --- a/astrobase/checkplot.py +++ b/astrobase/checkplot.py @@ -2041,6 +2041,13 @@ def checkplot_dict(lspinfolist, # errs, but should provide enough uniqueness otherwise (across different # times/mags array inputs). this is all done so we can still save checkplots # correctly to pickles after reviewing them using checkplotserver + + # try again to get the right objectid + if (objectinfo and isinstance(objectinfo, dict) and + 'objectid' in objectinfo and objectinfo['objectid']): + checkplotdict['objectid'] = objectinfo['objectid'] + + # if this doesn't work, generate a random one if checkplotdict['objectid'] is None: try: objuuid = hashlib.sha512(times[5:10].tostring() +
checkplot: try hard to get an objectid out of the input kwargs
py
diff --git a/albumentations/augmentations/functional.py b/albumentations/augmentations/functional.py index <HASH>..<HASH> 100644 --- a/albumentations/augmentations/functional.py +++ b/albumentations/augmentations/functional.py @@ -1047,8 +1047,8 @@ def channel_dropout(img, channels_to_drop, fill_value=0): def gamma_transform(img, gamma): if img.dtype == np.uint8: invGamma = 1.0 / gamma - table = np.array([((i / 255.0) ** invGamma) * 255 for i in np.arange(0, 256)]).astype("uint8") - img = cv2.LUT(img, table) + table = (np.arange(0, 256.0 / 255, 1.0 / 255) ** invGamma) * 255 + img = cv2.LUT(img, table.astype(np.uint8)) else: img = np.power(img, gamma)
gamma_transform optimization (#<I>) * gamma_transform optimization * gamma_transform removed division
py
diff --git a/python/ccxt/async/base/exchange.py b/python/ccxt/async/base/exchange.py index <HASH>..<HASH> 100644 --- a/python/ccxt/async/base/exchange.py +++ b/python/ccxt/async/base/exchange.py @@ -61,7 +61,7 @@ class Exchange(BaseExchange): }, self.tokenBucket)) def __del__(self): - self.asyncio_loop.run_until_complete(self.session.close()) + self.asyncio_loop.ensure_future(self.session.close()) async def wait_for_token(self): while self.rateLimitTokens <= 1:
exchange.py run_until_complete → ensure_future fix #<I>
py
diff --git a/flasgger/utils.py b/flasgger/utils.py index <HASH>..<HASH> 100644 --- a/flasgger/utils.py +++ b/flasgger/utils.py @@ -72,7 +72,7 @@ def swag_from(specs=None, filetype=None, endpoint=None, methods=None, swag_paths = getattr(function, 'swag_paths', None) validate_args = { 'filepath': swag_path or swag_paths, - 'root': function.root_path + 'root': getattr(function, 'root_path', None) } if isinstance(specs, dict): set_from_specs_dict(function)
Attempt to resolve #<I>
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -15,7 +15,8 @@ setup(name = 'synergy_odm', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', - 'Programming Language :: Python', + 'Programming Language :: Python :: 2.7', + 'Programming Language :: Python :: 3', 'Topic :: Software Development :: Libraries', ], requires=[]
- marking both Python <I> and 3 as supported
py
diff --git a/paramiko/packet.py b/paramiko/packet.py index <HASH>..<HASH> 100644 --- a/paramiko/packet.py +++ b/paramiko/packet.py @@ -167,7 +167,7 @@ class Packetizer (object): self.__keepalive_callback = callback self.__keepalive_last = time.time() - def read_all(self, n): + def read_all(self, n, check_rekey=False): """ Read as close to N bytes as possible, blocking as long as necessary. @@ -191,7 +191,7 @@ class Packetizer (object): except socket.timeout: if self.__closed: raise EOFError() - if self.__need_rekey: + if check_rekey and (len(out) == 0) and self.__need_rekey: raise NeedRekeyException() self._check_keepalive() return out @@ -278,7 +278,7 @@ class Packetizer (object): @raise SSHException: if the packet is mangled @raise NeedRekeyException: if the transport should rekey """ - header = self.read_all(self.__block_size_in) + header = self.read_all(self.__block_size_in, check_rekey=True) if self.__block_engine_in != None: header = self.__block_engine_in.decrypt(header) if self.__dump_packets:
[project @ Arch-1:<EMAIL><I>-master-shake%paramiko--dev--1--patch-<I>] copy from jaramiko: only check for rekey at the beginning of a packet
py
diff --git a/wagtailnews/views/editor.py b/wagtailnews/views/editor.py index <HASH>..<HASH> 100644 --- a/wagtailnews/views/editor.py +++ b/wagtailnews/views/editor.py @@ -16,7 +16,7 @@ from ..models import get_newsindex_content_types def get_newsitem_edit_handler(NewsItem): panels = extract_panel_definitions_from_model_class( NewsItem, exclude=['newsindex']) - EditHandler = ObjectList(panels) + EditHandler = ObjectList(panels).bind_to_model(NewsItem) return EditHandler get_newsitem_edit_handler = memoize(get_newsitem_edit_handler, {}, 1)
Changed edit_handler to support wagtail <I>b1
py
diff --git a/examples/scripts/get-managed-sans.py b/examples/scripts/get-managed-sans.py index <HASH>..<HASH> 100755 --- a/examples/scripts/get-managed-sans.py +++ b/examples/scripts/get-managed-sans.py @@ -21,8 +21,9 @@ # THE SOFTWARE. ### import sys +import re if sys.version_info < (3, 2): - raise Exception("Must use Python 3.2 or later") + raise Exception('Must use Python 3.2 or later') import hpOneView as hpov from pprint import pprint @@ -33,7 +34,7 @@ def acceptEULA(con): con.get_eula_status() try: if con.get_eula_status() is True: - print("EULA display needed") + print('EULA display needed') con.set_eula('no') except Exception as e: print('EXCEPTION:') @@ -48,9 +49,9 @@ def login(con, credential): print('Login failed') -def getmanagedsans(fcs): - ret = fcs.get_managed_sans() - pprint(ret) +def getsans(fcs): + sans = fcs.get_managed_sans() + pprint(sans) def main(): @@ -81,8 +82,7 @@ def main(): login(con, credential) acceptEULA(con) - getmanagedsans(fcs) - + getsans(fcs) if __name__ == '__main__': import sys
New exmaple script get-managed-sans.py
py
diff --git a/steamspypi/api.py b/steamspypi/api.py index <HASH>..<HASH> 100644 --- a/steamspypi/api.py +++ b/steamspypi/api.py @@ -1,6 +1,18 @@ import requests +def fix_request(data_request): + if 'appid' in data_request: + # Make sure appid are strings, not integers. + data_request['appid'] = str(data_request['appid']) + + if 'genre' in data_request: + # Make sure genres are submitted with space characters, not with '+' as shown in SteamSpy API documentation. + data_request['genre'] = data_request['genre'].replace('+', ' ') + + return data_request + + def check_request(data_request): is_request_correct = True @@ -22,6 +34,8 @@ def check_request(data_request): def download(data_request): is_request_correct = check_request(data_request) + data_request = fix_request(data_request) + if is_request_correct: response = requests.get(get_steamspy_api_url(), params=data_request) data = response.json()
Small fixes to data request, in case the user follows SteamSpy API doc
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -6,11 +6,20 @@ try: except ImportError: from distutils.core import setup +here = path.abspath(path.dirname(__file__)) + def readme(): - here = path.abspath(path.dirname(__file__)) - with open(path.join(here, 'README.md'), encoding='utf-8') as f: - return f.read() + try: + import pypandoc + long_description = pypandoc.convert("README.md", "rst") + long_description = long_description.replace("\r", "") + except (OSError, ImportError): + print("Pandoc not found. Long_description conversion failure.") + with open(path.join(here, 'README.md'), encoding='utf-8') as f: + long_description = f.read() + + return long_description setup( name='craft-ai',
Added call to pandoc to convert README.md to rst * PyPI only supports reStructuredTxt syntax, not Markdown. Now long description on PyPI should have a correct formatting
py
diff --git a/webhooks/senders/base.py b/webhooks/senders/base.py index <HASH>..<HASH> 100644 --- a/webhooks/senders/base.py +++ b/webhooks/senders/base.py @@ -87,10 +87,12 @@ class Senderable(object): """ Dump the payload to JSON """ return json.dumps(self.payload, cls=StandardJSONEncoder) - def notify(self, message): - print(message) + def notify_debug(self, message): logging.debug(message) + def notify(self, message): + logging.info(message) + def send(self): """ Wrapper around _send method for use with asynchronous coding. """ return self._send() @@ -141,6 +143,9 @@ class Senderable(object): self.notify("Attempt {}: Could not send webhook {}".format( self.attempt, self.hash_value) ) + self.notify_debug("Webhook {} body: {}".format( + self.hash_value, self.payload) + ) # Wait a bit before the next attempt sleep(wait)
NH - adding debug detail.
py
diff --git a/django_q/monitor.py b/django_q/monitor.py index <HASH>..<HASH> 100644 --- a/django_q/monitor.py +++ b/django_q/monitor.py @@ -9,13 +9,19 @@ from django.utils import timezone from django.utils.translation import ugettext as _ # local -from .conf import Conf, redis_client +from .conf import Conf, redis_client, logger from .tasks import SignedPackage def monitor(run_once=False): term = Terminal() r = redis_client + try: + redis_client.ping() + except Exception as e: + print(term.red('Can not connect to Redis server.')) + logger.exception(e) + return with term.fullscreen(), term.hidden_cursor(), term.cbreak(): val = None start_width = int(term.width / 8)
Monitor starts with pinging Redis.
py
diff --git a/telethon/network/connection/connection.py b/telethon/network/connection/connection.py index <HASH>..<HASH> 100644 --- a/telethon/network/connection/connection.py +++ b/telethon/network/connection/connection.py @@ -65,7 +65,7 @@ class Connection(abc.ABC): else: s.set_proxy(*self._proxy) - s.setblocking(False) + s.settimeout(timeout) await asyncio.wait_for( self._loop.sock_connect(s, address), timeout=timeout, @@ -78,14 +78,14 @@ class Connection(abc.ABC): 'without the SSL module being available' ) - s.settimeout(timeout) s = ssl_mod.wrap_socket( s, do_handshake_on_connect=True, ssl_version=ssl_mod.PROTOCOL_SSLv23, ciphers='ADH-AES256-SHA' ) - s.setblocking(False) + + s.setblocking(False) self._reader, self._writer = \ await asyncio.open_connection(sock=s, loop=self._loop)
Switch to blocking connect when using proxy (#<I>) Until a better fix is found, this should help proxy users.
py
diff --git a/pysat/tests/test_registry.py b/pysat/tests/test_registry.py index <HASH>..<HASH> 100644 --- a/pysat/tests/test_registry.py +++ b/pysat/tests/test_registry.py @@ -186,7 +186,7 @@ class TestRegistration(): # registered has been removed for platform, name in zip(self.platforms, self.platform_names): registry.remove(platform, name) - except: + except Exception: # ok if a module has already been removed pass # ensure things are clean, all have been removed
STY: Covered bare exception
py
diff --git a/afkak/test/test_brokerclient.py b/afkak/test/test_brokerclient.py index <HASH>..<HASH> 100644 --- a/afkak/test/test_brokerclient.py +++ b/afkak/test/test_brokerclient.py @@ -365,6 +365,12 @@ class KafkaBrokerClientTestCase(unittest.TestCase): c.connector.factory = c # MemoryReactor doesn't make this connection. def test_delay_reset(self): + """test_delay_reset + Test that reconnect delay is handled correctly: + 1) That initializer values are respected + 2) That delay maximum is respected + 3) That delay is reset to initial delay on successful connection + """ init_delay = last_delay = 0.025 max_delay = 14 reactor = MemoryReactorClock()
Add docstring comment to new test as requested by review.
py
diff --git a/jsonschema/validators.py b/jsonschema/validators.py index <HASH>..<HASH> 100644 --- a/jsonschema/validators.py +++ b/jsonschema/validators.py @@ -753,7 +753,8 @@ class RefResolver(object): result = requests.get(uri).json else: # Otherwise, pass off to urllib and assume utf-8 - result = json.loads(urlopen(uri).read().decode("utf-8")) + with urlopen(uri) as url: + result = json.loads(url.read().decode("utf-8")) if self.cache_remote: self.store[uri] = result
Fix unclosed resource in validators
py
diff --git a/macroeco/main/main.py b/macroeco/main/main.py index <HASH>..<HASH> 100644 --- a/macroeco/main/main.py +++ b/macroeco/main/main.py @@ -475,7 +475,7 @@ def _write_test_statistics(spid, models, options, fit_results): for model in models: fit_result = fit_results[spid][model] - fit_stats = str(fit_result[3])[1:-1] + fit_stats = str(fit_result[3])[:] f.write("%s,%s\n" % (model, fit_stats)) f.close()
Main was cutting off the first and last AIC digit
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -1,7 +1,7 @@ from setuptools import setup import os, platform -version = "1.8.11" +version = "1.8.12" def package_files(directory): paths = [] @@ -59,12 +59,12 @@ on how to use MAVProxy.''', author='Andrew Tridgell', author_email='andrew@tridgell.net', classifiers=[ - 'Development Status :: 4 - Beta', + 'Development Status :: 5 - Production/Stable', 'Environment :: Console', 'Intended Audience :: Science/Research', 'License :: OSI Approved :: GNU General Public License v3 (GPLv3)', 'Operating System :: OS Independent', - 'Programming Language :: Python :: 2.7', + 'Programming Language :: Python', 'Topic :: Scientific/Engineering'], license='GPLv3', packages=['MAVProxy',
mark mavproxy releases as stable
py
diff --git a/src/ossos-pipeline/ossos/storage.py b/src/ossos-pipeline/ossos/storage.py index <HASH>..<HASH> 100644 --- a/src/ossos-pipeline/ossos/storage.py +++ b/src/ossos-pipeline/ossos/storage.py @@ -20,7 +20,7 @@ CERTFILE=os.path.join(os.getenv('HOME'), DBIMAGES='vos:OSSOS/dbimages' MEASURE3='vos:OSSOS/measure3' -DATA_WEB_SERVICE='https://www.cadc-ccda.hia-iha.nrc-cnrc.gc.ca/data/pub/' +DATA_WEB_SERVICE='https://www.canfar.phys.uvic.ca/data/pub/' OSSOS_TAG_URI_BASE='ivo://canfar.uvic.ca/ossos' OBJECT_COUNT = "object_count"
MAJOR: Data web service is changing location.
py
diff --git a/mechanicalsoup/browser.py b/mechanicalsoup/browser.py index <HASH>..<HASH> 100644 --- a/mechanicalsoup/browser.py +++ b/mechanicalsoup/browser.py @@ -75,8 +75,8 @@ class Browser: session handles cookies automatically without calling this function, only use this when default cookie handling is insufficient. - :param cookiejar: Any `cookielib.CookieJar - <https://docs.python.org/2/library/cookielib.html#cookielib.CookieJar>`__ + :param cookiejar: Any `http.cookiejar.CookieJar + <https://docs.python.org/3/library/http.cookiejar.html#http.cookiejar.CookieJar>`__ compatible object. """ self.session.cookies = cookiejar
Update reference of CookieJar to Python 3 The module namespace where CookieJar exists has been changed in Python 3 (it is in `http.cookiejar` instead of `cookielib`). Update the documentation accordingly.
py
diff --git a/gpustat.py b/gpustat.py index <HASH>..<HASH> 100755 --- a/gpustat.py +++ b/gpustat.py @@ -255,7 +255,11 @@ class GPUStatCollection(object): ps_process = psutil.Process(pid=pid) process['username'] = ps_process.username() # cmdline returns full path; as in `ps -o comm`, get short cmdnames. - process['command'] = os.path.basename(ps_process.cmdline()[0]) + _cmdline = ps_process.cmdline() + if not _cmdline: # sometimes, zombie or unknown (e.g. [kworker/8:2H]) + process['command'] = '?' + else: + process['command'] = os.path.basename(_cmdline[0]) # Bytes to MBytes process['gpu_memory_usage'] = int(nv_process.usedGpuMemory / 1024 / 1024) process['pid'] = nv_process.pid @@ -441,7 +445,7 @@ def print_gpustat(json=False, debug=False, **args): ''' try: gpu_stats = GPUStatCollection.new_query() - except Exception: + except Exception as e: sys.stderr.write('Error on querying NVIDIA devices. Use --debug flag for details\n') if debug: import traceback
Fix a bug (IndexError) where commands might be empty It sometimes happens that some zombie processes or unknown processes with no command-line information are retrieved. We should avoid an error thrown in such a case.
py
diff --git a/docs/conf.py b/docs/conf.py index <HASH>..<HASH> 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -14,17 +14,16 @@ # All configuration values have a default; values that are commented out # serve to show the default -# Allow Sphinx to find the setup command that is imported below. +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. + +# Allow Sphinx to find the setup command that is imported below, as referenced above. import sys, os sys.path.append(os.path.abspath('..')) import setup as setup_script -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -#sys.path.append(os.path.abspath('.')) - # -- General configuration ----------------------------------------------------- # Add any Sphinx extension module names here, as strings. They can be extensions
Update conf.py to place it below original docstring.
py
diff --git a/node2vec/node2vec.py b/node2vec/node2vec.py index <HASH>..<HASH> 100644 --- a/node2vec/node2vec.py +++ b/node2vec/node2vec.py @@ -5,7 +5,7 @@ import numpy as np import networkx as nx import gensim from joblib import Parallel, delayed -from tqdm import tqdm +from tqdm.auto import tqdm from .parallel import parallel_generate_walks
Added TQDM wrapper for showing the proper loading bar within Jupyter Notebooks.
py
diff --git a/pydriller/domain/commit.py b/pydriller/domain/commit.py index <HASH>..<HASH> 100644 --- a/pydriller/domain/commit.py +++ b/pydriller/domain/commit.py @@ -574,7 +574,7 @@ class Commit: @property def lines(self) -> int: """ - Return the deletion lines of the commit. + Return the total lines of the commit. :return: int insertion + deletion lines """ @@ -583,9 +583,9 @@ class Commit: @property def files(self) -> int: """ - Return the deletion lines of the commit. + Return the modified files of the commit. - :return: str insertion + deletion lines + :return: int modified files number """ return len(self._c_object.stats.files)
Update commit.py fix comments error by copy-paste
py
diff --git a/torf/_torrent.py b/torf/_torrent.py index <HASH>..<HASH> 100644 --- a/torf/_torrent.py +++ b/torf/_torrent.py @@ -147,6 +147,8 @@ class Torrent(): :raises PathEmptyError: if :attr:`path` contains no data (i.e. empty file, empty directory or directory containing only empty files) + :raises ReadError: if :attr:`path` is a directory and not readable + :raises PathNotFoundError: if :attr:`path` doesn't exist """ return getattr(self, '_path', None) @path.setter
Torrent.path: Add more exceptions to docstring
py
diff --git a/master/buildbot/status/web/hooks/bitbucket.py b/master/buildbot/status/web/hooks/bitbucket.py index <HASH>..<HASH> 100644 --- a/master/buildbot/status/web/hooks/bitbucket.py +++ b/master/buildbot/status/web/hooks/bitbucket.py @@ -38,10 +38,8 @@ def getChanges(request, options=None): payload = json.loads(request.args['payload'][0]) repo_url = '%s%s' % ( payload['canon_url'], payload['repository']['absolute_url']) - project = ( - request.args.get('project') if request.args.get('project') is not None - else '' - ) + raw_project = request.args.get('project', None) + project = raw_project[0] if raw_project is not None else '' changes = [] for commit in payload['commits']:
Convert BitBucket hook's project from a list to a string
py
diff --git a/pygleif/api/data.py b/pygleif/api/data.py index <HASH>..<HASH> 100644 --- a/pygleif/api/data.py +++ b/pygleif/api/data.py @@ -90,11 +90,11 @@ class Entity(BaseModel): jurisdiction: str = Field(alias="jurisdiction") legal_address: Address = Field(alias="legalAddress") legal_form: LegalForm = Field(alias="legalForm") - legal_name: Name = Field("legalName") + legal_name: Name = Field(alias="legalName") other_addresses: List[Any] = Field(alias="otherAddresses") - other_names: Name = Field("otherNames") + other_names: Name = Field(alias="otherNames") registered_as: str = Field(alias="registeredAs") - registered_at: RegisteredAt = Field("registeredAt") + registered_at: RegisteredAt = Field(alias="registeredAt") status: str = Field(alias="status") successor_entities: List[Any] = Field(alias="successorEntities") sub_category: Optional[str] = Field(alias="subCategory")
Fix missing alias (#<I>)
py
diff --git a/pandas/tests/plotting/test_datetimelike.py b/pandas/tests/plotting/test_datetimelike.py index <HASH>..<HASH> 100644 --- a/pandas/tests/plotting/test_datetimelike.py +++ b/pandas/tests/plotting/test_datetimelike.py @@ -1,5 +1,5 @@ """ Test cases for time series specific (freq conversion, etc) """ - +import sys from datetime import datetime, timedelta, date, time import pickle @@ -1557,7 +1557,10 @@ def _check_plot_works(f, freq=None, series=None, *args, **kwargs): # GH18439 # this is supported only in Python 3 pickle since # pickle in Python2 doesn't support instancemethod pickling - if PY3: + # TODO(statsmodels 0.10.0): Remove the statsmodels check + # https://github.com/pandas-dev/pandas/issues/24088 + # https://github.com/statsmodels/statsmodels/issues/4772 + if PY3 and 'statsmodels' not in sys.modules: with ensure_clean(return_filelike=True) as path: pickle.dump(fig, path) finally:
TST: Work around statsmodels bug (#<I>)
py
diff --git a/ledgerautosync/ledgerwrap.py b/ledgerautosync/ledgerwrap.py index <HASH>..<HASH> 100644 --- a/ledgerautosync/ledgerwrap.py +++ b/ledgerautosync/ledgerwrap.py @@ -174,6 +174,15 @@ class Ledger(MetaLedger): for line in r: self.add_payee(line[2], line[3]) + def get_autosync_payee(self, payee, account): + q = [account, "--last", "1", "--format", "%(quoted(payee))\n", + "--limit", 'tag("AutosyncPayee") == "%s"' % (payee)] + r = self.run(q) + try: + return next(r)[0] + except StopIteration: + return payee + class LedgerPython(MetaLedger): @staticmethod
Add function to get correct payee (plain Ledger)
py
diff --git a/backtrader/indicator.py b/backtrader/indicator.py index <HASH>..<HASH> 100644 --- a/backtrader/indicator.py +++ b/backtrader/indicator.py @@ -29,6 +29,19 @@ from .lineiterator import LineIterator, IndicatorBase class MetaIndicator(IndicatorBase.__class__): + _indcol = dict() + + def __init__(cls, name, bases, dct): + ''' + Class has already been created ... register subclasses + ''' + # Initialize the class + super(MetaIndicator, cls).__init__(name, bases, dct) + + if not cls.aliased and \ + name != 'Indicator' and not name.startswith('_'): + cls._indcol[name] = cls + def donew(cls, *args, **kwargs): if IndicatorBase.next == cls.next:
indicators autoregister with Indicator (for things like autodocumentation)
py
diff --git a/notario/__init__.py b/notario/__init__.py index <HASH>..<HASH> 100644 --- a/notario/__init__.py +++ b/notario/__init__.py @@ -1,4 +1,4 @@ from notario.engine import validate from notario.utils import ensure -__version__ = '0.0.10' +__version__ = '0.0.11'
bump the version to <I>
py
diff --git a/yowsup/common/tools.py b/yowsup/common/tools.py index <HASH>..<HASH> 100644 --- a/yowsup/common/tools.py +++ b/yowsup/common/tools.py @@ -101,7 +101,7 @@ class StorageTools: @staticmethod def writePhoneData(phone, name, val): path = StorageTools.getStorageForPhone(phone) - with open(os.path.join(path, name), 'wb') as attrFile: + with open(path, 'w' if type(val) is str else 'wb') as attrFile: attrFile.write(val) @staticmethod
[fix] open phone file bin mode if val is bytes
py
diff --git a/jmboyourwords/admin.py b/jmboyourwords/admin.py index <HASH>..<HASH> 100644 --- a/jmboyourwords/admin.py +++ b/jmboyourwords/admin.py @@ -45,13 +45,8 @@ class YourStoryCompetitionAdmin(admin.ModelAdmin): class YourStoryEntryAdmin(admin.ModelAdmin): list_filter = ('created', 'your_story_competition') - list_display = ('name', 'user', 'user_msisdn', 'text', 'created',) + list_display = ('name', 'user', 'text', 'created',) raw_id_fields = ('user', ) - def user_msisdn(self, obj): - # return the msisdn of the user - profile = obj.user.get_profile() - return profile.mobile_number - admin.site.register(YourStoryEntry, YourStoryEntryAdmin) admin.site.register(YourStoryCompetition, YourStoryCompetitionAdmin)
Removed msisdn display in change list.
py
diff --git a/patroni/version.py b/patroni/version.py index <HASH>..<HASH> 100644 --- a/patroni/version.py +++ b/patroni/version.py @@ -1 +1 @@ -__version__ = '1.2.4' +__version__ = '1.2.5'
Bump up to <I> (#<I>)
py
diff --git a/python/nano/example/pytorch/quantization/inc/resnet18_cifar.py b/python/nano/example/pytorch/quantization/inc/resnet18_cifar.py index <HASH>..<HASH> 100644 --- a/python/nano/example/pytorch/quantization/inc/resnet18_cifar.py +++ b/python/nano/example/pytorch/quantization/inc/resnet18_cifar.py @@ -31,6 +31,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # + +# mypy: ignore-errors import os import torch
Fix type check failure in resnet<I>_cifar (#<I>) * ignore type check on resnet<I>_cifar * roll back unrelated changes
py
diff --git a/baron/test_grammator_control_structures.py b/baron/test_grammator_control_structures.py index <HASH>..<HASH> 100644 --- a/baron/test_grammator_control_structures.py +++ b/baron/test_grammator_control_structures.py @@ -192,7 +192,7 @@ def test_if_else_stmt_indent(): ('ENDL', '\n'), ('DEDENT', ''), ('ELSE', 'else'), - ('COLON', ':', " "), + ('COLON', ':', [('SPACE', ' ')]), ('ENDL', '\n', [], [('SPACE', ' ')]), ('INDENT', ''), ('PASS', 'pass'),
[fix] old token format still present
py
diff --git a/ardy/core/build/build.py b/ardy/core/build/build.py index <HASH>..<HASH> 100644 --- a/ardy/core/build/build.py +++ b/ardy/core/build/build.py @@ -106,7 +106,7 @@ class Build(ConfigMixin): def get_src_path(self): return self.src_path - def run(self, src_folder, requirements=False, local_package=None): + def run(self, src_folder, requirements="requirements.txt", local_package=None): """Builds the file bundle. :param str src: The path to your Lambda ready project (folder must contain a valid @@ -189,13 +189,12 @@ class Build(ConfigMixin): pass else: requirements_path = os.path.join(self.get_src_path(), requirements) - logger.debug('Gathering requirement packages {}'.format(requirements_path)) - if os.path.exists(requirements_path): + logger.debug('Gathering packages from requirements: {}'.format(requirements_path)) + if os.path.isfile(requirements_path): data = self.read(requirements_path) packages.extend(data.splitlines()) - - if not packages: - logger.debug('No dependency packages installed!') + else: + logger.debug('No requirements file in {}'.format(requirements_path)) if local_package is not None: if not isinstance(local_package, (list, tuple)):
Build: search for requirements.txt by default
py
diff --git a/lib/numina/diskstorage.py b/lib/numina/diskstorage.py index <HASH>..<HASH> 100644 --- a/lib/numina/diskstorage.py +++ b/lib/numina/diskstorage.py @@ -44,17 +44,24 @@ def _store_rr(obj, where): external = [] - for key in obj: - t = type(obj[key]) + try: + # Iterator for dictionaries + iobj = obj.itervalues() + except AttributeError: + # All the rest + iobj = iter(obj) + + for val in iobj: + t = type(val) if t is dict: - _store_rr(obj[key], where) + _store_rr(val, where) elif t is list: - _store_rr(obj[key], where) + _store_rr(val, where) elif store.is_registered(t): - filename = generate_fname(obj[key]) - external.append((filename, obj[key])) - obj[key] = '<file>: %s' % filename + filename = generate_fname(val) + external.append((filename, val)) + val = '<file>: %s' % filename else: pass
Working with dictionaries and other iterables
py
diff --git a/grove/simon/simon.py b/grove/simon/simon.py index <HASH>..<HASH> 100644 --- a/grove/simon/simon.py +++ b/grove/simon/simon.py @@ -1,9 +1,8 @@ -"""Module for the Bernstein-Vazirani Algorithm.""" +"""Module for the Simon's Algorithm.""" import pyquil.quil as pq from pyquil.gates import * import numpy as np -from operator import xor def oracle_function(unitary_funct, qubits, ancillas, scratch_bit): """
Minor clean up. Note that tests are still needed, as well as functionality for detecting one-to-one.
py
diff --git a/yaka/services/indexing.py b/yaka/services/indexing.py index <HASH>..<HASH> 100644 --- a/yaka/services/indexing.py +++ b/yaka/services/indexing.py @@ -34,6 +34,7 @@ from shutil import rmtree class WhooshIndexService(object): app = None + to_update = {} def __init__(self, app=None): self.indexes = {}
Safeguard in case something goes wrong.
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -1,5 +1,6 @@ import os -from distutils.core import setup +#from distutils.core import setup +from setuptools import setup def read(fname):
update from distutils to setuptools
py
diff --git a/xclim/indices/fwi.py b/xclim/indices/fwi.py index <HASH>..<HASH> 100644 --- a/xclim/indices/fwi.py +++ b/xclim/indices/fwi.py @@ -129,7 +129,7 @@ References Codes: .. bibliography:: - :unsrt: + :style: unsrt :labelprefix: CODE :keyprefix: code- @@ -138,14 +138,14 @@ Matlab code of the GFWED obtained through personal communication. Fire season determination methods: .. bibliography:: - :unsrt: + :style: unsrt :labelprefix: FIRE :keyprefix: fire- Drought Code overwintering: .. bibliography:: - :unsrt: + :style: unsrt :labelprefix: DROUGHT :keyprefix: drought-
fix unsrt bibliography style option in fwi
py
diff --git a/auth_backends/__init__.py b/auth_backends/__init__.py index <HASH>..<HASH> 100644 --- a/auth_backends/__init__.py +++ b/auth_backends/__init__.py @@ -3,4 +3,4 @@ These package is designed to be used primarily with Open edX Django projects, but should be compatible with non-edX projects as well. """ -__version__ = '0.5.3' # pragma: no cover +__version__ = '0.6.0' # pragma: no cover
Updated to version <I> ECOM-<I>
py
diff --git a/gsl/g4visitor.py b/gsl/g4visitor.py index <HASH>..<HASH> 100644 --- a/gsl/g4visitor.py +++ b/gsl/g4visitor.py @@ -37,7 +37,10 @@ def process(in_file, out_file=None): from collections import namedtuple from gsl.antlr import ParseTreeVisitor -from .{grammarName}Parser import {grammarName}Parser +if __name__ is not None and "." in __name__: + from .{grammarName}Parser import {grammarName}Parser +else: + from {grammarName}Parser import {grammarName}Parser """) for ruleName, body in rules:
correct generated parser import statement for when the parser is not part of a module
py
diff --git a/remi/gui.py b/remi/gui.py index <HASH>..<HASH> 100644 --- a/remi/gui.py +++ b/remi/gui.py @@ -590,9 +590,12 @@ class ListView(Widget): @classmethod def new_from_list(cls, w, h, items): + """ + the items are appended with an string enumeration key + """ obj = cls(w,h) - for item in items: - obj.append(item) + for key,item in enumerate(items): + obj.append(item,str(key)) return obj def append(self, item, key=''):
Fixed issue #<I>. Now the ListView items are appended with an enumeration key.
py
diff --git a/tests/test_method.py b/tests/test_method.py index <HASH>..<HASH> 100644 --- a/tests/test_method.py +++ b/tests/test_method.py @@ -326,6 +326,14 @@ def test_clear_blast(variables): os.remove(os.path.join(targetpath, 'baitedtargets.nhr')) +def test_clear_kma(variables): + targetpath = os.path.join(variables.referencefilepath, 'ConFindr', 'databases') + os.remove(os.path.join(targetpath, 'rMLST_combined_kma.index.b')) + os.remove(os.path.join(targetpath, 'rMLST_combined_kma.length.b')) + os.remove(os.path.join(targetpath, 'rMLST_combined_kma.name')) + os.remove(os.path.join(targetpath, 'rMLST_combined_kma.seq.b')) + + def test_clear_logs(variables): # Use os.walk to find all log files in the subfolders within the reference file path for root, folders, files in os.walk(variables.referencefilepath):
Added function to clean up KMA targets
py
diff --git a/test/integration/022_bigquery_test/test_bigquery_copy_failing_models.py b/test/integration/022_bigquery_test/test_bigquery_copy_failing_models.py index <HASH>..<HASH> 100644 --- a/test/integration/022_bigquery_test/test_bigquery_copy_failing_models.py +++ b/test/integration/022_bigquery_test/test_bigquery_copy_failing_models.py @@ -32,5 +32,5 @@ class TestBigqueryCopyTableFails(DBTIntegrationTest): @use_profile('bigquery') def test__bigquery_copy_table_fails(self): results = self.run_dbt(expect_pass=False) - self.assertEqual(len(results), 1) + self.assertEqual(len(results), 2) self.assertTrue(results[0].error)
Should be two results for original table and (failing) copy
py
diff --git a/pythonforandroid/toolchain.py b/pythonforandroid/toolchain.py index <HASH>..<HASH> 100644 --- a/pythonforandroid/toolchain.py +++ b/pythonforandroid/toolchain.py @@ -159,7 +159,6 @@ def dist_from_args(ctx, args): ctx, name=args.dist_name, recipes=split_argument_list(args.requirements), - extra_dist_dirs=split_argument_list(args.extra_dist_dirs), require_perfect_match=args.require_perfect_match)
Removed extra_dist_dirs passing to get_distributions
py
diff --git a/ginga/misc/plugins/Thumbs.py b/ginga/misc/plugins/Thumbs.py index <HASH>..<HASH> 100644 --- a/ginga/misc/plugins/Thumbs.py +++ b/ginga/misc/plugins/Thumbs.py @@ -204,9 +204,6 @@ class Thumbs(GingaPlugin.GlobalPlugin): if not self.gui_up: return - if path is None: - # Currently we need a path to make a thumb key - return self.logger.info("removing thumb for %s" % (name)) try:
Thumbs now removes deleted mosaic
py
diff --git a/tests/test_utils.py b/tests/test_utils.py index <HASH>..<HASH> 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -497,8 +497,6 @@ def test_b64_encode_img_valid_png(): img = utils.array_to_img(arr) assert utils.b64_encode_img(img, 'png') -# KeyError being generated for WEBP format -@pytest.mark.xfail def test_b64_encode_img_valid_webp(): """Should work as expected """
Enabling test failing due to local environment issue
py
diff --git a/LiSE/LiSE/proxy.py b/LiSE/LiSE/proxy.py index <HASH>..<HASH> 100644 --- a/LiSE/LiSE/proxy.py +++ b/LiSE/LiSE/proxy.py @@ -1981,7 +1981,7 @@ class EngineProxy(AbstractEngine): for orig, dests in portdata.items(): assert orig not in self._character_portals_cache[char] assert orig not in self._portal_stat_cache[char] - for dest, stats in portdata.items(): + for dest, stats in dests.items(): assert dest not in self._character_portals_cache[char][orig] assert dest not in self._portal_stat_cache[char][orig] self._character_portals_cache[char][orig][dest] = PortalProxy(self.engine, char, orig, dest)
Cache portal data correctly when creating character in remote proc
py