diff
stringlengths
139
3.65k
message
stringlengths
8
627
diff_languages
stringclasses
1 value
diff --git a/src/saml2/config.py b/src/saml2/config.py index <HASH>..<HASH> 100644 --- a/src/saml2/config.py +++ b/src/saml2/config.py @@ -248,7 +248,11 @@ class Config(object): args["socktype"] = socket.SOCK_STREAM else: raise Exception("Unknown socktype!") - handler = LOG_HANDLER[htyp](**args) + try: + handler = LOG_HANDLER[htyp](**args) + except TypeError: # difference between 2.6 and 2.7 + del args["socktype"] + handler = LOG_HANDLER[htyp](**args) else: handler = LOG_HANDLER[htyp](**_logconf[htyp]) break
Difference in logging SysLogHandler arguments between <I> and <I>
py
diff --git a/src/hieroglyph/builder.py b/src/hieroglyph/builder.py index <HASH>..<HASH> 100644 --- a/src/hieroglyph/builder.py +++ b/src/hieroglyph/builder.py @@ -44,7 +44,7 @@ def building_slides(app): class AbstractSlideBuilder(object): - format = 'slides' + format = 'html' add_permalinks = False default_translator_class = writer.SlideTranslator
Fix #<I>: HieroglyphBuilder.format is not "html" To render custom nodes as HTML, hieroglyph builder should behave as a kind of HTML builder.
py
diff --git a/test/test_c_functions.py b/test/test_c_functions.py index <HASH>..<HASH> 100644 --- a/test/test_c_functions.py +++ b/test/test_c_functions.py @@ -10,6 +10,7 @@ except ImportError: from unittest.mock import patch, Mock from kernel_tuner.c import CFunctions, Argument +from kernel_tuner.core import KernelSource, KernelInstance def test_ready_argument_list1(): @@ -119,9 +120,13 @@ def test_compile(npct, subprocess): kernel_string = "this is a fake C program" kernel_name = "blabla" + kernel_sources = KernelSource(kernel_string, "C") + kernel_instance = KernelInstance(kernel_name, kernel_sources, kernel_string, [], None, None, dict(), []) + cfunc = CFunctions() - f = cfunc.compile(kernel_name, kernel_string) + + f = cfunc.compile(kernel_instance) print(subprocess.mock_calls) print(npct.mock_calls)
Adapt C backend compile test to new interface
py
diff --git a/sdmreader/sdmreader.py b/sdmreader/sdmreader.py index <HASH>..<HASH> 100644 --- a/sdmreader/sdmreader.py +++ b/sdmreader/sdmreader.py @@ -336,6 +336,7 @@ class BDFData (object): logger.warning('Something went wrong. Parsing bdf directly...') self._parse() else: + logger.info('Count not find bdf pkl file %s.' % (self.pklname)) self._parse() return self
added message for no bdf pkl found
py
diff --git a/qiskit/tools/jupyter/job_widgets.py b/qiskit/tools/jupyter/job_widgets.py index <HASH>..<HASH> 100644 --- a/qiskit/tools/jupyter/job_widgets.py +++ b/qiskit/tools/jupyter/job_widgets.py @@ -67,7 +67,7 @@ def make_labels(): labels = widgets.HBox(children=[labels0, labels1, labels2, labels3, labels4], layout=widgets.Layout(width='600px', - margin='0px 0px 0px 35px')) + margin='0px 0px 0px 37px')) return labels @@ -104,7 +104,7 @@ def create_job_widget(watcher, job, backend, status='', queue_pos=None, msg=''): layout=widgets.Layout(overflow_x='scroll')) close_button = widgets.Button(button_style='', icon='close', - layout=widgets.Layout(width='30px', + layout=widgets.Layout(width='32px', margin="0px 5px 0px 0px")) close_button.style.button_color = 'white'
Increase job watcher cancel button width. (#<I>)
py
diff --git a/stanza/models/ner_tagger.py b/stanza/models/ner_tagger.py index <HASH>..<HASH> 100644 --- a/stanza/models/ner_tagger.py +++ b/stanza/models/ner_tagger.py @@ -116,6 +116,7 @@ def train(args): pretrain = None vocab = None trainer = None + if args['finetune'] and os.path.exists(model_file): logger.warning('Finetune is ON. Using model from "{}"'.format(model_file)) _, trainer, vocab = load_model(args, model_file) @@ -262,7 +263,12 @@ def evaluate(args): def load_model(args, model_file): # load model use_cuda = args['cuda'] and not args['cpu'] - trainer = Trainer(model_file=model_file, use_cuda=use_cuda, train_classifier_only=args['train_classifier_only']) + charlm_args = {} + if 'charlm_forward_file' in args: + charlm_args['charlm_forward_file'] = args['charlm_forward_file'] + if 'charlm_backward_file' in args: + charlm_args['charlm_backward_file'] = args['charlm_backward_file'] + trainer = Trainer(args=charlm_args, model_file=model_file, use_cuda=use_cuda, train_classifier_only=args['train_classifier_only']) loaded_args, vocab = trainer.args, trainer.vocab # load config
Pass the charlm filenames to the trainer so that different charlm locations can be used when testing
py
diff --git a/autoflake.py b/autoflake.py index <HASH>..<HASH> 100644 --- a/autoflake.py +++ b/autoflake.py @@ -32,7 +32,7 @@ def standard_package_names(): yield name -SAFE_PACKAGES = set(standard_package_names()) - {'readline'} +SAFE_PACKAGES = set(standard_package_names()) - {'rlcompleter'} def unused_import_line_numbers(source):
Blacklist "rlcompleter"
py
diff --git a/workers/worker_redis.py b/workers/worker_redis.py index <HASH>..<HASH> 100644 --- a/workers/worker_redis.py +++ b/workers/worker_redis.py @@ -81,7 +81,10 @@ class RedisResult(AsynchronousResultWrapper): @property def status(self): - raise NotImplementedError + return self.asyncresult.get_status() def get(self, postprocess=None): - raise NotImplementedError + if postprocess is None: + return self.asyncresult.result + else: + return postprocess(self.asyncresult.result)
workflows: missing RQ worker code * Adds missing RQ worker code for .get() and .status().
py
diff --git a/mpop/satin/viirs_sdr.py b/mpop/satin/viirs_sdr.py index <HASH>..<HASH> 100644 --- a/mpop/satin/viirs_sdr.py +++ b/mpop/satin/viirs_sdr.py @@ -301,6 +301,7 @@ def load_viirs_sdr(satscene, options, *args, **kwargs): if not os.path.exists(directory): directory = globify(options["dir"]) % values + LOG.debug("Looking for files in directory " + str(directory)) directories = glob.glob(directory) if len(directories) > 1: raise IOError("More than one directory for npp scene... " + @@ -314,11 +315,10 @@ def load_viirs_sdr(satscene, options, *args, **kwargs): file_list = glob.glob(os.path.join(directory, filename_tmpl)) filenames = [ os.path.basename(s) for s in file_list ] + LOG.debug("Template = " + str(filename_tmpl)) if len(file_list) > 22: # 22 VIIRS bands (16 M-bands + 5 I-bands + DNB) raise IOError("More than 22 files matching!") elif len(file_list) == 0: - #LOG.warning("No VIIRS SDR file matching!: " + os.path.join(directory, - # filename_tmpl)) raise IOError("No VIIRS SDR file matching!: " + os.path.join(directory, filename_tmpl))
Adding a bit of debug info...
py
diff --git a/cleverhans/dataset.py b/cleverhans/dataset.py index <HASH>..<HASH> 100644 --- a/cleverhans/dataset.py +++ b/cleverhans/dataset.py @@ -16,7 +16,7 @@ class Dataset(object): """ # The number of classes in the dataset. Should be specified by subclasses. - nb_classes = None + NB_CLASSES = None def get_factory(self): """Returns a picklable callable that recreates the dataset. @@ -38,7 +38,7 @@ class Dataset(object): class MNIST(Dataset): """The MNIST dataset""" - nb_classes = 10 + NB_CLASSES = 10 def __init__(self, train_start=0, train_end=60000, test_start=0, test_end=10000, center=False): @@ -62,7 +62,7 @@ class MNIST(Dataset): class CIFAR10(Dataset): """The CIFAR-10 dataset""" - nb_classes = 10 + NB_CLASSES = 10 def __init__(self, train_start=0, train_end=60000, test_start=0, test_end=10000, center=False):
nb_classes -> NB_CLASSES
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100755 --- a/setup.py +++ b/setup.py @@ -49,7 +49,7 @@ cmdclass = {} # -- versioning --------------------------------------------------------------- -import versioneer +import versioneer # nopep8 __version__ = versioneer.get_version() cmdclass.update(versioneer.get_cmdclass()) @@ -145,6 +145,7 @@ class GWpyClean(clean): os.unlink(portfile) clean.run(self) + cmdclass['clean'] = GWpyClean @@ -216,6 +217,7 @@ class BuildPortfile(Command): else: return out.splitlines()[0].rsplit(' ', 1)[-1] + cmdclass['port'] = BuildPortfile if 'port' in sys.argv: setup_requires.append('jinja2')
setup.py: appeased pep8 [ci skip]
py
diff --git a/tests/load/test_load_report.py b/tests/load/test_load_report.py index <HASH>..<HASH> 100644 --- a/tests/load/test_load_report.py +++ b/tests/load/test_load_report.py @@ -19,7 +19,7 @@ def test_load_delivery_report_bad_case_id(panel_database): report_path=report_path) -def test_load_delivery_report_without_date_success(case_database): +def test_load_delivery_report_success(case_database): adapter = case_database # GIVEN a case exist, without a delivery report for the given analysis date
renamed test method to match test
py
diff --git a/host/tune_noise_occupancy.py b/host/tune_noise_occupancy.py index <HASH>..<HASH> 100644 --- a/host/tune_noise_occupancy.py +++ b/host/tune_noise_occupancy.py @@ -25,7 +25,7 @@ class NoiseOccupancyScan(ScanBase): occupancy_limit : float Occupancy limit which is multiplied with measured number of hits for each pixel. Any pixel above 1 will be masked. triggers : int - Total number of triggers sent to FE. + Total number of triggers sent to FE. From 1 to 4294967295 (32-bit unsigned int). consecutive_lvl1 : int Number of consecutive LVL1 triggers. From 1 to 16. disable_for_mask : list, tuple @@ -46,6 +46,7 @@ class NoiseOccupancyScan(ScanBase): Note ---- The total number of trigger is triggers * consecutive_lvl1. + Please note that a high trigger rate leads to an effective lower threshold. ''' # create restore point self.register.create_restore_point()
ENH: add a few notes
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -330,7 +330,7 @@ cmdclass = { 'test' : test, 'clean' : clean, } -extras_require = {'cuda': ['pycuda>=2015.1', 'scikits.cuda']} +extras_require = {'cuda': ['pycuda>=2015.1', 'scikit-cuda']} # do the actual work of building the package VERSION = get_version_info()
Update dependancies for skcuda
py
diff --git a/scripts/parsedata.py b/scripts/parsedata.py index <HASH>..<HASH> 100755 --- a/scripts/parsedata.py +++ b/scripts/parsedata.py @@ -16,8 +16,8 @@ # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. import argparse -from time import strftime -from datetime import datetime +from time import strftime, mktime +from datetime import datetime, timedelta from collections import OrderedDict from jinja2 import Environment, FileSystemLoader from os.path import dirname, exists @@ -41,7 +41,10 @@ def get_scores(session): def get_urls(session): - rows = session.query(Urls).order_by(Urls.time.desc()).all() + # FIXME: support stuff older than one week + limit = datetime.now() - timedelta(weeks=1) + limit = mktime(limit.timetuple()) + rows = session.query(Urls).filter(Urls.time > limit).order_by(Urls.time.desc()).all() urls = [] for row in rows: urls.append({'time': datetime.fromtimestamp(row.time), 'title': row.title, 'url': row.url})
make url log sane
py
diff --git a/cwltool/argparser.py b/cwltool/argparser.py index <HASH>..<HASH> 100644 --- a/cwltool/argparser.py +++ b/cwltool/argparser.py @@ -215,7 +215,8 @@ def arg_parser(): # type: () -> argparse.ArgumentParser help="Will be passed to `docker run` as the '--net' " "parameter. Implies '--enable-net'.") parser.add_argument("--disable-validate", dest="do_validate", - action="store_false", default=True, help="Skip CWL document validation.") + action="store_false", default=True, + help=argparse.SUPPRESS) exgroup = parser.add_mutually_exclusive_group() exgroup.add_argument("--enable-ga4gh-tool-registry", action="store_true", help="Enable resolution using GA4GH tool registry API",
Suppress command line help for --disable-validate to discourage people from using it.
py
diff --git a/salt/search/whoosh_search.py b/salt/search/whoosh_search.py index <HASH>..<HASH> 100644 --- a/salt/search/whoosh_search.py +++ b/salt/search/whoosh_search.py @@ -20,12 +20,15 @@ try: except ImportError: pass +# Define the module's virtual name +__virtualname__ = 'whoosh' + def __virtual__(): ''' Only load if the whoosh libs are available ''' - return 'whoosh' if HAS_WHOOSH else False + return __virtualname__ if HAS_WHOOSH else False def index():
`salt.search.whoosh_search` is now using `__virtualname__`
py
diff --git a/discord/http.py b/discord/http.py index <HASH>..<HASH> 100644 --- a/discord/http.py +++ b/discord/http.py @@ -33,7 +33,6 @@ from typing import ( ClassVar, Coroutine, Dict, - Final, Iterable, List, Optional, @@ -159,9 +158,6 @@ aiohttp.hdrs.WEBSOCKET = 'websocket' #type: ignore class HTTPClient: """Represents an HTTP client sending HTTP requests to the Discord API.""" - SUCCESS_LOG: Final[ClassVar[str]] = '{method} {url} has received {text}' - REQUEST_LOG: Final[ClassVar[str]] = '{method} {url} with {json} has returned {status}' - def __init__( self, connector: Optional[aiohttp.BaseConnector] = None,
Remove unused log lines in HTTPClient
py
diff --git a/lib/python/dxpy/bindings/dxfile_functions.py b/lib/python/dxpy/bindings/dxfile_functions.py index <HASH>..<HASH> 100644 --- a/lib/python/dxpy/bindings/dxfile_functions.py +++ b/lib/python/dxpy/bindings/dxfile_functions.py @@ -89,9 +89,12 @@ def download_dxfile(dxid, filename, chunksize=1024*1024): ''' with DXFile(dxid) as dxfile: - file_content = dxfile.read(chunksize) with open(filename, 'w') as fd: - fd.write(file_content) + while True: + file_content = dxfile.read(chunksize) + if len(file_content) == 0: + break + fd.write(file_content) def upload_local_file(filename, media_type=None, wait_on_close=False, **kwargs): '''
Fix bug in Python bindings: download_dxfile only downloaded one chunk.
py
diff --git a/docs/source/conf.py b/docs/source/conf.py index <HASH>..<HASH> 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -45,7 +45,6 @@ extensions = [ 'sphinx.ext.autosummary', 'm2r', 'nbsphinx', - 'jupyter_sphinx.embed_widgets', ] exclude_patterns = ['_build', '**.ipynb_checkpoints']
Removing explicit call for jupyter sphinx embed widgets
py
diff --git a/mutant/hacks.py b/mutant/hacks.py index <HASH>..<HASH> 100644 --- a/mutant/hacks.py +++ b/mutant/hacks.py @@ -9,9 +9,10 @@ def patch_db_field_compare(): try: assert Field() != None except AttributeError: - def _Field__cmp__(self, other): + del Field.__cmp__ + def _Field__lt__(self, other): if isinstance(other, Field): - return cmp(self.creation_counter, other.creation_counter) + return self.creation_counter < other.creation_counter return NotImplemented - Field.__cmp__ = _Field__cmp__ + Field.__lt__ = _Field__lt__ assert Field() != None
Updated the field comparison hack to reflect the update patch of django #<I>
py
diff --git a/paramz/__init__.py b/paramz/__init__.py index <HASH>..<HASH> 100644 --- a/paramz/__init__.py +++ b/paramz/__init__.py @@ -56,16 +56,23 @@ def load(file_or_path): :param file_name: path/to/file.pickle """ from pickle import UnpicklingError - try: + _python3 = True + try: import cPickle as pickle - strcl = basestring - p3kw = {} - return _unpickle(file_or_path, pickle, strcl, p3kw) + _python3 = False except ImportError: #python3 import pickle - strcl = str - p3kw = dict(encoding='latin1') - return _unpickle(file_or_path, pickle, strcl, p3kw) + + try: + if _python3: + strcl = str + p3kw = dict(encoding='latin1') + return _unpickle(file_or_path, pickle, strcl, p3kw) + else: + strcl = basestring + p3kw = {} + return _unpickle(file_or_path, pickle, strcl, p3kw) + except UnpicklingError: # pragma: no coverage import pickle return _unpickle(file_or_path, pickle, strcl, p3kw) \ No newline at end of file
fix: load swallowed the import errors from missing packages...
py
diff --git a/cpt/test/unit/config_test.py b/cpt/test/unit/config_test.py index <HASH>..<HASH> 100644 --- a/cpt/test/unit/config_test.py +++ b/cpt/test/unit/config_test.py @@ -15,11 +15,11 @@ class RemotesTest(unittest.TestCase): def test_valid_config(self): manager = ConfigManager(self.conan_api, Printer()) - manager.install('https://github.com/bincrafters/conan-config.git') + manager.install('https://github.com/bincrafters/bincrafters-config.git') def test_valid_config_with_args(self): manager = ConfigManager(self.conan_api, Printer()) - manager.install('https://github.com/bincrafters/conan-config.git', '-b master') + manager.install('https://github.com/bincrafters/bincrafters-config.git', '-b master') class RemotesTestRealApi(BaseTest): @@ -30,7 +30,7 @@ class RemotesTestRealApi(BaseTest): profiles = self.api.profile_list() self.assertEquals(len(profiles), 0) - manager.install("https://github.com/bincrafters/conan-config.git", "-b master") + manager.install("https://github.com/bincrafters/bincrafters-config.git", "-b master") profiles = self.api.profile_list() self.assertGreater(len(profiles), 3)
Update Bincrafters config url for tests
py
diff --git a/cms_lab_publications/models.py b/cms_lab_publications/models.py index <HASH>..<HASH> 100644 --- a/cms_lab_publications/models.py +++ b/cms_lab_publications/models.py @@ -149,13 +149,13 @@ class Publication(models.Model): super(Publication, self).save(*args, **kwargs) def __str__(self): - if len(self.title) >= 45: - title = "{}...".format(self.title[:40]) - else: - title = self.title + # if len(self.title) >= 45: + # title = "{}...".format(self.title[:40]) + # else: + # title = self.title return "{} - {} - {} - {} [{}]".format(self.year, - self.first_author, self.journal, title, str(self.pmid),) + self.first_author, self.journal, self.title, str(self.pmid),) class Meta: ordering = ('-year', '-month', '-day', 'first_author')
Return full publication title with Publication.__str__
py
diff --git a/angr/simos.py b/angr/simos.py index <HASH>..<HASH> 100644 --- a/angr/simos.py +++ b/angr/simos.py @@ -364,6 +364,9 @@ class SimCGC(SimOS): return s def state_entry(self, **kwargs): + if isinstance(self.proj.loader.main_bin, BackedCGC): + kwargs['permissions_backer'] = (True, self.proj.loader.main_bin.permissions_map) + state = super(SimCGC, self).state_entry(**kwargs) if isinstance(self.proj.loader.main_bin, BackedCGC):
When loading from BackedCGC initialize the blank state with the permissions_map inside
py
diff --git a/bcbio/distributed/ipythontasks.py b/bcbio/distributed/ipythontasks.py index <HASH>..<HASH> 100644 --- a/bcbio/distributed/ipythontasks.py +++ b/bcbio/distributed/ipythontasks.py @@ -345,7 +345,7 @@ def machine_info(*args): @require(chipseq) def clean_chipseq_alignment(*args): args = ipython.unzip_args(args) - return ipython.zip_args(chipseq.machine_info()) + return ipython.zip_args(apply(chipseq.clean_chipseq_alignment, *args)) @require(archive) def archive_to_cram(*args):
Fix chipseq unique alignment cleaning. This was not getting run properly when running with IPython.
py
diff --git a/shap/plots/dependence.py b/shap/plots/dependence.py index <HASH>..<HASH> 100644 --- a/shap/plots/dependence.py +++ b/shap/plots/dependence.py @@ -151,9 +151,10 @@ def dependence_plot(ind, shap_values, features, feature_names=None, display_feat xvals = xvals.astype(np.float) xvals = xvals[~np.isnan(xvals)] xvals = np.unique(xvals) - smallest_diff = np.min(np.diff(np.sort(xvals))) - jitter_amount = x_jitter * smallest_diff - xv += (np.random.ranf(size = len(xv))*jitter_amount) - (jitter_amount/2) + if len(xvals) >= 2: + smallest_diff = np.min(np.diff(np.sort(xvals))) + jitter_amount = x_jitter * smallest_diff + xv += (np.random.ranf(size = len(xv))*jitter_amount) - (jitter_amount/2) # the actual scatter plot, TODO: adapt the dot_size to the number of data points? if interaction_index is not None:
Only apply jitter if we have at least 2 unique x-values after removing NaNs
py
diff --git a/SkyPy/util.py b/SkyPy/util.py index <HASH>..<HASH> 100644 --- a/SkyPy/util.py +++ b/SkyPy/util.py @@ -48,10 +48,13 @@ def initAttrs(cls): setattr(cls, "__init__", __init__) return cls -def convertIds(*types, user=(), users=(), chat=()): +def convertIds(*types, **kwargs): """ Class decorator: add helper methods to convert identifier properties into SkypeObjs. """ + user = kwargs.get('user',()) + users = kwargs.get('users', ()) + chat = kwargs.get('chat', ()) def userObj(self, field): """ Retrieve the user referred to in the object.
Keeping the compatibility with python<I>
py
diff --git a/beautifultable/rows.py b/beautifultable/rows.py index <HASH>..<HASH> 100644 --- a/beautifultable/rows.py +++ b/beautifultable/rows.py @@ -82,9 +82,11 @@ class RowData(BaseRow): if len(row_item) <= width: return row_item else: - assert width-len(delimiter) >= 0 - clamped_string = (row_item[:width-len(delimiter)] - + delimiter) + if width-len(delimiter) >= 0: + clamped_string = (row_item[:width-len(delimiter)] + + delimiter) + else: + clamped_string = delimiter[:width] assert len(clamped_string) == width return clamped_string
Fixes #<I> : fixed exception on WEP_ELLIPSIS and token length less than 3
py
diff --git a/tests/test_crossbuild.py b/tests/test_crossbuild.py index <HASH>..<HASH> 100644 --- a/tests/test_crossbuild.py +++ b/tests/test_crossbuild.py @@ -11,6 +11,7 @@ from crossbuild import ( go_tarball, main, run_command, + version_from_tarball, working_directory, ) from utils import temp_dir @@ -54,6 +55,10 @@ class CrossBuildTestCase(TestCase): self.assertEqual(('bar.1.2.3.tar.gz', './foo'), args) self.assertEqual({'dry_run': False, 'verbose': False}, kwargs) + def test_version_from_tarball(self): + self.assertEqual('1.2.3', version_from_tarball('foo_1.2.3.tar.gz')) + self.assertEqual('1.2.3', version_from_tarball('bzr/foo_1.2.3.tar.gz')) + def test_go_build(self): with patch('crossbuild.run_command') as mock: go_build(
Added a test for version_from_tarball.
py
diff --git a/tests/test_client.py b/tests/test_client.py index <HASH>..<HASH> 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -284,8 +284,9 @@ class SSHClientTest (unittest.TestCase): def test_default_key_locations_trigger_cert_loads_if_found(self): # TODO: what it says on the tin: ~/.ssh/id_rsa tries to load - # ~/.ssh/id_rsa-cert.pub - assert False + # ~/.ssh/id_rsa-cert.pub. Right now no other tests actually test that + # code path (!) so we're punting too, sob. + pass def test_4_auto_add_policy(self): """
God damn it, really? Whatever.
py
diff --git a/src/sos/__main__.py b/src/sos/__main__.py index <HASH>..<HASH> 100755 --- a/src/sos/__main__.py +++ b/src/sos/__main__.py @@ -333,6 +333,10 @@ def cmd_run(args, workflow_args): except ImportError as e: raise RuntimeError(f'Python packages graphviz, pillow, and imageio are required for the generation of DAG animation in workflow report (options -p with -d): {e}') + import shutil + if not shutil.which('dot'): + raise RuntimeError(f'Command dot from package graphviz is required for the generation of DAG animation in workflow report (options -p with -d): {e}') + from .workflow_executor import Base_Executor if args.__bin_dirs__:
Checking the availability of command dot for the use of -p and -d #<I>
py
diff --git a/safe_qgis/impact_statistics/test/test_postprocessor_manager.py b/safe_qgis/impact_statistics/test/test_postprocessor_manager.py index <HASH>..<HASH> 100644 --- a/safe_qgis/impact_statistics/test/test_postprocessor_manager.py +++ b/safe_qgis/impact_statistics/test/test_postprocessor_manager.py @@ -77,7 +77,8 @@ class PostprocessorManagerTest(unittest.TestCase): def test_checkPostProcessingLayersVisibility(self): """Generated layers are not added to the map registry.""" myRunButton = DOCK.pbnRunStop - + # Explicitly disable showing intermediate layers + DOCK.showIntermediateLayers = False # with KAB_NAME aggregation attribute defined in .keyword using # kabupaten_jakarta_singlepart.shp myResult, myMessage = setup_scenario(
Try to make post processor tests pass (still gives one error due to layer count mismatch
py
diff --git a/.buildkite/pipeline.py b/.buildkite/pipeline.py index <HASH>..<HASH> 100644 --- a/.buildkite/pipeline.py +++ b/.buildkite/pipeline.py @@ -201,7 +201,6 @@ def dask_tests(): .on_integration_image( version, ['AWS_SECRET_ACCESS_KEY', 'AWS_ACCESS_KEY_ID', 'AWS_DEFAULT_REGION'] ) - .with_timeout(5) .on_medium_instance() .build() )
Remove timeout for Dask builds Summary: Removes the aggressive timeout for Dask - builds that are running fine are getting killed Test Plan: n/a Reviewers: alangenfeld Reviewed By: alangenfeld Differential Revision: <URL>
py
diff --git a/globus_cli/commands/bookmark/list.py b/globus_cli/commands/bookmark/list.py index <HASH>..<HASH> 100644 --- a/globus_cli/commands/bookmark/list.py +++ b/globus_cli/commands/bookmark/list.py @@ -1,5 +1,6 @@ import click +from globus_sdk import TransferAPIError from globus_cli.parsing import common_options from globus_cli.safeio import formatted_print @@ -19,8 +20,14 @@ def bookmark_list(): def get_ep_name(item): ep_id = item['endpoint_id'] - ep_doc = client.get_endpoint(ep_id) - return display_name_or_cname(ep_doc) + try: + ep_doc = client.get_endpoint(ep_id) + return display_name_or_cname(ep_doc) + except TransferAPIError as err: + if err.code == "EndpointDeleted": + return "[DELETED ENDPOINT]" + else: + raise err formatted_print( bookmark_iterator,
catch TransferAPIError raised in BookmarkList when a bookmark exists for a deleted endpoint
py
diff --git a/ipyvolume/astro.py b/ipyvolume/astro.py index <HASH>..<HASH> 100644 --- a/ipyvolume/astro.py +++ b/ipyvolume/astro.py @@ -82,8 +82,9 @@ def stars(N=1000, radius=100000, thickness=3, seed=42, color=[255, 240, 240]): x /= r y /= r z /= r - return ipv.scatter(x, y, z, texture=radial_sprite((64, 64), color), marker='square_2d', grow_limits=False, size=radius*0.7/100) - + s = ipv.scatter(x, y, z, texture=radial_sprite((64, 64), color), marker='square_2d', grow_limits=False, size=radius*0.7/100) + s.material.transparent = True + return s milkyway_url = 'https://www.nasa.gov/sites/default/files/images/620057main_milkyway_full.jpg' milkyway_image = UrlCached(milkyway_url)
astro:fix: setting star material to transparent (doesn't seem to matter when no alpha channel in canvas)
py
diff --git a/drf_auto_endpoint/endpoints.py b/drf_auto_endpoint/endpoints.py index <HASH>..<HASH> 100644 --- a/drf_auto_endpoint/endpoints.py +++ b/drf_auto_endpoint/endpoints.py @@ -193,6 +193,9 @@ class Endpoint(with_metaclass(EndpointMetaClass, object)): def application_name(self): return self.model._meta.app_label.lower() + def get_exclude_fields(self): + return self.exclude_fields + def get_fields_for_serializer(self): if self.fields is None: @@ -201,7 +204,7 @@ class Endpoint(with_metaclass(EndpointMetaClass, object)): self.fields = tuple([f for f in get_all_field_names(self.model) if f not in self.default_language_field_names and - f not in self.exclude_fields]) + f not in self.get_exclude_fields()]) if self.extra_fields is not None: self.fields += tuple(self.extra_fields) if self.include_str:
:heavy_plus_sign: add exclude_fields option to endpoints
py
diff --git a/app/actions/prottable/qvality.py b/app/actions/prottable/qvality.py index <HASH>..<HASH> 100644 --- a/app/actions/prottable/qvality.py +++ b/app/actions/prottable/qvality.py @@ -1,9 +1,12 @@ from app.dataformats import prottable as prottabledata +from app.dataformats import peptable as peptabledata -def prepare_qvality_input(features, feattype, get_static_percolator_output=False): +def prepare_qvality_input(features, feattype, + get_static_percolator_output=False): feat_fields = {'probability': prottabledata.HEADER_PROBABILITY, 'qvalue': prottabledata.HEADER_QSCORE, - } + 'svm': peptabledata.HEADER_SVM_SCORE, + } for feature in features: yield feature[feat_fields[feattype]]
Added svm as a measure to run qvality on for when it is run on peptide table
py
diff --git a/tests/mocktest.py b/tests/mocktest.py index <HASH>..<HASH> 100644 --- a/tests/mocktest.py +++ b/tests/mocktest.py @@ -19,7 +19,7 @@ if 'mocktest' in sys.modules: from testcase import TestCase from testutils import RunTests -from mock import Mock, sentinel, MakeMock +from mock import Mock, sentinel, MakeMock, DEFAULT @@ -61,7 +61,7 @@ class MockTest(TestCase): mock.assert_called_with(1, 2, fish=3) results = [1, 2, 3] - def effect(*args, **kwargs): + def effect(): return results.pop() mock.side_effect = effect @@ -72,6 +72,11 @@ class MockTest(TestCase): self.assertEquals(mock.side_effect, sentinel.SideEffect, "side effect in constructor not used") + def side_effect(): + return DEFAULT + mock = Mock(side_effect=side_effect, return_value=sentinel.RETURN) + self.assertEqual(mock(), sentinel.RETURN) +
Changing the way side_effect is called
py
diff --git a/abl/vpath/base/fs.py b/abl/vpath/base/fs.py index <HASH>..<HASH> 100644 --- a/abl/vpath/base/fs.py +++ b/abl/vpath/base/fs.py @@ -335,10 +335,12 @@ class BaseUri(object): @property def path(self): path = self.parse_result.path - if path.startswith('/.'): - return path[1:] - else: - return path + parsed_path = path[1:] if path.startswith("/.") else path + + if os.name == "nt" and self.scheme == "file": + parsed_path = os.path.normpath(re.sub(r"^/([a-zA-Z])/", r"\1:/", parsed_path)) + + return parsed_path @property
Properly format file paths for Windows We were generating paths like `/c/path/to/some/file` on Windows, where paths should be formated like `c:/path/to/some/file`, resulting in errors in the web connector tests. This commit ensures paths are formatted correctly on Windows filesystems.
py
diff --git a/docs/level-7-use-custom-jinja2-filter-test-n-global/custom-jj2-plugin/filter.py b/docs/level-7-use-custom-jinja2-filter-test-n-global/custom-jj2-plugin/filter.py index <HASH>..<HASH> 100644 --- a/docs/level-7-use-custom-jinja2-filter-test-n-global/custom-jj2-plugin/filter.py +++ b/docs/level-7-use-custom-jinja2-filter-test-n-global/custom-jj2-plugin/filter.py @@ -1,7 +1,13 @@ +import sys import base64 from moban.extensions import JinjaFilter @JinjaFilter('base64encode') def base64_encode(string): - return base64.b64encode(string) + if sys.version_info[0] > 2: + content = base64.b64encode(string.encode('utf-8')) + content = content.decode('utf-8') + else: + content = base64.b64encode(string) + return content
:bug: fix filter for python 3
py
diff --git a/cosmic_ray/mutating.py b/cosmic_ray/mutating.py index <HASH>..<HASH> 100644 --- a/cosmic_ray/mutating.py +++ b/cosmic_ray/mutating.py @@ -46,6 +46,7 @@ def run_with_mutants(module_file, module_name, operator, func, q): log.info('{} successfully parsed'.format(module_name)) for record, mutant in operator.bombard(pristine_ast): + record['filename'] = module_file try: new_mod = types.ModuleType(module_name) code = compile(mutant, module_file, 'exec')
Attaching filenames to activation records.
py
diff --git a/tests/test_routing.py b/tests/test_routing.py index <HASH>..<HASH> 100644 --- a/tests/test_routing.py +++ b/tests/test_routing.py @@ -554,9 +554,9 @@ def test_alias_redirects(): ]) a = m.bind('example.com') - def ensure_redirect(path, new_url): + def ensure_redirect(path, new_url, args=None): try: - a.match(path) + a.match(path, query_args=args) except RequestRedirect, e: assert e.new_url == new_url else: @@ -566,6 +566,7 @@ def test_alias_redirects(): ensure_redirect('/users/index.html', '/users/') ensure_redirect('/users/page-2.html', '/users/page/2') ensure_redirect('/users/page-1.html', '/users/') + ensure_redirect('/users/page-1.html', '/users/?foo=bar', {'foo': 'bar'}) assert a.build('index') == '/' assert a.build('users', {'page': 1}) == '/users/'
Added another test case with query args
py
diff --git a/blockstore/lib/config.py b/blockstore/lib/config.py index <HASH>..<HASH> 100644 --- a/blockstore/lib/config.py +++ b/blockstore/lib/config.py @@ -46,7 +46,7 @@ BLOCKSTORED_CONFIG_FILE = 'blockstore.ini' try: BLOCKSTORED_SERVER = os.environ['BLOCKSTORED_SERVER'] - BLOCKSTORED_PORT = os.environ['BLOCKSTORED_PORT'] + BLOCKSTORED_PORT = int(os.environ['BLOCKSTORED_PORT']) except KeyError: BLOCKSTORED_SERVER = 'localhost' BLOCKSTORED_PORT = DEFAULT_BLOCKSTORED_PORT
fixed bug where port read from ENV should be int
py
diff --git a/hwt/serializer/verilog/ops.py b/hwt/serializer/verilog/ops.py index <HASH>..<HASH> 100644 --- a/hwt/serializer/verilog/ops.py +++ b/hwt/serializer/verilog/ops.py @@ -43,6 +43,9 @@ class ToHdlAstVerilog_ops(): _, tmpVar = self.tmpVars.create_var_cached("tmp_concat_", operand._dtype, def_val=operand) # HdlAssignmentContainer(tmpVar, operand, virtual_only=True) operand = tmpVar + elif operator.operator == AllOps.INDEX and i == 0 and self._operandIsAnotherOperand(operand): + _, tmpVar = self.tmpVars.create_var_cached("tmp_index_", operand._dtype, def_val=operand) + operand = tmpVar oper = operator.operator width = None
verilog: rewrite indexing on expression as tmp var
py
diff --git a/graphene_django_extras/fields.py b/graphene_django_extras/fields.py index <HASH>..<HASH> 100644 --- a/graphene_django_extras/fields.py +++ b/graphene_django_extras/fields.py @@ -221,7 +221,7 @@ class DjangoFilterPaginateListField(Field): kwargs["description"] = "{} list".format(_type._meta.model.__name__) super(DjangoFilterPaginateListField, self).__init__( - List(_type), *args, **kwargs + List(NonNull(_type)), *args, **kwargs ) @property
NONNULL: Treat DjangoFilterPaginateListField the same as DjangoListField Currently DjangoFilterPaginateListField treats list items as nullable whereas DjangoListField doesn't. This ensures consistent handling of the two.
py
diff --git a/netmiko/paloalto/paloalto_panos.py b/netmiko/paloalto/paloalto_panos.py index <HASH>..<HASH> 100644 --- a/netmiko/paloalto/paloalto_panos.py +++ b/netmiko/paloalto/paloalto_panos.py @@ -51,6 +51,7 @@ class PaloAltoPanosBase(BaseConnection): def commit( self, + comment=None, force=False, partial=False, device_and_network=False, @@ -87,6 +88,8 @@ class PaloAltoPanosBase(BaseConnection): # Select proper command string based on arguments provided command_string = "commit" commit_marker = "configuration committed successfully" + if comment: + command_string += f' description "{comment}"' if force: command_string += " force" if partial:
Add commit comment support to PaloAltoPanosBase class (#<I>)
py
diff --git a/perceval/backends/puppet/puppetforge.py b/perceval/backends/puppet/puppetforge.py index <HASH>..<HASH> 100644 --- a/perceval/backends/puppet/puppetforge.py +++ b/perceval/backends/puppet/puppetforge.py @@ -23,8 +23,8 @@ import json import logging -from grimoirelab.toolkit.datetime import datetime_to_utc, str_to_datetime -from grimoirelab.toolkit.uris import urijoin +from grimoirelab_toolkit.datetime import datetime_to_utc, str_to_datetime +from grimoirelab_toolkit.uris import urijoin from ...backend import (Backend,
[puppetforge] Change toolkit import This code replaces the import of grimoirelab toolkit to make the code working with the new package structure of the toolkit.
py
diff --git a/holoviews/ipython/display_hooks.py b/holoviews/ipython/display_hooks.py index <HASH>..<HASH> 100644 --- a/holoviews/ipython/display_hooks.py +++ b/holoviews/ipython/display_hooks.py @@ -236,7 +236,7 @@ def layout_display(layout, size, max_frames, max_branches, widget_mode): "recommended format for accessing your data; calling " ".collate() on these objects will resolve any violations " "of the recommended nesting presented in the Composing Data " - "tutorial (http://git.io/vtIQh)") + "tutorial (http://git.io/vqs03)") expanded = [] for el in layout.values(): if isinstance(el, HoloMap) and not displayable(el):
Updated link to nesting hierarchy diagram in warning message
py
diff --git a/src/livestreamer/plugins/beattv.py b/src/livestreamer/plugins/beattv.py index <HASH>..<HASH> 100644 --- a/src/livestreamer/plugins/beattv.py +++ b/src/livestreamer/plugins/beattv.py @@ -50,12 +50,12 @@ _schema = validate.Schema( "status": int, "media": [{ "duration": validate.any(float, int), - "offset": int, + "offset": validate.any(float, int), "id": int, "parts": [{ "duration": validate.any(float, int), "id": int, - "offset": int, + "offset": validate.any(float, int), validate.optional("recording"): int, validate.optional("start"): validate.any(float, int) }]
plugins.beattv: Offsets can be floats or integers rather than just integers
py
diff --git a/tests/test_posthocs.py b/tests/test_posthocs.py index <HASH>..<HASH> 100644 --- a/tests/test_posthocs.py +++ b/tests/test_posthocs.py @@ -471,7 +471,7 @@ class TestPosthocs(unittest.TestCase): [2.857818e-06, 1.230888e-05, 1]]) results = sp.posthoc_wilcoxon(self.df.sort_index(), val_col = 'pulse', group_col = 'kind') - self.assertTrue(np.allclose(results, r_results)) + self.assertTrue(np.allclose(results, r_results, atol=1e-4)) def test_posthoc_scheffe(self):
increased abs tolerance for wilcoxon test
py
diff --git a/fusesoc/edalizer.py b/fusesoc/edalizer.py index <HASH>..<HASH> 100644 --- a/fusesoc/edalizer.py +++ b/fusesoc/edalizer.py @@ -63,7 +63,7 @@ class Edalizer(object): snippet['tool_options'] = {flags['tool'] : core.get_tool_options(_flags)} #Extract scripts - snippet['scripts'] = core.get_scripts(rel_root, _flags) + snippet['hooks'] = core.get_scripts(rel_root, _flags) _files = [] for file in core.get_files(_flags):
Fix broken hooks support in EDAM files
py
diff --git a/djcelery/loaders.py b/djcelery/loaders.py index <HASH>..<HASH> 100644 --- a/djcelery/loaders.py +++ b/djcelery/loaders.py @@ -20,12 +20,15 @@ class DjangoLoader(BaseLoader): self.configured = True return settings + def on_task_init(self, task_id, task): + self.close_database() + def close_database(self): from django.db import connection db_reuse_max = getattr(self.conf, "CELERY_DB_REUSE_MAX", None) if not db_reuse_max: return connection.close() - if self._db_reuse >= db_reuse_max: + if self._db_reuse >= db_reuse_max * 2: self._db_reuse = 0 return connection.close() self._db_reuse += 1
Close database before and after. Closes #<I> (I hope)
py
diff --git a/PyFunceble.py b/PyFunceble.py index <HASH>..<HASH> 100755 --- a/PyFunceble.py +++ b/PyFunceble.py @@ -2107,6 +2107,7 @@ class Referer(object): "tj", "tp", "tt", + "va", "vi", "vn", "ye", @@ -3988,7 +3989,7 @@ if __name__ == "__main__": help=" Get the latest version of PyFunceble.", ) PARSER.add_argument( - "-v", "--version", action="version", version="%(prog)s 0.60.3-beta" + "-v", "--version", action="version", version="%(prog)s 0.60.4-beta" ) ARGS = PARSER.parse_args()
Introduction of `va` into the list of ignored extensions cf: No whois server.
py
diff --git a/workflows/transport/stomp_transport.py b/workflows/transport/stomp_transport.py index <HASH>..<HASH> 100644 --- a/workflows/transport/stomp_transport.py +++ b/workflows/transport/stomp_transport.py @@ -112,7 +112,7 @@ class StompTransport(CommonTransport): self._conn.set_listener('', self._stomp_listener) try: self._conn.start() - except stomp.exception.ConnectFailedException, e: + except stomp.exception.ConnectFailedException: raise DisconnectedError('Could not initiate connection to stomp host') username = self.config.get('--stomp-user', self.defaults.get('--stomp-user')) @@ -123,7 +123,7 @@ class StompTransport(CommonTransport): self._conn.connect(username, password, wait=True) else: # anonymous access self._conn.connect(wait=True) - except stomp.exception.ConnectFailedException, e: + except stomp.exception.ConnectFailedException: raise AuthenticationError('Could not connect to stomp host: Authentication error') self._namespace = \ self.config.get('--stomp-prfx', self.defaults.get('--stomp-prfx'))
don't need exception object. Python3 syntax error
py
diff --git a/data/db/db_migrations/env.py b/data/db/db_migrations/env.py index <HASH>..<HASH> 100644 --- a/data/db/db_migrations/env.py +++ b/data/db/db_migrations/env.py @@ -11,16 +11,13 @@ MRIDB_FOLDER = os.path.abspath(os.path.join(os.path.dirname(os.path.realpath(__f from alembic import context from alembic.config import Config -alembic_cfg = Config(MRIDB_FOLDER+"/alembic.ini") +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. +config = Config(MRIDB_FOLDER+"/alembic.ini") sys.path.append(MRIDB_FOLDER) from model import Base - -# this is the Alembic Config object, which provides -# access to the values within the .ini file in use. -config = context.config - # Interpret the config file for Python logging. # This line sets up loggers basically. fileConfig(config.config_file_name)
Simplify setup of config var
py
diff --git a/pysat/_constellation.py b/pysat/_constellation.py index <HASH>..<HASH> 100644 --- a/pysat/_constellation.py +++ b/pysat/_constellation.py @@ -15,4 +15,6 @@ class Constellation(object): # FIXME pass - + + def __getitem__(instruments, index): + return self.instruments[index]
Added __getitem__ method
py
diff --git a/datajoint/erd.py b/datajoint/erd.py index <HASH>..<HASH> 100644 --- a/datajoint/erd.py +++ b/datajoint/erd.py @@ -149,15 +149,16 @@ class ERD(nx.DiGraph): Part: dict(size=7)} ax = plt.gca() for node in graph.nodes(data=True): - ax.text(pos[node[0]][0], pos[node[0]][1], node[0], **label_props[node[1]['node_type']], - horizontalalignment=('right' if pos[node[0]][0]<0.5 else 'left')) + ax.text(pos[node[0]][0], pos[node[0]][1], node[0], + horizontalalignment=('right' if pos[node[0]][0] < 0.5 else 'left'), + **label_props[node[1]['node_type']]) ax = plt.gca() ax.axis('off') ax.set_xlim([-0.4, 1.4]) # allow a margin for labels plt.show() @staticmethod - def _layout(graph, quality=1): + def _layout(graph, quality=2): """ :param graph: a networkx.DiGraph object :param quality: 0=dirty, 1=draft, 2=good, 3=great, 4=publish
fixed another bug specific to Python<I> and earlier versions
py
diff --git a/AlphaTwirl/Binning.py b/AlphaTwirl/Binning.py index <HASH>..<HASH> 100755 --- a/AlphaTwirl/Binning.py +++ b/AlphaTwirl/Binning.py @@ -65,6 +65,9 @@ class Round(object): return [self.__call__(v) for v in val] except TypeError: pass + return float(self._callImpDecimal(val)) + + def _callImpDecimal(self, val): val = decimal.Decimal(str(val)) ret = (val + self.shift)/self.width @@ -74,7 +77,7 @@ class Round(object): ret = ret*self.width - self.shift if self.lowedge: ret = ret - self.halfWidth - return float(ret) + return ret ##____________________________________________________________________________|| class Echo(object):
split Round.__call__() into two methods
py
diff --git a/hangups/ui/__main__.py b/hangups/ui/__main__.py index <HASH>..<HASH> 100644 --- a/hangups/ui/__main__.py +++ b/hangups/ui/__main__.py @@ -190,12 +190,6 @@ class ChatUI(object): """Open conversation tab for new messages & pass events to notifier.""" conv = self._conv_list.get(conv_event.conversation_id) user = conv.get_user(conv_event.user_id) - if self._discreet_notifications: - notification = DISCREET_NOTIFICATION - else: - notification = notifier.Notification( - user.full_name, get_conv_name(conv), conv_event.text - ) show_notification = all(( isinstance(conv_event, hangups.ChatMessageEvent), not user.is_self, @@ -203,6 +197,12 @@ class ChatUI(object): )) if show_notification: self.add_conversation_tab(conv_event.conversation_id) + if self._discreet_notifications: + notification = DISCREET_NOTIFICATION + else: + notification = notifier.Notification( + user.full_name, get_conv_name(conv), conv_event.text + ) self._notifier.send(notification) def _on_quit(self):
Fix attempt to notify for non-message events
py
diff --git a/src/sos/_version.py b/src/sos/_version.py index <HASH>..<HASH> 100644 --- a/src/sos/_version.py +++ b/src/sos/_version.py @@ -17,7 +17,7 @@ if _py_ver.major == 2 or (_py_ver.major == 3 and (_py_ver.minor, _py_ver.micro) # version of the SoS language __sos_version__ = '1.0' # version of the sos command -__version__ = '0.9.14.7' +__version__ = '0.9.14.8' __py_version__ = '{}.{}.{}'.format(_py_ver.major, _py_ver.minor, _py_ver.micro) #
Release sos <I> for internal change
py
diff --git a/twitch/helix/api.py b/twitch/helix/api.py index <HASH>..<HASH> 100644 --- a/twitch/helix/api.py +++ b/twitch/helix/api.py @@ -133,6 +133,8 @@ class TwitchHelix(object): clip_ids=None, after=None, before=None, + started_at=None, + ended_at=None, page_size=20, ): if not broadcaster_id and not clip_ids and not game_id: @@ -151,6 +153,8 @@ class TwitchHelix(object): "id": clip_ids, "after": after, "before": before, + "started_at": started_at, + "ended_at": ended_at, } if broadcaster_id or game_id:
Added started_at and ended_at to get_clips function (#<I>)
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -4,7 +4,7 @@ from setuptools import setup __major_version__ = 0 __minor_version__ = 0 -__patch_version__ = 26 +__patch_version__ = 27 def readme():
Bumping version to <I>
py
diff --git a/kconfiglib.py b/kconfiglib.py index <HASH>..<HASH> 100644 --- a/kconfiglib.py +++ b/kconfiglib.py @@ -5437,7 +5437,7 @@ class MenuNode(object): filename/linenr: The location where the menu node appears. The filename is relative to $srctree (or to the current directory if $srctree isn't set), except - absolute paths passed to 'source' and Kconfig.__init__() are preserved. + absolute paths are used for paths outside $srctree. include_path: A tuple of (filename, linenr) tuples, giving the locations of the
Make MenuNode.filename documentation more accurate re. absolute paths An absolute path to within $srctree is turned into a relative path, and a relative path with '..' in it might be turned into an absolute path. Only whether the path is within $srctree matters.
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100755 --- a/setup.py +++ b/setup.py @@ -15,6 +15,7 @@ # (see http://peterdowns.com/posts/first-time-with-pypi.html) +from setuptools import setup, find_packages from numpy.distutils.core import setup, Extension from os import path import io
setup.py is a fragile thing - unused packages with side effects on import
py
diff --git a/cheroot/test/pure_pytest/test_core.py b/cheroot/test/pure_pytest/test_core.py index <HASH>..<HASH> 100644 --- a/cheroot/test/pure_pytest/test_core.py +++ b/cheroot/test/pure_pytest/test_core.py @@ -77,27 +77,10 @@ def testing_server(wsgi_server): def server_client(testing_server): host, port = testing_server.bind_addr - interface = webtest.interface(host) - - def probe_ipv6_sock(interface): - import errno - import socket - from contextlib import closing - try: - with closing(socket.socket(family=socket.AF_INET6)) as sock: - sock.bind((interface, 0)) - except OSError as sock_err: - if sock_err.errno != errno.EADDRNOTAVAIL: - raise - else: - return True - - return False - - if ':' in interface and not probe_ipv6_sock(interface): - interface = '127.0.0.1' - if ':' in host: - host = interface + interface = webtest.interface(host) # :: -> ::1; 0.0.0.0 -> 127.0.0.1 + host = socket.getfqdn(interface) # {::1|127.0.0.1} -> localhost + # localhost -> {::1|127.0.0.1} : + interface = socket.gethostbyaddr(host)[-1].pop(0) class _TestClient(object): def __init__(self, host, port):
Do available IP version resolution at socket level
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -20,7 +20,7 @@ setup( install_requires=["pluginbase", "future", # "oic", - # "pyjwkest", + "pyjwkest", # "pysaml2 >= 3.0.2", "requests", "PyYAML",
Add pyjwkest as a dependency in setup.py.
py
diff --git a/doc/conf.py b/doc/conf.py index <HASH>..<HASH> 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -117,7 +117,7 @@ copyright = '2013 SaltStack, Inc.' version = salt.version.__version__ #release = '.'.join(map(str, salt.version.__version_info__)) -release = '2014.1.0' +release = '0.17.5' language = 'en' locale_dirs = [
Wait until actual announced release to change doc version
py
diff --git a/examples/plotting/file/color_sliders.py b/examples/plotting/file/color_sliders.py index <HASH>..<HASH> 100755 --- a/examples/plotting/file/color_sliders.py +++ b/examples/plotting/file/color_sliders.py @@ -127,5 +127,5 @@ layout = bkplt.hplot( vform(p1, p2) ) -bkplt.output_file("colorSliders.html") +bkplt.output_file("color_sliders.html") bkplt.show(layout)
changed name of HTML output to fit with project convention
py
diff --git a/djrichtextfield/models.py b/djrichtextfield/models.py index <HASH>..<HASH> 100644 --- a/djrichtextfield/models.py +++ b/djrichtextfield/models.py @@ -6,8 +6,6 @@ from djrichtextfield.widgets import RichTextWidget class RichTextField(models.TextField): - _south_introspects = True - def __init__(self, *args, **kwargs): self.field_settings = None if 'field_settings' in kwargs:
Remove unused _south_introspects member
py
diff --git a/parsl/dataflow/dflow.py b/parsl/dataflow/dflow.py index <HASH>..<HASH> 100644 --- a/parsl/dataflow/dflow.py +++ b/parsl/dataflow/dflow.py @@ -714,6 +714,22 @@ class DataFlowKernel(object): if not self.cleanup_called: self.cleanup() + def cleanup_wait_for_all_tasks(self): + """Waits for all tasks in the task list to be completed, by waiting for their + AppFuture to be completed. This method will not necessarily wait for any tasks + added after cleanup has started (such as data stageout?) + """ + + logger.info("Waiting for all remaining tasks to complete") + for task_id in self.tasks: + # .exception() is a less exception throwing way of + # waiting for completion than .result() + fut = self.tasks[task_id]['exec_fu'] + if not fut.done(): + logger.debug("Waiting for task {} to complete".format(task_id)) + fut.exception() + logger.info("All remaining tasks completed") + def cleanup(self): """DataFlowKernel cleanup. @@ -732,6 +748,8 @@ class DataFlowKernel(object): raise Exception("attempt to clean up DFK when it has already been cleaned-up") self.cleanup_called = True + self.cleanup_wait_for_all_tasks() + self.log_task_states() # Checkpointing takes priority over the rest of the tasks
Wait for all tasks to complete during DFK cleanup There is a potential race here with tasks that are submitted after DFK cleanup has begun: * a task on a local threaded executor might be running and submit such a task * I'm unclear if data stageout tasks appear as regular tasks The second of those points is probably the most immediate to be concerned about.
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -69,6 +69,11 @@ class SWIG_build(build): self.run_command('build_ext') return build.run(self) +# list example scripts +examples = list(map( + lambda x: os.path.join('examples', x), + filter(lambda x: os.path.splitext(x)[1] == '.py', os.listdir('examples')))) + with open('README.rst') as ldf: long_description = ldf.read() url = 'https://github.com/jim-easterbrook/python-gphoto2' @@ -106,4 +111,8 @@ setup(name = 'gphoto2', ext_modules = ext_modules, packages = ['gphoto2', 'gphoto2.lib'], package_dir = {'' : 'src'}, + data_files = [ + ('share/python-gphoto2/examples', examples), + ('share/python-gphoto2', ['LICENSE.txt', 'README.rst']), + ], )
Add examples and README etc to package data_files. This should ensure they are available when package is installed with pip.
py
diff --git a/pin_passcode/views.py b/pin_passcode/views.py index <HASH>..<HASH> 100644 --- a/pin_passcode/views.py +++ b/pin_passcode/views.py @@ -16,10 +16,14 @@ def auth(request): if not username: username = 'admin' - user = get_user_model().objects.get(username=username) - user.backend = 'django.contrib.auth.backends.ModelBackend' + try: + user = get_user_model().objects.get(username=username) + user.backend = 'django.contrib.auth.backends.ModelBackend' + + login(request, user) + return HttpResponse(status=200) + except get_user_model().DoesNotExist: + pass - login(request, user) - return HttpResponse(status=200) return HttpResponse(status=401)
Handle User.DoesNotExist
py
diff --git a/pinax/referrals/views.py b/pinax/referrals/views.py index <HASH>..<HASH> 100644 --- a/pinax/referrals/views.py +++ b/pinax/referrals/views.py @@ -48,6 +48,7 @@ def process_referral(request, code): referral = get_object_or_404(Referral, code=code) session_key = ensure_session_key(request) referral.respond(request, "RESPONDED") + max_age = getattr(settings, "PINAX_COOKIE_MAX_AGE", None) try: response = redirect(request.GET[ getattr(settings, "PINAX_REFERRALS_REDIRECT_ATTRIBUTE", "redirect_to")] @@ -57,7 +58,8 @@ def process_referral(request, code): if request.user.is_anonymous: response.set_cookie( "pinax-referral", - f"{code}:{session_key}" + f"{code}:{session_key}", + max_age=max_age ) else: response.delete_cookie("pinax-referral")
Update views.py Added `max_age` to set_cookie method
py
diff --git a/probfit/py23_compat.py b/probfit/py23_compat.py index <HASH>..<HASH> 100644 --- a/probfit/py23_compat.py +++ b/probfit/py23_compat.py @@ -13,6 +13,6 @@ else: # just in case PY4 if PY2: - range = xrange + range = xrange # pylint: disable=undefined-variable else: range = range
Disable pylint warning for xrange. Pylint complains that xrange is an undefined variable when its run under Python 3. This is true, which is why the offending line is wrapped with an 'if PY3' clause.
py
diff --git a/pysrt/srtfile.py b/pysrt/srtfile.py index <HASH>..<HASH> 100644 --- a/pysrt/srtfile.py +++ b/pysrt/srtfile.py @@ -286,7 +286,13 @@ class SubRipFile(UserList, object): @classmethod def _detect_encoding(cls, path): - report = charade.detect(open(path).read()) + sample = open(path).read(1024) + + for bom, encoding in BOMS: + if sample.startswith(bom): + return encoding + + report = charade.detect(sample) encoding = report.get('encoding') if not encoding: return cls.DEFAULT_ENCODING
Reintroduce manual BOM detection since charade/chadet often miss them
py
diff --git a/src/future/types/newobject.py b/src/future/types/newobject.py index <HASH>..<HASH> 100644 --- a/src/future/types/newobject.py +++ b/src/future/types/newobject.py @@ -86,6 +86,8 @@ class newobject(object): def __nonzero__(self): if hasattr(self, '__bool__'): return type(self).__bool__(self) + if hasattr(self, '__len__'): + return type(self).__len__(self) # object has no __nonzero__ method return True
Putative fix for issue #<I>
py
diff --git a/tests/test-scout2.py b/tests/test-scout2.py index <HASH>..<HASH> 100644 --- a/tests/test-scout2.py +++ b/tests/test-scout2.py @@ -22,3 +22,21 @@ class TestScout2Class: process = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE) process.wait() assert process.returncode == 0 + + # + # Make sure that Scout2's check-s3-acl option does not crash + # + def test_scout2_default_run(self): + command = './Scout2.py --force --services s3 --check-s3-acls --bucket-name misconfigured-bucket-objectacls-mismatch' + process = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE) + process.wait() + assert process.returncode == 0 + + # + # Make sure that Scout2's check-s3-encryption option does not crash + # + def test_scout2_default_run(self): + command = './Scout2.py --force --services s3 --check-s3-encryption --bucket-name misconfigured-bucket-unencrypted-objects' + process = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE) + process.wait() + assert process.returncode == 0
Add test cases for S3 acls and S3 encryption
py
diff --git a/ziggurat_foundations/models.py b/ziggurat_foundations/models.py index <HASH>..<HASH> 100644 --- a/ziggurat_foundations/models.py +++ b/ziggurat_foundations/models.py @@ -355,7 +355,7 @@ class ExternalIdentityMixin(BaseModel): @declared_attr def external_user_name(cls): - return sa.Column(sa.Unicode(50), default=u'') + return sa.Column(sa.Unicode(255), default=u'') @declared_attr def local_user_name(cls):
make sure model creates column of right size
py
diff --git a/acos_client/v21/action.py b/acos_client/v21/action.py index <HASH>..<HASH> 100644 --- a/acos_client/v21/action.py +++ b/acos_client/v21/action.py @@ -44,13 +44,18 @@ class Action(base.BaseV21): if partition is not None: write_cmd = "active-partition {0}\r\n{1}".format(partition, write_cmd) + last_e = None for i in range(0, 5): # Request raises an exception when the "maybe error" is returned. try: return self._request("POST", "cli.deploy", params=None, payload=write_cmd, **kwargs) except acos_errors.ACOSException as e: + last_e = e # Catch 'might fail error' if e.msg.startswith("write memory") or '2039 ' in e.msg: time.sleep(1) continue raise e + + if last_e is not None: + raise last_e
throw last exception if retry loop finishes
py
diff --git a/cumulus/management/commands/syncstatic.py b/cumulus/management/commands/syncstatic.py index <HASH>..<HASH> 100644 --- a/cumulus/management/commands/syncstatic.py +++ b/cumulus/management/commands/syncstatic.py @@ -18,6 +18,8 @@ class Command(BaseCommand): optparse.make_option('-t', '--test-run', action='store_true', dest='test_run', default=False, help="Performs a test run of the sync."), + optparse.make_option('-c', '--container', + dest='container', help="Override STATIC_CONTAINER."), ) # settings from cumulus.settings @@ -51,6 +53,7 @@ class Command(BaseCommand): self.wipe = options.get('wipe') self.test_run = options.get('test_run') self.verbosity = int(options.get('verbosity')) + self.CONTAINER = options.get('container', self.CONTAINER) self.sync_files() def sync_files(self):
Allow container name to be overridden at runtime
py
diff --git a/bibliopixel/animation/animation.py b/bibliopixel/animation/animation.py index <HASH>..<HASH> 100644 --- a/bibliopixel/animation/animation.py +++ b/bibliopixel/animation/animation.py @@ -52,7 +52,7 @@ class Animation(object): @color_list.setter def color_list(self, cl): - self.layout.color_list[:] = cl + self.layout.color_list = cl @property def completed(self):
Fix animation to use the layout's color_list property * Otherwise we don't get the cool conversions from the Layout.color_list setter.
py
diff --git a/test/test_exceptions.py b/test/test_exceptions.py index <HASH>..<HASH> 100644 --- a/test/test_exceptions.py +++ b/test/test_exceptions.py @@ -7,7 +7,7 @@ class Person(StructuredNode): def test_cypher_exception_can_be_displayed(): - print CypherException("SOME QUERY", (), "ERROR", None, None) + print(CypherException("SOME QUERY", (), "ERROR", None, None)) def test_object_does_not_exist(): try:
Fix syntax Error for compability with python 3.X
py
diff --git a/modules/search.py b/modules/search.py index <HASH>..<HASH> 100644 --- a/modules/search.py +++ b/modules/search.py @@ -19,7 +19,19 @@ import urllib2 from xml.dom import minidom import xml.parsers.expat -#from subdownloader.modules import configuration +from subdownloader import subtitlefile + +class Movie(object): + def __init__(self, movieName): + self.movieName = movieName + self.movieSiteLink = "" + self.IMDBSite = "" + self.IMDBRating = "" + self.MovieYear = "" + self.MovieId = "" #this ID will be used when calling the 2nd step function to get the Subtitle Details + self.totalSubs = None #Sometimes we get the TotalSubs in the 1st step before we get the details of the subtitles + self.subs = [] #this is an array of Subtitle + class SearchByName(object):
Added the class Movie for crawling.
py
diff --git a/ait/core/bin/ait_ccsds_send_example.py b/ait/core/bin/ait_ccsds_send_example.py index <HASH>..<HASH> 100755 --- a/ait/core/bin/ait_ccsds_send_example.py +++ b/ait/core/bin/ait_ccsds_send_example.py @@ -26,8 +26,8 @@ zoe 1 packet_type 0001 <spare> 0 element_id 0001 -data_packet 1 -version_id 0001 +data_packet 1 +version_id 0001 format_id 000001 <unknown> 00000000 frame_id 00000001 @@ -38,8 +38,14 @@ buf = hs_packet.pack(*data) host = 'localhost' port = 3076 -while True: - s.sendto(buf, (host, port)) - log.info('Sent telemetry (%d bytes) to %s:%d' - % (hs_packet.size, host, port)) - time.sleep(1) + +def main(): + while True: + s.sendto(buf, (host, port)) + log.info('Sent telemetry (%d bytes) to %s:%d' + % (hs_packet.size, host, port)) + time.sleep(1) + + +if __name__ == '__main__': + main()
Issue #<I> - Prevent auto execute on import for ait_ccsds_send_example
py
diff --git a/cheroot/cli.py b/cheroot/cli.py index <HASH>..<HASH> 100644 --- a/cheroot/cli.py +++ b/cheroot/cli.py @@ -134,6 +134,7 @@ def parse_wsgi_bind_location(bind_addr_string): # with value: "<value>" and port: None if bind_addr_string.startswith('@'): return AbstractSocket(bind_addr_string[1:]) + # try and match for an IP/hostname and port match = six.moves.urllib.parse.urlparse('//{}'.format(bind_addr_string)) try: @@ -143,6 +144,7 @@ def parse_wsgi_bind_location(bind_addr_string): return TCPSocket(addr, port) except ValueError: pass + # else, assume a UNIX socket path return UnixSocket(path=bind_addr_string)
Add some blank lines to give structure in `parse_wsgi_bind_location`
py
diff --git a/treepoem/__init__.py b/treepoem/__init__.py index <HASH>..<HASH> 100644 --- a/treepoem/__init__.py +++ b/treepoem/__init__.py @@ -77,10 +77,10 @@ def _get_bbox(code): BBOX_COMMAND, universal_newlines=True, stdin=subprocess.PIPE, - stdout=subprocess.DEVNULL, + stdout=subprocess.PIPE, stderr=subprocess.PIPE, ) - _, err_output = gs_process.communicate(full_code, timeout=2) + _, err_output = gs_process.communicate(full_code) err_output = err_output.strip() # Unfortunately the error-handling in the postscript means that # returncode is 0 even if there was an error, but this gives
Remove Python 3isms - now works in <I> and <I>
py
diff --git a/cartoframes/context.py b/cartoframes/context.py index <HASH>..<HASH> 100644 --- a/cartoframes/context.py +++ b/cartoframes/context.py @@ -136,6 +136,12 @@ class CartoContext(object): def _is_authenticated(self): """Checks if credentials allow for authenticated carto access""" + # POSTs need to be over HTTPS + if urlparse(self.creds.base_url()).scheme != 'https': + raise ValueError( + '`base_url`s need to be over `https`. Update your `base_url`.' + ) + # check if user is authenticated try: self.sql_client.send( 'select * from information_schema.tables limit 0')
adds validation of baseurl on https
py
diff --git a/pygal/view.py b/pygal/view.py index <HASH>..<HASH> 100644 --- a/pygal/view.py +++ b/pygal/view.py @@ -135,8 +135,8 @@ class Box(object): if not self.width: self.xmax = self.xmin + 1 if not self.height: - self.ymin -= .5 - self.ymax = self.ymin + 1 + self.ymin /= 2 + self.ymax += self.ymin xmargin = self.margin * self.width self.xmin -= xmargin self.xmax += xmargin
Fix box differently to avoid getting a null height on huge numbers
py
diff --git a/astrobase/varbase/lcfit.py b/astrobase/varbase/lcfit.py index <HASH>..<HASH> 100644 --- a/astrobase/varbase/lcfit.py +++ b/astrobase/varbase/lcfit.py @@ -1967,11 +1967,11 @@ def mandelagol_fit_magseries(times, mags, errs, if plotcorner: if isinstance(trueparams,dict): trueparamkeys = np.sort(list(trueparams.keys())) - trueparams = [trueparams[k] for k in trueparamkeys] + truelist = [trueparams[k] for k in trueparamkeys] fig = corner.corner( samples, labels=trueparamkeys, - truths=trueparams, + truths=truelist, quantiles=[0.1585, 0.5, .8415], show_titles=True ) else:
lcfit: code cleaning for readability
py
diff --git a/omxplayer/player.py b/omxplayer/player.py index <HASH>..<HASH> 100644 --- a/omxplayer/player.py +++ b/omxplayer/player.py @@ -90,8 +90,8 @@ class OMXPlayer(object): process = subprocess.Popen(command, stdout=devnull, preexec_fn=os.setsid) - self._process_monitor = threading.Thread(target=monitor, args=(process, - on_exit)) + self._process_monitor = threading.Thread(target=monitor, + args=(process, on_exit)) self._process_monitor.start() return process
Cleanup: Beautify formatting in omxplayer/player.py
py
diff --git a/build.py b/build.py index <HASH>..<HASH> 100755 --- a/build.py +++ b/build.py @@ -178,7 +178,7 @@ def build_src_external_src_exports_js(t): '--exports', 'src/objectliterals.exports', EXPORTS) -@target('build/src/external/src/types.js', 'bin/generate-exports', +@target('build/src/external/src/types.js', 'bin/generate-exports.py', 'src/objectliterals.exports') def build_src_external_src_types_js(t): t.output('%(PYTHON)s', 'bin/generate-exports.py',
Fix build/src/external/src/types.js dependency The dependencies for build/src/external/src/types.js were wrong, which meant that build/src/external/src/types.js would always be rebuilt, which meant that almost everything else would be rebuilt too.
py
diff --git a/zappa/cli.py b/zappa/cli.py index <HASH>..<HASH> 100755 --- a/zappa/cli.py +++ b/zappa/cli.py @@ -450,6 +450,10 @@ class ZappaCLI(object): # been specified AND that stage_env='showmigrations') # By having command_rest collect everything but --all we can split it # apart here instead of relying on argparse. + if not args.command: + parser.print_help() + return + if args.command == 'manage' and not self.vargs.get('all'): self.stage_env = self.vargs['command_rest'].pop(0) else: @@ -512,7 +516,7 @@ class ZappaCLI(object): self.api_stage = stage if command not in ['status', 'manage']: - if not self.vargs['json']: + if not self.vargs.get('json', None): click.echo("Calling " + click.style(command, fg="green", bold=True) + " for stage " + click.style(self.api_stage, bold=True) + ".." ) @@ -1660,10 +1664,10 @@ class ZappaCLI(object): 'project_name': self.get_project_name() } } - + if profile_region: - zappa_settings[env]['aws_region'] = profile_region - + zappa_settings[env]['aws_region'] = profile_region + if has_django: zappa_settings[env]['django_settings'] = django_settings else:
Fixed running zappa with no parameters Fixed an issue when running Zappa with no arguments. First, there was no check to verify whether any command was passed as a command-line argument; later, in the dispatch_command function, we were trying to get the value of 'json' in self.vargs, causing an unhandled KeyError exception. Now, if no argument is specified, the help is displayed to the user.
py
diff --git a/messaging_client/messaging_client.py b/messaging_client/messaging_client.py index <HASH>..<HASH> 100644 --- a/messaging_client/messaging_client.py +++ b/messaging_client/messaging_client.py @@ -24,9 +24,16 @@ class MessagingClient(object): """ return self.command_line_parser.parse() - def connect(self, host, port): + def set_address(self, host, port): + self.host = host + self.port = port + + def connect(self, host=None, port=None): """Connects to given host address and port.""" - self.socket.connect(host, port) + if host is None and port is None: + self.socket.connect(self.host, self.port) + else: + self.socket.connect(host, port) def close(self): """Closes the connection to remote host."""
Added option to add host and port to MessagingClient before connect.
py
diff --git a/lib/tri/table/__init__.py b/lib/tri/table/__init__.py index <HASH>..<HASH> 100644 --- a/lib/tri/table/__init__.py +++ b/lib/tri/table/__init__.py @@ -1334,7 +1334,7 @@ class Table(RefinableObject): .filter(**self.bulk_filter) \ .exclude(**self.bulk_exclude) - if self._bulk_form.fields_by_name._all_pks_.value == '1': + if table.request.POST.get('_all_pks_') == '1': return queryset else: pks = [key[len('pk_'):] for key in self.request.POST if key.startswith('pk_')]
bulk_queryset should be usable to create your own bulk actions without using Table.bulk_form
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -3,7 +3,7 @@ from distutils.core import setup setup( name='claripy', version='4.6.6.28', - packages=['claripy', 'claripy.backends', 'claripy.frontends', 'claripy.vsa', 'claripy.ast'], + packages=['claripy', 'claripy.backends', 'claripy.frontends', 'claripy.vsa', 'claripy.ast', 'claripy.frontend_mixins'], install_requires=[ 'ana', 'angr-only-z3-custom',
Update setup.py with the new module.
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -53,9 +53,9 @@ setup(license="Apache License 2.0 (http://www.apache.org/licenses/LICENSE-2.0)", ], install_requires=[ "attrs>=17.4.0", # https://github.com/biocommons/hgvs/issues/473 - "biocommons.seqrepo", + "biocommons.seqrepo<1.0", "biopython==1.69", # 1.70 fails on rtd due to numpy absence - "bioutils>=0.2.2", + "bioutils>=0.2.2,<1.0", "configparser>=3.3.0", "enum34", "ipython<6", # for hgvs-shell; >=6 for Py3 only
pin biocommons dependencies to versions that support <I> and <I>+
py
diff --git a/angr/simos.py b/angr/simos.py index <HASH>..<HASH> 100644 --- a/angr/simos.py +++ b/angr/simos.py @@ -58,9 +58,17 @@ class SimOS(object): A class describing OS/arch-level configuration. """ - def __init__(self, project, name=None): + def __init__(self, project, name=None, disable_externs=False): + """ + Constructor for a SimOS. + :param project: + :param name: + :param disable_externs: Disables support for externs. You may want this if you're in a small memory space and + just don't have any externs anyway (e.g., microcontrollers) + """ self.arch = project.arch self.project = project + self.disable_externs = disable_externs self.name = name self.return_deadend = None @@ -68,8 +76,9 @@ class SimOS(object): """ Configure the project to set up global settings (like SimProcedures). """ - self.return_deadend = self.project.loader.extern_object.allocate() - self.project.hook(self.return_deadend, P['stubs']['CallReturn']()) + if not self.disable_externs: + self.return_deadend = self.project.loader.extern_object.allocate() + self.project.hook(self.return_deadend, P['stubs']['CallReturn']()) def irelative_resolver(resolver_addr): # autohooking runs before this does, might have provided this already
Hack to disable externs support if there are no externs
py
diff --git a/simple_elastic/index.py b/simple_elastic/index.py index <HASH>..<HASH> 100644 --- a/simple_elastic/index.py +++ b/simple_elastic/index.py @@ -124,16 +124,19 @@ class ElasticIndex: results.append(items) return results - def scroll(self, query=None, scroll='5m', size=100): + def scroll(self, query=None, scroll='5m', size=100, unpack=True): """Scroll an index with the specified search query. Works as a generator. Will yield `size` results per iteration until all hits are returned. """ query = self.match_all if query is None else query - response = self.instance.search(index=self.index, doc_type=self.doc_type, body=query, size=size, scroll=scroll) + response = self.instance.search(index=self.index, doc_type=self.doc_type, body=query, size=size) while len(response['hits']['hits']) > 0: scroll_id = response['_scroll_id'] - yield [source['_source'] if '_source' in source else source for source in response['hits']['hits']] + if unpack: + yield [source['_source'] if '_source' in source else source for source in response['hits']['hits']] + else: + yield response['hits']['hits'] response = self.instance.scroll(scroll_id=scroll_id, scroll=scroll) def get(self, identifier):
feat(Scroll): Add unpack param. This allows the user to specify whether the _source field is unpacked or not. In case of unpacking no metadata fields are returned.
py
diff --git a/spinoff/util/testing.py b/spinoff/util/testing.py index <HASH>..<HASH> 100644 --- a/spinoff/util/testing.py +++ b/spinoff/util/testing.py @@ -185,11 +185,15 @@ class MockActor(BaseActor): class RootActor(MockActor): - def handle(self, msg): - if msg[0] == 'error' and isinstance(msg[-2], AssertionError): - raise msg[-2] - else: - super(RootActor, self).handle(msg) + def raise_errors(self): + for msg in self.messages: + if msg[0] == 'error': # and isinstance(msg[-2], AssertionError): + # see: http://twistedmatrix.com/trac/ticket/5178 + if not isinstance(msg[2][1], basestring): + raise msg[2][0], None, msg[2][1] + else: + print(msg[2][1]) + raise msg[2][0] def run(a_cls, *args, **kwargs): @@ -197,4 +201,5 @@ def run(a_cls, *args, **kwargs): a = a_cls(*args, **kwargs) a._parent = root a.start() + root.raise_errors() return root, a
Added automatic raising of received exceptions in util.testing.run
py