diff
stringlengths
139
3.65k
message
stringlengths
8
627
diff_languages
stringclasses
1 value
diff --git a/vent/menus/ntap.py b/vent/menus/ntap.py index <HASH>..<HASH> 100644 --- a/vent/menus/ntap.py +++ b/vent/menus/ntap.py @@ -212,7 +212,8 @@ class ActionNTap(npyscreen.ActionForm): # format the data into something ncontrol likes else: - payload = {'id': list(x['id'] for x in self.ms.values)} + payload = {'id': list(x['id'] for x in + self.ms.get_selected_objects())} # grab the url that network-tap is listening to try:
fixed bug where operations were performed on all items rather than user selected
py
diff --git a/tests/test_customers.py b/tests/test_customers.py index <HASH>..<HASH> 100644 --- a/tests/test_customers.py +++ b/tests/test_customers.py @@ -48,20 +48,23 @@ def test_customers_all(client, response): response.get('https://api.mollie.com/v2/customers', 'customer_multiple') customers = client.customers.all() + assert isinstance(customers, List) assert customers.count == 3 iterated = 0 + iterated_customer_ids = [] for customer in customers: + assert isinstance(customer, Customer) iterated += 1 assert customer.id is not None - assert customer.mode is not None - assert customer.resource is not None - assert customer.name is not None - assert customer.email is not None - assert customer.locale is not None - assert customer.created_at is not None - assert iterated == 3 + iterated_customer_ids.append(customer.id) + assert iterated == customer.count, 'Unexpected amount of customer retrieved' + assert len(set(iterated_customer_ids)) == customers.count, 'Unexpected amount of unique customer ids retrieved' +def test_customer_get(client, response): + """Retrieve a single customer.""" + pass + def test_customer_get_related_mandates(client, response): """Retrieve related mandates for a customer.""" response.get('https://api.mollie.com/v2/customers/%s' % CUSTOMER_ID, 'customer_updated')
Make test_customers_all() the same as all similar tests
py
diff --git a/machinist/_fsm.py b/machinist/_fsm.py index <HASH>..<HASH> 100644 --- a/machinist/_fsm.py +++ b/machinist/_fsm.py @@ -645,8 +645,9 @@ class _FiniteStateInterpreter(object): given rich input and deliver the resulting outputs to the wrapped L{IOutputExecutor}. - @param input: An instance of one of the rich input types this state - machine was initialized with. + @param input: An L{IRichInput} provider that must be an instance of + one of the rich input types this state machine was initialized + with. @return: The output from the wrapped L{IFiniteStateMachine}. """
Update the _FiniteStateInterpreter's receive docstring
py
diff --git a/librosa/feature/spectral.py b/librosa/feature/spectral.py index <HASH>..<HASH> 100644 --- a/librosa/feature/spectral.py +++ b/librosa/feature/spectral.py @@ -447,9 +447,16 @@ def spectral_rolloff(y=None, sr=22050, S=None, n_fft=2048, hop_length=512, From time-series input >>> y, sr = librosa.load(librosa.util.example_audio_file()) + >>> # Approximate maximum frequencies with roll_percent=0.85 (default) >>> rolloff = librosa.feature.spectral_rolloff(y=y, sr=sr) >>> rolloff array([[ 8376.416, 968.994, ..., 8925.513, 9108.545]]) + >>> # Approximate minimum frequencies with roll_percent=0.1 + >>> rolloff = librosa.feature.spectral_rolloff(y=y, sr=sr, roll_percent=0.1) + >>> rolloff + array([[ 75.36621094, 64.59960938, 64.59960938, ..., 75.36621094, + 75.36621094, 64.59960938]]) + From spectrogram input
Add approximate minimum frequency example [ci skip]
py
diff --git a/example_py/list_network_interfaces.py b/example_py/list_network_interfaces.py index <HASH>..<HASH> 100755 --- a/example_py/list_network_interfaces.py +++ b/example_py/list_network_interfaces.py @@ -8,8 +8,8 @@ Requires: pip install pyroute2 docopt Usage: - list_network_interfaces print - list_network_interfaces -h | --help + list_network_interfaces.py print + list_network_interfaces.py -h | --help """ from __future__ import print_function
Fixed docstring for docopt. Forgot .py.
py
diff --git a/nested_inline/admin.py b/nested_inline/admin.py index <HASH>..<HASH> 100644 --- a/nested_inline/admin.py +++ b/nested_inline/admin.py @@ -101,9 +101,9 @@ class NestedModelAdmin(admin.ModelAdmin): instance = form.instance else: instance = None - fieldsets = list(nested_inline.get_fieldsets(request)) - readonly = list(nested_inline.get_readonly_fields(request)) - prepopulated = dict(nested_inline.get_prepopulated_fields(request)) + fieldsets = list(nested_inline.get_fieldsets(request, instance)) + readonly = list(nested_inline.get_readonly_fields(request, instance)) + prepopulated = dict(nested_inline.get_prepopulated_fields(request, instance)) wrapped_nested_formset = helpers.InlineAdminFormSet(nested_inline, nested_formset, fieldsets, prepopulated, readonly, model_admin=self) wrapped_nested_formsets.append(wrapped_nested_formset)
Update admin.py I got into problem with readonly_fields when I got None even for form where I tried to modify an instance.
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -131,8 +131,8 @@ def get_build_args(): _LOGGER.warn('pkg-config failed to find tesseract/lept libraries: {}'.format(e)) build_args = get_tesseract_version() - if build_args['cython_compile_time_env']['TESSERACT_VERSION'] >= 0x040000: - _LOGGER.debug('tesseract >= 4.00 requires c++11 compiler support') + if build_args['cython_compile_time_env']['TESSERACT_VERSION'] >= 0x030502: + _LOGGER.debug('tesseract >= 03.05.02 requires c++11 compiler support') build_args['extra_compile_args'] = ['-std=c++11', '-DUSE_STD_NAMESPACE'] _LOGGER.debug('build parameters: {}'.format(build_args))
Fix compilation with tesseract <I> It needs -std=c<I>
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -3,7 +3,7 @@ from setuptools import find_packages, setup setup( name='hpfeeds3', - version='0.9.2.dev0', + version='0.9.2', description='Python implementation of the honeypot feeds broker', author='John Carr', author_email='john.carr@unrouted.co.uk',
Preparing release <I>
py
diff --git a/urls.py b/urls.py index <HASH>..<HASH> 100644 --- a/urls.py +++ b/urls.py @@ -31,7 +31,7 @@ urlpatterns = patterns('', (r'^api/', include('api.urls')), (r'^syn/', include('synagg.urls')), (r'^massmedia/', include('massmedia.urls')), - (r'^sitemaps/', include('news_sitemaps.urls')), + # (r'^sitemaps/', include('news_sitemaps.urls')), (r'^news/', include('stories.urls')), (r'^frontendadmin/', include('frontendadmin.urls')), (r'^sitemap\.xml$', 'django.contrib.sitemaps.views.index', {'sitemaps': sitemaps}),
Commented out sitemaps urls until they are included
py
diff --git a/telethon/utils.py b/telethon/utils.py index <HASH>..<HASH> 100644 --- a/telethon/utils.py +++ b/telethon/utils.py @@ -54,7 +54,7 @@ mimetypes.add_type('audio/flac', '.flac') mimetypes.add_type('application/x-tgsticker', '.tgs') USERNAME_RE = re.compile( - r'@|(?:https?://)?(?:www\.)?(?:telegram\.(?:me|dog)|t\.me)/(@|joinchat/)?' + r'@|(?:https?://)?(?:www\.)?(?:telegram\.(?:me|dog)|t\.me)/(@|\+|joinchat/)?' ) TG_JOIN_RE = re.compile( r'tg://(join)\?invite='
Add support for new invite link format (#<I>)
py
diff --git a/telethon/telegram_client.py b/telethon/telegram_client.py index <HASH>..<HASH> 100644 --- a/telethon/telegram_client.py +++ b/telethon/telegram_client.py @@ -2505,7 +2505,11 @@ class TelegramClient(TelegramBareClient): for builder, callback in self._event_builders: event = builder.build(update) if event: - event._set_client(self) + if hasattr(event, '_set_client'): + event._set_client(self) + else: + event._client = self + event.original_update = update try: callback(event)
Fix events.Raw not having ._set_client
py
diff --git a/salt/modules/grains.py b/salt/modules/grains.py index <HASH>..<HASH> 100644 --- a/salt/modules/grains.py +++ b/salt/modules/grains.py @@ -437,7 +437,7 @@ def delval(key, destructive=False, refresh=True): salt '*' grains.delval key ''' - setval(key, None, destructive=destructive, refresh=refresh) + setval(key, None, destructive=destructive) def ls(): # pylint: disable=C0103
Remove extra refresh reference that snuck in
py
diff --git a/vm.py b/vm.py index <HASH>..<HASH> 100644 --- a/vm.py +++ b/vm.py @@ -560,8 +560,12 @@ def constant_fold(code, silent=True, ignore_errors=True): except ValueError: pass - if isnumber(a) and b == "cast_str": - string = '"%s"' % str(a) + if isconstant(a) and b == "cast_str": + if isstring(a): + string = a[1:-1] + else: + string = str(a) + string = '"%s"' % string del code[i:i+2] code.insert(i, string) if not silent:
Expands cast_str constant folding
py
diff --git a/spyderlib/widgets/externalshell/sitecustomize.py b/spyderlib/widgets/externalshell/sitecustomize.py index <HASH>..<HASH> 100644 --- a/spyderlib/widgets/externalshell/sitecustomize.py +++ b/spyderlib/widgets/externalshell/sitecustomize.py @@ -600,9 +600,6 @@ def clear_post_mortem(): ipython_shell = get_ipython() if ipython_shell: ipython_shell.set_custom_exc((None,), None) - else: - from IPython.terminal.ipapp import TerminalIPythonApp - TerminalIPythonApp.exec_lines = ['clear_post_mortem()'] else: sys.excepthook = spyder_default_excepthook
Remove unused IPython startup command.
py
diff --git a/docs/sphinxext/plot_generator.py b/docs/sphinxext/plot_generator.py index <HASH>..<HASH> 100644 --- a/docs/sphinxext/plot_generator.py +++ b/docs/sphinxext/plot_generator.py @@ -13,13 +13,24 @@ import token import tokenize import shutil import json - -import matplotlib -matplotlib.use('Agg') -import matplotlib.pyplot as plt - -from matplotlib import image - +import sys + +try: + import matplotlib + matplotlib.use('Agg') + import matplotlib.pyplot as plt + from matplotlib import image +except ImportError: + import mock + + MOCK_MODULES = ['matplotlib', 'matplotlib.pyplot'] + for mod_name in MOCK_MODULES: + sys.modules[mod_name] = mock.Mock() + + import matplotlib + matplotlib.use('Agg') + import matplotlib.pyplot as plt + from matplotlib import image RST_TEMPLATE = """ .. _{sphinx_tag}:
Minor changes in plot_generator.py to work in readthedocs.
py
diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py b/pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py index <HASH>..<HASH> 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py @@ -320,7 +320,7 @@ class Consumer(object): initial_request = policy.get_initial_request() request_generator = _RequestQueueGenerator( self._request_queue, initial_request=initial_request) - rpc = policy.call_rpc(request_generator) + rpc = policy.call_rpc(iter(request_generator)) request_generator.rpc = rpc responses = _pausable_iterator(rpc, self._can_consume) try:
Fix missing iter on request stream (#<I>)
py
diff --git a/storm/ssh_config.py b/storm/ssh_config.py index <HASH>..<HASH> 100644 --- a/storm/ssh_config.py +++ b/storm/ssh_config.py @@ -9,7 +9,6 @@ from paramiko.config import SSHConfig from operator import itemgetter from exceptions import StormValueError -from paramiko.config import proxy_re class StormConfig(SSHConfig): @@ -47,6 +46,7 @@ class StormConfig(SSHConfig): if '=' in line: # Ensure ProxyCommand gets properly split if line.lower().strip().startswith('proxycommand'): + proxy_re = re.compile(r"^(proxycommand)\s*=*\s*(.*)", re.I) match = proxy_re.match(line) key, value = match.group(1).lower(), match.group(2) else:
proxy_re import causes problems for older versions of paramiko. it's hardcoded into storm codebase now. fixes #<I>.
py
diff --git a/s_tui/Sources/RaplPowerSource.py b/s_tui/Sources/RaplPowerSource.py index <HASH>..<HASH> 100644 --- a/s_tui/Sources/RaplPowerSource.py +++ b/s_tui/Sources/RaplPowerSource.py @@ -63,10 +63,10 @@ class RaplPowerSource(Source): return self.get_power_usage() def get_maximum(self): - return self.read_max_power_file() + return self.max_power def get_summary(self): - return {'Cur Power': '%d %s' % (self.last_measurement_value, self.get_measurement_unit()) + return {'Cur Power': '%d %s' % (self.last_measurement_value / self.MICRO_JAUL_IN_JAUL, self.get_measurement_unit()) , 'Max Power': '%d %s' % (self.max_power, self.get_measurement_unit())} def get_source_name(self):
Small error in rapl power source
py
diff --git a/squad/api/rest.py b/squad/api/rest.py index <HASH>..<HASH> 100644 --- a/squad/api/rest.py +++ b/squad/api/rest.py @@ -173,7 +173,7 @@ class BuildSerializer(serializers.HyperlinkedModelSerializer): testruns = serializers.HyperlinkedIdentityField(view_name='build-testruns') testjobs = serializers.HyperlinkedIdentityField(view_name='build-testjobs') status = serializers.HyperlinkedIdentityField(read_only=True, view_name='build-status', allow_null=True) - metadata = serializers.JSONField(read_only=True) + metadata = serializers.HyperlinkedIdentityField(read_only=True, view_name='build-metadata') class Meta: model = Build @@ -194,6 +194,11 @@ class BuildViewSet(ModelViewSet): def get_queryset(self): return self.queryset.filter(project__in=self.get_project_ids()) + @detail_route(methods=['get'], suffix='metadata') + def metadata(self, request, pk=None): + build = self.get_object() + return Response(build.metadata) + @detail_route(methods=['get'], suffix='status') def status(self, request, pk=None): try:
api: only expose Build.metadata on specialized endpoint Build.metadata is expensive, and can require fetching several test runs with their potentially large text fields. We don't want to do that for each build when listing them.
py
diff --git a/docs/source/conf.py b/docs/source/conf.py index <HASH>..<HASH> 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -33,7 +33,6 @@ sys.path.insert(0, os.path.abspath('../../sk_dsp_comm')) # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ - 'matplotlib.sphinxext.only_directives', 'matplotlib.sphinxext.plot_directive', 'sphinx.ext.autodoc', 'sphinx.ext.doctest',
only_directive is builtin now so removing from extensions list
py
diff --git a/umap/umap_.py b/umap/umap_.py index <HASH>..<HASH> 100644 --- a/umap/umap_.py +++ b/umap/umap_.py @@ -1307,7 +1307,8 @@ class UMAP(BaseEstimator): if self.target_n_neighbors < 2 and self.target_n_neighbors != -1: raise ValueError("target_n_neighbors must be greater than 2") if not isinstance(self.n_components, int): - raise ValueError("n_components must be an int") + self.n_components = int(self.n_components) + #raise ValueError("n_components must be an int") if self.n_components < 1: raise ValueError("n_components must be greater than 0") if self.n_epochs is not None and (
convert to Python int rather than raise an error
py
diff --git a/tests/gui/test_group_ungroup.py b/tests/gui/test_group_ungroup.py index <HASH>..<HASH> 100644 --- a/tests/gui/test_group_ungroup.py +++ b/tests/gui/test_group_ungroup.py @@ -65,6 +65,10 @@ def trigger_ungroup_signals(): menubar_ctrl = main_window_controller.get_controller('menu_bar_controller') call_gui_callback(rafcon.core.singleton.state_machine_manager.add_state_machine, create_state_machine()) + # TODO avoid this in all tests running, too!!! -> in single runs it does work without!!! + while len(sm_manager_model.state_machines) <= 0: + # give model time to be created + testing_utils.wait_for_gui() first_sm_id = sm_manager_model.state_machines.keys()[0] call_gui_callback(main_window_controller.view['main_window'].grab_focus)
fix(group and ungroup): put in work around for wait again but mark it as TODO
py
diff --git a/src/plone/app/mosaic/upgrades.py b/src/plone/app/mosaic/upgrades.py index <HASH>..<HASH> 100644 --- a/src/plone/app/mosaic/upgrades.py +++ b/src/plone/app/mosaic/upgrades.py @@ -83,6 +83,8 @@ def upgrade_8_to_9(context): # Add Mosaic view into available view methods view_methods = [i for i in fti.getAvailableViewMethods(portal)] view_methods.append('layout_view') + if 'view' in view_methods: + view_methods.remove('view') fti.view_methods = list(set(view_methods)) if fti.default_view == 'view':
Fix upgrade step to remove 'view' from available view methods for types with layout behavior enabled
py
diff --git a/opengem/output/risk.py b/opengem/output/risk.py index <HASH>..<HASH> 100644 --- a/opengem/output/risk.py +++ b/opengem/output/risk.py @@ -46,9 +46,11 @@ class RiskXMLWriter(writer.FileWriter): pe_values = _curve_pe_as_gmldoublelist(curve_object) - subnode_pe = self.root_node.findall(".//" + self.abcissa_tag) - if len(subnode_pe): - if subnode_pe[0].text != pe_values: + # This use of not None is b/c of the trap w/ ElementTree find + # for nodes that have no child nodes. + subnode_pe = self.root_node.find(self.abcissa_tag) + if subnode_pe is not None: + if subnode_pe.text != pe_values: raise Exception("Curves must share the same Abcissa!") else: subnode_pe = etree.SubElement(self.root_node,
A little tidy-up from findall to a find check, having checked the lxml docs on how empty nodes are handled in if statements. Former-commit-id: ac4a9aea<I>a<I>a<I>b<I>a<I>a<I>d<I>b5b3af<I>
py
diff --git a/nosedjango/nosedjango.py b/nosedjango/nosedjango.py index <HASH>..<HASH> 100644 --- a/nosedjango/nosedjango.py +++ b/nosedjango/nosedjango.py @@ -148,9 +148,6 @@ class NoseDjango(Plugin): settings.DATABASE_ENGINE = 'sqlite3' settings.TEST_DATABASE_NAME = None # in-memory database - # Do our custom testrunner stuff - custom_before() - # Some Django code paths evaluate differently # between DEBUG and not DEBUG. Example of this include the url # dispatcher when 404's are hit. Django's own test runner forces DEBUG
Fix a bug caused by a merge Closes #<I>
py
diff --git a/test/input/func_noerror_models_py33.py b/test/input/func_noerror_models_py33.py index <HASH>..<HASH> 100644 --- a/test/input/func_noerror_models_py33.py +++ b/test/input/func_noerror_models_py33.py @@ -23,7 +23,7 @@ class SomeModel(models.Model): except self.MultipleObjectsReturned: print('lala') - print (self.get_some_field_display()) + print(self.get_some_field_display()) class SubclassModel(SomeModel):
Another space before a bracked in a print statement is making tests fail...
py
diff --git a/tests/test_jira.py b/tests/test_jira.py index <HASH>..<HASH> 100644 --- a/tests/test_jira.py +++ b/tests/test_jira.py @@ -26,7 +26,7 @@ import logging import unittest from base import TestBaseBackend -from grimoire_elk.ocean.jenkins import JenkinsOcean +from grimoire_elk.raw.jenkins import JenkinsOcean class TestJira(TestBaseBackend):
[tests] Update import section test_jira
py
diff --git a/tests/test_potential.py b/tests/test_potential.py index <HASH>..<HASH> 100644 --- a/tests/test_potential.py +++ b/tests/test_potential.py @@ -2780,10 +2780,10 @@ class SolidBodyRotationSpiralArmsPotential(SolidBodyRotationWrapperPotential): return SolidBodyRotationWrapperPotential.__new__(cls,amp=1., pot=spn.toPlanar(), omega=1.1,pa=0.4) -class mockFlatSolidBodyRotationSpiralArmsPotential(testMWPotential): +class mockFlatSolidBodyRotationSpiralArmsPotential(testplanarMWPotential): def __init__(self): - testMWPotential.__init__(self, - potlist=[potential.LogarithmicHaloPotential(normalize=1.), - SolidBodyRotationWrapperPotential(amp=1.,pot=potential.SpiralArmsPotential(),omega=1.3)]) + testplanarMWPotential.__init__(self, + potlist=[potential.LogarithmicHaloPotential(normalize=1.).toPlanar(), + SolidBodyRotationWrapperPotential(amp=1.,pot=potential.SpiralArmsPotential().toPlanar(),omega=1.3)]) def OmegaP(self): return self._potlist[1].OmegaP()
Make the solidbody orbit test a wrapper of a planar potential, to test that path through the code
py
diff --git a/LiSE/LiSE/cache.py b/LiSE/LiSE/cache.py index <HASH>..<HASH> 100644 --- a/LiSE/LiSE/cache.py +++ b/LiSE/LiSE/cache.py @@ -157,7 +157,10 @@ class RulesHandledCache(object): unhandl = unhandl[spot] if turn not in unhandl.setdefault(branch, {}): unhandl[branch][turn] = list(self.iter_unhandled_rules(branch, turn, tick)) - unhandl[branch][turn].remove(entity + (rulebook, rule)) + try: + unhandl[branch][turn].remove(entity + (rulebook, rule)) + except ValueError: + pass shalo.add(rule) def retrieve(self, *args):
Suppress an error when storing a handled rule I think this is safe, since storing handledness in this situation is just redundant.
py
diff --git a/tests/build_image/tests.py b/tests/build_image/tests.py index <HASH>..<HASH> 100644 --- a/tests/build_image/tests.py +++ b/tests/build_image/tests.py @@ -63,6 +63,13 @@ class BuildImageTestCase(CLITestCase): def test_info(self, api): self._setup_detail(api) self._setup_list_2(api) + with self.expect_output('detail.txt'): + self.runner.run(['build-image', 'info', 'test_image_1']) + self.assertEqual(api.calls['build-images'], [('GET', {'image_id': 'test_image_1'})]) + + def test_info_json(self, api): + self._setup_detail(api) + self._setup_list_2(api) with self.expect_output('detail.json', parse_json=True): self.runner.run(['--json', 'build-image', 'info', 'test_image_1']) self.assertEqual(api.calls['build-images'], [('GET', {'image_id': 'test_image_1'})])
Add test for build-image detail.
py
diff --git a/dreamssh/config.py b/dreamssh/config.py index <HASH>..<HASH> 100644 --- a/dreamssh/config.py +++ b/dreamssh/config.py @@ -33,6 +33,8 @@ ssh.keydir = os.path.join(main.config.datadir, "ssh") ssh.privkey = "id_rsa" ssh.pubkey = "id_rsa.pub" ssh.localdir = "~/.ssh" +ssh.userdirtemplate = os.path.join(main.config.datadir, "users", "%s") +ssh.userauthkeys = os.path.join(ssh.userdirtemplate, "authorized_keys") ssh.banner = """: : Welcome to :
Added new config options for user keys.
py
diff --git a/mysql/toolkit/script/dump.py b/mysql/toolkit/script/dump.py index <HASH>..<HASH> 100644 --- a/mysql/toolkit/script/dump.py +++ b/mysql/toolkit/script/dump.py @@ -51,10 +51,7 @@ def dump_commands(commands, sql_script, db=None, sub_folder='fails'): os.mkdir(dump_dir) # Create list of (path, content) tuples - command_filepath = [] - for count, fail in enumerate(fails): - txt_file = os.path.join(dump_dir, str(count) + '.sql') - command_filepath.append((fail, txt_file)) + command_filepath = [(fail, os.path.join(dump_dir, str(count) + '.sql')) for count, fail in enumerate(fails)] # Dump failed commands to text file in the same directory as the script # Utilize's multiprocessing module if it is available
Replace command_filepath list creation with list comprehension
py
diff --git a/integration_test/integration_tests.py b/integration_test/integration_tests.py index <HASH>..<HASH> 100644 --- a/integration_test/integration_tests.py +++ b/integration_test/integration_tests.py @@ -72,7 +72,7 @@ def send_letter_notification_test_response(python_client): def send_precompiled_letter_notification_test_response(python_client): unique_name = str(uuid.uuid4()) - with open('integrations_test/test_files/one_page_pdf.pdf', "rb") as pdf_file: + with open('integration_test/test_files/one_page_pdf.pdf', "rb") as pdf_file: response = python_client.send_precompiled_letter_notification( reference=unique_name, pdf_file=pdf_file
Fix one_page_pdf.pdf path
py
diff --git a/tests/integration/test_utils.py b/tests/integration/test_utils.py index <HASH>..<HASH> 100644 --- a/tests/integration/test_utils.py +++ b/tests/integration/test_utils.py @@ -63,10 +63,10 @@ def test_check_docker(): def test_run_cmd(): - ret = run_cmd(["sh", "-c", "for x in `seq 1 5`; do echo $x; sleep 1; done"]) + ret = run_cmd(["sh", "-c", "for x in `seq 1 5`; do echo $x; sleep 0.01; done"]) assert not ret - ret = run_cmd(["sh", "-c", "for x in `seq 1 5`; do echo $x; sleep 1; done"], + ret = run_cmd(["sh", "-c", "for x in `seq 1 5`; do echo $x; sleep 0.01; done"], return_output=True) assert ret == '1\n2\n3\n4\n5\n'
Make test_run_cmd faster.
py
diff --git a/rarfile.py b/rarfile.py index <HASH>..<HASH> 100644 --- a/rarfile.py +++ b/rarfile.py @@ -465,7 +465,7 @@ class RarInfo(object): return False def needs_password(self): - return self.flags & RAR_FILE_PASSWORD + return (self.flags & RAR_FILE_PASSWORD) > 0 class RarFile(object):
Return boolean from needs_password() Fixes: #<I>
py
diff --git a/test/getdns_test.py b/test/getdns_test.py index <HASH>..<HASH> 100644 --- a/test/getdns_test.py +++ b/test/getdns_test.py @@ -19,9 +19,8 @@ class TestGetdnsMethods(unittest.TestCase): def test_bogus_attribute(self): c = getdns.Context() - with self.assertRaises(AttributeError, c.asdf) as e: - - except + with self.assertRaises(AttributeError): + c.asdf del(c) def test_append_name(self):
fix usage of assertRaises in test_bogus_attributes test
py
diff --git a/pyamg/blackbox.py b/pyamg/blackbox.py index <HASH>..<HASH> 100644 --- a/pyamg/blackbox.py +++ b/pyamg/blackbox.py @@ -99,12 +99,13 @@ def solver_configuration(A, B=None, verb=True): # Detect symmetry if ishermitian(A, fast_check=True): config['symmetry'] = 'hermitian' + if verb: + print " Detected a Hermitian matrix" else: config['symmetry'] = 'nonsymmetric' - # - if verb: - print " Detected a " + config['symmetry'] + " matrix" - + if verb: + print " Detected a non-Hermitian matrix" + ## # Symmetry dependent parameters if config['symmetry'] == 'hermitian':
Changed blackbox solver output to correctly state non-Hermitian instead of non-symmetric
py
diff --git a/kmip/core/enums.py b/kmip/core/enums.py index <HASH>..<HASH> 100644 --- a/kmip/core/enums.py +++ b/kmip/core/enums.py @@ -500,6 +500,15 @@ class KeyRoleType(enum.Enum): PVKOTH = 0x00000015 +class State(enum.Enum): + PRE_ACTIVE = 0x00000001 + ACTIVE = 0x00000002 + DEACTIVATED = 0x00000003 + COMPROMISED = 0x00000004 + DESTROYED = 0x00000005 + DESTROYED_COMPROMISED = 0x00000006 + + # 9.1.3.2.24 class QueryFunction(enum.Enum): QUERY_OPERATIONS = 0x00000001
Adding the State enumeration This change adds the State enumeration to the enumerations set.
py
diff --git a/downhill/dataset.py b/downhill/dataset.py index <HASH>..<HASH> 100644 --- a/downhill/dataset.py +++ b/downhill/dataset.py @@ -100,7 +100,7 @@ class Dataset: if not self.iteration_size: try: self.iteration_size = len(inputs) - except TypeError: # has no len + except TypeError, AttributeError: # has no len self.iteration_size = 100 logging.info('%s: %d mini-batches from callable', self.name, self.iteration_size)
Catch AttributeError for no length. Closes issue #1.
py
diff --git a/wal_e/operator/backup.py b/wal_e/operator/backup.py index <HASH>..<HASH> 100644 --- a/wal_e/operator/backup.py +++ b/wal_e/operator/backup.py @@ -53,7 +53,7 @@ class Backup(object): bl = self._backup_list(detail) # If there is no query, return an exhaustive list, otherwise - # find a backup instad. + # find a backup instead. if query is None: bl_iter = bl else:
Fix a typo in comment. Harmless.
py
diff --git a/salt/utils/event.py b/salt/utils/event.py index <HASH>..<HASH> 100644 --- a/salt/utils/event.py +++ b/salt/utils/event.py @@ -279,18 +279,10 @@ class SaltEvent(object): def connect_pull(self, timeout=1000): ''' Establish a connection with the event pull socket - Set the send timeout of the socket options to timeout (in milliseconds) + Set the linger timeout of the socket options to timeout (in milliseconds) Default timeout is 1000 ms - The linger timeout must be at least as long as this timeout ''' self.push = self.context.socket(zmq.PUSH) - try: - # bug in 0MQ default send timeout of -1 (infinite) is not infinite - self.push.setsockopt(zmq.SNDTIMEO, timeout) - except AttributeError: - # This is for ZMQ < 2.2 (Caught when ssh'ing into the Jenkins - # CentOS5, which still uses 2.1.9) - pass self.push.setsockopt(zmq.LINGER, timeout) self.push.connect(self.pulluri) self.cpush = True
SNDTIMEO is not implemented in pyzmq, and since we are using linger this gets us the same thing
py
diff --git a/liquid_tags/graphviz.py b/liquid_tags/graphviz.py index <HASH>..<HASH> 100644 --- a/liquid_tags/graphviz.py +++ b/liquid_tags/graphviz.py @@ -51,6 +51,7 @@ Output import base64 import re +from errno import EINVAL, EPIPE from .mdx_liquid_tags import LiquidTags SYNTAX = '{% dot graphviz [program] [dot code] %}'
Python 3 requires from errno import EINVAL, EPIPE Python 2 did not require these import but Python 3 does.
py
diff --git a/brain/py2stdlib.py b/brain/py2stdlib.py index <HASH>..<HASH> 100644 --- a/brain/py2stdlib.py +++ b/brain/py2stdlib.py @@ -301,17 +301,16 @@ def infer_enum_class(node, context=None): for target in parent.targets: # Replace all the assignments with our mocked class. classdef = dedent(''' - class %(name)s(%(base_name)s): + class %(name)s(object): @property def value(self): - return %(value)r + return %(value)s @property def name(self): return %(name)r - %(name)s = %(value)r - ''' % {'base_name': 'int' if 'Int' in basename else 'str', - 'name': target.name, - 'value': real_value.value}) + %(name)s = %(value)s + ''' % {'name': target.name, + 'value': real_value.as_string()}) fake = AstroidBuilder(MANAGER).string_build(classdef)[target.name] fake.parent = target.parent new_targets.append(fake.instanciate_class())
Use the same representation from the definition time for mocked enums.
py
diff --git a/visidata/settings.py b/visidata/settings.py index <HASH>..<HASH> 100644 --- a/visidata/settings.py +++ b/visidata/settings.py @@ -194,7 +194,7 @@ class OptionsObject: objname = self._opts.objname(obj) vd.cmdlog.addRow(vd.cmdlog.newRow(sheet=objname, row=optname, keystrokes='', input=str(value), - longname='set-option')) + longname='set-option', undofuncs=[])) else: curval = None vd.warning('setting unknown option %s' % optname)
[cmdlog-] intialise undofuncs to empty list when add a row to cmdlog Closes #<I> The assumption in the code is that undofuncs is an empty list. This assumption was not met with `set-options`.
py
diff --git a/alarmdecoder/messages.py b/alarmdecoder/messages.py index <HASH>..<HASH> 100644 --- a/alarmdecoder/messages.py +++ b/alarmdecoder/messages.py @@ -13,6 +13,7 @@ devices. """ import re +import datetime from .util import InvalidMessageError @@ -25,11 +26,14 @@ class BaseMessage(object): raw = None """The raw message text""" + timestamp = None + """The timestamp of the message""" + def __init__(self): """ Constructor """ - pass + self.timestamp = datetime.datetime.now() def __str__(self): """
Added timestamp to messages.
py
diff --git a/api/models.py b/api/models.py index <HASH>..<HASH> 100644 --- a/api/models.py +++ b/api/models.py @@ -19,6 +19,7 @@ from django.db.models.signals import post_delete from django.db.models.signals import post_save from django.utils.encoding import python_2_unicode_compatible from django_fsm import FSMField, transition +from django_fsm.signals import post_transition from json_field.fields import JSONField from api import fields, tasks @@ -536,6 +537,13 @@ post_save.connect(_log_build_created, sender=Build, dispatch_uid='api.models') post_save.connect(_log_release_created, sender=Release, dispatch_uid='api.models') post_save.connect(_log_config_updated, sender=Config, dispatch_uid='api.models') + +# save FSM transitions as they happen +def _save_transition(**kwargs): + kwargs['instance'].save() + +post_transition.connect(_save_transition) + # wire up etcd publishing if we can connect try: _etcd_client = etcd.Client(host=settings.ETCD_HOST, port=int(settings.ETCD_PORT))
fix(fsm): save state transitions as they happen
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -1,9 +1,13 @@ from setuptools import setup +with open('README.rst', encoding="utf-8") as f: + readme = f.read() + setup( name='cryptocompare', - version='0.1', + version='0.2', description='Wrapper for CryptoCompare.com', + long_description=readme, url='https://github.com/lagerfeuer/cryptocompare', author='lagerfeuer', author_email='lukas.deutz@tuta.io',
added long_description and <I>
py
diff --git a/tensorbase/stoch.py b/tensorbase/stoch.py index <HASH>..<HASH> 100644 --- a/tensorbase/stoch.py +++ b/tensorbase/stoch.py @@ -105,8 +105,7 @@ class GaussianLayerConv(StochLayer): x must be 2D. [batch_size * eqsamples* iwsamples, num_latent] """ mu, std, h, w = self.params - shape = tf.pack([100, self.eq_samples, self.iw_samples, x_dims[0], x_dims[1], self.num_latent]) - print(shape.get_shape()) + shape = tf.pack([32, self.eq_samples, self.iw_samples, x_dims[0], x_dims[1], self.num_latent]) x_reshape = tf.reshape(x, shape) c = - 0.5 * math.log(2 * math.pi) if standard is False:
convnet and deconvnet
py
diff --git a/lyricsgenius/api.py b/lyricsgenius/api.py index <HASH>..<HASH> 100644 --- a/lyricsgenius/api.py +++ b/lyricsgenius/api.py @@ -122,7 +122,7 @@ class _API(object): def _result_is_lyrics(self, song_title): """Returns False if result from Genius is not actually song lyrics""" regex = re.compile( - r"(tracklist)|(track list)|(album art(work)?)|(liner notes)|(booklet)|(credits)", re.IGNORECASE) + r"(tracklist)|(track list)|(album art(work)?)|(liner notes)|(booklet)|(credits)|(remix)|(interview)|(skit)", re.IGNORECASE) return not regex.search(song_title)
Add additional terms to filter non-songs
py
diff --git a/neuropythy/util/core.py b/neuropythy/util/core.py index <HASH>..<HASH> 100644 --- a/neuropythy/util/core.py +++ b/neuropythy/util/core.py @@ -171,7 +171,7 @@ def normalize(data): return newdict elif pimms.is_array(data, ('number', 'string', 'unicode', 'bool')): # numpy arrays just get turned into lists - return data.tolist() if pimms.is_nparray(data) else data + return np.asarray(data).tolist() elif data is Ellipsis: return {normalize.type_key: [None, 'ellipsis']} elif pimms.is_scalar(data):
fix so that arrays of nparrays will work with normalize()
py
diff --git a/dvc/output.py b/dvc/output.py index <HASH>..<HASH> 100644 --- a/dvc/output.py +++ b/dvc/output.py @@ -872,8 +872,8 @@ class Output: try: self.get_dir_cache(jobs=jobs, remote=remote) - except RemoteMissingDepsError as ex: - raise ex + except RemoteMissingDepsError: + raise except DvcException: logger.debug(f"failed to pull cache for '{self}'")
Reraise RemoteMissingDepsError as is
py
diff --git a/bbc_tracklist.py b/bbc_tracklist.py index <HASH>..<HASH> 100755 --- a/bbc_tracklist.py +++ b/bbc_tracklist.py @@ -184,7 +184,8 @@ def tag_audio_file(audio_file, tracklisting): """ try: save_tag_to_audio_file(audio_file, tracklisting) - except IOError: + # TODO: is IOError required now or would the mediafile exception cover it? + except (IOError, mediafile.UnreadableFileError): print("Unable to save tag to file:", audio_file) audio_tagging_successful = False except TagNotNeededError:
Add exception case for mediafile Didn't use to be present. Was causing tagging to fail, if either m4a or mp3 weren't present. IOError might be removable now.
py
diff --git a/country_converter/version.py b/country_converter/version.py index <HASH>..<HASH> 100644 --- a/country_converter/version.py +++ b/country_converter/version.py @@ -1 +1 @@ -__version__ = "0.7.1d1" +__version__ = "0.7.1.dev1"
made version numbering pypi compatible
py
diff --git a/src/OpenSSL/crypto.py b/src/OpenSSL/crypto.py index <HASH>..<HASH> 100644 --- a/src/OpenSSL/crypto.py +++ b/src/OpenSSL/crypto.py @@ -3239,19 +3239,3 @@ load_pkcs12 = utils.deprecated( ), DeprecationWarning, ) - - -# There are no direct unit tests for this initialization. It is tested -# indirectly since it is necessary for functions like dump_privatekey when -# using encryption. -# -# Thus OpenSSL.test.test_crypto.FunctionTests.test_dump_privatekey_passphrase -# and some other similar tests may fail without this (though they may not if -# the Python runtime has already done some initialization of the underlying -# OpenSSL library (and is linked against the same one that cryptography is -# using)). -_lib.OpenSSL_add_all_algorithms() - -# Set the default string mask to match OpenSSL upstream (since 2005) and -# RFC5280 recommendations. -_lib.ASN1_STRING_set_default_mask_asc(b"utf8only")
remove more global initialization that isn't required on modern openssl (#<I>)
py
diff --git a/src/p4p/client/raw.py b/src/p4p/client/raw.py index <HASH>..<HASH> 100644 --- a/src/p4p/client/raw.py +++ b/src/p4p/client/raw.py @@ -204,7 +204,7 @@ class Context(object): def __del__(self): if self._ctxt is not None: warnings.warn("%s collected without close()" % self.__class__) - self.close() + self.close() def __enter__(self): return self
client: avoid superfluous close() Fine with py 3.x where __del__ is a finalizer. Potentially troublesome with 2.x where 'self' may be partially collected (to break a loop).
py
diff --git a/docs/examples/async_rpc.py b/docs/examples/async_rpc.py index <HASH>..<HASH> 100644 --- a/docs/examples/async_rpc.py +++ b/docs/examples/async_rpc.py @@ -1,6 +1,6 @@ with ClusterRpcProxy(config) as cluster_rpc: - hello_res = cluster_rpc.service_x.remote_method.async("hello") - world_res = cluster_rpc.service_x.remote_method.async("world") + hello_res = cluster_rpc.service_x.remote_method.call_async("hello") + world_res = cluster_rpc.service_x.remote_method.call_async("world") # do work while waiting hello_res.result() # "hello-x-y" world_res.result() # "world-x-y"
use call_async instead of async in docs example
py
diff --git a/safe_qgis/test_dock.py b/safe_qgis/test_dock.py index <HASH>..<HASH> 100644 --- a/safe_qgis/test_dock.py +++ b/safe_qgis/test_dock.py @@ -863,18 +863,19 @@ class DockTest(unittest.TestCase): # Enable on-the-fly reprojection setCanvasCrs(GEOCRS, True) - # Zoom to an area where there is no overlaop with layers - myRect = QgsRectangle(106.61001188831219, -6.130614191176471, - 106.67188745972703, -6.080190955882353) + # Zoom to an area where there is no overlap with layers + myRect = QgsRectangle(106.635434302702, -6.101567666986, + 106.635434302817, -6.101567666888) CANVAS.setExtent(myRect) # Press RUN - QTest.mouseClick(myButton, QtCore.Qt.LeftButton) + DOCK.accept() myResult = DOCK.wvResults.page().currentFrame().toPlainText() # Check for an error containing InsufficientOverlapError - myMessage = 'Result not as expected: %s' % myResult myExpectedString = 'InsufficientOverlapError' + myMessage = 'Result not as expected %s not in: %s' % ( + myExpectedString, myResult) # This is the expected impact number self.assertIn(myExpectedString, myResult, myMessage)
Fix for failing test for #<I>
py
diff --git a/src/bezier/_algebraic_intersection.py b/src/bezier/_algebraic_intersection.py index <HASH>..<HASH> 100644 --- a/src/bezier/_algebraic_intersection.py +++ b/src/bezier/_algebraic_intersection.py @@ -1379,14 +1379,9 @@ def all_intersections(candidates_left, candidates_right): """ result = [] for index_first, first in enumerate(candidates_left): - # NOTE: In the below we replace ``isinstance(a, B)`` with - # ``a.__class__ is B``, which is a 3-3.5x speedup. - curve1 = first.curve if first.__class__ is _LINEARIZATION else first - nodes1 = curve1._nodes + nodes1 = first._nodes for index_second, second in enumerate(candidates_right): - curve2 = ( - second.curve if second.__class__ is _LINEARIZATION else second) - nodes2 = curve2._nodes + nodes2 = second._nodes # Only attempt this if the bounding boxes intersect. bbox_int = _geometric_intersection.bbox_intersect(nodes1, nodes2) @@ -1396,7 +1391,7 @@ def all_intersections(candidates_left, candidates_right): st_vals = intersect_curves(nodes1, nodes2) for s, t in st_vals: intersection = _intersection_helpers.Intersection( - curve1, s, curve2, t) + first, s, second, t) intersection.index_first = index_first intersection.index_second = index_second result.append(intersection)
Stop supporting Linearization in algebraic `all_intersections()`. This also fixes a lint error with too many locals (which is a secondary, not primary, concern). See failure: <URL>
py
diff --git a/pyemma/msm/tests/test_its_oom.py b/pyemma/msm/tests/test_its_oom.py index <HASH>..<HASH> 100644 --- a/pyemma/msm/tests/test_its_oom.py +++ b/pyemma/msm/tests/test_its_oom.py @@ -30,9 +30,17 @@ import warnings from pyemma.msm import markov_model from pyemma.util.linalg import _sort_by_norm -from pyemma.msm import timescales_msm +from pyemma.msm import timescales_msm as _ts_msm from six.moves import range +def timescales_msm(*args, **kw): + # wrap this function to use multi-processing, since these tests are running quite long. + if 'n_jobs' in kw: + pass + else: + kw['n_jobs'] = None # let the environment determine this. + return _ts_msm(*args, **kw) + def oom_transformations(Ct, C2t, rank): # Number of states: N = Ct.shape[0]
[test_its_oom] use n_jobs=None to speed up testing.
py
diff --git a/salt/cloud/clouds/nova.py b/salt/cloud/clouds/nova.py index <HASH>..<HASH> 100644 --- a/salt/cloud/clouds/nova.py +++ b/salt/cloud/clouds/nova.py @@ -148,7 +148,6 @@ avail_locations = namespaced_function(avail_locations, globals()) script = namespaced_function(script, globals()) destroy = namespaced_function(destroy, globals()) reboot = namespaced_function(reboot, globals()) -conn = False # Only load in this module is the OPENSTACK configurations are in place
Remove the global conn variable. fixes #<I> This should have been removed before the previous pull request
py
diff --git a/pyemma/_base/serialization/serialization.py b/pyemma/_base/serialization/serialization.py index <HASH>..<HASH> 100644 --- a/pyemma/_base/serialization/serialization.py +++ b/pyemma/_base/serialization/serialization.py @@ -169,6 +169,12 @@ class _SerializableBase(object): self.__save_data_producer = value # forward flag to the next data producer if hasattr(self, 'data_producer') and self.data_producer and self.data_producer is not self: + # TODO: review, this could be desired, but is super inefficient. + from pyemma.coordinates.data import DataInMemory + if isinstance(self.data_producer, DataInMemory): + import warnings + warnings.warn("We refuse to save NumPy arrays wrapped with DataInMemory.") + return assert isinstance(self.data_producer, _SerializableBase), self.data_producer self.data_producer._save_data_producer = value
[serialization] deny saving datainmemory
py
diff --git a/axiom/batch.py b/axiom/batch.py index <HASH>..<HASH> 100644 --- a/axiom/batch.py +++ b/axiom/batch.py @@ -19,9 +19,7 @@ from twisted.conch.insults import insults from twisted.conch.manhole_ssh import ConchFactory, TerminalUser, TerminalSession from twisted.conch import interfaces as iconch -from epsilon import extime, process, cooperator, modal - -from vertex import juice +from epsilon import extime, process, cooperator, modal, juice from axiom import iaxiom, errors as eaxiom, item, attributes
break circular project dependencies by moving juice juice is now a feature of epsilon. This is a simple mechanical move, which should make releasing axiom a bit easier, since it no longer depends on vertex. Author: glyph Reviewer: washort Fixes #<I>
py
diff --git a/helpers/postgresql.py b/helpers/postgresql.py index <HASH>..<HASH> 100644 --- a/helpers/postgresql.py +++ b/helpers/postgresql.py @@ -245,10 +245,11 @@ class Postgresql: ret = subprocess.call(self._pg_ctl + ['start', '-o', self.server_options()]) == 0 ret and self.load_replication_slots() self.save_configuration_files() - if os.path.exists(self.recovery_conf): - self.on_change_callback('replica') - else: - self.on_change_callback('master') + if self.on_change_callback: + if os.path.exists(self.recovery_conf): + self.on_change_callback('replica') + else: + self.on_change_callback('master') return ret def stop(self):
call on_change_callback only if it has been set.
py
diff --git a/pyxmpp/iq.py b/pyxmpp/iq.py index <HASH>..<HASH> 100644 --- a/pyxmpp/iq.py +++ b/pyxmpp/iq.py @@ -127,6 +127,18 @@ class Iq(Stanza): stanza = Iq(stanza_type = "result", from_jid = self.to_jid, to_jid = self.from_jid, stanza_id = self.stanza_id) return stanza + + def get_payload(self): + """Return the stanza payload object or `None`. + + :Returntype: `StanzaPayload` + """ + if self._payload is None: + self.decode_payload() + if self._payload: + return self._payload[0] + else: + return None def add_payload(self, payload): """Add new the stanza payload. Fails if there is already some
Iq.get_payload() method added
py
diff --git a/cmd.py b/cmd.py index <HASH>..<HASH> 100755 --- a/cmd.py +++ b/cmd.py @@ -3,7 +3,7 @@ import os import binascii import argparse import json -import threading +import base64 from trezorlib.client import TrezorClient from trezorlib.api_blockchain import BlockchainApi @@ -135,10 +135,17 @@ class Commands(object): args.pin_protection, args.label, 'english') def sign_message(self, args): - return pb2json(self.client.sign_message(args.n, args.message), {'message': args.message}) + ret = self.client.sign_message(args.n, args.message) + output = { + 'message': args.message, + 'address': ret.address, + 'signature': base64.b64encode(ret.signature) + } + return output def verify_message(self, args): - return self.client.verify_message(args.address, args.signature, args.message) + signature = base64.b64decode(args.signature) + return self.client.verify_message(args.address, signature, args.message) def firmware_update(self, args): if not args.file:
Fixing sign_message/verify_message API in cmd
py
diff --git a/law/contrib/pyarrow/util.py b/law/contrib/pyarrow/util.py index <HASH>..<HASH> 100644 --- a/law/contrib/pyarrow/util.py +++ b/law/contrib/pyarrow/util.py @@ -40,7 +40,7 @@ def merge_parquet_files(src_paths, dst_path, force=True, callback=None, writer_o # default writer options _writer_opts = dict( - version="2.0", + version="2.4", compression="gzip", use_dictionary=True, data_page_size=2097152,
Fix parquet writer version in pyarrow contrib.
py
diff --git a/docs/source/conf.py b/docs/source/conf.py index <HASH>..<HASH> 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -33,6 +33,7 @@ MOCK_MODULES = ['twisted', 'twisted.internet', 'magic', 'gpgme', 'configobj', + 'validate', 'argparse'] MOCK_DIRTY = ['notmuch'] for mod_name in MOCK_MODULES:
docs: fix rtfd.org build
py
diff --git a/synapse/axon.py b/synapse/axon.py index <HASH>..<HASH> 100644 --- a/synapse/axon.py +++ b/synapse/axon.py @@ -562,8 +562,6 @@ class Axon(s_config.Config, AxonMixin): self.syncdir = None - self.onfini(self._onAxonFini) - self.onfini(self.core.fini) self.onfini(self.heap.fini) self.onfini(self.dmon.fini) @@ -589,6 +587,8 @@ class Axon(s_config.Config, AxonMixin): self.axonbus.runSynSvc(self.iden, self, **props) self.axcthr = self._fireAxonClones() + self.onfini(self._onAxonFini) + @staticmethod @s_config.confdef(name='axon') def _axon_confdefs():
Fini axon threads AFTER giving the persist file objects a chance to shutdown
py
diff --git a/bench/serializer-bench/clean_bench.py b/bench/serializer-bench/clean_bench.py index <HASH>..<HASH> 100644 --- a/bench/serializer-bench/clean_bench.py +++ b/bench/serializer-bench/clean_bench.py @@ -39,7 +39,11 @@ def run_clean_bench(drive_path, parameters, workload): serv = subprocess.Subprocess( ["./serializer-bench", "-f", drive_path, "--forever"] + format_args(parameters), stderr = subprocess.PIPE, stdout = subprocess.PIPE) - output = serv.communicate()[1] + try: + output = serv.communicate()[1] + finally: + try: serv.terminate() + except RuntimeError: pass assert "RethinkDB ran out of disk space." in output # Now run the actual test on it @@ -47,7 +51,11 @@ def run_clean_bench(drive_path, parameters, workload): serv = subprocess.Subprocess( ["./serializer-bench", "-f", drive_path] + format_args(parameters) + format_args(workload), stderr = subprocess.PIPE, stdout = subprocess.PIPE) - output = serv.communicate()[1] + try: + output = serv.communicate()[1] + finally: + try: serv.terminate() + except RuntimeError: pass if serv.returncode != 0: raise RuntimeError("RethinkDB serializer failed:\n" + output)
Made clean_bench.py kill its subprocess
py
diff --git a/tests/integration/modules/test_cp.py b/tests/integration/modules/test_cp.py index <HASH>..<HASH> 100644 --- a/tests/integration/modules/test_cp.py +++ b/tests/integration/modules/test_cp.py @@ -1,7 +1,3 @@ -# -*- coding: utf-8 -*- - -from __future__ import absolute_import, print_function, unicode_literals - import hashlib import logging import os @@ -40,9 +36,7 @@ class CPModuleTest(ModuleCase): TODO: maybe move this behavior to ModuleCase itself? """ - return salt.utils.data.decode( - super(CPModuleTest, self).run_function(*args, **kwargs) - ) + return salt.utils.data.decode(super().run_function(*args, **kwargs)) @with_tempfile() @slowTest @@ -437,7 +431,7 @@ class CPModuleTest(ModuleCase): cp.cache_file """ nginx_port = get_unused_localhost_port() - url_prefix = "http://localhost:{0}/".format(nginx_port) + url_prefix = "http://localhost:{}/".format(nginx_port) temp_dir = tempfile.mkdtemp(dir=RUNTIME_VARS.TMP) self.addCleanup(shutil.rmtree, temp_dir, ignore_errors=True) nginx_root_dir = os.path.join(temp_dir, "root")
Drop Py2 and six on tests/integration/modules/test_cp.py
py
diff --git a/sos/plugins/yum.py b/sos/plugins/yum.py index <HASH>..<HASH> 100644 --- a/sos/plugins/yum.py +++ b/sos/plugins/yum.py @@ -24,6 +24,7 @@ class Yum(Plugin, RedHatPlugin): files = ('/etc/yum.conf',) packages = ('yum',) + verify_packages = ('yum',) option_list = [ ("yumlist", "list repositories and packages", "slow", False),
[yum] move yum package verification to yum plugin
py
diff --git a/digitalocean/Event.py b/digitalocean/Event.py index <HASH>..<HASH> 100644 --- a/digitalocean/Event.py +++ b/digitalocean/Event.py @@ -1,10 +1,10 @@ import requests class Event(object): - def __init__(self, event_id="", client_id="", api_key=""): + def __init__(self, event_id=""): self.id = event_id - self.client_id = client_id - self.api_key = api_key + self.client_id = None + self.api_key = None self.event_type_id = None self.percentage = None self.droplet_id = None
Removed client_id and api_key from __init__ of Event
py
diff --git a/sirmordred/task.py b/sirmordred/task.py index <HASH>..<HASH> 100644 --- a/sirmordred/task.py +++ b/sirmordred/task.py @@ -252,3 +252,8 @@ class Task(): logger.error("Error retrieving Elasticsearch version: " + url) raise return major + + @staticmethod + def retain_data(hours_to_retain, es_url, index): + elastic = get_elastic(es_url, index) + elastic.delete_items(hours_to_retain)
[task] Delete data based on retation_hours param This code enforces the data retention policy after the execution of a task. In a nutshell, the data in a target index is deleted based on the value declare in the config param `retentions_hours`.
py
diff --git a/slickqa/data.py b/slickqa/data.py index <HASH>..<HASH> 100644 --- a/slickqa/data.py +++ b/slickqa/data.py @@ -409,8 +409,8 @@ class GraphValueReference(micromodels.Model): class Graph(micromodels.Model): - columns = micromodels.FieldCollectionField(GraphColumnReference) - values = micromodels.FieldCollectionField(GraphValueReference) + columns = micromodels.ModelCollectionField(GraphColumnReference) + values = micromodels.ModelCollectionField(GraphValueReference) class Result(micromodels.Model):
Use ModelCollectionField for list of models
py
diff --git a/kwalitee/cli/check.py b/kwalitee/cli/check.py index <HASH>..<HASH> 100644 --- a/kwalitee/cli/check.py +++ b/kwalitee/cli/check.py @@ -323,7 +323,7 @@ def authors(obj, commit='HEAD', skip_merge_commits=False): if skip_merge_commits and _is_merge_commit(commit): continue message = commit.message - author = u'{0.author} <{0.author.email}>'.format( + author = u'{0.author.name} <{0.author.email}>'.format( commit).encode('utf-8') errors = check_author(author, **options) message = re.sub(re_line, ident, message)
cli: fix author name in output * Fixes 'authors: wrong format in error message'. (closes #<I>)
py
diff --git a/cloudvolume/skeleton.py b/cloudvolume/skeleton.py index <HASH>..<HASH> 100644 --- a/cloudvolume/skeleton.py +++ b/cloudvolume/skeleton.py @@ -946,7 +946,7 @@ class Skeleton(object): c.f. http://research.mssm.edu/cnic/swc.html """ - from ... import __version__ + from . import __version__ swc = """# ORIGINAL_SOURCE CloudVolume {} # CREATURE # REGION
fix: swc exporter broken due to relative path import location change
py
diff --git a/wcmatch/glob.py b/wcmatch/glob.py index <HASH>..<HASH> 100644 --- a/wcmatch/glob.py +++ b/wcmatch/glob.py @@ -36,7 +36,7 @@ __all__ = ( # We don't use util.platform only because we mock it in tests, # and scandir will not work with bytes on the wrong system. WIN = sys.platform.startswith('win') -NO_SCANDIR_WORKAROUND = util.PY36 or (util.PY35 and not WIN) +NO_SCANDIR_WORKAROUND = util.PY36 F = FORCECASE = _wcparse.FORCECASE I = IGNORECASE = _wcparse.IGNORECASE
Don't use scandir on Python < <I>
py
diff --git a/src/saml2/time_util.py b/src/saml2/time_util.py index <HASH>..<HASH> 100644 --- a/src/saml2/time_util.py +++ b/src/saml2/time_util.py @@ -306,6 +306,9 @@ def not_on_or_after(not_on_or_after): not_on_or_after = time.mktime(not_on_or_after) elif isinstance(not_on_or_after, basestring): not_on_or_after = str_to_time(not_on_or_after) + + if not_on_or_after == 0: + return True now = daylight_corrected_now()
A not_on_or_after of 0 means forever
py
diff --git a/lib/ansiblelint/rules/CommandsInsteadOfModulesRule.py b/lib/ansiblelint/rules/CommandsInsteadOfModulesRule.py index <HASH>..<HASH> 100644 --- a/lib/ansiblelint/rules/CommandsInsteadOfModulesRule.py +++ b/lib/ansiblelint/rules/CommandsInsteadOfModulesRule.py @@ -38,7 +38,7 @@ class CommandsInsteadOfModulesRule(AnsibleLintRule): _modules = {'git': 'git', 'hg': 'hg', 'curl': 'get_url or uri', 'wget': 'get_url or uri', 'svn': 'subversion', 'service': 'service', 'mount': 'mount', 'rpm': 'yum or rpm_key', 'yum': 'yum', 'apt-get': 'apt-get', - 'unzip': 'unarchive', 'tar': 'unarchive'} + 'unzip': 'unarchive', 'tar': 'unarchive', 'chkconfig': 'service'} def matchtask(self, file, task): if task["action"]["module"] in self._commands and task["action"]["module_arguments"]:
adding chkconfig_insteadof_service rule
py
diff --git a/test/test_wheres.py b/test/test_wheres.py index <HASH>..<HASH> 100644 --- a/test/test_wheres.py +++ b/test/test_wheres.py @@ -3,17 +3,21 @@ import cmudict from os import path +def _assert_filesize(filename): + if (0 >= path.getsize(filename)): + raise AssertionError(filename + " <= 0 bytes") + def test_where(): - assert path.isfile(cmudict.where()) + _assert_filesize(cmudict.where()) def test_where_license(): - assert path.isfile(cmudict.where_license()) + _assert_filesize(cmudict.where_license()) def test_where_phones(): - assert path.isfile(cmudict.where_phones()) + _assert_filesize(cmudict.where_phones()) def test_where_symbols(): - assert path.isfile(cmudict.where_symbols()) + _assert_filesize(cmudict.where_symbols()) def test_where_vp(): - assert path.isfile(cmudict.where_vp()) + _assert_filesize(cmudict.where_vp())
Simplified test_wheres and explicitly raise assertionerror.
py
diff --git a/phoebe/parameters/parameters.py b/phoebe/parameters/parameters.py index <HASH>..<HASH> 100644 --- a/phoebe/parameters/parameters.py +++ b/phoebe/parameters/parameters.py @@ -4507,8 +4507,8 @@ class FloatParameter(Parameter): value = value % (360*u.deg) logger.warning("wrapping value of {} to {}".format(self.qualifier, value)) - # make sure the value is within the limits - if not self.within_limits(value): + # make sure the value is within the limits, if this isn't an array or nan + if isinstance(value, float) and not self.within_limits(value): raise ValueError("value of {} must be within limits of {}".format(self.qualifier, self.limits)) # make sure we can convert back to the default_unit @@ -5809,7 +5809,10 @@ class ConstraintParameter(Parameter): # to the locals dictionary. locals()[func] = getattr(builtin, func) - value = float(eval(eq.format(**values))) + try: + value = float(eval(eq.format(**values))) + except ValueError: + value = np.nan else:
allow for FloatParameters to be set to nan if a constraint fails particularly useful for the pot<->requiv constraint for contacts when the system parameters no longer are valid (will fail system checks due to overflow/underflow)
py
diff --git a/yoti_python_sdk/activity_details.py b/yoti_python_sdk/activity_details.py index <HASH>..<HASH> 100644 --- a/yoti_python_sdk/activity_details.py +++ b/yoti_python_sdk/activity_details.py @@ -11,7 +11,7 @@ from yoti_python_sdk.profile import Profile, ApplicationProfile class ActivityDetails: def __init__(self, receipt, decrypted_profile=None, decrypted_application_profile=None): - self.decrypted_profile = decrypted_profile # TODO: This isn't used, can we remove it? + self.decrypted_profile = decrypted_profile self.user_profile = {} # will be removed in v3.0.0 self.base64_selfie_uri = None self.decrypted_application_profile = decrypted_application_profile
SDK-<I>: Add comments to the helper methods in ApplicationProfile
py
diff --git a/soco/data_structures_entry.py b/soco/data_structures_entry.py index <HASH>..<HASH> 100644 --- a/soco/data_structures_entry.py +++ b/soco/data_structures_entry.py @@ -95,13 +95,12 @@ def attempt_datastructure_upgrade(didl_item): # matter what it is! item_id = '11111111{0}'.format(path) - # Ignore other metadata for now, in future ask ms data - # structure to upgrade metadata from the service + # Pass over all the available metadata in the metadata dict, in the + # future ask ms data structure to upgrade metadata from the service metadata = {} - try: - metadata['title'] = didl_item.title - except AttributeError: - pass + for key, value in didl_item.to_dict().items(): + if key not in metadata: + metadata[key] = value # Get class try:
Pass metadata over to upraded music service item
py
diff --git a/lib/PrettyPrinter.py b/lib/PrettyPrinter.py index <HASH>..<HASH> 100644 --- a/lib/PrettyPrinter.py +++ b/lib/PrettyPrinter.py @@ -46,7 +46,7 @@ def pp_color(p_todo_str, p_todo): p_todo_str = '%s%s%s' % (color, p_todo_str, NEUTRAL_COLOR) if Config.HIGHLIGHT_PROJECTS_CONTEXTS: - p_todo_str = re.sub(r'\B(\+|@)\S+', PROJECT_COLOR + r'\g<0>' + color, \ + p_todo_str = re.sub(r'\B(\+|@)(\S*\w)', PROJECT_COLOR + r'\g<0>' + color, \ p_todo_str) p_todo_str += NEUTRAL_COLOR
Match projects and contexts properly when highlighting.
py
diff --git a/odl/util/graphics.py b/odl/util/graphics.py index <HASH>..<HASH> 100644 --- a/odl/util/graphics.py +++ b/odl/util/graphics.py @@ -41,6 +41,8 @@ def show_discrete_function(dfunc, method='', title=None, indices=None, Parameters ---------- + dfunc : `DiscreteLpVector` + The discretized funciton to visualize. method : `str`, optional 1d methods:
MAINT: added doc for undocumented parameter in show_discrete_function
py
diff --git a/LiSE/LiSE/proxy.py b/LiSE/LiSE/proxy.py index <HASH>..<HASH> 100644 --- a/LiSE/LiSE/proxy.py +++ b/LiSE/LiSE/proxy.py @@ -1851,17 +1851,20 @@ class EngineProxy(AbstractEngine): self._character_rulebooks_cache = StructuredDefaultDict( 1, RuleBookProxy, kwargs_munger=lambda inst, k: { 'engine': self, - 'bookname': k + 'bookname': (inst.key, k) } ) self._char_node_rulebooks_cache = StructuredDefaultDict( 1, RuleBookProxy, kwargs_munger=lambda inst, k: { 'engine': self, - 'bookname': k + 'bookname': (inst.key, k) } ) self._char_port_rulebooks_cache = StructuredDefaultDict( - 2, RuleBookProxy + 2, RuleBookProxy, kwargs_munger=lambda inst, k: { + 'engine': self, + 'bookname': (inst.parent.key, inst.key, k) + } ) self._character_portals_cache = PortalObjCache() self._character_avatars_cache = PickyDefaultDict(dict)
Fix caching of rulebook proxies
py
diff --git a/tests/test_command__devenv.py b/tests/test_command__devenv.py index <HASH>..<HASH> 100644 --- a/tests/test_command__devenv.py +++ b/tests/test_command__devenv.py @@ -81,6 +81,12 @@ class DevEnvTestCase(TestCase): raise SkipTest("Skipping because virtualenv does not work") virtualenv_dir = path.abspath(path.join(curdir, 'virtualenv-python')) bin_dir = path.join(virtualenv_dir, 'Scripts' if assertions.is_windows() else 'bin') + if assertions.is_windows(): + easy_install = path.join(bin_dir, "easy_install") + try: + self.execute_assert_success("{} -U distribute".format(easy_install)) + except ExecutionError, error: + pass python = path.join(bin_dir, 'python') with utils.chdir(PROJECT_ROOT): self.execute_assert_success("{python} setup.py develop".format(python=python))
HOSTDEV-<I> fixing test
py
diff --git a/contrib/sacrebleu/sacrebleu.py b/contrib/sacrebleu/sacrebleu.py index <HASH>..<HASH> 100755 --- a/contrib/sacrebleu/sacrebleu.py +++ b/contrib/sacrebleu/sacrebleu.py @@ -165,7 +165,7 @@ from collections import Counter, namedtuple from itertools import zip_longest from typing import List, Iterable, Tuple -VERSION = '1.2' +VERSION = '1.2.1' try: # SIGPIPE is not available on Windows machines, throwing an exception. @@ -1340,7 +1340,6 @@ def main(): if args.score_only: print('{:.2f}'.format(chrf)) else: - version_str = build_signature_chrf(args, len(refs)) print('CHRF = {:.2f}'.format(chrf)) if __name__ == '__main__':
small bugfix (#<I>)
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -18,7 +18,7 @@ test_requirements = [] setup( name='vcf-annotate-polyphen', - version='0.1.0', + version='0.1.1', description="a tool to annotate human VCF files with PolyPhen2 effect measures", long_description=readme, author="B. Arman Aksoy",
fix issue with the unstable PyPi server: bump the minor version
py
diff --git a/phoebe/frontend/io.py b/phoebe/frontend/io.py index <HASH>..<HASH> 100644 --- a/phoebe/frontend/io.py +++ b/phoebe/frontend/io.py @@ -605,11 +605,12 @@ def par_value(param, index=None): unit = None val = param.get_quantity(unit=unit).value - if d['qualifier'] == 'alb': - val = [1.0-val] + # if d['qualifier'] == 'alb': + # val = [1.0-val] - else: - val = [val] + # else: + # val = [val] + val = [val] elif isinstance(param, phb.parameters.ChoiceParameter): ptype = 'choice' val = [param.get_value()] @@ -809,8 +810,8 @@ def pass_to_legacy(eb, filename='2to1.phoebe'): param=None if param != None: val, ptype = par_value(param) - if param.qualifier == 'alb_refl_bol': - val = [1-float(val[0])] + # if param.qualifier == 'alb_refl_bol': + # val = [1-float(val[0])] pname = ret_parname(param.qualifier, component = param.component, ptype=ptype) if pname[0] not in parnames: parnames.extend(pname)
fixed inconsistent alb treatment in legacy wrapper
py
diff --git a/modules/CUAV/camera.py b/modules/CUAV/camera.py index <HASH>..<HASH> 100644 --- a/modules/CUAV/camera.py +++ b/modules/CUAV/camera.py @@ -751,7 +751,7 @@ def mavlink_packet(m): bottle = m.servo7_raw if bottle == 1000: mpstate.console.set_status('Bottle', 'Bottle: HELD', row=0, fg='green') - elif bottle == 1380: + elif bottle == 1430: mpstate.console.set_status('Bottle', 'Bottle: DROP', row=0, fg='red') else: mpstate.console.set_status('Bottle', 'Bottle: %u' % bottle, row=0, fg='red')
camera: bottle release at <I>
py
diff --git a/nodeconductor/structure/tests/test_project.py b/nodeconductor/structure/tests/test_project.py index <HASH>..<HASH> 100644 --- a/nodeconductor/structure/tests/test_project.py +++ b/nodeconductor/structure/tests/test_project.py @@ -153,7 +153,7 @@ class ProjectFilterTest(test.APITransactionTestCase): self.assertEqual(len(response.data), 1, 'Expected project to be returned when ordering by %s' % ordering) -class ProjectCreateUpdateDeleteTest(test.APITransactionTestCase): +class ProjectUpdateDeleteTest(test.APITransactionTestCase): def setUp(self): self.staff = factories.UserFactory(is_staff=True)
Drop create from test name [WAL-<I>]
py
diff --git a/visidata/pivot.py b/visidata/pivot.py index <HASH>..<HASH> 100644 --- a/visidata/pivot.py +++ b/visidata/pivot.py @@ -46,7 +46,7 @@ class SheetPivot(Sheet): self.groupByCols = groupByCols # whose values become rows def isNumericRange(self, col): - return isNumeric(col) and options.hist_numeric_ranges + return isNumeric(col) and options.numeric_binning def initCols(self): self.columns = []
[options-] all hist_numeric_ranges are now changed to numeric_binning
py
diff --git a/anytemplate/__init__.py b/anytemplate/__init__.py index <HASH>..<HASH> 100644 --- a/anytemplate/__init__.py +++ b/anytemplate/__init__.py @@ -81,8 +81,10 @@ see its help; see the output of 'help(anytemplate.render)', etc. """ from __future__ import absolute_import from .globals import AUTHOR, VERSION, LOGGER -from .api import find_engine, renders, render, render_to, \\ - TemplateEngineNotFound, TemplateNotFound +from .api import ( + find_engine, renders, render, render_to, TemplateEngineNotFound, + TemplateNotFound +) from .engine import list_engines_by_priority as list_engines __author__ = AUTHOR
refactor: enclose imported objects instead of breaking lines with '\'
py
diff --git a/tests/test_image.py b/tests/test_image.py index <HASH>..<HASH> 100644 --- a/tests/test_image.py +++ b/tests/test_image.py @@ -10,7 +10,7 @@ import unittest from tempfile import mkdtemp from landsat.image import Simple, PanSharpen -from landsat.ndvi import NDVI +from landsat.ndvi import NDVI, NDVIWithManualColorMap class TestProcess(unittest.TestCase): @@ -65,3 +65,8 @@ class TestProcess(unittest.TestCase): print p.run() self.assertTrue(exists(join(self.temp_folder, 'test', 'test_NDVI.TIF'))) + def test_ndvi_with_manual_colormap(self): + + p = NDVIWithManualColorMap(path=self.landsat_image, dst_path=self.temp_folder) + print p.run() + self.assertTrue(exists(join(self.temp_folder, 'test', 'test_NDVI.TIF')))
add test for ndvi with manual colormap
py
diff --git a/yt_array.py b/yt_array.py index <HASH>..<HASH> 100644 --- a/yt_array.py +++ b/yt_array.py @@ -139,8 +139,8 @@ def validate_comparison_units(this, other, op_string): if isinstance(other, YTArray): if not this.units.same_dimensions_as(other.units): raise YTUnitOperationError(op_string, this.units, other.units) - - return other.in_units(this.units) + if this.units.expr != other.units.expr: + return other.in_units(this.units) return other
Only do a unit conversion for comparison operators if we need it This leads to a 5x speedup for iterating over a YTArray --HG-- branch : yt
py
diff --git a/honcho/export/base.py b/honcho/export/base.py index <HASH>..<HASH> 100644 --- a/honcho/export/base.py +++ b/honcho/export/base.py @@ -12,11 +12,14 @@ class BaseExport(object): self.concurrency = concurrency try: - self.uid = pwd.getpwnam(options.user).pw_uid + user_entry = pwd.getpwnam(options.user) except KeyError: raise CommandError("No such user available: {}" .format(options.user)) + self.uid = user_entry.pw_uid + self.gid = user_entry.pw_gid + def _mkdir(self, directory): if os.path.exists(directory): return @@ -29,7 +32,7 @@ class BaseExport(object): def _chown(self, filename): try: - os.chown(filename, self.uid, self.uid) + os.chown(filename, self.uid, self.gid) except OSError: raise CommandError("Can not chown {} to {}" .format(self.options.log,
Don't assume user gid == user uid There's no reason the user's primary group gid should be the same as their uid.
py
diff --git a/pyrogram/client/client.py b/pyrogram/client/client.py index <HASH>..<HASH> 100644 --- a/pyrogram/client/client.py +++ b/pyrogram/client/client.py @@ -872,6 +872,7 @@ class Client(Methods, BaseClient): app.stop() """ + def do_it(): self.terminate() self.disconnect() @@ -918,6 +919,7 @@ class Client(Methods, BaseClient): app.stop() """ + def do_it(): self.stop() self.start() @@ -1525,7 +1527,7 @@ class Client(Methods, BaseClient): count = 0 if not include: - for path in sorted(Path(root).rglob("*.py")): + for path in sorted(Path(root.replace(".", "/")).rglob("*.py")): module_path = '.'.join(path.parent.parts + (path.stem,)) module = reload(import_module(module_path)) @@ -1587,7 +1589,7 @@ class Client(Methods, BaseClient): warn_non_existent_functions = True try: - module = reload(import_module(module_path)) + module = import_module(module_path) except ImportError: log.warning('[{}] [UNLOAD] Ignoring non-existent module "{}"'.format( self.session_name, module_path))
Fix plugins not being properly unloaded. Also, plugins' roots values will follow Python notation: folder.plugins instead of folder/plugins, in case of roots inside another folder.
py
diff --git a/everest/tests/test_rdb.py b/everest/tests/test_rdb.py index <HASH>..<HASH> 100644 --- a/everest/tests/test_rdb.py +++ b/everest/tests/test_rdb.py @@ -24,7 +24,7 @@ from sqlalchemy import MetaData from sqlalchemy import String from sqlalchemy import Table from sqlalchemy.engine import create_engine -from sqlalchemy.sql.expression import Function +from sqlalchemy.sql.expression import FunctionElement from sqlalchemy.sql.expression import cast from zope.interface import implementer # pylint: disable=E0611,F0401 @@ -97,7 +97,7 @@ class RdbTestCase(Pep8CompliantTestCase): id_attribute='my_id', slug_expression=slug_expr) self.assert_true(MyDerivedEntity.__dict__['slug'].expr is slug_expr) - self.assert_true(isinstance(MyDerivedEntity.slug, Function)) + self.assert_true(isinstance(MyDerivedEntity.slug, FunctionElement)) mpr.dispose() # Test mapping polymorphic class with custom slug in the base class. base_mpr = mapper(MyEntityWithCustomId, t2,
Preparing for the move to SQLAlchemy <I>.x
py
diff --git a/norduniclient/core.py b/norduniclient/core.py index <HASH>..<HASH> 100644 --- a/norduniclient/core.py +++ b/norduniclient/core.py @@ -45,7 +45,7 @@ except ImportError: NEO4J_URI, NEO4J_USERNAME, NEO4J_PASSWORD = None, None, None logger.info('Starting up without a Django environment.') logger.info('Initial: norduniclient.neo4jdb == None.') - logger.info('Use norduniclient.init_db(uri) to open a database connection.') + logger.info('Use norduniclient.init_db to open a database connection.') META_TYPES = ['Physical', 'Logical', 'Relation', 'Location'] @@ -74,6 +74,10 @@ class GraphDB(object): self._manager = None return self._manager + @manager.setter + def manager(self, manager): + self._manager = manager + def init_db(uri=NEO4J_URI, username=NEO4J_USERNAME, password=NEO4J_PASSWORD, encrypted=False): if uri:
Added a setter for GraphDB.manager
py