diff
stringlengths 139
3.65k
| message
stringlengths 8
627
| diff_languages
stringclasses 1
value |
|---|---|---|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -36,6 +36,7 @@ setup(
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
+ 'Programming Language :: Python :: 3.4',
'Topic :: Utilities',
'License :: OSI Approved :: Apache Software License',
],
|
Added support for Python <I> in setup.py classifiers
|
py
|
diff --git a/dynesty/dynamicsampler.py b/dynesty/dynamicsampler.py
index <HASH>..<HASH> 100644
--- a/dynesty/dynamicsampler.py
+++ b/dynesty/dynamicsampler.py
@@ -1007,6 +1007,16 @@ class DynamicSampler(object):
update_interval = np.inf # no need to update with no bounds
self.sampler.update_interval = update_interval
+ # Update internal log-volume quantities
+ if self.new_logl_min == -np.inf:
+ bound_logvol = 0.
+ else:
+ vol_idx = np.argmin(abs(self.saved_logl - self.new_logl_min))
+ bound_logvol = self.saved_logvol[vol_idx]
+ bound_dlv = math.log((nlive_new + 1.) / nlive_new)
+ self.sampler.saved_logvol[-1] = bound_logvol
+ self.sampler.dlv = bound_dlv
+
# Tell the sampler *not* to try and remove the previous addition of
# live points (the internal results are garbage anyways).
self.sampler.added_live = False
|
dsampler lvol fix Resolves #<I>. Initial tests seem to indicate performance is mostly unchanged, but better safe than sorry?
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -64,6 +64,8 @@ setup(
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
+ "Programming Language :: Python :: 3.8",
+ "Programming Language :: Python :: 3.9",
],
install_requires=requires,
entry_points={'console_scripts': [
|
Added new managed version of Python.
|
py
|
diff --git a/proton-c/bindings/python/proton/utils.py b/proton-c/bindings/python/proton/utils.py
index <HASH>..<HASH> 100644
--- a/proton-c/bindings/python/proton/utils.py
+++ b/proton-c/bindings/python/proton/utils.py
@@ -249,7 +249,7 @@ class BlockingConnection(Handler):
self.container.timeout = container_timeout
if self.disconnected or self._is_closed():
self.container.stop()
- if self.disconnected:
+ if self.disconnected and not self._is_closed():
raise ConnectionException("Connection %s disconnected" % self.url)
def on_link_remote_close(self, event):
@@ -265,9 +265,11 @@ class BlockingConnection(Handler):
def on_transport_tail_closed(self, event):
self.on_transport_closed(event)
+ def on_transport_head_closed(self, event):
+ self.on_transport_closed(event)
+
def on_transport_closed(self, event):
- if event.connection.state & Endpoint.LOCAL_ACTIVE:
- self.disconnected = True
+ self.disconnected = True
class AtomicCount(object):
def __init__(self, start=0, step=1):
|
PROTON-<I>: fix handling of disconnection for BlockingConnection
|
py
|
diff --git a/soco/core.py b/soco/core.py
index <HASH>..<HASH> 100755
--- a/soco/core.py
+++ b/soco/core.py
@@ -660,7 +660,8 @@ class SoCo(object):
dom = XML.fromstring(response.content)
- self.speaker_info['zone_name'] = really_utf8(dom.findtext('.//ZoneName'))
+ if (dom.findtext('.//ZoneName') != None):
+ self.speaker_info['zone_name'] = really_utf8(dom.findtext('.//ZoneName'))
self.speaker_info['zone_icon'] = dom.findtext('.//ZoneIcon')
self.speaker_info['uid'] = dom.findtext('.//LocalUID')
self.speaker_info['serial_number'] = dom.findtext('.//SerialNumber')
|
Don't crash if the zone name isn't set
|
py
|
diff --git a/buildozer/targets/ios.py b/buildozer/targets/ios.py
index <HASH>..<HASH> 100644
--- a/buildozer/targets/ios.py
+++ b/buildozer/targets/ios.py
@@ -411,7 +411,7 @@ class TargetIos(Target):
if save:
with open(password_file, 'wb') as fd:
- fd.write(password)
+ fd.write(password.encode())
def get_target(buildozer):
return TargetIos(buildozer)
|
Encode password before it is saved to file as Python 3 input returns a string, not bytes
|
py
|
diff --git a/suitable/tests/conftest.py b/suitable/tests/conftest.py
index <HASH>..<HASH> 100644
--- a/suitable/tests/conftest.py
+++ b/suitable/tests/conftest.py
@@ -19,14 +19,20 @@ class Container(object):
self.password = password
def spawn_api(self, api_class, **kwargs):
- return api_class(
- '%s:%s' % (self.host, self.port),
- remote_user=self.username,
- remote_pass=self.password,
- connection='smart',
- extra_vars={
+ options = {
+ 'remote_user': self.username,
+ 'remote_pass': self.password,
+ 'connection': 'smart',
+ 'extra_vars': {
'ansible_python_interpreter': '/usr/bin/python3'
}
+ }
+
+ options.update(kwargs)
+
+ return api_class(
+ '%s:%s' % (self.host, self.port),
+ ** options
)
def vanilla_api(self, **kwargs):
|
Fixes conftest options not being propagated
|
py
|
diff --git a/doctr/__main__.py b/doctr/__main__.py
index <HASH>..<HASH> 100644
--- a/doctr/__main__.py
+++ b/doctr/__main__.py
@@ -259,6 +259,11 @@ def deploy(args, parser):
full_key_path=args.key_path,
branch_whitelist=branch_whitelist)
+ if args.command:
+ run(['git', 'checkout', get_travis_branch()])
+ run(args.command, shell=True)
+ run(['git', 'checkout', deploy_branch])
+
if args.sync:
built_docs = args.built_docs or find_sphinx_build_dir()
if args.temp_dir:
@@ -273,11 +278,6 @@ def deploy(args, parser):
else:
added, removed = [], []
- if args.command:
- run(['git', 'checkout', get_travis_branch()])
- run(args.command, shell=True)
- run(['git', 'checkout', deploy_branch])
-
changes = commit_docs(added=added, removed=removed)
if changes:
if can_push and args.push:
|
Run the command before syncing That way it can affect the build docs files before they are synced.
|
py
|
diff --git a/examples/sampleserver.py b/examples/sampleserver.py
index <HASH>..<HASH> 100644
--- a/examples/sampleserver.py
+++ b/examples/sampleserver.py
@@ -84,7 +84,7 @@ class ConcreteServer(OpenIDServer):
return append_args(addr, args)
def get_setup_response(self, req):
- return redirect(self.get_user_setup_url())
+ return redirect(self.get_user_setup_url(req))
server = ConcreteServer()
|
[project @ Another typo fix in sampleserver]
|
py
|
diff --git a/scripts/construct_mutation_rates.py b/scripts/construct_mutation_rates.py
index <HASH>..<HASH> 100755
--- a/scripts/construct_mutation_rates.py
+++ b/scripts/construct_mutation_rates.py
@@ -69,7 +69,7 @@ def load_genes(path):
if line.startswith("hgnc"):
continue
- symbol, tx_id, *_ = line.strip().split("\t")
+ symbol, tx_id, _, _ = line.strip().split("\t")
if symbol not in transcripts:
transcripts[symbol] = []
|
fix python2/python3 issue
|
py
|
diff --git a/javalang/test/test_java_8_syntax.py b/javalang/test/test_java_8_syntax.py
index <HASH>..<HASH> 100644
--- a/javalang/test/test_java_8_syntax.py
+++ b/javalang/test/test_java_8_syntax.py
@@ -134,6 +134,10 @@ class LambdaSupportTest(unittest.TestCase):
with self.assertRaises(parser.JavaSyntaxError):
parse.parse(setup_java_class("(a b c) -> {};"))
+ def test_cast_works(self):
+ """ this tests that a cast expression works as expected. """
+ parse.parse(setup_java_class("String x = (String) A.x() ;"))
+
class MethodReferenceSyntaxTest(unittest.TestCase):
|
Add a failing test_cast_works test - this test is failing due to lambda code changes.
|
py
|
diff --git a/niworkflows/interfaces/utils.py b/niworkflows/interfaces/utils.py
index <HASH>..<HASH> 100644
--- a/niworkflows/interfaces/utils.py
+++ b/niworkflows/interfaces/utils.py
@@ -288,8 +288,8 @@ class SanitizeImage(SimpleInterface):
| sform, scode <- qform, qcode |
+-------------------+------------------+------------------+------------------\
+------------------------------------------------+
- | * | * | True | False \
-| qform, qcode <- sform, scode |
+ | * | True | * | False \
+| sform, scode <- qform, qcode |
+-------------------+------------------+------------------+------------------\
+------------------------------------------------+
| * | False | True | * \
@@ -339,7 +339,7 @@ class SanitizeImage(SimpleInterface):
self._results['out_file'] = out_fname
# Row 2:
- if valid_qform and qform_code > 0 and sform_code == 0:
+ if valid_qform and qform_code > 0:
img.set_sform(img.get_qform(), qform_code)
warning_txt = 'Note on orientation: sform matrix set'
description = """\
|
if qform is valid always force sform to be have the same value
|
py
|
diff --git a/salt/modules/openstack_mng.py b/salt/modules/openstack_mng.py
index <HASH>..<HASH> 100644
--- a/salt/modules/openstack_mng.py
+++ b/salt/modules/openstack_mng.py
@@ -91,7 +91,7 @@ def restart_service(service_name, minimum_running_time=None):
boot_time = float(open('/proc/uptime').read().split(' ')[0])
expr_time = int(service_info.get('ExecMainStartTimestampMonotonic', 0)) / 1000000 < boot_time - minimum_running_time
- expr_active = True if service_info.get('ActiveState') == "active" else False
+ expr_active = service_info.get('ActiveState') == "active"
if expr_time or not expr_active:
# restart specific system service
|
Removed redundant expr_active condition result
|
py
|
diff --git a/vncdotool/rfb.py b/vncdotool/rfb.py
index <HASH>..<HASH> 100644
--- a/vncdotool/rfb.py
+++ b/vncdotool/rfb.py
@@ -129,7 +129,7 @@ class RFBClient(Protocol):
#~ print "rfb"
maj, min = [int(x) for x in buffer[3:-1].split('.')]
#~ print maj, min
- if (maj, min) not in [(3,3), (3,7), (3,8)]:
+ if (maj, min) not in [(3,3), (3,7), (3,8), (4,0)]:
log.msg("wrong protocol version\n")
self.transport.loseConnection()
buffer = buffer[12:]
|
rfb: give VNC <I> a chance - untested due to lack of access to <I> servers
|
py
|
diff --git a/src/pyipmi/msgs/__init__.py b/src/pyipmi/msgs/__init__.py
index <HASH>..<HASH> 100644
--- a/src/pyipmi/msgs/__init__.py
+++ b/src/pyipmi/msgs/__init__.py
@@ -96,7 +96,7 @@ class Conditional:
def decode(self, obj, data):
if self._condition_fn(obj):
- self._field.encode(obj, data)
+ self._field.decode(obj, data)
class Bitfield(BaseField):
|
msg: fix in Conditional class for decode
|
py
|
diff --git a/ayrton/execute.py b/ayrton/execute.py
index <HASH>..<HASH> 100644
--- a/ayrton/execute.py
+++ b/ayrton/execute.py
@@ -133,8 +133,6 @@ class Command:
def __init__ (self, path):
self.path= path
- self.exe= resolve_program (path)
- logger.debug ('found exe %s', self.exe)
self.command= None
self.stdin_pipe= None
@@ -147,6 +145,11 @@ class Command:
self.child_pid= None
+ # this is at the very bottom so, if anything happens, all the other
+ # attibutes are already defined when __del__() runs
+ self.exe= resolve_program (path)
+ logger.debug ('found exe %s', self.exe)
+
def prepare_fds (self):
"""Create needed file descriptors (pipes, mostly) before forking."""
|
[*] resolve the program as late in __init__() as possible, so instance variables are defined at __del__() time.
|
py
|
diff --git a/raiden/network/transport/matrix/client.py b/raiden/network/transport/matrix/client.py
index <HASH>..<HASH> 100644
--- a/raiden/network/transport/matrix/client.py
+++ b/raiden/network/transport/matrix/client.py
@@ -693,7 +693,7 @@ class GMatrixClient(MatrixClient):
self.token = self.api.token = token
def set_sync_filter_id(self, sync_filter_id: Optional[int]) -> Optional[int]:
- """ Sets the events limit per room for sync and return previous limit """
+ """ Sets the sync filter to the given id and returns previous filters id """
prev_id = self._sync_filter_id
self._sync_filter_id = sync_filter_id
return prev_id
|
Correct docstring of `set_sync_filter_id`
|
py
|
diff --git a/klue/swagger/client.py b/klue/swagger/client.py
index <HASH>..<HASH> 100644
--- a/klue/swagger/client.py
+++ b/klue/swagger/client.py
@@ -228,10 +228,10 @@ class ClientCaller():
pass
else:
# Unknown exception...
- log.info("Unknown exce: " + response.text)
- k = KlueException(response.text)
+ log.info("Unknown exception: " + response.text)
+ k = KlueException("Call to %s %s returned unknown exception: %s" % (self.method, self.url, response.text))
k.status_code = response.status_code
- return self.error_callback(k)
+ return self.error_callback.__func__(k)
result = self._unmarshal(response)
log.info("Call to %s %s returned an instance of %s" % (self.method, self.url, type(result)))
@@ -243,7 +243,7 @@ class ClientCaller():
try:
result = unmarshal_response(response, self.operation)
except jsonschema.exceptions.ValidationError as e:
- k = ValidationError(str(e))
- k.status_code = 500
- return self.error_callback(k)
+ log.warn("Failed to unmarshal response: %s" % e)
+ k = ValidationError("Failed to unmarshal response because: %s" % str(e))
+ return self.error_callback.__func__(k)
return result
|
Fix bug when calling error_callback stored as instance attribute When storing error_callback as an instance attribute, calling it directly on the instance will be mistaken for a method call. Have to use __func__ to get to the original raw method.
|
py
|
diff --git a/centrosome/zernike.py b/centrosome/zernike.py
index <HASH>..<HASH> 100644
--- a/centrosome/zernike.py
+++ b/centrosome/zernike.py
@@ -196,9 +196,12 @@ def get_zernike_indexes(limit=10):
The Zernikes are stored as complex numbers with the real part
being (N,M) and the imaginary being (N,-M)
"""
- zernike_n_m = []
- for n in range(limit):
- for m in range(n+1):
- if (m+n) & 1 == 0:
- zernike_n_m.append((n,m))
- return np.array(zernike_n_m)
+ def zernike_indexes_iter(n_max):
+ for n in range(0, n_max):
+ for m in range(n%2, n+1, 2):
+ yield n
+ yield m
+
+ z_ind = np.fromiter(zernike_indexes_iter(limit), np.intc)
+ z_ind = z_ind.reshape( (len(z_ind) // 2, 2) )
+ return z_ind
|
changed get_zernike_indexes to directly create numpy array from a generator, rather than create a list of tuples and converting that to a numpy array
|
py
|
diff --git a/falafel/mappers/installed_rpms.py b/falafel/mappers/installed_rpms.py
index <HASH>..<HASH> 100644
--- a/falafel/mappers/installed_rpms.py
+++ b/falafel/mappers/installed_rpms.py
@@ -136,6 +136,7 @@ def installed_rpms(context):
if line.startswith("error:"):
packages["__error"] = True
else:
- name, rpm = parse_line(line)
- packages[name].append(InstalledRpm(rpm))
+ if line.strip():
+ name, rpm = parse_line(line)
+ packages[name].append(InstalledRpm(rpm))
return InstalledRpms(packages) if packages else None
|
installed_rpms mapper now handles blank lines better
|
py
|
diff --git a/scout/build/individual.py b/scout/build/individual.py
index <HASH>..<HASH> 100644
--- a/scout/build/individual.py
+++ b/scout/build/individual.py
@@ -17,6 +17,12 @@ def build_individual(ind):
Returns:
ind_obj (dict): A Individual object
+ Raises:
+ PedigreeError: if sex is unknown,
+ if phenotype is unknown,
+ if analysis_type is unknwon,
+ or missing individual_id
+
dict(
individual_id = str, # required
display_name = str,
|
Update Docstring in Individual.py From Feedback Update docstring to include pedigree exception.
|
py
|
diff --git a/pyuploadcare/api_resources.py b/pyuploadcare/api_resources.py
index <HASH>..<HASH> 100644
--- a/pyuploadcare/api_resources.py
+++ b/pyuploadcare/api_resources.py
@@ -698,7 +698,7 @@ class FileList(BaseApiList):
"""
base_url = '/files/'
constructor = File.construct_from
- datetime_ordering_fields = ('', 'datetime_uploaded')
+ datetime_ordering_fields = ['', 'datetime_uploaded']
def __init__(self, *args, **kwargs):
self.stored = kwargs.pop('stored', None)
@@ -798,4 +798,4 @@ class GroupList(BaseApiList):
"""
base_url = '/groups/'
constructor = FileGroup.construct_from
- datetime_ordering_fields = ('', 'datetime_created')
+ datetime_ordering_fields = ['', 'datetime_created']
|
Changed tuples to lists
|
py
|
diff --git a/gcimagebundle/gcimagebundlelib/imagebundle.py b/gcimagebundle/gcimagebundlelib/imagebundle.py
index <HASH>..<HASH> 100755
--- a/gcimagebundle/gcimagebundlelib/imagebundle.py
+++ b/gcimagebundle/gcimagebundlelib/imagebundle.py
@@ -236,6 +236,11 @@ def main():
else:
output_bucket = 'gs://%s/%s' % (
bucket, os.path.basename(output_file))
+
+ # /usr/local/bin not in redhat root PATH by default
+ if '/usr/local/bin' not in os.environ['PATH']:
+ os.environ['PATH'] += ':/usr/local/bin'
+
# TODO: Consider using boto library directly.
cmd = ['gsutil', 'cp', output_file, output_bucket]
retcode = subprocess.call(cmd)
|
Adding /usr/local/bin to path before gsutil call for sudo-ing Redhat users
|
py
|
diff --git a/telebot/__init__.py b/telebot/__init__.py
index <HASH>..<HASH> 100644
--- a/telebot/__init__.py
+++ b/telebot/__init__.py
@@ -112,8 +112,11 @@ class TeleBot:
new_messages.append(msg)
if len(new_messages) > 0:
- self.__notify_update(new_messages)
- self._notify_command_handlers(new_messages)
+ self.process_new_messages(new_messages)
+
+ def process_new_messages(self, new_messages):
+ self.__notify_update(new_messages)
+ self._notify_command_handlers(new_messages)
def __notify_update(self, new_messages):
for listener in self.update_listener:
|
Add process_new_message public method for webhook. issue #<I>
|
py
|
diff --git a/saltcloud/utils/parsers.py b/saltcloud/utils/parsers.py
index <HASH>..<HASH> 100644
--- a/saltcloud/utils/parsers.py
+++ b/saltcloud/utils/parsers.py
@@ -346,7 +346,7 @@ class CloudProvidersListsMixIn(object):
def _mixin_after_parsed(self):
list_options_selected = filter(
- lambda option: getattr(self.options, option.dest) is True,
+ lambda option: getattr(self.options, option.dest) is not None,
self.providers_listings_group.option_list
)
if len(list_options_selected) > 1:
|
Hurray for tests!!! These weren't exclusive options anymore.
|
py
|
diff --git a/bitsharesbase/operationids.py b/bitsharesbase/operationids.py
index <HASH>..<HASH> 100644
--- a/bitsharesbase/operationids.py
+++ b/bitsharesbase/operationids.py
@@ -44,6 +44,7 @@ ops = [
"transfer_from_blind",
"asset_settle_cancel",
"asset_claim_fees",
+ "fba_distribute"
]
operations = {o: ops.index(o) for o in ops}
|
[operationids] added missing fba_distribute
|
py
|
diff --git a/troposphere/autoscaling.py b/troposphere/autoscaling.py
index <HASH>..<HASH> 100644
--- a/troposphere/autoscaling.py
+++ b/troposphere/autoscaling.py
@@ -179,6 +179,7 @@ class AutoScalingGroup(AWSObject):
'LifecycleHookSpecificationList':
([LifecycleHookSpecification], False),
'LoadBalancerNames': (list, False),
+ 'MaxInstanceLifetime': (integer, False),
'MaxSize': (integer, True),
'MetricsCollection': ([MetricsCollection], False),
'MinSize': (integer, True),
|
Adding AWS::AutoScaling::AutoScalingGroup props, per March <I>, <I> update
|
py
|
diff --git a/examples/opf/clients/cpu/cpu.py b/examples/opf/clients/cpu/cpu.py
index <HASH>..<HASH> 100755
--- a/examples/opf/clients/cpu/cpu.py
+++ b/examples/opf/clients/cpu/cpu.py
@@ -62,7 +62,8 @@ def runCPU():
actline.axes.set_ylim(0, 100)
predline.axes.set_ylim(0, 100)
- while plt.fignum_exists(fig.number):
+ while True:
+ s = time.time()
# Get the CPU usage.
cpu = psutil.cpu_percent()
|
Partial revert as fignum_exists doesn't exist on plt
|
py
|
diff --git a/testutils.py b/testutils.py
index <HASH>..<HASH> 100644
--- a/testutils.py
+++ b/testutils.py
@@ -355,7 +355,7 @@ def make_tests(input_dir, msg_dir, filter_rgx, callbacks):
for module_file, messages_file in (
get_tests_info(input_dir, msg_dir, 'func_', '')
):
- if not is_to_run(module_file):
+ if not is_to_run(module_file) or module_file.endswith('.pyc'):
continue
base = module_file.replace('func_', '').replace('.py', '')
|
Ignore .pyc files for the old functional test framework.
|
py
|
diff --git a/flask_appbuilder/security/views.py b/flask_appbuilder/security/views.py
index <HASH>..<HASH> 100644
--- a/flask_appbuilder/security/views.py
+++ b/flask_appbuilder/security/views.py
@@ -330,7 +330,7 @@ class RoleModelView(ModelView):
list_columns = ['name', 'permissions']
order_columns = ['name']
- @action("Copy Role", lazy_gettext('Copy Role'), lazy_gettext('Copy the selected roles?'), icon='fa-copy', single=False)
+ @action("copyrole", lazy_gettext('Copy Role'), lazy_gettext('Copy the selected roles?'), icon='fa-copy', single=False)
def copy_role(self, items):
self.update_redirect()
for item in items:
|
[ui] Fix, Copy role modal not showing
|
py
|
diff --git a/s_tui/Sources/TemperatureSource.py b/s_tui/Sources/TemperatureSource.py
index <HASH>..<HASH> 100644
--- a/s_tui/Sources/TemperatureSource.py
+++ b/s_tui/Sources/TemperatureSource.py
@@ -47,10 +47,17 @@ class TemperatureSource(Source):
# Set temperature threshold if a custom one is set
if temp_thresh is not None:
- if int(temp_thresh) > 0:
- self.temp_thresh = int(temp_thresh)
- logging.debug("Updated custom threshold to " +
- str(self.temp_thresh))
+ try:
+ if int(temp_thresh) > 0:
+ self.temp_thresh = int(temp_thresh)
+ logging.debug("Updated custom threshold to " +
+ str(self.temp_thresh))
+ else:
+ self.temp_thresh = self.THRESHOLD_TEMP
+ except ValueError:
+ self.temp_thresh = self.THRESHOLD_TEMP
+ else:
+ self.temp_thresh = self.THRESHOLD_TEMP
self.update()
logging.debug("Update is updated to " + str(self.update))
|
Handle errors regarding temperature threshold (#<I>) If no custom threshold is set we need something to compare to. If the custom threshold is not a valid value we need to catch ValueError. If the custom threshold is lower than zero we need to set some value, too. For all these cases, we use self.THRESHOLD_TEMP as value for temp_thresh. This fixes issue #<I>.
|
py
|
diff --git a/dpark/tracker.py b/dpark/tracker.py
index <HASH>..<HASH> 100644
--- a/dpark/tracker.py
+++ b/dpark/tracker.py
@@ -127,13 +127,15 @@ class TrackerClient(object):
if self.ctx is None:
self.ctx = zmq.Context()
+ sock = None
try:
sock = self.ctx.socket(zmq.REQ)
sock.connect(self.addr)
sock.send_pyobj(msg)
return sock.recv_pyobj()
finally:
- sock.close()
+ if sock:
+ sock.close()
def stop(self):
if self.ctx is not None:
|
Bugfix: variable maybe not defined in finnal block.
|
py
|
diff --git a/tests/integration/long/test_consistency.py b/tests/integration/long/test_consistency.py
index <HASH>..<HASH> 100644
--- a/tests/integration/long/test_consistency.py
+++ b/tests/integration/long/test_consistency.py
@@ -335,8 +335,8 @@ class ConnectivityTest(unittest.TestCase):
address = hosts[0].address
node_to_stop = int(address.split('.')[-1:][0])
cluster.shutdown()
- cluster = Cluster(protocol_version=PROTOCOL_VERSION)
- cluster.connect(contact_points=["127.0.0.2"], wait_for_all_pools=True)
+ cluster = Cluster(contact_points=["127.0.0.2"],protocol_version=PROTOCOL_VERSION)
+ cluster.connect(wait_for_all_pools=True)
try:
force_stop(node_to_stop)
wait_for_down(cluster, node_to_stop)
|
Moving contact_points param to constructor for ConnectivityTest
|
py
|
diff --git a/abydos/bm.py b/abydos/bm.py
index <HASH>..<HASH> 100644
--- a/abydos/bm.py
+++ b/abydos/bm.py
@@ -1,4 +1,26 @@
# -*- coding: utf-8 -*-
+"""abydos.bmd
+
+Copyright 2014 by Christopher C. Little.
+This file is part of Abydos.
+
+This file is based on Alexander Beider and Stephen P. Morse's implementation of
+the Beider-Morse Phonetic Matching (BMPM) System, available at
+http://stevemorse.org/phonetics/bmpm.htm.
+
+Abydos is free software: you can redistribute it and/or modify
+it under the terms of the GNU General Public License as published by
+the Free Software Foundation, either version 3 of the License, or
+(at your option) any later version.
+
+Abydos is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with Abydos. If not, see <http://www.gnu.org/licenses/>.
+"""
import re
import unicodedata
|
added docstring to bm module
|
py
|
diff --git a/resolwe/test/testcases/__init__.py b/resolwe/test/testcases/__init__.py
index <HASH>..<HASH> 100644
--- a/resolwe/test/testcases/__init__.py
+++ b/resolwe/test/testcases/__init__.py
@@ -63,8 +63,7 @@ class TestCaseHelpers(DjangoSimpleTestCase):
super().setUp()
- # Reset Elastic search indices.
- index_builder.destroy()
+ # Prepare Elastic search indices.
index_builder.discover_indexes()
index_builder.create_mappings()
index_builder.unregister_signals()
@@ -89,6 +88,15 @@ class TestCaseHelpers(DjangoSimpleTestCase):
self.addCleanup(self._clean_up)
+ def tearDown(self):
+ """Cleanup environment."""
+ from resolwe.elastic.builder import index_builder
+
+ super().tearDown()
+
+ # Reset Elastic search indices.
+ index_builder.destroy()
+
def keep_data(self, mock_purge=True):
"""Do not delete output files after tests."""
self.fail(
|
Only destroy ES indices on test tear down
|
py
|
diff --git a/pyblish_qml/ipc/server.py b/pyblish_qml/ipc/server.py
index <HASH>..<HASH> 100644
--- a/pyblish_qml/ipc/server.py
+++ b/pyblish_qml/ipc/server.py
@@ -267,6 +267,8 @@ class Server(object):
sys.stdout.write(line)
if not self.listening:
+ self._start_pulse()
+
if self.modal:
_listen()
else:
@@ -274,8 +276,6 @@ class Server(object):
thread.daemon = True
thread.start()
- self._start_pulse()
-
self.listening = True
def _start_pulse(self):
|
Ensure pulse thread is created before we start listening
|
py
|
diff --git a/openfisca_survey_manager/scenarios.py b/openfisca_survey_manager/scenarios.py
index <HASH>..<HASH> 100644
--- a/openfisca_survey_manager/scenarios.py
+++ b/openfisca_survey_manager/scenarios.py
@@ -9,7 +9,7 @@ import pandas
import re
from openfisca_core import formulas, periods, simulations
-from openfisca_core.tools.memory import get_memory_usage
+from openfisca_core.tools.memory import get_memory_usage, print_memory_usage
from openfisca_survey_manager.calibration import Calibration
from .survey_collections import SurveyCollection
@@ -552,6 +552,13 @@ class AbstractSurveyScenario(object):
survey_ = survey_collection.get_survey(survey)
return survey_.get_values(table = table, variables = variables) # .reset_index(drop = True)
+ def memory_usage(self, reference = False):
+ if reference:
+ simulation = self.reference_simulation
+ else:
+ simulation = self.simulation
+ print_memory_usage(simulation)
+
def neutralize_variables(self, tax_benefit_system):
"""
Neutralizing input variables not present in the input_data_frame and keep some crucial variables
|
Add memoru_usage method to SurveyScenario
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -13,7 +13,7 @@ except IOError:
setup(
name="12factor-vault",
- version="0.1.14",
+ version="0.1.15",
packages=["vault12factor"],
classifiers=[
"Development Status :: 4 - Beta",
|
bump release version to <I>
|
py
|
diff --git a/AlphaTwirl/EventReader/EventLoopProgressReportWriter.py b/AlphaTwirl/EventReader/EventLoopProgressReportWriter.py
index <HASH>..<HASH> 100755
--- a/AlphaTwirl/EventReader/EventLoopProgressReportWriter.py
+++ b/AlphaTwirl/EventReader/EventLoopProgressReportWriter.py
@@ -6,9 +6,9 @@ class EventLoopProgressReportWriter(object):
"""A progress report writer of an event loop
"""
- def write(self, taskid, component, event):
+ def write(self, taskid, dataset, event):
return ProgressReport(
- name = component.name,
+ name = dataset.name,
done = event.iEvent + 1,
total = event.nEvents,
taskid = taskid
|
rename component dataset in EventLoopProgressReportWriter
|
py
|
diff --git a/tests/test_gallery.py b/tests/test_gallery.py
index <HASH>..<HASH> 100644
--- a/tests/test_gallery.py
+++ b/tests/test_gallery.py
@@ -292,6 +292,9 @@ def test_gallery(settings, tmpdir):
def test_gallery_max_img_pixels(settings, tmpdir, monkeypatch):
"Test the Gallery class with the max_img_pixels setting."
+ # monkeypatch is used here to reset the value to the PIL default.
+ # This value does not matter, other than it is "large"
+ # to show that settings['max_img_pixels'] works.
monkeypatch.setattr('PIL.Image.MAX_IMAGE_PIXELS', 100_000_000)
with open(str(tmpdir.join('my.css')), mode='w') as f:
|
Add a comment explaining why monkeypatch is used in this test
|
py
|
diff --git a/sos/plugins/networking.py b/sos/plugins/networking.py
index <HASH>..<HASH> 100644
--- a/sos/plugins/networking.py
+++ b/sos/plugins/networking.py
@@ -112,6 +112,13 @@ class Networking(Plugin):
cmd = "ip6tables -t "+tablename+" -nvL"
self.add_cmd_output(cmd)
+ def collect_nftables(self):
+ """ Collects nftables rulesets with 'nft' commands if the modules
+ are present """
+
+ if self.check_ext_prog("grep -q nf_tables /proc/modules"):
+ self.add_cmd_output("nft list ruleset")
+
def setup(self):
super(Networking, self).setup()
self.add_copy_spec([
@@ -126,6 +133,9 @@ class Networking(Plugin):
"/etc/network*",
"/etc/NetworkManager/NetworkManager.conf",
"/etc/NetworkManager/system-connections",
+ "/etc/nftables",
+ "/etc/sysconfig/nftables.conf",
+ "/etc/nftables.conf",
"/etc/dnsmasq*",
"/sys/class/net/*/flags",
"/etc/iproute2"
@@ -149,6 +159,8 @@ class Networking(Plugin):
self.collect_ip6table("nat")
self.collect_ip6table("mangle")
+ self.collect_nftables()
+
self.add_cmd_output("netstat %s -neopa" % self.ns_wide,
root_symlink="netstat")
|
[networking] collect nftables rules and files This patch adds to the networking plugin the command: $ nft list rulesets Which will provide all currently loaded nftables rulesets. In addition, it collects static nftables configuration.
|
py
|
diff --git a/hebel/__init__.py b/hebel/__init__.py
index <HASH>..<HASH> 100644
--- a/hebel/__init__.py
+++ b/hebel/__init__.py
@@ -119,12 +119,14 @@ def init(device_id=None, random_seed=None):
pycuda_ops.init()
def _finish_up():
- global context
- context.pop()
- context = None
+ global is_initialized
+ if is_initialized:
+ global context
+ context.pop()
+ context = None
- from pycuda.tools import clear_context_caches
- clear_context_caches()
+ from pycuda.tools import clear_context_caches
+ clear_context_caches()
import atexit
atexit.register(_finish_up)
\ No newline at end of file
|
Check if context is initialized before attempting to destroy it.
|
py
|
diff --git a/pykakasi/kanwa.py b/pykakasi/kanwa.py
index <HASH>..<HASH> 100644
--- a/pykakasi/kanwa.py
+++ b/pykakasi/kanwa.py
@@ -37,12 +37,12 @@ class kanwa (object):
key = "%04x"%ord(unicode(char))
else:
key = "%04x"%ord(char)
- try: #already exist?
- table = self._jisyo_table[key]
- except:
+ if key in self._jisyo_table:
+ return self._jisyo_table[key]
+ else:
try:
- table = self._jisyo_table[key] = loads(decompress(self._kanwadict[key]))
+ self._jisyo_table[key] = loads(decompress(self._kanwadict[key]))
+ return self._jisyo_table[key]
except:
return None
- return table
|
Kanwa: reduce try..except block
|
py
|
diff --git a/src/python/pants/backend/jvm/tasks/jar_publish.py b/src/python/pants/backend/jvm/tasks/jar_publish.py
index <HASH>..<HASH> 100644
--- a/src/python/pants/backend/jvm/tasks/jar_publish.py
+++ b/src/python/pants/backend/jvm/tasks/jar_publish.py
@@ -385,7 +385,7 @@ class JarPublish(JarTask, ScmPublish):
for repo, data in self.repos.items():
auth = data.get('auth')
if auth:
- credentials = self.context.resolve(auth).next()
+ credentials = next(iter(context.resolve(auth)))
user = credentials.username(data['resolver'])
password = credentials.password(data['resolver'])
self.context.log.debug('Found auth for repo=%s user=%s' % (repo, user))
|
Fix credentials fetching during publishing Reviewed at <URL>
|
py
|
diff --git a/nhlib/site.py b/nhlib/site.py
index <HASH>..<HASH> 100644
--- a/nhlib/site.py
+++ b/nhlib/site.py
@@ -45,6 +45,7 @@ class Site(object):
If any of ``vs30``, ``z1pt0`` or ``z2pt5`` is zero or negative.
.. note::
+
:class:`Sites <Site>` are pickleable
"""
__slots__ = 'location vs30 vs30measured z1pt0 z2pt5'.split()
|
site: fixed a minor doc building issue in Site
|
py
|
diff --git a/dpark/job.py b/dpark/job.py
index <HASH>..<HASH> 100644
--- a/dpark/job.py
+++ b/dpark/job.py
@@ -42,7 +42,7 @@ class Job:
return cls.nextJobId
LOCALITY_WAIT = 0
-WAIT_FOR_RUNNING = 15
+WAIT_FOR_RUNNING = 10
MAX_TASK_FAILURES = 4
MAX_TASK_MEMORY = 15 << 10 # 15GB
@@ -278,6 +278,7 @@ class SimpleJob(Job):
logger.warning("task %d timeout %.1f (at %s), re-assign it",
task.id, now - task.start, task.host)
self.launched[i] = False
+ self.blacklist[i].append(task.host)
self.tasksLaunched -= 1
if self.tasksFinished > self.numTasks / 3:
|
add non-responsable slaves into blacklist
|
py
|
diff --git a/epylint.py b/epylint.py
index <HASH>..<HASH> 100755
--- a/epylint.py
+++ b/epylint.py
@@ -58,7 +58,7 @@ def lint(filename):
parentPath = os.path.dirname(parentPath)
# Start pylint
- process = Popen("pylint -f parseable -r n --disable=C,R,I '%s'" %
+ process = Popen('pylint -f parseable -r n --disable=C,R,I "%s"' %
childPath, shell=True, stdout=PIPE, stderr=PIPE,
cwd=parentPath)
p = process.stdout
|
apply patch provided by vijayendra bapte on the python projects list for using epylint under windows environment
|
py
|
diff --git a/salt/client/__init__.py b/salt/client/__init__.py
index <HASH>..<HASH> 100644
--- a/salt/client/__init__.py
+++ b/salt/client/__init__.py
@@ -883,6 +883,7 @@ class LocalClient(object):
else:
if kwargs.get(u'yield_pub_data'):
yield pub_data
+ kwargs.setdefault('expect_minions', True)
for fn_ret in self.get_iter_returns(pub_data[u'jid'],
pub_data[u'minions'],
timeout=self._get_timeout(timeout),
|
Yield timed out minions from LocalClient.cmd_iter Fixes #<I>
|
py
|
diff --git a/bcbio/pipeline/sample.py b/bcbio/pipeline/sample.py
index <HASH>..<HASH> 100644
--- a/bcbio/pipeline/sample.py
+++ b/bcbio/pipeline/sample.py
@@ -64,7 +64,7 @@ def delayed_bam_merge(data):
if cur_out_file:
config = copy.deepcopy(data["config"])
config["algorithm"]["save_diskspace"] = False
- if len(in_files) > 0:
+ if len(cur_in_files) > 0:
merged_file = merge_bam_files(cur_in_files, os.path.dirname(cur_out_file), config,
out_file=cur_out_file)
else:
@@ -74,7 +74,8 @@ def delayed_bam_merge(data):
if ext:
data[file_key + "-plus"][ext] = merged_file
else:
- data["%s-orig" % file_key] = data[file_key]
+ if len(cur_in_files) == 0:
+ data["%s-orig" % file_key] = data[file_key]
data[file_key] = merged_file
data.pop("region", None)
data.pop("combine", None)
|
Avoid adding *-orig files when merging from subparts instead of incorporating new file name
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -2,7 +2,7 @@ from distutils.core import setup
setup(
name='py-canary',
- version='0.2.0',
+ version='0.2.1',
packages=['canary'],
url='https://github.com/snjoetw/py-canary',
license='MIT',
|
Bumped version to <I>
|
py
|
diff --git a/docxtpl/__init__.py b/docxtpl/__init__.py
index <HASH>..<HASH> 100644
--- a/docxtpl/__init__.py
+++ b/docxtpl/__init__.py
@@ -9,6 +9,7 @@ __version__ = '0.3.5'
from lxml import etree
from docx import Document
+from docx.opc.oxml import serialize_part_xml, parse_xml
from jinja2 import Template
from cgi import escape
import re
@@ -102,7 +103,7 @@ class DocxTemplate(object):
def get_headers_footers_xml(self, uri):
for relKey, val in self.docx._part._rels.items():
if val.reltype == uri:
- yield relKey, val._target._blob
+ yield relKey, self.xml_to_string(parse_xml(val._target._blob))
def get_headers_footers_encoding(self,xml):
m = re.match(r'<\?xml[^\?]+\bencoding="([^"]+)"',xml,re.I)
|
Force header / footer nodes format. get_headers_footers_xml was returning the xml in a raw string without any processing on it. If we had a quote in docx document, it was encode in a xml entity ("), but because of the entities, Jinja raise error. Now, xml passes through etree.tostring and we have a clean string.
|
py
|
diff --git a/prawcore/const.py b/prawcore/const.py
index <HASH>..<HASH> 100644
--- a/prawcore/const.py
+++ b/prawcore/const.py
@@ -1,6 +1,6 @@
"""Constants for the prawcore package."""
-__version__ = '0.0.8'
+__version__ = '0.0.9'
ACCESS_TOKEN_PATH = '/api/v1/access_token'
AUTHORIZATION_PATH = '/api/v1/authorize'
|
Bump to <I>.
|
py
|
diff --git a/drdump/__init__.py b/drdump/__init__.py
index <HASH>..<HASH> 100644
--- a/drdump/__init__.py
+++ b/drdump/__init__.py
@@ -1,4 +1,4 @@
"""
A Django data dump script generator
"""
-__version__ = '0.2.1'
\ No newline at end of file
+__version__ = '0.2.2'
\ No newline at end of file
|
Forget to change version, REAL bump to <I>
|
py
|
diff --git a/autopython/ipython.py b/autopython/ipython.py
index <HASH>..<HASH> 100644
--- a/autopython/ipython.py
+++ b/autopython/ipython.py
@@ -131,3 +131,4 @@ class PresenterShell(object):
def end(self):
self._stop_shell_thread()
+ print()
|
Cosmetic change: print an empty line after quiting
|
py
|
diff --git a/xcat/xcat.py b/xcat/xcat.py
index <HASH>..<HASH> 100644
--- a/xcat/xcat.py
+++ b/xcat/xcat.py
@@ -179,9 +179,11 @@ def console(ctx):
child_node_count_result = yield from executor.count_nodes(node.children)
click.echo("%i child node found." % child_node_count_result)
- for child in node.children(child_node_count_result):
- child_name = yield from executor.get_string(child.name)
- click.echo(child_name)
+ futures = map(asyncio.Task, (executor.get_string(child.name) for child in node.children(child_node_count_result) ))
+ results = (yield from asyncio.gather(*futures))
+
+ for result in results:
+ click.echo(result)
@asyncio.coroutine
def command_cd(node, params):
|
Speed up of the ls command.
|
py
|
diff --git a/structures.py b/structures.py
index <HASH>..<HASH> 100644
--- a/structures.py
+++ b/structures.py
@@ -393,7 +393,7 @@ class Integer(Construct):
>>> Integer(1).build(-1)
Traceback (most recent call last):
...
- structures.BuildingError: ubyte format requires 0 <= number <= 255
+ structures.BuildingError: ...
>>> Integer(3)
Traceback (most recent call last):
@@ -1178,7 +1178,7 @@ class CString(Subconstruct):
>>> s.parse(_)
Traceback (most recent call last):
...
- structures.ParsingError: 'utf-16-le' codec can't decode byte 0x66 in position 0: truncated data
+ structures.ParsingError: 'utf-16...' codec can't decode byte 0x66 in position 0: truncated data
:param encoding: See ``StringEncoded``.
|
Deal with pypy giving different error messages
|
py
|
diff --git a/test.py b/test.py
index <HASH>..<HASH> 100644
--- a/test.py
+++ b/test.py
@@ -378,6 +378,11 @@ print len(options.long_option.split())
self.assertEqual(iam1, iam2)
+ def test_cwd(self):
+ from pbs import pwd
+ self.assertEqual(str(pwd(_cwd='/tmp')), '/tmp\n')
+ self.assertEqual(str(pwd(_cwd='/etc')), '/etc\n')
+
if __name__ == "__main__":
|
test stub for _cwd
|
py
|
diff --git a/pysnmp/proto/mpmod/rfc3412.py b/pysnmp/proto/mpmod/rfc3412.py
index <HASH>..<HASH> 100644
--- a/pysnmp/proto/mpmod/rfc3412.py
+++ b/pysnmp/proto/mpmod/rfc3412.py
@@ -90,6 +90,11 @@ class SnmpV3MessageProcessingModel(AbstractMessageProcessingModel):
contextEngineId = snmpEngineID
else:
contextEngineId = peerSnmpEngineData['contextEngineId']
+ # Defaulting contextEngineID to securityEngineID should
+ # probably be done on Agent side (see 7.1.3.d.2,) so this
+ # is a sort of workaround.
+ if not contextEngineId:
+ contextEngineId = peerSnmpEngineData['securityEngineID']
# 7.1.5
if not contextName:
|
a workaround against possibly broken Agents: SNMPv3 Manager code defaults ContextEngineId to SecurityEngineId whenever ContextEngineId is not reported by authoritative SNMP engine on discovery.
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -94,6 +94,7 @@ setup(
author = 'The Emma2 team',
# list packages here
packages = ['emma2',
+ 'emma.coordinates',
'emma2.msm',
'emma2.msm.analysis',
'emma2.msm.analysis.dense',
|
[setup] added coordinates package to setup
|
py
|
diff --git a/zk_shell/tests/test_mirror_cmds.py b/zk_shell/tests/test_mirror_cmds.py
index <HASH>..<HASH> 100644
--- a/zk_shell/tests/test_mirror_cmds.py
+++ b/zk_shell/tests/test_mirror_cmds.py
@@ -135,10 +135,11 @@ class MirrorCmdsTestCase(ShellTestCase):
self.shell.onecmd("mirror %s/very %s/backup false false true" % (
self.tests_path, self.tests_path))
self.shell.onecmd("tree %s/backup" % (self.tests_path))
- expected_output = u""".
-\u251c\u2500\u2500 znode3\n\u251c\u2500\u2500 nested\n\u2502 \u251c\u2500\u2500 znode\n\u2502 \u251c\u2500\u2500 znode2
-"""
- self.assertEqual(expected_output, self.output.getvalue())
+
+ self.assertIn("znode3", self.output.getvalue())
+ self.assertIn("nested", self.output.getvalue())
+ self.assertIn("znode", self.output.getvalue())
+ self.assertIn("znode2", self.output.getvalue())
def test_mirror_local_bad_path(self):
""" try mirror non existent path in the local zk cluster """
|
Fix mirror tests (don't depend on tree's order, it may vary)
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -18,6 +18,7 @@ setup(
scripts=['scripts/multiqc'],
install_requires=[
'jinja2',
- 'simplejson'
+ 'simplejson',
+ 'pyyaml'
]
)
|
Updated setup.py with an extra package dependency.
|
py
|
diff --git a/pyuavcan/_cli/_main.py b/pyuavcan/_cli/_main.py
index <HASH>..<HASH> 100644
--- a/pyuavcan/_cli/_main.py
+++ b/pyuavcan/_cli/_main.py
@@ -165,3 +165,7 @@ def _configure_logging(verbosity_level: int) -> None:
except Exception as ex:
_logger.debug('Colored logs are not available: %s: %s', type(ex), ex)
_logger.info('Consider installing "coloredlogs" from PyPI to make log messages look better')
+
+ # Handle special cases one by one.
+ if log_level < logging.INFO:
+ logging.getLogger('pydsdl').setLevel(logging.INFO) # Too much low-level logs from PyDSDL.
|
Never use DEBUG-level logging for PyDSDL because it yields too much data
|
py
|
diff --git a/astrobase/lcproc.py b/astrobase/lcproc.py
index <HASH>..<HASH> 100644
--- a/astrobase/lcproc.py
+++ b/astrobase/lcproc.py
@@ -3550,8 +3550,9 @@ def runpf(lcfile,
if finmags.size < minobservations:
LOGERROR('not enough non-nan observations for '
- 'this LC. have: %s, required: %s, magcol: %s, skipping...' %
- (finmags.size, mcol, minobservations))
+ 'this LC. have: %s, required: %s, '
+ 'magcol: %s, skipping...' %
+ (finmags.size, minobservations, mcol))
continue
pfmkeys = []
|
lcproc: minor fix
|
py
|
diff --git a/ratcave/scene.py b/ratcave/scene.py
index <HASH>..<HASH> 100644
--- a/ratcave/scene.py
+++ b/ratcave/scene.py
@@ -60,9 +60,12 @@ class Scene(HasUniforms):
gl.glClearColor(*(self.bgColor + (1.,)))
gl.glClear(gl.GL_COLOR_BUFFER_BIT | gl.GL_DEPTH_BUFFER_BIT)
- def draw(self):
+ def draw(self, clear=True):
"""Draw each visible mesh in the scene from the perspective of the scene's camera and lit by its light."""
+ if clear:
+ self.clear()
+
with glutils.enable_states(self.gl_states):
self.camera.update()
@@ -85,7 +88,6 @@ class Scene(HasUniforms):
for face, rotation in enumerate([[180, 90, 0], [180, -90, 0], [90, 0, 0], [-90, 0, 0], [180, 0, 0], [0, 0, 180]]):
self.camera.rotation.xyz = rotation
cubetexture.attach_to_fbo(face)
- self.clear()
self.draw()
|
brought back auto-clearing--it makes working a lot simpler.
|
py
|
diff --git a/ci/make_conda_packages.py b/ci/make_conda_packages.py
index <HASH>..<HASH> 100644
--- a/ci/make_conda_packages.py
+++ b/ci/make_conda_packages.py
@@ -31,8 +31,8 @@ def main(env, do_upload):
"--python", env['CONDA_PY'],
"--numpy", env['CONDA_NPY'],
"--skip-existing",
- "-c", "quantopian",
- "-c", "https://conda.anaconda.org/quantopian/label/ci"]
+ "-c", "quantopian/label/ci",
+ "-c", "quantopian"]
output = None
|
BLD: Swap conda build args to check CI label/channel first BLD: Change url to just channel/label path
|
py
|
diff --git a/tests/support/parser/__init__.py b/tests/support/parser/__init__.py
index <HASH>..<HASH> 100644
--- a/tests/support/parser/__init__.py
+++ b/tests/support/parser/__init__.py
@@ -449,6 +449,14 @@ class SaltTestingParser(optparse.OptionParser):
ret.update(filename_map[path_expr])
break
+ if any(x.startswith('integration.proxy.') for x in ret):
+ # Ensure that the salt-proxy daemon is started for these tests.
+ self.options.proxy = True
+
+ if any(x.startswith('integration.ssh.') for x in ret):
+ # Ensure that an ssh daemon is started for these tests.
+ self.options.ssh = True
+
return ret
def parse_args(self, args=None, values=None):
|
Enable proxy/ssh daemons when filename mapping causes those tests to be run The proxy and ssh tests won't run when the `--proxy` and `--ssh` CLI flags, respectively, aren't passed to runtests.py. This ensures that we start the daemons when the file mapping logic triggers them to be run.
|
py
|
diff --git a/tests/machine_control/test_machine_controller.py b/tests/machine_control/test_machine_controller.py
index <HASH>..<HASH> 100644
--- a/tests/machine_control/test_machine_controller.py
+++ b/tests/machine_control/test_machine_controller.py
@@ -1170,7 +1170,7 @@ class TestMachineController(object):
BASE_ADDRESS = 0x68900000
# Create the mock controller
cn._send_scp = mock.Mock()
- cn.read_struct_field = mock.Mock(return_value = BASE_ADDRESS)
+ cn.read_struct_field = mock.Mock(return_value=BASE_ADDRESS)
# Override _send_ffcs such that it ensures increasing values of
# ((region << 18) | cores)
|
Fix flake8 in MachineController tests. Somehow missed after merging the faster app loading stuff... :s
|
py
|
diff --git a/indra/assemblers/sif_assembler.py b/indra/assemblers/sif_assembler.py
index <HASH>..<HASH> 100644
--- a/indra/assemblers/sif_assembler.py
+++ b/indra/assemblers/sif_assembler.py
@@ -31,7 +31,21 @@ class SifAssembler(object):
def make_model(self, use_name_as_key=False, include_mods=False,
include_complexes=False):
- """Assemble the graph from the assembler's list of INDRA Statements."""
+ """Assemble the graph from the assembler's list of INDRA Statements.
+
+ Parameters
+ ----------
+ use_name_as_key : boolean
+ If True, uses the name of the agent as the key to the nodes in
+ the network. If False (default) uses the matches_key() of the
+ agent.
+ include_mods : boolean
+ If True, adds Modification statements into the graph as directed
+ edges. Default is False.
+ include_complexes : boolean
+ If True, creates two edges (in both directions) between all pairs
+ of nodes in Complex statements. Default is False.
+ """
def add_node_edge(s, t, polarity):
if s is not None:
s = self._add_node(s, use_name_as_key=use_name_as_key)
|
Add docstring to SifAssembler
|
py
|
diff --git a/gromacs/__init__.py b/gromacs/__init__.py
index <HASH>..<HASH> 100644
--- a/gromacs/__init__.py
+++ b/gromacs/__init__.py
@@ -133,6 +133,7 @@ class AutoCorrectionWarning(Warning):
class BadParameterWarning(Warning):
"""Warns if some parameters or variables are unlikely to be appropriate or correct."""
+import warnings
# These warnings should always be displayed because other parameters
# can have changed, eg during interactive use.
for w in (AutoCorrectionWarning, BadParameterWarning,
@@ -148,7 +149,6 @@ import tools
# Ignore warnings from a few programs that do not produce
# documentation when run with '-h' (only applies when the default for
# failuremode of core.GromacsCommand is changed to 'warn')
-import warnings
warnings.simplefilter("ignore", GromacsFailureWarning)
_have_g_commands = []
_missing_g_commands = []
|
fixed: warnings imported too late git-svn-id: svn+ssh://gonzo.med.jhmi.edu/scratch/svn/woolf_repository/users/oliver/Library/GromacsWrapper@<I> df5ba8eb-4b0b-<I>-8c<I>-c<I>f<I>b<I>c
|
py
|
diff --git a/pyqode/core/editor.py b/pyqode/core/editor.py
index <HASH>..<HASH> 100644
--- a/pyqode/core/editor.py
+++ b/pyqode/core/editor.py
@@ -1644,13 +1644,13 @@ class QCodeEdit(QtGui.QPlainTextEdit):
key == "selectionBackground" or key == "selectionForeground"
or not key):
p = self.palette()
- c = self.style.value("background")
+ c = QtGui.QColor(self.style.value("background"))
p.setColor(p.Base, c)
- c = self.style.value("foreground")
+ c = QtGui.QColor(self.style.value("foreground"))
p.setColor(p.Text, c)
- c = self.style.value("selectionBackground")
+ c = QtGui.QColor(self.style.value("selectionBackground"))
p.setColor(QtGui.QPalette.Highlight, c)
- c = self.style.value("selectionForeground")
+ c = QtGui.QColor(self.style.value("selectionForeground"))
p.setColor(QtGui.QPalette.HighlightedText, c)
self.setPalette(p)
|
Ensure we have a QColor
|
py
|
diff --git a/commitizen/git.py b/commitizen/git.py
index <HASH>..<HASH> 100644
--- a/commitizen/git.py
+++ b/commitizen/git.py
@@ -3,16 +3,9 @@ from pathlib import Path
from tempfile import NamedTemporaryFile
from typing import List, Optional
-from typing_extensions import Protocol
-
from commitizen import cmd
-class GitProtocol(Protocol):
- rev: str
- name: str
-
-
class GitObject:
rev: str
name: str
|
fix(git): missing dependency removed
|
py
|
diff --git a/hypermap/aggregator/solr.py b/hypermap/aggregator/solr.py
index <HASH>..<HASH> 100644
--- a/hypermap/aggregator/solr.py
+++ b/hypermap/aggregator/solr.py
@@ -20,12 +20,10 @@ def get_date(layer):
"""
date = None
type = 1
- # for WM layer we may have a range
- if hasattr(layer, 'layerwm'):
- layer_dates = layer.get_layer_dates()
- if layer_dates:
- date = layer_dates[0][0]
- type = layer_dates[0][1]
+ layer_dates = layer.get_layer_dates()
+ if layer_dates:
+ date = layer_dates[0][0]
+ type = layer_dates[0][1]
if date is None:
date = layer.created.date()
if type == 0:
@@ -180,6 +178,7 @@ class SolrHypermap(object):
"bbox": wkt,
"DomainName": layer.service.get_domain,
}
+
solr_date, type = get_date(layer)
if solr_date is not None:
solr_record['LayerDate'] = solr_date
|
Fixes the date problem reported today
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100755
--- a/setup.py
+++ b/setup.py
@@ -48,6 +48,8 @@ except (IOError, ImportError):
with open(readme, 'rb') as f:
long_description = f.read().decode('utf-8')
+PY2 = sys.version_info[0] == 2
+
setup(
name = 'BenchExec',
version = version,
@@ -72,13 +74,14 @@ setup(
package_data = {'benchexec.tablegenerator': ['template.*']},
entry_points = {
"console_scripts": [
- 'benchexec = benchexec:main',
'runexec = benchexec.runexecutor:main',
+ ] + ([
+ 'benchexec = benchexec:main',
'table-generator = benchexec.tablegenerator:main',
- ]
+ ] if not PY2 else []),
},
install_requires = ['tempita==0.5.2'],
setup_requires=['nose>=1.0'],
- test_suite = 'nose.collector' if sys.version_info[0] != 2 else 'benchexec.test_python2.Python2Tests',
+ test_suite = 'nose.collector' if not PY2 else 'benchexec.test_python2.Python2Tests',
zip_safe = True,
)
|
Under Python 2, install only runexec, because the remaining tools are not expected to work.
|
py
|
diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py
index <HASH>..<HASH> 100644
--- a/src/graphql/execution/execute.py
+++ b/src/graphql/execution/execute.py
@@ -1082,11 +1082,11 @@ def get_field_def(
"""Get field definition.
This method looks up the field on the given type definition. It has special casing
- for the two introspection fields, ``__schema`` and ``__typename``. ``__typename`` is
- special because it can always be queried as a field, even in situations where no
- other fields are allowed, like on a Union. ``__schema`` could get automatically
- added to the query type, but that would require mutating type definitions, which
- would cause issues.
+ for the three introspection fields, ``__schema``, ``__type`, and ``__typename``.
+ ``__typename`` is special because it can always be queried as a field, even in
+ situations where no other fields are allowed, like on a Union. ``__schema`` and
+ ``__type`` could get automatically added to the query type, but that would require
+ mutating type definitions, which would cause issues.
For internal use only.
"""
|
Make documentation for get_field_def more accurate Replicates graphql/graphql-js@fb<I>fcee9b3f9c4ae<I>ba4d8db5b7bd<I>c1
|
py
|
diff --git a/pages/tests/test_unit.py b/pages/tests/test_unit.py
index <HASH>..<HASH> 100644
--- a/pages/tests/test_unit.py
+++ b/pages/tests/test_unit.py
@@ -625,4 +625,11 @@ class UnitTestCase(TestCase):
p.save()
self.assertEqual(unicode(p), u"page-%d" % p.id)
-
+ def test_context_processor(self):
+ """Test that the page's context processor is properly activated."""
+ from pages.views import details
+ req = get_request_mock()
+ page1 = self.new_page(content={'slug':'page1', 'title':'hello'})
+ page1.save()
+ self.set_setting("PAGES_MEDIA_URL", "test_request_context")
+ self.assertContains(details(req, path='/'), "test_request_context")
|
Add a test to be sure the context processor works properly.
|
py
|
diff --git a/unleash/util.py b/unleash/util.py
index <HASH>..<HASH> 100644
--- a/unleash/util.py
+++ b/unleash/util.py
@@ -20,7 +20,7 @@ class VirtualEnv(object):
@property
def python(self):
- return os.path.join(self.python, 'bin', 'python')
+ return os.path.join(self.path, 'bin', 'python')
def check_output(self, *args, **kwargs):
env = {}
|
Fixed wrong construction of python path in util.py.
|
py
|
diff --git a/tests/test_for_support/test_for_validation.py b/tests/test_for_support/test_for_validation.py
index <HASH>..<HASH> 100644
--- a/tests/test_for_support/test_for_validation.py
+++ b/tests/test_for_support/test_for_validation.py
@@ -31,7 +31,7 @@ sbml_invalid = join(dirname(__file__), "data", "validation", "tiny_FBC2.xml")
@pytest.mark.parametrize("filename, expected", [
- (sbml_valid, [0, 1, False]),
+ (sbml_valid, [0, 0, False]),
(sbml_invalid, [2, 0, True])])
def test_load_cobra_model(filename, expected):
notifications = {"warnings": [], "errors": []}
|
test: temporarily do not expect warnings Change due to redesign in cobrapy. This will have to be reverted in future.
|
py
|
diff --git a/pypuppetdb/api/v3.py b/pypuppetdb/api/v3.py
index <HASH>..<HASH> 100644
--- a/pypuppetdb/api/v3.py
+++ b/pypuppetdb/api/v3.py
@@ -116,8 +116,8 @@ class API(BaseAPI):
resource['type'],
resource['tags'],
resource['exported'],
- resource['sourcefile'],
- resource['sourceline'],
+ resource['file'],
+ resource['line'],
resource['parameters'],
)
|
Updated api/v3.py for APIv3 resource name changes PuppetDB APIv3 from APIv2 has 2 keys within the resource endpoint have been changed. 'sourcefile' has been changed to 'file' 'sourceline' has been changed to 'line' This patch allows pypuppetdb to read resources from APIv3. ref: <URL>
|
py
|
diff --git a/tests/render/test_data_documentation_site_builder.py b/tests/render/test_data_documentation_site_builder.py
index <HASH>..<HASH> 100644
--- a/tests/render/test_data_documentation_site_builder.py
+++ b/tests/render/test_data_documentation_site_builder.py
@@ -5,7 +5,6 @@ from great_expectations.render.renderer.site_builder import SiteBuilder
def test_cli_profile(titanic_data_context):
- print("BOOO")
titanic_data_context.profile_datasource(titanic_data_context.list_datasources()[0]["name"])
|
Removed a forgotten BOOO print :)
|
py
|
diff --git a/rejected/process.py b/rejected/process.py
index <HASH>..<HASH> 100644
--- a/rejected/process.py
+++ b/rejected/process.py
@@ -709,7 +709,7 @@ class Process(multiprocessing.Process, state.State):
exit(1)
# Setup the Sentry client
- if raven and cfg['sentry_dsn']:
+ if raven and 'sentry_dsn' in cfg:
self.sentry_client = raven.Client(cfg['sentry_dsn'])
# Setup the stats counter instance
|
Fix the sentry init to not blowup
|
py
|
diff --git a/librosa/segment.py b/librosa/segment.py
index <HASH>..<HASH> 100644
--- a/librosa/segment.py
+++ b/librosa/segment.py
@@ -84,7 +84,7 @@ def recurrence_matrix(data, k=None, width=1, metric='sqeuclidean', sym=False):
or ``k = 2`` if ``t <= 2 * width + 1``
- width : int > 0
only link neighbors ``(data[:, i], data[:, j])`` if ``|i-j| >= width``
- - metric : see ``scipy.spatial.distance.pdist()``
+ - metric : see ``scipy.spatial.distance.cdist()``
distance metric to use for nearest-neighbor calculation
- sym : bool
set ``sym=True`` to only link mutual nearest-neighbors
@@ -114,8 +114,7 @@ def recurrence_matrix(data, k=None, width=1, metric='sqeuclidean', sym=False):
return band
# Build the distance matrix
- D = scipy.spatial.distance.squareform(
- scipy.spatial.distance.pdist(data.T, metric=metric))
+ D = scipy.spatial.distance.cdist(data.T, data.T, metric=metric)
# Max out the diagonal band
D = D + _band_infinite()
|
pdist -> cdist
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -16,20 +16,17 @@
#
import os
-
import f5
-from pip.req import parse_requirements as parse_reqs
from setuptools import find_packages
from setuptools import setup
if 'rpm' not in os.getcwd():
- install_requires = map(lambda x: str(x.req),
- parse_reqs('./setup_requirements.txt',
- session='setup'))
+ with open('setup_requirements.txt') as fh:
+ required = [x for x in fh.read().splitlines() if not x.startswith('#')]
else:
- install_requires = []
-print('install_requires', install_requires)
+ required = []
+
setup(
name='f5-sdk',
description='F5 Networks Python SDK',
@@ -39,7 +36,7 @@ setup(
author_email='f5_common_python@f5.com',
url='https://github.com/F5Networks/f5-common-python',
keywords=['F5', 'sdk', 'api', 'icontrol', 'bigip', 'api', 'ltm'],
- install_requires=install_requires,
+ install_requires=required,
packages=find_packages(
exclude=["*.test", "*.test.*", "test.*", "test_*", "test", "test*"]
),
|
Fixes pip installs for python 3 Issues: Fixes #<I> Problem: The means by which dependencies was being determined was not working in python 3. The pip API is not public, so we shouldn't be using it as we were Analysis: This changes the means of looking up requirements to be similar, but to not use the interal pip APIs to prevent breakage Tests:
|
py
|
diff --git a/pyipmi/fru.py b/pyipmi/fru.py
index <HASH>..<HASH> 100644
--- a/pyipmi/fru.py
+++ b/pyipmi/fru.py
@@ -356,6 +356,7 @@ class FruInventory(object):
self.chassis_info_area = None
self.board_info_area = None
self.product_info_area = None
+ self.multirecord_area = None
if data:
self._from_data(data)
|
fru: initilized self.multirecord_area Was not initialized to None.
|
py
|
diff --git a/pyemu/pst/pst_utils.py b/pyemu/pst/pst_utils.py
index <HASH>..<HASH> 100644
--- a/pyemu/pst/pst_utils.py
+++ b/pyemu/pst/pst_utils.py
@@ -1161,7 +1161,10 @@ def csv_to_ins_file(
else:
nprefix = prefix
if longnames:
- nname = f"{nprefix}_usecol:{clabel}"
+ if nprefix != "":
+ nname = f"{nprefix}_usecol:{clabel}"
+ else:
+ nname = f"usecol:{clabel}"
oname = f"{nname}_{rlabel}"
else:
nname = nprefix + clabel.replace(" ", "").replace("_", "")
|
small fix in csv to ins to avoid leading underscore with prefix is empty
|
py
|
diff --git a/blogit.py b/blogit.py
index <HASH>..<HASH> 100755
--- a/blogit.py
+++ b/blogit.py
@@ -253,7 +253,7 @@ class Entry(object):
body = list()
for line in file.readlines():
body.append(line)
- self.body = ' '.join(body)
+ self.body = ''.join(body)
file.close()
if self.kind == 'link':
|
lines should be joined with "" not with " "
|
py
|
diff --git a/salt/scripts.py b/salt/scripts.py
index <HASH>..<HASH> 100644
--- a/salt/scripts.py
+++ b/salt/scripts.py
@@ -107,6 +107,11 @@ def salt_minion():
if '' in sys.path:
sys.path.remove('')
+ if salt.utils.is_windows():
+ minion = salt.cli.daemons.Minion()
+ minion.start()
+ return
+
if '--disable-keepalive' in sys.argv:
sys.argv.remove('--disable-keepalive')
minion = salt.cli.daemons.Minion()
|
Don't merge-forward the change to salt/scripts.py
|
py
|
diff --git a/slackclient/__init__.py b/slackclient/__init__.py
index <HASH>..<HASH> 100644
--- a/slackclient/__init__.py
+++ b/slackclient/__init__.py
@@ -1 +1 @@
-from _client import SlackClient
+from ._client import SlackClient
|
use abs ref to _client
|
py
|
diff --git a/qiskit/pulse/builder.py b/qiskit/pulse/builder.py
index <HASH>..<HASH> 100644
--- a/qiskit/pulse/builder.py
+++ b/qiskit/pulse/builder.py
@@ -1063,7 +1063,7 @@ def inline() -> ContextManager[None]:
@_transform_context(transforms.pad, inplace=True)
def pad(*chs: chans.Channel) -> ContextManager[None]: # pylint: disable=unused-argument
- """Pad all availale timeslots with delays upon exiting context.
+ """Pad all available timeslots with delays upon exiting context.
Args:
chs: Channels to pad with delays. Defaults to all channels in context
@@ -1219,9 +1219,9 @@ def frequency_offset(frequency: float,
Args:
frequency: Amount of frequency offset in Hz.
- channels: Channels to offset phase of.
- compensate_phase: Compensate for accumulated phase in accumulated with
- respect to the channels frame at its initial frequency.
+ channels: Channels to offset frequency of.
+ compensate_phase: Compensate for accumulated phase accumulated with
+ respect to the channels' frame at its initial frequency.
Yields:
None
|
Pulse builder interface: fix spelling (#<I>)
|
py
|
diff --git a/oedialect/compiler.py b/oedialect/compiler.py
index <HASH>..<HASH> 100644
--- a/oedialect/compiler.py
+++ b/oedialect/compiler.py
@@ -243,7 +243,7 @@ class OECompiler(postgresql.psycopg2.PGCompiler):
for c in clauselist.clauses)
if s]
- if clauselist.operator is not None:
+ if clauselist.operator == operators.and_ or clauselist.operator == operators.or_:
sep = OPERATORS[clauselist.operator]
clauses = {"type": "operator",
"operator": sep,
|
Use clauselists only for boolean operators
|
py
|
diff --git a/invenio_files_rest/ext.py b/invenio_files_rest/ext.py
index <HASH>..<HASH> 100644
--- a/invenio_files_rest/ext.py
+++ b/invenio_files_rest/ext.py
@@ -45,7 +45,7 @@ class _FilesRESTState(object):
"""Load default storage factory."""
imp = self.app.config.get("FILES_REST_RECORD_FILE_FACTORY")
if imp:
- import_string(imp)
+ return import_string(imp)
else:
try:
get_distribution('invenio-records-files')
|
ext: missing return * FIX Fixes missing return statement in `_FilesRESTState.record_file_factory()`.
|
py
|
diff --git a/SoftLayer/CLI/image/import.py b/SoftLayer/CLI/image/import.py
index <HASH>..<HASH> 100644
--- a/SoftLayer/CLI/image/import.py
+++ b/SoftLayer/CLI/image/import.py
@@ -22,9 +22,10 @@ from SoftLayer.CLI import formatting
@click.option('--ibm-api-key',
default="",
help="The IBM Cloud API Key with access to IBM Cloud Object "
- "Storage instance. For help creating this key see "
- "https://console.bluemix.net/docs/services/cloud-object-"
- "storage/iam/users-serviceids.html#serviceidapikeys")
+ "Storage instance and IBM KeyProtect instance. For help "
+ "creating this key see https://console.bluemix.net/docs/"
+ "services/cloud-object-storage/iam/users-serviceids.html"
+ "#serviceidapikeys")
@click.option('--root-key-id',
default="",
help="ID of the root key in Key Protect")
|
Add KeyProtect instance in help text
|
py
|
diff --git a/deezer/client.py b/deezer/client.py
index <HASH>..<HASH> 100644
--- a/deezer/client.py
+++ b/deezer/client.py
@@ -188,9 +188,7 @@ class Client:
json = response.json()
if "error" in json:
raise ValueError(
- "API request return error for object: {} id: {}".format(
- object_t, object_id
- )
+ f"API request return error for object: {object_t} id: {object_id}"
)
return self._process_json(json, parent)
@@ -326,8 +324,10 @@ class Client:
:returns: a list of :class:`~deezer.resources.Resource` objects.
>>> client.advanced_search({"artist": "Daft Punk", "album": "Homework"})
- >>> client.advanced_search({"artist": "Daft Punk", "album": "Homework"},
- ... relation="track")
+ >>> client.advanced_search(
+ ... {"artist": "Daft Punk", "album": "Homework"},
+ ... relation="track",
+ ... )
"""
if not isinstance(terms, dict):
raise TypeError("terms must be a dict")
|
refactor: convert string formatting to f-strings
|
py
|
diff --git a/ryu/ofproto/ofproto_v1_2_parser.py b/ryu/ofproto/ofproto_v1_2_parser.py
index <HASH>..<HASH> 100644
--- a/ryu/ofproto/ofproto_v1_2_parser.py
+++ b/ryu/ofproto/ofproto_v1_2_parser.py
@@ -2220,7 +2220,7 @@ class OFPMatchField(StringifyMixin):
def _put_header(self, buf, offset):
ofproto_parser.msg_pack_into('!I', buf, offset, self.header)
- self.length += 4
+ self.length = 4
def _put(self, buf, offset, value):
ofproto_parser.msg_pack_into(self.pack_str, buf, offset, value)
|
of<I> OFPMatchField: make this safe to serialize multiple times this will be used by OFPMatch old api compat code.
|
py
|
diff --git a/pyexchange/connection.py b/pyexchange/connection.py
index <HASH>..<HASH> 100644
--- a/pyexchange/connection.py
+++ b/pyexchange/connection.py
@@ -5,6 +5,7 @@ Licensed under the Apache License, Version 2.0 (the "License");?you may not use
Unless required by applicable law or agreed to in writing, software?distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
"""
import logging
+import sys
from ntlm import HTTPNtlmAuthHandler
try:
@@ -81,8 +82,12 @@ class ExchangeNTLMAuthConnection(ExchangeBaseConnection):
# lxml tostring returns str in Python 2, and bytes in python 3
# if XML is actually unicode, urllib2 will barf.
# Oddly enough this only seems to be a problem in 2.7. 2.6 doesn't seem to care.
- if isinstance(body, str):
- body = body.decode(encoding)
+ if sys.version_info < (3, 0):
+ if isinstance(body, unicode):
+ body = body.encode(encoding)
+ else:
+ if isinstance(body, str):
+ body = body.encode(encoding)
request = urllib2.Request(self.url, body)
|
Use only byte strings when generating Urllib2 Requests - Urllib2 requests should always be passed byte strings as recommended by <URL>
|
py
|
diff --git a/salt/states/file.py b/salt/states/file.py
index <HASH>..<HASH> 100644
--- a/salt/states/file.py
+++ b/salt/states/file.py
@@ -3184,7 +3184,7 @@ def recurse(name,
if clean:
# TODO: Use directory(clean=True) instead
- keep += _gen_keep_files(name, require)
+ keep.update(_gen_keep_files(name, require))
removed = _clean_dir(name, list(keep), exclude_pat)
if removed:
if __opts__['test']:
|
Fix test failure - change "set += list" to "set.update(list)"
|
py
|
diff --git a/aioxmpp/stream.py b/aioxmpp/stream.py
index <HASH>..<HASH> 100644
--- a/aioxmpp/stream.py
+++ b/aioxmpp/stream.py
@@ -11,6 +11,17 @@ possible.
.. autoclass:: StanzaStream
+Low-level stanza tracking
+=========================
+
+The following classes are used to track stanzas in the XML stream to the
+server. This is independent of things like `XEP-0184 Message Delivery
+Receipts`__; it only provides tracking to the remote server and even that only
+if stream management is used. Otherwise, it only provides tracking in the
+:mod:`aioxmpp` internal queues.
+
+__ http://xmpp.org/extensions/xep-0184.html
+
.. autoclass:: StanzaToken
.. autoclass:: StanzaState
|
Put StanzaToken and StanzaState in their own section
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -24,7 +24,7 @@ elif sys.version_info[0] == 3:
INSTALL_REQUIRES = []
setup(name = 'pytimeparse',
- version = '1.1.0',
+ version = '1.1.1',
description = 'Time expression parser',
author = 'Will Roberts',
author_email = 'wildwilhelm@gmail.com',
|
setup: version <I> with better setuptools support
|
py
|
diff --git a/marshmallow/marshalling.py b/marshmallow/marshalling.py
index <HASH>..<HASH> 100644
--- a/marshmallow/marshalling.py
+++ b/marshmallow/marshalling.py
@@ -14,10 +14,8 @@ import collections
from marshmallow.utils import (
EXCLUDE, INCLUDE, RAISE, is_collection, missing, set_value,
)
-from marshmallow.compat import iteritems, basestring
-from marshmallow.exceptions import (
- ValidationError,
-)
+from marshmallow.compat import iteritems
+from marshmallow.exceptions import ValidationError
__all__ = [
'Marshaller',
@@ -56,8 +54,6 @@ class ErrorStore(object):
errors.setdefault(field_name, []).extend(messages)
def store_validation_error(self, field_names, error, index=None):
- if isinstance(field_names, basestring):
- field_names = (field_names, )
self.error_kwargs.update(error.kwargs)
for field_name in field_names:
self.store_error(field_name, error.messages, index=index)
@@ -78,7 +74,7 @@ class ErrorStore(object):
try:
value = getter_func(data)
except ValidationError as error:
- return self.store_validation_error(field_name, error, index)
+ return self.store_validation_error((field_name,), error, index)
return value
|
Expect field_names to be a collection in store_validation_error
|
py
|
diff --git a/flask_avatars/__init__.py b/flask_avatars/__init__.py
index <HASH>..<HASH> 100644
--- a/flask_avatars/__init__.py
+++ b/flask_avatars/__init__.py
@@ -325,3 +325,19 @@ class Avatars(object):
avatar_l.save(path_l, optimize=True, quality=85)
return [filename_s, filename_m, filename_l]
+
+ @staticmethod
+ def gravatar(**kwargs):
+ return _Avatars.gravatar(**kwargs)
+
+ @staticmethod
+ def robohash(**kwargs):
+ return _Avatars.robohash(**kwargs)
+
+ @staticmethod
+ def social_media(**kwargs):
+ return _Avatars.social_media(**kwargs)
+
+ @staticmethod
+ def default(**kwargs):
+ return _Avatars.default(**kwargs)
|
Add mirror methods for Avatars class
|
py
|
diff --git a/pyvalid/__accepts.py b/pyvalid/__accepts.py
index <HASH>..<HASH> 100644
--- a/pyvalid/__accepts.py
+++ b/pyvalid/__accepts.py
@@ -1,6 +1,10 @@
from collections import Callable
-import inspect
import functools
+import sys
+if sys.version_info < (3, 0, 0):
+ from inspect import getargspec
+else:
+ from inspect import getfullargspec as getargspec
from pyvalid.__exceptions import InvalidArgumentNumberError, \
ArgumentValidationError
@@ -21,10 +25,10 @@ class Accepts(Callable):
def decorator_wrapper(*func_args, **func_kwargs):
if self.accepted_arg_values:
# Forget all information about function arguments.
- self.accepted_args.clear()
- self.optional_args.clear()
+ self.accepted_args[:] = list()
+ self.optional_args[:] = list()
# Collect information about fresh arguments.
- args_info = inspect.getfullargspec(func)
+ args_info = getargspec(func)
self.__scan_func(args_info)
# Validate function arguments.
self.__validate_args(func.__name__, func_args, func_kwargs)
|
Added support for Python versions <I> and <I>
|
py
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.