diff
stringlengths 139
3.65k
| message
stringlengths 8
627
| diff_languages
stringclasses 1
value |
|---|---|---|
diff --git a/glfw.py b/glfw.py
index <HASH>..<HASH> 100644
--- a/glfw.py
+++ b/glfw.py
@@ -806,7 +806,8 @@ def destroy_window(window):
window_addr = ctypes.cast(ctypes.pointer(window),
ctypes.POINTER(ctypes.c_ulong)).contents.value
for callback_repository in _callback_repositories:
- del callback_repository[window_addr]
+ if window_addr in callback_repository:
+ del callback_repository[window_addr]
_glfw.glfwWindowShouldClose.restype = ctypes.c_int
_glfw.glfwWindowShouldClose.argtypes = [ctypes.POINTER(_GLFWwindow)]
|
fix KeyError in destroy_window
|
py
|
diff --git a/ue4cli/UnrealManagerBase.py b/ue4cli/UnrealManagerBase.py
index <HASH>..<HASH> 100644
--- a/ue4cli/UnrealManagerBase.py
+++ b/ue4cli/UnrealManagerBase.py
@@ -262,6 +262,10 @@ class UnrealManagerBase(object):
Builds the editor for the Unreal project in the specified directory, using the specified build configuration
"""
+ # If the project is a pure Blueprint project, there is no C++ code to build
+ if os.path.exists(os.path.join(dir, 'Source')) == False:
+ Utility.printStderr('Pure Blueprint project, nothing to build.')
+
# Verify that the specified build configuration is valid
if configuration not in self.validBuildConfigurations():
raise UnrealManagerException('invalid build configuration "' + configuration + '"')
|
Make build a no-op for pure Blueprint projects
|
py
|
diff --git a/napalm_junos/__init__.py b/napalm_junos/__init__.py
index <HASH>..<HASH> 100644
--- a/napalm_junos/__init__.py
+++ b/napalm_junos/__init__.py
@@ -0,0 +1,16 @@
+# Copyright 2016 Dravetech AB. All rights reserved.
+#
+# The contents of this file are licensed under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with the
+# License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+
+"""napalm_iosxr package."""
+from junos import JunOSDriver
|
Added driver to the basic module so it can be discovered
|
py
|
diff --git a/GPy/plotting/gpy_plot/latent_plots.py b/GPy/plotting/gpy_plot/latent_plots.py
index <HASH>..<HASH> 100644
--- a/GPy/plotting/gpy_plot/latent_plots.py
+++ b/GPy/plotting/gpy_plot/latent_plots.py
@@ -256,7 +256,7 @@ def plot_latent(self, labels=None, which_indices=None,
xlabel='latent dimension %i' % input_1, ylabel='latent dimension %i' % input_2, **imshow_kwargs)
if (labels is not None) and legend:
legend = find_best_layout_for_subplots(len(np.unique(labels)))[1]
- else:
+ elif legend:
labels = np.ones(self.num_data)
legend = False
scatters = _plot_latent_scatter(canvas, X, which_indices, labels, marker, num_samples, projection='2d', **scatter_kwargs or {})
|
[latent plots] legend was always plotted
|
py
|
diff --git a/apio/__init__.py b/apio/__init__.py
index <HASH>..<HASH> 100644
--- a/apio/__init__.py
+++ b/apio/__init__.py
@@ -4,7 +4,7 @@
# -- Author Jesús Arroyo
# -- Licence GPLv2
-VERSION = (0, 3, '0b1')
+VERSION = (0, 3, '0b2')
__version__ = ".".join([str(s) for s in VERSION])
__title__ = 'apio'
|
Bump to version <I>b2
|
py
|
diff --git a/source/awesome_tool/statemachine/state_machine_manager.py b/source/awesome_tool/statemachine/state_machine_manager.py
index <HASH>..<HASH> 100644
--- a/source/awesome_tool/statemachine/state_machine_manager.py
+++ b/source/awesome_tool/statemachine/state_machine_manager.py
@@ -173,6 +173,6 @@ class StateMachineManager(ModelMT, Observable):
raise AttributeError("State machine not in list of all state machines")
self._active_state_machine_id = state_machine_id
active_state_machine = self.get_active_state_machine()
- from awesome_tool.network.singleton import network_connections
- network_connections.set_storage_base_path(active_state_machine.base_path)
-
+ if active_state_machine.base_path:
+ from awesome_tool.network.singleton import network_connections
+ network_connections.set_storage_base_path(active_state_machine.base_path)
\ No newline at end of file
|
Fix bug loading state machine without base_path
|
py
|
diff --git a/src/you_get/extractors/google.py b/src/you_get/extractors/google.py
index <HASH>..<HASH> 100644
--- a/src/you_get/extractors/google.py
+++ b/src/you_get/extractors/google.py
@@ -61,7 +61,7 @@ def google_download(url, output_dir = '.', merge = True, info_only = False, **kw
if not real_urls:
real_urls = [r1(r'<meta property="og:image" content="([^"]+)', html)]
real_urls = [re.sub(r'w\d+-h\d+-p', 's0', u) for u in real_urls]
- post_date = r1(r'"?(20\d\d-[01]\d-[0123]\d)"?', html)
+ post_date = r1(r'"?(20\d\d[-/][01]\d[-/][0123]\d)"?', html)
post_id = r1(r'/posts/([^"]+)', html)
title = post_date + "_" + post_id
|
[google+] fix more
|
py
|
diff --git a/openquake/hazard/opensha.py b/openquake/hazard/opensha.py
index <HASH>..<HASH> 100644
--- a/openquake/hazard/opensha.py
+++ b/openquake/hazard/opensha.py
@@ -398,10 +398,10 @@ class ClassicalMixin(BasePSHAMixin):
results = self.do_curves(
site_list, serializer=self.write_hazardcurve_file)
self.do_means(site_list, curve_serializer=self.write_hazardcurve_file,
- map_serializer=self.write_hazardmap_file)
+ map_serializer=self.serialize_hazardmap)
self.do_quantiles(
site_list, curve_serializer=self.write_hazardcurve_file,
- map_serializer=self.write_hazardmap_file)
+ map_serializer=self.serialize_hazardmap)
return results
@@ -522,9 +522,11 @@ class ClassicalMixin(BasePSHAMixin):
xmlwriter.serialize(hc_data)
return nrml_path
- def write_hazardmap_file(self, map_keys):
- """Generate a NRML file with a hazard map for a collection of
- hazard map nodes from KVS, identified through their KVS keys.
+ def serialize_hazardmap(self, map_keys):
+ """
+ Takes a collection of hazard map nodes from the KVS,
+ identified by their KVS keys, and either writes an NRML hazard
+ map file or creates a DB output record for the hazard map.
map_keys is a list of KVS keys of the hazard map nodes to be
serialized.
|
Rename write_hazardmap_file to serialize_hazardmap and fix the docstring.
|
py
|
diff --git a/ovirtlago/testlib.py b/ovirtlago/testlib.py
index <HASH>..<HASH> 100644
--- a/ovirtlago/testlib.py
+++ b/ovirtlago/testlib.py
@@ -46,6 +46,13 @@ def get_test_prefix():
return _test_prefix
+def get_prefixed_name(entity_name):
+ suite = os.environ.get('SUITE')
+ return (
+ 'lago-' + os.path.basename(suite).replace('.', '-') + '-' + entity_name
+ )
+
+
def with_ovirt_prefix(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
|
Moving over get_prefixed_name() to ovirtlago/testlib This is more useful than having it copied around in multiple tests. Just a handy function. After it'll be included in a released Lago, will convert ovirt-system-tests to use it.
|
py
|
diff --git a/geomet/wkb.py b/geomet/wkb.py
index <HASH>..<HASH> 100644
--- a/geomet/wkb.py
+++ b/geomet/wkb.py
@@ -124,7 +124,6 @@ def dumps(obj, big_endian=True, dims='2D'):
if dims == '2D':
num_dims = 2
- mapping = WKB_2D
elif dims in ('Z', 'M'):
num_dims = 3
elif dims == 'ZM':
|
wkb: Removed a redundant line of code.
|
py
|
diff --git a/graphistry/plotter.py b/graphistry/plotter.py
index <HASH>..<HASH> 100644
--- a/graphistry/plotter.py
+++ b/graphistry/plotter.py
@@ -71,6 +71,11 @@ class Plotter(object):
else:
g = graph
n = self.nodes if nodes is None else nodes
+
+ if self.source is None or self.destination is None:
+ raise ValueError('Source/destination must be bound before plotting.')
+ if n is not None and self.node is None:
+ raise ValueError('Node identifier must be bound when using node dataframe')
dataset = self._plot_dispatch(g, n)
if dataset is None:
raise TypeError('Expected Pandas dataframe or Igraph graph')
|
NewAPI: Check that at least source/dest are bound before attempting to plot
|
py
|
diff --git a/dwave/system/samplers/dwave_sampler.py b/dwave/system/samplers/dwave_sampler.py
index <HASH>..<HASH> 100644
--- a/dwave/system/samplers/dwave_sampler.py
+++ b/dwave/system/samplers/dwave_sampler.py
@@ -347,7 +347,8 @@ class DWaveSampler(dimod.Sampler, dimod.Structured):
if not (solver.nodes.issuperset(bqm.linear) and
solver.edges.issuperset(bqm.quadratic)):
- msg = "Problem graph incompatible with solver."
+ msg = ("Problem graph incompatible with solver. Please use 'EmbeddingComposite' "
+ "to map the problem graph to the solver.")
raise BinaryQuadraticModelStructureError(msg)
future = solver.sample_bqm(bqm, **kwargs)
@@ -376,7 +377,8 @@ class DWaveSampler(dimod.Sampler, dimod.Structured):
# on missing nodes for lists
if isinstance(h, list):
if len(h) > self.solver.num_qubits:
- msg = "Problem graph incompatible with solver."
+ msg = ("Problem graph incompatible with solver. Please use 'EmbeddingComposite' "
+ "to map the problem graph to the solver.")
raise BinaryQuadraticModelStructureError(msg)
nodes = self.solver.nodes
h = dict((v, b) for v, b in enumerate(h) if b and v in nodes)
|
Suggest using `EmbeddingComposite` when the hardware graph is incompatible (#<I>) * Error message * Update * Updated error message in sample method * Minor correction
|
py
|
diff --git a/hamster/stuff.py b/hamster/stuff.py
index <HASH>..<HASH> 100644
--- a/hamster/stuff.py
+++ b/hamster/stuff.py
@@ -135,7 +135,7 @@ def format_activity(name, category, description, pad_description = False):
if description:
text+= "\n"
if pad_description:
- text += " "
+ text += " " * 23
text += """<span style="italic" size="small">%s</span>""" % description
|
padding description some more since we have now also end time. this is somewhat lame though
|
py
|
diff --git a/welly/project.py b/welly/project.py
index <HASH>..<HASH> 100644
--- a/welly/project.py
+++ b/welly/project.py
@@ -327,7 +327,7 @@ class Project(object):
num_passes = sum(results)
q = num_passes / num_tests
q_colour = q_colours.get(q, '#FFCC33')
- c_mean = '{:.2f}'.format(float(np.nanmean(c))) if np.any(c[~np.isnan(c)]) else np.nan
+ c_mean = '{:.2f}'.format(float(np.nanmean(c.df.values))) if np.any(c.df.values[~np.isnan(c.df.values)]) else np.nan
curves.append(('#CCEECC', c.mnemonic, f"{num_passes}/{num_tests}", q_colour, c_mean, c.units))
q_total += num_passes
q_count += num_tests
|
fix test_project() by adapting project.curve_table_html() to new curve object
|
py
|
diff --git a/graphos/renderers/matplotlib_renderer.py b/graphos/renderers/matplotlib_renderer.py
index <HASH>..<HASH> 100644
--- a/graphos/renderers/matplotlib_renderer.py
+++ b/graphos/renderers/matplotlib_renderer.py
@@ -7,6 +7,9 @@ import base64
class BaseMatplotlibChart(BaseChart):
+ def get_template(self):
+ return "graphos/matplotlib_renderer/line_chart.html"
+
def get_serieses(self):
data_only = self.get_data()[1:]
serieses = []
@@ -17,8 +20,6 @@ class BaseMatplotlibChart(BaseChart):
class LineChart(BaseMatplotlibChart):
- def get_template(self):
- return "graphos/matplotlib_renderer/line_chart.html"
def get_image(self):
import matplotlib.pyplot as plt
|
moved get_template method to base class
|
py
|
diff --git a/alignak/daemons/arbiterdaemon.py b/alignak/daemons/arbiterdaemon.py
index <HASH>..<HASH> 100644
--- a/alignak/daemons/arbiterdaemon.py
+++ b/alignak/daemons/arbiterdaemon.py
@@ -1185,7 +1185,7 @@ class Arbiter(Daemon): # pylint: disable=too-many-instance-attributes
if alive:
# Kill processes
for process in alive:
- logger.warning("Process %s did not stopped, trying to kill", process.name())
+ logger.info("Process %s did not stopped, trying to kill", process.name())
process.kill()
_, alive = psutil.wait_procs(alive, timeout=timeout, callback=on_terminate)
if alive:
|
Change a log from warning to info level
|
py
|
diff --git a/scripts/find_enrichment.py b/scripts/find_enrichment.py
index <HASH>..<HASH> 100755
--- a/scripts/find_enrichment.py
+++ b/scripts/find_enrichment.py
@@ -121,9 +121,9 @@ if __name__ == "__main__":
if 0.7 < overlap < 0.95:
sys.stderr.write("\nWARNING: only {} fraction of genes/proteins in study are found in "
"the population background.\n\n".format(overlap))
- if overlap <= 0.7:
- exit("\nERROR: only {} of genes/proteins in the study are found in the "
- "background population. Please check.\n".format(overlap))
+ #if overlap <= 0.7:
+ # exit("\nERROR: only {} of genes/proteins in the study are found in the "
+ # "background population. Please check.\n".format(overlap))
assoc = read_associations(assoc_fn)
|
Remove obsolete runtime option from test code.
|
py
|
diff --git a/eventcore/consumer.py b/eventcore/consumer.py
index <HASH>..<HASH> 100644
--- a/eventcore/consumer.py
+++ b/eventcore/consumer.py
@@ -38,8 +38,8 @@ class Consumer(metaclass=abc.ABCMeta): # noqa
"""
method_mapping = Registry.get_event(name)
if not method_mapping:
- log.warning('@{}.process_event no subscriber for event `{}`'
- .format(self.__class__.__name__, name))
+ log.info('@{}.process_event no subscriber for event `{}`'
+ .format(self.__class__.__name__, name))
return
for event, methods in method_mapping.items():
event_instance = event(subject, data)
|
Change logger level on no subscribers for event
|
py
|
diff --git a/tools/interop_matrix/client_matrix.py b/tools/interop_matrix/client_matrix.py
index <HASH>..<HASH> 100644
--- a/tools/interop_matrix/client_matrix.py
+++ b/tools/interop_matrix/client_matrix.py
@@ -192,6 +192,7 @@ LANG_RELEASE_MATRIX = {
('v1.38.1', ReleaseInfo(runtimes=['go1.16'])),
('v1.39.1', ReleaseInfo(runtimes=['go1.16'])),
('v1.40.0', ReleaseInfo(runtimes=['go1.16'])),
+ ('v1.41.0', ReleaseInfo(runtimes=['go1.16'])),
]),
'java':
OrderedDict([
|
Add <I> release of grpc-go to interop matrix (#<I>)
|
py
|
diff --git a/astrobase/lcproc.py b/astrobase/lcproc.py
index <HASH>..<HASH> 100644
--- a/astrobase/lcproc.py
+++ b/astrobase/lcproc.py
@@ -629,7 +629,7 @@ def getlclist(listpickle,
ext_cosdecl = np.cos(np.radians(extcat['decl']))
ext_sindecl = np.sin(np.radians(extcat['decl']))
ext_cosra = np.cos(np.radians(extcat['ra']))
- ext_sinra = np.sin(np.radians(extcat['decl']))
+ ext_sinra = np.sin(np.radians(extcat['ra']))
ext_xyz = np.column_stack((ext_cosra*ext_cosdecl,
ext_sinra*ext_cosdecl,
|
lcproc.getlclist fix bugs with xmatchexternal
|
py
|
diff --git a/distutils/_msvccompiler.py b/distutils/_msvccompiler.py
index <HASH>..<HASH> 100644
--- a/distutils/_msvccompiler.py
+++ b/distutils/_msvccompiler.py
@@ -328,23 +328,6 @@ class MSVCCompiler(CCompiler):
},
}
- def _make_out_path(self, output_dir, strip_dir, src_name):
- base, ext = os.path.splitext(src_name)
- if strip_dir:
- base = os.path.basename(base)
- else:
- base = self._make_relative(base)
- try:
- # XXX: This may produce absurdly long paths. We should check
- # the length of the result and trim base until we fit within
- # 260 characters.
- return os.path.join(output_dir, base + self.out_extensions[ext])
- except LookupError:
- # Better to raise an exception instead of silently continuing
- # and later complain about sources and targets having
- # different lengths
- raise CompileError(f"Don't know how to compile {src_name}")
-
def compile( # noqa: C901
self,
sources,
|
Remove _msvccompiler._make_out_path. The function is almost entirely redundant to the implementation in the parent class. The only difference is the order in which `strip_dir` is processed, but even there, it doesn't appear as if `strip_dir` is ever True, so take the risk and consolidate behavior.
|
py
|
diff --git a/aiogram/utils/exceptions.py b/aiogram/utils/exceptions.py
index <HASH>..<HASH> 100644
--- a/aiogram/utils/exceptions.py
+++ b/aiogram/utils/exceptions.py
@@ -7,6 +7,7 @@
- MessageNotModified
- MessageToForwardNotFound
- MessageToDeleteNotFound
+ - MessageToPinNotFound
- MessageIdentifierNotSpecified
- MessageTextIsEmpty
- MessageCantBeEdited
@@ -182,6 +183,13 @@ class MessageToDeleteNotFound(MessageError):
match = 'message to delete not found'
+class MessageToPinNotFound(MessageError):
+ """
+ Will be raised when you try to pin deleted or unknown message.
+ """
+ match = 'message to pin not found'
+
+
class MessageToReplyNotFound(MessageError):
"""
Will be raised when you try to reply to very old or deleted or unknown message.
|
Add exception MessageToPinNotFound (#<I>)
|
py
|
diff --git a/src/transformers/commands/add_new_model_like.py b/src/transformers/commands/add_new_model_like.py
index <HASH>..<HASH> 100644
--- a/src/transformers/commands/add_new_model_like.py
+++ b/src/transformers/commands/add_new_model_like.py
@@ -844,14 +844,24 @@ def add_model_to_main_init(
new_lines = []
framework = None
while idx < len(lines):
+ new_framework = False
if not is_empty_line(lines[idx]) and find_indent(lines[idx]) == 0:
framework = None
elif lines[idx].lstrip().startswith("if not is_torch_available"):
framework = "pt"
+ new_framework = True
elif lines[idx].lstrip().startswith("if not is_tf_available"):
framework = "tf"
+ new_framework = True
elif lines[idx].lstrip().startswith("if not is_flax_available"):
framework = "flax"
+ new_framework = True
+
+ if new_framework:
+ # For a new framework, we need to skip until the else: block to get where the imports are.
+ while lines[idx].strip() != "else:":
+ new_lines.append(lines[idx])
+ idx += 1
# Skip if we are in a framework not wanted.
if framework is not None and frameworks is not None and framework not in frameworks:
|
Fix add new model like frameworks (#<I>) * Add new model like adds only the selected frameworks object in init * Small fix
|
py
|
diff --git a/kubespawner/spawner.py b/kubespawner/spawner.py
index <HASH>..<HASH> 100644
--- a/kubespawner/spawner.py
+++ b/kubespawner/spawner.py
@@ -1602,6 +1602,7 @@ class KubeSpawner(Spawner):
@run_on_executor
def asynchronize(self, method, *args, **kwargs):
+ self.log.info(f"Asynchronously calling {method} with {args} and {kwargs}")
return method(*args, **kwargs)
@property
@@ -1834,6 +1835,7 @@ class KubeSpawner(Spawner):
pod = yield gen.maybe_future(self.modify_pod_hook(self, pod))
for i in range(retry_times):
try:
+ self.log.info(f"Attempting to create pod {pod['metadata']['name']}, with timeout {self.k8s_api_request_timeout}")
yield self.asynchronize(
self.api.create_namespaced_pod,
self.namespace,
|
Add some more logging This is probably an error at the intersection of networking and async, which makes it pretty hard to pin down
|
py
|
diff --git a/codalib/util.py b/codalib/util.py
index <HASH>..<HASH> 100644
--- a/codalib/util.py
+++ b/codalib/util.py
@@ -159,7 +159,7 @@ def xsDateTime_format(xdt):
return xdt_str + 'Z'
offset_hours = offset.days*24+offset.seconds/(60*60)
offset_minutes = (offset.seconds % (60*60))/60
- xdt_str += "{:+03d}:{:02d}".format(offset_hours, offset_minutes)
+ xdt_str += "{0:+03d}:{1:02d}".format(offset_hours, offset_minutes)
return xdt_str
|
include format field numbers for <I> and <I> compatibility
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100755
--- a/setup.py
+++ b/setup.py
@@ -16,7 +16,7 @@ with open('requirements.txt') as f:
setuptools.setup(
name="np",
- version='0.2.0',
+ version='1.0.0',
description="np = numpy++: numpy with added convenience functionality",
url='https://github.com/k7hoven/np',
author='Koos Zevenhoven',
|
Version number to <I> !!
|
py
|
diff --git a/drivers/python/rethinkdb/net.py b/drivers/python/rethinkdb/net.py
index <HASH>..<HASH> 100644
--- a/drivers/python/rethinkdb/net.py
+++ b/drivers/python/rethinkdb/net.py
@@ -316,7 +316,7 @@ class Connection(object):
except KeyboardInterrupt as err:
# When interrupted while waiting for a response cancel the outstanding
# requests by resetting this connection
- self.reconnect()
+ self.reconnect(noreply_wait=False)
raise err
# Construct response
|
fix recursion in python driver when reconnecting due to keyboard interrupt
|
py
|
diff --git a/docker/utils/utils.py b/docker/utils/utils.py
index <HASH>..<HASH> 100644
--- a/docker/utils/utils.py
+++ b/docker/utils/utils.py
@@ -36,6 +36,7 @@ def mkbuildcontext(dockerfile):
'Dockerfiles with Python 3')
else:
dfinfo.size = len(dockerfile.getvalue())
+ dockerfile.seek(0)
elif isinstance(dockerfile, io.BytesIO):
dfinfo = tarfile.TarInfo('Dockerfile')
dfinfo.size = len(dockerfile.getvalue())
|
Should be done for StringIO objects as well
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -27,7 +27,8 @@ setup(name='gbdxtools',
'gbdx-auth==0.1.2',
'Pillow==3.1.1',
'pygeoif==0.6',
- 'sympy==1.0'],
+ 'sympy==1.0',
+ 'ndg-httpsclient==0.4.0'],
setup_requires=['pytest-runner'],
tests_require=['pytest','vcrpy']
)
|
add ndg-httpsclient==<I> to setup.py so we do not get ssl errors
|
py
|
diff --git a/peppy/project.py b/peppy/project.py
index <HASH>..<HASH> 100644
--- a/peppy/project.py
+++ b/peppy/project.py
@@ -783,8 +783,11 @@ class Project(AttributeDict):
subproject, ", ".join([sp for sp in config["subprojects"]])))
_LOGGER.debug("Updating with: {}".format(subproj_updates))
self.add_entries(subproj_updates)
+ elif subproject:
+ _LOGGER.warn("Subproject {} requested but no subprojects "
+ "are defined".format(subproject))
else:
- _LOGGER.debug("No subproject")
+ _LOGGER.debug("No subproject requested")
# In looper 0.4, for simplicity the paths section was eliminated.
# For backwards compatibility, mirror the paths section into metadata.
|
message case disambiguation and severity gradation
|
py
|
diff --git a/pysnmp/entity/rfc3413/ntforg.py b/pysnmp/entity/rfc3413/ntforg.py
index <HASH>..<HASH> 100644
--- a/pysnmp/entity/rfc3413/ntforg.py
+++ b/pysnmp/entity/rfc3413/ntforg.py
@@ -55,9 +55,11 @@ class NotificationOriginator:
if statusInformation:
if origRetries == origRetryCount:
- cbFun(origSendRequestHandle,
- statusInformation['errorIndication'],
- cbCtx)
+ self._handleResponse(
+ origSendRequestHandle,
+ statusInformation['errorIndication'],
+ cbFun,
+ cbCtx)
return
# 3.3.6a
@@ -99,7 +101,15 @@ class NotificationOriginator:
return
# 3.3.6c
- cbFun(origSendRequestHandle, None, cbCtx)
+ self._handleResponse(origSendRequestHandle, None, cbFun, cbCtx)
+
+ def _handleResponse(
+ self,
+ sendRequestHandle,
+ errorIndication,
+ cbFun,
+ cbCtx):
+ cbFun(sendRequestHandle, errorIndication, cbCtx)
def sendNotification(
self,
@@ -257,6 +267,8 @@ class NotificationOriginator:
snmpEngine.transportDispatcher.jobStarted(id(self))
+ return sendPduHandle
+
# XXX
# move/group/implement config setting/retrieval at a stand-alone module
|
refactor NotificationOriginator to allow receiption method customization
|
py
|
diff --git a/ait/core/test/test_dtype.py b/ait/core/test/test_dtype.py
index <HASH>..<HASH> 100644
--- a/ait/core/test/test_dtype.py
+++ b/ait/core/test/test_dtype.py
@@ -208,6 +208,40 @@ def testget():
with nose.tools.assert_raises(ValueError):
dtype.get('U8[-42]')
+def testString():
+ dt = dtype.get("S16")
+ assert isinstance(dt, dtype.PrimitiveType)
+ assert dt.string
+ assert not dt.float
+ assert not dt.signed
+ assert dt.name == "S16"
+ assert dt.nbytes == 16
+ assert dt.nbits == (16 * 8)
+
+ dt = dtype.get("S32")
+ assert isinstance(dt, dtype.PrimitiveType)
+ assert dt.string
+ assert not dt.float
+ assert not dt.signed
+ assert dt.name == "S32"
+ assert dt.nbytes == 32
+ assert dt.nbits == (32 * 8)
+
+ ival = 1
+ errmsgs = []
+ assert not dt.validate(ival, errmsgs)
+ assert errmsgs
+
+ fval = 1.1
+ errmsgs = []
+ assert not dt.validate(fval, errmsgs)
+ assert errmsgs
+
+ sval = "1"
+ errmsgs = []
+ assert dt.validate(sval)
+ assert not errmsgs
+
if __name__ == '__main__':
nose.main()
|
Issue #<I> - Updated test_dtype to include regression tests for type string
|
py
|
diff --git a/estnltk/taggers/tagger.py b/estnltk/taggers/tagger.py
index <HASH>..<HASH> 100644
--- a/estnltk/taggers/tagger.py
+++ b/estnltk/taggers/tagger.py
@@ -33,6 +33,8 @@ class Tagger:
def make_layer(self, raw_text: str, layers: MutableMapping[str, Layer], status: dict = None) -> Layer:
assert status is None or isinstance(status, dict), 'status should be None or dict, not ' + str(type(status))
+ if status is None:
+ status = {}
layer = self._make_layer(raw_text, layers, status)
assert isinstance(layer, Layer), 'make_layer must return Layer'
assert layer.name == self.output_layer, 'incorrect layer name: {} != {}'.format(layer.name, self.output_layer)
|
revert last change of Tagger status
|
py
|
diff --git a/BAC0/core/io/Read.py b/BAC0/core/io/Read.py
index <HASH>..<HASH> 100644
--- a/BAC0/core/io/Read.py
+++ b/BAC0/core/io/Read.py
@@ -581,13 +581,14 @@ def find_reason(apdu):
reasons = AbortReason.enumerations
else:
if apdu.errorCode and apdu.errorClass:
- return '{}'.format(apdu.errorCode)
+ return "{}".format(apdu.errorCode)
else:
- raise ValueError('Cannot find reason...')
+ raise ValueError("Cannot find reason...")
code = apdu.apduAbortRejectReason
try:
return [k for k, v in reasons.items() if v == code][0]
except IndexError:
return code
- except KeyError:
- return type(apdu)
+ except KeyError as err:
+ return "KeyError: %s has no key %r" % (type(apdu), err.args[0])
+
|
Better error message as suggested by Master Bender
|
py
|
diff --git a/mautrix/bridge/portal.py b/mautrix/bridge/portal.py
index <HASH>..<HASH> 100644
--- a/mautrix/bridge/portal.py
+++ b/mautrix/bridge/portal.py
@@ -429,7 +429,10 @@ class BasePortal(ABC):
) -> EventID:
if self.encrypted and self.matrix.e2ee:
event_type, content = await self.matrix.e2ee.encrypt(self.mxid, event_type, content)
- return await intent.send_message_event(self.mxid, event_type, content, **kwargs)
+ event_id = await intent.send_message_event(self.mxid, event_type, content, **kwargs)
+ if intent.api.is_real_user:
+ asyncio.create_task(intent.mark_read(self.mxid, event_id))
+ return event_id
@property
@abstractmethod
|
portal: automatically mark read after sending message
|
py
|
diff --git a/src/feat/models/applicationjson.py b/src/feat/models/applicationjson.py
index <HASH>..<HASH> 100644
--- a/src/feat/models/applicationjson.py
+++ b/src/feat/models/applicationjson.py
@@ -202,7 +202,7 @@ def read_action(doc, *args, **kwargs):
return ActionPayload()
params = json.loads(data)
if not isinstance(params, dict):
- raise ValueError("Invalid action parameters")
+ return ActionPayload(value=params)
return ActionPayload(params)
|
Lets action payload to be only a single value.
|
py
|
diff --git a/src/ossos-pipeline/ossos/fitsviewer/displayable.py b/src/ossos-pipeline/ossos/fitsviewer/displayable.py
index <HASH>..<HASH> 100644
--- a/src/ossos-pipeline/ossos/fitsviewer/displayable.py
+++ b/src/ossos-pipeline/ossos/fitsviewer/displayable.py
@@ -294,8 +294,8 @@ class ErrEllipse(object):
"""
self.center = (x_cen, y_cen)
- self.a = a
- self.b = b
+ self.a = max(a, 10)
+ self.b = max(b, 10)
self.pa = pa
self.artist = Ellipse(self.center, self.a, self.b, self.pa, edgecolor='b', facecolor='g', alpha=0.2)
|
made the ellipse have a minimum size of <I> pixel a/b
|
py
|
diff --git a/_pytest/unittest.py b/_pytest/unittest.py
index <HASH>..<HASH> 100644
--- a/_pytest/unittest.py
+++ b/_pytest/unittest.py
@@ -134,6 +134,7 @@ class TestCaseFunction(pytest.Function):
pass
def runtest(self):
+ setattr(self._testcase, "__name__", self.name)
self._testcase(result=self)
def _prunetraceback(self, excinfo):
|
Fixes #<I> The unittest.skip decorator runs @functools.wraps on self._testcase. functools.wraps expects a "__name__" attribute and this patch adds one. It might not be the correct fix, but it works for me.
|
py
|
diff --git a/tests/unit/modules/blockdev_test.py b/tests/unit/modules/blockdev_test.py
index <HASH>..<HASH> 100644
--- a/tests/unit/modules/blockdev_test.py
+++ b/tests/unit/modules/blockdev_test.py
@@ -86,6 +86,7 @@ class TestBlockdevModule(TestCase):
with patch.dict(blockdev.__salt__, {'cmd.run': mock}):
self.assertEqual(blockdev.fstype(device), fs_type)
+ @skipIf(not salt.utils.which('resize2fs'), 'resize2fs not found')
def test_resize2fs(self):
'''
unit tests for blockdev.resize2fs
|
Skip test_resize2fs if resize2fs does not exists (#<I>)
|
py
|
diff --git a/tests/test_celerite.py b/tests/test_celerite.py
index <HASH>..<HASH> 100644
--- a/tests/test_celerite.py
+++ b/tests/test_celerite.py
@@ -356,6 +356,19 @@ def test_log_likelihood(method, seed=42):
ll3 = gp.log_likelihood(y)
assert not np.allclose(ll2, ll3)
+ # Test zero delta t
+ ind = len(x) // 2
+ x = np.concatenate((x[:ind], [x[ind]], x[ind:]))
+ y = np.concatenate((y[:ind], [y[ind]], y[ind:]))
+ yerr = np.concatenate((yerr[:ind], [yerr[ind]], yerr[ind:]))
+ gp.compute(x, yerr)
+ ll = gp.log_likelihood(y)
+ K = gp.get_matrix(include_diagonal=True)
+ ll0 = -0.5 * np.dot(y, np.linalg.solve(K, y))
+ ll0 -= 0.5 * np.linalg.slogdet(K)[1]
+ ll0 -= 0.5 * len(x) * np.log(2*np.pi)
+ assert np.allclose(ll, ll0)
+
@method_switch
def test_predict(method, seed=42):
|
adding test for dt = 0
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100755
--- a/setup.py
+++ b/setup.py
@@ -3,7 +3,7 @@ from setuptools import setup
setup(
name='plucky',
- version='0.3.5',
+ version='0.3.6',
description='Plucking (deep) keys/paths safely from python collections has never been easier.',
long_description=open('README.rst').read(),
author='Radomir Stevanovic',
|
bumped to <I>
|
py
|
diff --git a/tests.py b/tests.py
index <HASH>..<HASH> 100755
--- a/tests.py
+++ b/tests.py
@@ -8,6 +8,7 @@ import mock
import sys
from django.conf import settings
from django.test import RequestFactory, override_settings
+from django.http import HttpResponse
import request_logging
from request_logging.middleware import LoggingMiddleware, DEFAULT_LOG_LEVEL, DEFAULT_COLORIZE, DEFAULT_MAX_BODY_LENGTH,\
@@ -368,8 +369,9 @@ class DRFTestCase(BaseLogTestCase):
def test_no_response_logging_is_honored(self, mock_log):
uri = "/widgets"
request = self.factory.get(uri)
- self.middleware.process_request(request)
- self._assert_not_logged(mock_log, "Unannotated")
+ mock_response = HttpResponse('{"example":"response"}', content_type='application/json', status=422)
+ self.middleware.process_response(request, response=mock_response)
+ self._assert_not_logged(mock_log, '"example":"response"')
def test_non_existent_drf_route_logs(self, mock_log):
uri = "/widgets/1234"
|
[tests] Correct failing to to check responses aren't logged
|
py
|
diff --git a/salt/modules/glusterfs.py b/salt/modules/glusterfs.py
index <HASH>..<HASH> 100644
--- a/salt/modules/glusterfs.py
+++ b/salt/modules/glusterfs.py
@@ -550,7 +550,6 @@ def add_volume_bricks(name, bricks):
return True
-
def enable_quota_volume(name):
'''
Enable quota on a glusterfs volume.
@@ -645,7 +644,6 @@ def list_quota_volume(name):
Name of the gluster volume
'''
-
cmd = 'volume quota {0}'.format(name)
cmd += ' list'
@@ -659,4 +657,3 @@ def list_quota_volume(name):
ret[path] = _etree_to_dict(limit)
return ret
-
|
Add volumes quota support to GlusterFS Module # blank line corrections
|
py
|
diff --git a/github_release.py b/github_release.py
index <HASH>..<HASH> 100755
--- a/github_release.py
+++ b/github_release.py
@@ -322,7 +322,18 @@ def get_releases(repo_name, verbose=False):
return releases
+@backoff.on_predicate(backoff.expo, lambda x: x is None, max_time=5)
def get_release(repo_name, tag_name):
+ """Return release
+
+ .. note::
+
+ If the release is not found (e.g the release was just created and
+ the GitHub response is not yet updated), this function is called again by
+ leveraging the `backoff` decorator.
+
+ See https://github.com/j0057/github-release/issues/67
+ """
releases = get_releases(repo_name)
try:
release = next(r for r in releases if r['tag_name'] == tag_name)
|
Improve robustness of asset upload during release creation Fix #<I>
|
py
|
diff --git a/indra/tools/live_curation.py b/indra/tools/live_curation.py
index <HASH>..<HASH> 100644
--- a/indra/tools/live_curation.py
+++ b/indra/tools/live_curation.py
@@ -506,6 +506,31 @@ class LiveCurator(object):
# Finally, we update the scorer with the new curation counts
self.scorer.update_counts(prior_counts, subtype_counts)
+ def save_curations(self, corpus_id, use_cache=True):
+ """Save the current state of curations for a corpus given its ID
+
+ If the corpus ID cannot be found, an InvalidCorpusError is raised.
+
+ Parameters
+ ----------
+ corpus_id : str
+ the ID of the corpus to save the
+ use_cache : bool
+ If True, also save the current curation to the local cache.
+ Default: True.
+ """
+ # Do NOT use cache or S3 when getting the corpus, otherwise it will
+ # overwrite the current corpus
+ try:
+ corpus = self.get_corpus(corpus_id, check_s3=False,
+ use_cache=False)
+ corpus.upload_curations(s3key=corpus_id)
+ except InvalidCorpusError:
+ logger.info('No')
+ return
+
+ corpus.upload_curations(corpus_id, use_cache=use_cache)
+
def update_beliefs(self, corpus_id):
"""Return updated belief scores for a given corpus.
|
Add LiveCurator method saving curations for corpus
|
py
|
diff --git a/examples/tornado_server.py b/examples/tornado_server.py
index <HASH>..<HASH> 100755
--- a/examples/tornado_server.py
+++ b/examples/tornado_server.py
@@ -1,5 +1,7 @@
#!/usr/bin/env python
from __future__ import absolute_import
+
+import random
import sys
import tornado.ioloop
@@ -56,7 +58,11 @@ class MyServer(tornado.tcpserver.TCPServer):
response.arg_2 = 'hello world'
response.arg_3 = context.message.arg_3
- connection.frame_and_write(response)
+ # Simulate some response delay
+ tornado.ioloop.IOLoop.instance().call_later(
+ random.random(),
+ lambda: connection.frame_and_write(response)
+ )
elif context.message.message_type == Types.PING_REQ:
connection.pong()
|
add some random splay to example service response times
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -20,7 +20,7 @@ setup(
description='Official Stanford NLP Python Library',
long_description=long_description,
-
+ long_description_content_type="text/markdown",
# The project's main homepage.
url='https://github.com/stanfordnlp/stanfordnlp.git',
@@ -59,7 +59,7 @@ setup(
# your project is installed. For an analysis of "install_requires" vs pip's
# requirements files see:
# https://packaging.python.org/en/latest/requirements.html
- install_requires=['numpy', 'ply', 'protobuf', 'requests', 'torch', 'tqdm'],
+ install_requires=['numpy', 'protobuf', 'requests', 'torch', 'tqdm'],
# List additional groups of dependencies here (e.g. development
# dependencies). You can install these using the following syntax,
|
small changes before final release of <I>
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -7,7 +7,7 @@ with open('requirements.txt') as f:
setup(
name='pyicloud',
- version='0.2.1',
+ version='0.3.0',
url='https://github.com/picklepete/pyicloud',
description=(
'PyiCloud is a module which allows pythonistas to '
@@ -16,5 +16,13 @@ setup(
author='Peter Evans',
author_email='evans.peter@gmail.com',
packages=find_packages(),
- install_requires=required
+ install_requires=required,
+ classifiers=[
+ 'Intended Audience :: Developers',
+ 'Operating System :: OS Independent',
+ 'Programming Language :: Python',
+ 'Programming Language :: Python :: 2.6',
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3',
+ ],
)
|
Bumping version number; adding trove classifiers (including those for python-version limitations).
|
py
|
diff --git a/ClassUtil.py b/ClassUtil.py
index <HASH>..<HASH> 100644
--- a/ClassUtil.py
+++ b/ClassUtil.py
@@ -8,9 +8,9 @@ Copyright
"""
__author__ = 'Jason R. Coombs <jaraco@sandia.gov>'
-__version__ = '$Revision: 3 $'[11:-2]
-__vssauthor__ = '$Author: Jaraco $'[9:-2]
-__date__ = '$Modtime: 04-06-23 12:18 $'[10:-2]
+__version__ = '$Rev: 3 $'[6:-2]
+__svnauthor__ = '$Author: Jaraco $'[9:-2]
+__date__ = '$Date: 04-06-23 12:18 $'[7:-2]
import types
|
Fixed all of the module tags to use svnauthor instead of vssauthor. Changed all to use svn keywords.
|
py
|
diff --git a/lib/webinterface_tests.py b/lib/webinterface_tests.py
index <HASH>..<HASH> 100644
--- a/lib/webinterface_tests.py
+++ b/lib/webinterface_tests.py
@@ -42,6 +42,7 @@ class _FakeApache(object):
self.config_tree = None
self.server_root = None
self.mpm_query = None
+ self.exists_config_define = None
def parse_qs(self, *args, **kargs):
return cgi.parse_qs(*args, **kargs)
|
Added exists_config_define to the FakeApache module, as it is needed by some mod_python versions.
|
py
|
diff --git a/panflute/version.py b/panflute/version.py
index <HASH>..<HASH> 100644
--- a/panflute/version.py
+++ b/panflute/version.py
@@ -2,4 +2,4 @@
Panflute version
"""
-__version__ = '2.0.5'
+__version__ = '2.1.0'
|
version.py: minor bump as new API has been added
|
py
|
diff --git a/lib/svtplay_dl/fetcher/hls.py b/lib/svtplay_dl/fetcher/hls.py
index <HASH>..<HASH> 100644
--- a/lib/svtplay_dl/fetcher/hls.py
+++ b/lib/svtplay_dl/fetcher/hls.py
@@ -83,7 +83,10 @@ class HLS(VideoRetriever):
sys.exit(2)
match = re.search(r'URI="(https?://.*?)"', keydata)
- key = get_http_data(match.group(1))
+ error, key = get_http_data(match.group(1))
+ if error:
+ log.error("Can't get crypto key to decode files.")
+ return
rand = os.urandom(16)
decryptor = AES.new(key, AES.MODE_CBC, rand)
|
hls: handle error when getting key
|
py
|
diff --git a/bika/lims/browser/sample.py b/bika/lims/browser/sample.py
index <HASH>..<HASH> 100644
--- a/bika/lims/browser/sample.py
+++ b/bika/lims/browser/sample.py
@@ -71,7 +71,21 @@ class ajaxSampleSubmit():
plone.protect.CheckAuthenticator(self.request.form)
plone.protect.PostOnly(self.request.form)
- if form.has_key("save_button"):
+ can_edit = True
+ workflow = getToolByName(self.context, 'portal_workflow')
+ if workflow.getInfoFor(self.context, 'cancellation_state') == "cancelled":
+ can_edit = False
+ else:
+ ars = self.context.getAnalysisRequests()
+ for ar in ars:
+ for a in ar.getAnalyses():
+ if workflow.getInfoFor(a.getObject(), 'review_state') in ('verified', 'published'):
+ can_edit = False
+ break
+ if not can_edit:
+ break
+
+ if can_edit:
sample = self.context
sampleType = form['SampleType']
samplePoint = form['SamplePoint']
@@ -99,6 +113,9 @@ class ajaxSampleSubmit():
for ar in ars:
ar.reindexObject()
message = _("Changes Saved.")
+ else:
+ message = _("Changes not allowed.")
+
self.context.plone_utils.addPortalMessage(message, 'info')
return json.dumps({'success':message})
|
Don't allow sample to be edited when not appropriate
|
py
|
diff --git a/EulerPy/tests.py b/EulerPy/tests.py
index <HASH>..<HASH> 100644
--- a/EulerPy/tests.py
+++ b/EulerPy/tests.py
@@ -160,7 +160,7 @@ class EulerTests(unittest.TestCase):
def test_verify_correct(self):
# Encoded in Base64 to prevent problem 1 spoilers
- solution = b64decode('MjMzMTY4').decode(encoding='UTF-8')
+ solution = b64decode('MjMzMTY4'.encode('UTF-8')).decode('UTF-8')
problem_solution = 'print ({0})'.format(solution)
with open('001.py', 'w') as file:
|
Encode solution string before decoding This should fix tests on Python <I> where b<I>decode requires bytes.
|
py
|
diff --git a/firenado/service.py b/firenado/service.py
index <HASH>..<HASH> 100644
--- a/firenado/service.py
+++ b/firenado/service.py
@@ -63,11 +63,9 @@ class FirenadoService(object):
:return: The data connected object in the top of the hierarchy.
"""
- from firenado.data import DataConnectedMixin
-
if self.consumer is None:
return None
-
+ from firenado.data import DataConnectedMixin
if isinstance(self.consumer, DataConnectedMixin):
return self.consumer
invert_op = getattr(self.consumer, "get_data_connected", None)
|
Imported data connected mixin only if consumer isn't none. Refs: #<I>
|
py
|
diff --git a/djpaypal/models/webhooks.py b/djpaypal/models/webhooks.py
index <HASH>..<HASH> 100644
--- a/djpaypal/models/webhooks.py
+++ b/djpaypal/models/webhooks.py
@@ -126,7 +126,7 @@ class WebhookEvent(PaypalObject):
return cls.objects.get(id=self.resource[cls.id_field_name])
def send_signal(self):
- signal = WEBHOOK_SIGNALS.get(self.event_type)
+ signal = WEBHOOK_SIGNALS.get(self.event_type.lower())
if signal:
return signal.send(sender=self.__class__, event=self)
@@ -263,6 +263,8 @@ def webhook_handler(*event_types):
# First expand all wildcards and verify the event types are valid
event_types_to_register = set()
for event_type in event_types:
+ # Always convert to lowercase
+ event_type = event_type.lower()
if "*" in event_type:
# expand it
for t in WEBHOOK_EVENT_TYPES:
|
Always convert event type signaling to lowercase
|
py
|
diff --git a/beets/mediafile.py b/beets/mediafile.py
index <HASH>..<HASH> 100644
--- a/beets/mediafile.py
+++ b/beets/mediafile.py
@@ -736,11 +736,12 @@ class MP3DescStorageStyle(MP3StorageStyle):
return None
def delete(self, mutagen_file):
- frame = None
+ found_frame = None
for frame in mutagen_file.tags.getall(self.key):
if frame.desc.lower() == self.description.lower():
+ found_frame = frame
break
- if frame is not None:
+ if found_frame is not None:
del mutagen_file[frame.HashKey]
|
Don't accidentally delete fallback id3 frame Original: beetbox/beets@a<I>b<I>
|
py
|
diff --git a/ravel.py b/ravel.py
index <HASH>..<HASH> 100644
--- a/ravel.py
+++ b/ravel.py
@@ -312,6 +312,13 @@ def system_bus() :
Bus(dbus.Connection.bus_get(DBUS.BUS_SYSTEM, private = False))
#end system_bus
+def connect_server(address) :
+ "opens a connection to a server at the specified network address and" \
+ " returns a Bus object for the connection."
+ return \
+ Bus(dbus.Connection.open(address, private = False))
+#end connect_server
+
#+
# Client-side proxies for server-side objects
#-
|
add function to create Bus by connecting to a server address
|
py
|
diff --git a/message_sender/tasks.py b/message_sender/tasks.py
index <HASH>..<HASH> 100644
--- a/message_sender/tasks.py
+++ b/message_sender/tasks.py
@@ -115,6 +115,12 @@ class ConcurrencyLimiter(object):
else:
delay = getattr(settings, 'VOICE_MESSAGE_DELAY', 0)
+ if not msg_time:
+ return
+
+ time_since = (datetime.now() - msg_time).total_seconds()
+ if time_since > delay:
+ return
# Convert from datetime to seconds since epoch
msg_time = (msg_time - datetime(1970, 1, 1)).total_seconds()
bucket = int(msg_time // cls.BUCKET_SIZE)
@@ -126,7 +132,7 @@ class ConcurrencyLimiter(object):
if value < 0:
# Set the expiry time to the delay minus the time passed since
# the message was sent.
- cache.set(key, 0, delay + cls.BUCKET_SIZE)
+ cache.set(key, 0, delay - time_since)
else:
cache.decr(key)
|
Take time since msg sent into account in decr
|
py
|
diff --git a/unbabel/api.py b/unbabel/api.py
index <HASH>..<HASH> 100644
--- a/unbabel/api.py
+++ b/unbabel/api.py
@@ -115,7 +115,12 @@ class UnbabelApi(object):
data["visibility"] = visibility
if public_url:
data["public_url"] = public_url
+ print "Data for request"
+ print data
result = requests.post("%stranslation/"%self.api_url,headers=headers,data=json.dumps(data))
+ print result
+ print result.content
+ print result.status_code
if result.status_code == 201:
json_object = json.loads(result.content)
translation = Translation(uid=json_object["uid"],
@@ -125,7 +130,6 @@ class UnbabelApi(object):
translation = json_object.get("translation",None)
)
return translation
- return translation
elif result.status_code == 401:
raise UnauthorizedException(result.content)
elif result.status_code == 400:
|
small change on unbabel py
|
py
|
diff --git a/dark/civ/proteins.py b/dark/civ/proteins.py
index <HASH>..<HASH> 100644
--- a/dark/civ/proteins.py
+++ b/dark/civ/proteins.py
@@ -749,7 +749,7 @@ class ProteinGrouper(object):
else:
append(
'<a id="sample-%s"></a>'
- '<span class="sample">Sample '
+ '<span class="sample">'
'<span class="sample-name">%s</span> '
'did not match anything.</span>' %
(sampleName, sampleName))
|
Tiny change to improve the HTML output generated by proteins-to-pathogens-civ.py
|
py
|
diff --git a/vex/config.py b/vex/config.py
index <HASH>..<HASH> 100644
--- a/vex/config.py
+++ b/vex/config.py
@@ -22,12 +22,14 @@ if sys.version_info < (3, 3):
class InvalidConfigError(Exception):
"""Raised when there is an error during a .vexrc file parse.
"""
- def __init__(self, errors):
+ def __init__(self, filename, errors):
Exception.__init__(self)
+ self.filename = filename
self.errors = errors
def __str__(self):
- return "errors on lines {0!r}".format(
+ return "errors in {0!r}, lines {1!r}".format(
+ self.filename,
list(tup[0] for tup in self.errors)
)
@@ -146,4 +148,4 @@ def parse_vexrc(inp, environ):
except GeneratorExit:
break
if errors:
- raise InvalidConfigError(errors)
+ raise InvalidConfigError(inp.name, errors)
|
mention filename with parse errors e.g. if someone pastes bashrc garbage into vexrc, let them know it is a problem with what they put in vexrc so they don't have to reason about what they are seeing
|
py
|
diff --git a/python/docs/conf.py b/python/docs/conf.py
index <HASH>..<HASH> 100644
--- a/python/docs/conf.py
+++ b/python/docs/conf.py
@@ -45,6 +45,7 @@ extensions = [
autosummary_generate = True
add_module_names = False
+autodoc_default_flags = ['members', 'inherited-members']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
|
Make members, inherited-members default in autodoc
|
py
|
diff --git a/c7n/policy.py b/c7n/policy.py
index <HASH>..<HASH> 100644
--- a/c7n/policy.py
+++ b/c7n/policy.py
@@ -472,6 +472,17 @@ class LambdaMode(PolicyExecutionMode):
"action-%s" % action.name, utils.dumps(results))
return resources
+ def expand_variables(self, variables):
+ """expand variables in the mode role fields.
+ """
+ p = variables['policy'].copy()
+ if 'mode' in variables['policy']:
+ if 'role' in variables['policy']['mode']:
+ mode = variables['policy']['mode'].copy()
+ mode['role'] = mode['role'].format(**variables)
+ p['mode'] = mode
+ return p
+
def provision(self):
# Avoiding runtime lambda dep, premature optimization?
from c7n.mu import PolicyLambda, LambdaManager
@@ -479,6 +490,11 @@ class LambdaMode(PolicyExecutionMode):
with self.policy.ctx:
self.policy.log.info(
"Provisioning policy lambda %s", self.policy.name)
+ variables = {
+ 'account_id': self.policy.options.account_id,
+ 'policy': self.policy.data
+ }
+ self.policy.data = self.expand_variables(variables)
try:
manager = LambdaManager(self.policy.session_factory)
except ClientError:
|
core - interpolate/replace {account_id} in policy mode: role: for multi-account usage (#<I>)
|
py
|
diff --git a/optlang/glpk_interface.py b/optlang/glpk_interface.py
index <HASH>..<HASH> 100644
--- a/optlang/glpk_interface.py
+++ b/optlang/glpk_interface.py
@@ -441,10 +441,11 @@ class Model(interface.Model):
@objective.setter
def objective(self, value):
+ if self._objective is not None:
+ for variable in self._objective.variables:
+ glp_set_obj_coef(self.problem, variable.index, 0.)
super(Model, self.__class__).objective.fset(self, value) # TODO: This needs to be sped up
self._objective = value
- for i in xrange(1, glp_get_num_cols(self.problem) + 1):
- glp_set_obj_coef(self.problem, i, 0)
expression = self._objective.expression
if isinstance(expression, types.FloatType) or isinstance(expression, types.IntType):
pass
|
GLPK objective coefficients are much more efficiently reset to zero if a new objective is set.
|
py
|
diff --git a/tests/unit/modules/test_osquery.py b/tests/unit/modules/test_osquery.py
index <HASH>..<HASH> 100644
--- a/tests/unit/modules/test_osquery.py
+++ b/tests/unit/modules/test_osquery.py
@@ -50,7 +50,7 @@ class OSQueryTestCase(TestCase, LoaderModuleMockMixin):
MagicMock(return_value=_table_attrs_results)):
with patch.object(osquery, '_osquery',
MagicMock(return_value=_os_query_results)):
- self.assertEqual(osquery.version(), u'2.6.1')
+ self.assertEqual(osquery.version(), u'2.6.1')
def test_deb_packages(self):
'''
@@ -215,4 +215,3 @@ class OSQueryTestCase(TestCase, LoaderModuleMockMixin):
self.assertEqual(osquery.osquery_info(attrs=['build_platform',
'start_time']),
_os_query_results)
-
|
Fixing some minor lint issues.
|
py
|
diff --git a/climlab/convection/_emanuel_convection/setup.py b/climlab/convection/_emanuel_convection/setup.py
index <HASH>..<HASH> 100644
--- a/climlab/convection/_emanuel_convection/setup.py
+++ b/climlab/convection/_emanuel_convection/setup.py
@@ -22,7 +22,7 @@ def configuration(parent_package='', top_path=None):
f90flags.append('-O0')
# Suppress all compiler warnings (avoid huge CI log files)
f90flags.append('-w')
- except CompilerNotFound:
+ except:
print('No Fortran compiler found, not building the RRTMG_LW radiation module!')
build = False
|
More robustly set build=False for convection code
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -34,7 +34,7 @@ setup(
],
install_requires=[
'click==4.0',
- 'python-owasp-zap-v2.4==0.0.5',
+ 'python-owasp-zap-v2.4==0.0.7',
'tabulate==0.7.5',
'termcolor==1.1.0',
],
|
Upgrade python-owasp-zap-<I> dependency to <I>
|
py
|
diff --git a/dramatiq/brokers/rabbitmq.py b/dramatiq/brokers/rabbitmq.py
index <HASH>..<HASH> 100644
--- a/dramatiq/brokers/rabbitmq.py
+++ b/dramatiq/brokers/rabbitmq.py
@@ -401,15 +401,18 @@ class _RabbitmqConsumer(Consumer):
self.channel._queue_consumer_generator.pending_events.append(self.interrupt_message)
# Finally, drain the socket for select
self.interrupt_sock_r.recv(512)
- except OSError as e:
+ except OSError as e: # pragma: no cover
if e.errno != errno.EGAIN:
raise
def close(self):
try:
- self.connection._impl.ioloop.remove_handler(self.interrupt_sock_r.fileno())
- self.interrupt_sock_w.close()
- self.interrupt_sock_r.close()
+ try:
+ self.interrupt_sock_w.close()
+ self.interrupt_sock_r.close()
+ self.connection._impl.ioloop.remove_handler(self.interrupt_sock_r.fileno())
+ except KeyError:
+ pass
if self.channel.is_open:
self.channel.cancel()
|
refactor: harden interrupt cleanup code against early disconnect
|
py
|
diff --git a/sample/src/app/app.py b/sample/src/app/app.py
index <HASH>..<HASH> 100644
--- a/sample/src/app/app.py
+++ b/sample/src/app/app.py
@@ -21,6 +21,7 @@ import sys
import traceback
import json
import requests
+import re
from flask import Flask
from flask import request
@@ -28,7 +29,7 @@ app = Flask(__name__)
vcap_application = json.loads(os.getenv('VCAP_APPLICATION','{ "name": "none", "application_uris": [ "http://localhost:8080" ] }'))
host = vcap_application['application_uris'][0]
-name = vcap_application['name'].rstrip('01234567889.').rstrip('-')
+name = re.sub('-[0-9].*\\Z', '', vcap_application['name'])
def route(url):
return host + url
|
Allow for build numbers in app name without affecting service name
|
py
|
diff --git a/nion/swift/test/HardwareSource_test.py b/nion/swift/test/HardwareSource_test.py
index <HASH>..<HASH> 100644
--- a/nion/swift/test/HardwareSource_test.py
+++ b/nion/swift/test/HardwareSource_test.py
@@ -439,7 +439,7 @@ def _test_exception_during_record_halts_playback(testcase, hardware_source, expo
start = time.time()
while time.time() - start < exposure * 10.0 and hardware_source.is_recording:
time.sleep(0.05)
- # print(time.time() - start)
+ time.sleep(0.05) # avoid test race condition
testcase.assertFalse(hardware_source.is_recording)
finally:
hardware_source.abort_recording(sync_timeout=3.0)
|
Attempt to fix race condition in an acquire test.
|
py
|
diff --git a/cloudinary_storage/storage.py b/cloudinary_storage/storage.py
index <HASH>..<HASH> 100644
--- a/cloudinary_storage/storage.py
+++ b/cloudinary_storage/storage.py
@@ -10,6 +10,7 @@ from django.core.files.uploadedfile import UploadedFile
from django.core.files.storage import Storage
from django.utils.deconstruct import deconstructible
+from . import app_settings
from .helpers import get_resources_by_path
|
Added app_settings import to storage file
|
py
|
diff --git a/perception/image.py b/perception/image.py
index <HASH>..<HASH> 100644
--- a/perception/image.py
+++ b/perception/image.py
@@ -1679,7 +1679,7 @@ class DepthImage(Image):
data[ind[0], ind[1]] = 0.0
return DepthImage(data, self._frame)
- def pixels_farther_than(self, depth_im):
+ def pixels_farther_than(self, depth_im, filter_equal_depth=False):
"""
Returns the pixels that are farther away
than those in the corresponding depth image.
@@ -1688,6 +1688,8 @@ class DepthImage(Image):
----------
depth_im : :obj:`DepthImage`
depth image to query replacement with
+ filter_equal_depth : bool
+ whether or not to mark depth values that are equal
Returns
-------
@@ -1695,7 +1697,10 @@ class DepthImage(Image):
the pixels
"""
# take closest pixel
- farther_px = np.where(self.data > depth_im.data)
+ if filter_equal_depth:
+ farther_px = np.where(self.data > depth_im.data)
+ else:
+ farther_px = np.where(self.data >= depth_im.data)
farther_px = np.c_[farther_px[0], farther_px[1]]
return farther_px
|
Added an option to filter equal depth values
|
py
|
diff --git a/s3upload/forms.py b/s3upload/forms.py
index <HASH>..<HASH> 100644
--- a/s3upload/forms.py
+++ b/s3upload/forms.py
@@ -24,6 +24,9 @@ class S3UploadForm(forms.Form):
content_type = forms.CharField(widget=forms.HiddenInput())
+ # http://docs.aws.amazon.com/AmazonS3/latest/dev/HTTPPOSTForms.html#HTTPPOSTFormFields
+ # The file or content must be the last field rendered in the form.
+ # Any fields below it are ignored.
file = forms.FileField()
content_type_prefix = '' # e.g. 'image/', 'text/'
@@ -102,12 +105,14 @@ class S3UploadForm(forms.Form):
return self.upload_to
def get_policy(self):
+ # http://docs.aws.amazon.com/AmazonS3/latest/dev/HTTPPOSTForms.html#HTTPPOSTConstructPolicy
connection = self.get_connection()
policy = connection.build_post_policy(self.get_expiration_time(),
self.get_conditions())
return self._base64_encode(policy.replace('\n', '').encode('utf-8'))
def get_signature(self):
+ # http://docs.aws.amazon.com/AmazonS3/latest/dev/HTTPPOSTForms.html#HTTPPOSTConstructingPolicySignature
digest = hmac.new(self.get_secret_key().encode('utf-8'),
self.get_policy(), sha1).digest()
return self._base64_encode(digest)
|
Added some comments which reference the AWS docs for html form post.
|
py
|
diff --git a/MAVProxy/modules/lib/mp_menu.py b/MAVProxy/modules/lib/mp_menu.py
index <HASH>..<HASH> 100644
--- a/MAVProxy/modules/lib/mp_menu.py
+++ b/MAVProxy/modules/lib/mp_menu.py
@@ -9,6 +9,7 @@ November 2013
from MAVProxy.modules.lib import mp_util
from MAVProxy.modules.lib import multiproc
import platform
+from MAVProxy.modules.lib.wx_loader import wx
class MPMenuGeneric(object):
'''a MP menu separator'''
@@ -50,6 +51,7 @@ class MPMenuItem(MPMenuGeneric):
self.returnkey = returnkey
self.handler = handler
self.handler_result = None
+ self._id = wx.NewId()
def find_selected(self, event):
'''find the selected menu item'''
@@ -66,11 +68,7 @@ class MPMenuItem(MPMenuGeneric):
self.handler_result = call()
def id(self):
- '''id used to identify the returned menu items
- uses a 16 bit signed integer'''
- # must be below SHRT_MAX
- id = int(hash((self.name, self.returnkey))) % 32767
- return id
+ return self._id
def _append(self, menu):
'''append this menu item to a menu'''
|
menu: switch to allocated menu IDs this avoids collisions in the <I> bit ID space and re-use of special IDs that lead to odd icons
|
py
|
diff --git a/devassistant/yaml_loader.py b/devassistant/yaml_loader.py
index <HASH>..<HASH> 100644
--- a/devassistant/yaml_loader.py
+++ b/devassistant/yaml_loader.py
@@ -1,9 +1,6 @@
import os
import yaml
-from devassistant import argument
-from devassistant.assistants import yaml_assistant
-
class YamlLoader(object):
@classmethod
def load_all_yamls(cls, directories):
|
Remove unused imports from yaml_loader
|
py
|
diff --git a/astropy_helpers/version_helpers.py b/astropy_helpers/version_helpers.py
index <HASH>..<HASH> 100644
--- a/astropy_helpers/version_helpers.py
+++ b/astropy_helpers/version_helpers.py
@@ -87,7 +87,8 @@ def _version_split(version):
# details. Note that the imports have to be absolute, since this is also used
# by affiliated packages.
_FROZEN_VERSION_PY_TEMPLATE = """
-# Autogenerated by {packagename}'s setup.py on {timestamp}
+# Autogenerated by {packagename}'s setup.py on {timestamp!s}
+import datetime
{header}
@@ -96,6 +97,7 @@ minor = {minor}
bugfix = {bugfix}
release = {rel}
+timestamp = {timestamp!r}
debug = {debug}
try:
@@ -130,7 +132,7 @@ githash = "{githash}"
def _get_version_py_str(packagename, version, githash, release, debug,
uses_git=True):
- timestamp = str(datetime.datetime.now())
+ timestamp = datetime.datetime.now()
major, minor, bugfix = _version_split(version)
if packagename.lower() == 'astropy':
|
Perhaps a bit superfluous, but also include the timestamp of the build as a datetime object--no reason not to considering we already put the timestamp in a comment as well.
|
py
|
diff --git a/mistletoe/block_token.py b/mistletoe/block_token.py
index <HASH>..<HASH> 100644
--- a/mistletoe/block_token.py
+++ b/mistletoe/block_token.py
@@ -477,6 +477,12 @@ class List(BlockToken):
matches.append(output)
if next_marker is None:
break
+
+ if matches:
+ # Only consider the last list item loose if there's more than one element
+ last_parse_buffer = matches[-1][0]
+ last_parse_buffer.loose = len(last_parse_buffer) > 1 and last_parse_buffer.loose
+
return matches
@staticmethod
|
Fixed incorrect loose list (#<I>) (#<I>) * Fixed incorrect loose list (#<I>) * Moved into parsing stage
|
py
|
diff --git a/skyfield/__init__.py b/skyfield/__init__.py
index <HASH>..<HASH> 100644
--- a/skyfield/__init__.py
+++ b/skyfield/__init__.py
@@ -5,5 +5,5 @@ functions and classes there. See the ``documentation`` directory inside
the source code, as well as the http://rhodesmill.org/skyfield/ site!
"""
-VERSION = (1, 25)
+VERSION = (1, 26)
__version__ = '.'.join(map(str, VERSION))
|
With trepidation, declare version <I> I always avoid the I/O code because of the danger that any tweak will break it for someone's system or use case. This big epic rewrite only raises the stakes. But it passes the tests, so, here goes.
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -2,7 +2,7 @@ from setuptools import find_packages, setup
setup(
name="ereuse-utils",
- version='0.3.0b1',
+ version='0.3.0b2',
packages=find_packages(),
url='https://github.com/eReuse/utils',
license='AGPLv3 License',
|
Bump to <I>b2
|
py
|
diff --git a/sdk/python/sawtooth_processor_test/mock_validator.py b/sdk/python/sawtooth_processor_test/mock_validator.py
index <HASH>..<HASH> 100644
--- a/sdk/python/sawtooth_processor_test/mock_validator.py
+++ b/sdk/python/sawtooth_processor_test/mock_validator.py
@@ -273,7 +273,7 @@ class MockValidator(object):
msg_type2 = to_message_type(obj2)
if msg_type != msg_type2:
- raise UnexpectedMessageException(msg_type, obj1, obj2)
+ return False
if msg_type in self._comparators:
return self._comparators[msg_type](obj1, obj2)
|
Fix UnexpectedMessageException ordering issue This modifies _compare() to return False instead of throwing an exception. Previously, the exception thrown did not have enough context to determine the appropriate order of obj1, obj2, resulting in incorrect messages sent to the user (as expected and received messages were swapped).
|
py
|
diff --git a/astrobase/periodbase/kbls.py b/astrobase/periodbase/kbls.py
index <HASH>..<HASH> 100644
--- a/astrobase/periodbase/kbls.py
+++ b/astrobase/periodbase/kbls.py
@@ -880,9 +880,11 @@ def bls_snr(blsdict,
thisminepoch = spfit['fitinfo']['fitepoch']
if isinstance(thisminepoch, np.ndarray):
- LOGWARNING('minimum epoch is actually an array: '
- '%s instead of a float, are time values '
- 'repeated in the original input? '
+ LOGWARNING('minimum epoch is actually an array:\n'
+ '%s\n'
+ 'instead of a float, '
+ 'are there duplicate time values '
+ 'in the original input? '
'will use the first value in this array.'
% repr(thisminepoch))
thisminepoch = thisminepoch[0]
|
kbls.bls_snr: add check for rare case when minepoch is an array
|
py
|
diff --git a/dingo/core/network/grids.py b/dingo/core/network/grids.py
index <HASH>..<HASH> 100644
--- a/dingo/core/network/grids.py
+++ b/dingo/core/network/grids.py
@@ -212,9 +212,28 @@ class MVGridDingo(GridDingo):
"""
# do the routing
- self._graph = mv_routing.solve(self._graph, debug, anim)
- self._graph = mv_connect.mv_connect_satellites(self, self._graph, debug)
- self._graph = mv_connect.mv_connect_stations(self.grid_district, self._graph, debug)
+ self._graph = mv_routing.solve(graph=self._graph,
+ debug=debug,
+ anim=anim)
+
+ # connect satellites (step 1, with restrictions like max. string length, max peak load per string)
+ self._graph = mv_connect.mv_connect_satellites(mv_grid=self,
+ graph=self._graph,
+ mode='normal',
+ debug=debug)
+
+ # connect satellites to closest line/station on a MV ring that have not been connected in step 1
+ self._graph = mv_connect.mv_connect_satellites(mv_grid=self,
+ graph=self._graph,
+ mode='isolated',
+ debug=debug)
+
+ # connect stations
+ self._graph = mv_connect.mv_connect_stations(mv_grid_district=self.grid_district,
+ graph=self._graph,
+ debug=debug)
+
+
# create MV Branch objects from graph edges (lines) and link these objects back to graph edges
# TODO:
|
call conn. of isolated satellites in MVGridDingo.routing() and pimp it up
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -8,9 +8,6 @@ except ImportError:
from distutils.core import setup
-from pyphi import __version__
-
-
with open('README.rst') as f:
readme = f.read()
@@ -34,8 +31,8 @@ test_require = [
setup(
- name="pyphi",
- version=__version__,
+ name='pyphi',
+ version='0.5.0',
description='A Python library for computing integrated information.',
author='Will Mayner',
author_email='wmayner@gmail.com',
|
Don't import pyphi in setup.py
|
py
|
diff --git a/visa.py b/visa.py
index <HASH>..<HASH> 100644
--- a/visa.py
+++ b/visa.py
@@ -13,7 +13,7 @@
from __future__ import division, unicode_literals, print_function, absolute_import
-from pyvisa import logger, __version__, log_to_screen
+from pyvisa import logger, __version__, log_to_screen, constants
from pyvisa.highlevel import ResourceManager
from pyvisa.errors import (Error, VisaIOError, VisaIOWarning, VisaTypeError,
UnknownHandler, OSNotSupported, InvalidBinaryFormat,
|
Added constants to visa.py
|
py
|
diff --git a/bokeh/models/widgets/sliders.py b/bokeh/models/widgets/sliders.py
index <HASH>..<HASH> 100644
--- a/bokeh/models/widgets/sliders.py
+++ b/bokeh/models/widgets/sliders.py
@@ -99,6 +99,9 @@ class AbstractSlider(Widget):
* "mouseup": the callback will be executed only once when the slider is released.
The "mouseup" policy is intended for scenarios in which the callback is expensive in time.
+
+ .. warning::
+ Callback policy currently apply to JS callbacks
""")
bar_color = Color(default="#e6e6e6", help="""
|
Add documentation on callback policy for slider (#<I>) * Add warning message and small test * ValueError too restrictive * ValueError too restrictive add a warning code with validation decorator * ValueError too restrictive add a warning code with validation decorator * Identation problem remove on super * Add proper indentation No callback warning * Add proper indentation No callback warning
|
py
|
diff --git a/pymatgen/io/vasp/tests/test_outputs.py b/pymatgen/io/vasp/tests/test_outputs.py
index <HASH>..<HASH> 100644
--- a/pymatgen/io/vasp/tests/test_outputs.py
+++ b/pymatgen/io/vasp/tests/test_outputs.py
@@ -90,7 +90,7 @@ class VasprunTest(PymatgenTest):
def test_optical_absorption_coeff(self):
v = Vasprun(self.TEST_FILES_DIR / "vasprun.BSE.xml.gz")
absorption_coeff = v.optical_absorption_coeff
- self.assertEqual(absorption_coeff[1], 24966408728.917931)
+ self.assertEqual(absorption_coeff[1], 0.13254281688694558)
def test_vasprun_with_more_than_two_unlabelled_dielectric_functions(self):
with self.assertRaises(NotImplementedError):
|
update the test absorption_coeff to match the unit of cm^-1
|
py
|
diff --git a/allegedb/allegedb/window.py b/allegedb/allegedb/window.py
index <HASH>..<HASH> 100644
--- a/allegedb/allegedb/window.py
+++ b/allegedb/allegedb/window.py
@@ -85,8 +85,8 @@ def within_history(rev, windowdict):
if not windowdict:
return False
begin = windowdict._past[0][0] if windowdict._past else \
- windowdict._future[0][0]
- end = windowdict._future[-1][0] if windowdict._future else \
+ windowdict._future[-1][0]
+ end = windowdict._future[0][0] if windowdict._future else \
windowdict._past[-1][0]
return begin <= rev <= end
|
Correct window.within_history(..) for the new stacks layout
|
py
|
diff --git a/pandas/io/tests/test_parsers.py b/pandas/io/tests/test_parsers.py
index <HASH>..<HASH> 100644
--- a/pandas/io/tests/test_parsers.py
+++ b/pandas/io/tests/test_parsers.py
@@ -1799,12 +1799,14 @@ a,b,c
tm.assert_frame_equal(result, expected)
def test_decompression(self):
- data = open(self.csv1, 'rb').read()
+ try:
+ import gzip, bz2
+ except ImportError:
+ raise nose.SkipTest
+ data = open(self.csv1, 'rb').read()
expected = self.read_csv(self.csv1)
- import gzip, bz2
-
try:
tmp = gzip.GzipFile('__tmp__', mode='wb')
tmp.write(data)
|
TST: skip compression tests if either gzip/bz2 not built with user's python. close #<I>
|
py
|
diff --git a/usb1/__init__.py b/usb1/__init__.py
index <HASH>..<HASH> 100644
--- a/usb1/__init__.py
+++ b/usb1/__init__.py
@@ -2608,8 +2608,9 @@ class USBContext:
it cannot call any synchronous libusb function.
"""
def wrapped_callback(context_p, device_p, event, _):
- assert addressof(context_p.contents) == addressof(
- self.__context_p.contents), (context_p, self.__context_p)
+ assert context_p == self.__context_p.value, (
+ context_p, self.__context_p,
+ )
device = USBDevice(
context=self,
device_p=device_p,
|
usb1: Fix context pointer comparison in hotplugRegisterCallback This was likely broken by the pointer type change in usb1: Enforce object finalization order. as void and non-void pointers do not have the same attributes in ctypes.
|
py
|
diff --git a/openapi_core/unmarshalling/schemas/factories.py b/openapi_core/unmarshalling/schemas/factories.py
index <HASH>..<HASH> 100644
--- a/openapi_core/unmarshalling/schemas/factories.py
+++ b/openapi_core/unmarshalling/schemas/factories.py
@@ -1,4 +1,4 @@
-from copy import deepcopy
+from copy import copy
import warnings
from openapi_schema_validator import OAS30Validator, oas30_format_checker
@@ -89,7 +89,7 @@ class SchemaUnmarshallersFactory(object):
return OAS30Validator(schema.__dict__, **kwargs)
def _get_format_checker(self):
- fc = deepcopy(oas30_format_checker)
+ fc = copy(oas30_format_checker)
for name, formatter in self.custom_formatters.items():
fc.checks(name)(formatter.validate)
return fc
|
Format checker deepcopy to shallowcopy
|
py
|
diff --git a/latools/latools.py b/latools/latools.py
index <HASH>..<HASH> 100644
--- a/latools/latools.py
+++ b/latools/latools.py
@@ -293,8 +293,8 @@ class analyse(object):
if internal_standard in self.analytes:
self.internal_standard = internal_standard
else:
- ValueError('The internal standard ({}) is not amongst the'.format(internal_standard) +
- 'analytes in\nyour data files. Please make sure it is specified correctly.')
+ raise ValueError('The internal standard ({}) is not amongst the'.format(internal_standard) +
+ 'analytes in\nyour data files. Please make sure it is specified correctly.')
self.minimal_analytes = set([internal_standard])
# keep record of which stages of processing have been performed
|
missing 'raise' in ValueError call closes #<I>
|
py
|
diff --git a/tests/test_vacbot.py b/tests/test_vacbot.py
index <HASH>..<HASH> 100644
--- a/tests/test_vacbot.py
+++ b/tests/test_vacbot.py
@@ -10,6 +10,20 @@ from sucks import *
# the library's design and its multithreaded nature, and b) I'm manually testing every change anyhow,
# as it's not clear how the robot really behaves.
+def test_custom_command():
+ # Ensure a custom-built command generates the expected XML payload
+ c = VacBotCommand('CustomCommand', {'type': 'customtype'})
+ assert_equals(ElementTree.tostring(c.to_xml()),
+
+ b'<ctl td="CustomCommand"><customcommand type="customtype" /></ctl>')
+
+def test_custom_command_noargs():
+ # Ensure a custom-built command with no args generates XML without an args element
+ c = VacBotCommand('CustomCommand')
+ assert_equals(ElementTree.tostring(c.to_xml()),
+ b'<ctl td="CustomCommand" />')
+
+
def test_clean_command():
c = Clean(10)
assert_equals(c.terminal, False)
|
Add tests for the command formatting changes Tests preserving the CamelCasing of commands, and support for zero args
|
py
|
diff --git a/ykman/cli/info.py b/ykman/cli/info.py
index <HASH>..<HASH> 100644
--- a/ykman/cli/info.py
+++ b/ykman/cli/info.py
@@ -36,18 +36,18 @@ import click
def get_overall_fips_status(serial):
- stati = {}
+ statuses = {}
with open_device(transports=TRANSPORT.OTP, serial=serial) as dev:
- stati['OTP'] = OtpController(dev._driver).is_in_fips_mode
+ statuses['OTP'] = OtpController(dev._driver).is_in_fips_mode
with open_device(transports=TRANSPORT.CCID, serial=serial) as dev:
- stati['OATH'] = OathController(dev._driver).is_in_fips_mode
+ statuses['OATH'] = OathController(dev._driver).is_in_fips_mode
with open_device(transports=TRANSPORT.FIDO, serial=serial) as dev:
- stati['FIDO U2F'] = FipsU2fController(dev._driver).is_in_fips_mode
+ statuses['FIDO U2F'] = FipsU2fController(dev._driver).is_in_fips_mode
- return stati
+ return statuses
@click.command()
|
Replace "stati" with "statuses"
|
py
|
diff --git a/tests/test_midi.py b/tests/test_midi.py
index <HASH>..<HASH> 100644
--- a/tests/test_midi.py
+++ b/tests/test_midi.py
@@ -6,8 +6,10 @@ from sebastian.midi.midi import load_midi
class TestMidi(TestCase):
def test_load_midi(self):
+ import os.path
+ filename = os.path.join(os.path.dirname(__file__), "scale.mid")
self.assertEqual(
- list(load_midi("scale.mid")[0]),
+ list(load_midi(filename)[0]),
[
{'midi_pitch': 60, 'offset_64': 42, 'duration_64': 15},
{'midi_pitch': 62, 'offset_64': 56, 'duration_64': 7},
|
use absolute path for test midi filename
|
py
|
diff --git a/labsuite/compilers/pfusx.py b/labsuite/compilers/pfusx.py
index <HASH>..<HASH> 100644
--- a/labsuite/compilers/pfusx.py
+++ b/labsuite/compilers/pfusx.py
@@ -19,6 +19,7 @@ Output is a JSON file which represents a protocol that can run on any
OT-One machine.
"""
+import sys
import os
import re
import json
@@ -296,7 +297,7 @@ def compile(*sequences, output=None):
Takes a list of sequence arguments (RVD or DNA) and outputs a generated
protocol to make plasmids targetting those sequences.
"""
-
+
# Limit right now is the number of tips in the static deck map we're
# using for this protocol.
if len(sequences) > 15:
@@ -328,7 +329,7 @@ def compile(*sequences, output=None):
# Nicely formatted well map for the description.
output_map = []
for well in sorted(well_map):
- output_map.append("{}: {}".format(well_map[well]))
+ output_map.append("{}: {}".format(well, well_map[well]))
# Take our three transfer groups and make them into a consolidated
# transfer list.
|
pFusX: Fix for tuple index out of range in well formatting. Nosetests didn't help here because there's no full compiler test...
|
py
|
diff --git a/ansible/modules/hashivault/hashivault_write.py b/ansible/modules/hashivault/hashivault_write.py
index <HASH>..<HASH> 100755
--- a/ansible/modules/hashivault/hashivault_write.py
+++ b/ansible/modules/hashivault/hashivault_write.py
@@ -184,7 +184,7 @@ def hashivault_write(module):
return result
if not read_data:
read_data = {}
- read_data = read_data.get('data', {})
+ read_data = read_data['data']['data']
write_data = dict(read_data)
write_data.update(data)
|
fix key/values should be readed in `.data.data` rather than `.data` in kv2 when `update:yes` data is not correctly writed due to a wrong reading path. key/values are stored in ``` { data: { data: - key1: value1 - ... } } ``` not in ``` { data: - key1: value1 - ... } ```
|
py
|
diff --git a/examples/fullTree.py b/examples/fullTree.py
index <HASH>..<HASH> 100644
--- a/examples/fullTree.py
+++ b/examples/fullTree.py
@@ -22,12 +22,16 @@ from __future__ import print_function
from scoop import futures
def func0(n):
+ # Task submission is asynchronous; It will return immediately.
task = futures.submit(func1, n)
- result = futures.join(task)
+ # The call blocks here until it gets the result
+ result = task.result()
return result
def func1(n):
+ # This call result in a generator function
result = futures.map(func2, [i+1 for i in range(0,n)])
+ # The results gets evaluated here when they are accessed here
return sum(result)
def func2(n):
@@ -35,7 +39,8 @@ def func2(n):
return sum(result)
def func3(n):
- result = futures.map(func4, [i+1 for i in range(0,n)])
+ # To force an immediate evaluation, you can wrap your map in a list such as:
+ result = list(futures.map(func4, [i+1 for i in range(0,n)]))
return sum(result)
def func4(n):
@@ -44,7 +49,9 @@ def func4(n):
def main():
task = futures.submit(func0, 20)
- result = futures.join(task)
+ # You can wait for a result before continuing computing
+ futures.wait([task], return_when=futures.ALL_COMPLETED)
+ result = task.result()
print(result)
return result
|
Added comments and compliance to new API to fullTree.py
|
py
|
diff --git a/werkzeug/_reloader.py b/werkzeug/_reloader.py
index <HASH>..<HASH> 100644
--- a/werkzeug/_reloader.py
+++ b/werkzeug/_reloader.py
@@ -40,11 +40,16 @@ def _iter_module_files():
entered.add(path_entry)
try:
for filename in os.listdir(path_entry):
- if not filename.endswith(('.py', '.pyc', '.pyo')):
- continue
- filename = _verify_file(os.path.join(path_entry, filename))
- if filename:
- yield filename
+ path = os.path.join(path_entry, filename)
+ if os.path.isdir(path):
+ for filename in _recursive_walk(path):
+ yield filename
+ else:
+ if not filename.endswith(('.py', '.pyc', '.pyo')):
+ continue
+ filename = _verify_file(path)
+ if filename:
+ yield filename
except OSError:
pass
|
Make _recursive_walk actually recursive
|
py
|
diff --git a/master/buildbot/process/logobserver.py b/master/buildbot/process/logobserver.py
index <HASH>..<HASH> 100644
--- a/master/buildbot/process/logobserver.py
+++ b/master/buildbot/process/logobserver.py
@@ -155,16 +155,12 @@ class BufferLogObserver(LogObserver):
self.stderr.append(data)
def _get(self, chunks):
- if chunks is None:
- return [u'']
- if len(chunks) > 1:
- chunks = [''.join(chunks)]
- elif not chunks:
- chunks = [u'']
- return chunks
+ if chunks is None or not chunks:
+ return u''
+ return u''.join(chunks)
def getStdout(self):
- return self._get(self.stdout)[0]
+ return self._get(self.stdout)
def getStderr(self):
- return self._get(self.stderr)[0]
+ return self._get(self.stderr)
|
Fix trac ticket <I> The unicode output in a log is lost & is treated as an ASCII string when there are multiple chunks. This can cause exceptions when mixing strings which will validate that the contents of an "ascii" string are ascii.
|
py
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.