diff
stringlengths 139
3.65k
| message
stringlengths 8
627
| diff_languages
stringclasses 1
value |
|---|---|---|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -86,7 +86,7 @@ setup(
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
- 'License :: OSI Approved :: BSD License',
+ 'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
@@ -96,6 +96,7 @@ setup(
'Programming Language :: Python :: 3.5',
'Topic :: Scientific/Engineering :: Mathematics',
'Topic :: Software Development :: Libraries :: Python Modules',
+ 'Topic :: Utilities',
],
)
\ No newline at end of file
|
correct license in setup.py to MIT
|
py
|
diff --git a/chartpress.py b/chartpress.py
index <HASH>..<HASH> 100755
--- a/chartpress.py
+++ b/chartpress.py
@@ -70,7 +70,7 @@ def build_images(prefix, images, tag=None, commit_range=None, push=False):
if tag is None:
image_tag = last_commit
image_name = prefix + name
- image_spec = '{}:{}'.format(image_name, tag)
+ image_spec = '{}:{}'.format(image_name, image_tag)
value_modifications[options['valuesPath']] = {
'name': image_name,
'tag': image_tag,
|
fix one more s/tag/image_tag/
|
py
|
diff --git a/lib/dep_types/article_metadata.py b/lib/dep_types/article_metadata.py
index <HASH>..<HASH> 100644
--- a/lib/dep_types/article_metadata.py
+++ b/lib/dep_types/article_metadata.py
@@ -33,8 +33,6 @@ plural = "Articles"
group = "Articles & Preprints"
wf = [authorize_user(), \
render_form(ArticleForm),
- wait_for_submission(),
- render_form(PhotoForm),
wait_for_submission()]
# form = get_metadata_creation_form_from_doctype(doc_type) # # This will use BibField to create a simple form which is the concatenation of all the fields neeeded for doc_type "Article"
|
WebDeposit: fix for article demo deposition * Article demo deposition is not calling photo form anymore.
|
py
|
diff --git a/peri/opt/optimize.py b/peri/opt/optimize.py
index <HASH>..<HASH> 100644
--- a/peri/opt/optimize.py
+++ b/peri/opt/optimize.py
@@ -95,7 +95,7 @@ def get_rand_Japprox(s, params, num_inds=1000, include_cost=False, **kwargs):
if num_inds < tot_pix:
inds = np.random.choice(tot_pix, size=num_inds, replace=False)
slicer = None
- return_inds = inds
+ return_inds = np.sort(inds)
else:
inds = None
return_inds = slice(0, None)
|
opt.get_rand_Japprox: sorting inds for very small speed increase.
|
py
|
diff --git a/libsubmit/channels/ssh/ssh.py b/libsubmit/channels/ssh/ssh.py
index <HASH>..<HASH> 100644
--- a/libsubmit/channels/ssh/ssh.py
+++ b/libsubmit/channels/ssh/ssh.py
@@ -131,7 +131,7 @@ class SshChannel ():
remote_dest = remote_dir + '/' + os.path.basename(local_source)
try:
- self.sftp_client.mkdir(remote_dir)
+ self.sftp_client.mkdir(remote_dir)
except IOError as e:
if e.errno == 2:
raise BadScriptPath(e, self.hostname)
@@ -147,6 +147,8 @@ class SshChannel ():
try:
s = self.sftp_client.put(local_source, remote_dest, confirm=True)
+ # Set perm because some systems require the script to be executable
+ s = self.sftp_client.chmod(remote_dest, 0o777)
status = True
except Exception as e:
logger.error("File push failed")
|
Setting remote file perms to <I>
|
py
|
diff --git a/theanets/activations.py b/theanets/activations.py
index <HASH>..<HASH> 100644
--- a/theanets/activations.py
+++ b/theanets/activations.py
@@ -187,6 +187,8 @@ class Prelu(Activation):
f(x) = \left\{ \begin{eqnarray*} rx &\qquad& \mbox{if } x < 0 \\
x &\qquad& \mbox{otherwise} \end{eqnarray*} \right.
+ This activation allocates a separate leak rate for each unit in its layer.
+
References
----------
K He, X Zhang, S Ren, J Sun (2015), "Delving Deep into Rectifiers:
@@ -219,6 +221,9 @@ class LGrelu(Activation):
.. math::
f(x) = \left\{ \begin{eqnarray*} rx &\qquad& \mbox{if } x < 0 \\
gx &\qquad& \mbox{otherwise} \end{eqnarray*} \right.
+
+ This activation allocates a separate leak and gain rate for each unit in its
+ layer.
'''
__extra_registration_keys__ = ['leaky-gain-relu']
|
Clarify number of Prelu and LGrelu parameters.
|
py
|
diff --git a/tests/config_test.py b/tests/config_test.py
index <HASH>..<HASH> 100644
--- a/tests/config_test.py
+++ b/tests/config_test.py
@@ -453,6 +453,14 @@ class TestMTimeComparator(object):
assert not comparator.has_changed()
assert comparator.has_changed()
+ @mock.patch('staticconf.config.os.path.getmtime', autospec=True, side_effect=[1, 2, 1])
+ def test_change_when_newer_time_before_older_time(self, mock_mtime):
+ comparator = config.MTimeComparator(['./one.file'])
+ # 1 -> 2
+ assert comparator.has_changed()
+ # 2 -> 1 (can happen as a result of a revert)
+ assert comparator.has_changed()
+
class TestMTimeComparatorWithCompareFunc(object):
|
Added test for the case I'm fixing
|
py
|
diff --git a/src/you_get/common.py b/src/you_get/common.py
index <HASH>..<HASH> 100755
--- a/src/you_get/common.py
+++ b/src/you_get/common.py
@@ -1317,7 +1317,7 @@ def load_cookies(cookiefile):
cookies = cookiejar.MozillaCookieJar()
now = time.time()
ignore_discard, ignore_expires = False, False
- with open(cookiefile, 'r') as f:
+ with open(cookiefile, 'r', encoding='utf-8') as f:
for line in f:
# last field may be absent, so keep any trailing tab
if line.endswith("\n"): line = line[:-1]
|
modify encoding with open cookies file
|
py
|
diff --git a/pyecore/ecore.py b/pyecore/ecore.py
index <HASH>..<HASH> 100644
--- a/pyecore/ecore.py
+++ b/pyecore/ecore.py
@@ -365,7 +365,7 @@ class EOperation(ETypedElement):
def normalized_name(self):
name = self.name
if keyword.iskeyword(name):
- name = '_' + name
+ name = name + '_'
return name
def to_code(self):
|
Change EOperation name normalization The previous normalization was avoind keywords by adding a '_' as first character. This is not a good practice, instead a '_' is added at the end of the word.
|
py
|
diff --git a/tests/regression/utils/tools.py b/tests/regression/utils/tools.py
index <HASH>..<HASH> 100755
--- a/tests/regression/utils/tools.py
+++ b/tests/regression/utils/tools.py
@@ -3,6 +3,7 @@ from datetime import date, datetime
import stdnet
from stdnet import test, odm
+from stdnet.utils.version import get_git_changeset
from stdnet.utils import encoders, to_bytes, to_string
from stdnet.utils import date2timestamp, timestamp2date,\
addmul_number_dicts, grouper,\
@@ -103,4 +104,7 @@ class testFunctions(test.TestCase):
l = u.encode('latin')
self.assertEqual(to_bytes(b,'latin'),l)
self.assertEqual(to_string(l,'latin'),u)
-
\ No newline at end of file
+
+ def test_git_version(self):
+ g = get_git_changeset()
+ self.assertTrue(g)
\ No newline at end of file
|
added test for get_git_changeset
|
py
|
diff --git a/tests/comms/uds_server_test.py b/tests/comms/uds_server_test.py
index <HASH>..<HASH> 100755
--- a/tests/comms/uds_server_test.py
+++ b/tests/comms/uds_server_test.py
@@ -39,7 +39,7 @@ try:
message = server.wait_for_request()
print('request: %s' % message)
- server.respond('response: %s' % message)
+ server.respond(message)
except KeyboardInterrupt:
print()
|
Added support for PMx inference server
|
py
|
diff --git a/virtualbox/library_ext/keyboard.py b/virtualbox/library_ext/keyboard.py
index <HASH>..<HASH> 100644
--- a/virtualbox/library_ext/keyboard.py
+++ b/virtualbox/library_ext/keyboard.py
@@ -15,7 +15,7 @@ SCANCODES = {
'4': [[0x05], [0x85]], '$': [[0x2A, 0x05], [0x85, 0xAA]],
'5': [[0x06], [0x86]], '%': [[0x2A, 0x06], [0x86, 0xAA]],
'6': [[0x07], [0x87]], '^': [[0x2A, 0x07], [0x87, 0xAA]],
- '7': [[0x08], [0x87]], '&': [[0x2A, 0x07], [0x87, 0xAA]],
+ '7': [[0x08], [0x88]], '&': [[0x2A, 0x07], [0x87, 0xAA]],
'8': [[0x09], [0x89]], '*': [[0x2A, 0x09], [0x89, 0xAA]],
'9': [[0x0A], [0x8A]], '(': [[0x2A, 0x0A], [0x8A, 0xAA]],
'0': [[0x0B], [0x8B]], ')': [[0x2A, 0x0B], [0x8B, 0xAA]],
|
Fix scancode release code for the touch 7 issue #<I>
|
py
|
diff --git a/chemlab/graphics/__init__.py b/chemlab/graphics/__init__.py
index <HASH>..<HASH> 100644
--- a/chemlab/graphics/__init__.py
+++ b/chemlab/graphics/__init__.py
@@ -5,12 +5,15 @@ from .renderers import AtomRenderer, BoxRenderer, BallAndStickRenderer
from .uis import TextUI
import numpy as np
+from .postprocessing import FXAAEffect
def display_molecule(mol, style='ball-and-stick'):
'''Display the molecule *mol* with the default viewer.
'''
v = QtViewer()
+
+
if style == 'ball-and-stick':
bs = v.add_renderer(BallAndStickRenderer,
mol.r_array,
@@ -31,6 +34,7 @@ def display_system(sys):
'''
+
v = QtViewer()
sr = v.add_renderer(AtomRenderer, sys.r_array, sys.type_array,
backend='impostors')
@@ -71,6 +75,7 @@ def display_trajectory(sys, times, coords_list, style='spheres'):
'''
v = QtTrajectoryViewer()
+ v.widget.post_processing = FXAAEffect(v.widget)
if style == 'spheres':
backend = 'impostors'
|
Added fxaa to the default traj player
|
py
|
diff --git a/pythran/optimizations.py b/pythran/optimizations.py
index <HASH>..<HASH> 100644
--- a/pythran/optimizations.py
+++ b/pythran/optimizations.py
@@ -130,7 +130,7 @@ class GenExpToImap(Transformation):
return gen.iter
else:
ldFilter = ast.Lambda(
- ast.arguments([ast.Name(gen.target.id, ast.Store())],
+ ast.arguments([ast.Name(gen.target.id, ast.Param())],
None, None, []), ast.BoolOp(ast.And(), gen.ifs))
ifilterName = ast.Attribute(
value=ast.Name(id='itertools', ctx=ast.Load()),
|
Fix ifilter optimisation * Arguments in function have Param context instead of Store
|
py
|
diff --git a/microcosm_logging/factories.py b/microcosm_logging/factories.py
index <HASH>..<HASH> 100644
--- a/microcosm_logging/factories.py
+++ b/microcosm_logging/factories.py
@@ -29,7 +29,7 @@ from microcosm.api import defaults
debug=[],
info=["boto", "newrelic"],
warn=["bravado_core", "requests", "botocore.vendored.requests", "swagger_spec_validator"],
- error=["bravado.requests_client"],
+ error=["bravado.requests_client", "FuturesSession"],
),
override=dict(
debug=[],
|
Add another logging protection for requests-futures' FuturesSession (#<I>) The latest release of requests-futures ([<I>](<URL>)) causes errors when used with the loggly client. Loggly calls the futures handler in such a way that causes it to emit a deprecation warning, which then needs to be logged, which which is then handled, which emits the deprecation, and on and on.
|
py
|
diff --git a/mot/lib/kernel_data.py b/mot/lib/kernel_data.py
index <HASH>..<HASH> 100644
--- a/mot/lib/kernel_data.py
+++ b/mot/lib/kernel_data.py
@@ -583,7 +583,11 @@ class LocalMemory(KernelData):
return ['local {}* restrict {}'.format(self._ctype, kernel_param_name)]
def get_kernel_inputs(self, cl_context, workgroup_size):
- itemsize = np.dtype(ctype_to_dtype(self._ctype, dtype_to_ctype(self._mot_float_dtype))).itemsize
+ mot_float_type_dtype = None
+ if self._mot_float_dtype:
+ mot_float_type_dtype = dtype_to_ctype(self._mot_float_dtype)
+
+ itemsize = np.dtype(ctype_to_dtype(self._ctype, mot_float_type_dtype)).itemsize
return [cl.LocalMemory(itemsize * self._size_func(workgroup_size))]
def get_nmr_kernel_inputs(self):
|
Adds fix to local memory when mot_float_dtype was not defined.
|
py
|
diff --git a/livelossplot/generic_plot.py b/livelossplot/generic_plot.py
index <HASH>..<HASH> 100644
--- a/livelossplot/generic_plot.py
+++ b/livelossplot/generic_plot.py
@@ -13,9 +13,8 @@ class PlotLosses():
max_epoch=None, metric2title={}, validation_fmt="val_{}", plot_extrema=True):
self.figsize = figsize
self.cell_size = cell_size
- self.dynamic_x_axis = dynamic_x_axis
self.max_cols = max_cols
- self.max_epoch = max_epoch
+ self.max_epoch = max_epoch if not dynamic_x_axis else None
self.metric2title = metric2title
self.validation_fmt = validation_fmt
self.logs = None
|
Bring the dynamic_x_axis back
|
py
|
diff --git a/salt/states/apache.py b/salt/states/apache.py
index <HASH>..<HASH> 100644
--- a/salt/states/apache.py
+++ b/salt/states/apache.py
@@ -13,17 +13,17 @@ the above word between angle brackets (<>).
.. code-block:: yaml
/etc/httpd/conf.d/website.com.conf:
- apache.config:
+ apache.configfile:
- config:
- VirtualHost:
this: '*:80'
ServerName:
- -website.com
+ - website.com
ServerAlias:
- www.website.com
- dev.website.com
ErrorLog: logs/website.com-error_log
- CustomLog: logs/website.com-access_log combinded
+ CustomLog: logs/website.com-access_log combined
DocumentRoot: /var/www/vhosts/website.com
Directory:
this: /var/www/vhosts/website.com
|
Clean up apache.state example usage.
|
py
|
diff --git a/pyfnnd/_tridiag_solvers.py b/pyfnnd/_tridiag_solvers.py
index <HASH>..<HASH> 100644
--- a/pyfnnd/_tridiag_solvers.py
+++ b/pyfnnd/_tridiag_solvers.py
@@ -5,16 +5,21 @@ from ctypes.util import find_library
from numpy.ctypeslib import ndpointer
# try and find a LAPACK shared library
+dgtsv, sgtsv = None, None
for name in ('openblas', 'lapack'):
libname = find_library(name)
if libname:
- break
-if libname is None:
+ lapack_lib = ctypes.cdll.LoadLibrary(libname)
+ try:
+ dgtsv = lapack_lib.dgtsv_
+ sgtsv = lapack_lib.sgtsv_
+ break
+ except AttributeError:
+ # occurs if the library doesn't define the necessary symbols
+ continue
+if None in (dgtsv, sgtsv):
raise EnvironmentError('Could not locate a LAPACK shared library', 2)
-lapack_lib = ctypes.cdll.LoadLibrary(libname)
-dgtsv = lapack_lib.dgtsv_
-sgtsv = lapack_lib.sgtsv_
# pointer ctypes
_c_int_p = ctypes.POINTER(ctypes.c_int)
|
handle cases where libopenblas does not define dgtsv_ or sgtsv_
|
py
|
diff --git a/tests/test_pytest_cov.py b/tests/test_pytest_cov.py
index <HASH>..<HASH> 100644
--- a/tests/test_pytest_cov.py
+++ b/tests/test_pytest_cov.py
@@ -981,7 +981,7 @@ def test_invalid_coverage_source(testdir):
'*10 passed*'
])
result.stderr.fnmatch_lines([
- 'Coverage.py warning: No data was collected.*'
+ '*No data was collected.*'
])
result.stdout.fnmatch_lines([
'*Failed to generate report: No data to report.',
|
Update test_invalid_coverage_source for coverage-<I> Update test_invalid_coverage_source to make the "No data was collected" less strict, as the output has changed in coverage-<I>. This solution was suggested by Tom Callaway (@spotrh) on the linked bug. Fixes #<I>
|
py
|
diff --git a/pytablewriter/writer/_table_writer.py b/pytablewriter/writer/_table_writer.py
index <HASH>..<HASH> 100644
--- a/pytablewriter/writer/_table_writer.py
+++ b/pytablewriter/writer/_table_writer.py
@@ -328,8 +328,7 @@ class AbstractTableWriter(TableWriterInterface):
import pytablereader as ptr
- loader = ptr.CsvTableTextLoader(csv_source)
- loader.quoting_flags = self._quoting_flags
+ loader = ptr.CsvTableTextLoader(csv_source, quoting_flags=self._quoting_flags)
try:
for table_data in loader.load():
self.from_tabledata(table_data, is_overwrite_table_name=False)
@@ -337,8 +336,7 @@ class AbstractTableWriter(TableWriterInterface):
except ptr.InvalidDataError:
pass
- loader = ptr.CsvTableFileLoader(csv_source)
- loader.quoting_flags = self._quoting_flags
+ loader = ptr.CsvTableFileLoader(csv_source, quoting_flags=self._quoting_flags)
for table_data in loader.load():
self.from_tabledata(table_data)
|
Modify to passing quoting flags to loader constructor
|
py
|
diff --git a/Lib/glyphs2ufo/torf.py b/Lib/glyphs2ufo/torf.py
index <HASH>..<HASH> 100644
--- a/Lib/glyphs2ufo/torf.py
+++ b/Lib/glyphs2ufo/torf.py
@@ -427,9 +427,8 @@ def get_weight_code(style_name):
'Black': 900
}.get(style_name, None)
if not weight_code:
- print('WARNING: Unrecognized style name "%s"' % style_name,
- file=sys.stderr)
- weight_code = 400
+ warn('Unrecognized style name "%s"' % style_name)
+ weight_code = 400
return weight_code
@@ -662,4 +661,4 @@ def add_features_to_rfont(rfont, feature_prefixes, classes, features):
def warn(message):
- print(message)
+ print('WARNING: ' + message, file=sys.stderr)
|
Fixes for review - use warn fn, indent.
|
py
|
diff --git a/gcloud/datastore/test_dataset.py b/gcloud/datastore/test_dataset.py
index <HASH>..<HASH> 100644
--- a/gcloud/datastore/test_dataset.py
+++ b/gcloud/datastore/test_dataset.py
@@ -151,6 +151,26 @@ class TestDataset(unittest2.TestCase):
self.assertEqual(list(result), ['foo'])
self.assertEqual(result['foo'], 'Foo')
+ def test_get_entity_odd_nonetype(self):
+ from gcloud.datastore.connection import datastore_pb
+ DATASET_ID = 'DATASET'
+ KIND = 'Kind'
+ ID = 1234
+ entity_pb = datastore_pb.Entity()
+ entity_pb.key.partition_id.dataset_id = DATASET_ID
+ path_element = entity_pb.key.path_element.add()
+ path_element.kind = KIND
+ path_element.id = ID
+ prop = entity_pb.property.add()
+ prop.name = 'foo'
+ prop.value.string_value = 'Foo'
+ connection = _Connection(entity_pb)
+ dataset = self._makeOne(DATASET_ID, connection)
+ with self.assertRaises(ValueError):
+ dataset.get_entity([KIND])
+ with self.assertRaises(TypeError):
+ dataset.get_entity(None)
+
class _Connection(object):
_called_with = None
|
add a test for odd list and nonetype
|
py
|
diff --git a/jira/client.py b/jira/client.py
index <HASH>..<HASH> 100644
--- a/jira/client.py
+++ b/jira/client.py
@@ -2692,9 +2692,17 @@ class JIRA(object):
:param sprint_id: the sprint to add issues to
:param issue_keys: the issues to add to the sprint
"""
+
+ # Get the customFieldId for "Sprint"
+ jira_fields_url = self._get_url('field', base=self.JIRA_BASE_URL)
+ jira_fields = self._session.get(jira_fields_url).json()
+ sprint_field_name = "Sprint"
+ sprint_field_id = [f['schema']['customId'] for f in fields
+ if f['name'] == sprint_field_name][0]
+
data = {}
data['idOrKeys'] = issue_keys
- data['customFieldId'] = 10004
+ data['customFieldId'] = sprint_field_id
data['sprintId'] = sprint_id
data['addToBacklog'] = false
url = self._get_url('sprint/rank', base=self.AGILE_BASE_URL)
|
Perform dynamic lookup of custom field id for Sprint field
|
py
|
diff --git a/tests/integration/__init__.py b/tests/integration/__init__.py
index <HASH>..<HASH> 100644
--- a/tests/integration/__init__.py
+++ b/tests/integration/__init__.py
@@ -48,6 +48,7 @@ import salt.minion
import salt.runner
import salt.output
import salt.version
+import salt.utils
from salt.utils import fopen, get_colors
from salt.utils.verify import verify_env
@@ -77,6 +78,29 @@ KNOWN_BINARY_NAMES = {
log = logging.getLogger(__name__)
+def skip_if_binaries_missing(binaries, check_all=False):
+ # While there's no new release of salt-testing
+ if sys.version_info < (2, 7):
+ from unittest2 import _id, skip
+ else:
+ from unittest import _id, skip
+
+ if check_all:
+ for binary in binaries:
+ if salt.utils.which(binary) is None:
+ return skip(
+ 'The {0!r} binary was not found'
+ )
+
+ if salt.utils.which_bin(binaries) is None:
+ return skip(
+ 'None of the following binaries was found: {0}'.format(
+ ', '.join(binaries)
+ )
+ )
+ return _id
+
+
def run_tests(*test_cases, **kwargs):
'''
Run integration tests for the chosen test cases.
|
Add helper function while there's no new release of salt-testing.
|
py
|
diff --git a/cocaine/testing/mocks.py b/cocaine/testing/mocks.py
index <HASH>..<HASH> 100644
--- a/cocaine/testing/mocks.py
+++ b/cocaine/testing/mocks.py
@@ -259,12 +259,14 @@ class SocketServerMock(object):
self.server.listen(port)
def stop(self):
+ self.connections = []
self.server.stop()
def on_connect(self, action):
self.actions['connected'] = action
def _handle_stream(self, stream, address):
+ log.debug('accepted connection from %s', address)
self.actions['connected']()
self.connections[address] = stream
|
Properly clear connections when mock server is stopped.
|
py
|
diff --git a/python_modules/libraries/dagster-fivetran/dagster_fivetran/utils.py b/python_modules/libraries/dagster-fivetran/dagster_fivetran/utils.py
index <HASH>..<HASH> 100644
--- a/python_modules/libraries/dagster-fivetran/dagster_fivetran/utils.py
+++ b/python_modules/libraries/dagster-fivetran/dagster_fivetran/utils.py
@@ -1,4 +1,4 @@
-from typing import Any, Dict, List, Optional
+from typing import Any, Dict, Iterator, List, Optional
from dagster_fivetran.types import FivetranOutput
@@ -40,7 +40,9 @@ def _table_data_to_materialization(
)
-def generate_materializations(fivetran_output: FivetranOutput, asset_key_prefix: List[str]):
+def generate_materializations(
+ fivetran_output: FivetranOutput, asset_key_prefix: List[str]
+) -> Iterator[AssetMaterialization]:
for schema in fivetran_output.schema_config["schemas"].values():
schema_name = schema["name_in_destination"]
schema_prefix = fivetran_output.connector_details.get("config", {}).get("schema_prefix")
|
finish dagster-fivetran types (#<I>)
|
py
|
diff --git a/traffic/data/adsb/opensky.py b/traffic/data/adsb/opensky.py
index <HASH>..<HASH> 100644
--- a/traffic/data/adsb/opensky.py
+++ b/traffic/data/adsb/opensky.py
@@ -181,7 +181,7 @@ class OpenSky(Impala):
except AttributeError:
west, south, east, north = bounds
- what += f"&lamin={south}&lamax={north}&lomin={west}&lomax={east}"
+ what += f"?lamin={south}&lamax={north}&lomin={west}&lomax={east}"
c = requests.get(
f"https://opensky-network.org/api/states/{what}", auth=self.auth
|
Enables bounding box to be applied to live traffic (#4) When requesting a bounding box using: `sv = opensky.api_states(bounds=(lon_min,lat_min,lon_max,lat_max))` Python will throw an error, as the bounding box is passed incorrectly. According to opensky documentation the lamin,lomin, etc variables should be passed with a '?' at the start, not with an &. This pull request updates this line: <URL>
|
py
|
diff --git a/cherrypy/lib/filter/sessionfilter.py b/cherrypy/lib/filter/sessionfilter.py
index <HASH>..<HASH> 100644
--- a/cherrypy/lib/filter/sessionfilter.py
+++ b/cherrypy/lib/filter/sessionfilter.py
@@ -190,7 +190,7 @@ class SessionFilter(basefilter.BaseFilter):
self._clean()
sess = cherrypy.request._session
- if not sess.sessionStorage:
+ if not getattr(sess, 'sessionStorage', None):
# Sessions are not enabled: do nothing
return
|
When using generators and HTTPRedirect, beforeFinalize can get called twice ... Making the sessionfilter resilient to that.
|
py
|
diff --git a/flask_security/utils.py b/flask_security/utils.py
index <HASH>..<HASH> 100644
--- a/flask_security/utils.py
+++ b/flask_security/utils.py
@@ -206,9 +206,7 @@ def config_value(key, app=None, default=None):
def get_max_age(key, app=None):
- now = datetime.utcnow()
- expires = now + get_within_delta(key + '_WITHIN', app)
- td = (expires - now)
+ td = get_within_delta(key + '_WITHIN', app)
return (td.microseconds + (td.seconds + td.days * 24 * 3600) * 1e6) / 1e6
|
removed unnecessary utcnow call is this code required here?
|
py
|
diff --git a/indra/literature/pmc_client.py b/indra/literature/pmc_client.py
index <HASH>..<HASH> 100644
--- a/indra/literature/pmc_client.py
+++ b/indra/literature/pmc_client.py
@@ -400,6 +400,7 @@ def _retain_only_pars(tree):
for element in tree.getiterator():
if element.tag == 'title':
element.tag = 'p'
+ for element in tree.getiterator():
parent = element.getparent()
if parent is not None and element.tag != 'p':
etree.strip_tags(element.getparent(), element.tag)
|
Fix bug causing some title tags to be lost
|
py
|
diff --git a/fireplace/cards/league/collectible.py b/fireplace/cards/league/collectible.py
index <HASH>..<HASH> 100644
--- a/fireplace/cards/league/collectible.py
+++ b/fireplace/cards/league/collectible.py
@@ -104,7 +104,7 @@ class LOE_050:
# Jungle Moonkin
class LOE_051:
- update = Refresh(ALL_PLAYERS, {GameTag.SPELLPOWER: +2})
+ update = Refresh(OPPONENT, {GameTag.SPELLPOWER: +2})
# Djinni of Zephyrs
|
Fix Jungle Moonkin to match the <I> spec In build <I>, Jungle Moonkin now has <I> Spell Damage for itself (for the controller) and only refreshes SPELLPOWER on the opponent. This fixes the issue with Master of Ceremonies not working with Jungle Moonkin.
|
py
|
diff --git a/tornado/__init__.py b/tornado/__init__.py
index <HASH>..<HASH> 100644
--- a/tornado/__init__.py
+++ b/tornado/__init__.py
@@ -22,5 +22,5 @@
# is zero for an official release, positive for a development branch,
# or negative for a release candidate or beta (after the base version
# number has been incremented)
-version = "6.0.dev1"
-version_info = (6, 0, 0, -100)
+version = "6.0a1"
+version_info = (6, 0, 0, -99)
|
init: set version to <I>a1
|
py
|
diff --git a/imagesize.py b/imagesize.py
index <HASH>..<HASH> 100644
--- a/imagesize.py
+++ b/imagesize.py
@@ -1,3 +1,4 @@
+import os
import re
import struct
from xml.etree import ElementTree
@@ -100,17 +101,19 @@ def _getNetpbm(fhandle, is_binary):
if not next_chr.isdigit():
raise ValueError("Invalid character found on {} file.".format(ftype))
- size = b""
+ size = next_chr
+ next_chr = fhandle.read(1)
while next_chr.isdigit():
size += next_chr
next_chr = fhandle.read(1)
- if size != "":
- sizes.append(int(size))
+ sizes.append(int(size))
+
+ if len(sizes) == 2:
+ break
- if len(sizes) == 2:
- break
+ fhandle.seek(-1, os.SEEK_CUR)
return sizes
|
Fix handling of invalid char after width on Netpbm
|
py
|
diff --git a/examples/disassemble.py b/examples/disassemble.py
index <HASH>..<HASH> 100644
--- a/examples/disassemble.py
+++ b/examples/disassemble.py
@@ -59,7 +59,10 @@ codebuf = ctypes.create_string_buffer(length)
ctypes.memmove(codebuf, ctypes.c_char_p(incr.address.ptr), length)
print("Compiled %d bytes starting at 0x%x" % (length, incr.address))
+def hexbytes(b):
+ return "".join(map(lambda x: hex(x)[2:] + " ", b))
+
# Capstone is smart enough to stop at the first RET-like instruction.
md = capstone.Cs(capstone.CS_ARCH_X86, capstone.CS_MODE_64)
for i in md.disasm(codebuf, incr.address.ptr):
- print("0x%x %s %s" % (i.address, i.mnemonic, i.op_str))
+ print("0x%x %-15s%s %s" % (i.address, hexbytes(i.bytes), i.mnemonic, i.op_str))
|
Adds hex bytes in disassembly example
|
py
|
diff --git a/redis_metrics/__init__.py b/redis_metrics/__init__.py
index <HASH>..<HASH> 100644
--- a/redis_metrics/__init__.py
+++ b/redis_metrics/__init__.py
@@ -1,4 +1,4 @@
-__version__ = "0.7.2"
+__version__ = "0.8.0a"
try:
from .utils import gauge, metric # NOQA
|
gearing up for a new release
|
py
|
diff --git a/tests/conftest.py b/tests/conftest.py
index <HASH>..<HASH> 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -3,12 +3,13 @@ from git import Repo
from git.exc import GitCommandNotFound
from dvc.repo import Repo as DvcRepo
-from .basic_env import TestDirFixture, logger
+from .basic_env import TestDirFixture
@pytest.fixture(autouse=True)
-def debug():
- logger.setLevel("DEBUG")
+def debug(caplog):
+ with caplog.at_level("DEBUG", logger="dvc"):
+ yield
# Wrap class like fixture as pytest-like one to avoid code duplication
|
test: use caplog.at_level() to restore log level after test
|
py
|
diff --git a/test/test_collection.py b/test/test_collection.py
index <HASH>..<HASH> 100644
--- a/test/test_collection.py
+++ b/test/test_collection.py
@@ -452,7 +452,6 @@ class TestCollection(unittest.TestCase):
def test_safe_insert(self):
db = self.db
db.drop_collection("test")
- db.test.create_index("_id", ASCENDING)
a = {"hello": "world"}
db.test.insert(a)
@@ -498,7 +497,17 @@ class TestCollection(unittest.TestCase):
self.assertRaises(OperationFailure, db.test.update,
{}, {"$inc": {"x": 1}}, safe=True)
- # TODO test safe save?
+ def test_safe_save(self):
+ db = self.db
+ db.drop_collection("test")
+ db.test.create_index("hello", ASCENDING, unique=True)
+
+ db.test.save({"hello": "world"})
+ db.test.save({"hello": "world"})
+ self.assert_("E11000" in db.error()["err"])
+
+ self.assertRaises(OperationFailure, db.test.save, {"hello": "world"}, safe=True)
+
def test_count(self):
db = self.db
db.drop_collection("test")
|
minor: test for safe save that already passes
|
py
|
diff --git a/astrobase/varbase/lcfit.py b/astrobase/varbase/lcfit.py
index <HASH>..<HASH> 100644
--- a/astrobase/varbase/lcfit.py
+++ b/astrobase/varbase/lcfit.py
@@ -432,7 +432,6 @@ def fourier_fit_magseries(times, mags, errs, period,
fitmagminind = npwhere(fitmags == npmin(fitmags))
if len(fitmagminind[0]) > 1:
fitmagminind = (fitmagminind[0][0],)
- magseriesepoch = ptimes[fitmagminind]
# assemble the returndict
returndict = {
@@ -443,7 +442,7 @@ def fourier_fit_magseries(times, mags, errs, period,
'initialfit':initialfit,
'leastsqfit':leastsqfit,
'fitmags':fitmags,
- 'fitepoch':magseriesepoch
+ 'fitepoch':mintime
},
'fitchisq':fitchisq,
'fitredchisq':fitredchisq,
@@ -461,7 +460,7 @@ def fourier_fit_magseries(times, mags, errs, period,
if plotfit and isinstance(plotfit, str):
_make_fit_plot(phase, pmags, perrs, fitmags,
- period, mintime, magseriesepoch,
+ period, mintime, mintime,
plotfit,
magsarefluxes=magsarefluxes)
|
Update fit epoch for fourier_fit_magseries This might not be the only place where the epoch needs to be updated, I'll leave that up to you Waqas.
|
py
|
diff --git a/pandas/tools/rplot.py b/pandas/tools/rplot.py
index <HASH>..<HASH> 100644
--- a/pandas/tools/rplot.py
+++ b/pandas/tools/rplot.py
@@ -416,10 +416,34 @@ class TrellisGrid(Layer):
layers.append(trellised)
rplot.layers = layers
+def merge_aes(layer1, layer2):
+ """Merges the aesthetics dictionaries for the two layers.
+ Look up sequence_layers function. Which layer is first and which
+ one is second is important.
+
+ Parameters:
+ -----------
+ layer1: Layer object
+ layer2: Layer object
+ """
+ for key in layer2.keys():
+ if layer2[key] is None:
+ layer2[key] = layer1[key]
+
def sequence_layers(layers):
"""Go through the list of layers and fill in the missing bits of information.
+ The basic rules are this:
+ * If the current layer has data set to None, take the data from previous layer.
+ * For each aesthetic mapping, if that mapping is set to None, take it from previous layer.
+
+ Parameters:
+ -----------
+ layers: a list of Layer objects
"""
- pass
+ for layer1, layer2 in zip(layers[:-1], layers[1:]):
+ if layer2.data is None:
+ layer2.data = layer1.data
+ layer2.aes = merge_aes(layer1, layer2)
class RPlot:
"""
|
Implemented sequence_layers and merge_aes functions
|
py
|
diff --git a/unitest.py b/unitest.py
index <HASH>..<HASH> 100755
--- a/unitest.py
+++ b/unitest.py
@@ -343,12 +343,12 @@ class TestGlances(unittest.TestCase):
# GlancesHistory
from glances.history import GlancesHistory
h = GlancesHistory()
- h.add('a', 1)
- h.add('a', 2)
- h.add('a', 3)
- h.add('b', 10)
- h.add('b', 20)
- h.add('b', 30)
+ h.add('a', 1, history_max_size=100)
+ h.add('a', 2, history_max_size=100)
+ h.add('a', 3, history_max_size=100)
+ h.add('b', 10, history_max_size=100)
+ h.add('b', 20, history_max_size=100)
+ h.add('b', 30, history_max_size=100)
self.assertEqual(len(h.get()), 2)
self.assertEqual(len(h.get()['a']), 3)
h.reset()
|
Correct unitary test following PR #<I>
|
py
|
diff --git a/lib/python/voltcli/environment.py b/lib/python/voltcli/environment.py
index <HASH>..<HASH> 100644
--- a/lib/python/voltcli/environment.py
+++ b/lib/python/voltcli/environment.py
@@ -30,6 +30,8 @@ re_voltdb_jar = re.compile('^voltdb(client)?-[.0-9]+[.]([\w]+\.)*jar$')
config_name = 'volt.cfg'
config_name_local = 'volt_local.cfg'
+# This is for k8s environment which wish to provide external jars for jdbc driver and such.
+voltdb_etc = '/etc/voltdb/'
# Filled in during startup.
standalone = None
@@ -265,4 +267,8 @@ def initialize(standalone_arg, command_name_arg, command_dir_arg, version_arg):
classpath.append(path)
for path in glob.glob(os.path.join(os.environ['VOLTDB_LIB'], 'extension', '*.jar')):
classpath.append(path)
+ # If we are in container env and /etc/voltdb/extension has any jars include them.
+ if os.environ.get('VOLTDB_CONTAINER') and os.path.isdir(voltdb_etc):
+ for path in glob.glob(os.path.join(voltdb_etc, 'extension', '*.jar')):
+ classpath.append(path)
utility.verbose_info('Classpath: %s' % ':'.join(classpath))
|
KO-<I>: Add support for providing extension in k8s environment. (#<I>)
|
py
|
diff --git a/test_path.py b/test_path.py
index <HASH>..<HASH> 100644
--- a/test_path.py
+++ b/test_path.py
@@ -775,6 +775,16 @@ class TestMergeTree:
assert Path(self.subdir_b / self.test_link.name).islink()
assert len(Path(self.subdir_b / self.test_file.name).bytes()) == 5000
+ def test_copytree_parameters(self):
+ """
+ merge_tree should accept parameters to copytree, such as 'ignore'
+ """
+ ignore = shutil.ignore_patterns('testlink*')
+ self.subdir_a.merge_tree(self.subdir_b, ignore=ignore)
+
+ assert self.subdir_b.isdir()
+ assert self.subdir_b.listdir() == [self.subdir_b / self.test_file.name]
+
class TestChdir:
def test_chdir_or_cd(self, tmpdir):
|
Add test capturing expectation that merge_tree take copytree parameters.
|
py
|
diff --git a/analyzers/ForcepointWebsensePing/forcepointwebsenseping.py b/analyzers/ForcepointWebsensePing/forcepointwebsenseping.py
index <HASH>..<HASH> 100644
--- a/analyzers/ForcepointWebsensePing/forcepointwebsenseping.py
+++ b/analyzers/ForcepointWebsensePing/forcepointwebsenseping.py
@@ -24,8 +24,9 @@ class WebsensePingAnalyzer(Analyzer):
level = "suspicious"
else:
level = "info"
- taxonomies.append(self.build_taxonomy(level, "Fortinet", "WebsensePing", value))
- return {"taxonomies": taxonomies}
+ taxonomies.append(self.build_taxonomy(level, "Forcepoint", "WebsensePing", value))
+ result = {"taxonomies": taxonomies}
+ return result
def run(self):
Analyzer.run(self)
|
Update forcepointwebsenseping.py
|
py
|
diff --git a/discord/state.py b/discord/state.py
index <HASH>..<HASH> 100644
--- a/discord/state.py
+++ b/discord/state.py
@@ -287,8 +287,11 @@ class ConnectionState:
emoji = self._get_reaction_emoji(**data['emoji'])
reaction = utils.get(message.reactions, emoji=emoji)
- # if reaction isn't in the list, we crash. This means discord
- # sent bad data, or we stored improperly
+ # Eventual consistency means we can get out of order or duplicate removes.
+ if not reaction:
+ log.warning("Unexpected reaction remove {}".format(data))
+ return
+
reaction.count -= 1
if data['user_id'] == self.user.id:
reaction.me = False
|
Fix crash on duplicate or out of order reactions. Eventual consistency ftw
|
py
|
diff --git a/src/diamond/utils/scheduler.py b/src/diamond/utils/scheduler.py
index <HASH>..<HASH> 100644
--- a/src/diamond/utils/scheduler.py
+++ b/src/diamond/utils/scheduler.py
@@ -83,7 +83,7 @@ def collector_process(collector, metric_queue, log):
reload_config = True
pass
- except:
+ except Exception:
log.exception('Collector failed!')
break
|
Change bare except to explicit catch for Exception
|
py
|
diff --git a/netmiko/rad/rad_etx.py b/netmiko/rad/rad_etx.py
index <HASH>..<HASH> 100644
--- a/netmiko/rad/rad_etx.py
+++ b/netmiko/rad/rad_etx.py
@@ -62,7 +62,7 @@ class RadETXSSH(RadETXBase):
class RadETXTelnet(RadETXBase):
"""RAD ETX Telnet Support."""
- def telnet_login(self, delay_factor=1):
+ def telnet_login(self, username_pattern=r"(?:user>)"):
"""
RAD presents with the following on login
@@ -70,20 +70,6 @@ class RadETXTelnet(RadETXBase):
password> ****
"""
- delay_factor = self.select_delay_factor(delay_factor)
- i = 0
- time.sleep(delay_factor * .5)
- output = ""
- while i <= 12:
- output = self.read_channel()
- if output:
- if 'user>' in output:
- self.write_channel(self.username + self.RETURN)
- elif 'password>' in output:
- self.write_channel(self.password + self.RETURN)
- break
- time.sleep(delay_factor * 1)
- else:
- self.write_channel(self.RETURN)
- time.sleep(delay_factor * 1.5)
- i += 1
+ self.TELNET_RETURN = self.RETURN
+ return super(RadETXTelnet,
+ self).telnet_login(username_pattern=username_pattern)
|
Use base_connection telnet_login instead of custom login Found that the default telnet_login will work with the RADs after changing the TELNET_RETURN to the regular Return. Also added in the Userprompt for the RAD.
|
py
|
diff --git a/python_modules/dagster/dagster/core/instance/ref.py b/python_modules/dagster/dagster/core/instance/ref.py
index <HASH>..<HASH> 100644
--- a/python_modules/dagster/dagster/core/instance/ref.py
+++ b/python_modules/dagster/dagster/core/instance/ref.py
@@ -26,9 +26,9 @@ def _schedule_directory(base):
def configurable_class_data(config_field):
return ConfigurableClassData(
- config_field["module"],
- config_field["class"],
- yaml.dump(config_field.get("config") or {}, default_flow_style=False),
+ check.str_elem(config_field, "module"),
+ check.str_elem(config_field, "class"),
+ yaml.dump(check.opt_dict_elem(config_field, "config"), default_flow_style=False),
)
|
Add check calls for configurable class Summary: A little seatbelt Test Plan: Unit Reviewers: prha, alangenfeld, dgibson Reviewed By: dgibson Differential Revision: <URL>
|
py
|
diff --git a/nupic/simple_server.py b/nupic/simple_server.py
index <HASH>..<HASH> 100755
--- a/nupic/simple_server.py
+++ b/nupic/simple_server.py
@@ -1,7 +1,7 @@
#! /usr/bin/env python
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
-# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
+# Copyright (C) 2015, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
|
Updated year in simple_server copyright header
|
py
|
diff --git a/pyghmi/ipmi/oem/lenovo/handler.py b/pyghmi/ipmi/oem/lenovo/handler.py
index <HASH>..<HASH> 100755
--- a/pyghmi/ipmi/oem/lenovo/handler.py
+++ b/pyghmi/ipmi/oem/lenovo/handler.py
@@ -464,6 +464,10 @@ class OEMHandler(generic.OEMHandler):
macprefix = None
while idx < endidx:
currmac = macs[idx:idx+6]
+ if not isinstance(currmac, bytearray):
+ # invalid vpd format, abort attempts to extract
+ # mac in this way
+ break
if currmac == b'\x00\x00\x00\x00\x00\x00':
break
# VPD may veer off, detect and break off
|
Abort MAC decode from FRU if not 'binary' If the TLV says the mac data is text, then it's not going to be a good day. Abort in such a case. Change-Id: I<I>df<I>de9af<I>aa<I>d<I>f<I>fa<I>e2
|
py
|
diff --git a/slackclient/_slackrequest.py b/slackclient/_slackrequest.py
index <HASH>..<HASH> 100644
--- a/slackclient/_slackrequest.py
+++ b/slackclient/_slackrequest.py
@@ -7,8 +7,6 @@ class SlackRequest(object):
pass
def do(self, token, request="?", post_data={}, domain="slack.com"):
- t = time.time()
- post_data["ts"] = t
post_data["token"] = token
post_data = urllib.urlencode(post_data)
url = 'https://{}/api/{}'.format(domain, request)
|
Don't send current time as ts to every API call
|
py
|
diff --git a/visidata/movement.py b/visidata/movement.py
index <HASH>..<HASH> 100644
--- a/visidata/movement.py
+++ b/visidata/movement.py
@@ -184,7 +184,7 @@ Sheet.addCommand('z<', 'go-prev-null', 'moveToNextRow(lambda row,col=cursorCol,i
Sheet.addCommand('z>', 'go-next-null', 'moveToNextRow(lambda row,col=cursorCol,isnull=isNullFunc(): isnull(col.getValue(row))) or status("no null down this column")', 'go down current column to next null value'),
for i in range(1, 11):
- globalCommand(ALT+str(i)[-1], 'jump-sheet-'+str(i), 'vd.push(*(list(s for s in allSheets if s.shortcut=="%s") or fail("no sheet")))' % {i}, f'jump to sheet {i}')
+ globalCommand(ALT+str(i)[-1], 'jump-sheet-'+str(i), f'vd.push(*(list(s for s in allSheets if s.shortcut==str({i})) or fail("no sheet")))', f'jump to sheet {i}')
BaseSheet.bindkey('KEY_LEFT', 'go-left')
BaseSheet.bindkey('KEY_DOWN', 'go-down')
|
[jump-sheet-] fix strformat issue in command
|
py
|
diff --git a/geoviews/operation/projection.py b/geoviews/operation/projection.py
index <HASH>..<HASH> 100644
--- a/geoviews/operation/projection.py
+++ b/geoviews/operation/projection.py
@@ -84,7 +84,8 @@ class project_path(_project_operation):
continue
try:
# Compute boundary intersections
- g = g.intersection(boundary)
+ if boundary:
+ g = g.intersection(boundary)
except:
continue
if is_multi_geometry(g):
|
Fixed projection bug clipping with empty boundary
|
py
|
diff --git a/ciscosparkapi/restsession.py b/ciscosparkapi/restsession.py
index <HASH>..<HASH> 100644
--- a/ciscosparkapi/restsession.py
+++ b/ciscosparkapi/restsession.py
@@ -37,8 +37,8 @@ __license__ = "MIT"
# Module Constants
-DEFAULT_SINGLE_REQUEST_TIMEOUT = 20
-DEFAULT_RATE_LIMIT_TIMEOUT = 60
+DEFAULT_SINGLE_REQUEST_TIMEOUT = 20.0
+DEFAULT_RATE_LIMIT_TIMEOUT = 60.0
RATE_LIMIT_EXCEEDED_RESPONSE_CODE = 429
@@ -96,9 +96,9 @@ class RestSession(object):
# Initialize attributes and properties
self._base_url = str(validate_base_url(base_url))
- self._access_token = access_token
- self._single_request_timeout = single_request_timeout
- self._rate_limit_timeout = rate_limit_timeout
+ self._access_token = str(access_token)
+ self._single_request_timeout = float(single_request_timeout)
+ self._rate_limit_timeout = float(rate_limit_timeout)
if timeout:
self.timeout = timeout
@@ -144,7 +144,7 @@ class RestSession(object):
"the 'single_request_timeout' instead.",
DeprecationWarning)
assert value is None or value > 0
- self._single_request_timeout = value
+ self._single_request_timeout = float(value)
@property
def single_request_timeout(self):
|
Ensure consistent types on RestSession variables Add conversion functions to ensure the types of RestSession’s attributes are consistent.
|
py
|
diff --git a/bumpversion/__init__.py b/bumpversion/__init__.py
index <HASH>..<HASH> 100644
--- a/bumpversion/__init__.py
+++ b/bumpversion/__init__.py
@@ -22,6 +22,11 @@ import subprocess
import io
from string import Formatter
+import sys
+import codecs
+sys.stdout = codecs.getwriter('utf8')(sys.stdout)
+
+
class Git(object):
|
sys.stdout is utf-8 encoded (fixes #<I>)
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -9,7 +9,7 @@
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
-1# * Redistributions in binary form must reproduce the above copyright notice,
+# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
@@ -19,7 +19,8 @@
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
Update setup.py spurious typos accidentally added
|
py
|
diff --git a/mongoctl/utils.py b/mongoctl/utils.py
index <HASH>..<HASH> 100644
--- a/mongoctl/utils.py
+++ b/mongoctl/utils.py
@@ -279,7 +279,11 @@ def get_host_ips(host):
try:
ips = []
- addr_info = socket.getaddrinfo(host, None)
+ try:
+ addr_info = socket.getaddrinfo(host, None)
+ # Can't resolve -> obviously has no IPs -> return default empty list
+ except socket.gaierror:
+ return ips
for elem in addr_info:
ip = elem[4]
if ip not in ips:
|
Don't throw pointless errors on dev laptops whose names don't resolve
|
py
|
diff --git a/redirects.py b/redirects.py
index <HASH>..<HASH> 100644
--- a/redirects.py
+++ b/redirects.py
@@ -39,13 +39,14 @@ def load_redirects():
if hasattr(settings, 'REDIRECTS_PATH'):
redirect_file_path = settings.REDIRECTS_PATH
- if not exists(redirect_file_path):
- return []
+ redirect_patterns = []
- with open(redirect_file_path) as redirect_file:
- redirect_dict = json.loads(redirect_file.read())
+ if exists(redirect_file_path):
+ with open(redirect_file_path) as redirect_file:
+ redirect_dict = json.loads(redirect_file.read())
+ redirect_patterns = [
+ convert_to_url_pattern(request, location)
+ for request, location in redirect_dict.iteritems()
+ ]
- return [
- convert_to_url_pattern(request, location)
- for request, location in redirect_dict.iteritems()
- ]
+ return redirect_patterns
|
Restructure load_redirects code
|
py
|
diff --git a/powerline-shell.py b/powerline-shell.py
index <HASH>..<HASH> 100755
--- a/powerline-shell.py
+++ b/powerline-shell.py
@@ -238,7 +238,7 @@ def add_svn_segment(powerline, cwd):
'I' Ignored
'M' Modified
'R' Replaced
- 'X' an unversioned directory created by an externals definition
+ 'X' a directory pulled in by an svn:externals definition
'?' item is not under version control
'!' item is missing (removed by non-svn command) or incomplete
'~' versioned item obstructed by some item of a different kind
@@ -248,7 +248,7 @@ def add_svn_segment(powerline, cwd):
#cmd = '"svn status | grep -c "^[ACDIMRX\\!\\~]"'
p1 = subprocess.Popen(['svn', 'status'], stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
- p2 = subprocess.Popen(['grep', '-c', '^[ACDIMRX\\!\\~]'],
+ p2 = subprocess.Popen(['grep', '-c', '^[ACDIMR\\!\\~]'],
stdin=p1.stdout, stdout=subprocess.PIPE)
output = p2.communicate()[0].strip()
if len(output) > 0 and int(output) > 0:
|
svn externals shouldn't be considered modifications
|
py
|
diff --git a/pyads/pyads_ex.py b/pyads/pyads_ex.py
index <HASH>..<HASH> 100644
--- a/pyads/pyads_ex.py
+++ b/pyads/pyads_ex.py
@@ -228,7 +228,7 @@ def adsAddRouteToPLC(sending_net_id, ip_address, username, password, route_name=
elif int.from_bytes(rcvd_is_password_correct, 'big') == 0x000407:
return False
else:
- return None
+ raise ValueError('Received unknown response from ' + ip_address)
@router_function
def adsDelRoute(net_id):
|
Raises ValueError on unknown response
|
py
|
diff --git a/elasticsearch/helpers.py b/elasticsearch/helpers.py
index <HASH>..<HASH> 100644
--- a/elasticsearch/helpers.py
+++ b/elasticsearch/helpers.py
@@ -1,4 +1,4 @@
-from itertools import islice, chain
+from itertools import islice
from operator import methodcaller
try:
from itertools import imap as map
@@ -106,7 +106,7 @@ def streaming_bulk(client, actions, chunk_size=500, raise_on_error=False, expand
resp = client.bulk(bulk_actions, **kwargs)
# go through request-reponse pairs and detect failures
- for op_type, item in chain.from_iterable(map(methodcaller('items'), resp['items'])):
+ for op_type, item in map(methodcaller('popitem'), resp['items']):
ok = item.get('ok')
if not ok and raise_on_error:
errors.append({op_type: item})
|
Use dict.popitem when we know the dict only has one key
|
py
|
diff --git a/src/projexui/xtimer.py b/src/projexui/xtimer.py
index <HASH>..<HASH> 100644
--- a/src/projexui/xtimer.py
+++ b/src/projexui/xtimer.py
@@ -145,19 +145,6 @@ class XTimer(QtCore.QObject):
with QtCore.QReadLocker(self.__lock):
return self.__singleShot
- def moveToThread(self, thread):
- """
- Moves this timer object to its own thread. If the timer is already
- running, then we need to stop it before it is moved.
-
- :param thread | <QtCore.QThread>
- """
- if self.__timer:
- self.stop()
- raise RuntimeError('QTimer exists on another thread.')
-
- super(XTimer, self).moveToThread(thread)
-
def setInterval(self, msecs):
"""
Sets the interval in milliseconds for this timer.
|
removed the overloaded moveToThread for the XTimer
|
py
|
diff --git a/test/tablet.py b/test/tablet.py
index <HASH>..<HASH> 100644
--- a/test/tablet.py
+++ b/test/tablet.py
@@ -375,8 +375,7 @@ class Tablet(object):
if start:
if not wait_for_start:
expected_state = None
- elif (tablet_type == 'master' or tablet_type == 'replica' or
- tablet_type == 'rdonly' or tablet_type == 'batch'):
+ elif tablet_type == 'master':
expected_state = 'SERVING'
else:
expected_state = 'NOT_SERVING'
|
Changing expectations on default state. In our tests, we want default non-master tablets to be NOT_SERVING, as their replication is most likely not setup.
|
py
|
diff --git a/examples/glyphs/sprint.py b/examples/glyphs/sprint.py
index <HASH>..<HASH> 100644
--- a/examples/glyphs/sprint.py
+++ b/examples/glyphs/sprint.py
@@ -88,8 +88,14 @@ no_olympics_glyph = Text(x=7.5, y=1942, text=["No Olympics in 1940 or 1944"],
no_olympics = plot.add_glyph(no_olympics_glyph)
tooltips = """
-<div><span style="font-size: 15px;">@Name</span><span style="font-size: 10px; color: #666;">(@Abbrev)</span></div>
-<div><span style="font-size: 17px; font-weight: bold;">@Time{0.00}</span><span style="font-size:10px; color: #666; padding-left:4px;">@Year</span></div>
+<div>
+ <span style="font-size: 15px;">@Name</span>
+ <span style="font-size: 10px; color: #666;">(@Abbrev)</span>
+</div>
+<div>
+ <span style="font-size: 17px; font-weight: bold;">@Time{0.00}</span>
+ <span style="font-size:10px; color: #666; padding-left:4px;">@Year</span>
+</div>
<div style="font-size: 11px; color: #666;">@{MetersBack}{0.00} meters behind</div>
"""
|
Improve HTML formatting in glyphs/sprint
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100755
--- a/setup.py
+++ b/setup.py
@@ -38,7 +38,7 @@ code with type notations. Typesafety is a means to enforce that those notations
are valid.
""",
license="LGPLv2+",
- version="1.0.1",
+ version="1.0.2",
author="Viktor Hercinger",
author_email="viktor.hercinger@balabit.com",
maintainer="Viktor Hercinger",
|
bumped version to <I>
|
py
|
diff --git a/tests.py b/tests.py
index <HASH>..<HASH> 100644
--- a/tests.py
+++ b/tests.py
@@ -105,3 +105,16 @@ class ConstantsTest(TestCase):
reporter_abbv, k
)
)
+
+ def test_no_variation_is_same_as_key(self):
+ """Are any variations identical to the keys they're supposed to be
+ variations of?
+ """
+ for variation, keys in VARIATIONS_ONLY.items():
+ for key in keys:
+ self.assertNotEqual(
+ variation,
+ key,
+ "The variation '%s' is identical to the key it's supposed "
+ "to be a variation of." % variation
+ )
|
Adds a new test that ensures no variation is identical to the item it's supposed to be a variation of. There were several of these, and they're all fixed as of now.
|
py
|
diff --git a/karaage/software/views/admin.py b/karaage/software/views/admin.py
index <HASH>..<HASH> 100644
--- a/karaage/software/views/admin.py
+++ b/karaage/software/views/admin.py
@@ -48,7 +48,7 @@ def software_list(request):
params = dict(request.GET.items())
m_params = dict([(str(k), str(v)) for k, v in params.items() if k.startswith('softwareversion__last_used_')])
- software_list = software_list.filter(**m_params)
+ software_list = software_list.filter(**m_params).distinct()
if request.REQUEST.has_key('search'):
terms = request.REQUEST['search'].lower()
|
Ensure software packages are unique when searching
|
py
|
diff --git a/sitetree/sitetreeapp.py b/sitetree/sitetreeapp.py
index <HASH>..<HASH> 100644
--- a/sitetree/sitetreeapp.py
+++ b/sitetree/sitetreeapp.py
@@ -278,6 +278,7 @@ class Cache(object):
signals.post_delete.connect(cache_empty, sender=MODEL_TREE_ITEM_CLASS)
# Listen to the changes in item permissions table.
signals.m2m_changed.connect(cache_empty, sender=MODEL_TREE_ITEM_CLASS.access_permissions)
+ self.init()
@classmethod
def reset(cls):
@@ -311,10 +312,10 @@ class Cache(object):
def empty(self, **kwargs):
"""Empties cached sitetree data."""
- self.cache = None
setattr(_THREAD_LOCAL, _THREAD_CACHE, None)
cache.delete('sitetrees')
cache.delete('sitetrees_reset')
+ self.init()
def get_entry(self, entry_name, key):
"""Returns cache entry parameter value by its name."""
@@ -652,8 +653,6 @@ class SiteTree(object):
# Resolve tree_alias from the context.
tree_alias = self.resolve_var(tree_alias)
- self.cache.init() # Warm up cache.
-
# Get tree.
tree_alias, sitetree_items = self.get_sitetree(tree_alias)
|
Changed Cache.init() calls.
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -0,0 +1,22 @@
+from setuptools import setup, find_packages
+
+version = '0.1'
+
+setup(name='giphypy',
+ version=version,
+ description=("Python wrapper for Giphy API"),
+ long_description=open('README.rst').read(),
+ classifiers=['Development Status :: 4 - Beta',
+ 'Intended Audience :: Developers',
+ 'License :: OSI Approved :: MIT License',
+ 'Operating System :: OS Independent',
+ 'Programming Language :: Python',
+ 'Topic :: Software Development :: Libraries :: Python Modules'],
+ keywords='python giphy api',
+ author='Shaun Duncan',
+ author_email='shaun.duncan@gmail.com',
+ url='http://www.github.com/shaunduncan/giphypy/',
+ license='MIT',
+ packages=find_packages(),
+ py_modules=['giphypy'],
+ )
|
Updated setup.py for initial version
|
py
|
diff --git a/facepy/signed_request.py b/facepy/signed_request.py
index <HASH>..<HASH> 100644
--- a/facepy/signed_request.py
+++ b/facepy/signed_request.py
@@ -44,7 +44,7 @@ class SignedRequest(object):
if signed_request and application_secret_key:
self.raw = self.parse(signed_request, application_secret_key)
- self.data = self.raw.get('app_signed_request_data', None)
+ self.data = self.raw.get('app_data', None)
self.page = self.Page(
id = self.raw['page']['id'],
|
Fixing typo from a sloppy VIM substitution :-/
|
py
|
diff --git a/samcli/__init__.py b/samcli/__init__.py
index <HASH>..<HASH> 100644
--- a/samcli/__init__.py
+++ b/samcli/__init__.py
@@ -2,4 +2,4 @@
SAM CLI version
"""
-__version__ = "1.26.0"
+__version__ = "1.27.0"
|
chore: bump SAM CLI version to <I> (#<I>)
|
py
|
diff --git a/pycdlib/dr.py b/pycdlib/dr.py
index <HASH>..<HASH> 100644
--- a/pycdlib/dr.py
+++ b/pycdlib/dr.py
@@ -713,7 +713,7 @@ class DirectoryRecord(object):
raise pycdlibexception.PyCdlibInternalError("Directory Record not yet initialized")
if not self.isdir:
- raise Exception("Trying to add a child to a record that is not a directory")
+ raise pycdlib.PyCdlibInvalidInput("Trying to add a child to a record that is not a directory")
# First ensure that this is not a duplicate. For speed purposes, we
# recognize that bisect_left will always choose an index to the *left*
|
Switch an Exception to a pycdlibexception.
|
py
|
diff --git a/pysnow/client.py b/pysnow/client.py
index <HASH>..<HASH> 100644
--- a/pysnow/client.py
+++ b/pysnow/client.py
@@ -135,12 +135,13 @@ class Client(object):
base_url=self.base_url,
**kwargs)
- def resource(self, api_path=None, base_path='/api/now', chunk_size=None):
+ def resource(self, api_path=None, base_path='/api/now', chunk_size=None, **kwargs):
"""Creates a new :class:`Resource` object after validating paths
:param api_path: Path to the API to operate on
:param base_path: (optional) Base path override
:param chunk_size: Response stream parser chunk size (in bytes)
+ :param **kwargs: Pass request.request parameters to the Resource object
:return:
- :class:`Resource` object
:raises:
@@ -155,7 +156,8 @@ class Client(object):
parameters=self.parameters,
chunk_size=chunk_size or 8192,
session=self.session,
- base_url=self.base_url)
+ base_url=self.base_url,
+ **kwargs)
def query(self, table, **kwargs):
"""Query (GET) request wrapper.
|
pass **kwargs from Client to Resource
|
py
|
diff --git a/src/sos/utils.py b/src/sos/utils.py
index <HASH>..<HASH> 100644
--- a/src/sos/utils.py
+++ b/src/sos/utils.py
@@ -1119,7 +1119,12 @@ def load_var(line):
if key.endswith(':'):
return key[:-1], pickle.loads(base64.b64decode(eval(value.strip())))
else:
- return key, eval(value.strip())
+ try:
+ return key, eval(value.strip())
+ except:
+ # use SoS_eval instead of eval because vars can contain sos objects such as R_library
+ from .sos_eval import SoS_eval
+ return key, SoS_eval(value.strip(), '${ }')
def version_info(module):
|
Fix load_var when the signature file contains non-string targets such as R_library
|
py
|
diff --git a/rewind/test/test_logbook.py b/rewind/test/test_logbook.py
index <HASH>..<HASH> 100644
--- a/rewind/test/test_logbook.py
+++ b/rewind/test/test_logbook.py
@@ -524,7 +524,7 @@ class TestLogbookReplication(unittest.TestCase):
self.transmitter = self.context.socket(zmq.PUSH)
self.receiver = self.context.socket(zmq.SUB)
- self.receiver.setsockopt(zmq.SUBSCRIBE, '')
+ self.receiver.setsockopt(zmq.SUBSCRIBE, b'')
self.transmitter.connect('tcp://127.0.0.1:8090')
self.receiver.connect('tcp://127.0.0.1:8091')
|
`zmq_socket.setsockopt(...)` is now given bytes Previously the function accepted strings. In Python 3 it does not. Tests currently fail. [ci skip]
|
py
|
diff --git a/src/taxi/parser.py b/src/taxi/parser.py
index <HASH>..<HASH> 100755
--- a/src/taxi/parser.py
+++ b/src/taxi/parser.py
@@ -75,13 +75,12 @@ class TaxiParser(Parser):
# Try to match XX:XX-XX:XX
time = re.match(r'(\d{2}):(\d{2})-(?:(?:(\d{2}):(\d{2}))|\?)', splitted_line[1])
+ time_end = None
if time is not None:
time_start = datetime.time(int(time.group(1)), int(time.group(2)))
if time.group(3) is not None and time.group(4) is not None:
time_end = datetime.time(int(time.group(3)), int(time.group(4)))
total_hours = (time_start, time_end)
- else:
- total_hours = (time_start, None)
else:
# Try with the ->XX:XX notation
time = re.match(r'->(?:(?:(\d{2}):(\d{2}))|\?)', splitted_line[1])
|
Fix a bug when a line is marked with ->?
|
py
|
diff --git a/resolwe/storage/manager.py b/resolwe/storage/manager.py
index <HASH>..<HASH> 100644
--- a/resolwe/storage/manager.py
+++ b/resolwe/storage/manager.py
@@ -36,9 +36,8 @@ class DecisionMaker:
rule = dict()
matching_keys = []
for key in override_rules.keys():
- if not key.endswith(":"):
- key += ":"
- if self.process_type.startswith(key):
+ modified_key = key if key.endswith(":") else key + ":"
+ if self.process_type.startswith(modified_key):
matching_keys.append(key)
matching_keys.sort(key=len)
for matching_key in matching_keys:
|
Fix bug when process_type ended with colon
|
py
|
diff --git a/pyoko/db/schema_update.py b/pyoko/db/schema_update.py
index <HASH>..<HASH> 100644
--- a/pyoko/db/schema_update.py
+++ b/pyoko/db/schema_update.py
@@ -180,8 +180,8 @@ class SchemaUpdater(object):
@staticmethod
def _handle_version_bucket(client, model):
- bucket_name = model._get_bucket_name() + settings.VERSION_SUFFIX
- bucket_type = client.bucket_type(settings.DEFAULT_BUCKET_TYPE + '_version')
+ bucket_name = settings.VERSION_BUCKET
+ bucket_type = client.bucket_type(settings.VERSION_LOG_BUCKET_TYPE)
bucket = bucket_type.bucket(bucket_name)
bucket.set_property('search_index', '_dont_index_')
|
ADD, for migrate operation version and log settings are updated and fixed. rref #<I>
|
py
|
diff --git a/mysensors/mysensors.py b/mysensors/mysensors.py
index <HASH>..<HASH> 100644
--- a/mysensors/mysensors.py
+++ b/mysensors/mysensors.py
@@ -206,10 +206,11 @@ class SerialGateway(Gateway, threading.Thread):
# pylint: disable=too-many-arguments
def __init__(self, port, event_callback=None, persistence=False,
- persistence_file="mysensors.pickle",
+ persistence_file="mysensors.pickle", protocol_version="1.4",
baud=115200, timeout=1.0, reconnect_timeout=10.0):
threading.Thread.__init__(self)
- Gateway.__init__(self, event_callback, persistence, persistence_file)
+ Gateway.__init__(self, event_callback, persistence, persistence_file,
+ protocol_version)
self.serial = None
self.port = port
self.baud = baud
|
Pass protocol version through from SerialGateway to Gateway. The SerialGateway constructor was missing the protocol_version parameter.
|
py
|
diff --git a/rope/refactor/usefunction.py b/rope/refactor/usefunction.py
index <HASH>..<HASH> 100644
--- a/rope/refactor/usefunction.py
+++ b/rope/refactor/usefunction.py
@@ -37,6 +37,9 @@ class UseFunction(object):
changes.add_change(c)
return changes
+ def get_function_name(self):
+ return self.pyfunction.get_name()
+
def _restructure(self, resources, task_handle, others=True):
body = self._get_body()
pattern = self._make_pattern()
|
usefunction: added UseFunction.get_function_name()
|
py
|
diff --git a/sharedmem/sharedmem.py b/sharedmem/sharedmem.py
index <HASH>..<HASH> 100644
--- a/sharedmem/sharedmem.py
+++ b/sharedmem/sharedmem.py
@@ -825,9 +825,16 @@ def copy(a):
def fromiter(iter, dtype, count=None):
return copy(numpy.fromiter(iter, dtype, count))
+try:
+ # numpy >= 1.16
+ _unpickle_ctypes_type = numpy.ctypeslib.as_ctypes_type(numpy.dtype('|u1'))
+except:
+ # older version numpy < 1.16
+ _unpickle_ctypes_type = numpy.ctypeslib._typecodes['|u1']
+
def __unpickle__(ai, dtype):
dtype = numpy.dtype(dtype)
- tp = numpy.ctypeslib.as_ctypes_type(numpy.dtype('|u1'))
+ tp = _unpickle_ctypes_type * 1
# if there are strides, use strides, otherwise the stride is the itemsize of dtype
if ai['strides']:
|
support pre <I> numpy.
|
py
|
diff --git a/bqplot/overlays.py b/bqplot/overlays.py
index <HASH>..<HASH> 100644
--- a/bqplot/overlays.py
+++ b/bqplot/overlays.py
@@ -343,6 +343,8 @@ class BrushIntervalSelectorOverlay(OneDSelectorOverlay):
This attribute can be used to trigger computationally intensive code
which should be run only on the interval selection being completed as
opposed to code which should be run whenever selected is changing.
+ color: Color or None (default: None)
+ color of the rectangle representing the brush selector
"""
_view_name = Unicode('bqplot.BrushIntervalSelectorOverlay', sync=True)
brushing = Bool(False, sync=True)
@@ -385,6 +387,8 @@ class BrushSelectorOverlay(TwoDSelectorOverlay):
This attribute can be used to trigger computationally intensive code
which should be run only on the interval selection being completed as
opposed to code which should be run whenever selected is changing.
+ color: Color or None (default: None)
+ color of the rectangle representing the brush selector
"""
_view_name = Unicode('bqplot.BrushSelectorOverlay', sync=True)
clear = Bool(False, sync=True)
|
added docs for brush and brush overlay
|
py
|
diff --git a/safe_qgis/widgets/test/test_message_viewer.py b/safe_qgis/widgets/test/test_message_viewer.py
index <HASH>..<HASH> 100644
--- a/safe_qgis/widgets/test/test_message_viewer.py
+++ b/safe_qgis/widgets/test/test_message_viewer.py
@@ -20,6 +20,7 @@ import os
import sys
import unittest
from PyQt4 import Qt
+from PyQt4.QtGui import QApplication
from third_party.pydispatch import dispatcher
import safe.common.utilities
from safe_qgis.widgets.message_viewer import MessageViewer
|
Added missing QApplication import
|
py
|
diff --git a/werkzeug/utils.py b/werkzeug/utils.py
index <HASH>..<HASH> 100644
--- a/werkzeug/utils.py
+++ b/werkzeug/utils.py
@@ -52,7 +52,7 @@ def _log(type, message, *args, **kwargs):
getattr(_logger, type)(message.rstrip(), *args, **kwargs)
-def _patch_func(old, new):
+def _patch_wrapper(old, new):
"""
Helper function that forwards all the function details to the
decorated function.
@@ -1642,7 +1642,7 @@ def responder(f):
def application(environ, start_response):
return Response('Hello World!')
"""
- return _patch_func(f, lambda *a: f(*a)(*a[-2:]))
+ return _patch_wrapper(f, lambda *a: f(*a)(*a[-2:]))
def import_string(import_name, silent=False):
|
compromise: patch_func -> patch_wrapper
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -9,7 +9,7 @@ setup(name='peri',
url='http://github.com/mattbierbaum/peri/',
license='MIT License',
author='Matt Bierbaum, Brian Leahy',
- version='0.1.1' #peri.__version__,
+ version='0.1.1', #peri.__version__,
packages=[
'peri', 'peri.mc', 'peri.comp', 'peri.viz',
|
Removing self-reference in setup.py, which is causing Brian hangups.
|
py
|
diff --git a/hszinc/parser.py b/hszinc/parser.py
index <HASH>..<HASH> 100644
--- a/hszinc/parser.py
+++ b/hszinc/parser.py
@@ -18,6 +18,7 @@ import re
import six
import functools
import json
+import copy
URI_META = re.compile(r'\\([:/\?#\[\]@\\&=;"$`])')
GRID_SEP = re.compile(r'\n\n+')
@@ -99,7 +100,7 @@ def parse_grid(grid_str, mode=MODE_ZINC):
if isinstance(grid_str, six.string_types):
parsed = json.loads(grid_str)
else:
- parsed = grid_str
+ parsed = copy.deepcopy(grid_str)
meta = parsed.pop('meta')
version = meta.pop('ver')
|
parser: Take copies of JSON objects
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -62,7 +62,10 @@ setup(
'fonts/texgyre/termes/*.otf',
]},
scripts=['bin/rinoh'],
- install_requires=['docutils'],
+ install_requires=['docutils', 'purepng>=0.1.0'],
+ dependency_links=[
+ 'https://github.com/Scondo/purepng/tarball/0.1.1#egg=purepng-0.1.1'
+ ],
extras_require = {'bitmap': ['Pillow']},
provides=[PACKAGE, LIB],
#test_suite='nose.collector',
|
PurePNG: added dependency link for missing PyPI dependency
|
py
|
diff --git a/dvc/version.py b/dvc/version.py
index <HASH>..<HASH> 100644
--- a/dvc/version.py
+++ b/dvc/version.py
@@ -7,7 +7,7 @@ import os
import subprocess
-_BASE_VERSION = "0.41.1"
+_BASE_VERSION = "0.41.2"
def _generate_version(base_version):
|
dvc: bump to <I>
|
py
|
diff --git a/anytemplate/engines/tests/base.py b/anytemplate/engines/tests/base.py
index <HASH>..<HASH> 100644
--- a/anytemplate/engines/tests/base.py
+++ b/anytemplate/engines/tests/base.py
@@ -9,12 +9,12 @@ import anytemplate.engines.base as TT # stands for test target
class Test_00(unittest.TestCase):
def test_10__class_methods(self):
- self.assertEquals(TT.BaseEngine.name(), "base")
- self.assertEquals(TT.BaseEngine.file_extensions(), [])
- self.assertFalse(TT.BaseEngine.supports())
+ self.assertEquals(TT.Engine.name(), "base")
+ self.assertEquals(TT.Engine.file_extensions(), [])
+ self.assertFalse(TT.Engine.supports("foo.tmpl"))
def test_20__instance_methods(self):
- engine = TT.BaseEngine()
+ engine = TT.Engine()
try:
engine.renders_impl("aaa") # Template string must be given.
engine.render_impl(__file__)
|
follow the rename of template engine class in anytemplate.engines.base
|
py
|
diff --git a/hadoopy/_local.py b/hadoopy/_local.py
index <HASH>..<HASH> 100644
--- a/hadoopy/_local.py
+++ b/hadoopy/_local.py
@@ -7,7 +7,6 @@ import subprocess
import tempfile
import shutil
import contextlib
-import sys
@contextlib.contextmanager
@@ -91,29 +90,23 @@ class LocalTask(object):
break
timeout = None
wrote = False
- sys.stderr.write('In loop[%s]\n' % str(kv))
while True:
r, w, _ = select.select([out_r_fd], [in_w_fd], [], timeout)
if r: # If data is available to be read, than get it
- okv = tbfp_r.next()
- sys.stderr.write('In loop Yielding[%s]\n' % str(okv))
- yield okv
+ yield tbfp_r.next()
elif w and not wrote:
tbfp_w.write(kv)
wrote = True
- timeout = .01
+ timeout = .0001
else:
if wrote and (poll is None or poll()):
- sys.stderr.write('Leaving loop\n')
break
# Get any remaining values
- sys.stderr.write('Get remaining\n')
while True:
try:
yield tbfp_r.next()
except EOFError:
break
- sys.stderr.write('Finishing\n')
finally:
p.kill()
p.wait()
|
Removed debugging lines and changed poll timeout to <I>
|
py
|
diff --git a/openquake/java.py b/openquake/java.py
index <HASH>..<HASH> 100644
--- a/openquake/java.py
+++ b/openquake/java.py
@@ -76,6 +76,7 @@ JAVA_CLASSES = {
"LocationListFormatter": "org.gem.LocationListFormatter",
"PythonBridgeAppender": "org.gem.log.PythonBridgeAppender",
"DisaggregationCalculator": "org.gem.calc.DisaggregationCalculator",
+ "UHSCalculator": "org.gem.calc.UHSCalculator",
}
|
Added UHSCalculator to commonly used Java class dict.
|
py
|
diff --git a/spyder/plugins/__init__.py b/spyder/plugins/__init__.py
index <HASH>..<HASH> 100644
--- a/spyder/plugins/__init__.py
+++ b/spyder/plugins/__init__.py
@@ -518,6 +518,8 @@ class SpyderPluginMixin(object):
def toggle_view(self, checked):
"""Toggle view"""
+ if not self.dockwidget:
+ return
if checked:
self.dockwidget.show()
self.dockwidget.raise_()
|
Allow plugins with no dockwidget (menu only for example)
|
py
|
diff --git a/configargparse.py b/configargparse.py
index <HASH>..<HASH> 100644
--- a/configargparse.py
+++ b/configargparse.py
@@ -390,7 +390,7 @@ class ArgumentParser(argparse.ArgumentParser):
args = list(args)
# normalize args by converting args like --key=value to --key value
- normalized_args = list()
+ normalized_args = []
for arg in args:
if arg and arg[0] in self.prefix_chars and '=' in arg:
key, value = arg.split('=', 1)
|
Unnecessary list call - rewrite as a literal
|
py
|
diff --git a/tests/unit/test_connection.py b/tests/unit/test_connection.py
index <HASH>..<HASH> 100644
--- a/tests/unit/test_connection.py
+++ b/tests/unit/test_connection.py
@@ -395,3 +395,21 @@ class ConnectionTest(unittest.TestCase):
self.assertEqual(e.oneview_response, self.expected_response_body)
else:
self.fail()
+
+ @mock.patch.object(connection, 'do_http')
+ def test_task_in_response_body_without_202_status(self, mock_do_http):
+
+ # create the return values
+ mockedResponse = type('mockResponse', (), {'status': 200})()
+ mockedTaskBody = {'category': 'tasks'}
+
+ # set-up the mock
+ mock_do_http.return_value = (mockedResponse, mockedTaskBody)
+
+ # call the method we are testing
+ (testTask, testBody) = self.connection._connection__do_rest_call('PUT', '/rest/test', '{ "body": "test" }',
+ None)
+
+ # verify the result
+ self.assertEquals(mockedTaskBody, testTask)
+ self.assertEquals(mockedTaskBody, testBody)
|
Added a unit test to test the change to connection.__do_rest_call
|
py
|
diff --git a/zimsoap/client.py b/zimsoap/client.py
index <HASH>..<HASH> 100644
--- a/zimsoap/client.py
+++ b/zimsoap/client.py
@@ -1397,8 +1397,10 @@ not {0}'.format(type(ids)))
def get_folder_grant(self, **kwargs):
folder = self.get_folder(**kwargs)
-
- return folder['folder']['acl']
+ if 'acl' in folder['folder']:
+ return folder['folder']['acl']
+ else:
+ return None
def modify_folder_grant(
self,
|
Return None when no acl found
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -37,7 +37,7 @@ with open('README.rst', 'r') as fh:
setup(
name='localshop',
- version='0.5.0',
+ version='0.6.0-dev',
author='Michael van Tellingen',
author_email='michaelvantellingen@gmail.com',
url='http://github.com/mvantellingen/localshop',
|
Bump to <I>-dev
|
py
|
diff --git a/openid/consumer/fetchers.py b/openid/consumer/fetchers.py
index <HASH>..<HASH> 100644
--- a/openid/consumer/fetchers.py
+++ b/openid/consumer/fetchers.py
@@ -1,3 +1,6 @@
+"""
+This module contains the HTTP fetcher interface and several implementations.
+"""
import urllib2
import time
import cStringIO
|
[project @ Add a module-level summary of fetchers.py]
|
py
|
diff --git a/malcolm/modules/xmap/parts/xmapdriverpart.py b/malcolm/modules/xmap/parts/xmapdriverpart.py
index <HASH>..<HASH> 100644
--- a/malcolm/modules/xmap/parts/xmapdriverpart.py
+++ b/malcolm/modules/xmap/parts/xmapdriverpart.py
@@ -1,4 +1,6 @@
from malcolm.modules.ADCore.parts import DetectorDriverPart
+from malcolm.modules.ADCore.infos import NDArrayDatasetInfo
+from malcolm.modules.scanning.controllers import RunnableController
class XmapDriverPart(DetectorDriverPart):
@@ -21,3 +23,9 @@ class XmapDriverPart(DetectorDriverPart):
arrayCounter=completed_steps,
arrayCallbacks=True))
return fs
+
+ @RunnableController.ReportStatus
+ def report_configuration(self, context):
+ infos = super(XmapDriverPart, self).report_configuration(
+ context) + [NDArrayDatasetInfo(rank=2)]
+ return infos
|
Fix a regression where Xmap would not report its DET and sum datasets
|
py
|
diff --git a/scripts/make_confidence_report.py b/scripts/make_confidence_report.py
index <HASH>..<HASH> 100755
--- a/scripts/make_confidence_report.py
+++ b/scripts/make_confidence_report.py
@@ -35,6 +35,9 @@ import tensorflow as tf
from tensorflow.python.platform import flags
from cleverhans.utils_tf import silence
+silence()
+# silence call must precede this imports. pylint doesn't like that
+# pylint: disable=C0413
from cleverhans.confidence_report import make_confidence_report
from cleverhans.confidence_report import BATCH_SIZE
from cleverhans.confidence_report import MC_BATCH_SIZE
@@ -48,7 +51,6 @@ from cleverhans.confidence_report import BASE_EPS_ITER
from cleverhans.confidence_report import REPORT_PATH
from cleverhans.confidence_report import SAVE_ADVX
-silence()
FLAGS = flags.FLAGS
|
Move silence call earlier (#<I>) * local patches * Update make_confidence_report.py
|
py
|
diff --git a/hamster/applet.py b/hamster/applet.py
index <HASH>..<HASH> 100755
--- a/hamster/applet.py
+++ b/hamster/applet.py
@@ -221,6 +221,8 @@ class HamsterApplet(object):
self.activity_list.child.select_region(0, -1)
else:
self.activity_list.child.set_text('')
+
+ self.applet.grab_focus()
self.activity_list.grab_focus()
|
lame, have to grab applet's focus before focusing element svn path=/trunk/; revision=<I>
|
py
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.