diff
stringlengths 139
3.65k
| message
stringlengths 8
627
| diff_languages
stringclasses 1
value |
|---|---|---|
diff --git a/shoebot/core/drawqueue_sink.py b/shoebot/core/drawqueue_sink.py
index <HASH>..<HASH> 100644
--- a/shoebot/core/drawqueue_sink.py
+++ b/shoebot/core/drawqueue_sink.py
@@ -20,6 +20,7 @@ class DrawQueueSink(object):
'''
r_context = self.create_rcontext(size, frame)
drawqueue.render(r_context)
+ self.rendering_finished(size, frame, r_context)
return r_context
def create_rcontext(self, size, frame):
|
Tell rendercontext we have finished, so pngs get rendered.
|
py
|
diff --git a/mapchete/io/vector.py b/mapchete/io/vector.py
index <HASH>..<HASH> 100644
--- a/mapchete/io/vector.py
+++ b/mapchete/io/vector.py
@@ -122,6 +122,8 @@ def _validated_crs(crs):
return CRS().from_epsg(int(crs))
elif isinstance(crs, int):
return CRS().from_epsg(crs)
+ elif isinstance(crs, dict):
+ return CRS().from_dict(crs)
else:
raise TypeError("invalid CRS given")
|
also enable parsing CRS represented as dict
|
py
|
diff --git a/caravel/views.py b/caravel/views.py
index <HASH>..<HASH> 100644
--- a/caravel/views.py
+++ b/caravel/views.py
@@ -78,6 +78,9 @@ class TableColumnInlineView(CompactCRUDMixin, CaravelModelView): # noqa
"Whether to make this column available as a "
"[Time Granularity] option, column has to be DATETIME or "
"DATETIME-like"),
+ 'expression': utils.markdown(
+ "a valid SQL expression as supported by the underlying backend. "
+ "Example: `substr(name, 1, 1)`", True),
}
appbuilder.add_view_no_menu(TableColumnInlineView)
@@ -106,6 +109,11 @@ class SqlMetricInlineView(CompactCRUDMixin, CaravelModelView): # noqa
edit_columns = [
'metric_name', 'description', 'verbose_name', 'metric_type',
'expression', 'table']
+ description_columns = {
+ 'expression': utils.markdown(
+ "a valid SQL expression as supported by the underlying backend. "
+ "Example: `count(DISTINCT userid)`", True),
+ }
add_columns = edit_columns
page_size = 500
appbuilder.add_view_no_menu(SqlMetricInlineView)
|
Adding expression column description in the CRUD
|
py
|
diff --git a/salt/utils/minions.py b/salt/utils/minions.py
index <HASH>..<HASH> 100644
--- a/salt/utils/minions.py
+++ b/salt/utils/minions.py
@@ -101,7 +101,7 @@ class CkMinions(object):
salt.utils.fopen(datap)
).get('grains')
comps = expr.rsplit(':', 1)
- match = salt.utils.traverse_dict(grains, comps[0])
+ match = salt.utils.traverse_dict(grains, comps[0], {})
if len(comps) < 2:
continue
if not match:
|
update minions file for traverse_dict changes
|
py
|
diff --git a/pip_accel/__init__.py b/pip_accel/__init__.py
index <HASH>..<HASH> 100644
--- a/pip_accel/__init__.py
+++ b/pip_accel/__init__.py
@@ -149,8 +149,8 @@ def main():
logger.fatal("pip reported unrecoverable installation errors. Please fix and rerun!")
sys.exit(1)
finally:
- # Always cleanup temporary build directory.
- shutil.rmtree(build_directory)
+ # Always cleanup temporary build directory.
+ shutil.rmtree(build_directory)
# Abort when after N retries we still failed to download source distributions.
logger.fatal("External command failed %i times, aborting!" % MAX_RETRIES)
sys.exit(1)
|
Fix code indentation of recent changes
|
py
|
diff --git a/openquake/commands/renumber_sources.py b/openquake/commands/renumber_sources.py
index <HASH>..<HASH> 100644
--- a/openquake/commands/renumber_sources.py
+++ b/openquake/commands/renumber_sources.py
@@ -33,6 +33,7 @@ def renumber_sources(smlt_file):
logging.basicConfig(level=logging.INFO)
number = 1
for path in readinput.get_paths(smlt_file):
+ logging.info('Renumbering %s', path)
root = nrml.read(path)
if root['xmlns'] == 'http://openquake.org/xmlns/nrml/0.4':
raise ObsoleteFormat('Please use oq upgrade_nrml .')
|
Added log [skip CI] Former-commit-id: 4f6e<I>bc<I>ab3d7f<I>d<I>
|
py
|
diff --git a/openquake/calculators/export/hazard.py b/openquake/calculators/export/hazard.py
index <HASH>..<HASH> 100644
--- a/openquake/calculators/export/hazard.py
+++ b/openquake/calculators/export/hazard.py
@@ -113,7 +113,8 @@ def export_ses_csv(ekey, dstore):
trt, r['strike'], r['dip'], r['rake'],
r['boundary']))
rows.sort() # by rupture serial
- writers.write_csv(dest, rows, header=header, sep='\t')
+ writers.write_csv(dest, rows, header=header, sep='\t',
+ comment='investigation_time=%s' % oq.investigation_time)
return [dest]
|
Saved the investigation_time in ruptures.csv
|
py
|
diff --git a/tests/test_incident.py b/tests/test_incident.py
index <HASH>..<HASH> 100644
--- a/tests/test_incident.py
+++ b/tests/test_incident.py
@@ -62,6 +62,21 @@ class TestIncident(unittest.TestCase):
self.assertEqual(self.client.default_payload, {})
@httpretty.activate
+ def test_invalid_query_type(self):
+ json_body = json.dumps({'result': [{'number': self.mock_incident['number']}]})
+ httpretty.register_uri(httpretty.GET,
+ "https://%s/%s" % (self.mock_connection['fqdn'], self.mock_incident['path']),
+ body=json_body,
+ status=200,
+ content_type="application/json")
+
+ try:
+ self.client.query(table='incident', query=1).get_one()
+ self.assertFalse('Query of type int should fail')
+ except pysnow.InvalidUsage:
+ pass
+
+ @httpretty.activate
def test_get_incident_by_dict_query(self):
"""
Make sure fetching by dict type query works
|
Added invalid_query_type test
|
py
|
diff --git a/third_party/stdlib/unittest_case.py b/third_party/stdlib/unittest_case.py
index <HASH>..<HASH> 100644
--- a/third_party/stdlib/unittest_case.py
+++ b/third_party/stdlib/unittest_case.py
@@ -684,7 +684,8 @@ class TestCase(object):
seq1_repr = seq1_repr[:30] + '...'
if len(seq2_repr) > 30:
seq2_repr = seq2_repr[:30] + '...'
- elements = (seq_type_name.capitalize(), seq1_repr, seq2_repr)
+ #elements = (seq_type_name.capitalize(), seq1_repr, seq2_repr)
+ elements = (seq_type_name[0].upper() + seq_type_name[1:].lower(), seq1_repr, seq2_repr)
differing = '%ss differ: %s != %s\n' % elements
for i in xrange(min(len1, len2)):
|
Substitute for use of str.capitalize(). (#<I>)
|
py
|
diff --git a/bulbs/feeds/serializers.py b/bulbs/feeds/serializers.py
index <HASH>..<HASH> 100644
--- a/bulbs/feeds/serializers.py
+++ b/bulbs/feeds/serializers.py
@@ -41,8 +41,9 @@ class GlanceContentSerializer(serializers.Serializer):
"slug": obj.slug,
"featured_media": GlanceFeaturedMediaSerializer(obj).data,
'link': self.context['request'].build_absolute_uri(obj.get_absolute_url()),
- 'authors': ["America's Finest News Source"], # TODO
+ # mparent(2016-05-04) TODO: Optional author support
+ 'authors': ["America's Finest News Source"],
'tags': {
- 'section': [tag.name for tag in obj.tags.all()], # TODO: Ordered
+ 'section': [tag.name for tag in obj.ordered_tags()],
},
}
|
Glance Feed: TODO cleanup
|
py
|
diff --git a/pymongo/cursor.py b/pymongo/cursor.py
index <HASH>..<HASH> 100644
--- a/pymongo/cursor.py
+++ b/pymongo/cursor.py
@@ -626,8 +626,16 @@ class Cursor(object):
kwargs["_connection_to_use"] = self.__connection_id
kwargs.update(self.__kwargs)
- response = db.connection._send_message_with_response(message,
- **kwargs)
+ try:
+ response = db.connection._send_message_with_response(message,
+ **kwargs)
+ except AutoReconnect:
+ # Don't try to send kill cursors on another socket
+ # or to another server. It can cause a _pinValue
+ # assertion on some server releases if we get here
+ # due to a socket timeout.
+ self.__killed = True
+ raise
if isinstance(response, tuple):
(connection_id, response) = response
@@ -641,6 +649,9 @@ class Cursor(object):
self.__as_class,
self.__tz_aware)
except AutoReconnect:
+ # Don't send kill cursors to another server after a "not master"
+ # error. It's completely pointless.
+ self.__killed = True
db.connection.disconnect()
raise
self.__id = response["cursor_id"]
|
Avoid _pinValue assertion on mongod PYTHON-<I>
|
py
|
diff --git a/amqplib/client_0_8/transport.py b/amqplib/client_0_8/transport.py
index <HASH>..<HASH> 100644
--- a/amqplib/client_0_8/transport.py
+++ b/amqplib/client_0_8/transport.py
@@ -82,9 +82,8 @@ class _AbstractTransport(object):
"""
size = len(payload)
- self._write(pack('>BHI', frame_type, channel, size))
- self._write(payload)
- self._write('\xce')
+ self._write(pack('>BHI%dsB' % size,
+ frame_type, channel, size, payload, 0xce))
class SSLTransport(_AbstractTransport):
|
Send frames to the TCP socket with a single call instead of 3, this small change causes the unittests to run over 2x faster on my laptop.
|
py
|
diff --git a/pyalveo/pyalveo.py b/pyalveo/pyalveo.py
index <HASH>..<HASH> 100644
--- a/pyalveo/pyalveo.py
+++ b/pyalveo/pyalveo.py
@@ -1110,12 +1110,15 @@ class Client(object):
:param displaydoc: if True, make this the display document for the item
:type displaydoc: Boolean
+
+ :param preferName: if True, given document name will be the document id rather than filename. Useful if you want to upload under a different filename.
+ :type preferName: Boolean
:rtype: String
:returns: The URL of the newly created document
"""
-
- if file is not None:
+
+ if not preferName and file is not None:
docid = os.path.basename(file)
else:
docid = name
|
Update for uploading document and setting it's name
|
py
|
diff --git a/bitmerchant/wallet/keys.py b/bitmerchant/wallet/keys.py
index <HASH>..<HASH> 100644
--- a/bitmerchant/wallet/keys.py
+++ b/bitmerchant/wallet/keys.py
@@ -316,10 +316,11 @@ class PublicKey(Key):
p = curve.p()
alpha = (pow(x, 3, p) + curve.a() * x + curve.b()) % p
beta = square_root_mod_prime(alpha, p)
- if y_odd:
- public_pair = PublicPair(x, beta)
- else:
+ y_even = not y_odd
+ if y_even == bool(beta & 1):
public_pair = PublicPair(x, p - beta)
+ else:
+ public_pair = PublicPair(x, beta)
else:
raise KeyParseError("The given key is not in a known format.")
return cls.from_public_pair(public_pair, network=network,
|
Fix insidious bug for restoring from public keys
|
py
|
diff --git a/slave/transport.py b/slave/transport.py
index <HASH>..<HASH> 100755
--- a/slave/transport.py
+++ b/slave/transport.py
@@ -297,7 +297,12 @@ try:
self._serial.write(data)
def __read__(self, num_bytes):
- return self._serial.read(num_bytes)
+ # The serial.SerialTimeoutException is only raised on write timeouts.
+ # In case of a read timeout, an empty string is returned.
+ data = self._serial.read(num_bytes)
+ if len(data) == 0:
+ raise Serial.Timeout()
+ return data
except ImportError:
pass
|
Added propper handling of reading timeouts in `transport.Serial` class.
|
py
|
diff --git a/evm/estimators/gas.py b/evm/estimators/gas.py
index <HASH>..<HASH> 100644
--- a/evm/estimators/gas.py
+++ b/evm/estimators/gas.py
@@ -9,16 +9,12 @@ from evm.utils.spoof import (
def _get_computation_error(state, transaction):
- snapshot = state.snapshot()
- try:
- computation = state.do_call(transaction)
+ computation = state.do_call(transaction)
- if computation.is_error:
- return computation._error
- else:
- return None
- finally:
- state.revert(snapshot)
+ if computation.is_error:
+ return computation._error
+ else:
+ return None
@curry
|
remove extra snapshot/revert that isn't needed for gas estimation
|
py
|
diff --git a/docs/source/conf.py b/docs/source/conf.py
index <HASH>..<HASH> 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -139,12 +139,12 @@ todo_include_todos = True
#html_theme = 'alabaster'
-try:
- import sphinx_rtd_theme
- html_theme = "sphinx_rtd_theme"
- html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
-except:
- print "rtd theme not found"
+# try:
+# import sphinx_rtd_theme
+# html_theme = "sphinx_rtd_theme"
+# html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
+# except:
+# print "rtd theme not found"
# Theme options are theme-specific and customize the look and feel of a theme
|
fix: rtd theme should not be enabled by default (does not work with rendering ipywidgets locally)
|
py
|
diff --git a/mopidy_youtube/backend.py b/mopidy_youtube/backend.py
index <HASH>..<HASH> 100644
--- a/mopidy_youtube/backend.py
+++ b/mopidy_youtube/backend.py
@@ -8,10 +8,11 @@ import unicodedata
from multiprocessing.pool import ThreadPool
from urlparse import parse_qs, urlparse
-from mopidy import backend
+from mopidy import backend, exceptions
from mopidy.models import Album, SearchResult, Track
import pafy
+import youtube
import pykka
@@ -146,6 +147,8 @@ class YouTubeBackend(pykka.ThreadingActor, backend.Backend):
self.search_results = config['youtube']['search_results']
self.playlist_max_videos = config['youtube']['playlist_max_videos']
self.uri_schemes = ['youtube', 'yt']
+ if youtube.API.test_api_key(self) is False:
+ raise exceptions.BackendError('Failed to verify YouTube API key')
class YouTubeLibraryProvider(backend.LibraryProvider):
|
check to see if google API key is valid and raise exception if not
|
py
|
diff --git a/a10_neutron_lbaas/neutron_ext/extensions/a10DeviceInstance.py b/a10_neutron_lbaas/neutron_ext/extensions/a10DeviceInstance.py
index <HASH>..<HASH> 100644
--- a/a10_neutron_lbaas/neutron_ext/extensions/a10DeviceInstance.py
+++ b/a10_neutron_lbaas/neutron_ext/extensions/a10DeviceInstance.py
@@ -62,7 +62,7 @@ class A10DeviceInstance(extensions.ExtensionDescriptor):
attr_map = RESOURCE_ATTRIBUTE_MAP
resources = resource_helper.build_resource_info(my_plurals,
attr_map,
- constants.A10_APPLIANCE)
+ constants.A10_DEVICE_INSTANCE)
return resources
|
Particularly embarrassing occurrence of 'appliance' corrected
|
py
|
diff --git a/examples/comment/reply_to_media_comments.py b/examples/comment/reply_to_media_comments.py
index <HASH>..<HASH> 100644
--- a/examples/comment/reply_to_media_comments.py
+++ b/examples/comment/reply_to_media_comments.py
@@ -54,7 +54,7 @@ for comment in tqdm(comments):
commenter = comment['user']['username']
text = comment['text']
# if using python3 change to:
- if sys.version_info[0] < 3:
+ if sys.version_info.major < 3:
bot.logger.info(unicode("Checking comment `{text}` from `{commenter}`".format(text=text, commenter=commenter), 'utf-8'))
else:
bot.logger.info("Checking comment `{text}` from `{commenter}`".format(text=text, commenter=commenter))
|
Changed line <I> checking sys.version_info major
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -62,5 +62,5 @@ setup(
py_modules=["web3utils"],
# https://packaging.python.org/en/latest/requirements.html
- install_requires=['psutil>5,<6'],
+ install_requires=['psutil>5,<6', 'web3==3.10.0'],
)
|
web3 is definitely required web3 does not seem to use semantic versioning. In order to avoid incompatibility issues, pin directly to a specific version.
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -11,8 +11,7 @@ setup(name='hamlpy',
keywords = 'haml django converter',
url = 'http://github.com/jessemiller/HamlPy',
license = 'MIT',
- requires = [
- 'django',
+ install_requires = [
'pygments'
],
entry_points = {
|
Fix setup.py to install pygments dependency
|
py
|
diff --git a/tests/unit/test_meta.py b/tests/unit/test_meta.py
index <HASH>..<HASH> 100644
--- a/tests/unit/test_meta.py
+++ b/tests/unit/test_meta.py
@@ -36,14 +36,14 @@ class MetaImageTest(unittest.TestCase):
print("Created: {}".format(cls._temp_path))
@my_vcr.use_cassette('tests/unit/cassettes/test_meta_pxbounds_overlap.yaml', filter_headers=['authorization'])
- def test_image_pxbounds_overlapping(self, clip=True):
+ def test_image_pxbounds_overlapping(self):
wv2 = CatalogImage('1030010076B8F500')
_bands, ysize, xsize = wv2.shape
image_shape = shape(wv2)
image_bounds = image_shape.bounds
width = image_bounds[2] - image_bounds[0]
clip_area = translate(image_shape, xoff=-0.5 * width)
- xmin, ymin, xmax, ymax = wv2.pxbounds(clip_area)
+ xmin, ymin, xmax, ymax = wv2.pxbounds(clip_area, clip=True)
self.assertEquals(xmin, 0)
self.assertEquals(ymin, 0)
self.assertEquals(xmax, xsize/2)
|
fixing test for clip pxbounds
|
py
|
diff --git a/gspread/v4/client.py b/gspread/v4/client.py
index <HASH>..<HASH> 100644
--- a/gspread/v4/client.py
+++ b/gspread/v4/client.py
@@ -73,12 +73,23 @@ class Client(BaseClient):
raise APIError(response)
def list_spreadsheet_files(self):
- url = (
- "https://www.googleapis.com/drive/v3/files"
- "?q=mimeType%3D'application%2Fvnd.google-apps.spreadsheet'"
- )
- r = self.request('get', url)
- return r.json()['files']
+ files = []
+ page_token = ''
+ url = "https://www.googleapis.com/drive/v3/files"
+ params = {
+ 'q': "mimeType='application/vnd.google-apps.spreadsheet'",
+ "pageSize": 1000
+ }
+
+ while page_token is not None:
+ if page_token:
+ params['pageToken'] = page_token
+
+ res = self.request('get', url, params=params).json()
+ files.extend(res['files'])
+ page_token = res.get('nextPageToken', None)
+
+ return files
def open(self, title):
"""Opens a spreadsheet.
|
Fetch all spreadsheets in Spreadsheet.list_spreadsheet_files
|
py
|
diff --git a/hooks/pre_commit_checks.py b/hooks/pre_commit_checks.py
index <HASH>..<HASH> 100644
--- a/hooks/pre_commit_checks.py
+++ b/hooks/pre_commit_checks.py
@@ -125,10 +125,10 @@ def _CheckCopyrightNonThirdParty(input_api):
def _Check(input_api, license_re, sources):
bad_files = []
for f in sources:
+ if input_api.IsIgnoredFile(f):
+ continue
contents = f.contents
if not license_re.search(contents):
- if input_api.IsIgnoredFile(f):
- continue
bad_files.append(f.filename)
if bad_files:
return [_FormatError(
|
Don't search for copyright headers in ignored files At the moment, ignored files are skipped after they fail the copyright header check, which can be quite slow in some cases (e.g. test files). This patch modifies the code to not search for copyright headers in ignored files at all. R=<EMAIL> Review URL: <URL>
|
py
|
diff --git a/vasppy/poscar.py b/vasppy/poscar.py
index <HASH>..<HASH> 100644
--- a/vasppy/poscar.py
+++ b/vasppy/poscar.py
@@ -147,7 +147,7 @@ class Poscar:
[ print( ''.join( [' {: .10f}'.format( element ) for element in row ] ) ) for row in self.cell.matrix ]
print( ' '.join( self.atoms ) )
print( ' '.join( [ str(n) for n in self.atom_numbers ] ) )
- if opts['selective']:
+ if opts.get('selective'):
print( 'Selective Dynamics' )
print( coordinate_type )
self.output_coordinates_only( coordinate_type=coordinate_type, opts=opts )
|
Fixed bug in Poscar output handling selective dynamics
|
py
|
diff --git a/helpers.py b/helpers.py
index <HASH>..<HASH> 100644
--- a/helpers.py
+++ b/helpers.py
@@ -212,7 +212,7 @@ def find_app_by_short_name(short_name, pbclient):
return response[0]
except exceptions.ConnectionError:
raise
- except ProjectNotFound:
+ except:
raise
|
Capture all exceptions and raise them.
|
py
|
diff --git a/ceph_deploy/install.py b/ceph_deploy/install.py
index <HASH>..<HASH> 100644
--- a/ceph_deploy/install.py
+++ b/ceph_deploy/install.py
@@ -85,8 +85,7 @@ def install(args):
if args.repo:
return install_repo(args)
- if args.nogpgcheck:
- gpgcheck = 0
+ gpgcheck = 0 if args.nogpgcheck else 1
if args.version_kind == 'stable':
version = args.release
|
[RM-<I>] install: define gpgcheck always
|
py
|
diff --git a/visidata/addons/pyobj.py b/visidata/addons/pyobj.py
index <HASH>..<HASH> 100644
--- a/visidata/addons/pyobj.py
+++ b/visidata/addons/pyobj.py
@@ -3,8 +3,9 @@ from visidata import *
option('pyobj_show_hidden', False, 'show methods and _private properties')
globalCommand('^X', 'expr = input("eval: ", "expr"); push_pyobj(expr, eval(expr))', 'eval Python expression and open the result')
-# find new key
-globalCommand('^A', 'status(type(cursorRow)); push_pyobj("%s.row[%s]" % (sheet.name, cursorRowIndex), cursorRow)', 'push sheet for this row as python object')
+
+globalCommand('^Y', 'status(type(cursorRow)); push_pyobj("%s.row[%s]" % (sheet.name, cursorRowIndex), cursorRow)', 'push sheet for this row as python object')
+globalCommand('z^Y', 'status(type(cursorValue)); push_pyobj("%s.row[%s].%s" % (sheet.name, cursorRowIndex, cursorCol.name), cursorValue)', 'push sheet for this cell value as python object')
#### generic list/dict/object browsing
def push_pyobj(name, pyobj, src=None):
|
^Y now pushes sheet of cursorRow (was ^A); z^Y pushes cursorValue
|
py
|
diff --git a/arangodb/orm/fields.py b/arangodb/orm/fields.py
index <HASH>..<HASH> 100644
--- a/arangodb/orm/fields.py
+++ b/arangodb/orm/fields.py
@@ -449,10 +449,13 @@ class DatetimeField(ModelField):
"""
"""
- if self.time and not self.null:
- return u'%s' % self.time.strftime(DatetimeField.DATE_FORMAT)
- else:
+ if self.null and self.time is None:
return None
+ else:
+ if self.time is None:
+ raise Exception('Datetime cannot be None')
+ else:
+ return u'%s' % self.time.strftime(DatetimeField.DATE_FORMAT)
def loads(self, date_string):
"""
@@ -516,10 +519,13 @@ class DateField(ModelField):
"""
"""
- if self.date and not self.null:
- return u'%s' % self.date.strftime(DateField.DATE_FORMAT)
- else:
+ if self.null and self.date is None:
return None
+ else:
+ if self.date is None:
+ raise Exception('Datetime cannot be None')
+ else:
+ return u'%s' % self.date.strftime(DateField.DATE_FORMAT)
def loads(self, date_string):
"""
|
Fixing handling with None and not None values
|
py
|
diff --git a/src/sos/utils.py b/src/sos/utils.py
index <HASH>..<HASH> 100644
--- a/src/sos/utils.py
+++ b/src/sos/utils.py
@@ -921,7 +921,8 @@ class SlotManager(object):
self.lock_file = os.path.join(os.path.expanduser('~'), '.sos', f'manager_{manager_id}.lck')
self.slot_file = os.path.join(os.path.expanduser('~'), '.sos', f'manager_{manager_id}.slot')
if reset:
- self._write_slot(0)
+ with fasteners.InterProcessLock(self.lock_file):
+ self._write_slot(0)
def _read_slot(self):
with open(self.slot_file, 'r') as slot:
|
Try to protect the lock file when SlotManger is created #<I>
|
py
|
diff --git a/tests/wipy/time.py b/tests/wipy/time.py
index <HASH>..<HASH> 100644
--- a/tests/wipy/time.py
+++ b/tests/wipy/time.py
@@ -61,12 +61,12 @@ spot_test(-1072915201, (1965, 12, 31, 23, 59, 59, 4, 365))
t1 = time.time()
time.sleep(2)
t2 = time.time()
-print(time.ticks_diff(t1, t2) == 2)
+print(abs(time.ticks_diff(t1, t2) -2) <= 1)
t1 = time.ticks_ms()
time.sleep_ms(50)
t2 = time.ticks_ms()
-print(time.ticks_diff(t1, t2) == 50)
+print(abs(time.ticks_diff(t1, t2)- 50) <= 1)
t1 = time.ticks_us()
time.sleep_us(1000)
|
tests/wipy: Improve robustness of time test.
|
py
|
diff --git a/satpy/writers/cf_writer.py b/satpy/writers/cf_writer.py
index <HASH>..<HASH> 100644
--- a/satpy/writers/cf_writer.py
+++ b/satpy/writers/cf_writer.py
@@ -208,6 +208,9 @@ def _encode_nc(obj):
elif isinstance(obj, np.void):
return tuple(obj)
elif isinstance(obj, np.ndarray):
+ if not len(obj.dtype) and obj.dtype == np.bool_:
+ # Convert array of booleans to array of strings
+ obj = obj.astype(str)
if not len(obj.dtype) and len(obj.shape) <= 1:
# Multi-dimensional nc attributes are not supported, so we have to skip record arrays and multi-dimensional
# arrays here
|
Convert array of booleans to array of strings
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100755
--- a/setup.py
+++ b/setup.py
@@ -110,10 +110,10 @@ setup(
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
- 'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
+ 'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Software Development :: Testing',
|
Add Python <I> to trove classifiers (#<I>)
|
py
|
diff --git a/pandocfilters.py b/pandocfilters.py
index <HASH>..<HASH> 100755
--- a/pandocfilters.py
+++ b/pandocfilters.py
@@ -13,27 +13,30 @@ import io
import json
import os
import sys
+import atexit
+import shutil
+import tempfile
# some utility-functions: make it easier to create your own filters
-def get_filename4code(module, content, ext=None):
+def get_filename4code(module, content, ext=None, remove=True):
"""Generate filename based on content
The function ensures that the (temporary) directory exists, so that the
file can be written.
+ An atexit function gets registered to remove the temporary directory at the
+ end of the script, unless disabled with the remove parameter.
+
Example:
filename = get_filename4code("myfilter", code)
"""
- imagedir = module + "-images"
+ imagedir = tempfile.mkdtemp(prefix=module)
+ if remove:
+ atexit.register(lambda: shutil.rmtree(imagedir))
fn = hashlib.sha1(content.encode(sys.getfilesystemencoding())).hexdigest()
- try:
- os.mkdir(imagedir)
- sys.stderr.write('Created directory ' + imagedir + '\n')
- except OSError:
- pass
if ext:
fn += "." + ext
return os.path.join(imagedir, fn)
|
Create temporary directory in a more safe manner. Also make sure the temporary directory gets removed at the end of the script, unless prohibited.
|
py
|
diff --git a/chess/syzygy.py b/chess/syzygy.py
index <HASH>..<HASH> 100644
--- a/chess/syzygy.py
+++ b/chess/syzygy.py
@@ -1257,6 +1257,9 @@ class Tablebases(object):
"""
num = 0
+ if not os.path.isdir(directory):
+ raise IOError("not a tablebase directory: {0}".format(repr(directory)))
+
for filename in filenames():
if load_wdl and os.path.isfile(os.path.join(directory, filename) + ".rtbw"):
wdl_table = WdlTable(directory, filename)
|
Let syzygy open_directory check for existence
|
py
|
diff --git a/tests/test_ytarray.py b/tests/test_ytarray.py
index <HASH>..<HASH> 100644
--- a/tests/test_ytarray.py
+++ b/tests/test_ytarray.py
@@ -42,6 +42,8 @@ from yt.testing import fake_random_ds, requires_module
from yt.funcs import fix_length
from yt.units.unit_symbols import \
cm, m, g
+from yt.utilities.physical_ratios import \
+ metallicity_sun
def operate_and_compare(a, b, op, answer):
# Test generator for YTArrays tests
@@ -985,3 +987,10 @@ def test_numpy_wrappers():
yield assert_array_equal, YTArray(union_answer, 'cm'), uunion1d(a1, a2)
yield assert_array_equal, union_answer, np.union1d(a1, a2)
+
+def test_dimensionless_conversion():
+ a = YTQuantity(1, 'Zsun')
+ b = a.in_units('Zsun')
+ a.convert_to_units('Zsun')
+ yield assert_true, a.units.cgs_value == metallicity_sun
+ yield assert_true, b.units.cgs_value == metallicity_sun
|
Add a test to ensure no-op in-place conversions of dimensionless quantities is really a no-op. This came up as a failure in the fields tests, so I've added a test to the unit module to aid future refactoring. --HG-- branch : yt
|
py
|
diff --git a/alchemist/app.py b/alchemist/app.py
index <HASH>..<HASH> 100644
--- a/alchemist/app.py
+++ b/alchemist/app.py
@@ -118,7 +118,7 @@ def _find_application(self):
for frame in reversed(inspect.stack()[1:]):
name = frame[0].f_globals.get('__package__')
if (name and (not name.startswith('alchemist')
- or name.startswith('alchemist.tests'))):
+ or name.startswith('alchemist.tests.a'))):
app = self._get_application_from_name(name)
if app:
return app
|
Confine alchemist.tests bypass.
|
py
|
diff --git a/libagent/device/trezor.py b/libagent/device/trezor.py
index <HASH>..<HASH> 100644
--- a/libagent/device/trezor.py
+++ b/libagent/device/trezor.py
@@ -3,6 +3,7 @@
import binascii
import logging
import os
+import sys
import semver
@@ -34,12 +35,12 @@ class Trezor(interface.Device):
return self._defs.PassphraseAck(passphrase=self.passphrase)
def create_pin_handler(conn):
- try:
- from PyQt5.QtWidgets import QApplication, QInputDialog, QLineEdit
- except ImportError:
+ if os.isatty(sys.stdin.fileno()):
return conn.callback_PinMatrixRequest # CLI-based PIN handler
def qt_handler(_):
+ # pylint: disable=import-error
+ from PyQt5.QtWidgets import QApplication, QInputDialog, QLineEdit
label = ('Use the numeric keypad to describe number positions.\n'
'The layout is:\n'
' 7 8 9\n'
|
pin: use PyQt only when running with no TTY
|
py
|
diff --git a/lib/svtplay_dl/service/tv4play.py b/lib/svtplay_dl/service/tv4play.py
index <HASH>..<HASH> 100644
--- a/lib/svtplay_dl/service/tv4play.py
+++ b/lib/svtplay_dl/service/tv4play.py
@@ -204,7 +204,10 @@ class Tv4play(Service, OpenGraphThumbMixin):
self.cookies = data.cookies
fail = re.search("<p class='failed-login'>([^<]+)</p>", data.text)
if fail:
- return ServiceError(fail.group(1))
+ message = fail.group(1)
+ if is_py2:
+ message = message.encode("utf8")
+ return ServiceError(message)
return True
|
tv4play: wrong user/passwd crashes the script. exception handling cant handle unicode on py2
|
py
|
diff --git a/benchbuild/environments/domain/declarative.py b/benchbuild/environments/domain/declarative.py
index <HASH>..<HASH> 100644
--- a/benchbuild/environments/domain/declarative.py
+++ b/benchbuild/environments/domain/declarative.py
@@ -56,7 +56,7 @@ class ContainerImage(list):
DEFAULT_BASES: tp.Dict[str, ContainerImage] = {
'benchbuild:alpine': ContainerImage() \
- .from_("alpine:latest") \
+ .from_("alpine:edge") \
.run('apk', 'update') \
.run('apk', 'add', 'python3', 'python3-dev', 'postgresql-dev',
'linux-headers', 'musl-dev', 'git', 'gcc', 'sqlite-libs',
|
pygit2 <I> requires libgit2-dev <I> libgit2-dev:<I> is only available in alpine:edge. So, we bump the version.
|
py
|
diff --git a/firebirdsql/tests/test_basic.py b/firebirdsql/tests/test_basic.py
index <HASH>..<HASH> 100755
--- a/firebirdsql/tests/test_basic.py
+++ b/firebirdsql/tests/test_basic.py
@@ -224,3 +224,27 @@ class TestBasic(base.TestBase):
cur.execute("CREATE TABLE foo (a INTEGER)")
except firebirdsql.OperationalError:
pass
+
+ def test_boolean(self):
+ cur = self.connection.cursor()
+ cur.execute("CREATE TABLE boolean_test (b BOOLEAN)")
+ cur.close()
+ self.connection.commit()
+
+ cur = self.connection.cursor()
+ cur.execute("insert into boolean_test(b) values (true)")
+ cur.execute("insert into boolean_test(b) values (false)")
+ cur.close()
+
+ cur = self.connection.cursor()
+ cur.execute("select * from boolean_test where b is true")
+ self.assertEqual(cur.fetchone()[0], True)
+ cur.close()
+
+ cur = self.connection.cursor()
+ cur.execute("select * from boolean_test where b is false")
+ self.assertEqual(cur.fetchone()[0], False)
+ cur.close()
+
+ self.connection.close()
+
|
test boolean (fail)
|
py
|
diff --git a/isotopic_logging/proxy.py b/isotopic_logging/proxy.py
index <HASH>..<HASH> 100644
--- a/isotopic_logging/proxy.py
+++ b/isotopic_logging/proxy.py
@@ -40,7 +40,7 @@ class LoggerProxy(object):
def __init__(self, logger, injector):
self._original = logger
- self._injector = injector
+ self.injector = injector
def __getattr__(self, name):
"""
@@ -57,7 +57,7 @@ class LoggerProxy(object):
@wraps(result)
def wrapper(message, *args, **kwargs):
- return result(self._injector.mark(message), *args, **kwargs)
+ return result(self.injector.mark(message), *args, **kwargs)
# Cache wrapper, so it won't be constructed again for future calls.
setattr(self, name, wrapper)
|
make injector public in proxy (#4)
|
py
|
diff --git a/ipyrad/assemble/rawedit.py b/ipyrad/assemble/rawedit.py
index <HASH>..<HASH> 100644
--- a/ipyrad/assemble/rawedit.py
+++ b/ipyrad/assemble/rawedit.py
@@ -197,6 +197,8 @@ def cutadaptit_single(data, sample):
fullcomp(data.paramsdict["restriction_overhang"][1])[::-1] \
+ data._hackersonly["p3_adapter"])
else:
+ LOGGER.warning("No barcode information present, and is therefore not "+\
+ "being used for adapter trimming of SE gbs data.")
## else no search for barcodes on 3'
adapter = \
fullcomp(data.paramsdict["restriction_overhang"][1])[::-1] \
|
bugfix; error was raised in no barcodes during step2 filtering for gbs data. Now just a warning is printed
|
py
|
diff --git a/examples/twitter-timeline.py b/examples/twitter-timeline.py
index <HASH>..<HASH> 100644
--- a/examples/twitter-timeline.py
+++ b/examples/twitter-timeline.py
@@ -32,8 +32,7 @@ params = {'include_rts': 1, # Include retweets
response = twitter.get('https://api.twitter.com/1/statuses/home_timeline.json',
params=params,
access_token=access_token,
- access_token_secret=access_token_secret,
- header_auth=True)
+ access_token_secret=access_token_secret)
for i, tweet in enumerate(response.content, 1):
handle = tweet['user']['screen_name'].encode('utf-8')
|
removing header_auth from Twitter example to ensure it works without
|
py
|
diff --git a/spyderlib/utils/programs.py b/spyderlib/utils/programs.py
index <HASH>..<HASH> 100644
--- a/spyderlib/utils/programs.py
+++ b/spyderlib/utils/programs.py
@@ -149,9 +149,11 @@ def run_python_script_in_terminal(fname, wdir, args, interact,
"""Run Python script in an external system terminal"""
# If fname has spaces on it it can't be ran on Windows, so we have to
- # enclose it in quotes
+ # enclose it in quotes. Also wdir can come with / as os.sep, so we
+ # need to take care of it
if os.name == 'nt':
fname = '"' + fname + '"'
+ wdir = wdir.replace('/', '\\')
p_args = ['python']
p_args += get_python_args(fname, python_args, interact, debug, args)
|
Fix error on Windows and Python 2 when running code on external terminal
|
py
|
diff --git a/tests/profiles/googlefonts_test.py b/tests/profiles/googlefonts_test.py
index <HASH>..<HASH> 100644
--- a/tests/profiles/googlefonts_test.py
+++ b/tests/profiles/googlefonts_test.py
@@ -432,12 +432,12 @@ def test_condition__registered_vendor_ids():
print('"B&H ": "Bigelow & Holmes" is a valid vendor id that contains an ampersand.')
assert "B&H " in registered_ids # Bigelow & Holmes
- print('"AE ": "AE Type" is a good vendor id with 2 letters and padded with spaces.')
- assert "AE " in registered_ids # AE Type
+ print('"MS ": "Microsoft Corp." is a good vendor id with 2 letters and padded with spaces.')
+ assert "MS " in registered_ids # Microsoft Corp.
print('All vendor ids must be 4 chars long!')
assert "GNU" not in registered_ids # 3 chars long is bad
- assert "AE" not in registered_ids # 2 chars long is bad
+ assert "MS" not in registered_ids # 2 chars long is bad
assert "H" not in registered_ids # 1 char long is bad
print('"H ": "Hurme Design" is a good vendor id with a single letter padded with spaces.')
|
vendor ID "AE " was changed this week to "AES " so... I'll use "MS ", for Microsoft Corp. as an example of a valid 2 letter ID since it is highly unlikely to change, as "AE " did.
|
py
|
diff --git a/django_markdown/urls.py b/django_markdown/urls.py
index <HASH>..<HASH> 100644
--- a/django_markdown/urls.py
+++ b/django_markdown/urls.py
@@ -1,9 +1,18 @@
""" Define preview URL. """
+from django import VERSION
from django.conf.urls import url
from .views import preview
-urlpatterns = [
- url('preview/$', preview, name='django_markdown_preview')
-]
+
+if VERSION >= (1, 8):
+ urlpatterns = [
+ url('preview/$', preview, name='django_markdown_preview')
+ ]
+else:
+ # django <= 1.7 compatibility
+ from django.conf.urls import patterns
+ urlpatterns = patterns(
+ '', url('preview/$', preview, name='django_markdown_preview')
+ )
|
fix urls.py django.conf.urls.patterns will be deprecated in django <I>
|
py
|
diff --git a/osbs/core.py b/osbs/core.py
index <HASH>..<HASH> 100755
--- a/osbs/core.py
+++ b/osbs/core.py
@@ -973,7 +973,6 @@ class Openshift(object):
# Get the JSON for the ImageStream
imagestream_json = self.get_image_stream(name).json()
- logger.debug("imagestream: %r", imagestream_json)
changed = False
# existence of dockerImageRepository is limiting how many tags are updated
@@ -989,11 +988,7 @@ class Openshift(object):
changed = True
if changed:
- imagestream_json = self.update_image_stream(name, imagestream_json).json()
-
- # Note the tags before import
- oldtags = imagestream_json.get('status', {}).get('tags', [])
- logger.debug("tags before import: %r", oldtags)
+ self.update_image_stream(name, imagestream_json)
stream_import['metadata']['name'] = name
stream_import['spec']['images'] = []
|
Don't log imagestream and tags before importing, which is polluting build logs * CLOUDBLD-<I>
|
py
|
diff --git a/lancet/core.py b/lancet/core.py
index <HASH>..<HASH> 100644
--- a/lancet/core.py
+++ b/lancet/core.py
@@ -282,13 +282,14 @@ class Args(BaseArgs):
constant_set = set(self.constant_keys)
unordered_varying = set(collection.keys()).difference(constant_set)
# Finding out how fast keys are varying
- grouplens = [(len([len(list(y)) for (_,y) in itertools.groupby(collection[k])]),k) for k in collection]
+ grouplens = [(len([len(list(y)) for (_,y) in itertools.groupby(collection[k])]),k) for k in collection
+ if (k not in self.unsortable_keys)]
varying_counts = [(n,k) for (n,k) in sorted(grouplens) if (k in unordered_varying)]
# Grouping keys with common frequency alphanumerically (desired behaviour).
ddict = defaultdict(list)
for (n,k) in varying_counts: ddict[n].append(k)
alphagroups = [sorted(ddict[k]) for k in sorted(ddict)]
- return [el for group in alphagroups for el in group]
+ return [el for group in alphagroups for el in group] + sorted(self.unsortable_keys)
@property
def dframe(self):
|
Using unsortable_keys attribute to help exclude unsortable types
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100755
--- a/setup.py
+++ b/setup.py
@@ -525,7 +525,6 @@ SETUP_KWARGS = {'name': NAME,
],
'packages': ['salt',
'salt.auth',
- 'salt.caches',
'salt.cli',
'salt.client',
'salt.client.raet',
|
remove salt.caches for full removal, see PR #<I>
|
py
|
diff --git a/hug/interface.py b/hug/interface.py
index <HASH>..<HASH> 100644
--- a/hug/interface.py
+++ b/hug/interface.py
@@ -331,7 +331,7 @@ class CLI(Interface):
self.interface.cli = self
used_options = {'h', 'help'}
- nargs_set = self.interface.takes_kargs
+ nargs_set = self.interface.takes_kargs or self.interface.takes_kwargs
self.parser = argparse.ArgumentParser(description=route.get('doc', self.interface.spec.__doc__))
if 'version' in route:
self.parser.add_argument('-v', '--version', action='version',
|
Set narg to true for kwargs
|
py
|
diff --git a/lib/pyfrc/test_support/fake_time.py b/lib/pyfrc/test_support/fake_time.py
index <HASH>..<HASH> 100644
--- a/lib/pyfrc/test_support/fake_time.py
+++ b/lib/pyfrc/test_support/fake_time.py
@@ -63,7 +63,10 @@ class FakeTime:
# Setup driver station hooks
import wpilib
- assert not hasattr(wpilib.DriverStation, 'instance')
+ assert not hasattr(wpilib.DriverStation, 'instance'), \
+ "You must not initialize the driver station before your robot " + \
+ "code executes. Perhaps you have a global somewhere? Globals are " + \
+ "generally evil and should be avoided!"
# The DS thread causes too many problems, disable it by getting
# rid of the thread function
|
Give a better error message if the DS was initialized
|
py
|
diff --git a/pyopenuv/__version__.py b/pyopenuv/__version__.py
index <HASH>..<HASH> 100644
--- a/pyopenuv/__version__.py
+++ b/pyopenuv/__version__.py
@@ -1,2 +1,2 @@
"""Define a version constant."""
-__version__ = '1.0.6'
+__version__ = '1.0.7'
|
Bumped version to <I>
|
py
|
diff --git a/synth.py b/synth.py
index <HASH>..<HASH> 100644
--- a/synth.py
+++ b/synth.py
@@ -21,6 +21,9 @@ import subprocess
logging.basicConfig(level=logging.DEBUG)
+AUTOSYNTH_MULTIPLE_COMMITS = True
+
+
gapic = gcp.GAPICMicrogenerator()
common_templates = gcp.CommonTemplates()
|
build: set AUTOSYNTH_MULTIPLE_COMMITS=true for context aware commits (#<I>)
|
py
|
diff --git a/master/buildbot/interfaces.py b/master/buildbot/interfaces.py
index <HASH>..<HASH> 100644
--- a/master/buildbot/interfaces.py
+++ b/master/buildbot/interfaces.py
@@ -17,9 +17,10 @@
Define the interfaces that are implemented by various buildbot classes.
"""
-# E0211: Method has no argument
-# E0213: Method should have "self" as first argument
-# pylint: disable-msg=E0211,E0213
+
+# disable pylint warnings triggered by interface definitions
+# pylint: disable=no-self-argument
+# pylint: disable=no-method-argument
from zope.interface import Interface, Attribute
|
replaced deprecated 'disable-msg' pylint pragma with 'disable'
|
py
|
diff --git a/airflow/providers/amazon/aws/hooks/cloud_formation.py b/airflow/providers/amazon/aws/hooks/cloud_formation.py
index <HASH>..<HASH> 100644
--- a/airflow/providers/amazon/aws/hooks/cloud_formation.py
+++ b/airflow/providers/amazon/aws/hooks/cloud_formation.py
@@ -19,9 +19,10 @@
"""
This module contains AWS CloudFormation Hook
"""
-from typing import Optional
+from typing import Optional, Union
from botocore.exceptions import ClientError
+from boto3 import client, resource
from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
@@ -40,7 +41,7 @@ class AWSCloudFormationHook(AwsBaseHook):
def __init__(self, *args, **kwargs):
super().__init__(client_type='cloudformation', *args, **kwargs)
- def get_stack_status(self, stack_name):
+ def get_stack_status(self, stack_name: Union[client, resource]) -> Optional[dict]:
"""
Get stack status from CloudFormation.
"""
|
added type hints for aws cloud formation (#<I>)
|
py
|
diff --git a/chalice/cli/__init__.py b/chalice/cli/__init__.py
index <HASH>..<HASH> 100644
--- a/chalice/cli/__init__.py
+++ b/chalice/cli/__init__.py
@@ -302,8 +302,11 @@ def package(ctx, single_file, stage, out):
"generated cloudformation template. If this option "
"is provided, a buildspec.yml will be generated "
"as a separate file and not included in the cfn "
- "template. This file should be named 'buildspec.yml'"
- "and placed in the root directory of your app."))
+ "template. This allows you to make changes to how "
+ "the project is built without having to redeploy "
+ "a CloudFormation template. This file should be "
+ "named 'buildspec.yml' and placed in the root "
+ "directory of your app."))
@click.argument('filename')
@click.pass_context
def generate_pipeline(ctx, codebuild_image, source, buildspec_file, filename):
|
Update help docs with motivation for buildspec option
|
py
|
diff --git a/pymagicc/__init__.py b/pymagicc/__init__.py
index <HASH>..<HASH> 100644
--- a/pymagicc/__init__.py
+++ b/pymagicc/__init__.py
@@ -20,6 +20,8 @@ from .scenarios import rcp26, rcp45, rcp60, rcp85, rcps, zero_emissions # noqa
__version__ = get_versions()["version"]
del get_versions
+logger = logging.getLogger(__name__)
+
if not _config["is_windows"]:
wine_installed = (
subprocess.call(
@@ -28,7 +30,7 @@ if not _config["is_windows"]:
== 0
)
if not wine_installed:
- logging.warning("Wine is not installed")
+ logger.warning("Wine is not installed")
def run(scenario, magicc_version=6, **kwargs):
|
Fix up logger instantiation Using root logger is bad practice, see <URL>
|
py
|
diff --git a/ndb/model.py b/ndb/model.py
index <HASH>..<HASH> 100644
--- a/ndb/model.py
+++ b/ndb/model.py
@@ -284,7 +284,7 @@ Key = ndb.key.Key # For export.
# NOTE: Property and Error classes are added later.
__all__ = ['Key', 'ModelAdapter', 'ModelKey', 'MetaModel', 'Model', 'Expando',
- 'BlobKey',
+ 'BlobKey', 'GeoPt',
'transaction', 'transaction_async',
'in_transaction', 'transactional',
'get_multi', 'get_multi_async',
|
Forgot to export GeoPt.
|
py
|
diff --git a/Main.py b/Main.py
index <HASH>..<HASH> 100755
--- a/Main.py
+++ b/Main.py
@@ -75,8 +75,6 @@ class Application(object): # TODO: rename to CLIApplication
self.print_todo(number)
self.dirty = True
- self.dirty = True
-
def dep(self):
""" Handles dependencies between todos. """
def handle_add_rm(operation):
|
Don't set dirty variable twice.
|
py
|
diff --git a/gns3server/controller/gns3vm/vmware_gns3_vm.py b/gns3server/controller/gns3vm/vmware_gns3_vm.py
index <HASH>..<HASH> 100644
--- a/gns3server/controller/gns3vm/vmware_gns3_vm.py
+++ b/gns3server/controller/gns3vm/vmware_gns3_vm.py
@@ -90,8 +90,10 @@ class VMwareGNS3VM(BaseGNS3VM):
"""
List all VMware VMs
"""
-
- return (yield from self._vmware_manager.list_vms())
+ try:
+ return (yield from self._vmware_manager.list_vms())
+ except VMwareError as e:
+ raise GNS3VMError("Could not list VMware VMs: {}".format(str(e)))
@asyncio.coroutine
def start(self):
|
Fix stack when looking for GNS3 VM with player installed Fix #<I>
|
py
|
diff --git a/docs/source/_ext/progress.py b/docs/source/_ext/progress.py
index <HASH>..<HASH> 100644
--- a/docs/source/_ext/progress.py
+++ b/docs/source/_ext/progress.py
@@ -106,9 +106,12 @@ class ProgressTable(Directive):
tr += nodes.description('',
nodes.inline(text="\u2713" if comps[cur][-1] else " "),
classes=['field-name', 'progress-checkbox'])
+ text_description = nodes.inline()
+ self.state.nested_parse(StringList(['{:s}'.format(nl['description'].lstrip() if nl['description'] is not None else ' ')], parent=self), 0, text_description)
+
tr += nodes.description('',
nodes.strong(text='{:s} '.format(nl['item'])),
- nodes.inline(text='{:s}'.format(nl['description'] if nl['description'] is not None else ' ')),
+ text_description,
classes=['field-value'])
body += tr
|
Allow markup in progress descriptions to be interpreted
|
py
|
diff --git a/python_modules/dagster/dagster/core/definitions/partition.py b/python_modules/dagster/dagster/core/definitions/partition.py
index <HASH>..<HASH> 100644
--- a/python_modules/dagster/dagster/core/definitions/partition.py
+++ b/python_modules/dagster/dagster/core/definitions/partition.py
@@ -18,10 +18,6 @@ from .mode import DEFAULT_MODE_NAME
from .utils import check_valid_name
-def by_name(partition):
- return partition.name
-
-
class Partition(namedtuple("_Partition", ("value name"))):
"""
Partition is the representation of a logical slice across an axis of a pipeline's work
@@ -56,19 +52,6 @@ def last_empty_partition(context, partition_set_def):
return selected
-def first_partition(context, partition_set_def=None):
- check.inst_param(context, "context", ScheduleExecutionContext)
- partition_set_def = check.inst_param(
- partition_set_def, "partition_set_def", PartitionSetDefinition
- )
-
- partitions = partition_set_def.get_partitions(context.scheduled_execution_time)
- if not partitions:
- return None
-
- return partitions[0]
-
-
class PartitionSetDefinition(
namedtuple(
"_PartitionSetDefinition",
|
Remove unused partition functions Summary: As the title. Test Plan: bk Reviewers: sashank, prha Reviewed By: prha Differential Revision: <URL>
|
py
|
diff --git a/anytemplate/engines/base.py b/anytemplate/engines/base.py
index <HASH>..<HASH> 100644
--- a/anytemplate/engines/base.py
+++ b/anytemplate/engines/base.py
@@ -30,6 +30,7 @@ class CompileError(Exception):
pass
+# pylint: disable=unused-argument
def fallback_renders(template_content, *args, **kwargs):
"""
Render given template string `template_content`.
@@ -72,7 +73,11 @@ def fallback_render(template, context, at_paths=None,
return open(tmpl).read()
+# pylint: enable=unused-argument
class Engine(object):
+ """
+ Abstract class implementation of Template Engines.
+ """
_name = "base"
_file_extensions = []
|
disable pylint warns (unused-argument for anytempalte.engines.base.fallback_render{s,}) as it's safely ignored
|
py
|
diff --git a/examples/v1arch/asyncore/agent/ntforg/send-trap-over-ipv4-and-ipv6.py b/examples/v1arch/asyncore/agent/ntforg/send-trap-over-ipv4-and-ipv6.py
index <HASH>..<HASH> 100644
--- a/examples/v1arch/asyncore/agent/ntforg/send-trap-over-ipv4-and-ipv6.py
+++ b/examples/v1arch/asyncore/agent/ntforg/send-trap-over-ipv4-and-ipv6.py
@@ -30,6 +30,6 @@ transportDispatcher.registerTransport(
transportDispatcher.sendMessage(
encoder.encode(trapMsg), udp.domainName, ('localhost', 162)
)
-transportDispatcher.stopDispatcher() # XXX run only once
+transportDispatcher.jobStarted(1)
transportDispatcher.runDispatcher()
transportDispatcher.closeDispatcher()
|
transportDispatcher.stopDispatcher() is deprecated
|
py
|
diff --git a/pyoko/db/base.py b/pyoko/db/base.py
index <HASH>..<HASH> 100644
--- a/pyoko/db/base.py
+++ b/pyoko/db/base.py
@@ -56,7 +56,8 @@ class DBObjects(object):
self.compiled_query = ''
# self._solr_query = {} # query parts, will be compiled before execution
self._solr_query = [] # query parts, will be compiled before execution
- self._solr_params = {} # search parameters. eg: rows, fl, start, sort etc.
+ self._solr_params = {
+ 'sort': 'timestamp desc'} # search parameters. eg: rows, fl, start, sort etc.
self._solr_locked = False
self._solr_cache = {}
self.key = None
|
added a default value (timestamp desc) for sorting
|
py
|
diff --git a/tests/test_requests.py b/tests/test_requests.py
index <HASH>..<HASH> 100644
--- a/tests/test_requests.py
+++ b/tests/test_requests.py
@@ -200,7 +200,7 @@ def test_redirect_compiles_url():
app.bind('Request', REQUEST)
request = app.make('Request').load_app(app)
- request.redirect('test/url')
+ request.redirect('/test/url')
assert request.compile_route_to_url() == '/test/url'
|
fixed redirection tests to be a more likely scenario
|
py
|
diff --git a/holoviews/operation/datashader.py b/holoviews/operation/datashader.py
index <HASH>..<HASH> 100644
--- a/holoviews/operation/datashader.py
+++ b/holoviews/operation/datashader.py
@@ -475,7 +475,12 @@ class regrid(ResamplingOperation):
regridded.append(rarray)
regridded = xr.Dataset({vd.name: xarr for vd, xarr in zip(element.vdims, regridded)})
- return element.clone(regridded, datatype=['xarray'])
+ if xtype == 'datetime':
+ xstart, xend = np.array([xstart, xend]).astype('datetime64[us]')
+ if ytype == 'datetime':
+ ystart, yend = np.array([ystart, yend]).astype('datetime64[us]')
+ bbox = BoundingBox(points=[(xstart, ystart), (xend, yend)])
+ return element.clone(regridded, bounds=bbox, datatype=['xarray'])
|
Fixed bounding box issues in regrid (#<I>)
|
py
|
diff --git a/spikeextractors/extractors/yassextractors/yassextractors.py b/spikeextractors/extractors/yassextractors/yassextractors.py
index <HASH>..<HASH> 100644
--- a/spikeextractors/extractors/yassextractors/yassextractors.py
+++ b/spikeextractors/extractors/yassextractors/yassextractors.py
@@ -41,7 +41,7 @@ class YassSortingExtractor(SortingExtractor):
if start_frame is None:
start_frame = 0
if end_frame is None:
- end_frame = 1E50 # use large time
+ end_frame = np.inf
idx2 = np.where(np.logical_and(spike_times>=start_frame, spike_times<end_frame))[0]
spike_times = spike_times[idx2]
|
Update spikeextractors/extractors/yassextractors/yassextractors.py
|
py
|
diff --git a/beeswarm/shared/tests/__init__.py b/beeswarm/shared/tests/__init__.py
index <HASH>..<HASH> 100644
--- a/beeswarm/shared/tests/__init__.py
+++ b/beeswarm/shared/tests/__init__.py
@@ -1 +0,0 @@
-__author__ = 'czardoz'
|
removed author name added by pycharm
|
py
|
diff --git a/src/saml2/response.py b/src/saml2/response.py
index <HASH>..<HASH> 100644
--- a/src/saml2/response.py
+++ b/src/saml2/response.py
@@ -209,10 +209,10 @@ def for_me(conditions, myself):
if not restriction.audience:
continue
for audience in restriction.audience:
- if audience.text.strip() == myself:
+ if audience.text and audience.text.strip() == myself:
return True
else:
- logger.debug("AudienceRestriction - One condition not satisfied: %s != %s" % (audience.text.strip(), myself))
+ logger.debug("AudienceRestriction - One condition not satisfied: {} != {}".format(audience.text, myself))
logger.debug("AudienceRestrictions not satisfied!")
return False
|
Response with unvalued AudienceRestriction (Condition) Handling
|
py
|
diff --git a/toolkit/srm/srm.py b/toolkit/srm/srm.py
index <HASH>..<HASH> 100644
--- a/toolkit/srm/srm.py
+++ b/toolkit/srm/srm.py
@@ -323,7 +323,7 @@ class SRM(BaseEstimator):
# M-step
# Update Sigma_s and compute its trace
- sigma_s = inv_sigma_s_rhos + shared_response.dot(shared_response.T) / float(samples)
+ sigma_s = inv_sigma_s_rhos + shared_response.dot(shared_response.T) / samples
trace_sigma_s = samples * np.trace(sigma_s)
# Update each subject's mapping transform W_i and error variance rho_i^2
@@ -338,7 +338,7 @@ class SRM(BaseEstimator):
rho2[subject] = trace_xtx[subject]
rho2[subject] += -2 * np.sum(w[subject] * a_subject).sum()
rho2[subject] += trace_sigma_s
- rho2[subject] /= float(samples * voxels[subject])
+ rho2[subject] /= samples * voxels[subject]
if self.verbose:
# Calculate and print the current log-likelihood for checking convergence
|
Remove superfluous float division casting in SRM
|
py
|
diff --git a/angr/analyses/cfg/cfg_fast.py b/angr/analyses/cfg/cfg_fast.py
index <HASH>..<HASH> 100644
--- a/angr/analyses/cfg/cfg_fast.py
+++ b/angr/analyses/cfg/cfg_fast.py
@@ -1001,7 +1001,7 @@ class CFGFast(ForwardAnalysis, CFGBase): # pylint: disable=abstract-method
try:
return next(self._regions.irange(minimum=address, reverse=True))
- except KeyError:
+ except StopIteration:
return None
# Methods for scanning the entire image
|
#<I> missed a spot
|
py
|
diff --git a/slacker/__init__.py b/slacker/__init__.py
index <HASH>..<HASH> 100644
--- a/slacker/__init__.py
+++ b/slacker/__init__.py
@@ -12,6 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import cgi
import json
import requests
@@ -227,7 +228,7 @@ class Chat(BaseAPI):
return self.post('chat.postMessage',
params={
'channel': channel,
- 'text': text,
+ 'text': cgi.escape(text),
'username': username,
'parse': parse,
'link_names': link_names,
@@ -239,7 +240,7 @@ class Chat(BaseAPI):
def update(self, channel, ts, text):
self.post('chat.update',
- params={'channel': channel, 'ts': ts, 'text': text})
+ params={'channel': channel, 'ts': ts, 'text': cgi.escape(text)})
def delete(self, channel, ts):
self.post('chat.delete', params={'channel': channel, 'ts': ts})
@@ -334,7 +335,7 @@ class Files(BaseAPI):
'filetype': filetype,
'filename': filename,
'title': title,
- 'initial_comment': initial_comment,
+ 'initial_comment': cgi.escape(initial_comment),
'channels': channels
},
files={'file': f})
|
HTML escaping for messages and comments.
|
py
|
diff --git a/grammpy/Grammars/RawGrammar.py b/grammpy/Grammars/RawGrammar.py
index <HASH>..<HASH> 100644
--- a/grammpy/Grammars/RawGrammar.py
+++ b/grammpy/Grammars/RawGrammar.py
@@ -119,7 +119,7 @@ class RawGrammar:
def get_rule(self, rules=None):
if rules is None:
- return self.__rules.get()
+ return [rule for rule in self.__rules.get() if rule._active]
converted = self._control_rules(rules)
if not HashContainer.is_iterable(rules):
return self.__rules.get(converted)[0]
@@ -129,7 +129,7 @@ class RawGrammar:
return self.get_rule(rules)
def rules(self):
- return [rule for rule in self.__rules.get() if rule._active]
+ return self.rule()
def rules_count(self):
return len(self.rules())
|
Rule iteration based on get_rule
|
py
|
diff --git a/indra/statements/statements.py b/indra/statements/statements.py
index <HASH>..<HASH> 100644
--- a/indra/statements/statements.py
+++ b/indra/statements/statements.py
@@ -582,9 +582,8 @@ class Statement(object):
for attr in ['evidence', 'belief', 'uuid', 'supports', 'supported_by',
'is_activation']:
kwargs.pop(attr, None)
- for attr in ['_full_hash', '_shallow_hash']:
- my_hash = kwargs.pop(attr, None)
- my_shallow_hash = kwargs.pop(attr, None)
+ my_hash = kwargs.pop('_full_hash', None)
+ my_shallow_hash = kwargs.pop('_shallow_hash', None)
for attr in self._agent_order:
attr_value = kwargs.get(attr)
if isinstance(attr_value, list):
|
Fix copying of hashes in make_generic_copy method.
|
py
|
diff --git a/openquake/hazardlib/nrml.py b/openquake/hazardlib/nrml.py
index <HASH>..<HASH> 100644
--- a/openquake/hazardlib/nrml.py
+++ b/openquake/hazardlib/nrml.py
@@ -305,6 +305,24 @@ validators = {
}
+def parse_src_groups(fname, converter, monitor):
+ """
+ :param fname:
+ the full pathname of a source model file
+ :param converter:
+ a SourceConverter instance
+ :param monitor:
+ a :class:`openquake.performance.Monitor` instance
+ """
+ if fname.endswith(('.xml', '.nrml')):
+ sm = to_python(fname, converter)
+ elif fname.endswith('.hdf5'):
+ sm = sourceconverter.to_python(fname, converter)
+ else:
+ raise ValueError('Unrecognized extension in %s' % fname)
+ return sm.src_groups
+
+
class SourceModelParser(object):
"""
A source model parser featuring a cache.
|
Initial work to parse the source models in parallel Former-commit-id: fb2f<I>cbe<I>f<I>ddec9f0d8dc8ccb<I>c6fae<I>f
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -9,4 +9,5 @@ setup(name='keen-client',
author_email='team@keen.io',
url='https://github.com/keenlabs/KeenClient-Python',
packages=['keen'],
+ install_requires=['requests']
)
|
tell pypi that we require the requests package
|
py
|
diff --git a/_distutils_hack/__init__.py b/_distutils_hack/__init__.py
index <HASH>..<HASH> 100644
--- a/_distutils_hack/__init__.py
+++ b/_distutils_hack/__init__.py
@@ -5,10 +5,10 @@ import os
is_pypy = '__pypy__' in sys.builtin_module_names
-
-# warnings.filterwarnings('ignore',
-# r'.+ distutils\b.+ deprecated',
-# DeprecationWarning)
+import warnings
+warnings.filterwarnings('ignore',
+ r'.+ distutils\b.+ deprecated',
+ DeprecationWarning)
def warn_distutils_present():
|
Temporarily add back filter, tests fail without it
|
py
|
diff --git a/src/collectors/postgres/postgres.py b/src/collectors/postgres/postgres.py
index <HASH>..<HASH> 100644
--- a/src/collectors/postgres/postgres.py
+++ b/src/collectors/postgres/postgres.py
@@ -379,7 +379,8 @@ class TableScanStats(QueryStats):
path = "%(datname)s.scans.%(metric)s"
multi_db = True
query = """
- SELECT COALESCE(sum(seq_scan),0) AS sequential,
+ SELECT 'relname' AS relname,
+ COALESCE(sum(seq_scan),0) AS sequential,
COALESCE(sum(idx_scan),0) AS index
FROM pg_stat_user_tables
"""
|
Hack to get seq_scan / idx_scan query to work properly
|
py
|
diff --git a/pynlpl/formats/folia.py b/pynlpl/formats/folia.py
index <HASH>..<HASH> 100644
--- a/pynlpl/formats/folia.py
+++ b/pynlpl/formats/folia.py
@@ -6111,7 +6111,9 @@ class Document(object):
self.metadatatype = MetaDataType.NATIVE
self.metadatafile = None #reference to external metadata file
-
+ self.submetadata = {}
+ self.submetadatatype = {}
+ self.submetadatafile = {}
self.textclasses = set() #will contain the text classes found
|
implemented initialisation of submetadata structures
|
py
|
diff --git a/pycfdns/__init__.py b/pycfdns/__init__.py
index <HASH>..<HASH> 100644
--- a/pycfdns/__init__.py
+++ b/pycfdns/__init__.py
@@ -46,11 +46,14 @@ class CloudflareUpdater:
raise CloudflareZoneException("Could not get zone ID") from error
return zone_id
- async def get_zone_records(self, zone_id):
+ async def get_zone_records(self, zone_id, record_type = None):
"""Get the records of a zone."""
records = []
endpoint = f"{zone_id}/dns_records&per_page=100"
+ if record_type:
+ endpoint += f"&type={record_type}"
+
url = BASE_URL.format(endpoint)
data = await self.api.get_json(url)
data = data["result"]
|
Allow getting zone records of specific type (#5)
|
py
|
diff --git a/tools/yaml2jmxtrans.py b/tools/yaml2jmxtrans.py
index <HASH>..<HASH> 100755
--- a/tools/yaml2jmxtrans.py
+++ b/tools/yaml2jmxtrans.py
@@ -96,7 +96,11 @@ class Queries(object):
"""
root = {'servers' : [] }
for host_name in host_names:
- root['servers'].append(self.create_host_entry(host_name, query_names, query_port, username, password, urlTemplate))
+ ## Extract port if present
+ if present(host, sep, port) = host_name.partition(":")
+ if sep == "":
+ port = query_port
+ root['servers'].append(self.create_host_entry(host, query_names, port, username, password, urlTemplate))
return root
def create_graphite_output_writer(self, typeName):
|
Update tools/yaml2jmxtrans.py Be able to provide different jmx ports for each host specifying host:port in yaml conf
|
py
|
diff --git a/tests/test_gitpuller.py b/tests/test_gitpuller.py
index <HASH>..<HASH> 100644
--- a/tests/test_gitpuller.py
+++ b/tests/test_gitpuller.py
@@ -293,3 +293,21 @@ def test_explicit_unshallow(long_remote, clean_environment):
os.environ['NBGITPULLER_DEPTH'] = "2"
with Puller(long_remote, 'explicitly_full', depth=0) as puller:
assert count_loglines(puller) == 10
+
+def test_pull_on_shallow_clone(long_remote, clean_environment):
+ """
+ Test that we can perform a pull on a shallow clone
+ """
+ with Puller(long_remote, depth=0) as shallow_puller:
+ with Pusher(long_remote) as pusher:
+ pusher.push_file('test_file', 'test')
+
+ orig_head = shallow_puller.git('rev-parse', 'HEAD')
+ shallow_puller.pull_all()
+ new_head = shallow_puller.git('rev-parse', 'HEAD')
+ upstream_head = long_remote.git('rev-parse', 'HEAD')
+
+ assert orig_head != new_head
+ assert new_head == upstream_head
+
+ pusher.git('push', '--force', 'origin', '%s:master' % orig_head)
|
tests: test pulling on top of shallow-clone
|
py
|
diff --git a/mycluster/persist.py b/mycluster/persist.py
index <HASH>..<HASH> 100644
--- a/mycluster/persist.py
+++ b/mycluster/persist.py
@@ -66,18 +66,6 @@ class JobDB(object):
self.queue_db = dbroot['queue_db']
- from .version import get_git_version
- if 'version' not in dbroot:
- dbroot['version'] = get_git_version()
- else:
- current_version = dbroot['version']
- new_version = get_git_version()
- # Add any migrations required here
- if current_version != new_version:
- pass
-
- dbroot['version'] = new_version
-
if 'remote_site_db' not in dbroot:
from BTrees.OOBTree import OOBTree
dbroot['remote_site_db'] = OOBTree()
|
Remove version from database as schema is stable at the moment
|
py
|
diff --git a/master/buildbot/test/unit/test_steps_source_oldsource_Repo.py b/master/buildbot/test/unit/test_steps_source_oldsource_Repo.py
index <HASH>..<HASH> 100644
--- a/master/buildbot/test/unit/test_steps_source_oldsource_Repo.py
+++ b/master/buildbot/test/unit/test_steps_source_oldsource_Repo.py
@@ -22,7 +22,7 @@ from twisted.trial import unittest
# suppress warnings on Python-2.6 and higher; catch_warnings doesn't
# exist in 2.5
-if sys.version >= (2, 6):
+if sys.version_info >= (2, 6):
with warnings.catch_warnings():
# ignore deprecation warnings
warnings.simplefilter('ignore')
|
fix typo causing deprecation warnings during tests
|
py
|
diff --git a/mlbgame/__init__.py b/mlbgame/__init__.py
index <HASH>..<HASH> 100644
--- a/mlbgame/__init__.py
+++ b/mlbgame/__init__.py
@@ -0,0 +1,6 @@
+import sys
+
+if sys.version_info[0] != 2:
+ print("mlbgame requires Python 2.6+ and does not work with Python 3")
+ print("You are running Python version {}.{}".format(sys.version_info.major, sys.version_info.minor))
+ sys.exit(1)
\ No newline at end of file
|
Ensure it is only run with python <I>+
|
py
|
diff --git a/LiSE/LiSE/character.py b/LiSE/LiSE/character.py
index <HASH>..<HASH> 100644
--- a/LiSE/LiSE/character.py
+++ b/LiSE/LiSE/character.py
@@ -82,7 +82,6 @@ class AbstractCharacter(object):
pred = getatt('preportal')
adj = succ = edge = getatt('portal')
- graph = getatt('stat')
def do(self, func, *args, **kwargs):
"""Apply the function to myself, and return myself.
|
Unshadow the .graph attribute I need that attribute. Gorm graphs keep all their stats there. It can't be an alias.
|
py
|
diff --git a/examples/django_demo/generic_foreignkey/models.py b/examples/django_demo/generic_foreignkey/models.py
index <HASH>..<HASH> 100644
--- a/examples/django_demo/generic_foreignkey/models.py
+++ b/examples/django_demo/generic_foreignkey/models.py
@@ -6,7 +6,7 @@ from django.contrib.contenttypes.fields import GenericForeignKey
class TaggedItem(models.Model):
- """Example GemericForeinKey model from django docs"""
+ """Example GenericForeinKey model from django docs"""
tag = models.SlugField()
content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
object_id = models.PositiveIntegerField()
|
fix typo in generic_foreignkey
|
py
|
diff --git a/master/buildbot/pbmanager.py b/master/buildbot/pbmanager.py
index <HASH>..<HASH> 100644
--- a/master/buildbot/pbmanager.py
+++ b/master/buildbot/pbmanager.py
@@ -63,7 +63,7 @@ class PBManager(service.MultiService):
if not disp.users:
disp = self.dispatchers[registration.portstr]
del self.dispatchers[registration.portstr]
- return disp.disownServiceParent()
+ return defer.maybeDeferred(disp.disownServiceParent)
return defer.succeed(None)
class Registration(object):
|
fix shutdown error when disownServiceParent doesn't return a deferred
|
py
|
diff --git a/trimesh/voxel.py b/trimesh/voxel.py
index <HASH>..<HASH> 100644
--- a/trimesh/voxel.py
+++ b/trimesh/voxel.py
@@ -67,7 +67,7 @@ def mesh_to_run(mesh, pitch):
hits = mesh.ray.intersects_location(rays)
raw_shape = np.ptp(bounds/pitch, axis=0).astype(int)
grid_origin = bounds[0]
- grid_index = ((grid/pitch) - (grid_origin[0:2]/pitch)).astype(int)
+ grid_index = np.rint((grid/pitch) - (grid_origin[0:2]/pitch)).astype(int)
run_z = deque()
run_xy = deque()
|
[voxel][fix] Fix rounding error related to grid_index
|
py
|
diff --git a/salt/states/group.py b/salt/states/group.py
index <HASH>..<HASH> 100644
--- a/salt/states/group.py
+++ b/salt/states/group.py
@@ -73,8 +73,14 @@ def _changes(name,
change = {}
if gid:
- if lgrp['gid'] != gid:
- change['gid'] = gid
+ try:
+ gid = int(gid)
+ if lgrp['gid'] != gid:
+ change['gid'] = gid
+ except (TypeError, ValueError):
+ ret['result'] = False
+ ret['comment'] = 'Invalid gid'
+ return ret
if members:
# -- if new member list if different than the current
|
v2 try of gid converted to integer
|
py
|
diff --git a/src/poetry/vcs/git/backend.py b/src/poetry/vcs/git/backend.py
index <HASH>..<HASH> 100644
--- a/src/poetry/vcs/git/backend.py
+++ b/src/poetry/vcs/git/backend.py
@@ -334,7 +334,7 @@ class Git:
url: bytes
path: bytes
submodules = parse_submodules(config)
- for path, url, _ in submodules:
+ for path, url, name in submodules:
path_relative = Path(path.decode("utf-8"))
path_absolute = repo_root.joinpath(path_relative)
@@ -342,7 +342,16 @@ class Git:
source_root.mkdir(parents=True, exist_ok=True)
with repo:
- revision = repo.open_index()[path].sha.decode("utf-8")
+ try:
+ revision = repo.open_index()[path].sha.decode("utf-8")
+ except KeyError:
+ logger.debug(
+ "Skip submodule %s in %s, path %s not found",
+ name,
+ repo.path,
+ path,
+ )
+ continue
cls.clone(
url=url.decode("utf-8"),
|
fix: skip cloning badly defined submodules eg with this dependency ``` pyscf = { git = "<URL>
|
py
|
diff --git a/mongo_orchestration/replica_sets.py b/mongo_orchestration/replica_sets.py
index <HASH>..<HASH> 100644
--- a/mongo_orchestration/replica_sets.py
+++ b/mongo_orchestration/replica_sets.py
@@ -246,7 +246,7 @@ class ReplicaSet(object):
return member config
"""
member_config = params.get('rsParams', {})
- server_id = params.pop('server_id')
+ server_id = params.pop('server_id', None)
proc_params = {'replSet': self.repl_id}
proc_params.update(params.get('procParams', {}))
|
server_id may not exist in the request.
|
py
|
diff --git a/src/quart/app.py b/src/quart/app.py
index <HASH>..<HASH> 100644
--- a/src/quart/app.py
+++ b/src/quart/app.py
@@ -226,7 +226,7 @@ class Quart(Scaffold):
before_websocket_funcs: The functions to execute before handling
a websocket.
"""
- super().__init__(import_name, template_folder, root_path, static_folder, static_url_path)
+ super().__init__(import_name, static_folder, static_url_path, template_folder, root_path)
instance_path = Path(instance_path) if instance_path else self.auto_find_instance_path()
if not instance_path.is_absolute():
|
Fix import order due to <I>a<I>b<I>ebea3b<I>db<I>c8ea<I>e<I>. A simple re-ordering of arguments is required.
|
py
|
diff --git a/fusesoc/utils.py b/fusesoc/utils.py
index <HASH>..<HASH> 100644
--- a/fusesoc/utils.py
+++ b/fusesoc/utils.py
@@ -72,7 +72,7 @@ def get_verilator_root():
if verilator is None:
return None
output = subprocess.check_output(verilator + ' -V',
- shell=True).splitlines();
+ shell=True).decode().splitlines()
pattern = re.compile("VERILATOR_ROOT")
for l in output:
if pattern.search(l):
|
utils: make get_verilator_root work with python3. str and byte-string handling in python<I> and python3.x differ a lot; doing str-type operations on a byte string doesn't fly in python3. To fix this, decode the byte string into a str before operating on it.
|
py
|
diff --git a/openquake/calculators/extract.py b/openquake/calculators/extract.py
index <HASH>..<HASH> 100644
--- a/openquake/calculators/extract.py
+++ b/openquake/calculators/extract.py
@@ -586,7 +586,7 @@ def extract_gridded_sources(dstore, what):
task_no = int(qdict.get('task_no', ['0'])[0])
dic = {}
for i, lonlats in enumerate(dstore['ps_grid/%02d' % task_no][()]):
- dic[i] = lonlats
+ dic[i] = numpy.round(F64(lonlats), 3)
return ArrayWrapper((), {'json': dumps(dic)})
|
Rounding the coordinates in extract_gridded_sources [skip hazardlib]
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100755
--- a/setup.py
+++ b/setup.py
@@ -80,7 +80,7 @@ setuptools.setup(
# Location where the package may be downloaded:
download_url='https://pypi.org/project/harvesters/',
# A list of required Python modules:
- install_requires=['harvesters_util=0.1.0', 'genicam2', 'numpy'],
+ install_requires=['harvesters_util==0.1.0', 'genicam2', 'numpy'],
#
license='Apache Software License V2.0',
# A detailed description of the package:
|
Pin the version of harvester_gui
|
py
|
diff --git a/estnltk/taggers/text_segmentation/patterns.py b/estnltk/taggers/text_segmentation/patterns.py
index <HASH>..<HASH> 100644
--- a/estnltk/taggers/text_segmentation/patterns.py
+++ b/estnltk/taggers/text_segmentation/patterns.py
@@ -201,13 +201,13 @@ email_and_www_patterns = [
'_group_': 2,
'_priority_': (0, 0, 6),
'_regex_pattern_': re.compile(r'''
- (^|[^{ALPHANUM}]) # beginning or non-alphanum
+ (^|[^{ALPHANUM}]) # beginning or non-alphanum
(
- [{ALPHANUM}_\-.]+ # domain name
- (\s\.\s|\.) # period
- (ee|org|edu|com|uk|ru|fi|lv|lt) # top-level domain
+ [{ALPHANUM}_\-.]+ # domain name
+ (\s\.\s|\.) # period
+ (ee|org|edu|com|uk|ru|fi|lv|lt|eu|se|nl|de|dk) # top-level domain
)
- ([^{ALPHANUM}]|$) # non-alphanum or ending
+ ([^{ALPHANUM}]|$) # non-alphanum or ending
'''.format(**MACROS), re.X),
'normalized': lambda m: re.sub(r'\s','', m.group(2) ) },
|
CompoundTokenTagger's rules fix: more country code top-level domains
|
py
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.