Dataset columns:
- diff: string (lengths 139 – 3.65k)
- message: string (lengths 8 – 627)
- diff_languages: string class (1 value)
diff --git a/mongoctl/mongoctl.py b/mongoctl/mongoctl.py index <HASH>..<HASH> 100644 --- a/mongoctl/mongoctl.py +++ b/mongoctl/mongoctl.py @@ -4080,8 +4080,7 @@ class ReplicaSetCluster(DocumentWrapper): mem_conf = member.get_member_repl_config() rs_conf = self.read_rs_config() return (rs_conf is not None and - self.get_member_id_if_exists(mem_conf, - rs_conf['members']) is not None) + self.match_member_id(mem_conf, rs_conf['members']) is not None) ########################################################################### def has_any_server_that(self, predicate):
Missing renamed function call
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -29,7 +29,8 @@ MAKEFILE_SOURCES_LIST_RE = re.compile(r''' ''', re.VERBOSE) -if not os.path.isdir(LIBSASS_DIR) and os.path.isdir('.git'): +if not os.path.isfile(os.path.join(LIBSASS_DIR, 'Makefile')) and \ + os.path.isdir('.git'): print(file=sys.stderr) print('You seem to miss initializing submodules; ' 'try the following command', file=sys.stderr)
Submodules have their empty dir even if uninitialized
py
diff --git a/src/diamond/server.py b/src/diamond/server.py index <HASH>..<HASH> 100644 --- a/src/diamond/server.py +++ b/src/diamond/server.py @@ -166,7 +166,8 @@ class Server(object): elif (os.path.isfile(fpath) and len(f) > 3 and f[-3:] == '.py' - and f[0:4] != 'test'): + and f[0:4] != 'test' + and f[0] != '.'): # Check filter if filter and os.path.join(path, f) != filter:
Dont try to load collectors files that start with a period. It can be extended attribute files or vim backups or similar and can lead to some weird errors.
py
diff --git a/gooey/gui/util/taskkill.py b/gooey/gui/util/taskkill.py index <HASH>..<HASH> 100644 --- a/gooey/gui/util/taskkill.py +++ b/gooey/gui/util/taskkill.py @@ -7,5 +7,9 @@ if sys.platform.startswith("win"): def taskkill(pid): os.system('taskkill /F /PID {:d} /T >NUL 2>NUL'.format(pid)) else: # POSIX + import psutil def taskkill(pid): - os.kill(pid, signal.SIGTERM) + parent = psutil.Process(pid) + for child in parent.children(recursive=True): + child.kill() + parent.kill()
Kill child processes as well as shell process
py
diff --git a/src/xmlsig/constants.py b/src/xmlsig/constants.py index <HASH>..<HASH> 100644 --- a/src/xmlsig/constants.py +++ b/src/xmlsig/constants.py @@ -36,7 +36,7 @@ TransformDsaSha256 = DSigNs11 + 'dsa-sha256' TransformEcdsaSha1 = DSignNsMore + 'ecdsa-sha1' TransformEcdsaSha224 = DSignNsMore + 'ecdsa-sha224' TransformEcdsaSha256 = DSignNsMore + 'ecdsa-sha256' -TransformEcdsaSha384 = DSignNsMore +'cdsa-sha384' +TransformEcdsaSha384 = DSignNsMore + 'ecdsa-sha384' TransformEcdsaSha512 = DSignNsMore + 'ecdsa-sha512' TransformHmacRipemd160 = DSignNsMore + 'hmac-ripemd160' TransformHmacSha1 = DSigNs + 'hmac-sha1'
Fix typo in TransformEcdsaSha<I> constant Fixes: x1a<I>f0c ("Added XDSig Validation and fixed some nodes usage")
py
diff --git a/jss/jss.py b/jss/jss.py index <HASH>..<HASH> 100755 --- a/jss/jss.py +++ b/jss/jss.py @@ -1097,8 +1097,8 @@ class FileUpload(object): "%s" % id_types) self._id = str(_id) - self.resource = {'name': (resource, open(resource, 'rb'), - 'multipart/form-data')} + self.resource = {'name': (os.path.basename(resource), + open(resource, 'rb'), 'multipart/form-data')} self.set_upload_url()
Fix FileUpload resource names to not be the full path to the original file.
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -17,6 +17,7 @@ setup( 'Flask applications.', long_description=read('README.rst'), include_package_data=True, + zip_safe=False, author='Marc Brinkmann', author_email='git@marcbrinkmann.de', url='http://github.com/mbr/flask-debug',
Added zip_safe=False.
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -13,7 +13,7 @@ setup( version=coinbase.wallet.__version__, packages=['coinbase', 'coinbase.wallet'], include_package_data=True, - license='MIT License', + license='Apache 2.0', description='Coinbase API client library', long_description=README, url='https://github.com/coinbase/coinbase-python/',
Update license in setup.py This was missed in 0bc<I>.
py
diff --git a/pytumblr/__init__.py b/pytumblr/__init__.py index <HASH>..<HASH> 100644 --- a/pytumblr/__init__.py +++ b/pytumblr/__init__.py @@ -496,9 +496,12 @@ class TumblrRestClient(object): files = [] if 'data' in params: if isinstance(params['data'], list): - files = [('data['+str(idx)+']', data, open(data, 'rb').read()) for idx, data in enumerate(params['data'])] + for idx, data in enumerate(params['data']): + with open(data, 'rb') as f: + files.append(('data['+str(idx)+']', data, f.read())) else: - files = [('data', params['data'], open(params['data'], 'rb').read())] + with open(params['data'], 'rb') as f: + files = [('data', params['data'], f.read())] del params['data'] validate_params(valid_parameters, params)
TumblrRestClient.send_api_request: ensure files get closed
py
diff --git a/isogeo_pysdk/isogeo_sdk.py b/isogeo_pysdk/isogeo_sdk.py index <HASH>..<HASH> 100644 --- a/isogeo_pysdk/isogeo_sdk.py +++ b/isogeo_pysdk/isogeo_sdk.py @@ -468,6 +468,28 @@ class Isogeo(object): # end of method return thez_req.json() + # -- DOWNLOADS ----------------------------------------------------------- + + def xml19139(self, jeton, id_resource, prot="https"): + """Get resource exported into XML ISO 19139""" + + # checking bearer validity + jeton = self.check_bearer_validity(jeton) + + # resource search + head = {"Authorization": "Bearer " + jeton[0]} + md_url = "{}://v1.{}.isogeo.com/resources/{}.xml".format(prot, + self.base_url, + id_resource) + xml_req = requests.get(md_url, + headers=head, + stream=True, + proxies=self.proxies + ) + + # end of method + return xml_req + # -- UTILITIES ----------------------------------------------------------- def check_bearer_validity(self, jeton):
add method to download metadata XML version
py
diff --git a/www/tests/issues.py b/www/tests/issues.py index <HASH>..<HASH> 100644 --- a/www/tests/issues.py +++ b/www/tests/issues.py @@ -1988,12 +1988,10 @@ str(globals()) # issue 885 -from traceback import * try: 1/0 except: - import traceback - assert '1/0' in traceback.format_exc() + assert '1/0' in tb.format_exc() # issue 883 for _ in range(2): @@ -2006,9 +2004,9 @@ try: exec('def f(): return 1/0\nf()') except ZeroDivisionError: stack_trace = tb.format_exc() - assert 'exec(\'def f(): return 1/0\\nf()\')\n' in stack_trace - assert 'f()\n' in stack_trace - assert 'def f(): return 1/0\n' in stack_trace + assert 'exec(\'def f(): return 1/0\\nf()\')' in stack_trace + assert 'f()' in stack_trace + assert 'def f(): return 1/0\\n' in stack_trace # issue 900 "".format(**globals())
Minor change in tests/issues.py
py
diff --git a/federation/entities/matrix/entities.py b/federation/entities/matrix/entities.py index <HASH>..<HASH> 100644 --- a/federation/entities/matrix/entities.py +++ b/federation/entities/matrix/entities.py @@ -181,10 +181,9 @@ class MatrixRoomMessage(Post, MatrixEntityMixin): if payloads: self._payloads.extend(payloads) - @staticmethod - def get_tag_room_alias(tag: str) -> str: + def get_tag_room_alias(self, tag: str) -> str: config = get_matrix_configuration() - return f"#_{config['appservice']['shortcode']}_#{slugify(tag)}" + return f"#_{config['appservice']['shortcode']}_#{slugify(tag)}:{self.server_name}" def get_tag_room_alias_url_safe(self, tag: str) -> str: return f"{quote(self.get_tag_room_alias(tag))}"
Fix get_tag_room_alias Needs to include also the server name.
py
diff --git a/charmhelpers/core/host.py b/charmhelpers/core/host.py index <HASH>..<HASH> 100644 --- a/charmhelpers/core/host.py +++ b/charmhelpers/core/host.py @@ -472,6 +472,8 @@ def restart_on_change_helper(lambda_f, restart_map, stopstart=False, {svc: func, ...} @returns result of lambda_f() """ + if restart_functions is None: + restart_functions = {} checksums = {path: path_hash(path) for path in restart_map} r = lambda_f() # create a list of lists of the services to restart @@ -482,11 +484,11 @@ def restart_on_change_helper(lambda_f, restart_map, stopstart=False, services_list = list(OrderedDict.fromkeys(itertools.chain(*restarts))) if services_list: actions = ('stop', 'start') if stopstart else ('restart',) - for action in actions: - for service_name in services_list: - if restart_functions and restart_functions.get(service_name): - restart_functions[service_name](service_name) - else: + for service_name in services_list: + if service_name in restart_functions: + restart_functions[service_name](service_name) + else: + for action in actions: service(action, service_name) return r
Fix double call to restart function and other fixes from tinwoods review
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -20,7 +20,7 @@ setup( license='GPLv3', packages=find_packages(exclude=['doc', 'test']), include_package_data=True, - install_requires=['numpy','pyomo','scipy','pandas>=0.19.0','networkx>=1.10'], + install_requires=['numpy','pyomo>=5.3','scipy','pandas>=0.19.0','networkx>=1.10'], classifiers=[ 'Development Status :: 3 - Alpha', 'Environment :: Console',
setup: Add dependency on PYOMO version <I>
py
diff --git a/src/you_get/extractor/magisto.py b/src/you_get/extractor/magisto.py index <HASH>..<HASH> 100644 --- a/src/you_get/extractor/magisto.py +++ b/src/you_get/extractor/magisto.py @@ -9,7 +9,8 @@ def magisto_download(url, output_dir='.', merge=True, info_only=False): title1 = r1(r'<meta name="twitter:title" content="([^"]*)"', html) title2 = r1(r'<meta name="twitter:description" content="([^"]*)"', html) - title = "%s %s" % (title1, title2) + video_hash = r1(r'http://www.magisto.com/video/([^/]+)', url) + title = "%s %s - %s" % (title1, title2, video_hash) url = r1(r'<source type="[^"]+" src="([^"]*)"', html) type, ext, size = url_info(url)
Magisto: include video hash in filename
py
diff --git a/lib/drizzlepac/adrizzle.py b/lib/drizzlepac/adrizzle.py index <HASH>..<HASH> 100644 --- a/lib/drizzlepac/adrizzle.py +++ b/lib/drizzlepac/adrizzle.py @@ -187,8 +187,12 @@ def run(configObj, wcsmap=None): outcon = np.zeros((1,output_wcs._naxis2,output_wcs._naxis1),dtype=np.int32) else: outcon = outcon.astype(np.int32) - - # TODO: add code to expand outcon as necessary + planeid = int((uniqid - 1)/ 32) + + # Add a new plane to the context image if planeid overflows + while outcon.shape[0] <= planeid: + plane = np.zeros_like(outcon[0]) + outcon = np.append(outcon, plane, axis=0) # Interpret wt_scl parameter if configObj['wt_scl'] == 'exptime':
Add the other half of the fix for context image bug - the code that expands the context array in the run method git-svn-id: <URL>
py
diff --git a/aikif/toolbox/xml_tools.py b/aikif/toolbox/xml_tools.py index <HASH>..<HASH> 100644 --- a/aikif/toolbox/xml_tools.py +++ b/aikif/toolbox/xml_tools.py @@ -194,8 +194,8 @@ class XmlFile(mod_file.TextFile): txt += '| XmlFile = ' + str(self.element_count) + ' elements\n' return txt - def count_elements_in_file(self): - return count_via_minidom(self.fullname , 'MindOntology_Definition') # sentence, MindOntology_Definition + def count_elements_in_file(self, tag_name='MindOntology_Definition'): + return count_via_minidom(self.fullname , tag_name) if __name__ == '__main__': TEST()
fix for hard coded value in count_elements for xml
py
diff --git a/pronto/serializers/obo.py b/pronto/serializers/obo.py index <HASH>..<HASH> 100644 --- a/pronto/serializers/obo.py +++ b/pronto/serializers/obo.py @@ -22,14 +22,16 @@ class OboSerializer(FastoboSerializer, BaseSerializer): file.write(b"\n") # dump terms if self.ont._terms: - for i, (id, data) in enumerate(self.ont._terms.items()): + for i, id in enumerate(sorted(self.ont._terms)): + data = self.ont._terms[id] frame = self._to_term_frame(Term(self.ont, data)) file.write(str(frame).encode("utf-8")) if i < len(self.ont._terms) - 1 or self.ont._relationships: file.write(b"\n") # dump typedefs if self.ont._relationships: - for i, (id, data) in enumerate(self.ont._relationships.items()): + for i, id in enumerate(sorted(self.ont._relationships)): + data = self.ont._relationships[id] frame = self._to_typedef_frame(Relationship(self.ont, data)) file.write(str(frame).encode("utf-8")) if i < len(self.ont._relationships) - 1:
Fix OBO serializer assuming `Ontology._terms` is properly ordered
py
diff --git a/SoftLayer/CLI/environment.py b/SoftLayer/CLI/environment.py index <HASH>..<HASH> 100644 --- a/SoftLayer/CLI/environment.py +++ b/SoftLayer/CLI/environment.py @@ -41,6 +41,7 @@ class Environment(object): 'my': 'metadata', 'vm': 'cci', 'hardware': 'server', + 'hw': 'server', 'bmetal': 'bmc', } stdout = sys.stdout
Adds `sl hw` as an alias to `sl server` for hardware servers
py
diff --git a/taskw/warrior.py b/taskw/warrior.py index <HASH>..<HASH> 100644 --- a/taskw/warrior.py +++ b/taskw/warrior.py @@ -119,7 +119,7 @@ class TaskWarrior(object): return task def get_task(self, **kw): - valid_keys = {'id', 'uuid', 'description'} + valid_keys = set(['id', 'uuid', 'description']) id_keys = valid_keys.intersection(kw.keys()) if len(id_keys) != 1:
Remove set literal for python <I> compatibility.
py
diff --git a/isort/isort.py b/isort/isort.py index <HASH>..<HASH> 100644 --- a/isort/isort.py +++ b/isort/isort.py @@ -56,8 +56,12 @@ class SortImports(object): self.config = settings.from_path(settings_path).copy() for key, value in itemsview(setting_overrides): - if type(self.config.get(key)) in (list, tuple): - self.config[key] = list(set(self.config[key]).union(value)) + access_key = key.replace('not_', '').lower() + if type(self.config.get(access_key)) in (list, tuple): + if key.startswith('not_'): + self.config[access_key] = list(set(self.config[access_key]).difference(value)) + else: + self.config[access_key] = list(set(self.config[access_key]).union(value)) else: self.config[key] = value
Add support for excluding settings to direct isort python command
py
diff --git a/examples/plotting/file/les_mis.py b/examples/plotting/file/les_mis.py index <HASH>..<HASH> 100644 --- a/examples/plotting/file/les_mis.py +++ b/examples/plotting/file/les_mis.py @@ -54,7 +54,8 @@ p.axis.major_label_standoff = 0 p.xaxis.major_label_orientation = np.pi/3 p.rect('xname', 'yname', 0.9, 0.9, source=source, - color='colors', alpha='alphas', line_color=None) + color='colors', alpha='alphas', line_color=None, + hover_line_color='black', hover_color='colors') p.select_one(HoverTool).tooltips = [ ('names', '@yname, @xname'),
added black outline and hover color (#<I>)
py
diff --git a/airflow/contrib/auth/backends/ldap_auth.py b/airflow/contrib/auth/backends/ldap_auth.py index <HASH>..<HASH> 100644 --- a/airflow/contrib/auth/backends/ldap_auth.py +++ b/airflow/contrib/auth/backends/ldap_auth.py @@ -63,7 +63,7 @@ class LdapUser(models.User): # todo: BASE or ONELEVEL? - res = conn.search(configuration.get("ldap", "basedn"), search_filter, search_scope=LEVEL) + res = conn.search(configuration.get("ldap", "basedn"), search_filter) # todo: use list or result? if not res: @@ -166,4 +166,4 @@ def login(self, request): class LoginForm(Form): username = StringField('Username', [InputRequired()]) - password = PasswordField('Password', [InputRequired()]) \ No newline at end of file + password = PasswordField('Password', [InputRequired()])
Remove search scope from LDAP query to make it work with ActiveDirectory.
py
diff --git a/spinoff/util/testing/control.py b/spinoff/util/testing/control.py index <HASH>..<HASH> 100644 --- a/spinoff/util/testing/control.py +++ b/spinoff/util/testing/control.py @@ -274,10 +274,10 @@ class Buffer(object): self.d = Deferred() ret.append((yield self.d)) if exactly: - yield self.expect_not() + yield self.expect_none() returnValue(ret if upto > 1 else ret[0]) - def expect_not(self): + def expect_none(self): """If the queue is not empty, returns False immediately, otherwise a Deferred that fires a bit later and whose result is True or False depending on whether the queue is still empty when the Deferred fires or not.
Renamed util.testing.control.Buffer.expect_not => expect_none
py
diff --git a/gandi/cli/commands/mail.py b/gandi/cli/commands/mail.py index <HASH>..<HASH> 100644 --- a/gandi/cli/commands/mail.py +++ b/gandi/cli/commands/mail.py @@ -67,7 +67,7 @@ def create(gandi, email, quota, fallback, alias): ' without prompting. (default=False).') @click.argument('email', type=EMAIL_TYPE, metavar='login@domain.tld') @pass_gandi -def delete(gandi, email, force, alias): +def delete(gandi, email, force): """Delete a mailbox.""" login, domain = email
fix regression on mail delete commit a<I>bb<I>c<I>dfbe introduced issue #<I>
py
diff --git a/config-entrypoint.py b/config-entrypoint.py index <HASH>..<HASH> 100755 --- a/config-entrypoint.py +++ b/config-entrypoint.py @@ -11,7 +11,7 @@ import os import sys import boto3 -from botocore.exceptions import ClientError +from botocore.exceptions import ClientError, NoCredentialsError def main(argv): config_bucket = os.getenv('QUILT_SERVER_CONFIG_S3_BUCKET') @@ -26,11 +26,7 @@ def main(argv): print("Error: need QUILT_SERVER_CONFIG_S3_BUCKET when running on EC2.", file=sys.stderr) return 1 - if not is_ec2 and config_bucket: - print("Error: QUILT_SERVER_CONFIG_S3_BUCKET should not be used in dev.", file=sys.stderr) - return 1 - - if is_ec2: + if config_bucket: config_path = '/config.py' if not os.path.exists(config_path): s3_client = boto3.client('s3') @@ -39,7 +35,7 @@ def main(argv): config = config_obj['Body'].read() with open(config_path, 'wb') as config_file: config_file.write(config) - except ClientError as ex: + except (ClientError, NoCredentialsError) as ex: print("Failed to read s3://%s/config.py: %s" % (config_bucket, ex)) return 1 else:
Allow using S3 config outside of EC2
py
diff --git a/denovonear/load_gene.py b/denovonear/load_gene.py index <HASH>..<HASH> 100755 --- a/denovonear/load_gene.py +++ b/denovonear/load_gene.py @@ -234,7 +234,7 @@ def minimise_transcripts(ensembl, gene_id, de_novos): max_transcripts = {x: counts[x] for x in counts if x in tx_ids} # find which de novos occur in the transcript with the most de novos - gene = construct_gene_object(ensembl, max_transcripts.keys()[0]) + gene = construct_gene_object(ensembl, next(iter(max_transcripts))) denovos_in_gene = get_de_novos_in_transcript(gene, de_novos) # trim the de novos to the ones not in the current transcript
fix bug with accessing key in python3 dict
py
diff --git a/sebastian/core/transforms.py b/sebastian/core/transforms.py index <HASH>..<HASH> 100644 --- a/sebastian/core/transforms.py +++ b/sebastian/core/transforms.py @@ -108,6 +108,19 @@ def midi_pitch(point): @transform_sequence +def midi_to_pitch(point): # @@@ add key hint later + midi_pitch = point[MIDI_PITCH] + + octave, pitch = divmod(midi_pitch, 12) + pitch = [-2, 5, 0, -5, 2, -3, 4, -1, 6, 1, -4, 3][pitch] + + point["octave"] = octave + point["pitch"] = pitch + + return point + + +@transform_sequence def lilypond(point): if "lilypond" not in point: octave = point["octave"]
transform for going from midi to internal pitch
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -48,11 +48,11 @@ except(IOError, ImportError): setup( name='esgfpid', - version='0.7.8-dev', + version='0.7.8', author='Merret Buurman, German Climate Computing Centre (DKRZ)', author_email='buurman@dkrz.de', url='https://github.com/IS-ENES-Data/esgf-pid', - download_url='https://github.com/IS-ENES-Data/esgf-pid/archive/0.7.8-dev.tar.gz', + download_url='https://github.com/IS-ENES-Data/esgf-pid/archive/0.7.8.tar.gz', description='Library for sending PID requests to a rabbit messaging queue during ESGF publication.', long_description=long_description, packages=packages + test_packages,
Finished version <I> (only for a stupid fix in pypi branch of <I>).
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -10,7 +10,7 @@ def read(fname): setup( name='django-session-security', - version='0.1', + version='1.0rc1', description='Let the user secure his session for usage in public computers', author='James Pic', author_email='jamespic@gmail.com',
Set version to <I>rc1
py
diff --git a/fluent_contents/models/fields.py b/fluent_contents/models/fields.py index <HASH>..<HASH> 100644 --- a/fluent_contents/models/fields.py +++ b/fluent_contents/models/fields.py @@ -189,7 +189,12 @@ class PlaceholderField(PlaceholderRelation): """ Internal Django method, used to return the placeholder ID when exporting the model instance. """ - placeholder = getattr(obj, self.name) # not using self.attname, access the descriptor instead. + try: + # not using self.attname, access the descriptor instead. + placeholder = getattr(obj, self.name) + except Placeholder.DoesNotExist: + return None # Still allow ModelForm / admin to open and create a new Placeholder if the table was truncated. + return placeholder.id if placeholder else None # Be consistent with other fields, like ForeignKey
Fix opening the admin if the placeholder table is truncated The placeholder does not have to exist, as it will be automatially recreated by the form.
py
diff --git a/a10_neutron_lbaas/schedulers/tenant_hash.py b/a10_neutron_lbaas/schedulers/tenant_hash.py index <HASH>..<HASH> 100644 --- a/a10_neutron_lbaas/schedulers/tenant_hash.py +++ b/a10_neutron_lbaas/schedulers/tenant_hash.py @@ -6,11 +6,17 @@ class TenantHashFilter(base.BaseSchedulerFilter): def __init__(self, driver, devices): super(TenantHashFilter, self).__init__(driver, devices) - # TODO -- bug -- this can't be global to init anymore - self.appliance_hash = acos_client.Hash(self.devices.keys()) + self.hash_rings = {} + + def _hash_ring(self, ring_key, bucket_keys): + norm_key = ':'.join(bucket_keys) + if norm_key not in self.hash_rings + self.hash_rings[norm_key] = acos_client.Hash(bucket_keys) + + return self.hash_rings[norm_key].get_server(ring_key) def select_device(self, a10_context=None, devices, tenant_id, lbaas_obj=None): - s = self.appliance_hash.get_server(tenant_id) + s = _hash_ring(tenant_id, devices.keys()) return [self.devices[s]]
fix hash ring for scheduling filters
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -5,7 +5,7 @@ version = '13.1.0' install_requires = ( - 'djangorestframework>=3.2.0,<3.3', + 'djangorestframework>=3.1.0,<3.3', 'incuna_mail>=2.0.0,<4.0.0', 'incuna-pigeon>=0.1.0,<1.0.0', )
Expand setup.py requirement range.
py
diff --git a/pipenv/core.py b/pipenv/core.py index <HASH>..<HASH> 100644 --- a/pipenv/core.py +++ b/pipenv/core.py @@ -1338,7 +1338,6 @@ def get_pip_args( allow_global=False, # type: bool ): # type: (...) -> List[str] - from .environment import Environment from .vendor.packaging.version import parse as parse_version arg_map = { "pre": ["--pre"], @@ -1354,10 +1353,9 @@ def get_pip_args( ], "src_dir": src_dir, } - environment = project.get_environment(allow_global=allow_global) - if environment.pip_version >= parse_version("19.0"): + if project.environment.pip_version >= parse_version("19.0"): arg_map["no_use_pep517"].append("--no-use-pep517") - if environment.pip_version < parse_version("19.1"): + if project.environment.pip_version < parse_version("19.1"): arg_map["no_use_pep517"].append("--no-build-isolation") arg_set = [] for key in arg_map.keys():
Detect global setting at environment creation time
py
diff --git a/yt_array.py b/yt_array.py index <HASH>..<HASH> 100644 --- a/yt_array.py +++ b/yt_array.py @@ -197,10 +197,10 @@ class YTArray(np.ndarray): less_equal: comparison_unit, not_equal: comparison_unit, equal: comparison_unit, - logical_and: ensure_same_units, - logical_or: ensure_same_units, - logical_xor: ensure_same_units, - logical_not: ensure_same_units, + logical_and: comparison_unit, + logical_or: comparison_unit, + logical_xor: comparison_unit, + logical_not: comparison_unit, maximum: passthrough_unit, minimum: passthrough_unit, isreal: return_without_unit,
Fixing an issue with the wrong functions being used in the ufunc registry. I don't think this triggered any buggy behavior in practice, but these ufuncs should work properly now with this change. --HG-- branch : yt-<I>
py
diff --git a/netpyne/analysis/spikes.py b/netpyne/analysis/spikes.py index <HASH>..<HASH> 100644 --- a/netpyne/analysis/spikes.py +++ b/netpyne/analysis/spikes.py @@ -453,7 +453,7 @@ def plotRaster(include=['allCells'], timeRange=None, maxSpikes=1e8, orderBy='gid if len(cellGids) > 0: gidColors = {cell['gid']: popColors[cell['tags']['pop']] for cell in cells} # dict with color for each gid try: - sel, spkts,spkgids = getSpktSpkid(cellGids=[] if include == ['allCells'] else cellGids, timeRange=timeRange) # using [] is faster for all cells + sel, spkts, spkgids = getSpktSpkid(cellGids=[] if include == ['allCells'] else cellGids, timeRange=timeRange) # using [] is faster for all cells except: import sys print((sys.exc_info()))
remove weird character from spikes.py
py
diff --git a/safe/impact_functions/earthquake/earthquake_building_impact.py b/safe/impact_functions/earthquake/earthquake_building_impact.py index <HASH>..<HASH> 100644 --- a/safe/impact_functions/earthquake/earthquake_building_impact.py +++ b/safe/impact_functions/earthquake/earthquake_building_impact.py @@ -7,6 +7,7 @@ from safe.impact_functions.core import ( from safe.metadata import ( hazard_earthquake, layer_vector_polygon, + layer_vector_point, layer_raster_numeric, unit_mmi_depth, exposure_structure, @@ -79,7 +80,7 @@ class EarthquakeBuildingImpactFunction(FunctionProvider): 'definition': exposure_definition, 'subcategory': exposure_structure, 'units': [unit_building_type_type], - 'layer_constraints': [layer_vector_polygon] + 'layer_constraints': [layer_vector_polygon, layer_vector_point] } } }
Allow point layer as part the valid list of exposure layers #<I>
py
diff --git a/geopy/geocoders/osm.py b/geopy/geocoders/osm.py index <HASH>..<HASH> 100644 --- a/geopy/geocoders/osm.py +++ b/geopy/geocoders/osm.py @@ -79,7 +79,6 @@ class Nominatim(Geocoder): self.country_bias = country_bias self.format_string = format_string self.view_box = view_box - self.country_bias = country_bias self.domain = domain.strip('/') self.api = "%s://%s/search" % (self.scheme, self.domain)
Remove duplicate assignment Line duplicated twice (see three lines above) `self.country_bias = country_bias`
py
diff --git a/soco/core.py b/soco/core.py index <HASH>..<HASH> 100755 --- a/soco/core.py +++ b/soco/core.py @@ -250,7 +250,8 @@ class SoCo(_SocoSingletonBase): self._zgs_cache = None def __str__(self): - return "<SoCo object at ip {0}>".format(self.ip_address) + return "<{0} object at ip {1}>".format( + self.__class__.__name__, self.ip_address) def __repr__(self): return '{0}("{1}")'.format(self.__class__.__name__, self.ip_address)
Update __str__ to cater for subclasses
py
diff --git a/asn1crypto/keys.py b/asn1crypto/keys.py index <HASH>..<HASH> 100644 --- a/asn1crypto/keys.py +++ b/asn1crypto/keys.py @@ -439,7 +439,8 @@ class NamedCurve(ObjectIdentifier): """ cls._map[oid] = name - cls._reverse_map[name] = oid + if cls._reverse_map is not None: + cls._reverse_map[name] = oid cls._key_sizes[oid] = key_size
Ensure keys.NamedCurve.register() works if setup() has been run or not
py
diff --git a/beeswarm/feeder/tests/test_telnet.py b/beeswarm/feeder/tests/test_telnet.py index <HASH>..<HASH> 100644 --- a/beeswarm/feeder/tests/test_telnet.py +++ b/beeswarm/feeder/tests/test_telnet.py @@ -67,15 +67,8 @@ class Telnet_Test(unittest.TestCase): BeeSession.feeder_id = 'f51171df-c8f6-4af4-86c0-f4e163cf69e8' current_bee = bee_telnet.telnet(beesessions, bee_info) - current_bee.do_session('127.0.0.1') - session_id, session = beesessions.popitem() - - # Make sure we only spawned one session. - self.assertEquals(beesessions, {}) - - # Make sure we were able to log in. - self.assertEquals(session.did_login, True) - + current_bee.connect() + current_bee.login(bee_info['login'], bee_info['password']) srv.stop() def test_validate_senses(self):
updated telnet test, removed non-determinism
py
diff --git a/synchro/synchrotest.py b/synchro/synchrotest.py index <HASH>..<HASH> 100644 --- a/synchro/synchrotest.py +++ b/synchro/synchrotest.py @@ -82,9 +82,6 @@ excluded_tests = [ 'TestGridfs.test_threaded_reads', 'TestGridfs.test_threaded_writes', - # Relies on threads; tested directly. - 'TestCollection.test_parallel_scan', - # Motor's aggregate API is different, always sends "cursor={}" by default. 'TestCollection.test_aggregate', 'TestCollection.test_aggregate_raw_bson', @@ -165,6 +162,7 @@ excluded_tests = [ 'TestDatabase.test_eval', 'TestCollation.*', 'TestCollection.test_find_one_and_write_concern', + 'TestCollection.test_parallel_scan', 'TestCollection.test_parallel_scan_max_time_ms', 'TestCollection.test_write_error_text_handling', 'TestCommandMonitoring.test_legacy_insert_many',
MOTOR-<I> Update parallelScan test skip reason
py
diff --git a/python_modules/dagster/dagster/core/definitions/partitioned_schedule.py b/python_modules/dagster/dagster/core/definitions/partitioned_schedule.py index <HASH>..<HASH> 100644 --- a/python_modules/dagster/dagster/core/definitions/partitioned_schedule.py +++ b/python_modules/dagster/dagster/core/definitions/partitioned_schedule.py @@ -49,7 +49,8 @@ def build_schedule_from_partitioned_job( partitioned_config = cast(PartitionedConfig, job.mode_definitions[0].partitioned_config) partition_set = cast(PartitionSetDefinition, job.get_partition_set_def()) partitions_def = cast(TimeWindowPartitionsDefinition, partitioned_config.partitions_def) - else: + else: # UnresolvedAssetJobDefinition + check.invariant(job.partitions_def is not None, "Job does not have a partitions_def.") partition_set = cast(PartitionSetDefinition, job.get_partition_set_def()) partitions_def = cast(TimeWindowPartitionsDefinition, job.partitions_def)
improve error for build_schedule_from_partitioned_job with non-partitioned asset job (#<I>)
py
diff --git a/algo.py b/algo.py index <HASH>..<HASH> 100644 --- a/algo.py +++ b/algo.py @@ -137,17 +137,17 @@ class AlgoRandom(Algo): super().__init__(trainingData) self.infos['name'] = 'random' - # estimation of the distribution - fqs = [0, 0, 0, 0, 0] - for x in self.allXs: - for y in self.allYs: - if self.rm[x, y] > 0: - fqs[self.rm[x, y] - 1] += 1 - fqs = [fq/sum(fqs) for fq in fqs] - self.distrib = rv_discrete(values=([1, 2, 3, 4, 5], fqs)) + # compute unbiased variance of ratings + num = denum = 0 + for _, _, r in self.allRatings: + num += (r - self.meanRatings)**2 + denum += 1 + denum -= 1 + + self.var = num / denum def estimate(self, *_): - return self.distrib.rvs() + return np.random.normal(self.meanRatings, self.var) class AlgoUsingSim(Algo): """Abstract class for algos using a similarity measure"""
changed algo random to now use a normal distribution
py
diff --git a/wordfreq/query.py b/wordfreq/query.py index <HASH>..<HASH> 100644 --- a/wordfreq/query.py +++ b/wordfreq/query.py @@ -36,6 +36,32 @@ def word_frequency(word, lang, wordlist='multi', default=0.): else: return row[0] + +def iter_wordlist(wordlist, lang=None): + """ + Returns a generator, yielding (word, lang, frequency) triples from + a wordlist in descending order of frequency. + + If a `lang` is specified, the results will only contain words in that + language. + """ + c = CONN.cursor() + if lang is None: + results = c.execute( + "SELECT word, lang, freq from words where wordlist=? " + "ORDER BY freq desc", + (wordlist,) + ) + else: + results = c.execute( + "SELECT word, lang, freq from words where " + "wordlist=? and lang=? ORDER BY freq DESC", + (wordlist, lang) + ) + + return results + + METANL_CONSTANT = 50291582140.06433 def metanl_word_frequency(word, lang, default=0.): """
add query.iter_wordlist, to visit all words in a list
py
diff --git a/drivers/python/rethinkdb/net_twisted.py b/drivers/python/rethinkdb/net_twisted.py index <HASH>..<HASH> 100644 --- a/drivers/python/rethinkdb/net_twisted.py +++ b/drivers/python/rethinkdb/net_twisted.py @@ -241,7 +241,7 @@ class TwistedCursor(Cursor): return isinstance(self.error, RqlCursorEmpty) and len(self.items) == 0 def _get_next(self, timeout): - if self.is_empty() or self.has_error(): + if len(self.items) == 0 and self.error: return defer.fail(self.error) def returnNextItem(item):
Fix _get_next behavior to match the good one. Now, it raises the error only when it has read all the rows it has.
py
diff --git a/setupext_janitor/janitor.py b/setupext_janitor/janitor.py index <HASH>..<HASH> 100644 --- a/setupext_janitor/janitor.py +++ b/setupext_janitor/janitor.py @@ -70,14 +70,17 @@ class CleanCommand(_CleanCommand): if self.dist: for cmd_name, _ in self.distribution.get_command_list(): if 'dist' in cmd_name: - print('--- start of if dist in cmd_name --') #mhw + #print('--- start of if dist in cmd_name --') #mhw command = self.distribution.get_command_obj(cmd_name) - print(command) - command.ensure_finalized() - print('passed command.ensure_finalized()') #mhw + #print(command) + try: + command.ensure_finalized() + except Exception as e: + print(f'\n*** Exception encountered and ignored:\n\t{command}\n\t{e}\n') + #print('passed command.ensure_finalized()') #mhw if getattr(command, 'dist_dir', None): dir_names.add(command.dist_dir) - print('--- end of if dist in cmd_name --') #mhw + #print('--- end of if dist in cmd_name --') #mhw if self.eggs: for name in os.listdir(self.egg_base):
WIP: don't exit on error, but we don't deal with it either!
py
diff --git a/openquake/calculators/risk/hazard_getters.py b/openquake/calculators/risk/hazard_getters.py index <HASH>..<HASH> 100644 --- a/openquake/calculators/risk/hazard_getters.py +++ b/openquake/calculators/risk/hazard_getters.py @@ -208,9 +208,11 @@ class GroundMotionValuesGetter(object): cursor.execute(min_dist_query, args) min_dist = cursor.fetchall()[0][0] # breaks if there are no points + min_dist += 0.1 # 0.1 is some numerical tolerance + gmvs_query = """-- return all the gmvs inside the min_dist radius SELECT gmvs FROM hzrdr.gmf - WHERE %s >= ST_Distance_Sphere(location, %s) + WHERE %s > ST_Distance_Sphere(location, %s) AND imt = %s AND gmf_set_id IN %s {} ORDER BY gmf_set_id, result_grp_ordinal """.format(spectral_filters)
Added a numerical tolerance to min_dist
py
diff --git a/vertex/_unfortunate_defer_hack.py b/vertex/_unfortunate_defer_hack.py index <HASH>..<HASH> 100644 --- a/vertex/_unfortunate_defer_hack.py +++ b/vertex/_unfortunate_defer_hack.py @@ -1,6 +1,12 @@ -from twisted.internet.defer import Deferred, FirstError +from twisted.internet.defer import Deferred +try: + from twisted.internet.defer import FirstError +except: + class FirstError(Exception): + """omgwtf + """ from twisted.python import failure class NoFailure(Exception):
compatibility with <I>, trunk
py
diff --git a/reana_db/version.py b/reana_db/version.py index <HASH>..<HASH> 100755 --- a/reana_db/version.py +++ b/reana_db/version.py @@ -14,4 +14,4 @@ and parsed by ``setup.py``. from __future__ import absolute_import, print_function -__version__ = "0.6.0.dev20191212" +__version__ = "0.6.0.dev20191213"
release: <I>.de<I>
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -38,7 +38,7 @@ setup( 'click>=3.0', 'Flask>=0.10', 'Flask-BabelPkg>=0.9.4', - 'Flask-Login>=0.2.0', + 'Flask-Login==0.2.11', 'Flask-OpenID>=1.1.0', 'Flask-SQLAlchemy>=0.16', 'Flask-WTF>=0.9.1',
Pinning Flask-Login to <I> since <I> breaks the common interface
py
diff --git a/anyconfig/schema.py b/anyconfig/schema.py index <HASH>..<HASH> 100644 --- a/anyconfig/schema.py +++ b/anyconfig/schema.py @@ -47,7 +47,9 @@ def validate(obj, schema, format_checker=None, safe=True): format_checker = jsonschema.FormatChecker() # :raises: NameError try: jsonschema.validate(obj, schema, format_checker=format_checker) - except (jsonschema.ValidationError, jsonschema.SchemaError) as exc: + return (True, '') + except (jsonschema.ValidationError, jsonschema.SchemaError, + Exception) as exc: if safe: return (False, str(exc)) else:
fix a possible bug in anyconfig.schema.validate that it may return success even if validation failed for some cases
py
diff --git a/landsat/landsat.py b/landsat/landsat.py index <HASH>..<HASH> 100755 --- a/landsat/landsat.py +++ b/landsat/landsat.py @@ -30,8 +30,7 @@ search, download, and process Landsat imagery. Commands: Search: - landsat.py search [-h] [-l LIMIT] [-s START] [-e END] [-c CLOUD] - [--onlysearch] [--imageprocess] + landsat.py search [-h] [-l LIMIT] [-s START] [-e END] [-c CLOUD] [--imageprocess] {pr,shapefile,country} positional arguments:
removed --only-search. The option is no longer available.
py
diff --git a/salt/modules/aptpkg.py b/salt/modules/aptpkg.py index <HASH>..<HASH> 100644 --- a/salt/modules/aptpkg.py +++ b/salt/modules/aptpkg.py @@ -1392,7 +1392,7 @@ def list_pkgs(versions_as_list=False, version_num) # Check for virtual packages. We need dctrl-tools for this. - if not removed: + if not removed and not HAS_APT: try: virtpkgs_all = _get_virtual() except CommandExecutionError as cee: @@ -1401,9 +1401,7 @@ def list_pkgs(versions_as_list=False, for realpkg, provides in six.iteritems(virtpkgs_all): # grep-available returns info on all virtual packages. Ignore any # virtual packages that do not have the real package installed. - # _get_virtual() do not use grep-available if HAS_APT is true - # so we can skip loop below - if not HAS_APT and realpkg in ret['installed']: + if realpkg in ret['installed']: virtpkgs.update(provides) for virtname in virtpkgs: # Set virtual package versions to '1'
move decision statement to upper if block
py
diff --git a/tests/test_modeling_tf_albert.py b/tests/test_modeling_tf_albert.py index <HASH>..<HASH> 100644 --- a/tests/test_modeling_tf_albert.py +++ b/tests/test_modeling_tf_albert.py @@ -303,3 +303,26 @@ class TFAlbertModelTest(TFModelTesterMixin, unittest.TestCase): for model_name in TF_ALBERT_PRETRAINED_MODEL_ARCHIVE_LIST[:1]: model = TFAlbertModel.from_pretrained(model_name) self.assertIsNotNone(model) + + +@require_tf +class TFAlbertModelIntegrationTest(unittest.TestCase): + @slow + def test_inference_masked_lm(self): + model = TFAlbertForPreTraining.from_pretrained("albert-base-v2") + input_ids = tf.constant([[0, 1, 2, 3, 4, 5]]) + output = model(input_ids)[0] + + expected_shape = [1, 6, 30000] + self.assertEqual(output.shape, expected_shape) + + expected_slice = tf.constant( + [ + [ + [4.595668, 0.74462754, -1.818147], + [4.5954347, 0.7454184, -1.8188258], + [4.5954905, 0.7448235, -1.8182316], + ] + ] + ) + tf.debugging.assert_near(output[:, :3, :3], expected_slice, atol=1e-4)
Added integration tests for TensorFlow implementation of the ALBERT model (#<I>) * TF Albert integration test * TF Albert integration test added
py
diff --git a/mapper/object_mapper.py b/mapper/object_mapper.py index <HASH>..<HASH> 100644 --- a/mapper/object_mapper.py +++ b/mapper/object_mapper.py @@ -128,8 +128,8 @@ class ObjectMapper(object): from_obj.__dict__ inst = to_type() - key_from = from_obj.__class__.__name__ - key_to = to_type.__name__ + key_from = from_obj.__class__ + key_to = to_type def not_private(s): return not s.startswith('_')
fix already-exists mapping error when both types have the same name (different modules/packages) in the map() method
py
diff --git a/openquake/commonlib/valid.py b/openquake/commonlib/valid.py index <HASH>..<HASH> 100644 --- a/openquake/commonlib/valid.py +++ b/openquake/commonlib/valid.py @@ -548,11 +548,15 @@ def intensity_measure_types_and_levels(value): :param value: input string :returns: Intensity Measure Type and Levels dictionary - >>> intensity_measure_types_and_levels('{"PGA": [0.1, 0.2]}') - {'PGA': [0.1, 0.2]} + >>> intensity_measure_types_and_levels('{"SA(0.10)": [0.1, 0.2]}') + {'SA(0.1)': [0.1, 0.2]} """ dic = dictionary(value) - for imt_str, imls in dic.iteritems(): + for imt_str, imls in dic.items(): + norm_imt = str(imt.from_string(imt_str)) + if norm_imt != imt_str: + dic[norm_imt] = imls + del dic[imt_str] check_levels(imls, imt_str) # ValueError if the levels are invalid return dic
Improved the validation of the IMTs
py
diff --git a/spyder/widgets/ipythonconsole/shell.py b/spyder/widgets/ipythonconsole/shell.py index <HASH>..<HASH> 100644 --- a/spyder/widgets/ipythonconsole/shell.py +++ b/spyder/widgets/ipythonconsole/shell.py @@ -221,8 +221,11 @@ These commands were executed: reply = user_exp[expression] data = reply.get('data') if 'get_namespace_view' in method: - view = ast.literal_eval(data['text/plain']) - self.sig_namespace_view.emit(view) + if 'text/plain' in data: + view = ast.literal_eval(data['text/plain']) + self.sig_namespace_view.emit(view) + else: + view = {} elif 'get_var_properties' in method: properties = ast.literal_eval(data['text/plain']) self.sig_var_properties.emit(properties)
Added a verification for the existence of the 'text/plain' key
py
diff --git a/tests/test_serializers.py b/tests/test_serializers.py index <HASH>..<HASH> 100644 --- a/tests/test_serializers.py +++ b/tests/test_serializers.py @@ -91,4 +91,11 @@ class TestTokenObtainSerializer(TestCase): with override_api_settings(SECRET_KEY='not_secret'): token = s.get_token(payload) - self.assertEqual(token, 'eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJleHAiOjk0NjY4NDgwMH0.NHpdD2X8ub4SE_MZLBedWa57FCpntGaN_r6f8kNKdUs') + # Token could be one of two depending on header dict ordering + self.assertIn( + token, + ( + 'eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJleHAiOjk0NjY4NDgwMH0.NHpdD2X8ub4SE_MZLBedWa57FCpntGaN_r6f8kNKdUs', + 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJleHAiOjk0NjY4NDgwMH0.jvxQgXCSDToR8uKoRJcMT-LmMJJn2-NM76nfSR2FOgs', + ), + )
Hopefully fix this once and for all now :P
py
diff --git a/api/events/monitors/resources.py b/api/events/monitors/resources.py index <HASH>..<HASH> 100644 --- a/api/events/monitors/resources.py +++ b/api/events/monitors/resources.py @@ -75,7 +75,13 @@ def get_container_resources(container, gpu_resources): job_uuid, experiment_uuid)) - stats = container.stats(decode=True, stream=False) + try: + stats = container.stats(decode=True, stream=False) + except NotFound: + logger.info("`{}` was not found".format(container.name)) + RedisJobContainers.remove_container(container.id) + return + precpu_stats = stats['precpu_stats'] cpu_stats = stats['cpu_stats']
Remove container id if not found for resources monitoring
py
diff --git a/tests/profiling/zipkin_span_benchmark_test.py b/tests/profiling/zipkin_span_benchmark_test.py index <HASH>..<HASH> 100644 --- a/tests/profiling/zipkin_span_benchmark_test.py +++ b/tests/profiling/zipkin_span_benchmark_test.py @@ -82,7 +82,7 @@ def test_zipkin_span_thread_local( TCPTransportHandler(), ]) @pytest.mark.parametrize('sample_rate', [0.15, 100]) -@pytest.mark.parametrize('num_spans', [1, 10, 100]) +@pytest.mark.parametrize('num_spans', [1, 100, 1000]) def test_zipkin_span_logging( benchmark, transport_handler,
Increase the number of spans to test.
py
diff --git a/djnetaxept/managers.py b/djnetaxept/managers.py index <HASH>..<HASH> 100644 --- a/djnetaxept/managers.py +++ b/djnetaxept/managers.py @@ -150,7 +150,7 @@ class NetaxeptTransactionManager(models.Manager): self.require_auth(payment) - if not self.get_query_set().filter(payment=payment, operation='CAPTURE').exists(): + if not self.get_query_set().filter(Q(operation='CAPTURE') | Q(operation='SALE'), payment=payment).exists(): logger.error("No amount captured, cannot credit") raise NoAmountCaptured @@ -187,7 +187,7 @@ class NetaxeptTransactionManager(models.Manager): self.require_auth(payment) - if self.get_query_set().filter(payment=payment, operation='CAPTURE').exists(): + if self.get_query_set().filter(Q(operation='CAPTURE') | Q(operation='SALE'), payment=payment).exists(): logger.error("Amount allready captured, cannot annul") raise AmountAllreadyCaptured
allow credit on sale, disallow annul on sale
py
diff --git a/openpnm/io/Statoil.py b/openpnm/io/Statoil.py index <HASH>..<HASH> 100644 --- a/openpnm/io/Statoil.py +++ b/openpnm/io/Statoil.py @@ -84,6 +84,8 @@ class Statoil(GenericIO): # Add link2 props to net net['throat.length'] = sp.array(link2['throat.length']) net['throat.volume'] = sp.array(link2['throat.volume']) + net['throat.pore1_length'] = sp.array(link2['throat.pore1_length']) + net['throat.pore2_length'] = sp.array(link2['throat.pore2_length']) net['throat.clay_volume'] = sp.array(link2['throat.clay_volume']) # --------------------------------------------------------------------- # Parse the node1 file
Add extra props from statoil import
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -27,7 +27,7 @@ class PyTest(TestCommand): sys.exit(errno) # requirements -install_requirements = ['guessit>=0.9.1', 'babelfish>=0.5.2', 'enzyme>=0.4.1', 'beautifulsoup4>=4.2.0', +install_requirements = ['guessit>=0.9.1,<2.0', 'babelfish>=0.5.2', 'enzyme>=0.4.1', 'beautifulsoup4>=4.2.0', 'requests>=2.0', 'click>=4.0', 'dogpile.cache>=0.5.4', 'stevedore>=1.0.0', 'chardet>=2.3.0', 'pysrt>=1.0.1', 'six>=1.9.0']
Restrict to guessit<<I>
py
diff --git a/sos/sos_script.py b/sos/sos_script.py index <HASH>..<HASH> 100755 --- a/sos/sos_script.py +++ b/sos/sos_script.py @@ -1125,14 +1125,15 @@ for __n, __v in {}.items(): self.global_def += '{} = {}\n'.format(statement[1], statement[2]) else: self.global_def += statement[1] - # remove the global section after inserting it to each step of the process - self.sections = [x for x in self.sections if not x.is_global] # if there is no section in the script, we create a default section with global # definition being the content. - if not self.sections: + if not [x for x in self.sections if not x.is_global]: self.sections.append(SoS_Step(self.content, [('default', None, None)], global_sigil=self.global_sigil)) - #self.sections[0].statements = self.global_def - #self.global_def = '' + for section in [x for x in self.sections if x.is_global]: + self.sections[-1].statements.extend(section.statements) + self.global_def = '' + # remove the global section after inserting it to each step of the process + self.sections = [x for x in self.sections if not x.is_global] # for section in self.sections: # for nested / included sections, we need to keep their own global definition
Fix the handling of global variables due to the addition of multiple global sections
py
diff --git a/user_management/api/tests/urls.py b/user_management/api/tests/urls.py index <HASH>..<HASH> 100644 --- a/user_management/api/tests/urls.py +++ b/user_management/api/tests/urls.py @@ -1,5 +1,5 @@ from django.conf.urls import include, url -from django.contrib.auth.views import login +from django.contrib.auth.views import LoginView urlpatterns = [ @@ -16,5 +16,5 @@ urlpatterns = [ ), namespace='user_management_api', )), - url(r'^login/$', login, name='login') + url(r'^login/$', LoginView.as_view(), name='login') ]
Upgrade to django <I>
py
diff --git a/kuyruk/worker.py b/kuyruk/worker.py index <HASH>..<HASH> 100644 --- a/kuyruk/worker.py +++ b/kuyruk/worker.py @@ -341,7 +341,7 @@ class Worker(object): def _heartbeat_tick(self, connection, stop_event): while not stop_event.wait(1): try: - connection.send_heartbeat() + connection.heartbeat_tick() except socket.timeout: pass except Exception as e:
use Connection.heartbeat_tick instead of Connection.send_heartbeat
py
diff --git a/grid_frame.py b/grid_frame.py index <HASH>..<HASH> 100644 --- a/grid_frame.py +++ b/grid_frame.py @@ -301,11 +301,10 @@ class GridFrame(wx.Frame): self.grid.size_grid() self.grid_box.Add(self.grid, flag=wx.ALL, border=5) self.main_sizer.Fit(self) - if 'age' in self.parent.Parent.validation_mode: - self.grid.paint_invalid_cells(self.parent.Parent.warn_dict['age']) - self.grid.ForceRefresh() - - + if self.parent.Parent.validation_mode: + if 'age' in self.parent.Parent.validation_mode: + self.grid.paint_invalid_cells(self.parent.Parent.warn_dict['age']) + self.grid.ForceRefresh() def init_grid_headers(self):
prevent a warning message if validation_mode has not been set
py
diff --git a/salt/modules/localemod.py b/salt/modules/localemod.py index <HASH>..<HASH> 100644 --- a/salt/modules/localemod.py +++ b/salt/modules/localemod.py @@ -212,7 +212,7 @@ def gen_locale(locale, **kwargs): if not valid and not locale_info['charmap']: # charmap was not supplied, so try copying the codeset locale_info['charmap'] = locale_info['codeset'] - locale = _join_locale(locale_info) + locale = salt.utils.locales.join_locale(locale_info) valid = __salt__['file.search'](search, '^{0}$'.format(locale)) else: # directory-based search if on_suse:
join_locale is now in salt.utils.locales
py
diff --git a/GPy/models/gp_kronecker_gaussian_regression.py b/GPy/models/gp_kronecker_gaussian_regression.py index <HASH>..<HASH> 100644 --- a/GPy/models/gp_kronecker_gaussian_regression.py +++ b/GPy/models/gp_kronecker_gaussian_regression.py @@ -5,7 +5,6 @@ import numpy as np from ..core import Model from paramz import ObsAr from .. import likelihoods -from .. import kern class GPKroneckerGaussianRegression(Model): """ @@ -29,14 +28,9 @@ class GPKroneckerGaussianRegression(Model): } """ - def __init__(self, X1, X2, Y, kern1=None, kern2=None, noise_var=1., name='KGPR'): + def __init__(self, X1, X2, Y, kern1, kern2, noise_var=1., name='KGPR'): Model.__init__(self, name=name) - if kern1 is None: - kern1 = kern.RBF(X1.shape[1]) - if kern2 is None: - kern2 = kern.RBF(X2.shape[1]) - # accept the construction arguments self.X1 = ObsAr(X1) self.X2 = ObsAr(X2)
didn't realize the last 2 changes went to the PR, undoing
py
diff --git a/tests/downscaling/conftest.py b/tests/downscaling/conftest.py index <HASH>..<HASH> 100644 --- a/tests/downscaling/conftest.py +++ b/tests/downscaling/conftest.py @@ -62,3 +62,27 @@ def qds_month(): coords={"quantile": [0, 0.3, 5.0, 7, 1], "month": range(1, 13)}, attrs={"group": "time.month", "window": 1}, ) + + +@pytest.fixture +def obs_sim_fut_tuto(): + def _obs_sim_fut_tuto(fut_offset=3, delta=0.1, smth_win=3, trend=True): + ds = xr.tutorial.open_dataset("air_temperature") + obs = ds.air.resample(time="D").mean() + sim = obs.rolling(time=smth_win, min_periods=1).mean() + delta + fut_time = sim.time + np.timedelta64(730 + fut_offset * 365, "D").astype( + "<m8[ns]" + ) + fut = sim + ( + 0 + if not trend + else xr.DataArray( + np.linspace(0, 2, num=sim.time.size), + dims=("time",), + coords={"time": sim.time}, + ) + ) + fut["time"] = fut_time + return obs, sim, fut + + return _obs_sim_fut_tuto
New simple fixture to get obs, sim, fut from xr tuto
py
diff --git a/nipap-cli/nipap_cli/nipap_cli.py b/nipap-cli/nipap_cli/nipap_cli.py index <HASH>..<HASH> 100755 --- a/nipap-cli/nipap_cli/nipap_cli.py +++ b/nipap-cli/nipap_cli/nipap_cli.py @@ -998,7 +998,8 @@ def modify_prefix(arg, opts): """ spec = { 'prefix': arg } - spec['vrf_rt'] = get_vrf(opts.get('vrf_rt'), abort=True).rt + v = get_vrf(opts.get('vrf_rt'), abort=True) + spec['vrf_rt'] = v.rt res = Prefix.list(spec) if len(res) == 0:
Fix exception for non-existent VRF Trying to modify a prefix in a non-existent VRF would throw an exception instead of a nicely formatted error message as the vrf variable wasn't set at the correct time. Fixed! Fixes #<I>.
py
diff --git a/mzgtfs/test_feed.py b/mzgtfs/test_feed.py index <HASH>..<HASH> 100644 --- a/mzgtfs/test_feed.py +++ b/mzgtfs/test_feed.py @@ -77,6 +77,15 @@ class TestFeed(unittest.TestCase): f = feed.Feed(util.example_feed()) with self.assertRaises(KeyError): f.read('invalidfile') + + def test_read_padding(self): + # The Google GTFS example feed is missing columns in + # stop_times.txt. Check the padding mechanism works. + f = feed.Feed(util.example_feed()) + data = f.read('stop_times') + # Check that all 9 elements are present. + for entity in f.read('stop_times'): + assert len(entity.data) == 9 def test_agencies(self): f = feed.Feed(util.example_feed())
<I>% test coverage for feed
py
diff --git a/udiskie/notify.py b/udiskie/notify.py index <HASH>..<HASH> 100644 --- a/udiskie/notify.py +++ b/udiskie/notify.py @@ -36,11 +36,6 @@ class Notify(object): self._timeout = timeout or {} self._default = self._timeout.get('timeout', -1) self._log = logging.getLogger(__name__) - # pynotify does not store hard references to the notification - # objects. When a signal is received and the notification does not - # exist anymore, no handller will be called. Therefore, we need to - # prevent these notifications from being destroyed by storing - # references (note, notify2 doesn't need this): self._notifications = [] # Subscribe all enabled events to the daemon: udisks = mounter.udisks @@ -198,8 +193,11 @@ class Notify(object): def on_action_click(notification, action): callback(*args) notification.add_action(action, label, on_action_click) - # Need to store a reference (see __init__) only if there is a - # signal connected: + # pynotify does not store hard references to the notification + # objects. When a signal is received and the notification does not + # exist anymore, no handller will be called. Therefore, we need to + # prevent these notifications from being destroyed by storing + # references (note, notify2 doesn't need this): notification.connect('closed', self._notifications.remove) self._notifications.append(notification)
Move another bug related comment in the right place
py
diff --git a/tomodachi/cli/__init__.py b/tomodachi/cli/__init__.py index <HASH>..<HASH> 100644 --- a/tomodachi/cli/__init__.py +++ b/tomodachi/cli/__init__.py @@ -260,9 +260,14 @@ class CLI: print("Invalid config file, invalid JSON format: {}".format(str(e))) sys.exit(2) - if "--production" in args: - index = args.index("--production") - args.pop(index) + env_production = str(os.getenv('TOMODACHI_PRODUCTION', '')).lower() or None + if env_production and env_production in ("0", "no", "none", "false"): + env_production = None + + if env_production or "--production" in args: + if "--production" in args: + index = args.index("--production") + args.pop(index) watcher = None else: cwd = os.getcwd()
Added support for TOMODACHI_PRODUCTION env
py
diff --git a/goatools/obo_parser.py b/goatools/obo_parser.py index <HASH>..<HASH> 100755 --- a/goatools/obo_parser.py +++ b/goatools/obo_parser.py @@ -39,9 +39,10 @@ class OBOReader(object): self.obo_file = obo_file # GOTerm attributes that are necessary for any operations: else: - raise Exception("download obo file first\n " + raise Exception("COULD NOT READ({OBO})\n" + "download obo file first\n " "[http://geneontology.org/ontology/" - "go-basic.obo]") + "go-basic.obo]".format(OBO=obo_file)) def __iter__(self): """Return one GO Term record at a time from an obo file."""
If unable to read obo, print the specific filename of the attempted file.
py
diff --git a/salt/modules/dig.py b/salt/modules/dig.py index <HASH>..<HASH> 100644 --- a/salt/modules/dig.py +++ b/salt/modules/dig.py @@ -137,7 +137,7 @@ def CNAME(host, nameserver=None): .. code-block:: bash - salt ns1 dig.CNAME www.google.com + salt ns1 dig.CNAME mail.google.com """ dig = ["dig", "+short", str(host), "CNAME"]
use a working cname example for joe
py
diff --git a/paramiko/hostkeys.py b/paramiko/hostkeys.py index <HASH>..<HASH> 100644 --- a/paramiko/hostkeys.py +++ b/paramiko/hostkeys.py @@ -80,8 +80,14 @@ class HostKeys (UserDict.DictMixin): posix, it will usually be stored in C{os.path.expanduser("~/.ssh/known_hosts")}. + If this method is called multiple times, the host keys are merged, + not cleared. So multiple calls to C{load} will just call L{add}, + replacing any existing entries and adding new ones. + @param filename: name of the file to read host keys from @type filename: str + + @raise IOError: if there was an error reading the file """ f = file(filename, 'r') for line in f:
[project @ <EMAIL><I>-<I>da<I>fb7a<I>be] slightly more docs to hostkeys
py
diff --git a/lib/lago/__init__.py b/lib/lago/__init__.py index <HASH>..<HASH> 100644 --- a/lib/lago/__init__.py +++ b/lib/lago/__init__.py @@ -702,7 +702,7 @@ class Prefix(object): if "disks" not in spec.keys(): spec["disks"] = ova_disk else: - spec["disks"] = spec["disks"] + ova_disk + spec["disks"] = ova_disk + spec["disks"] new_disks = [] spec['name'] = name
Making the disk order correct for ova case In order for sysprep to handle the right disk the ova disk must be first Change-Id: I0d1dd<I>bb<I>f9fe<I>efee<I>af2bb<I>f
py
diff --git a/MAVProxy/modules/mavproxy_fence.py b/MAVProxy/modules/mavproxy_fence.py index <HASH>..<HASH> 100644 --- a/MAVProxy/modules/mavproxy_fence.py +++ b/MAVProxy/modules/mavproxy_fence.py @@ -29,7 +29,7 @@ def init(_mpstate): mpstate = _mpstate mpstate.fence = fence_state() mpstate.command_map['fence'] = (cmd_fence, "geo-fence management") - mpstate.completions["fence"] = ["<draw|list|clear>", + mpstate.completions["fence"] = ["<draw|list|clear|enable|disable>", "<load|save> (FILENAME)"] if mpstate.continue_mode and mpstate.status.logdir != None: fencetxt = os.path.join(mpstate.status.logdir, 'fence.txt')
fence: added enable/disable to completions
py
diff --git a/telethon/tl/custom/conversation.py b/telethon/tl/custom/conversation.py index <HASH>..<HASH> 100644 --- a/telethon/tl/custom/conversation.py +++ b/telethon/tl/custom/conversation.py @@ -418,10 +418,14 @@ class Conversation(ChatGetter): return self + def cancel(self): + """Cancels the current conversation and exits the context manager.""" + raise _ConversationCancelled() + def __exit__(self, *args): return self._client.loop.run_until_complete(self.__aexit__(*args)) - async def __aexit__(self, *args): + async def __aexit__(self, exc_type, exc_val, exc_tb): chat_id = utils.get_peer_id(self._chat_peer) if self._client._ids_in_conversations[chat_id] == 1: del self._client._ids_in_conversations[chat_id] @@ -430,3 +434,8 @@ class Conversation(ChatGetter): del self._client._conversations[self._id] self._cancel_all() + return isinstance(exc_val, _ConversationCancelled) + + +class _ConversationCancelled(InterruptedError): + pass
Add Conversation.cancel() (#<I>)
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -13,7 +13,7 @@ if sys.version_info <= (2, 5): REQUIREMENTS = [ 'httplib2', - 'oauth2client == 1.3', + 'oauth2client', 'protobuf >= 2.5.0', 'pycrypto', 'pyopenssl',
Unpin oauth2client after <I> release.
py
diff --git a/version.py b/version.py index <HASH>..<HASH> 100644 --- a/version.py +++ b/version.py @@ -1 +1 @@ -__version__ = (0, 2, 1) +__version__ = (0, 3, 0)
bumping version for delorean
py
diff --git a/esipy/client.py b/esipy/client.py index <HASH>..<HASH> 100644 --- a/esipy/client.py +++ b/esipy/client.py @@ -33,7 +33,6 @@ CachedResponse = namedtuple( class EsiClient(BaseClient): - __schemes__ = set(['https']) __image_server__ = { @@ -274,21 +273,20 @@ class EsiClient(BaseClient): now = (datetime.utcnow() - epoch).total_seconds() cache_timeout = int(expire) - int(now) - else: - # if no expire, define that there is no cache - # -1 will be now -1sec, so it'll expire - cache_timeout = -1 + self.cache.set( + cache_key, + CachedResponse( + status_code=res.status_code, + headers=res.headers, + content=res.content, + url=res.url, + ), + cache_timeout, + ) - self.cache.set( - cache_key, - CachedResponse( - status_code=res.status_code, - headers=res.headers, - content=res.content, - url=res.url, - ), - cache_timeout, - ) + else: + # no expires header so we won't cache the call + pass def __make_cache_key(self, request): headers = frozenset(request._p['header'].items())
(#<I>) Instead of using a magic number just don't attempt to cache the data when there is no expire time. This avoids an error in redis version <I> at least where a timeout value of -1 for setex barfs.
py
diff --git a/yotta/lib/component.py b/yotta/lib/component.py index <HASH>..<HASH> 100644 --- a/yotta/lib/component.py +++ b/yotta/lib/component.py @@ -538,7 +538,7 @@ class Component(pack.Pack): errors.append(e) if not t: logger.error( - 'could not install %s %s for target %s' % + 'could not install target %s %s for %s' % (dspec.name, ver, previous_name) ) break @@ -550,7 +550,10 @@ class Component(pack.Pack): leaf_target = t if dspec is None: break - return (target.DerivedTarget(leaf_target, target_hierarchy[1:]), errors) + if leaf_target is None: + return (None, errors) + else: + return (target.DerivedTarget(leaf_target, target_hierarchy[1:]), errors) def installedDependencies(self): ''' Return true if satisfyDependencies has been called.
don't break if the target can't be installed, display a nice message
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -57,8 +57,6 @@ def read_to_rst(fname): except ImportError: return read(fname) -read_to_rst('README.md') - version_dummy = {} exec(read('GPy/__version__.py'), version_dummy) __version__ = version_dummy['__version__'] @@ -139,7 +137,7 @@ setup(name = 'GPy', include_package_data = True, py_modules = ['GPy.__init__'], test_suite = 'GPy.testing', - #long_description=read_to_rst('README.md'), + long_description=read_to_rst('README.md'), install_requires=['numpy>=1.7', 'scipy>=0.16', 'six', 'paramz'], extras_require = {'docs':['sphinx'], 'optional':['mpi4py',
[description] was not being converted to rst
py
diff --git a/main.py b/main.py index <HASH>..<HASH> 100644 --- a/main.py +++ b/main.py @@ -10,8 +10,12 @@ import sys if (__name__ == "__main__"): RUN_FROM_MAIN = False - HOST = '192.168.200.83' + + HOST = '127.0.0.1' PORT = 7777 + + if (len(sys.argv) > 1): + HOST = sys.argv[1] ADDR = (HOST, PORT) protocol = 102 @@ -54,7 +58,7 @@ if (__name__ == "__main__"): client.close() else: - bot = TerraBot("127.0.0.1") + bot = TerraBot(HOST) bot.start() while threading.active_count() > 0: time.sleep(0.1)
You can now specify an ip by CLI
py
diff --git a/visidata/sheets.py b/visidata/sheets.py index <HASH>..<HASH> 100644 --- a/visidata/sheets.py +++ b/visidata/sheets.py @@ -1148,6 +1148,7 @@ SheetsSheet.addCommand('gz^C', 'cancel-rows', 'for vs in selectedRows: cancelThr SheetsSheet.addCommand(ENTER, 'open-row', 'dest=cursorRow; vd.sheets.remove(sheet) if not sheet.precious else None; vd.push(openRow(dest))', 'open sheet referenced in current row') BaseSheet.addCommand('q', 'quit-sheet', 'vd.quit(sheet)', 'quit current sheet') +BaseSheet.addCommand('Q', 'quit-sheet-free', 'vs.rows.clear(); vd.allSheets.remove(sheet); vd.quit(sheet)', 'discard current sheet and free memory') globalCommand('gq', 'quit-all', 'vd.quit(*vd.sheets)', 'quit all sheets (clean exit)') BaseSheet.addCommand('Z', 'splitwin-half', 'splitPane(vd.options.disp_splitwin_pct or 50)', 'ensure split pane is set and push under sheet onto other pane')
[quit] add Q/quit-sheet-free to quit and free associated memory
py
diff --git a/system_maintenance/tests/functional/tests.py b/system_maintenance/tests/functional/tests.py index <HASH>..<HASH> 100644 --- a/system_maintenance/tests/functional/tests.py +++ b/system_maintenance/tests/functional/tests.py @@ -149,3 +149,18 @@ class FunctionalTest(StaticLiveServerTestCase): self.assertEqual( len(self.browser.find_elements_by_css_selector( '.btn-group.hide-on-mobile > .btn')), 14) + + def test_layout_and_styling(self): + # Go to the authentication page + self.browser.get(self.system_maintenance_url('authentication')) + window_width = 768 + self.browser.set_window_size(window_width, window_width / 2) + + # Username and password input boxes are centered + self.find_authentication_elements() + center_username = self.username_inputbox.location['x'] + \ + self.username_inputbox.size['width'] / 2 + center_password = self.password_inputbox.location['x'] + \ + self.password_inputbox.size['width'] / 2 + self.assertAlmostEqual(center_username, window_width / 2, delta=5) + self.assertAlmostEqual(center_password, window_width / 2, delta=5)
Test that Bootstrap works by checking that authentication input boxes are centered
py
diff --git a/isochrones/__init__.py b/isochrones/__init__.py index <HASH>..<HASH> 100644 --- a/isochrones/__init__.py +++ b/isochrones/__init__.py @@ -6,7 +6,9 @@ except NameError: __ISOCHRONES_SETUP__ = False if not __ISOCHRONES_SETUP__: - __all__ = ['dartmouth','basti','padova'] + __all__ = ['dartmouth','basti','padova', + 'Isochrone', 'StarModel', 'BinaryStarModel', + 'TripleStarModel'] from .isochrone import Isochrone from .starmodel import StarModel, BinaryStarModel, TripleStarModel
added more to __all__
py
diff --git a/src/collectors/netapp/netapp.py b/src/collectors/netapp/netapp.py index <HASH>..<HASH> 100644 --- a/src/collectors/netapp/netapp.py +++ b/src/collectors/netapp/netapp.py @@ -25,7 +25,7 @@ Example NetAppCollector.conf: user = root password = strongpassword -```` +``` The primary source for documentation about the API has been "NetApp unified storage performance management using open interfaces"
Fix minor bug in NetAppCollector doc string.
py
diff --git a/openquake/calculators/event_based.py b/openquake/calculators/event_based.py index <HASH>..<HASH> 100644 --- a/openquake/calculators/event_based.py +++ b/openquake/calculators/event_based.py @@ -167,14 +167,15 @@ def compute_hazard(sources_or_ruptures, src_filter, if param['oqparam'].save_ruptures is False: res.events = get_events(ruptures) res['ruptures'] = {} - yield res + #yield res for block in block_splitter(ruptures, RUPTURES_PER_BLOCK): getter = GmfGetter( rlzs_by_gsim, block, sitecol, param['oqparam'], param['min_iml'], param['samples']) - res = AccumDict(ruptures={}) + #res = AccumDict(ruptures={}) res.update(getter.compute_gmfs_curves(monitor)) - yield res + #yield res + return res @base.calculators.add('event_based')
Removed generator task compute_hazard
py
diff --git a/sos/report/__init__.py b/sos/report/__init__.py index <HASH>..<HASH> 100644 --- a/sos/report/__init__.py +++ b/sos/report/__init__.py @@ -532,8 +532,7 @@ class SoSReport(SoSComponent): 'verbosity': self.opts.verbosity, 'cmdlineopts': self.opts, 'devices': self.devices, - 'namespaces': self.namespaces, - 'tempfile_util': self.tempfile_util + 'namespaces': self.namespaces } def get_temp_file(self):
[report] Remove useless inclusion of tempfile_util in commons An early implementation of #<I> included expanding `TempFileUtil` and passing it as a means to create temp files to write command output directly to. That approach was abandoned in favor of a more robust implementation, but the cleanup of the previous design was apparently not complete. Fix this by removing the inclusion of tempfile_util in our `commons` dict.
py
diff --git a/py/test/selenium/webdriver/common/executing_async_javascript_tests.py b/py/test/selenium/webdriver/common/executing_async_javascript_tests.py index <HASH>..<HASH> 100644 --- a/py/test/selenium/webdriver/common/executing_async_javascript_tests.py +++ b/py/test/selenium/webdriver/common/executing_async_javascript_tests.py @@ -23,6 +23,13 @@ from selenium.common.exceptions import TimeoutException from selenium.webdriver.remote.webelement import WebElement +@pytest.fixture(autouse=True) +def reset_timeouts(driver): + driver.set_script_timeout(0) + yield + driver.set_script_timeout(30) + + def testShouldNotTimeoutIfCallbackInvokedImmediately(driver, pages): pages.load("ajaxy_page.html") result = driver.execute_async_script("arguments[arguments.length - 1](123);")
[py] reduce the default script timeout in executing_async_javascript_tests.py (#<I>) When using w3c driver, the default script timeout is <I> seconds, so tests expecting to timeout and not overriding the script timeout take <I> seconds to run. Setting it to 0 makes the tests a lot faster. Then, it's reset to the default value once tests finish.
py
diff --git a/src/python/src/grpc/_adapter/rear.py b/src/python/src/grpc/_adapter/rear.py index <HASH>..<HASH> 100644 --- a/src/python/src/grpc/_adapter/rear.py +++ b/src/python/src/grpc/_adapter/rear.py @@ -170,7 +170,8 @@ class RearLink(ticket_interfaces.RearLink, activated.Activated): if event.status.code is _low.Code.OK: category = tickets.Kind.COMPLETION elif event.status.code is _low.Code.CANCELLED: - category = tickets.Kind.CANCELLATION + # TODO(issue 752): Use a CANCELLATION ticket kind here. + category = tickets.Kind.SERVICER_FAILURE elif event.status.code is _low.Code.EXPIRED: category = tickets.Kind.EXPIRATION else:
Avoid CANCELLATION ticket kind for back-to-front tickets. It's not (yet, see issue <I>) allowed and code at the higher level doesn't know how to deal with it.
py
diff --git a/plydata/dataframe/common.py b/plydata/dataframe/common.py index <HASH>..<HASH> 100644 --- a/plydata/dataframe/common.py +++ b/plydata/dataframe/common.py @@ -221,7 +221,7 @@ class Evaluator: def _all_expressions_evaluated(self): """ - Return True all expressions match with the columns + Return True if all expressions match with the columns Saves some processor cycles """
DOC: Add missing word in docstring title
py
diff --git a/gprof2dot.py b/gprof2dot.py index <HASH>..<HASH> 100755 --- a/gprof2dot.py +++ b/gprof2dot.py @@ -3170,6 +3170,9 @@ def main(): formatNames = list(formats.keys()) formatNames.sort() + themeNames = list(themes.keys()) + themeNames.sort() + optparser = optparse.OptionParser( usage="\n\t%prog [options] [file] ...") optparser.add_option( @@ -3196,9 +3199,9 @@ def main(): help="preferred method of calculating total time: callratios or callstacks (currently affects only perf format) [default: %default]") optparser.add_option( '-c', '--colormap', - type="choice", choices=('color', 'pink', 'gray', 'bw', 'print'), + type="choice", choices=themeNames, dest="theme", default="color", - help="color map: color, pink, gray, bw, or print [default: %default]") + help="color map: %s [default: %%default]" % naturalJoin(themeNames)) optparser.add_option( '-s', '--strip', action="store_true",
Don't hardcode theme names in opt parsing code This change makes them work the same way as formatNames.
py
diff --git a/safe/utilities/analysis_handler.py b/safe/utilities/analysis_handler.py index <HASH>..<HASH> 100644 --- a/safe/utilities/analysis_handler.py +++ b/safe/utilities/analysis_handler.py @@ -464,7 +464,7 @@ class AnalysisHandler(QObject): if self.zoom_to_impact_flag: self.iface.zoomToActiveLayer() if self.hide_exposure_flag: - exposure_layer = self.impact_function.exposure + exposure_layer = self.impact_function.exposure.qgis_layer() legend = self.iface.legendInterface() legend.setLayerVisible(exposure_layer, False)
fix hide exposure layer after an IF in develop
py
diff --git a/tensorflow_probability/python/experimental/sts_gibbs/spike_and_slab.py b/tensorflow_probability/python/experimental/sts_gibbs/spike_and_slab.py index <HASH>..<HASH> 100644 --- a/tensorflow_probability/python/experimental/sts_gibbs/spike_and_slab.py +++ b/tensorflow_probability/python/experimental/sts_gibbs/spike_and_slab.py @@ -646,6 +646,7 @@ def _symmetric_update_chol(chol, idx, value): """Sets the value of a row and column in a Cholesky-factorized matrix.""" # TODO(davmre): is a more efficient direct implementation possible? old_value = tf.reduce_sum(chol * chol[..., idx : idx + 1, :], axis=-1) + old_value = tf.ensure_shape(old_value, value.shape) return _symmetric_increment_chol(chol, idx, increment=value - old_value)
Retain shape information through a reduce_sum. PiperOrigin-RevId: <I>
py
diff --git a/fabfile.py b/fabfile.py index <HASH>..<HASH> 100644 --- a/fabfile.py +++ b/fabfile.py @@ -249,6 +249,7 @@ def collect_remote_statics(): 'osmtogeojson': 'git://github.com/aaronlidman/osm-and-geojson.git@master', 'loading': 'git://github.com/ebrelsford/Leaflet.loading.git@master', 'contextmenu': 'git://github.com/aratcliffe/Leaflet.contextmenu.git@master', + 'markercluster': 'git://github.com/Leaflet/Leaflet.markercluster.git@master', } with cd(remote_static_dir): for subdir, path in remote_repositories.iteritems():
Add markercluster plugin in fabfile
py