Columns:

    Unnamed: 0    int64           0 - 10k
    function      stringlengths   79 - 138k
    label         stringclasses   20 values
    info          stringlengths   42 - 261
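Each numbered row below pairs a Python function whose masked exception type is replaced by __HOLE__ (column "function") with the exception class that fills the hole (column "label") and the source path in the ETHPy150 corpus (column "info"). As a minimal sketch of how such a dump can be consumed (assuming it is available as a CSV with the four columns above; the file name here is hypothetical):

    import pandas as pd

    # Hypothetical file name; the dump is a table with the columns
    # "Unnamed: 0", "function", "label", and "info" described above.
    df = pd.read_csv('eth_py150_holes.csv')

    # Each row pairs a function containing the __HOLE__ placeholder
    # with the exception class ("label") that belongs in the hole.
    for _, row in df.head(3).iterrows():
        print(row['label'], '<-', row['info'])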
8,800
def gds_validate_integer_list(self, input_data, node, input_name=''):
    values = input_data.split()
    for value in values:
        try:
            fvalue = float(value)
        except (__HOLE__, ValueError), exp:
            raise_parse_error(node, 'Requires sequence of integers')
    return input_data
TypeError
dataset/ETHPy150Open lsaffre/lino/lino/sandbox/bcss/PerformInvestigation.py/GeneratedsSuper.gds_validate_integer_list
8,801
def gds_validate_float_list(self, input_data, node, input_name=''):
    values = input_data.split()
    for value in values:
        try:
            fvalue = float(value)
        except (TypeError, __HOLE__), exp:
            raise_parse_error(node, 'Requires sequence of floats')
    return input_data
ValueError
dataset/ETHPy150Open lsaffre/lino/lino/sandbox/bcss/PerformInvestigation.py/GeneratedsSuper.gds_validate_float_list
8,802
def gds_validate_double_list(self, input_data, node, input_name=''):
    values = input_data.split()
    for value in values:
        try:
            fvalue = float(value)
        except (TypeError, __HOLE__), exp:
            raise_parse_error(node, 'Requires sequence of doubles')
    return input_data
ValueError
dataset/ETHPy150Open lsaffre/lino/lino/sandbox/bcss/PerformInvestigation.py/GeneratedsSuper.gds_validate_double_list
8,803
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
    if nodeName_ == 'SSIN':
        SSIN_ = child_.text
        SSIN_ = self.gds_validate_string(SSIN_, node, 'SSIN')
        self.SSIN = SSIN_
        self.validate_t_SSIN(self.SSIN)  # validate type t_SSIN
    elif nodeName_ == 'OrgUnit':
        OrgUnit_ = child_.text
        OrgUnit_ = self.gds_validate_string(OrgUnit_, node, 'OrgUnit')
        self.OrgUnit = OrgUnit_
    elif nodeName_ == 'Purpose':
        sval_ = child_.text
        try:
            ival_ = int(sval_)
        except (TypeError, ValueError), exp:
            raise_parse_error(child_, 'requires integer: %s' % exp)
        ival_ = self.gds_validate_integer(ival_, node, 'Purpose')
        self.Purpose = ival_
    elif nodeName_ == 'Period':
        obj_ = PeriodType.factory()
        obj_.build(child_)
        self.set_Period(obj_)
    elif nodeName_ == 'InscriptionCode':
        sval_ = child_.text
        try:
            ival_ = int(sval_)
        except (TypeError, ValueError), exp:
            raise_parse_error(child_, 'requires integer: %s' % exp)
        ival_ = self.gds_validate_integer(ival_, node, 'InscriptionCode')
        self.InscriptionCode = ival_
    elif nodeName_ == 'PhaseCode':
        sval_ = child_.text
        try:
            ival_ = int(sval_)
        except (__HOLE__, ValueError), exp:
            raise_parse_error(child_, 'requires integer: %s' % exp)
        ival_ = self.gds_validate_integer(ival_, node, 'PhaseCode')
        self.PhaseCode = ival_
# end class InscriptionType
TypeError
dataset/ETHPy150Open lsaffre/lino/lino/sandbox/bcss/PerformInvestigation.py/InscriptionType.buildChildren
8,804
def load_env():
    """ Load .env file to the environment """
    try:
        with open('.env') as f:
            content = f.read()
    except __HOLE__:
        content = ''
    for line in content.splitlines():
        m1 = re.match(r'\A([A-Za-z_0-9]+)=(.*)\Z', line)
        if m1:
            key, val = m1.group(1), m1.group(2)
            m2 = re.match(r"\A'(.*)'\Z", val)
            if m2:
                val = m2.group(1)
            m3 = re.match(r'\A"(.*)"\Z', val)
            if m3:
                val = re.sub(r'\\(.)', r'\1', m3.group(1))
            os.environ.setdefault(key, val)
IOError
dataset/ETHPy150Open djng/djng/server/manage.py/load_env
8,805
def hydrate(self, bundle):
    """
    Takes data stored in the bundle for the field and returns it. Used for
    taking simple data and building a instance object.
    """
    if self.readonly:
        return None
    if self.instance_name not in bundle.data:
        if self.is_related and not self.is_m2m:
            # We've got an FK (or alike field) & a possible parent object.
            # Check for it.
            if bundle.related_obj and bundle.related_name in (self.attribute, self.instance_name):
                return bundle.related_obj
        if self.blank:
            return None
        if self.attribute:
            try:
                val = getattr(bundle.obj, self.attribute, None)
                if val is not None:
                    return val
            except __HOLE__:
                pass
        if self.instance_name:
            try:
                if hasattr(bundle.obj, self.instance_name):
                    return getattr(bundle.obj, self.instance_name)
            except ObjectDoesNotExist:
                pass
        if self.has_default():
            if callable(self._default):
                return self._default()
            return self._default
        if self.null:
            return None
        raise ApiFieldError("The '%s' field has no data and doesn't allow a default or null value." % self.instance_name)
    return bundle.data[self.instance_name]
ObjectDoesNotExist
dataset/ETHPy150Open django-tastypie/django-tastypie/tastypie/fields.py/ApiField.hydrate
8,806
def convert(self, value):
    if value is None:
        return None
    try:
        # Try to return the URL if it's a ``File``, falling back to the string
        # itself if it's been overridden or is a default.
        return getattr(value, 'url', value)
    except __HOLE__:
        return None
ValueError
dataset/ETHPy150Open django-tastypie/django-tastypie/tastypie/fields.py/FileField.convert
8,807
def convert(self, value):
    if value is None:
        return None
    if isinstance(value, six.string_types):
        try:
            year, month, day = value[:10].split('-')
            return datetime_safe.date(int(year), int(month), int(day))
        except __HOLE__:
            raise ApiFieldError("Date provided to '%s' field doesn't appear to be a valid date string: '%s'" % (self.instance_name, value))
    return value
ValueError
dataset/ETHPy150Open django-tastypie/django-tastypie/tastypie/fields.py/DateField.convert
8,808
def hydrate(self, bundle):
    value = super(DateField, self).hydrate(bundle)
    if value and not hasattr(value, 'year'):
        try:
            # Try to rip a date/datetime out of it.
            value = make_aware(parse(value))
            if hasattr(value, 'hour'):
                value = value.date()
        except __HOLE__:
            pass
    return value
ValueError
dataset/ETHPy150Open django-tastypie/django-tastypie/tastypie/fields.py/DateField.hydrate
8,809
def convert(self, value):
    if value is None:
        return None
    if isinstance(value, six.string_types):
        try:
            year, month, day = value[:10].split('-')
            hour, minute, second = value[10:18].split(':')
            return make_aware(datetime_safe.datetime(int(year), int(month), int(day), int(hour), int(minute), int(second)))
        except __HOLE__:
            raise ApiFieldError("Datetime provided to '%s' field doesn't appear to be a valid datetime string: '%s'" % (self.instance_name, value))
    return value
ValueError
dataset/ETHPy150Open django-tastypie/django-tastypie/tastypie/fields.py/DateTimeField.convert
8,810
def hydrate(self, bundle):
    value = super(DateTimeField, self).hydrate(bundle)
    if value and not hasattr(value, 'year'):
        if isinstance(value, six.string_types):
            try:
                # Try to rip a date/datetime out of it.
                value = make_aware(parse(value))
            except (ValueError, __HOLE__):
                raise ApiFieldError("Datetime provided to '%s' field doesn't appear to be a valid datetime string: '%s'" % (self.instance_name, value))
        else:
            raise ApiFieldError("Datetime provided to '%s' field must be a string: %s" % (self.instance_name, value))
    return value
TypeError
dataset/ETHPy150Open django-tastypie/django-tastypie/tastypie/fields.py/DateTimeField.hydrate
8,811
def resource_from_uri(self, fk_resource, uri, request=None, related_obj=None, related_name=None):
    """
    Given a URI is provided, the related resource is attempted to be
    loaded based on the identifiers in the URI.
    """
    err_msg = "Could not find the provided %s object via resource URI '%s'." % (fk_resource._meta.resource_name, uri,)
    if not uri:
        raise ApiFieldError(err_msg)
    try:
        obj = fk_resource.get_via_uri(uri, request=request)
        bundle = fk_resource.build_bundle(
            obj=obj,
            request=request,
            via_uri=True
        )
        return fk_resource.full_dehydrate(bundle)
    except __HOLE__:
        raise ApiFieldError(err_msg)
ObjectDoesNotExist
dataset/ETHPy150Open django-tastypie/django-tastypie/tastypie/fields.py/RelatedField.resource_from_uri
8,812
def resource_from_data(self, fk_resource, data, request=None, related_obj=None, related_name=None):
    """
    Given a dictionary-like structure is provided, a fresh related
    resource is created using that data.
    """
    # Try to hydrate the data provided.
    data = dict_strip_unicode_keys(data)
    obj = None
    if getattr(fk_resource._meta, 'include_resource_uri', True) and 'resource_uri' in data:
        uri = data['resource_uri']
        err_msg = "Could not find the provided %s object via resource URI '%s'." % (fk_resource._meta.resource_name, uri,)
        try:
            obj = fk_resource.get_via_uri(uri, request=request)
        except ObjectDoesNotExist:
            raise ApiFieldError(err_msg)
    fk_bundle = fk_resource.build_bundle(
        data=data,
        obj=obj,
        request=request
    )
    if related_obj:
        fk_bundle.related_obj = related_obj
        fk_bundle.related_name = related_name
    unique_keys = {
        k: v
        for k, v in data.items()
        if k == 'pk' or (hasattr(fk_resource, k) and getattr(fk_resource, k).unique)
    }
    # If we have no unique keys, we shouldn't go look for some resource that
    # happens to match other kwargs. In the case of a create, it might be the
    # completely wrong resource.
    # We also need to check to see if updates are allowed on the FK resource.
    if not obj and unique_keys:
        try:
            fk_resource.obj_get(fk_bundle, **data)
        except (ObjectDoesNotExist, NotFound, TypeError):
            try:
                # Attempt lookup by primary key
                fk_resource.obj_get(fk_bundle, **unique_keys)
            except (__HOLE__, NotFound):
                pass
        except MultipleObjectsReturned:
            pass
    # If we shouldn't update a resource, or we couldn't find a matching
    # resource we'll just return a populated bundle instead
    # of mistakenly updating something that should be read-only.
    fk_bundle = fk_resource.full_hydrate(fk_bundle)
    fk_resource.is_valid(fk_bundle)
    return fk_bundle
ObjectDoesNotExist
dataset/ETHPy150Open django-tastypie/django-tastypie/tastypie/fields.py/RelatedField.resource_from_data
8,813
def dehydrate(self, bundle, for_list=True):
    foreign_obj = None
    if callable(self.attribute):
        previous_obj = bundle.obj
        foreign_obj = self.attribute(bundle)
    elif isinstance(self.attribute, six.string_types):
        foreign_obj = bundle.obj
        for attr in self._attrs:
            previous_obj = foreign_obj
            try:
                foreign_obj = getattr(foreign_obj, attr, None)
            except __HOLE__:
                foreign_obj = None
    if not foreign_obj:
        if not self.null:
            if callable(self.attribute):
                raise ApiFieldError("The related resource for resource %s could not be found." % (previous_obj))
            else:
                raise ApiFieldError("The model '%r' has an empty attribute '%s' and doesn't allow a null value." % (previous_obj, attr))
        return None
    fk_resource = self.get_related_resource(foreign_obj)
    fk_bundle = Bundle(obj=foreign_obj, request=bundle.request)
    return self.dehydrate_related(fk_bundle, fk_resource, for_list=for_list)
ObjectDoesNotExist
dataset/ETHPy150Open django-tastypie/django-tastypie/tastypie/fields.py/ToOneField.dehydrate
8,814
def dehydrate(self, bundle, for_list=True):
    if not bundle.obj or not bundle.obj.pk:
        if not self.null:
            raise ApiFieldError("The model '%r' does not have a primary key and can not be used in a ToMany context." % bundle.obj)
        return []
    the_m2ms = None
    previous_obj = bundle.obj
    attr = self.attribute
    if callable(self.attribute):
        the_m2ms = self.attribute(bundle)
    elif isinstance(self.attribute, six.string_types):
        the_m2ms = bundle.obj
        for attr in self._attrs:
            previous_obj = the_m2ms
            try:
                the_m2ms = getattr(the_m2ms, attr, None)
            except __HOLE__:
                the_m2ms = None
            if the_m2ms is None:
                break
    if the_m2ms is None:
        if not self.null:
            raise ApiFieldError("The model '%r' has an empty attribute '%s' and doesn't allow a null value." % (previous_obj, attr))
    if isinstance(the_m2ms, models.Manager):
        the_m2ms = the_m2ms.all()
    m2m_dehydrated = [
        self.dehydrate_related(
            Bundle(obj=m2m, request=bundle.request),
            self.get_related_resource(m2m),
            for_list=for_list
        )
        for m2m in the_m2ms
    ]
    return m2m_dehydrated
ObjectDoesNotExist
dataset/ETHPy150Open django-tastypie/django-tastypie/tastypie/fields.py/ToManyField.dehydrate
8,815
def to_time(self, s):
    try:
        dt = parse(s)
    except (__HOLE__, TypeError) as e:
        raise ApiFieldError(str(e))
    else:
        return datetime.time(dt.hour, dt.minute, dt.second, dt.microsecond)
ValueError
dataset/ETHPy150Open django-tastypie/django-tastypie/tastypie/fields.py/TimeField.to_time
8,816
def opt_port(self, portstr):
    """
    Specify the port number to listen on.
    """
    try:
        self['port'] = int(portstr)
    except __HOLE__:
        raise UsageError(
            "Specify an integer between 0 and 65535 as a port number.")
    if self['port'] >= 2 ** 16:
        raise UsageError(
            "Specify an integer between 0 and 65535 as a port number.")
    elif self['port'] < 0:
        raise UsageError(
            "Specify an integer between 0 and 65535 as a port number.")
ValueError
dataset/ETHPy150Open twisted/nevow/nevow/_widget_plugin.py/Options.opt_port
8,817
def opt_element(self, qualifiedName):
    """
    Specify the LiveElement or LiveFragment class to serve.
    """
    try:
        factory = namedAny(qualifiedName)
    except (__HOLE__, AttributeError):
        raise UsageError("Specify a valid class name to --element")
    self['element'] = factory
ValueError
dataset/ETHPy150Open twisted/nevow/nevow/_widget_plugin.py/Options.opt_element
8,818
def execute(command, cwd=None):
    try:
        st = subprocess.PIPE
        proc = subprocess.Popen(
            args=command, stdout=st, stderr=st, stdin=st, cwd=cwd)
        (output, error) = proc.communicate()
        code = proc.returncode
        return code, output, error
    except __HOLE__ as error:
        return -1, '', error
OSError
dataset/ETHPy150Open mozilla/pontoon/bin/mozilla-en-US.py/execute
8,819
def __call__(self, *args, **kw):
    obj = args[0]
    try:
        cache = obj.__cache
    except __HOLE__:
        cache = obj.__cache = {}
    key = (self.func, args[1:], frozenset(kw.items()))
    try:
        res = cache[key]
    except KeyError:
        res = cache[key] = self.func(*args, **kw)
    return res
AttributeError
dataset/ETHPy150Open koenbok/Cactus/cactus/utils/helpers.py/memoize.__call__
8,820
def test_empty_subdir1(self):
    """
    Setting subdir to empty path should raise an error.
    """
    try:
        self.auth.load_creds(subdir='')
        # raises ValueError (zero length field name in format) for python 2.6
        # OSError for the rest
    except OSError:
        pass
    except __HOLE__:
        pass
    except Exception as e:
        self.fail('Unexpected exception thrown: %s' % e)
    else:
        self.fail('OSError exception not thrown.')
ValueError
dataset/ETHPy150Open nltk/nltk/nltk/test/unit/test_twitter_auth.py/TestCredentials.test_empty_subdir1
8,821
def test_empty_subdir2(self):
    """
    Setting subdir to `None` should raise an error.
    """
    self.auth.creds_subdir = None
    try:
        self.auth.load_creds()
    except __HOLE__:
        pass
    except Exception as e:
        self.fail('Unexpected exception thrown: %s' % e)
    else:
        self.fail('ValueError exception not thrown.')
ValueError
dataset/ETHPy150Open nltk/nltk/nltk/test/unit/test_twitter_auth.py/TestCredentials.test_empty_subdir2
8,822
def test_missingdir(self):
    """
    Setting subdir to nonexistent directory should raise an error.
    """
    try:
        self.auth.load_creds(subdir='/nosuchdir')
        # raises ValueError (zero length field name in format) for python 2.6
        # OSError for the rest
    except __HOLE__:
        pass
    except ValueError:
        pass
    except Exception as e:
        self.fail('Unexpected exception thrown: %s' % e)
    else:
        self.fail('OSError exception not thrown.')
OSError
dataset/ETHPy150Open nltk/nltk/nltk/test/unit/test_twitter_auth.py/TestCredentials.test_missingdir
8,823
def test_missingfile1(self):
    """
    Defaults for authentication will fail since 'credentials.txt' not
    present in default subdir, as read from `os.environ['TWITTER']`.
    """
    try:
        self.auth.load_creds()
        # raises ValueError (zero length field name in format) for python 2.6
        # OSError for the rest
    except __HOLE__:
        pass
    except ValueError:
        pass
    except Exception as e:
        self.fail('Unexpected exception thrown: %s' % e)
    else:
        self.fail('OSError exception not thrown.')
OSError
dataset/ETHPy150Open nltk/nltk/nltk/test/unit/test_twitter_auth.py/TestCredentials.test_missingfile1
8,824
def test_missingfile2(self):
    """
    Credentials file 'foobar' cannot be found in default subdir.
    """
    try:
        self.auth.load_creds(creds_file='foobar')
        # raises ValueError (zero length field name in format) for python 2.6
        # OSError for the rest
    except OSError:
        pass
    except __HOLE__:
        pass
    except Exception as e:
        self.fail('Unexpected exception thrown: %s' % e)
    else:
        self.fail('OSError exception not thrown.')
ValueError
dataset/ETHPy150Open nltk/nltk/nltk/test/unit/test_twitter_auth.py/TestCredentials.test_missingfile2
8,825
def test_incomplete_file(self):
    """
    Credentials file 'bad_oauth1-1.txt' is incomplete
    """
    try:
        self.auth.load_creds(creds_file='bad_oauth1-1.txt',
                             subdir=self.subdir)
    except __HOLE__:
        pass
    except Exception as e:
        self.fail('Unexpected exception thrown: %s' % e)
    else:
        self.fail('ValueError exception not thrown.')
ValueError
dataset/ETHPy150Open nltk/nltk/nltk/test/unit/test_twitter_auth.py/TestCredentials.test_incomplete_file
8,826
def test_malformed_file1(self):
    """
    First key in credentials file 'bad_oauth1-2.txt' is ill-formed
    """
    try:
        self.auth.load_creds(creds_file='bad_oauth1-2.txt',
                             subdir=self.subdir)
    except __HOLE__:
        pass
    except Exception as e:
        self.fail('Unexpected exception thrown: %s' % e)
    else:
        self.fail('ValueError exception not thrown.')
ValueError
dataset/ETHPy150Open nltk/nltk/nltk/test/unit/test_twitter_auth.py/TestCredentials.test_malformed_file1
8,827
def test_malformed_file2(self):
    """
    First key in credentials file 'bad_oauth1-2.txt' is ill-formed
    """
    try:
        self.auth.load_creds(creds_file='bad_oauth1-3.txt',
                             subdir=self.subdir)
    except __HOLE__:
        pass
    except Exception as e:
        self.fail('Unexpected exception thrown: %s' % e)
    else:
        self.fail('ValueError exception not thrown.')
ValueError
dataset/ETHPy150Open nltk/nltk/nltk/test/unit/test_twitter_auth.py/TestCredentials.test_malformed_file2
8,828
def Batches(self):
    """Reads from the record_generator and generates UploadWorkItems.

    Yields:
      Instances of class UploadWorkItem

    Raises:
      ResumeError: If the progress database and data file indicate a different
        number of rows.
    """
    if self.skip_first:
        logger.info('Skipping header line.')
        try:
            self.reader.next()
        except StopIteration:
            return
    exhausted = False
    self.line_number = 1
    self.column_count = None
    logger.info('Starting import; maximum %d entities per post',
                self.batch_size)
    state = None
    if self.progress_generator:
        for progress_key, state, kind, key_start, key_end in (
                self.progress_generator):
            if key_start:
                try:
                    self._AdvanceTo(key_start)
                    self._ReadRows(key_start, key_end)
                    yield self._MakeItem(key_start,
                                         key_end,
                                         self.read_rows,
                                         progress_key=progress_key)
                except __HOLE__:
                    logger.error('Mismatch between data file and progress database')
                    raise ResumeError(
                        'Mismatch between data file and progress database')
            elif state == DATA_CONSUMED_TO_HERE:
                try:
                    self._AdvanceTo(key_end + 1)
                except StopIteration:
                    state = None
    if self.progress_generator is None or state == DATA_CONSUMED_TO_HERE:
        while not exhausted:
            key_start = self.line_number
            key_end = self.line_number + self.batch_size - 1
            try:
                self._ReadRows(key_start, key_end)
            except StopIteration:
                exhausted = True
                key_end = self.line_number - 1
            if key_start <= key_end:
                yield self._MakeItem(key_start, key_end, self.read_rows)
StopIteration
dataset/ETHPy150Open GoogleCloudPlatform/python-compat-runtime/appengine-compat/exported_appengine_sdk/google/appengine/tools/bulkloader.py/UploadWorkItemGenerator.Batches
8,829
def GetImplementationClass(kind_or_class_key):
    """Returns the implementation class for a given kind or class key.

    Args:
      kind_or_class_key: A kind string or a tuple of kind strings.

    Return:
      A db.Model subclass for the given kind or class key.
    """
    if isinstance(kind_or_class_key, tuple):
        try:
            implementation_class = polymodel._class_map[kind_or_class_key]
        except __HOLE__:
            raise db.KindError('No implementation for class \'%s\'' %
                               kind_or_class_key)
    else:
        implementation_class = db.class_for_kind(kind_or_class_key)
    return implementation_class
KeyError
dataset/ETHPy150Open GoogleCloudPlatform/python-compat-runtime/appengine-compat/exported_appengine_sdk/google/appengine/tools/bulkloader.py/GetImplementationClass
8,830
def EncodeContent(self, rows, loader=None):
    """Encodes row data to the wire format.

    Args:
      rows: A list of pairs of a line number and a list of column values.
      loader: Used for dependency injection.

    Returns:
      A list of datastore.Entity instances.

    Raises:
      ConfigurationError: if no loader is defined for self.kind
    """
    if not loader:
        try:
            loader = Loader.RegisteredLoader(self.kind)
        except __HOLE__:
            logger.error('No Loader defined for kind %s.' % self.kind)
            raise ConfigurationError('No Loader defined for kind %s.' % self.kind)
    entities = []
    for line_number, values in rows:
        key = loader.generate_key(line_number, values)
        if isinstance(key, datastore.Key):
            parent = key.parent()
            key = key.name()
        else:
            parent = None
        entity = loader.create_entity(values, key_name=key, parent=parent)

        def ToEntity(entity):
            if isinstance(entity, db.Model):
                return entity._populate_entity()
            else:
                return entity

        if not entity:
            continue
        if isinstance(entity, list):
            entities.extend(map(ToEntity, entity))
        elif entity:
            entities.append(ToEntity(entity))
    return entities
KeyError
dataset/ETHPy150Open GoogleCloudPlatform/python-compat-runtime/appengine-compat/exported_appengine_sdk/google/appengine/tools/bulkloader.py/RequestManager.EncodeContent
8,831
def GetMapper(self, kind):
    """Returns a mapper for the registered kind.

    Returns:
      A Mapper instance.

    Raises:
      ConfigurationError: if no Mapper is defined for kind
    """
    if not self.mapper:
        try:
            self.mapper = Mapper.RegisteredMapper(kind)
        except __HOLE__:
            logger.error('No Mapper defined for kind %s.' % kind)
            raise ConfigurationError('No Mapper defined for kind %s.' % kind)
    return self.mapper
KeyError
dataset/ETHPy150Open GoogleCloudPlatform/python-compat-runtime/appengine-compat/exported_appengine_sdk/google/appengine/tools/bulkloader.py/RequestManager.GetMapper
8,832
def __ExtractProperties(self, entity):
    """Converts an entity into a list of string values.

    Args:
      entity: An entity to extract the properties from.

    Returns:
      A list of the properties of the entity.

    Raises:
      MissingPropertyError: if an expected field on the entity is missing.
    """
    encoding = []
    for name, fn, default in self.__properties:
        try:
            encoding.append(fn(entity[name]))
        except __HOLE__:
            if name == '__key__':
                encoding.append(fn(entity.key()))
            elif default is None:
                raise MissingPropertyError(name)
            else:
                encoding.append(default)
    return encoding
KeyError
dataset/ETHPy150Open GoogleCloudPlatform/python-compat-runtime/appengine-compat/exported_appengine_sdk/google/appengine/tools/bulkloader.py/Exporter.__ExtractProperties
8,833
def LoadConfig(config_file_name, exit_fn=sys.exit):
    """Loads a config file and registers any Loader classes present.

    Used for a legacy Python configuration file.

    Args:
      config_file_name: The name of the configuration file.
      exit_fn: Used for dependency injection.
    """
    if config_file_name:
        config_file = open(config_file_name, 'r')
        try:
            bulkloader_config = imp.load_module(
                'bulkloader_config', config_file, config_file_name,
                ('', 'r', imp.PY_SOURCE))
            sys.modules['bulkloader_config'] = bulkloader_config
            if hasattr(bulkloader_config, 'loaders'):
                for cls in bulkloader_config.loaders:
                    Loader.RegisterLoader(cls())
            if hasattr(bulkloader_config, 'exporters'):
                for cls in bulkloader_config.exporters:
                    Exporter.RegisterExporter(cls())
            if hasattr(bulkloader_config, 'mappers'):
                for cls in bulkloader_config.mappers:
                    Mapper.RegisterMapper(cls())
        except __HOLE__, e:
            m = re.search(r"[^']*'([^']*)'.*", str(e))
            if m.groups() and m.group(1) == 'Loader':
                print >> sys.stderr, """
The config file format has changed and you appear to be using an old-style
config file. Please make the following changes:

1. At the top of the file, add this:

from google.appengine.tools.bulkloader import Loader

2. For each of your Loader subclasses add the following at the end of the
   __init__ definitioion:

self.alias_old_names()

3. At the bottom of the file, add this:

loaders = [MyLoader1,...,MyLoaderN]

Where MyLoader1,...,MyLoaderN are the Loader subclasses you want the bulkloader
to have access to.
"""
                exit_fn(1)
            else:
                raise
        except Exception, e:
            if isinstance(e, NameClashError) or 'bulkloader_config' in vars() and (
                    hasattr(bulkloader_config, 'bulkloader') and
                    isinstance(e, bulkloader_config.bulkloader.NameClashError)):
                print >> sys.stderr, (
                    'Found both %s and %s while aliasing old names on %s.' %
                    (e.old_name, e.new_name, e.klass))
                exit_fn(1)
            else:
                raise
NameError
dataset/ETHPy150Open GoogleCloudPlatform/python-compat-runtime/appengine-compat/exported_appengine_sdk/google/appengine/tools/bulkloader.py/LoadConfig
8,834
def compile_gpu_func(nan_is_error, inf_is_error, big_is_error):
    """ compile utility function used by contains_nan and contains_inf
    """
    global f_gpumin, f_gpumax, f_gpuabsmax
    if not cuda.cuda_available:
        return
    guard_input = cuda.fvector('nan_guard')
    cuda_compile_failed = False
    if (nan_is_error or inf_is_error) and f_gpumin is None:
        try:
            f_gpumin = theano.function(
                [guard_input], T.min(guard_input),
                mode='FAST_RUN'
            )
        except RuntimeError:
            # This can happen if cuda is available, but the
            # device is in exclusive mode and used by another
            # process.
            cuda_compile_failed = True
    if inf_is_error and not cuda_compile_failed and f_gpumax is None:
        try:
            f_gpumax = theano.function(
                [guard_input], T.max(guard_input),
                mode='FAST_RUN'
            )
        except RuntimeError:
            # This can happen if cuda is available, but the
            # device is in exclusive mode and used by another
            # process.
            cuda_compile_failed = True
    if big_is_error and not cuda_compile_failed and f_gpuabsmax is None:
        try:
            f_gpuabsmax = theano.function(
                [guard_input], T.max(T.abs_(guard_input)),
                mode='FAST_RUN'
            )
        except __HOLE__:
            # This can happen if cuda is available, but the
            # device is in exclusive mode and used by another
            # process.
            cuda_compile_failed = True
RuntimeError
dataset/ETHPy150Open rizar/attention-lvcsr/libs/Theano/theano/compile/nanguardmode.py/compile_gpu_func
8,835
def Exec(cmd, stdin=None):
    """Executes a process and returns exit code, stdout, stderr.

    Args:
      cmd: str or sequence, command and optional arguments to execute.
      stdin: str, optional, to send to standard in.

    Returns:
      Tuple. (Integer return code, string standard out, string standard error).

    Raises:
      ExecError: When an error occurs while executing cmd. Exec did not complete.
    """
    shell = isinstance(cmd, basestring)
    logging.debug('Exec(%s, shell=%s)', cmd, shell)
    try:
        p = subprocess.Popen(
            cmd, shell=shell,
            stderr=subprocess.PIPE, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
    except __HOLE__ as e:
        raise ExecError(str(e))
    stdout, stderr = p.communicate(stdin)
    return p.returncode, stdout, stderr
OSError
dataset/ETHPy150Open google/cauliflowervest/src/cauliflowervest/client/util.py/Exec
8,836
def GetRootDisk():
    """Returns the device name of the root disk.

    Returns:
      str, like "/dev/disk...."

    Raises:
      Error: When the root disk could not be found.
    """
    try:
        returncode, stdout, _ = Exec(('/sbin/mount'))
    except ExecError:
        returncode = 'ExecError'
    if returncode != 0:
        raise Error(
            'Could not enumerate mounted disks, mount exit status %s' % returncode)
    for line in stdout.splitlines():
        try:
            device, _, mount_point, _ = line.split(' ', 3)
            if mount_point == '/' and re.search(r'^[/a-z0-9]+$', device, re.I):
                return device
        except __HOLE__:
            pass
    raise Error('Could not find root disk.')
ValueError
dataset/ETHPy150Open google/cauliflowervest/src/cauliflowervest/client/util.py/GetRootDisk
8,837
def SafeOpen(path, mode, open_=open):
    """Opens a file, guaranteeing that its directory exists, and makes it 0600.

    Args:
      path: str, path to the file to open, just like open().
      mode: str, open mode to perform, just like open().
      open_: callable, dependency injection for tests only.

    Returns:
      A handle to the open file, just like open().
    """
    try:
        os.makedirs(os.path.dirname(path), 0700)
        os.mknod(path, 0600 | stat.S_IFREG)
    except __HOLE__:
        # File exists.
        pass
    return open_(path, mode)
OSError
dataset/ETHPy150Open google/cauliflowervest/src/cauliflowervest/client/util.py/SafeOpen
8,838
def SupplyEntropy(entropy, open_=open):
    """Supply entropy to the system RNG.

    Args:
      entropy: str, some string of bytes.
      open_: optional, default open, function with open-like interface

    Raises:
      SupplyEntropyError: when the operations to supply entropy to the system fail.
    """
    if not entropy:
        raise SupplyEntropyError('no entropy supplied')
    try:
        f = open_('/dev/random', 'w')
        f.write(entropy)
        f.close()
    except __HOLE__ as e:
        raise SupplyEntropyError(str(e))
IOError
dataset/ETHPy150Open google/cauliflowervest/src/cauliflowervest/client/util.py/SupplyEntropy
8,839
def _filterargs(source):
    """Juice from a source file the four args needed by decoder."""
    argsregex = (r"}\('(.*)', *(\d+), *(\d+), *'(.*)'\."
                 r"split\('\|'\), *(\d+), *(.*)\)\)")
    args = re.search(argsregex, source, re.DOTALL).groups()
    try:
        return args[0], args[3].split('|'), int(args[1]), int(args[2])
    except __HOLE__:
        raise UnpackingError('Corrupted p.a.c.k.e.r. data.')
ValueError
dataset/ETHPy150Open mrknow/filmkodi/plugin.video.mrknow/lib/utils/unpack95High.py/_filterargs
8,840
def produceVDIDSlicesForEntireS3Dataset(s3InterfaceFactory, s3Dataset, vdm):
    s3Interface, bucketname, keyPrefix = parseS3Dataset(s3InterfaceFactory, s3Dataset)
    if s3Interface.bucketExists(bucketname) and s3Interface.keyExists(bucketname, keyPrefix):
        return produceVDIDSlicesForSingleBucketKeyPair(s3Interface, bucketname, keyPrefix, s3Dataset)
    else:
        if not s3Interface.bucketExists(bucketname):
            logging.info("Can't load dataset. Bucket '%s' doesn't exist.", bucketname)
            raise InvalidDatasetException("No bucket matching '%s'" % str(bucketname))
        keysAndSizesMatching = s3Interface.listKeysWithPrefix(bucketname, keyPrefix + "_")
        indicesKeysAndSizes = []
        for key, size, mtime in keysAndSizesMatching:
            try:
                index = int(key[len(keyPrefix)+1:])
                indicesKeysAndSizes.append((index, key, size))
            except __HOLE__:
                pass
        keysAndSizesMatching = [(key, size) for _, key, size in sorted(indicesKeysAndSizes)]
        if not keysAndSizesMatching:
            raise InvalidDatasetException(
                "No keys matching %s/%s in %s" % (bucketname, keyPrefix, s3Interface)
            )
        slices = []
        for key, _ in keysAndSizesMatching:
            slices.extend(
                produceVDIDSlicesForSingleBucketKeyPair(s3Interface, bucketname, key, s3Dataset)
            )
        return slices
ValueError
dataset/ETHPy150Open ufora/ufora/ufora/FORA/python/PythonIoTasks.py/produceVDIDSlicesForEntireS3Dataset
8,841
def train(self, message, is_spam):
    """Train database with message"""
    totals = self.storage.totals
    tokens = self.storage.tokens
    if is_spam:
        totals['spam'] += 1
    else:
        totals['ham'] += 1
    # compute hashes of uppercase words
    hashes = map(lambda x: hash(string.upper(x)), \
        self.__get_words_list(message))
    for h in hashes:
        try:
            t = tokens[h]
            if is_spam:
                t[1] += 1  # spam_count
            else:
                t[0] += 1  # ham_count
        except __HOLE__:
            # no word in storage
            spam_count = 1 if is_spam else 0
            ham_count = 1 - spam_count
            tokens[h] = [ham_count, spam_count]
    self.storage.save_if_needed()
KeyError
dataset/ETHPy150Open dchest/pybayesantispam/bayes.py/Bayes.train
8,842
def spam_rating(self, message):
    """Calculate and return spam rating of message"""
    totals = self.storage.totals
    if not totals:
        return 0.4
    total_spam = totals['spam']
    total_ham = totals['ham']
    tokens = self.storage.tokens
    if not tokens:
        return 0.4
    hashes = map(lambda x: hash(string.upper(x)), \
        self.__get_words_list(message))
    ratings = []
    for h in hashes:
        try:
            ham_count, spam_count = tokens[h]
            # ham_count *= 2  # this increases weight of ham
            if spam_count > 0 and ham_count == 0:
                rating = 0.99
            elif spam_count == 0 and ham_count > 0:
                rating = 0.01
            elif total_spam > 0 and total_ham > 0:
                ham_prob = float(ham_count) / float(total_ham)
                spam_prob = float(spam_count) / float(total_spam)
                rating = spam_prob / (ham_prob + spam_prob)
                if rating < 0.01:
                    rating = 0.01
            else:
                rating = 0.4  # normally this won't happen
        except __HOLE__:
            rating = 0.4  # never seen this word
        ratings.append(rating)
    if (len(ratings) > 20):
        # leave only 20 most "interesting" ratings:
        # 10 hightest and 10 lowest
        ratings.sort()
        ratings = ratings[:10] + ratings[-10:]
    p = reduce(operator.mul, ratings)
    omp = reduce(operator.mul, map(lambda r: 1.0-r, ratings))
    try:
        return p / (p + omp)
    except ZeroDivisionError:
        # got float underflow, not sure about rating
        return 0.5
KeyError
dataset/ETHPy150Open dchest/pybayesantispam/bayes.py/Bayes.spam_rating
8,843
def main():
    try:
        opts, args = getopt.getopt(sys.argv[1:], "shc",
                                   ["help", "train-spam", "train-ham", "check"])
        if len(opts) == 0 or len(args) == 0:
            raise getopt.error("no options or argumens")
    except getopt.error, msg:
        print(msg)
        usage()
        sys.exit(2)
    storage = Storage(args[0])
    try:
        storage.load()
    except __HOLE__:
        print("Creating database")
    bayes = Bayes(storage)
    # process options
    for o, a in opts:
        if o in ("--help", ""):
            usage()
            sys.exit(0)
        elif o in ("-s", "--train-spam"):
            bayes.train(sys.stdin.read(), True)
            print("Trained as spam")
        elif o in ("-h", "--train-ham"):
            bayes.train(sys.stdin.read(), False)
            print("Trained as ham")
        elif o in ("-c", "--check"):
            print("%.2f" % bayes.spam_rating(sys.stdin.read()))
        else:
            assert False, "unhandled option"
    storage.finish()
IOError
dataset/ETHPy150Open dchest/pybayesantispam/bayes.py/main
8,844
@register.tag
def fieldset(parser, token):  # pragma: nocover
    """ Compilation function for fieldset block tag
    Render a form fieldset
    *This is an aux tag that is not used and excluded from coverage tests*
    https://docs.djangoproject.com/en/1.8/howto/custom-template-tags/#writing-the-compilation-function
    https://docs.djangoproject.com/en/1.8/howto/custom-template-tags/#parsing-until-another-block-tag
    http://stackoverflow.com/a/30097784/940098
    :param parser: template parser
    :param token: tag name and variables
    :return: HTML string
    """
    try:
        tag_name, fieldset_name = token.split_contents()
    except __HOLE__:  # pragma: nocover
        raise template.TemplateSyntaxError("%r tag requires a single argument" % token.contents.split()[0])
    nodelist = parser.parse(('endfieldset',))
    parser.delete_first_token()
    return FieldsetNode(nodelist, fieldset_name)
ValueError
dataset/ETHPy150Open Wtower/django-ninecms/ninecms/templatetags/ninecms_extras.py/fieldset
8,845
def is_fp_closed(obj):
    """
    Checks whether a given file-like object is closed.

    :param obj:
        The file-like object to check.
    """
    try:
        # Check via the official file-like-object way.
        return obj.closed
    except AttributeError:
        pass
    try:
        # Check if the object is a container for another file-like object that
        # gets released on exhaustion (e.g. HTTPResponse).
        return obj.fp is None
    except __HOLE__:
        pass
    raise ValueError("Unable to determine whether fp is closed.")
AttributeError
dataset/ETHPy150Open kennethreitz/requests/requests/packages/urllib3/util/response.py/is_fp_closed
8,846
def run():
    parser = create_argparser()
    args = parser.parse_args()
    if args.no_color or args.no_colour:
        color.enabled = False
    if args.verbose:
        log.set_level(log.level - min(args.verbose, 2) * 10)
    if args.freeze:
        log.set_level(0)
        color.enabled = False
    exit_code = 0
    try:
        moult(**vars(args))
    except MoultCommandError as e:
        exit_code = 1
        log.fatal('Error: %s', e)
    except __HOLE__:
        exit_code = 1
        import getpass
        print('\n{}, eat a snickers'.format(getpass.getuser()))
    finally:
        if not utils.running_under_virtualenv():
            printer.output('/!\\ You are not in a Virtual Environment /!\\',
                           color=color.MAN)
    return exit_code
KeyboardInterrupt
dataset/ETHPy150Open tweekmonster/moult/moult/program.py/run
8,847
def __init__(self, argument, parent=None):
    super(ArgumentWidget, self).__init__(parent)
    self.layout = QtGui.QHBoxLayout()
    self.layout.setContentsMargins(1, 0, 1, 1)
    self.setLayout(self.layout)
    label = QtGui.QLabel(argument)
    self._label = label
    self._component_id = None
    self.layout.addWidget(label)
    self.editor = QtGui.QLineEdit()
    self.editor.setReadOnly(True)
    try:
        self.editor.setPlaceholderText("Drag a component from above")
    except __HOLE__:
        # feature added in Qt 4.7
        pass
    self.layout.addWidget(self.editor)
    self.setAcceptDrops(True)
AttributeError
dataset/ETHPy150Open glue-viz/glue/glue/dialogs/link_editor/qt/link_equation.py/ArgumentWidget.__init__
8,848
def get(self, subscript):
    try:
        return self[subscript]
    except __HOLE__:
        return None
IndexError
dataset/ETHPy150Open adurdin/platformgen/tilemap.py/TileMap.get
8,849
def run_only_if_redis_is_available(func):
    """Decorator for checking if python-redis is available.
    Note: this test will be silently skipped if python-redis is missing.
    """
    try:
        import redis
    except __HOLE__:
        redis = None
    pred = lambda: redis is not None
    return run_only(func, pred)
ImportError
dataset/ETHPy150Open BrightcoveOS/Diamond/src/collectors/redisstat/test/testredisstat.py/run_only_if_redis_is_available
8,850
def decorate_class(self, klass):
    if issubclass(klass, unittest.TestCase):
        # If it's a TestCase, we assume you want to freeze the time for the
        # tests, from setUpClass to tearDownClass
        # Use getattr as in Python 2.6 they are optional
        orig_setUpClass = getattr(klass, 'setUpClass', None)
        orig_tearDownClass = getattr(klass, 'tearDownClass', None)

        @classmethod
        def setUpClass(cls):
            self.start()
            if orig_setUpClass is not None:
                orig_setUpClass()

        @classmethod
        def tearDownClass(cls):
            if orig_tearDownClass is not None:
                orig_tearDownClass()
            self.stop()

        klass.setUpClass = setUpClass
        klass.tearDownClass = tearDownClass
        return klass
    else:
        seen = set()
        for base_klass in klass.mro():
            for (attr, attr_value) in base_klass.__dict__.items():
                if attr.startswith('_') or attr in seen:
                    continue
                seen.add(attr)
                if not callable(attr_value) or inspect.isclass(attr_value):
                    continue
                try:
                    setattr(klass, attr, self(attr_value))
                except (AttributeError, __HOLE__):
                    # Sometimes we can't set this for built-in types and custom callables
                    continue
        return klass
TypeError
dataset/ETHPy150Open spulec/freezegun/freezegun/api.py/_freeze_time.decorate_class
8,851
def start(self):
    if self.tick:
        time_to_freeze = TickingDateTimeFactory(self.time_to_freeze, real_datetime.now())
    else:
        time_to_freeze = FrozenDateTimeFactory(self.time_to_freeze)

    # Change the modules
    datetime.datetime = FakeDatetime
    datetime.date = FakeDate
    fake_time = FakeTime(time_to_freeze, time.time)
    fake_localtime = FakeLocalTime(time_to_freeze, time.localtime)
    fake_gmtime = FakeGMTTime(time_to_freeze, time.gmtime)
    fake_strftime = FakeStrfTime(time_to_freeze, time.strftime)
    time.time = fake_time
    time.localtime = fake_localtime
    time.gmtime = fake_gmtime
    time.strftime = fake_strftime
    copyreg.dispatch_table[real_datetime] = pickle_fake_datetime
    copyreg.dispatch_table[real_date] = pickle_fake_date

    # Change any place where the module had already been imported
    to_patch = [
        ('real_date', real_date, 'FakeDate', FakeDate),
        ('real_datetime', real_datetime, 'FakeDatetime', FakeDatetime),
        ('real_gmtime', real_gmtime, 'FakeGMTTime', fake_gmtime),
        ('real_localtime', real_localtime, 'FakeLocalTime', fake_localtime),
        ('real_strftime', real_strftime, 'FakeStrfTime', fake_strftime),
        ('real_time', real_time, 'FakeTime', fake_time),
    ]
    real_names = tuple(real_name for real_name, real, fake_name, fake in to_patch)
    self.fake_names = tuple(fake_name for real_name, real, fake_name, fake in to_patch)
    self.reals = dict((id(fake), real) for real_name, real, fake_name, fake in to_patch)
    fakes = dict((id(real), fake) for real_name, real, fake_name, fake in to_patch)
    add_change = self.undo_changes.append

    # Save the current loaded modules
    self.modules_at_start = set(sys.modules.keys())

    for mod_name, module in list(sys.modules.items()):
        if mod_name is None or module is None:
            continue
        elif mod_name.startswith(self.ignore):
            continue
        elif (not hasattr(module, "__name__") or module.__name__ in ('datetime', 'time')):
            continue
        for module_attribute in dir(module):
            if module_attribute in real_names:
                continue
            try:
                attribute_value = getattr(module, module_attribute)
            except (ImportError, __HOLE__, TypeError):
                # For certain libraries, this can result in ImportError(_winreg) or AttributeError (celery)
                continue
            fake = fakes.get(id(attribute_value))
            if fake:
                setattr(module, module_attribute, fake)
                add_change((module, module_attribute, attribute_value))

    datetime.datetime.times_to_freeze.append(time_to_freeze)
    datetime.datetime.tz_offsets.append(self.tz_offset)
    datetime.date.dates_to_freeze.append(time_to_freeze)
    datetime.date.tz_offsets.append(self.tz_offset)
    return time_to_freeze
AttributeError
dataset/ETHPy150Open spulec/freezegun/freezegun/api.py/_freeze_time.start
8,852
def stop(self):
    datetime.datetime.times_to_freeze.pop()
    datetime.datetime.tz_offsets.pop()
    datetime.date.dates_to_freeze.pop()
    datetime.date.tz_offsets.pop()

    if not datetime.datetime.times_to_freeze:
        datetime.datetime = real_datetime
        datetime.date = real_date
        copyreg.dispatch_table.pop(real_datetime)
        copyreg.dispatch_table.pop(real_date)
        for module, module_attribute, original_value in self.undo_changes:
            setattr(module, module_attribute, original_value)
        self.undo_changes = []

        # Restore modules loaded after start()
        modules_to_restore = set(sys.modules.keys()) - self.modules_at_start
        self.modules_at_start = set()
        for mod_name in modules_to_restore:
            module = sys.modules.get(mod_name, None)
            if mod_name is None or module is None:
                continue
            elif mod_name.startswith(self.ignore):
                continue
            elif (not hasattr(module, "__name__") or module.__name__ in ('datetime', 'time')):
                continue
            for module_attribute in dir(module):
                if module_attribute in self.fake_names:
                    continue
                try:
                    attribute_value = getattr(module, module_attribute)
                except (ImportError, __HOLE__, TypeError):
                    # For certain libraries, this can result in ImportError(_winreg) or AttributeError (celery)
                    continue
                real = self.reals.get(id(attribute_value))
                if real:
                    setattr(module, module_attribute, real)

    time.time = time.time.previous_time_function
    time.gmtime = time.gmtime.previous_gmtime_function
    time.localtime = time.localtime.previous_localtime_function
    time.strftime = time.strftime.previous_strftime_function
AttributeError
dataset/ETHPy150Open spulec/freezegun/freezegun/api.py/_freeze_time.stop
8,853
def freeze_time(time_to_freeze=None, tz_offset=0, ignore=None, tick=False):
    # Python3 doesn't have basestring, but it does have str.
    try:
        string_type = basestring
    except __HOLE__:
        string_type = str
    if not isinstance(time_to_freeze, (string_type, datetime.date)):
        raise TypeError(('freeze_time() expected None, a string, date instance, or '
                         'datetime instance, but got type {0}.').format(type(time_to_freeze)))
    if tick and not _is_cpython:
        raise SystemError('Calling freeze_time with tick=True is only compatible with CPython')
    if ignore is None:
        ignore = []
    ignore.append('six.moves')
    ignore.append('django.utils.six.moves')
    return _freeze_time(time_to_freeze, tz_offset, ignore, tick)

# Setup adapters for sqlite
NameError
dataset/ETHPy150Open spulec/freezegun/freezegun/api.py/freeze_time
8,854
def validate_properties(self, properties, category=None, status=None):
    """
    Validates the properties and adds error messages to self._errors

    Parameter
    ---------
    properties : dict
        Contribution properties
    category : geokey.categories.models.Category
        Category the properties are validated against
    status : str
        Status for the contribution
    """
    errors = []
    if self.instance:
        status = status or self.instance.status
        if self.instance.properties:
            update = self.instance.properties.copy()
            update.update(properties)
            properties = update
    else:
        status = status or category.default_status
    properties = self.replace_null(properties)
    try:
        if status == 'draft':
            Observation.validate_partial(category, properties)
        else:
            Observation.validate_full(category, properties)
    except __HOLE__, e:
        errors.append(e)
    self._validated_data['properties'] = properties
    self._validated_data['meta']['status'] = status
    if errors:
        self._errors['properties'] = errors
ValidationError
dataset/ETHPy150Open ExCiteS/geokey/geokey/contributions/serializers.py/ContributionSerializer.validate_properties
8,855
def get_thumbnail_url(self, obj):
    """
    Creates and returns a thumbnail for the MediaFile object

    Parameter
    ---------
    obj : geokey.contributions.models.MediaFile
        The instance that is serialised

    Returns
    -------
    str
        The url to embed thumbnails on client side
    """
    if isinstance(obj, ImageFile):
        # Some of the imported image files in the original community maps
        # seem to be broken. The error thrown when the image can not be
        # read is caught here.
        try:
            return self._get_thumb(obj.image).url
        except (
            __HOLE__,
            InvalidImageFormatError
        ):
            return ''
    elif isinstance(obj, VideoFile):
        if obj.thumbnail:
            # thumbnail has been downloaded, return the link
            return self._get_thumb(obj.thumbnail).url
        request = requests.get(
            'http://img.youtube.com/vi/%s/0.jpg' % obj.youtube_id,
            stream=True
        )
        if request.status_code != requests.codes.ok:
            # Image not found, return placeholder thumbnail
            return '/static/img/play.png'
        lf = tempfile.NamedTemporaryFile()
        # Read the streamed image in sections
        for block in request.iter_content(1024 * 8):
            # If no more file then stop
            if not block:
                break
            # Write image block to temporary file
            lf.write(block)
        file_name = obj.youtube_id + '.jpg'
        obj.thumbnail.save(file_name, files.File(lf))
        from PIL import Image
        w, h = Image.open(obj.thumbnail).size
        thumb = self._get_thumb(obj.thumbnail, size=(h, h))
        obj.thumbnail.save(file_name, thumb)
        return self._get_thumb(obj.thumbnail).url
    elif isinstance(obj, AudioFile):
        return '/static/img/play.png'
IOError
dataset/ETHPy150Open ExCiteS/geokey/geokey/contributions/serializers.py/FileSerializer.get_thumbnail_url
8,856
def test_signature_inspection_2args_incorrect_shapes(self):
    # both _pdf and _cdf defined, but shapes are inconsistent: raises
    try:
        _distr3_gen(name='dummy')
    except __HOLE__:
        pass
    else:
        raise AssertionError('TypeError not raised.')
TypeError
dataset/ETHPy150Open scipy/scipy/scipy/stats/tests/test_distributions.py/TestSubclassingNoShapes.test_signature_inspection_2args_incorrect_shapes
8,857
def safelower(s):
    """
    As string.lower(), but return `s` if something goes wrong.
    """
    try:
        return s.lower()
    except __HOLE__:
        return s
AttributeError
dataset/ETHPy150Open twisted/ldaptor/ldaptor/entryhelpers.py/safelower
8,858
def render(self, data, template=None):
    if not isinstance(data, (list, tuple)):
        data = [data]
    if len(data) == 0:
        return
    try:
        writer = CSVKitWriter(sys.stdout)
        for i, row in enumerate(data):
            if i == 0:
                writer.writerow(row.serialize().keys())
            writer.writerow(row.serialize().values())
    except __HOLE__:
        # Handle pipes that could close before output is done.
        # See: http://stackoverflow.com/questions/15793886/
        try:
            sys.stdout.close()
        except IOError:
            pass
        try:
            sys.stderr.close()
        except IOError:
            pass
IOError
dataset/ETHPy150Open newsdev/nyt-clerk/clerk/ext_csv.py/CSVOutputHandler.render
8,859
def getNodeNumber(self, t):
    try:
        return self.nodeToNumberMap[t]
    except __HOLE__:
        self.nodeToNumberMap[t] = self.nodeNumber
        self.nodeNumber += 1
        return self.nodeNumber - 1
KeyError
dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/_internal/antlr3/dottreegen.py/DOTTreeGenerator.getNodeNumber
8,860
def _update_changed_fields(self, node, rpc_node):
    """Update rpc_node based on changed fields in a node.
    """
    for field in objects.Node.fields:
        try:
            patch_val = getattr(node, field)
        except __HOLE__:
            # Ignore fields that aren't exposed in the API
            continue
        if patch_val == wtypes.Unset:
            patch_val = None
        if rpc_node[field] != patch_val:
            rpc_node[field] = patch_val
AttributeError
dataset/ETHPy150Open openstack/ironic/ironic/api/controllers/v1/node.py/NodesController._update_changed_fields
8,861
def remove_router_references(self, adapter, address=None):
    """Add/update references to routers."""
    if _debug: NetworkServiceAccessPoint._debug("remove_router_references %r %r", adapter, address)

    delrlist = []
    delnlist = []

    # scan through the dictionary of router references
    for rkey in self.routers.keys():
        # rip apart the key
        radapter, raddress = rkey

        # pick all references on the adapter, optionally limited to a specific address
        match = radapter is adapter
        if match and address is not None:
            match = (raddress == address)
        if not match:
            continue

        # save it for deletion
        delrlist.append(rkey)
        delnlist.extend(self.routers[rkey].networks)
    if _debug:
        NetworkServiceAccessPoint._debug(" - delrlist: %r", delrlist)
        NetworkServiceAccessPoint._debug(" - delnlist: %r", delnlist)

    # delete the entries
    for rkey in delrlist:
        try:
            del self.routers[rkey]
        except KeyError:
            if _debug: NetworkServiceAccessPoint._debug(" - rkey not in self.routers: %r", rkey)
    for nkey in delnlist:
        try:
            del self.networks[nkey]
        except __HOLE__:
            if _debug: NetworkServiceAccessPoint._debug(" - nkey not in self.networks: %r", rkey)

#-----
KeyError
dataset/ETHPy150Open JoelBender/bacpypes/py25/bacpypes/netservice.py/NetworkServiceAccessPoint.remove_router_references
8,862
def sync_engine(func):
    """Queued version of the ``tornado.gen.engine``.

    Prevents calling of the wrapped function if there is already one
    instance of the function running asynchronously. Function will be
    called synchronously without blocking io_loop.

    This decorator can only be used on class methods, as it requires
    ``self`` to make sure that calls are scheduled on instance level
    (connection) instead of class level (method).
    """
    @functools.wraps(func)
    def wrapper(self, *args, **kwargs):
        # Run method
        def run(args, kwargs):
            gen = func(self, *args, **kwargs)
            if isinstance(gen, types.GeneratorType):
                data.runner = SyncRunner(gen, finished)
                data.runner.run()
            else:
                return gen

        # Completion callback
        def finished():
            data.runner = None
            try:
                args, kwargs = data.queue.popleft()
                run(args, kwargs)
            except __HOLE__:
                pass

        # Get call queue for this instance and wrapped method
        queue = getattr(self, '_call_queue', None)
        if queue is None:
            queue = self._call_queue = dict()
        data = queue.get(func, None)
        if data is None:
            queue[func] = data = CallQueue()
        # If there's something running, queue call
        if data.runner is not None:
            data.queue.append((args, kwargs))
        else:
            # Otherwise run it
            run(args, kwargs)
    return wrapper
IndexError
dataset/ETHPy150Open mrjoes/tornadio2/tornadio2/gen.py/sync_engine
8,863
def test_or(self):
    i = self.s.union(self.otherword)
    self.assertEqual(self.s | set(self.otherword), i)
    self.assertEqual(self.s | frozenset(self.otherword), i)
    try:
        self.s | self.otherword
    except __HOLE__:
        pass
    else:
        self.fail("s|t did not screen-out general iterables")
TypeError
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/test/test_set.py/TestJointOps.test_or
8,864
def test_and(self):
    i = self.s.intersection(self.otherword)
    self.assertEqual(self.s & set(self.otherword), i)
    self.assertEqual(self.s & frozenset(self.otherword), i)
    try:
        self.s & self.otherword
    except __HOLE__:
        pass
    else:
        self.fail("s&t did not screen-out general iterables")
TypeError
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/test/test_set.py/TestJointOps.test_and
8,865
def test_sub(self):
    i = self.s.difference(self.otherword)
    self.assertEqual(self.s - set(self.otherword), i)
    self.assertEqual(self.s - frozenset(self.otherword), i)
    try:
        self.s - self.otherword
    except __HOLE__:
        pass
    else:
        self.fail("s-t did not screen-out general iterables")
TypeError
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/test/test_set.py/TestJointOps.test_sub
8,866
def test_xor(self):
    i = self.s.symmetric_difference(self.otherword)
    self.assertEqual(self.s ^ set(self.otherword), i)
    self.assertEqual(self.s ^ frozenset(self.otherword), i)
    try:
        self.s ^ self.otherword
    except __HOLE__:
        pass
    else:
        self.fail("s^t did not screen-out general iterables")
TypeError
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/test/test_set.py/TestJointOps.test_xor
8,867
def test_remove_keyerror_unpacking(self):
    # bug: www.python.org/sf/1576657
    for v1 in ['Q', (1,)]:
        try:
            self.s.remove(v1)
        except __HOLE__, e:
            v2 = e.args[0]
            self.assertEqual(v1, v2)
        else:
            self.fail()
KeyError
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/test/test_set.py/TestSet.test_remove_keyerror_unpacking
8,868
def test_remove_keyerror_set(self):
    key = self.thetype([3, 4])
    try:
        self.s.remove(key)
    except __HOLE__ as e:
        self.assertTrue(e.args[0] is key,
                        "KeyError should be {0}, not {1}".format(key, e.args[0]))
    else:
        self.fail()
KeyError
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/test/test_set.py/TestSet.test_remove_keyerror_set
8,869
def test_changingSizeWhileIterating(self):
    s = set([1,2,3])
    try:
        for i in s:
            s.update([4])
    except __HOLE__:
        pass
    else:
        self.fail("no exception when changing size during iteration")

#==============================================================================
RuntimeError
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/test/test_set.py/TestExceptionPropagation.test_changingSizeWhileIterating
8,870
def test_update_operator(self):
    try:
        self.set |= self.other
    except __HOLE__:
        pass
    else:
        self.fail("expected TypeError")
TypeError
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/test/test_set.py/TestOnlySetsInBinaryOps.test_update_operator
8,871
def test_intersection_update_operator(self):
    try:
        self.set &= self.other
    except __HOLE__:
        pass
    else:
        self.fail("expected TypeError")
TypeError
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/test/test_set.py/TestOnlySetsInBinaryOps.test_intersection_update_operator
8,872
def test_sym_difference_update_operator(self):
    try:
        self.set ^= self.other
    except __HOLE__:
        pass
    else:
        self.fail("expected TypeError")
TypeError
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/test/test_set.py/TestOnlySetsInBinaryOps.test_sym_difference_update_operator
8,873
def test_difference_update_operator(self):
    try:
        self.set -= self.other
    except __HOLE__:
        pass
    else:
        self.fail("expected TypeError")
TypeError
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/test/test_set.py/TestOnlySetsInBinaryOps.test_difference_update_operator
8,874
def powerset(U):
    """Generates all subsets of a set or sequence U."""
    U = iter(U)
    try:
        x = frozenset([U.next()])
        for S in powerset(U):
            yield S
            yield S | x
    except __HOLE__:
        yield frozenset()
StopIteration
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/test/test_set.py/powerset
8,875
def _token_request(self, request_body):
    """
    Return an updated token from a token request body.

    :param request_body: A dictionary of values to send in the body of the
                         token request.
    :type request_body: ``dict``

    :return: A dictionary with updated token information
    :rtype: ``dict``
    """
    data = urlencode(request_body)
    try:
        response = self.request('/o/oauth2/token', method='POST', data=data)
    except __HOLE__:
        raise GoogleAuthError('Invalid authorization response, please '
                              'check your credentials and time drift.')
    token_info = response.object
    if 'expires_in' in token_info:
        expire_time = _utcnow() + datetime.timedelta(
            seconds=token_info['expires_in'])
        token_info['expire_time'] = _utc_timestamp(expire_time)
    return token_info
AttributeError
dataset/ETHPy150Open apache/libcloud/libcloud/common/google.py/GoogleBaseAuthConnection._token_request
8,876
def __init__(self, user_id, key, *args, **kwargs):
    """
    Check to see if PyCrypto is available, and convert key file path
    into a key string if the key is in a file.

    :param user_id: Email address to be used for Service Account
                    authentication.
    :type user_id: ``str``

    :param key: The RSA Key or path to file containing the key.
    :type key: ``str``
    """
    if SHA256 is None:
        raise GoogleAuthError('PyCrypto library required for '
                              'Service Account Authentication.')
    # Check to see if 'key' is a file and read the file if it is.
    if key.find("PRIVATE KEY---") == -1:
        # key is a file
        keypath = os.path.expanduser(key)
        is_file_path = os.path.exists(keypath) and os.path.isfile(keypath)
        if not is_file_path:
            raise ValueError("Missing (or not readable) key "
                             "file: '%s'" % key)
        with open(keypath, 'r') as f:
            contents = f.read()
        try:
            key = json.loads(contents)
            key = key['private_key']
        except __HOLE__:
            key = contents
    super(GoogleServiceAcctAuthConnection, self).__init__(
        user_id, key, *args, **kwargs)
ValueError
dataset/ETHPy150Open apache/libcloud/libcloud/common/google.py/GoogleServiceAcctAuthConnection.__init__
8,877
def _get_token_from_file(self):
    """
    Read credential file and return token information.
    Mocked in libcloud.test.common.google.GoogleTestCase.

    :return: Token information dictionary, or None
    :rtype: ``dict`` or ``None``
    """
    token = None
    filename = os.path.realpath(os.path.expanduser(self.credential_file))
    try:
        with open(filename, 'r') as f:
            data = f.read()
        token = json.loads(data)
    except __HOLE__:
        pass
    return token
IOError
dataset/ETHPy150Open apache/libcloud/libcloud/common/google.py/GoogleOAuth2Credential._get_token_from_file
8,878
def mkdirs(path):
    try:
        makedirs(path)
    except __HOLE__ as err:
        if err.errno != EEXIST:
            raise

# we've been playing fast and loose with kwargs, but the swiftclient isn't
# going to accept any old thing
OSError
dataset/ETHPy150Open saltstack/salt/salt/utils/openstack/swift.py/mkdirs
8,879
def skip_unless_module(module):
    def _inner(fun):
        @wraps(fun)
        def __inner(*args, **kwargs):
            try:
                importlib.import_module(module)
            except __HOLE__:
                raise SkipTest('Does not have %s' % (module, ))
            return fun(*args, **kwargs)
        return __inner
    return _inner

# -- adds assertWarns from recent unittest2, not in Python 2.7.
ImportError
dataset/ETHPy150Open celery/cell/cell/tests/utils.py/skip_unless_module
8,880
def __exit__(self, exc_type, exc_value, tb):
    self.warnings_manager.__exit__(exc_type, exc_value, tb)
    if exc_type is not None:
        # let unexpected exceptions pass through
        return
    try:
        exc_name = self.expected.__name__
    except __HOLE__:
        exc_name = str(self.expected)
    first_matching = None
    for m in self.warnings:
        w = m.message
        if not isinstance(w, self.expected):
            continue
        if first_matching is None:
            first_matching = w
        if (self.expected_regex is not None and
                not self.expected_regex.search(str(w))):
            continue
        # store warning for later retrieval
        self.warning = w
        self.filename = m.filename
        self.lineno = m.lineno
        return
    # Now we simply try to choose a helpful failure message
    if first_matching is not None:
        raise self.failureException(
            '%r does not match %r' % (
                self.expected_regex.pattern, str(first_matching)))
    if self.obj_name:
        raise self.failureException('%s not triggered by %s' % (exc_name, self.obj_name))
    else:
        raise self.failureException('%s not triggered' % exc_name)
AttributeError
dataset/ETHPy150Open celery/cell/cell/tests/utils.py/_AssertWarnsContext.__exit__
8,881
def assertItemsEqual(self, expected_seq, actual_seq, msg=None): try: expected = sorted(expected_seq) actual = sorted(actual_seq) except __HOLE__: # Unsortable items (example: set(), complex(), ...) expected = list(expected_seq) actual = list(actual_seq) missing, unexpected = unorderable_list_difference( expected, actual) else: return self.assertSequenceEqual(expected, actual, msg=msg) errors = [] if missing: errors.append('Expected, but missing:\n %s' % ( safe_repr(missing))) if unexpected: errors.append('Unexpected, but present:\n %s' % ( safe_repr(unexpected))) if errors: standardMsg = '\n'.join(errors) self.fail(self._formatMessage(msg, standardMsg))
TypeError
dataset/ETHPy150Open celery/cell/cell/tests/utils.py/Case.assertItemsEqual
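What the TypeError fallback above buys: order-insensitive comparison that still works for unsortable items. A simplified standalone sketch of the same decision — the real method additionally reports missing and unexpected elements:

def items_equal(expected_seq, actual_seq):
    try:
        return sorted(expected_seq) == sorted(actual_seq)
    except TypeError:                        # e.g. complex numbers on Python 3
        expected, actual = list(expected_seq), list(actual_seq)
        for item in expected:
            try:
                actual.remove(item)
            except ValueError:
                return False
        return not actual                    # empty means same multiset

assert items_equal([3, 1, 2], [2, 3, 1])
assert items_equal([1 + 2j, 'x'], ['x', 1 + 2j])   # unsortable mix
assert not items_equal([1, 1], [1])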
8,882
@contextmanager
def replace_module_value(module, name, value=None):
    has_prev = hasattr(module, name)
    prev = getattr(module, name, None)
    if value:
        setattr(module, name, value)
    else:
        try:
            delattr(module, name)
        except __HOLE__:
            pass
    yield
    if prev is not None:
        # restore on the target module (the upstream source wrote
        # setattr(sys, name, prev) here, which restores onto the wrong
        # object whenever module is not sys)
        setattr(module, name, prev)
    if not has_prev:
        try:
            delattr(module, name)
        except AttributeError:
            pass
AttributeError
dataset/ETHPy150Open celery/cell/cell/tests/utils.py/replace_module_value
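A usage sketch for the context manager above; the restore on exit depends on setattr targeting the module being patched:

import math

original = math.pi
with replace_module_value(math, 'pi', 3):
    assert math.pi == 3        # swapped for the duration of the block
assert math.pi == original     # restored on exit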
8,883
@contextmanager def mock_module(*names): prev = {} class MockModule(ModuleType): def __getattr__(self, attr): setattr(self, attr, Mock()) return ModuleType.__getattribute__(self, attr) mods = [] for name in names: try: prev[name] = sys.modules[name] except KeyError: pass mod = sys.modules[name] = MockModule(name) mods.append(mod) try: yield mods finally: for name in names: try: sys.modules[name] = prev[name] except __HOLE__: try: del(sys.modules[name]) except KeyError: pass
KeyError
dataset/ETHPy150Open celery/cell/cell/tests/utils.py/mock_module
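A usage sketch for mock_module above: it fabricates importable module objects whose attribute access auto-creates Mock instances, then puts sys.modules back. It assumes Mock comes from the mock library (or unittest.mock), as the snippet implies; the module name is hypothetical:

with mock_module('fake_dependency') as (fake,):
    import fake_dependency                       # resolves to the MockModule
    fake_dependency.connect('localhost')         # attribute auto-created
    fake_dependency.connect.assert_called_with('localhost')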
8,884
def _normalise_repo_groups(self, option): """Resolve inherited memberships""" data = self.repo_groups tainted = data.keys() round_ = 0 while tainted: round_ += 1 logging.debug('Normalise {0}: round {1}'.format(option, round_)) did_work = False for item in tainted: try: members = data[item][option] except __HOLE__: logging.debug('Removed empty item {0}'.format(item)) tainted.remove(item) did_work = True continue unresolved = [x for x in members if x.startswith('@')] if len(unresolved) == 0: logging.debug('Nothing to resolve in {0}'.format(item)) tainted.remove(item) did_work = True continue resolved = [] dirty = False for member in unresolved: mem = member.lstrip('@') try: new_members = data[mem][option] except KeyError: raise UnknownDependencyException(member) for new_mem in new_members: if new_mem.startswith('@'): # Unresolved membership in upstream group dirty = True break resolved += new_members if not dirty: # No dependencies remain - replace resolved groups for member in unresolved: members.remove(member) members += resolved data[item][option] = members did_work = True if did_work is False: raise CyclicalDependencyException(','.join(tainted)) self.repo_groups = data
KeyError
dataset/ETHPy150Open aelse/cpthook/cpthook.py/CptHookConfig._normalise_repo_groups
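The shape of the data the resolver above iterates to a fixed point (hypothetical groups): '@name' entries reference other repo groups and are replaced by that group's members until no '@' references remain; a round that makes no progress signals a cycle.

repo_groups = {
    'base':  {'members': ['repo-a', 'repo-b']},
    'extra': {'members': ['@base', 'repo-c']},
}
# After _normalise_repo_groups('members'), 'extra' resolves to
#   {'members': ['repo-c', 'repo-a', 'repo-b']}
# whereas 'x -> @y' plus 'y -> @x' would leave both tainted forever and
# raise CyclicalDependencyException.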
8,885
def _aggregate_hooks(self, hook_groups): if not hasattr(hook_groups, '__iter__'): # Check for __iter__ attribute rather than iter(), # which also captures strings. raise ValueError('hook_groups must be iterable') hooks = {} logging.debug('Aggregating hooks for hook groups {0}'.format( hook_groups)) for hook_group in hook_groups: logging.debug('Evaluating hook group {0}'.format(hook_group)) try: hg = self.hook_groups[hook_group] logging.debug('hg {0} -> {1}'.format(hook_group, hg)) except __HOLE__: raise NoSuchHookGroupException(hook_group) for hook_type, hook_list in hg.items(): if hook_type not in hooks: hooks[hook_type] = hook_list else: for hook in hook_list: if hook not in hooks[hook_type]: hooks[hook_type].append(hook) return hooks
KeyError
dataset/ETHPy150Open aelse/cpthook/cpthook.py/CptHookConfig._aggregate_hooks
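A shape sketch for the aggregation above (hypothetical config): each hook group maps hook types to script lists, and duplicates across groups are dropped while first-seen order within each list is kept:

hook_groups = {
    'lint':   {'pre-receive': ['check-style']},
    'deploy': {'pre-receive': ['check-style'], 'post-receive': ['push-live']},
}
# _aggregate_hooks(['lint', 'deploy']) would yield
#   {'pre-receive': ['check-style'], 'post-receive': ['push-live']}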
8,886
def repo_group_membership(self, repo): """Returns list of repo group membership for repo""" membership = [] for repo_group, data in self.repo_groups.items(): try: group_members = data['members'] except __HOLE__: continue if repo in group_members: if repo not in membership: membership.append(repo_group) # Add global repo group if repo is in any other group # and the global membership group exists if membership and '*' in self.repo_groups: membership.append('*') logging.debug('{0} is a member of {1}'.format(repo, membership)) return membership
KeyError
dataset/ETHPy150Open aelse/cpthook/cpthook.py/CptHookConfig.repo_group_membership
8,887
def repo_group_hook_groups(self, repo): """Returns list of hook groups applicable to repo""" # 1. Get repo group membership for repo repo_groups = self.repo_group_membership(repo) # 2. Combine lists of hook groups from repo groups membership = [] for repo_group in repo_groups: try: hook_groups = self.repo_groups[repo_group]['hooks'] except __HOLE__: logging.debug('No hook groups in {0}'.format(repo_group)) continue for hook_group in hook_groups: if hook_group not in membership: membership.append(hook_group) if not len(membership): logging.debug('No hook groups for {0}'.format(repo)) return membership
KeyError
dataset/ETHPy150Open aelse/cpthook/cpthook.py/CptHookConfig.repo_group_hook_groups
8,888
def repos(self): """Returns list of known repos""" rg = self.repo_groups members_ = map(lambda x: x['members'], rg.values()) try: members = reduce(lambda x, y: list(set(x + y)), members_) except __HOLE__: members = [] return list(members)
TypeError
dataset/ETHPy150Open aelse/cpthook/cpthook.py/CptHookConfig.repos
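Why the TypeError guard above exists: reduce() over an empty sequence with no initializer raises TypeError, which corresponds exactly to the no-repo-groups-configured case. reduce is a builtin on Python 2 and lives in functools on both 2.6+ and 3:

from functools import reduce

member_lists = [['a', 'b'], ['b', 'c']]
union = reduce(lambda x, y: list(set(x + y)), member_lists)
assert sorted(union) == ['a', 'b', 'c']

try:
    reduce(lambda x, y: list(set(x + y)), [])
except TypeError:          # mirrors the fallback branch in repos()
    union = []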
8,889
def _is_git_repo(self, path): if not os.path.isdir(path): return False orig_dir = os.getcwd() try: os.chdir(path) except __HOLE__: return False with open('/dev/null', 'wb') as devnull: ret = subprocess.call(['git', 'rev-parse'], stderr=devnull) os.chdir(orig_dir) if ret != 0: return False else: return True
OSError
dataset/ETHPy150Open aelse/cpthook/cpthook.py/CptHook._is_git_repo
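A chdir-free variant of the check above, offered as a sketch rather than the original's behaviour: git rev-parse exits non-zero outside a repository, and subprocess.call accepts cwd directly, which avoids mutating the process working directory:

import os
import subprocess

def is_git_repo(path):
    if not os.path.isdir(path):
        return False
    with open(os.devnull, 'wb') as devnull:
        return subprocess.call(['git', 'rev-parse'],
                               cwd=path, stderr=devnull) == 0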
8,890
def clean_pmid(input_pmid): try: pmid = remove_nonprinting_characters(input_pmid) pmid = pmid.lower().replace("pmid:", "") match = re.match("^(\d{3,15})$", pmid) if match: pmid = match.group(1) else: pmid = None except __HOLE__: pmid = None if not pmid: logger.debug(u"MALFORMED PMID {input_pmid}".format( input_pmid=input_pmid)) return pmid
AttributeError
dataset/ETHPy150Open Impactstory/total-impact-webapp/totalimpact/providers/pubmed.py/clean_pmid
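Illustrative inputs for the normaliser above, assuming remove_nonprinting_characters passes clean ASCII through unchanged (an assumption; the helper is defined elsewhere in the package):

assert clean_pmid('PMID:22996932') == '22996932'            # prefix stripped
assert clean_pmid('22996932') == '22996932'
assert clean_pmid('10.1371/journal.pone.0000000') is None   # fails digit check
# Non-string input is expected to land in the AttributeError branch and
# likewise return None.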
8,891
def _extract_biblio_efetch(self, page, id=None): if "ArticleDate" in page: dict_of_keylists = {"year": ["PubmedArticleSet", "MedlineCitation", "Article", "ArticleDate", "Year"], "month": ["PubmedArticleSet", "MedlineCitation", "Article", "ArticleDate", "Month"], "day": ["PubmedArticleSet", "MedlineCitation", "Article", "ArticleDate", "Day"], "title": ["PubmedArticleSet", "MedlineCitation", "Article", "ArticleTitle"], "abstract": ["PubmedArticleSet", "MedlineCitation", "Article", "Abstract", "AbstractText"], "issn": ["PubmedArticleSet", "MedlineCitation", "Article", "Journal", "ISSN"], "journal": ["PubmedArticleSet", "MedlineCitation", "Article", "Journal", "Title"], } else: dict_of_keylists = {"year": ["PubmedArticleSet", "MedlineCitation", "Article", "PubDate", "Year"], "month": ["PubmedArticleSet", "MedlineCitation", "Article", "PubDate", "Month"], "day": ["PubmedArticleSet", "MedlineCitation", "Article", "PubDate", "Day"], "title": ["PubmedArticleSet", "MedlineCitation", "Article", "ArticleTitle"], "abstract": ["PubmedArticleSet", "MedlineCitation", "Article", "Abstract", "AbstractText"], "issn": ["PubmedArticleSet", "MedlineCitation", "Article", "Journal", "ISSN"], "journal": ["PubmedArticleSet", "MedlineCitation", "Article", "Journal", "Title"], } biblio_dict = provider._extract_from_xml(page, dict_of_keylists) dom_authors = provider._find_all_in_xml(page, "LastName") try: biblio_dict["authors"] = ", ".join([author.firstChild.data for author in dom_authors]) except (AttributeError, TypeError): pass mesh_list = provider._find_all_in_xml(page, "DescriptorName") try: if mesh_list: biblio_dict["keywords"] = "; ".join([mesh_term.firstChild.data for mesh_term in mesh_list]) except (AttributeError, __HOLE__): pass try: biblio_dict["issn"] = biblio_dict["issn"].replace("-", "") except (AttributeError, KeyError): pass try: datetime_published = datetime.datetime(year=biblio_dict["year"], month=biblio_dict["month"], day=biblio_dict["day"]) biblio_dict["date"] = datetime_published.isoformat() biblio_dict["year"] = re.sub("\D", "", str(biblio_dict["year"])) del biblio_dict["month"] del biblio_dict["day"] except (AttributeError, TypeError, KeyError): logger.debug(u"%20s don't have full date information %s" % (self.provider_name, id)) pass try: biblio_dict["year"] = str(biblio_dict["year"]) except (KeyError): pass return biblio_dict
TypeError
dataset/ETHPy150Open Impactstory/total-impact-webapp/totalimpact/providers/pubmed.py/Pubmed._extract_biblio_efetch
8,892
def _extract_aliases_from_pmid(self, page, pmid): (doc, lookup_function) = provider._get_doc_from_xml(page) doi = None pmc = None try: articleidlist = doc.getElementsByTagName("ArticleIdList")[0] for articleid in articleidlist.getElementsByTagName("ArticleId"): if (articleid.getAttribute("IdType") == u"doi"): doi = articleid.firstChild.data if (articleid.getAttribute("IdType") == u"pmc"): pmc = articleid.firstChild.data if not doi: #give it another try, in another part of the xml # see http://eutils.ncbi.nlm.nih.gov/entrez/eutils/efetch.fcgi?db=pubmed&id=23682040&retmode=xml&email=team@total-impact.org&tool=total-impact article = doc.getElementsByTagName("Article")[0] for elocationid in article.getElementsByTagName("ELocationID"): if (elocationid.getAttribute("EIdType") == u"doi"): if (elocationid.getAttribute("ValidYN") == u"Y"): doi = elocationid.firstChild.data except (IndexError, __HOLE__): pass #sometimes no doi, or PMID has a doi-fragment in the doi field: aliases_list = [] if doi: if "10." in doi: aliases_list += [("doi", doi), ("url", "http://dx.doi.org/"+doi)] if pmc: aliases_list += [("pmc", pmc), ("url", "http://www.ncbi.nlm.nih.gov/pmc/articles/"+pmc)] return aliases_list
TypeError
dataset/ETHPy150Open Impactstory/total-impact-webapp/totalimpact/providers/pubmed.py/Pubmed._extract_aliases_from_pmid
8,893
def aliases(self, aliases, provider_url_template=None, cache_enabled=True):
    new_aliases = []
    for alias in aliases:
        (namespace, nid) = alias

        if (namespace == "doi"):
            aliases_from_doi_url = self.aliases_from_doi_url_template %nid
            page = self._get_eutils_page(aliases_from_doi_url, nid, cache_enabled)
            if page:
                new_aliases += self._extract_aliases_from_doi(page, nid)

        if (namespace == "pmid"):
            # look up doi and other things on pubmed page
            aliases_from_pmid_url = self.aliases_from_pmid_url_template %nid
            page = self._get_eutils_page(aliases_from_pmid_url, nid, cache_enabled)
            if page:
                new_aliases += self._extract_aliases_from_pmid(page, nid)
                biblio = self._extract_biblio_efetch(page, nid)
                if biblio:
                    new_aliases += [("biblio", biblio)]

            # also, add link to paper on pubmed
            new_aliases += [("url", self.aliases_pubmed_url_template %nid)]

            if not "doi" in [namespace for (namespace, temp_nid) in new_aliases]:
                aliases_doi_from_pmid_url = self.aliases_doi_from_pmid_url_template %nid
                try:
                    response = self.http_get(aliases_doi_from_pmid_url, cache_enabled=cache_enabled)
                    if response.status_code==200 and ("doi" in response.text):
                        doi = json.loads(response.text)["records"][0]["doi"]
                        new_aliases += [("doi", doi)]
                except (KeyError, __HOLE__):
                    pass
                except ProviderHttpError as e:
                    logger.warning(u"ProviderHttpError when calling {url}".format(
                        url=aliases_doi_from_pmid_url))
                    # just keep going for now. Eventually this call will need to
                    # be replaced with something that works.
                    pass

    # get uniques for things that are unhashable
    new_aliases_unique = [k for k,v in itertools.groupby(sorted(new_aliases))]

    return new_aliases_unique
AttributeError
dataset/ETHPy150Open Impactstory/total-impact-webapp/totalimpact/providers/pubmed.py/Pubmed.aliases
8,894
def _filter(self, id, citing_pmcids, filter_ptype): pmcids_string = " OR ".join(["PMC"+pmcid for pmcid in citing_pmcids]) query_string = filter_ptype + "[ptyp] AND (" + pmcids_string + ")" pmcid_filter_url = self.metrics_pmc_filter_url_template %query_string page = self._get_eutils_page(pmcid_filter_url, id) (doc, lookup_function) = provider._get_doc_from_xml(page) try: id_docs = doc.getElementsByTagName("Id") pmids = [id_doc.firstChild.data for id_doc in id_docs] except __HOLE__: logger.warning(u"%20s no Id xml tags for %s" % (self.provider_name, id)) pmids = [] return pmids
TypeError
dataset/ETHPy150Open Impactstory/total-impact-webapp/totalimpact/providers/pubmed.py/Pubmed._filter
8,895
def _extract_citing_pmcids(self, page): if (not "PubMedToPMCcitingformSET" in page): raise ProviderContentMalformedError() dict_of_keylists = {"pubmed:pmc_citations": ["PubMedToPMCcitingformSET", "REFORM"]} (doc, lookup_function) = provider._get_doc_from_xml(page) try: pmcid_doms = doc.getElementsByTagName("PMCID") pmcids = [pmcid_dom.firstChild.data for pmcid_dom in pmcid_doms] except __HOLE__: logger.warning(u"%20s no PMCID xml tags for %s" % (self.provider_name, id)) pmcids = [] return pmcids # documentation for pubmedtopmcciting: http://www.pubmedcentral.nih.gov/utils/entrez2pmcciting.cgi # could take multiple PMC IDs
TypeError
dataset/ETHPy150Open Impactstory/total-impact-webapp/totalimpact/providers/pubmed.py/Pubmed._extract_citing_pmcids
8,896
def mkdir_p(dst): try: os.makedirs(dst) except __HOLE__ as exc: if exc.errno == errno.EEXIST and os.path.isdir(dst): pass else: raise
OSError
dataset/ETHPy150Open ucb-sts/sts/sts/util/convenience.py/mkdir_p
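Contrast with the swift mkdirs earlier in this section: the isdir test above means EEXIST is only swallowed when the existing path really is a directory, so a regular file squatting on the path still raises:

import os

open('/tmp/mkdir_p_demo', 'w').close()   # illustrative path
try:
    mkdir_p('/tmp/mkdir_p_demo')         # EEXIST, but not a directory
except OSError:
    pass
os.remove('/tmp/mkdir_p_demo')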
8,897
def rm_rf(dst): try: if os.path.exists(dst): shutil.rmtree(dst) except __HOLE__: pass
OSError
dataset/ETHPy150Open ucb-sts/sts/sts/util/convenience.py/rm_rf
8,898
def _add_builtins(names=__all__): for name in names: try: add2builtin(name, getattr(sys.modules[__name__], name)) except __HOLE__: pass
AttributeError
dataset/ETHPy150Open kdart/pycopia/core/pycopia/interactive.py/_add_builtins
8,899
def winSetClipboard(text): GMEM_DDESHARE = 0x2000 ctypes.windll.user32.OpenClipboard(0) ctypes.windll.user32.EmptyClipboard() try: # works on Python 2 (bytes() only takes one argument) hCd = ctypes.windll.kernel32.GlobalAlloc(GMEM_DDESHARE, len(bytes(text))+1) except TypeError: # works on Python 3 (bytes() requires an encoding) hCd = ctypes.windll.kernel32.GlobalAlloc(GMEM_DDESHARE, len(bytes(text, 'ascii'))+1) pchData = ctypes.windll.kernel32.GlobalLock(hCd) try: # works on Python 2 (bytes() only takes one argument) ctypes.cdll.msvcrt.strcpy(ctypes.c_char_p(pchData), bytes(text)) except __HOLE__: # works on Python 3 (bytes() requires an encoding) ctypes.cdll.msvcrt.strcpy(ctypes.c_char_p(pchData), bytes(text, 'ascii')) ctypes.windll.kernel32.GlobalUnlock(hCd) ctypes.windll.user32.SetClipboardData(1,hCd) ctypes.windll.user32.CloseClipboard()
TypeError
dataset/ETHPy150Open gvalkov/git-link/gitlink/pyperclip.py/winSetClipboard
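A usage note for the clipboard helper above: the literal 1 passed to SetClipboardData is the Windows CF_TEXT format constant, and the ASCII encoding on Python 3 means non-ASCII text will raise. Windows-only, illustrative call:

winSetClipboard('hello from pyperclip')   # requires a Windows session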