Columns:
Unnamed: 0: int64, values 0 to 10k
function: string, lengths 79 to 138k
label: string, 20 classes
info: string, lengths 42 to 261
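Each record below follows the same four-field pattern: a row index, the function source flattened to one line with the raised exception class masked as __HOLE__, the label naming that exception class, and the info path into the ETH Py150 corpus. A minimal sketch of loading such a dump and checking the label distribution (the CSV filename here is an assumption for illustration; only the column schema above is given):

import pandas as pd

# Hypothetical filename: the dump above only defines the column schema, not a file.
df = pd.read_csv("exception_type_dump.csv")

# Distribution over the 20 exception classes used as labels.
print(df["label"].value_counts())

# Inspect one record: the function body has its exception class masked as __HOLE__.
row = df.iloc[0]
print(row["info"])      # source path within the ETH Py150 corpus
print(row["label"])     # exception class that fills __HOLE__
print(row["function"])  # flattened source of the function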
6,100
def __init__(self, message): try: self.media_id = message.pop('MediaId') self.format = message.pop('Format') self.recognition = message.pop('Recognition', None) except __HOLE__: raise ParseError() super(VoiceMessage, self).__init__(message)
KeyError
dataset/ETHPy150Open wechat-python-sdk/wechat-python-sdk/wechat_sdk/messages.py/VoiceMessage.__init__
6,101
@property def state(self): if self._cursor == -1: raise exceptions.RoboError('No state') try: return self._states[self._cursor] except __HOLE__: raise exceptions.RoboError('Index out of range')
IndexError
dataset/ETHPy150Open jmcarp/robobrowser/robobrowser/browser.py/RoboBrowser.state
6,102
@property def find(self): """See ``BeautifulSoup::find``.""" try: return self.parsed.find except __HOLE__: raise exceptions.RoboError
AttributeError
dataset/ETHPy150Open jmcarp/robobrowser/robobrowser/browser.py/RoboBrowser.find
6,103
@property def find_all(self): """See ``BeautifulSoup::find_all``.""" try: return self.parsed.find_all except __HOLE__: raise exceptions.RoboError
AttributeError
dataset/ETHPy150Open jmcarp/robobrowser/robobrowser/browser.py/RoboBrowser.find_all
6,104
@property def select(self): """See ``BeautifulSoup::select``.""" try: return self.parsed.select except __HOLE__: raise exceptions.RoboError
AttributeError
dataset/ETHPy150Open jmcarp/robobrowser/robobrowser/browser.py/RoboBrowser.select
6,105
def follow_link(self, link, **kwargs): """Click a link. :param Tag link: Link to click :param kwargs: Keyword arguments to `Session::send` """ try: href = link['href'] except __HOLE__: raise exceptions.RoboError('Link element must have "href" ' 'attribute') self.open(self._build_url(href), **kwargs)
KeyError
dataset/ETHPy150Open jmcarp/robobrowser/robobrowser/browser.py/RoboBrowser.follow_link
6,106
def _internal_increment(self, namespace, request): """Internal function for incrementing from a MemcacheIncrementRequest. Args: namespace: A string containing the namespace for the request, if any. Pass an empty string if there is no namespace. request: A MemcacheIncrementRequest instance. Returns: An integer or long if the offset was successful, None on error. """ key = request.key() entry = self._GetKey(namespace, key) if entry is None: if not request.has_initial_value(): return None if namespace not in self._the_cache: self._the_cache[namespace] = {} self._the_cache[namespace][key] = CacheEntry(str(request.initial_value()), expiration=0, flags=0, gettime=self._gettime) entry = self._GetKey(namespace, key) assert entry is not None try: old_value = long(entry.value) if old_value < 0: raise ValueError except __HOLE__: logging.error('Increment/decrement failed: Could not interpret ' 'value for key = "%s" as an unsigned integer.', key) return None delta = request.delta() if request.direction() == MemcacheIncrementRequest.DECREMENT: delta = -delta new_value = old_value + delta if not (0 <= new_value < 2**64): new_value = 0 entry.value = str(new_value) return new_value
ValueError
dataset/ETHPy150Open CollabQ/CollabQ/.google_appengine/google/appengine/api/memcache/memcache_stub.py/MemcacheServiceStub._internal_increment
6,107
def load_resource(res): try: f = file(join(dirname(__file__), 'shared', res)) except __HOLE__: return '' try: return f.read() finally: f.close()
IOError
dataset/ETHPy150Open limodou/uliweb/uliweb/lib/werkzeug/debug/render.py/load_resource
6,108
def color_palette(palette=None, n_colors=None, desat=None): """Return a list of colors defining a color palette. Availible seaborn palette names: deep, muted, bright, pastel, dark, colorblind Other options: hls, husl, any named matplotlib palette, list of colors Calling this function with ``palette=None`` will return the current matplotlib color cycle. Matplotlib paletes can be specified as reversed palettes by appending "_r" to the name or as dark palettes by appending "_d" to the name. (These options are mutually exclusive, but the resulting list of colors can also be reversed). This function can also be used in a ``with`` statement to temporarily set the color cycle for a plot or set of plots. Parameters ---------- palette: None, string, or sequence, optional Name of palette or None to return current palette. If a sequence, input colors are used but possibly cycled and desaturated. n_colors : int, optional Number of colors in the palette. If ``None``, the default will depend on how ``palette`` is specified. Named palettes default to 6 colors, but grabbing the current palette or passing in a list of colors will not change the number of colors unless this is specified. Asking for more colors than exist in the palette will cause it to cycle. desat : float, optional Proportion to desaturate each color by. Returns ------- palette : list of RGB tuples. Color palette. Behaves like a list, but can be used as a context manager and possesses an ``as_hex`` method to convert to hex color codes. See Also -------- set_palette : Set the default color cycle for all plots. set_color_codes : Reassign color codes like ``"b"``, ``"g"``, etc. to colors from one of the seaborn palettes. Examples -------- Show one of the "seaborn palettes", which have the same basic order of hues as the default matplotlib color cycle but more attractive colors. .. plot:: :context: close-figs >>> import seaborn as sns; sns.set() >>> sns.palplot(sns.color_palette("muted")) Use discrete values from one of the built-in matplotlib colormaps. .. plot:: :context: close-figs >>> sns.palplot(sns.color_palette("RdBu", n_colors=7)) Make a "dark" matplotlib sequential palette variant. (This can be good when coloring multiple lines or points that correspond to an ordered variable, where you don't want the lightest lines to be invisible). .. plot:: :context: close-figs >>> sns.palplot(sns.color_palette("Blues_d")) Use a categorical matplotlib palette, add some desaturation. (This can be good when making plots with large patches, which look best with dimmer colors). .. plot:: :context: close-figs >>> sns.palplot(sns.color_palette("Set1", n_colors=8, desat=.5)) Use as a context manager: .. plot:: :context: close-figs >>> import numpy as np, matplotlib.pyplot as plt >>> with sns.color_palette("husl", 8): ... 
_ = plt.plot(np.c_[np.zeros(8), np.arange(8)].T) """ if palette is None: palette = get_color_cycle() if n_colors is None: n_colors = len(palette) elif not isinstance(palette, string_types): palette = palette if n_colors is None: n_colors = len(palette) else: if n_colors is None: n_colors = 6 if palette == "hls": palette = hls_palette(n_colors) elif palette == "husl": palette = husl_palette(n_colors) elif palette.lower() == "jet": raise ValueError("No.") elif palette in SEABORN_PALETTES: palette = SEABORN_PALETTES[palette] elif palette in dir(mpl.cm): palette = mpl_palette(palette, n_colors) elif palette[:-2] in dir(mpl.cm): palette = mpl_palette(palette, n_colors) else: raise ValueError("%s is not a valid palette name" % palette) if desat is not None: palette = [desaturate(c, desat) for c in palette] # Always return as many colors as we asked for pal_cycle = cycle(palette) palette = [next(pal_cycle) for _ in range(n_colors)] # Always return in r, g, b tuple format try: palette = map(mpl.colors.colorConverter.to_rgb, palette) palette = _ColorPalette(palette) except __HOLE__: raise ValueError("Could not generate a palette for %s" % str(palette)) return palette
ValueError
dataset/ETHPy150Open mwaskom/seaborn/seaborn/palettes.py/color_palette
6,109
def _curl_process_params(body, content_type, query_params): extra = None modifier = None if query_params: try: query_params = urlencode([(k, v.encode('utf8')) for k, v in query_params.items()]) except TypeError: pass query_params = '?' + str(query_params) if 'json' in content_type or 'javascript' in content_type: if isinstance(body, dict): body = json.dumps(body) modifier = '-d' # See http://curl.haxx.se/docs/manpage.html#-F # for multipart vs x-www-form-urlencoded # x-www-form-urlencoded is same way as browser, multipart is RFC 2388 which allows file uploads. elif 'multipart' in content_type or 'x-www-form-urlencoded' in content_type: try: body = ' '.join(['%s=%s' % (k, v) for k, v in body.items()]) except __HOLE__: modifier = '-d' else: content_type = None modifier = '-F' elif body: body = str(body) modifier = '-d' else: modifier = None content_type = None # TODO: Clean up. return modifier, body, query_params, content_type, extra
AttributeError
dataset/ETHPy150Open django-silk/silk/silk/code_generation/curl.py/_curl_process_params
6,110
def get_conn(): ''' Return a conn object for the passed VM data ''' driver = get_driver(Provider.CLOUDSTACK) verify_ssl_cert = config.get_cloud_config_value('verify_ssl_cert', get_configured_provider(), __opts__, default=True, search_global=False) if verify_ssl_cert is False: try: import libcloud.security libcloud.security.VERIFY_SSL_CERT = False except (ImportError, __HOLE__): raise SaltCloudSystemExit( 'Could not disable SSL certificate verification. ' 'Not loading module.' ) return driver( key=config.get_cloud_config_value( 'apikey', get_configured_provider(), __opts__, search_global=False ), secret=config.get_cloud_config_value( 'secretkey', get_configured_provider(), __opts__, search_global=False ), secure=config.get_cloud_config_value( 'secure', get_configured_provider(), __opts__, default=True, search_global=False ), host=config.get_cloud_config_value( 'host', get_configured_provider(), __opts__, search_global=False ), path=config.get_cloud_config_value( 'path', get_configured_provider(), __opts__, search_global=False ), port=config.get_cloud_config_value( 'port', get_configured_provider(), __opts__, default=None, search_global=False ) )
AttributeError
dataset/ETHPy150Open saltstack/salt/salt/cloud/clouds/cloudstack.py/get_conn
6,111
def get_project(conn, vm_): ''' Return the project to use. ''' try: projects = conn.ex_list_projects() except __HOLE__: # with versions <0.15 of libcloud this is causing an AttributeError. log.warning('Cannot get projects, you may need to update libcloud to 0.15 or later') return False projid = config.get_cloud_config_value('projectid', vm_, __opts__) if not projid: return False for project in projects: if str(projid) in (str(project.id), str(project.name)): return project log.warning("Couldn't find project {0} in projects".format(projid)) return False
AttributeError
dataset/ETHPy150Open saltstack/salt/salt/cloud/clouds/cloudstack.py/get_project
6,112
def create(vm_): ''' Create a single VM from a data dict ''' try: # Check for required profile parameters before sending any API calls. if vm_['profile'] and config.is_profile_configured(__opts__, __active_provider_name__ or 'cloudstack', vm_['profile'], vm_=vm_) is False: return False except __HOLE__: pass # Since using "provider: <provider-engine>" is deprecated, alias provider # to use driver: "driver: <provider-engine>" if 'provider' in vm_: vm_['driver'] = vm_.pop('provider') salt.utils.cloud.fire_event( 'event', 'starting create', 'salt/cloud/{0}/creating'.format(vm_['name']), { 'name': vm_['name'], 'profile': vm_['profile'], 'provider': vm_['driver'], }, transport=__opts__['transport'] ) log.info('Creating Cloud VM {0}'.format(vm_['name'])) conn = get_conn() kwargs = { 'name': vm_['name'], 'image': get_image(conn, vm_), 'size': get_size(conn, vm_), 'location': get_location(conn, vm_), } if get_keypair(vm_) is not False: kwargs['ex_keyname'] = get_keypair(vm_) if get_networkid(vm_) is not False: kwargs['networkids'] = get_networkid(vm_) kwargs['networks'] = ( # The only attr that is used is 'id'. CloudStackNetwork(None, None, None, kwargs['networkids'], None, None), ) if get_project(conn, vm_) is not False: kwargs['project'] = get_project(conn, vm_) salt.utils.cloud.fire_event( 'event', 'requesting instance', 'salt/cloud/{0}/requesting'.format(vm_['name']), {'kwargs': {'name': kwargs['name'], 'image': kwargs['image'].name, 'size': kwargs['size'].name}}, transport=__opts__['transport'] ) displayname = cloudstack_displayname(vm_) if displayname: kwargs['ex_displayname'] = displayname else: kwargs['ex_displayname'] = kwargs['name'] volumes = {} ex_blockdevicemappings = block_device_mappings(vm_) if ex_blockdevicemappings: for ex_blockdevicemapping in ex_blockdevicemappings: if 'VirtualName' not in ex_blockdevicemapping: ex_blockdevicemapping['VirtualName'] = '{0}-{1}'.format(vm_['name'], len(volumes)) salt.utils.cloud.fire_event( 'event', 'requesting volume', 'salt/cloud/{0}/requesting'.format(ex_blockdevicemapping['VirtualName']), {'kwargs': {'name': ex_blockdevicemapping['VirtualName'], 'device': ex_blockdevicemapping['DeviceName'], 'size': ex_blockdevicemapping['VolumeSize']}}, ) try: volumes[ex_blockdevicemapping['DeviceName']] = conn.create_volume( ex_blockdevicemapping['VolumeSize'], ex_blockdevicemapping['VirtualName'] ) except Exception as exc: log.error( 'Error creating volume {0} on CLOUDSTACK\n\n' 'The following exception was thrown by libcloud when trying to ' 'requesting a volume: \n{1}'.format( ex_blockdevicemapping['VirtualName'], exc ), # Show the traceback if the debug logging level is enabled exc_info_on_loglevel=logging.DEBUG ) return False else: ex_blockdevicemapping = {} try: data = conn.create_node(**kwargs) except Exception as exc: log.error( 'Error creating {0} on CLOUDSTACK\n\n' 'The following exception was thrown by libcloud when trying to ' 'run the initial deployment: \n{1}'.format( vm_['name'], str(exc) ), # Show the traceback if the debug logging level is enabled exc_info_on_loglevel=logging.DEBUG ) return False for device_name in six.iterkeys(volumes): try: conn.attach_volume(data, volumes[device_name], device_name) except Exception as exc: log.error( 'Error attaching volume {0} on CLOUDSTACK\n\n' 'The following exception was thrown by libcloud when trying to ' 'attach a volume: \n{1}'.format( ex_blockdevicemapping.get('VirtualName', 'UNKNOWN'), exc ), # Show the traceback if the debug logging level is enabled exc_info=log.isEnabledFor(logging.DEBUG) ) 
return False ssh_username = config.get_cloud_config_value( 'ssh_username', vm_, __opts__, default='root' ) vm_['ssh_host'] = get_ip(data) vm_['password'] = data.extra['password'] vm_['key_filename'] = get_key() ret = salt.utils.cloud.bootstrap(vm_, __opts__) ret.update(data.__dict__) if 'password' in data.extra: del data.extra['password'] log.info('Created Cloud VM \'{0[name]}\''.format(vm_)) log.debug( '\'{0[name]}\' VM creation details:\n{1}'.format( vm_, pprint.pformat(data.__dict__) ) ) salt.utils.cloud.fire_event( 'event', 'created instance', 'salt/cloud/{0}/created'.format(vm_['name']), { 'name': vm_['name'], 'profile': vm_['profile'], 'provider': vm_['driver'], }, transport=__opts__['transport'] ) return ret
AttributeError
dataset/ETHPy150Open saltstack/salt/salt/cloud/clouds/cloudstack.py/create
6,113
def filetype(self,fname=None): """ checks file type of file, returning: 'escan' for Epics Scan None otherwise """ try: u = open(fname,'r') t = u.readline() u.close() if 'Epics Scan' in t: return 'escan' except __HOLE__: pass return None
IOError
dataset/ETHPy150Open xraypy/xraylarch/plugins/io/gse_escan.py/EscanData.filetype
6,114
def _getline(self,lines): "return mode keyword," inp = lines.pop() is_comment = True mode = None if len(inp) > 2: is_comment = inp[0] in (';','#') s = inp[1:].strip().lower() for j in self.mode_names: if s.startswith(j): mode = j break if mode is None and not is_comment: w1 = inp.strip().split()[0] try: x = float(w1) mode = 'data' except __HOLE__: pass return (mode, inp)
ValueError
dataset/ETHPy150Open xraypy/xraylarch/plugins/io/gse_escan.py/EscanData._getline
6,115
def read_ascii(self,fname=None): """read ascii data file""" lines = self._open_ascii(fname=fname) if lines is None: return -1 maxlines = len(lines) iline = 1 ndata_points = None tmp_dat = [] tmp_y = [] col_details = [] col_legend = None ntotal_at_2d = [] ny_counter = 0 mode = None while lines: key, raw = self._getline(lines) iline= iline+1 if key is not None and key != mode: mode = key if (len(raw) < 3): continue self.ShowProgress( iline* 100.0 /(maxlines+1)) if mode == '2d': self.dimension = 2 sx = raw.split() yval = float(sx[2]) tmp_y.append(yval) self.yaddr = sx[1].strip() if self.yaddr.endswith(':'): self.yaddr = self.yaddr[:-1] mode = None if len(tmp_dat)>0: ntotal_at_2d.append(len(tmp_dat)) elif mode == 'epics scan': # real numeric column data print( 'Warning: file appears to have a second scan appended!') break elif mode == 'data': # real numeric column data tmp_dat.append(numpy.array([float(i) for i in raw.split()])) elif mode == '-----': if col_legend is None: col_legend = lines.pop()[1:].strip().split() elif mode in ( '=====', 'n_points'): pass elif mode == 'user titles': self.user_titles.append(raw[1:].strip()) elif mode == 'pv list': str = raw[1:].strip().replace('not connected',' = not connected') if str.lower().startswith(mode): continue desc = str addr = '' val = 'unknown' try: x = str.split('=') desc = x[0].replace('\t','').strip() val = x[1].strip() if '(' in desc and desc.endswith(')'): n = desc.rfind('(') addr = desc[n+1:-1] desc = desc[:n].rstrip() except: pass self.env_addr.append(addr) self.env_desc.append(desc) self.env_val.append(val) elif mode == 'scan regions': self.scan_regions.append(raw[1:].strip()) elif mode == 'scan ended at': self.stop_time = raw[20:].strip() elif mode == 'scan began at': self.start_time = raw[20:].strip() elif mode == 'column labels': col_details.append(raw[1:].strip()) elif mode is None: sx = [i.strip() for i in raw[1:].split('=')] if len(sx)>1: if sx[0] == 'scan prefix': self.scan_prefix = sx[1] if sx[0] == 'scan dimension': self.dimension = int(float(sx[1])) else: print( 'UNKOWN MODE = ',mode, raw[:20]) del lines try: col_details.pop(0) except __HOLE__: print( 'Empty Scan File') return -2 if len(self.user_titles) > 1: self.user_titles.pop(0) if len(self.scan_regions) > 1: self.scan_regions.pop(0) # check that 2d maps are of consistent size if self.dimension == 2: ntotal_at_2d.append(len(tmp_dat)) np_row0 = ntotal_at_2d[0] nrows = len(ntotal_at_2d) npts = len(tmp_dat) if npts != np_row0 * nrows: for i,n in enumerate(ntotal_at_2d): if n == np_row0*(i+1): nrows,npts_total = i+1,n if len(tmp_y) > nrows or len(tmp_dat)> npts_total: print( 'Warning: Some trailing data may be lost!') tmp_y = tmp_y[:nrows] tmp_dat = tmp_dat[:npts_total] # self.y = numpy.array(tmp_y) # done reading file self._make_arrays(tmp_dat,col_legend,col_details) tmp_dat = None self.xaddr = self.pos_addr[0].strip() for addr,desc in zip(self.env_addr,self.env_desc): if self.xaddr == addr: self.xdesc = desc if self.yaddr == addr: self.ydesc = desc self.has_fullxrf = False if os.path.exists("%s.fullxrf" %fname): self.read_fullxrf("%s.fullxrf" %fname, len(self.x), len(self.y))
IndexError
dataset/ETHPy150Open xraypy/xraylarch/plugins/io/gse_escan.py/EscanData.read_ascii
6,116
def clean_smd(self): # Check that the SMD is an SMD of the given ANC. try: smd = "%02d" % int(self.cleaned_data['smd']) except __HOLE__: raise forms.ValidationError("An SMD looks like 01, 02, ...") anc = self.cleaned_data['anc'] smd_list = anc_data[anc[0]]['ancs'][anc[1]]['smds'].keys() if smd not in smd_list: raise forms.ValidationError("That's not an SMD in %s." % anc) return smd
ValueError
dataset/ETHPy150Open codefordc/ancfinder/ancfindersite/backend_views.py/SMDUpdateForm.clean_smd
6,117
def wrap_elasticluster(args): """Wrap elasticluster commands to avoid need to call separately. - Uses .bcbio/elasticluster as default configuration location. - Sets NFS client parameters for elasticluster Ansible playbook. Uses async clients which provide better throughput on reads/writes: http://nfs.sourceforge.net/nfs-howto/ar01s05.html (section 5.9 for tradeoffs) """ if "-s" not in args and "--storage" not in args: # clean up old storage directory if starting a new cluster # old pickle files will cause consistent errors when restarting storage_dir = os.path.join(os.path.dirname(DEFAULT_EC_CONFIG), "storage") std_args = [x for x in args if not x.startswith("-")] if len(std_args) >= 3 and std_args[1] == "start": cluster = std_args[2] pickle_file = os.path.join(storage_dir, "%s.pickle" % cluster) if os.path.exists(pickle_file): os.remove(pickle_file) args = [args[0], "--storage", storage_dir] + args[1:] if "-c" not in args and "--config" not in args: args = [args[0]] + ["--config", DEFAULT_EC_CONFIG] + args[1:] os.environ["nfsoptions"] = "rw,async,nfsvers=3" # NFS tuning sys.argv = args try: return elasticluster.main.main() except __HOLE__ as exc: return exc.args[0]
SystemExit
dataset/ETHPy150Open chapmanb/bcbio-nextgen-vm/bcbiovm/aws/common.py/wrap_elasticluster
6,118
def __getattr__(self, name): path = self._config._normalize_path(".".join((self._name, name))) try: return self._config._settings[path] except __HOLE__: group_path = path + "." keys = self._config._settings.keys() if any(1 for k in keys if k.startswith(group_path)): return Group(self._config, path) return None
KeyError
dataset/ETHPy150Open progrium/ginkgo/ginkgo/config.py/Group.__getattr__
6,119
@property def nodes_authorized(self): """Get authorized, non-deleted nodes. Returns an empty list if the attached add-on does not include a node model. """ try: schema = self.config.settings_models['node'] except __HOLE__: return [] return [ node_addon.owner for node_addon in schema.find(Q('user_settings', 'eq', self)) if node_addon.owner and not node_addon.owner.is_deleted ]
KeyError
dataset/ETHPy150Open CenterForOpenScience/osf.io/website/addons/base/__init__.py/AddonUserSettingsBase.nodes_authorized
6,120
@must_be_logged_in def revoke_oauth_access(self, external_account, auth, save=True): """Revoke all access to an ``ExternalAccount``. TODO: This should accept node and metadata params in the future, to allow fine-grained revocation of grants. That's not yet been needed, so it's not yet been implemented. """ for node in self.get_nodes_with_oauth_grants(external_account): try: addon_settings = node.get_addon(external_account.provider, deleted=True) except __HOLE__: # No associated addon settings despite oauth grant pass else: addon_settings.deauthorize(auth=auth) if User.find(Q('external_accounts', 'eq', external_account._id)).count() == 1: # Only this user is using the account, so revoke remote access as well. self.revoke_remote_oauth_access(external_account) for key in self.oauth_grants: self.oauth_grants[key].pop(external_account._id, None) if save: self.save()
AttributeError
dataset/ETHPy150Open CenterForOpenScience/osf.io/website/addons/base/__init__.py/AddonOAuthUserSettingsBase.revoke_oauth_access
6,121
def verify_oauth_access(self, node, external_account, metadata=None): """Verify that access has been previously granted. If metadata is not provided, this checks only if the node can access the account. This is suitable to check to see if the node's addon settings is still connected to an external account (i.e., the user hasn't revoked it in their user settings pane). If metadata is provided, this checks to see that all key/value pairs have been granted. This is suitable for checking access to a particular folder or other resource on an external provider. """ metadata = metadata or {} # ensure the grant exists try: grants = self.oauth_grants[node._id][external_account._id] except __HOLE__: return False # Verify every key/value pair is in the grants dict for key, value in metadata.iteritems(): if key not in grants or grants[key] != value: return False return True
KeyError
dataset/ETHPy150Open CenterForOpenScience/osf.io/website/addons/base/__init__.py/AddonOAuthUserSettingsBase.verify_oauth_access
6,122
def merge(self, user_settings): """Merge `user_settings` into this instance""" if user_settings.__class__ is not self.__class__: raise TypeError('Cannot merge different addons') for node_id, data in user_settings.oauth_grants.iteritems(): if node_id not in self.oauth_grants: self.oauth_grants[node_id] = data else: node_grants = user_settings.oauth_grants[node_id].iteritems() for ext_acct, meta in node_grants: if ext_acct not in self.oauth_grants[node_id]: self.oauth_grants[node_id][ext_acct] = meta else: for k, v in meta: if k not in self.oauth_grants[node_id][ext_acct]: self.oauth_grants[node_id][ext_acct][k] = v user_settings.oauth_grants = {} user_settings.save() try: config = settings.ADDONS_AVAILABLE_DICT[ self.oauth_provider.short_name ] Model = config.settings_models['node'] except __HOLE__: pass else: connected = Model.find(Q('user_settings', 'eq', user_settings)) for node_settings in connected: node_settings.user_settings = self node_settings.save() self.save()
KeyError
dataset/ETHPy150Open CenterForOpenScience/osf.io/website/addons/base/__init__.py/AddonOAuthUserSettingsBase.merge
6,123
def after_fork(self, node, fork, user, save=True): """After forking, copy user settings if the user is the one who authorized the addon. :return: A tuple of the form (cloned_settings, message) """ clone, _ = super(AddonOAuthNodeSettingsBase, self).after_fork( node=node, fork=fork, user=user, save=False, ) if self.has_auth and self.user_settings.owner == user: metadata = None if self.complete: try: metadata = self.user_settings.oauth_grants[node._id][self.external_account._id] except (KeyError, __HOLE__): pass clone.set_auth(self.external_account, user, metadata=metadata, log=False) message = '{addon} authorization copied to forked {category}.'.format( addon=self.config.full_name, category=fork.project_or_component, ) else: message = ( u'{addon} authorization not copied to forked {category}. You may ' u'authorize this fork on the <u><a href="{url}">Settings</a></u> ' u'page.' ).format( addon=self.config.full_name, url=fork.web_url_for('node_setting'), category=fork.project_or_component, ) if save: clone.save() return clone, message
AttributeError
dataset/ETHPy150Open CenterForOpenScience/osf.io/website/addons/base/__init__.py/AddonOAuthNodeSettingsBase.after_fork
6,124
def uriparse(uri, strict=False): """Given a valid URI, return a 5-tuple of (scheme, authority, path, query, fragment). The query part is a URLQuery object, the rest are strings. Raises ValueError if URI is malformed. """ key = uri, strict try: scheme, authority, path, q, fragment = _parse_cache[key] return (scheme, authority, path, q.copy(), fragment) except __HOLE__: pass if len(_parse_cache) >= MAX_CACHE_SIZE: _parse_cache.clear() if strict: mo = URI_RE_STRICT.search(uri) if mo: _, scheme, _, authority, path, _, query, _, fragment = mo.groups() else: raise ValueError("Invalid URI: %r" % (uri,)) else: mo = URI_RE.search(uri) if mo: _, scheme, authority, path, _, query, _, fragment = mo.groups() else: raise ValueError("Invalid URI: %r" % (uri,)) if query: q = queryparse(query) else: q = URLQuery() t = (scheme, authority, path, q, fragment) _parse_cache[key] = (scheme, authority, path, q.copy(), fragment) return (scheme, authority, path, q, fragment)
KeyError
dataset/ETHPy150Open kdart/pycopia/aid/pycopia/urlparse.py/uriparse
6,125
def queryparse(query, evaluator=lambda x: x): q = URLQuery() parts = query.split("&") for part in parts: if part: try: l, r = part.split("=", 1) except __HOLE__: l, r = part, "" key = unquote_plus(l) val = evaluator(unquote_plus(r)) q[key] = val return q # URL queries can have names repeated # This is a dictionary that manages multiple values in a list. # getting the string value is urlencoded form.
ValueError
dataset/ETHPy150Open kdart/pycopia/aid/pycopia/urlparse.py/queryparse
6,126
def getlist(self, key): try: val = dict.__getitem__(self, key) except __HOLE__: return [] if type(val) is list: return val else: return [val]
KeyError
dataset/ETHPy150Open kdart/pycopia/aid/pycopia/urlparse.py/URLQuery.getlist
6,127
def unquote(s): """unquote('abc%20def') -> 'abc def'.""" res = s.split('%') for i in xrange(1, len(res)): item = res[i] try: res[i] = _hextochr[item[:2]] + item[2:] except __HOLE__: res[i] = '%' + item return "".join(res)
KeyError
dataset/ETHPy150Open kdart/pycopia/aid/pycopia/urlparse.py/unquote
6,128
def quote(s, safe = '/'): """quote('abc def') -> 'abc%20def' Each part of a URL, e.g. the path info, the query, etc., has a different set of reserved characters that must be quoted. RFC 2396 Uniform Resource Identifiers (URI): Generic Syntax lists the following reserved characters. reserved = ";" | "/" | "?" | ":" | "@" | "&" | "=" | "+" | "$" | "," Each of these characters is reserved in some component of a URL, but not necessarily in all of them. By default, the quote function is intended for quoting the path section of a URL. Thus, it will not encode '/'. This character is reserved, but in typical usage the quote function is being called on a path where the existing slash characters are used as reserved characters. """ # per RFC 3986 section 2.5 if type(s) is unicode: s = s.encode("utf8") cachekey = (safe, always_safe) try: safe_map = _safemaps[cachekey] except __HOLE__: safe += always_safe safe_map = {} for i in range(256): c = chr(i) safe_map[c] = (c in safe) and c or ('%%%02X' % i) _safemaps[cachekey] = safe_map res = map(safe_map.__getitem__, s) return ''.join(res)
KeyError
dataset/ETHPy150Open kdart/pycopia/aid/pycopia/urlparse.py/quote
6,129
def urlencode(query, doseq=0): """Encode a sequence of two-element tuples or dictionary into a URL query string. If any values in the query arg are sequences and doseq is true, each sequence element is converted to a separate parameter. If the query arg is a sequence of two-element tuples, the order of the parameters in the output will match the order of parameters in the input. """ if hasattr(query,"items"): # mapping objects query = query.items() else: # it's a bother at times that strings and string-like objects are # sequences... try: # non-sequence items should not work with len() # non-empty strings will fail this if len(query) and not isinstance(query[0], tuple): raise TypeError # zero-length sequences of all types will get here and succeed, # but that's a minor nit - since the original implementation # allowed empty dicts that type of behavior probably should be # preserved for consistency except __HOLE__: ty,va,tb = sys.exc_info() raise TypeError("not a valid non-string sequence or mapping object", tb) l = [] if not doseq: # preserve old behavior for k, v in query: k = quote_plus(str(k)) v = quote_plus(v) l.append(k + '=' + v) else: for k, v in query: k = quote_plus(str(k)) if isinstance(v, basestring): if v: v = quote_plus(v) l.append("%s=%s" % (k, v)) else: l.append(k) else: try: # is this a sufficient test for sequence-ness? x = len(v) except TypeError: # not a sequence v = quote_plus(v) if v: l.append("%s=%s" % (k, v)) else: l.append(k) else: # loop over the sequence for elt in v: if elt: l.append(k + '=' + quote_plus(elt)) else: l.append(k) return '&'.join(l)
TypeError
dataset/ETHPy150Open kdart/pycopia/aid/pycopia/urlparse.py/urlencode
6,130
def set(self, url, strict=True): if isinstance(url, basestring): if isinstance(url, unicode): # URL's are defined to be in the ASCII character set. url = quote(url, ";/?:@&=+$,") self.clear(strict) try: self._parse(url, strict) except __HOLE__: self.clear(strict) raise else: self._urlstr = url self._badurl = False elif isinstance(url, self.__class__): self._set_from_instance(url) else: raise ValueError("Invalid initializer: %r" % (url,))
ValueError
dataset/ETHPy150Open kdart/pycopia/aid/pycopia/urlparse.py/UniversalResourceLocator.set
6,131
def assign_sizes(data, aes): """Assigns size to the given data based on the aes and adds the right legend Parameters ---------- data : DataFrame dataframe which should have sizes assigned to aes : aesthetic mapping, including a mapping from sizes to variable Returns ------- data : DataFrame the changed dataframe legend_entry : dict An entry into the legend dictionary. Documented in `components.legend` """ legend_entry = dict() # We need to normalize size so that the points aren't really big or # really small. # TODO: add different types of normalization (log, inverse, etc.) if 'size' in aes: size_col = aes['size'] # Check that values are in the right format try : values = data[size_col].astype(np.float) except __HOLE__ : raise GgplotError( "Size aesthetic '%s' contains non-numerical data" % size_col) _min = values.min() normalize = lambda v : 30 + (200.0 * (v - _min) / (v.max() - _min)) data[":::size_mapping:::"] = normalize(values) labels, scale_type, indices = get_labels(data, size_col) if scale_type == "continuous" : quantiles = np.percentile(data[":::size_mapping:::"], indices) elif scale_type == "discrete" : quantiles = normalize(np.array(labels, dtype=np.float)) else : raise GgplotError("Unknow scale_type: '%s'" % scale_type) legend_entry = { 'column_name': size_col, 'dict': dict(zip(quantiles, labels)), 'scale_type': scale_type} return data, legend_entry
ValueError
dataset/ETHPy150Open yhat/ggplot/ggplot/components/size.py/assign_sizes
6,132
def tearDown(self): try: rmtree(self.dir) except __HOLE__ as e: # If directory already deleted, keep going if e.errno not in (errno.ENOENT, errno.EACCES, errno.EPERM): raise e
OSError
dataset/ETHPy150Open OpenMDAO/OpenMDAO/openmdao/recorders/test/test_dump.py/TestDumpRecorder.tearDown
6,133
@c3bottles.route("/create", methods=("GET", "POST")) @c3bottles.route("/create/<string:lat>/<string:lng>", methods=("GET", "POST")) @login_required def create_dp( number=None, description=None, lat=None, lng=None, level=None, crates=None, errors=None, success=None, center_lat=None, center_lng=None ): if not g.user.can_edit: abort(401) if request.method == "POST": number = request.form.get("number") description = request.form.get("description") lat = request.form.get("lat") lng = request.form.get("lng") level = request.form.get("level") crates = request.form.get("crates") try: DropPoint( number=number, description=description, lat=lat, lng=lng, level=level, crates=crates ) except ValueError as e: errors = e.args else: db.session.commit() if request.form.get("action") == "stay": center_lat = lat center_lng = lng number = None description = None lat = None lng = None level = None crates = None success = True else: return render_template( "success.html", text="Your drop point has been created successfully." ) try: lat_f = float(lat) lng_f = float(lng) except (__HOLE__, TypeError): lat_f = None lng_f = None if errors is not None: error_list = [v for d in errors for v in d.values()] error_fields = [k for d in errors for k in d.keys()] else: error_list = [] error_fields = [] return render_template( "create_dp.html", all_dps_json=DropPoint.get_dps_json(), number=number, description=description, center_lat=center_lat, center_lng=center_lng, lat=lat_f, lng=lng_f, level=level, crates=crates, error_list=error_list, error_fields=error_fields, success=success ) # vim: set expandtab ts=4 sw=4:
ValueError
dataset/ETHPy150Open der-michik/c3bottles/view/create.py/create_dp
6,134
def _get_next_line_number(self, class_name): if self.combined or self.streaming: # This has an obvious side effect. Oh well. self.combined_line_number += 1 return self.combined_line_number else: try: return len(self._test_cases[class_name]) + 1 except __HOLE__: # A result is created before the call to _track so the test # case may not be tracked yet. In that case, the line is 1. return 1
KeyError
dataset/ETHPy150Open mblayman/tappy/tap/tracker.py/Tracker._get_next_line_number
6,135
def compile(self, func, args, return_type=None, flags=DEFAULT_FLAGS): """ Compile the function or retrieve an already compiled result from the cache. """ cache_key = (func, args, return_type, flags) try: cr = self.cr_cache[cache_key] except __HOLE__: cr = compile_extra(self.typingctx, self.targetctx, func, args, return_type, flags, locals={}) self.cr_cache[cache_key] = cr return cr
KeyError
dataset/ETHPy150Open numba/numba/numba/tests/support.py/CompilationCache.compile
6,136
def reset_module_warnings(self, module): """ Reset the warnings registry of a module. This can be necessary as the warnings module is buggy in that regard. See http://bugs.python.org/issue4180 """ if isinstance(module, str): module = sys.modules[module] try: del module.__warningregistry__ except __HOLE__: pass
AttributeError
dataset/ETHPy150Open numba/numba/numba/tests/support.py/TestCase.reset_module_warnings
6,137
def assertPreciseEqual(self, first, second, prec='exact', ulps=1, msg=None, ignore_sign_on_zero=False, abs_tol=None ): """ Versatile equality testing function with more built-in checks than standard assertEqual(). For arrays, test that layout, dtype, shape are identical, and recursively call assertPreciseEqual() on the contents. For other sequences, recursively call assertPreciseEqual() on the contents. For scalars, test that two scalars or have similar types and are equal up to a computed precision. If the scalars are instances of exact types or if *prec* is 'exact', they are compared exactly. If the scalars are instances of inexact types (float, complex) and *prec* is not 'exact', then the number of significant bits is computed according to the value of *prec*: 53 bits if *prec* is 'double', 24 bits if *prec* is single. This number of bits can be lowered by raising the *ulps* value. ignore_sign_on_zero can be set to True if zeros are to be considered equal regardless of their sign bit. abs_tol if this is set to a float value its value is used in the following. If, however, this is set to the string "eps" then machine precision of the type(first) is used in the following instead. This kwarg is used to check if the absolute difference in value between first and second is less than the value set, if so the numbers being compared are considered equal. (This is to handle small numbers typically of magnitude less than machine precision). Any value of *prec* other than 'exact', 'single' or 'double' will raise an error. """ try: self._assertPreciseEqual(first, second, prec, ulps, msg, ignore_sign_on_zero, abs_tol) except __HOLE__ as exc: failure_msg = str(exc) # Fall off of the 'except' scope to avoid Python 3 exception # chaining. else: return # Decorate the failure message with more information self.fail("when comparing %s and %s: %s" % (first, second, failure_msg))
AssertionError
dataset/ETHPy150Open numba/numba/numba/tests/support.py/TestCase.assertPreciseEqual
6,138
def _assertPreciseEqual(self, first, second, prec='exact', ulps=1, msg=None, ignore_sign_on_zero=False, abs_tol=None): """Recursive workhorse for assertPreciseEqual().""" def _assertNumberEqual(first, second, delta=None): if (delta is None or first == second == 0.0 or math.isinf(first) or math.isinf(second)): self.assertEqual(first, second, msg=msg) # For signed zeros if not ignore_sign_on_zero: try: if math.copysign(1, first) != math.copysign(1, second): self.fail( self._formatMessage(msg, "%s != %s" % (first, second))) except __HOLE__: pass else: self.assertAlmostEqual(first, second, delta=delta, msg=msg) first_family = self._detect_family(first) second_family = self._detect_family(second) assertion_message = "Type Family mismatch. (%s != %s)" % (first_family, second_family) if msg: assertion_message += ': %s' % (msg,) self.assertEqual(first_family, second_family, msg=assertion_message) # We now know they are in the same comparison family compare_family = first_family # For recognized sequences, recurse if compare_family == "ndarray": dtype = self._fix_dtype(first.dtype) self.assertEqual(dtype, self._fix_dtype(second.dtype)) self.assertEqual(first.ndim, second.ndim, "different number of dimensions") self.assertEqual(first.shape, second.shape, "different shapes") self.assertEqual(first.flags.writeable, second.flags.writeable, "different mutability") # itemsize is already checked by the dtype test above self.assertEqual(self._fix_strides(first), self._fix_strides(second), "different strides") if first.dtype != dtype: first = first.astype(dtype) if second.dtype != dtype: second = second.astype(dtype) for a, b in zip(first.flat, second.flat): self._assertPreciseEqual(a, b, prec, ulps, msg, ignore_sign_on_zero, abs_tol) return elif compare_family == "sequence": self.assertEqual(len(first), len(second), msg=msg) for a, b in zip(first, second): self._assertPreciseEqual(a, b, prec, ulps, msg, ignore_sign_on_zero, abs_tol) return elif compare_family == "exact": exact_comparison = True elif compare_family in ["complex", "approximate"]: exact_comparison = False elif compare_family == "enum": self.assertIs(first.__class__, second.__class__) self._assertPreciseEqual(first.value, second.value, prec, ulps, msg, ignore_sign_on_zero, abs_tol) return elif compare_family == "unknown": # Assume these are non-numeric types: we will fall back # on regular unittest comparison. self.assertIs(first.__class__, second.__class__) exact_comparison = True else: assert 0, "unexpected family" # If a Numpy scalar, check the dtype is exactly the same too # (required for datetime64 and timedelta64). if hasattr(first, 'dtype') and hasattr(second, 'dtype'): self.assertEqual(first.dtype, second.dtype) # Mixing bools and non-bools should always fail if (isinstance(first, self._bool_types) != isinstance(second, self._bool_types)): assertion_message = ("Mismatching return types (%s vs. %s)" % (first.__class__, second.__class__)) if msg: assertion_message += ': %s' % (msg,) self.fail(assertion_message) try: if cmath.isnan(first) and cmath.isnan(second): # The NaNs will compare unequal, skip regular comparison return except TypeError: # Not floats. 
pass # if absolute comparison is set, use it if abs_tol is not None: if abs_tol == "eps": rtol = np.finfo(type(first)).eps elif isinstance(abs_tol, float): rtol = abs_tol else: raise ValueError("abs_tol is not \"eps\" or a float, found %s" % abs_tol) if abs(first - second) < rtol: return exact_comparison = exact_comparison or prec == 'exact' if not exact_comparison and prec != 'exact': if prec == 'single': bits = 24 elif prec == 'double': bits = 53 else: raise ValueError("unsupported precision %r" % (prec,)) k = 2 ** (ulps - bits - 1) delta = k * (abs(first) + abs(second)) else: delta = None if isinstance(first, self._complex_types): _assertNumberEqual(first.real, second.real, delta) _assertNumberEqual(first.imag, second.imag, delta) else: _assertNumberEqual(first, second, delta)
TypeError
dataset/ETHPy150Open numba/numba/numba/tests/support.py/TestCase._assertPreciseEqual
6,139
def _create_trashcan_dir(): try: os.mkdir(_trashcan_dir) except __HOLE__ as e: if e.errno != errno.EEXIST: raise
OSError
dataset/ETHPy150Open numba/numba/numba/tests/support.py/_create_trashcan_dir
6,140
def _purge_trashcan_dir(): freshness_threshold = time.time() - _trashcan_timeout for fn in sorted(os.listdir(_trashcan_dir)): fn = os.path.join(_trashcan_dir, fn) try: st = os.stat(fn) if st.st_mtime < freshness_threshold: shutil.rmtree(fn, ignore_errors=True) except __HOLE__ as e: # In parallel testing, several processes can attempt to # remove the same entry at once, ignore. pass
OSError
dataset/ETHPy150Open numba/numba/numba/tests/support.py/_purge_trashcan_dir
6,141
def handle(self, **options): address = options.get('address') # Break address into host:port if address: if ':' in address: host, port = address.split(':', 1) port = int(port) else: host = address port = None else: host, port = None, None services = { 'http': http.NsotHTTPServer, } # Ensure we perform an upgrade before starting any service. if options.get('upgrade'): print("Performing upgrade before service startup...") call_command( 'upgrade', verbosity=0, noinput=options.get('noinput') ) # Ensure we collect static before starting any service, but only if # SERVE_STATIC_FILES=True. if options.get('collectstatic') and settings.SERVE_STATIC_FILES: print("Performing collectstatic before service startup...") call_command('collectstatic', interactive=False, ignore=['src']) service_name = options.get('service') try: service_class = services[service_name] except __HOLE__: raise CommandError('%r is not a valid service' % service_name) service = service_class( debug=options.get('debug'), host=host, port=port, workers=options.get('workers'), worker_class=options.get('worker_class'), timeout=options.get('timeout'), max_requests=options.get('max_requests'), max_requests_jitter=options.get('max_requests_jitter'), preload=options.get('preload'), ) # Remove command line arguments to avoid optparse failures with service # code that calls call_command which reparses the command line, and if # --no-upgrade is supplied a parse error is thrown. sys.argv = sys.argv[:1] service.run()
KeyError
dataset/ETHPy150Open dropbox/nsot/nsot/management/commands/start.py/Command.handle
6,142
def dump(start=0): check_for_fields() try: lastdump = tyrant["lastdump"] except __HOLE__: lastdump = "*" filecount = 1 itemcount = 1 filename = FILENAME_TEMPLATE % (SLAVE_NAME, now, filecount) writer = csv.writer(open(filename, "w")) with solr.pooled_connection(fp._fp_solr) as host: items_to_dump = host.query("source:local AND import_date:[%s TO %s]" % (lastdump, now), rows=10000, start=start) resultlen = len(items_to_dump) while resultlen > 0: print "writing %d results from start=%s" % (resultlen, items_to_dump.results.start) for r in items_to_dump.results: row = [r["track_id"], r["codever"], tyrant[str(r["track_id"])], r["length"], r.get("artist", ""), r.get("release", ""), r.get("track", "") ] writer.writerow(row) itemcount += resultlen if itemcount > ITEMS_PER_FILE: filecount += 1 filename = FILENAME_TEMPLATE % (SLAVE_NAME, now, filecount) print "Making new file, %s" % filename writer = csv.writer(open(filename, "w")) itemcount = resultlen items_to_dump = items_to_dump.next_batch() resultlen = len(items_to_dump) # Write the final completion time tyrant["lastdump"] = now
KeyError
dataset/ETHPy150Open echonest/echoprint-server/replication/slave_dump.py/dump
6,143
@authenticated_rest_api_view @has_request_variables def api_freshdesk_webhook(request, user_profile, stream=REQ(default='')): try: payload = ujson.loads(request.body) ticket_data = payload["freshdesk_webhook"] except ValueError: return json_error("Malformed JSON input") required_keys = [ "triggered_event", "ticket_id", "ticket_url", "ticket_type", "ticket_subject", "ticket_description", "ticket_status", "ticket_priority", "requester_name", "requester_email", ] for key in required_keys: if ticket_data.get(key) is None: logging.warning("Freshdesk webhook error. Payload was:") logging.warning(request.body) return json_error("Missing key %s in JSON" % (key,)) try: stream = request.GET['stream'] except (AttributeError, KeyError): stream = 'freshdesk' ticket = TicketDict(ticket_data) subject = "#%s: %s" % (ticket.id, ticket.subject) try: event_info = parse_freshdesk_event(ticket.triggered_event) except __HOLE__: return json_error("Malformed event %s" % (ticket.triggered_event,)) if event_info[1] == "created": content = format_freshdesk_ticket_creation_message(ticket) elif event_info[0] == "note_type": content = format_freshdesk_note_message(ticket, event_info) elif event_info[0] in ("status", "priority"): content = format_freshdesk_property_change_message(ticket, event_info) else: # Not an event we know handle; do nothing. return json_success() check_send_message(user_profile, get_client("ZulipFreshdeskWebhook"), "stream", [stream], subject, content) return json_success()
ValueError
dataset/ETHPy150Open zulip/zulip/zerver/views/webhooks/freshdesk.py/api_freshdesk_webhook
6,144
def read(self): """ Read a simple PNG file, return width, height, pixels and image metadata This function is a very early prototype with limited flexibility and excessive use of memory. """ signature = self.file.read(8) if (signature != struct.pack("8B", 137, 80, 78, 71, 13, 10, 26, 10)): raise Error("PNG file has invalid header") compressed = [] image_metadata = {} while True: try: tag, data = self.read_chunk() except __HOLE__, e: raise Error('Chunk error: ' + e.args[0]) # print >> sys.stderr, tag, len(data) if tag == asbytes('IHDR'): # http://www.w3.org/TR/PNG/#11IHDR (width, height, bits_per_sample, color_type, compression_method, filter_method, interlaced) = struct.unpack("!2I5B", data) bps = bits_per_sample // 8 if bps == 0: raise Error("unsupported pixel depth") if bps > 2 or bits_per_sample != (bps * 8): raise Error("invalid pixel depth") if color_type == 0: greyscale = True has_alpha = False planes = 1 elif color_type == 2: greyscale = False has_alpha = False planes = 3 elif color_type == 4: greyscale = True has_alpha = True planes = 2 elif color_type == 6: greyscale = False has_alpha = True planes = 4 else: raise Error("unknown PNG colour type %s" % color_type) if compression_method != 0: raise Error("unknown compression method") if filter_method != 0: raise Error("unknown filter method") self.bps = bps self.planes = planes self.psize = bps * planes self.width = width self.height = height self.row_bytes = width * self.psize elif tag == asbytes('IDAT'): # http://www.w3.org/TR/PNG/#11IDAT compressed.append(data) elif tag == asbytes('bKGD'): if greyscale: image_metadata["background"] = struct.unpack("!1H", data) else: image_metadata["background"] = struct.unpack("!3H", data) elif tag == asbytes('tRNS'): if greyscale: image_metadata["transparent"] = struct.unpack("!1H", data) else: image_metadata["transparent"] = struct.unpack("!3H", data) elif tag == asbytes('gAMA'): image_metadata["gamma"] = ( struct.unpack("!L", data)[0]) / 100000.0 elif tag == asbytes('IEND'): # http://www.w3.org/TR/PNG/#11IEND break scanlines = array('B', zlib.decompress(asbytes('').join(compressed))) if interlaced: pixels = self.deinterlace(scanlines) else: pixels = self.read_flat(scanlines) image_metadata["greyscale"] = greyscale image_metadata["has_alpha"] = has_alpha image_metadata["bytes_per_sample"] = bps image_metadata["interlaced"] = interlaced return width, height, pixels, image_metadata
ValueError
dataset/ETHPy150Open ardekantur/pyglet/pyglet/image/codecs/pypng.py/Reader.read
6,145
@classmethod def get(cls, uid, safe=False): """ params: uid - the user id which to fetch safe - return users without secure fields like salt and hash db - provide your own instantiated lazydb.Db() connector """ if uid is not None: users = cls.getall() try: return users[uid] if not safe else cls._publishable(users[uid]) except __HOLE__: return None
KeyError
dataset/ETHPy150Open mekarpeles/waltz/waltz/__init__.py/User.get
6,146
@classmethod def delete(cls, uid): users = cls.db().get(cls.udb, default={}, touch=True) try: del users[uid] return cls.db().put(cls.udb, users) except __HOLE__: return False
KeyError
dataset/ETHPy150Open mekarpeles/waltz/waltz/__init__.py/User.delete
6,147
def parse(): optParser = getOptParser() opts,args = optParser.parse_args() encoding = "utf8" try: f = args[-1] # Try opening from the internet if f.startswith('http://'): try: import urllib.request, urllib.parse, urllib.error, cgi f = urllib.request.urlopen(f) contentType = f.headers.get('content-type') if contentType: (mediaType, params) = cgi.parse_header(contentType) encoding = params.get('charset') except: pass elif f == '-': f = sys.stdin if sys.version_info[0] >= 3: encoding = None else: try: # Try opening from file system f = open(f, "rb") except __HOLE__ as e: sys.stderr.write("Unable to open file: %s\n" % e) sys.exit(1) except IndexError: sys.stderr.write("No filename provided. Use -h for help\n") sys.exit(1) treebuilder = treebuilders.getTreeBuilder(opts.treebuilder) if opts.sanitize: tokenizer = sanitizer.HTMLSanitizer else: tokenizer = HTMLTokenizer p = html5parser.HTMLParser(tree=treebuilder, tokenizer=tokenizer, debug=opts.log) if opts.fragment: parseMethod = p.parseFragment else: parseMethod = p.parse if opts.profile: import cProfile import pstats cProfile.runctx("run(parseMethod, f, encoding)", None, {"run": run, "parseMethod": parseMethod, "f": f, "encoding": encoding}, "stats.prof") # XXX - We should use a temp file here stats = pstats.Stats('stats.prof') stats.strip_dirs() stats.sort_stats('time') stats.print_stats() elif opts.time: import time t0 = time.time() document = run(parseMethod, f, encoding) t1 = time.time() if document: printOutput(p, document, opts) t2 = time.time() sys.stderr.write("\n\nRun took: %fs (plus %fs to print the output)"%(t1-t0, t2-t1)) else: sys.stderr.write("\n\nRun took: %fs"%(t1-t0)) else: document = run(parseMethod, f, encoding) if document: printOutput(p, document, opts)
IOError
dataset/ETHPy150Open html5lib/html5lib-python/parse.py/parse
6,148
def test_create_tags_invalid_parameters(self): # NOTE(ft): check tag validity checks self.assert_execution_error('InvalidParameterValue', 'CreateTags', {'ResourceId.1': fakes.ID_EC2_VPC_1, 'Tag.1.Value': ''}) self.assert_execution_error('InvalidParameterValue', 'CreateTags', {'ResourceId.1': fakes.ID_EC2_VPC_1, 'Tag.1.Key': ''}) self.assert_execution_error('InvalidParameterValue', 'CreateTags', {'ResourceId.1': fakes.ID_EC2_VPC_1, 'Tag.1.Key': 'a' * 128}) self.assert_execution_error('InvalidParameterValue', 'CreateTags', {'ResourceId.1': fakes.ID_EC2_VPC_1, 'Tag.1.Key': 'fake-key', 'Tag.1.Value': 'a' * 256}) # NOTE(ft): check resource type check self.assert_execution_error( 'InvalidID', 'CreateTags', {'ResourceId.1': fakes.random_ec2_id('fake'), 'Tag.1.Key': 'fake-key', 'Tag.1.Value': 'fake-value'}) # NOTE(ft): check resource existence check self.db_api.get_item_by_id.return_value = None for r_id in tag_api.RESOURCE_TYPES: if r_id in ('ami', 'ari', 'aki'): continue exc_class = ec2utils.NOT_FOUND_EXCEPTION_MAP[r_id] try: error_code = exc_class.ec2_code except __HOLE__: error_code = exc_class.__name__ self.assert_execution_error( error_code, 'CreateTags', {'ResourceId.1': fakes.random_ec2_id(r_id), 'Tag.1.Key': 'fake-key', 'Tag.1.Value': 'fake-value'})
AttributeError
dataset/ETHPy150Open openstack/ec2-api/ec2api/tests/unit/test_tag.py/TagTestCase.test_create_tags_invalid_parameters
6,149
def get_test_data_upload(creator, dataset, filename=TEST_DATA_FILENAME, encoding='utf8'): # Ensure panda subdir has been created try: os.mkdir(settings.MEDIA_ROOT) except __HOLE__: pass src = os.path.join(TEST_DATA_PATH, filename) dst = os.path.join(settings.MEDIA_ROOT, filename) copyfile(src, dst) return DataUpload.objects.create( filename=filename, original_filename=filename, size=os.path.getsize(dst), creator=creator, dataset=dataset, encoding=encoding)
OSError
dataset/ETHPy150Open pandaproject/panda/panda/tests/utils.py/get_test_data_upload
6,150
def get_test_related_upload(creator, dataset, filename=TEST_DATA_FILENAME): # Ensure panda subdir has been created try: os.mkdir(settings.MEDIA_ROOT) except __HOLE__: pass src = os.path.join(TEST_DATA_PATH, filename) dst = os.path.join(settings.MEDIA_ROOT, filename) copyfile(src, dst) return RelatedUpload.objects.create( filename=filename, original_filename=filename, size=os.path.getsize(dst), creator=creator, dataset=dataset)
OSError
dataset/ETHPy150Open pandaproject/panda/panda/tests/utils.py/get_test_related_upload
6,151
def get_first(self, label=None, fs=None, aspace=None): try: return next(self.select(label, fs, aspace)) except __HOLE__: raise ValueError('No annotations match those criteria')
StopIteration
dataset/ETHPy150Open cidles/graf-python/src/graf/annotations.py/AnnotationList.get_first
6,152
def remove(self, ann): """Remove the given C{Annotation} object. :param a: Annotation """ try: return self._elements.remove(ann) except __HOLE__: print('Error: Annotation not in set')
ValueError
dataset/ETHPy150Open cidles/graf-python/src/graf/annotations.py/AnnotationSpace.remove
6,153
def _resolve_fs(self, path, create=False): """ Resolves a list of keys to this or a descendent feature structure. """ fs = self for name in path: try: fs = fs._elements[name] except __HOLE__: if create: fs = fs._elements[name] = FeatureStructure() else: fs = None if not isinstance(fs, FeatureStructure): raise KeyError('Could not resolve feature structure for path %r. Got %r' % (path, fs)) return fs
KeyError
dataset/ETHPy150Open cidles/graf-python/src/graf/annotations.py/FeatureStructure._resolve_fs
6,154
def _parse_key(self, key, create=False): try: key = key.strip('/').split('/') except __HOLE__: # assume key is already list of path elements pass return self._resolve_fs(key[:-1], create), key[-1]
AttributeError
dataset/ETHPy150Open cidles/graf-python/src/graf/annotations.py/FeatureStructure._parse_key
6,155
def __contains__(self, key): try: fs, key = self._parse_key(key) except __HOLE__: return False return key in fs._elements
KeyError
dataset/ETHPy150Open cidles/graf-python/src/graf/annotations.py/FeatureStructure.__contains__
6,156
def get(self, key, default=None): try: return self[key] except __HOLE__: return default
KeyError
dataset/ETHPy150Open cidles/graf-python/src/graf/annotations.py/FeatureStructure.get
6,157
def pop(self, key, default=None): try: fs, key = self._parse_key(key) return fs._elements.pop(key, default) except __HOLE__: return default
KeyError
dataset/ETHPy150Open cidles/graf-python/src/graf/annotations.py/FeatureStructure.pop
6,158
def __eq__(self, other):
    """ Equivalence is equivalent types (????) """
    try:
        return self.type == other.type
    except __HOLE__:
        return False
AttributeError
dataset/ETHPy150Open cidles/graf-python/src/graf/annotations.py/FeatureStructure.__eq__
6,159
def subsumes(self, other):
    for key, val in self.items():
        try:
            oval = other._elements[key]
        except __HOLE__:
            return False
        if isinstance(val, FeatureStructure) and isinstance(oval, FeatureStructure):
            if not val.subsumes(oval):
                return False
        elif val != oval:
            return False
    return True
KeyError
dataset/ETHPy150Open cidles/graf-python/src/graf/annotations.py/FeatureStructure.subsumes
6,160
def json_request(self, method, url, **kwargs):
    kwargs.setdefault('headers', {})
    kwargs['headers'].setdefault('Content-Type', 'application/json')
    kwargs['headers'].setdefault('Accept', 'application/json')

    if 'data' in kwargs:
        kwargs['data'] = jsonutils.dumps(kwargs['data'])

    resp = self._http_request(url, method, **kwargs)
    body = resp.content

    if 'application/json' in resp.headers.get('content-type', ''):
        try:
            body = resp.json()
        except __HOLE__:
            LOG.error('Could not decode response body as JSON')
    else:
        body = None

    return resp, body
ValueError
dataset/ETHPy150Open openstack/python-monascaclient/monascaclient/common/http.py/HTTPClient.json_request
6,161
def get_woeid(location):
    query = urllib.quote(location, '')
    values = {
        'app_id': YAHOO_APP_ID,
        'query': query,
    }
    url = WHERE_API_QUERY_URL % values
    response = requests.get(url)
    data = json.loads(response.text)
    try:
        places = data['places']
        if places['count'] > 0:
            place = places['place'][0]
            woeid = place['woeid']
        else:
            woeid = None
    except __HOLE__, k:
        woeid = None
    return woeid
KeyError
dataset/ETHPy150Open hacktoolkit/hacktoolkit/apis/yahoo/geo/geoplanet/geoplanet.py/get_woeid
6,162
def resolve_woeid(woeid):
    values = {
        'app_id': YAHOO_APP_ID,
        'woeid': woeid
    }
    url = WHERE_API_RESOLVE_URL % values
    response = requests.get(url)
    data = json.loads(response.text)
    try:
        place = data['place']
        place_name = place['name']
        if place['admin1']:
            place_name += ', ' + place['admin1']
        if place['country']:
            place_name += ', ' + place['country']
    except __HOLE__, k:
        place_name = None
    return place_name
KeyError
dataset/ETHPy150Open hacktoolkit/hacktoolkit/apis/yahoo/geo/geoplanet/geoplanet.py/resolve_woeid
6,163
def force_text(s, encoding="utf-8"):
    if isinstance(s, six.text_type):
        return s
    try:
        if not isinstance(s, six.string_types):
            if six.PY3:
                if isinstance(s, bytes):
                    s = six.text_type(s, encoding)
                else:
                    s = six.text_type(s)
            elif hasattr(s, '__unicode__'):
                s = six.text_type(s)
            else:
                s = six.text_type(bytes(s), encoding)
        else:
            s = s.decode(encoding)
        return s
    except (__HOLE__, UnicodeEncodeError):
        pass
UnicodeDecodeError
dataset/ETHPy150Open cobrateam/django-htmlmin/htmlmin/util.py/force_text
6,164
def removeDuplicates_complicated(self, A):
    """
    Two pointers algorithm, open_ptr & closed_ptr
    :param A: a list of integers
    :return: an integer
    """
    length = len(A)
    if length<=2:
        return length

    closed_ptr = 0
    duplicate_count = 0
    open_ptr = closed_ptr+1
    while open_ptr<length:
        if A[closed_ptr]==A[open_ptr]:
            if duplicate_count>=1:
                # find next non-duplicate
                try:
                    while A[closed_ptr]==A[open_ptr]:
                        open_ptr+=1
                    duplicate_count = 0
                except __HOLE__:
                    break
            # one duplicate
            else:
                duplicate_count +=1
        else:
            duplicate_count = 0

        A[closed_ptr+1] = A[open_ptr]
        closed_ptr += 1
        open_ptr += 1

    return closed_ptr+1  # length
IndexError
dataset/ETHPy150Open algorhythms/LeetCode/081 Remove Duplicates from Sorted Array II.py/Solution.removeDuplicates_complicated
6,165
def __init__(self, language=None):
    '''
    If a `language` identifier (such as 'en_US') is provided and a
    matching language exists, it is selected. If an identifier is
    provided and no matching language exists, a NoSuchLangError
    exception is raised by self.select_language(). If no `language`
    identifier is provided, we just fall back to the first one that is
    available.

    :Parameters:
        `language` : str, defaults to None
            If provided, indicates the language to be used. This needs
            to be a language identifier understood by select_language(),
            i.e. one of the options returned by list_languages(). If
            nothing is provided, the first available language is used.
            If no language is available, NoLanguageSelectedError is
            raised.
    '''
    langs = self.list_languages()
    try:
        # If no language was specified, we just use the first one
        # that is available.
        fallback_lang = langs[0]
    except __HOLE__:
        raise NoLanguageSelectedError("No languages available!")
    self.select_language(language or fallback_lang)
IndexError
dataset/ETHPy150Open kivy/kivy/kivy/core/spelling/__init__.py/SpellingBase.__init__
6,166
def human_to_bytes(hsize, kilo=1024):
    '''
    This function converts human-readable amounts of bytes to bytes.
    It understands the following units :
        - I{B} or no unit present for Bytes
        - I{k}, I{K}, I{kB}, I{KB} for kB (kilobytes)
        - I{m}, I{M}, I{mB}, I{MB} for MB (megabytes)
        - I{g}, I{G}, I{gB}, I{GB} for GB (gigabytes)
        - I{t}, I{T}, I{tB}, I{TB} for TB (terabytes)

    Note: The definition of I{kilo} defaults to 1kB = 1024Bytes.
    Strictly speaking, those should not be called I{kB} but I{kiB}.
    You can override that with the optional kilo parameter.

    @param hsize: The human-readable version of the Bytes amount to convert
    @type hsize: string or int
    @param kilo: Optional base for the kilo prefix
    @type kilo: int
    @return: An int representing the human-readable string converted to bytes
    '''
    size = hsize.replace('i', '')
    size = size.lower()
    if not re.match("^[0-9]+[k|m|g|t]?[b]?$", size):
        raise RTSLibError("Cannot interpret size, wrong format: %s" % hsize)

    size = size.rstrip('ib')

    units = ['k', 'm', 'g', 't']
    try:
        power = units.index(size[-1]) + 1
    except __HOLE__:
        power = 0
        size = int(size)
    else:
        size = int(size[:-1])

    return size * (int(kilo) ** power)
ValueError
dataset/ETHPy150Open agrover/targetcli-fb/targetcli/ui_backstore.py/human_to_bytes
6,167
def ui_command_delete(self, name):
    '''
    Recursively deletes the storage object having the specified I{name}. If
    there are LUNs using this storage object, they will be deleted too.

    EXAMPLE
    =======
    B{delete mystorage}
    -------------------
    Deletes the storage object named mystorage, and all associated LUNs.
    '''
    self.assert_root()
    try:
        child = self.get_child(name)
    except __HOLE__:
        raise ExecutionError("No storage object named %s." % name)

    child.rtsnode.delete()
    self.remove_child(child)
    self.shell.log.info("Deleted storage object %s." % name)
ValueError
dataset/ETHPy150Open agrover/targetcli-fb/targetcli/ui_backstore.py/UIBackstore.ui_command_delete
6,168
def _create_file(self, filename, size, sparse=True):
    try:
        f = open(filename, "w+")
    except (OSError, __HOLE__):
        raise ExecutionError("Could not open %s" % filename)
    try:
        if sparse:
            try:
                os.posix_fallocate(f.fileno(), 0, size)
            except AttributeError:
                # Prior to version 3.3, Python does not provide fallocate
                os.ftruncate(f.fileno(), size)
        else:
            self.shell.log.info("Writing %d bytes" % size)
            while size > 0:
                write_size = min(size, 1024)
                f.write("\0" * write_size)
                size -= write_size
    except (OSError, IOError):
        os.remove(filename)
        raise ExecutionError("Could not expand file to %d bytes" % size)
    except OverflowError:
        raise ExecutionError("The file size is too large (%d bytes)" % size)
    finally:
        f.close()
IOError
dataset/ETHPy150Open agrover/targetcli-fb/targetcli/ui_backstore.py/UIFileIOBackstore._create_file
6,169
def __init__(self, irc=None):
    if irc is not None:
        assert not irc.getCallback(self.name())
    self.__parent = super(Owner, self)
    self.__parent.__init__(irc)
    # Setup command flood detection.
    self.commands = ircutils.FloodQueue(conf.supybot.abuse.flood.interval())
    conf.supybot.abuse.flood.interval.addCallback(self.setFloodQueueTimeout)
    # Setup plugins and default plugins for commands.
    #
    # This needs to be done before we connect to any networks so that the
    # children of supybot.plugins (the actual plugins) exist and can be
    # loaded.
    for (name, s) in registry._cache.items():
        if 'alwaysLoadDefault' in name or 'alwaysLoadImportant' in name:
            continue
        if name.startswith('supybot.plugins'):
            try:
                (_, _, name) = registry.split(name)
            except __HOLE__: # unpack list of wrong size.
                continue
            # This is just for the prettiness of the configuration file.
            # There are no plugins that are all-lowercase, so we'll at
            # least attempt to capitalize them.
            if name == name.lower():
                name = name.capitalize()
            conf.registerPlugin(name)
        if name.startswith('supybot.commands.defaultPlugins'):
            try:
                (_, _, _, name) = registry.split(name)
            except ValueError: # unpack list of wrong size.
                continue
            registerDefaultPlugin(name, s)
    # Setup Irc objects, connected to networks. If world.ircs is already
    # populated, chances are that we're being reloaded, so don't do this.
    if not world.ircs:
        for network in conf.supybot.networks():
            try:
                self._connect(network)
            except socket.error as e:
                self.log.error('Could not connect to %s: %s.', network, e)
            except Exception as e:
                self.log.exception('Exception connecting to %s:', network)
                self.log.error('Could not connect to %s: %s.', network, e)
ValueError
dataset/ETHPy150Open ProgVal/Limnoria/plugins/Owner/plugin.py/Owner.__init__
6,170
def _connect(self, network, serverPort=None, password='', ssl=False):
    try:
        group = conf.supybot.networks.get(network)
        (server, port) = group.servers()[0]
    except (registry.NonExistentRegistryEntry, __HOLE__):
        if serverPort is None:
            raise ValueError('connect requires a (server, port) ' \
                             'if the network is not registered.')
        conf.registerNetwork(network, password, ssl)
        serverS = '%s:%s' % serverPort
        conf.supybot.networks.get(network).servers.append(serverS)
        assert conf.supybot.networks.get(network).servers(), \
               'No servers are set for the %s network.' % network
    self.log.debug('Creating new Irc for %s.', network)
    newIrc = irclib.Irc(network)
    for irc in world.ircs:
        if irc != newIrc:
            newIrc.state.history = irc.state.history
    driver = drivers.newDriver(newIrc)
    self._loadPlugins(newIrc)
    return newIrc
IndexError
dataset/ETHPy150Open ProgVal/Limnoria/plugins/Owner/plugin.py/Owner._connect
6,171
def _loadPlugins(self, irc):
    self.log.debug('Loading plugins (connecting to %s).', irc.network)
    alwaysLoadImportant = conf.supybot.plugins.alwaysLoadImportant()
    important = conf.supybot.commands.defaultPlugins.importantPlugins()
    for (name, value) in conf.supybot.plugins.getValues(fullNames=False):
        if irc.getCallback(name) is None:
            load = value()
            if not load and name in important:
                if alwaysLoadImportant:
                    s = '%s is configured not to be loaded, but is being '\
                        'loaded anyway because ' \
                        'supybot.plugins.alwaysLoadImportant is True.'
                    self.log.warning(s, name)
                    load = True
            if load:
                # We don't load plugins that don't start with a capital
                # letter.
                if name[0].isupper() and not irc.getCallback(name):
                    # This is debug because each log logs its beginning.
                    self.log.debug('Loading %s.', name)
                    try:
                        m = plugin.loadPluginModule(name, ignoreDeprecation=True)
                        plugin.loadPluginClass(irc, m)
                    except callbacks.Error as e:
                        # This is just an error message.
                        log.warning(str(e))
                    except plugins.NoSuitableDatabase as e:
                        s = 'Failed to load %s: no suitable database(%s).' % (name, e)
                        log.warning(s)
                    except __HOLE__ as e:
                        e = str(e)
                        if e.endswith(name):
                            s = 'Failed to load {0}: No plugin named {0} exists.'.format(
                                utils.str.dqrepr(name))
                        elif "No module named 'config'" in e:
                            s = ("Failed to load %s: This plugin may be incompatible "
                                 "with your current Python version. If this error is appearing "
                                 "with stock Supybot plugins, remove the stock plugins directory "
                                 "(usually ~/Limnoria/plugins) from 'config directories.plugins'."
                                 % name)
                        else:
                            s = 'Failed to load %s: import error (%s).' % (name, e)
                        log.warning(s)
                    except Exception as e:
                        log.exception('Failed to load %s:', name)
            else:
                # Let's import the module so configuration is preserved.
                try:
                    _ = plugin.loadPluginModule(name)
                except Exception as e:
                    log.debug('Attempted to load %s to preserve its '
                              'configuration, but load failed: %s', name, e)
    world.starting = False
ImportError
dataset/ETHPy150Open ProgVal/Limnoria/plugins/Owner/plugin.py/Owner._loadPlugins
6,172
def load(self, irc, msg, args, optlist, name):
    """[--deprecated] <plugin>

    Loads the plugin <plugin> from any of the directories in
    conf.supybot.directories.plugins; usually this includes the main
    installed directory and 'plugins' in the current directory.
    --deprecated is necessary if you wish to load deprecated plugins.
    """
    ignoreDeprecation = False
    for (option, argument) in optlist:
        if option == 'deprecated':
            ignoreDeprecation = True
    if name.endswith('.py'):
        name = name[:-3]
    if irc.getCallback(name):
        irc.error('%s is already loaded.' % name.capitalize())
        return
    try:
        module = plugin.loadPluginModule(name, ignoreDeprecation)
    except plugin.Deprecated:
        irc.error('%s is deprecated. Use --deprecated '
                  'to force it to load.' % name.capitalize())
        return
    except __HOLE__ as e:
        if str(e).endswith(name):
            irc.error('No plugin named %s exists.' % utils.str.dqrepr(name))
        elif "No module named 'config'" in str(e):
            irc.error('This plugin may be incompatible with your current Python '
                      'version. Try running 2to3 on it.')
        else:
            irc.error(str(e))
        return
    cb = plugin.loadPluginClass(irc, module)
    name = cb.name() # Let's normalize this.
    conf.registerPlugin(name, True)
    irc.replySuccess()
ImportError
dataset/ETHPy150Open ProgVal/Limnoria/plugins/Owner/plugin.py/Owner.load
6,173
def reload(self, irc, msg, args, name):
    """<plugin>

    Unloads and subsequently reloads the plugin by name; use the 'list'
    command to see a list of the currently loaded plugins.
    """
    if ircutils.strEqual(name, self.name()):
        irc.error('You can\'t reload the %s plugin.' % name)
        return
    callbacks = irc.removeCallback(name)
    if callbacks:
        module = sys.modules[callbacks[0].__module__]
        if hasattr(module, 'reload'):
            x = module.reload()
        try:
            module = plugin.loadPluginModule(name)
            if hasattr(module, 'reload') and 'x' in locals():
                module.reload(x)
            if hasattr(module, 'config'):
                from imp import reload
                reload(module.config)
            for callback in callbacks:
                callback.die()
                del callback
            gc.collect() # This makes sure the callback is collected.
            callback = plugin.loadPluginClass(irc, module)
            irc.replySuccess()
        except __HOLE__:
            for callback in callbacks:
                irc.addCallback(callback)
            irc.error('No plugin named %s exists.' % name)
    else:
        irc.error('There was no plugin %s.' % name)
ImportError
dataset/ETHPy150Open ProgVal/Limnoria/plugins/Owner/plugin.py/Owner.reload
6,174
def defaultcapability(self, irc, msg, args, action, capability):
    """{add|remove} <capability>

    Adds or removes (according to the first argument) <capability>
    from the default capabilities given to users (the configuration
    variable supybot.capabilities stores these).
    """
    if action == 'add':
        conf.supybot.capabilities().add(capability)
        irc.replySuccess()
    elif action == 'remove':
        try:
            conf.supybot.capabilities().remove(capability)
            irc.replySuccess()
        except __HOLE__:
            if ircdb.isAntiCapability(capability):
                irc.error('That capability wasn\'t in '
                          'supybot.capabilities.')
            else:
                anticap = ircdb.makeAntiCapability(capability)
                conf.supybot.capabilities().add(anticap)
                irc.replySuccess()
KeyError
dataset/ETHPy150Open ProgVal/Limnoria/plugins/Owner/plugin.py/Owner.defaultcapability
6,175
def enable(self, irc, msg, args, plugin, command):
    """[<plugin>] <command>

    Enables the command <command> for all users. If <plugin>
    if given, only enables the <command> from <plugin>. This command is
    the inverse of disable.
    """
    try:
        if plugin:
            plugin._disabled.remove(command, plugin.name())
            command = '%s.%s' % (plugin.name(), command)
        else:
            self._disabled.remove(command)
        conf.supybot.commands.disabled().remove(command)
        irc.replySuccess()
    except __HOLE__:
        irc.error('That command wasn\'t disabled.')
KeyError
dataset/ETHPy150Open ProgVal/Limnoria/plugins/Owner/plugin.py/Owner.enable
6,176
@register.tag(name="render_placeholder")
def do_render_placeholder(parser, token):
    try:
        tag_name, id_placeholder = token.split_contents()
    except __HOLE__:
        raise template.TemplateSyntaxError("%r tag requires a single argument"
                                           % token.contents.split()[0])
    if not (id_placeholder[0] == id_placeholder[-1] and id_placeholder[0] in ('"', "'")):
        raise template.TemplateSyntaxError(
            "%r tag's argument should be in quotes" % tag_name)
    return PlaceholderNode(id_placeholder[1:-1])
ValueError
dataset/ETHPy150Open ionyse/ionyweb/ionyweb/website/templatetags/placeholder_tags.py/do_render_placeholder
6,177
def setup(self, environ=None):
    '''Set up the :class:`.WsgiHandler` the first time this
    middleware is accessed.
    '''
    from django.conf import settings
    from django.core.wsgi import get_wsgi_application
    #
    try:
        dotted = settings.WSGI_APPLICATION
    except __HOLE__:    # pragma nocover
        dotted = None
    if dotted:
        app = module_attribute(dotted)
    else:
        app = get_wsgi_application()
    app = middleware_in_executor(app)
    return WsgiHandler((wait_for_body_middleware, app))
AttributeError
dataset/ETHPy150Open quantmind/pulsar/pulsar/apps/pulse/__init__.py/Wsgi.setup
6,178
def bump_nofile_limit():
    from twisted.python import log
    log.msg("Open files limit: %d" % resource.getrlimit(resource.RLIMIT_NOFILE)[0])
    soft, hard = resource.getrlimit(resource.RLIMIT_NOFILE)
    values_to_try = [v for v in [hard, 100000, 10000] if v > soft]
    for new_soft in values_to_try:
        try:
            resource.setrlimit(resource.RLIMIT_NOFILE, (new_soft, hard))
        except __HOLE__:
            continue
        else:
            log.msg("Open files limit increased from %d to %d" % (soft, new_soft))
            break
    else:
        log.msg("Can't bump open files limit")
ValueError
dataset/ETHPy150Open scrapinghub/splash/splash/server.py/bump_nofile_limit
6,179
def get_algorithm(algorithm):
    """Returns the wire format string and the hash module to use for the
    specified TSIG algorithm

    @rtype: (string, hash constructor)
    @raises NotImplementedError: I{algorithm} is not supported
    """

    hashes = {}
    try:
        import hashlib
        hashes[dns.name.from_text('hmac-sha224')] = hashlib.sha224
        hashes[dns.name.from_text('hmac-sha256')] = hashlib.sha256
        hashes[dns.name.from_text('hmac-sha384')] = hashlib.sha384
        hashes[dns.name.from_text('hmac-sha512')] = hashlib.sha512
        hashes[dns.name.from_text('hmac-sha1')] = hashlib.sha1
        hashes[dns.name.from_text('HMAC-MD5.SIG-ALG.REG.INT')] = hashlib.md5

        import sys
        if sys.hexversion < 0x02050000:
            # hashlib doesn't conform to PEP 247: API for
            # Cryptographic Hash Functions, which hmac before python
            # 2.5 requires, so add the necessary items.
            class HashlibWrapper:
                def __init__(self, basehash):
                    self.basehash = basehash
                    self.digest_size = self.basehash().digest_size

                def new(self, *args, **kwargs):
                    return self.basehash(*args, **kwargs)

            for name in hashes:
                hashes[name] = HashlibWrapper(hashes[name])

    except __HOLE__:
        import md5, sha
        hashes[dns.name.from_text('HMAC-MD5.SIG-ALG.REG.INT')] = md5.md5
        hashes[dns.name.from_text('hmac-sha1')] = sha.sha

    if isinstance(algorithm, (str, unicode)):
        algorithm = dns.name.from_text(algorithm)

    if algorithm in hashes:
        return (algorithm.to_digestable(), hashes[algorithm])

    raise NotImplementedError("TSIG algorithm " + str(algorithm) +
                              " is not supported")
ImportError
dataset/ETHPy150Open catap/namebench/nb_third_party/dns/tsig.py/get_algorithm
6,180
def __init__(self, request, model, list_display, list_display_links,
             list_filter, date_hierarchy, search_fields, list_select_related,
             list_per_page, list_editable, model_admin):
    self.model = model
    self.opts = model._meta
    self.lookup_opts = self.opts
    self.root_query_set = model_admin.queryset(request)
    self.list_display = list_display
    self.list_display_links = list_display_links
    self.list_filter = list_filter
    self.date_hierarchy = date_hierarchy
    self.search_fields = search_fields
    self.list_select_related = list_select_related
    self.list_per_page = list_per_page
    self.list_editable = list_editable
    self.model_admin = model_admin

    # Get search parameters from the query string.
    try:
        self.page_num = int(request.GET.get(PAGE_VAR, 0))
    except __HOLE__:
        self.page_num = 0
    self.show_all = ALL_VAR in request.GET
    self.is_popup = IS_POPUP_VAR in request.GET
    self.to_field = request.GET.get(TO_FIELD_VAR)
    self.params = dict(request.GET.items())
    if PAGE_VAR in self.params:
        del self.params[PAGE_VAR]
    if TO_FIELD_VAR in self.params:
        del self.params[TO_FIELD_VAR]
    if ERROR_FLAG in self.params:
        del self.params[ERROR_FLAG]

    self.order_field, self.order_type = self.get_ordering()
    self.query = request.GET.get(SEARCH_VAR, '')
    self.query_set = self.get_query_set()
    self.get_results(request)
    self.title = (self.is_popup
                  and ugettext('Select %s') % force_unicode(self.opts.verbose_name)
                  or ugettext('Select %s to change') % force_unicode(self.opts.verbose_name))
    self.filter_specs, self.has_filters = self.get_filters(request)
    self.pk_attname = self.lookup_opts.pk.attname
ValueError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.2/django/contrib/admin/views/main.py/ChangeList.__init__
6,181
def get_ordering(self):
    lookup_opts, params = self.lookup_opts, self.params
    # For ordering, first check the "ordering" parameter in the admin
    # options, then check the object's default ordering. If neither of
    # those exist, order descending by ID by default. Finally, look for
    # manually-specified ordering from the query string.
    ordering = self.model_admin.ordering or lookup_opts.ordering or ['-' + lookup_opts.pk.name]

    if ordering[0].startswith('-'):
        order_field, order_type = ordering[0][1:], 'desc'
    else:
        order_field, order_type = ordering[0], 'asc'
    if ORDER_VAR in params:
        try:
            field_name = self.list_display[int(params[ORDER_VAR])]
            try:
                f = lookup_opts.get_field(field_name)
            except models.FieldDoesNotExist:
                # See whether field_name is a name of a non-field
                # that allows sorting.
                try:
                    if callable(field_name):
                        attr = field_name
                    elif hasattr(self.model_admin, field_name):
                        attr = getattr(self.model_admin, field_name)
                    else:
                        attr = getattr(self.model, field_name)
                    order_field = attr.admin_order_field
                except AttributeError:
                    pass
            else:
                order_field = f.name
        except (IndexError, __HOLE__):
            pass # Invalid ordering specified. Just use the default.
    if ORDER_TYPE_VAR in params and params[ORDER_TYPE_VAR] in ('asc', 'desc'):
        order_type = params[ORDER_TYPE_VAR]
    return order_field, order_type
ValueError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.2/django/contrib/admin/views/main.py/ChangeList.get_ordering
6,182
def __next__(self):
    if len(self.urls) == 0:
        raise StopIteration

    obj = self.load(self.urls.pop(0))

    # make sure data is string type
    if isinstance(obj, six.binary_type):
        obj = obj.decode('utf-8')
    elif not isinstance(obj, six.string_types):
        raise ValueError('Unknown types: [{0}]'.format(str(type(obj))))

    # a very simple logic to distinguish json and yaml
    try:
        if obj.startswith('{'):
            obj = json.loads(obj)
        else:
            obj = yaml.load(obj)
    except __HOLE__:
        raise Exception('Unknown format startswith {0} ...'.format(obj[:10]))

    return obj
ValueError
dataset/ETHPy150Open mission-liao/pyswagger/pyswagger/getter.py/Getter.__next__
6,183
def _query(self, users, query_continue=None, properties=None):
    params = {
        'action': "query",
        'list': "users"
    }
    params['ususers'] = self._items(users, type=str)
    params['usprop'] = self._items(properties, levels=self.PROPERTIES)

    if query_continue is not None:
        params.update(query_continue)

    doc = self.session.get(params)

    try:
        if 'query-continue' in doc:
            query_continue = doc['query-continue']['users']
        else:
            query_continue = None

        us_docs = doc['query']['users']

        return us_docs, query_continue
    except __HOLE__ as e:
        raise MalformedResponse(str(e), doc)
KeyError
dataset/ETHPy150Open mediawiki-utilities/python-mediawiki-utilities/mw/api/collections/users.py/Users._query
6,184
@register.filter
def get_choice_value(field):
    try:
        return dict(field.field.choices)[field.value()]
    except __HOLE__:
        return ', '.join([entry for id, entry in field.field.choices])
    except KeyError:
        return _('None')
TypeError
dataset/ETHPy150Open mayan-edms/mayan-edms/mayan/apps/appearance/templatetags/appearance_tags.py/get_choice_value
6,185
def test_infix_binops(self):
    for ia, a in enumerate(candidates):
        for ib, b in enumerate(candidates):
            results = infix_results[(ia, ib)]
            for op, res, ires in zip(infix_binops, results[0], results[1]):
                if res is TE:
                    self.assertRaises(TypeError, eval,
                                      'a %s b' % op, {'a': a, 'b': b})
                else:
                    self.assertEqual(format_result(res),
                                     format_result(eval('a %s b' % op)),
                                     '%s %s %s == %s failed' % (a, op, b, res))
                try:
                    z = copy.copy(a)
                except copy.Error:
                    z = a # assume it has no inplace ops
                if ires is TE:
                    try:
                        exec 'z %s= b' % op
                    except __HOLE__:
                        pass
                    else:
                        self.fail("TypeError not raised")
                else:
                    exec('z %s= b' % op)
                    self.assertEqual(ires, z)
TypeError
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/test/test_coercion.py/CoercionTest.test_infix_binops
6,186
def test_infinite_rec_classic_classes(self):
    # if __coerce__() returns its arguments reversed it causes an infinite
    # recursion for classic classes.
    class Tester:
        def __coerce__(self, other):
            return other, self

    exc = TestFailed("__coerce__() returning its arguments reverse "
                     "should raise RuntimeError")
    try:
        Tester() + 1
    except (__HOLE__, TypeError):
        return
    except:
        raise exc
    else:
        raise exc
RuntimeError
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/test/test_coercion.py/CoercionTest.test_infinite_rec_classic_classes
6,187
def handle(self, project_name=None, target=None, *args, **options):
    if project_name is None:
        raise CommandError("you must provide a project name")

    # Check that the project_name cannot be imported.
    try:
        import_module(project_name)
    except __HOLE__:
        pass
    else:
        raise CommandError("%r conflicts with the name of an existing "
                           "Python module and cannot be used as a "
                           "project name. Please try another name." % project_name)

    # Create a random SECRET_KEY hash to put it in the main settings.
    chars = 'abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)'
    options['secret_key'] = get_random_string(50, chars)

    super(Command, self).handle('project', project_name, target, **options)
ImportError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.4/django/core/management/commands/startproject.py/Command.handle
6,188
def __repr__(self):
    if isinstance(self.packet, str):
        try:
            return self.raw()
        except __HOLE__:
            return str(self.packet)
    else:
        return str(self.packet)
TypeError
dataset/ETHPy150Open kisom/pypcapfile/pcapfile/structs.py/pcap_packet.__repr__
6,189
def plot_2d_separator(classifier, X, fill=False, ax=None, eps=None):
    if eps is None:
        eps = X.std() / 2.
    x_min, x_max = X[:, 0].min() - eps, X[:, 0].max() + eps
    y_min, y_max = X[:, 1].min() - eps, X[:, 1].max() + eps
    xx = np.linspace(x_min, x_max, 100)
    yy = np.linspace(y_min, y_max, 100)

    X1, X2 = np.meshgrid(xx, yy)
    X_grid = np.c_[X1.ravel(), X2.ravel()]
    try:
        decision_values = classifier.decision_function(X_grid)
        levels = [0]
        fill_levels = [decision_values.min(), 0, decision_values.max()]
    except __HOLE__:
        # no decision_function
        decision_values = classifier.predict_proba(X_grid)[:, 1]
        levels = [.5]
        fill_levels = [0, .5, 1]

    if ax is None:
        ax = plt.gca()
    if fill:
        ax.contourf(X1, X2, decision_values.reshape(X1.shape),
                    levels=fill_levels, colors=['blue', 'red'])
    else:
        ax.contour(X1, X2, decision_values.reshape(X1.shape),
                   levels=levels, colors="black")
    ax.set_xlim(x_min, x_max)
    ax.set_ylim(y_min, y_max)
    ax.set_xticks(())
    ax.set_yticks(())
AttributeError
dataset/ETHPy150Open amueller/nyu_ml_lectures/plots/plot_2d_separator.py/plot_2d_separator
6,190
def process_response(self, request, response):
    """
    If request.secure_session was modified, or if the configuration is to
    save the session every time, save the changes and set a session cookie.
    """
    if not (request.is_secure() or settings.DEBUG_SECURE):
        return response

    try:
        accessed = request.secure_session.accessed
        modified = request.secure_session.modified
    except __HOLE__:
        pass
    else:
        if accessed:
            patch_vary_headers(response, ('Cookie',))
        if modified or settings.SESSION_SAVE_EVERY_REQUEST:
            if request.session.get_expire_at_browser_close():
                max_age = None
                expires = None
            else:
                max_age = request.secure_session.get_expiry_age()
                expires_time = time.time() + max_age
                expires = cookie_date(expires_time)
            # Save the session data and refresh the client cookie.
            request.secure_session.save()
            response.set_cookie('secure_session_id',
                                request.secure_session.session_key,
                                max_age=max_age,
                                expires=expires,
                                domain=settings.SESSION_COOKIE_DOMAIN,
                                path=settings.SESSION_COOKIE_PATH,
                                secure=not settings.DEBUG_SECURE)
    return response
AttributeError
dataset/ETHPy150Open mollyproject/mollyproject/molly/auth/middleware.py/SecureSessionMiddleware.process_response
6,191
def get(self, url):
    """Return the document contents pointed to by an HTTPS URL.

    If something goes wrong (404, timeout, etc.), raise ExpectedError.
    """
    try:
        return self._opener.open(url).read()
    except (__HOLE__, IOError) as exc:
        raise ExpectedError("Couldn't download %s." % url, exc)
HTTPError
dataset/ETHPy150Open letsencrypt/letsencrypt/letsencrypt-auto-source/pieces/fetch.py/HttpsGetter.get
6,192
def save(filename=None, family='ipv4'):
    '''
    Save the current in-memory rules to disk

    CLI Example:

    .. code-block:: bash

        salt '*' nftables.save /etc/nftables
    '''
    if _conf() and not filename:
        filename = _conf()

    nft_families = ['ip', 'ip6', 'arp', 'bridge']
    rules = "#! nft -f\n"
    for family in nft_families:
        out = get_rules(family)
        if out:
            rules += '\n'
        rules = rules + '\n'.join(out)
    rules = rules + '\n'

    try:
        with salt.utils.fopen(filename, 'w+') as _fh:
            # Write out any changes
            _fh.writelines(rules)
    except (IOError, __HOLE__) as exc:
        raise CommandExecutionError(
            'Problem writing to configuration file: {0}'.format(exc)
        )
    return rules
OSError
dataset/ETHPy150Open saltstack/salt/salt/modules/nftables.py/save
6,193
def getSRegNS(message):
    """Extract the simple registration namespace URI from the given
    OpenID message. Handles OpenID 1 and 2, as well as both sreg
    namespace URIs found in the wild, as well as missing namespace
    definitions (for OpenID 1)

    @param message: The OpenID message from which to parse simple
        registration fields. This may be a request or response message.
    @type message: C{L{openid.message.Message}}

    @returns: the sreg namespace URI for the supplied message. The
        message may be modified to define a simple registration
        namespace.
    @rtype: C{str}

    @raise ValueError: when using OpenID 1 if the message defines
        the 'sreg' alias to be something other than a simple
        registration type.
    """
    # See if there exists an alias for one of the two defined simple
    # registration types.
    for sreg_ns_uri in [ns_uri_1_1, ns_uri_1_0]:
        alias = message.namespaces.getAlias(sreg_ns_uri)
        if alias is not None:
            break
    else:
        # There is no alias for either of the types, so try to add
        # one. We default to using the modern value (1.1)
        sreg_ns_uri = ns_uri_1_1
        try:
            message.namespaces.addAlias(ns_uri_1_1, 'sreg')
        except __HOLE__, why:
            # An alias for the string 'sreg' already exists, but it's
            # defined for something other than simple registration
            raise SRegNamespaceError(why[0])

    # we know that sreg_ns_uri defined, because it's defined in the
    # else clause of the loop as well, so disable the warning
    return sreg_ns_uri #pylint:disable-msg=W0631
KeyError
dataset/ETHPy150Open CollabQ/CollabQ/openid/extensions/sreg.py/getSRegNS
6,194
def parseExtensionArgs(self, args, strict=False):
    """Parse the unqualified simple registration request
    parameters and add them to this object.

    This method is essentially the inverse of C{L{getExtensionArgs}}.
    This method restores the serialized simple registration request
    fields.

    If you are extracting arguments from a standard OpenID
    checkid_* request, you probably want to use C{L{fromOpenIDRequest}},
    which will extract the sreg namespace and arguments from the
    OpenID request. This method is intended for cases where the
    OpenID server needs more control over how the arguments are
    parsed than that method provides.

    >>> args = message.getArgs(ns_uri)
    >>> request.parseExtensionArgs(args)

    @param args: The unqualified simple registration arguments
    @type args: {str:str}

    @param strict: Whether requests with fields that are not
        defined in the simple registration specification should be
        tolerated (and ignored)
    @type strict: bool

    @returns: None; updates this object
    """
    for list_name in ['required', 'optional']:
        required = (list_name == 'required')
        items = args.get(list_name)
        if items:
            for field_name in items.split(','):
                try:
                    self.requestField(field_name, required, strict)
                except __HOLE__:
                    if strict:
                        raise

    self.policy_url = args.get('policy_url')
ValueError
dataset/ETHPy150Open CollabQ/CollabQ/openid/extensions/sreg.py/SRegRequest.parseExtensionArgs
6,195
def _cast(self, value):
    # Many status variables are integers or floats but SHOW GLOBAL STATUS
    # returns them as strings
    try:
        value = int(value)
    except ValueError:
        try:
            value = float(value)
        except __HOLE__:
            pass

    if value == 'ON':
        return True
    elif value == 'OFF':
        return False

    return value
ValueError
dataset/ETHPy150Open adamchainz/django-mysql/django_mysql/status.py/BaseStatus._cast
6,196
def __init__(self, **kwargs):
    try:
        self.message = self.msg_fmt % kwargs
    except __HOLE__:
        exc_info = sys.exc_info()
        log.exception(_('Exception in string format operation: %s')
                      % exc_info[1])

        if TOSCAException._FATAL_EXCEPTION_FORMAT_ERRORS:
            raise exc_info[0]
KeyError
dataset/ETHPy150Open openstack/tosca-parser/toscaparser/common/exception.py/TOSCAException.__init__
6,197
def __init__(self, name, schema_dict):
    self.name = name

    if not isinstance(schema_dict, collections.Mapping):
        msg = (_('Schema definition of "%(pname)s" must be a dict.')
               % dict(pname=name))
        ExceptionCollector.appendException(InvalidSchemaError(message=msg))

    try:
        schema_dict['type']
    except __HOLE__:
        msg = (_('Schema definition of "%(pname)s" must have a "type" '
                 'attribute.') % dict(pname=name))
        ExceptionCollector.appendException(InvalidSchemaError(message=msg))

    self.schema = schema_dict
    self._len = None
    self.constraints_list = []
KeyError
dataset/ETHPy150Open openstack/tosca-parser/toscaparser/elements/constraints.py/Schema.__init__
6,198
def __iter__(self):
    for k in self.KEYS:
        try:
            self.schema[k]
        except __HOLE__:
            pass
        else:
            yield k
KeyError
dataset/ETHPy150Open openstack/tosca-parser/toscaparser/elements/constraints.py/Schema.__iter__
6,199
def __splitTemplate(value, valueParams):
    """
    Split string into plus-expression(s)

    - patchParam: string node containing the placeholders
    - valueParams: list of params to inject
    """

    # Convert list with nodes into Python dict
    # [a, b, c] => {0:a, 1:b, 2:c}
    mapper = { pos: value for pos, value in enumerate(valueParams) }

    result = []
    splits = __replacer.split(value)
    if len(splits) == 1:
        return None

    pair = Node.Node(None, "plus")

    for entry in splits:
        if entry == "":
            continue

        if len(pair) == 2:
            newPair = Node.Node(None, "plus")
            newPair.append(pair)
            pair = newPair

        if __replacer.match(entry):
            pos = int(entry[1]) - 1

            # Items might be added multiple times. Copy to protect original.
            try:
                repl = mapper[pos]
            except __HOLE__:
                raise UserError("Invalid positional value: %s in %s" % (entry, value))

            copied = copy.deepcopy(mapper[pos])
            if copied.type not in ("identifier", "call"):
                copied.parenthesized = True
            pair.append(copied)
        else:
            child = Node.Node(None, "string")
            child.value = entry
            pair.append(child)

    return pair
KeyError
dataset/ETHPy150Open zynga/jasy/jasy/js/optimize/Translation.py/__splitTemplate