Unnamed: 0
int64
0
10k
function
stringlengths
79
138k
label
stringclasses
20 values
info
stringlengths
42
261
8,500
def get_instance_scenario(self, instance_id, tenant_id=None, verbose=False):
    '''Obtain the instance information, filtering by one or several of the tenant, uuid or name
    instance_id is the uuid or the name if it is not a valid uuid format
    Only one instance must mutch the filtering or an error is returned
    '''
    # NOTE(review): __HOLE__ is a masked exception name from the dataset record;
    # the record's label line says it should be AttributeError — fill before use.
    # NOTE(review): the WHERE/SELECT strings are built by concatenating caller
    # input (SQL-injection risk) — confirm inputs are trusted upstream.
    # Retried once on a timeout-class error reported by format_error().
    for retry_ in range(0, 2):
        try:
            with self.con:
                self.cur = self.con.cursor(mdb.cursors.DictCursor)
                #instance table
                where_list = []
                if tenant_id is not None:
                    where_list.append("inst.tenant_id='" + tenant_id + "'")
                # instance_id may be a uuid or a plain name.
                if af.check_valid_uuid(instance_id):
                    where_list.append("inst.uuid='" + instance_id + "'")
                else:
                    where_list.append("inst.name='" + instance_id + "'")
                where_text = " AND ".join(where_list)
                command = "SELECT inst.uuid as uuid,inst.name as name,inst.scenario_id as scenario_id, datacenter_id" +\
                    " ,datacenter_tenant_id, s.name as scenario_name,inst.tenant_id as tenant_id" + \
                    " ,inst.description as description,inst.created_at as created_at" +\
                    " FROM instance_scenarios as inst join scenarios as s on inst.scenario_id=s.uuid"+\
                    " WHERE " + where_text
                self.cur.execute(command)
                rows = self.cur.fetchall()
                # Exactly one instance must match the filter.
                if self.cur.rowcount == 0:
                    return -HTTP_Bad_Request, "No instance found with this criteria " + where_text
                elif self.cur.rowcount > 1:
                    return -HTTP_Bad_Request, "More than one instance found with this criteria " + where_text
                instance_dict = rows[0]
                #instance_vnfs
                cmd = "SELECT iv.uuid as uuid,sv.vnf_id as vnf_id,sv.name as vnf_name, sce_vnf_id, datacenter_id, datacenter_tenant_id"\
                    " FROM instance_vnfs as iv join sce_vnfs as sv on iv.sce_vnf_id=sv.uuid" \
                    " WHERE iv.instance_scenario_id='%s'" \
                    " ORDER BY iv.created_at " % instance_dict['uuid']
                self.cur.execute(cmd)
                instance_dict['vnfs'] = self.cur.fetchall()
                for vnf in instance_dict['vnfs']:
                    # Collect mgmt-interface IPs of all VMs in this vnf.
                    vnf_manage_iface_list = []
                    #instance vms
                    cmd = "SELECT iv.uuid as uuid, vim_vm_id, status, error_msg, vim_info, iv.created_at as created_at, name "\
                        " FROM instance_vms as iv join vms on iv.vm_id=vms.uuid "\
                        " WHERE instance_vnf_id='%s' ORDER BY iv.created_at" % vnf['uuid']
                    self.cur.execute(cmd)
                    vnf['vms'] = self.cur.fetchall()
                    for vm in vnf['vms']:
                        vm_manage_iface_list = []
                        #instance_interfaces
                        cmd = "SELECT vim_interface_id, instance_net_id, internal_name,external_name, mac_address, ip_address, vim_info, i.type as type "\
                            " FROM instance_interfaces as ii join interfaces as i on ii.interface_id=i.uuid "\
                            " WHERE instance_vm_id='%s' ORDER BY created_at" % vm['uuid']
                        self.cur.execute(cmd )
                        vm['interfaces'] = self.cur.fetchall()
                        for iface in vm['interfaces']:
                            # Only management interfaces with an IP contribute
                            # to the vm/vnf summary "ip_address" fields.
                            if iface["type"] == "mgmt" and iface["ip_address"]:
                                vnf_manage_iface_list.append(iface["ip_address"])
                                vm_manage_iface_list.append(iface["ip_address"])
                            if not verbose:
                                del iface["type"]
                        if vm_manage_iface_list:
                            vm["ip_address"] = ",".join(vm_manage_iface_list)
                    if vnf_manage_iface_list:
                        vnf["ip_address"] = ",".join(vnf_manage_iface_list)
                #instance_nets
                #select_text = "instance_nets.uuid as uuid,sce_nets.name as net_name,instance_nets.vim_net_id as net_id,instance_nets.status as status,instance_nets.external as external"
                #from_text = "instance_nets join instance_scenarios on instance_nets.instance_scenario_id=instance_scenarios.uuid " + \
                #   "join sce_nets on instance_scenarios.scenario_id=sce_nets.scenario_id"
                #where_text = "instance_nets.instance_scenario_id='"+ instance_dict['uuid'] + "'"
                cmd = "SELECT uuid,vim_net_id,status,error_msg,vim_info,external, sce_net_id, net_id as vnf_net_id, datacenter_id, datacenter_tenant_id"\
                    " FROM instance_nets" \
                    " WHERE instance_scenario_id='%s' ORDER BY created_at" % instance_dict['uuid']
                self.cur.execute(cmd)
                instance_dict['nets'] = self.cur.fetchall()
                af.convert_datetime2str(instance_dict)
                af.convert_str2boolean(instance_dict, ('public','shared','external') )
                return 1, instance_dict
        except (mdb.Error, __HOLE__), e:
            print "nfvo_db.get_instance_scenario DB Exception %d: %s" % (e.args[0], e.args[1])
            r, c = self.format_error(e)
            # Only retry once on a request timeout; otherwise propagate error.
            if r != -HTTP_Request_Timeout or retry_ == 1:
                return r, c
AttributeError
dataset/ETHPy150Open nfvlabs/openmano/openmano/nfvo_db.py/nfvo_db.get_instance_scenario
8,501
def delete_instance_scenario(self, instance_id, tenant_id=None):
    '''Deletes a instance_Scenario, filtering by one or serveral of the tenant, uuid or name
    instance_id is the uuid or the name if it is not a valid uuid format
    Only one instance_scenario must mutch the filtering or an error is returned
    '''
    # NOTE(review): __HOLE__ is a masked exception name from the dataset record;
    # the record's label line says it should be AttributeError — fill before use.
    for retry_ in range(0, 2):
        try:
            with self.con:
                self.cur = self.con.cursor(mdb.cursors.DictCursor)
                #instance table
                where_list = []
                if tenant_id is not None:
                    where_list.append("tenant_id='" + tenant_id + "'")
                # instance_id may be a uuid or a plain name.
                if af.check_valid_uuid(instance_id):
                    where_list.append("uuid='" + instance_id + "'")
                else:
                    where_list.append("name='" + instance_id + "'")
                where_text = " AND ".join(where_list)
                self.cur.execute("SELECT * FROM instance_scenarios WHERE " + where_text)
                rows = self.cur.fetchall()
                # Exactly one row must match before the delete is performed.
                if self.cur.rowcount == 0:
                    return -HTTP_Bad_Request, "No instance scenario found with this criteria " + where_text
                elif self.cur.rowcount > 1:
                    return -HTTP_Bad_Request, "More than one instance scenario found with this criteria " + where_text
                instance_uuid = rows[0]["uuid"]
                instance_name = rows[0]["name"]
                #sce_vnfs
                self.cur.execute("DELETE FROM instance_scenarios WHERE uuid='" + instance_uuid + "'")
                return 1, instance_uuid + " " + instance_name
        except (mdb.Error, __HOLE__), e:
            print "nfvo_db.delete_instance_scenario DB Exception %d: %s" % (e.args[0], e.args[1])
            r, c = self.format_error(e, "delete", "No dependences can avoid deleting!!!!")
            # Only retry once on a request timeout; otherwise propagate error.
            if r != -HTTP_Request_Timeout or retry_ == 1:
                return r, c
AttributeError
dataset/ETHPy150Open nfvlabs/openmano/openmano/nfvo_db.py/nfvo_db.delete_instance_scenario
8,502
def update_datacenter_nets(self, datacenter_id, new_net_list=[]):
    ''' Removes the old and adds the new net list at datacenter list for one datacenter.
    Attribute
        datacenter_id: uuid of the datacenter to act upon
        table: table where to insert
        new_net_list: the new values to be inserted. If empty it only deletes the existing nets
    Return: (Inserted items, Deleted items) if OK, (-Error, text) if error
    '''
    # NOTE(review): __HOLE__ is a masked exception name from the dataset record;
    # the record's label line says it should be AttributeError — fill before use.
    # NOTE(review): mutable default argument new_net_list=[] is safe here only
    # because the list is never mutated — confirm before refactoring.
    for retry_ in range(0, 2):
        created_time = time.time()
        try:
            with self.con:
                self.cur = self.con.cursor()
                cmd = "DELETE FROM datacenter_nets WHERE datacenter_id='%s'" % datacenter_id
                print cmd
                self.cur.execute(cmd)
                deleted = self.cur.rowcount
                for new_net in new_net_list:
                    # Bump the timestamp slightly so inserted rows keep a
                    # stable, distinct creation order.
                    created_time += 0.00001
                    self._new_row_internal('datacenter_nets', new_net, tenant_id=None, add_uuid=True, created_time=created_time)
                return len (new_net_list), deleted
        except (mdb.Error, __HOLE__), e:
            print "nfvo_db.update_datacenter_nets DB Exception %d: %s" % (e.args[0], e.args[1])
            r, c = self.format_error(e)
            # Only retry once on a request timeout; otherwise propagate error.
            if r != -HTTP_Request_Timeout or retry_ == 1:
                return r, c
AttributeError
dataset/ETHPy150Open nfvlabs/openmano/openmano/nfvo_db.py/nfvo_db.update_datacenter_nets
8,503
def CreateTemporaryDirectories():
    """Creates the temporary sub-directories needed by the current run.

    Idempotent: a directory that already exists is not an error. Any other
    makedirs failure (permissions, a regular file in the way) is re-raised.
    """
    for path in (GetRunDirPath(), GetVersionDirPath()):
        try:
            os.makedirs(path)
        except OSError:
            # Either the directory already exists (possibly created by a
            # concurrent run) or creation genuinely failed; only re-raise
            # when the path is still not a directory.
            if not os.path.isdir(path):
                raise
OSError
dataset/ETHPy150Open GoogleCloudPlatform/PerfKitBenchmarker/perfkitbenchmarker/temp_dir.py/CreateTemporaryDirectories
8,504
def register_middleware(self, middleware_instance):
    """Register a middleware object and wire up every hook it implements.

    For each known hook name in ``self._hooks``, look for a matching
    attribute on the middleware; when absent, fall back to a dict-style
    ``middleware_instance.get(hook, None)`` lookup. Objects exposing
    neither the attribute nor a ``get`` method simply contribute nothing
    for that hook.

    Returns the number of hook functions registered.
    """
    registered_count = 0
    self._middlewares.append(middleware_instance)
    for hook in self._hooks.keys():
        functor = getattr(middleware_instance, hook, None)
        if functor is None:
            try:
                functor = middleware_instance.get(hook, None)
            except AttributeError:
                # Middleware has no get() method either; skip this hook.
                pass
        if functor is not None:
            self._hooks[hook].append(functor)
            registered_count += 1
    return registered_count

#
# client/server
#
AttributeError
dataset/ETHPy150Open 0rpc/zerorpc-python/zerorpc/context.py/Context.register_middleware
8,505
@ValidateLarchPlugin
def read_gsexdi(fname, _larch=None, nmca=4, bad=None, **kws):
    """Read GSE XDI Scan Data to larch group, summing ROI data for MCAs and apply deadtime corrections
    """
    # NOTE(review): __HOLE__ is a masked exception name from the dataset record;
    # the record's label line says it should be AttributeError — fill before use.
    group = _larch.symtable.create_group()
    group.__name__ ='GSE XDI Data file %s' % fname
    xdi = XDIFile(str(fname))
    group._xdi = xdi
    group.filename = fname
    group.npts = xdi.npts
    group.bad_channels = bad
    group.dtc_taus = XSPRESS3_TAUS
    # Per-site tau override, if configured in the larch symbol table.
    if _larch.symtable.has_symbol('_sys.gsecars.xspress3_taus'):
        group.dtc_taus = _larch.symtable._sys.gsecars.xspress3_taus
    # Copy scan/mono/facility header attributes onto the group,
    # stripping any "||addr" suffix and coercing numbers to float.
    for family in ('scan', 'mono', 'facility'):
        for key, val in xdi.attrs.get(family, {}).items():
            if '||' in val:
                val, addr = val.split('||')
            try:
                val = float(val)
            except:
                pass
            setattr(group, "%s_%s" % (family, key), val)
    ocrs, icrs = [], []
    try:
        ctime = xdi.CountTime
    except __HOLE__:
        # No CountTime column: derive count time from the 50 MHz scaler.
        ctime = xdi.TSCALER / 5.e7
    is_xspress3 = any(['13QX4' in a[1] for a in xdi.attrs['column'].items()])
    group.with_xspress3 = is_xspress3
    # Build per-MCA output/input count rates for deadtime correction.
    for i in range(nmca):
        ocr = getattr(xdi, 'OutputCounts_mca%i' % (i+1), None)
        if ocr is None:
            ocr = ctime
        ocr = ocr/ctime
        icr = getattr(xdi, 'InputCounts_mca%i' % (i+1), None)
        if icr is not None:
            icr = icr/ctime
        else:
            icr = 1.0*ocr
            # NOTE(review): nesting of this xspress3 branch reconstructed from
            # the collapsed source — confirm against upstream xraylarch.
            if is_xspress3:
                tau = group.dtc_taus[i]
                icr = estimate_icr(ocr*1.00, tau, niter=7)
        ocrs.append(ocr)
        icrs.append(icr)
    labels = []
    sums = OrderedDict()
    # Deadtime-correct each per-MCA array and accumulate per-ROI sums;
    # the uncorrected data is kept under a "<name>_nodtc" alias.
    for i, arrname in enumerate(xdi.array_labels):
        dat = getattr(xdi, arrname)
        aname = sumname = rawname = arrname.lower()
        if ('_mca' in aname and 'outputcounts' not in aname and 'clock' not in aname):
            sumname, imca = sumname.split('_mca')
            imca = int(imca) - 1
            datraw = dat*1.0
            rawname = sumname + '_nodtc'
            dat = dat * icrs[imca]/ ocrs[imca]
            # Replace NaNs introduced by the correction with raw values.
            if any(np.isnan(dat)):
                nan_pts = np.where(np.isnan(dat))[0]
                dat[nan_pts] = datraw[nan_pts]
        setattr(group, aname, dat)
        if sumname not in labels:
            labels.append(sumname)
            sums[sumname] = dat
            if rawname != sumname:
                sums[rawname] = datraw
                if rawname not in labels:
                    labels.append(rawname)
        else:
            sums[sumname] = sums[sumname] + dat
            if rawname != sumname:
                sums[rawname] = sums[rawname] + datraw
    # Publish the summed arrays that do not collide with existing attrs.
    for name, dat in sums.items():
        if not hasattr(group, name):
            setattr(group, name, dat)
    for arrname in xdi.array_labels:
        sname = arrname.lower()
        if sname not in labels:
            labels.append(sname)
    for imca in range(nmca):
        setattr(group, 'ocr_mca%i' % (imca+1), ocrs[imca])
        setattr(group, 'icr_mca%i' % (imca+1), icrs[imca])
    group.array_labels = labels
    return group
AttributeError
dataset/ETHPy150Open xraypy/xraylarch/plugins/io/gse_xdiscan.py/read_gsexdi
8,506
@internationalizeDocstring
def todo(self, irc, msg, args, user, taskid):
    """[<username>] [<task id>]

    Retrieves a task for the given task id. If no task id is given, it will return a list of task ids that that user has added to their todo list.
    """
    # NOTE(review): __HOLE__ is a masked exception name from the dataset record;
    # the record's label line says it should be KeyError — fill before use.
    try:
        u = ircdb.users.getUser(msg.prefix)
    except __HOLE__:
        # Caller is not a registered user.
        u = None
    if u != user and not self.registryValue('allowThirdpartyReader'):
        irc.error(_('You are not allowed to see other users todo-list.'))
        return
    # List the active tasks for the given user
    if not taskid:
        try:
            tasks = self.db.getTodos(user.id)
            utils.sortBy(operator.attrgetter('priority'), tasks)
            tasks = [format(_('#%i: %s'), t.id, self._shrink(t.task)) for t in tasks]
            Todo = 'Todo'
            if len(tasks) != 1:
                Todo = 'Todos'
            irc.reply(format(_('%s for %s: %L'), Todo, user.name, tasks))
        except dbi.NoRecordError:
            if u != user:
                irc.reply(_('That user has no tasks in their todo list.'))
            else:
                irc.reply(_('You have no tasks in your todo list.'))
        return
    # Reply with the user's task
    else:
        try:
            t = self.db.get(user.id, taskid)
            if t.active:
                active = _('Active')
            else:
                active = _('Inactive')
            if t.priority:
                t.task += format(_(', priority: %i'), t.priority)
            at = time.strftime(conf.supybot.reply.format.time(), time.localtime(t.at))
            s = format(_('%s todo for %s: %s (Added at %s)'), active, user.name, t.task, at)
            irc.reply(s)
        except dbi.NoRecordError:
            irc.errorInvalid(_('task id'), taskid)
KeyError
dataset/ETHPy150Open ProgVal/Limnoria/plugins/Todo/plugin.py/Todo.todo
8,507
def get_delete_permission(opts):
    """Return the '<app_label>.<codename>' delete permission for *opts*.

    Uses Django >= 1.6's ``get_permission_codename`` helper when it is
    importable, and falls back to the pre-1.6
    ``opts.get_delete_permission()`` API otherwise.
    """
    try:
        from django.contrib.auth import get_permission_codename  # flake8: noqa
        return '%s.%s' % (opts.app_label, get_permission_codename('delete', opts))
    except ImportError:
        # Older Django: the codename helper does not exist yet.
        return '%s.%s' % (opts.app_label, opts.get_delete_permission())
ImportError
dataset/ETHPy150Open divio/django-filer/filer/utils/compatibility.py/get_delete_permission
8,508
def optimize(self, optimizer=None, start=None, **kwargs):
    # NOTE(review): __HOLE__ is a masked exception name from the dataset record;
    # the record's label line says it should be ValueError — fill before use.
    # Rank 0 runs the real optimization; other ranks wait in a broadcast
    # loop: flag==1 means "re-evaluate with broadcast parameters",
    # flag==-1 means "optimization finished".
    self._IN_OPTIMIZATION_ = True
    if self.mpi_comm==None:
        # No MPI: plain single-process optimization.
        super(SparseGP_MPI, self).optimize(optimizer,start,**kwargs)
    elif self.mpi_comm.rank==0:
        super(SparseGP_MPI, self).optimize(optimizer,start,**kwargs)
        # Tell the worker ranks to leave their synchronization loop.
        self.mpi_comm.Bcast(np.int32(-1),root=0)
    elif self.mpi_comm.rank>0:
        x = self.optimizer_array.copy()
        flag = np.empty(1,dtype=np.int32)
        while True:
            self.mpi_comm.Bcast(flag,root=0)
            if flag==1:
                try:
                    self.optimizer_array = x
                    self._fail_count = 0
                except (LinAlgError, ZeroDivisionError, __HOLE__):
                    # Tolerate a bounded number of numerical failures.
                    if self._fail_count >= self._allowed_failures:
                        raise
                    self._fail_count += 1
            elif flag==-1:
                break
            else:
                self._IN_OPTIMIZATION_ = False
                raise Exception("Unrecognizable flag for synchronization!")
    self._IN_OPTIMIZATION_ = False
ValueError
dataset/ETHPy150Open SheffieldML/GPy/GPy/core/sparse_gp_mpi.py/SparseGP_MPI.optimize
8,509
def __init__(self, initializer=None, age=None, payload=None, **kwarg):
    """Build a GrrMessage, optionally embedding *payload*.

    Args:
      initializer: forwarded to the base constructor.
      age: forwarded to the base constructor.
      payload: optional payload value; when it carries a ``priority``
        attribute, the message inherits that priority.
      **kwarg: forwarded to the base constructor.
    """
    super(GrrMessage, self).__init__(initializer=initializer, age=age, **kwarg)
    if payload is not None:
        self.payload = payload
        # If the payload has a priority, the GrrMessage inherits it.
        try:
            self.priority = payload.priority
        except AttributeError:
            # Payload type does not define a priority; keep the default.
            pass
    # Lazily assign a task id so every message is addressable.
    if not self.task_id:
        self.task_id = self.GenerateTaskID()
AttributeError
dataset/ETHPy150Open google/grr/grr/lib/rdfvalues/flows.py/GrrMessage.__init__
8,510
def __init__(self, layers, batch_size=None, input_space=None,
             input_source='features', target_source='targets',
             nvis=None, seed=None, layer_name=None,
             monitor_targets=True, **kwargs):
    # NOTE(review): __HOLE__ is a masked exception name from the dataset record;
    # the record's label line says it should be ValueError — fill before use.
    super(MLP, self).__init__(**kwargs)
    self.seed = seed
    assert isinstance(layers, list)
    assert all(isinstance(layer, Layer) for layer in layers)
    assert len(layers) >= 1
    self.layer_name = layer_name
    self.layer_names = set()
    # Attach each layer to this MLP and enforce unique layer names.
    for layer in layers:
        assert layer.get_mlp() is None
        if layer.layer_name in self.layer_names:
            raise ValueError("MLP.__init__ given two or more layers "
                             "with same name: " + layer.layer_name)
        layer.set_mlp(self)
        self.layer_names.add(layer.layer_name)
    self.layers = layers
    self.batch_size = batch_size
    self.force_batch_size = batch_size
    self._input_source = input_source
    self._target_source = target_source
    self.monitor_targets = monitor_targets
    # An outer (non-nested) MLP is identified by having an explicit
    # input_space or nvis; a nested MLP gets its spaces from its parent.
    if input_space is not None or nvis is not None:
        self._nested = False
        self.setup_rng()
        # check if the layer_name is None (the MLP is the outer MLP)
        assert layer_name is None
        if nvis is not None:
            input_space = VectorSpace(nvis)
        # Check whether the input_space and input_source structures match
        try:
            DataSpecsMapping((input_space, input_source))
        except __HOLE__:
            raise ValueError("The structures of `input_space`, %s, and "
                             "`input_source`, %s do not match. If you "
                             "specified a CompositeSpace as an input, "
                             "be sure to specify the data sources as well."
                             % (input_space, input_source))
        self.input_space = input_space
        self._update_layer_input_spaces()
    else:
        self._nested = True
    self.freeze_set = set([])
ValueError
dataset/ETHPy150Open lisa-lab/pylearn2/pylearn2/models/mlp.py/MLP.__init__
8,511
def assert_rel_error(test_case, actual, desired, tolerance):
    """
    Determine that the relative error between `actual` and `desired`
    is within `tolerance`. If `desired` is zero, then use absolute error.

    test_case: :class:`unittest.TestCase`
        TestCase instance used for assertions.

    actual: float
        The value from the test.

    desired: float
        The value expected.

    tolerance: float
        Maximum relative error ``(actual - desired) / desired``.
    """
    # Probe for indexability to decide scalar vs. sequence comparison.
    try:
        actual[0]
    except (TypeError, IndexError):
        # Scalar case.
        if isnan(actual) and not isnan(desired):
            test_case.fail('actual nan, desired %s, rel error nan, tolerance %s'
                           % (desired, tolerance))
        if desired != 0:
            error = (actual - desired) / desired
        else:
            # Absolute error when the expected value is zero.
            error = actual
        if abs(error) > tolerance:
            test_case.fail('actual %s, desired %s, rel error %s, tolerance %s'
                           % (actual, desired, error, tolerance))
    else:
        # Sequence case: compare element-wise.
        for i, (act, des) in enumerate(zip(actual, desired)):
            if isnan(act) and not isnan(des):
                test_case.fail('at %d: actual nan, desired %s, rel error nan,'
                               ' tolerance %s' % (i, des, tolerance))
            if des != 0:
                error = (act - des) / des
            else:
                error = act
            if abs(error) > tolerance:
                test_case.fail('at %d: actual %s, desired %s, rel error %s,'
                               ' tolerance %s' % (i, act, des, error, tolerance))
IndexError
dataset/ETHPy150Open OpenMDAO/OpenMDAO-Framework/openmdao.util/src/openmdao/util/testutil.py/assert_rel_error
8,512
def from_content_disposition(self, content_disposition):
    """Guess the Response class from a Content-Disposition header.

    Extracts the filename from a ``...; filename="name.ext"`` style
    header and delegates to ``from_filename``. Headers without that
    shape fall back to the generic ``Response``.
    """
    try:
        filename = to_native_str(content_disposition,
                                 encoding='latin-1', errors='replace').split(';')[1].split('=')[1]
        filename = filename.strip('"\'')
        return self.from_filename(filename)
    except IndexError:
        # Header had no "; filename=..." part.
        return Response
IndexError
dataset/ETHPy150Open scrapy/scrapy/scrapy/responsetypes.py/ResponseTypes.from_content_disposition
8,513
def getdoc(object):
    """Get the documentation string for an object.

    All tabs are expanded to spaces. To clean up docstrings that are
    indented to line up with blocks of code, any whitespace than can be
    uniformly removed from the second line onwards is removed."""
    try:
        doc = object.__doc__
    except AttributeError:
        # Object has no __doc__ attribute at all.
        return None
    # Only real string docstrings are cleaned; anything else yields None.
    if not isinstance(doc, types.StringTypes):
        return None
    return cleandoc(doc)
AttributeError
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/inspect.py/getdoc
8,514
def getmodule(object, _filename=None):
    """Return the module an object was defined in, or None if not found."""
    # NOTE(review): __HOLE__ is a masked exception name from the dataset record;
    # the record's label line says it should be TypeError — fill before use.
    if ismodule(object):
        return object
    if hasattr(object, '__module__'):
        return sys.modules.get(object.__module__)
    # Try the filename to modulename cache
    if _filename is not None and _filename in modulesbyfile:
        return sys.modules.get(modulesbyfile[_filename])
    # Try the cache again with the absolute file name
    try:
        file = getabsfile(object, _filename)
    except __HOLE__:
        return None
    if file in modulesbyfile:
        return sys.modules.get(modulesbyfile[file])
    # Update the filename to module name cache and check yet again
    # Copy sys.modules in order to cope with changes while iterating
    for modname, module in sys.modules.items():
        if ismodule(module) and hasattr(module, '__file__'):
            f = module.__file__
            if f == _filesbymodname.get(modname, None):
                # Have already mapped this module, so skip it
                continue
            _filesbymodname[modname] = f
            f = getabsfile(module)
            # Always map to the name the module knows itself by
            modulesbyfile[f] = modulesbyfile[
                os.path.realpath(f)] = module.__name__
    if file in modulesbyfile:
        return sys.modules.get(modulesbyfile[file])
    # Check the main module
    main = sys.modules['__main__']
    if not hasattr(object, '__name__'):
        return None
    if hasattr(main, object.__name__):
        mainobject = getattr(main, object.__name__)
        if mainobject is object:
            return main
    # Check builtins
    builtin = sys.modules['__builtin__']
    if hasattr(builtin, object.__name__):
        builtinobject = getattr(builtin, object.__name__)
        if builtinobject is object:
            return builtin
TypeError
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/inspect.py/getmodule
8,515
def getcomments(object):
    """Get lines of comments immediately preceding an object's source code.

    Returns None when source can't be found.
    """
    # NOTE(review): __HOLE__ is a masked exception name from the dataset record;
    # the record's label line says it should be TypeError — fill before use.
    try:
        lines, lnum = findsource(object)
    except (IOError, __HOLE__):
        return None
    if ismodule(object):
        # Look for a comment block at the top of the file.
        start = 0
        if lines and lines[0][:2] == '#!': start = 1
        while start < len(lines) and string.strip(lines[start]) in ('', '#'):
            start = start + 1
        if start < len(lines) and lines[start][:1] == '#':
            comments = []
            end = start
            while end < len(lines) and lines[end][:1] == '#':
                comments.append(string.expandtabs(lines[end]))
                end = end + 1
            return string.join(comments, '')
    # Look for a preceding block of comments at the same indentation.
    elif lnum > 0:
        indent = indentsize(lines[lnum])
        end = lnum - 1
        if end >= 0 and string.lstrip(lines[end])[:1] == '#' and \
            indentsize(lines[end]) == indent:
            comments = [string.lstrip(string.expandtabs(lines[end]))]
            if end > 0:
                end = end - 1
                comment = string.lstrip(string.expandtabs(lines[end]))
                # Walk upward collecting comment lines at the same indent.
                while comment[:1] == '#' and indentsize(lines[end]) == indent:
                    comments[:0] = [comment]
                    end = end - 1
                    if end < 0: break
                    comment = string.lstrip(string.expandtabs(lines[end]))
            # Trim bare '#' lines from both ends of the collected block.
            while comments and string.strip(comments[0]) == '#':
                comments[:1] = []
            while comments and string.strip(comments[-1]) == '#':
                comments[-1:] = []
            return string.join(comments, '')
TypeError
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/inspect.py/getcomments
8,516
def getframeinfo(frame, context=1):
    """Get information about a frame or traceback object.

    A tuple of five things is returned: the filename, the line number of
    the current line, the function name, a list of lines of context from
    the source code, and the index of the current line within that list.
    The optional second argument specifies the number of lines of context
    to return, which are centered around the current line."""
    if istraceback(frame):
        lineno = frame.tb_lineno
        frame = frame.tb_frame
    else:
        lineno = frame.f_lineno
    if not isframe(frame):
        raise TypeError('arg is not a frame or traceback object')
    filename = getsourcefile(frame) or getfile(frame)
    if context > 0:
        start = lineno - 1 - context//2
        try:
            lines, lnum = findsource(frame)
        except IOError:
            # Source file unavailable: no context can be provided.
            lines = index = None
        else:
            # Clamp the context window to the bounds of the file.
            start = max(start, 1)
            start = max(0, min(start, len(lines) - context))
            lines = lines[start:start+context]
            index = lineno - 1 - start
    else:
        lines = index = None
    return Traceback(filename, lineno, frame.f_code.co_name, lines, index)
IOError
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/inspect.py/getframeinfo
8,517
def DeleteBlob(self, blob_key):
    """Delete blob content.

    Deleting a key that is not stored is a no-op, so deletion is
    idempotent.
    """
    try:
        del self._blobs[blobstore.BlobKey(unicode(blob_key))]
    except KeyError:
        # Blob already absent - nothing to do.
        pass
KeyError
dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/api/blobstore/dict_blob_storage.py/DictBlobStorage.DeleteBlob
8,518
def run(self):
    """Run the py.test suite in a subprocess, exiting with its status.

    Raises a plain Exception when the ``py`` package (pytest) is not
    installed, and SystemExit with the subprocess return code otherwise.
    """
    import sys, subprocess
    try:
        # Imported only to verify pytest is available before spawning.
        from py import test as pytest
    except ImportError:
        raise Exception('Running tests requires pytest.')
    errno = subprocess.call([sys.executable, '-m', 'py.test'])
    raise SystemExit(errno)
ImportError
dataset/ETHPy150Open wickman/pystachio/setup.py/PyTest.run
8,519
def __setitem__(self, key, value):
    """Set the value of the header card *key*, creating the card if needed.

    Returns the Bunch representing the card.
    """
    try:
        bnch = super(Header, self).__getitem__(key)
        bnch.value = value
    except KeyError:
        # New key: create a card with an empty comment and remember
        # insertion order in keyorder.
        bnch = Bunch.Bunch(key=key, value=value, comment='')
        self.keyorder.append(key)
        super(Header, self).__setitem__(key, bnch)
    return bnch
KeyError
dataset/ETHPy150Open ejeschke/ginga/ginga/BaseImage.py/Header.__setitem__
8,520
def set_card(self, key, value, comment=None):
    """Set the value (and optionally comment) of the header card *key*.

    Creates the card if it does not exist; an existing card's comment is
    only replaced when *comment* is given. Returns the card's Bunch.
    """
    try:
        bnch = super(Header, self).__getitem__(key)
        bnch.value = value
        if not (comment is None):
            bnch.comment = comment
    except KeyError:
        # New key: create the card and remember insertion order.
        if comment is None:
            comment = ''
        bnch = Bunch.Bunch(key=key, value=value, comment=comment)
        self.keyorder.append(key)
        super(Header, self).__setitem__(key, bnch)
    return bnch
KeyError
dataset/ETHPy150Open ejeschke/ginga/ginga/BaseImage.py/Header.set_card
8,521
def get(self, key, alt=None):
    """Return the card for *key*, or *alt* when the key is absent."""
    try:
        return self.__getitem__(key)
    except KeyError:
        return alt
KeyError
dataset/ETHPy150Open ejeschke/ginga/ginga/BaseImage.py/Header.get
8,522
def execute(self, args):
    # pylint: disable=unpacking-non-sequence
    # NOTE(review): __HOLE__ is a masked exception name from the dataset record;
    # the record's label line says it should be OSError — fill before use.
    # Renders the named extension's documentation, paging it when it is
    # taller than the terminal and a pager is configured.
    ext_loader = ExtensionLoader(packages=settings.extension_packages,
                                 paths=settings.extension_paths)
    extension = ext_loader.get_extension_class(args.name)
    out = StringIO()
    term_width, term_height = get_terminal_size()
    format_extension(extension, out, term_width)
    text = out.getvalue()
    pager = get_pager()
    if len(text.split('\n')) > term_height and pager:
        try:
            sp = subprocess.Popen(pager, stdin=subprocess.PIPE)
            sp.communicate(text)
        except __HOLE__:
            # Pager could not be spawned: fall back to plain stdout.
            self.logger.warning('Could not use PAGER "{}"'.format(pager))
            sys.stdout.write(text)
    else:
        sys.stdout.write(text)
OSError
dataset/ETHPy150Open ARM-software/workload-automation/wlauto/commands/show.py/ShowCommand.execute
8,523
def createActor(self, newActorClass, targetActorRequirements, globalName, sourceHash = None):
    # NOTE(review): __HOLE__ is a masked exception name from the dataset record;
    # the record's label line says it should be ImportError — fill before use.
    # Allocates the child's address up-front; creation is attempted locally
    # first and delegated to the Admin when that is not possible.
    naa = self._addrManager.createLocalAddress()
    if getattr(self, '_exiting', False):
        return naa
    if not globalName:
        try:
            self._startChildActor(naa, newActorClass, self.myAddress,
                                  notifyAddr = self.myAddress,
                                  childRequirements = targetActorRequirements,
                                  sourceHash = sourceHash or self._sourceHash)
            # transport will contrive to call _pendingActorReady when the
            # child is initialized and connected to this parent.
            return naa
        except NoCompatibleSystemForActor:
            pass
        except __HOLE__:
            # hash source may not be available locally
            pass
    # Cannot create the actor directly, so ask the Admin for help
    actorClassName = '%s.%s'%(newActorClass.__module__, newActorClass.__name__) if hasattr(newActorClass, '__name__') else newActorClass
    self._send_intent(
        TransmitIntent(self._adminAddr,
                       PendingActor(actorClassName,
                                    self.transport.myAddress,
                                    naa.addressDetails.addressInstanceNum,
                                    targetActorRequirements,
                                    globalName=globalName,
                                    sourceHash=sourceHash or self._sourceHash)))
    return naa
ImportError
dataset/ETHPy150Open godaddy/Thespian/thespian/system/actorManager.py/ActorManager.createActor
8,524
def manual_run():
    # NOTE(review): __HOLE__ is a masked exception name from the dataset record;
    # the record's label line says it should be AssertionError — fill before use.
    # Runs the whole W3C RDFa suite, counting assertion failures separately
    # from unexpected errors, and prints a summary at the end.
    errors, failed, count = 0, 0, 0
    for test, in all_tests(skip_known_issues=False):
        count += 1
        print(test.description)
        try:
            test()
            print "PASSED"
        except __HOLE__, e:
            # Expected-style test failure (assertion did not hold).
            failed += 1
            print "****FAILED****", e;
        except Exception, e:
            # Anything else is an error in the test machinery itself.
            errors += 1;
            print "****ERROR**** in %s" % test._source_urls, e
    print "Ran %(count)s tests. Failed: %(failed)s. Errors: %(errors)s."%vars()
AssertionError
dataset/ETHPy150Open RDFLib/rdflib/test/rdfa/run_w3c_rdfa_testsuite.py/manual_run
8,525
def parseColor(self, str): """ Handle a single ANSI color sequence """ # Drop the trailing 'm' str = str[:-1] if not str: str = '0' try: parts = map(int, str.split(';')) except __HOLE__: log.msg('Invalid ANSI color sequence (%d): %s' % (len(str), str)) self.currentFG, self.currentBG = self.defaultFG, self.defaultBG return for x in parts: if x == 0: self.currentFG, self.currentBG = self.defaultFG, self.defaultBG self.bold, self.flash, self.underline, self.reverse = 0, 0, 0, 0 self.display = 1 elif x == 1: self.bold = 1 elif 30 <= x <= 37: self.currentFG = x - 30 elif 40 <= x <= 47: self.currentBG = x - 40 elif x == 39: self.currentFG = self.defaultFG elif x == 49: self.currentBG = self.defaultBG elif x == 4: self.underline = 1 elif x == 5: self.flash = 1 elif x == 7: self.reverse = 1 elif x == 8: self.display = 0 elif x == 22: self.bold = 0 elif x == 24: self.underline = 0 elif x == 25: self.blink = 0 elif x == 27: self.reverse = 0 elif x == 28: self.display = 1 else: log.msg('Unrecognised ANSI color command: %d' % (x,))
ValueError
dataset/ETHPy150Open kuri65536/python-for-android/python-modules/twisted/twisted/conch/ui/ansi.py/AnsiParser.parseColor
8,526
def delegated(func):
    """A delegated method raises AttributeError in the absence of backend support.

    Decorator: a KeyError raised by the wrapped call (a missing backend
    delegate) is translated into ImplementationError naming the method
    and the registered implementation.
    """
    @wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except KeyError:
            raise ImplementationError(
                "Method '%s' not provided by registered "
                "implementation '%s'" % (func.__name__, args[0].impl))
    return wrapper

# Map geometry methods to their GEOS delegates
KeyError
dataset/ETHPy150Open Toblerity/Shapely/shapely/impl.py/delegated
8,527
def __getitem__(self, key):
    """Look up the delegate registered for *key*.

    Raises ImplementationError when the registered implementation does
    not provide the requested method.
    """
    try:
        return self.map[key]
    except KeyError:
        raise ImplementationError(
            "Method '%s' not provided by registered "
            "implementation '%s'" % (key, self.map))
KeyError
dataset/ETHPy150Open Toblerity/Shapely/shapely/impl.py/BaseImpl.__getitem__
8,528
@app.route("/length/<int:response_bytes>")
def fix_length_response(response_bytes):
    """Return *response_bytes* random bytes, caching each size.

    The first request for a given size generates the payload with
    ``urandom`` and memoizes it in ``response_cache``; later requests for
    the same size are served from the cache. Sizes below 1 are rejected.
    """
    if response_bytes < 1:
        raise Exception("Forbidded response length: {0}".format(response_bytes))
    try:
        response = response_cache[response_bytes]
        return response
    except KeyError:
        # First request for this size: generate and cache it.
        response = urandom(response_bytes)
        response_cache[response_bytes] = response
        return response
KeyError
dataset/ETHPy150Open svanoort/python-client-benchmarks/app.py/fix_length_response
8,529
@classmethod
def setupClass(cls):
    """Bind NumPy testing helpers as module globals, or skip the class.

    Raises SkipTest when NumPy is not importable so the whole test class
    is skipped.
    """
    global numpy
    global assert_equal
    global assert_almost_equal
    try:
        import numpy
        from numpy.testing import assert_equal, assert_almost_equal
    except ImportError:
        raise SkipTest('NumPy not available.')
ImportError
dataset/ETHPy150Open gkno/gkno_launcher/src/networkx/linalg/tests/test_graphmatrix.py/TestGraphMatrix.setupClass
8,530
def setAttributeNS(self, namespaceURI, localName, value): ''' Keyword arguments: namespaceURI -- namespace of attribute to create, None is for attributes in no namespace. localName -- local name of new attribute value -- value of new attribute ''' prefix = None if namespaceURI: try: prefix = self.getPrefix(namespaceURI) except __HOLE__, ex: prefix = 'ns2' self.setNamespaceAttribute(prefix, namespaceURI) qualifiedName = localName if prefix: qualifiedName = '%s:%s' %(prefix, localName) self._setAttributeNS(namespaceURI, qualifiedName, value)
KeyError
dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/api/SOAPpy/wstools/Utility.py/ElementProxy.setAttributeNS
8,531
def parse_exclude_devices(exclude_list):
    """Parse Exclude devices list

    parses excluded device list in the form:
    dev_name:pci_dev_1;pci_dev_2

    @param exclude_list: list of string pairs in "key:value" format
        the key part represents the network device name
        the value part is a list of PCI slots separated by ";"
    @return: dict mapping each device name to its set of excluded PCI slots
    @raise ValueError: on a malformed, empty-key or duplicate-key mapping
    """
    exclude_mapping = {}
    for dev_mapping in exclude_list:
        try:
            dev_name, exclude_devices = dev_mapping.split(":", 1)
        except ValueError:
            # No ':' separator at all.
            raise ValueError(_("Invalid mapping: '%s'") % dev_mapping)
        dev_name = dev_name.strip()
        if not dev_name:
            raise ValueError(_("Missing key in mapping: '%s'") % dev_mapping)
        if dev_name in exclude_mapping:
            raise ValueError(_("Device %(dev_name)s in mapping: %(mapping)s "
                               "not unique") % {'dev_name': dev_name,
                                                'mapping': dev_mapping})
        # Collect the non-empty, whitespace-trimmed PCI slot entries.
        exclude_mapping[dev_name] = {
            dev.strip() for dev in exclude_devices.split(";") if dev.strip()}
    return exclude_mapping
ValueError
dataset/ETHPy150Open openstack/neutron/neutron/plugins/ml2/drivers/mech_sriov/agent/common/config.py/parse_exclude_devices
8,532
def handle_errors(callback, parsed=None, out=sys.stderr):
    """Execute the callback, optionally passing it parsed, and return its
    return value.  If an exception occurs, determine which kind it is,
    output an appropriate message, and return the corresponding error
    code."""
    # NOTE(review): __HOLE__ is a masked exception name from the dataset record;
    # the record's label line says it should be SystemExit — fill before use
    # (the bare pass lets a deliberate exit propagate no error code).
    try:
        if parsed:
            return callback(parsed)
        else:
            return callback()
    except DeploymentError as e:
        traceback.print_exc(file=out)
        print(e, file=out)
        # An sftp-client timeout is reported distinctly from other
        # deployment failures.
        if hasattr(e, 'value') and hasattr(e.value, 'args') and len(e.value.args) > 0 and \
           'open_sftp_client' in e.value.args[0]:
            print('Timeout', file=out)
            return TIMEOUT
        return DEPLOYMENT_ERROR
    except MalformedResponseError as e:
        traceback.print_exc(file=out)
        print(e, file=out)
        if 'Service Unavailable' in e.body:
            return SERVICE_UNAVAILABLE
        return MALFORMED_RESPONSE
    except __HOLE__:
        pass
    except:
        # Catch-all: print the traceback and report a generic error code.
        traceback.print_exc(file=out)
        return EXCEPTION
SystemExit
dataset/ETHPy150Open quaddra/provision/provision/config.py/handle_errors
8,533
def import_by_path(path):
    """Append the path to sys.path, then attempt to import module with
    path's basename, finally making certain to remove appended path.

    Returns the imported module, or None (after logging a warning) when
    the import fails.

    http://stackoverflow.com/questions/1096216/override-namespace-in-python"""
    sys.path.append(os.path.dirname(path))
    try:
        return __import__(os.path.basename(path))
    except ImportError:
        logger.warn('unable to import {0}'.format(path))
    finally:
        # Always undo the sys.path mutation, even on success.
        del sys.path[-1]
ImportError
dataset/ETHPy150Open quaddra/provision/provision/config.py/import_by_path
8,534
def assert_nodes_equal(nodes1, nodes2):
    """Assert that two node iterables are equal, ignoring order.

    Assumes iterables of nodes, or (node, datadict) tuples: the tuple
    form also compares the attached data dicts; plain nodes (which are
    not key/value pairs) are compared as membership only.
    """
    nlist1 = list(nodes1)
    nlist2 = list(nodes2)
    try:
        d1 = dict(nlist1)
        d2 = dict(nlist2)
    except (ValueError, TypeError):
        # Plain node lists, not (node, data) pairs: compare keys only.
        d1 = dict.fromkeys(nlist1)
        d2 = dict.fromkeys(nlist2)
    assert_equal(d1, d2)
TypeError
dataset/ETHPy150Open networkx/networkx/networkx/testing/utils.py/assert_nodes_equal
8,535
def test_keyFlattening(self): """ Test that L{KeyFlattener.flatKey} returns the expected keys for format fields. """ def keyFromFormat(format): for ( literalText, fieldName, formatSpec, conversion, ) in aFormatter.parse(format): return KeyFlattener().flatKey( fieldName, formatSpec, conversion ) # No name try: self.assertEqual(keyFromFormat("{}"), "!:") except __HOLE__: if sys.version_info[:2] == (2, 6): # In python 2.6, an empty field name causes Formatter.parse to # raise ValueError. pass else: # In Python 2.7, it's allowed, so this exception is unexpected. raise # Just a name self.assertEqual(keyFromFormat("{foo}"), "foo!:") # Add conversion self.assertEqual(keyFromFormat("{foo!s}"), "foo!s:") self.assertEqual(keyFromFormat("{foo!r}"), "foo!r:") # Add format spec self.assertEqual(keyFromFormat("{foo:%s}"), "foo!:%s") self.assertEqual(keyFromFormat("{foo:!}"), "foo!:!") self.assertEqual(keyFromFormat("{foo::}"), "foo!::") # Both self.assertEqual(keyFromFormat("{foo!s:%s}"), "foo!s:%s") self.assertEqual(keyFromFormat("{foo!s:!}"), "foo!s:!") self.assertEqual(keyFromFormat("{foo!s::}"), "foo!s::") [keyPlusLiteral] = aFormatter.parse("{x}") key = keyPlusLiteral[1:] sameFlattener = KeyFlattener() self.assertEqual(sameFlattener.flatKey(*key), "x!:") self.assertEqual(sameFlattener.flatKey(*key), "x!:/2")
ValueError
dataset/ETHPy150Open twisted/twisted/twisted/logger/test/test_flatten.py/FlatFormattingTests.test_keyFlattening
8,536
def save(self): self._run_callbacks('before_save') fields_dict = self.fields.as_dict() try: # Attempt update id_ = fields_dict['id'] result = (r.table(self._table).get(id_).replace(r.row .without(r.row.keys().difference(list(fields_dict.keys()))) .merge(fields_dict), return_changes='always').run()) except __HOLE__: # Resort to insert result = (r.table(self._table).insert(fields_dict, return_changes=True) .run()) if result['errors'] > 0: raise OperationError(result['first_error']) # Force overwrite so that related caches are flushed self.fields.__dict__ = result['changes'][0]['new_val'] self._run_callbacks('after_save')
KeyError
dataset/ETHPy150Open linkyndy/remodel/remodel/models.py/Model.save
8,537
def delete(self): self._run_callbacks('before_delete') try: id_ = getattr(self.fields, 'id') result = r.table(self._table).get(id_).delete().run() except __HOLE__: raise OperationError('Cannot delete %r (object not saved or ' 'already deleted)' % self) if result['errors'] > 0: raise OperationError(result['first_error']) delattr(self.fields, 'id') # Remove any reference to the deleted object for field in self.fields.related: delattr(self.fields, field) self._run_callbacks('after_delete') # TODO: Get rid of this nasty decorator after renaming .get() on ObjectHandler
AttributeError
dataset/ETHPy150Open linkyndy/remodel/remodel/models.py/Model.delete
8,538
@dispatch_to_metaclass def get(self, key, default=None): try: return getattr(self.fields, key) except __HOLE__: return default
AttributeError
dataset/ETHPy150Open linkyndy/remodel/remodel/models.py/Model.get
8,539
def __getitem__(self, key): try: return getattr(self.fields, key) except __HOLE__: raise KeyError(key)
AttributeError
dataset/ETHPy150Open linkyndy/remodel/remodel/models.py/Model.__getitem__
8,540
def __setitem__(self, key, value): try: setattr(self.fields, key, value) except __HOLE__: raise KeyError(key)
AttributeError
dataset/ETHPy150Open linkyndy/remodel/remodel/models.py/Model.__setitem__
8,541
def __delitem__(self, key): try: delattr(self.fields, key) except __HOLE__: raise KeyError(key)
AttributeError
dataset/ETHPy150Open linkyndy/remodel/remodel/models.py/Model.__delitem__
8,542
def __repr__(self): try: id_ = self.fields.id except __HOLE__: id_ = 'not saved' return '<%s: %s>' % (self.__class__.__name__, id_)
AttributeError
dataset/ETHPy150Open linkyndy/remodel/remodel/models.py/Model.__repr__
8,543
def get_option_list(self): option_list = super(CatCommand, self).get_option_list() try: __import__('pygments') option = make_option('-f', '--formatter', action='store', dest='formatter_name', default='terminal', help='Pygments specific formatter name.', ) option_list += (option,) except __HOLE__: pass return option_list
ImportError
dataset/ETHPy150Open codeinn/vcs/vcs/commands/cat.py/CatCommand.get_option_list
8,544
def send(self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None): """Sends PreparedRequest object. Returns Response object. :param request: The :class:`PreparedRequest <PreparedRequest>` being sent. :param stream: (optional) Whether to stream the request content. :param timeout: (optional) How long to wait for the server to send data before giving up, as a float, or a :ref:`(connect timeout, read timeout) <timeouts>` tuple. :type timeout: float or tuple :param verify: (optional) Whether to verify SSL certificates. :param cert: (optional) Any user-provided SSL certificate to be trusted. :param proxies: (optional) The proxies dictionary to apply to the request. """ conn = self.get_connection(request.url, proxies) self.cert_verify(conn, request.url, verify, cert) url = self.request_url(request, proxies) self.add_headers(request) chunked = not (request.body is None or 'Content-Length' in request.headers) if isinstance(timeout, tuple): try: connect, read = timeout timeout = TimeoutSauce(connect=connect, read=read) except ValueError as e: # this may raise a string formatting error. err = ("Invalid timeout {0}. Pass a (connect, read) " "timeout tuple, or a single float to set " "both timeouts to the same value".format(timeout)) raise ValueError(err) else: timeout = TimeoutSauce(connect=timeout, read=timeout) try: if not chunked: resp = conn.urlopen( method=request.method, url=url, body=request.body, headers=request.headers, redirect=False, assert_same_host=False, preload_content=False, decode_content=False, retries=self.max_retries, timeout=timeout ) # Send the request. 
else: if hasattr(conn, 'proxy_pool'): conn = conn.proxy_pool low_conn = conn._get_conn(timeout=DEFAULT_POOL_TIMEOUT) try: low_conn.putrequest(request.method, url, skip_accept_encoding=True) for header, value in request.headers.items(): low_conn.putheader(header, value) low_conn.endheaders() for i in request.body: low_conn.send(hex(len(i))[2:].encode('utf-8')) low_conn.send(b'\r\n') low_conn.send(i) low_conn.send(b'\r\n') low_conn.send(b'0\r\n\r\n') # Receive the response from the server try: # For Python 2.7+ versions, use buffering of HTTP # responses r = conn.getresponse(buffering=True) except __HOLE__: # For compatibility with Python 2.6 versions and back r = conn.getresponse() resp = HTTPResponse.from_httplib( r, pool=conn, connection=low_conn, preload_content=False, decode_content=False ) except: # If we hit any problems here, clean up the connection. # Then, reraise so that we can handle the actual exception. low_conn.close() raise except (ProtocolError, socket.error) as err: raise ConnectionError(err, request=request) except MaxRetryError as e: if isinstance(e.reason, ConnectTimeoutError): # TODO: Remove this in 3.0.0: see #2811 if not isinstance(e.reason, NewConnectionError): raise ConnectTimeout(e, request=request) if isinstance(e.reason, ResponseError): raise RetryError(e, request=request) raise ConnectionError(e, request=request) except ClosedPoolError as e: raise ConnectionError(e, request=request) except _ProxyError as e: raise ProxyError(e) except (_SSLError, _HTTPError) as e: if isinstance(e, _SSLError): raise SSLError(e, request=request) elif isinstance(e, ReadTimeoutError): raise ReadTimeout(e, request=request) else: raise return self.build_response(request, resp)
TypeError
dataset/ETHPy150Open BergWerkGIS/QGIS-CKAN-Browser/CKAN-Browser/request/adapters.py/HTTPAdapter.send
8,545
def solve_univariate_inequality(expr, gen, relational=True): """Solves a real univariate inequality. Examples ======== >>> from sympy.solvers.inequalities import solve_univariate_inequality >>> from sympy.core.symbol import Symbol >>> x = Symbol('x') >>> solve_univariate_inequality(x**2 >= 4, x) Or(And(-oo < x, x <= -2), And(2 <= x, x < oo)) >>> solve_univariate_inequality(x**2 >= 4, x, relational=False) (-oo, -2] U [2, oo) """ from sympy.solvers.solvers import solve, denoms # This keeps the function independent of the assumptions about `gen`. # `solveset` makes sure this function is called only when the domain is # real. d = Dummy(real=True) expr = expr.subs(gen, d) _gen = gen gen = d if expr is S.true: rv = S.Reals elif expr is S.false: rv = S.EmptySet else: e = expr.lhs - expr.rhs parts = n, d = e.as_numer_denom() if all(i.is_polynomial(gen) for i in parts): solns = solve(n, gen, check=False) singularities = solve(d, gen, check=False) else: solns = solve(e, gen, check=False) singularities = [] for d in denoms(e): singularities.extend(solve(d, gen)) include_x = expr.func(0, 0) def valid(x): v = e.subs(gen, x) try: r = expr.func(v, 0) except TypeError: r = S.false if r in (S.true, S.false): return r if v.is_real is False: return S.false else: v = v.n(2) if v.is_comparable: return expr.func(v, 0) return S.false start = S.NegativeInfinity sol_sets = [S.EmptySet] try: reals = _nsort(set(solns + singularities), separated=True)[0] except __HOLE__: raise NotImplementedError('sorting of these roots is not supported') for x in reals: end = x if end in [S.NegativeInfinity, S.Infinity]: if valid(S(0)): sol_sets.append(Interval(start, S.Infinity, True, True)) break pt = ((start + end)/2 if start is not S.NegativeInfinity else (end/2 if end.is_positive else (2*end if end.is_negative else end - 1))) if valid(pt): sol_sets.append(Interval(start, end, True, True)) if x in singularities: singularities.remove(x) elif include_x: sol_sets.append(FiniteSet(x)) start = end end = 
S.Infinity # in case start == -oo then there were no solutions so we just # check a point between -oo and oo (e.g. 0) else pick a point # past the last solution (which is start after the end of the # for-loop above pt = (0 if start is S.NegativeInfinity else (start/2 if start.is_negative else (2*start if start.is_positive else start + 1))) if valid(pt): sol_sets.append(Interval(start, end, True, True)) rv = Union(*sol_sets).subs(gen, _gen) return rv if not relational else rv.as_relational(_gen)
NotImplementedError
dataset/ETHPy150Open sympy/sympy/sympy/solvers/inequalities.py/solve_univariate_inequality
8,546
def _solve_inequality(ie, s): """ A hacky replacement for solve, since the latter only works for univariate inequalities. """ expr = ie.lhs - ie.rhs try: p = Poly(expr, s) if p.degree() != 1: raise NotImplementedError except (PolynomialError, __HOLE__): try: return reduce_rational_inequalities([[ie]], s) except PolynomialError: return solve_univariate_inequality(ie, s) a, b = p.all_coeffs() if a.is_positive or ie.rel_op in ('!=', '=='): return ie.func(s, -b/a) elif a.is_negative: return ie.reversed.func(s, -b/a) else: raise NotImplementedError
NotImplementedError
dataset/ETHPy150Open sympy/sympy/sympy/solvers/inequalities.py/_solve_inequality
8,547
def import_string(import_name, silent=False): """Imports an object based on a string. If *silent* is True the return value will be None if the import fails. Simplified version of the function with same name from `Werkzeug`_. :param import_name: The dotted name for the object to import. :param silent: If True, import errors are ignored and None is returned instead. :returns: The imported object. """ import_name = str(import_name) try: if '.' in import_name: module, obj = import_name.rsplit('.', 1) return getattr(__import__(module, None, None, [obj]), obj) else: return __import__(import_name) except (__HOLE__, AttributeError): if not silent: raise
ImportError
dataset/ETHPy150Open numan/py-analytics/analytics/utils.py/import_string
8,548
def load_models_csv(self,filepath, model_no = None): """ Load predictions from an individual sub model into a dataframe stored in the sub_models list, if no model_no is given then load data into next available index. Valid source is a CSV file. """ try: if model_no == None: model_no = len(self.sub_models) self.sub_models.append(data_io.load_flatfile_to_df(filepath, delimiter='')) else: self.sub_models[model_no]=data_io.load_flatfile_to_df(filepath, delimiter='') utils.info('Model loaded into index %s' % str(model_no)) except __HOLE__: raise Exception('Model number does not exist. Model number given, %s, is out of index range.' % str(model_no))
IndexError
dataset/ETHPy150Open theusual/kaggle-seeclickfix-ensemble/Bryan/ensembles.py/EnsembleAvg.load_models_csv
8,549
def sort_dataframes(self,sortcolumn): """ Sort all data frame attributes of class by a given column for ease of comparison. """ try: for i in range(len(self.sub_models)): self.sub_models[i] = self.sub_models[i].sort(sortcolumn) if 'df_true' in dir(self): self.df_true = self.df_true.sort(sortcolumn) if 'df_true_segment' in dir(self): self.df_true_segment = self.df_true_segment.sort(sortcolumn) except __HOLE__: raise Exception('Sort failed. Column %s not found in all dataframes.' % (sortcolumn))
KeyError
dataset/ETHPy150Open theusual/kaggle-seeclickfix-ensemble/Bryan/ensembles.py/EnsembleAvg.sort_dataframes
8,550
def _assertEqualsAndSerialize(self, expected, kranges): results = [] while True: try: results.append(kranges.next()) kranges = kranges.__class__.from_json(kranges.to_json()) except __HOLE__: break self.assertRaises(StopIteration, kranges.next) expected.sort() results.sort() self.assertEquals(expected, results)
StopIteration
dataset/ETHPy150Open GoogleCloudPlatform/appengine-mapreduce/python/test/mapreduce/key_ranges_test.py/KeyRangesTest._assertEqualsAndSerialize
8,551
def parse_url(url): """Parses the supplied Redis URL and returns a dict with the parsed/split data. For ambiguous URLs like redis://localhost and redis://my_socket_file this function will prioritize network URLs over socket URLs. redis://my_socket_file will be interpreted as a network URL, with the Redis server having a hostname of 'my_socket_file'. Use the file:// or redis+socket:// (Celery compatibility) URL scheme to force socket URL interpretations over network URLs. Positional arguments: url -- URL to redis server. Examples are in this file's Redis class docstring under REDIS_URL. Returns: Dictionary with parsed data, compatible with StrictRedis.__init__() keyword arguments. Raises: ValueError -- if the supplies URL was malformed or invalid. """ # Parse URL, make sure string is valid. try: split = urlsplit(url.rstrip('/')) except (__HOLE__, TypeError) as e: raise ValueError('Malformed URL specified: {0}'.format(e)) if split.scheme not in ['redis+socket', 'redis', 'file']: raise ValueError('Malformed URL specified.') scheme = split.scheme netloc = split.netloc hostname = split.hostname path = split.path password = split.password try: port = split.port except ValueError: port = None # Stupid urlsplit bug on Windows. # urlsplit sucks on Windows, work around this. if os.name == 'nt' and not path and '\\' in netloc: if '@' in netloc: position = netloc.find('@') + 1 path = netloc[position:] netloc = netloc[:position] else: path = netloc netloc = '' # Handle non-socket URLs. if scheme == 'redis' and netloc and not netloc.endswith('.') and not netloc.endswith('@'): result = dict(host=hostname) if password: result['password'] = password if port: result['port'] = port if path: if not path[1:].isdigit(): raise ValueError('Network URL path has non-digit characters: {0}'.format(path[1:])) result['db'] = int(path[1:]) return result # Handle socket URLs. 
if port: raise ValueError('Socket URL looks like non-socket URL.') if not password: socket_path = '{0}{1}'.format(netloc, path) elif netloc.endswith('.'): socket_path = '{0}{1}'.format(netloc.split('@')[1], path) elif not path: socket_path = netloc.split('@')[1] else: socket_path = path # Catch bad paths. parent_dir = os.path.split(socket_path)[0] if parent_dir and not os.path.isdir(parent_dir): raise ValueError("Unix socket path's parent not a dir: {0}".format(parent_dir)) # Finish up. result = dict(unix_socket_path=socket_path) if password: result['password'] = password return result
AttributeError
dataset/ETHPy150Open Robpol86/Flask-Redis-Helper/flask_redis.py/parse_url
8,552
def forwards(self, orm): using_mysql = db.backend_name == 'mysql' db.rename_table('easy_thumbnails_storagenew', 'easy_thumbnails_storage') if using_mysql: try: db.drop_foreign_key('easy_thumbnails_source', 'storage_new_id') except __HOLE__: e = sys.exc_info()[1] # Python 2.5 compatable "as e" # e.g MyISAM tables don't support foreign key constraints print("Could not remove foreign key contraint: %s" % e) db.rename_column('easy_thumbnails_source', 'storage_new_id', 'storage_id') if using_mysql: try: db.execute('ALTER TABLE easy_thumbnails_source ADD CONSTRAINT ' 'sourcestorage_id_fk_to_storage FOREIGN KEY (storage_id) ' 'REFERENCES easy_thumbnails_storage(id)') except Exception: e = sys.exc_info()[1] # Python 2.5 compatable "as e" print("Could not add contraint: %s" % e) if using_mysql: try: db.drop_foreign_key('easy_thumbnails_thumbnail', 'storage_new_id') except ValueError: e = sys.exc_info()[1] # Python 2.5 compatable "as e" # e.g MyISAM tables don't support foreign key constraints print("Could not remove foreign key contraint: %s" % e) db.rename_column('easy_thumbnails_thumbnail', 'storage_new_id', 'storage_id') if using_mysql: try: db.execute('ALTER TABLE easy_thumbnails_thumbnail ADD CONSTRAINT ' 'thumbnailstorage_id_fk_to_storage FOREIGN KEY (storage_id) ' 'REFERENCES easy_thumbnails_storage(id)') except Exception: e = sys.exc_info()[1] # Python 2.5 compatable "as e" print("Could not add contraint: %s" % e)
ValueError
dataset/ETHPy150Open SmileyChris/easy-thumbnails/easy_thumbnails/south_migrations/0010_rename_storage.py/Migration.forwards
8,553
def __eq__(self, other): try: return (self._ip == other._ip and self._version == other._version) except __HOLE__: return NotImplemented
AttributeError
dataset/ETHPy150Open anandology/pyjamas/pyjs/src/pyjs/lib/ipaddr.py/_BaseIP.__eq__
8,554
def __eq__(self, other): try: return (self._version == other._version and self.network == other.network and int(self.netmask) == int(other.netmask)) except __HOLE__: if isinstance(other, _BaseIP): return (self._version == other._version and self._ip == other._ip)
AttributeError
dataset/ETHPy150Open anandology/pyjamas/pyjs/src/pyjs/lib/ipaddr.py/_BaseNet.__eq__
8,555
def _ip_int_from_string(self, ip_str): """Turn the given IP string into an integer for comparison. Args: ip_str: A string, the IP ip_str. Returns: The IP ip_str as an integer. Raises: AddressValueError: if the string isn't a valid IP string. """ packed_ip = 0 octets = ip_str.split('.') if len(octets) != 4: raise AddressValueError(ip_str) for oc in octets: try: packed_ip = (packed_ip << 8) | int(oc) except __HOLE__: raise AddressValueError(ip_str) return packed_ip
ValueError
dataset/ETHPy150Open anandology/pyjamas/pyjs/src/pyjs/lib/ipaddr.py/_BaseV4._ip_int_from_string
8,556
def _is_valid_ip(self, address): """Validate the dotted decimal notation IP/netmask string. Args: address: A string, either representing a quad-dotted ip or an integer which is a valid IPv4 IP address. Returns: A boolean, True if the string is a valid dotted decimal IP string. """ octets = address.split('.') if len(octets) == 1: # We have an integer rather than a dotted decimal IP. try: return int(address) >= 0 and int(address) <= self._ALL_ONES except ValueError: return False if len(octets) != 4: return False for octet in octets: try: if not 0 <= int(octet) <= 255: return False except __HOLE__: return False return True
ValueError
dataset/ETHPy150Open anandology/pyjamas/pyjs/src/pyjs/lib/ipaddr.py/_BaseV4._is_valid_ip
8,557
def _is_hostmask(self, ip_str): """Test if the IP string is a hostmask (rather than a netmask). Args: ip_str: A string, the potential hostmask. Returns: A boolean, True if the IP string is a hostmask. """ bits = ip_str.split('.') try: parts = [int(x) for x in bits if int(x) in self._valid_mask_octets] except __HOLE__: return False if len(parts) != len(bits): return False if parts[0] < parts[-1]: return True return False
ValueError
dataset/ETHPy150Open anandology/pyjamas/pyjs/src/pyjs/lib/ipaddr.py/IPv4Network._is_hostmask
8,558
def _is_valid_netmask(self, netmask): """Verify that the netmask is valid. Args: netmask: A string, either a prefix or dotted decimal netmask. Returns: A boolean, True if the prefix represents a valid IPv4 netmask. """ mask = netmask.split('.') if len(mask) == 4: if [x for x in mask if int(x) not in self._valid_mask_octets]: return False if [y for idx, y in enumerate(mask) if idx > 0 and y > mask[idx - 1]]: return False return True try: netmask = int(netmask) except __HOLE__: return False return 0 <= netmask <= self._max_prefixlen # backwards compatibility
ValueError
dataset/ETHPy150Open anandology/pyjamas/pyjs/src/pyjs/lib/ipaddr.py/IPv4Network._is_valid_netmask
8,559
def _ip_int_from_string(self, ip_str=None): """Turn an IPv6 ip_str into an integer. Args: ip_str: A string, the IPv6 ip_str. Returns: A long, the IPv6 ip_str. Raises: AddressValueError: if ip_str isn't a valid IP Address. """ if not ip_str: ip_str = str(self.ip) ip_int = 0 # Do we have an IPv4 mapped (::ffff:a.b.c.d) or compact (::a.b.c.d) # ip_str? fields = ip_str.split(':') if fields[-1].count('.') == 3: ipv4_string = fields.pop() ipv4_int = IPv4Network(ipv4_string)._ip octets = [] for _ in xrange(2): octets.append(hex(ipv4_int & 0xFFFF).lstrip('0x').rstrip('L')) ipv4_int >>= 16 fields.extend(reversed(octets)) ip_str = ':'.join(fields) fields = self._explode_shorthand_ip_string(ip_str).split(':') for field in fields: try: ip_int = (ip_int << 16) + int(field or '0', 16) except __HOLE__: raise AddressValueError(ip_str) return ip_int
ValueError
dataset/ETHPy150Open anandology/pyjamas/pyjs/src/pyjs/lib/ipaddr.py/_BaseV6._ip_int_from_string
8,560
def _is_valid_ip(self, ip_str): """Ensure we have a valid IPv6 address. Probably not as exhaustive as it should be. Args: ip_str: A string, the IPv6 address. Returns: A boolean, True if this is a valid IPv6 address. """ # We need to have at least one ':'. if ':' not in ip_str: return False # We can only have one '::' shortener. if ip_str.count('::') > 1: return False # '::' should be encompassed by start, digits or end. if ':::' in ip_str: return False # A single colon can neither start nor end an address. if ((ip_str.startswith(':') and not ip_str.startswith('::')) or (ip_str.endswith(':') and not ip_str.endswith('::'))): return False # If we have no concatenation, we need to have 8 fields with 7 ':'. if '::' not in ip_str and ip_str.count(':') != 7: # We might have an IPv4 mapped address. if ip_str.count('.') != 3: return False ip_str = self._explode_shorthand_ip_string(ip_str) # Now that we have that all squared away, let's check that each of the # hextets are between 0x0 and 0xFFFF. for hextet in ip_str.split(':'): if hextet.count('.') == 3: # If we have an IPv4 mapped address, the IPv4 portion has to # be at the end of the IPv6 portion. if not ip_str.split(':')[-1] == hextet: return False try: IPv4Network(hextet) except AddressValueError: return False else: try: # a value error here means that we got a bad hextet, # something like 0xzzzz if int(hextet, 16) < 0x0 or int(hextet, 16) > 0xFFFF: return False except __HOLE__: return False return True
ValueError
dataset/ETHPy150Open anandology/pyjamas/pyjs/src/pyjs/lib/ipaddr.py/_BaseV6._is_valid_ip
8,561
def _is_valid_netmask(self, prefixlen): """Verify that the netmask/prefixlen is valid. Args: prefixlen: A string, the netmask in prefix length format. Returns: A boolean, True if the prefix represents a valid IPv6 netmask. """ try: prefixlen = int(prefixlen) except __HOLE__: return False return 0 <= prefixlen <= self._max_prefixlen
ValueError
dataset/ETHPy150Open anandology/pyjamas/pyjs/src/pyjs/lib/ipaddr.py/IPv6Network._is_valid_netmask
8,562
def Decompress(self, compressed_data): """Decompresses the compressed data. Args: compressed_data: a byte string containing the compressed data. Returns: A tuple containing a byte string of the uncompressed data and the remaining compressed data. Raises: BackEndError: if the XZ compressed stream cannot be decompressed. """ try: # Note that we cannot use max_length=0 here due to different # versions of the lzma code. uncompressed_data = self._lzma_decompressor.decompress( compressed_data, 0) remaining_compressed_data = getattr( self._lzma_decompressor, u'unused_data', b'') except (EOFError, __HOLE__, LZMAError) as exception: raise errors.BackEndError(( u'Unable to decompress XZ compressed stream with error: ' u'{0!s}.').format(exception)) return uncompressed_data, remaining_compressed_data
IOError
dataset/ETHPy150Open log2timeline/dfvfs/dfvfs/compression/xz_decompressor.py/XZDecompressor.Decompress
8,563
def test_low_sample_count(self): uni = Uniform() uni.num_samples = 1 try: for case in uni: pass except __HOLE__ as err: self.assertEqual(str(err),"Uniform distributions " "must have at least 2 samples. " "num_samples is set to less than 2.")
ValueError
dataset/ETHPy150Open OpenMDAO/OpenMDAO-Framework/openmdao.lib/src/openmdao/lib/doegenerators/test/test_uniform.py/TestCase.test_low_sample_count
8,564
def parsedate_tz(data): """Convert a date string to a time tuple. Accounts for military timezones. """ data = data.split() # The FWS after the comma after the day-of-week is optional, so search and # adjust for this. if data[0].endswith(',') or data[0].lower() in _daynames: # There's a dayname here. Skip it del data[0] else: i = data[0].rfind(',') if i >= 0: data[0] = data[0][i+1:] if len(data) == 3: # RFC 850 date, deprecated stuff = data[0].split('-') if len(stuff) == 3: data = stuff + data[1:] if len(data) == 4: s = data[3] i = s.find('+') if i > 0: data[3:] = [s[:i], s[i+1:]] else: data.append('') # Dummy tz if len(data) < 5: return None data = data[:5] [dd, mm, yy, tm, tz] = data mm = mm.lower() if mm not in _monthnames: dd, mm = mm, dd.lower() if mm not in _monthnames: return None mm = _monthnames.index(mm) + 1 if mm > 12: mm -= 12 if dd[-1] == ',': dd = dd[:-1] i = yy.find(':') if i > 0: yy, tm = tm, yy if yy[-1] == ',': yy = yy[:-1] if not yy[0].isdigit(): yy, tz = tz, yy if tm[-1] == ',': tm = tm[:-1] tm = tm.split(':') if len(tm) == 2: [thh, tmm] = tm tss = '0' elif len(tm) == 3: [thh, tmm, tss] = tm else: return None try: yy = int(yy) dd = int(dd) thh = int(thh) tmm = int(tmm) tss = int(tss) except ValueError: return None tzoffset = None tz = tz.upper() if tz in _timezones: tzoffset = _timezones[tz] else: try: tzoffset = int(tz) except __HOLE__: pass # Convert a timezone offset into seconds ; -0500 -> -18000 if tzoffset: if tzoffset < 0: tzsign = -1 tzoffset = -tzoffset else: tzsign = 1 tzoffset = tzsign * ( (tzoffset//100)*3600 + (tzoffset % 100)*60) # Daylight Saving Time flag is set to -1, since DST is unknown. return yy, mm, dd, thh, tmm, tss, 0, 1, -1, tzoffset
ValueError
dataset/ETHPy150Open deanhiller/databus/webapp/play1.3.x/python/Lib/email/_parseaddr.py/parsedate_tz
8,565
def connectionLost(self, reason): """Close both ends of my pipe. """ if not hasattr(self, "o"): return for fd in self.i, self.o: try: os.close(fd) except __HOLE__: pass del self.i, self.o
IOError
dataset/ETHPy150Open kuri65536/python-for-android/python-modules/twisted/twisted/internet/posixbase.py/_FDWaker.connectionLost
8,566
def wakeUp(self): """Write one byte to the pipe, and flush it. """ # We don't use fdesc.writeToFD since we need to distinguish # between EINTR (try again) and EAGAIN (do nothing). if self.o is not None: try: util.untilConcludes(os.write, self.o, 'x') except __HOLE__, e: # XXX There is no unit test for raising the exception # for other errnos. See #4285. if e.errno != errno.EAGAIN: raise
OSError
dataset/ETHPy150Open kuri65536/python-for-android/python-modules/twisted/twisted/internet/posixbase.py/_UnixWaker.wakeUp
8,567
@contextmanager def _when_exits_zero(call_if_ok): """ Calls the function passed in if the SystemExit has a zero exit code. """ try: yield except __HOLE__ as se: if getattr(se, 'code', None) == 0: # Update the tracking branch to reference current HEAD call_if_ok() raise
SystemExit
dataset/ETHPy150Open robmadole/jig/src/jig/commands/ci.py/_when_exits_zero
8,568
@imm_action('set flag', target_class='args', prep='on', obj_msg_class="flags", self_object=True) def add_flag(source, target, obj, **_): try: flag_id = target[0] except __HOLE__: raise ActionError("Flag id required.") try: flag_value = target[1] except IndexError: flag_value = 'None' try: flag_value = str_to_primitive(flag_value) except ValueError: raise ActionError("Cannot parse {}".format(flag_value)) obj.flags[flag_id] = flag_value source.display_line("Flag {} set to {} on {}.".format(flag_id, flag_value, obj.name))
IndexError
dataset/ETHPy150Open genzgd/Lampost-Mud/lampost/mud/immortal.py/add_flag
8,569
@imm_action('clear flag', target_class='args', prep='from', obj_msg_class="flags", self_object=True) def add_flag(source, target, obj, **_): try: flag_id = target[0] except IndexError: raise ActionError("Flag id required.") try: old_value = obj.flags.pop(flag_id) except __HOLE__: raise ActionError("Flag {} not set.".format(flag_id)) source.display_line("Flag {} ({}) cleared {}.".format(flag_id, old_value, obj.name))
KeyError
dataset/ETHPy150Open genzgd/Lampost-Mud/lampost/mud/immortal.py/add_flag
8,570
@imm_action('patch', '__dict__', imm_level='supreme', prep=":", obj_target_class="args") def patch(target, verb, args, command, **_): try: split_ix = args.index(":") prop = args[split_ix + 1] new_value = find_extra(verb, split_ix + 2, command) except (ValueError, __HOLE__): return "Syntax -- 'patch [target] [:] [prop_name] [new_value]'" if not new_value: return "New value required" if new_value == "None": new_value = None patch_object(target, prop, new_value) return "Object successfully patched"
IndexError
dataset/ETHPy150Open genzgd/Lampost-Mud/lampost/mud/immortal.py/patch
8,571
@imm_action('run update', imm_level='supreme') def run_update(source, args, **_): if not args: return "Update name required." try: return lampost.setup.update.__dict__[args[0]](source, *args[1:]) except __HOLE__: return "No such update."
KeyError
dataset/ETHPy150Open genzgd/Lampost-Mud/lampost/mud/immortal.py/run_update
8,572
@imm_action('combat log') def combat_log(source, **_): try: delattr(source.env, 'combat_log') return "Combat logging removed from {}".format(source.env.name) except __HOLE__: source.env.combat_log = True return "Combat logging added to {}.".format(source.env.name)
AttributeError
dataset/ETHPy150Open genzgd/Lampost-Mud/lampost/mud/immortal.py/combat_log
8,573
def metadata(id, sleep_time=1): """ Given a HTRC ID, download the volume metadata from the Solr index. :param id: HTRC volume id. :type id: string :param sleep_time: Sleep time to prevent denial of service :type sleep_time: int in seconds, default: 1 :returns: dict """ solr ="http://chinkapin.pti.indiana.edu:9994/solr/meta/select/?q=id:%s" % id solr += "&wt=json" ## retrieve JSON results # TODO: exception handling if sleep_time: sleep(sleep_time) ## JUST TO MAKE SURE WE ARE THROTTLED try: data = json.load(urlopen(solr)) print id return data['response']['docs'][0] except __HOLE__, IndexError: print "No result found for " + id return dict()
ValueError
dataset/ETHPy150Open inpho/vsm/vsm/extensions/htrc.py/metadata
8,574
def rm_lb_hyphens(plain_root, logger, ignore=['.json', '.log', '.err']): """ Looks for a hyphen followed by whitespace or a line break. Reconstructs word and checks to see if the result exists in either WordNet or the OS's default spellchecker dictionary. If so, replaces fragments with reconstructed word. :param plain_root: The name of the directory containing plain-text files. :type plain_root: string :param logger: Logger that handles logging for the given directory. :type logger: Logger :param ignore: List of file extensions to ignore in the directory. :type ignore: list of strings, optional :returns: None """ try: d = enchant.Dict('en_US') except __HOLE__: d = None def recon(match_obj): rc_word = match_obj.group(1) + match_obj.group(2) if wn.synsets(rc_word) or (d and d.check(rc_word)): logger.info('\nbook: %s\nreconstructed word:\n%s\n', plain_root, rc_word) return rc_word logger.info('\nbook: %s\nignored expression:\nleft: %s\nright: %s\n', plain_root, match_obj.group(1), match_obj.group(2)) return match_obj.group(0) def inner(s): lb_hyphenated = re.compile(r'(\w+)-\s+(\w+)') return lb_hyphenated.sub(recon, s) page_files = os.listdir(plain_root) page_files = filter_by_suffix(page_files, ignore) for i, page_file in enumerate(page_files): filename = os.path.join(plain_root, page_file) with open(filename, 'r+w') as f: page = f.read() page = inner(page) f.seek(0) f.write(page) f.truncate()
ImportError
dataset/ETHPy150Open inpho/vsm/vsm/extensions/htrc.py/rm_lb_hyphens
8,575
def htrc_get_titles(metadata, vol_id): """ Gets titles of the volume given the metadata from a json file and volume id. """ try: md = metadata[vol_id] return md[md.keys()[0]]['titles'] except __HOLE__: print 'Volume ID not found:', vol_id raise
KeyError
dataset/ETHPy150Open inpho/vsm/vsm/extensions/htrc.py/htrc_get_titles
8,576
def _checkTimeout(self, result, name, lookupDeferred): try: userDeferred, cancelCall = self._runningQueries[lookupDeferred] except __HOLE__: pass else: del self._runningQueries[lookupDeferred] cancelCall.cancel() if isinstance(result, failure.Failure): userDeferred.errback(self._fail(name, result.getErrorMessage())) else: userDeferred.callback(result)
KeyError
dataset/ETHPy150Open kuri65536/python-for-android/python-modules/twisted/twisted/internet/base.py/ThreadedResolver._checkTimeout
8,577
def removeTrigger_BASE(self, handle): """ Just try to remove the trigger. @see: removeTrigger """ try: phase, callable, args, kwargs = handle except (TypeError, __HOLE__): raise ValueError("invalid trigger handle") else: if phase not in ('before', 'during', 'after'): raise KeyError("invalid phase") getattr(self, phase).remove((callable, args, kwargs))
ValueError
dataset/ETHPy150Open kuri65536/python-for-android/python-modules/twisted/twisted/internet/base.py/_ThreePhaseEvent.removeTrigger_BASE
8,578
def _moveCallLaterSooner(self, tple): # Linear time find: slow. heap = self._pendingTimedCalls try: pos = heap.index(tple) # Move elt up the heap until it rests at the right place. elt = heap[pos] while pos != 0: parent = (pos-1) // 2 if heap[parent] <= elt: break # move parent down heap[pos] = heap[parent] pos = parent heap[pos] = elt except __HOLE__: # element was not found in heap - oh well... pass
ValueError
dataset/ETHPy150Open kuri65536/python-for-android/python-modules/twisted/twisted/internet/base.py/ReactorBase._moveCallLaterSooner
8,579
def _stopThreadPool(self): """ Stop the reactor threadpool. This method is only valid if there is currently a threadpool (created by L{_initThreadPool}). It is not intended to be called directly; instead, it will be called by a shutdown trigger created in L{_initThreadPool}. """ triggers = [self._threadpoolStartupID, self.threadpoolShutdownID] for trigger in filter(None, triggers): try: self.removeSystemEventTrigger(trigger) except __HOLE__: pass self._threadpoolStartupID = None self.threadpoolShutdownID = None self.threadpool.stop() self.threadpool = None
ValueError
dataset/ETHPy150Open kuri65536/python-for-android/python-modules/twisted/twisted/internet/base.py/ReactorBase._stopThreadPool
8,580
def cancelTimeout(self): if self.timeoutID is not None: try: self.timeoutID.cancel() except __HOLE__: pass del self.timeoutID
ValueError
dataset/ETHPy150Open kuri65536/python-for-android/python-modules/twisted/twisted/internet/base.py/BaseConnector.cancelTimeout
8,581
def _handleSignals(self): """ Install the signal handlers for the Twisted event loop. """ try: import signal except __HOLE__: log.msg("Warning: signal module unavailable -- " "not installing signal handlers.") return if signal.getsignal(signal.SIGINT) == signal.default_int_handler: # only handle if there isn't already a handler, e.g. for Pdb. signal.signal(signal.SIGINT, self.sigInt) signal.signal(signal.SIGTERM, self.sigTerm) # Catch Ctrl-Break in windows if hasattr(signal, "SIGBREAK"): signal.signal(signal.SIGBREAK, self.sigBreak)
ImportError
dataset/ETHPy150Open kuri65536/python-for-android/python-modules/twisted/twisted/internet/base.py/_SignalReactorMixin._handleSignals
8,582
def type_check(self, instance): error_msg = '' for t in self.union_types: try: check_constraint(t, instance) return except __HOLE__ as e: error_msg = str(e) continue raise CompositeTypeHintError( '%s type-constraint violated. Expected an instance of one of: %s, ' 'received %s instead.%s' % (repr(self), tuple(sorted(_unified_repr(t) for t in self.union_types)), instance.__class__.__name__, error_msg))
TypeError
dataset/ETHPy150Open GoogleCloudPlatform/DataflowPythonSDK/google/cloud/dataflow/typehints/typehints.py/UnionHint.UnionConstraint.type_check
8,583
def _render_expression(self, check): """Turn a mongodb-style search dict into an SQL query.""" expressions = [] args = [] skeys = set(check.keys()) skeys.difference_update(set(self._keys)) skeys.difference_update(set(['buffers', 'result_buffers'])) if skeys: raise KeyError("Illegal testing key(s): %s"%skeys) for name,sub_check in check.items(): if isinstance(sub_check, dict): for test,value in sub_check.items(): try: op = operators[test] except __HOLE__: raise KeyError("Unsupported operator: %r"%test) if isinstance(op, tuple): op, join = op if value is None and op in null_operators: expr = "%s %s"%null_operators[op] else: expr = "%s %s ?"%(name, op) if isinstance(value, (tuple,list)): if op in null_operators and any([v is None for v in value]): # equality tests don't work with NULL raise ValueError("Cannot use %r test with NULL values on SQLite backend"%test) expr = '( %s )'%( join.join([expr]*len(value)) ) args.extend(value) else: args.append(value) expressions.append(expr) else: # it's an equality check if sub_check is None: expressions.append("%s IS NULL") else: expressions.append("%s = ?"%name) args.append(sub_check) expr = " AND ".join(expressions) return expr, args
KeyError
dataset/ETHPy150Open ipython/ipython-py3k/IPython/parallel/controller/sqlitedb.py/SQLiteDB._render_expression
8,584
def delete_if_lifetime_over(item, name): """ :return: True if file was deleted """ if 0 < item.meta['timestamp-max-life'] < time.time(): try: current_app.storage.remove(name) except (OSError, __HOLE__) as e: pass return True return False
IOError
dataset/ETHPy150Open bepasty/bepasty-server/bepasty/utils/date_funcs.py/delete_if_lifetime_over
8,585
def stress(cmd, revision_tag, stress_sha, stats=None): """Run stress command and collect average statistics""" # Check for compatible stress commands. This doesn't yet have full # coverage of every option: # Make sure that if this is a read op, that the number of threads # was specified, otherwise stress defaults to doing multiple runs # which is not what we want: if cmd.strip().startswith("read") and 'threads' not in cmd: raise AssertionError('Stress read commands must specify #/threads when used with this tool.') stress_path = os.path.join(CASSANDRA_STRESS_PATH, stress_sha, 'tools/bin/cassandra-stress') temp_log = tempfile.mktemp() logger.info("Running stress from '{stress_path}' : {cmd}" .format(stress_path=stress_path, cmd=cmd)) # Record the type of operation being performed: operation = cmd.strip().split(" ")[0] if stats is None: stats = { "id": str(uuid.uuid1()), "command": cmd, "intervals": [], "test": operation, "revision": revision_tag, "date": datetime.datetime.now().isoformat(), "stress_revision": stress_sha } # Run stress: # Subprocess communicate() blocks, preventing us from seeing any # realtime output, so pipe the output to a file as a workaround: proc = subprocess.Popen('JAVA_HOME={JAVA_HOME} {CASSANDRA_STRESS} {cmd} | tee {temp_log}' .format(JAVA_HOME=JAVA_HOME, CASSANDRA_STRESS=stress_path, cmd=cmd, temp_log=temp_log), shell=True) proc.wait() log = open(temp_log) collecting_aggregates = False collecting_values = False # Regex for trunk cassandra-stress start_of_intervals_re = re.compile('type.*total ops,.*op/s,.*pk/s') for line in log: line = line.strip() if line.startswith("Results:"): collecting_aggregates = True continue if not collecting_aggregates: if start_of_intervals_re.match(line): collecting_values = True continue if collecting_values: line_parts = [l.strip() for l in line.split(',')] # Only capture total metrics for now if line_parts[0] == 'total': try: stats['intervals'].append([float(x) for x in line_parts[1:]]) except: pass continue 
continue if line.startswith("END") or line.strip() == "": continue # Collect aggregates: try: stat, value = line.split(":", 1) stats[stat.strip()] = value.strip() except __HOLE__: logger.info("Unable to parse aggregate line: '{}'".format(line)) log.close() os.remove(temp_log) return stats
ValueError
dataset/ETHPy150Open datastax/cstar_perf/tool/cstar_perf/tool/benchmark.py/stress
8,586
def print_package_version(package_name, indent=' '): try: package = __import__(package_name) version = getattr(package, '__version__', None) package_file = getattr(package, '__file__', ) provenance_info = '{0} from {1}'.format(version, package_file) except __HOLE__: provenance_info = 'not installed' print('{0}{1}: {2}'.format(indent, package_name, provenance_info))
ImportError
dataset/ETHPy150Open neurosynth/neurosynth/ci/show-python-package-versions.py/print_package_version
8,587
def get_context(self, url='/'): context = self.client.get(url).context try: return context[0] except __HOLE__: return context
KeyError
dataset/ETHPy150Open calebsmith/django-template-debug/template_debug/tests/base.py/TemplateDebugTestCase.get_context
8,588
def publish_status(self, target, temp, heater_percent, cooler_percent): # Get timestamp in epoch seconds timestamp = int(time.time()) # Append new status status = [target, temp, heater_percent, cooler_percent, timestamp] self._datapoints.append(status) # Drop datapoints older than required for chart earliest = timestamp - self._past_seconds self._datapoints = [d for d in self._datapoints if d[4] >= earliest] # Divine polling frequency using the difference between timestamps of # the most recent 2 datapoints. Use this to set an appropriate x label # interval. if len(self._datapoints) >= 2: freq_seconds = self._datapoints[-1][4] - self._datapoints[-2][4] if freq_seconds > 0: self._chart_config.x_labels_major_every = ( self._x_label_seconds / freq_seconds) # Draw chart chart = pygal.Line(self._chart_config) chart.x_labels = [ datetime.fromtimestamp(d[4]).strftime('%b-%d %H:%M:%S') for d in self._datapoints] chart.add('Target', [d[0] for d in self._datapoints]) chart.add('Actual', [d[1] for d in self._datapoints]) # TODO: Add heating and cooling as bars once pygal supports bars and # lines on a single chart. #chart.add('Heating', [d[2] for d in self._datapoints], secondary=True) #chart.add('Cooling', [d[3] for d in self._datapoints], secondary=True) try: getattr(chart, self._outputter)(filename=self._out_file) except __HOLE__ as err: raise OutputError(err)
IOError
dataset/ETHPy150Open amorphic/braubuddy/braubuddy/output/imagefile.py/ImageFileOutput.publish_status
8,589
@classmethod def as_manager(cls): class QuerySetManager(models.Manager): use_for_related_fields = True def __init__(self): super(QuerySetManager, self).__init__() self.queryset_class = cls def get_query_set(self): return self.queryset_class(self.model) def __getattr__(self, attr, *args): try: return getattr(self.__class__, attr, *args) except __HOLE__: return getattr(self.get_query_set(), attr, *args) return QuerySetManager()
AttributeError
dataset/ETHPy150Open jumoconnect/openjumo/jumodjango/utils/query_set.py/QuerySet.as_manager
8,590
def _on_harvest_status_message(self): try: log.debug("Updating harvest with id %s", self.message["id"]) # Retrieve harvest model object harvest = Harvest.objects.get(harvest_id=self.message["id"]) # And update harvest model object harvest.status = self.message["status"] harvest.stats = self.message.get("summary", {}) harvest.infos = self.message.get("infos", []) harvest.warnings = self.message.get("warnings", []) harvest.errors = self.message.get("errors", []) harvest.token_updates = self.message.get("token_updates") harvest.uids = self.message.get("uids") harvest.warcs_count = self.message.get("warcs", {}).get("count", 0) harvest.warcs_bytes = self.message.get("warcs", {}).get("bytes", 0) harvest.date_started = iso8601.parse_date(self.message["date_started"]) if "date_ended" in self.message: harvest.date_ended = iso8601.parse_date(self.message["date_ended"]) harvest.save() # Update seeds based on tokens that have changed for id, token in self.message.get("token_updates", {}).items(): # Try to find seed based on seedset and uid. try: seed = Seed.objects.get(seed_id=id) seed.token = token seed.history_note = "Changed token based on information from harvester from harvest {}".format( self.message["id"]) seed.save() except ObjectDoesNotExist: log.error("Seed model object with seed_id %s not found to update token to %s", id, token) # Update seeds based on uids that have been returned for id, uid in self.message.get("uids", {}).items(): # Try to find seed based on seedset and token. try: seed = Seed.objects.get(seed_id=id) seed.uid = uid seed.history_note = "Changed uid based on information from harvester from harvest {}".format( self.message["id"]) seed.save() except ObjectDoesNotExist: log.error("Seed model object with seed_id %s not found to update uid to %s", id, uid) except __HOLE__: log.error("Harvest model object not found for harvest status message: %s", json.dumps(self.message, indent=4))
ObjectDoesNotExist
dataset/ETHPy150Open gwu-libraries/sfm-ui/sfm/message_consumer/sfm_ui_consumer.py/SfmUiConsumer._on_harvest_status_message
8,591
def _on_warc_created_message(self): try: log.debug("Warc with id %s", self.message["warc"]["id"]) # Create warc model object warc = Warc.objects.create( harvest=Harvest.objects.get(harvest_id=self.message["harvest"]["id"]), warc_id=self.message["warc"]["id"], path=self.message["warc"]["path"], sha1=self.message["warc"]["sha1"], bytes=self.message["warc"]["bytes"], date_created=iso8601.parse_date(self.message["warc"]["date_created"]) ) warc.save() except __HOLE__: log.error("Harvest model object not found for harvest status message: %s", json.dumps(self.message, indent=4))
ObjectDoesNotExist
dataset/ETHPy150Open gwu-libraries/sfm-ui/sfm/message_consumer/sfm_ui_consumer.py/SfmUiConsumer._on_warc_created_message
8,592
def _on_export_status_message(self): try: log.debug("Updating export with id %s", self.message["id"]) # Retrieve export model object export = Export.objects.get(export_id=self.message["id"]) # And update export model object export.status = self.message["status"] export.infos = self.message.get("infos", []) export.warnings = self.message.get("warnings", []) export.errors = self.message.get("errors", []) export.date_started = iso8601.parse_date(self.message["date_started"]) if "date_ended" in self.message: export.date_ended = iso8601.parse_date(self.message["date_ended"]) export.save() except __HOLE__: log.error("Export model object not found for export status message: %s", json.dumps(self.message, indent=4))
ObjectDoesNotExist
dataset/ETHPy150Open gwu-libraries/sfm-ui/sfm/message_consumer/sfm_ui_consumer.py/SfmUiConsumer._on_export_status_message
8,593
def _on_web_harvest_start_message(self): try: log.debug("Creating harvest for web harvest with id %s", self.message["id"]) parent_harvest = Harvest.objects.get(harvest_id=self.message["parent_id"]) harvest = Harvest.objects.create(harvest_type=self.message["type"], harvest_id=self.message["id"], parent_harvest=parent_harvest, seed_set=parent_harvest.seed_set) harvest.save() except __HOLE__: log.error("Harvest model object not found for web harvest status message: %s", json.dumps(self.message, indent=4)) # { # "id": "flickr:45", # "parent_id": "sfmui:45", # "type": "web", # "seeds": [ # { # "token": "http://www.gwu.edu/" # }, # { # "token": "http://library.gwu.edu/" # } # ], # "collection": { # "id": "test_collection", # "path": "/tmp/test_collection" # } # }
ObjectDoesNotExist
dataset/ETHPy150Open gwu-libraries/sfm-ui/sfm/message_consumer/sfm_ui_consumer.py/SfmUiConsumer._on_web_harvest_start_message
8,594
def paginate(context, window=DEFAULT_WINDOW): """ Renders the ``pagination/pagination.html`` template, resulting in a Digg-like display of the available pages, given the current page. If there are too many pages to be displayed before and after the current page, then elipses will be used to indicate the undisplayed gap between page numbers. Requires one argument, ``context``, which should be a dictionary-like data structure and must contain the following keys: ``paginator`` A ``Paginator`` or ``QuerySetPaginator`` object. ``page_obj`` This should be the result of calling the page method on the aforementioned ``Paginator`` or ``QuerySetPaginator`` object, given the current page. This same ``context`` dictionary-like data structure may also include: ``getvars`` A dictionary of all of the **GET** parameters in the current request. This is useful to maintain certain types of state, even when requesting a different page. """ try: paginator = context['paginator'] page_obj = context['page_obj'] page_range = paginator.page_range # First and last are simply the first *n* pages and the last *n* pages, # where *n* is the current window size. first = set(page_range[:window]) last = set(page_range[-window:]) # Now we look around our current page, making sure that we don't wrap # around. current_start = page_obj.number-1-window if current_start < 0: current_start = 0 current_end = page_obj.number-1+window if current_end < 0: current_end = 0 current = set(page_range[current_start:current_end]) pages = [] # If there's no overlap between the first set of pages and the current # set of pages, then there's a possible need for elusion. if len(first.intersection(current)) == 0: first_list = list(first) first_list.sort() second_list = list(current) second_list.sort() pages.extend(first_list) diff = second_list[0] - first_list[-1] # If there is a gap of two, between the last page of the first # set and the first page of the current set, then we're missing a # page. 
if diff == 2: pages.append(second_list[0] - 1) # If the difference is just one, then there's nothing to be done, # as the pages need no elusion and are correct. elif diff == 1: pass # Otherwise, there's a bigger gap which needs to be signaled for # elusion, by pushing a None value to the page list. else: pages.append(None) pages.extend(second_list) else: unioned = list(first.union(current)) unioned.sort() pages.extend(unioned) # If there's no overlap between the current set of pages and the last # set of pages, then there's a possible need for elusion. if len(current.intersection(last)) == 0: second_list = list(last) second_list.sort() diff = second_list[0] - pages[-1] # If there is a gap of two, between the last page of the current # set and the first page of the last set, then we're missing a # page. if diff == 2: pages.append(second_list[0] - 1) # If the difference is just one, then there's nothing to be done, # as the pages need no elusion and are correct. elif diff == 1: pass # Otherwise, there's a bigger gap which needs to be signaled for # elusion, by pushing a None value to the page list. else: pages.append(None) pages.extend(second_list) else: differenced = list(last.difference(current)) differenced.sort() pages.extend(differenced) to_return = { 'pages': pages, 'page_obj': page_obj, 'paginator': paginator, 'is_paginated': paginator.count > paginator.per_page, } if 'request' in context: getvars = context['request'].GET.copy() if 'page' in getvars: del getvars['page'] if len(getvars.keys()) > 0: to_return['getvars'] = "&%s" % getvars.urlencode() else: to_return['getvars'] = '' return to_return except __HOLE__, AttributeError: return {}
KeyError
dataset/ETHPy150Open ilblackdragon/django-blogs/blog/templatetags/misc.py/paginate
8,595
def lazy_init(self): try: f=open(self.filename) except __HOLE__: warning("Can't open base %s" % self.filename) return try: self.base = [] for l in f: if l[0] in ["#","\n"]: continue l = tuple(l.split(":")) if len(l) < 8: continue def a2i(x): if x.isdigit(): return int(x) return x li = [ a2i(i) for i in l[1:4] ] #if li[0] not in self.ttl_range: # self.ttl_range.append(li[0]) # self.ttl_range.sort() self.base.append((l[0], li[0], li[1], li[2], l[4], l[5], l[6], l[7][:-1])) except: warning("Can't parse p0f database (new p0f version ?)") self.base = None f.close()
IOError
dataset/ETHPy150Open phaethon/scapy/scapy/modules/p0f.py/p0fKnowledgeBase.lazy_init
8,596
def make_sure_path_exists(path): try: os.makedirs(path) except __HOLE__ as exception: if exception.errno != errno.EEXIST: raise if not os.path.isdir(path): raise
OSError
dataset/ETHPy150Open fictivekin/openrecipes/scrapy_proj/grab_html.py/make_sure_path_exists
8,597
def get_zoom(self, request, default=16): try: zoom = int(request.GET['zoom']) except (__HOLE__, KeyError): zoom = default else: zoom = min(max(10, zoom), 18) return zoom
ValueError
dataset/ETHPy150Open mollyproject/mollyproject/molly/utils/views.py/BaseView.get_zoom
8,598
def render(self, request, context, template_name, expires=None): """ Given a request, a context dictionary and a template name, this renders the template with the given context according to the capabilities and requested format of the client. An optional final argument is that of a timedelta object, which sets additional caching headers for the content. """ context.pop('exposes_user_data', None) if 'format' in request.REQUEST: formats = request.REQUEST['format'].split(',') renderers, seen_formats = [], set() for format in formats: if format in self.FORMATS and format not in seen_formats: renderers.append(self.FORMATS[format]) elif request.META.get('HTTP_ACCEPT'): accepts = self.parse_accept_header(request.META['HTTP_ACCEPT']) renderers = MediaType.resolve(accepts, self.FORMATS_BY_MIMETYPE) else: renderers = [self.FORMATS['html']] # Stop external sites from grabbing JSON representations of pages # which contain sensitive user information. try: offsite_referrer = 'HTTP_REFERER' in request.META and \ request.META['HTTP_REFERER'].split('/')[2] != \ request.META.get('HTTP_HOST') except __HOLE__: # Malformed referrers (i.e., those not containing a full URL) throw # this offsite_referrer = True for renderer in renderers: if renderer.format != 'html' and context.get('exposes_user_data') \ and offsite_referrer: continue try: response = renderer(request, context, template_name) except NotImplementedError: continue else: if expires is not None and not settings.DEBUG and \ not getattr(settings, 'NO_CACHE', False): response['Expires'] = formatdate( mktime((datetime.now() + expires).timetuple())) # if expires is negative, then consider this to be no-cache if expires < timedelta(seconds=0): response['Cache-Control'] = 'no-cache' else: response['Cache-Control'] = 'max-age=%d' % \ (expires.seconds + expires.days * 24 * 3600) return response else: if 'format' not in request.REQUEST: tried_mimetypes = list(itertools.chain(*[r.mimetypes for r in renderers])) response = HttpResponse( 
_("Your Accept header didn't contain any supported media ranges.") + \ "\n\n" + _("Supported ranges are:") + \ "\n\n * %s\n" % '\n * '.join( sorted('%s (%s)' % (f[0].value, f[1].format) for f in self.FORMATS_BY_MIMETYPE if not f[0] in tried_mimetypes)), mimetype="text/plain") else: response = HttpResponse( _("Unable to render this document in this format.") + "\n\n" + _("Supported formats are") + ":\n\n * %s\n" \ % '\n * '.join(self.FORMATS.keys()), mimetype="text/plain") response.status_code = 406 # Not Acceptable return response
IndexError
dataset/ETHPy150Open mollyproject/mollyproject/molly/utils/views.py/BaseView.render
8,599
def parse_accept_header(self, accept): media_types = [] for media_type in accept.split(','): try: media_types.append(MediaType(media_type)) except __HOLE__: pass return media_types
ValueError
dataset/ETHPy150Open mollyproject/mollyproject/molly/utils/views.py/BaseView.parse_accept_header