Unnamed: 0
int64
0
10k
function
stringlengths
79
138k
label
stringclasses
20 values
info
stringlengths
42
261
9,300
def add_product(basket, price=None, quantity=1, product=None): """ Helper to add a product to the basket. """ has_strategy = False try: has_strategy = hasattr(basket, 'strategy') except __HOLE__: pass if not has_strategy: basket.strategy = strategy.Default() if price is None: price = D('1') if product and product.has_stockrecords: record = product.stockrecords.all()[0] else: record = factories.create_stockrecord( product=product, price_excl_tax=price, num_in_stock=quantity + 1) basket.add_product(record.product, quantity)
RuntimeError
dataset/ETHPy150Open django-oscar/django-oscar/src/oscar/test/basket.py/add_product
9,301
def run_only_if_kitchen_is_available(func): if sys.version_info < (2, 7): try: from kitchen.pycompat27 import subprocess except __HOLE__: subprocess = None else: import subprocess pred = lambda: subprocess is not None return run_only(func, pred)
ImportError
dataset/ETHPy150Open BrightcoveOS/Diamond/src/collectors/userscripts/test/testuserscripts.py/run_only_if_kitchen_is_available
9,302
def _unpack(self, data): if data is None: raise IPCError("received data is None") try: return json.loads(data.decode('utf-8')), True except ValueError: pass try: assert len(data) >= HDRLEN size = struct.unpack("!L", data[:HDRLEN])[0] assert size >= len(data[HDRLEN:]) return self._unpack_body(data[HDRLEN:HDRLEN + size]), False except __HOLE__: raise IPCError( "error reading reply!" " (probably the socket was disconnected)" )
AssertionError
dataset/ETHPy150Open qtile/qtile/libqtile/ipc.py/_IPC._unpack
9,303
def send(self, msg, is_json=False): if is_json: send_data = self._pack_json(msg) else: send_data = self._pack(msg) self.transport.write(send_data) try: self.transport.write_eof() except __HOLE__: logger.exception('Swallowing AttributeError due to asyncio bug!')
AttributeError
dataset/ETHPy150Open qtile/qtile/libqtile/ipc.py/_ClientProtocol.send
9,304
def send(self, msg): client_coroutine = self.loop.create_unix_connection(_ClientProtocol, path=self.fname) try: _, client_proto = self.loop.run_until_complete(client_coroutine) except __HOLE__: raise IPCError("Could not open %s" % self.fname) client_proto.send(msg, is_json=self.is_json) try: self.loop.run_until_complete(asyncio.wait_for(client_proto.reply, timeout=10)) except asyncio.TimeoutError: raise RuntimeError("Server not responding") return client_proto.reply.result()
OSError
dataset/ETHPy150Open qtile/qtile/libqtile/ipc.py/Client.send
9,305
@csrf_exempt def serve(request, tail, server): """ Django adapter. It has three arguments: #. ``request`` is a Django request object, #. ``tail`` is everything that's left from an URL, which adapter is attached to, #. ``server`` is a pyws server object. First two are the context of an application, function ``serve`` transforms them into a pyws request object. Then it feeds the request to the server, gets the response and transforms it into a Django response object. """ if request.GET: body = '' else: try: body = request.body except __HOLE__: body = request.raw_post_data request = Request( tail, body, parse_qs(request.META['QUERY_STRING']), parse_qs(body), request.COOKIES, ) response = server.process_request(request) return HttpResponse( response.text, content_type=response.content_type, status=get_http_response_code_num(response))
AttributeError
dataset/ETHPy150Open stepank/pyws/src/pyws/adapters/_django.py/serve
9,306
def _connect(**kwargs): ''' Initialise netscaler connection ''' connargs = dict() # Shamelessy ripped from the mysql module def __connarg(name, key=None): ''' Add key to connargs, only if name exists in our kwargs or as netscaler.<name> in __opts__ or __pillar__ Evaluate in said order - kwargs, opts then pillar. To avoid collision with other functions, kwargs-based connection arguments are prefixed with 'netscaler_' (i.e. 'netscaler_host', 'netscaler_user', etc.). ''' if key is None: key = name if name in kwargs: connargs[key] = kwargs[name] else: prefix = 'netscaler_' if name.startswith(prefix): try: name = name[len(prefix):] except __HOLE__: return val = __salt__['config.option']('netscaler.{0}'.format(name), None) if val is not None: connargs[key] = val __connarg('netscaler_host', 'host') __connarg('netscaler_user', 'user') __connarg('netscaler_pass', 'pass') # useSSL = True will be enforced #_connarg('connection_useSSL', 'useSSL') nitro = NSNitro(connargs['host'], connargs['user'], connargs['pass'], True) try: nitro.login() except NSNitroError as error: log.debug('netscaler module error - NSNitro.login() failed: {0}'.format(error)) return None return nitro
IndexError
dataset/ETHPy150Open saltstack/salt/salt/modules/netscaler.py/_connect
9,307
def getid(obj): """Wrapper to get object's ID. Abstracts the common pattern of allowing both an object or an object's ID (UUID) as a parameter when dealing with relationships. """ try: return obj.id except __HOLE__: return obj
AttributeError
dataset/ETHPy150Open openstack/python-cerberusclient/cerberusclient/common/base.py/getid
9,308
def to_utf8(x): """ Tries to utf-8 encode x when possible If x is a string returns it encoded, otherwise tries to iter x and encode utf-8 all strings it contains, returning a list. """ if isinstance(x, basestring): return x.encode('utf-8') if isinstance(x, unicode) else x try: l = iter(x) except __HOLE__: return x return [to_utf8(i) for i in l]
TypeError
dataset/ETHPy150Open maraujop/requests-oauth/oauth_hook/auth.py/to_utf8
9,309
def send_signal(pid, signum): try: os.kill(pid, signum) except __HOLE__, ose: if ose.errno != errno.ESRCH: raise
OSError
dataset/ETHPy150Open abusesa/abusehelper/abusehelper/tools/botnet/commands.py/send_signal
9,310
def run_for_instance(self, _, instance): height = 20 try: process = popen("stty", "size", stdin=sys.stdin) except __HOLE__: pass else: stdout, _ = process.communicate() if process.returncode == 0: try: height = max(int(stdout.split()[0]) - 2, 0) except ValueError: pass yield instance.follow(lines=height)
OSError
dataset/ETHPy150Open abusesa/abusehelper/abusehelper/tools/botnet/commands.py/Follow.run_for_instance
9,311
def _get_build_prefix(): """ Returns a safe build_prefix """ path = os.path.join(tempfile.gettempdir(), 'pip_build_%s' % __get_username()) if sys.platform == 'win32': """ on windows(tested on 7) temp dirs are isolated """ return path try: os.mkdir(path) write_delete_marker_file(path) except __HOLE__: file_uid = None try: # raises OSError for symlinks # https://github.com/pypa/pip/pull/935#discussion_r5307003 file_uid = get_path_uid(path) except OSError: file_uid = None if file_uid != os.geteuid(): msg = "The temporary folder for building (%s) is either not owned by you, or is a symlink." \ % path print (msg) print("pip will not work until the temporary folder is " + \ "either deleted or is a real directory owned by your user account.") raise pip.exceptions.InstallationError(msg) return path
OSError
dataset/ETHPy150Open GeekTrainer/Flask/Work/Trivia - Module 5/env/Lib/site-packages/pip/locations.py/_get_build_prefix
9,312
def _parse_args(): parser = optparse.OptionParser() parser.add_option("-d", "--dry-run", action="store_true", dest="dry_run", default=False, help="don't actually remove locks") parser.add_option("-l", "--limit", action="store", type='int', dest="limit", default=sys.maxint, help="max number of locks to delete (default: no limit)") parser.add_option("-v", "--verbose", action="store_true", dest="verbose", default=False, help="don't print status messages to stdout") options, args = parser.parse_args() try: days_old = int(args[0]) except (IndexError, __HOLE__): parser.print_help() sys.exit(1) return options, days_old
ValueError
dataset/ETHPy150Open BU-NU-CLOUD-SP16/Trusted-Platform-Module-nova/tools/xenserver/cleanup_sm_locks.py/_parse_args
9,313
def main(): options, days_old = _parse_args() if not os.path.exists(BASE): print >> sys.stderr, "error: '%s' doesn't exist. Make sure you're"\ " running this on the dom0." % BASE sys.exit(1) lockpaths_removed = 0 nspaths_removed = 0 for nsname in os.listdir(BASE)[:options.limit]: nspath = os.path.join(BASE, nsname) if not os.path.isdir(nspath): continue # Remove old lockfiles removed = 0 locknames = os.listdir(nspath) for lockname in locknames: lockpath = os.path.join(nspath, lockname) lock_age_days = _get_age_days(os.path.getmtime(lockpath)) if lock_age_days > days_old: lockpaths_removed += 1 removed += 1 if options.verbose: print 'Removing old lock: %03d %s' % (lock_age_days, lockpath) if not options.dry_run: os.unlink(lockpath) # Remove empty namespace paths if len(locknames) == removed: nspaths_removed += 1 if options.verbose: print 'Removing empty namespace: %s' % nspath if not options.dry_run: try: os.rmdir(nspath) except __HOLE__, e: if e.errno == errno.ENOTEMPTY: print >> sys.stderr, "warning: directory '%s'"\ " not empty" % nspath else: raise if options.dry_run: print "** Dry Run **" print "Total locks removed: ", lockpaths_removed print "Total namespaces removed: ", nspaths_removed
OSError
dataset/ETHPy150Open BU-NU-CLOUD-SP16/Trusted-Platform-Module-nova/tools/xenserver/cleanup_sm_locks.py/main
9,314
@property def nonNilFactsInInstance(self): # indexed by fact (concept) qname """Facts in the instance which are not nil, cached :returns: set -- non-nil facts in instance """ try: return self._nonNilFactsInInstance except __HOLE__: self._nonNilFactsInInstance = set(f for f in self.factsInInstance if not f.isNil) return self._nonNilFactsInInstance
AttributeError
dataset/ETHPy150Open Arelle/Arelle/arelle/ModelXbrl.py/ModelXbrl.nonNilFactsInInstance
9,315
@property def factsByQname(self): # indexed by fact (concept) qname """Facts in the instance indexed by their QName, cached :returns: dict -- indexes are QNames, values are ModelFacts """ try: return self._factsByQname except __HOLE__: self._factsByQname = fbqn = defaultdict(set) for f in self.factsInInstance: if f.qname is not None: fbqn[f.qname].add(f) return fbqn
AttributeError
dataset/ETHPy150Open Arelle/Arelle/arelle/ModelXbrl.py/ModelXbrl.factsByQname
9,316
def factsByDatatype(self, notStrict, typeQname): # indexed by fact (concept) qname """Facts in the instance indexed by data type QName, cached as types are requested :param notSctrict: if True, fact may be derived :type notStrict: bool :returns: set -- ModelFacts that have specified type or (if nonStrict) derived from specified type """ try: return self._factsByDatatype[notStrict, typeQname] except __HOLE__: self._factsByDatatype = {} return self.factsByDatatype(notStrict, typeQname) except KeyError: self._factsByDatatype[notStrict, typeQname] = fbdt = set() for f in self.factsInInstance: c = f.concept if c.typeQname == typeQname or (notStrict and c.type.isDerivedFrom(typeQname)): fbdt.add(f) return fbdt
AttributeError
dataset/ETHPy150Open Arelle/Arelle/arelle/ModelXbrl.py/ModelXbrl.factsByDatatype
9,317
def factsByPeriodType(self, periodType): # indexed by fact (concept) qname """Facts in the instance indexed by periodType, cached :param periodType: Period type to match ("instant", "duration", or "forever") :type periodType: str :returns: set -- ModelFacts that have specified periodType """ try: return self._factsByPeriodType[periodType] except __HOLE__: self._factsByPeriodType = fbpt = defaultdict(set) for f in self.factsInInstance: p = f.concept.periodType if p: fbpt[p].add(f) return self.factsByPeriodType(periodType) except KeyError: return set() # no facts for this period type
AttributeError
dataset/ETHPy150Open Arelle/Arelle/arelle/ModelXbrl.py/ModelXbrl.factsByPeriodType
9,318
def factsByDimMemQname(self, dimQname, memQname=None): # indexed by fact (concept) qname """Facts in the instance indexed by their Dimension and Member QName, cached :returns: dict -- indexes are (Dimension, Member) and (Dimension) QNames, values are ModelFacts If Member is None, returns facts that have the dimension (explicit or typed) If Member is NONDEFAULT, returns facts that have the dimension (explicit non-default or typed) If Member is DEFAULT, returns facts that have the dimension (explicit non-default or typed) defaulted """ try: fbdq = self._factsByDimQname[dimQname] return fbdq[memQname] except __HOLE__: self._factsByDimQname = {} return self.factsByDimMemQname(dimQname, memQname) except KeyError: self._factsByDimQname[dimQname] = fbdq = defaultdict(set) for fact in self.factsInInstance: if fact.isItem and fact.context is not None: dimValue = fact.context.dimValue(dimQname) if isinstance(dimValue, ModelValue.QName): # explicit dimension default value fbdq[None].add(fact) # set of all facts that have default value for dimension if dimQname in self.modelXbrl.qnameDimensionDefaults: fbdq[self.qnameDimensionDefaults[dimQname]].add(fact) # set of facts that have this dim and mem fbdq[DEFAULT].add(fact) # set of all facts that have default value for dimension elif dimValue is not None: # not default fbdq[None].add(fact) # set of all facts that have default value for dimension fbdq[NONDEFAULT].add(fact) # set of all facts that have non-default value for dimension if dimValue.isExplicit: fbdq[dimValue.memberQname].add(fact) # set of facts that have this dim and mem else: # default typed dimension fbdq[DEFAULT].add(fact) return fbdq[memQname]
AttributeError
dataset/ETHPy150Open Arelle/Arelle/arelle/ModelXbrl.py/ModelXbrl.factsByDimMemQname
9,319
def modelObject(self, objectId): """Finds a model object by an ordinal ID which may be buried in a tkinter view id string (e.g., 'somedesignation_ordinalnumber'). :param objectId: string which includes _ordinalNumber, produced by ModelObject.objectId(), or integer object index :type objectId: str or int :returns: ModelObject """ if isinstance(objectId, _INT_TYPES): # may be long or short in 2.7 return self.modelObjects[objectId] # assume it is a string with ID in a tokenized representation, like xyz_33 try: return self.modelObjects[_INT(objectId.rpartition("_")[2])] except (__HOLE__, ValueError): return None # UI thread viewModelObject
IndexError
dataset/ETHPy150Open Arelle/Arelle/arelle/ModelXbrl.py/ModelXbrl.modelObject
9,320
def viewModelObject(self, objectId): """Finds model object, if any, and synchronizes any views displaying it to bring the model object into scrollable view region and highlight it :param objectId: string which includes _ordinalNumber, produced by ModelObject.objectId(), or integer object index :type objectId: str or int """ modelObject = "" try: if isinstance(objectId, (ModelObject,FactPrototype)): modelObject = objectId elif isinstance(objectId, str) and objectId.startswith("_"): modelObject = self.modelObject(objectId) if modelObject is not None: for view in self.views: view.viewModelObject(modelObject) except (IndexError, __HOLE__, AttributeError)as err: self.modelManager.addToLog(_("Exception viewing properties {0} {1} at {2}").format( modelObject, err, traceback.format_tb(sys.exc_info()[2])))
ValueError
dataset/ETHPy150Open Arelle/Arelle/arelle/ModelXbrl.py/ModelXbrl.viewModelObject
9,321
def logArguments(self, codes, msg, codedArgs): """ Prepares arguments for logger function as per info() below. If codes includes EFM, GFM, HMRC, or SBR-coded error then the code chosen (if a sequence) corresponds to whether EFM, GFM, HMRC, or SBR validation is in effect. """ def propValues(properties): # deref objects in properties return [(p[0],str(p[1])) if len(p) == 2 else (p[0],str(p[1]),propValues(p[2])) for p in properties if 2 <= len(p) <= 3] # determine logCode messageCode = self.effectiveMessageCode(codes) # determine message and extra arguments fmtArgs = {} extras = {"messageCode":messageCode} modelObjectArgs = () for argName, argValue in codedArgs.items(): if argName in ("modelObject", "modelXbrl", "modelDocument"): try: entryUrl = self.modelDocument.uri except __HOLE__: try: entryUrl = self.entryLoadingUrl except AttributeError: entryUrl = self.fileSource.url refs = [] modelObjectArgs = argValue if isinstance(argValue, (tuple,list,set)) else (argValue,) for arg in flattenSequence(modelObjectArgs): if arg is not None: if isinstance(arg, _STR_BASE): objectUrl = arg else: try: objectUrl = arg.modelDocument.uri except AttributeError: try: objectUrl = self.modelDocument.uri except AttributeError: objectUrl = self.entryLoadingUrl try: file = UrlUtil.relativeUri(entryUrl, objectUrl) except: file = "" ref = {} if isinstance(arg,(ModelObject, ObjectPropertyViewWrapper)): _arg = arg.modelObject if isinstance(arg, ObjectPropertyViewWrapper) else arg ref["href"] = file + "#" + XmlUtil.elementFragmentIdentifier(_arg) ref["sourceLine"] = _arg.sourceline ref["objectId"] = _arg.objectId() if self.logRefObjectProperties: try: ref["properties"] = propValues(arg.propertyView) except AttributeError: pass # is a default properties entry appropriate or needed? 
if self.logRefHasPluginProperties: refProperties = ref.get("properties", {}) for pluginXbrlMethod in pluginClassMethods("Logging.Ref.Properties"): pluginXbrlMethod(arg, refProperties, codedArgs) if refProperties: ref["properties"] = refProperties else: ref["href"] = file try: ref["sourceLine"] = arg.sourceline except AttributeError: pass # arg may not have sourceline, ignore if so if self.logRefHasPluginAttrs: refAttributes = {} for pluginXbrlMethod in pluginClassMethods("Logging.Ref.Attributes"): pluginXbrlMethod(arg, refAttributes, codedArgs) if refAttributes: ref["customAttributes"] = refAttributes refs.append(ref) extras["refs"] = refs elif argName == "sourceFileLine": # sourceFileLines is pairs of file and line numbers, e.g., ((file,line),(file2,line2),...) ref = {} if isinstance(argValue, (tuple,list)): ref["href"] = str(argValue[0]) if len(argValue) > 1 and argValue[1]: ref["sourceLine"] = str(argValue[1]) else: ref["href"] = str(argValue) extras["refs"] = [ref] elif argName == "sourceFileLines": # sourceFileLines is tuple/list of pairs of file and line numbers, e.g., ((file,line),(file2,line2),...) 
refs = [] for arg in (argValue if isinstance(argValue, (tuple,list)) else (argValue,)): ref = {} if isinstance(arg, (tuple,list)): ref["href"] = str(arg[0]) if len(arg) > 1 and arg[1]: ref["sourceLine"] = str(arg[1]) else: ref["href"] = str(arg) refs.append(ref) extras["refs"] = refs elif argName == "sourceLine": if isinstance(argValue, _INT_TYPES): # must be sortable with int's in logger extras["sourceLine"] = argValue elif argName not in ("exc_info", "messageCodes"): if isinstance(argValue, (ModelValue.QName, ModelObject, bool, FileNamedStringIO, # might be a set of lxml objects not dereferencable at shutdown tuple, list, set)): fmtArgs[argName] = str(argValue) elif argValue is None: fmtArgs[argName] = "(none)" elif isinstance(argValue, _INT_TYPES): # need locale-dependent formatting fmtArgs[argName] = format_string(self.modelManager.locale, '%i', argValue) elif isinstance(argValue,(float,Decimal)): # need locale-dependent formatting fmtArgs[argName] = format_string(self.modelManager.locale, '%f', argValue) elif isinstance(argValue, dict): fmtArgs[argName] = argValue else: fmtArgs[argName] = str(argValue) if "refs" not in extras: try: file = os.path.basename(self.modelDocument.uri) except AttributeError: try: file = os.path.basename(self.entryLoadingUrl) except: file = "" extras["refs"] = [{"href": file}] for pluginXbrlMethod in pluginClassMethods("Logging.Message.Parameters"): # plug in can rewrite msg string or return msg if not altering msg msg = pluginXbrlMethod(messageCode, msg, modelObjectArgs, fmtArgs) or msg return (messageCode, (msg, fmtArgs) if fmtArgs else (msg,), extras)
AttributeError
dataset/ETHPy150Open Arelle/Arelle/arelle/ModelXbrl.py/ModelXbrl.logArguments
9,322
def profileStat(self, name=None, stat=None): ''' order 1xx - load, import, setup, etc order 2xx - views, 26x - table lb 3xx diff, other utilities 5xx validation 6xx formula ''' if self.modelManager.collectProfileStats: import time global profileStatNumber try: if name: thisTime = stat if stat is not None else time.time() - self._startedTimeStat mem = self.modelXbrl.modelManager.cntlr.memoryUsed prevTime = self.profileStats.get(name, (0,0,0))[1] self.profileStats[name] = (profileStatNumber, thisTime + prevTime, mem) profileStatNumber += 1 except __HOLE__: pass if stat is None: self._startedTimeStat = time.time()
AttributeError
dataset/ETHPy150Open Arelle/Arelle/arelle/ModelXbrl.py/ModelXbrl.profileStat
9,323
def profileActivity(self, activityCompleted=None, minTimeToShow=0): """Used to provide interactive GUI messages of long-running processes. When the time between last profileActivity and this profileActivity exceeds minTimeToShow, then the time is logged (if it is shorter than it is not logged), thus providing feedback of long running (and possibly troublesome) processing steps. :param activityCompleted: Description of activity completed, or None if call is just to demark starting of a profiled activity. :type activityCompleted: str :param minTimeToShow: Seconds of elapsed time for activity, if longer then the profile message appears in the log. :type minTimeToShow: seconds """ import time try: if activityCompleted: timeTaken = time.time() - self._startedProfiledActivity if timeTaken > minTimeToShow: self.info("info:profileActivity", _("%(activity)s %(time)s secs\n"), modelObject=self.modelXbrl.modelDocument, activity=activityCompleted, time=format_string(self.modelManager.locale, "%.3f", timeTaken, grouping=True)) except __HOLE__: pass self._startedProfiledActivity = time.time()
AttributeError
dataset/ETHPy150Open Arelle/Arelle/arelle/ModelXbrl.py/ModelXbrl.profileActivity
9,324
def test(self): here = os.getcwd() mypid = os.getpid() with open("split-test.txt", "w") as f: f.write("foobar") with splitbrain(self.conn): try: path = tempfile.mkdtemp() import email self.assertNotIn("stale", repr(email)) os.chdir(path) hispid = os.getpid() self.assertNotEqual(mypid, hispid) here2 = os.getcwd() self.assertNotEqual(here, here2) self.assertFalse(os.path.exists("split-test.txt")) with open("split-test.txt", "w") as f: f.write("spam") with localbrain(): self.assertEqual(os.getpid(), mypid) with open("split-test.txt", "r") as f: self.assertEqual(f.read(), "foobar") try: def f(): g() def g(): h() def h(): open("crap.txt", "r") f() except __HOLE__: with localbrain(): tbtext = "".join(traceback.format_exception(*sys.exc_info())) # pdb.post_mortem(sys.exc_info()[2]) self.assertIn("f()", tbtext) self.assertIn("g()", tbtext) self.assertIn("h()", tbtext) else: self.fail("This should have raised a IOError") finally: # we must move away from the tempdir to delete it (at least on windows) os.chdir("/") shutil.rmtree(path) self.assertIn("stale", repr(email)) self.assertEqual(os.getpid(), mypid) self.assertEqual(os.getcwd(), here) os.remove("split-test.txt")
IOError
dataset/ETHPy150Open tomerfiliba/rpyc/tests/test_splitbrain.py/SplitbrainTest.test
9,325
def _discover_space(name, globals): try: return _local.space_stack[-1] except (__HOLE__, IndexError): pass if '__pluginbase_state__' in globals: return globals['__pluginbase_state__'].source mod_name = globals.get('__name__') if mod_name is not None and \ mod_name.startswith(_internalspace.__name__ + '.'): end = mod_name.find('.', len(_internalspace.__name__) + 1) space = sys.modules.get(mod_name[:end]) if space is not None: return space.__pluginbase_state__.source
AttributeError
dataset/ETHPy150Open dragondjf/PFramer/util/pluginbase.py/_discover_space
9,326
def __getattr__(self, name): try: return ModuleType.__getattr__(self, name) except __HOLE__: if name[:2] == '__': raise raise RuntimeError( 'Attempted to import from a plugin base module (%s) without ' 'having a plugin source activated. To solve this error ' 'you have to move the import into a "with" block of the ' 'associated plugin source.' % self.__name__)
AttributeError
dataset/ETHPy150Open dragondjf/PFramer/util/pluginbase.py/_IntentionallyEmptyModule.__getattr__
9,327
@property def __path__(self): try: ps = self.__pluginbase_state__.source except __HOLE__: return [] return ps.searchpath + ps.base.searchpath
AttributeError
dataset/ETHPy150Open dragondjf/PFramer/util/pluginbase.py/_PluginSourceModule.__path__
9,328
def _setup_base_package(module_name): try: mod = __import__(module_name, None, None, ['__name__']) except __HOLE__: mod = None if '.' in module_name: parent_mod = __import__(module_name.rsplit('.', 1)[0], None, None, ['__name__']) else: parent_mod = None if mod is None: mod = _IntentionallyEmptyModule(module_name) if parent_mod is not None: setattr(parent_mod, module_name.rsplit('.', 1)[-1], mod) sys.modules[module_name] = mod
ImportError
dataset/ETHPy150Open dragondjf/PFramer/util/pluginbase.py/_setup_base_package
9,329
def __cleanup(self, _sys=sys, _shutdown_module=_shutdown_module): # The default parameters are necessary because this can be fired # from the destructor and so late when the interpreter shuts down # that these functions and modules might be gone. if self.mod is None: return modname = self.mod.__name__ self.mod.__pluginbase_state__ = None self.mod = None try: delattr(_internalspace, self.spaceid) except __HOLE__: pass prefix = modname + '.' _sys.modules.pop(modname) for key, value in list(_sys.modules.items()): if not key.startswith(prefix): continue mod = _sys.modules.pop(key, None) if mod is None: continue _shutdown_module(mod)
AttributeError
dataset/ETHPy150Open dragondjf/PFramer/util/pluginbase.py/PluginSource.__cleanup
9,330
def __exit__(self, exc_type, exc_value, tb): try: _local.space_stack.pop() except (AttributeError, __HOLE__): pass
IndexError
dataset/ETHPy150Open dragondjf/PFramer/util/pluginbase.py/PluginSource.__exit__
9,331
def get_tests(app_module): parts = app_module.__name__.split('.') prefix, last = parts[:-1], parts[-1] try: test_module = import_module('.'.join(prefix + [TEST_MODULE])) except __HOLE__: # Couldn't import tests.py. Was it due to a missing file, or # due to an import error in a tests.py that actually exists? # app_module either points to a models.py file, or models/__init__.py # Tests are therefore either in same directory, or one level up if last == 'models': app_root = import_module('.'.join(prefix)) else: app_root = app_module if not module_has_submodule(app_root, TEST_MODULE): test_module = None else: # The module exists, so there must be an import error in the test # module itself. raise return test_module
ImportError
dataset/ETHPy150Open golismero/golismero/thirdparty_libs/django/test/simple.py/get_tests
9,332
def build_suite(app_module): """ Create a complete Django test suite for the provided application module. """ suite = unittest.TestSuite() # Load unit and doctests in the models.py module. If module has # a suite() method, use it. Otherwise build the test suite ourselves. if hasattr(app_module, 'suite'): suite.addTest(app_module.suite()) else: suite.addTest(unittest.defaultTestLoader.loadTestsFromModule( app_module)) try: suite.addTest(doctest.DocTestSuite(app_module, checker=doctestOutputChecker, runner=DocTestRunner)) except __HOLE__: # No doc tests in models.py pass # Check to see if a separate 'tests' module exists parallel to the # models module test_module = get_tests(app_module) if test_module: # Load unit and doctests in the tests.py module. If module has # a suite() method, use it. Otherwise build the test suite ourselves. if hasattr(test_module, 'suite'): suite.addTest(test_module.suite()) else: suite.addTest(unittest.defaultTestLoader.loadTestsFromModule( test_module)) try: suite.addTest(doctest.DocTestSuite( test_module, checker=doctestOutputChecker, runner=DocTestRunner)) except ValueError: # No doc tests in tests.py pass return suite
ValueError
dataset/ETHPy150Open golismero/golismero/thirdparty_libs/django/test/simple.py/build_suite
9,333
def build_test(label): """ Construct a test case with the specified label. Label should be of the form model.TestClass or model.TestClass.test_method. Returns an instantiated test or test suite corresponding to the label provided. """ parts = label.split('.') if len(parts) < 2 or len(parts) > 3: raise ValueError("Test label '%s' should be of the form app.TestCase " "or app.TestCase.test_method" % label) # # First, look for TestCase instances with a name that matches # app_module = get_app(parts[0]) test_module = get_tests(app_module) TestClass = getattr(app_module, parts[1], None) # Couldn't find the test class in models.py; look in tests.py if TestClass is None: if test_module: TestClass = getattr(test_module, parts[1], None) try: if issubclass(TestClass, (unittest.TestCase, real_unittest.TestCase)): if len(parts) == 2: # label is app.TestClass try: return unittest.TestLoader().loadTestsFromTestCase( TestClass) except TypeError: raise ValueError( "Test label '%s' does not refer to a test class" % label) else: # label is app.TestClass.test_method return TestClass(parts[2]) except TypeError: # TestClass isn't a TestClass - it must be a method or normal class pass # # If there isn't a TestCase, look for a doctest that matches # tests = [] for module in app_module, test_module: try: doctests = doctest.DocTestSuite(module, checker=doctestOutputChecker, runner=DocTestRunner) # Now iterate over the suite, looking for doctests whose name # matches the pattern that was given for test in doctests: if test._dt_test.name in ( '%s.%s' % (module.__name__, '.'.join(parts[1:])), '%s.__test__.%s' % ( module.__name__, '.'.join(parts[1:]))): tests.append(test) except __HOLE__: # No doctests found. pass # If no tests were found, then we were given a bad test label. if not tests: raise ValueError("Test label '%s' does not refer to a test" % label) # Construct a suite out of the tests that matched. return unittest.TestSuite(tests)
ValueError
dataset/ETHPy150Open golismero/golismero/thirdparty_libs/django/test/simple.py/build_test
9,334
def integrity_check(config, wiggle=0): for suite in config.suites: for view in suite.views: matches = defaultdict(list) for couch in config.couches: params = { 'reduce': 'false', 'limit': 0 } resp = requests.get("{uri}/{database}/{view}".format(uri=couch.uri, database=suite.database, view=view), params=params, headers=couch.headers) content = json.loads(resp.content) try: total_rows = content['total_rows'] except __HOLE__: print "Problem getting `total_rows`. Is this a valid couch database? {}"\ .format(suite.database) else: matched = False for wiggle_range, couches in matches.items(): if wiggle_range[0] <= total_rows <= wiggle_range[1]: matches[wiggle_range].append((couch.uri, total_rows)) matched = True if not matched: new_wiggle_range = (total_rows - wiggle, total_rows + wiggle) matches[new_wiggle_range].append((couch.uri, total_rows)) print_result(matches, view, suite.database)
KeyError
dataset/ETHPy150Open dimagi/commcare-hq/corehq/apps/cleanup/management/commands/couch_integrity.py/integrity_check
9,335
def _configure_polymorphic_setter(self, init=False): """Configure an attribute on the mapper representing the 'polymorphic_on' column, if applicable, and not already generated by _configure_properties (which is typical). Also create a setter function which will assign this attribute to the value of the 'polymorphic_identity' upon instance construction, also if applicable. This routine will run when an instance is created. """ setter = False if self.polymorphic_on is not None: setter = True if isinstance(self.polymorphic_on, util.string_types): # polymorphic_on specified as a string - link # it to mapped ColumnProperty try: self.polymorphic_on = self._props[self.polymorphic_on] except __HOLE__: raise sa_exc.ArgumentError( "Can't determine polymorphic_on " "value '%s' - no attribute is " "mapped to this name." % self.polymorphic_on) if self.polymorphic_on in self._columntoproperty: # polymorphic_on is a column that is already mapped # to a ColumnProperty prop = self._columntoproperty[self.polymorphic_on] polymorphic_key = prop.key self.polymorphic_on = prop.columns[0] polymorphic_key = prop.key elif isinstance(self.polymorphic_on, MapperProperty): # polymorphic_on is directly a MapperProperty, # ensure it's a ColumnProperty if not isinstance(self.polymorphic_on, properties.ColumnProperty): raise sa_exc.ArgumentError( "Only direct column-mapped " "property or SQL expression " "can be passed for polymorphic_on") prop = self.polymorphic_on self.polymorphic_on = prop.columns[0] polymorphic_key = prop.key elif not expression._is_column(self.polymorphic_on): # polymorphic_on is not a Column and not a ColumnProperty; # not supported right now. raise sa_exc.ArgumentError( "Only direct column-mapped " "property or SQL expression " "can be passed for polymorphic_on" ) else: # polymorphic_on is a Column or SQL expression and # doesn't appear to be mapped. this means it can be 1. # only present in the with_polymorphic selectable or # 2. 
a totally standalone SQL expression which we'd # hope is compatible with this mapper's mapped_table col = self.mapped_table.corresponding_column( self.polymorphic_on) if col is None: # polymorphic_on doesn't derive from any # column/expression isn't present in the mapped # table. we will make a "hidden" ColumnProperty # for it. Just check that if it's directly a # schema.Column and we have with_polymorphic, it's # likely a user error if the schema.Column isn't # represented somehow in either mapped_table or # with_polymorphic. Otherwise as of 0.7.4 we # just go with it and assume the user wants it # that way (i.e. a CASE statement) setter = False instrument = False col = self.polymorphic_on if isinstance(col, schema.Column) and ( self.with_polymorphic is None or self.with_polymorphic[1]. corresponding_column(col) is None): raise sa_exc.InvalidRequestError( "Could not map polymorphic_on column " "'%s' to the mapped table - polymorphic " "loads will not function properly" % col.description) else: # column/expression that polymorphic_on derives from # is present in our mapped table # and is probably mapped, but polymorphic_on itself # is not. This happens when # the polymorphic_on is only directly present in the # with_polymorphic selectable, as when use # polymorphic_union. # we'll make a separate ColumnProperty for it. instrument = True key = getattr(col, 'key', None) if key: if self._should_exclude(col.key, col.key, False, col): raise sa_exc.InvalidRequestError( "Cannot exclude or override the " "discriminator column %r" % col.key) else: self.polymorphic_on = col = \ col.label("_sa_polymorphic_on") key = col.key self._configure_property( key, properties.ColumnProperty(col, _instrument=instrument), init=init, setparent=True) polymorphic_key = key else: # no polymorphic_on was set. # check inheriting mappers for one. for mapper in self.iterate_to_root(): # determine if polymorphic_on of the parent # should be propagated here. 
If the col # is present in our mapped table, or if our mapped # table is the same as the parent (i.e. single table # inheritance), we can use it if mapper.polymorphic_on is not None: if self.mapped_table is mapper.mapped_table: self.polymorphic_on = mapper.polymorphic_on else: self.polymorphic_on = \ self.mapped_table.corresponding_column( mapper.polymorphic_on) # we can use the parent mapper's _set_polymorphic_identity # directly; it ensures the polymorphic_identity of the # instance's mapper is used so is portable to subclasses. if self.polymorphic_on is not None: self._set_polymorphic_identity = \ mapper._set_polymorphic_identity self._validate_polymorphic_identity = \ mapper._validate_polymorphic_identity else: self._set_polymorphic_identity = None return if setter: def _set_polymorphic_identity(state): dict_ = state.dict state.get_impl(polymorphic_key).set( state, dict_, state.manager.mapper.polymorphic_identity, None) def _validate_polymorphic_identity(mapper, state, dict_): if polymorphic_key in dict_ and \ dict_[polymorphic_key] not in \ mapper._acceptable_polymorphic_identities: util.warn( "Flushing object %s with " "incompatible polymorphic identity %r; the " "object may not refresh and/or load correctly" % ( state_str(state), dict_[polymorphic_key] ) ) self._set_polymorphic_identity = _set_polymorphic_identity self._validate_polymorphic_identity = \ _validate_polymorphic_identity else: self._set_polymorphic_identity = None
KeyError
dataset/ETHPy150Open goFrendiAsgard/kokoropy/kokoropy/packages/sqlalchemy/orm/mapper.py/Mapper._configure_polymorphic_setter
9,336
def get_property(self, key, _configure_mappers=True): """return a MapperProperty associated with the given key. """ if _configure_mappers and Mapper._new_mappers: configure_mappers() try: return self._props[key] except __HOLE__: raise sa_exc.InvalidRequestError( "Mapper '%s' has no property '%s'" % (self, key))
KeyError
dataset/ETHPy150Open goFrendiAsgard/kokoropy/kokoropy/packages/sqlalchemy/orm/mapper.py/Mapper.get_property
9,337
def Import(modname): """Improved __import__ function that returns fully initialized subpackages.""" try: return sys.modules[modname] except __HOLE__: pass __import__(modname) return sys.modules[modname]
KeyError
dataset/ETHPy150Open kdart/pycopia/aid/pycopia/module.py/Import
9,338
def get_module(name): """ Use the Python import function to get a Python package module by name. Raises ModuleImportError if the module could not be found. Arguments: name: A string. The name of the module to import and run. Returns: A python module. """ try: return sys.modules[name] except __HOLE__: pass try: __import__(name) except ImportError as err: raise ModuleImportError("Error loading: %s (%s)." % (name, err)) else: return sys.modules[name]
KeyError
dataset/ETHPy150Open kdart/pycopia/aid/pycopia/module.py/get_module
9,339
def get_object(name): """Get an object from a Python module, or a module itself. Arguments: name: Python path name of object. Usually a module or a class in a module. Returns: An object identified by the given path name. may raise AttributeError or ModuleImportError if name not found. """ try: return sys.modules[name] except KeyError: pass i = name.rfind(".") if i >= 0: try: __import__(name) except __HOLE__: pass else: return sys.modules[name] mod = get_module(name[:i]) # module name component try: return getattr(mod, name[i+1:]) # path tail is an object inside module except AttributeError: raise ObjectImportError("%r not found in %r." % (name[i+1:], mod.__name__)) else: return get_module(name) # basic module name
ImportError
dataset/ETHPy150Open kdart/pycopia/aid/pycopia/module.py/get_object
9,340
def find_package(packagename, searchpath=None): """Find a package by fully qualified name.""" try: return sys.modules[packagename] except __HOLE__: pass for pkgname in _iter_subpath(packagename): if "." in pkgname: basepkg, subpkg = pkgname.rsplit(".", 1) pkg = sys.modules[basepkg] _load_package(subpkg, basepkg, pkg.__path__) else: try: sys.modules[pkgname] except KeyError: _load_package(pkgname, None, searchpath) return sys.modules[packagename]
KeyError
dataset/ETHPy150Open kdart/pycopia/aid/pycopia/module.py/find_package
9,341
def find_module(modname, path=None): """Find a module, also handling subpackages. Similar to imp.find_module(), except works with subpackages. This does not load the module, so no side effects from the module. It does load the package, so any contents of __init__.py are run and may have side effects. Returns: fo -- Open file object. fname -- file name of the file found desc -- a 3-tuple of extension, mode, and file type. """ if "." in modname: pkgname, modname = modname.rsplit(".", 1) pkg = find_package(pkgname) return find_module(modname, pkg.__path__) try: info = imp.find_module(modname, path) except __HOLE__: return None return info
ImportError
dataset/ETHPy150Open kdart/pycopia/aid/pycopia/module.py/find_module
9,342
def lookup(self, path): if path == "" or path[0] != "/": path = posixpath.normpath("/" + path) else: path = posixpath.normpath(path) # Set default mimeType. It will be modified only if there is a change self.set_mimetype(MIME_TYPE_TEXT_PLAIN) # fix up requests, prepending /ec2 to anything that does not match path_tokens = path.split('/')[1:] if path_tokens[0] not in ("ec2", "openstack"): if path_tokens[0] == "": # request for / path_tokens = ["ec2"] else: path_tokens = ["ec2"] + path_tokens path = "/" + "/".join(path_tokens) # all values of 'path' input starts with '/' and have no trailing / # specifically handle the top level request if len(path_tokens) == 1: if path_tokens[0] == "openstack": # NOTE(vish): don't show versions that are in the future today = timeutils.utcnow().strftime("%Y-%m-%d") versions = [v for v in OPENSTACK_VERSIONS if v <= today] if OPENSTACK_VERSIONS != versions: LOG.debug("future versions %s hidden in version list", [v for v in OPENSTACK_VERSIONS if v not in versions]) versions += ["latest"] else: versions = VERSIONS + ["latest"] return versions try: if path_tokens[0] == "openstack": data = self.get_openstack_item(path_tokens[1:]) else: data = self.get_ec2_item(path_tokens[1:]) except (InvalidMetadataVersion, __HOLE__): raise InvalidMetadataPath(path) return data
KeyError
dataset/ETHPy150Open BU-NU-CLOUD-SP16/Trusted-Platform-Module-nova/nova/api/metadata/base.py/InstanceMetadata.lookup
9,343
def metadata_for_config_drive(self): """Yields (path, value) tuples for metadata elements.""" # EC2 style metadata for version in VERSIONS + ["latest"]: if version in CONF.config_drive_skip_versions.split(' '): continue data = self.get_ec2_metadata(version) if 'user-data' in data: filepath = os.path.join('ec2', version, 'user-data') yield (filepath, data['user-data']) del data['user-data'] try: del data['public-keys']['0']['_name'] except __HOLE__: pass filepath = os.path.join('ec2', version, 'meta-data.json') yield (filepath, jsonutils.dump_as_bytes(data['meta-data'])) ALL_OPENSTACK_VERSIONS = OPENSTACK_VERSIONS + ["latest"] for version in ALL_OPENSTACK_VERSIONS: path = 'openstack/%s/%s' % (version, MD_JSON_NAME) yield (path, self.lookup(path)) path = 'openstack/%s/%s' % (version, UD_NAME) if self.userdata_raw is not None: yield (path, self.lookup(path)) if self._check_version(HAVANA, version, ALL_OPENSTACK_VERSIONS): path = 'openstack/%s/%s' % (version, VD_JSON_NAME) yield (path, self.lookup(path)) if self._check_version(LIBERTY, version, ALL_OPENSTACK_VERSIONS): path = 'openstack/%s/%s' % (version, NW_JSON_NAME) yield (path, self.lookup(path)) for (cid, content) in six.iteritems(self.content): yield ('%s/%s/%s' % ("openstack", CONTENT_DIR, cid), content)
KeyError
dataset/ETHPy150Open BU-NU-CLOUD-SP16/Trusted-Platform-Module-nova/nova/api/metadata/base.py/InstanceMetadata.metadata_for_config_drive
9,344
@staticmethod def case_status_onaccept(form): """ Onaccept routine for case statuses: - only one status can be the default @param form: the FORM """ form_vars = form.vars try: record_id = form_vars.id except __HOLE__: record_id = None if not record_id: return # If this status is the default, then set is_default-flag # for all other statuses to False: if "is_default" in form_vars and form_vars.is_default: table = current.s3db.dvr_case_status db = current.db db(table.id != record_id).update(is_default = False) # -------------------------------------------------------------------------
AttributeError
dataset/ETHPy150Open sahana/eden/modules/s3db/dvr.py/DVRCaseModel.case_status_onaccept
9,345
@staticmethod def case_onvalidation(form): """ Case onvalidation: - make sure case numbers are unique within the organisation @param form: the FORM """ db = current.db s3db = current.s3db # Read form data form_vars = form.vars if "id" in form_vars: # Inline subtable update record_id = form_vars.id elif hasattr(form, "record_id"): # Regular update form record_id = form.record_id else: # New record record_id = None try: reference = form_vars.reference except __HOLE__: reference = None if reference: # Make sure the case reference is unique within the organisation ctable = s3db.dvr_case otable = s3db.org_organisation # Get the organisation_id if "organisation_id" not in form_vars: if not record_id: # Create form with hidden organisation_id # => use default organisation_id = ctable.organisation_id.default else: # Reload the record to get the organisation_id query = (ctable.id == record_id) row = db(query).select(ctable.organisation_id, limitby = (0, 1)).first() if not row: return organisation_id = row.organisation_id else: # Use the organisation_id in the form organisation_id = form_vars.organisation_id # Case duplicate query dquery = (ctable.reference == reference) & \ (ctable.deleted != True) if record_id: dquery &= (ctable.id != record_id) msg = current.T("This Case Number is already in use") # Add organisation query to duplicate query if current.deployment_settings.get_org_branches(): # Get the root organisation query = (otable.id == organisation_id) row = db(query).select(otable.root_organisation, limitby = (0, 1)).first() root_organisation = row.root_organisation \ if row else organisation_id dquery &= (otable.root_organisation == root_organisation) left = otable.on(otable.id == ctable.organisation_id) else: dquery &= (ctable.organisation_id == organisation_id) left = None # Is there a record with the same reference? 
row = db(dquery).select(ctable.id, left = left, limitby = (0, 1)).first() if row: form.errors["reference"] = msg # -------------------------------------------------------------------------
AttributeError
dataset/ETHPy150Open sahana/eden/modules/s3db/dvr.py/DVRCaseModel.case_onvalidation
9,346
@staticmethod def case_appointment_ondelete(row): """ Actions after deleting appointments - Update last_seen_on in the corresponding case(s) @param row: the deleted Row """ if current.deployment_settings.get_dvr_appointments_update_last_seen_on(): # Get the deleted keys table = current.s3db.dvr_case_appointment row = current.db(table.id == row.id).select(table.deleted_fk, limitby = (0, 1), ).first() if row and row.deleted_fk: # Get the person ID try: deleted_fk = json.loads(row.deleted_fk) except (ValueError, __HOLE__): person_id = None else: person_id = deleted_fk.get("person_id") # Update last_seen_on if person_id: dvr_update_last_seen(person_id) # =============================================================================
TypeError
dataset/ETHPy150Open sahana/eden/modules/s3db/dvr.py/DVRCaseAppointmentModel.case_appointment_ondelete
9,347
@staticmethod def allowance_ondelete(row): """ Actions after deleting allowance information - Update last_seen_on in the corresponding case(s) @param row: the deleted Row """ if current.deployment_settings.get_dvr_payments_update_last_seen_on(): # Get the deleted keys table = current.s3db.dvr_allowance row = current.db(table.id == row.id).select(table.deleted_fk, limitby = (0, 1), ).first() if row and row.deleted_fk: # Get the person ID try: deleted_fk = json.loads(row.deleted_fk) except (ValueError, __HOLE__): person_id = None else: person_id = deleted_fk.get("person_id") # Update last_seen_on if person_id: dvr_update_last_seen(person_id) # =============================================================================
TypeError
dataset/ETHPy150Open sahana/eden/modules/s3db/dvr.py/DVRCaseAllowanceModel.allowance_ondelete
9,348
@staticmethod def case_event_type_onaccept(form): """ Onaccept routine for case event types: - only one type can be the default @param form: the FORM """ form_vars = form.vars try: record_id = form_vars.id except __HOLE__: record_id = None if not record_id: return # If this type is the default, then set is_default-flag # for all other types to False: if "is_default" in form_vars and form_vars.is_default: table = current.s3db.dvr_case_event_type db = current.db db(table.id != record_id).update(is_default = False) # -------------------------------------------------------------------------
AttributeError
dataset/ETHPy150Open sahana/eden/modules/s3db/dvr.py/DVRCaseEventModel.case_event_type_onaccept
9,349
@staticmethod def case_event_create_onaccept(form): """ Actions after creation of a case event: - update last_seen_on in the corresponding cases - close appointments if configured to do so @param form: the FORM """ formvars = form.vars try: record_id = formvars.id except __HOLE__: record_id = None if not record_id: return db = current.db s3db = current.s3db close_appointments = current.deployment_settings \ .get_dvr_case_events_close_appointments() case_id = formvars.get("case_id") person_id = formvars.get("person_id") type_id = formvars.get("type_id") if not person_id or \ close_appointments and (not case_id or not type_id): # Reload the record table = s3db.dvr_case_event row = db(table.id == record_id).select(table.case_id, table.person_id, table.type_id, limitby = (0, 1), ).first() if not row: return case_id = row.case_id person_id == row.person_id type_id = row.type_id if not person_id: return # Update last_seen dvr_update_last_seen(person_id) # Close appointments if close_appointments and type_id: atable = s3db.dvr_case_appointment ttable = s3db.dvr_case_event_type today = current.request.utcnow.date() left = atable.on((atable.type_id == ttable.appointment_type_id) & \ (atable.person_id == person_id) & \ ((atable.date == None) | (atable.date <= today)) & \ (atable.deleted != True) ) query = (ttable.id == type_id) & \ (ttable.appointment_type_id != None) & \ (ttable.deleted != True) if case_id: query &= (atable.case_id == case_id) | \ (atable.case_id == None) rows = db(query).select(ttable.appointment_type_id, atable.id, atable.date, atable.status, left = left, orderby = ~atable.date, ) data = {"date": today, "status": 4} if rows: update = None create = False first = rows[0].dvr_case_appointment if first.id is None: # No appointment of this type yet create = True else: # Find key dates undated = open_today = closed_today = previous = None for row in rows: appointment = row.dvr_case_appointment if appointment.date is None: if not undated: # An appointment 
without date undated = appointment elif appointment.date == today: if appointment.status != 4: # An open or cancelled appointment today open_today = appointment else: # A closed appointment today closed_today = appointment elif previous is None: # The last appointment before today previous = appointment if open_today: # If we have an open appointment for today, update it update = open_today elif closed_today: # If we already have a closed appointment for today, # do nothing update = None elif previous: if previous.status not in (1, 2, 3): # Last appointment before today is closed # => create a new one unless there is an undated one if undated: update = undated else: create = True else: # Last appointment before today is still open # => update it update = previous else: update = undated if create: # Create a new closed appointment data["type_id"] = rows[0].dvr_case_event_type.appointment_type_id data["person_id"] = person_id data["case_id"] = case_id aresource = s3db.resource("dvr_case_appointment") try: record_id = aresource.insert(**data) except S3PermissionError: current.log.error("Event Registration: %s" % sys.exc_info()[1]) elif update: # Update the appointment permitted = current.auth.s3_has_permission("update", atable, record_id=update.id, ) if permitted: # Customise appointment resource r = S3Request("dvr", "case_appointment", current.request, args = [], get_vars = {}, ) r.customise_resource("dvr_case_appointment") # Update appointment success = update.update_record(**data) if success: data["id"] = update.id s3db.onaccept(atable, data, method="update") else: current.log.error("Event Registration: could not update appointment %s" % update.id) else: current.log.error("Event registration: not permitted to update appointment %s" % update.id) # -------------------------------------------------------------------------
AttributeError
dataset/ETHPy150Open sahana/eden/modules/s3db/dvr.py/DVRCaseEventModel.case_event_create_onaccept
9,350
@staticmethod def case_event_ondelete(row): """ Actions after deleting a case event: - update last_seen_on in the corresponding cases @param row: the deleted Row """ # Get the deleted keys table = current.s3db.dvr_case_event row = current.db(table.id == row.id).select(table.deleted_fk, limitby = (0, 1), ).first() if row and row.deleted_fk: # Get the person ID try: deleted_fk = json.loads(row.deleted_fk) except (__HOLE__, TypeError): person_id = None else: person_id = deleted_fk.get("person_id") # Update last_seen_on if person_id: dvr_update_last_seen(person_id) # =============================================================================
ValueError
dataset/ETHPy150Open sahana/eden/modules/s3db/dvr.py/DVRCaseEventModel.case_event_ondelete
9,351
def registration_ajax(self, r, **attr): """ Ajax response method, expects a JSON input like: {l: the PE label (from the input field), c: boolean to indicate whether to just check the PE label or to register payments t: the event type code } @param r: the S3Request instance @param attr: controller parameters @return: JSON response, structure: {l: the actual PE label (to update the input field), p: the person details, f: [{n: the flag name i: the flag instructions }, ...], s: whether the action is permitted or not e: form error (for label field) a: error message w: warning message m: success message } """ T = current.T # Load JSON data from request body s = r.body s.seek(0) try: data = json.load(s) except (__HOLE__, TypeError): r.error(400, current.ERROR.BAD_REQUEST) # Initialize processing variables output = {} error = None alert = None message = None warning = None permitted = False flags = [] # Identify the person pe_label = data.get("l") person = self.get_person(pe_label) if person is None: error = s3_str(T("No person found with this ID number")) else: # Get flag info flag_info = dvr_get_flag_instructions(person.id, action = "id-check", ) permitted = flag_info["permitted"] check = data.get("c") if check: name = s3_fullname(person) dob = person.date_of_birth if dob: dob = S3DateTime.date_represent(dob) person_data = "%s (%s %s)" % (name, T("Date of Birth"), dob) else: person_data = name output["p"] = s3_str(person_data) output["l"] = person.pe_label info = flag_info["info"] for flagname, instructions in info: flags.append({"n": s3_str(T(flagname)), "i": s3_str(T(instructions)), }) else: event_code = data.get("t") if not event_code: alert = T("No event type specified") elif not permitted: alert = T("Event registration not permitted") else: event_type = self.get_event_type(event_code) if not event_type: alert = T("Invalid event type %s" % event_code) else: success = self.register_event(person.id, event_type.id) if success: message = T("Event registered") else: alert 
= T("Could not register event") # Add messages to output if alert: output["a"] = s3_str(alert) if error: output["e"] = s3_str(error) if message: output["m"] = s3_str(message) if warning: output["w"] = s3_str(warning) # Add flag info to output output["s"] = permitted output["f"] = flags current.response.headers["Content-Type"] = "application/json" return json.dumps(output) # -------------------------------------------------------------------------
ValueError
dataset/ETHPy150Open sahana/eden/modules/s3db/dvr.py/DVRRegisterCaseEvent.registration_ajax
9,352
def registration_ajax(self, r, **attr): """ Ajax response method, expects a JSON input like: {l: the PE label (from the input field), c: boolean to indicate whether to just check the PE label or to register payments d: the payment data (raw data, which payments to update) } @param r: the S3Request instance @param attr: controller parameters @return: JSON response, structure: {l: the actual PE label (to update the input field), p: the person details, f: [{n: the flag name i: the flag instructions }, ...], u: whether there are any actionable data s: whether the action is permitted or not d: {t: time stamp h: payment details (raw data) d: payment details (HTML) } e: form error (for label field) a: error message w: warning message m: success message } """ T = current.T # Load JSON data from request body s = r.body s.seek(0) try: data = json.load(s) except (__HOLE__, TypeError): r.error(400, current.ERROR.BAD_REQUEST) # Initialize processing variables output = {} alert = None error = None warning = None message = None permitted = False flags = [] # Identify the person pe_label = data.get("l") person = self.get_person(pe_label) if person is None: error = s3_str(T("No person found with this ID number")) else: # Get flag info flag_info = dvr_get_flag_instructions(person.id, action = self.ACTION, ) permitted = flag_info["permitted"] check = data.get("c") if check: name = s3_fullname(person) dob = person.date_of_birth if dob: dob = S3DateTime.date_represent(dob) person_data = "%s (%s %s)" % (name, T("Date of Birth"), dob) else: person_data = name output["p"] = s3_str(person_data) output["l"] = person.pe_label info = flag_info["info"] for flagname, instructions in info: flags.append({"n": s3_str(T(flagname)), "i": s3_str(T(instructions)), }) if permitted: payments = self.get_payment_data(person.id) else: payments = [] date = S3DateTime.datetime_represent(current.request.utcnow, utc = True, ) output["d"] = {"d": s3_str(self.payment_data_represent(payments)), "t": s3_str(date), 
"h": payments, } if payments: output["u"] = True else: output["u"] = False else: if not permitted: alert = T("Payment registration not permitted") else: # Get payment data from JSON payments = data.get("d") if payments: # @todo: read date from JSON data (utcnow as fallback) date = r.utcnow comments = data.get("c") updated, failed = self.register_payments( person.id, payments, date = date, comments = comments, ) message = T("%(number)s payment(s) registered") % \ {"number": updated} if failed: warning = T("%(number)s payment(s) not found") % \ {"number": failed} else: alert = T("No payments specified") # Add messages to output if alert: output["a"] = s3_str(alert) if error: output["e"] = s3_str(error) if message: output["m"] = s3_str(message) if warning: output["w"] = s3_str(warning) # Add flag info to output output["s"] = permitted output["f"] = flags current.response.headers["Content-Type"] = "application/json" return json.dumps(output) # -------------------------------------------------------------------------
ValueError
dataset/ETHPy150Open sahana/eden/modules/s3db/dvr.py/DVRRegisterPayment.registration_ajax
9,353
def register_payments(self, person_id, payments, date=None, comments=None): """ Helper function to register payments @param person_id: the person record ID @param payments: the payments as sent from form @param date: the payment date (default utcnow) @param comments: comments for the payments @return: tuple (updated, failed), number of records """ if isinstance(payments, basestring): try: payments = json.loads(payments) except (ValueError, __HOLE__): payments = [] if not date: date = current.request.utcnow # Data to write data = {"status": 2, "paid_on": date, } if comments: data["comments"] = comments atable = current.s3db.dvr_allowance updated = 0 failed = 0 # Customise allowance resource r = S3Request("dvr", "allowance", current.request, args = [], get_vars = {}, ) r.customise_resource("dvr_allowance") onaccept = current.s3db.onaccept db = current.db accessible = current.auth.s3_accessible_query("update", atable) for payment in payments: record_id = payment.get("r") query = accessible & \ (atable.id == record_id) & \ (atable.person_id == person_id) & \ (atable.status != 2) & \ (atable.deleted != True) success = db(query).update(**data) if success: record = {"id": record_id, "person_id": person_id} record.update(data) onaccept(atable, record, method="update") updated += 1 else: failed += 1 return updated, failed # -------------------------------------------------------------------------
TypeError
dataset/ETHPy150Open sahana/eden/modules/s3db/dvr.py/DVRRegisterPayment.register_payments
9,354
def dvr_update_last_seen(person_id): """ Helper function for automatic updates of dvr_case.last_seen_on @param person_id: the person ID """ db = current.db s3db = current.s3db now = current.request.utcnow last_seen_on = None if not person_id: return # Get the last case event etable = s3db.dvr_case_event query = (etable.person_id == person_id) & \ (etable.date != None) & \ (etable.date <= now) & \ (etable.deleted != True) event = db(query).select(etable.date, orderby = ~etable.date, limitby = (0, 1), ).first() if event: last_seen_on = event.date # Check shelter registration history for newer entries htable = s3db.cr_shelter_registration_history query = (htable.person_id == person_id) & \ (htable.status.belongs(2, 3)) & \ (htable.date != None) & \ (htable.deleted != True) if last_seen_on is not None: query &= htable.date > last_seen_on entry = db(query).select(htable.date, orderby = ~htable.date, limitby = (0, 1), ).first() if entry: last_seen_on = entry.date settings = current.deployment_settings # Case appointments to update last_seen_on? 
if settings.get_dvr_appointments_update_last_seen_on(): atable = s3db.dvr_case_appointment ttable = s3db.dvr_case_appointment_type left = ttable.on(ttable.id == atable.type_id) query = (atable.person_id == person_id) & \ (atable.date != None) & \ (ttable.presence_required == True) & \ (atable.date <= now.date()) & \ (atable.status == 4) & \ (atable.deleted != True) if last_seen_on is not None: query &= atable.date > last_seen_on.date() appointment = db(query).select(atable.date, left = left, orderby = ~atable.date, limitby = (0, 1), ).first() if appointment: date = appointment.date try: date = datetime.datetime.combine(date, datetime.time(0, 0, 0)) except __HOLE__: pass # Local time offset to UTC (NB: can be 0) delta = S3DateTime.get_offset_value(current.session.s3.utc_offset) # Default to 08:00 local time (...unless that would be future) date = min(now, date + datetime.timedelta(seconds = 28800 - delta)) last_seen_on = date # Allowance payments to update last_seen_on? if settings.get_dvr_payments_update_last_seen_on(): atable = s3db.dvr_allowance query = (atable.person_id == person_id) & \ (atable.paid_on != None) & \ (atable.status == 2) & \ (atable.deleted != True) if last_seen_on is not None: query &= atable.paid_on > last_seen_on payment = db(query).select(atable.paid_on, orderby = ~atable.paid_on, limitby = (0, 1), ).first() if payment: last_seen_on = payment.paid_on # Update last_seen_on ctable = s3db.dvr_case query = (ctable.person_id == person_id) & \ (ctable.archived != True) & \ (ctable.deleted != True) db(query).update(last_seen_on = last_seen_on, # Don't change author stamp for # system-controlled record update: modified_on = ctable.modified_on, modified_by = ctable.modified_by, ) # =============================================================================
TypeError
dataset/ETHPy150Open sahana/eden/modules/s3db/dvr.py/dvr_update_last_seen
9,355
def render(self, name, value, attrs=None): if self.is_localized: for widget in self.widgets: widget.is_localized = self.is_localized # value is a list of values, each corresponding to a widget # in self.widgets. if not isinstance(value, list): value = self.decompress(value) output = [u'<span id="%s_%s">' % (u'multi', name)] final_attrs = self.build_attrs(attrs) id_ = final_attrs.get('id', None) for i, widget in enumerate(self.widgets): try: widget_value = value[i] except __HOLE__: widget_value = None if id_: final_attrs = dict(final_attrs, id=id_) output.append(widget.render(name, widget_value, final_attrs)) output.append(u'</span>') return mark_safe(self.format_output(output))
IndexError
dataset/ETHPy150Open treeio/treeio/treeio/core/ajax/converter.py/MultiHiddenWidget.render
9,356
def __init__(self, data): assert(len(data) == self.HeaderLength) # Conveniently, the new ASCII format defines all fields (except for # the magic) to be 8 chars long, each field being a hex encoded # length. format = '6s' + '8s' * 13 arr = struct.unpack(format, data) for slotName, val in zip(self.__slots__, arr): if slotName != 'magic': try: val = int(val, 16) except __HOLE__: raise InvalidFieldValue(val) setattr(self, slotName, val) # Pad to multiple of four, first the file name self.skip = self.pad(self.HeaderLength + self.namesize) # Then the file size self.skip += self.pad(self.filesize)
ValueError
dataset/ETHPy150Open sassoftware/conary/conary/lib/cpiostream.py/CpioHeader.__init__
9,357
def explode(self, destDir): linkMap = {} for ent in self: try: target = destDir + '/' + ent.filename parent = os.path.dirname(target) if not os.path.exists(parent): os.makedirs(parent) if stat.S_ISCHR(ent.header.mode): os.mknod(target, stat.S_IFCHR, os.makedev(ent.rdevmajor, ent.rdevminor)) elif stat.S_ISBLK(ent.header.mode): os.mknod(target, stat.S_IFBLK, os.makedev(ent.rdevmajor, ent.rdevminor)) elif stat.S_ISDIR(ent.header.mode): os.mkdir(target) elif stat.S_ISFIFO(ent.header.mode): os.mkfifo(target) elif stat.S_ISLNK(ent.header.mode): os.symlink(ent.payload.read(),target) elif stat.S_ISREG(ent.header.mode): # save hardlinks until after the file content is written if ent.header.nlink > 1 and ent.header.filesize == 0: l = linkMap.get(ent.header.inode, []) l.append(target) linkMap[ent.header.inode] = l continue f = open(target, "w") buf = ent.payload.read(64 * 1024) while buf: f.write(buf) buf = ent.payload.read(64 * 1024) f.close() else: raise Error("unknown file mode 0%o for %s" % (ent.header.mode, ent.filename)) # create hardlinks after the file content is written if ent.header.nlink > 1 and ent.header.filesize: l = linkMap.get(ent.header.inode, []) # the last entry with the same inode should contain the # contents so this list should always have at least one # entry assert(l) for t in l: os.link(target, t) # create hardlinks after the file content is written except __HOLE__, e: if e.errno == errno.EEXIST: pass else: raise if not stat.S_ISLNK(ent.header.mode): os.chmod(target, ent.header.mode & 0777)
OSError
dataset/ETHPy150Open sassoftware/conary/conary/lib/cpiostream.py/CpioExploder.explode
9,358
def register(self, state, transform, validators=[], raw=False): registry = self._get_registry(raw) try: state_xforms = registry[state] except KeyError: registry[state] = OrderedDict() state_xforms = registry[state] try: if issubclass(transform, Transform): # Transform is a class. Instantiate it. transform_obj = transform() except __HOLE__: # Transform is a function. Wrap it in a class instance. transform_obj = FunctionWrappingTransform(transform) transform_obj.add_validation(*validators) state_xforms[transform_obj.name] = transform_obj
TypeError
dataset/ETHPy150Open openelections/openelections-core/openelex/base/transform.py/Registry.register
9,359
def get(self, state, name, raw=False): registry = self._get_registry(raw) try: transform = registry[state][name] except __HOLE__: err_msg = "Transform (%s) not registered for %s" % (name, state) raise KeyError(err_msg) return transform
KeyError
dataset/ETHPy150Open openelections/openelections-core/openelex/base/transform.py/Registry.get
9,360
def main(): r = redis.Redis() while True: # process queue as FIFO, change `blpop` to `brpop` to process as LIFO source, data = r.blpop(["dmoz:items"]) item = json.loads(data) try: print u"Processing: %(name)s <%(link)s>" % item except __HOLE__: print u"Error procesing: %r" % item
KeyError
dataset/ETHPy150Open rolando/scrapy-redis/example-project/process_items.py/main
9,361
def init_frame(self, n): """ NOTE: this is called from the IIS_RequestHandler """ self.logger.debug("initializing frame %d" % (n)) # create the frame, if needed try: fb = self.get_frame(n) except __HOLE__: fb = iis.framebuffer() self.fb[n] = fb fb.width = None fb.height = None fb.wcs = '' fb.image = None fb.bitmap = None fb.zoom = 1.0 fb.buffer = array.array('B') fb.ct = iis.coord_tran() #fb.chname = None return fb
KeyError
dataset/ETHPy150Open ejeschke/ginga/ginga/misc/plugins/IRAF.py/IRAF.init_frame
9,362
def start_imexamine(self, fitsimage, chname): self.logger.info("STARTING") # Turn off regular UI processing in the frame self.canvas.setSurface(fitsimage) # insert layer if it is not already try: obj = fitsimage.getObjectByTag(self.layertag) except __HOLE__: # Add canvas layer fitsimage.add(self.canvas, tag=self.layertag) self.canvas.ui_setActive(True) self.imexam_active = True self.setMode('IRAF', chname) self.fv.gui_do(self.fv.ds.raise_tab, 'IRAF') self.logger.info("FINISHING")
KeyError
dataset/ETHPy150Open ejeschke/ginga/ginga/misc/plugins/IRAF.py/IRAF.start_imexamine
9,363
def play(file_path): """play a video file""" iv = pipeffmpeg.InputVideoStream() iv.open(file_path) # Getting the frame rate of the video # Got this from the source code of pipeffmpeg (Can be replaced by Popen) file_info = pipeffmpeg.get_info(file_path) try: f_rate = file_info["streams"][0]["r_frame_rate"] # Getting the number of seconds for a frame delay = 1.0 / int(f_rate.split("/")[0]) except (__HOLE__, IndexError): # Setting a default delay if frame rate not obtained delay = 0.02 stdscr = curses.initscr() max_h, max_w = stdscr.getmaxyx() start_time = time.time() for i, bmp in enumerate(iv.readframe()): # timestap when frame start frame_start_time = time.time() # resize each frame to proper size, and covert to gray image = Image.open(StringIO.StringIO(bmp))\ .resize((max_w, max_h), Image.ANTIALIAS)\ .convert('L') stdscr.clear() data = ''.join(gray_scale[pix] for pix in image.getdata()) for h in range(max_h-1): stdscr.addstr(h, 0, data[h*max_w:(h+1)*max_w]) time_eclipsed = time.time() - start_time minutes, seconds = time_eclipsed/60, time_eclipsed % 60 stdscr.addstr(max_h-1, 0, 'Resolution:[%d*%d] Frame:%d Time:[%d:%d]' % (max_w, max_h, i, minutes, seconds)) stdscr.refresh() # Adding a delay if needed real_delay = delay - (time.time() - frame_start_time) if real_delay > 0: time.sleep(real_delay) curses.endwin()
KeyError
dataset/ETHPy150Open ee0703/console_video/player.py/play
9,364
def __getitem__(self, key): try: return self._primary[key] except __HOLE__: return self._secondary[key]
KeyError
dataset/ETHPy150Open anandology/pyjamas/pyjs/src/pyjs/lib/string.py/_multimap.__getitem__
9,365
def safe_substitute(self, *args, **kws): if len(args) > 1: raise TypeError('Too many positional arguments') if not args: mapping = kws elif kws: mapping = _multimap(kws, args[0]) else: mapping = args[0] # Helper function for .sub() def convert(mo): named = mo.group('named') if named is not None: try: # We use this idiom instead of str() because the latter # will fail if val is a Unicode containing non-ASCII return '%s' % (mapping[named],) except __HOLE__: return self.delimiter + named braced = mo.group('braced') if braced is not None: try: return '%s' % (mapping[braced],) except KeyError: return self.delimiter + '{' + braced + '}' if mo.group('escaped') is not None: return self.delimiter if mo.group('invalid') is not None: return self.delimiter raise ValueError('Unrecognized named group in pattern', self.pattern) return self.pattern.sub(convert, self.template)
KeyError
dataset/ETHPy150Open anandology/pyjamas/pyjs/src/pyjs/lib/string.py/Template.safe_substitute
9,366
def mkdir(self, subdirs): try: os.makedirs(os.path.join(self.root, subdirs)) except __HOLE__: pass # Give some time to inotify in order # to avoid a known bug where new files # if a recently created directory are # ignored # cf http://stackoverflow.com/a/17586891/180751 import time time.sleep(0.1)
OSError
dataset/ETHPy150Open onitu/onitu/drivers/local_storage/onitu_local_storage/tests/driver.py/Driver.mkdir
9,367
def iter_fields(node): """ Yield a tuple of ``(fieldname, value)`` for each field in ``node._fields`` that is present on *node*. """ for field in node._fields: try: yield field, getattr(node, field) except __HOLE__: pass
AttributeError
dataset/ETHPy150Open eBay/restcommander/play-1.2.4/python/Lib/ast.py/iter_fields
9,368
def decode(text): for enc in _encodings: try: return text.decode(enc) except __HOLE__: continue else: return text.decode('utf-8','ignore')
UnicodeDecodeError
dataset/ETHPy150Open ecdavis/pants/pants/web/utils.py/decode
9,369
def finalize_options(self): self.set_undefined_options('build', ('build_lib', 'build_lib'), ('force', 'force')) # Get the distribution options that are aliases for build_py # options -- list of packages and list of modules. self.packages = self.distribution.packages self.py_modules = self.distribution.py_modules self.package_data = self.distribution.package_data self.package_dir = {} if self.distribution.package_dir: for name, path in self.distribution.package_dir.items(): self.package_dir[name] = convert_path(path) self.data_files = self.get_data_files() # Ick, copied straight from install_lib.py (fancy_getopt needs a # type system! Hell, *everything* needs a type system!!!) if not isinstance(self.optimize, int): try: self.optimize = int(self.optimize) assert 0 <= self.optimize <= 2 except (ValueError, __HOLE__): raise DistutilsOptionError("optimize must be 0, 1, or 2")
AssertionError
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/distutils/command/build_py.py/build_py.finalize_options
9,370
def get_package_dir(self, package): """Return the directory, relative to the top of the source distribution, where package 'package' should be found (at least according to the 'package_dir' option, if any).""" path = package.split('.') if not self.package_dir: if path: return os.path.join(*path) else: return '' else: tail = [] while path: try: pdir = self.package_dir['.'.join(path)] except __HOLE__: tail.insert(0, path[-1]) del path[-1] else: tail.insert(0, pdir) return os.path.join(*tail) else: # Oops, got all the way through 'path' without finding a # match in package_dir. If package_dir defines a directory # for the root (nameless) package, then fallback on it; # otherwise, we might as well have not consulted # package_dir at all, as we just use the directory implied # by 'tail' (which should be the same as the original value # of 'path' at this point). pdir = self.package_dir.get('') if pdir is not None: tail.insert(0, pdir) if tail: return os.path.join(*tail) else: return ''
KeyError
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/distutils/command/build_py.py/build_py.get_package_dir
9,371
def find_modules(self): """Finds individually-specified Python modules, ie. those listed by module name in 'self.py_modules'. Returns a list of tuples (package, module_base, filename): 'package' is a tuple of the path through package-space to the module; 'module_base' is the bare (no packages, no dots) module name, and 'filename' is the path to the ".py" file (relative to the distribution root) that implements the module. """ # Map package names to tuples of useful info about the package: # (package_dir, checked) # package_dir - the directory where we'll find source files for # this package # checked - true if we have checked that the package directory # is valid (exists, contains __init__.py, ... ?) packages = {} # List of (package, module, filename) tuples to return modules = [] # We treat modules-in-packages almost the same as toplevel modules, # just the "package" for a toplevel is empty (either an empty # string or empty list, depending on context). Differences: # - don't check for __init__.py in directory for empty package for module in self.py_modules: path = module.split('.') package = '.'.join(path[0:-1]) module_base = path[-1] try: (package_dir, checked) = packages[package] except __HOLE__: package_dir = self.get_package_dir(package) checked = 0 if not checked: init_py = self.check_package(package, package_dir) packages[package] = (package_dir, 1) if init_py: modules.append((package, "__init__", init_py)) # XXX perhaps we should also check for just .pyc files # (so greedy closed-source bastards can distribute Python # modules too) module_file = os.path.join(package_dir, module_base + ".py") if not self.check_module(module, module_file): continue modules.append((package, module_base, module_file)) return modules
KeyError
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/distutils/command/build_py.py/build_py.find_modules
9,372
def parse_iter(self, pos, tag, lineindex, it, src=None): dotchar = self.dotchar quotechar = self.quotechar children = [] firstline = lineindex while 1: try: lineindex, next = it.next() except __HOLE__: next = None break if not next.startswith(dotchar): tag.append(next) else: break for (i, t) in enumerate(tag): if (t.startswith(quotechar+dotchar) or t.startswith(quotechar+quotechar+dotchar)): tag[i] = t[len(quotechar):] if tag == ['']: tag = '\n' else: tag = '\n'.join(tag) while 1: if (next is None or len(next) <= pos or next[pos] != dotchar or not next.startswith(dotchar*(pos+1))): return lineindex, next, self.node(tag, children, firstline) if len(next) > pos+1 and next[pos+1] == dotchar: if src is None: raise SyntaxError, 'Level must increase with 1 max' else: src.error('Level must increase with 1 max', lineindex) lineindex, next, child = self.parse_iter(pos+1, [next[pos+1:]], lineindex, it, src) children.append(child)
StopIteration
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/guppy-0.1.10/guppy/gsl/DottedTree.py/_GLUECLAMP_.parse_iter
9,373
def __init__(self): self._user = None # User-specified default encoding self._learned = [] # Learned default encodings self._widefiles = False # File system can be wide # Can the file system be Unicode? try: self._widefiles = os.path.supports_unicode_filenames except AttributeError: try: self._widefiles = sys.getwindowsversion() == os.VER_PLATFORM_WIN32_NT except __HOLE__: pass # Try to guess a working default try: encoding = sys.getfilesystemencoding() if encoding and not (encoding.upper() in ENC_ASCII_LIST): self._learned = [ encoding ] except AttributeError: pass if not self._learned: encoding = sys.getdefaultencoding() if encoding and not (encoding.upper() in ENC_ASCII_LIST): self._learned = [ encoding ] # If we had no guesses, start with some European defaults if not self._learned: self._learned = ENC_DEFAULT_LIST #end def __init__
AttributeError
dataset/ETHPy150Open benoitc/restkit/doc/sitemap_gen.py/Encoder.__init__
9,374
def SetVerbose(self, level): """ Sets the verbose level. """ try: if type(level) != types.IntType: level = int(level) if (level >= 0) and (level <= 3): self._verbose = level return except __HOLE__: pass self.Error('Verbose level (%s) must be between 0 and 3 inclusive.' % level) #end def SetVerbose #end class Output
ValueError
dataset/ETHPy150Open benoitc/restkit/doc/sitemap_gen.py/Output.SetVerbose
9,375
def TrySetAttribute(self, attribute, value): """ Attempt to set the attribute to the value, with a pretty try block around it. """ if attribute == 'loc': self.loc = self.Canonicalize(value) else: try: setattr(self, attribute, value) except __HOLE__: output.Warn('Unknown URL attribute: %s' % attribute) #end def TrySetAttribute
AttributeError
dataset/ETHPy150Open benoitc/restkit/doc/sitemap_gen.py/URL.TrySetAttribute
9,376
def Validate(self, base_url, allow_fragment): """ Verify the data in this URL is well-formed, and override if not. """ assert type(base_url) == types.StringType # Test (and normalize) the ref if not self.loc: output.Warn('Empty URL') return False if allow_fragment: self.loc = urlparse.urljoin(base_url, self.loc) if not self.loc.startswith(base_url): output.Warn('Discarded URL for not starting with the base_url: %s' % self.loc) self.loc = None return False # Test the lastmod if self.lastmod: if not self.VerifyDate(self.lastmod, "lastmod"): self.lastmod = None # Test the changefreq if self.changefreq: match = False self.changefreq = self.changefreq.lower() for pattern in CHANGEFREQ_PATTERNS: if self.changefreq == pattern: match = True break if not match: output.Warn('Changefreq "%s" is not a valid change frequency on URL ' ': %s' % (self.changefreq, self.loc)) self.changefreq = None # Test the priority if self.priority: priority = -1.0 try: priority = float(self.priority) except __HOLE__: pass if (priority < 0.0) or (priority > 1.0): output.Warn('Priority "%s" is not a number between 0 and 1 inclusive ' 'on URL: %s' % (self.priority, self.loc)) self.priority = None return True #end def Validate
ValueError
dataset/ETHPy150Open benoitc/restkit/doc/sitemap_gen.py/URL.Validate
9,377
def ProduceURLs(self, consumer): """ Produces URLs from our data source, hands them in to the consumer. """ # Open the file (frame, file) = OpenFileForRead(self._path, 'URLLIST') if not file: return # Iterate lines linenum = 0 for line in file.readlines(): linenum = linenum + 1 # Strip comments and empty lines if self._encoding: line = encoder.WidenText(line, self._encoding) line = line.strip() if (not line) or line[0] == '#': continue # Split the line on space url = URL() cols = line.split(' ') for i in range(0,len(cols)): cols[i] = cols[i].strip() url.TrySetAttribute('loc', cols[0]) # Extract attributes from the other columns for i in range(1,len(cols)): if cols[i]: try: (attr_name, attr_val) = cols[i].split('=', 1) url.TrySetAttribute(attr_name, attr_val) except __HOLE__: output.Warn('Line %d: Unable to parse attribute: %s' % (linenum, cols[i])) # Pass it on consumer(url, False) file.close() if frame: frame.close() #end def ProduceURLs #end class InputURLList
ValueError
dataset/ETHPy150Open benoitc/restkit/doc/sitemap_gen.py/InputURLList.ProduceURLs
9,378
def ProduceURLs(self, consumer): """ Produces URLs from our data source, hands them in to the consumer. """ # Open the file (frame, file) = OpenFileForRead(self._path, 'URLLIST') if not file: return # Iterate lines linenum = 0 for line in file.readlines(): linenum = linenum + 1 # Strip comments and empty lines if self._encoding: line = encoder.WidenText(line, self._encoding) line = line.strip() if (not line) or line[0] == '#': continue # Split the line on tabs url = NewsURL() cols = line.split('\t') for i in range(0,len(cols)): cols[i] = cols[i].strip() for i in range(0,len(cols)): if cols[i]: attr_value = cols[i] if i < len(self._tag_order): attr_name = self._tag_order[i] try: url.TrySetAttribute(attr_name, attr_value) except __HOLE__: output.Warn('Line %d: Unable to parse attribute: %s' % (linenum, cols[i])) # Pass it on consumer(url, False) file.close() if frame: frame.close() #end def ProduceURLs #end class InputNewsURLList
ValueError
dataset/ETHPy150Open benoitc/restkit/doc/sitemap_gen.py/InputNewsURLList.ProduceURLs
9,379
def ProduceURLs(self, consumer): """ Produces URLs from our data source, hands them in to the consumer. """ if not self._path: return root_path = self._path root_URL = self._url root_file = self._default_file remove_empty_directories = self._remove_empty_directories def HasReadPermissions(path): """ Verifies a given path has read permissions. """ stat_info = os.stat(path) mode = stat_info[stat.ST_MODE] if mode & stat.S_IREAD: return True else: return None def PerFile(dirpath, name): """ Called once per file. Note that 'name' will occasionally be None -- for a directory itself """ # Pull a timestamp url = URL() isdir = False try: if name: path = os.path.join(dirpath, name) else: path = dirpath isdir = os.path.isdir(path) time = None if isdir and root_file: file = os.path.join(path, root_file) try: time = os.stat(file)[stat.ST_MTIME]; except OSError: pass if not time: time = os.stat(path)[stat.ST_MTIME]; url.lastmod = TimestampISO8601(time) except OSError: pass except ValueError: pass # Build a URL middle = dirpath[len(root_path):] if os.sep != '/': middle = middle.replace(os.sep, '/') if middle: middle = middle + '/' if name: middle = middle + name if isdir: middle = middle + '/' url.TrySetAttribute('loc', root_URL + encoder.WidenText(middle, None)) # Suppress default files. (All the way down here so we can log it.) 
if name and (root_file == name): url.Log(prefix='IGNORED (default file)', level=2) return # Suppress directories when remove_empty_directories="true" try: if isdir: if HasReadPermissions(path): if remove_empty_directories == 'true' and \ len(os.listdir(path)) == 0: output.Log('IGNORED empty directory %s' % str(path), level=1) return elif path == self._path: output.Error('IGNORED configuration file directory input %s due ' 'to file permissions' % self._path) else: output.Log('IGNORED files within directory %s due to file ' 'permissions' % str(path), level=0) except __HOLE__: pass except ValueError: pass consumer(url, False) #end def PerFile def PerDirectory(ignore, dirpath, namelist): """ Called once per directory with a list of all the contained files/dirs. """ ignore = ignore # Avoid warnings of an unused parameter if not dirpath.startswith(root_path): output.Warn('Unable to decide what the root path is for directory: ' '%s' % dirpath) return for name in namelist: PerFile(dirpath, name) #end def PerDirectory output.Log('Walking DIRECTORY "%s"' % self._path, 1) PerFile(self._path, None) os.path.walk(self._path, PerDirectory, None) #end def ProduceURLs #end class InputDirectory
OSError
dataset/ETHPy150Open benoitc/restkit/doc/sitemap_gen.py/InputDirectory.ProduceURLs
9,380
def FlushSet(self): """ Flush the current set of URLs to the output. This is a little slow because we like to sort them all and normalize the priorities before dumping. """ # Determine what Sitemap header to use (News or General) if self._sitemap_type == 'news': sitemap_header = NEWS_SITEMAP_HEADER else: sitemap_header = GENERAL_SITEMAP_HEADER # Sort and normalize output.Log('Sorting and normalizing collected URLs.', 1) self._set.sort() for url in self._set: hash = url.MakeHash() dup = self._urls[hash] if dup > 0: self._urls[hash] = -1 if not url.priority: url.priority = '%.4f' % (float(dup) / float(self._dup_max)) # Get the filename we're going to write to filename = self._filegen.GeneratePath(self._sitemaps) if not filename: output.Fatal('Unexpected: Couldn\'t generate output filename.') self._sitemaps = self._sitemaps + 1 output.Log('Writing Sitemap file "%s" with %d URLs' % (filename, len(self._set)), 1) # Write to it frame = None file = None try: if self._filegen.is_gzip: basename = os.path.basename(filename); frame = open(filename, 'wb') file = gzip.GzipFile(fileobj=frame, filename=basename, mode='wt') else: file = open(filename, 'wt') file.write(sitemap_header) for url in self._set: url.WriteXML(file) file.write(SITEMAP_FOOTER) file.close() if frame: frame.close() frame = None file = None except __HOLE__: output.Fatal('Couldn\'t write out to file: %s' % filename) os.chmod(filename, 0644) # Flush self._set = [] #end def FlushSet
IOError
dataset/ETHPy150Open benoitc/restkit/doc/sitemap_gen.py/Sitemap.FlushSet
9,381
def WriteIndex(self): """ Write the master index of all Sitemap files """ # Make a filename filename = self._filegen.GeneratePath(SITEINDEX_SUFFIX) if not filename: output.Fatal('Unexpected: Couldn\'t generate output index filename.') output.Log('Writing index file "%s" with %d Sitemaps' % (filename, self._sitemaps), 1) # Determine what Sitemap index header to use (News or General) if self._sitemap_type == 'news': sitemap_index_header = NEWS_SITEMAP_HEADER else: sitemap__index_header = GENERAL_SITEMAP_HEADER # Make a lastmod time lastmod = TimestampISO8601(time.time()) # Write to it try: fd = open(filename, 'wt') fd.write(sitemap_index_header) for mapnumber in range(0,self._sitemaps): # Write the entry mapurl = self._filegen.GenerateURL(mapnumber, self._base_url) mapattributes = { 'loc' : mapurl, 'lastmod' : lastmod } fd.write(SITEINDEX_ENTRY % mapattributes) fd.write(SITEINDEX_FOOTER) fd.close() fd = None except __HOLE__: output.Fatal('Couldn\'t write out to file: %s' % filename) os.chmod(filename, 0644) #end def WriteIndex
IOError
dataset/ETHPy150Open benoitc/restkit/doc/sitemap_gen.py/Sitemap.WriteIndex
9,382
def NotifySearch(self): """ Send notification of the new Sitemap(s) to the search engines. """ if self._suppress: output.Log('Search engine notification is suppressed.', 1) return output.Log('Notifying search engines.', 1) # Override the urllib's opener class with one that doesn't ignore 404s class ExceptionURLopener(urllib.FancyURLopener): def http_error_default(self, url, fp, errcode, errmsg, headers): output.Log('HTTP error %d: %s' % (errcode, errmsg), 2) raise IOError #end def http_error_default #end class ExceptionURLOpener old_opener = urllib._urlopener urllib._urlopener = ExceptionURLopener() # Build the URL we want to send in if self._sitemaps > 1: url = self._filegen.GenerateURL(SITEINDEX_SUFFIX, self._base_url) else: url = self._filegen.GenerateURL(0, self._base_url) # Test if we can hit it ourselves try: u = urllib.urlopen(url) u.close() except IOError: output.Error('When attempting to access our generated Sitemap at the ' 'following URL:\n %s\n we failed to read it. Please ' 'verify the store_into path you specified in\n' ' your configuration file is web-accessable. Consult ' 'the FAQ for more\n information.' % url) output.Warn('Proceeding to notify with an unverifyable URL.') # Cycle through notifications # To understand this, see the comment near the NOTIFICATION_SITES comment for ping in NOTIFICATION_SITES: query_map = ping[3] query_attr = ping[5] query_map[query_attr] = url query = urllib.urlencode(query_map) notify = urlparse.urlunsplit((ping[0], ping[1], ping[2], query, ping[4])) # Send the notification output.Log('Notifying: %s' % ping[1], 0) output.Log('Notification URL: %s' % notify, 2) try: u = urllib.urlopen(notify) u.read() u.close() except __HOLE__: output.Warn('Cannot contact: %s' % ping[1]) if old_opener: urllib._urlopener = old_opener #end def NotifySearch
IOError
dataset/ETHPy150Open benoitc/restkit/doc/sitemap_gen.py/Sitemap.NotifySearch
9,383
def OpenFileForRead(path, logtext): """ Opens a text file, be it GZip or plain """ frame = None file = None if not path: return (frame, file) try: if path.endswith('.gz'): frame = open(path, 'rb') file = gzip.GzipFile(fileobj=frame, mode='rt') else: file = open(path, 'rt') if logtext: output.Log('Opened %s file: %s' % (logtext, path), 1) else: output.Log('Opened file: %s' % path, 1) except __HOLE__: output.Error('Can not open file: %s' % path) return (frame, file) #end def OpenFileForRead
IOError
dataset/ETHPy150Open benoitc/restkit/doc/sitemap_gen.py/OpenFileForRead
9,384
def CreateSitemapFromFile(configpath, suppress_notify): """ Sets up a new Sitemap object from the specified configuration file. """ # Remember error count on the way in num_errors = output.num_errors # Rev up SAX to parse the config sitemap = Sitemap(suppress_notify) try: output.Log('Reading configuration file: %s' % configpath, 0) xml.sax.parse(configpath, sitemap) except __HOLE__: output.Error('Cannot read configuration file: %s' % configpath) except xml.sax._exceptions.SAXParseException, e: output.Error('XML error in the config file (line %d, column %d): %s' % (e._linenum, e._colnum, e.getMessage())) except xml.sax._exceptions.SAXReaderNotAvailable: output.Error('Some installs of Python 2.2 did not include complete support' ' for XML.\n Please try upgrading your version of Python' ' and re-running the script.') # If we added any errors, return no sitemap if num_errors == output.num_errors: return sitemap return None #end def CreateSitemapFromFile
IOError
dataset/ETHPy150Open benoitc/restkit/doc/sitemap_gen.py/CreateSitemapFromFile
9,385
def ProcessCommandFlags(args): """ Parse command line flags per specified usage, pick off key, value pairs All flags of type "--key=value" will be processed as __flags[key] = value, "--option" will be processed as __flags[option] = option """ flags = {} rkeyval = '--(?P<key>\S*)[=](?P<value>\S*)' # --key=val roption = '--(?P<option>\S*)' # --key r = '(' + rkeyval + ')|(' + roption + ')' rc = re.compile(r) for a in args: try: rcg = rc.search(a).groupdict() if rcg.has_key('key'): flags[rcg['key']] = rcg['value'] if rcg.has_key('option'): flags[rcg['option']] = rcg['option'] except __HOLE__: return None return flags #end def ProcessCommandFlags # # __main__ #
AttributeError
dataset/ETHPy150Open benoitc/restkit/doc/sitemap_gen.py/ProcessCommandFlags
9,386
def _generic_view(message_method, fail_signal, request, message_id, dispatch_id, hashed, redirect_to=None): if redirect_to is None: redirect_to = '/' try: dispatch = Dispatch.objects.select_related('message').get(pk=dispatch_id) if int(message_id) != dispatch.message_id: raise ValueError() message = dispatch.message except (Dispatch.DoesNotExist, __HOLE__): pass else: try: message_type = message.get_type() expected_hash = message_type.get_dispatch_hash(dispatch_id, message_id) method = getattr(message_type, message_method) return method( request, message, dispatch, hash_is_valid=(expected_hash == hashed), redirect_to=redirect_to ) except UnknownMessageTypeError: pass fail_signal.send(None, request=request, message=message_id, dispatch=dispatch_id) return redirect(redirect_to)
ValueError
dataset/ETHPy150Open idlesign/django-sitemessage/sitemessage/views.py/_generic_view
9,387
def do_batch(self, args): try: doParser = self.arg_batch() try: doArgs = doParser.parse_args(args.split()) except __HOLE__ as e: return with open(doArgs.file) as f: for line in f: try: self.run_commands_at_invocation([line]) except: printer.out("bad command '"+line+"'", printer.ERROR) print "\n" except IOError as e: printer.out("File error: "+str(e), printer.ERROR) return except ArgumentParserError as e: printer.out("In Arguments: "+str(e), printer.ERROR) self.help_batch()
SystemExit
dataset/ETHPy150Open usharesoft/hammr/src/hammr/hammr.py/Hammr.do_batch
9,388
@method_decorator(login_required) def dispatch(self, request, *args, **kwargs): # Only continue if logged in user has list permission if not self.permission_helper.user_can_list(request.user): raise PermissionDenied self.list_display = self.model_admin.get_list_display(request) self.list_filter = self.model_admin.get_list_filter(request) self.search_fields = self.model_admin.get_search_fields(request) self.items_per_page = self.model_admin.list_per_page self.select_related = self.model_admin.list_select_related # Get search parameters from the query string. try: self.page_num = int(request.GET.get(self.PAGE_VAR, 0)) except __HOLE__: self.page_num = 0 self.params = dict(request.GET.items()) if self.PAGE_VAR in self.params: del self.params[self.PAGE_VAR] if self.ERROR_FLAG in self.params: del self.params[self.ERROR_FLAG] self.query = request.GET.get(self.SEARCH_VAR, '') self.queryset = self.get_queryset(request) return super(IndexView, self).dispatch(request, *args, **kwargs)
ValueError
dataset/ETHPy150Open torchbox/wagtail/wagtail/contrib/modeladmin/views.py/IndexView.dispatch
9,389
def get_ordering(self, request, queryset): """ Returns the list of ordering fields for the change list. First we check the get_ordering() method in model admin, then we check the object's default ordering. Then, any manually-specified ordering from the query string overrides anything. Finally, a deterministic order is guaranteed by ensuring the primary key is used as the last ordering field. """ params = self.params ordering = list(self.get_default_ordering(request)) if self.ORDER_VAR in params: # Clear ordering and used params ordering = [] order_params = params[self.ORDER_VAR].split('.') for p in order_params: try: none, pfx, idx = p.rpartition('-') field_name = self.list_display[int(idx)] order_field = self.get_ordering_field(field_name) if not order_field: continue # No 'admin_order_field', skip it # reverse order if order_field has already "-" as prefix if order_field.startswith('-') and pfx == "-": ordering.append(order_field[1:]) else: ordering.append(pfx + order_field) except (IndexError, __HOLE__): continue # Invalid ordering specified, skip it. # Add the given query's ordering fields, if any. ordering.extend(queryset.query.order_by) # Ensure that the primary key is systematically present in the list of # ordering fields so we can guarantee a deterministic order across all # database backends. pk_name = self.opts.pk.name if not (set(ordering) & {'pk', '-pk', pk_name, '-' + pk_name}): # The two sets do not intersect, meaning the pk isn't present. So # we add it. ordering.append('-pk') return ordering
ValueError
dataset/ETHPy150Open torchbox/wagtail/wagtail/contrib/modeladmin/views.py/IndexView.get_ordering
9,390
def get_ordering_field_columns(self): """ Returns an OrderedDict of ordering field column numbers and asc/desc """ # We must cope with more than one column having the same underlying # sort field, so we base things on column numbers. ordering = self._get_default_ordering() ordering_fields = OrderedDict() if self.ORDER_VAR not in self.params: # for ordering specified on model_admin or model Meta, we don't # know the right column numbers absolutely, because there might be # more than one column associated with that ordering, so we guess. for field in ordering: if field.startswith('-'): field = field[1:] order_type = 'desc' else: order_type = 'asc' for index, attr in enumerate(self.list_display): if self.get_ordering_field(attr) == field: ordering_fields[index] = order_type break else: for p in self.params[self.ORDER_VAR].split('.'): none, pfx, idx = p.rpartition('-') try: idx = int(idx) except __HOLE__: continue # skip it ordering_fields[idx] = 'desc' if pfx == '-' else 'asc' return ordering_fields
ValueError
dataset/ETHPy150Open torchbox/wagtail/wagtail/contrib/modeladmin/views.py/IndexView.get_ordering_field_columns
9,391
def get_field_display_value(self, field_name, field=None): """ Return a display value for a field """ # First we check for a 'get_fieldname_display' property/method on # the model, and return the value of that, if present. val_funct = getattr(self.instance, 'get_%s_display' % field_name, None) if val_funct is not None: if callable(val_funct): return val_funct() return val_funct # If we have a real field, we can utilise that to try to display # something more useful if field is not None: try: field_type = field.get_internal_type() if ( field_type == 'ForeignKey' and field.related_model == get_image_model() ): # The field is an image return self.get_image_field_display(field_name, field) if ( field_type == 'ForeignKey' and field.related_model == get_document_model() ): # The field is a document return self.get_document_field_display(field_name, field) except __HOLE__: pass # Resort to getting the value of 'field_name' from the instance return getattr(self.instance, field_name, self.model_admin.get_empty_value_display(field_name))
AttributeError
dataset/ETHPy150Open torchbox/wagtail/wagtail/contrib/modeladmin/views.py/InspectView.get_field_display_value
9,392
def check_signature(self, args, kwargs): service_cls = self.container.service_cls fn = getattr(service_cls, self.method_name) try: service_instance = None # fn is unbound inspect.getcallargs(fn, service_instance, *args, **kwargs) except __HOLE__ as exc: raise IncorrectSignature(str(exc))
TypeError
dataset/ETHPy150Open onefinestay/nameko/nameko/extensions.py/Entrypoint.check_signature
9,393
def __GetArrayById(array_id): """ Get an array, by its ID. This (internal) wrapper around 'idaapi.netnode(array_id)' will ensure a certain safety around the retrieval of arrays (by catching quite unexpect[ed|able] exceptions, and making sure we don't create & use `transient' netnodes). @param array_id: A positive, valid array ID. """ try: node = idaapi.netnode(array_id) nodename = node.name() if nodename is None or not nodename.startswith(_IDC_ARRAY_PREFIX): return __dummy_netnode.instance else: return node except __HOLE__: return __dummy_netnode.instance
NotImplementedError
dataset/ETHPy150Open EiNSTeiN-/idapython/python/idc.py/__GetArrayById
9,394
def __init__(self): # Lazily load the libHLC library if self.hlc is None: try: hlc = CDLL(os.path.join(sys.prefix, 'lib', 'libHLC.so')) except __HOLE__: raise ImportError("libHLC.so cannot be found. Please install the libhlc " "package by: conda install -c numba libhlc") else: hlc.HLC_ParseModule.restype = moduleref_ptr hlc.HLC_ModuleEmitBRIG.restype = c_size_t hlc.HLC_Initialize() utils.finalize(hlc, hlc.HLC_Finalize) hlc.HLC_SetCommandLineOption.argtypes = [ c_int, c_void_p, ] type(self).hlc = hlc
OSError
dataset/ETHPy150Open numba/numba/numba/hsa/hlc/libhlc.py/HLC.__init__
9,395
def to_brig(self, mod, opt=3): bufptr = c_void_p(0) size = self.hlc.HLC_ModuleEmitBRIG(mod, int(opt), byref(bufptr)) if not size: raise Error("Failed to emit BRIG") buf = (c_byte * size).from_address(bufptr.value) try: buffer except __HOLE__: ret = bytes(buf) else: ret = bytes(buffer(buf)) self.hlc.HLC_DisposeString(buf) return ret
NameError
dataset/ETHPy150Open numba/numba/numba/hsa/hlc/libhlc.py/HLC.to_brig
9,396
def get_device_zone(self, port_id): # we have to key the device_zone_map based on the fragment of the port # UUID that shows up in the interface name. This is because the initial # map is populated strictly based on interface names that we don't know # the full UUID of. short_port_id = port_id[:(LINUX_DEV_LEN - LINUX_DEV_PREFIX_LEN)] try: return self._device_zone_map[short_port_id] except __HOLE__: return self._generate_device_zone(short_port_id)
KeyError
dataset/ETHPy150Open openstack/neutron/neutron/agent/linux/iptables_firewall.py/IptablesFirewallDriver.get_device_zone
9,397
def fetch_test_data(): """ Fetches test data if it's missing. """ data_dir = os.path.join(SETUP_DIR, 'tests', 'data') # if tests/data exists, is empty, and .gitmodules exists, run the # appropriate git-submodule(1) command if (os.path.isdir(data_dir) and not os.listdir(data_dir) and os.path.isfile(os.path.join(SETUP_DIR, '.gitmodules'))): import subprocess subprocess.call(['git', 'submodule', 'update', '--init', '--recursive']) # if tests/data either doesn't exist or is empty, download the # busbus-test-data zip from GitHub and unpack it else: try: os.mkdir(data_dir) except __HOLE__ as exc: if exc.errno != errno.EEXIST: raise if not os.listdir(data_dir): # at this point requests and six are already installed import requests import six resp = requests.get('https://github.com/spaceboats/busbus-test-' 'data/archive/v{0}.zip'.format(VERSION)) with zipfile.ZipFile(six.BytesIO(resp.content)) as z: prefix = 'busbus-test-data-{0}/'.format(VERSION) for filename in z.namelist(): if filename == prefix or not filename.startswith(prefix): continue filename = filename[len(prefix):] if filename.endswith('/'): os.mkdir(os.path.join(data_dir, filename)) else: with open(os.path.join(data_dir, filename), 'wb') as f: f.write(z.read(prefix + filename))
OSError
dataset/ETHPy150Open spaceboats/busbus/setup.py/fetch_test_data
9,398
def test_errors(): a = linuxaudiodev.open("w") size = 8 fmt = linuxaudiodev.AFMT_U8 rate = 8000 nchannels = 1 try: a.setparameters(-1, size, nchannels, fmt) except __HOLE__, msg: print msg try: a.setparameters(rate, -2, nchannels, fmt) except ValueError, msg: print msg try: a.setparameters(rate, size, 3, fmt) except ValueError, msg: print msg try: a.setparameters(rate, size, nchannels, 177) except ValueError, msg: print msg try: a.setparameters(rate, size, nchannels, linuxaudiodev.AFMT_U16_LE) except ValueError, msg: print msg try: a.setparameters(rate, 16, nchannels, fmt) except ValueError, msg: print msg
ValueError
dataset/ETHPy150Open babble/babble/include/jython/Lib/test/test_linuxaudiodev.py/test_errors
9,399
def GetMap(self, r, kwargs): parms = GetMapMixin.Parameters.create(kwargs).cleaned_data kwargs = { i : j for i, j in kwargs.items() if i not in parms } item = self.adapter.get_cache_record(**parms) if item and not parms['fresh']: return HttpResponse(item, mimetype='image/'+parms['format']) if self.adapter.requires_time and 'time' not in parms: raise common.MissingParameterValue.at('time') if self.adapter.requires_elevation and 'elevation' not in parms: raise common.MissingParameterValue.at('elevation') if parms['format'].startswith('image/'): fmt = parms['format'][len('image/'):] else: fmt = parms['format'] if self.task: ret = self.task.delay(parms).get() else: fltr = None if parms['filter']: fltr = json.loads(parms['filter']) ds = self.adapter.get_2d_dataset( layers=parms['layers'], srs=parms['srs'], bbox=parms['bbox'], width=parms['width'], height=parms['height'], styles=parms['styles'], bgcolor=parms['bgcolor'], transparent=parms['transparent'], time=parms['time'], elevation=parms['elevation'], v=parms['v'], filter = fltr, format = fmt.encode('ascii'), **kwargs ) tmp = None ret = None # this codepath is officially confusing. Here's the deal. We have several different ways of returning # datasets that ga_wms can handle. # We can A: return a GDAL dataset. This will be written to a tempfile and passed to the requestor. # # B: return a filename. This is assumed to already be in the proper format. If it's not, you're going to # confuse a bunch of people # # C: return a numpy array in which case scipy is asked to handle it through "imsave" # # D: return a file or StringIO instance. This is also already assumed to be in the proper format # # All these cases are handled properly by the below code, HOWEVER, as it stands right now if you return # filenames, files, or StringIO isntances, we assume that you're caching them yourself. Otherwise why would # you have handed us a real file? 
if not isinstance(ds, gdal.Dataset): # then it is a Cairo imagesurface or numpy array, or at least... it'd BETTER be if HAVE_CAIRO and isinstance(ds,cairo.Surface): tmp = tempfile.NamedTemporaryFile(suffix='.png') ds.write_to_png(tmp.name) ds = gdal.Open(tmp.name) # TODO add all the appropriate metadata from the request into the dataset if this is being returned as a GeoTIFF elif isinstance(ds, tuple): ret = ds[1] elif isinstance(ds, basestring): try: ret = open(ds) except __HOLE__: if HAVE_SCIPY: tmp = tempfile.NamedTemporaryFile(suffix='.tif') scipy.misc.imsave(tmp.name, ds) ds = gdal.Open(tmp.name) # TODO add all the appropriate metadata from the request into the dataset if this is being returned as a GeoTIFF elif HAVE_SCIPY: print type(ds) print ds tmp = tempfile.NamedTemporaryFile(suffix='.tif') scipy.misc.imsave(tmp.name, ds) ds = gdal.Open(tmp.name) # TODO add all the appropriate metadata from the request into the dataset if this is being returned as a GeoTIFF if not ret: if fmt == 'tiff' or fmt == 'geotiff': driver = gdal.GetDriverByName('GTiff') elif fmt == 'jpg' or fmt == 'jpeg': driver = gdal.GetDriverByName('jpeg') elif fmt == 'jp2k' or fmt == 'jpeg2000': tmp = tempfile.NamedTemporaryFile(suffix='.jp2') driver = gdal.GetDriverByName('jpeg2000') else: driver = gdal.GetDriverByName(fmt.encode('ascii')) try: tmp = tempfile.NamedTemporaryFile(suffix='.' + fmt) ds2 = driver.CreateCopy(tmp.name, ds) del ds2 tmp.seek(0) ret = tmp.read() self.adapter.cache_result(ret, **parms) except Exception as ex: del tmp raise common.NoApplicableCode(str(ex)) resp = HttpResponse(ret, mimetype=fmt if '/' in fmt else 'image/'+fmt) return resp
IOError
dataset/ETHPy150Open hydroshare/hydroshare2/ga_ows/views/wms/base.py/GetMapMixin.GetMap