Dataset columns:
    Unnamed: 0: int64, values 0 to 10k
    function: string, lengths 79 to 138k characters
    label: string, 20 distinct classes
    info: string, lengths 42 to 261 characters

Each record below appears to consist of four fields in order: the row index, the function source flattened onto a single line with __HOLE__ standing in for a removed exception name, the label naming that exception class, and the info path identifying the originating file in the ETHPy150Open corpus.
1,100
def to_signum(signum): """Resolves the signal number from arbitrary signal representation. Supported formats: 10 - plain integers '10' - integers as a strings 'KILL' - signal names 'SIGKILL' - signal names with SIG prefix 'SIGRTMIN+1' - signal names with offsets """ if isinstance(signum, int): return signum m = re.match(r'(\w+)(\+(\d+))?', signum) if m: name = m.group(1).upper() if not name.startswith('SIG'): name = 'SIG' + name offset = int(m.group(3)) if m.group(3) else 0 try: return getattr(signal, name) + offset except __HOLE__: pass raise ValueError('signal invalid: {}'.format(signum))
KeyError
dataset/ETHPy150Open circus-tent/circus/circus/util.py/to_signum
1,101
def to_uid(name): # NOQA """Return an uid, given a user name. If the name is an integer, make sure it's an existing uid. If the user name is unknown, raises a ValueError. """ try: name = int(name) except __HOLE__: pass if isinstance(name, int): try: pwd.getpwuid(name) return name except KeyError: raise ValueError("%r isn't a valid user id" % name) if not isinstance(name, string_types): raise TypeError(name) try: return pwd.getpwnam(name).pw_uid except KeyError: raise ValueError("%r isn't a valid user name" % name)
ValueError
dataset/ETHPy150Open circus-tent/circus/circus/util.py/to_uid
1,102
def to_gid(name): # NOQA """Return a gid, given a group name If the group name is unknown, raises a ValueError. """ try: name = int(name) except __HOLE__: pass if isinstance(name, int): try: grp.getgrgid(name) return name # getgrid may raises overflow error on mac/os x, # fixed in python2.7.5 # see http://bugs.python.org/issue17531 except (KeyError, OverflowError): raise ValueError("No such group: %r" % name) if not isinstance(name, string_types): raise TypeError(name) try: return grp.getgrnam(name).gr_gid except KeyError: raise ValueError("No such group: %r" % name)
ValueError
dataset/ETHPy150Open circus-tent/circus/circus/util.py/to_gid
1,103
def resolve_name(import_name, silent=False, reload=False): """Imports an object based on a string. This is useful if you want to use import paths as endpoints or something similar. An import path can be specified either in dotted notation (``xml.sax.saxutils.escape``) or with a colon as object delimiter (``xml.sax.saxutils:escape``). If `silent` is True the return value will be `None` if the import fails. :param import_name: the dotted name for the object to import. :param silent: if set to `True` import errors are ignored and `None` is returned instead. :param reload: if set to `True` modules that are already loaded will be reloaded :return: imported object """ # force the import name to automatically convert to strings import_name = bytestring(import_name) try: if ':' in import_name: module, obj = import_name.split(':', 1) elif '.' in import_name and import_name not in sys.modules: module, obj = import_name.rsplit('.', 1) else: module, obj = import_name, None # __import__ is not able to handle unicode strings in the fromlist mod = None # if the module is a package if reload and module in sys.modules: try: importlib.invalidate_caches() except Exception: pass try: mod = reload_module(sys.modules[module]) except Exception: pass if not mod: if not obj: return __import__(module) try: mod = __import__(module, None, None, [obj]) except ImportError: if ':' in import_name: raise return __import__(import_name) if not obj: return mod try: return getattr(mod, obj) except __HOLE__: # support importing modules not yet set up by the parent module # (or package for that matter) if ':' in import_name: raise return __import__(import_name) except ImportError as e: if not silent: raise_with_tb(ImportStringError(import_name, e))
AttributeError
dataset/ETHPy150Open circus-tent/circus/circus/util.py/resolve_name
1,104
def _read(self, fp, fpname): cursect = None # None, or a dictionary optname = None lineno = 0 e = None # None, or an exception while True: line = fp.readline() if not line: break lineno += 1 # comment or blank line? if line.strip() == '' or line[0] in '#;': continue if line.split(None, 1)[0].lower() == 'rem' and line[0] in "rR": # no leading whitespace continue # continuation line? if line[0].isspace() and cursect is not None and optname: value = line.strip() if value: cursect[optname].append(value) # a section header or option header? else: # is it a section header? mo = self.SECTCRE.match(line) if mo: sectname = mo.group('header') if sectname in self._sections: # we're extending/overriding, we're good cursect = self._sections[sectname] elif sectname == DEFAULTSECT: cursect = self._defaults else: cursect = self._dict() cursect['__name__'] = sectname self._sections[sectname] = cursect # So sections can't start with a continuation line optname = None # no section header in the file? elif cursect is None: raise MissingSectionHeaderError(fpname, lineno, line) # an option line? else: try: mo = self._optcre.match(line) # 2.7 except __HOLE__: mo = self.OPTCRE.match(line) # 2.6 if mo: optname, vi, optval = mo.group('option', 'vi', 'value') self.optionxform = text_type optname = self.optionxform(optname.rstrip()) # We don't want to override. if optname in cursect: continue # This check is fine because the OPTCRE cannot # match if it would set optval to None if optval is not None: if vi in ('=', ':') and ';' in optval: # ';' is a comment delimiter only if it follows # a spacing character pos = optval.find(';') if pos != -1 and optval[pos - 1].isspace(): optval = optval[:pos] optval = optval.strip() # allow empty values if optval == '""': optval = '' cursect[optname] = [optval] else: # valueless option handling cursect[optname] = optval else: # a non-fatal parsing error occurred. set up the # exception but keep going. the exception will be # raised at the end of the file and will contain a # list of all bogus lines if not e: e = ParsingError(fpname) e.append(lineno, repr(line)) # if any parsing errors occurred, raise an exception if e: raise e # join the multi-line values collected while reading all_sections = [self._defaults] all_sections.extend(self._sections.values()) for options in all_sections: for name, val in options.items(): if isinstance(val, list): options[name] = '\n'.join(val)
AttributeError
dataset/ETHPy150Open circus-tent/circus/circus/util.py/StrictConfigParser._read
1,105
def get_connection(socket, endpoint, ssh_server=None, ssh_keyfile=None): if ssh_server is None: socket.connect(endpoint) else: try: try: ssh.tunnel_connection(socket, endpoint, ssh_server, keyfile=ssh_keyfile) except ImportError: ssh.tunnel_connection(socket, endpoint, ssh_server, keyfile=ssh_keyfile, paramiko=True) except __HOLE__: raise ImportError("pexpect was not found, and failed to use " "Paramiko. You need to install Paramiko")
ImportError
dataset/ETHPy150Open circus-tent/circus/circus/util.py/get_connection
1,106
def load_virtualenv(watcher, py_ver=None): if not watcher.copy_env: raise ValueError('copy_env must be True to to use virtualenv') if not py_ver: py_ver = sys.version.split()[0][:3] # XXX Posix scheme - need to add others sitedir = os.path.join(watcher.virtualenv, 'lib', 'python' + py_ver, 'site-packages') if not os.path.exists(sitedir): raise ValueError("%s does not exist" % sitedir) bindir = os.path.join(watcher.virtualenv, 'bin') if os.path.exists(bindir): watcher.env['PATH'] = ':'.join([bindir, watcher.env.get('PATH', '')]) def process_pth(sitedir, name): packages = set() fullname = os.path.join(sitedir, name) try: f = open(fullname, "rU") except IOError: return with f: for line in f.readlines(): if line.startswith(("#", "import")): continue line = line.rstrip() pkg_path = os.path.abspath(os.path.join(sitedir, line)) if os.path.exists(pkg_path): packages.add(pkg_path) return packages venv_pkgs = set() dotpth = os.extsep + "pth" for name in os.listdir(sitedir): if name.endswith(dotpth): try: packages = process_pth(sitedir, name) if packages: venv_pkgs |= packages except __HOLE__: continue py_path = watcher.env.get('PYTHONPATH') path = None if venv_pkgs: venv_path = os.pathsep.join(venv_pkgs) if py_path: path = os.pathsep.join([venv_path, py_path]) else: path = venv_path # Add watcher virtualenv site-packages dir to the python path if path and sitedir not in path.split(os.pathsep): path = os.pathsep.join([path, sitedir]) else: if py_path: path = os.pathsep.join([py_path, sitedir]) else: path = sitedir watcher.env['PYTHONPATH'] = path
OSError
dataset/ETHPy150Open circus-tent/circus/circus/util.py/load_virtualenv
1,107
def create_udp_socket(mcast_addr, mcast_port): """Create an udp multicast socket for circusd cluster auto-discovery. mcast_addr must be between 224.0.0.0 and 239.255.255.255 """ try: ip_splitted = list(map(int, mcast_addr.split('.'))) mcast_port = int(mcast_port) except __HOLE__: raise ValueError('Wrong UDP multicast_endpoint configuration. Should ' 'looks like: "%r"' % DEFAULT_ENDPOINT_MULTICAST) if ip_splitted[0] < 224 or ip_splitted[0] > 239: raise ValueError('The multicast address is not valid should be ' 'between 224.0.0.0 and 239.255.255.255') any_addr = "0.0.0.0" sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP) # Allow reutilization of addr sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) # Some platform exposes SO_REUSEPORT if hasattr(socket, 'SO_REUSEPORT'): try: sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) except socket.error: # see #699 pass # Put packet ttl to max # The following ttl fix is to make this work on SunOS and BSD systems. # Ref : Issue #875 ttl = struct.pack('B', 255) sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, ttl) # Register socket to multicast group sock.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, socket.inet_aton(mcast_addr) + socket.inet_aton(any_addr)) # And finally bind all interfaces sock.bind((any_addr, mcast_port)) return sock # taken from http://stackoverflow.com/questions/1165352
ValueError
dataset/ETHPy150Open circus-tent/circus/circus/util.py/create_udp_socket
1,108
def foo(): try: test() except __HOLE__: raise RuntimeError("Accessing a undefined name should raise a NameError")
ValueError
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/test/except_in_raising_code.py/foo
1,109
def FilterGenerator(): class PrintPmPerceptualError(PrintFilter): def __init__(self): super(PrintPmPerceptualError, self).__init__('print_pm_perceptual_error', 'Prints perceptual error at different levels of a progressive mesh compared to full resolution') self.arguments.append(FileArgument("pm_file", "Path of the progressive mesh file. Specify NONE if no pm file.")) self.arguments.append(FileArgument("mipmap_tar_file", "Path of the tar file with mipmap levels in it")) def apply(self, mesh, pm_filename, mipmap_tarfilename): try: pm_filebuf = open(pm_filename, 'r') if pm_filename != 'NONE' else None except IOError, ex: raise FilterException("Error opening pm file: %s" % str(ex)) try: mipmap_tarfilebuf = open(mipmap_tarfilename, 'rb') except __HOLE__, ex: raise FilterException("Error opening mipmap tar: %s" % str(ex)) perceptualdiff = which('perceptualdiff') if perceptualdiff is None: raise FilterException("perceptualdiff exectuable not found on path") printPmPerceptualError(mesh, pm_filebuf, mipmap_tarfilebuf) return mesh return PrintPmPerceptualError()
IOError
dataset/ETHPy150Open pycollada/meshtool/meshtool/filters/print_filters/print_pm_perceptual_error.py/FilterGenerator
1,110
def daemonize(self): """ do the UNIX double-fork magic, see Stevens' "Advanced Programming in the UNIX Environment" for details (ISBN 0201563177) http://www.erlenstar.demon.co.uk/unix/faq_2.html#SEC16 """ try: pid = os.fork() if pid > 0: # exit first parent sys.exit(0) except __HOLE__ as e: sys.stderr.write("fork #1 failed: %d (%s)\n" % (e.errno, e.strerror)) sys.exit(1) # decouple from parent environment os.chdir("/") os.setsid() os.umask(0) # do second fork try: pid = os.fork() if pid > 0: # exit from second parent sys.exit(0) except OSError as e: sys.stderr.write("fork #2 failed: %d (%s)\n" % (e.errno, e.strerror)) sys.exit(1) # redirect standard file descriptors sys.stdout.flush() sys.stderr.flush() si = file(self.stdin, 'r') so = file(self.stdout, 'a+') se = file(self.stderr, 'a+', 0) os.dup2(si.fileno(), sys.stdin.fileno()) os.dup2(so.fileno(), sys.stdout.fileno()) os.dup2(se.fileno(), sys.stderr.fileno()) # write pidfile atexit.register(self.delpid) pid = str(os.getpid()) file(self.pidfile, 'w+').write("%s\n" % pid)
OSError
dataset/ETHPy150Open CacheBrowser/cachebrowser/cachebrowser/daemon.py/Daemon.daemonize
1,111
def start(self): """ Start the daemon """ # Check for a pidfile to see if the daemon already runs try: pf = file(self.pidfile, 'r') pid = int(pf.read().strip()) pf.close() except __HOLE__: pid = None if pid: message = "pidfile %s already exist. Daemon already running?\n" sys.stderr.write(message % self.pidfile) sys.exit(1) # Start the daemon self.daemonize() self.run()
IOError
dataset/ETHPy150Open CacheBrowser/cachebrowser/cachebrowser/daemon.py/Daemon.start
1,112
def stop(self): """ Stop the daemon """ # Get the pid from the pidfile try: pf = file(self.pidfile, 'r') pid = int(pf.read().strip()) pf.close() except IOError: pid = None if not pid: message = "pidfile %s does not exist. Daemon not running?\n" sys.stderr.write(message % self.pidfile) return # not an error in a restart # Try killing the daemon process try: while 1: os.kill(pid, SIGTERM) time.sleep(0.1) except __HOLE__ as err: err = str(err) if err.find("No such process") > 0: if os.path.exists(self.pidfile): os.remove(self.pidfile) else: print(str(err)) sys.exit(1)
OSError
dataset/ETHPy150Open CacheBrowser/cachebrowser/cachebrowser/daemon.py/Daemon.stop
1,113
def main(): try: f = open('.git', 'r') subgitdir = f.read() f.close() except (__HOLE__, OSError): raise SystemExit(1) subgitdir = subgitdir.replace('gitdir: ', '').strip() p = subprocess.Popen(['git','update-server-info'], cwd=subgitdir) p.communicate()
IOError
dataset/ETHPy150Open trebuchet-deploy/trigger/trigger/utils/submodule_update.py/main
1,114
def get_page_count(self): super(Python, self).get_page_count() page_count = 1 if self.mime_type == 'application/pdf' or self.soffice_file: # If file is a PDF open it with slate to determine the page count if self.soffice_file: file_object = IteratorIO(self.soffice_file).file_buffer else: file_object = self.file_object try: page_count = len(list(PDFPage.get_pages(file_object))) except Exception as exception: error_message = _( 'Exception determining PDF page count; %s' ) % exception logger.error(error_message) raise PageCountError(error_message) else: logger.debug('Document contains %d pages', page_count) return page_count finally: file_object.seek(0) else: try: image = Image.open(self.file_object) except __HOLE__ as exception: error_message = _( 'Exception determining PDF page count; %s' ) % exception logger.error(error_message) raise PageCountError(error_message) finally: self.file_object.seek(0) try: while True: image.seek(image.tell() + 1) page_count += 1 except EOFError: # end of sequence pass return page_count
IOError
dataset/ETHPy150Open mayan-edms/mayan-edms/mayan/apps/converter/backends/python.py/Python.get_page_count
1,115
def _izip_longest(*args, **kwds): """Taken from Python docs http://docs.python.org/library/itertools.html#itertools.izip """ fillvalue = kwds.get('fillvalue') def sentinel(counter = ([fillvalue]*(len(args)-1)).pop): yield counter() # yields the fillvalue, or raises IndexError fillers = itertools.repeat(fillvalue) iters = [itertools.chain(it, sentinel(), fillers) for it in args] try: for tup in itertools.izip(*iters): yield tup except __HOLE__: pass
IndexError
dataset/ETHPy150Open cloudkick/libcloud/libcloud/compute/drivers/linode.py/_izip_longest
1,116
def _get_plugins_module(self, package_name): """ Import 'plugins.py' from the package with the given name. If the package does not exist, or does not contain 'plugins.py' then return None. """ try: module = __import__(package_name + '.plugins', fromlist=['plugins']) except __HOLE__: module = None return module # smell: Looooong and ugly!
ImportError
dataset/ETHPy150Open enthought/envisage/envisage/package_plugin_manager.py/PackagePluginManager._get_plugins_module
1,117
def test_no_init_kwargs(self): """ Test that a view can't be accidentally instantiated before deployment """ try: view = SimpleView(key='value').as_view() self.fail('Should not be able to instantiate a view') except __HOLE__: pass
AttributeError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.5/tests/regressiontests/generic_views/base.py/ViewTest.test_no_init_kwargs
1,118
def test_no_init_args(self): """ Test that a view can't be accidentally instantiated before deployment """ try: view = SimpleView.as_view('value') self.fail('Should not be able to use non-keyword arguments instantiating a view') except __HOLE__: pass
TypeError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.5/tests/regressiontests/generic_views/base.py/ViewTest.test_no_init_args
1,119
def get_next_sibling(self): """ Returns this model instance's next sibling in the tree, or ``None`` if it doesn't have a next sibling. """ opts = self._meta if self.is_root_node(): filters = { '%s__isnull' % opts.parent_attr: True, '%s__gt' % opts.tree_id_attr: getattr(self, opts.tree_id_attr), } else: filters = { opts.parent_attr: getattr(self, '%s_id' % opts.parent_attr), '%s__gt' % opts.left_attr: getattr(self, opts.right_attr), } sibling = None try: sibling = self._tree_manager.filter(**filters)[0] except __HOLE__: pass return sibling
IndexError
dataset/ETHPy150Open agiliq/django-socialnews/socialnews/mptt/models.py/get_next_sibling
1,120
def get_previous_sibling(self): """ Returns this model instance's previous sibling in the tree, or ``None`` if it doesn't have a previous sibling. """ opts = self._meta if self.is_root_node(): filters = { '%s__isnull' % opts.parent_attr: True, '%s__lt' % opts.tree_id_attr: getattr(self, opts.tree_id_attr), } order_by = '-%s' % opts.tree_id_attr else: filters = { opts.parent_attr: getattr(self, '%s_id' % opts.parent_attr), '%s__lt' % opts.right_attr: getattr(self, opts.left_attr), } order_by = '-%s' % opts.right_attr sibling = None try: sibling = self._tree_manager.filter(**filters).order_by(order_by)[0] except __HOLE__: pass return sibling
IndexError
dataset/ETHPy150Open agiliq/django-socialnews/socialnews/mptt/models.py/get_previous_sibling
1,121
def has_changed(self, initial, data): # noqa if initial is None: initial = ['' for x in range(0, len(data))] else: if not isinstance(initial, list): initial = self.widget.decompress(initial) amount_field, currency_field = self.fields amount_initial, currency_initial = initial # We treat the amount and currency fields slightly # differently: if the amount has changed, then we definitely # consider the money value to have changed. If the currency # has changed, but the amount is *empty* then we do not # consider the money value to have changed. This means that it # plays nicely with empty formrows in formsets. try: amount_data = data[0] except __HOLE__: amount_data = None try: amount_initial = amount_field.to_python(amount_initial) except ValidationError: return True if amount_field.has_changed(amount_initial, amount_data): return True try: currency_data = data[1] except IndexError: currency_data = None try: currency_initial = currency_field.to_python(currency_initial) except ValidationError: return True # If the currency is valid, has changed and there is some # amount data, then the money value has changed. if currency_field.has_changed(currency_initial, currency_data) and amount_data: return True return False
IndexError
dataset/ETHPy150Open django-money/django-money/djmoney/forms/fields.py/MoneyField.has_changed
1,122
def test_ndarray_compat_properties(self): for o in self.objs: # check that we work for p in ['shape', 'dtype', 'flags', 'T', 'strides', 'itemsize', 'nbytes']: self.assertIsNotNone(getattr(o, p, None)) self.assertTrue(hasattr(o, 'base')) # if we have a datetimelike dtype then needs a view to work # but the user is responsible for that try: self.assertIsNotNone(o.data) except __HOLE__: pass self.assertRaises(ValueError, o.item) # len > 1 self.assertEqual(o.ndim, 1) self.assertEqual(o.size, len(o)) self.assertEqual(Index([1]).item(), 1) self.assertEqual(Series([1]).item(), 1)
ValueError
dataset/ETHPy150Open pydata/pandas/pandas/tests/test_base.py/TestIndexOps.test_ndarray_compat_properties
1,123
def test_ops(self): for op in ['max', 'min']: for o in self.objs: result = getattr(o, op)() if not isinstance(o, PeriodIndex): expected = getattr(o.values, op)() else: expected = pd.Period(ordinal=getattr(o.values, op)(), freq=o.freq) try: self.assertEqual(result, expected) except __HOLE__: # comparing tz-aware series with np.array results in # TypeError expected = expected.astype('M8[ns]').astype('int64') self.assertEqual(result.value, expected)
TypeError
dataset/ETHPy150Open pydata/pandas/pandas/tests/test_base.py/TestIndexOps.test_ops
1,124
@staticmethod def parse_rpm_output(output, tags, separator=';'): """ Parse output of the rpm query. :param output: list, decoded output (str) from the rpm subprocess :param tags: list, str fields used for query output :return: list, dicts describing each rpm package """ def field(tag): """ Get a field value by name """ try: value = fields[tags.index(tag)] except __HOLE__: return None if value == '(none)': return None return value components = [] sigmarker = 'Key ID ' for rpm in output: fields = rpm.rstrip('\n').split(separator) if len(fields) < len(tags): continue signature = field('SIGPGP:pgpsig') or field('SIGGPG:pgpsig') if signature: parts = signature.split(sigmarker, 1) if len(parts) > 1: signature = parts[1] component_rpm = { 'type': 'rpm', 'name': field('NAME'), 'version': field('VERSION'), 'release': field('RELEASE'), 'arch': field('ARCH'), 'sigmd5': field('SIGMD5'), 'signature': signature, } # Special handling for epoch as it must be an integer or None epoch = field('EPOCH') if epoch is not None: epoch = int(epoch) component_rpm['epoch'] = epoch if component_rpm['name'] != 'gpg-pubkey': components.append(component_rpm) return components
ValueError
dataset/ETHPy150Open projectatomic/atomic-reactor/atomic_reactor/plugins/exit_koji_promote.py/KojiPromotePlugin.parse_rpm_output
1,125
def get_rpms(self): """ Build a list of installed RPMs in the format required for the metadata. """ tags = [ 'NAME', 'VERSION', 'RELEASE', 'ARCH', 'EPOCH', 'SIGMD5', 'SIGPGP:pgpsig', 'SIGGPG:pgpsig', ] sep = ';' fmt = sep.join(["%%{%s}" % tag for tag in tags]) cmd = "/bin/rpm -qa --qf '{0}\n'".format(fmt) try: # py3 (status, output) = subprocess.getstatusoutput(cmd) except __HOLE__: # py2 with open('/dev/null', 'r+') as devnull: p = subprocess.Popen(cmd, shell=True, stdin=devnull, stdout=subprocess.PIPE, stderr=devnull) (stdout, stderr) = p.communicate() status = p.wait() output = stdout.decode() if status != 0: self.log.debug("%s: stderr output: %s", cmd, stderr) raise RuntimeError("%s: exit code %s" % (cmd, status)) return self.parse_rpm_output(output.splitlines(), tags, separator=sep)
AttributeError
dataset/ETHPy150Open projectatomic/atomic-reactor/atomic_reactor/plugins/exit_koji_promote.py/KojiPromotePlugin.get_rpms
1,126
def get_builder_image_id(self): """ Find out the docker ID of the buildroot image we are in. """ try: buildroot_tag = os.environ["OPENSHIFT_CUSTOM_BUILD_BASE_IMAGE"] except __HOLE__: return '' try: pod = self.osbs.get_pod_for_build(self.build_id) all_images = pod.get_container_image_ids() except OsbsException as ex: self.log.error("unable to find image id: %r", ex) return buildroot_tag try: return all_images[buildroot_tag] except KeyError: self.log.error("Unable to determine buildroot image ID for %s", buildroot_tag) return buildroot_tag
KeyError
dataset/ETHPy150Open projectatomic/atomic-reactor/atomic_reactor/plugins/exit_koji_promote.py/KojiPromotePlugin.get_builder_image_id
1,127
def get_image_components(self): """ Re-package the output of the rpmqa plugin into the format required for the metadata. """ try: output = self.workflow.postbuild_results[PostBuildRPMqaPlugin.key] except __HOLE__: self.log.error("%s plugin did not run!", PostBuildRPMqaPlugin.key) return [] return self.parse_rpm_output(output, PostBuildRPMqaPlugin.rpm_tags, separator=',')
KeyError
dataset/ETHPy150Open projectatomic/atomic-reactor/atomic_reactor/plugins/exit_koji_promote.py/KojiPromotePlugin.get_image_components
1,128
def get_build(self, metadata): build_start_time = metadata["creationTimestamp"] try: # Decode UTC RFC3339 date with no fractional seconds # (the format we expect) start_time_struct = time.strptime(build_start_time, '%Y-%m-%dT%H:%M:%SZ') start_time = int(time.mktime(start_time_struct)) except __HOLE__: self.log.error("Invalid time format (%s)", build_start_time) raise name = self.nvr_image.repo version, release = self.nvr_image.tag.split('-', 1) source = self.workflow.source if not isinstance(source, GitSource): raise RuntimeError('git source required') build = { 'name': name, 'version': version, 'release': release, 'source': "{0}#{1}".format(source.uri, source.commit_id), 'start_time': start_time, 'end_time': int(time.time()), 'extra': { 'image': {}, }, } if self.metadata_only: build['metadata_only'] = True return build
ValueError
dataset/ETHPy150Open projectatomic/atomic-reactor/atomic_reactor/plugins/exit_koji_promote.py/KojiPromotePlugin.get_build
1,129
def get_metadata(self): """ Build the metadata needed for importing the build :return: tuple, the metadata and the list of Output instances """ try: metadata = get_build_json()["metadata"] self.build_id = metadata["name"] except __HOLE__: self.log.error("No build metadata") raise for image in self.workflow.tag_conf.primary_images: # dash at first/last postition does not count if '-' in image.tag[1:-1]: self.nvr_image = image break else: raise RuntimeError('Unable to determine name-version-release') metadata_version = 0 build = self.get_build(metadata) buildroot = self.get_buildroot(build_id=self.build_id) output_files = self.get_output(buildroot['id']) koji_metadata = { 'metadata_version': metadata_version, 'build': build, 'buildroots': [buildroot], 'output': [output.metadata for output in output_files], } return koji_metadata, output_files
KeyError
dataset/ETHPy150Open projectatomic/atomic-reactor/atomic_reactor/plugins/exit_koji_promote.py/KojiPromotePlugin.get_metadata
1,130
def present(name, **kwargs): ''' Ensure that an account is present and properly configured name The email address associated with the Stormpath account directory_id The ID of a directory which the account belongs to. Required. password Required when creating a new account. If specified, it is advisable to reference the password in another database using an ``sdb://`` URL. Will NOT update the password if an account already exists. givenName Required when creating a new account. surname Required when creating a new account. username Optional. Must be unique across the owning directory. If not specified, the username will default to the email field. middleName Optional. status ``enabled`` accounts are able to login to their assigned applications, ``disabled`` accounts may not login to applications, ``unverified`` accounts are disabled and have not verified their email address. customData. Optional. Must be specified as a dict. ''' # Because __opts__ is not available outside of functions if __opts__.get('requests_lib', False): from requests.exceptions import HTTPError else: from urllib2 import HTTPError ret = {'name': name, 'changes': {}, 'result': None, 'comment': ''} info = {} try: result = __salt__['stormpath.show_account'](email=name, **kwargs) if len(result['items']) > 0: info = result['items'][0] except __HOLE__: pass needs_update = {} if info.get('email', False): for field in kwargs.keys(): if info.get(field, None) != kwargs[field]: needs_update[field] = kwargs[field] del needs_update['directory_id'] if 'password' in needs_update: del needs_update['password'] if len(needs_update.keys()) < 1: ret['result'] = True ret['comment'] = 'Stormpath account {0} already exists and is correct'.format(name) return ret if __opts__['test']: if len(needs_update.keys()) < 1: ret['comment'] = 'Stormpath account {0} needs to be created'.format(name) else: if 'password' in needs_update: needs_update['password'] = '**HIDDEN**' ret['comment'] = ('Stormpath account {0} needs the following ' 'fields to be updated: '.format(', '.join(needs_update))) return ret if len(needs_update.keys()) < 1: info = __salt__['stormpath.create_account'](email=name, **kwargs) comps = info['href'].split('/') account_id = comps[-1] ret['changes'] = info ret['result'] = True kwargs['password'] = '**HIDDEN**' ret['comment'] = 'Created account ID {0} ({1}): {2}'.format( account_id, name, pprint.pformat(kwargs)) return ret comps = info['href'].split('/') account_id = comps[-1] result = __salt__['stormpath.update_account'](account_id, items=needs_update) if result.get('href', None): ret['changes'] = needs_update ret['result'] = True if 'password' in needs_update: needs_update['password'] = '**HIDDEN**' ret['comment'] = 'Set the following fields for account ID {0} ({1}): {2}'.format( account_id, name, pprint.pformat(needs_update)) return ret else: ret['result'] = False ret['comment'] = 'Failed to set the following fields for account ID {0} ({1}): {2}'.format( account_id, name, pprint.pformat(needs_update)) return ret
HTTPError
dataset/ETHPy150Open saltstack/salt/salt/states/stormpath_account.py/present
1,131
def absent(name, directory_id=None): ''' Ensure that an account associated with the given email address is absent. Will search all directories for the account, unless a directory_id is specified. name The email address of the account to delete. directory_id Optional. The ID of the directory that the account is expected to belong to. If not specified, then a list of directories will be retrieved, and each will be scanned for the account. Specifying a directory_id will therefore cut down on the number of requests to Stormpath, and increase performance of this state. ''' # Because __opts__ is not available outside of functions if __opts__.get('requests_lib', False): from requests.exceptions import HTTPError else: from urllib2 import HTTPError ret = {'name': name, 'changes': {}, 'result': None, 'comment': ''} info = {} if directory_id is None: dirs = __salt__['stormpath.list_directories']() for dir_ in dirs.get('items', []): try: comps = dir_.get('href', '').split('/') directory_id = comps[-1] info = __salt__['stormpath.show_account'](email=name, directory_id=directory_id) if len(info.get('items', [])) > 0: info = info['items'][0] break except __HOLE__: pass else: info = __salt__['stormpath.show_account'](email=name, directory_id=directory_id) info = info['items'][0] if 'items' in info: ret['result'] = True ret['comment'] = 'Stormpath account {0} already absent'.format(name) return ret if __opts__['test']: ret['comment'] = 'Stormpath account {0} needs to be deleted'.format(name) return ret comps = info['href'].split('/') account_id = comps[-1] if __salt__['stormpath.delete_account'](account_id): ret['changes'] = {'deleted': account_id} ret['result'] = True ret['comment'] = 'Stormpath account {0} was deleted'.format(name) return ret else: ret['result'] = False ret['comment'] = 'Failed to delete Stormpath account {0}'.format(name) return ret
HTTPError
dataset/ETHPy150Open saltstack/salt/salt/states/stormpath_account.py/absent
1,132
def save(self): """ Attempts to save the current file, prompting for a path if necessary. Returns whether the file was saved. """ editor = self.window.central_pane.editor try: editor.save() except __HOLE__: # If you are trying to save to a file that doesn't exist, open up a # FileDialog with a 'save as' action. dialog = FileDialog(parent=self.window.control, action='save as', wildcard='*.py') if dialog.open() == OK: editor.save(dialog.path) else: return False return True ########################################################################### # Protected interface. ###########################################################################
IOError
dataset/ETHPy150Open enthought/pyface/examples/tasks/basic/example_task.py/ExampleTask.save
1,133
def test_failstub(): failstub = StubCallable(throw=NotImplementedError('foo')) try: failstub('sanity') assert False except __HOLE__: pass assert 1 == len(failstub.calls) assert "(('sanity',), {})" == str(failstub.calls[0])
NotImplementedError
dataset/ETHPy150Open probcomp/bayeslite/tests/test_loggers.py/test_failstub
1,134
def test_logged_query_fail(): # If the query itself fails, we should not impede that floating to top. failstub = StubCallable(throw=NotImplementedError('foo')) okstub = StubCallable() lgr = loggers.CallHomeStatusLogger(post=okstub) try: with loggers.logged_query(logger=lgr, **THE_USUAL): failstub('die') assert False except __HOLE__: pass time.sleep(0.2) # To let the call-home thread run, so this is less flaky. assert 1 == len(okstub.calls) check_logcall(okstub.calls[0]) assert 1 == len(failstub.calls) assert "[(('die',), {})]" == str(failstub.calls)
NotImplementedError
dataset/ETHPy150Open probcomp/bayeslite/tests/test_loggers.py/test_logged_query_fail
1,135
def refresh_file_mapping(self): ''' Override the default refresh_file_mapping to look for nova files recursively, rather than only in a top-level directory ''' # map of suffix to description for imp self.suffix_map = {} suffix_order = [] # local list to determine precedence of extensions suffix_order.append('.yaml') for (suffix, mode, kind) in imp.get_suffixes(): self.suffix_map[suffix] = (suffix, mode, kind) suffix_order.append(suffix) # create mapping of filename (without suffix) to (path, suffix) self.file_mapping = {} for mod_dir in self.module_dirs: for dirname, dirs, files in os.walk(mod_dir): if '.git' in dirs: dirs.remove('.git') for filename in files: try: if filename.startswith('_'): # skip private modules # log messages omitted for obviousness continue _, ext = os.path.splitext(filename) fpath = os.path.join(dirname, filename) f_withext = fpath.partition(mod_dir)[-1] # Nova only supports .py and .yaml if ext not in ['.py', '.yaml']: continue if f_withext in self.disabled: log.trace( 'Skipping {0}, it is disabled by configuration'.format( filename ) ) continue # if we don't have it, we want it elif f_withext not in self.file_mapping: self.file_mapping[f_withext] = (fpath, ext) # if we do, we want it if we have a higher precidence ext else: curr_ext = self.file_mapping[f_withext][1] #log.debug("****** curr_ext={0} ext={1} suffix_order={2}".format(curr_ext, ext, suffix_order)) if curr_ext and suffix_order.index(ext) < suffix_order.index(curr_ext): self.file_mapping[f_withext] = (fpath, ext) except __HOLE__: continue
OSError
dataset/ETHPy150Open HubbleStack/Nova/_modules/hubble.py/NovaLazyLoader.refresh_file_mapping
1,136
def _load_module(self, name): ''' Override the module load code ''' mod = None fpath, suffix = self.file_mapping[name] self.loaded_files.add(name) if suffix == '.yaml': try: with open(fpath) as fh_: data = yaml.safe_load(fh_) except Exception as exc: self.__missing_data__[name] = str(exc) return False self.__data__[name] = data return True try: sys.path.append(os.path.dirname(fpath)) desc = self.suffix_map[suffix] # if it is a directory, we don't open a file with salt.utils.fopen(fpath, desc[1]) as fn_: mod = imp.load_module( '{0}.{1}.{2}.{3}'.format( self.loaded_base_name, self.mod_type_check(fpath), self.tag, name ), fn_, fpath, desc) except IOError: raise except ImportError as error: log.debug( 'Failed to import {0} {1}:\n'.format( self.tag, name ), exc_info=True ) self.missing_modules[name] = str(error) return False except Exception as error: log.error( 'Failed to import {0} {1}, this is due most likely to a ' 'syntax error:\n'.format( self.tag, name ), exc_info=True ) self.missing_modules[name] = str(error) return False except SystemExit as error: log.error( 'Failed to import {0} {1} as the module called exit()\n'.format( self.tag, name ), exc_info=True ) self.missing_modules[name] = str(error) return False finally: sys.path.pop() mod.__grains__ = __grains__ mod.__pillar__ = __pillar__ mod.__opts__ = __opts__ mod.__salt__ = __salt__ # pack whatever other globals we were asked to for p_name, p_value in six.iteritems(self.pack): setattr(mod, p_name, p_value) module_name = name # Call a module's initialization method if it exists module_init = getattr(mod, '__init__', None) if inspect.isfunction(module_init): try: module_init(self.opts) except __HOLE__ as e: log.error(e) except Exception: err_string = '__init__ failed' log.debug( 'Error loading {0}.{1}: {2}'.format( self.tag, module_name, err_string), exc_info=True) self.missing_modules[name] = err_string return False # if virtual modules are enabled, we need to look for the # __virtual__() function inside that module and run it. if self.virtual_enable: (virtual_ret, module_name, virtual_err) = self.process_virtual( mod, module_name, ) if virtual_err is not None: log.debug('Error loading {0}.{1}: {2}'.format(self.tag, module_name, virtual_err, )) # if process_virtual returned a non-True value then we are # supposed to not process this module if virtual_ret is not True: # If a module has information about why it could not be loaded, record it self.missing_modules[name] = virtual_err return False # If this is a proxy minion then MOST modules cannot work. Therefore, require that # any module that does work with salt-proxy-minion define __proxyenabled__ as a list # containing the names of the proxy types that the module supports. # # Render modules and state modules are OK though if 'proxy' in self.opts: if self.tag in ['grains', 'proxy']: if not hasattr(mod, '__proxyenabled__') or \ (self.opts['proxy']['proxytype'] not in mod.__proxyenabled__ and '*' not in mod.__proxyenabled__): err_string = 'not a proxy_minion enabled module' self.missing_modules[name] = err_string return False if getattr(mod, '__load__', False) is not False: log.info( 'The functions from module {0!r} are being loaded from the ' 'provided __load__ attribute'.format( module_name ) ) mod_dict = salt.utils.odict.OrderedDict() # In nova we only care about the audit() function, and we want to # store it with directory structure in the name. for attr in getattr(mod, '__load__', dir(mod)): if attr != 'audit': continue func = getattr(mod, attr) # Save many references for lookups self._dict[name] = func mod_dict[name] = func self.loaded_modules[name] = mod_dict return True
TypeError
dataset/ETHPy150Open HubbleStack/Nova/_modules/hubble.py/NovaLazyLoader._load_module
1,137
def fetch_issues(self): try: iterator = self.repo.iter_issues except __HOLE__: iterator = self.repo.issues for issue in iterator(state='all'): # is this a pull request ? if getattr(issue, 'pull_request', None): msg = 'Skipping pull request %s: %s' % (issue.id, issue.title) logger.debug(msg) continue msg = 'Skipping pull request %s: %s' % (issue.id, issue.title) logger.debug(msg) issue_data = {'source_id': issue.id, 'priority_id': 1 # set default priority to be safe } if getattr(issue, 'title', None): issue_data['subject'] = issue.title if getattr(issue, 'body', None): try: issue_data['description'] = issue.body_text except: issue_data['description'] = issue.body if getattr(issue, 'tracker', None): # not available on github pass if getattr(issue, 'state', None): # the default redmine statuses start with a capital letter issue_data['status_name'] = issue.state.title() if getattr(issue, 'priority', None): # not available on github pass if getattr(issue, 'done_ratio', None): # not available on github pass if getattr(issue, 'story_points', None): # not available on github pass if getattr(issue, 'milestone', None): issue_data['fixed_version_id'] = issue.milestone.number issue_data['version_name'] = issue.milestone.title if getattr(issue, 'assignee', None): issue_data['assigned_to_id'] = issue.assignee.id login = issue.assignee.login issue_data['assigned_to_login'] = login yield issue_data
AttributeError
dataset/ETHPy150Open redhat-cip/software-factory/tools/sfmigration/sfmigration/issues/github.py/GithubImporter.fetch_issues
1,138
def fetch_versions(self): try: iterator = self.repo.iter_milestones except __HOLE__: iterator = self.repo.milestones for version in iterator(): logger.debug("Fetching version %s: %s" % (version.number, version.title)) version_data = {} version_data['source_id'] = version.number version_data['name'] = version.title if getattr(version, 'state', None): version_data['status'] = version.state yield version_data
AttributeError
dataset/ETHPy150Open redhat-cip/software-factory/tools/sfmigration/sfmigration/issues/github.py/GithubImporter.fetch_versions
1,139
def process(self, lookup_type, value, connection): """ Returns a tuple of data suitable for inclusion in a WhereNode instance. """ # Because of circular imports, we need to import this here. from django.db.models.base import ObjectDoesNotExist try: if self.field: params = self.field.get_db_prep_lookup(lookup_type, value, connection=connection, prepared=True) db_type = self.field.db_type(connection=connection) else: # This branch is used at times when we add a comparison to NULL # (we don't really want to waste time looking up the associated # field object at the calling location). params = Field().get_db_prep_lookup(lookup_type, value, connection=connection, prepared=True) db_type = None except __HOLE__: raise EmptyShortCircuit return (self.alias, self.col, db_type), params
ObjectDoesNotExist
dataset/ETHPy150Open daoluan/decode-Django/Django-1.5.1/django/db/models/sql/where.py/Constraint.process
1,140
def tearDown(self): typepad.client = self.typepad_client del self.typepad_client for x in ('headers', 'body'): try: delattr(self, x) except __HOLE__: pass
AttributeError
dataset/ETHPy150Open typepad/python-typepad-api/tests/test_tpobject.py/ClientTestCase.tearDown
1,141
def __init__(self, session=None, verify_ssl=False, **credentials): self.verify_ssl = verify_ssl if session: if isinstance(session, dict): logger.info('Trying to recover OpenStack session.') self.session = OpenStackSession.recover(session, verify_ssl=verify_ssl) self.session.validate() else: self.session = session else: try: self.session = OpenStackSession(verify_ssl=verify_ssl, **credentials) except __HOLE__ as e: logger.error('Failed to create OpenStack session.') six.reraise(OpenStackBackendError, e)
AttributeError
dataset/ETHPy150Open opennode/nodeconductor/nodeconductor/openstack/backend.py/OpenStackClient.__init__
1,142
def get_instance(self, instance_id): try: nova = self.nova_client cinder = self.cinder_client instance = nova.servers.get(instance_id) try: attached_volume_ids = [v.volumeId for v in nova.volumes.get_server_volumes(instance_id)] if len(attached_volume_ids) != 2: raise OpenStackBackendError('Only instances with 2 volumes are supported') for volume_id in attached_volume_ids: volume = cinder.volumes.get(volume_id) # Blessed be OpenStack developers for returning booleans as strings if volume.bootable == 'true': system_volume = volume elif volume.bootable == 'false': data_volume = volume flavor = nova.flavors.get(instance.flavor['id']) cores = flavor.vcpus ram = flavor.ram ips = {} for net_conf in instance.addresses.values(): for ip in net_conf: if ip['OS-EXT-IPS:type'] == 'fixed': ips['internal'] = ip['addr'] if ip['OS-EXT-IPS:type'] == 'floating': ips['external'] = ip['addr'] except nova_exceptions.ClientException as e: logger.exception("Failed to lookup instance %s information", instance_id) six.reraise(OpenStackBackendError, e) try: d = dateparse.parse_datetime(instance.to_dict()['OS-SRV-USG:launched_at']) except (__HOLE__, ValueError): launch_time = None else: # At the moment OpenStack does not provide any timezone info, # but in future it might do. if timezone.is_naive(d): launch_time = timezone.make_aware(d, timezone.utc) instance.nc_model_data = dict( name=instance.name or instance.id, key_name=instance.key_name or '', start_time=launch_time, state=self._get_instance_state(instance), created=dateparse.parse_datetime(instance.created), cores=cores, ram=ram, disk=self.gb2mb(system_volume.size + data_volume.size), system_volume_id=system_volume.id, system_volume_size=self.gb2mb(system_volume.size), data_volume_id=data_volume.id, data_volume_size=self.gb2mb(data_volume.size), internal_ips=ips.get('internal', ''), external_ips=ips.get('external', ''), security_groups=[sg['name'] for sg in instance.security_groups], ) except (glance_exceptions.ClientException, cinder_exceptions.ClientException, nova_exceptions.ClientException, neutron_exceptions.NeutronClientException) as e: six.reraise(OpenStackBackendError, e) return instance
KeyError
dataset/ETHPy150Open opennode/nodeconductor/nodeconductor/openstack/backend.py/OpenStackBackend.get_instance
1,143
def provision_instance(self, instance, backend_flavor_id=None, backend_image_id=None, system_volume_id=None, data_volume_id=None, skip_external_ip_assignment=False, public_key=None): logger.info('About to provision instance %s', instance.uuid) try: nova = self.nova_client cinder = self.cinder_client neutron = self.neutron_client backend_flavor = nova.flavors.get(backend_flavor_id) # verify if the internal network to connect to exists service_project_link = instance.service_project_link # XXX: In the future instance should depend on tenant. Now SPL can have only one tenant. tenant = service_project_link.tenant try: neutron.show_network(service_project_link.internal_network_id) except neutron_exceptions.NeutronClientException: logger.exception('Internal network with id of %s was not found', service_project_link.internal_network_id) raise OpenStackBackendError('Unable to find network to attach instance to') if not skip_external_ip_assignment: # TODO: check availability and quota if not service_project_link.floating_ips.filter(status='DOWN').exists(): self.allocate_floating_ip_address(tenant) floating_ip = service_project_link.floating_ips.filter(status='DOWN').first() instance.external_ips = floating_ip.address floating_ip.status = 'BOOKED' floating_ip.save(update_fields=['status']) # instance key name and fingerprint are optional if instance.key_name: backend_public_key = self.get_or_create_ssh_key_for_tenant( tenant, instance.key_name, instance.key_fingerprint, public_key) else: backend_public_key = None if not system_volume_id: system_volume_name = '{0}-system'.format(instance.name) logger.info('Creating volume %s for instance %s', system_volume_name, instance.uuid) system_volume = cinder.volumes.create( size=self.mb2gb(instance.system_volume_size), display_name=system_volume_name, display_description='', imageRef=backend_image_id) system_volume_id = system_volume.id if not data_volume_id: data_volume_name = '{0}-data'.format(instance.name) logger.info('Creating volume %s for instance %s', data_volume_name, instance.uuid) data_volume = cinder.volumes.create( size=self.mb2gb(instance.data_volume_size), display_name=data_volume_name, display_description='') data_volume_id = data_volume.id if not self._wait_for_volume_status(system_volume_id, cinder, 'available', 'error'): logger.error( "Failed to provision instance %s: timed out waiting " "for system volume %s to become available", instance.uuid, system_volume_id) raise OpenStackBackendError("Timed out waiting for instance %s to provision" % instance.uuid) if not self._wait_for_volume_status(data_volume_id, cinder, 'available', 'error'): logger.error( "Failed to provision instance %s: timed out waiting " "for data volume %s to become available", instance.uuid, data_volume_id) raise OpenStackBackendError("Timed out waiting for instance %s to provision" % instance.uuid) security_group_ids = instance.security_groups.values_list('security_group__backend_id', flat=True) server_create_parameters = dict( name=instance.name, image=None, # Boot from volume, see boot_index below flavor=backend_flavor, block_device_mapping_v2=[ { 'boot_index': 0, 'destination_type': 'volume', 'device_type': 'disk', 'source_type': 'volume', 'uuid': system_volume_id, 'delete_on_termination': True, }, { 'destination_type': 'volume', 'device_type': 'disk', 'source_type': 'volume', 'uuid': data_volume_id, 'delete_on_termination': True, }, ], nics=[ {'net-id': service_project_link.internal_network_id} ], key_name=backend_public_key.name if backend_public_key is not None else None, security_groups=security_group_ids, ) availability_zone = service_project_link.availability_zone if availability_zone: server_create_parameters['availability_zone'] = availability_zone if instance.user_data: server_create_parameters['userdata'] = instance.user_data server = nova.servers.create(**server_create_parameters) instance.backend_id = server.id instance.system_volume_id = system_volume_id instance.data_volume_id = data_volume_id instance.save() if not self._wait_for_instance_status(server.id, nova, 'ACTIVE'): logger.error( "Failed to provision instance %s: timed out waiting " "for instance to become online", instance.uuid) raise OpenStackBackendError("Timed out waiting for instance %s to provision" % instance.uuid) logger.debug("About to infer internal ip addresses of instance %s", instance.uuid) try: server = nova.servers.get(server.id) fixed_address = server.addresses.values()[0][0]['addr'] except (nova_exceptions.ClientException, __HOLE__, IndexError): logger.exception( "Failed to infer internal ip addresses of instance %s", instance.uuid) else: instance.internal_ips = fixed_address instance.save() logger.info( "Successfully inferred internal ip addresses of instance %s", instance.uuid) self.push_floating_ip_to_instance(instance, server) backend_security_groups = server.list_security_group() for bsg in backend_security_groups: if instance.security_groups.filter(security_group__name=bsg.name).exists(): continue try: security_group = service_project_link.security_groups.get(name=bsg.name) except models.SecurityGroup.DoesNotExist: logger.error( 'SPL %s (PK: %s) does not have security group "%s", but its instance %s (PK: %s) has.' % (service_project_link, service_project_link.pk, bsg.name, instance, instance.pk) ) else: instance.security_groups.create(security_group=security_group) except (glance_exceptions.ClientException, cinder_exceptions.ClientException, nova_exceptions.ClientException, neutron_exceptions.NeutronClientException) as e: logger.exception("Failed to provision instance %s", instance.uuid) six.reraise(OpenStackBackendError, e) else: logger.info("Successfully provisioned instance %s", instance.uuid)
KeyError
dataset/ETHPy150Open opennode/nodeconductor/nodeconductor/openstack/backend.py/OpenStackBackend.provision_instance
1,144
def push_floating_ip_to_instance(self, instance, server): if not instance.external_ips or not instance.internal_ips: return logger.debug('About to add external ip %s to instance %s', instance.external_ips, instance.uuid) service_project_link = instance.service_project_link try: floating_ip = service_project_link.floating_ips.get( status__in=('BOOKED', 'DOWN'), address=instance.external_ips, backend_network_id=service_project_link.external_network_id ) server.add_floating_ip(address=instance.external_ips, fixed_address=instance.internal_ips) except ( nova_exceptions.ClientException, __HOLE__, MultipleObjectsReturned, KeyError, IndexError, ): logger.exception('Failed to add external ip %s to instance %s', instance.external_ips, instance.uuid) instance.set_erred() instance.error_message = 'Failed to add external ip %s to instance %s' % (instance.external_ips, instance.uuid) instance.save() else: floating_ip.status = 'ACTIVE' floating_ip.save() logger.info('Successfully added external ip %s to instance %s', instance.external_ips, instance.uuid)
ObjectDoesNotExist
dataset/ETHPy150Open opennode/nodeconductor/nodeconductor/openstack/backend.py/OpenStackBackend.push_floating_ip_to_instance
1,145
def test_integrity(self): Group.objects.create(name='jack') try: Team.objects.convert_from_user(self.jack) except __HOLE__: pass else: self.fail("Cannot allow to convert user into team if there is " "a group with same name already persisted")
ValidationError
dataset/ETHPy150Open lukaszb/django-projector/projector/tests/test_user2team_conversion.py/UserToTeamConversion.test_integrity
1,146
def test_anonymous(self): user = AnonymousUser() try: Team.objects.convert_from_user(user) except __HOLE__: pass else: self.fail("Cannot allow to convert anonymous user to team")
ValidationError
dataset/ETHPy150Open lukaszb/django-projector/projector/tests/test_user2team_conversion.py/UserToTeamConversion.test_anonymous
1,147
def test_staff(self): self.jack.is_staff = True self.jack.save() try: Team.objects.convert_from_user(self.jack) except __HOLE__: pass else: self.fail("Cannot allow to convert staff member to team")
ValidationError
dataset/ETHPy150Open lukaszb/django-projector/projector/tests/test_user2team_conversion.py/UserToTeamConversion.test_staff
1,148
def test_superuser(self): self.jack.is_superuser = True self.jack.save() try: Team.objects.convert_from_user(self.jack) except __HOLE__: pass else: self.fail("Cannot allow to convert superuser to team")
ValidationError
dataset/ETHPy150Open lukaszb/django-projector/projector/tests/test_user2team_conversion.py/UserToTeamConversion.test_superuser
1,149
def test_non_active(self): self.jack.is_active = False self.jack.save() try: Team.objects.convert_from_user(self.jack) except __HOLE__: pass else: self.fail("Cannot allow to convert inactive user to team")
ValidationError
dataset/ETHPy150Open lukaszb/django-projector/projector/tests/test_user2team_conversion.py/UserToTeamConversion.test_non_active
1,150
def astnode(self, s): """Return a Python2 ast Node compiled from a string.""" try: import compiler except __HOLE__: # Fallback to eval when compiler package is not available, # e.g. IronPython 1.0. return eval(s) p = compiler.parse("__tempvalue__ = " + s) return p.getChildren()[1].getChildren()[0].getChildren()[1]
ImportError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/cherrypy/cherrypy/lib/reprconf.py/_Builder2.astnode
1,151
def build_Name(self, o): name = o.name if name == 'None': return None if name == 'True': return True if name == 'False': return False # See if the Name is a package or module. If it is, import it. try: return modules(name) except __HOLE__: pass # See if the Name is in builtins. try: return getattr(builtins, name) except AttributeError: pass raise TypeError("unrepr could not resolve the name %s" % repr(name))
ImportError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/cherrypy/cherrypy/lib/reprconf.py/_Builder2.build_Name
1,152
def astnode(self, s): """Return a Python3 ast Node compiled from a string.""" try: import ast except __HOLE__: # Fallback to eval when ast package is not available, # e.g. IronPython 1.0. return eval(s) p = ast.parse("__tempvalue__ = " + s) return p.body[0].value
ImportError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/cherrypy/cherrypy/lib/reprconf.py/_Builder3.astnode
1,153
def build_Name(self, o): name = o.id if name == 'None': return None if name == 'True': return True if name == 'False': return False # See if the Name is a package or module. If it is, import it. try: return modules(name) except ImportError: pass # See if the Name is in builtins. try: import builtins return getattr(builtins, name) except __HOLE__: pass raise TypeError("unrepr could not resolve the name %s" % repr(name))
AttributeError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/cherrypy/cherrypy/lib/reprconf.py/_Builder3.build_Name
1,154
def modules(modulePath): """Load a module and retrieve a reference to that module.""" try: mod = sys.modules[modulePath] if mod is None: raise KeyError() except __HOLE__: # The last [''] is important. mod = __import__(modulePath, globals(), locals(), ['']) return mod
KeyError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/cherrypy/cherrypy/lib/reprconf.py/modules
1,155
def attributes(full_attribute_name): """Load a module and retrieve an attribute of that module.""" # Parse out the path, module, and attribute last_dot = full_attribute_name.rfind(".") attr_name = full_attribute_name[last_dot + 1:] mod_path = full_attribute_name[:last_dot] mod = modules(mod_path) # Let an AttributeError propagate outward. try: attr = getattr(mod, attr_name) except __HOLE__: raise AttributeError("'%s' object has no attribute '%s'" % (mod_path, attr_name)) # Return a reference to the attribute. return attr
AttributeError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/cherrypy/cherrypy/lib/reprconf.py/attributes
1,156
def main(func, modification_callback, *args, **kwargs): """Run the given function and restart any time modules are changed.""" if os.environ.get('RUN_MAIN'): exit_code = [] def main_thread(): try: func(*args, **kwargs) exit_code.append(None) except __HOLE__ as e: exit_code.append(e.code) except: traceback.print_exception(*sys.exc_info()) exit_code.append(1) def check_exit(): if exit_code: sys.exit(exit_code[0]) # Lanch the actual program as a child thread thread = threading.Thread(target=main_thread, name='Main thread') thread.setDaemon(True) thread.start() try: # Now wait for a file modification and quit _reloader_thread(modification_callback, check_exit) except KeyboardInterrupt: pass else: # Initial invocation just waits around restarting this executable try: sys.exit(_restart_with_reloader()) except KeyboardInterrupt: pass
SystemExit
dataset/ETHPy150Open edgewall/trac/trac/util/autoreload.py/main
1,157
@requires_system_grains
def test_pip_installed_weird_install(self, grains=None):
    # First, check to see if this is running on CentOS 5. If so, skip this test.
    if grains['os'] in ('CentOS',) and grains['osrelease_info'][0] in (5,):
        self.skipTest('This test does not run reliably on CentOS 5')

    ographite = '/opt/graphite'
    if os.path.isdir(ographite):
        self.skipTest(
            'You already have \'{0}\'. This test would overwrite this '
            'directory'.format(ographite)
        )
    try:
        os.makedirs(ographite)
    except __HOLE__ as err:
        if err.errno == 13:
            # Permission denied
            self.skipTest(
                'You don\'t have the required permissions to run this test'
            )
    finally:
        if os.path.isdir(ographite):
            shutil.rmtree(ographite)

    venv_dir = os.path.join(
        integration.SYS_TMP_DIR, 'pip-installed-weird-install'
    )
    try:
        # Since we don't have the virtualenv created, pip.installed will
        # throw an error.
        ret = self.run_function(
            'state.sls', mods='pip-installed-weird-install'
        )
        self.assertSaltTrueReturn(ret)
        # We cannot use assertInSaltComment here because we need to skip
        # some of the state return parts
        for key in six.iterkeys(ret):
            self.assertTrue(ret[key]['result'])
            if ret[key]['comment'] == 'Created new virtualenv':
                continue
            self.assertEqual(
                ret[key]['comment'],
                'There was no error installing package \'carbon\' '
                'although it does not show when calling \'pip.freeze\'.'
            )
    finally:
        if os.path.isdir(venv_dir):
            shutil.rmtree(venv_dir)
        if os.path.isdir('/opt/graphite'):
            shutil.rmtree('/opt/graphite')
OSError
dataset/ETHPy150Open saltstack/salt/tests/integration/states/pip.py/PipStateTest.test_pip_installed_weird_install
1,158
def test_issue_6833_pip_upgrade_pip(self):
    # Create the testing virtualenv
    venv_dir = os.path.join(
        integration.TMP, '6833-pip-upgrade-pip'
    )
    ret = self.run_function('virtualenv.create', [venv_dir])
    try:
        try:
            self.assertEqual(ret['retcode'], 0)
            self.assertIn(
                'New python executable',
                ret['stdout']
            )
        except __HOLE__:
            import pprint
            pprint.pprint(ret)
            raise

        # Let's install a fixed version pip over whatever pip was
        # previously installed
        ret = self.run_function(
            'pip.install', ['pip==6.0'], upgrade=True,
            ignore_installed=True,
            bin_env=venv_dir
        )
        try:
            self.assertEqual(ret['retcode'], 0)
            self.assertIn(
                'Successfully installed pip',
                ret['stdout']
            )
        except AssertionError:
            import pprint
            pprint.pprint(ret)
            raise

        # Let's make sure we have pip 6.0 installed
        self.assertEqual(
            self.run_function('pip.list', ['pip'], bin_env=venv_dir),
            {'pip': '6.0'}
        )

        # Now the actual pip upgrade pip test
        ret = self.run_state(
            'pip.installed', name='pip==6.0.7', upgrade=True,
            bin_env=venv_dir
        )
        try:
            self.assertSaltTrueReturn(ret)
            self.assertInSaltReturn(
                'Installed',
                ret,
                ['changes', 'pip==6.0.7']
            )
        except AssertionError:
            import pprint
            pprint.pprint(ret)
            raise
    finally:
        if os.path.isdir(venv_dir):
            shutil.rmtree(venv_dir)
AssertionError
dataset/ETHPy150Open saltstack/salt/tests/integration/states/pip.py/PipStateTest.test_issue_6833_pip_upgrade_pip
1,159
def sanitize_url(self, url):
    if url.startswith("http"):
        return url
    else:
        try:
            image = Image.objects.get(pk=int(url))
            return image.image_path.url
        except Image.DoesNotExist:
            pass
        except __HOLE__:
            return url
    return ""
ValueError
dataset/ETHPy150Open pinax/pinax-blog/pinax/blog/parsers/markdown_parser.py/ImageLookupImagePattern.sanitize_url
1,160
def do_open(self, http_class, req):
    proxy_authorization = None
    for header in req.headers:
        if header.lower() == "proxy-authorization":
            proxy_authorization = req.headers[header]
            break

    # Intentionally very specific so as to opt for false negatives
    # rather than false positives.
    try:
        return urllib2.HTTPSHandler.do_open(
            self,
            create_fancy_connection(req._tunnel_host,
                                    req._key_file,
                                    req._cert_file,
                                    req._ca_certs,
                                    proxy_authorization),
            req)
    except urllib2.URLError, url_error:
        try:
            import ssl
            if (type(url_error.reason) == ssl.SSLError and
                url_error.reason.args[0] == 1):
                # Display the reason to the user. Need to use args for python2.5
                # compat.
                raise InvalidCertificateException(req.host, "",
                                                  url_error.reason.args[1])
        except __HOLE__:
            pass

        raise url_error

# We have to implement this so that we persist the tunneling behavior
# through redirects.
ImportError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/fancy_urllib/fancy_urllib/__init__.py/FancyHTTPSHandler.do_open
1,161
def render(self, name, value, attrs=None):
    # Update the template parameters with any attributes passed in.
    if attrs:
        self.params.update(attrs)

    # Defaulting the WKT value to a blank string -- this
    # will be tested in the JavaScript and the appropriate
    # interface will be constructed.
    self.params['wkt'] = ''

    # If a string reaches here (via a validation error on another
    # field) then just reconstruct the Geometry.
    if isinstance(value, basestring):
        try:
            value = GEOSGeometry(value)
        except (GEOSException, __HOLE__):
            value = None

    if value and value.geom_type.upper() != self.geom_type:
        value = None

    # Constructing the dictionary of the map options.
    self.params['map_options'] = self.map_options()

    # Constructing the JavaScript module name using the name of
    # the GeometryField (passed in via the `attrs` keyword).
    # Use the 'name' attr for the field name (rather than 'field')
    self.params['name'] = name
    # note: we must switch out dashes for underscores since js
    # functions are created using the module variable
    js_safe_name = self.params['name'].replace('-', '_')
    self.params['module'] = 'geodjango_%s' % js_safe_name

    if value:
        # Transforming the geometry to the projection used on the
        # OpenLayers map.
        srid = self.params['srid']
        if value.srid != srid:
            try:
                ogr = value.ogr
                ogr.transform(srid)
                wkt = ogr.wkt
            except OGRException:
                wkt = ''
        else:
            wkt = value.wkt

        # Setting the parameter WKT with that of the transformed
        # geometry.
        self.params['wkt'] = wkt

    return loader.render_to_string(self.template, self.params,
                                   context_instance=geo_context)
ValueError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.2/django/contrib/gis/admin/widgets.py/OpenLayersWidget.render
1,162
def to_python(self, value):
    if value == "":
        return None

    try:
        if isinstance(value, basestring):
            return json.loads(value)
        elif isinstance(value, bytes):
            return json.loads(value.decode('utf8'))
    except __HOLE__:
        pass
    return value
ValueError
dataset/ETHPy150Open skorokithakis/django-annoying/annoying/fields.py/JSONField.to_python
1,163
def login(self, username, password):
    try:
        request_data = {'postUserLogin': {
            'login': username,
            'password': password,
            'remember': 1,
        }}
        response = self.request(self.LOGIN_URI, request_data)
        response['userLogin']['profile']
        self.request(self.TOKEN_URI)
    except urllib2.URLError:
        raise AuthenticationError('Please provide correct username and password.')
    except __HOLE__:
        raise AuthenticationError('No userLogin information in response to login.')
KeyError
dataset/ETHPy150Open comoga/gooddata-python/gooddataclient/connection.py/Connection.login
1,164
def create_directory(self, directory_path):
    """Create a directory if it does not already exist.

    :param str directory_name: A fully qualified path of the directory
        to create.
    :returns: bool
    """
    try:
        if os.path.exists(directory_path):
            return True
        else:
            os.makedirs(directory_path)
            return True
    except __HOLE__:
        # OSError is thrown for cases like no permission
        pass

    return False
OSError
dataset/ETHPy150Open jmathai/elodie/elodie/filesystem.py/FileSystem.create_directory
1,165
def delete_directory_if_empty(self, directory_path):
    """Delete a directory only if it's empty.

    Instead of checking first using
    `len([name for name in os.listdir(directory_path)]) == 0`,
    we catch the OSError exception.

    :param str directory_name: A fully qualified path of the directory
        to delete.
    """
    try:
        os.rmdir(directory_path)
        return True
    except __HOLE__:
        pass

    return False
OSError
dataset/ETHPy150Open jmathai/elodie/elodie/filesystem.py/FileSystem.delete_directory_if_empty
1,166
def load_tests(loader, tests, ignore):
    try:
        import sklearn
    except __HOLE__:
        pass
    else:
        tests.addTests(doctest.DocTestSuite())
    return tests
ImportError
dataset/ETHPy150Open Twangist/log_calls/log_calls/tests/test_with_sklearn/test_decorate_sklearn_KMeans.py/load_tests
1,167
def call(*args, **kwargs):
    if logging.getLogger().getEffectiveLevel() == logging.DEBUG:
        stderr = PIPE
    else:
        stderr = DEVNULL
    kwargs.setdefault('stderr', stderr)
    kwargs.setdefault('stdout', PIPE)
    kwargs.setdefault('stdin', PIPE)
    kwargs.setdefault('shell', False)
    kwargs_input = kwargs.pop('input', None)

    with Proc(*args, **kwargs) as proc:
        output, error = proc.communicate(input=kwargs_input)
        try:
            output = output.decode('utf-8')
            error = error.decode('utf-8')
        except __HOLE__:
            pass
    return output, error
AttributeError
dataset/ETHPy150Open marcwebbie/passpie/passpie/process.py/call
1,168
def getcaps():
    """Return a dictionary containing the mailcap database.

    The dictionary maps a MIME type (in all lowercase, e.g. 'text/plain')
    to a list of dictionaries corresponding to mailcap entries.  The list
    collects all the entries for that MIME type from all available mailcap
    files.  Each dictionary contains key-value pairs for that MIME type,
    where the viewing command is stored with the key "view".
    """
    caps = {}
    for mailcap in listmailcapfiles():
        try:
            fp = open(mailcap, 'r')
        except __HOLE__:
            continue
        morecaps = readmailcapfile(fp)
        fp.close()
        for key, value in morecaps.iteritems():
            if not key in caps:
                caps[key] = value
            else:
                caps[key] = caps[key] + value
    return caps
IOError
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/mailcap.py/getcaps
1,169
def _get_llas(self, gw_ifname, ns_name):
    try:
        return self.intf_driver.get_ipv6_llas(gw_ifname, ns_name)
    except __HOLE__:
        # The error message was printed as part of the driver call
        # This could happen if the gw_ifname was removed
        # simply return and exit the thread
        return
RuntimeError
dataset/ETHPy150Open openstack/neutron/neutron/agent/linux/pd.py/PrefixDelegation._get_llas
1,170
def _delete_lla(self, router, lla_with_mask):
    if lla_with_mask and router['gw_interface']:
        try:
            self.intf_driver.delete_ipv6_addr(router['gw_interface'],
                                              lla_with_mask,
                                              router['ns_name'])
        except __HOLE__:
            # Ignore error if the lla doesn't exist
            pass
RuntimeError
dataset/ETHPy150Open openstack/neutron/neutron/agent/linux/pd.py/PrefixDelegation._delete_lla
1,171
def make_changed_file(path, env):
    """
    Given the path to a template file, write a new file with:

    * The same filename, except without '.template' at the end.
    * A placeholder in the new file changed to the latest installable
      version of Flocker.

    This new file will be deleted on build completion.

    :param unicode path: The path to a template file.
    :param sphinx.environment.BuildEnvironment env: The Sphinx build
        environment
    """
    def remove_file(path):
        try:
            os.remove(path)
        except __HOLE__:
            pass

    latest = get_installable_version(version)
    new_path = remove_extension(path)
    with open(path, 'r') as templated_file:
        with open(new_path, 'w') as new_file:
            new_file.write(templated_file.read().replace(PLACEHOLDER, latest))
    env.app.connect('build-finished',
                    lambda self, *args: remove_file(new_path))
OSError
dataset/ETHPy150Open ClusterHQ/flocker/flocker/docs/version_extensions.py/make_changed_file
1,172
def string_to_country(self, value):
    try:
        return CountryContainer(value)
    except __HOLE__:
        raise ValidationError(
            self.error_messages['invalid_country'],
            code='invalid_country',
            params={'name': value}
        )
KeyError
dataset/ETHPy150Open django-bmf/django-bmf/djangobmf/fields/country.py/CountryField.string_to_country
1,173
def Run(self, max_age=60 * 60 * 24 * 7):
    """Run the report."""
    counts = {}
    self.fields.append("count")
    self.results = []
    for client in self._QueryResults(max_age):
        version = client.get("GRR client")
        try:
            counts[version] += 1
        except __HOLE__:
            counts[version] = 1

    for version, count in counts.iteritems():
        self.results.append({"GRR client": version, "count": count})
    self.SortResults("count")
KeyError
dataset/ETHPy150Open google/grr/grr/lib/aff4_objects/reports.py/VersionBreakdownReport.Run
1,174
def IterFunction(self, client, out_queue, unused_token):
    """Extract report attributes."""
    result = {}
    for attr in self.report_attrs:
        # Do some special formatting for certain fields.
        if attr.name == "subject":
            result[attr.name] = client.Get(attr).Basename()
        elif attr.name == "GRR client":
            c_info = client.Get(attr)
            if not c_info:
                self.broken_subjects.append(client.client_id)
                result[attr.name] = None
                continue
            result[attr.name] = "%s %s" % (c_info.client_name,
                                           str(c_info.client_version))
        else:
            result[attr.name] = client.Get(attr)

    for sub_path, attr in self.extended_report_attrs:
        try:
            client_sub = client.OpenMember(sub_path)
            # TODO(user): Update this to use MultiOpen.
        except IOError:
            # If the path is not found, just continue.
            continue
        # Special case formatting for some attributes.
        if attr.name == "Interfaces":
            interfaces = client_sub.Get(attr)
            if interfaces:
                try:
                    result[attr.name] = ",".join(interfaces.GetIPAddresses())
                except __HOLE__:
                    result[attr.name] = ""
        else:
            result[attr.name] = client_sub.Get(attr)

    out_queue.put(result)
AttributeError
dataset/ETHPy150Open google/grr/grr/lib/aff4_objects/reports.py/ClientReportIterator.IterFunction
1,175
def get_user_from_uid(uid):
    if uid is None:
        raise ValidationError(_("uid is required!"))
    try:
        uid = urlsafe_base64_decode(uid)
        user = User.objects.get(pk=uid)
    except (TypeError, __HOLE__, OverflowError, User.DoesNotExist):
        raise ValidationError(_(u"Invalid uid %s") % uid)
    return user
ValueError
dataset/ETHPy150Open kobotoolbox/kobocat/onadata/libs/serializers/password_reset_serializer.py/get_user_from_uid
1,176
def __getstate__(self):
    d = self.__dict__.copy()
    for attr in ('reader', 'writer'):
        method = getattr(self, attr)
        try:
            # if instance method, pickle instance and method name
            d[attr] = method.__self__, method.__func__.__name__
        except __HOLE__:
            pass  # not an instance method
    return d
AttributeError
dataset/ETHPy150Open spotify/luigi/luigi/contrib/hdfs/format.py/CompatibleHdfsFormat.__getstate__
1,177
def __setstate__(self, d):
    self.__dict__ = d
    for attr in ('reader', 'writer'):
        try:
            method_self, method_name = d[attr]
        except __HOLE__:
            continue
        method = getattr(method_self, method_name)
        setattr(self, attr, method)
ValueError
dataset/ETHPy150Open spotify/luigi/luigi/contrib/hdfs/format.py/CompatibleHdfsFormat.__setstate__
1,178
def create_directory(path):
    if not os.path.isdir(path):
        if os.path.exists(path):
            raise RuntimeError('{0} is not a directory.'.format(path))
        try:
            os.makedirs(path, mode=0o750)
        except __HOLE__ as error:
            logger.fatal('failed to create {0}: {1}'.format(path, error))
            raise CouldNotCreateDbDir()
OSError
dataset/ETHPy150Open pimutils/khal/khal/khalendar/khalendar.py/create_directory
1,179
def splitport(port):
    port = port.split('/', 1)
    proto = None
    if len(port) == 2:
        port, proto = port
    else:
        port = port[0]
    try:
        port = int(port)
    except __HOLE__:
        proto = port
        port = None
    return port, proto
ValueError
dataset/ETHPy150Open bonsaiviking/NfSpy/nfspy/nfspy.py/splitport
1,180
def _gethandle(self, path):
    fh = None
    fattr = None
    try:
        if path == "" or path == "/" or path == "/.." or path == "/.":
            fh = self.rootdh
            fattr = self.rootattr
        else:
            fh, fattr, cachetime = self.handles[path]
            # check that it isn't stale
            self.ncl.fuid = fattr[3]
            self.ncl.fgid = fattr[4]
            #Commented to save a call. May cause problems?
            #fattr = self.ncl.Getattr(dh)
            #self.handles[path][1] = fattr
    except (__HOLE__, NFSError) as e:
        if isinstance(e, KeyError) or e.errno() == NFSError.NFS3ERR_STALE:
            if isinstance(e, NFSError):
                del self.handles[path]
            tmppath, elem = path.rsplit("/", 1)
            dh, fattr = self.gethandle(tmppath)
            self.ncl.fuid = fattr[3]
            self.ncl.fgid = fattr[4]
            fh, fattr, dattr = self.ncl.Lookup((dh, elem))
            self.ncl.fuid = fattr[3]
            self.ncl.fgid = fattr[4]
            self.handles[path] = (fh, fattr, time())
            self.handles[tmppath] = (dh, dattr, time())
        else:
            raise
    return (fh, fattr)
KeyError
dataset/ETHPy150Open bonsaiviking/NfSpy/nfspy/nfspy.py/NfSpy._gethandle
1,181
def start(host, port, profile_stats, dont_start_browser):
    """Starts HTTP server with specified parameters.

    Args:
        host: Server hostname.
        port: Server port.
        profile_stats: Dict with collected program stats.
        dont_start_browser: Whether to start browser after profiling.
    """
    stats_handler = functools.partial(StatsHandler, profile_stats)
    if not dont_start_browser:
        webbrowser.open('http://{}:{}/'.format(host, port))
    try:
        StatsServer((host, port), stats_handler).serve_forever()
    except __HOLE__:
        print('Stopping...')
        sys.exit(0)
KeyboardInterrupt
dataset/ETHPy150Open nvdv/vprof/vprof/stats_server.py/start
1,182
def get_response(self, request):
    """
    Overrides the base implementation with object-oriented hooks.
    Adapted from django.core.handlers.base.BaseHandler.
    """
    resolver = self.get_resolver(request)
    try:
        return self.process_response(
            request, resolver,
            self.generate_response(request, resolver))
    except __HOLE__:
        six.reraise(*sys.exc_info())
    except:
        return self.handle_uncaught_exception(
            request, resolver, sys.exc_info())
SystemExit
dataset/ETHPy150Open skibblenybbles/django-daydreamer/daydreamer/core/handlers/base.py/Handler.get_response
1,183
def get_glance_client(context, image_href):
    """Get the correct glance client and id for the given image_href.

    The image_href param can be an href of the form
    http://myglanceserver:9292/images/42, or just an int such as 42. If the
    image_href is an int, then flags are used to create the default
    glance client.

    :param image_href: image ref/id for an image
    :returns: a tuple of the form (glance_client, image_id)
    """
    image_href = image_href or 0
    if str(image_href).isdigit():
        glance_host, glance_port = pick_glance_api_server()
        glance_client = _create_glance_client(context, glance_host,
                                              glance_port)
        return (glance_client, int(image_href))

    try:
        (image_id, host, port) = _parse_image_ref(image_href)
    except __HOLE__:
        raise exception.InvalidImageRef(image_href=image_href)
    glance_client = _create_glance_client(context, glance_host, glance_port)
    return (glance_client, image_id)
ValueError
dataset/ETHPy150Open nii-cloud/dodai-compute/nova/image/glance.py/get_glance_client
1,184
def _fetch_images(self, fetch_func, **kwargs):
    """Paginate through results from glance server"""
    images = fetch_func(**kwargs)

    if not images:
        # break out of recursive loop to end pagination
        return

    for image in images:
        yield image

    try:
        # attempt to advance the marker in order to fetch next page
        kwargs['marker'] = images[-1]['id']
    except __HOLE__:
        raise exception.ImagePaginationFailed()

    try:
        kwargs['limit'] = kwargs['limit'] - len(images)
        # break if we have reached a provided limit
        if kwargs['limit'] <= 0:
            return
    except KeyError:
        # ignore missing limit, just proceed without it
        pass

    for image in self._fetch_images(fetch_func, **kwargs):
        yield image
KeyError
dataset/ETHPy150Open nii-cloud/dodai-compute/nova/image/glance.py/GlanceImageService._fetch_images
1,185
def _parse_glance_iso8601_timestamp(timestamp):
    """Parse a subset of iso8601 timestamps into datetime objects."""
    iso_formats = ['%Y-%m-%dT%H:%M:%S.%f', '%Y-%m-%dT%H:%M:%S']
    for iso_format in iso_formats:
        try:
            return datetime.datetime.strptime(timestamp, iso_format)
        except __HOLE__:
            pass

    raise ValueError(_('%(timestamp)s does not follow any of the '
                       'signatures: %(ISO_FORMATS)s') % locals())


# TODO(yamahata): use block-device-mapping extension to glance
ValueError
dataset/ETHPy150Open nii-cloud/dodai-compute/nova/image/glance.py/_parse_glance_iso8601_timestamp
1,186
def regenerate_memmap(self):
    """Delete internal memmap and create a new one, to save memory."""
    try:
        del self._mm
    except __HOLE__:
        pass

    self._mm = np.memmap(
        self.filename, dtype='h', mode='r', offset=self.header.Header,
        shape=(self.header.n_samples, self.header.Channel_Count))
AttributeError
dataset/ETHPy150Open NeuralEnsemble/python-neo/neo/io/blackrockio_deprecated.py/Loader.regenerate_memmap
1,187
def _get_channel(self, channel_number):
    """Returns slice into internal memmap for requested channel"""
    try:
        mm_index = self.header.Channel_ID.index(channel_number)
    except __HOLE__:
        logging.info(
            "Channel number %d does not exist" % channel_number)
        return np.array([])

    self.regenerate_memmap()
    return self._mm[:, mm_index]
ValueError
dataset/ETHPy150Open NeuralEnsemble/python-neo/neo/io/blackrockio_deprecated.py/Loader._get_channel
1,188
def get_date_status(value):
    """
    For date values that are tomorrow, today or yesterday compared to
    present day returns representing string. Otherwise, returns an empty
    string or overdue string if date value is late.
    """
    try:
        value = date(value.year, value.month, value.day)
    except __HOLE__:
        # Passed value wasn't a date object
        ret = ''
    except ValueError:
        # Date arguments out of range
        ret = ''
    delta = value - date.today()
    if delta.days == 0:
        ret = 'today'
    elif delta.days == 1:
        ret = 'tomorrow'
    elif delta.days < 0:
        ret = 'overdue'
    else:
        ret = ''
    return ret
AttributeError
dataset/ETHPy150Open zorna/zorna/zorna/calendars/api.py/get_date_status
1,189
def add(self, num1, num2):
    """
    :param num1: list of digits in reverse order
    :param num2: list of digits in reverse order
    :return: list of digits in reverse order
    """
    num1 = list(num1)  # NOTICE: local copy
    num2 = list(num2)  # NOTICE: local copy

    if len(num1) < len(num2):
        num1, num2 = num2, num1

    carry = 0
    for ind in range(len(num1)):  # longer one
        try:
            result = num1[ind]+num2[ind]+carry
        except __HOLE__:
            result = num1[ind]+carry
            if result == num1[ind]:
                break  # prune

        carry = result/10
        num1[ind] = result%10

    if carry != 0:
        num1.append(carry)

    return num1
IndexError
dataset/ETHPy150Open algorhythms/LeetCode/042 Multiply Strings.py/Solution.add
1,190
def ordinal(value):
    """
    Converts an integer to its ordinal as a string. 1 is '1st', 2 is '2nd',
    3 is '3rd', etc. Works for any integer.
    """
    try:
        value = int(value)
    except __HOLE__:
        return value
    t = (_('th'), _('st'), _('nd'), _('rd'), _('th'), _('th'), _('th'),
         _('th'), _('th'), _('th'))
    if value % 100 in (11, 12, 13):  # special case
        return u"%d%s" % (value, t[0])
    return u'%d%s' % (value, t[value % 10])
ValueError
dataset/ETHPy150Open CollabQ/CollabQ/vendor/django/contrib/humanize/templatetags/humanize.py/ordinal
1,191
def apnumber(value):
    """
    For numbers 1-9, returns the number spelled out. Otherwise, returns the
    number. This follows Associated Press style.
    """
    try:
        value = int(value)
    except __HOLE__:
        return value
    if not 0 < value < 10:
        return value
    return (_('one'), _('two'), _('three'), _('four'), _('five'), _('six'),
            _('seven'), _('eight'), _('nine'))[value-1]
ValueError
dataset/ETHPy150Open CollabQ/CollabQ/vendor/django/contrib/humanize/templatetags/humanize.py/apnumber
1,192
def naturalday(value, arg=None):
    """
    For date values that are tomorrow, today or yesterday compared to
    present day returns representing string. Otherwise, returns a string
    formatted according to settings.DATE_FORMAT.
    """
    try:
        value = date(value.year, value.month, value.day)
    except __HOLE__:
        # Passed value wasn't a date object
        return value
    except ValueError:
        # Date arguments out of range
        return value
    delta = value - date.today()
    if delta.days == 0:
        return _(u'today')
    elif delta.days == 1:
        return _(u'tomorrow')
    elif delta.days == -1:
        return _(u'yesterday')
    return defaultfilters.date(value, arg)
AttributeError
dataset/ETHPy150Open CollabQ/CollabQ/vendor/django/contrib/humanize/templatetags/humanize.py/naturalday
1,193
def GetFacterFacts():
    """Return facter contents.

    Returns:
        dict, facter contents
    """
    return_code, stdout, unused_stderr = Exec(
        FACTER_CMD, timeout=300, waitfor=0.5)

    if return_code != 0:
        return {}

    # Iterate over the facter output and create a dictionary of the contents.
    facts = {}
    for line in stdout.splitlines():
        try:
            key, unused_sep, value = line.split(' ', 2)
            value = value.strip()
            facts[key] = value
        except __HOLE__:
            logging.warning('Ignoring invalid facter output line: %s', line)

    return facts
ValueError
dataset/ETHPy150Open google/simian/src/simian/mac/client/flight_common.py/GetFacterFacts
1,194
def GetSystemUptime():
    """Returns the system uptime.

    Returns:
        float seconds of uptime

    Raises:
        Error: if an error occurs in calculating uptime
    """
    libc = ctypes.cdll.LoadLibrary(ctypes.util.find_library('c'))

    # 2 integers returned, might be 4 or 8 bytes each
    l = ctypes.c_int(16)
    buf = ctypes.create_string_buffer(16)

    try:
        r = libc.sysctlbyname(
            ctypes.c_char_p('kern.boottime'), buf, ctypes.byref(l), None, None)
    except __HOLE__:
        raise Error('Cannot find sysctlbyname()')

    if r == 0 and l.value in [8, 16]:
        if l.value == 8:
            # <=10.5
            fmt = 'II'
        else:
            # >=10.6
            fmt = 'QQ'
        (since_s, since_ms) = struct.unpack(fmt, ctypes.string_at(buf, l))
        uptime = time.time() - float('%s.%s' % (since_s, since_ms))
    else:
        raise Error('sysctlbyname() returned %d, oldlenp=%d' % (r, l.value))

    return uptime
AttributeError
dataset/ETHPy150Open google/simian/src/simian/mac/client/flight_common.py/GetSystemUptime
1,195
def GetDiskFree(path=None):
    """Return the bytes of free space.

    Args:
        path: str, optional, default '/'

    Returns:
        int, bytes in free space available
    """
    if path is None:
        path = '/'
    try:
        st = os.statvfs(path)
    except __HOLE__ as e:
        raise Error(str(e))

    return st.f_frsize * st.f_bavail  # f_bavail matches df(1) output
OSError
dataset/ETHPy150Open google/simian/src/simian/mac/client/flight_common.py/GetDiskFree
1,196
def GetClientIdentifier(runtype=None):
    """Assembles the client identifier based on information collected by facter.

    Args:
        runtype: str, optional, Munki runtype. i.e. auto, custom, manual, etc.

    Returns:
        dict client identifier.
    """
    facts = GetFacterFacts()

    uuid = (facts.get('certname', None) or
            facts.get('uuid', None) or
            _GetMachineInfoPlistValue('MachineUUID') or
            _GetHardwareUUID())
    uuid = uuid.lower()  # normalize uuid to lowercase.

    owner = (facts.get('primary_user', None) or
             _GetMachineInfoPlistValue('Owner') or
             _GetPrimaryUser() or
             _GetConsoleUser())

    hostname = (facts.get('sp_local_host_name', None) or
                facts.get('hostname', None) or
                _GetHostname())

    config_track = facts.get('configtrack', 'BROKEN')

    simian_track = (facts.get('simiantrack', None))

    # Apple SUS integration.
    applesus = facts.get('applesus', 'true').lower() == 'true'

    site = facts.get('site', None)
    office = facts.get('location', None)

    os_version = platform.mac_ver()[0]  # tuple like: ('10.6.3', (...), 'i386')

    serial = facts.get('hardware_serialnumber', None) or _GetSerialNumber()

    # client_management_enabled facter support; defaults to enabled.
    mgmt_enabled = facts.get(
        'client_management_enabled', 'true').lower() == 'true'

    # Determine if the computer is on the corp network or not.
    on_corp = None
    on_corp_cmd = ''
    on_corp_cmd_config = '/etc/simian/on_corp_cmd'
    if not on_corp_cmd and os.path.isfile(on_corp_cmd_config):
        try:
            f = open(on_corp_cmd_config, 'r')
            on_corp_cmd = f.read()
            on_corp_cmd = on_corp_cmd.strip()
            f.close()
        except __HOLE__ as e:
            logging.exception(
                'Error reading %s: %s', on_corp_cmd_config, str(e))
    if on_corp_cmd:
        try:
            on_corp, unused_stdout, unused_stderr = Exec(
                on_corp_cmd, timeout=60, waitfor=0.5)
            # exit=0 means on corp, so reverse.
            on_corp = '%d' % (not on_corp)
        except OSError as e:
            # in this case, we don't know if on corp or not so don't log either.
            logging.exception('OSError calling on_corp_cmd: %s', str(e))
            on_corp = None

    # LastNotifiedDate comes as local time from FoundationPlist,
    # so convert to epoch timestamp then to UTC datetime.
    last_notified_datetime_str = GetPlistDateValue(
        'LastNotifiedDate', str_format=DATETIME_STR_FORMAT)

    # get uptime
    try:
        uptime = GetSystemUptime()
    except Error as e:
        uptime = 'ERROR: %s' % str(e)

    # get free space
    try:
        root_disk_free = GetDiskFree()
    except Error as e:
        root_disk_free = 'ERROR: %s' % str(e)

    # get user disk free
    user_disk_free = None
    if owner:
        try:
            # TODO(user): this may not be FileVault compatible, at least before
            # the user is logged in; investigate.
            user_dir_path = '/Users/%s/' % owner
            if os.path.isdir(user_dir_path):
                user_disk_free = GetDiskFree(user_dir_path)
        except Error as e:
            user_disk_free = 'ERROR: %s' % str(e)

    client_id = {
        'uuid': uuid,
        'owner': owner,
        'hostname': hostname,
        'serial': serial,
        'config_track': config_track,
        'track': simian_track,
        'applesus': applesus,
        'mgmt_enabled': mgmt_enabled,
        'site': site,
        'office': office,
        'os_version': os_version,
        'client_version': GetClientVersion(),
        'on_corp': on_corp,
        'last_notified_datetime': last_notified_datetime_str,
        'runtype': runtype,
        'uptime': uptime,
        'root_disk_free': root_disk_free,
        'user_disk_free': user_disk_free,
    }
    return client_id
IOError
dataset/ETHPy150Open google/simian/src/simian/mac/client/flight_common.py/GetClientIdentifier
1,197
def UploadAllManagedInstallReports(client, on_corp):
    """Uploads any installs, updates, uninstalls back to Simian server.

    Args:
        client: A SimianAuthClient.
        on_corp: str, on_corp status from GetClientIdentifier.
    """
    # Report installs from the ManagedInstallsReport archives.
    archives_dir = os.path.join(
        munkicommon.pref('ManagedInstallDir'), 'Archives')
    if os.path.isdir(archives_dir):
        for fname in os.listdir(archives_dir):
            if not fname.startswith('ManagedInstallReport-'):
                continue
            install_report_path = os.path.join(archives_dir, fname)
            if not os.path.isfile(install_report_path):
                continue
            install_report, _ = GetManagedInstallReport(
                install_report_path=install_report_path)
            try:
                _UploadManagedInstallReport(client, on_corp, install_report)
                try:
                    os.unlink(install_report_path)
                except (__HOLE__, OSError):
                    logging.warning(
                        'Failed to delete ManagedInstallsReport.plist: %s',
                        install_report_path)
            except ServerRequestError:
                logging.exception('Error uploading ManagedInstallReport installs.')

    # Report installs from the current ManagedInstallsReport.plist.
    install_report, install_report_path = GetManagedInstallReport()
    try:
        _UploadManagedInstallReport(client, on_corp, install_report)
        # Clear reportable information now that it has been published.
        install_report['InstallResults'] = []
        install_report['RemovalResults'] = []
        install_report['ProblemInstalls'] = []
        fpl.writePlist(install_report, install_report_path)
    except ServerRequestError:
        logging.exception('Error uploading ManagedInstallReport installs.')
IOError
dataset/ETHPy150Open google/simian/src/simian/mac/client/flight_common.py/UploadAllManagedInstallReports
1,198
def KillHungManagedSoftwareUpdate():
    """Kill hung managedsoftwareupdate instances, if any can be found.

    Returns:
        True if a managedsoftwareupdate instance was killed, False otherwise.
    """
    rc, stdout, stderr = Exec(['/bin/ps', '-eo', 'pid,ppid,lstart,command'])
    if rc != 0 or not stdout or stderr:
        return False

    pids = {}
    msu_pids = []

    for l in stdout.splitlines():
        a = l.split()
        if len(a) < 8:
            continue
        try:
            pids[int(a[0])] = {
                'ppid': int(a[1]),
                'lstart': datetime.datetime(*time.strptime(' '.join(a[2:7]))[0:7]),
                'command': '\t'.join(a[7:]),
            }
        except ValueError:
            continue
        if re.search(r'(MacOS\/Python|python)', a[7], re.IGNORECASE):
            if len(a) > 8 and a[8].find('managedsoftwareupdate') > -1:
                msu_pids.append(int(a[0]))

    now = datetime.datetime.now()
    kill = []

    for pid in msu_pids:
        if (now - pids[pid]['lstart']) >= HUNG_MSU_TIMEOUT:
            for opid in pids:
                if pids[opid]['ppid'] == pid:
                    kill.append(opid)  # child
            kill.append(pid)  # parent last

    for pid in kill:
        if pid == 1:  # sanity check
            continue
        try:
            logging.warning('Sending SIGKILL to pid %d', pid)
            os.kill(pid, signal.SIGKILL)
        except __HOLE__ as e:
            # if the process died between ps and now we're OK, otherwise log error.
            if e.args[0] != errno.ESRCH:
                logging.warning('OSError on kill(%d, SIGKILL): %s', pid, str(e))

    return bool(len(kill))
OSError
dataset/ETHPy150Open google/simian/src/simian/mac/client/flight_common.py/KillHungManagedSoftwareUpdate
1,199
def main():
    'Manage the OSM DB during development.'

    from optparse import OptionParser

    parser = OptionParser(usage=usage, prog=toolname,
                          version='%prog ' + toolversion)
    parser.add_option('-b', '--buffering', dest='buffering', metavar="NUMBER",
                      default=64, type="int",
                      help="Buffer size in KB for *zip uncompression " +
                      "[%default]")
    parser.add_option('-B', '--backend', dest='backend', metavar='DBTYPE',
                      default=None,
                      help="Type of backend to use [from configuration file]"),
    parser.add_option('-C', '--config', dest='config', metavar="FILENAME",
                      default=toolconfig,
                      help="Path to configuration information [%default]")
    parser.add_option('-E', '--encoding', dest='datastore_encoding',
                      metavar='ENCODING', default=None, type="str",
                      help="Encoding for use for values [%default]"),
    parser.add_option('-I', '--init', dest='doinit', action='store_true',
                      default=False,
                      help='(Re-)initialize the backend'),
    parser.add_option('-n', '--dryrun', dest='dryrun', metavar="BOOLEAN",
                      default=False, action="store_true",
                      help="Parse, but do not upload data [%default]")
    parser.add_option('-T', '--nothreading', dest='nothreading',
                      metavar="BOOLEAN", default=False, action="store_true",
                      help="Do not use threads [%default]"),
    parser.add_option('-v', '--verbose', dest='verbose', metavar="BOOLEAN",
                      default=False, action="store_true",
                      help="Be verbose")
    parser.add_option("-x", '--nochangesets', dest="nochangesets",
                      action="store_true", default=False,
                      help="Skip retrieval of changeset information "
                      "[%default]")

    options, args = parser.parse_args()

    # Read configuration information.
    configfiles = [options.config,
                   os.path.join(sys.path[0], devconfigdir, devconfigfilename)]

    from ConfigParser import ConfigParser
    cfg = ConfigParser()
    cfg.read(configfiles)

    # Sanity check.
    if not cfg.has_section(C.FRONT_END):
        parser.error("Incomplete configuration, tried:\n\t" +
                     "\n\t".join(configfiles))

    # Override configuration options specified on the command line.
    if options.datastore_encoding:
        cfg.set(C.DATASTORE, C.DATASTORE_ENCODING, options.datastore_encoding)
    if options.backend:
        cfg.set(C.DATASTORE, C.DATASTORE_BACKEND, options.backend)

    # Initialize statistics.
    init_statistics(cfg, options)

    # Load in the desired interface to the datastore.
    backend = cfg.get(C.DATASTORE, C.DATASTORE_BACKEND)
    try:
        module = __import__('datastore.ds_' + backend, fromlist=['Datastore'])
    except __HOLE__, x:
        parser.exit("Error: Could not initialize backend of type \"%s\": %s" %
                    (backend, str(x)))

    db = module.Datastore(cfg, not options.nothreading, True)
    if options.doinit:
        db.initialize()

    ops = DBOps(cfg, options, db)

    # Initialize the geohash module.
    init_geohash(cfg.getint(C.DATASTORE, C.GEOHASH_LENGTH),
                 cfg.getint(C.DATASTORE, C.SCALE_FACTOR))

    # Initialize the OSM element factory and related modules.
    init_slabutil(cfg)
    init_osm_factory(cfg)

    # Turn file names into iterators that deliver an element at a time.
    try:
        iterlist = map(lambda fn: makesource(cfg, options, fn), args)
        inputelements = itertools.chain(*iterlist)
    except Exception, x:
        parser.exit("Error: " + str(x))

    for elem in inputelements:
        # Add basic elements
        if elem.namespace in [C.CHANGESET, C.NODE, C.RELATION, C.WAY]:
            ops.add_element(elem)
        else:
            raise NotImplementedError, "Element type: %s" % elem.kind

    ops.finish()

    fini_statistics(options)
ImportError
dataset/ETHPy150Open MapQuest/mapquest-osm-server/src/python/dbmgr/__main__.py/main