Columns (per the dataset viewer's summary):

    Unnamed: 0   int64                row index, values 0 - 10k
    function     string               Python function source with the target
                                      exception type masked as __HOLE__;
                                      lengths 79 - 138k characters
    label        string (20 classes)  the masked exception class name
    info         string               path of the sample within the ETHPy150Open
                                      corpus; lengths 42 - 261 characters
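Each record below pairs a Python function, with its masked exception type shown as __HOLE__, against the exception class (label) that fills the hole. A minimal sketch of how such a table could be loaded and inspected with pandas — the file name masked_exceptions.csv is a hypothetical placeholder, not the dataset's actual location:

import pandas as pd

# Hypothetical export of this table; substitute the real file name.
df = pd.read_csv("masked_exceptions.csv")

# Each 'function' cell appears to contain the __HOLE__ mask token, and
# 'label' names the exception class that belongs in it.
for _, row in df.head(3).iterrows():
    assert "__HOLE__" in row["function"]
    print(row["label"], "<-", row["info"])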
500
def format_uptime(start_time):
    try:
        delta = datetime.now() - start_time
        days = delta.days
        hours = delta.seconds / 3600
        minutes = (delta.seconds % 3600) / 60
        seconds = (delta.seconds % 3600) % 60
        return "%dd %dh %dm %ds" % (days, hours, minutes, seconds)
    except __HOLE__:
        return 'n/a'
TypeError
dataset/ETHPy150Open onefinestay/gonzo/gonzo/scripts/utils.py/format_uptime
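Filling the hole with the label restores the original source. For row 500 above, a sketch (reusing the hypothetical df from the previous snippet):

row = df[df["Unnamed: 0"] == 500].iloc[0]
restored = row["function"].replace("__HOLE__", row["label"])
# 'except __HOLE__:' becomes 'except TypeError:' for this row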
501
def _wait_file_notifier(self, filepath):
    while True:
        try:
            open(filepath)
        except __HOLE__:
            time.sleep(0.1)
        else:
            break
    #time.sleep(1)
IOError
dataset/ETHPy150Open weblabdeusto/weblabdeusto/server/src/weblab/admin/bot/wl_process.py/WebLabProcess._wait_file_notifier
502
def get_url(self):
    """ Return a generated url from ``rule`` attribute.

    Returns
    -------
    str
        Generated url
    """
    try:
        rule = self.rule
    except __HOLE__:
        raise NotImplementedError('``rule`` attr must be defined.')
    return url_for(rule)
AttributeError
dataset/ETHPy150Open thisissoon/Flask-Velox/flask_velox/mixins/http.py/RedirectMixin.get_url
503
def get_libclang_headers():
    try:
        paths = _ask_clang()
    except __HOLE__:
        paths = _guess_paths()
    return ['-I%s' % path for path in paths]
OSError
dataset/ETHPy150Open punchagan/cinspect/cinspect/clang_utils.py/get_libclang_headers
504
def read():
    try:
        return sys.stdin.buffer.read()
    except __HOLE__:
        return sys.stdin.read()
AttributeError
dataset/ETHPy150Open freedoom/freedoom/bootstrap/bootstrap.py/read
505
def write(out):
    try:
        sys.stdout.buffer.write(out)
    except __HOLE__:
        sys.stdout.write(out)
AttributeError
dataset/ETHPy150Open freedoom/freedoom/bootstrap/bootstrap.py/write
506
def cached_wheel(cache_dir, link, format_control, package_name):
    if not cache_dir:
        return link
    if not link:
        return link
    if link.is_wheel:
        return link
    if not link.is_artifact:
        return link
    if not package_name:
        return link
    canonical_name = pkg_resources.safe_name(package_name).lower()
    formats = pip.index.fmt_ctl_formats(format_control, canonical_name)
    if "binary" not in formats:
        return link
    root = _cache_for_link(cache_dir, link)
    try:
        wheel_names = os.listdir(root)
    except __HOLE__ as e:
        if e.errno == errno.ENOENT:
            return link
        raise
    candidates = []
    for wheel_name in wheel_names:
        try:
            wheel = Wheel(wheel_name)
        except InvalidWheelFilename:
            continue
        if not wheel.supported():
            # Built for a different python/arch/etc
            continue
        candidates.append((wheel.support_index_min(), wheel_name))
    if not candidates:
        return link
    candidates.sort()
    path = os.path.join(root, candidates[0][1])
    return pip.index.Link(path_to_url(path), trusted=True)
OSError
dataset/ETHPy150Open francelabs/datafari/windows/python/Lib/site-packages/pip/wheel.py/cached_wheel
507
def render(self, context):
    if 'forloop' in context:
        parentloop = context['forloop']
    else:
        parentloop = {}
    context.push()
    try:
        values = self.sequence.resolve(context, True)
    except VariableDoesNotExist:
        values = []
    if values is None:
        values = []
    if not hasattr(values, '__len__'):
        values = list(values)
    len_values = len(values)
    if len_values < 1:
        context.pop()
        return self.nodelist_empty.render(context)
    nodelist = NodeList()
    if self.is_reversed:
        values = reversed(values)
    unpack = len(self.loopvars) > 1
    # Create a forloop value in the context. We'll update counters on each
    # iteration just below.
    loop_dict = context['forloop'] = {'parentloop': parentloop}
    for i, item in enumerate(values):
        # Shortcuts for current loop iteration number.
        loop_dict['counter0'] = i
        loop_dict['counter'] = i+1
        # Reverse counter iteration numbers.
        loop_dict['revcounter'] = len_values - i
        loop_dict['revcounter0'] = len_values - i - 1
        # Boolean values designating first and last times through loop.
        loop_dict['first'] = (i == 0)
        loop_dict['last'] = (i == len_values - 1)
        pop_context = False
        if unpack:
            # If there are multiple loop variables, unpack the item into
            # them.
            try:
                unpacked_vars = dict(zip(self.loopvars, item))
            except __HOLE__:
                pass
            else:
                pop_context = True
                context.update(unpacked_vars)
        else:
            context[self.loopvars[0]] = item
        # In TEMPLATE_DEBUG mode provide source of the node which
        # actually raised the exception
        if settings.TEMPLATE_DEBUG:
            for node in self.nodelist_loop:
                try:
                    nodelist.append(node.render(context))
                except Exception as e:
                    if not hasattr(e, 'django_template_source'):
                        e.django_template_source = node.source
                    raise
        else:
            for node in self.nodelist_loop:
                nodelist.append(node.render(context))
        if pop_context:
            # The loop variables were pushed on to the context so pop them
            # off again. This is necessary because the tag lets the length
            # of loopvars differ to the length of each set of items and we
            # don't want to leave any vars from the previous loop on the
            # context.
            context.pop()
    context.pop()
    return nodelist.render(context)
TypeError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.5/django/template/defaulttags.py/ForNode.render
508
def render(self, context):
    filepath = self.filepath.resolve(context)
    if not include_is_allowed(filepath):
        if settings.DEBUG:
            return "[Didn't have permission to include file]"
        else:
            return ''  # Fail silently for invalid includes.
    try:
        with open(filepath, 'r') as fp:
            output = fp.read()
    except __HOLE__:
        output = ''
    if self.parsed:
        try:
            t = Template(output, name=filepath)
            return t.render(context)
        except TemplateSyntaxError as e:
            if settings.DEBUG:
                return "[Included template had syntax error: %s]" % e
            else:
                return ''  # Fail silently for invalid included templates.
    return output
IOError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.5/django/template/defaulttags.py/SsiNode.render
509
def render(self, context):
    try:
        value = self.val_expr.resolve(context)
        max_value = self.max_expr.resolve(context)
        max_width = int(self.max_width.resolve(context))
    except VariableDoesNotExist:
        return ''
    except (ValueError, TypeError):
        raise TemplateSyntaxError("widthratio final argument must be an number")
    try:
        value = float(value)
        max_value = float(max_value)
        ratio = (value / max_value) * max_width
    except ZeroDivisionError:
        return '0'
    except (ValueError, __HOLE__):
        return ''
    return str(int(round(ratio)))
TypeError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.5/django/template/defaulttags.py/WidthRatioNode.render
510
def refresh(self):
    """
    Refresh context with new declarations from known registries.
    Useful for third-party extensions.
    """
    # Populate built-in registry
    from . import (arraymath, enumimpl, iterators, linalg, numbers,
                   optional, rangeobj, slicing, smartarray, tupleobj)
    try:
        from . import npdatetime
    except __HOLE__:
        pass
    self.install_registry(builtin_registry)
    self.load_additional_registries()
    # Also refresh typing context, since @overload declarations can
    # affect it.
    self.typing_context.refresh()
NotImplementedError
dataset/ETHPy150Open numba/numba/numba/targets/base.py/BaseContext.refresh
511
def install_registry(self, registry):
    """
    Install a *registry* (a imputils.Registry instance) of function
    and attribute implementations.
    """
    try:
        loader = self._registries[registry]
    except __HOLE__:
        loader = RegistryLoader(registry)
        self._registries[registry] = loader
    self.insert_func_defn(loader.new_registrations('functions'))
    self._insert_getattr_defn(loader.new_registrations('getattrs'))
    self._insert_setattr_defn(loader.new_registrations('setattrs'))
    self._insert_cast_defn(loader.new_registrations('casts'))
    self._insert_get_constant_defn(loader.new_registrations('constants'))
KeyError
dataset/ETHPy150Open numba/numba/numba/targets/base.py/BaseContext.install_registry
512
def get_constant_generic(self, builder, ty, val):
    """
    Return a LLVM constant representing value *val* of Numba type *ty*.
    """
    try:
        impl = self._get_constants.find((ty,))
        return impl(self, builder, ty, val)
    except __HOLE__:
        raise NotImplementedError("cannot lower constant of type '%s'" % (ty,))
NotImplementedError
dataset/ETHPy150Open numba/numba/numba/targets/base.py/BaseContext.get_constant_generic
513
def get_function(self, fn, sig):
    """
    Return the implementation of function *fn* for signature *sig*.
    The return value is a callable with the signature (builder, args).
    """
    sig = sig.as_function()
    if isinstance(fn, (types.Function, types.BoundFunction,
                       types.Dispatcher)):
        key = fn.get_impl_key(sig)
        overloads = self._defns[key]
    else:
        key = fn
        overloads = self._defns[key]
    try:
        return _wrap_impl(overloads.find(sig.args), self, sig)
    except NotImplementedError:
        pass
    if isinstance(fn, types.Type):
        # It's a type instance => try to find a definition for the type class
        try:
            return self.get_function(type(fn), sig)
        except __HOLE__:
            # Raise exception for the type instance, for a better error message
            pass
    raise NotImplementedError("No definition for lowering %s%s" % (key, sig))
NotImplementedError
dataset/ETHPy150Open numba/numba/numba/targets/base.py/BaseContext.get_function
514
def get_getattr(self, typ, attr):
    """
    Get the getattr() implementation for the given type and attribute name.
    The return value is a callable with the signature
    (context, builder, typ, val, attr).
    """
    if isinstance(typ, types.Module):
        # Implement getattr for module-level globals.
        # We are treating them as constants.
        # XXX We shouldn't have to retype this
        attrty = self.typing_context.resolve_module_constants(typ, attr)
        if attrty is None or isinstance(attrty, types.Dummy):
            # No implementation required for dummies (functions, modules...),
            # which are dealt with later
            return None
        else:
            pyval = getattr(typ.pymod, attr)
            llval = self.get_constant(attrty, pyval)
            def imp(context, builder, typ, val, attr):
                return impl_ret_borrowed(context, builder, attrty, llval)
            return imp
    # Lookup specific getattr implementation for this type and attribute
    overloads = self._getattrs[attr]
    try:
        return overloads.find((typ,))
    except NotImplementedError:
        pass
    # Lookup generic getattr implementation for this type
    overloads = self._getattrs[None]
    try:
        return overloads.find((typ,))
    except __HOLE__:
        pass
    raise NotImplementedError("No definition for lowering %s.%s" % (typ, attr))
NotImplementedError
dataset/ETHPy150Open numba/numba/numba/targets/base.py/BaseContext.get_getattr
515
def get_setattr(self, attr, sig):
    """
    Get the setattr() implementation for the given attribute name
    and signature.
    The return value is a callable with the signature (builder, args).
    """
    assert len(sig.args) == 2
    typ = sig.args[0]
    valty = sig.args[1]

    def wrap_setattr(impl):
        def wrapped(builder, args):
            return impl(self, builder, sig, args, attr)
        return wrapped

    # Lookup specific setattr implementation for this type and attribute
    overloads = self._setattrs[attr]
    try:
        return wrap_setattr(overloads.find((typ, valty)))
    except NotImplementedError:
        pass
    # Lookup generic setattr implementation for this type
    overloads = self._setattrs[None]
    try:
        return wrap_setattr(overloads.find((typ, valty)))
    except __HOLE__:
        pass
    raise NotImplementedError("No definition for lowering %s.%s = %s"
                              % (typ, attr, valty))
NotImplementedError
dataset/ETHPy150Open numba/numba/numba/targets/base.py/BaseContext.get_setattr
516
def cast(self, builder, val, fromty, toty):
    """
    Cast a value of type *fromty* to type *toty*.
    This implements implicit conversions as can happen due to the
    granularity of the Numba type system, or lax Python semantics.
    """
    if fromty == toty or toty == types.Any:
        return val
    try:
        impl = self._casts.find((fromty, toty))
        return impl(self, builder, fromty, toty, val)
    except __HOLE__:
        raise NotImplementedError(
            "Cannot cast %s to %s: %s" % (fromty, toty, val))
NotImplementedError
dataset/ETHPy150Open numba/numba/numba/targets/base.py/BaseContext.cast
517
def _call_nrt_incref_decref(self, builder, root_type, typ, value,
                            funcname, getters=()):
    self._require_nrt()

    from numba.runtime.atomicops import incref_decref_ty

    data_model = self.data_model_manager[typ]

    members = data_model.traverse(builder)
    for mtyp, getter in members:
        self._call_nrt_incref_decref(builder, root_type, mtyp, value,
                                     funcname, getters + (getter,))

    if data_model.has_nrt_meminfo():
        # Call the chain of getters to compute the member value
        for getter in getters:
            value = getter(value)
        try:
            meminfo = data_model.get_nrt_meminfo(builder, value)
        except __HOLE__ as e:
            raise NotImplementedError("%s: %s" % (root_type, str(e)))
        assert meminfo is not None  # since has_nrt_meminfo()
        mod = builder.module
        fn = mod.get_or_insert_function(incref_decref_ty, name=funcname)
        # XXX "nonnull" causes a crash in test_dyn_array: can this
        # function be called with a NULL pointer?
        fn.args[0].add_attribute("noalias")
        fn.args[0].add_attribute("nocapture")
        builder.call(fn, [meminfo])
NotImplementedError
dataset/ETHPy150Open numba/numba/numba/targets/base.py/BaseContext._call_nrt_incref_decref
518
def Import(modname):
    try:
        return sys.modules[modname]
    except __HOLE__:
        pass
    mod = __import__(modname)
    pathparts = modname.split(".")
    for part in pathparts[1:]:
        mod = getattr(mod, part)
    sys.modules[modname] = mod
    return mod
KeyError
dataset/ETHPy150Open kdart/pycopia/fepy/pycopia/fepy/remote/IpyServer.py/Import
519
def __call__(self, target, source, env):
    """
    Smart autoscan function. Gets the list of objects for the Program
    or Lib. Adds objects and builders for the special qt files.
    """
    try:
        if int(env.subst('$QT_AUTOSCAN')) == 0:
            return target, source
    except ValueError:
        pass
    try:
        debug = int(env.subst('$QT_DEBUG'))
    except __HOLE__:
        debug = 0

    # some shortcuts used in the scanner
    splitext = SCons.Util.splitext
    objBuilder = getattr(env, self.objBuilderName)

    # some regular expressions:
    # Q_OBJECT detection
    q_object_search = re.compile(r'[^A-Za-z0-9]Q_OBJECT[^A-Za-z0-9]')
    # cxx and c comment 'eater'
    #comment = re.compile(r'(//.*)|(/\*(([^*])|(\*[^/]))*\*/)')
    # CW: something must be wrong with the regexp. See also bug #998222
    # CURRENTLY THERE IS NO TEST CASE FOR THAT

    # The following is kind of hacky to get builders working properly (FIXME)
    objBuilderEnv = objBuilder.env
    objBuilder.env = env
    mocBuilderEnv = env.Moc.env
    env.Moc.env = env

    # make a deep copy for the result; MocH objects will be appended
    out_sources = source[:]

    for obj in source:
        if not obj.has_builder():
            # binary obj file provided
            if debug:
                print "scons: qt: '%s' seems to be a binary. Discarded." % str(obj)
            continue
        cpp = obj.sources[0]
        if not splitext(str(cpp))[1] in cxx_suffixes:
            if debug:
                print "scons: qt: '%s' is no cxx file. Discarded." % str(cpp)
            # c or fortran source
            continue
        #cpp_contents = comment.sub('', cpp.get_text_contents())
        cpp_contents = cpp.get_text_contents()
        h = None
        for h_ext in header_extensions:
            # try to find the header file in the corresponding source
            # directory
            hname = splitext(cpp.name)[0] + h_ext
            h = find_file(hname, (cpp.get_dir(),), env.File)
            if h:
                if debug:
                    print "scons: qt: Scanning '%s' (header of '%s')" % (str(h), str(cpp))
                #h_contents = comment.sub('', h.get_text_contents())
                h_contents = h.get_text_contents()
                break
        if not h and debug:
            print "scons: qt: no header for '%s'." % (str(cpp))
        if h and q_object_search.search(h_contents):
            # h file with the Q_OBJECT macro found -> add moc_cpp
            moc_cpp = env.Moc(h)
            moc_o = objBuilder(moc_cpp)
            out_sources.append(moc_o)
            #moc_cpp.target_scanner = SCons.Defaults.CScan
            if debug:
                print "scons: qt: found Q_OBJECT macro in '%s', moc'ing to '%s'" % (str(h), str(moc_cpp))
        if cpp and q_object_search.search(cpp_contents):
            # cpp file with Q_OBJECT macro found -> add moc
            # (to be included in cpp)
            moc = env.Moc(cpp)
            env.Ignore(moc, moc)
            if debug:
                print "scons: qt: found Q_OBJECT macro in '%s', moc'ing to '%s'" % (str(cpp), str(moc))
            #moc.source_scanner = SCons.Defaults.CScan

    # restore the original env attributes (FIXME)
    objBuilder.env = objBuilderEnv
    env.Moc.env = mocBuilderEnv

    return (target, out_sources)
ValueError
dataset/ETHPy150Open kayhayen/Nuitka/nuitka/build/inline_copy/lib/scons-2.3.2/SCons/Tool/qt.py/_Automoc.__call__
520
def real_download(self, filename, info_dict):
    def run_rtmpdump(args):
        start = time.time()
        resume_percent = None
        resume_downloaded_data_len = None
        proc = subprocess.Popen(args, stderr=subprocess.PIPE)
        cursor_in_new_line = True
        proc_stderr_closed = False
        while not proc_stderr_closed:
            # read line from stderr
            line = ''
            while True:
                char = proc.stderr.read(1)
                if not char:
                    proc_stderr_closed = True
                    break
                if char in [b'\r', b'\n']:
                    break
                line += char.decode('ascii', 'replace')
            if not line:
                # proc_stderr_closed is True
                continue
            mobj = re.search(r'([0-9]+\.[0-9]{3}) kB / [0-9]+\.[0-9]{2} sec \(([0-9]{1,2}\.[0-9])%\)', line)
            if mobj:
                downloaded_data_len = int(float(mobj.group(1)) * 1024)
                percent = float(mobj.group(2))
                if not resume_percent:
                    resume_percent = percent
                    resume_downloaded_data_len = downloaded_data_len
                eta = self.calc_eta(start, time.time(), 100 - resume_percent, percent - resume_percent)
                speed = self.calc_speed(start, time.time(), downloaded_data_len - resume_downloaded_data_len)
                data_len = None
                if percent > 0:
                    data_len = int(downloaded_data_len * 100 / percent)
                data_len_str = '~' + format_bytes(data_len)
                self.report_progress(percent, data_len_str, speed, eta)
                cursor_in_new_line = False
                self._hook_progress({
                    'downloaded_bytes': downloaded_data_len,
                    'total_bytes': data_len,
                    'tmpfilename': tmpfilename,
                    'filename': filename,
                    'status': 'downloading',
                    'eta': eta,
                    'speed': speed,
                })
            else:
                # no percent for live streams
                mobj = re.search(r'([0-9]+\.[0-9]{3}) kB / [0-9]+\.[0-9]{2} sec', line)
                if mobj:
                    downloaded_data_len = int(float(mobj.group(1)) * 1024)
                    time_now = time.time()
                    speed = self.calc_speed(start, time_now, downloaded_data_len)
                    self.report_progress_live_stream(downloaded_data_len, speed, time_now - start)
                    cursor_in_new_line = False
                    self._hook_progress({
                        'downloaded_bytes': downloaded_data_len,
                        'tmpfilename': tmpfilename,
                        'filename': filename,
                        'status': 'downloading',
                        'speed': speed,
                    })
                elif self.params.get('verbose', False):
                    if not cursor_in_new_line:
                        self.to_screen('')
                    cursor_in_new_line = True
                    self.to_screen('[rtmpdump] ' + line)
        proc.wait()
        if not cursor_in_new_line:
            self.to_screen('')
        return proc.returncode

    url = info_dict['url']
    player_url = info_dict.get('player_url', None)
    page_url = info_dict.get('page_url', None)
    app = info_dict.get('app', None)
    play_path = info_dict.get('play_path', None)
    tc_url = info_dict.get('tc_url', None)
    flash_version = info_dict.get('flash_version', None)
    live = info_dict.get('rtmp_live', False)
    conn = info_dict.get('rtmp_conn', None)
    protocol = info_dict.get('rtmp_protocol', None)

    self.report_destination(filename)
    tmpfilename = self.temp_name(filename)
    test = self.params.get('test', False)

    # Check for rtmpdump first
    if not check_executable('rtmpdump', ['-h']):
        self.report_error('RTMP download detected but "rtmpdump" could not be run. Please install it.')
        return False

    # Download using rtmpdump. rtmpdump returns exit code 2 when
    # the connection was interrumpted and resuming appears to be
    # possible. This is part of rtmpdump's normal usage, AFAIK.
    basic_args = ['rtmpdump', '--verbose', '-r', url, '-o', tmpfilename]
    if player_url is not None:
        basic_args += ['--swfVfy', player_url]
    if page_url is not None:
        basic_args += ['--pageUrl', page_url]
    if app is not None:
        basic_args += ['--app', app]
    if play_path is not None:
        basic_args += ['--playpath', play_path]
    if tc_url is not None:
        basic_args += ['--tcUrl', url]
    if test:
        basic_args += ['--stop', '1']
    if flash_version is not None:
        basic_args += ['--flashVer', flash_version]
    if live:
        basic_args += ['--live']
    if isinstance(conn, list):
        for entry in conn:
            basic_args += ['--conn', entry]
    elif isinstance(conn, compat_str):
        basic_args += ['--conn', conn]
    if protocol is not None:
        basic_args += ['--protocol', protocol]
    args = basic_args + [[], ['--resume', '--skip', '1']][not live and self.params.get('continuedl', False)]

    if sys.platform == 'win32' and sys.version_info < (3, 0):
        # Windows subprocess module does not actually support Unicode
        # on Python 2.x
        # See http://stackoverflow.com/a/9951851/35070
        subprocess_encoding = sys.getfilesystemencoding()
        args = [a.encode(subprocess_encoding, 'ignore') for a in args]
    else:
        subprocess_encoding = None

    if self.params.get('verbose', False):
        if subprocess_encoding:
            str_args = [
                a.decode(subprocess_encoding) if isinstance(a, bytes) else a
                for a in args]
        else:
            str_args = args
        try:
            import pipes
            shell_quote = lambda args: ' '.join(map(pipes.quote, str_args))
        except __HOLE__:
            shell_quote = repr
        self.to_screen('[debug] rtmpdump command line: ' + shell_quote(str_args))

    RD_SUCCESS = 0
    RD_FAILED = 1
    RD_INCOMPLETE = 2
    RD_NO_CONNECT = 3

    retval = run_rtmpdump(args)

    if retval == RD_NO_CONNECT:
        self.report_error('[rtmpdump] Could not connect to RTMP server.')
        return False

    while (retval == RD_INCOMPLETE or retval == RD_FAILED) and not test and not live:
        prevsize = os.path.getsize(encodeFilename(tmpfilename))
        self.to_screen('[rtmpdump] %s bytes' % prevsize)
        time.sleep(5.0)  # This seems to be needed
        retval = run_rtmpdump(basic_args + ['-e'] + [[], ['-k', '1']][retval == RD_FAILED])
        cursize = os.path.getsize(encodeFilename(tmpfilename))
        if prevsize == cursize and retval == RD_FAILED:
            break
        # Some rtmp streams seem abort after ~ 99.8%. Don't complain for those
        if prevsize == cursize and retval == RD_INCOMPLETE and cursize > 1024:
            self.to_screen('[rtmpdump] Could not download the whole video. This can happen for some advertisements.')
            retval = RD_SUCCESS
            break
    if retval == RD_SUCCESS or (test and retval == RD_INCOMPLETE):
        fsize = os.path.getsize(encodeFilename(tmpfilename))
        self.to_screen('[rtmpdump] %s bytes' % fsize)
        self.try_rename(tmpfilename, filename)
        self._hook_progress({
            'downloaded_bytes': fsize,
            'total_bytes': fsize,
            'filename': filename,
            'status': 'finished',
        })
        return True
    else:
        self.to_stderr('\n')
        self.report_error('rtmpdump exited with code %d' % retval)
        return False
ImportError
dataset/ETHPy150Open yasoob/youtube-dl-GUI/youtube_dl/downloader/rtmp.py/RtmpFD.real_download
521
def test_nargs_default(runner):
    try:
        @click.command()
        @click.argument('src', nargs=-1, default=42)
        def copy(src):
            pass
    except __HOLE__ as e:
        assert 'nargs=-1' in str(e)
    else:
        assert False
TypeError
dataset/ETHPy150Open pallets/click/tests/test_arguments.py/test_nargs_default
522
def _fetch_content_type_counts(self):
    """
    If an object with an empty _ct_inventory is encountered, compute all
    the content types currently used on that object and save the list in
    the object itself. Further requests for that object can then access
    that information and find out which content types are used without
    resorting to multiple selects on different ct tables.

    It is therefore important that even an "empty" object does not have
    an empty _ct_inventory.
    """
    if 'counts' not in self._cache:
        if (self.item._ct_inventory and
                self.item._ct_inventory.get('_version_', -1) == INVENTORY_VERSION):
            try:
                self._cache['counts'] = self._from_inventory(
                    self.item._ct_inventory)
            except __HOLE__:
                # It's possible that the inventory does not fit together
                # with the current models anymore, f.e. because a content
                # type has been removed.
                pass
        if 'counts' not in self._cache:
            super(TrackerContentProxy, self)._fetch_content_type_counts()
            self.item._ct_inventory = self._to_inventory(
                self._cache['counts'])
            if hasattr(self.item, 'invalidate_cache'):
                self.item.invalidate_cache()
            self.item.__class__.objects.filter(id=self.item.id).update(
                _ct_inventory=self.item._ct_inventory)
            # Run post save handler by hand
            if hasattr(self.item, 'get_descendants'):
                self.item.get_descendants(include_self=False).update(
                    _ct_inventory=None)
    return self._cache['counts']
KeyError
dataset/ETHPy150Open feincms/feincms/feincms/module/extensions/ct_tracker.py/TrackerContentProxy._fetch_content_type_counts
523
def do_transition(self, comment, transition, user):
    try:
        if transition in self.get_current_state().origin_transitions.all():
            self.log_entries.create(
                comment=comment, transition=transition, user=user
            )
    except __HOLE__:
        # No initial state has been set for this workflow
        pass
AttributeError
dataset/ETHPy150Open mayan-edms/mayan-edms/mayan/apps/document_states/models.py/WorkflowInstance.do_transition
524
def get_current_state(self):
    try:
        return self.get_last_transition().destination_state
    except __HOLE__:
        return self.workflow.get_initial_state()
AttributeError
dataset/ETHPy150Open mayan-edms/mayan-edms/mayan/apps/document_states/models.py/WorkflowInstance.get_current_state
525
def get_last_log_entry(self):
    try:
        return self.log_entries.order_by('datetime').last()
    except __HOLE__:
        return None
AttributeError
dataset/ETHPy150Open mayan-edms/mayan-edms/mayan/apps/document_states/models.py/WorkflowInstance.get_last_log_entry
526
def get_last_transition(self):
    try:
        return self.get_last_log_entry().transition
    except __HOLE__:
        return None
AttributeError
dataset/ETHPy150Open mayan-edms/mayan-edms/mayan/apps/document_states/models.py/WorkflowInstance.get_last_transition
527
def _parse_headers(self, data):
    data = native_str(data.decode('latin1'))
    eol = data.find("\r\n")
    start_line = data[:eol]
    try:
        headers = httputil.HTTPHeaders.parse(data[eol:])
    except __HOLE__:
        # probably form split() if there was no ':' in the line
        raise httputil.HTTPInputError("Malformed HTTP headers: %r" %
                                      data[eol:100])
    return start_line, headers
ValueError
dataset/ETHPy150Open RobotWebTools/rosbridge_suite/rosbridge_server/src/tornado/http1connection.py/HTTP1Connection._parse_headers
528
@require_POST
@csrf_exempt
def import_submission_for_form(request, username, id_string):
    """ Retrieve and process submission from SMSSync Request """
    sms_identity = request.POST.get('From', '').strip()
    sms_text = request.POST.get('Body', '').strip()
    now_timestamp = datetime.datetime.now().strftime('%s')
    sent_timestamp = request.POST.get('time_created', now_timestamp).strip()
    try:
        sms_time = datetime.datetime.fromtimestamp(float(sent_timestamp))
    except __HOLE__:
        sms_time = datetime.datetime.now()

    return process_message_for_twilio(username=username,
                                      sms_identity=sms_identity,
                                      sms_text=sms_text,
                                      sms_time=sms_time,
                                      id_string=id_string)
ValueError
dataset/ETHPy150Open kobotoolbox/kobocat/onadata/apps/sms_support/providers/twilio.py/import_submission_for_form
529
def setup_env():
    """Configures app engine environment for command-line apps."""
    # Try to import the appengine code from the system path.
    try:
        from google.appengine.api import apiproxy_stub_map
    except __HOLE__:
        for k in [k for k in sys.modules if k.startswith('google')]:
            del sys.modules[k]
        # Not on the system path. Build a list of alternative paths where it
        # may be. First look within the project for a local copy, then look for
        # where the Mac OS SDK installs it.
        paths = [os.path.join(PROJECT_DIR, '.google_appengine'),
                 os.environ.get('APP_ENGINE_SDK'),
                 '/usr/local/google_appengine',
                 '/Applications/GoogleAppEngineLauncher.app/Contents/Resources/GoogleAppEngine-default.bundle/Contents/Resources/google_appengine']
        for path in os.environ.get('PATH', '').split(os.pathsep):
            path = path.rstrip(os.sep)
            if path.endswith('google_appengine'):
                paths.append(path)
        if os.name in ('nt', 'dos'):
            path = r'%(PROGRAMFILES)s\Google\google_appengine' % os.environ
            paths.append(path)
        # Loop through all possible paths and look for the SDK dir.
        sdk_path = None
        for path in paths:
            if not path:
                continue
            path = os.path.expanduser(path)
            path = os.path.realpath(path)
            if os.path.exists(path):
                sdk_path = path
                break
        if sdk_path is None:
            # The SDK could not be found in any known location.
            sys.stderr.write('The Google App Engine SDK could not be found!\n'
                             "Make sure it's accessible via your PATH "
                             "environment and called google_appengine.\n")
            sys.exit(1)
        # Add the SDK and the libraries within it to the system path.
        extra_paths = [sdk_path]
        lib = os.path.join(sdk_path, 'lib')
        # Automatically add all packages in the SDK's lib folder:
        for name in os.listdir(lib):
            root = os.path.join(lib, name)
            subdir = name
            # Package can be under 'lib/<pkg>/<pkg>/' or 'lib/<pkg>/lib/<pkg>/'
            detect = (os.path.join(root, subdir), os.path.join(root, 'lib', subdir))
            for path in detect:
                if os.path.isdir(path):
                    extra_paths.append(os.path.dirname(path))
                    break
            else:
                if name == 'webapp2':
                    extra_paths.append(root)
        sys.path = extra_paths + sys.path
        from google.appengine.api import apiproxy_stub_map

    setup_project()
    from .utils import have_appserver
    if have_appserver:
        # App Engine's threading.local is broken
        setup_threading()
    elif not os.path.exists(DATA_ROOT):
        os.mkdir(DATA_ROOT)
    setup_logging()

    if not have_appserver:
        # Patch Django to support loading management commands from zip files
        from django.core import management
        management.find_commands = find_commands
ImportError
dataset/ETHPy150Open adieu/djangoappengine/boot.py/setup_env
530
def setup_threading():
    if sys.version_info >= (2, 7):
        return
    # XXX: On Python 2.5 GAE's threading.local doesn't work correctly with subclassing
    try:
        from django.utils._threading_local import local
        import threading
        threading.local = local
    except __HOLE__:
        pass
ImportError
dataset/ETHPy150Open adieu/djangoappengine/boot.py/setup_threading
531
def setup_project():
    from .utils import have_appserver, on_production_server
    if have_appserver:
        # This fixes a pwd import bug for os.path.expanduser()
        env_ext['HOME'] = PROJECT_DIR

    # The dev_appserver creates a sandbox which restricts access to certain
    # modules and builtins in order to emulate the production environment.
    # Here we get the subprocess module back into the dev_appserver sandbox.
    # This module is just too important for development.
    # Also we add the compiler/parser module back and enable https connections
    # (seem to be broken on Windows because the _ssl module is disallowed).
    if not have_appserver:
        from google.appengine.tools import dev_appserver
        try:
            # Backup os.environ. It gets overwritten by the dev_appserver,
            # but it's needed by the subprocess module.
            env = dev_appserver.DEFAULT_ENV
            dev_appserver.DEFAULT_ENV = os.environ.copy()
            dev_appserver.DEFAULT_ENV.update(env)
            # Backup the buffer() builtin. The subprocess in Python 2.5 on
            # Linux and OS X uses needs it, but the dev_appserver removes it.
            dev_appserver.buffer = buffer
        except __HOLE__:
            logging.warn('Could not patch the default environment. '
                         'The subprocess module will not work correctly.')
        try:
            # Allow importing compiler/parser, _ssl (for https),
            # _io for Python 2.7 io support on OS X
            dev_appserver.HardenedModulesHook._WHITE_LIST_C_MODULES.extend(
                ('parser', '_ssl', '_io'))
        except AttributeError:
            logging.warn('Could not patch modules whitelist. '
                         'The compiler and parser modules will not work and '
                         'SSL support is disabled.')
    elif not on_production_server:
        try:
            # Restore the real subprocess module
            from google.appengine.api.mail_stub import subprocess
            sys.modules['subprocess'] = subprocess
            # Re-inject the buffer() builtin into the subprocess module
            from google.appengine.tools import dev_appserver
            subprocess.buffer = dev_appserver.buffer
        except Exception, e:
            logging.warn('Could not add the subprocess module to the sandbox: %s' % e)

    os.environ.update(env_ext)

    extra_paths = [PROJECT_DIR, os.path.join(os.path.dirname(__file__), 'lib')]
    zip_packages_dir = os.path.join(PROJECT_DIR, 'zip-packages')

    # We support zipped packages in the common and project folders.
    if os.path.isdir(zip_packages_dir):
        for zip_package in os.listdir(zip_packages_dir):
            extra_paths.append(os.path.join(zip_packages_dir, zip_package))

    # App Engine causes main.py to be reloaded if an exception gets raised
    # on the first request of a main.py instance, so don't call setup_project()
    # multiple times. We ensure this indirectly by checking if we've already
    # modified sys.path, already.
    if len(sys.path) < len(extra_paths) or \
            sys.path[:len(extra_paths)] != extra_paths:
        for path in extra_paths:
            while path in sys.path:
                sys.path.remove(path)
        sys.path = extra_paths + sys.path
AttributeError
dataset/ETHPy150Open adieu/djangoappengine/boot.py/setup_project
532
@property
def n_topics(self):
    try:
        return self.model.n_topics
    except __HOLE__:
        return self.model.n_components
AttributeError
dataset/ETHPy150Open chartbeat-labs/textacy/textacy/tm/topic_model.py/TopicModel.n_topics
533
def _get_aa(self, attr):
    if attr in self.cache:
        return self.cache[attr]
    else:
        try:
            kv = self.KVClass.objects.get(**{'key': attr, 'obj': self.obj})
        except __HOLE__:
            raise AttributeError("{0} AuxAttr has no attribute "
                                 "{1}".format(self.KVClass, attr))
        self.cache[attr] = kv.value
        return kv.value
    raise AttributeError()
ObjectDoesNotExist
dataset/ETHPy150Open mozilla/inventory/core/keyvalue/utils.py/AuxAttr._get_aa
534
def __getattribute__(self, attr):
    try:
        return super(AuxAttr, self).__getattribute__(attr)
    except __HOLE__:
        pass
    return self._get_aa(attr)
AttributeError
dataset/ETHPy150Open mozilla/inventory/core/keyvalue/utils.py/AuxAttr.__getattribute__
535
def __setattr__(self, attr, value):
    try:
        if super(AuxAttr, self).__getattribute__(attr):
            return super(AuxAttr, self).__setattr__(attr, value)
    except AttributeError:
        pass
    try:
        kv = self.KVClass.objects.get(**{'key': attr, 'obj': self.obj})
    except __HOLE__:
        kv = self.KVClass(**{'key': attr, 'obj': self.obj})
    kv.value = value
    kv.clean()
    kv.save()
    self.cache[attr] = value
    return
ObjectDoesNotExist
dataset/ETHPy150Open mozilla/inventory/core/keyvalue/utils.py/AuxAttr.__setattr__
536
def __delattr__(self, attr):
    try:
        if super(AuxAttr, self).__getattribute__(attr):
            return super(AuxAttr, self).__delattr__(attr)
    except __HOLE__:
        pass
    if hasattr(self, attr):
        self.cache.pop(attr)
        kv = self.KVClass.objects.get(**{'key': attr, 'obj': self.obj})
        kv.delete()
        return
    else:
        raise AttributeError("{0} AuxAttr has no attribute "
                             "{1}".format(self.KVClass, attr))
AttributeError
dataset/ETHPy150Open mozilla/inventory/core/keyvalue/utils.py/AuxAttr.__delattr__
537
def tearDown(self):
    for filename in (self.csv_file, self.json_file, self.schema_file):
        try:
            remove(filename)
        except __HOLE__:
            continue
OSError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/avro-1.7.6/test/test_script.py/TestWrite.tearDown
538
@staticmethod
def read(reader, dump=None):
    name = reader.constant_pool[reader.read_u2()].bytes.decode('mutf-8')
    size = reader.read_u4()
    if dump is not None:
        reader.debug(" " * dump, '%s (%s bytes)' % (name, size))
    try:
        return globals()[name].read_info(reader, dump + 1 if dump is not None else dump)
    except __HOLE__:
        # Unknown attribute - just read the bytes and ignore them.
        if dump is not None:
            reader.debug(" " * (dump + 1), 'Reading and ignoring %s bytes' % size)
        reader.read_bytes(size)
KeyError
dataset/ETHPy150Open pybee/voc/voc/java/attributes.py/Attribute.read
539
@task
def evacuate(name_config=None, debug=None, iteration=False):
    init(name_config, debug)
    try:
        iteration = int(iteration)
    except __HOLE__:
        LOG.error("Invalid value provided as 'iteration' argument, it must be "
                  "integer")
        return
    env.key_filename = cfglib.CONF.migrate.key_filename
    cloud = cloud_ferry.CloudFerry(cfglib.CONF)

    LOG.info("running evacuation")
    evacuation_chain.process_chain(cloud, iteration)

    freed_nodes = get_freed_nodes(iteration)

    if not freed_nodes:
        LOG.warning("Evacuation cannot be completed, because there are no "
                    "available compute nodes, that should be freed")
        return

    LOG.info("Following nodes will be freed once in-cloud migration finishes, "
             "and can be moved from source to destination: %s", freed_nodes)
ValueError
dataset/ETHPy150Open MirantisWorkloadMobility/CloudFerry/cloudferry/fabfile.py/evacuate
540
def render_admin_panel(self, req, cat, page, path_info):
    # Trap AssertionErrors and convert them to TracErrors
    try:
        return self._render_admin_panel(req, cat, page, path_info)
    except __HOLE__ as e:
        raise TracError(e)
AssertionError
dataset/ETHPy150Open edgewall/trac/trac/ticket/admin.py/TicketAdminPanel.render_admin_panel
541
def _do_remove(self, number):
    try:
        number = int(number)
    except __HOLE__:
        raise AdminCommandError(_("<number> must be a number"))
    with self.env.db_transaction:
        model.Ticket(self.env, number).delete()
    printout(_("Ticket #%(num)s and all associated data removed.",
               num=number))
ValueError
dataset/ETHPy150Open edgewall/trac/trac/ticket/admin.py/TicketAdmin._do_remove
542
def prompt_for_user_token(username, scope=None, client_id=None,
                          client_secret=None, redirect_uri=None):
    ''' prompts the user to login if necessary and returns
        the user token suitable for use with the spotipy.Spotify
        constructor

        Parameters:

         - username - the Spotify username
         - scope - the desired scope of the request
         - client_id - the client id of your app
         - client_secret - the client secret of your app
         - redirect_uri - the redirect URI of your app
    '''
    if not client_id:
        client_id = os.getenv('SPOTIPY_CLIENT_ID')
    if not client_secret:
        client_secret = os.getenv('SPOTIPY_CLIENT_SECRET')
    if not redirect_uri:
        redirect_uri = os.getenv('SPOTIPY_REDIRECT_URI')

    if not client_id:
        print('''
            You need to set your Spotify API credentials. You can do this by
            setting environment variables like so:

            export SPOTIPY_CLIENT_ID='your-spotify-client-id'
            export SPOTIPY_CLIENT_SECRET='your-spotify-client-secret'
            export SPOTIPY_REDIRECT_URI='your-app-redirect-url'

            Get your credentials at
                https://developer.spotify.com/my-applications
        ''')
        raise spotipy.SpotifyException(550, -1, 'no credentials set')

    sp_oauth = oauth2.SpotifyOAuth(client_id, client_secret, redirect_uri,
                                   scope=scope, cache_path=".cache-" + username)

    # try to get a valid token for this user, from the cache,
    # if not in the cache, the create a new (this will send
    # the user to a web page where they can authorize this app)
    token_info = sp_oauth.get_cached_token()

    if not token_info:
        print('''
            User authentication requires interaction with your
            web browser. Once you enter your credentials and
            give authorization, you will be redirected to
            a url. Paste that url you were directed to to
            complete the authorization.
        ''')
        auth_url = sp_oauth.get_authorize_url()
        try:
            subprocess.call(["open", auth_url])
            print("Opening %s in your browser" % auth_url)
        except:
            print("Please navigate here: %s" % auth_url)
        print()
        print()
        try:
            response = raw_input("Enter the URL you were redirected to: ")
        except __HOLE__:
            response = input("Enter the URL you were redirected to: ")
        print()
        print()
        code = sp_oauth.parse_response_code(response)
        token_info = sp_oauth.get_access_token(code)

    # Auth'ed API request
    if token_info:
        return token_info['access_token']
    else:
        return None
NameError
dataset/ETHPy150Open plamere/spotipy/spotipy/util.py/prompt_for_user_token
543
def getattr_gi(self, inst, key):
    #TODO: this can probably just be removed now?
    return self.NONE
    try:
        if inst.get_data(key) is None:
            return self.NONE
    except __HOLE__:
        return self.NONE
    type.__setattr__(inst.__class__, key, GIProxy(key, 'data'))
    return getattr(inst, key)
TypeError
dataset/ETHPy150Open pyjs/pyjs/pyjs/runners/giwebkit.py/GIResolver.getattr_gi
544
def getattr_w3(self, inst, key_w3):
    key_gi = self._key_gi(key_w3)
    for base in inst.__class__.__mro__:
        key = (base, key_w3)
        if key in self._custom:
            try:
                attr = self._custom[key].bind(key)
            except __HOLE__:
                attr = self._custom[key]
        elif hasattr(inst.props, key_gi):
            attr = GIProxy(key_gi)
        elif key_gi in base.__dict__:
            attr = base.__dict__[key_gi]
        else:
            continue
        type.__setattr__(base, key_w3, attr)
        return getattr(inst, key_w3)
    return self.NONE
AttributeError
dataset/ETHPy150Open pyjs/pyjs/pyjs/runners/giwebkit.py/GIResolver.getattr_w3
545
def get_process_list(self):
    process_list = []
    for p in psutil.process_iter():
        mem = p.memory_info()
        # psutil throws a KeyError when the uid of a process is not associated with an user.
        try:
            username = p.username()
        except __HOLE__:
            username = None

        proc = {
            'pid': p.pid,
            'name': p.name(),
            'cmdline': ' '.join(p.cmdline()),
            'user': username,
            'status': p.status(),
            'created': p.create_time(),
            'mem_rss': mem.rss,
            'mem_vms': mem.vms,
            'mem_percent': p.memory_percent(),
            'cpu_percent': p.cpu_percent(0)
        }
        process_list.append(proc)

    return process_list
KeyError
dataset/ETHPy150Open Jahaja/psdash/psdash/node.py/LocalService.get_process_list
546
def get_process(self, pid):
    p = psutil.Process(pid)
    mem = p.memory_info_ex()
    cpu_times = p.cpu_times()
    # psutil throws a KeyError when the uid of a process is not associated with an user.
    try:
        username = p.username()
    except __HOLE__:
        username = None

    return {
        'pid': p.pid,
        'ppid': p.ppid(),
        'parent_name': p.parent().name() if p.parent() else '',
        'name': p.name(),
        'cmdline': ' '.join(p.cmdline()),
        'user': username,
        'uid_real': p.uids().real,
        'uid_effective': p.uids().effective,
        'uid_saved': p.uids().saved,
        'gid_real': p.gids().real,
        'gid_effective': p.gids().effective,
        'gid_saved': p.gids().saved,
        'status': p.status(),
        'created': p.create_time(),
        'terminal': p.terminal(),
        'mem_rss': mem.rss,
        'mem_vms': mem.vms,
        'mem_shared': mem.shared,
        'mem_text': mem.text,
        'mem_lib': mem.lib,
        'mem_data': mem.data,
        'mem_dirty': mem.dirty,
        'mem_percent': p.memory_percent(),
        'cwd': p.cwd(),
        'nice': p.nice(),
        'io_nice_class': p.ionice()[0],
        'io_nice_value': p.ionice()[1],
        'cpu_percent': p.cpu_percent(0),
        'num_threads': p.num_threads(),
        'num_files': len(p.open_files()),
        'num_children': len(p.children()),
        'num_ctx_switches_invol': p.num_ctx_switches().involuntary,
        'num_ctx_switches_vol': p.num_ctx_switches().voluntary,
        'cpu_times_user': cpu_times.user,
        'cpu_times_system': cpu_times.system,
        'cpu_affinity': p.cpu_affinity()
    }
KeyError
dataset/ETHPy150Open Jahaja/psdash/psdash/node.py/LocalService.get_process
547
def get_logs(self):
    available_logs = []
    for log in self.node.logs.get_available():
        try:
            stat = os.stat(log.filename)
            available_logs.append({
                'path': log.filename.encode("utf-8"),
                'size': stat.st_size,
                'atime': stat.st_atime,
                'mtime': stat.st_mtime
            })
        except __HOLE__:
            logger.info('Could not stat "%s", removing from available logs',
                        log.filename)
            self.node.logs.remove_available(log.filename)

    return available_logs
OSError
dataset/ETHPy150Open Jahaja/psdash/psdash/node.py/LocalService.get_logs
548
def enable(name, **kwargs):
    '''
    Enable the named service to start at boot

    CLI Example:

    .. code-block:: bash

        salt '*' service.enable <service name>
    '''
    osmajor = _osrel()[0]
    if osmajor < '6':
        cmd = 'update-rc.d -f {0} defaults 99'.format(_cmd_quote(name))
    else:
        cmd = 'update-rc.d {0} enable'.format(_cmd_quote(name))
    try:
        if int(osmajor) >= 6:
            cmd = 'insserv {0} && '.format(_cmd_quote(name)) + cmd
    except __HOLE__:
        osrel = _osrel()
        if osrel == 'testing/unstable' or osrel == 'unstable' or osrel.endswith("/sid"):
            cmd = 'insserv {0} && '.format(_cmd_quote(name)) + cmd
    return not __salt__['cmd.retcode'](cmd, python_shell=True)
ValueError
dataset/ETHPy150Open saltstack/salt/salt/modules/debian_service.py/enable
549
def _convert_entity(m):
    if m.group(1) == "#":
        try:
            return unichr(int(m.group(2)))
        except __HOLE__:
            return "&#%s;" % m.group(2)
    try:
        return _HTML_UNICODE_MAP[m.group(2)]
    except KeyError:
        return "&%s;" % m.group(2)
ValueError
dataset/ETHPy150Open IanLewis/kay/kay/ext/gaema/escape.py/_convert_entity
550
def get(self, key):
    """Returns the deserialized data for the provided key.
    """
    encoded_key = self.encode_key(key)
    try:
        return self.deserialize_data(self.get_binary(encoded_key))
    except __HOLE__:
        if encoded_key == key:
            raise KeyError(binascii.hexlify(key))
        else:
            raise KeyError(key, binascii.hexlify(encoded_key))
KeyError
dataset/ETHPy150Open blixt/py-starbound/starbound/btreedb4.py/FileBTreeDB4.get
551
@require_can_edit_apps
def form_designer(request, domain, app_id, module_id=None, form_id=None):
    meta = get_meta(request)
    track_entered_form_builder_on_hubspot.delay(request.couch_user, request.COOKIES, meta)

    app = get_app(domain, app_id)
    module = None

    try:
        module = app.get_module(module_id)
    except ModuleNotFoundException:
        return bail(request, domain, app_id, not_found="module")
    try:
        form = module.get_form(form_id)
    except __HOLE__:
        return bail(request, domain, app_id, not_found="form")

    if form.no_vellum:
        messages.warning(request, _(
            "You tried to edit this form in the Form Builder. "
            "However, your administrator has locked this form against editing "
            "in the form builder, so we have redirected you to "
            "the form's front page instead."
        ))
        return back_to_main(request, domain, app_id=app_id,
                            unique_form_id=form.unique_id)

    vellum_plugins = ["modeliteration", "itemset", "atwho"]
    if (toggles.COMMTRACK.enabled(domain)):
        vellum_plugins.append("commtrack")
    if toggles.VELLUM_SAVE_TO_CASE.enabled(domain):
        vellum_plugins.append("saveToCase")
    if toggles.VELLUM_EXPERIMENTAL_UI.enabled(domain) and module and module.case_type and form.requires_case():
        vellum_plugins.append("databrowser")

    vellum_features = toggles.toggles_dict(username=request.user.username,
                                           domain=domain)
    vellum_features.update(feature_previews.previews_dict(domain))
    vellum_features.update({
        'group_in_field_list': app.enable_group_in_field_list,
        'image_resize': app.enable_image_resize,
        'markdown_in_groups': app.enable_markdown_in_groups,
        'lookup_tables': domain_has_privilege(domain, privileges.LOOKUP_TABLES),
        'templated_intents': domain_has_privilege(domain, privileges.TEMPLATED_INTENTS),
        'custom_intents': domain_has_privilege(domain, privileges.CUSTOM_INTENTS),
    })

    has_schedule = (
        getattr(module, 'has_schedule', False) and
        getattr(form, 'schedule', False) and form.schedule.enabled
    )
    scheduler_data_nodes = []
    if has_schedule:
        scheduler_data_nodes = [
            SCHEDULE_CURRENT_VISIT_NUMBER,
            SCHEDULE_NEXT_DUE,
            SCHEDULE_UNSCHEDULED_VISIT,
            SCHEDULE_GLOBAL_NEXT_VISIT_DATE,
        ]
        scheduler_data_nodes.extend([
            u"next_{}".format(f.schedule_form_id)
            for f in form.get_phase().get_forms()
            if getattr(f, 'schedule', False) and f.schedule.enabled
        ])

    context = get_apps_base_context(request, domain, app)
    context.update(locals())
    context.update({
        'vellum_debug': settings.VELLUM_DEBUG,
        'nav_form': form,
        'formdesigner': True,
        'multimedia_object_map': app.get_object_map(),
        'sessionid': request.COOKIES.get('sessionid'),
        'features': vellum_features,
        'plugins': vellum_plugins,
        'app_callout_templates': next(app_callout_templates),
        'scheduler_data_nodes': scheduler_data_nodes,
        'no_header': True,
    })
    return render(request, 'app_manager/form_designer.html', context)
IndexError
dataset/ETHPy150Open dimagi/commcare-hq/corehq/apps/app_manager/views/formdesigner.py/form_designer
552
def process_response(self, request, response):
    content_type = response.get('content-type')
    try:
        if content_type.startswith('text/html'):
            # only update last_access when rendering the main page
            request.customer.last_access = timezone.now()
            request.customer.save(update_fields=['last_access'])
    except __HOLE__:
        pass
    return response
AttributeError
dataset/ETHPy150Open awesto/django-shop/shop/middleware.py/CustomerMiddleware.process_response
553
def _url_for_fetch(self, mapping):
    try:
        return mapping['pre_processed_url']
    except __HOLE__:
        return mapping['raw_url']
KeyError
dataset/ETHPy150Open openelections/openelections-core/openelex/us/tx/datasource.py/Datasource._url_for_fetch
554
def shortest_augmenting_path_impl(G, s, t, capacity, residual, two_phase,
                                  cutoff):
    """Implementation of the shortest augmenting path algorithm.
    """
    if s not in G:
        raise nx.NetworkXError('node %s not in graph' % str(s))
    if t not in G:
        raise nx.NetworkXError('node %s not in graph' % str(t))
    if s == t:
        raise nx.NetworkXError('source and sink are the same node')

    if residual is None:
        R = build_residual_network(G, capacity)
    else:
        R = residual

    R_node = R.node
    R_pred = R.pred
    R_succ = R.succ

    # Initialize/reset the residual network.
    for u in R:
        for e in R_succ[u].values():
            e['flow'] = 0

    # Initialize heights of the nodes.
    heights = {t: 0}
    q = deque([(t, 0)])
    while q:
        u, height = q.popleft()
        height += 1
        for v, attr in R_pred[u].items():
            if v not in heights and attr['flow'] < attr['capacity']:
                heights[v] = height
                q.append((v, height))

    if s not in heights:
        # t is not reachable from s in the residual network. The maximum flow
        # must be zero.
        R.graph['flow_value'] = 0
        return R

    n = len(G)
    m = R.size() / 2

    # Initialize heights and 'current edge' data structures of the nodes.
    for u in R:
        R_node[u]['height'] = heights[u] if u in heights else n
        R_node[u]['curr_edge'] = CurrentEdge(R_succ[u])

    # Initialize counts of nodes in each level.
    counts = [0] * (2 * n - 1)
    for u in R:
        counts[R_node[u]['height']] += 1

    inf = R.graph['inf']

    def augment(path):
        """Augment flow along a path from s to t.
        """
        # Determine the path residual capacity.
        flow = inf
        it = iter(path)
        u = next(it)
        for v in it:
            attr = R_succ[u][v]
            flow = min(flow, attr['capacity'] - attr['flow'])
            u = v
        if flow * 2 > inf:
            raise nx.NetworkXUnbounded(
                'Infinite capacity path, flow unbounded above.')
        # Augment flow along the path.
        it = iter(path)
        u = next(it)
        for v in it:
            R_succ[u][v]['flow'] += flow
            R_succ[v][u]['flow'] -= flow
            u = v
        return flow

    def relabel(u):
        """Relabel a node to create an admissible edge.
        """
        height = n - 1
        for v, attr in R_succ[u].items():
            if attr['flow'] < attr['capacity']:
                height = min(height, R_node[v]['height'])
        return height + 1

    if cutoff is None:
        cutoff = float('inf')

    # Phase 1: Look for shortest augmenting paths using depth-first search.
    flow_value = 0
    path = [s]
    u = s
    d = n if not two_phase else int(min(m ** 0.5, 2 * n ** (2. / 3)))
    done = R_node[s]['height'] >= d
    while not done:
        height = R_node[u]['height']
        curr_edge = R_node[u]['curr_edge']
        # Depth-first search for the next node on the path to t.
        while True:
            v, attr = curr_edge.get()
            if (height == R_node[v]['height'] + 1 and
                    attr['flow'] < attr['capacity']):
                # Advance to the next node following an admissible edge.
                path.append(v)
                u = v
                break
            try:
                curr_edge.move_to_next()
            except __HOLE__:
                counts[height] -= 1
                if counts[height] == 0:
                    # Gap heuristic: If relabeling causes a level to become
                    # empty, a minimum cut has been identified. The algorithm
                    # can now be terminated.
                    R.graph['flow_value'] = flow_value
                    return R
                height = relabel(u)
                if u == s and height >= d:
                    if not two_phase:
                        # t is disconnected from s in the residual network. No
                        # more augmenting paths exist.
                        R.graph['flow_value'] = flow_value
                        return R
                    else:
                        # t is at least d steps away from s. End of phase 1.
                        done = True
                        break
                counts[height] += 1
                R_node[u]['height'] = height
                if u != s:
                    # After relabeling, the last edge on the path is no longer
                    # admissible. Retreat one step to look for an alternative.
                    path.pop()
                    u = path[-1]
                    break
        if u == t:
            # t is reached. Augment flow along the path and reset it for a new
            # depth-first search.
            flow_value += augment(path)
            if flow_value >= cutoff:
                R.graph['flow_value'] = flow_value
                return R
            path = [s]
            u = s

    # Phase 2: Look for shortest augmenting paths using breadth-first search.
    flow_value += edmonds_karp_core(R, s, t, cutoff - flow_value)

    R.graph['flow_value'] = flow_value
    return R
StopIteration
dataset/ETHPy150Open networkx/networkx/networkx/algorithms/flow/shortestaugmentingpath.py/shortest_augmenting_path_impl
555
def main(args):
    git_uri = validate_args(args)
    client = _get_solum_client()
    plan_file = get_planfile(git_uri, args.app_name, args.command, args.public)
    print('\n')
    print("************************* Starting setup *************************")
    print('\n')
    plan_uri = create_plan(client, plan_file)
    add_ssh_keys(args)
    try:
        os.remove(plan_file)
    except __HOLE__:
        print('Cannot remove %s. Skip and move forward...' % plan_file)
    trigger_uri = create_assembly(client, args.app_name, plan_uri)
    create_webhook(_filter_trigger_url(trigger_uri))
    print('Successfully created Solum plan, assembly and webhooks!')
OSError
dataset/ETHPy150Open openstack/python-solumclient/contrib/setup-tools/solum-app-setup.py/main
556
@classmethod
def setUpClass(cls):
    # Upload the app to the Platform.
    cls.base_input = makeInputs()
    bundled_resources = dxpy.app_builder.upload_resources(src_dir)
    try:
        app_name = os.path.basename(os.path.abspath(src_dir)) + "_test"
    except __HOLE__:
        app_name = "test_app"
    applet_basename = app_name + "_" + str(int(time.time()))
    cls.applet_id, _ignored_applet_spec = dxpy.app_builder.upload_applet(
        src_dir, bundled_resources, override_name=applet_basename)
OSError
dataset/ETHPy150Open dnanexus/dx-toolkit/src/python/dxpy/templating/templates/python/basic/test/test.py/TestDX_APP_WIZARD_NAME.setUpClass
557
def getvideosize(url, verbose=False):
    try:
        if url.startswith('http:') or url.startswith('https:'):
            ffprobe_command = ['ffprobe', '-icy', '0', '-loglevel', 'repeat+warning' if verbose else 'repeat+error', '-print_format', 'json', '-select_streams', 'v', '-show_format', '-show_streams', '-timeout', '60000000', '-user-agent', BILIGRAB_UA, url]
        else:
            ffprobe_command = ['ffprobe', '-loglevel', 'repeat+warning' if verbose else 'repeat+error', '-print_format', 'json', '-select_streams', 'v', '-show_streams', url]
        logcommand(ffprobe_command)
        ffprobe_process = subprocess.Popen(ffprobe_command, stdout=subprocess.PIPE)
        try:
            ffprobe_output = json.loads(ffprobe_process.communicate()[0].decode('utf-8', 'replace'))
        except __HOLE__:
            logging.warning('Cancelling getting video size, press Ctrl-C again to terminate.')
            ffprobe_process.terminate()
            return 0, 0
        width, height, widthxheight, duration, total_bitrate = 0, 0, 0, 0, 0
        try:
            if dict.get(ffprobe_output, 'format')['duration'] > duration:
                duration = dict.get(ffprobe_output, 'format')['duration']
        except Exception:
            pass
        for stream in dict.get(ffprobe_output, 'streams', []):
            try:
                if duration == 0 and (dict.get(stream, 'duration') > duration):
                    duration = dict.get(stream, 'duration')
                if dict.get(stream, 'width')*dict.get(stream, 'height') > widthxheight:
                    width, height = dict.get(stream, 'width'), dict.get(stream, 'height')
                if dict.get(stream, 'bit_rate') > total_bitrate:
                    total_bitrate += int(dict.get(stream, 'bit_rate'))
            except Exception:
                pass
        if duration == 0:
            duration = int(get_url_size(url) * 8 / total_bitrate)
        return [[int(width), int(height)], int(float(duration))+1]
    except Exception as e:
        logorraise(e)
        return [[0, 0], 0]

#----------------------------------------------------------------------
KeyboardInterrupt
dataset/ETHPy150Open cnbeining/Biligrab/biligrab.py/getvideosize
558
def checkForTracebacks(self):
    try:
        with open(self.appServer.errorLog) as f:
            data = f.read()
        if 'Traceback (most recent call last)' in data:
            print >> sys.stderr, "Contents of error.log after test:"
            print >> sys.stderr, data
            sys.stderr.flush()
    except __HOLE__, err:
        if err.args[0] != errno.ENOENT:
            raise
IOError
dataset/ETHPy150Open sassoftware/conary/conary_test/lib/repserver.py/ConaryServer.checkForTracebacks
559
def parse_params(option, opt, value, parser):
    try:
        args_dict = json.loads(value)
    except __HOLE__:
        print "argument error, %s should be valid JSON" % value
    setattr(parser.values, option.dest, args_dict)
ValueError
dataset/ETHPy150Open dimagi/commcare-hq/corehq/apps/hqadmin/management/commands/make_supervisor_conf.py/parse_params
560
def core_build_id():
    '''Unique id in URN form, distinguishing each unique build.

    If building from a repository checkout, the resulting string will have the
    format "urn:rcsid:IDENTIFIER". If building from source which is not under
    revision control, the build id will have the format
    "urn:utcts:YYYYMMDDHHMMSS". (UTC timestamp)

    In the special case of a Debian package, there is a suffix of the
    following format: ":debian:" package-revision

    Examples of Debianized build ids:

        urn:rcsid:1d348b83a8a8297267c25a1dc86fa170c6d141df:debian:3
        urn:utcts:20081030020621:debian:3

    Dirty repositories generate a urn:rcsid with a trailing "+" followed by
    a base 16 encoded timestamp.

    A "unique build" is defined as the sum of all source code bytes combined.
    A single modification anywhere causes a new unique instance to be "born".

    This value can be overridden by exporting the SMISK_BUILD_ID environment
    variable, but keep in mind to use a URN av value.

    :return: URN
    :rtype: string
    '''
    global _core_build_id
    if _core_build_id is None:
        if 'SMISK_BUILD_ID' in os.environ:
            _core_build_id = os.environ['SMISK_BUILD_ID']
        else:
            # Maybe under revision control
            try:
                _core_build_id = shell_cmd(['git rev-parse master'])
            except __HOLE__:
                pass
            if _core_build_id:
                dirty_extra = ''
                if _core_build_id[-1] == '+':
                    dirty_extra = '%x' % int(time.time())
                _core_build_id = 'urn:rcsid:%s%s' % (_core_build_id, dirty_extra)
            if not _core_build_id:
                # Not under revision control
                _core_build_id = time.strftime('urn:utcts:%Y%m%d%H%M%S', time.gmtime())
            if 'SMISK_BUILD_ID_SUFFIX' in os.environ:
                _core_build_id += os.environ['SMISK_BUILD_ID_SUFFIX']
    return _core_build_id
IOError
dataset/ETHPy150Open rsms/smisk/setup.py/core_build_id
561
def run(self):
    try:
        import sphinx
        if not os.path.exists(self.out_dir):
            if self.dry_run:
                self.announce('skipping creation of directory %s (dry run)' % self.out_dir)
            else:
                self.announce('creating directory %s' % self.out_dir)
                os.makedirs(self.out_dir)
        if self.dry_run:
            self.announce('skipping %s (dry run)' % ' '.join(self.sphinx_args))
        else:
            self.announce('running %s' % ' '.join(self.sphinx_args))
            sphinx.main(self.sphinx_args)
    except __HOLE__, e:
        log.info('Sphinx not installed -- skipping documentation. (%s)', e)
ImportError
dataset/ETHPy150Open rsms/smisk/setup.py/sphinx_build.run
562
def __init__(self, attrs=None):
    Distribution.__init__(self, attrs)
    self.cmdclass = {
        'build': build,
        'build_ext': build_ext,
        'sdist': sdist,
        'config': config,
        'docs': sphinx_build,
        'clean': clean,
    }
    try:
        shell_cmd('which dpkg-buildpackage')
        self.cmdclass['debian'] = debian
    except __HOLE__:
        # Not a debian system or dpkg-buildpackage not installed
        pass

# -----------------------------------------
IOError
dataset/ETHPy150Open rsms/smisk/setup.py/SmiskDistribution.__init__
563
def cleanup(self, releases_path, reserve_version):
    changes = 0
    if os.path.lexists(releases_path):
        releases = [f for f in os.listdir(releases_path)
                    if os.path.isdir(os.path.join(releases_path, f))]
        try:
            releases.remove(reserve_version)
        except __HOLE__:
            pass
        if not self.module.check_mode:
            releases.sort(key=lambda x: os.path.getctime(os.path.join(releases_path, x)),
                          reverse=True)
            for release in releases[self.keep_releases:]:
                changes += self.delete_path(os.path.join(releases_path, release))
        elif len(releases) > self.keep_releases:
            changes += (len(releases) - self.keep_releases)
    return changes
ValueError
dataset/ETHPy150Open roots/trellis/lib/trellis/modules/deploy_helper.py/DeployHelper.cleanup
564
def _update(self, contracts):
    updated = set()
    retry = set()
    try:
        with self.updating_counter(len(contracts)):
            start = time.clock()
            hashes = [c.hash for c in contracts]
            url = '{0}/challenge/{1}'.format(self.api_url, self.token)
            data = {'hashes': hashes}
            headers = {'Content-Type': 'application/json'}
            try:
                resp = self.session.post(url, data=json.dumps(data),
                                         headers=headers,
                                         verify=self.requests_verify_arg)
            except:
                raise DownstreamError('Unable to perform HTTP post.')
            try:
                r_json = handle_json_response(resp)
            except:
                raise DownstreamError('Challenge update failed.')
            if 'challenges' not in r_json:
                raise DownstreamError('Malformed response from server.')
            # challenges is in r_json
            challenges = r_json['challenges']
            for challenge in challenges:
                if ('file_hash' not in challenge):
                    raise DownstreamError('Malformed response from server.')
                try:
                    contract = self.contracts[challenge['file_hash']]
                except __HOLE__:
                    self.logger.warn('Unexpected challenge update.')
                    continue
                if ('error' in challenge or 'status' in challenge):
                    if 'error' in challenge:
                        message = challenge['error']
                    else:
                        message = challenge['status']
                    self.logger.error('Couldn\'t update contract {0}: {1}'
                                      .format(contract, message))
                    continue
                for k in ['challenge', 'due', 'answered']:
                    if (k not in challenge):
                        self.logger.error('Malformed challenge for contract {0}'
                                          .format(contract))
                        continue
                if (challenge['answered']):
                    # contract didn't get updated
                    # we must have tried too early
                    # put it in the retry set
                    retry.add(contract)
                    continue
                contract.challenge = self.heartbeat.challenge_type().\
                    fromdict(challenge['challenge'])
                contract.expiration = datetime.utcnow()\
                    + timedelta(seconds=int(challenge['due']))
                contract.answered = challenge['answered']
                updated.add(contract)
                # go ahead and begin proving now to save time
                self._prove_async(contract)
            stop = time.clock()
            self.logger.info('Updated {0} contracts in {1} seconds'
                             .format(len(updated), round(stop - start, 3)))
    except:
        # some issue updating... retry
        self.logger.error('Update error: {0}'.format(sys.exc_info()[1]))
    # we had an issue updating these contracts
    # we shall retry until they expire... then they'll
    # get dropped
    retry = set(contracts) - updated
    earliest = None
    reupdate_time = datetime.utcnow() \
        + timedelta(seconds=self.retry_interval)
    ready_time = datetime.utcnow()
    for c in contracts:
        if (c in retry and c.expiration + c.estimated_interval > reupdate_time):
            self.update_queue.put(c, reupdate_time, ready_time)
            continue
        if (c in updated):
            if (earliest is None or c.expiration < earliest):
                earliest = c.expiration
            continue
        # otherwise, remove the contract
        self.logger.error('Contract {0} not updated, dropping'.format(c))
        self._remove_contract(c)
    if (earliest is not None):
        self.logger.debug('Earliest updated contract due in {0} seconds'
                          .format((earliest - datetime.utcnow()).total_seconds()))
    if (len(retry) > 0):
        self.heartbeat_thread.wake()
KeyError
dataset/ETHPy150Open StorjOld/downstream-farmer/downstream_farmer/client.py/DownstreamClient._update
565
@classmethod
def _check_geo_field(cls, opts, lookup):
    """
    Utility for checking the given lookup with the given model options.
    The lookup is a string either specifying the geographic field, e.g.
    'point, 'the_geom', or a related lookup on a geographic field like
    'address__point'.

    If a GeometryField exists according to the given lookup on the model
    options, it will be returned. Otherwise returns None.
    """
    # This takes into account the situation where the lookup is a
    # lookup to a related geographic field, e.g., 'address__point'.
    field_list = lookup.split(LOOKUP_SEP)

    # Reversing so list operates like a queue of related lookups,
    # and popping the top lookup.
    field_list.reverse()
    fld_name = field_list.pop()

    try:
        geo_fld = opts.get_field(fld_name)
        # If the field list is still around, then it means that the
        # lookup was for a geometry field across a relationship --
        # thus we keep on getting the related model options and the
        # model field associated with the next field in the list
        # until there's no more left.
        while len(field_list):
            opts = geo_fld.rel.to._meta
            geo_fld = opts.get_field(field_list.pop())
    except (FieldDoesNotExist, __HOLE__):
        return False

    # Finally, make sure we got a Geographic field and return.
    if isinstance(geo_fld, GeometryField):
        return geo_fld
    else:
        return False
AttributeError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.4/django/contrib/gis/db/models/sql/where.py/GeoWhereNode._check_geo_field
566
def __init__ ( self, parent, **traits ):
    """ Initializes the editor object.
    """
    HasPrivateTraits.__init__( self, **traits )
    try:
        self.old_value = getattr( self.object, self.name )
    except __HOLE__:
        ctrait = self.object.base_trait(self.name)
        if ctrait.type == 'event' or self.name == 'spring':
            # Getting the attribute will fail for 'Event' traits:
            self.old_value = Undefined
        else :
            raise

    # Synchronize the application invalid state status with the editor's:
    self.sync_value( self.factory.invalid, 'invalid', 'from' )

#---------------------------------------------------------------------------
#  Finishes editor set-up:
#---------------------------------------------------------------------------
AttributeError
dataset/ETHPy150Open enthought/traitsui/traitsui/editor.py/Editor.__init__
567
def run(self): """This is executed once the application GUI has started. *Make sure all other MayaVi specific imports are made here!* """ # Various imports to do different things. from mayavi.sources.vtk_file_reader import VTKFileReader from mayavi.modules.outline import Outline from mayavi.modules.axes import Axes from mayavi.modules.grid_plane import GridPlane from mayavi.modules.image_plane_widget import ImagePlaneWidget from mayavi.modules.text import Text script = self.script # Create a new scene. script.new_scene() # Read a VTK (old style) data file. r = VTKFileReader() r.initialize(join(get_data_dir(dirname(abspath(__file__))), 'heart.vtk')) script.add_source(r) # Put up some text. t = Text(text='MayaVi rules!', x_position=0.2, y_position=0.9, width=0.8) t.property.color = 1, 1, 0 # Bright yellow, yeah! script.add_module(t) # Create an outline for the data. o = Outline() script.add_module(o) # Create an axes for the data. a = Axes() script.add_module(a) # Create an orientation axes for the scene. This only works with # VTK-4.5 and above which is why we have the try block. try: from mayavi.modules.orientation_axes import OrientationAxes except __HOLE__: pass else: a = OrientationAxes() a.marker.set_viewport(0.0, 0.8, 0.2, 1.0) script.add_module(a) # Create three simple grid plane modules. # First normal to 'x' axis. gp = GridPlane() script.add_module(gp) # Second normal to 'y' axis. gp = GridPlane() gp.grid_plane.axis = 'y' script.add_module(gp) # Third normal to 'z' axis. gp = GridPlane() script.add_module(gp) gp.grid_plane.axis = 'z' # Create one ImagePlaneWidget. ipw = ImagePlaneWidget() script.add_module(ipw) # Set the position to the middle of the data. ipw.ipw.slice_position = 16
ImportError
dataset/ETHPy150Open enthought/mayavi/docs/source/mayavi/auto/subclassing_mayavi_application.py/MyApp.run
568
def __getattr__(self, name):
    try:
        return self.ground(self.attrs[name])
    except __HOLE__:
        raise AttributeError(name)
KeyError
dataset/ETHPy150Open kuri65536/python-for-android/python-modules/twisted/twisted/conch/insults/text.py/CharacterAttributes._ColorAttribute.__getattr__
569
def _init_flow_exceptions():
    """Internal helper to initialize _flow_exceptions.

    This automatically adds webob.exc.HTTPException, if it can be imported.
    """
    global _flow_exceptions
    _flow_exceptions = ()
    add_flow_exception(datastore_errors.Rollback)
    try:
        from webob import exc
    except __HOLE__:
        pass
    else:
        add_flow_exception(exc.HTTPException)
ImportError
dataset/ETHPy150Open GoogleCloudPlatform/datastore-ndb-python/ndb/tasklets.py/_init_flow_exceptions
570
def _help_tasklet_along(self, ns, ds_conn, gen, val=None, exc=None, tb=None):
    # XXX Docstring
    info = utils.gen_info(gen)
    # pylint: disable=invalid-name
    __ndb_debug__ = info
    try:
        save_context = get_context()
        save_namespace = namespace_manager.get_namespace()
        save_ds_connection = datastore._GetConnection()
        try:
            set_context(self._context)
            if ns != save_namespace:
                namespace_manager.set_namespace(ns)
            if ds_conn is not save_ds_connection:
                datastore._SetConnection(ds_conn)
            if exc is not None:
                _logging_debug('Throwing %s(%s) into %s',
                               exc.__class__.__name__, exc, info)
                value = gen.throw(exc.__class__, exc, tb)
            else:
                _logging_debug('Sending %r to %s', val, info)
                value = gen.send(val)
                self._context = get_context()
        finally:
            ns = namespace_manager.get_namespace()
            ds_conn = datastore._GetConnection()
            set_context(save_context)
            if save_namespace != ns:
                namespace_manager.set_namespace(save_namespace)
            if save_ds_connection is not ds_conn:
                datastore._SetConnection(save_ds_connection)
    except __HOLE__, err:
        result = get_return_value(err)
        _logging_debug('%s returned %r', info, result)
        self.set_result(result)
        return
    except GeneratorExit:
        # In Python 2.5, this derives from Exception, but we don't want
        # to handle it like other Exception instances. So we catch and
        # re-raise it immediately. See issue 127. http://goo.gl/2p5Pn
        # TODO: Remove when Python 2.5 is no longer supported.
        raise
    except Exception, err:
        _, _, tb = sys.exc_info()
        if isinstance(err, _flow_exceptions):
            # Flow exceptions aren't logged except in "heavy debug" mode,
            # and then only at DEBUG level, without a traceback.
            _logging_debug('%s raised %s(%s)',
                           info, err.__class__.__name__, err)
        elif utils.DEBUG and logging.getLogger().level < logging.DEBUG:
            # In "heavy debug" mode, log a warning with traceback.
            # (This is the same condition as used in utils.logging_debug().)
            logging.warning('%s raised %s(%s)',
                            info, err.__class__.__name__, err, exc_info=True)
        else:
            # Otherwise, log a warning without a traceback.
            logging.warning('%s raised %s(%s)',
                            info, err.__class__.__name__, err)
        self.set_exception(err, tb)
        return
    else:
        _logging_debug('%s yielded %r', info, value)
        if isinstance(value, (apiproxy_stub_map.UserRPC,
                              datastore_rpc.MultiRpc)):
            # TODO: Tail recursion if the RPC is already complete.
            eventloop.queue_rpc(value, self._on_rpc_completion,
                                value, ns, ds_conn, gen)
            return
        if isinstance(value, Future):
            # TODO: Tail recursion if the Future is already done.
            if self._next:
                raise RuntimeError('Future has already completed yet next is %r' %
                                   self._next)
            self._next = value
            self._geninfo = utils.gen_info(gen)
            _logging_debug('%s is now blocked waiting for %s', self, value)
            value.add_callback(self._on_future_completion, value, ns, ds_conn, gen)
            return
        if isinstance(value, (tuple, list)):
            # Arrange for yield to return a list of results (not Futures).
            info = 'multi-yield from %s' % utils.gen_info(gen)
            mfut = MultiFuture(info)
            try:
                for subfuture in value:
                    mfut.add_dependent(subfuture)
                mfut.complete()
            except GeneratorExit:
                raise
            except Exception, err:
                _, _, tb = sys.exc_info()
                mfut.set_exception(err, tb)
            mfut.add_callback(self._on_future_completion, mfut, ns, ds_conn, gen)
            return
        if _is_generator(value):
            # TODO: emulate PEP 380 here?
            raise NotImplementedError('Cannot defer to another generator.')
        raise RuntimeError('A tasklet should not yield a plain value: '
                           '%.200s yielded %.200r' % (info, value))
StopIteration
dataset/ETHPy150Open GoogleCloudPlatform/datastore-ndb-python/ndb/tasklets.py/Future._help_tasklet_along
571
def tasklet(func):
    # XXX Docstring

    @utils.wrapping(func)
    def tasklet_wrapper(*args, **kwds):
        # XXX Docstring

        # TODO: make most of this a public function so you can take a bare
        # generator and turn it into a tasklet dynamically. (Monocle has
        # this I believe.)
        # pylint: disable=invalid-name
        __ndb_debug__ = utils.func_info(func)
        fut = Future('tasklet %s' % utils.func_info(func))
        fut._context = get_context()
        try:
            result = func(*args, **kwds)
        except __HOLE__, err:
            # Just in case the function is not a generator but still uses
            # the "raise Return(...)" idiom, we'll extract the return value.
            result = get_return_value(err)
        if _is_generator(result):
            ns = namespace_manager.get_namespace()
            ds_conn = datastore._GetConnection()
            _state.add_generator(result)
            eventloop.queue_call(None, fut._help_tasklet_along, ns, ds_conn, result)
        else:
            fut.set_result(result)
        return fut

    return tasklet_wrapper
StopIteration
dataset/ETHPy150Open GoogleCloudPlatform/datastore-ndb-python/ndb/tasklets.py/tasklet
572
def _make_cloud_datastore_context(app_id, external_app_ids=()):
    """Creates a new context to connect to a remote Cloud Datastore instance.

    This should only be used outside of Google App Engine.

    Args:
      app_id: The application id to connect to. This differs from the project
        id as it may have an additional prefix, e.g. "s~" or "e~".
      external_app_ids: A list of apps that may be referenced by data in your
        application. For example, if you are connected to s~my-app and store
        keys for s~my-other-app, you should include s~my-other-app in the
        external_apps list.

    Returns:
      An ndb.Context that can connect to a Remote Cloud Datastore. You can use
      this context by passing it to ndb.set_context.
    """
    from . import model  # Late import to deal with circular imports.
    # Late import since it might not exist.
    if not datastore_pbs._CLOUD_DATASTORE_ENABLED:
        raise datastore_errors.BadArgumentError(
            datastore_pbs.MISSING_CLOUD_DATASTORE_MESSAGE)
    import googledatastore
    try:
        from google.appengine.datastore import cloud_datastore_v1_remote_stub
    except __HOLE__:
        from google3.apphosting.datastore import cloud_datastore_v1_remote_stub

    current_app_id = os.environ.get('APPLICATION_ID', None)
    if current_app_id and current_app_id != app_id:
        # TODO(pcostello): We should support this so users can connect to
        # different applications.
        raise ValueError('Cannot create a Cloud Datastore context that connects '
                         'to an application (%s) that differs from the application '
                         'already connected to (%s).' % (app_id, current_app_id))
    os.environ['APPLICATION_ID'] = app_id

    id_resolver = datastore_pbs.IdResolver((app_id,) + tuple(external_app_ids))
    project_id = id_resolver.resolve_project_id(app_id)
    endpoint = googledatastore.helper.get_project_endpoint_from_env(project_id)
    datastore = googledatastore.Datastore(
        project_endpoint=endpoint,
        credentials=googledatastore.helper.get_credentials_from_env())

    conn = model.make_connection(_api_version=datastore_rpc._CLOUD_DATASTORE_V1,
                                 _id_resolver=id_resolver)

    # If necessary, install the stubs
    try:
        stub = cloud_datastore_v1_remote_stub.CloudDatastoreV1RemoteStub(datastore)
        apiproxy_stub_map.apiproxy.RegisterStub(datastore_rpc._CLOUD_DATASTORE_V1,
                                                stub)
    except:
        pass  # The stub is already installed.
    # TODO(pcostello): Ensure the current stub is connected to the right project.

    # Install a memcache and taskqueue stub which throws on everything.
    try:
        apiproxy_stub_map.apiproxy.RegisterStub('memcache', _ThrowingStub())
    except:
        pass  # The stub is already installed.
    try:
        apiproxy_stub_map.apiproxy.RegisterStub('taskqueue', _ThrowingStub())
    except:
        pass  # The stub is already installed.

    return make_context(conn=conn)
ImportError
dataset/ETHPy150Open GoogleCloudPlatform/datastore-ndb-python/ndb/tasklets.py/_make_cloud_datastore_context
573
def get_file_obj(fname, mode='r', encoding=None):
    """
    Light wrapper to handle strings and let files (anything else) pass through.

    It also handle '.gz' files.

    Parameters
    ==========
    fname: string or file-like object
        File to open / forward
    mode: string
        Argument passed to the 'open' or 'gzip.open' function
    encoding: string
        For Python 3 only, specify the encoding of the file

    Returns
    =======
    A file-like object that is always a context-manager. If the `fname` was
    already a file-like object, the returned context manager *will not
    close the file*.
    """
    if _is_string_like(fname):
        return _open(fname, mode, encoding)
    try:
        # Make sure the object has the write methods
        if 'r' in mode:
            fname.read
        if 'w' in mode or 'a' in mode:
            fname.write
    except __HOLE__:
        raise ValueError('fname must be a string or a file-like object')
    return EmptyContextManager(fname)
AttributeError
dataset/ETHPy150Open statsmodels/statsmodels/statsmodels/iolib/openfile.py/get_file_obj
574
def main():
    """\
    Usage: sports-naarad [-h] [-F] [-C] live_score|news|[player_stats name] [-my_fav_team]

    Get latest updates for football and cricket

    Options:
    -h, --help       shows help(this) message
    -F, --football   [uefa, barclay, fifa] Get football updates. The
                     tournament for which you want to get updates. One of
                     the argumets from uefa, barclay and fifa is compulsory.
    -C, --cricket    Get cricket updates for international matches.

    [live-score, news, fixtures, player-stats[name]]
                     Fields to get. `live-scores` to get live socre of
                     on-going matches, `news` to get latest news headlines,
                     `player-stats` to get statistics of player specified.
                     `fixtures` to get updates on upcoming matches.
                     Compulsory single argument.

    For football option you can give additional options topscorer.
    Use `-` instead of space in names.

    --proxy          To specify proxy. Defaults to system proxy. Take name
                     of a file. Sample -proxy http://username:password@host:port/
    --unset-proxy    Removes the proxy set by using --proxy option.
    """
    useage = textwrap.dedent(main.__doc__)
    args = sys.argv
    if args[1] == '-h':
        print(useage)
        sys.exit(0)
    try:
        if '--proxy' in args:
            with open(sys.path[0] + '/proxy.config', 'w') as f:
                f.write(args[args.index('-proxy') + 1])
        if '--unset-proxy' in args:
            import os
            os.reomve(sys.path[0] + '/proxy.config')
        if args[1] == '-F' or args[1] == '--football' or args[1] == '-f':
            if '-c' in args or '-C' in args or '--cricket' in args:
                raise ValueError('Both Football and cricket cannot be specified')
            if args[2] == 'uefa':
                pass
            elif args[2].lower() == 'barclay':
                from sports_naarad.barclay import Barclay
                if args[3].lower() == 'fixtures':
                    fixture = Barclay().Fixtures(type_return='dict')
                    header = ['Clubs', 'Time(UTC)', 'Location']
                    print(tabulate(fixture, headers=header,
                                   tablefmt='fancy_grid', floatfmt=".2f"))
                elif args[3].lower() == 'live-score':
                    print('\n'.join(Barclay().liveScore(type_return='dict')))
                elif args[3].lower() == 'news':
                    news = Barclay().get_news_headlines(type_return='dict')
                    print()
                    for headline in news:
                        print('Headline: ' + headline + '\n' +
                              'link: ' + news[headline] + '\n')
                elif args[3].lower() == 'player-stats':
                    stats = Barclay().playerStats(args[4], type_return='dict')
                    print()
                    for stat in stats:
                        print(stat + ': ' + stats[stat])
                elif args[3].lower() == 'topscorer':
                    scorers = Barclay().topScorers(type_return='dict')
                    top_scorers = []
                    for names in scorers:
                        top_scorers.append((names, scorers[names]))
                    top_scorers = sorted(top_scorers, key=lambda x: int(x[1]))
                    top_scorers.reverse()
                    print(tabulate(top_scorers,
                                   headers=['Player Name', 'Goal Scored'],
                                   tablefmt='fancy_grid'))
                else:
                    raise ValueError('Not a Valid argument!\n Use -h option for help.')
            elif args[2] == 'fifa':
                pass
            else:
                raise ValueError('Not a Valid argument!\n Use -h option for help')
        if args[1] == '-C' or args[1] == '-c' or args[1] == '--cricket':
            from sports_naarad.cricketAPI import Cricket
            if '-f' in args or '--football' in args:
                raise ValueError('Both Cricket and Football cannot be specifed together!')
            if args[2].lower() == 'live-score':
                print(Cricket().live_score(type_return='dict'))
            elif args[2].lower() == 'fixtures':
                tournaments = Cricket().list_matches(type_return='dict')
                header = ['Teams', 'Time and Date', 'Venue', 'Result']
                for tournament in tournaments:
                    print("Tournament: {}".format(tournament))
                    print(tabulate(tournaments[tournament], headers=header,
                                   tablefmt='fancy_grid'))
                    print('\n')
            elif args[2].lower() == 'news':
                news = Cricket().news(type_return='dict')
                print()
                for headline in news:
                    print('Headline: ' + headline + '\n' + 'link: ' +
                          news[headline] + '\n')
            elif args[2].lower() == 'player-stats':
                try:
                    stats = Cricket().get_player_stats(args[3], type_return='dict')
                    print('\n')
                    for stat in stats:
                        print(stat + ': ' + stats[stat])
                except:
                    raise ValueError('Not a Valid Name!')
            else:
                raise ValueError('Not a Valid Argument! Use -h or --help for help ')
    except __HOLE__:
        print("Arguments not according to format. Please see help! Use -h option for help")
IndexError
dataset/ETHPy150Open PyRag/sports-naarad/sports_naarad/naarad.py/main
575
def unpack(value):
    """Return a three tuple of data, code, and headers"""
    if not isinstance(value, tuple):
        return value, 200, {}

    try:
        data, code, headers = value
        return data, code, headers
    except ValueError:
        pass

    try:
        data, code = value
        return data, code, {}
    except __HOLE__:
        pass

    return value, 200, {}
ValueError
dataset/ETHPy150Open flask-restful/flask-restful/flask_restful/utils/__init__.py/unpack
576
def generate_edgelist(G, delimiter=' ', data=True):
    """Generate a single line of the graph G in edge list format.

    Parameters
    ----------
    G : NetworkX graph

    delimiter : string, optional
       Separator for node labels

    data : bool or list of keys
       If False generate no edge data.  If True use a dictionary
       representation of edge data.  If a list of keys use a list of data
       values corresponding to the keys.

    Returns
    -------
    lines : string
        Lines of data in adjlist format.

    Examples
    --------
    >>> G = nx.lollipop_graph(4, 3)
    >>> G[1][2]['weight'] = 3
    >>> G[3][4]['capacity'] = 12
    >>> for line in nx.generate_edgelist(G, data=False):
    ...     print(line)
    0 1
    0 2
    0 3
    1 2
    1 3
    2 3
    3 4
    4 5
    5 6

    >>> for line in nx.generate_edgelist(G):
    ...     print(line)
    0 1 {}
    0 2 {}
    0 3 {}
    1 2 {'weight': 3}
    1 3 {}
    2 3 {}
    3 4 {'capacity': 12}
    4 5 {}
    5 6 {}

    >>> for line in nx.generate_edgelist(G,data=['weight']):
    ...     print(line)
    0 1
    0 2
    0 3
    1 2 3
    1 3
    2 3
    3 4
    4 5
    5 6

    See Also
    --------
    write_adjlist, read_adjlist
    """
    if data is True or data is False:
        for e in G.edges(data=data):
            yield delimiter.join(map(make_str, e))
    else:
        for u, v, d in G.edges(data=True):
            e = [u, v]
            try:
                e.extend(d[k] for k in data)
            except __HOLE__:
                pass  # missing data for this edge, should warn?
            yield delimiter.join(map(make_str, e))
KeyError
dataset/ETHPy150Open networkx/networkx/networkx/readwrite/edgelist.py/generate_edgelist
577
def _test_list_bulk(self, typecallable, creator=None):
    if creator is None:
        creator = self.entity_maker

    class Foo(object):
        pass

    canary = Canary()
    instrumentation.register_class(Foo)
    attributes.register_attribute(Foo, 'attr', uselist=True,
                                  extension=canary,
                                  typecallable=typecallable,
                                  useobject=True)

    obj = Foo()
    direct = obj.attr

    e1 = creator()
    obj.attr.append(e1)

    like_me = typecallable()
    e2 = creator()
    like_me.append(e2)

    self.assert_(obj.attr is direct)
    obj.attr = like_me
    self.assert_(obj.attr is not direct)
    self.assert_(obj.attr is not like_me)
    self.assert_(set(obj.attr) == set([e2]))
    self.assert_(e1 in canary.removed)
    self.assert_(e2 in canary.added)

    e3 = creator()
    real_list = [e3]
    obj.attr = real_list
    self.assert_(obj.attr is not real_list)
    self.assert_(set(obj.attr) == set([e3]))
    self.assert_(e2 in canary.removed)
    self.assert_(e3 in canary.added)

    e4 = creator()
    try:
        obj.attr = set([e4])
        self.assert_(False)
    except __HOLE__:
        self.assert_(e4 not in canary.data)
        self.assert_(e3 in canary.data)

    e5 = creator()
    e6 = creator()
    e7 = creator()
    obj.attr = [e5, e6, e7]
    self.assert_(e5 in canary.added)
    self.assert_(e6 in canary.added)
    self.assert_(e7 in canary.added)

    obj.attr = [e6, e7]
    self.assert_(e5 in canary.removed)
    self.assert_(e6 in canary.added)
    self.assert_(e7 in canary.added)
    self.assert_(e6 not in canary.removed)
    self.assert_(e7 not in canary.removed)
TypeError
dataset/ETHPy150Open zzzeek/sqlalchemy/test/orm/test_collection.py/CollectionsTest._test_list_bulk
578
def _test_set(self, typecallable, creator=None):
    if creator is None:
        creator = self.entity_maker

    class Foo(object):
        pass

    canary = Canary()
    instrumentation.register_class(Foo)
    attributes.register_attribute(Foo, 'attr', uselist=True,
                                  extension=canary,
                                  typecallable=typecallable,
                                  useobject=True)

    obj = Foo()
    adapter = collections.collection_adapter(obj.attr)
    direct = obj.attr
    control = set()

    def assert_eq():
        eq_(set(direct), canary.data)
        eq_(set(adapter), canary.data)
        eq_(direct, control)

    def addall(*values):
        for item in values:
            direct.add(item)
            control.add(item)
        assert_eq()

    def zap():
        for item in list(direct):
            direct.remove(item)
        control.clear()

    addall(creator())

    e = creator()
    addall(e)
    addall(e)

    if hasattr(direct, 'remove'):
        e = creator()
        addall(e)
        direct.remove(e)
        control.remove(e)
        assert_eq()

        e = creator()
        try:
            direct.remove(e)
        except KeyError:
            assert_eq()
            self.assert_(e not in canary.removed)
        else:
            self.assert_(False)

    if hasattr(direct, 'discard'):
        e = creator()
        addall(e)
        direct.discard(e)
        control.discard(e)
        assert_eq()

        e = creator()
        direct.discard(e)
        self.assert_(e not in canary.removed)
        assert_eq()

    if hasattr(direct, 'update'):
        zap()
        e = creator()
        addall(e)
        values = set([e, creator(), creator()])
        direct.update(values)
        control.update(values)
        assert_eq()

    if hasattr(direct, '__ior__'):
        zap()
        e = creator()
        addall(e)
        values = set([e, creator(), creator()])
        direct |= values
        control |= values
        assert_eq()

        # cover self-assignment short-circuit
        values = set([e, creator(), creator()])
        obj.attr |= values
        control |= values
        assert_eq()

        values = frozenset([e, creator()])
        obj.attr |= values
        control |= values
        assert_eq()

        try:
            direct |= [e, creator()]
            assert False
        except TypeError:
            assert True

    addall(creator(), creator())
    direct.clear()
    control.clear()
    assert_eq()

    # note: the clear test previously needs
    # to have executed in order for this to
    # pass in all cases; else there's the possibility
    # of non-deterministic behavior.
    addall(creator())
    direct.pop()
    control.pop()
    assert_eq()

    if hasattr(direct, 'difference_update'):
        zap()
        e = creator()
        addall(creator(), creator())
        values = set([creator()])
        direct.difference_update(values)
        control.difference_update(values)
        assert_eq()
        values.update(set([e, creator()]))
        direct.difference_update(values)
        control.difference_update(values)
        assert_eq()

    if hasattr(direct, '__isub__'):
        zap()
        e = creator()
        addall(creator(), creator())
        values = set([creator()])
        direct -= values
        control -= values
        assert_eq()
        values.update(set([e, creator()]))
        direct -= values
        control -= values
        assert_eq()

        values = set([creator()])
        obj.attr -= values
        control -= values
        assert_eq()

        values = frozenset([creator()])
        obj.attr -= values
        control -= values
        assert_eq()

        try:
            direct -= [e, creator()]
            assert False
        except TypeError:
            assert True

    if hasattr(direct, 'intersection_update'):
        zap()
        e = creator()
        addall(e, creator(), creator())
        values = set(control)
        direct.intersection_update(values)
        control.intersection_update(values)
        assert_eq()

        values.update(set([e, creator()]))
        direct.intersection_update(values)
        control.intersection_update(values)
        assert_eq()

    if hasattr(direct, '__iand__'):
        zap()
        e = creator()
        addall(e, creator(), creator())
        values = set(control)
        direct &= values
        control &= values
        assert_eq()

        values.update(set([e, creator()]))
        direct &= values
        control &= values
        assert_eq()

        values.update(set([creator()]))
        obj.attr &= values
        control &= values
        assert_eq()

        try:
            direct &= [e, creator()]
            assert False
        except TypeError:
            assert True

    if hasattr(direct, 'symmetric_difference_update'):
        zap()
        e = creator()
        addall(e, creator(), creator())

        values = set([e, creator()])
        direct.symmetric_difference_update(values)
        control.symmetric_difference_update(values)
        assert_eq()

        e = creator()
        addall(e)
        values = set([e])
        direct.symmetric_difference_update(values)
        control.symmetric_difference_update(values)
        assert_eq()

        values = set()
        direct.symmetric_difference_update(values)
        control.symmetric_difference_update(values)
        assert_eq()

    if hasattr(direct, '__ixor__'):
        zap()
        e = creator()
        addall(e, creator(), creator())

        values = set([e, creator()])
        direct ^= values
        control ^= values
        assert_eq()

        e = creator()
        addall(e)
        values = set([e])
        direct ^= values
        control ^= values
        assert_eq()

        values = set()
        direct ^= values
        control ^= values
        assert_eq()

        values = set([creator()])
        obj.attr ^= values
        control ^= values
        assert_eq()

        try:
            direct ^= [e, creator()]
            assert False
        except __HOLE__:
            assert True
TypeError
dataset/ETHPy150Open zzzeek/sqlalchemy/test/orm/test_collection.py/CollectionsTest._test_set
579
def _test_set_bulk(self, typecallable, creator=None):
    if creator is None:
        creator = self.entity_maker

    class Foo(object):
        pass

    canary = Canary()
    instrumentation.register_class(Foo)
    attributes.register_attribute(Foo, 'attr', uselist=True,
                                  extension=canary,
                                  typecallable=typecallable,
                                  useobject=True)

    obj = Foo()
    direct = obj.attr

    e1 = creator()
    obj.attr.add(e1)

    like_me = typecallable()
    e2 = creator()
    like_me.add(e2)

    self.assert_(obj.attr is direct)
    obj.attr = like_me
    self.assert_(obj.attr is not direct)
    self.assert_(obj.attr is not like_me)
    self.assert_(obj.attr == set([e2]))
    self.assert_(e1 in canary.removed)
    self.assert_(e2 in canary.added)

    e3 = creator()
    real_set = set([e3])
    obj.attr = real_set
    self.assert_(obj.attr is not real_set)
    self.assert_(obj.attr == set([e3]))
    self.assert_(e2 in canary.removed)
    self.assert_(e3 in canary.added)

    e4 = creator()
    try:
        obj.attr = [e4]
        self.assert_(False)
    except __HOLE__:
        self.assert_(e4 not in canary.data)
        self.assert_(e3 in canary.data)
TypeError
dataset/ETHPy150Open zzzeek/sqlalchemy/test/orm/test_collection.py/CollectionsTest._test_set_bulk
580
def _test_dict(self, typecallable, creator=None):
    if creator is None:
        creator = self.dictable_entity

    class Foo(object):
        pass

    canary = Canary()
    instrumentation.register_class(Foo)
    attributes.register_attribute(Foo, 'attr', uselist=True,
                                  extension=canary,
                                  typecallable=typecallable,
                                  useobject=True)

    obj = Foo()
    adapter = collections.collection_adapter(obj.attr)
    direct = obj.attr
    control = dict()

    def assert_eq():
        self.assert_(set(direct.values()) == canary.data)
        self.assert_(set(adapter) == canary.data)
        self.assert_(direct == control)

    def addall(*values):
        for item in values:
            direct.set(item)
            control[item.a] = item
        assert_eq()

    def zap():
        for item in list(adapter):
            direct.remove(item)
        control.clear()

    # assume an 'set' method is available for tests
    addall(creator())

    if hasattr(direct, '__setitem__'):
        e = creator()
        direct[e.a] = e
        control[e.a] = e
        assert_eq()

        e = creator(e.a, e.b)
        direct[e.a] = e
        control[e.a] = e
        assert_eq()

    if hasattr(direct, '__delitem__'):
        e = creator()
        addall(e)
        del direct[e.a]
        del control[e.a]
        assert_eq()

        e = creator()
        try:
            del direct[e.a]
        except KeyError:
            self.assert_(e not in canary.removed)

    if hasattr(direct, 'clear'):
        addall(creator(), creator(), creator())
        direct.clear()
        control.clear()
        assert_eq()

        direct.clear()
        control.clear()
        assert_eq()

    if hasattr(direct, 'pop'):
        e = creator()
        addall(e)
        direct.pop(e.a)
        control.pop(e.a)
        assert_eq()

        e = creator()
        try:
            direct.pop(e.a)
        except __HOLE__:
            self.assert_(e not in canary.removed)

    if hasattr(direct, 'popitem'):
        zap()
        e = creator()
        addall(e)
        direct.popitem()
        control.popitem()
        assert_eq()

    if hasattr(direct, 'setdefault'):
        e = creator()

        val_a = direct.setdefault(e.a, e)
        val_b = control.setdefault(e.a, e)
        assert_eq()
        self.assert_(val_a is val_b)

        val_a = direct.setdefault(e.a, e)
        val_b = control.setdefault(e.a, e)
        assert_eq()
        self.assert_(val_a is val_b)

    if hasattr(direct, 'update'):
        e = creator()
        d = dict([(ee.a, ee) for ee in [e, creator(), creator()]])
        addall(e, creator())

        direct.update(d)
        control.update(d)
        assert_eq()

        kw = dict([(ee.a, ee) for ee in [e, creator()]])
        direct.update(**kw)
        control.update(**kw)
        assert_eq()
KeyError
dataset/ETHPy150Open zzzeek/sqlalchemy/test/orm/test_collection.py/CollectionsTest._test_dict
581
def _test_dict_bulk(self, typecallable, creator=None):
    if creator is None:
        creator = self.dictable_entity

    class Foo(object):
        pass

    canary = Canary()
    instrumentation.register_class(Foo)
    attributes.register_attribute(Foo, 'attr', uselist=True,
                                  extension=canary,
                                  typecallable=typecallable,
                                  useobject=True)

    obj = Foo()
    direct = obj.attr

    e1 = creator()
    collections.collection_adapter(direct).append_with_event(e1)

    like_me = typecallable()
    e2 = creator()
    like_me.set(e2)

    self.assert_(obj.attr is direct)
    obj.attr = like_me
    self.assert_(obj.attr is not direct)
    self.assert_(obj.attr is not like_me)
    self.assert_(
        set(collections.collection_adapter(obj.attr)) == set([e2]))
    self.assert_(e1 in canary.removed)
    self.assert_(e2 in canary.added)

    # key validity on bulk assignment is a basic feature of
    # MappedCollection but is not present in basic, @converter-less
    # dict collections.
    e3 = creator()
    if isinstance(obj.attr, collections.MappedCollection):
        real_dict = dict(badkey=e3)
        try:
            obj.attr = real_dict
            self.assert_(False)
        except __HOLE__:
            pass
        self.assert_(obj.attr is not real_dict)
        self.assert_('badkey' not in obj.attr)
        eq_(set(collections.collection_adapter(obj.attr)), set([e2]))
        self.assert_(e3 not in canary.added)
    else:
        real_dict = dict(keyignored1=e3)
        obj.attr = real_dict
        self.assert_(obj.attr is not real_dict)
        self.assert_('keyignored1' not in obj.attr)
        eq_(set(collections.collection_adapter(obj.attr)), set([e3]))
        self.assert_(e2 in canary.removed)
        self.assert_(e3 in canary.added)

    obj.attr = typecallable()
    eq_(list(collections.collection_adapter(obj.attr)), [])

    e4 = creator()
    try:
        obj.attr = [e4]
        self.assert_(False)
    except TypeError:
        self.assert_(e4 not in canary.data)
TypeError
dataset/ETHPy150Open zzzeek/sqlalchemy/test/orm/test_collection.py/CollectionsTest._test_dict_bulk
582
def get_parser(self, directory, base_python_name):
    """
    Get a parser for the given directory, or create one if
    necessary.  This way parsers can be cached and reused.

    # @@: settings are inherited from the first caller
    """
    try:
        return self.parsers_by_directory[(directory, base_python_name)]
    except __HOLE__:
        parser = self.__class__(
            {},
            directory, base_python_name,
            index_names=self.index_names,
            hide_extensions=self.hide_extensions,
            ignore_extensions=self.ignore_extensions,
            constructors=self.constructors)
        self.parsers_by_directory[(directory, base_python_name)] = parser
        return parser
KeyError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/Paste-2.0.1/paste/urlparser.py/URLParser.get_parser
583
def load_module_from_name(environ, filename, module_name, errors):
    if module_name in sys.modules:
        return sys.modules[module_name]
    init_filename = os.path.join(os.path.dirname(filename), '__init__.py')
    if not os.path.exists(init_filename):
        try:
            f = open(init_filename, 'w')
        except (__HOLE__, IOError) as e:
            errors.write(
                'Cannot write __init__.py file into directory %s (%s)\n'
                % (os.path.dirname(filename), e))
            return None
        f.write('#\n')
        f.close()
    fp = None
    if module_name in sys.modules:
        return sys.modules[module_name]
    if '.' in module_name:
        parent_name = '.'.join(module_name.split('.')[:-1])
        base_name = module_name.split('.')[-1]
        parent = load_module_from_name(environ, os.path.dirname(filename),
                                       parent_name, errors)
    else:
        base_name = module_name
    fp = None
    try:
        fp, pathname, stuff = imp.find_module(
            base_name, [os.path.dirname(filename)])
        module = imp.load_module(module_name, fp, pathname, stuff)
    finally:
        if fp is not None:
            fp.close()
    return module
OSError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/Paste-2.0.1/paste/urlparser.py/load_module_from_name
584
def __call__(self, environ, start_response):
    path_info = environ.get('PATH_INFO', '')
    if not path_info:
        return self.add_slash(environ, start_response)
    if path_info == '/':
        # @@: This should obviously be configurable
        filename = 'index.html'
    else:
        filename = request.path_info_pop(environ)
    resource = os.path.normcase(os.path.normpath(
        self.resource_name + '/' + filename))
    if self.root_resource is not None and not resource.startswith(self.root_resource):
        # Out of bounds
        return self.not_found(environ, start_response)
    if not self.egg.has_resource(resource):
        return self.not_found(environ, start_response)
    if self.egg.resource_isdir(resource):
        # @@: Cache?
        child_root = self.root_resource is not None and self.root_resource or \
            self.resource_name
        return self.__class__(self.egg, resource, self.manager,
                              root_resource=child_root)(environ, start_response)
    if environ.get('PATH_INFO') and environ.get('PATH_INFO') != '/':
        return self.error_extra_path(environ, start_response)
    type, encoding = mimetypes.guess_type(resource)
    if not type:
        type = 'application/octet-stream'
    # @@: I don't know what to do with the encoding.
    try:
        file = self.egg.get_resource_stream(self.manager, resource)
    except (__HOLE__, OSError) as e:
        exc = httpexceptions.HTTPForbidden(
            'You are not permitted to view this file (%s)' % e)
        return exc.wsgi_application(environ, start_response)
    start_response('200 OK', [('content-type', type)])
    return fileapp._FileIter(file)
IOError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/Paste-2.0.1/paste/urlparser.py/PkgResourcesParser.__call__
585
def _get_hosts(self):
    """ Return list of hostnames sorted by load. """
    # Get host load information.
    try:
        proc = ShellProc(self._QHOST, stdout=PIPE)
    except Exception as exc:
        self._logger.error('%r failed: %s' % (self._QHOST, exc))
        return []
    lines = proc.stdout.readlines()

    # Reduce to hosts we're interested in and sort by CPU-adjusted load.
    loads = []
    for line in lines:
        if line.startswith(('HOSTNAME', '-')):
            continue
        hostname, arch, ncpu, load, \
            memtot, memuse, swapto, swapus = line.split()
        if self.pattern:
            if not fnmatch.fnmatchcase(hostname, self.pattern):
                continue
        try:
            load = float(load)
            ncpu = int(ncpu)
        except __HOLE__:
            continue
        loads.append((hostname, load / ncpu, ncpu))
    loads = sorted(loads, key=lambda item: item[1])

    # Return list of hostnames.
    hosts = []
    for hostname, load, ncpu in loads:
        for i in range(ncpu):
            hosts.append(hostname)
    return hosts
ValueError
dataset/ETHPy150Open OpenMDAO/OpenMDAO-Framework/openmdao.main/src/openmdao/main/grid_engine.py/GridEngineAllocator._get_hosts
586
@rbac('owner')
def execute_command(self, resource_desc):
    """
    Submit command based on `resource_desc`.

    resource_desc: dict
        Description of command and required resources.

    The '-V' `qsub` option is always used to export the current environment
    to the job. This environment is first updated with any 'job_environment'
    data. The '-sync yes' `qsub` option is used to wait for job completion.

    Other job resource keys are processed as follows:

    ========================= =========================
    Resource Key              Translation
    ========================= =========================
    ``submit_as_hold``        -h
    ------------------------- -------------------------
    ``rerunnable``            -r yes|no
    ------------------------- -------------------------
    ``working_directory``     Ignored
    ------------------------- -------------------------
    ``job_category``          Sets parallel environment
    ------------------------- -------------------------
    ``min_cpus``              Sets parallel environment
    ------------------------- -------------------------
    ``max_cpus``              Sets parallel environment
    ------------------------- -------------------------
    ``min_phys_memory``       Ignored
    ------------------------- -------------------------
    ``email``                 -M `value`
    ------------------------- -------------------------
    ``email_on_started``      -m b
    ------------------------- -------------------------
    ``email_on_terminated``   -m e
    ------------------------- -------------------------
    ``job_name``              -N `value`
    ------------------------- -------------------------
    ``input_path``            -i `value`
    ------------------------- -------------------------
    ``output_path``           -o `value`
    ------------------------- -------------------------
    ``error_path``            -e `value`
    ------------------------- -------------------------
    ``join_files``            -j yes|no
    ------------------------- -------------------------
    ``reservation_id``        -ar `value`
    ------------------------- -------------------------
    ``queue_name``            -q `value`
    ------------------------- -------------------------
    ``priority``              -p `value`
    ------------------------- -------------------------
    ``start_time``            -a `value`
    ------------------------- -------------------------
    ``deadline_time``         Ignored
    ------------------------- -------------------------
    ``accounting_id``         -A `value`
    ========================= =========================

    Where `value` is the corresponding resource value.

    The 'working_directory' key is ignored since the server has been
    started in this directory. ``-cwd`` is used in the `qsub` command.

    If 'input_path' is not specified, add ``-i /dev/null``.
    If 'output_path' is not specified, add ``-o <remote_command>.stdout``.
    If 'error_path' is not specified, add ``-j yes``.

    If 'native_specification' is specified, it is added to the `qsub`
    command just before 'remote_command' and 'args'.

    If specified, 'job_category' is used to index into the category map set
    up during allocator configuration. The mapped category name as well as
    the 'min_cpus' and 'max_cpus' values are used with the ``-pe`` qsub
    option.

    Some resource limits are also handled:

    ==================== =========================
    Resource Key         Translation
    ==================== =========================
    ``core_file_size``   Ignored
    -------------------- -------------------------
    ``data_seg_size``    Ignored
    -------------------- -------------------------
    ``file_size``        Ignored
    -------------------- -------------------------
    ``open_files``       Ignored
    -------------------- -------------------------
    ``stack_size``       Ignored
    -------------------- -------------------------
    ``virtual_memory``   Ignored
    -------------------- -------------------------
    ``cpu_time``         -l h_cpu= `value`
    -------------------- -------------------------
    ``wallclock_time``   -l h_rt= `value`
    ==================== =========================

    Output from `qsub` itself is routed to ``qsub.out``.
    """
    self.home_dir = os.path.expanduser('~')
    self.work_dir = os.getcwd()  # Server started in working directory.

    cmd = list(self._QSUB)
    cmd.extend(('-V', '-sync', 'yes', '-b', 'yes', '-cwd'))
    env = None
    inp, out, err = None, None, None

    # Process description in fixed, repeatable order.
    keys = ('submit_as_hold',
            'rerunnable',
            'job_environment',
            'email',
            'email_on_started',
            'email_on_terminated',
            'job_name',
            'input_path',
            'output_path',
            'error_path',
            'join_files',
            'reservation_id',
            'queue_name',
            'priority',
            'start_time',
            'accounting_id')

    email_events = ''
    for key in keys:
        try:
            value = resource_desc[key]
        except KeyError:
            continue

        if key == 'submit_as_hold':
            if value:
                cmd.append('-h')
        elif key == 'rerunnable':
            cmd.extend(('-r', 'yes' if value else 'no'))
        elif key == 'job_environment':
            env = value
        elif key == 'email':
            cmd.extend(('-M', ','.join(value)))
        elif key == 'email_on_started':
            email_events += 'b'
        elif key == 'email_on_terminated':
            email_events += 'e'
        elif key == 'job_name':
            if value:
                cmd.extend(('-N', self._jobname(value)))
        elif key == 'input_path':
            cmd.extend(('-i', self._fix_path(value)))
            inp = value
        elif key == 'output_path':
            cmd.extend(('-o', self._fix_path(value)))
            out = value
        elif key == 'error_path':
            cmd.extend(('-e', self._fix_path(value)))
            err = value
        elif key == 'join_files':
            cmd.extend(('-j', 'yes' if value else 'no'))
            if value:
                err = 'yes'
        elif key == 'reservation_id':
            cmd.extend(('-ar', value))
        elif key == 'queue_name':
            cmd.extend(('-q', value))
        elif key == 'priority':
            cmd.extend(('-p', str(value)))
        elif key == 'start_time':
            cmd.extend(('-a', value.strftime('%Y%m%d%H%M.%S')))
        elif key == 'accounting_id':
            cmd.extend(('-A', value))

    if email_events:
        cmd.extend(('-m', email_events))

    # Setup parallel environment.
    if 'job_category' in resource_desc:
        job_category = resource_desc['job_category']
        try:
            parallel_environment = self.category_map[job_category]
        except __HOLE__:
            msg = 'No mapping for job_category %r' % job_category
            self._logger.error(msg)
            raise ValueError(msg)
        min_cpus = resource_desc.get('min_cpus', 1)
        max_cpus = resource_desc.get('max_cpus', min_cpus)
        cmd.extend(('-pe', parallel_environment,
                    '%d-%d' % (min_cpus, max_cpus)))

    # Set resource limits.
    if 'resource_limits' in resource_desc:
        limits = resource_desc['resource_limits']
        if 'cpu_time' in limits:
            cpu_time = limits['cpu_time']
            cmd.extend(('-l', 'h_cpu=%s' % self._timelimit(cpu_time)))
        if 'wallclock_time' in limits:
            wall_time = limits['wallclock_time']
            cmd.extend(('-l', 'h_rt=%s' % self._timelimit(wall_time)))

    # Set default command configuration.
    if inp is None:
        cmd.extend(('-i', DEV_NULL))
    if out is None:
        base = os.path.basename(resource_desc['remote_command'])
        cmd.extend(('-o', '%s.stdout' % base))
    if err is None:
        cmd.extend(('-j', 'yes'))

    # Add 'escape' clause.
    if 'native_specification' in resource_desc:
        cmd.extend(resource_desc['native_specification'])

    cmd.append(self._fix_path(resource_desc['remote_command']))

    if 'args' in resource_desc:
        for arg in resource_desc['args']:
            cmd.append(self._fix_path(arg))

    self._logger.info('%r', ' '.join(cmd))
    try:
        process = ShellProc(cmd, DEV_NULL, 'qsub.out', STDOUT, env)
    except Exception as exc:
        self._logger.error('exception creating process: %s', exc)
        raise

    self._logger.debug(' PID = %d', process.pid)
    return_code, error_msg = process.wait(1)
    self._logger.debug(' returning %s', (return_code, error_msg))
    return (return_code, error_msg)
KeyError
dataset/ETHPy150Open OpenMDAO/OpenMDAO-Framework/openmdao.main/src/openmdao/main/grid_engine.py/GridEngineServer.execute_command
587
def main(args):
    print "INFO\t" + now() + "\tstarting " + sys.argv[0] + " called with args: " + ' '.join(sys.argv) + "\n"
    bedfile = open(args.varFileName, 'r')
    reffile = pysam.Fastafile(args.refFasta)

    if not os.path.exists(args.bamFileName + '.bai'):
        sys.stderr.write("ERROR\t" + now() + "\tinput bam must be indexed, not .bai file found for " + args.bamFileName + " \n")
        sys.exit(1)

    alignopts = {}
    if args.alignopts is not None:
        alignopts = dict([o.split(':') for o in args.alignopts.split(',')])

    aligners.checkoptions(args.aligner, alignopts, args.picardjar)

    # load readlist to avoid, if specified
    avoid = None
    if args.avoidreads is not None:
        avoid = dictlist(args.avoidreads)

    # make a temporary file to hold mutated reads
    outbam_mutsfile = "addindel." + str(uuid4()) + ".muts.bam"
    bamfile = pysam.Samfile(args.bamFileName, 'rb')
    outbam_muts = pysam.Samfile(outbam_mutsfile, 'wb', template=bamfile)
    outbam_muts.close()
    bamfile.close()
    tmpbams = []

    if not os.path.exists(args.tmpdir):
        os.mkdir(args.tmpdir)
        print "INFO\t" + now() + "\tcreated tmp directory: " + args.tmpdir

    if not os.path.exists('addindel_logs_' + os.path.basename(args.outBamFile)):
        os.mkdir('addindel_logs_' + os.path.basename(args.outBamFile))
        print "created directory: addindel_logs_" + os.path.basename(args.outBamFile)

    assert os.path.exists('addindel_logs_' + os.path.basename(args.outBamFile)), "could not create output directory!"
    assert os.path.exists(args.tmpdir), "could not create temporary directory!"

    pool = Pool(processes=int(args.procs))
    results = []

    ntried = 0
    for bedline in bedfile:
        if ntried < int(args.numsnvs) or int(args.numsnvs) == 0:
            c = bedline.strip().split()
            chrom = c[0]
            start = int(c[1])
            end = int(c[2])
            vaf = float(c[3])
            type = c[4]
            ins = None

            assert type in ('INS', 'DEL')
            if type == 'INS':
                ins = c[5]

            # make mutation (submit job to thread pool)
            result = pool.apply_async(makemut, [args, chrom, start, end, vaf, ins, avoid, alignopts])
            results.append(result)
            ntried += 1

    for result in results:
        try:
            tmpbamlist = result.get()
            if tmpbamlist is not None:
                for tmpbam in tmpbamlist:
                    if os.path.exists(tmpbam):
                        tmpbams.append(tmpbam)
        except __HOLE__:
            print "****************************************************"
            print "* WARNING: assertion failed somewhere, check logs. *"
            print "****************************************************"

    if len(tmpbams) == 0:
        print "INFO\t" + now() + "\tno succesful mutations"
        sys.exit()

    tmpbams.sort()

    # merge tmp bams
    if len(tmpbams) == 1:
        os.rename(tmpbams[0], outbam_mutsfile)
    elif len(tmpbams) > 1:
        mergebams(tmpbams, outbam_mutsfile, maxopen=int(args.maxopen))

    bedfile.close()

    # cleanup
    for bam in tmpbams:
        if os.path.exists(bam):
            os.remove(bam)
        if os.path.exists(bam + '.bai'):
            os.remove(bam + '.bai')

    if args.skipmerge:
        print "INFO\t" + now() + "\tskipping merge, plase merge reads from", outbam_mutsfile, "manually."
    else:
        if args.tagreads:
            from bamsurgeon.markreads import markreads
            tmp_tag_bam = 'tag.%s.bam' % str(uuid4())
            markreads(outbam_mutsfile, tmp_tag_bam)
            move(tmp_tag_bam, outbam_mutsfile)
            print "INFO\t" + now() + "\ttagged reads."

        print "INFO\t" + now() + "\tdone making mutations, merging mutations into", args.bamFileName, "-->", args.outBamFile
        replace(args.bamFileName, outbam_mutsfile, args.outBamFile, seed=args.seed)

        # cleanup
        os.remove(outbam_mutsfile)
AssertionError
dataset/ETHPy150Open adamewing/bamsurgeon/bin/addindel.py/main
588
def Log(self, format_str, *args):
    """Logs the message using the flow's standard logging.

    Args:
      format_str: Format string
      *args: arguments to the format string

    Raises:
      RuntimeError: on parent missing logs_collection
    """
    format_str = utils.SmartUnicode(format_str)

    try:
        # The status message is always in unicode
        status = format_str % args
    except __HOLE__:
        logging.error("Tried to log a format string with the wrong number "
                      "of arguments: %s", format_str)
        status = format_str

    logging.info("%s: %s", self.session_id, status)

    self.SetStatus(utils.SmartUnicode(status))

    logs_collection = self.OpenLogsCollection(self.args.logs_collection_urn)
    logs_collection.Add(
        rdf_flows.FlowLog(client_id=None, urn=self.session_id,
                          flow_name=self.flow_obj.__class__.__name__,
                          log_message=status))
    logs_collection.Flush()
TypeError
dataset/ETHPy150Open google/grr/grr/lib/hunts/implementation.py/HuntRunner.Log
589
def _CreateAuditEvent(self, event_action):
    try:
        flow_name = self.flow_obj.args.flow_runner_args.flow_name
    except __HOLE__:
        flow_name = ""

    event = flow.AuditEvent(user=self.flow_obj.token.username,
                            action=event_action, urn=self.flow_obj.urn,
                            flow_name=flow_name,
                            description=self.args.description)
    flow.Events.PublishEvent("Audit", event, token=self.flow_obj.token)
AttributeError
dataset/ETHPy150Open google/grr/grr/lib/hunts/implementation.py/HuntRunner._CreateAuditEvent
590
def _AddURNToCollection(self, urn, collection_urn):
    # TODO(user): Change to use StaticAdd once all active hunts are
    # migrated.
    try:
        aff4.FACTORY.Open(collection_urn, "UrnCollection", mode="rw",
                          token=self.token).Add(urn)
    except __HOLE__:
        aff4_collections.PackedVersionedCollection.AddToCollection(
            collection_urn, [urn], sync=False, token=self.token)
IOError
dataset/ETHPy150Open google/grr/grr/lib/hunts/implementation.py/GRRHunt._AddURNToCollection
591
def _AddHuntErrorToCollection(self, error, collection_urn):
    # TODO(user) Change to use StaticAdd once all active hunts are
    # migrated.
    try:
        aff4.FACTORY.Open(collection_urn, "HuntErrorCollection", mode="rw",
                          token=self.token).Add(error)
    except __HOLE__:
        aff4_collections.PackedVersionedCollection.AddToCollection(
            collection_urn, [error], sync=False, token=self.token)
IOError
dataset/ETHPy150Open google/grr/grr/lib/hunts/implementation.py/GRRHunt._AddHuntErrorToCollection
592
@classmethod
def StartHunt(cls, args=None, runner_args=None, **kwargs):
    """This class method creates new hunts."""
    # Build the runner args from the keywords.
    if runner_args is None:
        runner_args = HuntRunnerArgs()

    cls.FilterArgsFromSemanticProtobuf(runner_args, kwargs)

    # Is the required flow a known flow?
    if (runner_args.hunt_name not in cls.classes or
            not aff4.issubclass(cls.classes[runner_args.hunt_name], GRRHunt)):
        raise RuntimeError("Unable to locate hunt %s" % runner_args.hunt_name)

    # Make a new hunt object and initialize its runner.
    hunt_obj = aff4.FACTORY.Create(None, runner_args.hunt_name,
                                   mode="w", token=runner_args.token)

    # Hunt is called using keyword args. We construct an args proto from the
    # kwargs..
    if hunt_obj.args_type and args is None:
        args = hunt_obj.args_type()
        cls.FilterArgsFromSemanticProtobuf(args, kwargs)

    if hunt_obj.args_type and not isinstance(args, hunt_obj.args_type):
        raise RuntimeError("Hunt args must be instance of %s" %
                           hunt_obj.args_type)

    if kwargs:
        raise type_info.UnknownArg("Unknown parameters to StartHunt: %s" % kwargs)

    # Store the hunt args in the state.
    hunt_obj.state.Register("args", args)

    # Hunts are always created in the paused state. The runner method Start
    # should be called to start them.
    hunt_obj.Set(hunt_obj.Schema.STATE("PAUSED"))

    runner = hunt_obj.CreateRunner(runner_args=runner_args)
    # Allow the hunt to do its own initialization.
    runner.RunStateMethod("Start")

    hunt_obj.Flush()

    try:
        flow_name = args.flow_runner_args.flow_name
    except __HOLE__:
        flow_name = ""

    event = flow.AuditEvent(user=runner_args.token.username,
                            action="HUNT_CREATED", urn=hunt_obj.urn,
                            flow_name=flow_name,
                            description=runner_args.description)
    flow.Events.PublishEvent("Audit", event, token=runner_args.token)

    return hunt_obj
AttributeError
dataset/ETHPy150Open google/grr/grr/lib/hunts/implementation.py/GRRHunt.StartHunt
593
def AddResultsToCollection(self, responses, client_id):
    if responses.success:
        with self.lock:
            self.processed_responses = True

            msgs = [rdf_flows.GrrMessage(payload=response, source=client_id)
                    for response in responses]
            try:
                with aff4.FACTORY.Open(self.state.context.results_collection_urn,
                                       hunts_results.HuntResultCollection.__name__,
                                       mode="rw", token=self.token) as collection:
                    for msg in msgs:
                        collection.Add(msg)
            except __HOLE__:
                aff4.ResultsOutputCollection.AddToCollection(
                    self.state.context.results_collection_urn, msgs,
                    sync=True, token=self.token)

            if responses:
                self.RegisterClientWithResults(client_id)

            # Update stats.
            stats.STATS.IncrementCounter("hunt_results_added", delta=len(msgs))
    else:
        self.LogClientError(client_id, log_message=utils.SmartStr(
            responses.status))
IOError
dataset/ETHPy150Open google/grr/grr/lib/hunts/implementation.py/GRRHunt.AddResultsToCollection
594
def SetDescription(self, description=None):
    if description:
        self.state.context.args.description = description
    else:
        try:
            flow_name = self.state.args.flow_runner_args.flow_name
        except __HOLE__:
            flow_name = ""
        self.state.context.args.description = flow_name
AttributeError
dataset/ETHPy150Open google/grr/grr/lib/hunts/implementation.py/GRRHunt.SetDescription
595
@flow.StateHandler()
def Start(self):
    """Initializes this hunt from arguments."""
    self.state.context.Register("results_metadata_urn",
                                self.urn.Add("ResultsMetadata"))
    self.state.context.Register("results_collection_urn",
                                self.urn.Add("Results"))
    self.state.context.Register("output_plugins_base_urn",
                                self.urn.Add("Results"))

    with aff4.FACTORY.Create(
            self.state.context.results_metadata_urn, "HuntResultsMetadata",
            mode="rw", token=self.token) as results_metadata:

        state = rdf_flows.FlowState()
        try:
            plugins_descriptors = self.state.args.output_plugins
        except __HOLE__:
            plugins_descriptors = []

        for index, plugin_descriptor in enumerate(plugins_descriptors):
            output_base_urn = self.state.context.output_plugins_base_urn.Add(
                plugin_descriptor.plugin_name)

            plugin_class = plugin_descriptor.GetPluginClass()
            plugin_obj = plugin_class(self.state.context.results_collection_urn,
                                      output_base_urn=output_base_urn,
                                      args=plugin_descriptor.plugin_args,
                                      token=self.token)

            state.Register("%s_%d" % (plugin_descriptor.plugin_name, index),
                           (plugin_descriptor, plugin_obj.state))

        results_metadata.Set(results_metadata.Schema.OUTPUT_PLUGINS(state))

    # Create the collection for results.
    with aff4.FACTORY.Create(self.state.context.results_collection_urn,
                             "HuntResultCollection", mode="w",
                             token=self.token):
        pass

    # Create the collection for logs.
    with aff4.FACTORY.Create(self.logs_collection_urn,
                             flow_runner.FlowLogCollection.__name__,
                             mode="w", token=self.token):
        pass

    # Create the collections for urns.
    for urn in [self.all_clients_collection_urn,
                self.completed_clients_collection_urn,
                self.clients_with_results_collection_urn]:
        with aff4.FACTORY.Create(urn, "UrnCollection", mode="w",
                                 token=self.token):
            pass

    # Create the collection for errors.
    with aff4.FACTORY.Create(self.clients_errors_collection_urn,
                             "HuntErrorCollection", mode="w",
                             token=self.token):
        pass

    # Create the collections for PluginStatus messages.
    for urn in [self.output_plugins_status_collection_urn,
                self.output_plugins_errors_collection_urn]:
        with aff4.FACTORY.Create(urn, "PluginStatusCollection", mode="w",
                                 token=self.token):
            pass

    if not self.state.context.args.description:
        self.SetDescription()
AttributeError
dataset/ETHPy150Open google/grr/grr/lib/hunts/implementation.py/GRRHunt.Start
596
def __getitem__(self, name):
    'get connection to the named server object'
    try:
        return dict.__getitem__(self, name)
    except __HOLE__:
        # Get information about the requested object.
        methodDict = self.server.objectInfo(name)
        import types
        if isinstance(methodDict, types.StringType):
            raise KeyError(methodDict)  # RETURNED VALUE IS ERROR MESSAGE!
        v = XMLRPCClientObject(self, name, methodDict)
        self[name] = v  # SAVE THIS OBJECT INTO OUR DICTIONARY
        return v
KeyError
dataset/ETHPy150Open cjlee112/pygr/pygr/coordinator.py/XMLRPCClient.__getitem__
597
def __call__(self, url, name):
    try:
        s = self[url]  # REUSE EXISTING CONNECTION TO THE SERVER
    except __HOLE__:
        s = XMLRPCClient(url)  # GET NEW CONNECTION TO THE SERVER
        self[url] = s  # CACHE THIS CONNECTION
    return s[name]  # GET THE REQUESTED OBJECT PROXY FROM THE SERVER
KeyError
dataset/ETHPy150Open cjlee112/pygr/pygr/coordinator.py/ConnectionDict.__call__
598
def safe_dispatch(self, name, args):
    """restrict calls to selected methods, and trap all exceptions to
    keep server alive!"""
    import datetime
    if name in self.xmlrpc_methods:  # Make sure this method is explicitly allowed.
        try:  # TRAP ALL ERRORS TO PREVENT OUR SERVER FROM DYING
            print >>sys.stderr, 'XMLRPC:', name, args, \
                datetime.datetime.now().isoformat(' ')  # LOG THE REQUEST
            if self.xmlrpc_methods[name]:  # use this as an alias for method
                m = getattr(self, self.xmlrpc_methods[name])
            else:  # use method name as usual
                m = getattr(self, name)  # GET THE BOUND METHOD
            val = m(*args)  # CALL THE METHOD
            sys.stderr.flush()  # FLUSH ANY OUTPUT TO OUR LOG
            return val  # HAND BACK ITS RETURN VALUE
        except __HOLE__:
            raise  # WE REALLY DO WANT TO EXIT.
        except:  # METHOD RAISED AN EXCEPTION, SO PRINT TRACEBACK TO STDERR
            traceback.print_exc(self.max_tb, sys.stderr)
    else:
        print >>sys.stderr, "safe_dispatch: blocked unregistered method %s" \
            % name
    return False  # THIS RETURN VALUE IS CONFORMABLE BY XMLRPC...
SystemExit
dataset/ETHPy150Open cjlee112/pygr/pygr/coordinator.py/safe_dispatch
599
def __init__(self, s, separator=None, eq_separator=None):
    list.__init__(self)
    if separator is None:
        separator = self._separator
    if eq_separator is None:
        eq_separator = self._eq_separator
    args = s.split(separator)
    i = 0
    for arg in args:
        try:  # PROCESS attr=val ARGUMENT FORMAT
            k, v = arg.split(eq_separator)
            try:  # SEE IF WE HAVE A TYPE FOR THIS ATTRIBUTE
                v = self._attrtype[k](v)
            except (AttributeError, KeyError):
                pass  # IF NO CONVERSION, JUST USE THE ORIGINAL STRING
            setattr(self, k, v)  # SAVE VALUE AS ATTRIBUTE
        except ValueError:  # JUST A SIMPLE ARGUMENT, SO SAVE AS ARG LIST
            try:  # SEE IF WE HAVE A TYPE FOR THIS LIST ITEM
                arg = self._attrtype[i](arg)
            except (AttributeError, __HOLE__):
                pass  # IF NO CONVERSION, JUST USE THE ORIGINAL STRING
            self.append(arg)
        i += 1  # ADVANCE OUR ARGUMENT COUNT
KeyError
dataset/ETHPy150Open cjlee112/pygr/pygr/coordinator.py/ObjectFromString.__init__