Columns:
    Unnamed: 0    int64   (0 to 10k)
    function      string  (lengths 79 to 138k)
    label         string  (20 classes)
    info          string  (lengths 42 to 261)
900
def loads(self, request, data):
    # Short circuit if we've been given an empty set of data
    if not data:
        return

    # Determine what version of the serializer the data was serialized
    # with
    try:
        ver, data = data.split(b",", 1)
    except __HOLE__:
        ver = b"cc=0"

    # Make sure that our "ver" is actually a version and isn't a false
    # positive from a , being in the data stream.
    if ver[:3] != b"cc=":
        data = ver + data
        ver = b"cc=0"

    # Get the version number out of the cc=N
    ver = ver.split(b"=", 1)[-1].decode("ascii")

    # Dispatch to the actual load method for the given version
    try:
        return getattr(self, "_loads_v{0}".format(ver))(request, data)
    except AttributeError:
        # This is a version we don't have a loads function for, so we'll
        # just treat it as a miss and return None
        return
ValueError
dataset/ETHPy150Open ionrock/cachecontrol/cachecontrol/serialize.py/Serializer.loads
901
def prepare_response(self, request, cached):
    """Verify our vary headers match and construct a real urllib3
    HTTPResponse object.
    """
    # Special case the '*' Vary value as it means we cannot actually
    # determine if the cached response is suitable for this request.
    if "*" in cached.get("vary", {}):
        return

    # Ensure that the Vary headers for the cached response match our
    # request
    for header, value in cached.get("vary", {}).items():
        if request.headers.get(header, None) != value:
            return

    body_raw = cached["response"].pop("body")

    headers = CaseInsensitiveDict(data=cached['response']['headers'])
    if headers.get('transfer-encoding', '') == 'chunked':
        headers.pop('transfer-encoding')

    cached['response']['headers'] = headers

    try:
        body = io.BytesIO(body_raw)
    except __HOLE__:
        # This can happen if cachecontrol serialized to v1 format (pickle)
        # using Python 2. A Python 2 str(byte string) will be unpickled as
        # a Python 3 str (unicode string), which will cause the above to
        # fail with:
        #
        #     TypeError: 'str' does not support the buffer interface
        body = io.BytesIO(body_raw.encode('utf8'))

    return HTTPResponse(
        body=body,
        preload_content=False,
        **cached["response"]
    )
TypeError
dataset/ETHPy150Open ionrock/cachecontrol/cachecontrol/serialize.py/Serializer.prepare_response
902
def _loads_v1(self, request, data):
    try:
        cached = pickle.loads(data)
    except __HOLE__:
        return

    return self.prepare_response(request, cached)
ValueError
dataset/ETHPy150Open ionrock/cachecontrol/cachecontrol/serialize.py/Serializer._loads_v1
903
def _loads_v2(self, request, data):
    try:
        cached = json.loads(zlib.decompress(data).decode("utf8"))
    except __HOLE__:
        return

    # We need to decode the items that we've base64 encoded
    cached["response"]["body"] = _b64_decode_bytes(
        cached["response"]["body"]
    )
    cached["response"]["headers"] = dict(
        (_b64_decode_str(k), _b64_decode_str(v))
        for k, v in cached["response"]["headers"].items()
    )
    cached["response"]["reason"] = _b64_decode_str(
        cached["response"]["reason"],
    )
    cached["vary"] = dict(
        (_b64_decode_str(k), _b64_decode_str(v) if v is not None else v)
        for k, v in cached["vary"].items()
    )

    return self.prepare_response(request, cached)
ValueError
dataset/ETHPy150Open ionrock/cachecontrol/cachecontrol/serialize.py/Serializer._loads_v2
904
def weekly_artists(xml):
    soup = BeautifulStoneSoup(xml)
    # Check this is the right thing
    try:
        assert soup.find("weeklyartistchart"), "weekly_artists did not get a Weekly Artist Chart"
    except __HOLE__:
        print >> sys.stderr, xml
        raise AssertionError("weekly_artists did not get a Weekly Artist Chart")
    # Get the artists
    for tag in soup.findAll("artist"):
        name = str(tag.find("name").string).decode("utf8")
        playtag = tag.find("playcount")
        if playtag:
            plays = long(playtag.string)
        else:
            plays = float(tag.find("weight").string)
        yield unescape(name), plays
AssertionError
dataset/ETHPy150Open andrewgodwin/lastgraph/lastgui/xml.py/weekly_artists
905
@property
def username(self):
    try:
        return self._thread_local.user
    except __HOLE__:
        return DEFAULT_USER.get()
AttributeError
dataset/ETHPy150Open cloudera/hue/desktop/libs/hadoop/src/hadoop/yarn/history_server_api.py/HistoryServerApi.username
906
def __init__(self, name, filename=None, lognode=None):
    self.prefix = name
    self.log = None
    if filename is not None:
        self.log = open("concoord_log_"+name, 'w')
    if lognode is not None:
        logaddr,logport = lognode.split(':')
        try:
            self.log = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
            self.log.setsockopt(socket.SOL_SOCKET,socket.SO_REUSEADDR,1)
            self.log.setsockopt(socket.IPPROTO_TCP,socket.TCP_NODELAY,1)
            self.log.connect((logaddr,int(logport)))
        except __HOLE__:
            self.log = None
    return
IOError
dataset/ETHPy150Open denizalti/concoord/concoord/utils.py/Logger.__init__
907
def _resize_image(self, filename, size):
    """Resizes the image to specified width, height and force option
        - filename: full path of image to resize
        - size: dictionary containing:
            - width: new width
            - height: new height
            - force: if True, image will be cropped to fit the exact size,
              if False, it will have the bigger size that fits the specified
              size, but without cropping, so it could be smaller on width
              or height
    """
    WIDTH, HEIGHT = 0, 1
    from PIL import Image, ImageOps
    img = Image.open(filename)
    if img.size[WIDTH] > size['width'] or img.size[HEIGHT] > size['height']:
        if size['force']:
            img = ImageOps.fit(img, (size['width'], size['height']), Image.ANTIALIAS)
        else:
            img.thumbnail((size['width'], size['height']), Image.ANTIALIAS)
        try:
            img.save(filename, optimize=1)
        except __HOLE__:
            img.save(filename)
IOError
dataset/ETHPy150Open cidadania/ecidadania-ng/src/apps/spaces/fields.py/StdImageField._resize_image
908
def test_badpath_notfound(webapp):
    try:
        url = "%s/../../../../../../etc/passwd" % webapp.server.http.base
        urlopen(url)
    except __HOLE__ as e:
        assert e.code == 404
    else:
        assert False
HTTPError
dataset/ETHPy150Open circuits/circuits/tests/web/test_security.py/test_badpath_notfound
909
def compatible_staticpath(path):
    '''
    Try to return a path to static the static files compatible all
    the way back to Django 1.2. If anyone has a cleaner or better way to do
    this let me know!
    '''
    try:
        # >= 1.4
        from django.templatetags.static import static
        return static(path)
    except ImportError:
        pass
    try:
        # >= 1.3
        return '%s/%s' % (settings.STATIC_URL.rstrip('/'), path)
    except AttributeError:
        pass
    try:
        return '%s/%s' % (settings.PAGEDOWN_URL.rstrip('/'), path)
    except __HOLE__:
        pass
    return '%s/%s' % (settings.MEDIA_URL.rstrip('/'), path)
AttributeError
dataset/ETHPy150Open timmyomahony/django-pagedown/pagedown/utils.py/compatible_staticpath
910
def ensure_connected(f):
    """Tries to connect to the player

    It *should* be successful if the player is alive
    """
    def wrapper(*args, **kwargs):
        self = args[0]
        try:
            self.iface.GetMetadata()
        except (dbus.exceptions.DBusException, __HOLE__):
            # except AttributeError because
            # self.iface won't exist if we haven't
            # _connect()ed yet
            self._connect()
        return f(*args, **kwargs)
    return wrapper
AttributeError
dataset/ETHPy150Open qtile/qtile/libqtile/widget/mpriswidget.py/Mpris.ensure_connected
911
def _execute(self, transforms, *args, **kwargs):
    try:
        extra = kwargs['extra']
        proto_name = kwargs['protocol']
        proto_init = kwargs['protocol_init']
        session_id = kwargs['session_id']

        logging.debug('Incoming session %s(%s) Session ID: %s Extra: %s' % (
            proto_name,
            proto_init,
            session_id,
            extra
        ))

        # If protocol is disabled, raise HTTPError
        if proto_name not in self.settings['enabled_protocols']:
            raise HTTPError(403, 'Forbidden')

        protocol = PROTOCOLS.get(proto_name, None)

        if protocol:
            handler = protocol(self, session_id)
            handler._execute(transforms, *extra, **kwargs)
        else:
            raise Exception('Handler for protocol "%s" is not available' % proto_name)
    except __HOLE__:
        # TODO: Debugging
        raise HTTPError(403, 'Forbidden')
ValueError
dataset/ETHPy150Open mrjoes/tornadio/tornadio/router.py/SocketRouterBase._execute
912
def __init__(self, stream=None):
    if stream is None:
        stream = sys.stdout
    try:
        if stream.isatty():
            curses.setupterm()
            self.terminal_capable = True
    except __HOLE__:
        pass
AttributeError
dataset/ETHPy150Open google/grr/grr/tools/run_tests.py/Colorizer.__init__
913
def complete(self, text, state):
    """
    Complete
    """
    response = None
    if state == 0:
        origline = readline.get_line_buffer()
        begin = readline.get_begidx()
        end = readline.get_endidx()
        being_completed = origline[begin:end]
        words = origline.split()

        if not words:
            self.current_candidates = sorted([c for c in self.options])
        else:
            try:
                if begin == 0:
                    candidates = [c for c in self.options]
                elif words[-1] in self.options[words[0]]:
                    candidates = []
                else:
                    first = words[0]
                    candidates = self.options[first]

                if being_completed:
                    self.current_candidates = [w for w in candidates
                                               if w.startswith(being_completed)]
                else:
                    self.current_candidates = candidates

            except (KeyError, __HOLE__):
                self.current_candidates = []

    try:
        response = self.current_candidates[state]
    except IndexError:
        response = None
    return response
IndexError
dataset/ETHPy150Open DTVD/rainbowstream/rainbowstream/interactive.py/RainbowCompleter.complete
914
def parse_authorization_header(value):
    """Parse an HTTP basic/digest authorization header transmitted by the web
    browser.  The return value is either `None` if the header was invalid or
    not given, otherwise an :class:`Authorization` object.

    :param value: the authorization header to parse.
    :return: a :class:`Authorization` object or `None`.
    """
    if not value:
        return
    try:
        auth_type, auth_info = value.split(None, 1)
        auth_type = auth_type.lower()
    except __HOLE__:
        return
    if auth_type == 'basic':
        try:
            username, password = auth_info.decode('base64').split(':', 1)
        except Exception, e:
            return
        return Authorization('basic', {'username': username,
                                       'password': password})
    elif auth_type == 'digest':
        auth_map = parse_dict_header(auth_info)
        for key in 'username', 'realm', 'nonce', 'uri', 'nc', 'cnonce', \
                   'response':
            if not key in auth_map:
                return
        return Authorization('digest', auth_map)
ValueError
dataset/ETHPy150Open IanLewis/kay/kay/lib/werkzeug/http.py/parse_authorization_header
915
def parse_www_authenticate_header(value, on_update=None):
    """Parse an HTTP WWW-Authenticate header into a :class:`WWWAuthenticate`
    object.

    :param value: a WWW-Authenticate header to parse.
    :param on_update: an optional callable that is called every time a
                      value on the :class:`WWWAuthenticate` object is changed.
    :return: a :class:`WWWAuthenticate` object.
    """
    if not value:
        return WWWAuthenticate(on_update=on_update)
    try:
        auth_type, auth_info = value.split(None, 1)
        auth_type = auth_type.lower()
    except (__HOLE__, AttributeError):
        return WWWAuthenticate(value.strip().lower(), on_update=on_update)
    return WWWAuthenticate(auth_type, parse_dict_header(auth_info),
                           on_update)
ValueError
dataset/ETHPy150Open IanLewis/kay/kay/lib/werkzeug/http.py/parse_www_authenticate_header
916
def parse_date(value):
    """Parse one of the following date formats into a datetime object:

    .. sourcecode:: text

        Sun, 06 Nov 1994 08:49:37 GMT  ; RFC 822, updated by RFC 1123
        Sunday, 06-Nov-94 08:49:37 GMT ; RFC 850, obsoleted by RFC 1036
        Sun Nov  6 08:49:37 1994       ; ANSI C's asctime() format

    If parsing fails the return value is `None`.

    :param value: a string with a supported date format.
    :return: a :class:`datetime.datetime` object.
    """
    if value:
        t = parsedate_tz(value.strip())
        if t is not None:
            try:
                year = t[0]
                # unfortunately that function does not tell us if two digit
                # years were part of the string, or if they were prefixed
                # with two zeroes.  So what we do is to assume that 69-99
                # refer to 1900, and everything below to 2000
                if year >= 0 and year <= 68:
                    year += 2000
                elif year >= 69 and year <= 99:
                    year += 1900
                return datetime(*((year,) + t[1:7])) - \
                       timedelta(seconds=t[-1] or 0)
            except (__HOLE__, OverflowError):
                return None
ValueError
dataset/ETHPy150Open IanLewis/kay/kay/lib/werkzeug/http.py/parse_date
917
def load_clib(self):
    from ctypes import cdll, util
    try:
        clib = cdll.LoadLibrary(util.find_library('c'))
    except __HOLE__ as e:
        if 'image not found' in e.message:
            clib = cdll.LoadLibrary('libc.dylib')  # The mac edge case
        else:
            raise
    return clib
OSError
dataset/ETHPy150Open ooici/pyon/pyon/util/test/test_async.py/TestThreads.load_clib
918
def find_all_episodes(self, options):
    premium = False
    if options.username and options.password:
        premium = self._login(options.username, options.password)
        if isinstance(premium, Exception):
            log.error(premium.message)
            return None

    jsondata = self._get_show_info()

    episodes = []
    n = 1
    for i in jsondata["results"]:
        if premium:
            text = "availability_group_premium"
        else:
            text = "availability_group_free"
        try:
            days = int(i["availability"][text])
        except (ValueError, __HOLE__):
            days = 999
        if days > 0:
            video_id = i["id"]
            url = "http://www.tv4play.se/program/%s?video_id=%s" % (
                i["program"]["nid"], video_id)
            episodes.append(url)
            if n == options.all_last:
                break
            n += 1

    return episodes
TypeError
dataset/ETHPy150Open spaam/svtplay-dl/lib/svtplay_dl/service/tv4play.py/Tv4play.find_all_episodes
919
def findvid(url, data):
    parse = urlparse(url)
    if "tv4play.se" in url:
        try:
            vid = parse_qs(parse.query)["video_id"][0]
        except __HOLE__:
            return None
    else:
        match = re.search(r"\"vid\":\"(\d+)\",", data)
        if match:
            vid = match.group(1)
        else:
            match = re.search(r"-(\d+)$", url)
            if match:
                vid = match.group(1)
            else:
                match = re.search(r"meta content='([^']+)' property='og:video'", data)
                if match:
                    match = re.search(r"vid=(\d+)&", match.group(1))
                    if match:
                        vid = match.group(1)
                    else:
                        log.error("Can't find video id for %s", url)
                        return
                else:
                    return None
    return vid
KeyError
dataset/ETHPy150Open spaam/svtplay-dl/lib/svtplay_dl/service/tv4play.py/findvid
920
def run(self, *args, **kwargs):
    compile_dir = settings_get('compileDir')
    source_file = self.view.file_name()
    source_dir = os.path.normcase(os.path.dirname(source_file))
    try:
        project_file = self.view.window().project_file_name()
    except __HOLE__:
        project_file = ''
    if project_file:
        project_dir = os.path.normcase(os.path.dirname(project_file))
    compile_paths = settings_get('compilePaths')
    compress = settings_get('compress', False)

    args = [source_file]
    if compress:
        args = ['-c'] + args

    use_autoprefixer = settings_get('useAutoPrefixer', False)
    if use_autoprefixer is True:
        print("Using autoprefixer...")
        args = ['--use', 'autoprefixer-stylus'] + args

    # check instance of compile_paths
    if isinstance(compile_paths, dict):
        appendix_len = None
        for key_path in compile_paths:
            norm_path = os.path.normcase(key_path)
            if not os.path.isabs(norm_path) and project_file:
                norm_path = os.path.join(project_dir, norm_path)
            appendix = os.path.relpath(source_dir, norm_path)
            if not appendix.startswith('..') and (appendix_len is None or len(appendix) < appendix_len):
                appendix_len = len(appendix)
                compile_dir = compile_paths[key_path]
                if not os.path.isabs(compile_dir):
                    compile_dir = os.path.join(norm_path, compile_dir)
                compile_dir = os.path.join(compile_dir, appendix)

    if compile_dir and (isinstance(compile_dir, str)):
        # Check for absolute path or relative path for compile_dir
        if not os.path.isabs(compile_dir):
            compile_dir = os.path.join(source_dir, compile_dir)
        print("Compile to:" + compile_dir)
        # create folder if not exist
        if not os.path.exists(compile_dir):
            os.makedirs(compile_dir)
            print("Compile dir did not exist, created folder: " + compile_dir)
        folder, file_nm = os.path.split(source_file)
        args = ['--out', compile_dir] + args
    else:
        compile_dir = source_dir
        print("Compile to same directory")

    cwd = None
    result = run("stylus", args=args, cwd=cwd)

    if result['okay'] is True:
        status = 'Compilation Succeeded'
    else:
        lines = result['err'].splitlines()
        if len(lines) >= 3:
            line = lines[2]
            if re.search("throw err;$", line):
                # Remove useless lines
                lines = lines[4:]
                index = 0
                linenb = 0
                for line in lines:
                    if re.search("^ at ", line):
                        linenb = index
                        break
                    index += 1
                if linenb > 0:
                    # remove useless lines
                    lines = lines[:linenb - 1]
        status = 'Compilation FAILED ' + lines[0]
        sublime.error_message("\n".join(lines))

    later = lambda: sublime.status_message(status)
    sublime.set_timeout(later, 300)
AttributeError
dataset/ETHPy150Open billymoon/Stylus/Stylus.py/StyluscompileCommand.run
921
def get_executable():
    name = get_var('vial_python_executable', 'default')
    try:
        return get_var('vial_python_executables', {})[name]
    except __HOLE__:
        pass

    path = get_virtualenvwrapper_executable(name)
    if path:
        return path

    if name == 'default':
        return sys.executable
    elif name == 'python2':
        path = which('python2')
        if path:
            return path
    elif name == 'python3':
        path = which('python3')
        if path:
            return path

    return sys.executable
KeyError
dataset/ETHPy150Open baverman/vial-python/vial-plugin/python/env.py/get_executable
922
def get():
    executable = get_executable()
    try:
        env = environments[executable]
    except __HOLE__:
        logfile = join(tempfile.gettempdir(), 'supp.log')
        env = environments[executable] = Environment(
            executable, get_var('vial_python_executable_env', {}), logfile)
    return env
KeyError
dataset/ETHPy150Open baverman/vial-python/vial-plugin/python/env.py/get
923
def in_home(*path):
    try:
        from win32com.shell import shellcon, shell
    except __HOLE__:
        home = os.path.expanduser("~")
    else:
        home = shell.SHGetFolderPath(0, shellcon.CSIDL_APPDATA, 0, 0)
    return os.path.join(home, *path)
ImportError
dataset/ETHPy150Open iancmcc/ouimeaux/ouimeaux/config.py/in_home
924
@defer.inlineCallbacks
def _downloadMicrodescriptorBlock(self, block, v2dirs):
    descs = set()
    for d in block:
        try:
            tmp = b64encode(d.decode('hex')).rstrip('=')
            descs.add(tmp)
        except __HOLE__:
            msg = "Malformed descriptor {}. Discarding.".format(d)
            logging.debug(msg)

    dircaches = list(v2dirs)
    for _ in xrange(len(dircaches)):
        dircache = random.choice(dircaches)
        url = _makeDescDownloadURL(dircache, descs)
        try:
            result = yield getPage(url, timeout=TIMEOUT)
            # descs set to leftover descriptors that weren't received
            descs = self._processMicrodescriptorBlockResult(result, descs)
            if len(descs) == 0:
                break
        except Exception:
            # if a download fails, try again at a different dircache
            dircaches.remove(dircache)

    if len(descs) != 0:
        msg = ("Tried all V2Dir caches and failed to download the "
               "descriptors with digests: {}".format(' '.join(descs)))
        logging.debug(msg)

    defer.returnValue(None)
TypeError
dataset/ETHPy150Open nskinkel/oppy/oppy/netstatus/microdescriptormanager.py/MicrodescriptorManager._downloadMicrodescriptorBlock
925
def _processMicrodescriptorBlockResult(self, result, requested):
    try:
        micro_descs = _decompressAndSplitResult(result)
    except __HOLE__:
        return requested

    processed = {}
    for m in micro_descs:
        hashed = b64encode(sha256(m).digest()).rstrip('=')
        # discard any descriptors we didn't request
        if hashed not in requested:
            continue
        try:
            desc = microdescriptor.Microdescriptor(m)
        except Exception:
            # discard unparseable descriptors (shouldn't happen)
            continue
        processed[desc.digest] = desc
        requested.remove(hashed)

    self._saveProcessedMicrodescriptors(processed)

    # return any requested descriptors that weren't received/processed
    return requested
ValueError
dataset/ETHPy150Open nskinkel/oppy/oppy/netstatus/microdescriptormanager.py/MicrodescriptorManager._processMicrodescriptorBlockResult
926
@cacheit
def _inverse_cdf_expression(self):
    """ Inverse of the CDF

    Used by sample
    """
    x, z = symbols('x, z', real=True, positive=True, cls=Dummy)
    # Invert CDF
    try:
        inverse_cdf = list(solveset(self.cdf(x) - z, x))
    except __HOLE__:
        inverse_cdf = None
    if not inverse_cdf or len(inverse_cdf) != 1:
        raise NotImplementedError("Could not invert CDF")
    return Lambda(z, inverse_cdf[0])
NotImplementedError
dataset/ETHPy150Open sympy/sympy/sympy/stats/drv.py/SingleDiscreteDistribution._inverse_cdf_expression
927
def shared(value, name=None, strict=False, allow_downcast=None, **kwargs):
    """Return a SharedVariable Variable, initialized with a copy or
    reference of `value`.

    This function iterates over constructor functions to find a
    suitable SharedVariable subclass. The suitable one is the first
    constructor that accept the given value. See the documentation of
    :func:`shared_constructor` for the definition of a contructor
    function.

    This function is meant as a convenient default. If you want to use a
    specific shared variable constructor, consider calling it directly.

    ``theano.shared`` is a shortcut to this function.

    .. attribute:: constructors

    A list of shared variable constructors that will be tried in reverse
    order.

    Notes
    -----
    By passing kwargs, you effectively limit the set of potential constructors
    to those that can accept those kwargs.

    Some shared variable have ``borrow`` as extra kwargs.
    `See <http://deeplearning.net/software/theano/tutorial/aliasing.html#borrowing-when-creating-shared-variables>`_ for details.

    Some shared variable have ``broadcastable`` as extra kwargs. As shared
    variable shapes can change, all dimensions default to not being
    broadcastable, even if ``value`` has a shape of 1 along some dimension.
    This parameter allows you to create for example a `row` or `column` 2d
    tensor.

    """
    try:
        if isinstance(value, Variable):
            raise TypeError("Shared variable constructor needs numeric "
                            "values and not symbolic variables.")

        for ctor in reversed(shared.constructors):
            try:
                var = ctor(value, name=name, strict=strict,
                           allow_downcast=allow_downcast, **kwargs)
                utils.add_tag_trace(var)
                return var
            except __HOLE__:
                continue
            # This may happen when kwargs were supplied
            # if kwargs were given, the generic_constructor won't be callable.
            #
            # This was done on purpose, the rationale being that if kwargs
            # were supplied, the user didn't want them to be ignored.

    except MemoryError as e:
        e.args = e.args + ('you might consider'
                           ' using \'theano.shared(..., borrow=True)\'',)
        raise

    raise TypeError('No suitable SharedVariable constructor could be found.'
                    ' Are you sure all kwargs are supported?'
                    ' We do not support the parameter dtype or type.'
                    ' value="%s". parameters="%s"' %
                    (value, kwargs))
TypeError
dataset/ETHPy150Open rizar/attention-lvcsr/libs/Theano/theano/compile/sharedvalue.py/shared
928
def test_load_save_incomplete(self):
    self.wk.data = json.loads(wheel_json)
    del self.wk.data['signers']
    self.wk.data['schema'] = self.wk.SCHEMA+1
    self.wk.save()
    try:
        self.wk.load()
    except __HOLE__:
        pass
    else:
        raise Exception("Expected ValueError")
    del self.wk.data['schema']
    self.wk.save()
    self.wk.load()
ValueError
dataset/ETHPy150Open chalasr/Flask-P2P/venv/lib/python2.7/site-packages/wheel/test/test_keys.py/TestWheelKeys.test_load_save_incomplete
929
def _read_to_buffer(self):
    """Reads from the socket and appends the result to the read buffer.

    Returns the number of bytes read.  Returns 0 if there is nothing
    to read (i.e. the read returns EWOULDBLOCK or equivalent).  On
    error closes the socket and raises an exception.
    """
    try:
        chunk = self.read_from_fd()
    except (socket.error, __HOLE__, OSError) as e:
        # ssl.SSLError is a subclass of socket.error
        if e.args[0] in _ERRNO_CONNRESET:
            # Treat ECONNRESET as a connection close rather than
            # an error to minimize log spam  (the exception will
            # be available on self.error for apps that care).
            self.close(exc_info=True)
            return
        self.close(exc_info=True)
        raise
    if chunk is None:
        return 0
    self._read_buffer.append(chunk)
    self._read_buffer_size += len(chunk)
    if self._read_buffer_size > self.max_buffer_size:
        gen_log.error("Reached maximum read buffer size")
        self.close()
        raise StreamBufferFullError("Reached maximum read buffer size")
    return len(chunk)
IOError
dataset/ETHPy150Open RobotWebTools/rosbridge_suite/rosbridge_server/src/tornado/iostream.py/BaseIOStream._read_to_buffer
930
def _handle_write(self):
    while self._write_buffer:
        try:
            if not self._write_buffer_frozen:
                # On windows, socket.send blows up if given a
                # write buffer that's too large, instead of just
                # returning the number of bytes it was able to
                # process.  Therefore we must not call socket.send
                # with more than 128KB at a time.
                _merge_prefix(self._write_buffer, 128 * 1024)
            num_bytes = self.write_to_fd(self._write_buffer[0])
            if num_bytes == 0:
                # With OpenSSL, if we couldn't write the entire buffer,
                # the very same string object must be used on the
                # next call to send.  Therefore we suppress
                # merging the write buffer after an incomplete send.
                # A cleaner solution would be to set
                # SSL_MODE_ACCEPT_MOVING_WRITE_BUFFER, but this is
                # not yet accessible from python
                # (http://bugs.python.org/issue8240)
                self._write_buffer_frozen = True
                break
            self._write_buffer_frozen = False
            _merge_prefix(self._write_buffer, num_bytes)
            self._write_buffer.popleft()
            self._write_buffer_size -= num_bytes
        except (socket.error, __HOLE__, OSError) as e:
            if e.args[0] in _ERRNO_WOULDBLOCK:
                self._write_buffer_frozen = True
                break
            else:
                if e.args[0] not in _ERRNO_CONNRESET:
                    # Broken pipe errors are usually caused by connection
                    # reset, and its better to not log EPIPE errors to
                    # minimize log spam
                    gen_log.warning("Write error on %s: %s",
                                    self.fileno(), e)
                self.close(exc_info=True)
                return
    if not self._write_buffer:
        if self._write_callback:
            callback = self._write_callback
            self._write_callback = None
            self._run_callback(callback)
        if self._write_future:
            future = self._write_future
            self._write_future = None
            future.set_result(None)
IOError
dataset/ETHPy150Open RobotWebTools/rosbridge_suite/rosbridge_server/src/tornado/iostream.py/BaseIOStream._handle_write
931
def _do_ssl_handshake(self):
    # Based on code from test_ssl.py in the python stdlib
    try:
        self._handshake_reading = False
        self._handshake_writing = False
        self.socket.do_handshake()
    except ssl.SSLError as err:
        if err.args[0] == ssl.SSL_ERROR_WANT_READ:
            self._handshake_reading = True
            return
        elif err.args[0] == ssl.SSL_ERROR_WANT_WRITE:
            self._handshake_writing = True
            return
        elif err.args[0] in (ssl.SSL_ERROR_EOF,
                             ssl.SSL_ERROR_ZERO_RETURN):
            return self.close(exc_info=True)
        elif err.args[0] == ssl.SSL_ERROR_SSL:
            try:
                peer = self.socket.getpeername()
            except Exception:
                peer = '(not connected)'
            gen_log.warning("SSL Error on %s %s: %s",
                            self.socket.fileno(), peer, err)
            return self.close(exc_info=True)
        raise
    except socket.error as err:
        if err.args[0] in _ERRNO_CONNRESET:
            return self.close(exc_info=True)
    except __HOLE__:
        # On Linux, if the connection was reset before the call to
        # wrap_socket, do_handshake will fail with an
        # AttributeError.
        return self.close(exc_info=True)
    else:
        self._ssl_accepting = False
        if not self._verify_cert(self.socket.getpeercert()):
            self.close()
            return
        if self._ssl_connect_callback is not None:
            callback = self._ssl_connect_callback
            self._ssl_connect_callback = None
            self._run_callback(callback)
AttributeError
dataset/ETHPy150Open RobotWebTools/rosbridge_suite/rosbridge_server/src/tornado/iostream.py/SSLIOStream._do_ssl_handshake
932
def read_from_fd(self):
    try:
        chunk = os.read(self.fd, self.read_chunk_size)
    except (IOError, __HOLE__) as e:
        if errno_from_exception(e) in _ERRNO_WOULDBLOCK:
            return None
        elif errno_from_exception(e) == errno.EBADF:
            # If the writing half of a pipe is closed, select will
            # report it as readable but reads will fail with EBADF.
            self.close(exc_info=True)
            return None
        else:
            raise
    if not chunk:
        self.close()
        return None
    return chunk
OSError
dataset/ETHPy150Open RobotWebTools/rosbridge_suite/rosbridge_server/src/tornado/iostream.py/PipeIOStream.read_from_fd
933
def serialize_cache_file(cache_file_path):
    """
    Given a cache file, open it and determine whether it's something we
    can upload. If it is, serialize it to JSON, then return the serialized
    JSON.

    :param str cache_file_path: The full path to a cache file to read
        and potentially serialize.
    :rtype: str or None
    :returns: If the cache file contains something we can upload, return
        a JSON string that will be uploaded to EMDR. Otherwise, return None.
    """
    # Catching missing files between passes, eve can delete cache file
    # before we can read it.
    try:
        fobj = open(cache_file_path, 'rb')
    except (IOError, __HOLE__):
        print "Cache file removed before we got to it"
        return None

    # Parse with either reverence or despair.
    try:
        key, obj = blue.marshal.Load(fobj.read())
    except marshal.UnmarshalError:
        # The file probably wasn't finished writing when EMDU tried to read it.
        return None

    if key[1] == 'GetOrders':
        json_str = serialize_orders(obj, key)
        fobj.close()
        return json_str
    if key[1] == 'GetOldPriceHistory':
        json_str = serialize_history(obj, key)
        fobj.close()
        return json_str

    # This isn't a cache file we can do anything with. Oh bother.
    return None
OSError
dataset/ETHPy150Open gtaylor/EVE-Market-Data-Uploader/emdu/cachefile_serializer.py/serialize_cache_file
934
def _repr_svg_(self):
    """SVG representation of a GeometryEntity suitable for IPython"""

    from sympy.core.evalf import N

    try:
        bounds = self.bounds
    except (NotImplementedError, TypeError):
        # if we have no SVG representation, return None so IPython
        # will fall back to the next representation
        return None

    svg_top = '''<svg xmlns="http://www.w3.org/2000/svg"
        xmlns:xlink="http://www.w3.org/1999/xlink"
        width="{1}" height="{2}" viewBox="{0}"
        preserveAspectRatio="xMinYMin meet">
        <defs>
            <marker id="markerCircle" markerWidth="8" markerHeight="8"
                refx="5" refy="5" markerUnits="strokeWidth">
                <circle cx="5" cy="5" r="1.5" style="stroke: none; fill:#000000;"/>
            </marker>
            <marker id="markerArrow" markerWidth="13" markerHeight="13" refx="2" refy="4"
                   orient="auto" markerUnits="strokeWidth">
                <path d="M2,2 L2,6 L6,4" style="fill: #000000;" />
            </marker>
            <marker id="markerReverseArrow" markerWidth="13" markerHeight="13" refx="6" refy="4"
                   orient="auto" markerUnits="strokeWidth">
                <path d="M6,2 L6,6 L2,4" style="fill: #000000;" />
            </marker>
        </defs>'''

    # Establish SVG canvas that will fit all the data + small space
    xmin, ymin, xmax, ymax = map(N, bounds)
    if xmin == xmax and ymin == ymax:
        # This is a point; buffer using an arbitrary size
        xmin, ymin, xmax, ymax = xmin - .5, ymin - .5, xmax + .5, ymax + .5
    else:
        # Expand bounds by a fraction of the data ranges
        expand = 0.1  # or 10%; this keeps arrowheads in view (R plots use 4%)
        widest_part = max([xmax - xmin, ymax - ymin])
        expand_amount = widest_part * expand
        xmin -= expand_amount
        ymin -= expand_amount
        xmax += expand_amount
        ymax += expand_amount
    dx = xmax - xmin
    dy = ymax - ymin
    width = min([max([100., dx]), 300])
    height = min([max([100., dy]), 300])

    scale_factor = 1. if max(width, height) == 0 else max(dx, dy) / max(width, height)
    try:
        svg = self._svg(scale_factor)
    except (NotImplementedError, __HOLE__):
        # if we have no SVG representation, return None so IPython
        # will fall back to the next representation
        return None

    view_box = "{0} {1} {2} {3}".format(xmin, ymin, dx, dy)
    transform = "matrix(1,0,0,-1,0,{0})".format(ymax + ymin)
    svg_top = svg_top.format(view_box, width, height)

    return svg_top + (
        '<g transform="{0}">{1}</g></svg>'
    ).format(transform, svg)
TypeError
dataset/ETHPy150Open sympy/sympy/sympy/geometry/entity.py/GeometryEntity._repr_svg_
935
def __cmp__(self, other):
    """Comparison of two GeometryEntities."""
    n1 = self.__class__.__name__
    n2 = other.__class__.__name__
    c = (n1 > n2) - (n1 < n2)
    if not c:
        return 0

    i1 = -1
    for cls in self.__class__.__mro__:
        try:
            i1 = ordering_of_classes.index(cls.__name__)
            break
        except __HOLE__:
            i1 = -1
    if i1 == -1:
        return c

    i2 = -1
    for cls in other.__class__.__mro__:
        try:
            i2 = ordering_of_classes.index(cls.__name__)
            break
        except ValueError:
            i2 = -1
    if i2 == -1:
        return c

    return (i1 > i2) - (i1 < i2)
ValueError
dataset/ETHPy150Open sympy/sympy/sympy/geometry/entity.py/GeometryEntity.__cmp__
936
def _intersect(self, o):
    """ Returns a sympy.sets.Set of intersection objects,
    if possible. """

    from sympy.sets import Set, FiniteSet, Union
    from sympy.geometry import Point

    try:
        inter = self.intersection(o)
    except __HOLE__:
        # sympy.sets.Set.reduce expects None if an object
        # doesn't know how to simplify
        return None

    # put the points in a FiniteSet
    points = FiniteSet(*[p for p in inter if isinstance(p, Point)])
    non_points = [p for p in inter if not isinstance(p, Point)]

    return Union(*(non_points + [points]))
NotImplementedError
dataset/ETHPy150Open sympy/sympy/sympy/geometry/entity.py/GeometrySet._intersect
937
def parse_message(self, payload, conn_id):
    if len(payload) < FRAME_FORMAT_MESSAGE_SIZE:
        raise SnakeMQBrokenMessage("message")

    try:
        ident = self._ident_by_conn[conn_id]
    except __HOLE__:
        raise SnakeMQNoIdent(conn_id)

    muuid, ttl, flags = struct.unpack(FRAME_FORMAT_MESSAGE,
                                      memstr(payload[:FRAME_FORMAT_MESSAGE_SIZE]))
    if ttl == INFINITE_TTL:
        ttl = None
    message = Message(data=memstr(payload[FRAME_FORMAT_MESSAGE_SIZE:]),
                      uuid=muuid, ttl=ttl, flags=flags)
    self.on_message_recv(conn_id, ident, message)

###########################################################
KeyError
dataset/ETHPy150Open dsiroky/snakemq/snakemq/messaging.py/Messaging.parse_message
938
def _on_packet_sent(self, conn_id, packet_id):
    try:
        msg_uuid = self._message_by_packet[packet_id]
    except __HOLE__:
        return
    ident = self._ident_by_conn[conn_id]
    self.on_message_sent(conn_id, ident, msg_uuid)

###########################################################
KeyError
dataset/ETHPy150Open dsiroky/snakemq/snakemq/messaging.py/Messaging._on_packet_sent
939
def delt(self, *args, **kargs):
    """delt(host|net, gw|dev)"""
    self.invalidate_cache()
    route = self.make_route(*args,**kargs)
    try:
        i=self.routes.index(route)
        del(self.routes[i])
    except __HOLE__:
        warning("no matching route found")
ValueError
dataset/ETHPy150Open phaethon/scapy/scapy/route.py/Route.delt
940
def start_api_and_rpc_workers(neutron_api):
    pool = eventlet.GreenPool()

    api_thread = pool.spawn(neutron_api.wait)

    try:
        neutron_rpc = service.serve_rpc()
    except __HOLE__:
        LOG.info(_LI("RPC was already started in parent process by "
                     "plugin."))
    else:
        rpc_thread = pool.spawn(neutron_rpc.wait)

        plugin_workers = service.start_plugin_workers()
        for worker in plugin_workers:
            pool.spawn(worker.wait)

        # api and rpc should die together. When one dies, kill the other.
        rpc_thread.link(lambda gt: api_thread.kill())
        api_thread.link(lambda gt: rpc_thread.kill())

    pool.waitall()
NotImplementedError
dataset/ETHPy150Open openstack/neutron/neutron/server/wsgi_eventlet.py/start_api_and_rpc_workers
941
def generic_decode_credentials(self, credentials, provider_data, target):
    # convenience function for simple creds (rhev-m and vmware currently)
    doc = libxml2.parseDoc(credentials)

    self.username = None
    _usernodes = doc.xpathEval("//provider_credentials/%s_credentials/username" % (target))
    if len(_usernodes) > 0:
        self.username = _usernodes[0].content
    else:
        try:
            self.username = provider_data['username']
        except KeyError:
            raise ImageFactoryException("No username specified in config file or in push call")
    self.provider_account_identifier = self.username

    _passnodes = doc.xpathEval("//provider_credentials/%s_credentials/password" % (target))
    if len(_passnodes) > 0:
        self.password = _passnodes[0].content
    else:
        try:
            self.password = provider_data['password']
        except __HOLE__:
            raise ImageFactoryException("No password specified in config file or in push call")

    doc.freeDoc()
KeyError
dataset/ETHPy150Open redhat-imaging/imagefactory/imagefactory_plugins/RHEVM/RHEVM.py/RHEVM.generic_decode_credentials
942
def get_dynamic_provider_data(self, provider):
    # Get provider details for RHEV-M or VSphere
    # First try to interpret this as an ad-hoc/dynamic provider def
    # If this fails, try to find it in one or the other of the config files
    # If this all fails return None
    # We use this in the builders as well so I have made it "public"

    try:
        xml_et = fromstring(provider)
        return xml_et.attrib
    except Exception as e:
        self.log.debug('Testing provider for XML: %s' % e)
        pass

    try:
        jload = json.loads(provider)
        return jload
    except __HOLE__ as e:
        self.log.debug('Testing provider for JSON: %s' % e)
        pass

    return None
ValueError
dataset/ETHPy150Open redhat-imaging/imagefactory/imagefactory_plugins/RHEVM/RHEVM.py/RHEVM.get_dynamic_provider_data
943
def commit(self, message, author, parents=None, branch=None, date=None,
           **kwargs):
    """
    Performs in-memory commit (doesn't check workdir in any way) and
    returns newly created ``Changeset``. Updates repository's
    ``revisions``.

    :param message: message of the commit
    :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
    :param parents: single parent or sequence of parents from which commit
      would be derieved
    :param date: ``datetime.datetime`` instance. Defaults to
      ``datetime.datetime.now()``.
    :param branch: branch name, as string. If none given, default backend's
      branch would be used.

    :raises ``CommitError``: if any error occurs while committing
    """
    self.check_integrity(parents)

    from .repository import GitRepository
    if branch is None:
        branch = GitRepository.DEFAULT_BRANCH_NAME

    repo = self.repository._repo
    object_store = repo.object_store

    ENCODING = "UTF-8"
    DIRMOD = 040000

    # Create tree and populates it with blobs
    commit_tree = self.parents[0] and repo[self.parents[0]._commit.tree] or\
        objects.Tree()
    for node in self.added + self.changed:
        # Compute subdirs if needed
        dirpath, nodename = posixpath.split(node.path)
        dirnames = dirpath and dirpath.split('/') or []
        parent = commit_tree
        ancestors = [('', parent)]

        # Tries to dig for the deepest existing tree
        while dirnames:
            curdir = dirnames.pop(0)
            try:
                dir_id = parent[curdir][1]
            except KeyError:
                # put curdir back into dirnames and stops
                dirnames.insert(0, curdir)
                break
            else:
                # If found, updates parent
                parent = self.repository._repo[dir_id]
                ancestors.append((curdir, parent))
        # Now parent is deepest existing tree and we need to create subtrees
        # for dirnames (in reverse order) [this only applies for nodes from added]
        new_trees = []

        if not node.is_binary:
            content = node.content.encode(ENCODING)
        else:
            content = node.content

        blob = objects.Blob.from_string(content)

        node_path = node.name.encode(ENCODING)
        if dirnames:
            # If there are trees which should be created we need to build
            # them now (in reverse order)
            reversed_dirnames = list(reversed(dirnames))
            curtree = objects.Tree()
            curtree[node_path] = node.mode, blob.id
            new_trees.append(curtree)
            for dirname in reversed_dirnames[:-1]:
                newtree = objects.Tree()
                #newtree.add(DIRMOD, dirname, curtree.id)
                newtree[dirname] = DIRMOD, curtree.id
                new_trees.append(newtree)
                curtree = newtree
            parent[reversed_dirnames[-1]] = DIRMOD, curtree.id
        else:
            parent.add(name=node_path, mode=node.mode, hexsha=blob.id)

        new_trees.append(parent)
        # Update ancestors
        for parent, tree, path in reversed([(a[1], b[1], b[0]) for a, b in
                                            zip(ancestors, ancestors[1:])]):
            parent[path] = DIRMOD, tree.id
            object_store.add_object(tree)

        object_store.add_object(blob)
        for tree in new_trees:
            object_store.add_object(tree)
    for node in self.removed:
        paths = node.path.split('/')
        tree = commit_tree
        trees = [tree]
        # Traverse deep into the forest...
        for path in paths:
            try:
                obj = self.repository._repo[tree[path][1]]
                if isinstance(obj, objects.Tree):
                    trees.append(obj)
                    tree = obj
            except __HOLE__:
                break
        # Cut down the blob and all rotten trees on the way back...
        for path, tree in reversed(zip(paths, trees)):
            del tree[path]
            if tree:
                # This tree still has elements - don't remove it or any
                # of it's parents
                break

    object_store.add_object(commit_tree)

    # Create commit
    commit = objects.Commit()
    commit.tree = commit_tree.id
    commit.parents = [p._commit.id for p in self.parents if p]
    commit.author = commit.committer = safe_str(author)
    commit.encoding = ENCODING
    commit.message = safe_str(message)

    # Compute date
    if date is None:
        date = time.time()
    elif isinstance(date, datetime.datetime):
        date = time.mktime(date.timetuple())

    author_time = kwargs.pop('author_time', date)
    commit.commit_time = int(date)
    commit.author_time = int(author_time)
    tz = time.timezone
    author_tz = kwargs.pop('author_timezone', tz)
    commit.commit_timezone = tz
    commit.author_timezone = author_tz

    object_store.add_object(commit)

    ref = 'refs/heads/%s' % branch
    repo.refs[ref] = commit.id

    # Update vcs repository object & recreate dulwich repo
    self.repository.revisions.append(commit.id)
    # invalidate parsed refs after commit
    self.repository._parsed_refs = self.repository._get_parsed_refs()
    tip = self.repository.get_changeset()
    self.reset()
    return tip
KeyError
dataset/ETHPy150Open codeinn/vcs/vcs/backends/git/inmemory.py/GitInMemoryChangeset.commit
944
def _is_file_obj(topic):
    try:
        return isinstance(topic, types.FileType)
    except __HOLE__:  # pragma: no cover
        # FIXME: add comment...
        #        what is this for?
        return isinstance(topic, io.IOBase)

#-------------------------------------------------------------------------------------------------
# Assertions
#-------------------------------------------------------------------------------------------------
AttributeError
dataset/ETHPy150Open heynemann/preggy/preggy/assertions/types/file.py/_is_file_obj
945
@assertion
def not_to_be_a_file(topic):
    '''Asserts that `topic` is NOT a file.

    If `topic` is a string, this asserts that `os.path.isfile()` returns
    `False`.

    Otherwise, this asserts whether `topic` is NOT an instance of the
    built-in `file` type.
    '''
    try:
        to_be_a_file(topic)
    except __HOLE__:
        return True
    msg = 'Expected topic({0}) not to be a file, but it was'.format(topic)
    raise AssertionError(msg)
AssertionError
dataset/ETHPy150Open heynemann/preggy/preggy/assertions/types/file.py/not_to_be_a_file
946
def warmup(request):
    """
    Provides default procedure for handling warmup requests on App
    Engine. Just add this view to your main urls.py.
    """
    for app in settings.INSTALLED_APPS:
        for name in ('urls', 'views', 'models'):
            try:
                import_module('%s.%s' % (app, name))
            except __HOLE__:
                pass
    return HttpResponse("Ok.")
ImportError
dataset/ETHPy150Open potatolondon/djangae/djangae/views.py/warmup
947
def init_plugins(plugindir, plugins_to_load=None):
    if plugindir and not os.path.isdir(plugindir):
        raise InvalidPluginDir(plugindir)

    if not plugindir:
        plugindir = DIR("plugins")

    logger.debug("plugindir: {0}".format(plugindir))

    if os.path.isdir(plugindir):
        pluginfiles = glob(os.path.join(plugindir, "[!_]*.py"))
        plugins = strip_extension(os.path.basename(p) for p in pluginfiles)
    else:
        # we might be in an egg; try to get the files that way
        logger.debug("trying pkg_resources")
        import pkg_resources
        try:
            plugins = strip_extension(
                pkg_resources.resource_listdir(__name__, "plugins"))
        except __HOLE__:
            raise InvalidPluginDir(plugindir)

    hooks = {}

    oldpath = copy.deepcopy(sys.path)
    sys.path.insert(0, plugindir)

    for plugin in plugins:
        if plugins_to_load and plugin not in plugins_to_load:
            logger.debug("skipping plugin {0}, not in plugins_to_load {1}".format(plugin, plugins_to_load))
            continue

        logger.debug("plugin: {0}".format(plugin))
        try:
            mod = importlib.import_module(plugin)
            modname = mod.__name__
            for hook in re.findall(r"\bon_(\w+)", " ".join(dir(mod))):
                hookfun = getattr(mod, "on_" + hook)
                logger.debug("plugin: attaching %s hook for %s", hook, modname)
                hooks.setdefault(hook, []).append(hookfun)

            if mod.__doc__:
                firstline = mod.__doc__.split('\n')[0]
                hooks.setdefault('help', {})[modname] = firstline
                hooks.setdefault('extendedhelp', {})[modname] = mod.__doc__

        # bare except, because the modules could raise any number of errors
        # on import, and we want them not to kill our server
        except:
            logger.warning("import failed on module {0}, module not loaded".format(plugin))
            logger.warning("{0}".format(sys.exc_info()[0]))
            logger.warning("{0}".format(traceback.format_exc()))

    sys.path = oldpath

    return hooks
OSError
dataset/ETHPy150Open llimllib/limbo/limbo/limbo.py/init_plugins
948
def handle_bot_message(event, server):
    try:
        bot = server.slack.server.bots[event["bot_id"]]
    except __HOLE__:
        logger.debug("bot_message event {0} has no bot".format(event))
        return

    return "\n".join(run_hook(server.hooks, "bot_message", event, server))
KeyError
dataset/ETHPy150Open llimllib/limbo/limbo/limbo.py/handle_bot_message
949
def handle_message(event, server):
    subtype = event.get("subtype", "")
    if subtype == "message_changed":
        return

    if subtype == "bot_message":
        return handle_bot_message(event, server)

    try:
        msguser = server.slack.server.users[event["user"]]
    except __HOLE__:
        logger.debug("event {0} has no user".format(event))
        return

    return "\n".join(run_hook(server.hooks, "message", event, server))
KeyError
dataset/ETHPy150Open llimllib/limbo/limbo/limbo.py/handle_message
950
def loop(server, test_loop=None):
    """Run the main loop

    server is a limbo Server object
    test_loop, if present, is a number of times to run the loop
    """
    try:
        loops_without_activity = 0
        while test_loop is None or test_loop > 0:
            start = time.time()
            loops_without_activity += 1

            events = server.slack.rtm_read()
            for event in events:
                loops_without_activity = 0

                logger.debug("got {0}".format(event.get("type", event)))
                response = handle_event(event, server)
                while response:
                    # The Slack API documentation says:
                    #
                    #   Clients should limit messages sent to channels to 4000
                    #   characters, which will always be under 16k bytes even
                    #   with a message comprised solely of non-BMP Unicode
                    #   characters at 4 bytes each.
                    #
                    # but empirical testing shows that I'm getting disconnected
                    # at 4000 characters and even quite a bit lower. Use 1000
                    # to be safe
                    server.slack.rtm_send_message(event["channel"], response[:1000])
                    response = response[1000:]

            # Run the loop hook. This doesn't send messages it receives,
            # because it doesn't know where to send them. Use
            # server.slack.post_message to send messages from a loop hook
            run_hook(server.hooks, "loop", server)

            # The Slack RTM API docs say:
            #
            # > When there is no other activity clients should send a ping
            # > every few seconds
            #
            # So, if we've gone >5 seconds without any activity, send a ping.
            # If the connection has broken, this will reveal it so slack can
            # quit
            if loops_without_activity > 5:
                server.slack.server.ping()
                loops_without_activity = 0

            end = time.time()
            runtime = start - end
            time.sleep(max(1-runtime, 0))

            if test_loop:
                test_loop -= 1
    except __HOLE__:
        if os.environ.get("LIMBO_DEBUG"):
            import ipdb; ipdb.set_trace()
        raise
KeyboardInterrupt
dataset/ETHPy150Open llimllib/limbo/limbo/limbo.py/loop
951
def init_server(args, config, Server=LimboServer, Client=SlackClient):
    init_log(config)
    logger.debug("config: {0}".format(config))
    db = init_db(args.database_name)
    config_plugins = config.get("plugins")
    plugins_to_load = config_plugins.split(",") if config_plugins else []
    hooks = init_plugins(args.pluginpath, plugins_to_load)
    try:
        slack = Client(config["token"])
    except __HOLE__:
        logger.error("""Unable to find a slack token. The environment variables
limbo sees are:
{0}
and the current config is:
{1}

Try setting your bot's slack token with:

export SLACK_TOKEN=<your-slack-bot-token>
""".format(relevant_environ(), config))
        raise
    server = Server(slack, config, hooks, db)
    return server

# decode a string. if str is a python 3 string, do nothing.
KeyError
dataset/ETHPy150Open llimllib/limbo/limbo/limbo.py/init_server
952
def repl(server, args):
    try:
        while 1:
            cmd = decode(input("limbo> "))
            if cmd.lower() == "quit" or cmd.lower() == "exit":
                return

            print(run_cmd(cmd, server, args.hook, args.pluginpath, None))
    except (EOFError, __HOLE__):
        print()
        pass
KeyboardInterrupt
dataset/ETHPy150Open llimllib/limbo/limbo/limbo.py/repl
953
def get_required_setting(setting, value_re, invalid_msg):
    """
    Return a constant from ``django.conf.settings``.  The `setting`
    argument is the constant name, the `value_re` argument is a regular
    expression used to validate the setting value and the `invalid_msg`
    argument is used as exception message if the value is not valid.
    """
    try:
        value = getattr(settings, setting)
    except __HOLE__:
        raise AnalyticalException("%s setting: not found" % setting)
    if value is None:
        raise AnalyticalException("%s setting is set to None" % setting)
    value = str(value)
    if not value_re.search(value):
        raise AnalyticalException("%s setting: %s: '%s'"
                                  % (setting, invalid_msg, value))
    return value
AttributeError
dataset/ETHPy150Open jcassee/django-analytical/analytical/utils.py/get_required_setting
954
def get_user_from_context(context):
    """
    Get the user instance from the template context, if possible.

    If the context does not contain a `request` or `user` attribute,
    `None` is returned.
    """
    try:
        return context['user']
    except KeyError:
        pass
    try:
        request = context['request']
        return request.user
    except (KeyError, __HOLE__):
        pass
    return None
AttributeError
dataset/ETHPy150Open jcassee/django-analytical/analytical/utils.py/get_user_from_context
955
def get_identity(context, prefix=None, identity_func=None, user=None):
    """
    Get the identity of a logged in user from a template context.

    The `prefix` argument is used to provide different identities to
    different analytics services.  The `identity_func` argument is a
    function that returns the identity of the user; by default the
    identity is the username.
    """
    if prefix is not None:
        try:
            return context['%s_identity' % prefix]
        except KeyError:
            pass
    try:
        return context['analytical_identity']
    except __HOLE__:
        pass
    if getattr(settings, 'ANALYTICAL_AUTO_IDENTIFY', True):
        try:
            if user is None:
                user = get_user_from_context(context)
            if user.is_authenticated():
                if identity_func is not None:
                    return identity_func(user)
                else:
                    try:
                        return user.get_username()
                    except AttributeError:
                        # Django < 1.5 fallback
                        return user.username
        except (KeyError, AttributeError):
            pass
    return None
KeyError
dataset/ETHPy150Open jcassee/django-analytical/analytical/utils.py/get_identity
956
def is_internal_ip(context, prefix=None):
    """
    Return whether the visitor is coming from an internal IP address,
    based on information from the template context.

    The prefix is used to allow different analytics services to have
    different notions of internal addresses.
    """
    try:
        request = context['request']
        remote_ip = request.META.get('HTTP_X_FORWARDED_FOR', '')
        if not remote_ip:
            remote_ip = request.META.get('REMOTE_ADDR', '')
        if not remote_ip:
            return False

        internal_ips = None
        if prefix is not None:
            internal_ips = getattr(settings, '%s_INTERNAL_IPS' % prefix, None)
        if internal_ips is None:
            internal_ips = getattr(settings, 'ANALYTICAL_INTERNAL_IPS', None)
        if internal_ips is None:
            internal_ips = getattr(settings, 'INTERNAL_IPS', None)

        return remote_ip in (internal_ips or [])
    except (__HOLE__, AttributeError):
        return False
KeyError
dataset/ETHPy150Open jcassee/django-analytical/analytical/utils.py/is_internal_ip
957
def currentSpeed(self, i3s_output_list, i3s_config):
    # parse some configuration parameters
    if not isinstance(self.interfaces, list):
        self.interfaces = self.interfaces.split(',')
    if not isinstance(self.interfaces_blacklist, list):
        self.interfaces_blacklist = self.interfaces_blacklist.split(',')

    # == 6 characters (from MULTIPLIER_TOP + dot + self.precision)
    if self.precision > 0:
        self.left_align = len(str(MULTIPLIER_TOP)) + 1 + self.precision
    else:
        self.left_align = len(str(MULTIPLIER_TOP))
    self.value_format = "{value:%s.%sf} {unit}" % (self.left_align, self.precision)

    ns = self._get_stat()
    deltas = {}
    try:
        # time from previous check
        timedelta = time() - self.last_time

        # calculate deltas for all interfaces
        for old, new in zip(self.last_stat, ns):
            down = int(new[1]) - int(old[1])
            up = int(new[9]) - int(old[9])

            down /= timedelta * INITIAL_MULTI
            up /= timedelta * INITIAL_MULTI

            deltas[new[0]] = {'total': up+down, 'up': up, 'down': down, }

        # update last_ info
        self.last_stat = self._get_stat()
        self.last_time = time()

        # get the interface with max rate
        interface = max(deltas, key=lambda x: deltas[x]['total'])

        # if there is no rate - show last active interface, or hide
        if deltas[interface]['total'] == 0:
            interface = self.last_interface
            hide = self.hide_if_zero
        # if there is - update last_interface
        else:
            self.last_interface = interface
            hide = False

        # get the deltas into variable
        delta = deltas[interface] if interface else None

    except __HOLE__:
        delta = None
        interface = None
        hide = self.hide_if_zero

    return {
        'cached_until': time() + self.cache_timeout,
        'full_text': "" if hide else
        self.format.format(
            total=self._divide_and_format(delta['total']),
            up=self._divide_and_format(delta['up']),
            down=self._divide_and_format(delta['down']),
            interface=interface[:-1],
        ) if interface else self.format_no_connection
    }
TypeError
dataset/ETHPy150Open ultrabug/py3status/py3status/modules/net_rate.py/Py3status.currentSpeed
958
def _get_stat(self):
    """
    Get statistics from devfile in list of lists of words
    """
    def dev_filter(x):
        # get first word and remove trailing interface number
        x = x.strip().split(" ")[0][:-1]

        if x in self.interfaces_blacklist:
            return False

        if self.all_interfaces:
            return True

        if x in self.interfaces:
            return True

        return False

    # read devfile, skip two header files
    x = filter(dev_filter, open(self.devfile).readlines()[2:])

    try:
        # split info into words, filter empty ones
        return [list(filter(lambda x: x, _x.split(" "))) for _x in x]

    except __HOLE__:
        return None
StopIteration
dataset/ETHPy150Open ultrabug/py3status/py3status/modules/net_rate.py/Py3status._get_stat
959
def add(self, *args, **kwargs):
    try:
        return self[0].new(*args, **kwargs)
    except __HOLE__:
        o = self._obj()
        o._h = self._h

        return o.new(*args, **kwargs)
IndexError
dataset/ETHPy150Open kennethreitz-archive/python-github3/github3/structures.py/KeyedListResource.add
960
def create_connection(self):
    attempts = 0
    while True:
        attempts += 1
        try:
            self.connection = Connection('127.0.0.1', 'guest', 'guest')
            break
        except amqpstorm.AMQPError as why:
            LOGGER.exception(why)
            if self.max_retries and attempts > self.max_retries:
                break
            time.sleep(min(attempts * 2, 30))
        except __HOLE__:
            break
KeyboardInterrupt
dataset/ETHPy150Open eandersson/amqpstorm/examples/robust_consumer.py/Consumer.create_connection
961
def start(self):
    if not self.connection:
        self.create_connection()
    while True:
        try:
            channel = self.connection.channel()
            channel.queue.declare('simple_queue')
            channel.basic.consume(self, 'simple_queue', no_ack=False)
            channel.start_consuming(to_tuple=False)
            if not channel.consumer_tags:
                channel.close()
        except amqpstorm.AMQPError as why:
            LOGGER.exception(why)
            self.create_connection()
        except __HOLE__:
            self.connection.close()
            break
KeyboardInterrupt
dataset/ETHPy150Open eandersson/amqpstorm/examples/robust_consumer.py/Consumer.start
962
def setup_form_view(view, request, form, *args, **kwargs):
    """Mimic as_view and with forms to skip some of the context"""
    view.request = request
    try:
        view.request.user = request.user
    except __HOLE__:
        view.request.user = UserFactory()
    view.args = args
    view.kwargs = kwargs
    view.form = form
    return view
AttributeError
dataset/ETHPy150Open CenterForOpenScience/osf.io/admin_tests/utilities.py/setup_form_view
963
def setup_log_view(view, request, *args, **kwargs):
    view.request = request
    try:
        view.request.user = request.user
    except __HOLE__:
        view.request.user = UserFactory()
    view.args = args
    view.kwargs = kwargs
    return view
AttributeError
dataset/ETHPy150Open CenterForOpenScience/osf.io/admin_tests/utilities.py/setup_log_view
964
def mkdir(path):
    """ Creates a dir with the given path.

    Args:
        path: A str, the name of the dir to create.
    Returns:
        True on success, False otherwise.
    """
    try:
        os.mkdir(path)
    except __HOLE__:
        logging.error("OSError while creating dir '{0}'".format(path))
        return False
    return True
OSError
dataset/ETHPy150Open AppScale/appscale/AppDB/backup/backup_recovery_helper.py/mkdir
965
def makedirs(path):
    """ Creates a dir with the given path and all directories in between.

    Args:
        path: A str, the name of the dir to create.
    Returns:
        True on success, False otherwise.
    """
    try:
        os.makedirs(path)
    except __HOLE__:
        logging.error("OSError while creating dir '{0}'".format(path))
        return False
    return True
OSError
dataset/ETHPy150Open AppScale/appscale/AppDB/backup/backup_recovery_helper.py/makedirs
966
def rename(source, destination):
    """ Renames source file into destination.

    Args:
        source: A str, the path of the file to rename.
        destination: A str, the destination path.
    Returns:
        True on success, False otherwise.
    """
    try:
        os.rename(source, destination)
    except __HOLE__:
        logging.error("OSError while renaming '{0}' to '{1}'".
            format(source, destination))
        return False
    return True
OSError
dataset/ETHPy150Open AppScale/appscale/AppDB/backup/backup_recovery_helper.py/rename
967
def remove(path):
    """ Deletes the given file from the filesystem.

    Args:
        path: A str, the path of the file to delete.
    Returns:
        True on success, False otherwise.
    """
    try:
        os.remove(path)
    except __HOLE__:
        logging.error("OSError while deleting '{0}'".
            format(path))
        return False
    return True
OSError
dataset/ETHPy150Open AppScale/appscale/AppDB/backup/backup_recovery_helper.py/remove
968
def handle(self):
    with self._lock:
        self.worker.nb_connections +=1
        self.worker.refresh_name()

    try:
        while not self.connected:
            data = self.sock.recv(1024)
            if not data:
                break
            self.buf.append(data)
            if self.remote is None:
                try:
                    self.do_proxy()
                except StopIteration:
                    break
    except ConnectionError, e:
        log.error("Error while connecting: [%s]" % str(e))
        self.handle_error(e)
    except InactivityTimeout, e:
        log.warn("inactivity timeout")
        self.handle_error(e)
    except socket.error, e:
        log.error("socket.error: [%s]" % str(e))
        self.handle_error(e)
    except greenlet.GreenletExit:
        pass
    except __HOLE__:
        pass
    except Exception, e:
        log.error("unknown error %s" % str(e))
    finally:
        if self.remote is not None:
            log.debug("Close connection to %s:%s" % self.remote)

        with self._lock:
            self.worker.nb_connections -=1
            self.worker.refresh_name()

        _closesocket(self.sock)
KeyboardInterrupt
dataset/ETHPy150Open benoitc/tproxy/tproxy/client.py/ClientConnection.handle
969
def send_data(self, sock, data):
    if hasattr(data, 'read'):
        try:
            data.seek(0)
        except (__HOLE__, IOError):
            pass
        while True:
            chunk = data.readline()
            if not chunk:
                break
            sock.sendall(chunk)
    elif isinstance(data, basestring):
        sock.sendall(data)
    else:
        for chunk in data:
            sock.sendall(chunk)
ValueError
dataset/ETHPy150Open benoitc/tproxy/tproxy/client.py/ClientConnection.send_data
970
def test_create_file(self):
    """
    Test the creation of a simple XlsxWriter file with an autofilter.
    This test is the base comparison. It has data but no autofilter.
    """
    workbook = Workbook(self.got_filename)
    worksheet = workbook.add_worksheet()

    # Open a text file with autofilter example data.
    textfile = open(self.txt_filename)

    # Start writing data from the first worksheet row.
    row = 0

    # Read the text file and write it to the worksheet.
    for line in textfile:
        # Split the input data based on whitespace.
        data = line.strip("\n").split()

        # Convert the number data from the text file.
        for i, item in enumerate(data):
            try:
                data[i] = float(item)
            except __HOLE__:
                pass

        for col in range(len(data)):
            worksheet.write(row, col, data[col])

        # Move on to the next worksheet row.
        row += 1

    textfile.close()
    workbook.close()

    self.assertExcelEqual()
ValueError
dataset/ETHPy150Open jmcnamara/XlsxWriter/xlsxwriter/test/comparison/test_autofilter00.py/TestCompareXLSXFiles.test_create_file
971
def test_screen(self):
    widget = self.create()
    self.assertEqual(widget['screen'], '')
    try:
        display = os.environ['DISPLAY']
    except __HOLE__:
        self.skipTest('No $DISPLAY set.')
    self.checkInvalidParam(widget, 'screen', display,
        errmsg="can't modify -screen option after widget is created")
    widget2 = self.create(screen=display)
    self.assertEqual(widget2['screen'], display)
KeyError
dataset/ETHPy150Open francelabs/datafari/windows/python/Lib/lib-tk/test/test_tkinter/test_widgets.py/ToplevelTest.test_screen
972
def __iter__(self):
    queryset = self.queryset
    db = queryset.db
    compiler = queryset.query.get_compiler(using=db)
    # Execute the query. This will also fill compiler.select, klass_info,
    # and annotations.
    results = compiler.execute_sql()
    select, klass_info, annotation_col_map = (compiler.select, compiler.klass_info,
                                              compiler.annotation_col_map)
    if klass_info is None:
        return
    model_cls = klass_info['model']
    select_fields = klass_info['select_fields']
    model_fields_start, model_fields_end = select_fields[0], select_fields[-1] + 1
    init_list = [f[0].target.attname
                 for f in select[model_fields_start:model_fields_end]]
    if len(init_list) != len(model_cls._meta.concrete_fields):
        init_set = set(init_list)
        skip = [f.attname for f in model_cls._meta.concrete_fields
                if f.attname not in init_set]
        model_cls = deferred_class_factory(model_cls, skip)
    related_populators = get_related_populators(klass_info, select, db)
    for row in compiler.results_iter(results):
        obj = model_cls.from_db(db, init_list, row[model_fields_start:model_fields_end])
        if related_populators:
            for rel_populator in related_populators:
                rel_populator.populate(row, obj)
        if annotation_col_map:
            for attr_name, col_pos in annotation_col_map.items():
                setattr(obj, attr_name, row[col_pos])

        # Add the known related objects to the model, if there are any
        if queryset._known_related_objects:
            for field, rel_objs in queryset._known_related_objects.items():
                # Avoid overwriting objects loaded e.g. by select_related
                if hasattr(obj, field.get_cache_name()):
                    continue
                pk = getattr(obj, field.get_attname())
                try:
                    rel_obj = rel_objs[pk]
                except __HOLE__:
                    pass  # may happen in qs1 | qs2 scenarios
                else:
                    setattr(obj, field.name, rel_obj)

        yield obj
KeyError
dataset/ETHPy150Open django/django/django/db/models/query.py/ModelIterable.__iter__
973
def aggregate(self, *args, **kwargs):
    """
    Returns a dictionary containing the calculations (aggregation)
    over the current queryset

    If args is present the expression is passed as a kwarg using
    the Aggregate object's default alias.
    """
    if self.query.distinct_fields:
        raise NotImplementedError("aggregate() + distinct(fields) not implemented.")
    for arg in args:
        # The default_alias property may raise a TypeError, so we use
        # a try/except construct rather than hasattr in order to remain
        # consistent between PY2 and PY3 (hasattr would swallow
        # the TypeError on PY2).
        try:
            arg.default_alias
        except (__HOLE__, TypeError):
            raise TypeError("Complex aggregates require an alias")
        kwargs[arg.default_alias] = arg

    query = self.query.clone()
    for (alias, aggregate_expr) in kwargs.items():
        query.add_annotation(aggregate_expr, alias, is_summary=True)
        if not query.annotations[alias].contains_aggregate:
            raise TypeError("%s is not an aggregate expression" % alias)
    return query.get_aggregation(self.db, kwargs.keys())
AttributeError
dataset/ETHPy150Open django/django/django/db/models/query.py/QuerySet.aggregate
974
def annotate(self, *args, **kwargs):
    """
    Return a query set in which the returned objects have been annotated
    with extra data or aggregations.
    """
    annotations = OrderedDict()  # To preserve ordering of args
    for arg in args:
        # The default_alias property may raise a TypeError, so we use
        # a try/except construct rather than hasattr in order to remain
        # consistent between PY2 and PY3 (hasattr would swallow
        # the TypeError on PY2).
        try:
            if arg.default_alias in kwargs:
                raise ValueError("The named annotation '%s' conflicts with the "
                                 "default name for another annotation."
                                 % arg.default_alias)
        except (__HOLE__, TypeError):
            raise TypeError("Complex annotations require an alias")
        annotations[arg.default_alias] = arg
    annotations.update(kwargs)

    clone = self._clone()
    names = self._fields
    if names is None:
        names = {f.name for f in self.model._meta.get_fields()}

    for alias, annotation in annotations.items():
        if alias in names:
            raise ValueError("The annotation '%s' conflicts with a field on "
                             "the model." % alias)
        clone.query.add_annotation(annotation, alias, is_summary=False)

    for alias, annotation in clone.query.annotations.items():
        if alias in annotations and annotation.contains_aggregate:
            if clone._fields is None:
                clone.query.group_by = True
            else:
                clone.query.set_group_by()
            break

    return clone
AttributeError
dataset/ETHPy150Open django/django/django/db/models/query.py/QuerySet.annotate
975
@property
def columns(self):
    """
    A list of model field names in the order they'll appear in the
    query results.
    """
    if not hasattr(self, '_columns'):
        self._columns = self.query.get_columns()

        # Adjust any column names which don't match field names
        for (query_name, model_name) in self.translations.items():
            try:
                index = self._columns.index(query_name)
                self._columns[index] = model_name
            except __HOLE__:
                # Ignore translations for non-existent column names
                pass

    return self._columns
ValueError
dataset/ETHPy150Open django/django/django/db/models/query.py/RawQuerySet.columns
976
def prefetch_related_objects(model_instances, *related_lookups):
    """
    Populate prefetched object caches for a list of model instances based on
    the lookups/Prefetch instances given.
    """
    if len(model_instances) == 0:
        return  # nothing to do

    related_lookups = normalize_prefetch_lookups(related_lookups)

    # We need to be able to dynamically add to the list of prefetch_related
    # lookups that we look up (see below). So we need some book keeping to
    # ensure we don't do duplicate work.
    done_queries = {}  # dictionary of things like 'foo__bar': [results]

    auto_lookups = set()  # we add to this as we go through.
    followed_descriptors = set()  # recursion protection

    all_lookups = deque(related_lookups)
    while all_lookups:
        lookup = all_lookups.popleft()
        if lookup.prefetch_to in done_queries:
            if lookup.queryset:
                raise ValueError("'%s' lookup was already seen with a different queryset. "
                                 "You may need to adjust the ordering of your lookups."
                                 % lookup.prefetch_to)
            continue

        # Top level, the list of objects to decorate is the result cache
        # from the primary QuerySet. It won't be for deeper levels.
        obj_list = model_instances

        through_attrs = lookup.prefetch_through.split(LOOKUP_SEP)
        for level, through_attr in enumerate(through_attrs):
            # Prepare main instances
            if len(obj_list) == 0:
                break

            prefetch_to = lookup.get_current_prefetch_to(level)
            if prefetch_to in done_queries:
                # Skip any prefetching, and any object preparation
                obj_list = done_queries[prefetch_to]
                continue

            # Prepare objects:
            good_objects = True
            for obj in obj_list:
                # Since prefetching can re-use instances, it is possible to have
                # the same instance multiple times in obj_list, so obj might
                # already be prepared.
                if not hasattr(obj, '_prefetched_objects_cache'):
                    try:
                        obj._prefetched_objects_cache = {}
                    except (AttributeError, __HOLE__):
                        # Must be an immutable object from
                        # values_list(flat=True), for example (TypeError) or
                        # a QuerySet subclass that isn't returning Model
                        # instances (AttributeError), either in Django or a 3rd
                        # party. prefetch_related() doesn't make sense, so quit.
                        good_objects = False
                        break
            if not good_objects:
                break

            # Descend down tree

            # We assume that objects retrieved are homogeneous (which is the premise
            # of prefetch_related), so what applies to first object applies to all.
            first_obj = obj_list[0]
            prefetcher, descriptor, attr_found, is_fetched = get_prefetcher(first_obj, through_attr)

            if not attr_found:
                raise AttributeError("Cannot find '%s' on %s object, '%s' is an invalid "
                                     "parameter to prefetch_related()" %
                                     (through_attr, first_obj.__class__.__name__,
                                      lookup.prefetch_through))

            if level == len(through_attrs) - 1 and prefetcher is None:
                # Last one, this *must* resolve to something that supports
                # prefetching, otherwise there is no point adding it and the
                # developer asking for it has made a mistake.
                raise ValueError("'%s' does not resolve to an item that supports "
                                 "prefetching - this is an invalid parameter to "
                                 "prefetch_related()." % lookup.prefetch_through)

            if prefetcher is not None and not is_fetched:
                obj_list, additional_lookups = prefetch_one_level(obj_list, prefetcher, lookup, level)
                # We need to ensure we don't keep adding lookups from the
                # same relationships to stop infinite recursion. So, if we
                # are already on an automatically added lookup, don't add
                # the new lookups from relationships we've seen already.
                if not (lookup in auto_lookups and descriptor in followed_descriptors):
                    done_queries[prefetch_to] = obj_list
                    new_lookups = normalize_prefetch_lookups(additional_lookups, prefetch_to)
                    auto_lookups.update(new_lookups)
                    all_lookups.extendleft(new_lookups)
                followed_descriptors.add(descriptor)
            else:
                # Either a singly related object that has already been fetched
                # (e.g. via select_related), or hopefully some other property
                # that doesn't support prefetching but needs to be traversed.

                # We replace the current list of parent objects with the list
                # of related objects, filtering out empty or missing values so
                # that we can continue with nullable or reverse relations.
                new_obj_list = []
                for obj in obj_list:
                    try:
                        new_obj = getattr(obj, through_attr)
                    except exceptions.ObjectDoesNotExist:
                        continue
                    if new_obj is None:
                        continue
                    # We special-case `list` rather than something more generic
                    # like `Iterable` because we don't want to accidentally match
                    # user models that define __iter__.
                    if isinstance(new_obj, list):
                        new_obj_list.extend(new_obj)
                    else:
                        new_obj_list.append(new_obj)
                obj_list = new_obj_list
TypeError
dataset/ETHPy150Open django/django/django/db/models/query.py/prefetch_related_objects
977
def prefetch_one_level(instances, prefetcher, lookup, level):
    """
    Helper function for prefetch_related_objects

    Runs prefetches on all instances using the prefetcher object,
    assigning results to relevant caches in instance.

    The prefetched objects are returned, along with any additional
    prefetches that must be done due to prefetch_related lookups
    found from default managers.
    """
    # prefetcher must have a method get_prefetch_queryset() which takes a list
    # of instances, and returns a tuple:

    # (queryset of instances of self.model that are related to passed in instances,
    #  callable that gets value to be matched for returned instances,
    #  callable that gets value to be matched for passed in instances,
    #  boolean that is True for singly related objects,
    #  cache name to assign to).

    # The 'values to be matched' must be hashable as they will be used
    # in a dictionary.

    rel_qs, rel_obj_attr, instance_attr, single, cache_name = (
        prefetcher.get_prefetch_queryset(instances, lookup.get_current_queryset(level)))
    # We have to handle the possibility that the QuerySet we just got back
    # contains some prefetch_related lookups. We don't want to trigger the
    # prefetch_related functionality by evaluating the query. Rather, we need
    # to merge in the prefetch_related lookups.
    # Copy the lookups in case it is a Prefetch object which could be reused
    # later (happens in nested prefetch_related).
    additional_lookups = [
        copy.copy(additional_lookup) for additional_lookup
        in getattr(rel_qs, '_prefetch_related_lookups', [])
    ]
    if additional_lookups:
        # Don't need to clone because the manager should have given us a fresh
        # instance, so we access an internal instead of using public interface
        # for performance reasons.
        rel_qs._prefetch_related_lookups = []

    all_related_objects = list(rel_qs)

    rel_obj_cache = {}
    for rel_obj in all_related_objects:
        rel_attr_val = rel_obj_attr(rel_obj)
        rel_obj_cache.setdefault(rel_attr_val, []).append(rel_obj)

    to_attr, as_attr = lookup.get_current_to_attr(level)
    # Make sure `to_attr` does not conflict with a field.
    if as_attr and instances:
        # We assume that objects retrieved are homogeneous (which is the premise
        # of prefetch_related), so what applies to first object applies to all.
        model = instances[0].__class__
        try:
            model._meta.get_field(to_attr)
        except exceptions.FieldDoesNotExist:
            pass
        else:
            msg = 'to_attr={} conflicts with a field on the {} model.'
            raise ValueError(msg.format(to_attr, model.__name__))

    # Whether or not we're prefetching the last part of the lookup.
    leaf = len(lookup.prefetch_through.split(LOOKUP_SEP)) - 1 == level

    for obj in instances:
        instance_attr_val = instance_attr(obj)
        vals = rel_obj_cache.get(instance_attr_val, [])

        if single:
            val = vals[0] if vals else None
            to_attr = to_attr if as_attr else cache_name
            setattr(obj, to_attr, val)
        else:
            if as_attr:
                setattr(obj, to_attr, vals)
                obj._prefetched_objects_cache[cache_name] = vals
            else:
                manager = getattr(obj, to_attr)
                if leaf and lookup.queryset is not None:
                    try:
                        apply_rel_filter = manager._apply_rel_filters
                    except __HOLE__:
                        warnings.warn(
                            "The `%s.%s` class must implement a `_apply_rel_filters()` "
                            "method that accepts a `QuerySet` as its single "
                            "argument and returns an appropriately filtered version "
                            "of it."
                            % (manager.__class__.__module__, manager.__class__.__name__),
                            RemovedInDjango20Warning,
                        )
                        qs = manager.get_queryset()
                    else:
                        qs = apply_rel_filter(lookup.queryset)
                else:
                    qs = manager.get_queryset()
                qs._result_cache = vals
                # We don't want the individual qs doing prefetch_related now,
                # since we have merged this into the current work.
                qs._prefetch_done = True
                obj._prefetched_objects_cache[cache_name] = qs
    return all_related_objects, additional_lookups
AttributeError
dataset/ETHPy150Open django/django/django/db/models/query.py/prefetch_one_level
978
def test_multiple_instance_error(self):
    try:
        self.AppClass()
    except __HOLE__:
        pass
    except Exception as e:
        raise e
    else:
        raise AssertionError("Test failed")
RuntimeError
dataset/ETHPy150Open amol-mandhane/htmlPy/tests/base_gui_basics.py/BaseGUIBasics.test_multiple_instance_error
979
def delete(self, *args, **kwargs):
    permission_name = self.permission_name
    super(Queue, self).delete(*args, **kwargs)

    # once the Queue is safely deleted, remove the permission (if exists)
    if permission_name:
        try:
            p = Permission.objects.get(codename=permission_name[9:])
            p.delete()
        except __HOLE__:
            pass
ObjectDoesNotExist
dataset/ETHPy150Open rossp/django-helpdesk/helpdesk/models.py/Queue.delete
980
def _set_settings(self, data):
    # data should always be a Python dictionary.
    try:
        import pickle
    except __HOLE__:
        import cPickle as pickle
    from helpdesk.lib import b64encode
    self.settings_pickled = b64encode(pickle.dumps(data))
ImportError
dataset/ETHPy150Open rossp/django-helpdesk/helpdesk/models.py/UserSettings._set_settings
981
def _get_settings(self):
    # return a python dictionary representing the pickled data.
    try:
        import pickle
    except __HOLE__:
        import cPickle as pickle
    from helpdesk.lib import b64decode
    try:
        return pickle.loads(b64decode(str(self.settings_pickled)))
    except pickle.UnpicklingError:
        return {}
ImportError
dataset/ETHPy150Open rossp/django-helpdesk/helpdesk/models.py/UserSettings._get_settings
982
def _get_form(response, formname, formid, formnumber, formxpath):
    """Find the form element """
    root = create_root_node(response.text, lxml.html.HTMLParser,
                            base_url=get_base_url(response))
    forms = root.xpath('//form')
    if not forms:
        raise ValueError("No <form> element found in %s" % response)

    if formname is not None:
        f = root.xpath('//form[@name="%s"]' % formname)
        if f:
            return f[0]

    if formid is not None:
        f = root.xpath('//form[@id="%s"]' % formid)
        if f:
            return f[0]

    # Get form element from xpath, if not found, go up
    if formxpath is not None:
        nodes = root.xpath(formxpath)
        if nodes:
            el = nodes[0]
            while True:
                if el.tag == 'form':
                    return el
                el = el.getparent()
                if el is None:
                    break
        encoded = formxpath if six.PY3 else formxpath.encode('unicode_escape')
        raise ValueError('No <form> element found with %s' % encoded)

    # If we get here, it means that either formname was None
    # or invalid
    if formnumber is not None:
        try:
            form = forms[formnumber]
        except __HOLE__:
            raise IndexError("Form number %d not found in %s" %
                             (formnumber, response))
        else:
            return form
IndexError
dataset/ETHPy150Open scrapy/scrapy/scrapy/http/request/form.py/_get_form
983
def _get_inputs(form, formdata, dont_click, clickdata, response):
    try:
        formdata = dict(formdata or ())
    except (ValueError, __HOLE__):
        raise ValueError('formdata should be a dict or iterable of tuples')

    inputs = form.xpath('descendant::textarea'
                        '|descendant::select'
                        '|descendant::input[not(@type) or @type['
                        ' not(re:test(., "^(?:submit|image|reset)$", "i"))'
                        ' and (../@checked or'
                        ' not(re:test(., "^(?:checkbox|radio)$", "i")))]]',
                        namespaces={
                            "re": "http://exslt.org/regular-expressions"})
    values = [(k, u'' if v is None else v)
              for k, v in (_value(e) for e in inputs)
              if k and k not in formdata]

    if not dont_click:
        clickable = _get_clickable(clickdata, form)
        if clickable and clickable[0] not in formdata and not clickable[0] is None:
            values.append(clickable)

    values.extend(formdata.items())
    return values
TypeError
dataset/ETHPy150Open scrapy/scrapy/scrapy/http/request/form.py/_get_inputs
984
def _get_clickable(clickdata, form):
    """
    Returns the clickable element specified in clickdata,
    if the latter is given. If not, it returns the first
    clickable element found
    """
    clickables = [
        el for el in form.xpath(
            'descendant::*[(self::input or self::button)'
            ' and re:test(@type, "^submit$", "i")]'
            '|descendant::button[not(@type)]',
            namespaces={"re": "http://exslt.org/regular-expressions"})
    ]
    if not clickables:
        return

    # If we don't have clickdata, we just use the first clickable element
    if clickdata is None:
        el = clickables[0]
        return (el.get('name'), el.get('value') or '')

    # If clickdata is given, we compare it to the clickable elements to find a
    # match. We first look to see if the number is specified in clickdata,
    # because that uniquely identifies the element
    nr = clickdata.get('nr', None)
    if nr is not None:
        try:
            el = list(form.inputs)[nr]
        except __HOLE__:
            pass
        else:
            return (el.get('name'), el.get('value') or '')

    # We didn't find it, so now we build an XPath expression out of the other
    # arguments, because they can be used as such
    xpath = u'.//*' + \
            u''.join(u'[@%s="%s"]' % c for c in six.iteritems(clickdata))
    el = form.xpath(xpath)
    if len(el) == 1:
        return (el[0].get('name'), el[0].get('value') or '')
    elif len(el) > 1:
        raise ValueError("Multiple elements found (%r) matching the criteria "
                         "in clickdata: %r" % (el, clickdata))
    else:
        raise ValueError('No clickable element matching clickdata: %r'
                         % (clickdata,))
IndexError
dataset/ETHPy150Open scrapy/scrapy/scrapy/http/request/form.py/_get_clickable
985
@classmethod
def string_to_date_with_xls_validation(cls, date_str):
    date_obj = datetime.strptime(date_str, '%Y-%m-%d').date()
    try:
        SharedDate().datetime_to_julian(date_obj)
    except __HOLE__:
        return date_str
    else:
        return date_obj
ValueError
dataset/ETHPy150Open kobotoolbox/kobocat/onadata/libs/utils/export_tools.py/ExportBuilder.string_to_date_with_xls_validation
986
@classmethod
def convert_type(cls, value, data_type):
    """
    Convert data to its native type e.g. string '1' to int 1
    @param value: the string value to convert
    @param data_type: the native data type to convert to
    @return: the converted value
    """
    func = ExportBuilder.CONVERT_FUNCS.get(data_type, lambda x: x)
    try:
        return func(value)
    except __HOLE__:
        return value
ValueError
dataset/ETHPy150Open kobotoolbox/kobocat/onadata/libs/utils/export_tools.py/ExportBuilder.convert_type
987
def _get_server_from_metadata(xform, meta, token):
    report_templates = MetaData.external_export(xform)

    if meta:
        try:
            int(meta)
        except __HOLE__:
            raise Exception(u"Invalid metadata pk {0}".format(meta))

        # Get the external server from the metadata
        result = report_templates.get(pk=meta)
        server = result.external_export_url
        name = result.external_export_name
    elif token:
        server = token
        name = None
    else:
        # Take the latest value in the metadata
        if not report_templates:
            raise Exception(
                u"Could not find the template token: Please upload template.")

        server = report_templates[0].external_export_url
        name = report_templates[0].external_export_name

    return server, name
ValueError
dataset/ETHPy150Open kobotoolbox/kobocat/onadata/libs/utils/export_tools.py/_get_server_from_metadata
988
def tearDown(self):
    try:
        utils.destroy_test_repository()
        utils.destroy_test_project()
    except __HOLE__:
        # not exists
        pass
    utils.remove_test_user()
OSError
dataset/ETHPy150Open koshinuke/koshinuke.py/tests/core_test.py/CreateTestCase.tearDown
989
def _set_log_level_and_params(self, base_log_level, func):
    """Wrap `func` to temporarily set this plugin's logger level to
    `base_log_level` + config options (and restore it to its previous
    value after the function returns). Also determines which params may
    not be sent for backwards-compatibility.
    """
    argspec = inspect.getargspec(func)

    @wraps(func)
    def wrapper(*args, **kwargs):
        assert self._log.level == logging.NOTSET
        verbosity = beets.config['verbose'].get(int)
        log_level = max(logging.DEBUG, base_log_level - 10 * verbosity)
        self._log.setLevel(log_level)
        try:
            try:
                return func(*args, **kwargs)
            except __HOLE__ as exc:
                if exc.args[0].startswith(func.__name__):
                    # caused by 'func' and not stuff internal to 'func'
                    kwargs = dict((arg, val) for arg, val in kwargs.items()
                                  if arg in argspec.args)
                    return func(*args, **kwargs)
                else:
                    raise
        finally:
            self._log.setLevel(logging.NOTSET)
    return wrapper
TypeError
dataset/ETHPy150Open beetbox/beets/beets/plugins.py/BeetsPlugin._set_log_level_and_params
990
def load_plugins(names=()):
    """Imports the modules for a sequence of plugin names. Each name
    must be the name of a Python module under the "beetsplug" namespace
    package in sys.path; the module indicated should contain the
    BeetsPlugin subclasses desired.
    """
    for name in names:
        bname = name.encode('utf8')
        modname = b'%s.%s' % (PLUGIN_NAMESPACE, bname)
        try:
            try:
                namespace = __import__(modname, None, None)
            except __HOLE__ as exc:
                # Again, this is hacky:
                if exc.args[0].endswith(' ' + name):
                    log.warn(u'** plugin {0} not found', name)
                else:
                    raise
            else:
                for obj in getattr(namespace, bname).__dict__.values():
                    if isinstance(obj, type) and issubclass(obj, BeetsPlugin) \
                            and obj != BeetsPlugin and obj not in _classes:
                        _classes.add(obj)
        except:
            log.warn(
                u'** error loading plugin {}:\n{}',
                name,
                traceback.format_exc(),
            )
ImportError
dataset/ETHPy150Open beetbox/beets/beets/plugins.py/load_plugins
991
def test_unary_ops(self):
    unary_ops = [str, repr, len, bool, not_]
    try:
        unary_ops.append(unicode)
    except __HOLE__:
        pass  # unicode no longer exists in Python 3.

    for op in unary_ops:
        self.assertEqual(
            op(self.lazy),
            op(self.base), str(op))
NameError
dataset/ETHPy150Open awslabs/lambda-apigateway-twilio-tutorial/pytz/tests/test_lazy.py/LazyListTestCase.test_unary_ops
992
def test_binary_ops(self):
    binary_ops = [eq, ge, gt, le, lt, ne, add, concat]
    try:
        binary_ops.append(cmp)
    except __HOLE__:
        pass  # cmp no longer exists in Python 3.

    for op in binary_ops:
        self.assertEqual(
            op(self.lazy, self.lazy),
            op(self.base, self.base),
            str(op))

        for other in [self.base, self.lesser, self.greater]:
            self.assertEqual(
                op(self.lazy, other),
                op(self.base, other),
                '%s %s' % (op, other))
            self.assertEqual(
                op(other, self.lazy),
                op(other, self.base),
                '%s %s' % (op, other))

    # Multiplication
    self.assertEqual(self.lazy * 3, self.base * 3)
    self.assertEqual(3 * self.lazy, 3 * self.base)

    # Contains
    self.assertTrue(2 in self.lazy)
    self.assertFalse(42 in self.lazy)
NameError
dataset/ETHPy150Open awslabs/lambda-apigateway-twilio-tutorial/pytz/tests/test_lazy.py/LazyListTestCase.test_binary_ops
993
def test_callable(self):
    try:
        callable
    except __HOLE__:
        return  # No longer exists with Python 3.
    self.assertFalse(callable(self.lazy))
NameError
dataset/ETHPy150Open awslabs/lambda-apigateway-twilio-tutorial/pytz/tests/test_lazy.py/LazyListTestCase.test_callable
994
def test_unary_ops(self):
    # These ops just need to work.
    unary_ops = [str, repr]
    try:
        unary_ops.append(unicode)
    except __HOLE__:
        pass  # unicode no longer exists in Python 3.

    for op in unary_ops:
        op(self.lazy)  # These ops just need to work.

    # These ops should return identical values as a real set.
    unary_ops = [len, bool, not_]

    for op in unary_ops:
        self.assertEqual(
            op(self.lazy),
            op(self.base),
            '%s(lazy) == %r' % (op, op(self.lazy)))
NameError
dataset/ETHPy150Open awslabs/lambda-apigateway-twilio-tutorial/pytz/tests/test_lazy.py/LazySetTestCase.test_unary_ops
995
def test_binary_ops(self):
    binary_ops = [eq, ge, gt, le, lt, ne, sub, and_, or_, xor]
    try:
        binary_ops.append(cmp)
    except __HOLE__:
        pass  # cmp no longer exists in Python 3.

    for op in binary_ops:
        self.assertEqual(
            op(self.lazy, self.lazy),
            op(self.base, self.base),
            str(op))
        self.assertEqual(
            op(self.lazy, self.base),
            op(self.base, self.base),
            str(op))
        self.assertEqual(
            op(self.base, self.lazy),
            op(self.base, self.base),
            str(op))

    # Contains
    self.assertTrue(2 in self.lazy)
    self.assertFalse(42 in self.lazy)
NameError
dataset/ETHPy150Open awslabs/lambda-apigateway-twilio-tutorial/pytz/tests/test_lazy.py/LazySetTestCase.test_binary_ops
996
def test_iops(self):
    try:
        iops = [isub, iand, ior, ixor]
    except __HOLE__:
        return  # Don't exist in older Python versions.

    for op in iops:
        # Mutating operators, so make fresh copies.
        lazy = LazySet(self.base)
        base = self.base.copy()
        op(lazy, set([1]))
        op(base, set([1]))
        self.assertEqual(lazy, base, str(op))
NameError
dataset/ETHPy150Open awslabs/lambda-apigateway-twilio-tutorial/pytz/tests/test_lazy.py/LazySetTestCase.test_iops
997
def test_callable(self):
    try:
        callable
    except __HOLE__:
        return  # No longer exists with Python 3.
    self.assertFalse(callable(self.lazy))
NameError
dataset/ETHPy150Open awslabs/lambda-apigateway-twilio-tutorial/pytz/tests/test_lazy.py/LazySetTestCase.test_callable
998
def makeImportedModule(name, pathname, desc, scope):
    """
    Returns a ModuleProxy that has access to a closure w/
    information about the module to load, but is otherwise
    empty. On an attempted access of any member of the module,
    the module is loaded.
    """
    def _loadModule():
        """
        Load the given module, and insert it into the parent
        scope, and also the original importing scope.
        """
        mod = sys.modules.get(name, None)
        if mod is None or not isinstance(mod, types.ModuleType):
            try:
                file = open(pathname, 'U')
            except:
                file = None
            try:
                mod = imp.load_module(name, file, pathname, desc)
            finally:
                if file is not None:
                    file.close()
            sys.modules[name] = mod
            scope[name] = mod

            frame = sys._getframe(2)
            global_scope = frame.f_globals
            local_scope = frame.f_locals

            # check to see if this module exists for any part of the name
            # we are importing, e.g. if you are importing foo.bar.baz,
            # look for foo.bar.baz, bar.baz, and baz.
            moduleParts = name.split('.')
            names = ['.'.join(moduleParts[-x:])
                     for x in range(len(moduleParts))]

            for modulePart in names:
                if modulePart in local_scope:
                    if (hasattr(local_scope[modulePart], '__class__') and
                            local_scope[modulePart].__class__.__name__ == 'ModuleProxy'):
                        # FIXME: this makes me cringe, but I haven't figured out a
                        # better way to ensure that the module proxy we're
                        # looking at is actually a proxy for this module
                        if pathname in repr(local_scope[modulePart]):
                            local_scope[modulePart] = mod
                if modulePart in global_scope:
                    if (hasattr(global_scope[modulePart], '__class__') and
                            global_scope[modulePart].__class__.__name__ == 'ModuleProxy'):
                        if pathname in repr(global_scope[modulePart]):
                            global_scope[modulePart] = mod
        return mod

    class ModuleProxy(object):
        __slots__ = []
        # we don't add any docs for the module in case the
        # user tries accessing '__doc__'

        def __hasattr__(self, key):
            mod = _loadModule()
            return hasattr(mod, key)

        def __getattr__(self, key):
            mod = _loadModule()
            try:
                return getattr(mod, key)
            except __HOLE__:
                # A nicer error to compensate for the extra stack frame the
                # lazy importer adds.
                raise AttributeError("Module %r has no attribute %r"
                                     % (name, key))

        def __setattr__(self, key, value):
            mod = _loadModule()
            return setattr(mod, key, value)

        def __repr__(self):
            return "<moduleProxy '%s' from '%s'>" % (name, pathname)

    return ModuleProxy()
AttributeError
dataset/ETHPy150Open sassoftware/conary/conary/lib/importer.py/makeImportedModule
999
def find_module(self, fullname, path=None):
    origName = fullname
    if not path:
        mod = sys.modules.get(fullname, False)
        if mod is None or mod and isinstance(mod, types.ModuleType):
            return mod
    frame = sys._getframe(1)
    # this is the scope in which import <fullname> was called
    global_scope = frame.f_globals
    if '.' in fullname:
        head, fullname = fullname.rsplit('.', 1)
        # this import protocol works such that if I am going to be
        # able to import fullname, then everything in front of the
        # last . in fullname must already be loaded into sys.modules.
        mod = sys.modules.get(head, None)
        if mod is None:
            return None
        if hasattr(mod, '__path__'):
            path = mod.__path__
    try:
        file, pathname, desc = imp.find_module(fullname, path)
        return OnDemandLoader(origName, file, pathname, desc, global_scope)
    except __HOLE__:
        # don't return an import error. That will stop
        # the automated search mechanism from working.
        return None
ImportError
dataset/ETHPy150Open sassoftware/conary/conary/lib/importer.py/OnDemandImporter.find_module