Dataset columns:
    Unnamed: 0  (int64): 0 to 10k -- row index
    function    (string, lengths 79 to 138k): Python source with the target exception type masked as __HOLE__
    label       (string, 20 classes): the exception type that fills __HOLE__
    info        (string, lengths 42 to 261): dataset path identifying the source function
6,200
def _start_application(self, application, port):
    # Note: No significant application logic should be executed before this point. The call to application.listen()
    # will raise an exception if another process is using the same port. We rely on this exception to force us to
    # exit if there are any port conflicts.
    try:
        application.listen(port, '0.0.0.0')
    except __HOLE__:
        self._logger.error('Could not start application on port {}. Is port already in use?'.format(port))
        sys.exit(1)

    ioloop = tornado.ioloop.IOLoop.instance()

    # add a teardown callback that will stop the tornado server
    stop_tornado_ioloop = functools.partial(ioloop.add_callback, callback=ioloop.stop)
    UnhandledExceptionHandler.singleton().add_teardown_callback(stop_tornado_ioloop)

    return ioloop
OSError
dataset/ETHPy150Open box/ClusterRunner/app/subcommands/service_subcommand.py/ServiceSubcommand._start_application
6,201
def _write_pid_file(self, filename):
    fs.write_file(str(os.getpid()), filename)

    def remove_pid_file():
        try:
            os.remove(filename)
        except __HOLE__:
            pass

    UnhandledExceptionHandler.singleton().add_teardown_callback(remove_pid_file)
OSError
dataset/ETHPy150Open box/ClusterRunner/app/subcommands/service_subcommand.py/ServiceSubcommand._write_pid_file
6,202
def create_apikey_model(user_model):
    """ Generate ApiKey model class and connect it with :user_model:.

    ApiKey is generated with relationship to user model class :user_model:
    as a One-to-One relationship with a backreference.
    ApiKey is set up to be auto-generated when a new :user_model: is created.

    Returns ApiKey document class. If ApiKey is already defined, it is not
    generated.

    Arguments:
        :user_model: Class that represents user model for which api keys
            will be generated and with which ApiKey will have relationship.
    """
    try:
        return engine.get_document_cls('ApiKey')
    except __HOLE__:
        pass

    fk_kwargs = {
        'ref_column': None,
    }
    if hasattr(user_model, '__tablename__'):
        fk_kwargs['ref_column'] = '.'.join([
            user_model.__tablename__, user_model.pk_field()])
        fk_kwargs['ref_column_type'] = user_model.pk_field_type()

    class ApiKey(engine.BaseDocument):
        __tablename__ = 'nefertari_apikey'

        id = engine.IdField(primary_key=True)
        token = engine.StringField(default=create_apikey_token)
        user = engine.Relationship(
            document=user_model.__name__,
            uselist=False,
            backref_name='api_key',
            backref_uselist=False)
        user_id = engine.ForeignKeyField(
            ref_document=user_model.__name__,
            **fk_kwargs)

        def reset_token(self):
            self.update({'token': create_apikey_token()})
            return self.token

    # Setup ApiKey autogeneration on :user_model: creation
    ApiKey.autogenerate_for(user_model, 'user')

    return ApiKey
ValueError
dataset/ETHPy150Open ramses-tech/nefertari/nefertari/authentication/models.py/create_apikey_model
6,203
def __init__(self, command, input_pipe=None):
    """
    Initializes a InputPipeProcessWrapper instance.

    :param command: a subprocess.Popen instance with stdin=input_pipe and
                    stdout=subprocess.PIPE.
                    Alternatively, just its args argument as a convenience.
    """
    self._command = command
    self._input_pipe = input_pipe
    self._original_input = True

    if input_pipe is not None:
        try:
            input_pipe.fileno()
        except __HOLE__:
            # subprocess require a fileno to work, if not present we copy to disk first
            self._original_input = False
            f = tempfile.NamedTemporaryFile('wb', prefix='luigi-process_tmp', delete=False)
            self._tmp_file = f.name
            while True:
                chunk = input_pipe.read(io.DEFAULT_BUFFER_SIZE)
                if not chunk:
                    break
                f.write(chunk)
            input_pipe.close()
            f.close()
            self._input_pipe = FileWrapper(io.BufferedReader(io.FileIO(self._tmp_file, 'r')))

    self._process = command if isinstance(command, subprocess.Popen) else self.create_subprocess(command)
    # we want to keep a circular reference to avoid garbage collection
    # when the object is used in, e.g., pipe.read()
    self._process._selfref = self
AttributeError
dataset/ETHPy150Open spotify/luigi/luigi/format.py/InputPipeProcessWrapper.__init__
6,204
def _abort(self):
    """
    Call _finish, but eat the exception (if any).
    """
    try:
        self._finish()
    except __HOLE__:
        raise
    except BaseException:
        pass
KeyboardInterrupt
dataset/ETHPy150Open spotify/luigi/luigi/format.py/InputPipeProcessWrapper._abort
6,205
def __getattr__(self, name):
    if name in ['_process', '_input_pipe']:
        raise AttributeError(name)
    try:
        return getattr(self._process.stdout, name)
    except __HOLE__:
        return getattr(self._input_pipe, name)
AttributeError
dataset/ETHPy150Open spotify/luigi/luigi/format.py/InputPipeProcessWrapper.__getattr__
6,206
def __getattr__(self, name):
    if name in ['_process', '_output_pipe']:
        raise AttributeError(name)
    try:
        return getattr(self._process.stdin, name)
    except __HOLE__:
        return getattr(self._output_pipe, name)
AttributeError
dataset/ETHPy150Open spotify/luigi/luigi/format.py/OutputPipeProcessWrapper.__getattr__
6,207
def __init__(self, stream, *args, **kwargs):
    self._stream = stream
    try:
        super(BaseWrapper, self).__init__(stream, *args, **kwargs)
    except __HOLE__:
        pass
TypeError
dataset/ETHPy150Open spotify/luigi/luigi/format.py/BaseWrapper.__init__
6,208
def __init__(self, *args, **kwargs):
    self.args = args

    try:
        self.input = args[0].input
    except AttributeError:
        pass

    try:
        self.output = args[-1].output
    except __HOLE__:
        pass

    if not kwargs.get('check_consistency', True):
        return

    for x in range(len(args) - 1):
        try:
            if args[x].output != args[x + 1].input:
                raise TypeError(
                    'The format chaining is not valid, %s expect %s'
                    'but %s provide %s' % (
                        args[x].__class__.__name__,
                        args[x].input,
                        args[x + 1].__class__.__name__,
                        args[x + 1].output,
                    )
                )
        except AttributeError:
            pass
AttributeError
dataset/ETHPy150Open spotify/luigi/luigi/format.py/ChainFormat.__init__
6,209
def __del__(self, *args):
    # io.TextIOWrapper close the file on __del__, let the underlying file decide
    if not self.closed and self.writable():
        super(TextWrapper, self).flush()

    try:
        self._stream.__del__(*args)
    except __HOLE__:
        pass
AttributeError
dataset/ETHPy150Open spotify/luigi/luigi/format.py/TextWrapper.__del__
6,210
def __init__(self, stream, *args, **kwargs):
    self._stream = stream
    try:
        super(TextWrapper, self).__init__(stream, *args, **kwargs)
    except __HOLE__:
        pass
TypeError
dataset/ETHPy150Open spotify/luigi/luigi/format.py/TextWrapper.__init__
6,211
def validate_fields(self, bundle, request=None):
    errors = []
    for field in self.validated_fields[request.method]:
        validation_func = getattr(self, '%s_is_valid' % field)
        try:
            validation_func(bundle.data.get(field, None), bundle, request)
        except __HOLE__, error:
            errors.append(error.get_dict())
    return errors
ValidationError
dataset/ETHPy150Open mozilla/inventory/vendor-local/src/django-tastytools/tastytools/validation.py/FieldsValidation.validate_fields
6,212
@staticmethod
def uri_to_pk(uri):
    if uri is None:
        return None

    # convert everything to lists
    multiple = not isinstance(uri, basestring)
    uris = uri if multiple else [uri]

    # handle all passed URIs
    converted = []
    for one_uri in uris:
        try:
            # hopefully /api/v1/<resource_name>/<pk>/
            converted.append(int(one_uri.split('/')[-2]))
        except (IndexError, __HOLE__):
            raise ValueError("URI %s could not be converted to PK integer." % one_uri)

    # convert back to original format
    return converted if multiple else converted[0]
ValueError
dataset/ETHPy150Open mozilla/inventory/vendor-local/src/django-tastytools/tastytools/validation.py/FieldsValidation.uri_to_pk
6,213
def _get_stream_parameters(kind, device, channels, dtype, latency, samplerate):
    """Generate PaStreamParameters struct."""
    if device is None:
        if kind == 'input':
            device = _pa.Pa_GetDefaultInputDevice()
        elif kind == 'output':
            device = _pa.Pa_GetDefaultOutputDevice()
    info = device_info(device)
    if channels is None:
        channels = info['max_' + kind + '_channels']
    dtype = np.dtype(dtype)
    try:
        sample_format = _np2pa[dtype]
    except __HOLE__:
        raise ValueError("Invalid " + kind + " sample format")
    if samplerate is None:
        samplerate = info['default_samplerate']
    parameters = ffi.new(
        "PaStreamParameters*",
        (device, channels, sample_format, latency, ffi.NULL))
    return parameters, dtype, samplerate
KeyError
dataset/ETHPy150Open bastibe/PySoundCard/pysoundcard.py/_get_stream_parameters
6,214
def _split(value):
    """Split input/output value into two values."""
    if isinstance(value, str):
        # iterable, but not meant for splitting
        return value, value
    try:
        invalue, outvalue = value
    except TypeError:
        invalue = outvalue = value
    except __HOLE__:
        raise ValueError("Only single values and pairs are allowed")
    return invalue, outvalue
ValueError
dataset/ETHPy150Open bastibe/PySoundCard/pysoundcard.py/_split
6,215
def DoAttributes(self, c, fsm):  # m
    try:
        while 1:
            n = int(fsm.pop())
            self.screen.set_attribute(n)
    except __HOLE__:
        pass
IndexError
dataset/ETHPy150Open kdart/pycopia/QA/pycopia/ANSIterm.py/ANSITerminal.DoAttributes
6,216
@staticmethod
def from_string(data_string):
    """Deserializes a token from a string like one returned by
    `to_string()`."""

    if not len(data_string):
        raise ValueError("Invalid parameter string.")

    params = parse_qs(data_string, keep_blank_values=False)
    if not len(params):
        raise ValueError("Invalid parameter string.")

    try:
        key = params['oauth_token'][0]
    except Exception:
        raise ValueError("'oauth_token' not found in OAuth request.")

    try:
        secret = params['oauth_token_secret'][0]
    except Exception:
        raise ValueError("'oauth_token_secret' not found in "
                         "OAuth request.")

    token = YahooToken(key, secret)

    session_handle = params.get('oauth_session_handle')
    if session_handle:
        setattr(token, 'session_handle', session_handle[0])

    timestamp = params.get('token_creation_timestamp')
    if timestamp:
        setattr(token, 'timestamp', timestamp[0])

    try:
        token.callback_confirmed = params['oauth_callback_confirmed'][0]
    except __HOLE__:
        pass  # 1.0, no callback confirmed.

    return token
KeyError
dataset/ETHPy150Open project-fondue/python-yql/yql/__init__.py/YahooToken.from_string
6,217
def validate_statementId(req_dict):
    if 'statementId' in req_dict['params'] and 'voidedStatementId' in req_dict['params']:
        err_msg = "Cannot have both statementId and voidedStatementId in a GET request"
        raise ParamError(err_msg)
    elif 'statementId' in req_dict['params']:
        statementId = req_dict['params']['statementId']
        voided = False
    else:
        statementId = req_dict['params']['voidedStatementId']
        voided = True

    not_allowed = ["agent", "verb", "activity", "registration",
                   "related_activities", "related_agents", "since",
                   "until", "limit", "ascending"]
    bad_keys = set(not_allowed) & set(req_dict['params'].keys())
    if bad_keys:
        err_msg = "Cannot have %s in a GET request only 'format' and/or 'attachments' are allowed with 'statementId' and 'voidedStatementId'" % ', '.join(bad_keys)
        raise ParamError(err_msg)

    # Try to retrieve stmt, if DNE then return empty else return stmt info
    try:
        uuidId = uuid.UUID(str(statementId))
        st = Statement.objects.get(statement_id=uuidId)
    except (Statement.DoesNotExist, __HOLE__):
        err_msg = 'There is no statement associated with the id: %s' % statementId
        raise IDNotFoundError(err_msg)

    auth = req_dict.get('auth', None)
    mine_only = auth and 'statements_mine_only' in auth

    if auth['agent']:
        if mine_only and st.authority.id != auth['agent'].id:
            err_msg = "Incorrect permissions to view statements"
            raise Forbidden(err_msg)

    if st.voided != voided:
        if st.voided:
            err_msg = 'The requested statement (%s) is voided. Use the "voidedStatementId" parameter to retrieve your statement.' % statementId
        else:
            err_msg = 'The requested statement (%s) is not voided. Use the "statementId" parameter to retrieve your statement.' % statementId
        raise IDNotFoundError(err_msg)

    return statementId
ValueError
dataset/ETHPy150Open adlnet/ADL_LRS/lrs/utils/req_validate.py/validate_statementId
6,218
@auth
def activities_get(req_dict):
    rogueparams = set(req_dict['params']) - set(["activityId"])
    if rogueparams:
        raise ParamError("The get activities request contained unexpected parameters: %s" % ", ".join(rogueparams))

    try:
        activity_id = req_dict['params']['activityId']
    except __HOLE__:
        err_msg = "Error -- activities - method = %s, but activityId parameter is missing" % req_dict['method']
        raise ParamError(err_msg)

    # Try to retrieve activity, if DNE then return empty else return activity info
    try:
        Activity.objects.get(activity_id=activity_id, authority__isnull=False)
    except Activity.DoesNotExist:
        err_msg = "No activity found with ID %s" % activity_id
        raise IDNotFoundError(err_msg)

    return req_dict
KeyError
dataset/ETHPy150Open adlnet/ADL_LRS/lrs/utils/req_validate.py/activities_get
6,219
@auth
def agents_get(req_dict):
    rogueparams = set(req_dict['params']) - set(["agent"])
    if rogueparams:
        raise ParamError("The get agent request contained unexpected parameters: %s" % ", ".join(rogueparams))

    try:
        req_dict['params']['agent']
    except __HOLE__:
        err_msg = "Error -- agents url, but no agent parameter.. the agent parameter is required"
        raise ParamError(err_msg)

    agent = json.loads(req_dict['params']['agent'])
    params = get_agent_ifp(agent)
    if not Agent.objects.filter(**params).exists():
        raise IDNotFoundError("Error with Agent. The agent partial did not match any agents on record")

    req_dict['agent_ifp'] = params
    return req_dict
KeyError
dataset/ETHPy150Open adlnet/ADL_LRS/lrs/utils/req_validate.py/agents_get
6,220
def __getitem__(self, name):
    name = self._to_colons[name]
    try:
        return self._unknowns[name]
    except __HOLE__:
        return self._params[name]
KeyError
dataset/ETHPy150Open OpenMDAO/OpenMDAO/openmdao/components/exec_comp.py/_UPDict.__getitem__
6,221
def get(self, key, default=None, version=None):
    key = self.make_key(key, version=version)
    self.validate_key(key)
    with self._lock.reader():
        exp = self._expire_info.get(key)
        if exp is None:
            return default
        elif exp > time.time():
            try:
                pickled = self._cache[key]
                return pickle.loads(pickled)
            except pickle.PickleError:
                return default
    with self._lock.writer():
        try:
            del self._cache[key]
            del self._expire_info[key]
        except __HOLE__:
            pass
        return default
KeyError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.4/django/core/cache/backends/locmem.py/LocMemCache.get
6,222
def has_key(self, key, version=None):
    key = self.make_key(key, version=version)
    self.validate_key(key)
    with self._lock.reader():
        exp = self._expire_info.get(key)
        if exp is None:
            return False
        elif exp > time.time():
            return True
    with self._lock.writer():
        try:
            del self._cache[key]
            del self._expire_info[key]
        except __HOLE__:
            pass
        return False
KeyError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.4/django/core/cache/backends/locmem.py/LocMemCache.has_key
6,223
def _delete(self, key):
    try:
        del self._cache[key]
    except __HOLE__:
        pass
    try:
        del self._expire_info[key]
    except KeyError:
        pass
KeyError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.4/django/core/cache/backends/locmem.py/LocMemCache._delete
6,224
def script():
    """
    Execute the program as a script.

    Set up logging, invoke main() using the user-provided arguments and
    handle any exception raised.
    """
    # Setup logging
    import logging
    logging.basicConfig(
        format="%(levelname)s: %(message)s",
        datefmt="%Y-%m-%d %H:%M:%S",
        stream=sys.stdout)
    logger = logging.getLogger()

    # Dispatch to the command according to the script name
    script = sys.argv[0]
    args = sys.argv[1:]
    if os.path.basename(script).startswith('pgxn-'):
        args.insert(0, os.path.basename(script)[5:])
        # for help print
        sys.argv[0] = os.path.join(os.path.dirname(script), 'pgxn')

    # Execute the script
    try:
        main(args)

    # Different ways to fail
    except UserAbort, e:
        # The user replied "no" to some question
        logger.info("%s", e)
        sys.exit(1)

    except PgxnException, e:
        # An regular error from the program
        logger.error("%s", e)
        sys.exit(1)

    except __HOLE__, e:
        # Usually the arg parser bailing out.
        pass

    except Exception, e:
        logger.error(_("unexpected error: %s - %s"),
                     e.__class__.__name__, e, exc_info=True)
        sys.exit(1)

    except BaseException, e:
        # ctrl-c
        sys.exit(1)
SystemExit
dataset/ETHPy150Open dvarrazzo/pgxnclient/pgxnclient/cli.py/script
6,225
def setup_server():
    try:
        import routes
    except __HOLE__:
        raise nose.SkipTest('Install routes to test RoutesDispatcher code')

    class Dummy:
        def index(self):
            return "I said good day!"

    class City:
        def __init__(self, name):
            self.name = name
            self.population = 10000

        def index(self, **kwargs):
            return "Welcome to %s, pop. %s" % (self.name, self.population)
        index._cp_config = {'tools.response_headers.on': True,
                            'tools.response_headers.headers': [('Content-Language', 'en-GB')]}

        def update(self, **kwargs):
            self.population = kwargs['pop']
            return "OK"

    d = cherrypy.dispatch.RoutesDispatcher()
    d.connect(action='index', name='hounslow', route='/hounslow',
              controller=City('Hounslow'))
    d.connect(name='surbiton', route='/surbiton', controller=City('Surbiton'),
              action='index', conditions=dict(method=['GET']))
    d.mapper.connect('/surbiton', controller='surbiton',
                     action='update', conditions=dict(method=['POST']))
    d.connect('main', ':action', controller=Dummy())

    conf = {'/': {'request.dispatch': d}}
    cherrypy.tree.mount(root=None, config=conf)
ImportError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/cherrypy/cherrypy/test/test_routes.py/RoutesDispatchTest.setup_server
6,226
def __init__(self, first, last=None):
    try:
        first_num = WEEKDAYS.index(first.lower())
    except __HOLE__:
        raise ValueError('Invalid weekday name "%s"' % first)

    if last:
        try:
            last_num = WEEKDAYS.index(last.lower())
        except ValueError:
            raise ValueError('Invalid weekday name "%s"' % last)
    else:
        last_num = None

    RangeExpression.__init__(self, first_num, last_num)
ValueError
dataset/ETHPy150Open agronholm/apscheduler/apscheduler/triggers/cron/expressions.py/WeekdayRangeExpression.__init__
6,227
def __init__(self, option_name, weekday_name):
    try:
        self.option_num = self.options.index(option_name.lower())
    except ValueError:
        raise ValueError('Invalid weekday position "%s"' % option_name)

    try:
        self.weekday = WEEKDAYS.index(weekday_name.lower())
    except __HOLE__:
        raise ValueError('Invalid weekday name "%s"' % weekday_name)
ValueError
dataset/ETHPy150Open agronholm/apscheduler/apscheduler/triggers/cron/expressions.py/WeekdayPositionExpression.__init__
6,228
def from_inet_ptoi(bgp_id):
    """Convert an IPv4 address string format to a four byte long.
    """
    four_byte_id = None
    try:
        packed_byte = socket.inet_pton(socket.AF_INET, bgp_id)
        four_byte_id = int(packed_byte.encode('hex'), 16)
    except __HOLE__:
        LOG.debug('Invalid bgp id given for conversion to integer value %s',
                  bgp_id)

    return four_byte_id
ValueError
dataset/ETHPy150Open osrg/ryu/ryu/services/protocols/bgp/utils/bgp.py/from_inet_ptoi
6,229
def get_sort_options(expressions=None, match_scorer=None, limit=1000):
    """A function to handle the sort expression API differences in
    1.6.4 vs. 1.6.5+.

    An example of usage (NOTE: Do NOT put limit SortExpression or MatchScorer):

      expr_list = [
          search.SortExpression(expression='author', default_value='',
                                direction=search.SortExpression.DESCENDING)]
      sortopts = get_sort_options(expression=expr_list, limit=sort_limit)

    The returned value is used in constructing the query options:

      qoptions=search.QueryOptions(limit=doc_limit, sort_options=sortopts)

    Another example illustrating sorting on an expression based on a
    MatchScorer score:

      expr_list = [
          search.SortExpression(expression='_score + 0.001 * rating',
                                default_value='',
                                direction=search.SortExpression.DESCENDING)]
      sortopts = get_sort_options(expression=expr_list,
                                  match_scorer=search.MatchScorer(),
                                  limit=sort_limit)

    Args:
      expression: a list of search.SortExpression. Do not set limit
        parameter on SortExpression
      match_scorer: a search.MatchScorer or search.RescoringMatchScorer.
        Do not set limit parameter on either scorer
      limit: the scoring limit

    Returns:
      the sort options value, either list of SortOption (1.6.4) or
      SortOptions (1.6.5), to set the sort_options field in the
      QueryOptions object.
    """
    try:
        # using 1.6.5 or greater
        if search.SortOptions:
            logging.debug("search.SortOptions is defined.")
            return search.SortOptions(
                expressions=expressions, match_scorer=match_scorer, limit=limit)
    # SortOptions not available, so using 1.6.4
    except __HOLE__:
        logging.debug("search.SortOptions is not defined.")
        expr_list = []
        # copy the sort expressions including the limit info
        if expressions:
            expr_list = [
                search.SortExpression(
                    expression=e.expression, direction=e.direction,
                    default_value=e.default_value, limit=limit)
                for e in expressions]
        # add the match scorer, if defined, to the expressions list.
        if isinstance(match_scorer, search.MatchScorer):
            expr_list.append(match_scorer.__class__(limit=limit))
        logging.info("sort expressions: %s", expr_list)
        return expr_list
AttributeError
dataset/ETHPy150Open GoogleCloudPlatform/appengine-search-python-java/product_search_python/sortoptions.py/get_sort_options
6,230
def get_command_handler(self, command):
    """ Parsing command and returning appropriate handler.

    :param command: command
    :return: command_handler
    """
    try:
        command_handler = getattr(self, "command_{}".format(command))
    except __HOLE__:
        raise RoutersploitException("Unknown command: '{}'".format(command))

    return command_handler
AttributeError
dataset/ETHPy150Open reverse-shell/routersploit/routersploit/interpreter.py/BaseInterpreter.get_command_handler
6,231
def start(self):
    """ Routersploit main entry point.

    Starting interpreter loop.
    """
    print(self.banner)
    while True:
        try:
            command, args = self.parse_line(raw_input(self.prompt))
            if not command:
                continue
            command_handler = self.get_command_handler(command)
            command_handler(args)
        except RoutersploitException as err:
            utils.print_error(err)
        except (__HOLE__, EOFError):
            print()
            utils.print_status("routersploit stopped")
            break
KeyboardInterrupt
dataset/ETHPy150Open reverse-shell/routersploit/routersploit/interpreter.py/BaseInterpreter.start
6,232
def complete(self, text, state):
    """Return the next possible completion for 'text'.

    If a command has not been entered, then complete against command list.
    Otherwise try to call complete_<command> to get list of completions.
    """
    if state == 0:
        original_line = readline.get_line_buffer()
        line = original_line.lstrip()
        stripped = len(original_line) - len(line)
        start_index = readline.get_begidx() - stripped
        end_index = readline.get_endidx() - stripped

        if start_index > 0:
            cmd, args = self.parse_line(line)
            if cmd == '':
                complete_function = self.default_completer
            else:
                try:
                    complete_function = getattr(self, 'complete_' + cmd)
                except AttributeError:
                    complete_function = self.default_completer
        else:
            complete_function = self.raw_command_completer

        self.completion_matches = complete_function(text, line, start_index, end_index)

    try:
        return self.completion_matches[state]
    except __HOLE__:
        return None
IndexError
dataset/ETHPy150Open reverse-shell/routersploit/routersploit/interpreter.py/BaseInterpreter.complete
6,233
def load_modules(self):
    self.main_modules_dirs = [module for module in os.listdir(self.modules_directory) if not module.startswith("__")]

    self.modules = []
    self.modules_with_errors = {}
    for root, dirs, files in os.walk(self.modules_directory):
        _, package, root = root.rpartition('routersploit')
        root = "".join((package, root)).replace(os.sep, '.')
        modules = map(lambda x: '.'.join((root, os.path.splitext(x)[0])), filter(lambda x: x.endswith('.py'), files))
        for module_path in modules:
            try:
                module = importlib.import_module(module_path)
            except __HOLE__ as error:
                self.modules_with_errors[module_path] = error
            else:
                klasses = inspect.getmembers(module, inspect.isclass)
                exploits = filter(lambda x: issubclass(x[1], Exploit), klasses)
                # exploits = map(lambda x: '.'.join([module_path.split('.', 2).pop(), x[0]]), exploits)
                # self.modules.extend(exploits)
                if exploits:
                    self.modules.append(module_path.split('.', 2).pop())
ImportError
dataset/ETHPy150Open reverse-shell/routersploit/routersploit/interpreter.py/RoutersploitInterpreter.load_modules
6,234
@property
def prompt(self):
    """ Returns prompt string based on current_module attribute.

    Adding module prefix (module.name) if current_module attribute is set.

    :return: prompt string with appropriate module prefix.
    """
    if self.current_module:
        try:
            return self.module_prompt_template.format(host=self.prompt_hostname,
                                                      module=self.module_metadata['name'])
        except (AttributeError, __HOLE__):
            return self.module_prompt_template.format(host=self.prompt_hostname,
                                                      module="UnnamedModule")
    else:
        return self.raw_prompt_template.format(host=self.prompt_hostname)
KeyError
dataset/ETHPy150Open reverse-shell/routersploit/routersploit/interpreter.py/RoutersploitInterpreter.prompt
6,235
def command_use(self, module_path, *args, **kwargs):
    module_path = utils.pythonize_path(module_path)
    module_path = '.'.join(('routersploit', 'modules', module_path))
    # module_path, _, exploit_name = module_path.rpartition('.')
    try:
        module = importlib.import_module(module_path)
        self.current_module = getattr(module, 'Exploit')()
    except (__HOLE__, AttributeError, KeyError):
        utils.print_error("Error during loading '{}' module. "
                          "It should be valid path to the module. "
                          "Use <tab> key multiple times for completion.".format(utils.humanize_path(module_path)))
ImportError
dataset/ETHPy150Open reverse-shell/routersploit/routersploit/interpreter.py/RoutersploitInterpreter.command_use
6,236
@utils.module_required
def get_opts(self, *args):
    """ Generator returning module's Option attributes (option_name, option_value, option_description)

    :param args: Option names
    :return:
    """
    for opt_key in args:
        try:
            opt_description = self.current_module.exploit_attributes[opt_key]
            opt_value = getattr(self.current_module, opt_key)
        except (__HOLE__, AttributeError):
            pass
        else:
            yield opt_key, opt_value, opt_description
KeyError
dataset/ETHPy150Open reverse-shell/routersploit/routersploit/interpreter.py/RoutersploitInterpreter.get_opts
6,237
def test_dnn_tag():
    """
    Test that if cudnn isn't avail we crash and that if it is avail, we use it.
    """
    x = T.ftensor4()
    old = theano.config.on_opt_error
    theano.config.on_opt_error = "raise"

    sio = StringIO()
    handler = logging.StreamHandler(sio)
    logging.getLogger('theano.compile.tests.test_dnn').addHandler(handler)
    # Silence original handler when intentionnally generating warning messages
    logging.getLogger('theano').removeHandler(theano.logging_default_handler)
    raised = False
    try:
        f = theano.function(
            [x],
            pool_2d(x, ds=(2, 2), ignore_border=True),
            mode=mode_with_gpu.including("cudnn"))
    except (__HOLE__, RuntimeError):
        assert not dnn.dnn_available(test_ctx_name)
        raised = True
    finally:
        theano.config.on_opt_error = old
        logging.getLogger(
            'theano.compile.tests.test_dnn').removeHandler(handler)
        logging.getLogger('theano').addHandler(theano.logging_default_handler)

    if not raised:
        assert dnn.dnn_available(test_ctx_name)
        assert any([isinstance(n.op, dnn.GpuDnnPool)
                    for n in f.maker.fgraph.toposort()])
AssertionError
dataset/ETHPy150Open rizar/attention-lvcsr/libs/Theano/theano/sandbox/gpuarray/tests/test_dnn.py/test_dnn_tag
6,238
def add_edge(self, u, v, key=None, attr_dict=None, **attr):
    """Add an edge between u and v.

    The nodes u and v will be automatically added if they are not
    already in the graph.

    Edge attributes can be specified with keywords or by providing
    a dictionary with key/value pairs.  See examples below.

    Parameters
    ----------
    u, v : nodes
        Nodes can be, for example, strings or numbers.
        Nodes must be hashable (and not None) Python objects.
    key : hashable identifier, optional (default=lowest unused integer)
        Used to distinguish multiedges between a pair of nodes.
    attr_dict : dictionary, optional (default= no attributes)
        Dictionary of edge attributes.  Key/value pairs will
        update existing data associated with the edge.
    attr : keyword arguments, optional
        Edge data (or labels or objects) can be assigned using
        keyword arguments.

    See Also
    --------
    add_edges_from : add a collection of edges

    Notes
    -----
    To replace/update edge data, use the optional key argument
    to identify a unique edge.  Otherwise a new edge will be created.

    NetworkX algorithms designed for weighted graphs cannot use
    multigraphs directly because it is not clear how to handle
    multiedge weights.  Convert to Graph using edge attribute
    'weight' to enable weighted graph algorithms.

    Examples
    --------
    The following all add the edge e=(1,2) to graph G:

    >>> G = nx.Graph()   # or DiGraph, MultiGraph, MultiDiGraph, etc
    >>> e = (1,2)
    >>> G.add_edge(1, 2)           # explicit two-node form
    >>> G.add_edge(*e)             # single edge as tuple of two nodes
    >>> G.add_edges_from( [(1,2)] ) # add edges from iterable container

    Associate data to edges using keywords:

    >>> G.add_edge(1, 2, weight=3)
    >>> G.add_edge(1, 2, key=0, weight=4)   # update data for key=0
    >>> G.add_edge(1, 3, weight=7, capacity=15, length=342.7)
    """
    # set up attribute dict
    if attr_dict is None:
        attr_dict = attr
    else:
        try:
            attr_dict.update(attr)
        except __HOLE__:
            raise NetworkXError(
                "The attr_dict argument must be a dictionary.")
    # add nodes
    if u not in self.adj:
        self.adj[u] = self.adjlist_dict_factory()
        self.node[u] = {}
    if v not in self.adj:
        self.adj[v] = self.adjlist_dict_factory()
        self.node[v] = {}
    if v in self.adj[u]:
        keydict = self.adj[u][v]
        if key is None:
            # find a unique integer key
            # other methods might be better here?
            key = len(keydict)
            while key in keydict:
                key += 1
        datadict = keydict.get(key, self.edge_attr_dict_factory())
        datadict.update(attr_dict)
        keydict[key] = datadict
    else:
        # selfloops work this way without special treatment
        if key is None:
            key = 0
        datadict = self.edge_attr_dict_factory()
        datadict.update(attr_dict)
        keydict = self.edge_key_dict_factory()
        keydict[key] = datadict
        self.adj[u][v] = keydict
        self.adj[v][u] = keydict
AttributeError
dataset/ETHPy150Open networkx/networkx/networkx/classes/multigraph.py/MultiGraph.add_edge
6,239
def add_edges_from(self, ebunch, attr_dict=None, **attr):
    """Add all the edges in ebunch.

    Parameters
    ----------
    ebunch : container of edges
        Each edge given in the container will be added to the
        graph. The edges can be:

            - 2-tuples (u,v) or
            - 3-tuples (u,v,d) for an edge attribute dict d, or
            - 4-tuples (u,v,k,d) for an edge identified by key k

    attr_dict : dictionary, optional (default= no attributes)
        Dictionary of edge attributes.  Key/value pairs will
        update existing data associated with each edge.
    attr : keyword arguments, optional
        Edge data (or labels or objects) can be assigned using
        keyword arguments.

    See Also
    --------
    add_edge : add a single edge
    add_weighted_edges_from : convenient way to add weighted edges

    Notes
    -----
    Adding the same edge twice has no effect but any edge data
    will be updated when each duplicate edge is added.

    Edge attributes specified in edges take precedence
    over attributes specified generally.

    Examples
    --------
    >>> G = nx.Graph()   # or DiGraph, MultiGraph, MultiDiGraph, etc
    >>> G.add_edges_from([(0,1),(1,2)]) # using a list of edge tuples
    >>> e = zip(range(0,3),range(1,4))
    >>> G.add_edges_from(e) # Add the path graph 0-1-2-3

    Associate data to edges

    >>> G.add_edges_from([(1,2),(2,3)], weight=3)
    >>> G.add_edges_from([(3,4),(1,4)], label='WN2898')
    """
    # set up attribute dict
    if attr_dict is None:
        attr_dict = attr
    else:
        try:
            attr_dict.update(attr)
        except __HOLE__:
            raise NetworkXError(
                "The attr_dict argument must be a dictionary.")
    # process ebunch
    for e in ebunch:
        ne = len(e)
        if ne == 4:
            u, v, key, dd = e
        elif ne == 3:
            u, v, dd = e
            key = None
        elif ne == 2:
            u, v = e
            dd = {}
            key = None
        else:
            raise NetworkXError(
                "Edge tuple %s must be a 2-tuple, 3-tuple or 4-tuple." % (e,))
        ddd = {}
        ddd.update(attr_dict)
        ddd.update(dd)
        self.add_edge(u, v, key, ddd)
AttributeError
dataset/ETHPy150Open networkx/networkx/networkx/classes/multigraph.py/MultiGraph.add_edges_from
6,240
def has_edge(self, u, v, key=None):
    """Return True if the graph has an edge between nodes u and v.

    Parameters
    ----------
    u, v : nodes
        Nodes can be, for example, strings or numbers.
    key : hashable identifier, optional (default=None)
        If specified return True only if the edge with
        key is found.

    Returns
    -------
    edge_ind : bool
        True if edge is in the graph, False otherwise.

    Examples
    --------
    Can be called either using two nodes u,v, an edge tuple (u,v),
    or an edge tuple (u,v,key).

    >>> G = nx.MultiGraph()   # or MultiDiGraph
    >>> nx.add_path(G, [0, 1, 2, 3])
    >>> G.has_edge(0,1)  # using two nodes
    True
    >>> e = (0,1)
    >>> G.has_edge(*e)  #  e is a 2-tuple (u,v)
    True
    >>> G.add_edge(0,1,key='a')
    >>> G.has_edge(0,1,key='a')  # specify key
    True
    >>> e=(0,1,'a')
    >>> G.has_edge(*e) # e is a 3-tuple (u,v,'a')
    True

    The following syntax are equivalent:

    >>> G.has_edge(0,1)
    True
    >>> 1 in G[0]  # though this gives :exc:`KeyError` if 0 not in G
    True
    """
    try:
        if key is None:
            return v in self.adj[u]
        else:
            return key in self.adj[u][v]
    except __HOLE__:
        return False
KeyError
dataset/ETHPy150Open networkx/networkx/networkx/classes/multigraph.py/MultiGraph.has_edge
6,241
def get_edge_data(self, u, v, key=None, default=None):
    """Return the attribute dictionary associated with edge (u,v).

    Parameters
    ----------
    u, v : nodes
    default :  any Python object (default=None)
        Value to return if the edge (u,v) is not found.
    key : hashable identifier, optional (default=None)
        Return data only for the edge with specified key.

    Returns
    -------
    edge_dict : dictionary
        The edge attribute dictionary.

    Notes
    -----
    It is faster to use G[u][v][key].

    >>> G = nx.MultiGraph() # or MultiDiGraph
    >>> G.add_edge(0,1,key='a',weight=7)
    >>> G[0][1]['a']  # key='a'
    {'weight': 7}

    Warning: Assigning G[u][v][key] corrupts the graph data structure.
    But it is safe to assign attributes to that dictionary,

    >>> G[0][1]['a']['weight'] = 10
    >>> G[0][1]['a']['weight']
    10
    >>> G[1][0]['a']['weight']
    10

    Examples
    --------
    >>> G = nx.MultiGraph() # or MultiDiGraph
    >>> nx.add_path(G, [0, 1, 2, 3])
    >>> G.get_edge_data(0,1)
    {0: {}}
    >>> e = (0,1)
    >>> G.get_edge_data(*e) # tuple form
    {0: {}}
    >>> G.get_edge_data('a','b',default=0) # edge not in graph, return 0
    0
    """
    try:
        if key is None:
            return self.adj[u][v]
        else:
            return self.adj[u][v][key]
    except __HOLE__:
        return default
KeyError
dataset/ETHPy150Open networkx/networkx/networkx/classes/multigraph.py/MultiGraph.get_edge_data
6,242
def number_of_edges(self, u=None, v=None):
    """Return the number of edges between two nodes.

    Parameters
    ----------
    u, v : nodes, optional (default=all edges)
        If u and v are specified, return the number of edges between
        u and v. Otherwise return the total number of all edges.

    Returns
    -------
    nedges : int
        The number of edges in the graph.  If nodes u and v are specified
        return the number of edges between those nodes.

    See Also
    --------
    size

    Examples
    --------
    >>> G = nx.Graph()   # or DiGraph, MultiGraph, MultiDiGraph, etc
    >>> nx.add_path(G, [0, 1, 2, 3])
    >>> G.number_of_edges()
    3
    >>> G.number_of_edges(0,1)
    1
    >>> e = (0,1)
    >>> G.number_of_edges(*e)
    1
    """
    if u is None:
        return self.size()
    try:
        edgedata = self.adj[u][v]
    except __HOLE__:
        return 0  # no such edge
    return len(edgedata)
KeyError
dataset/ETHPy150Open networkx/networkx/networkx/classes/multigraph.py/MultiGraph.number_of_edges
6,243
def check_formfield(cls, model, opts, label, field):
    if getattr(cls.form, 'base_fields', None):
        try:
            cls.form.base_fields[field]
        except __HOLE__:
            raise ImproperlyConfigured("'%s.%s' refers to field '%s' that "
                "is missing from the form." % (cls.__name__, label, field))
    else:
        fields = fields_for_model(model)
        try:
            fields[field]
        except KeyError:
            raise ImproperlyConfigured("'%s.%s' refers to field '%s' that "
                "is missing from the form." % (cls.__name__, label, field))
KeyError
dataset/ETHPy150Open dcramer/django-compositepks/django/contrib/admin/validation.py/check_formfield
6,244
def fetch_attr(cls, model, opts, label, field):
    try:
        return opts.get_field(field)
    except models.FieldDoesNotExist:
        pass
    try:
        return getattr(model, field)
    except __HOLE__:
        raise ImproperlyConfigured("'%s.%s' refers to '%s' that is neither a field, method or property of model '%s'."
            % (cls.__name__, label, field, model.__name__))
AttributeError
dataset/ETHPy150Open dcramer/django-compositepks/django/contrib/admin/validation.py/fetch_attr
6,245
def import_from_string(val, setting_name):
    """
    Attempt to import a class from a string representation.
    """
    try:
        # Nod to tastypie's use of importlib.
        parts = val.split('.')
        module_path, class_name = '.'.join(parts[:-1]), parts[-1]
        module = importlib.import_module(module_path)
        return getattr(module, class_name)
    except __HOLE__ as exc:
        format = "Could not import '%s' for API setting '%s'. %s."
        msg = format % (val, setting_name, exc)
        raise ImportError(msg)
ImportError
dataset/ETHPy150Open tomchristie/flask-api/flask_api/settings.py/import_from_string
6,246
@staticmethod
def get_variables_from_file(full_filename, file_encoding='utf-8'):
    path, filename = os.path.split(full_filename)
    temp_abspath = None

    global_dict = globals().copy()
    try:
        # add settings dir from path
        sys.path.insert(0, path)
        execfile(full_filename, global_dict, global_dict)
    except __HOLE__:
        raise Exception("%s: file not found" % full_filename)
    except Exception as e:
        try:
            f = open(full_filename, "rU")
            content = f.read()
            f.close()

            temp_abspath = "%s/%s" % (tempfile.gettempdir().rstrip('/'), filename)
            f = open(temp_abspath, "w")
            f.write('#-*- coding:%s -*-\n%s' % (file_encoding, content))
            f.close()

            execfile(temp_abspath, global_dict, global_dict)
        except Exception as e:
            raise Exception("error interpreting config file '%s': %s" % (filename, str(e)))
    finally:
        # erase temp and compiled files
        if temp_abspath and os.path.isfile(temp_abspath):
            os.remove(temp_abspath)

        # remove settings dir from path
        if path in sys.path:
            sys.path.remove(path)

    local_dict = {}
    globals_keys = globals().keys()
    for key in global_dict:
        if key not in globals_keys:
            local_dict[key] = global_dict[key]

    return local_dict
IOError
dataset/ETHPy150Open guilhermechapiewski/simple-db-migrate/simple_db_migrate/helpers.py/Utils.get_variables_from_file
6,247
def getattr(obj, name, *default):
    """a version of getattr() that supports NetProxies"""
    if len(default) > 1:
        raise TypeError("getattr expected at most 3 arguments, got %d" % (2 + len(default),))
    if orig_isinstance(obj, NetProxy):
        try:
            return obj.__getattr__(name)
        except __HOLE__:
            if not default:
                raise
            return default[0]
    else:
        return orig_getattr(obj, name, *default)
AttributeError
dataset/ETHPy150Open deanhiller/databus/webapp/play1.3.x/python/Lib/site-packages/Rpyc/Utils/Builtins.py/getattr
6,248
def hasattr(obj, name):
    """a version of hasattr() that supports NetProxies"""
    try:
        getattr(obj, name)
    except __HOLE__:
        return False
    else:
        return True
AttributeError
dataset/ETHPy150Open deanhiller/databus/webapp/play1.3.x/python/Lib/site-packages/Rpyc/Utils/Builtins.py/hasattr
6,249
def _get_fullname(cls):
    """a heuristic to generate a unique identifier for classes, that is
    not machine-, platform-, or runtime-dependent"""
    if orig_isinstance(cls, NetProxy):
        modules = _get_conn(cls).modules.sys.modules
    else:
        modules = sys.modules
    try:
        filename = modules[cls.__module__].__file__
    except (__HOLE__, AttributeError):
        filename = cls.__module__
    return (filename, cls.__name__)
KeyError
dataset/ETHPy150Open deanhiller/databus/webapp/play1.3.x/python/Lib/site-packages/Rpyc/Utils/Builtins.py/_get_fullname
6,250
def isinstance(obj, bases):
    """a version of isinstance that supports NetProxies"""
    try:
        cls = obj.__getattr__("__class__")
    except __HOLE__:
        try:
            cls = obj.__class__
        except AttributeError:
            cls = orig_type(obj)
    return issubclass(cls, bases)
AttributeError
dataset/ETHPy150Open deanhiller/databus/webapp/play1.3.x/python/Lib/site-packages/Rpyc/Utils/Builtins.py/isinstance
6,251
def stop(self):
    try:
        if self.process:
            # disconnect the subprocess
            try:
                result = self.process.poll()
                if result is None:
                    self.process.kill()
            except __HOLE__:
                pass
            self.process.wait()
            logging.debug("Subprocess has shut down successfully")

            self.isShuttingDown = True

            if self.subprocessOutThread is not None and not self.isSuprocessOutThread():
                self.subprocessStdOutFromOtherSide.write("\n")
                self.subprocessOutThread.join()
                self.subprocessStdOutFromOtherSide.close()

            if self.subprocessErrThread is not None and not self.isSuprocessErrThread():
                self.subprocessStdErrFromOtherSide.write("\n")
                self.subprocessErrThread.join()
                self.subprocessStdErrFromOtherSide.close()

            self.subprocessStdIn.close()

            logging.debug("SubprocessRunner has shut down successfully")
    finally:
        self.isShuttingDown = False
OSError
dataset/ETHPy150Open ufora/ufora/ufora/core/SubprocessRunner.py/SubprocessRunner.stop
6,252
def _fill_result_cache(self):
    """
    Fill the result cache with all results.
    """
    idx = 0
    try:
        while True:
            idx += 1000
            self._fill_result_cache_to_idx(idx)
    except __HOLE__:
        pass

    self._count = len(self._result_cache)
StopIteration
dataset/ETHPy150Open datastax/python-driver/cassandra/cqlengine/query.py/AbstractQuerySet._fill_result_cache
6,253
def _fill_result_cache_to_idx(self, idx):
    self._execute_query()
    if self._result_idx is None:
        self._result_idx = -1

    qty = idx - self._result_idx
    if qty < 1:
        return
    else:
        for idx in range(qty):
            self._result_idx += 1
            while True:
                try:
                    self._result_cache[self._result_idx] = self._construct_result(self._result_cache[self._result_idx])
                    break
                except __HOLE__:
                    self._result_cache.append(next(self._result_generator))
IndexError
dataset/ETHPy150Open datastax/python-driver/cassandra/cqlengine/query.py/AbstractQuerySet._fill_result_cache_to_idx
6,254
def __iter__(self):
    self._execute_query()

    idx = 0
    while True:
        if len(self._result_cache) <= idx:
            try:
                self._result_cache.append(next(self._result_generator))
            except __HOLE__:
                break

        instance = self._result_cache[idx]
        if isinstance(instance, dict):
            self._fill_result_cache_to_idx(idx)
        yield self._result_cache[idx]

        idx += 1
StopIteration
dataset/ETHPy150Open datastax/python-driver/cassandra/cqlengine/query.py/AbstractQuerySet.__iter__
6,255
def __getitem__(self, s):
    self._execute_query()

    if isinstance(s, slice):
        start = s.start if s.start else 0

        # calculate the amount of results that need to be loaded
        end = s.stop
        if start < 0 or s.stop is None or s.stop < 0:
            end = self.count()

        try:
            self._fill_result_cache_to_idx(end)
        except StopIteration:
            pass

        return self._result_cache[start:s.stop:s.step]
    else:
        try:
            s = int(s)
        except (ValueError, TypeError):
            raise TypeError('QuerySet indices must be integers')

        # Using negative indexing is costly since we have to execute a count()
        if s < 0:
            num_results = self.count()
            s += num_results

        try:
            self._fill_result_cache_to_idx(s)
        except __HOLE__:
            raise IndexError

        return self._result_cache[s]
StopIteration
dataset/ETHPy150Open datastax/python-driver/cassandra/cqlengine/query.py/AbstractQuerySet.__getitem__
6,256
def first(self):
    try:
        return six.next(iter(self))
    except __HOLE__:
        return None
StopIteration
dataset/ETHPy150Open datastax/python-driver/cassandra/cqlengine/query.py/AbstractQuerySet.first
6,257
def iff(self, *args, **kwargs):
    """Adds IF statements to queryset"""
    if len([x for x in kwargs.values() if x is None]):
        raise CQLEngineException("None values on iff are not allowed")

    clone = copy.deepcopy(self)
    for operator in args:
        if not isinstance(operator, ConditionalClause):
            raise QueryException('{0} is not a valid query operator'.format(operator))
        clone._conditional.append(operator)

    for arg, val in kwargs.items():
        if isinstance(val, Token):
            raise QueryException("Token() values are not valid in conditionals")

        col_name, col_op = self._parse_filter_arg(arg)
        try:
            column = self.model._get_column(col_name)
        except __HOLE__:
            raise QueryException("Can't resolve column name: '{0}'".format(col_name))

        if isinstance(val, BaseQueryFunction):
            query_val = val
        else:
            query_val = column.to_database(val)

        operator_class = BaseWhereOperator.get_operator(col_op or 'EQ')
        operator = operator_class()
        clone._conditional.append(WhereClause(column.db_field_name, operator, query_val))

    return clone
KeyError
dataset/ETHPy150Open datastax/python-driver/cassandra/cqlengine/query.py/AbstractQuerySet.iff
6,258
def filter(self, *args, **kwargs):
    """
    Adds WHERE arguments to the queryset, returning a new queryset

    See :ref:`retrieving-objects-with-filters`

    Returns a QuerySet filtered on the keyword arguments
    """
    # add arguments to the where clause filters
    if len([x for x in kwargs.values() if x is None]):
        raise CQLEngineException("None values on filter are not allowed")

    clone = copy.deepcopy(self)
    for operator in args:
        if not isinstance(operator, WhereClause):
            raise QueryException('{0} is not a valid query operator'.format(operator))
        clone._where.append(operator)

    for arg, val in kwargs.items():
        col_name, col_op = self._parse_filter_arg(arg)
        quote_field = True

        if not isinstance(val, Token):
            try:
                column = self.model._get_column(col_name)
            except __HOLE__:
                raise QueryException("Can't resolve column name: '{0}'".format(col_name))
        else:
            if col_name != 'pk__token':
                raise QueryException("Token() values may only be compared to the 'pk__token' virtual column")

            column = columns._PartitionKeysToken(self.model)
            quote_field = False

            partition_columns = column.partition_columns
            if len(partition_columns) != len(val.value):
                raise QueryException(
                    'Token() received {0} arguments but model has {1} partition keys'.format(
                        len(val.value), len(partition_columns)))
            val.set_columns(partition_columns)

        # get query operator, or use equals if not supplied
        operator_class = BaseWhereOperator.get_operator(col_op or 'EQ')
        operator = operator_class()

        if isinstance(operator, InOperator):
            if not isinstance(val, (list, tuple)):
                raise QueryException('IN queries must use a list/tuple value')
            query_val = [column.to_database(v) for v in val]
        elif isinstance(val, BaseQueryFunction):
            query_val = val
        elif (isinstance(operator, ContainsOperator) and
              isinstance(column, (columns.List, columns.Set, columns.Map))):
            # For ContainsOperator and collections, we query using the value, not the container
            query_val = val
        else:
            query_val = column.to_database(val)
            if not col_op:  # only equal values should be deferred
                clone._defer_fields.add(col_name)
                clone._deferred_values[column.db_field_name] = val  # map by db field name for substitution in results

        clone._where.append(WhereClause(column.db_field_name, operator, query_val, quote_field=quote_field))

    return clone
KeyError
dataset/ETHPy150Open datastax/python-driver/cassandra/cqlengine/query.py/AbstractQuerySet.filter
6,259
def get(self, *args, **kwargs):
    """
    Returns a single instance matching this query, optionally with additional filter kwargs.

    See :ref:`retrieving-objects-with-filters`

    Returns a single object matching the QuerySet.

    .. code-block:: python

        user = User.get(id=1)

    If no objects are matched, a :class:`~.DoesNotExist` exception is raised.

    If more than one object is found, a :class:`~.MultipleObjectsReturned` exception is raised.
    """
    if args or kwargs:
        return self.filter(*args, **kwargs).get()

    self._execute_query()

    # Check that the resultset only contains one element, avoiding sending a COUNT query
    try:
        self[1]
        raise self.model.MultipleObjectsReturned('Multiple objects found')
    except IndexError:
        pass

    try:
        obj = self[0]
    except __HOLE__:
        raise self.model.DoesNotExist

    return obj
IndexError
dataset/ETHPy150Open datastax/python-driver/cassandra/cqlengine/query.py/AbstractQuerySet.get
6,260
def __getattr__(self, item):
    try:
        return self[item]
    except __HOLE__:
        raise AttributeError
KeyError
dataset/ETHPy150Open datastax/python-driver/cassandra/cqlengine/query.py/ResultObject.__getattr__
6,261
def run(self, host=None, port=None, debug=None, **options):
    import tornado.wsgi
    import tornado.ioloop
    import tornado.httpserver
    import tornado.web

    if host is None:
        host = '127.0.0.1'
    if port is None:
        server_name = self.config['SERVER_NAME']
        if server_name and ':' in server_name:
            port = int(server_name.rsplit(':', 1)[1])
        else:
            port = 5000
    if debug is not None:
        self.debug = bool(debug)

    hostname = host
    port = port
    application = self
    use_reloader = self.debug
    use_debugger = self.debug

    if use_debugger:
        from werkzeug.debug import DebuggedApplication
        application = DebuggedApplication(application, True)

    try:
        from .webdav import dav_app
    except __HOLE__ as e:
        logger.error('WebDav interface not enabled: %r', e)
        dav_app = None
    if dav_app:
        from werkzeug.wsgi import DispatcherMiddleware
        application = DispatcherMiddleware(application, {
            '/dav': dav_app
        })

    container = tornado.wsgi.WSGIContainer(application)
    self.http_server = tornado.httpserver.HTTPServer(container)
    self.http_server.listen(port, hostname)
    if use_reloader:
        from tornado import autoreload
        autoreload.start()

    self.logger.info('webui running on %s:%s', hostname, port)
    self.ioloop = tornado.ioloop.IOLoop.current()
    self.ioloop.start()
ImportError
dataset/ETHPy150Open binux/pyspider/pyspider/webui/app.py/QuitableFlask.run
6,262
def update(self):
    """Called each display loop to update the slide positions."""
    super(MoveIn, self).update()

    if not self.active_transition:
        return

    # figure out which direction is non-zero and move it towards zero
    if self.slide_b_current_x:
        self.slide_b_current_x = int(
            self.slide_b_start_x * (1 - self.percent))
    if self.slide_b_current_y:
        self.slide_b_current_y = int(
            self.slide_b_start_y * (1 - self.percent))

    try:  # try in case super() completed the transition
        # blit slide_a as the background
        self.surface.blit(self.slide_a.surface, (0, 0))

        # blit slide_b on top of it
        self.surface.blit(self.slide_b.surface, (self.slide_b_current_x,
                                                 self.slide_b_current_y))
    except (TypeError, __HOLE__):
        pass
AttributeError
dataset/ETHPy150Open missionpinball/mpf/mpf/media_controller/transitions/move_in.py/MoveIn.update
6,263
def parseEventRange(self, trace, eventRange):
    """
    Convert an event range description string into a concrete event
    range for a trace. The string may be an integral event number or
    a frame number preceded with '#', or a range with ":" as the
    separator between start and end events.

    @param trace        Trace for event
    @param eventNumber  Event range string

    @returns the first and last event number specified by the range
    """
    if not eventRange:
        return (0, len(trace.events))

    def parse(number, base=0, default=0):
        if not number:
            return default
        if number[0] == '+':
            offset = base
            number = number[1:]
        else:
            offset = 0
        try:
            return int(number) + offset
        except __HOLE__:
            assert number[0] == "#"
            targetFrameNumber = int(number[1:])
            frameNumber = 0
            if targetFrameNumber < 0:
                frames = len([e for e in trace.events if self.lookupFunction(e).isFrameMarker])
                targetFrameNumber += frames
            i = 0
            for i, event in enumerate(trace.events[offset:]):
                if frameNumber == targetFrameNumber:
                    break
                if self.lookupFunction(event).isFrameMarker:
                    frameNumber += 1
            return i + offset

    try:
        first, last = eventRange.split(":")
    except ValueError:
        first, last = eventRange, eventRange

    first = parse(first)
    last = parse(last, first, len(trace.events))

    if first == last:
        last += 1

    return (first, last)
ValueError
dataset/ETHPy150Open skyostil/tracy/src/analyzer/Analyzer.py/InteractiveAnalyzer.parseEventRange
6,264
def parseBoolean(self, boolean):
    """
    Parse a boolean descriptor and return True or False accordingly.
    """
    try:
        return bool(int(boolean))
    except __HOLE__:
        if boolean.lower() in ["yes", "on", "true", "enabled"]:
            return True
    return False
ValueError
dataset/ETHPy150Open skyostil/tracy/src/analyzer/Analyzer.py/InteractiveAnalyzer.parseBoolean
6,265
def lookupFunction(self, event):
    """
    Fetch a function from the project's library for an event.

    @param event: Event for which the function should be sought
    @returns a Function or None if it wasn't found
    """
    try:
        library = self.project.targets["code"].library
    except __HOLE__:
        return

    if event.name in library.functions:
        return library.functions[event.name]
KeyError
dataset/ETHPy150Open skyostil/tracy/src/analyzer/Analyzer.py/InteractiveAnalyzer.lookupFunction
6,266
def run(self):
    while not self.done:
        try:
            self.colorizer.setColor(0xff, 0xff, 0xff)
            try:
                sys.stdout.write("%s> " % self.project.config.name)
            except __HOLE__:
                sys.stdout.write("%s> " % "(no project)")
            self.colorizer.resetColor()
            command = raw_input()
        except KeyboardInterrupt:
            break
        except EOFError:
            break
        if not command:
            continue
        try:
            self.execute(command)
        except ExecutionError:
            pass
        except KeyboardInterrupt:
            print
            self.reportError("Interrupted.")
AttributeError
dataset/ETHPy150Open skyostil/tracy/src/analyzer/Analyzer.py/InteractiveAnalyzer.run
6,267
def __unicode__(self):
    # for translated errors we only return the message
    if self.translated:
        return self.message

    # otherwise attach some stuff
    location = 'line %d' % self.lineno
    name = self.filename or self.name
    if name:
        location = 'File "%s", %s' % (name, location)
    lines = [self.message, '  ' + location]

    # if the source is set, add the line to the output
    if self.source is not None:
        try:
            line = self.source.splitlines()[self.lineno - 1]
        except __HOLE__:
            line = None
        if line:
            lines.append('    ' + line.strip())

    return u'\n'.join(lines)
IndexError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/jinja2-2.6/jinja2/exceptions.py/TemplateSyntaxError.__unicode__
6,268
def omz_env():
    try:
        import workflow
        return 'Editorial'
    except ImportError:
        try:
            import scene
            return 'Pythonista'
        except __HOLE__:
            return None
ImportError
dataset/ETHPy150Open cclauss/Ten-lines-or-less/omz_env.py/omz_env
6,269
def request(self, method, url, body=None, headers=None):
    self._method = method
    self._url = url
    try:
        self._body = body.read()
    except __HOLE__:
        self._body = body
    if headers is None:
        headers = []
    elif hasattr(headers, 'items'):
        headers = headers.items()
    self.headers = headers
AttributeError
dataset/ETHPy150Open CollabQ/CollabQ/.google_appengine/google/appengine/dist/httplib.py/HTTPConnection.request
6,270
def getresponse(self):
    if self.port and self.port != self.default_port:
        host = '%s:%s' % (self.host, self.port)
    else:
        host = self.host
    if not self._url.startswith(self.protocol):
        url = '%s://%s%s' % (self.protocol, host, self._url)
    else:
        url = self._url
    headers = dict(self.headers)

    try:
        method = self._method_map[self._method.upper()]
    except __HOLE__:
        raise ValueError("%r is an unrecognized HTTP method" % self._method)

    response = self._fetch(url, self._body, method, headers,
                           self._allow_truncated, self._follow_redirects)
    return HTTPResponse(response)
KeyError
dataset/ETHPy150Open CollabQ/CollabQ/.google_appengine/google/appengine/dist/httplib.py/HTTPConnection.getresponse
6,271
def get_json(self, force=False, silent=False, cache=True):
    """Parses the incoming JSON request data and returns it.  If
    parsing fails the :meth:`on_json_loading_failed` method on the
    request object will be invoked.  By default this function will
    only load the json data if the mimetype is :mimetype:`application/json`
    but this can be overridden by the `force` parameter.

    :param force: if set to ``True`` the mimetype is ignored.
    :param silent: if set to ``True`` this method will fail silently
                   and return ``None``.
    :param cache: if set to ``True`` the parsed JSON data is remembered
                  on the request.
    """
    rv = getattr(self, '_cached_json', _missing)
    if rv is not _missing:
        return rv

    if not (force or self.is_json):
        return None

    # We accept a request charset against the specification as
    # certain clients have been using this in the past.  This
    # fits our general approach of being nice in what we accept
    # and strict in what we send out.
    request_charset = self.mimetype_params.get('charset')
    try:
        data = _get_data(self, cache)
        if request_charset is not None:
            rv = json.loads(data, encoding=request_charset)
        else:
            rv = json.loads(data)
    except __HOLE__ as e:
        if silent:
            rv = None
        else:
            rv = self.on_json_loading_failed(e)
    if cache:
        self._cached_json = rv
    return rv
ValueError
dataset/ETHPy150Open pallets/flask/flask/wrappers.py/Request.get_json
6,272
def key_Return(self, entry, event):
    text = self.getText()
    # Figure out if that Return meant "next line" or "execute."
    try:
        c = code.compile_command(text)
    except SyntaxError, e:
        # This could conceivably piss you off if the client's python
        # doesn't accept keywords that are known to the manhole's
        # python.
        point = buffer.get_iter_at_line_offset(e.lineno, e.offset)
        buffer.place(point)
        # TODO: Componentize!
        self.toplevel.output.append(str(e), "exception")
    except (OverflowError, __HOLE__), e:
        self.toplevel.output.append(str(e), "exception")
    else:
        if c is not None:
            self.sendMessage()
            # Don't insert Return as a newline in the buffer.
            self.history.append(text)
            self.clear()
            # entry.emit_stop_by_name("key_press_event")
            return True
        else:
            # not a complete code block
            return False
    return False
ValueError
dataset/ETHPy150Open kuri65536/python-for-android/python-modules/twisted/twisted/manhole/ui/gtk2manhole.py/ConsoleInput.key_Return
6,273
def discover_post_import_hooks(group):
    try:
        import pkg_resources
    except __HOLE__:
        return

    for entrypoint in pkg_resources.iter_entry_points(group=group):
        callback = _create_import_hook_from_entrypoint(entrypoint)
        register_post_import_hook(callback, entrypoint.name)

# Indicate that a module has been loaded. Any post import hooks which
# were registered against the target module will be invoked. If an
# exception is raised in any of the post import hooks, that will cause
# the import of the target module to fail.
ImportError
dataset/ETHPy150Open GrahamDumpleton/wrapt/src/wrapt/importer.py/discover_post_import_hooks
6,274
def get_biblio_for_id(self, id, provider_url_template=None, cache_enabled=True):
    logger.debug(u"%20s getting biblio for %s" % (self.provider_name, id))

    if not provider_url_template:
        provider_url_template = self.biblio_url_template
    url = self._get_templated_url(provider_url_template, id, "biblio")

    # try to get a response from the data provider
    try:
        response = self.http_get(url, cache_enabled=cache_enabled, allow_redirects=True)
    except provider.ProviderTimeout:
        logger.info(u"%20s ProviderTimeout getting %s so giving up on webpage biblio"
                    % (self.provider_name, id))
        return {}
    except provider.ProviderHttpError:
        logger.info(u"%20s ProviderHttpError getting %s so giving up on webpage biblio"
                    % (self.provider_name, id))
        return {}

    if response.status_code != 200:
        logger.info(u"%20s status_code=%i getting %s so giving up on webpage biblio"
                    % (self.provider_name, response.status_code, id))
        return {}

    # extract the aliases
    try:
        biblio_dict = self._extract_biblio(response.text, id)
    except __HOLE__:  # sometimes has a response but no text in it
        return {}

    return biblio_dict

# use lxml because is html
TypeError
dataset/ETHPy150Open Impactstory/total-impact-webapp/totalimpact/providers/webpage.py/Webpage.get_biblio_for_id
6,275
def _extract_biblio(self, page, id=None):
    biblio_dict = {}
    if not page:
        return biblio_dict

    unicode_page = to_unicode_or_bust(page)

    try:
        parsed_html = lxml.html.document_fromstring(unicode_page)
        try:
            response = parsed_html.find(".//title").text
            if response and response.strip():
                biblio_dict["title"] = response.strip()
        except AttributeError:
            pass
        try:
            response = parsed_html.find(".//h1").text
            if response and response.strip():
                biblio_dict["h1"] = response.strip()
        except AttributeError:
            pass
    # throws ParserError when document is empty
    except (ValueError, lxml.etree.ParserError):
        logger.warning(u"%20s couldn't parse %s so giving up on webpage biblio"
                       % (self.provider_name, id))
        try:
            response = re.search("<title>(.+?)</title>", unicode_page).group(1)
            response.replace("\n", "")
            response.replace("\r", "")
            if response:
                biblio_dict["title"] = response.strip()
        except __HOLE__:
            pass

    return biblio_dict

# overriding because don't need to look up
AttributeError
dataset/ETHPy150Open Impactstory/total-impact-webapp/totalimpact/providers/webpage.py/Webpage._extract_biblio
6,276
def parse_mime_type(mime_type):
    """Carves up a mime-type and returns a tuple of the
    (type, subtype, params) where 'params' is a dictionary
    of all the parameters for the media range.

    For example, the media range 'application/xhtml;q=0.5' would
    get parsed into:

    ('application', 'xhtml', {'q', '0.5'})
    """
    type = mime_type.split(';')
    type, plist = type[0], type[1:]
    try:
        type, subtype = type.split('/', 1)
    except __HOLE__:
        type, subtype = type.strip() or '*', '*'
    else:
        type = type.strip() or '*'
        subtype = subtype.strip() or '*'
    params = {}
    for param in plist:
        param = param.split('=', 1)
        if len(param) == 2:
            key, value = param[0].strip(), param[1].strip()
            if key and value:
                params[key] = value
    return type, subtype, params
ValueError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/Paste-2.0.1/paste/util/mimeparse.py/parse_mime_type
6,277
def parse_media_range(range):
    """Carves up a media range and returns a tuple of the
    (type, subtype, params) where 'params' is a dictionary
    of all the parameters for the media range.

    For example, the media range 'application/*;q=0.5' would
    get parsed into:

    ('application', '*', {'q', '0.5'})

    In addition this function also guarantees that there
    is a value for 'q' in the params dictionary, filling it
    in with a proper default if necessary.
    """
    type, subtype, params = parse_mime_type(range)
    try:
        if not 0 <= float(params['q']) <= 1:
            raise ValueError
    except (KeyError, __HOLE__):
        params['q'] = '1'
    return type, subtype, params
ValueError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/Paste-2.0.1/paste/util/mimeparse.py/parse_media_range
6,278
def get_snippets_file():
    snippets_file = os.path.join(os.path.expanduser('~'), '.clf.yaml')

    if not os.path.isfile(snippets_file):
        try:
            f = open(snippets_file, 'w')
            f.close()
        except __HOLE__:
            raise OSException('Could not create {}'.format(snippets_file))

    return snippets_file
OSError
dataset/ETHPy150Open ncrocfer/clf/clf/utils.py/get_snippets_file
6,279
def LoadJsonFile(self, path, expand=False):
    """Loads a file but ignores the broken ones.

    Fails a test assertion if the file is not loadable.

    Args:
      path: (str) path to file.
      expand: (bool, default False) whether to expand as a Json template.
    Returns:
      (dict) or None if the file is in a white list of known broken files.
    """
    json_file = open(path)
    content = json_file.read()
    self.assertLess(1, len(content))
    json_file.close()
    try:
        json_data = json_with_comments.Loads(content)
    except __HOLE__ as err:
        # Ignore the known broken files.
        if not path.endswith('testdata/broken.json'):
            self.fail('%s: %s' % (path, err))
        return None
    if expand:
        json_data = json_expander.ExpandJsonTemplate(json_data)
    return json_data
ValueError
dataset/ETHPy150Open google/apis-client-generator/src/googleapis/codegen/configuration_test.py/ConfigurationTest.LoadJsonFile
6,280
def serialize(obj, fields=None, invalid=ABORT):
    """
    Serialize a GitModel object to JSON.

    fields: When None, serializes all fields. Otherwise, only the given
            fields will be returned in the serialized output.

    invalid: If a field cannot be coerced into its respective data type, a
             ValidationError will be raised. When invalid is ABORT, this
             exception is re-raised. SET_EMPTY causes the value to be set to
             an empty value. IGNORE simply uses the current value. Note that
             serialization may still fail with IGNORE if a value is not
             serializable.
    """
    pyobj = OrderedDict({
        'model': obj._meta.model_name,
        'fields': {}
    })
    for field in obj._meta.fields:
        if fields is None or field.name in fields:
            if field.serializable:
                try:
                    value = field.serialize(obj)
                except __HOLE__:
                    if invalid == SET_EMPTY:
                        value = field.empty_value
                    elif invalid == IGNORE:
                        value = getattr(obj, field.name)
                    else:
                        raise
                pyobj['fields'][field.name] = value
    return pyobj
ValidationError
dataset/ETHPy150Open bendavis78/python-gitmodel/gitmodel/serializers/python.py/serialize
6,281
def deserialize(workspace, data, oid, invalid=IGNORE):
    """
    Load a python dict as a GitModel instance.

    model: the model class representing the data

    data: a valid JSON string

    invalid: If a field cannot be coerced into its respective data type, a
             ``ValidationError`` will be raised. When ``invalid`` is
             ``ABORT``, this exception is re-raised. ``SET_EMPTY`` causes the
             value to be set to an empty value. ``IGNORE`` simply uses the
             raw value.
    """
    attrs = {'oid': oid}

    try:
        model = workspace.models[data['model']]
    except __HOLE__:
        raise ModelNotFound(data['model'])

    for field in model._meta.fields:
        value = data['fields'].get(field.name)
        # field.deserialize() calls field.to_python(). If a serialized value
        # cannot be coerced into the correct type for its field, just assign
        # the raw value.
        try:
            value = field.deserialize(data, value)
        except ValidationError:
            if invalid == SET_EMPTY:
                value = field.empty_value
            elif invalid == ABORT:
                raise
        attrs[field.name] = value

    return model(**attrs)
KeyError
dataset/ETHPy150Open bendavis78/python-gitmodel/gitmodel/serializers/python.py/deserialize
6,282
def _call(self, host, method, params, secret=None, timeout=5000):
    params['format'] = params.get('format', 'json')  # default to json

    if self.access_token:
        scheme = 'https'
        params['access_token'] = self.access_token
        host = self.ssl_host
    else:
        scheme = 'http'
        params['login'] = self.login
        params['apiKey'] = self.api_key

    if secret:
        params['signature'] = self._generateSignature(params, secret)

    # force to utf8 to fix ascii codec errors
    params = _utf8_params(params)

    request = "%(scheme)s://%(host)s/%(method)s?%(params)s" % {
        'scheme': scheme,
        'host': host,
        'method': method,
        'params': urlencode(params, doseq=1)
    }

    try:
        opener = build_opener(DontRedirect())
        opener.addheaders = [('User-agent', self.user_agent + ' urllib')]
        response = opener.open(request)
        code = response.code
        result = response.read().decode('utf-8')
        if code != 200:
            raise BitlyError(500, result)
        if not result.startswith('{'):
            raise BitlyError(500, result)
        data = json.loads(result)
        if data.get('status_code', 500) != 200:
            raise BitlyError(data.get('status_code', 500),
                             data.get('status_txt', 'UNKNOWN_ERROR'))
        return data
    except URLError as e:
        raise BitlyError(500, str(e))
    except __HOLE__ as e:
        raise BitlyError(e.code, e.read())
    except BitlyError:
        raise
    except Exception:
        raise BitlyError(None, sys.exc_info()[1])
HTTPError
dataset/ETHPy150Open bitly/bitly-api-python/bitly_api/bitly_api.py/Connection._call
6,283
def do_action(self, acs_request):
    ep = region_provider.find_product_domain(self.get_region_id(), acs_request.get_product())
    if ep is None:
        raise exs.ClientException(error_code.SDK_INVALID_REGION_ID,
                                  error_msg.get_msg('SDK_INVALID_REGION_ID'))
    if not isinstance(acs_request, AcsRequest):
        raise exs.ClientException(error_code.SDK_INVALID_REQUEST,
                                  error_msg.get_msg('SDK_INVALID_REQUEST'))
    try:
        # style = acs_request.get_style()
        content = acs_request.get_content()
        method = acs_request.get_method()
        header = acs_request.get_signed_header(self.get_region_id(),
                                               self.get_access_key(),
                                               self.get_access_secret())
        if self.get_user_agent() is not None:
            header['User-Agent'] = self.get_user_agent()
            header['x-sdk-client'] = 'python/2.0.0'
        protocol = acs_request.get_protocol_type()
        prefix = self.__replace_occupied_params(acs_request.get_domain_pattern(),
                                                acs_request.get_domain_params())
        url = acs_request.get_url(self.get_region_id(), self.get_access_key(),
                                  self.get_access_secret())
        if prefix is None:
            response = HttpResponse(ep, url, method,
                                    {} if header is None else header,
                                    protocol, content, self.__port)
        else:
            response = HttpResponse(prefix + ',' + ep, url, method,
                                    {} if header is None else header,
                                    protocol, content, self.__port)
        _header, _body = response.get_response()
        # if _body is None:
        #     raise exs.ClientException(error_code.SDK_SERVER_UNREACHABLE,
        #                               error_msg.get_msg('SDK_SERVER_UNREACHABLE'))
        return _body
    except IOError:
        raise exs.ClientException(error_code.SDK_SERVER_UNREACHABLE,
                                  error_msg.get_msg('SDK_SERVER_UNREACHABLE'))
    except __HOLE__:
        raise exs.ClientException(error_code.SDK_INVALID_REQUEST,
                                  error_msg.get_msg('SDK_INVALID_REQUEST'))
AttributeError
dataset/ETHPy150Open aliyun/aliyun-openapi-python-sdk/aliyun-python-sdk-core/aliyunsdkcore/client.py/AcsClient.do_action
6,284
def get_response(self, acs_request):
    ep = region_provider.find_product_domain(self.get_region_id(), acs_request.get_product())
    if ep is None:
        raise exs.ClientException(error_code.SDK_INVALID_REGION_ID,
                                  error_msg.get_msg('SDK_INVALID_REGION_ID'))
    if not isinstance(acs_request, AcsRequest):
        raise exs.ClientException(error_code.SDK_INVALID_REQUEST,
                                  error_msg.get_msg('SDK_INVALID_REQUEST'))
    try:
        # style = acs_request.get_style()
        content = acs_request.get_content()
        method = acs_request.get_method()
        header = acs_request.get_signed_header(self.get_region_id(),
                                               self.get_access_key(),
                                               self.get_access_secret())
        if self.get_user_agent() is not None:
            header['User-Agent'] = self.get_user_agent()
            header['x-sdk-client'] = 'python/2.0.0'
        protocol = acs_request.get_protocol_type()
        prefix = self.__replace_occupied_params(acs_request.get_domain_pattern(),
                                                acs_request.get_domain_params())
        url = acs_request.get_url(self.get_region_id(), self.get_access_key(),
                                  self.get_access_secret())
        if prefix is None:
            _response = HttpResponse(ep, url, method,
                                     {} if header is None else header,
                                     protocol, content, self.__port)
        else:
            _response = HttpResponse(prefix + ',' + ep, url, method,
                                     {} if header is None else header,
                                     protocol, content, self.__port)
        return _response.get_response_object()
    except __HOLE__:
        raise exs.ClientException(error_code.SDK_SERVER_UNREACHABLE,
                                  error_msg.get_msg('SDK_SERVER_UNREACHABLE'))
    except AttributeError:
        raise exs.ClientException(error_code.SDK_INVALID_REQUEST,
                                  error_msg.get_msg('SDK_INVALID_REQUEST'))
IOError
dataset/ETHPy150Open aliyun/aliyun-openapi-python-sdk/aliyun-python-sdk-core/aliyunsdkcore/client.py/AcsClient.get_response
6,285
def fit(dataset, n_clusters=5, max_iterations=10, random_state=None, save_results=True, show=False):
    """
    Optimize k-clustering for `iterations` iterations with cluster center
    definitions as given in `center`.
    """
    from disco.job import Job
    from disco.worker.pipeline.worker import Worker, Stage
    from disco.core import result_iterator

    try:
        n_clusters = int(n_clusters)
        max_iterations = int(max_iterations)
        if n_clusters < 2:
            raise Exception("Parameter n_clusters should be greater than 1.")
        if max_iterations < 1:
            raise Exception("Parameter max_iterations should be greater than 0.")
    except __HOLE__:
        raise Exception("Parameters should be numerical.")

    job = Job(worker=Worker(save_results=save_results))
    job.pipeline = [("split",
                     Stage("kmeans_init_map", input_chain=dataset.params["input_chain"],
                           init=map_init, process=random_init_map)),
                    ('group_label',
                     Stage("kmeans_init_reduce", process=estimate_reduce,
                           init=simple_init, combine=True))]
    job.params = dict(dataset.params.items() + mean_point_center.items())
    job.params['seed'] = random_state
    job.params['k'] = n_clusters

    job.run(input=dataset.params["data_tag"], name="kmeans_init")
    init = job.wait(show=show)
    centers = [(i, c) for i, c in result_iterator(init)]

    for j in range(max_iterations):
        job = Job(worker=Worker(save_results=save_results))
        job.params = dict(dataset.params.items() + mean_point_center.items())
        job.params['k'] = n_clusters
        job.params['centers'] = centers

        job.pipeline = [('split',
                         Stage("kmeans_map_iter_%s" % (j + 1,),
                               input_chain=dataset.params["input_chain"],
                               process=estimate_map, init=simple_init)),
                        ('group_label',
                         Stage("kmeans_reduce_iter_%s" % (j + 1,),
                               process=estimate_reduce, init=simple_init, combine=True))]

        job.run(input=dataset.params["data_tag"], name='kmeans_iter_%d' % (j + 1,))
        fitmodel_url = job.wait(show=show)
        centers = [(i, c) for i, c in result_iterator(fitmodel_url)]

    return {"kmeans_fitmodel": fitmodel_url}  # return results url
ValueError
dataset/ETHPy150Open romanorac/discomll/discomll/clustering/kmeans.py/fit
6,286
def args_func(args, p):
    try:
        args.func(args, p)
    except __HOLE__ as e:
        sys.exit("Error: %s" % e)
    except Exception as e:
        if e.__class__.__name__ not in ('ScannerError', 'ParserError'):
            message = """\
An unexpected error has occurred with osprey (version %s), please
consider sending the following traceback to the osprey GitHub issue
tracker at:

        https://github.com/pandegroup/osprey/issues
"""
            print(message % __version__, file=sys.stderr)
        raise  # as if we did not catch it
RuntimeError
dataset/ETHPy150Open msmbuilder/osprey/osprey/cli/main.py/args_func
6,287
def NormalizeAndTypeCheck(arg, types):
    """Normalizes and type checks the given argument.

    Args:
      arg: an instance or iterable of the given type(s)
      types: allowed type or tuple of types

    Returns:
      A (list, bool) tuple. The list is a normalized, shallow copy of the
      argument. The boolean is True if the argument was a sequence, False
      if it was a single object.

    Raises:
      AssertionError: types includes list or tuple.
      BadArgumentError: arg is not an instance or sequence of one of the
        given types.
    """
    if not isinstance(types, (list, tuple)):
        types = (types,)

    assert list not in types and tuple not in types

    if isinstance(arg, types):
        return [arg], False
    else:
        if isinstance(arg, basestring):
            raise datastore_errors.BadArgumentError(
                'Expected an instance or iterable of %s; received %s (a %s).' %
                (types, arg, typename(arg)))

        try:
            arg_list = list(arg)
        except __HOLE__:
            raise datastore_errors.BadArgumentError(
                'Expected an instance or iterable of %s; received %s (a %s).' %
                (types, arg, typename(arg)))

        for val in arg_list:
            if not isinstance(val, types):
                raise datastore_errors.BadArgumentError(
                    'Expected one of %s; received %s (a %s).' %
                    (types, val, typename(val)))

        return arg_list, True
TypeError
dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/api/datastore.py/NormalizeAndTypeCheck
6,288
def _GetConnection():
    """Retrieve a datastore connection local to the thread."""
    connection = None
    if os.getenv(_ENV_KEY):
        try:
            connection = _thread_local.connection
        except __HOLE__:
            pass
    if connection is None:
        connection = datastore_rpc.Connection(adapter=_adapter)
        _SetConnection(connection)
    return connection
AttributeError
dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/api/datastore.py/_GetConnection
6,289
@staticmethod
def _FromPb(pb, require_valid_key=True, default_kind='<not specified>'):
    """Static factory method. Returns the Entity representation of the
    given protocol buffer (datastore_pb.Entity).

    Not intended to be used by application developers.

    The Entity PB's key must be complete. If it isn't, an AssertionError is
    raised.

    Args:
      # a protocol buffer Entity
      pb: datastore_pb.Entity
      default_kind: str, the kind to use if the pb has no key.

    Returns:
      # the Entity representation of the argument
      Entity
    """
    if not pb.key().path().element_size():
        pb.mutable_key().CopyFrom(Key.from_path(default_kind, 0)._ToPb())

    last_path = pb.key().path().element_list()[-1]
    if require_valid_key:
        assert last_path.has_id() ^ last_path.has_name()
        if last_path.has_id():
            assert last_path.id() != 0
        else:
            assert last_path.has_name()
            assert last_path.name()

    unindexed_properties = [unicode(p.name(), 'utf-8')
                            for p in pb.raw_property_list()]

    if pb.key().has_name_space():
        namespace = pb.key().name_space()
    else:
        namespace = ''
    e = Entity(unicode(last_path.type(), 'utf-8'),
               unindexed_properties=unindexed_properties,
               _app=pb.key().app(), namespace=namespace)
    ref = e.__key._Key__reference
    ref.CopyFrom(pb.key())

    temporary_values = {}

    for prop_list in (pb.property_list(), pb.raw_property_list()):
        for prop in prop_list:
            if prop.meaning() == entity_pb.Property.INDEX_VALUE:
                e.__projection = True
            try:
                value = datastore_types.FromPropertyPb(prop)
            except (AssertionError, AttributeError, __HOLE__, ValueError), e:
                raise datastore_errors.Error(
                    'Property %s is corrupt in the datastore:\n%s' %
                    (prop.name(), traceback.format_exc()))

            multiple = prop.multiple()
            if multiple:
                value = [value]

            name = prop.name()
            cur_value = temporary_values.get(name)
            if cur_value is None:
                temporary_values[name] = value
            elif not multiple or not isinstance(cur_value, list):
                raise datastore_errors.Error(
                    'Property %s is corrupt in the datastore; it has multiple '
                    'values, but is not marked as multiply valued.' % name)
            else:
                cur_value.extend(value)

    for name, value in temporary_values.iteritems():
        decoded_name = unicode(name, 'utf-8')

        datastore_types.ValidateReadProperty(decoded_name, value)

        dict.__setitem__(e, decoded_name, value)

    return e
TypeError
dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/api/datastore.py/Entity._FromPb
6,290
def _CheckFilter(self, filter, values):
    """Type check a filter string and list of values.

    Raises BadFilterError if the filter string is empty, not a string, or
    invalid. Raises BadValueError if the value type is not supported.

    Args:
      filter: String containing the filter text.
      values: List of associated filter values.

    Returns:
      re.MatchObject (never None) that matches the 'filter'. Group 1 is
      the property name, group 3 is the operator. (Group 2 is unused.)
    """
    try:
        match = Query.FILTER_REGEX.match(filter)
        if not match:
            raise datastore_errors.BadFilterError(
                'Could not parse filter string: %s' % str(filter))
    except __HOLE__:
        raise datastore_errors.BadFilterError(
            'Could not parse filter string: %s' % str(filter))

    property = match.group(1)
    operator = match.group(3)
    if operator is None:
        operator = '='

    if isinstance(values, tuple):
        values = list(values)
    elif not isinstance(values, list):
        values = [values]
    if isinstance(values[0], datastore_types._RAW_PROPERTY_TYPES):
        raise datastore_errors.BadValueError(
            'Filtering on %s properties is not supported.' % typename(values[0]))

    if (operator in self.INEQUALITY_OPERATORS and
        property != datastore_types._UNAPPLIED_LOG_TIMESTAMP_SPECIAL_PROPERTY):
        if self.__inequality_prop and property != self.__inequality_prop:
            raise datastore_errors.BadFilterError(
                'Only one property per query may have inequality filters (%s).' %
                ', '.join(self.INEQUALITY_OPERATORS))
        elif len(self.__orderings) >= 1 and self.__orderings[0][0] != property:
            raise datastore_errors.BadFilterError(
                'Inequality operators (%s) must be on the same property as the '
                'first sort order, if any sort orders are supplied' %
                ', '.join(self.INEQUALITY_OPERATORS))

    if (self.__kind is None and
        property != datastore_types.KEY_SPECIAL_PROPERTY and
        property != datastore_types._UNAPPLIED_LOG_TIMESTAMP_SPECIAL_PROPERTY):
        raise datastore_errors.BadFilterError(
            'Only %s filters are allowed on kindless queries.' %
            datastore_types.KEY_SPECIAL_PROPERTY)

    if property == datastore_types._UNAPPLIED_LOG_TIMESTAMP_SPECIAL_PROPERTY:
        if self.__kind:
            raise datastore_errors.BadFilterError(
                'Only kindless queries can have %s filters.' %
                datastore_types._UNAPPLIED_LOG_TIMESTAMP_SPECIAL_PROPERTY)
        if not operator in self.UPPERBOUND_INEQUALITY_OPERATORS:
            raise datastore_errors.BadFilterError(
                'Only %s operators are supported with %s filters.' % (
                    self.UPPERBOUND_INEQUALITY_OPERATORS,
                    datastore_types._UNAPPLIED_LOG_TIMESTAMP_SPECIAL_PROPERTY))

    if property in datastore_types._SPECIAL_PROPERTIES:
        if property == datastore_types.KEY_SPECIAL_PROPERTY:
            for value in values:
                if not isinstance(value, Key):
                    raise datastore_errors.BadFilterError(
                        '%s filter value must be a Key; received %s (a %s)' %
                        (datastore_types.KEY_SPECIAL_PROPERTY, value, typename(value)))

    return match
TypeError
dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/api/datastore.py/Query._CheckFilter
6,291
def __init__(self, entity_iterator, orderings):
    """Ctor.

    Args:
      entity_iterator: an iterator of entities which will be wrapped.
      orderings: an iterable of (identifier, order) pairs. order should be
        either Query.ASCENDING or Query.DESCENDING.
    """
    self.__entity_iterator = entity_iterator
    self.__entity = None
    self.__min_max_value_cache = {}
    try:
        self.__entity = entity_iterator.next()
    except __HOLE__:
        pass
    else:
        self.__orderings = orderings
StopIteration
dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/api/datastore.py/MultiQuery.SortOrderEntity.__init__
6,292
def Run(self, **kwargs):
    """Return an iterable output with all results in order.

    Merge sort the results. First create a list of iterators, then walk
    through them and yield results in order.

    Args:
      kwargs: Any keyword arguments accepted by datastore_query.QueryOptions().

    Returns:
      An iterator for the result set.
    """
    config = _GetConfigFromKwargs(kwargs, convert_rpc=True,
                                  config_class=datastore_query.QueryOptions)
    if config and config.keys_only:
        raise datastore_errors.BadRequestError(
            'keys only queries are not supported by multi-query.')

    lower_bound, upper_bound, config = self._ExtractBounds(config)

    projection, override = self.__GetProjectionOverride(config)
    if override:
        config = datastore_query.QueryOptions(projection=override, config=config)

    results = []
    count = 1
    log_level = logging.DEBUG - 1
    for bound_query in self.__bound_queries:
        logging.log(log_level, 'Running query #%i' % count)
        results.append(bound_query.Run(config=config))
        count += 1

    def GetDedupeKey(sort_order_entity):
        if projection:
            return (sort_order_entity.GetEntity().key(),
                    frozenset(sort_order_entity.GetEntity().iteritems()))
        else:
            return sort_order_entity.GetEntity().key()

    def IterateResults(results):
        """Iterator function to return all results in sorted order.

        Iterate over the array of results, yielding the next element, in
        sorted order. This function is destructive (results will be empty
        when the operation is complete).

        Args:
          results: list of result iterators to merge and iterate through

        Yields:
          The next result in sorted order.
        """
        result_heap = []
        for result in results:
            heap_value = MultiQuery.SortOrderEntity(result, self.__orderings)
            if heap_value.GetEntity():
                heapq.heappush(result_heap, heap_value)

        used_keys = set()

        while result_heap:
            if upper_bound is not None and len(used_keys) >= upper_bound:
                break

            top_result = heapq.heappop(result_heap)
            dedupe_key = GetDedupeKey(top_result)
            if dedupe_key not in used_keys:
                result = top_result.GetEntity()
                if override:
                    for key in result.keys():
                        if key not in projection:
                            del result[key]
                yield result
            else:
                pass

            used_keys.add(dedupe_key)

            results_to_push = []
            while result_heap:
                next = heapq.heappop(result_heap)
                if dedupe_key != GetDedupeKey(next):
                    results_to_push.append(next)
                    break
                else:
                    results_to_push.append(next.GetNext())
            results_to_push.append(top_result.GetNext())

            for popped_result in results_to_push:
                if popped_result.GetEntity():
                    heapq.heappush(result_heap, popped_result)

    it = IterateResults(results)

    try:
        for _ in xrange(lower_bound):
            it.next()
    except __HOLE__:
        pass

    return it
StopIteration
dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/api/datastore.py/MultiQuery.Run
6,293
def __delitem__(self, query_filter):
    """Delete a filter by deleting it from all subqueries.

    If a KeyError is raised during the attempt, it is ignored, unless
    every subquery raised a KeyError. If any other exception is raised,
    any deletes will be rolled back.

    Args:
      query_filter: the filter to delete.

    Raises:
      KeyError: No subquery had an entry containing query_filter.
    """
    subquery_count = len(self.__bound_queries)
    keyerror_count = 0
    saved_items = []
    for index, query in enumerate(self.__bound_queries):
        try:
            saved_items.append(query.get(query_filter, None))
            del query[query_filter]
        except __HOLE__:
            keyerror_count += 1
        except:
            for q, old_value in itertools.izip(self.__bound_queries[:index],
                                               saved_items):
                if old_value is not None:
                    q[query_filter] = old_value
            raise

    if keyerror_count == subquery_count:
        raise KeyError(query_filter)
KeyError
dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/api/datastore.py/MultiQuery.__delitem__
6,294
def __getattr__(self, name):
    # type: (str) -> Any
    try:
        return self[name]
    except __HOLE__:
        raise AttributeError(name)
KeyError
dataset/ETHPy150Open tornadoweb/tornado/tornado/util.py/ObjectDict.__getattr__
6,295
def import_object(name):
    # type: (_BaseString) -> Any
    """Imports an object by name.

    import_object('x') is equivalent to 'import x'.
    import_object('x.y.z') is equivalent to 'from x.y import z'.

    >>> import tornado.escape
    >>> import_object('tornado.escape') is tornado.escape
    True
    >>> import_object('tornado.escape.utf8') is tornado.escape.utf8
    True
    >>> import_object('tornado') is tornado
    True
    >>> import_object('tornado.missing_module')
    Traceback (most recent call last):
        ...
    ImportError: No module named missing_module
    """
    if not isinstance(name, str):
        # on python 2 a byte string is required.
        name = name.encode('utf-8')
    if name.count('.') == 0:
        return __import__(name, None, None)

    parts = name.split('.')
    obj = __import__('.'.join(parts[:-1]), None, None, [parts[-1]], 0)
    try:
        return getattr(obj, parts[-1])
    except __HOLE__:
        raise ImportError("No module named %s" % parts[-1])

# Stubs to make mypy happy (and later for actual type-checking).
AttributeError
dataset/ETHPy150Open tornadoweb/tornado/tornado/util.py/import_object
6,296
def __init__(self, func, name):
    # type: (Callable, str) -> None
    self.name = name
    try:
        self.arg_pos = self._getargnames(func).index(name)
    except __HOLE__:
        # Not a positional parameter
        self.arg_pos = None
ValueError
dataset/ETHPy150Open tornadoweb/tornado/tornado/util.py/ArgReplacer.__init__
6,297
def _getargnames(self, func):
    # type: (Callable) -> List[str]
    try:
        return getargspec(func).args
    except __HOLE__:
        if hasattr(func, 'func_code'):
            # Cython-generated code has all the attributes needed
            # by inspect.getargspec, but the inspect module only
            # works with ordinary functions. Inline the portion of
            # getargspec that we need here. Note that for static
            # functions the @cython.binding(True) decorator must
            # be used (for methods it works out of the box).
            code = func.func_code  # type: ignore
            return code.co_varnames[:code.co_argcount]
        raise
TypeError
dataset/ETHPy150Open tornadoweb/tornado/tornado/util.py/ArgReplacer._getargnames
6,298
def do_run_tests(project, logger, execution_prefix, execution_name):
    test_dir = _register_test_and_source_path_and_return_test_dir(project, sys.path, execution_prefix)

    file_suffix = project.get_property("%s_file_suffix" % execution_prefix)
    if file_suffix is not None:
        logger.warn("%(prefix)s_file_suffix is deprecated, please use %(prefix)s_module_glob" %
                    {"prefix": execution_prefix})
        module_glob = "*{0}".format(file_suffix)
        if module_glob.endswith(".py"):
            WITHOUT_DOT_PY = slice(0, -3)
            module_glob = module_glob[WITHOUT_DOT_PY]
        project.set_property("%s_module_glob" % execution_prefix, module_glob)
    else:
        module_glob = project.get_property("%s_module_glob" % execution_prefix)

    logger.info("Executing %s from Python modules in %s", execution_name, test_dir)
    logger.debug("Including files matching '%s'", module_glob)

    try:
        test_method_prefix = project.get_property("%s_test_method_prefix" % execution_prefix)
        runner_generator = project.get_property("%s_runner" % execution_prefix)
        result, console_out = execute_tests_matching(runner_generator, logger, test_dir,
                                                     module_glob, test_method_prefix)

        if result.testsRun == 0:
            logger.warn("No %s executed.", execution_name)
        else:
            logger.info("Executed %d %s", result.testsRun, execution_name)

        write_report(execution_prefix, project, logger, result, console_out)

        if not result.wasSuccessful():
            raise BuildFailedException("There were %d error(s) and %d failure(s) in %s" %
                                       (len(result.errors), len(result.failures), execution_name))
        logger.info("All %s passed.", execution_name)
    except __HOLE__ as e:
        import traceback
        _, _, import_error_traceback = sys.exc_info()
        file_with_error, error_line, _, statement_causing_error = traceback.extract_tb(import_error_traceback)[-1]
        logger.error("Import error in test file {0}, due to statement '{1}' on line {2}".format(
            file_with_error, statement_causing_error, error_line))
        logger.error("Error importing %s: %s", execution_prefix, e)
        raise BuildFailedException("Unable to execute %s." % execution_name)
ImportError
dataset/ETHPy150Open pybuilder/pybuilder/src/main/python/pybuilder/plugins/python/unittest_plugin.py/do_run_tests
6,299
def setup_env(dev_appserver_version=1):
    """Configures GAE environment for command-line apps."""
    if dev_appserver_version not in (1, 2):
        raise Exception('Invalid dev_appserver_version setting, expected 1 or 2, got %s' %
                        dev_appserver_version)

    # Try to import the appengine code from the system path.
    try:
        from google.appengine.api import apiproxy_stub_map
    except ImportError:
        for k in [k for k in sys.modules if k.startswith('google')]:
            del sys.modules[k]

        # Not on the system path. Build a list of alternative paths
        # where it may be. First look within the project for a local
        # copy, then look for where the Mac OS SDK installs it.
        paths = [os.path.join(PROJECT_DIR, 'google_appengine'),
                 os.environ.get('APP_ENGINE_SDK'),
                 '/usr/local/google_appengine',
                 '/usr/local/opt/google-app-engine/share/google-app-engine',
                 '/Applications/GoogleAppEngineLauncher.app/Contents/Resources/GoogleAppEngine-default.bundle/Contents/Resources/google_appengine']
        for path in os.environ.get('PATH', '').split(os.pathsep):
            path = path.rstrip(os.sep)
            if path.endswith('google_appengine'):
                paths.append(path)
        if os.name in ('nt', 'dos'):
            path = r'%(PROGRAMFILES)s\Google\google_appengine' % os.environ
            paths.append(path)

        # Loop through all possible paths and look for the SDK dir.
        sdk_path = None
        for path in paths:
            if not path:
                continue
            path = os.path.expanduser(path)
            path = os.path.realpath(path)
            if os.path.exists(path):
                sdk_path = path
                break

        # The SDK could not be found in any known location.
        if sdk_path is None:
            sys.stderr.write("The Google App Engine SDK could not be found!\n"
                             "Make sure it's accessible via your PATH "
                             "environment and called google_appengine.\n")
            sys.exit(1)

        # First add the found SDK to the path
        sys.path = [sdk_path] + sys.path

        # Then call fix_sys_path from the SDK
        try:
            from dev_appserver import fix_sys_path
        except __HOLE__:
            from old_dev_appserver import fix_sys_path
        if dev_appserver_version == 2:
            # emulate dev_appserver._run_file in devappserver2
            from dev_appserver import _PATHS
            sys.path = _PATHS._script_to_paths['dev_appserver.py'] + sys.path
        fix_sys_path()

    setup_project(dev_appserver_version)

    from djangoappengine.utils import have_appserver
    if have_appserver:
        # App Engine's threading.local is broken.
        setup_threading()
    elif not os.path.exists(DATA_ROOT):
        os.mkdir(DATA_ROOT)
    setup_logging()

    if not have_appserver:
        # Patch Django to support loading management commands from zip
        # files.
        from django.core import management
        management.find_commands = find_commands
ImportError
dataset/ETHPy150Open django-nonrel/djangoappengine/djangoappengine/boot.py/setup_env