| Unnamed: 0 (int64, 0–10k) | function (string, length 79–138k) | label (string, 20 classes) | info (string, length 42–261) |
|---|---|---|---|
1,400
|
def on_source_need_data(self, source, length):
    # Attempt to read data from the stream
    try:
        data = self.fd.read(length)
    except __HOLE__ as err:
        self.exit("Failed to read data from stream: {0}".format(err))
    # If data is empty it's the end of stream
    if not data:
        source.emit("end-of-stream")
        return
    # Convert the Python bytes into a GStreamer Buffer
    # and then push it to the appsrc
    buf = gst.Buffer.new_wrapped(data)
    source.emit("push-buffer", buf)
|
IOError
|
dataset/ETHPy150Open chrippa/livestreamer/examples/gst-player.py/LivestreamerPlayer.on_source_need_data
|
1,401
|
def test_dtype_errors():
    # Try to call theano_expr with a bad label dtype.
    raised = False
    fmt = OneHotFormatter(max_labels=50)
    try:
        fmt.theano_expr(theano.tensor.vector(dtype=theano.config.floatX))
    except __HOLE__:
        raised = True
    assert raised
    # Try to call format with a bad label dtype.
    raised = False
    try:
        fmt.format(numpy.zeros(10, dtype='float64'))
    except TypeError:
        raised = True
    assert raised
|
TypeError
|
dataset/ETHPy150Open lisa-lab/pylearn2/pylearn2/format/tests/test_target_format.py/test_dtype_errors
|
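Note: rows 1,401–1,402 assert exceptions with a raised-flag idiom. A minimal, stdlib-only sketch of the same check wrapped in a reusable helper (the helper name and the `int(object())` probe are illustrative, not part of the dataset):

```python
def assert_raises(exc_type, func, *args, **kwargs):
    """Assert that func(*args, **kwargs) raises exc_type (the raised-flag idiom)."""
    raised = False
    try:
        func(*args, **kwargs)
    except exc_type:
        raised = True
    assert raised, '%s was not raised' % exc_type.__name__

# int() rejects a plain object() with a TypeError:
assert_raises(TypeError, int, object())
```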
1,402
|
def test_bad_arguments():
    # Make sure an invalid max_labels raises an error.
    raised = False
    try:
        fmt = OneHotFormatter(max_labels=-10)
    except ValueError:
        raised = True
    assert raised
    raised = False
    try:
        fmt = OneHotFormatter(max_labels='10')
    except ValueError:
        raised = True
    assert raised
    # Make sure an invalid dtype identifier raises an error.
    raised = False
    try:
        fmt = OneHotFormatter(max_labels=10, dtype='invalid')
    except TypeError:
        raised = True
    assert raised
    # Make sure an invalid ndim raises an error for format().
    fmt = OneHotFormatter(max_labels=10)
    raised = False
    try:
        fmt.format(numpy.zeros((2, 3, 4), dtype='int32'))
    except ValueError:
        raised = True
    assert raised
    # Make sure an invalid ndim raises an error for theano_expr().
    raised = False
    try:
        fmt.theano_expr(theano.tensor.itensor3())
    except __HOLE__:
        raised = True
    assert raised
|
ValueError
|
dataset/ETHPy150Open lisa-lab/pylearn2/pylearn2/format/tests/test_target_format.py/test_bad_arguments
|
1,403
|
def upkeep():
    """Does upkeep (like flushing, garbage collection, etc.)"""
    # Just in case, let's clear the exception info.
    try:
        sys.exc_clear()
    except __HOLE__:
        # Python 3 does not have sys.exc_clear. The except statement clears
        # the info itself (and we've just entered an except statement)
        pass
    if os.name == 'nt':
        try:
            import msvcrt
            msvcrt.heapmin()
        except ImportError:
            pass
        except IOError: # Win98
            pass
    if conf.daemonized:
        # If we're daemonized, sys.stdout has been replaced with a StringIO
        # object, so let's see if anything's been printed, and if so, let's
        # log.warning it (things shouldn't be printed, and we're more likely
        # to get bug reports if we make it a warning).
        if not hasattr(sys.stdout, 'getvalue'):
            # Stupid twisted sometimes replaces our stdout with theirs, because
            # "The Twisted Way Is The Right Way" (ha!). So we're stuck simply
            # returning.
            log.warning('Expected cStringIO as stdout, got %r.', sys.stdout)
            return
        s = sys.stdout.getvalue()
        if s:
            log.warning('Printed to stdout after daemonization: %s', s)
            sys.stdout.seek(0)
            sys.stdout.truncate() # Truncates to current offset.
        s = sys.stderr.getvalue()
        if s:
            log.error('Printed to stderr after daemonization: %s', s)
            sys.stderr.seek(0)
            sys.stderr.truncate() # Truncates to current offset.
    doFlush = conf.supybot.flush() and not starting
    if doFlush:
        flush()
        # This is so registry._cache gets filled.
        # This seems dumb, so we'll try not doing it anymore.
        #if registryFilename is not None:
        #    registry.open(registryFilename)
    if not dying:
        if minisix.PY2:
            log.debug('Regexp cache size: %s', len(re._cache))
        log.debug('Pattern cache size: %s', len(ircutils._patternCache))
        log.debug('HostmaskPatternEqual cache size: %s',
                  len(ircutils._hostmaskPatternEqualCache))
        #timestamp = log.timestamp()
        if doFlush:
            log.info('Flushers flushed and garbage collected.')
        else:
            log.info('Garbage collected.')
    collected = gc.collect()
    if gc.garbage:
        log.warning('Noncollectable garbage (file this as a bug on SF.net): %s',
                    gc.garbage)
    return collected
|
AttributeError
|
dataset/ETHPy150Open ProgVal/Limnoria/src/world.py/upkeep
|
1,404
|
def initialize(self, import_func=__import__):
    """Attempt to import the config module, if not already imported.

    This function always sets self._module to a value unequal
    to None: either the imported module (if imported successfully), or
    a dummy object() instance (if an ImportError was raised). Other
    exceptions are *not* caught.

    When a dummy instance is used, it is also put in sys.modules.
    This allows us to detect when sys.modules was changed (as
    dev_appserver.py does when it notices source code changes) and
    re-try the __import__ in that case, while skipping it (for speed)
    if nothing has changed.

    Args:
      import_func: Used for dependency injection.
    """
    if (self._module is not None and
        self._module is sys.modules.get(self._modname)):
        return
    try:
        import_func(self._modname)
    except __HOLE__, err:
        if str(err) != 'No module named %s' % self._modname:
            raise
        self._module = object()
        sys.modules[self._modname] = self._module
    else:
        self._module = sys.modules[self._modname]
|
ImportError
|
dataset/ETHPy150Open CollabQ/CollabQ/.google_appengine/google/appengine/api/lib_config.py/LibConfigRegistry.initialize
|
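Note: row 1,404 parks a plain object() sentinel in sys.modules so that a later identity check can detect when the module table has been reset externally. A small sketch of the same trick, with a hypothetical module name:

```python
import sys

MODNAME = 'example_config_module'  # hypothetical name; presumed not installed
_dummy = object()

def load_once():
    """Import MODNAME once; cache a dummy sentinel in sys.modules on failure.

    While the sentinel stays in sys.modules, repeated calls are cheap; if
    something replaces sys.modules entries, the import is retried."""
    cached = sys.modules.get(MODNAME)
    if cached is not None:
        return cached
    try:
        __import__(MODNAME)
        return sys.modules[MODNAME]
    except ImportError:
        sys.modules[MODNAME] = _dummy
        return _dummy

assert load_once() is _dummy  # module is absent, sentinel cached
assert load_once() is _dummy  # second call hits the cache
```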
1,405
|
def _clear_cache(self):
    """Clear the cached values."""
    for key in self._defaults:
        try:
            delattr(self, key)
        except __HOLE__:
            pass
|
AttributeError
|
dataset/ETHPy150Open CollabQ/CollabQ/.google_appengine/google/appengine/api/lib_config.py/ConfigHandle._clear_cache
|
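Note: row 1,405 invalidates lazily cached attributes with delattr, treating AttributeError as "nothing cached yet". A runnable sketch of the same pattern (class and key names are illustrative):

```python
class Handle(object):
    _defaults = ('timeout', 'retries')  # illustrative cached keys

    def _clear_cache(self):
        # delattr only succeeds if a value was actually cached on the
        # instance; AttributeError just means there is nothing to drop.
        for key in self._defaults:
            try:
                delattr(self, key)
            except AttributeError:
                pass

h = Handle()
h.timeout = 30    # simulate a cached value
h._clear_cache()  # removes 'timeout', silently skips 'retries'
assert not hasattr(h, 'timeout')
```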
1,406
|
def process_queue(q, quiet=False):
    if not quiet:
        print("Processing: %s" % q)
    if q.socks_proxy_type and q.socks_proxy_host and q.socks_proxy_port:
        try:
            import socks
        except __HOLE__:
            raise ImportError("Queue has been configured with proxy settings, but no socks library was installed. Try to install PySocks via pypi.")
        proxy_type = {
            'socks4': socks.SOCKS4,
            'socks5': socks.SOCKS5,
        }.get(q.socks_proxy_type)
        socks.set_default_proxy(proxy_type=proxy_type, addr=q.socks_proxy_host, port=q.socks_proxy_port)
        socket.socket = socks.socksocket
    else:
        socket.socket = socket._socketobject
    email_box_type = settings.QUEUE_EMAIL_BOX_TYPE if settings.QUEUE_EMAIL_BOX_TYPE else q.email_box_type
    if email_box_type == 'pop3':
        if q.email_box_ssl or settings.QUEUE_EMAIL_BOX_SSL:
            if not q.email_box_port: q.email_box_port = 995
            server = poplib.POP3_SSL(q.email_box_host or settings.QUEUE_EMAIL_BOX_HOST, int(q.email_box_port))
        else:
            if not q.email_box_port: q.email_box_port = 110
            server = poplib.POP3(q.email_box_host or settings.QUEUE_EMAIL_BOX_HOST, int(q.email_box_port))
        server.getwelcome()
        server.user(q.email_box_user or settings.QUEUE_EMAIL_BOX_USER)
        server.pass_(q.email_box_pass or settings.QUEUE_EMAIL_BOX_PASSWORD)
        messagesInfo = server.list()[1]
        for msg in messagesInfo:
            msgNum = msg.split(" ")[0]
            msgSize = msg.split(" ")[1]
            full_message = "\n".join(server.retr(msgNum)[1])
            ticket = ticket_from_message(message=full_message, queue=q, quiet=quiet)
            if ticket:
                server.dele(msgNum)
        server.quit()
    elif email_box_type == 'imap':
        if q.email_box_ssl or settings.QUEUE_EMAIL_BOX_SSL:
            if not q.email_box_port: q.email_box_port = 993
            server = imaplib.IMAP4_SSL(q.email_box_host or settings.QUEUE_EMAIL_BOX_HOST, int(q.email_box_port))
        else:
            if not q.email_box_port: q.email_box_port = 143
            server = imaplib.IMAP4(q.email_box_host or settings.QUEUE_EMAIL_BOX_HOST, int(q.email_box_port))
        server.login(q.email_box_user or settings.QUEUE_EMAIL_BOX_USER, q.email_box_pass or settings.QUEUE_EMAIL_BOX_PASSWORD)
        server.select(q.email_box_imap_folder)
        status, data = server.search(None, 'NOT', 'DELETED')
        if data:
            msgnums = data[0].split()
            for num in msgnums:
                status, data = server.fetch(num, '(RFC822)')
                ticket = ticket_from_message(message=data[0][1], queue=q, quiet=quiet)
                if ticket:
                    server.store(num, '+FLAGS', '\\Deleted')
            server.expunge()
        server.close()
        server.logout()
|
ImportError
|
dataset/ETHPy150Open rossp/django-helpdesk/helpdesk/management/commands/get_email.py/process_queue
|
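Note: row 1,406 routes all mail traffic through a SOCKS proxy by patching the socket factory process-wide via PySocks. A minimal sketch of that monkey-patch, assuming the PySocks package is available; the proxy host and port below are placeholders:

```python
import socket

try:
    import socks  # PySocks (pip install PySocks)
except ImportError:
    socks = None

if socks is not None:
    # Install the proxy for every subsequently created socket, the same
    # pattern the helpdesk command uses above.
    socks.set_default_proxy(socks.SOCKS5, addr='proxy.example.com', port=1080)
    socket.socket = socks.socksocket
```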
1,407
|
def _test_gen_skeleton(command_name, operation_name):
    p = aws('%s %s --generate-cli-skeleton' % (command_name, operation_name))
    assert_equal(p.rc, 0, 'Received non zero RC (%s) for command: %s %s'
                 % (p.rc, command_name, operation_name))
    try:
        parsed = json.loads(p.stdout)
    except __HOLE__ as e:
        raise AssertionError(
            "Could not generate CLI skeleton for command: %s %s\n"
            "stdout:\n%s\n"
            "stderr:\n%s\n" % (command_name, operation_name, p.stdout, p.stderr))
|
ValueError
|
dataset/ETHPy150Open aws/aws-cli/tests/integration/customizations/test_generatecliskeleton.py/_test_gen_skeleton
|
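Note: the hole in row 1,407 fires when json.loads receives non-JSON text; on Python 3 this surfaces as json.JSONDecodeError, a ValueError subclass, so the label still matches. A tiny demonstration:

```python
import json

try:
    json.loads('not valid json')
except ValueError as err:  # json.JSONDecodeError subclasses ValueError
    print('skeleton was not valid JSON:', err)
```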
1,408
|
def pop(self, k, *args):
    result = super(OrderedDict, self).pop(k, *args)
    try:
        self.keyOrder.remove(k)
    except __HOLE__:
        # Key wasn't in the dictionary in the first place. No problem.
        pass
    return result
|
ValueError
|
dataset/ETHPy150Open dragondjf/QMarkdowner/markdown/odict.py/OrderedDict.pop
|
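Note: rows 1,408–1,409 rely on list.remove and list.index raising ValueError for absent elements. The same bookkeeping in isolation:

```python
key_order = ['a', 'b', 'c']

def forget(key):
    """Drop key from the ordering; a missing key is not an error."""
    try:
        key_order.remove(key)
    except ValueError:
        pass  # key was never tracked in the first place

forget('b')   # removed
forget('zz')  # silently ignored
assert key_order == ['a', 'c']
```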
1,409
|
def index(self, key):
    """ Return the index of a given key. """
    try:
        return self.keyOrder.index(key)
    except __HOLE__:
        raise ValueError("Element '%s' was not found in OrderedDict" % key)
|
ValueError
|
dataset/ETHPy150Open dragondjf/QMarkdowner/markdown/odict.py/OrderedDict.index
|
1,410
|
def enable_plugin(self, pname):
    try:
        self.logger.debug("enabling plugin '%s'" % (pname))
        try:
            self.pluginmods[pname] = __import__(pname)
        except Exception, e:
            self.logger.error("exception importing '%s' plugin:\n%s" % (pname, shared.indent(traceback.format_exc().strip(), 1)))
        else:
            try:
                self.pluginobjs[pname] = getattr(self.pluginmods[pname], pname)(self.global_config)
            except __HOLE__, e:
                self.logger.error("plugin '%s' could not be loaded (%s)" % (pname, e))
                self.disable_plugin(pname)
            else:
                if not self.pluginobjs[pname].required_config_loaded():
                    self.logger.error("plugin '%s' requires config section '%s' with parameters %s" % (pname, pname, self.pluginobjs[pname].required_config))
                    self.disable_plugin(pname)
                else:
                    self.logger.info("enabled plugin '%s'" % (pname))
    except Exception, e:
        self.logger.error("exception when initializing plugin '%s':\n%s" % (pname, shared.indent(traceback.format_exc().strip(), 1)))
        self.disable_plugin(pname)
|
AttributeError
|
dataset/ETHPy150Open zynga/hiccup/hiccup/PluginManager.py/PluginManager.enable_plugin
|
1,411
|
def disable_plugin(self, pname):
    self.logger.info("disabling plugin '%s'" % (pname))
    if (pname in sys.modules):
        del(sys.modules[pname])
    if (pname in self.pluginmods):
        del(self.pluginmods[pname])
    if (pname in self.pluginobjs):
        del(self.pluginobjs[pname])
    if self.file_watcher != None:
        self.file_watcher.remove_item("%s.py" % pname)
    if self.auto_delete_class_files == True:
        self.logger.debug(" testing for class file : %s" % (os.path.join("%s" % self.dname, "%s$py.class" % pname)))
        if (os.path.isfile(os.path.join("%s" % self.dname, "%s$py.class" % pname))):
            self.logger.debug(" disable_plugin removing stale .class file for disabled plugin '%s'" % pname)
            try:
                os.remove(os.path.join("%s" % self.dname, "%s$py.class" % pname))
            except __HOLE__, e:
                self.logger.debug("failed to remove stale file %s$py.class but don't really care" % pname)
|
OSError
|
dataset/ETHPy150Open zynga/hiccup/hiccup/PluginManager.py/PluginManager.disable_plugin
|
1,412
|
def test_raise_no_arg(self):
    r = None
    try:
        try:
            raise RuntimeError("dummy")
        except __HOLE__:
            raise
    except RuntimeError, e:
        r = str(e)
    self.assertEquals(r, "dummy")
|
RuntimeError
|
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/test/test_exceptions_jy.py/ExceptionsTestCase.test_raise_no_arg
|
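Note: row 1,412 verifies that a bare raise inside an except block re-raises the active exception unchanged. The same behavior, runnable on Python 3:

```python
def reraise():
    try:
        raise RuntimeError('dummy')
    except RuntimeError:
        # A bare raise re-raises the exception currently being handled,
        # preserving its type, args, and traceback.
        raise

try:
    reraise()
except RuntimeError as e:
    assert str(e) == 'dummy'
```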
1,413
|
def all_query(expression):
    """Match arrays that contain all elements in the query."""
    def _all(index, expression=expression):
        """Return store key for documents that satisfy expression."""
        ev = expression() if callable(expression) else expression
        try:
            iter(ev)
        except __HOLE__:
            raise AttributeError('$all argument must be an iterable!')
        hashed_ev = [index.get_hash_for(v) for v in ev]
        store_keys = set([])
        if len(hashed_ev) == 0:
            return []
        store_keys = set(index.get_keys_for(hashed_ev[0]))
        for value in hashed_ev[1:]:
            store_keys &= set(index.get_keys_for(value))
        return list(store_keys)
    return _all
|
TypeError
|
dataset/ETHPy150Open adewes/blitzdb/blitzdb/backends/file/queries.py/all_query
|
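Note: rows 1,413–1,414 probe for iterability by calling iter() and catching TypeError instead of type-checking. A stand-alone sketch of that probe (the helper name is illustrative):

```python
def require_iterable(value, what='$all argument'):
    """Raise AttributeError, as the backend does, if value is not iterable."""
    try:
        iter(value)
    except TypeError:
        raise AttributeError('%s must be an iterable!' % what)
    return value

require_iterable([1, 2, 3])   # lists pass
try:
    require_iterable(42)      # ints do not
except AttributeError as err:
    print(err)
```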
1,414
|
def in_query(expression):
    """Match any of the values that exist in an array specified in query."""
    def _in(index, expression=expression):
        """Return store key for documents that satisfy expression."""
        ev = expression() if callable(expression) else expression
        try:
            iter(ev)
        except __HOLE__:
            raise AttributeError('$in argument must be an iterable!')
        hashed_ev = [index.get_hash_for(v) for v in ev]
        store_keys = set()
        for value in hashed_ev:
            store_keys |= set(index.get_keys_for(value))
        return list(store_keys)
    return _in
|
TypeError
|
dataset/ETHPy150Open adewes/blitzdb/blitzdb/backends/file/queries.py/in_query
|
1,415
|
@classmethod
def wrap_iterable(cls, url_or_urls):
    """Given a string or :class:`Link` or iterable, return an iterable of :class:`Link` objects.

    :param url_or_urls: A string or :class:`Link` object, or iterable of string or :class:`Link`
      objects.
    :returns: A list of :class:`Link` objects.
    """
    try:
        return [cls.wrap(url_or_urls)]
    except __HOLE__:
        pass
    if isinstance(url_or_urls, Iterable):
        return [cls.wrap(url) for url in url_or_urls]
    raise ValueError('url_or_urls must be string/Link or iterable of strings/Links')
|
ValueError
|
dataset/ETHPy150Open pantsbuild/pex/pex/link.py/Link.wrap_iterable
|
1,416
|
def _call(self, input_str, binary, args=[], verbose=False):
    """
    Call the binary with the given input.

    :param input_str: A string whose contents are used as stdin.
    :param binary: The location of the binary to call
    :param args: A list of command-line arguments.
    :return: A tuple (stdout, returncode)
    :see: ``config_prover9``
    """
    if verbose:
        print('Calling:', binary)
        print('Args:', args)
        print('Input:\n', input_str, '\n')
    # Call prover9 via a subprocess
    cmd = [binary] + args
    try:
        input_str = input_str.encode("utf8")
    except __HOLE__:
        pass
    p = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                         stderr=subprocess.STDOUT,
                         stdin=subprocess.PIPE)
    (stdout, stderr) = p.communicate(input=input_str)
    if verbose:
        print('Return code:', p.returncode)
        if stdout: print('stdout:\n', stdout, '\n')
        if stderr: print('stderr:\n', stderr, '\n')
    return (stdout.decode("utf-8"), p.returncode)
|
AttributeError
|
dataset/ETHPy150Open nltk/nltk/nltk/inference/prover9.py/Prover9Parent._call
|
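Note: row 1,416 tolerates both str and bytes payloads by catching the AttributeError that bytes (which have no .encode on Python 3) would raise. The normalization in isolation:

```python
def to_bytes(payload):
    """Return payload as UTF-8 bytes, accepting either str or bytes."""
    try:
        return payload.encode('utf8')
    except AttributeError:
        # Python 3 bytes objects have no .encode; assume already encoded.
        return payload

assert to_bytes('abc') == to_bytes(b'abc') == b'abc'
```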
1,417
|
def validate_user(f):
    try:
        results = db.query("SELECT * FROM users WHERE username=$username ORDER BY ROWID ASC LIMIT 1",
                           vars={'username': f.username.value})
        user = results[0]
        try:
            valid_hash = pbkdf2_sha256.verify(f.password.value, user.password)
        except ValueError as e:
            web.debug('%s user=%s' % (str(e), user.username))
            valid_hash = None
            pass
        date_now = datetime.datetime.now()
        date_expires = datetime.datetime.combine(user.expires, datetime.time.min)
        if date_now <= date_expires:
            if valid_hash:
                web.debug('login_success_hash: user=%s' % user.username)
                return user
            else:
                web.debug('login_failed_hash: incorrect password user=%s, fallback to plaintext' % user.username)
                if f.password.value == user.password:
                    web.debug('login_success_plaintext: user=%s' % user.username)
                    return user
                else:
                    web.debug('login_failed_plaintext: incorrect password user=%s' % user.username)
                    return None
        else:
            web.debug('login_failed: expired account user=%s' % user.username)
            return None
    except __HOLE__, e:
        web.debug('login_failed: not found user=%s' % f.username.value)
        return None
|
IndexError
|
dataset/ETHPy150Open ab77/netflix-proxy/auth/auth.py/validate_user
|
1,418
|
def _check_imports():
    """ Dynamically remove optimizers we don't have
    """
    optlist = ['ALGENCIAN', 'ALHSO', 'ALPSO', 'COBYLA', 'CONMIN', 'FILTERSD',
               'FSQP', 'GCMMA', 'KSOPT', 'MIDACO', 'MMA', 'MMFD', 'NLPQL', 'NLPQLP',
               'NSGA2', 'PSQP', 'SDPEN', 'SLSQP', 'SNOPT', 'SOLVOPT']
    for optimizer in optlist[:]:
        try:
            exec('from pyOpt import %s' % optimizer)
        except __HOLE__:
            optlist.remove(optimizer)
    return optlist
|
ImportError
|
dataset/ETHPy150Open OpenMDAO-Plugins/pyopt_driver/src/pyopt_driver/pyopt_driver.py/_check_imports
|
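Note: row 1,418 probes optional pyOpt optimizers with exec. An exec-free sketch of the same availability filter using importlib; whether each optimizer is importable as a pyOpt submodule depends on the local install, so the dotted path here is an assumption:

```python
import importlib

def available_optimizers(names, package='pyOpt'):
    """Return the subset of names importable as submodules of package."""
    found = []
    for name in names:
        try:
            importlib.import_module('%s.%s' % (package, name))
        except ImportError:
            continue
        found.append(name)
    return found

# e.g. available_optimizers(['SLSQP', 'SNOPT']) on a machine with pyOpt
```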
1,419
|
def execute(self):
    """pyOpt execution. Note that pyOpt controls the execution, and the
    individual optimizers control the iteration."""
    self.pyOpt_solution = None
    self.run_iteration()
    opt_prob = Optimization(self.title, self.objfunc, var_set={},
                            obj_set={}, con_set={})
    # Add all parameters
    self.param_type = {}
    self.nparam = self.total_parameters()
    for name, param in self.get_parameters().iteritems():
        # We need to identify Enums, Lists, Dicts
        metadata = param.get_metadata()[1]
        values = param.evaluate()
        # Assuming uniform enumerated, discrete, or continuous for now.
        val = values[0]
        choices = []
        if 'values' in metadata and \
           isinstance(metadata['values'], (list, tuple, array, set)):
            vartype = 'd'
            choices = metadata['values']
        elif isinstance(val, bool):
            vartype = 'd'
            choices = [True, False]
        elif isinstance(val, (int, int32, int64)):
            vartype = 'i'
        elif isinstance(val, (float, float32, float64)):
            vartype = 'c'
        else:
            msg = 'Only continuous, discrete, or enumerated variables' \
                  ' are supported. %s is %s.' % (name, type(val))
            self.raise_exception(msg, ValueError)
        self.param_type[name] = vartype
        names = param.names
        lower_bounds = param.get_low()
        upper_bounds = param.get_high()
        for i in range(param.size):
            opt_prob.addVar(names[i], vartype,
                            lower=lower_bounds[i], upper=upper_bounds[i],
                            value=values[i], choices=choices)
    # Add all objectives
    for name in self.get_objectives():
        opt_prob.addObj(name)
    # Add all equality constraints
    for name, con in self.get_eq_constraints().items():
        if con.size > 1:
            for i in range(con.size):
                opt_prob.addCon('%s [%s]' % (name, i), type='e')
        else:
            opt_prob.addCon(name, type='e')
    # Add all inequality constraints
    for name, con in self.get_ineq_constraints().items():
        if con.size > 1:
            for i in range(con.size):
                opt_prob.addCon('%s [%s]' % (name, i), type='i')
        else:
            opt_prob.addCon(name, type='i')
    self.inputs = self.list_param_group_targets()
    self.objs = self.list_objective_targets()
    self.cons = self.list_constraint_targets()
    # Instantiate the requested optimizer
    optimizer = self.optimizer
    try:
        exec('from pyOpt import %s' % optimizer)
    except __HOLE__:
        msg = "Optimizer %s is not available in this installation." % \
              optimizer
        self.raise_exception(msg, ImportError)
    optname = vars()[optimizer]
    opt = optname()
    # Set optimization options
    for option, value in self.options.iteritems():
        opt.setOption(option, value)
    # Execute the optimization problem
    if self.pyopt_diff:
        # Use pyOpt's internal finite difference
        opt(opt_prob, sens_type='FD', sens_step=self.gradient_options.fd_step,
            store_hst=self.store_hst, hot_start=self.hot_start)
    else:
        # Use OpenMDAO's differentiator for the gradient
        opt(opt_prob, sens_type=self.gradfunc, store_hst=self.store_hst,
            hot_start=self.hot_start)
    # Print results
    if self.print_results:
        print opt_prob.solution(0)
    # Pull optimal parameters back into framework and re-run, so that
    # framework is left in the right final state
    dvals = []
    for i in range(0, len(opt_prob.solution(0)._variables)):
        dvals.append(opt_prob.solution(0)._variables[i].value)
    # Integer parameters come back as floats, so we need to round them
    # and turn them into python integers before setting.
    if 'i' in self.param_type.values():
        for j, param in enumerate(self.get_parameters().keys()):
            if self.param_type[param] == 'i':
                dvals[j] = int(round(dvals[j]))
    self.set_parameters(dvals)
    self.run_iteration()
    # Save the most recent solution.
    self.pyOpt_solution = opt_prob.solution(0)
|
ImportError
|
dataset/ETHPy150Open OpenMDAO-Plugins/pyopt_driver/src/pyopt_driver/pyopt_driver.py/pyOptDriver.execute
|
1,420
|
@log_function
@defer.inlineCallbacks
def do_invite_join(self, target_hosts, room_id, joinee, content):
    """ Attempts to join the `joinee` to the room `room_id` via the
    server `target_host`.

    This first triggers a /make_join/ request that returns a partial
    event that we can fill out and sign. This is then sent to the
    remote server via /send_join/ which responds with the state at that
    event and the auth_chains.

    We suspend processing of any received events from this room until we
    have finished processing the join.
    """
    logger.debug("Joining %s to %s", joinee, room_id)
    yield self.store.clean_room_for_join(room_id)
    origin, event = yield self._make_and_verify_event(
        target_hosts,
        room_id,
        joinee,
        "join",
        content,
    )
    self.room_queues[room_id] = []
    handled_events = set()
    try:
        event = self._sign_event(event)
        # Try the host we successfully got a response to /make_join/
        # request first.
        try:
            target_hosts.remove(origin)
            target_hosts.insert(0, origin)
        except __HOLE__:
            pass
        ret = yield self.replication_layer.send_join(target_hosts, event)
        origin = ret["origin"]
        state = ret["state"]
        auth_chain = ret["auth_chain"]
        auth_chain.sort(key=lambda e: e.depth)
        handled_events.update([s.event_id for s in state])
        handled_events.update([a.event_id for a in auth_chain])
        handled_events.add(event.event_id)
        logger.debug("do_invite_join auth_chain: %s", auth_chain)
        logger.debug("do_invite_join state: %s", state)
        logger.debug("do_invite_join event: %s", event)
        try:
            yield self.store.store_room(
                room_id=room_id,
                room_creator_user_id="",
                is_public=False
            )
        except:
            # FIXME
            pass
        event_stream_id, max_stream_id = yield self._persist_auth_tree(
            auth_chain, state, event
        )
        with PreserveLoggingContext():
            self.notifier.on_new_room_event(
                event, event_stream_id, max_stream_id,
                extra_users=[joinee]
            )
        logger.debug("Finished joining %s to %s", joinee, room_id)
    finally:
        room_queue = self.room_queues[room_id]
        del self.room_queues[room_id]
        for p, origin in room_queue:
            if p.event_id in handled_events:
                continue
            try:
                self.on_receive_pdu(origin, p)
            except:
                logger.exception("Couldn't handle pdu")
    defer.returnValue(True)
|
ValueError
|
dataset/ETHPy150Open matrix-org/synapse/synapse/handlers/federation.py/FederationHandler.do_invite_join
|
1,421
|
@defer.inlineCallbacks
def do_remotely_reject_invite(self, target_hosts, room_id, user_id):
    origin, event = yield self._make_and_verify_event(
        target_hosts,
        room_id,
        user_id,
        "leave"
    )
    signed_event = self._sign_event(event)
    # Try the host we successfully got a response to /make_join/
    # request first.
    try:
        target_hosts.remove(origin)
        target_hosts.insert(0, origin)
    except __HOLE__:
        pass
    yield self.replication_layer.send_leave(
        target_hosts,
        signed_event
    )
    context = yield self.state_handler.compute_event_context(event)
    event_stream_id, max_stream_id = yield self.store.persist_event(
        event,
        context=context,
    )
    target_user = UserID.from_string(event.state_key)
    self.notifier.on_new_room_event(
        event, event_stream_id, max_stream_id,
        extra_users=[target_user],
    )
    defer.returnValue(event)
|
ValueError
|
dataset/ETHPy150Open matrix-org/synapse/synapse/handlers/federation.py/FederationHandler.do_remotely_reject_invite
|
1,422
|
@defer.inlineCallbacks
def construct_auth_difference(self, local_auth, remote_auth):
    """ Given a local and remote auth chain, find the differences. This
    assumes that we have already processed all events in remote_auth

    Params:
        local_auth (list)
        remote_auth (list)

    Returns:
        dict
    """
    logger.debug("construct_auth_difference Start!")
    # TODO: Make sure we are OK with local_auth or remote_auth having more
    # auth events in them than strictly necessary.
    def sort_fun(ev):
        return ev.depth, ev.event_id
    logger.debug("construct_auth_difference after sort_fun!")
    # We find the differences by starting at the "bottom" of each list
    # and iterating up on both lists. The lists are ordered by depth and
    # then event_id, we iterate up both lists until we find the event ids
    # don't match. Then we look at depth/event_id to see which side is
    # missing that event, and iterate only up that list. Repeat.
    remote_list = list(remote_auth)
    remote_list.sort(key=sort_fun)
    local_list = list(local_auth)
    local_list.sort(key=sort_fun)
    local_iter = iter(local_list)
    remote_iter = iter(remote_list)
    logger.debug("construct_auth_difference before get_next!")
    def get_next(it, opt=None):
        try:
            return it.next()
        except:
            return opt
    current_local = get_next(local_iter)
    current_remote = get_next(remote_iter)
    logger.debug("construct_auth_difference before while")
    missing_remotes = []
    missing_locals = []
    while current_local or current_remote:
        if current_remote is None:
            missing_locals.append(current_local)
            current_local = get_next(local_iter)
            continue
        if current_local is None:
            missing_remotes.append(current_remote)
            current_remote = get_next(remote_iter)
            continue
        if current_local.event_id == current_remote.event_id:
            current_local = get_next(local_iter)
            current_remote = get_next(remote_iter)
            continue
        if current_local.depth < current_remote.depth:
            missing_locals.append(current_local)
            current_local = get_next(local_iter)
            continue
        if current_local.depth > current_remote.depth:
            missing_remotes.append(current_remote)
            current_remote = get_next(remote_iter)
            continue
        # They have the same depth, so we fall back to the event_id order
        if current_local.event_id < current_remote.event_id:
            missing_locals.append(current_local)
            current_local = get_next(local_iter)
        if current_local.event_id > current_remote.event_id:
            missing_remotes.append(current_remote)
            current_remote = get_next(remote_iter)
            continue
    logger.debug("construct_auth_difference after while")
    # missing locals should be sent to the server
    # We should find why we are missing remotes, as they will have been
    # rejected.
    # Remove events from missing_remotes if they are referencing a missing
    # remote. We only care about the "root" rejected ones.
    missing_remote_ids = [e.event_id for e in missing_remotes]
    base_remote_rejected = list(missing_remotes)
    for e in missing_remotes:
        for e_id, _ in e.auth_events:
            if e_id in missing_remote_ids:
                try:
                    base_remote_rejected.remove(e)
                except __HOLE__:
                    pass
    reason_map = {}
    for e in base_remote_rejected:
        reason = yield self.store.get_rejection_reason(e.event_id)
        if reason is None:
            # TODO: e is not in the current state, so we should
            # construct some proof of that.
            continue
        reason_map[e.event_id] = reason
        if reason == RejectedReason.AUTH_ERROR:
            pass
        elif reason == RejectedReason.REPLACED:
            # TODO: Get proof
            pass
        elif reason == RejectedReason.NOT_ANCESTOR:
            # TODO: Get proof.
            pass
    logger.debug("construct_auth_difference returning")
    defer.returnValue({
        "auth_chain": local_auth,
        "rejects": {
            e.event_id: {
                "reason": reason_map[e.event_id],
                "proof": None,
            }
            for e in base_remote_rejected
        },
        "missing": [e.event_id for e in missing_locals],
    })
|
ValueError
|
dataset/ETHPy150Open matrix-org/synapse/synapse/handlers/federation.py/FederationHandler.construct_auth_difference
|
1,423
|
def __init__(self, resource, location, name, func):
    """Initialize the switch."""
    super().__init__(resource, location, name)
    self._func = func
    request = requests.get('{}/{}'.format(self._resource, self._func),
                           timeout=10)
    if request.status_code is not 200:
        _LOGGER.error("Can't find function. Is device offline?")
        return
    try:
        request.json()['return_value']
    except KeyError:
        _LOGGER.error("No return_value received. "
                      "Is the function name correct.")
    except __HOLE__:
        _LOGGER.error("Response invalid. Is the function name correct.")
|
ValueError
|
dataset/ETHPy150Open home-assistant/home-assistant/homeassistant/components/switch/arest.py/ArestSwitchFunction.__init__
|
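Note: row 1,423 maps three failure modes of an aREST call to log messages; with the requests library, a non-JSON body makes response.json() raise a ValueError subclass. A hedged sketch of the same classification against a placeholder URL:

```python
import requests

def read_return_value(url):
    """Fetch url and classify the three failure modes the row handles."""
    response = requests.get(url, timeout=10)
    if response.status_code != 200:
        return "Can't find function. Is device offline?"
    try:
        return response.json()['return_value']
    except KeyError:
        return 'No return_value received. Is the function name correct?'
    except ValueError:  # body was not JSON at all
        return 'Response invalid. Is the function name correct?'
```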
1,424
|
@login_required
def message_create(request, content_type_id=None, object_id=None,
                   template_name='messages/message_form.html'):
    """
    Handles a new message and displays a form.

    Template:: ``messages/message_form.html``
    Context:
        form
            MessageForm object
    """
    next = request.GET.get('next', None)
    if request.GET.get('to', None):
        to_user = get_object_or_404(User, username=request.GET['to'])
    else:
        to_user = None
    if content_type_id and object_id:
        content_type = ContentType.objects.get(pk=base62.to_decimal(content_type_id))
        Model = content_type.model_class()
        try:
            related_object = Model.objects.get(pk=base62.to_decimal(object_id))
        except __HOLE__:
            raise Http404, "The object ID was invalid."
    else:
        related_object = None
    form = MessageForm(request.POST or None, initial={'to_user': to_user})
    if form.is_valid():
        message = form.save(commit=False)
        if related_object:
            message.object = related_object
        message.from_user = request.user
        message = form.save()
        return HttpResponseRedirect(next or reverse('messages:messages'))
    return render_to_response(template_name, {
        'form': form,
        'to_user': to_user,
        'related_object': related_object,
        'next': next,
    }, context_instance=RequestContext(request))
|
ObjectDoesNotExist
|
dataset/ETHPy150Open nathanborror/django-basic-apps/basic/messages/views.py/message_create
|
1,425
|
def load(self, backend, *args, **kwargs):
    i = backend.rfind('.')
    module, attr = backend[:i], backend[i+1:]
    try:
        __import__(module)
        mod = sys.modules[module]
    except ImportError, e:
        raise ImproperlyConfigured(
            "Error importing upload handler module %s: '%s'" % (module, e))
    try:
        cls = getattr(mod, attr)
    except __HOLE__:
        raise ImproperlyConfigured(
            "Module '%s' does not define a '%s' backend" % (module, attr))
    return cls(*args, **kwargs)
|
AttributeError
|
dataset/ETHPy150Open jezdez-archive/django-vcstorage/src/vcstorage/storage.py/VcStorage.load
|
1,426
|
def save(self, name, content, message=None):
    """
    Saves the given content with the name and commits to the working dir.
    """
    self.populate()
    if message is None:
        message = "Automated commit: adding %s" % name
    name = super(VcStorage, self).save(name, content)
    full_paths = [smart_str(os.path.join(self.location, self.path(name)))]
    try:
        self.wd.add(paths=full_paths)
        self.wd.commit(message=message, paths=full_paths)
    except __HOLE__:
        pass
    return name
|
OSError
|
dataset/ETHPy150Open jezdez-archive/django-vcstorage/src/vcstorage/storage.py/VcStorage.save
|
1,427
|
def delete(self, name, message=None):
    """
    Deletes the specified file from the storage system.
    """
    self.populate()
    if message is None:
        message = "Automated commit: removing %s" % name
    full_paths = [smart_str(self.path(name))]
    try:
        self.wd.remove(paths=full_paths)
        self.wd.commit(message=message, paths=full_paths)
    except __HOLE__:
        pass
|
OSError
|
dataset/ETHPy150Open jezdez-archive/django-vcstorage/src/vcstorage/storage.py/VcStorage.delete
|
1,428
|
def test_max_recursion_error(self):
    """
    Overriding a method on a super class and then calling that method on
    the super class should not trigger infinite recursion. See #17011.
    """
    try:
        super(ClassDecoratedTestCase, self).test_max_recursion_error()
    except __HOLE__:
        self.fail()
|
RuntimeError
|
dataset/ETHPy150Open django/django/tests/settings_tests/tests.py/ClassDecoratedTestCase.test_max_recursion_error
|
1,429
|
@staticmethod
def _FindNextOpcode(job, timeout_strategy_factory):
    """Locates the next opcode to run.

    @type job: L{_QueuedJob}
    @param job: Job object
    @param timeout_strategy_factory: Callable to create new timeout strategy
    """
    # Create some sort of a cache to speed up locating next opcode for future
    # lookups
    # TODO: Consider splitting _QueuedJob.ops into two separate lists, one for
    # pending and one for processed ops.
    if job.ops_iter is None:
        job.ops_iter = enumerate(job.ops)
    # Find next opcode to run
    while True:
        try:
            (idx, op) = job.ops_iter.next()
        except __HOLE__:
            raise errors.ProgrammerError("Called for a finished job")
        if op.status == constants.OP_STATUS_RUNNING:
            # Found an opcode already marked as running
            raise errors.ProgrammerError("Called for job marked as running")
        opctx = _OpExecContext(op, idx, "Op %s/%s" % (idx + 1, len(job.ops)),
                               timeout_strategy_factory)
        if op.status not in constants.OPS_FINALIZED:
            return opctx
        # This is a job that was partially completed before master daemon
        # shutdown, so it can be expected that some opcodes are already
        # completed successfully (if any did error out, then the whole job
        # should have been aborted and not resubmitted for processing).
        logging.info("%s: opcode %s already processed, skipping",
                     opctx.log_prefix, opctx.summary)
|
StopIteration
|
dataset/ETHPy150Open ganeti/ganeti/lib/jqueue/__init__.py/_JobProcessor._FindNextOpcode
|
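Note: row 1,429 uses the Python 2 iterator method .next() and converts exhaustion into a ProgrammerError. On Python 3 the builtin next() takes a default argument that sidesteps StopIteration entirely:

```python
ops = iter(enumerate(['op1', 'op2']))

while True:
    item = next(ops, None)  # default avoids raising StopIteration
    if item is None:
        break
    idx, op = item
    print('Op %s: %s' % (idx + 1, op))
```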
1,430
|
@staticmethod
def _ResolveJobDependencies(resolve_fn, deps):
    """Resolves relative job IDs in dependencies.

    @type resolve_fn: callable
    @param resolve_fn: Function to resolve a relative job ID
    @type deps: list
    @param deps: Dependencies
    @rtype: tuple; (boolean, string or list)
    @return: If successful (first tuple item), the returned list contains
      resolved job IDs along with the requested status; if not successful,
      the second element is an error message
    """
    result = []
    for (dep_job_id, dep_status) in deps:
        if ht.TRelativeJobId(dep_job_id):
            assert ht.TInt(dep_job_id) and dep_job_id < 0
            try:
                job_id = resolve_fn(dep_job_id)
            except __HOLE__:
                # Abort
                return (False, "Unable to resolve relative job ID %s" % dep_job_id)
        else:
            job_id = dep_job_id
        result.append((job_id, dep_status))
    return (True, result)
|
IndexError
|
dataset/ETHPy150Open ganeti/ganeti/lib/jqueue/__init__.py/JobQueue._ResolveJobDependencies
|
1,431
|
@ratelimit(rate='1200/h', method=None, block=True)
def surveyform(request, assignment_id):
    assignment = get_object_or_404(Assignment, form_slug=assignment_id)
    # This variable is no longer used. It was initially developed
    # as a reference to the FormRequest for a separate view that would
    # change the object's page_loaded boolean to true.
    request_key = str(assignment.id) + str(hex(random.randint(100000, 999999)))
    # When behind a proxy, the first one is the client IP, otherwise it is in REMOTE_ADDR
    ip_address = request.META.get('HTTP_X_FORWARDED_FOR', '') or request.META.get('REMOTE_ADDR')
    referer = request.META.get('HTTP_REFERER') or ''
    user_agent = request.META.get('HTTP_USER_AGENT') or ''
    try:
        form_request = FormRequest.objects.create(key=request_key, assignment=assignment,
                                                  ip_address=ip_address, referer=referer,
                                                  user_agent=user_agent)
    except:
        form_request = FormRequest.objects.create(key=request_key, assignment=assignment,
                                                  ip_address='0.0.0.0', referer='Unknown',
                                                  user_agent='Unknown')
    form_request.save()
    complete_check = (list(assignment.questions.questions.all().order_by('id').values_list('id', flat=True))
                      == list(assignment.answers.all().order_by('question__question__id').values_list('question__question__id', flat=True))
                      and Comment.objects.filter(assignment=assignment).exists())
    if complete_check:
        return HttpResponseRedirect('/checkup/thanks/' + assignment.form_slug + '/')
    else:
        Answer.objects.filter(assignment=assignment).delete()
        Comment.objects.filter(assignment=assignment).delete()
    if request.method == 'POST':
        form = SurveyForm(request.POST, assignment=assignment)
        if form.is_valid():
            for key, value in form.cleaned_data.items():
                if 'question' in key:
                    question = Question.objects.get(pk=int(key.split('-')[1]))
                    group = assignment.questions
                    # We store the QuestionGroupOrder with the answer
                    # so we have access to questions and answers
                    # IN ORDER from the assignment model later
                    group_order = QuestionGroupOrder.objects.get(
                        question=question, group=group)
                    answer = Answer.objects.get_or_create(assignment=assignment,
                                                          question=group_order,
                                                          answer=Choice.objects.get(pk=int(value)))
                elif 'comment' in key:
                    comment = Comment.objects.get_or_create(assignment=assignment,
                                                            comment=value)
            complete_check = (list(assignment.questions.questions.all().order_by('id').values_list('id', flat=True))
                              == list(assignment.answers.all().order_by('question__question__id').values_list('question__question__id', flat=True))
                              and Comment.objects.filter(assignment=assignment).exists())
            if complete_check:
                assignment.survey_complete = True
                assignment.save()
                email_message = assignment.respondent.first_name + ' ' + assignment.respondent.last_name
                email_message += ' responded to the survey: ' + assignment.survey.name + '\n\n'
                email_message += 'Here are the respondent\'s answer(s): \n'
                for answer in assignment.answers.all():
                    email_message += answer.question.question.question + '\n'
                    email_message += answer.answer.choice + '\n'
                email_message += '\n'
                email_message += 'Here is the respondent\'s comment (if any): \n'
                email_message += assignment.comment.comment + '\n'
                try:
                    send_mail('CheckUp survey form submitted!',
                              email_message,
                              settings.DEFAULT_FROM_EMAIL,
                              [assignment.reporter.user.email],
                              fail_silently=True)
                except __HOLE__:
                    # If they didn't set up their server.
                    pass
            return HttpResponseRedirect('/checkup/thanks/' + assignment.form_slug + '/')
    else:
        form = SurveyForm(assignment=assignment)
    context = {
        'form': form,
        'assignment': assignment,
        'base_template': base_template
    }
    return render(request, 'checkup/surveyform.html', context)
|
AttributeError
|
dataset/ETHPy150Open newsday/newstools-checkup/checkup/views.py/surveyform
|
1,432
|
def _parse_body(self, stream):
    rows, cols, entries, format, field, symm = (self.rows, self.cols,
                                                self.entries, self.format,
                                                self.field, self.symmetry)
    try:
        from scipy.sparse import coo_matrix
    except __HOLE__:
        coo_matrix = None
    dtype = self.DTYPES_BY_FIELD.get(field, None)
    has_symmetry = self.has_symmetry
    is_complex = field == self.FIELD_COMPLEX
    is_skew = symm == self.SYMMETRY_SKEW_SYMMETRIC
    is_herm = symm == self.SYMMETRY_HERMITIAN
    is_pattern = field == self.FIELD_PATTERN
    if format == self.FORMAT_ARRAY:
        a = zeros((rows, cols), dtype=dtype)
        line = 1
        i, j = 0, 0
        while line:
            line = stream.readline()
            if not line or line.startswith(b'%'):
                continue
            if is_complex:
                aij = complex(*map(float, line.split()))
            else:
                aij = float(line)
            a[i, j] = aij
            if has_symmetry and i != j:
                if is_skew:
                    a[j, i] = -aij
                elif is_herm:
                    a[j, i] = conj(aij)
                else:
                    a[j, i] = aij
            if i < rows-1:
                i = i + 1
            else:
                j = j + 1
                if not has_symmetry:
                    i = 0
                else:
                    i = j
        if not (i in [0, j] and j == cols):
            raise ValueError("Parse error, did not read all lines.")
    elif format == self.FORMAT_COORDINATE and coo_matrix is None:
        # Read sparse matrix to dense when coo_matrix is not available.
        a = zeros((rows, cols), dtype=dtype)
        line = 1
        k = 0
        while line:
            line = stream.readline()
            if not line or line.startswith(b'%'):
                continue
            l = line.split()
            i, j = map(int, l[:2])
            i, j = i-1, j-1
            if is_complex:
                aij = complex(*map(float, l[2:]))
            else:
                aij = float(l[2])
            a[i, j] = aij
            if has_symmetry and i != j:
                if is_skew:
                    a[j, i] = -aij
                elif is_herm:
                    a[j, i] = conj(aij)
                else:
                    a[j, i] = aij
            k = k + 1
        if not k == entries:
            raise ValueError("Did not read all entries")
    elif format == self.FORMAT_COORDINATE:
        # Read sparse COOrdinate format
        if entries == 0:
            # empty matrix
            return coo_matrix((rows, cols), dtype=dtype)
        try:
            if not _is_fromfile_compatible(stream):
                flat_data = fromstring(stream.read(), sep=' ')
            else:
                # fromfile works for normal files
                flat_data = fromfile(stream, sep=' ')
        except Exception:
            # fallback - fromfile fails for some file-like objects
            flat_data = fromstring(stream.read(), sep=' ')
            # TODO use iterator (e.g. xreadlines) to avoid reading
            # the whole file into memory
        if is_pattern:
            flat_data = flat_data.reshape(-1, 2)
            I = ascontiguousarray(flat_data[:, 0], dtype='intc')
            J = ascontiguousarray(flat_data[:, 1], dtype='intc')
            V = ones(len(I), dtype='int8')  # filler
        elif is_complex:
            flat_data = flat_data.reshape(-1, 4)
            I = ascontiguousarray(flat_data[:, 0], dtype='intc')
            J = ascontiguousarray(flat_data[:, 1], dtype='intc')
            V = ascontiguousarray(flat_data[:, 2], dtype='complex')
            V.imag = flat_data[:, 3]
        else:
            flat_data = flat_data.reshape(-1, 3)
            I = ascontiguousarray(flat_data[:, 0], dtype='intc')
            J = ascontiguousarray(flat_data[:, 1], dtype='intc')
            V = ascontiguousarray(flat_data[:, 2], dtype='float')
        I -= 1  # adjust indices (base 1 -> base 0)
        J -= 1
        if has_symmetry:
            mask = (I != J)  # off diagonal mask
            od_I = I[mask]
            od_J = J[mask]
            od_V = V[mask]
            I = concatenate((I, od_J))
            J = concatenate((J, od_I))
            if is_skew:
                od_V *= -1
            elif is_herm:
                od_V = od_V.conjugate()
            V = concatenate((V, od_V))
        a = coo_matrix((V, (I, J)), shape=(rows, cols), dtype=dtype)
    else:
        raise NotImplementedError(format)
    return a

# ------------------------------------------------------------------------
|
ImportError
|
dataset/ETHPy150Open scipy/scipy/scipy/io/mmio.py/MMFile._parse_body
|
1,433
|
def _is_fromfile_compatible(stream):
    """
    Check whether `stream` is compatible with numpy.fromfile.

    Passing a gzipped file object to ``fromfile/fromstring`` doesn't work with
    Python3.
    """
    if sys.version_info[0] < 3:
        return True
    bad_cls = []
    try:
        import gzip
        bad_cls.append(gzip.GzipFile)
    except ImportError:
        pass
    try:
        import bz2
        bad_cls.append(bz2.BZ2File)
    except __HOLE__:
        pass
    bad_cls = tuple(bad_cls)
    return not isinstance(stream, bad_cls)

# -----------------------------------------------------------------------------
|
ImportError
|
dataset/ETHPy150Open scipy/scipy/scipy/io/mmio.py/_is_fromfile_compatible
|
1,434
|
@increment_visitor_counter
def survey(request, survey_code, code, page=1):
    answer = get_object_or_404(
        SurveyAnswer.objects.select_related('survey'),
        survey__is_active=True,
        survey__code=survey_code,
        code=code)
    answer.update_status(answer.STARTED)
    pages = answer.survey.pages()
    # Only look at valid pages
    try:
        page = int(page)
        groups = pages[page - 1]
    except (IndexError, __HOLE__, ValueError):
        return redirect(answer)
    kwargs = {
        'questions': Question.objects.filter(group__in=groups).order_by(
            'group', 'ordering').select_related('group'),
        'answer': answer,
    }
    if request.method == 'POST':
        form = QuestionForm(request.POST, **kwargs)
        if form.is_valid():
            form.save()
            if 'finish' in request.POST:
                answer.update_status(answer.FINISHED)
                return redirect(
                    'survey_survey_end',
                    survey_code=survey_code,
                    code=code)
            elif 'prev' in request.POST:
                offset = -1
            else:
                offset = 1
            if 0 < page + offset <= len(pages):
                return redirect(
                    'survey_survey_page',
                    survey_code=survey_code,
                    code=code,
                    page=page + offset)
    else:
        form = QuestionForm(**kwargs)
    return render(request, 'survey/form.html', {
        'survey': answer.survey,
        'answer': answer,
        'form': form,
        'page': page,
        'page_count': len(pages),
        'is_first_page': page == 1,
        'is_last_page': page == len(pages),
    })
|
TypeError
|
dataset/ETHPy150Open matthiask/survey/survey/views.py/survey
|
1,435
|
def set(self, ob, **kwargs):
    self.real_neighbor(ob)
    if isinstance(ob, NeuronClass):
        ob_name = ob.name()
        this_name = self.owner.name()
        for x in ob.member():
            # Get the name for the neighbor
            # XXX:
            try:
                n = x.name()
                side = n[n.find(ob_name)+len(ob_name):]
                name_here = this_name + side
                this_neuron = P.Neuron(name_here)
                self.owner.member(this_neuron)
                this_neuron.neighbor(x, **kwargs)
            except __HOLE__:
                # XXX: could default to all-to-all semantics
                print 'Do not recognize the membership of this neuron/neuron class', ob
    elif isinstance(ob, Neuron):
        for x in self.owner.member:
            x.neighbor(ob)
|
ValueError
|
dataset/ETHPy150Open openworm/PyOpenWorm/examples/rmgr.py/NC_neighbor.set
|
1,436
|
def process(self, definitions_asset, nodes, nvtristrip, materials, effects):
    # Look at the material to check for geometry requirements
    need_normals = False
    need_tangents = False
    generate_normals = False
    generate_tangents = False
    # Assumed to be a graphics geometry
    is_graphics_geometry = True
    if is_graphics_geometry:
        LOG.info('"%s" is assumed to be a graphics geometry. ' \
                 'Check referencing node for physics properties otherwise', self.name)
        self.meta['graphics'] = True
    def _find_material_from_instance_on_node(mat, node):
        for instance in node.instance_geometry:
            if instance.geometry == self.id:
                for surface, material in instance.materials.iteritems():
                    if surface == mat:
                        return material
        for child in node.children:
            material = _find_material_from_instance_on_node(mat, child)
            if material is not None:
                return material
        return None
    for mat_name in self.surfaces.iterkeys():
        # Ok, we have a mat_name but this may need to be mapped if the node has an instanced material.
        # So we find the node referencing this geometry, and see if the material has a mapping on it.
        for _, node in nodes.iteritems():
            instance_mat_name = _find_material_from_instance_on_node(mat_name, node)
            if instance_mat_name is not None:
                LOG.debug('Using instance material:%s to %s', mat_name, instance_mat_name)
                mat_name = instance_mat_name
                break
        if mat_name is None:
            mat_name = 'default'
        effect_name = None
        meta = { }
        material = definitions_asset.retrieve_material(mat_name, False)
        if material is not None:
            effect_name = material.get('effect', None)
            if 'meta' in material:
                meta.update(material['meta'])
        else:
            material = materials.get(mat_name, None)
            if material is not None:
                effect_name = material.effect_name
                # Dae2Material has no meta data, everything is on Dae2Effect
            else:
                continue
        if effect_name is not None:
            effect = definitions_asset.retrieve_effect(effect_name)
            if effect is not None:
                if 'meta' in effect:
                    meta.update(effect['meta'])
            else:
                effect = effects.get(effect_name, None)
                if effect is not None and effect.meta is not None:
                    meta.update(effect.meta)
        if meta.get('normals', False) is True:
            need_normals = True
        if meta.get('tangents', False) is True:
            need_tangents = True
        if meta.get('generate_normals', False) is True:
            generate_normals = True
        if meta.get('generate_tangents', False) is True:
            generate_tangents = True
            break
    if need_normals and 'NORMAL' not in self.inputs:
        generate_normals = True
    if need_tangents and 'TANGENT' not in self.inputs and 'BINORMAL' not in self.inputs:
        generate_tangents = True
    if generate_normals is False and generate_tangents is False and nvtristrip is None:
        return
    # Generate a single vertex pool.
    new_sources = { }
    old_semantics = { }
    old_offsets = { }
    has_uvs = False
    for semantic, input_stream in self.inputs.iteritems():
        new_sources[input_stream.source] = [ ]
        old_offsets[input_stream.source] = input_stream.offset
        old_semantics[semantic] = True
        if semantic == 'TEXCOORD' or semantic == 'TEXCOORD0':
            has_uvs = True
    if generate_tangents:
        if has_uvs is False:
            LOG.warning('Material "%s" requires tangents but geometry "%s" has no UVs', mat_name, self.name)
            return
    for mat_name, surface in self.surfaces.iteritems():
        if surface.type == JsonAsset.SurfaceTriangles:
            if generate_tangents:
                LOG.info('Process:generate_tangents:geometry:%s:surface:%s', self.name, mat_name)
            elif generate_normals:
                LOG.info('Process:generate_normals:geometry:%s:surface:%s', self.name, mat_name)
        elif surface.type == JsonAsset.SurfaceQuads:
            triangles = [ ]
            for (q0, q1, q2, q3) in surface.primitives:
                triangles.append( ( q0, q1, q2) )
                triangles.append( ( q0, q2, q3) )
            surface.primitives = triangles
            surface.type = JsonAsset.SurfaceTriangles
            LOG.info('Triangulated geometry:%s:surface:%s', self.name, mat_name)
            if generate_tangents:
                LOG.info('Process:generate_tangents:geometry:%s:surface:%s', self.name, mat_name)
            elif generate_normals:
                LOG.info('Process:generate_normals:geometry:%s:surface:%s', self.name, mat_name)
        else:
            return
    # For each surface in the geometry...
    new_surfaces = { }
    index = 0
    for mat_name, surface in self.surfaces.iteritems():
        start_index = index
        surface_sources = surface.sources
        # For each primitive within the surface...
        for primitive in surface.primitives:
            index += 1
            if isinstance(primitive[0], (tuple, list)):
                # For each input source and input offset...
                for source, offset in old_offsets.iteritems():
                    new_source = new_sources[source]
                    if source in surface_sources:
                        source_values = self.sources[source].values
                        # For each vertex in the primitive (triangle or quad)...
                        for vertex in primitive:
                            new_source.append( source_values[vertex[offset]] )
                    else:
                        zero = self.sources[source].zero_value
                        for vertex in primitive:
                            new_source.append(zero)
            else:
                # For each input source and input offset...
                for source in old_offsets.iterkeys():
                    new_source = new_sources[source]
                    if source in surface_sources:
                        source_values = self.sources[source].values
                        # For each vertex in the primitive (triangle or quad)...
                        for vertex in primitive:
                            new_source.append( source_values[vertex] )
                    else:
                        zero = self.sources[source].zero_value
                        for vertex in primitive:
                            new_source.append(zero)
        end_index = index
        new_surfaces[mat_name] = (start_index, end_index)
    mesh = Mesh()
    for semantic, input_stream in self.inputs.iteritems():
        mesh.set_values(new_sources[input_stream.source], semantic)
    mesh.primitives = [ (i, i + 1, i + 2) for i in range(0, index * 3, 3) ]
    #mesh.mirror_in('z')
    if generate_normals:
        mesh.generate_normals()
        mesh.smooth_normals()
        old_semantics['NORMAL'] = True
    if generate_tangents:
        mesh.generate_tangents()
        mesh.normalize_tangents()
        mesh.smooth_tangents()
        mesh.generate_normals_from_tangents()
        mesh.smooth_normals()
        old_semantics['TANGENT'] = True
        old_semantics['BINORMAL'] = True
    def compact_stream(values, semantic):
        """Generate a new value and index stream remapping and removing duplicate elements."""
        new_values = [ ]
        new_values_hash = { }
        new_index = [ ]
        for v in values:
            if v in new_values_hash:
                new_index.append(new_values_hash[v])
            else:
                i = len(new_values)
                new_index.append(i)
                new_values.append(v)
                new_values_hash[v] = i
        LOG.info('%s stream compacted from %i to %i elements', semantic, len(values), len(new_values))
        return (new_values, new_index)
    # !!! This should be updated to find index buffers that are similar rather than identical.
    new_indexes = [ ]
    new_offsets = { }
    for semantic in old_semantics.iterkeys():
        values = mesh.get_values(semantic)
        (new_values, new_values_index) = compact_stream(values, semantic)
        mesh.set_values(new_values, semantic)
        for i, indexes in enumerate(new_indexes):
            if indexes == new_values_index:
                new_offsets[semantic] = i
                break
        else:
            new_offsets[semantic] = len(new_indexes)
            new_indexes.append(new_values_index)
    indexes = zip(*new_indexes)
    # Use NVTriStrip to generate a vertex cache aware triangle list
    if nvtristrip is not None:
        for (start_index, end_index) in new_surfaces.itervalues():
            reverse_map = {}
            indexes_map = {}
            num_vertices = 0
            for n in xrange(start_index * 3, end_index * 3):
                index = indexes[n]
                if index not in indexes_map:
                    indexes_map[index] = num_vertices
                    reverse_map[num_vertices] = index
                    num_vertices += 1
            #LOG.info(num_vertices)
            if num_vertices < 65536:
                #LOG.info(indexes)
                try:
                    nvtristrip_proc = subprocess.Popen([nvtristrip],
                                                       stdin = subprocess.PIPE,
                                                       stdout = subprocess.PIPE)
                    stdin_write = nvtristrip_proc.stdin.write
                    for n in xrange(start_index * 3, end_index * 3):
                        index = indexes[n]
                        value = indexes_map[index]
                        stdin_write(str(value) + "\n")
                    stdin_write("-1\n")
                    stdin_write = None
                    indexes_map = None
                    stdout_readline = nvtristrip_proc.stdout.readline
                    try:
                        num_groups = int(stdout_readline())
                        group_type = int(stdout_readline())
                        num_indexes = int(stdout_readline())
                        if num_groups != 1 or group_type != 0 or num_indexes != (end_index - start_index) * 3:
                            LOG.warning("NvTriStripper failed: %d groups, type %d, %d indexes.",
                                        num_groups, group_type, num_indexes)
                        else:
                            n = start_index * 3
                            for value in stdout_readline().split():
                                value = int(value)
                                indexes[n] = reverse_map[value]
                                n += 1
                    except ValueError as e:
                        error_string = str(e).split("'")
                        if 1 < len(error_string):
                            error_string = error_string[1]
                        else:
                            error_string = str(e)
                        LOG.warning("NvTriStripper failed: %s", error_string)
                    stdout_readline = None
                    nvtristrip_proc = None
                    #LOG.info(indexes)
                except __HOLE__ as e:
                    LOG.warning("NvTriStripper failed: " + str(e))
            else:
                LOG.warning("Too many vertices to use NvTriStrip: %d", num_vertices)
            indexes_map = None
            reverse_map = None
    primitives = [ (indexes[i], indexes[i + 1], indexes[i + 2]) for i in xrange(0, len(indexes), 3) ]
    # Fix up the surfaces...
    for mat_name, (start_index, end_index) in new_surfaces.iteritems():
        self.surfaces[mat_name].primitives = primitives[start_index:end_index]
    # Fix up the inputs...
    for semantic, input_stream in self.inputs.iteritems():
        input_stream.offset = new_offsets[semantic]
    # Fix up the sources...
    for _, source in self.sources.iteritems():
        source.values = mesh.get_values(source.semantic)
    if generate_normals:
        self.inputs['NORMAL'] = Dae2Geometry.Input('NORMAL', 'normals', new_offsets['NORMAL'])
        self.sources['normals'] = Dae2Geometry.Source(mesh.normals, 'NORMAL', 'normals', 3, len(mesh.normals))
    if generate_tangents:
        self.inputs['BINORMAL'] = Dae2Geometry.Input('BINORMAL', 'binormals', new_offsets['BINORMAL'])
        self.inputs['TANGENT'] = Dae2Geometry.Input('TANGENT', 'tangents', new_offsets['TANGENT'])
        self.sources['binormals'] = Dae2Geometry.Source(mesh.binormals,
                                                        'BINORMAL',
                                                        'binormals',
                                                        3,
                                                        len(mesh.binormals))
        self.sources['tangents'] = Dae2Geometry.Source(mesh.tangents,
                                                       'TANGENT',
                                                       'tangents',
                                                       3,
                                                       len(mesh.tangents))
# pylint: enable=R0914
|
OSError
|
dataset/ETHPy150Open turbulenz/turbulenz_tools/turbulenz_tools/tools/dae2json.py/Dae2Geometry.process
|
1,437
|
def parse(input_filename="default.dae", output_filename="default.json", asset_url="", asset_root=".", infiles=None,
options=None):
"""Untility function to convert a Collada file into a JSON file."""
definitions_asset = standard_include(infiles)
animations = { }
animation_clips = { }
geometries = { }
effects = { }
materials = { }
images = { }
lights = { }
nodes = { }
physics_materials = { }
physics_models = { }
physics_bodies = { }
physics_nodes = { }
name_map = { }
geometry_names = { }
effect_names = { }
light_names = { }
node_names = { }
node_map = { }
url_handler = UrlHandler(asset_root, input_filename)
# DOM stuff from here...
try:
collada_e = ElementTree.parse(input_filename).getroot()
except __HOLE__ as e:
LOG.error('Failed loading: %s', input_filename)
LOG.error(' >> %s', e)
exit(1)
except ExpatError as e:
LOG.error('Failed processing: %s', input_filename)
LOG.error(' >> %s', e)
exit(2)
else:
if collada_e is not None:
fix_sid(collada_e, None)
# Asset...
asset_e = collada_e.find(tag('asset'))
# What is the world scale?
scale = 1.0
unit_e = asset_e.find(tag('unit'))
if unit_e is not None:
scale = float(unit_e.get('meter', '1.0'))
# pylint: disable=C0330
# What is the up axis?
upaxis_rotate = None
upaxis_e = asset_e.find(tag('up_axis'))
if upaxis_e is not None:
if upaxis_e.text == 'X_UP':
upaxis_rotate = [ 0.0, 1.0, 0.0, 0.0,
-1.0, 0.0, 0.0, 0.0,
0.0, 0.0, 1.0, 0.0,
0.0, 0.0, 0.0, 1.0 ]
elif upaxis_e.text == 'Z_UP':
upaxis_rotate = [ 1.0, 0.0, 0.0, 0.0,
0.0, 0.0, -1.0, 0.0,
0.0, 1.0, 0.0, 0.0,
0.0, 0.0, 0.0, 1.0 ]
LOG.info('Up axis:%s', upaxis_e.text)
# pylint: enable=C0330
# Core COLLADA elements are:
#
# library_animation_clips - not supported
# library_animations - not supported
# instance_animation - not supported
# library_cameras - not supported
# instance_camera - supported
# library_controllers - not supported
# instance_controller - supported
# library_geometries - supported
# instance_geometry - supported
# library_lights - supported
# instance_light - supported
# library_nodes - not supported
# instance_node - supported
# library_visual_scenes - supported
# instance_visual_scene - not supported
# scene - not supported
geometries_e = collada_e.find(tag('library_geometries'))
if geometries_e is not None:
for x in geometries_e.findall(tag('geometry')):
g = Dae2Geometry(x, scale, geometries_e, name_map, geometry_names)
# For now we only support mesh and convex_mesh
if g.type == 'mesh' or g.type == 'convex_mesh':
geometries[g.id] = g
else:
LOG.info('Collada file without:library_geometries:%s', input_filename)
lights_e = collada_e.find(tag('library_lights'))
if lights_e is not None:
for x in lights_e.findall(tag('light')):
l = Dae2Light(x, name_map, light_names)
lights[l.id] = l
controllers_e = collada_e.find(tag('library_controllers'))
visual_scenes_e = collada_e.find(tag('library_visual_scenes'))
if visual_scenes_e is not None:
visual_scene_e = visual_scenes_e.findall(tag('visual_scene'))
if visual_scene_e is not None:
if len(visual_scene_e) > 1:
LOG.warning('Collada file with more than 1:visual_scene:%s', input_filename)
node_e = visual_scene_e[0].findall(tag('node'))
for n in node_e:
n = Dae2Node(n, scale, None, upaxis_rotate, None, controllers_e, collada_e,
name_map, node_names, node_map, geometries)
nodes[n.id] = n
if len(node_e) == 0:
LOG.warning('Collada file without:node:%s', input_filename)
else:
LOG.warning('Collada file without:visual_scene:%s', input_filename)
else:
LOG.warning('Collada file without:library_visual_scenes:%s', input_filename)
animations_e = collada_e.find(tag('library_animations'))
if animations_e is not None:
for x in animations_e.findall(tag('animation')):
a = Dae2Animation(x, animations_e, name_map, animations)
animations[a.id] = a
animation_clips_e = collada_e.find(tag('library_animation_clips'))
if animation_clips_e is not None:
for x in animation_clips_e.findall(tag('animation_clip')):
c = Dae2AnimationClip(x, scale, upaxis_rotate, animation_clips_e, name_map, animations, nodes, None)
animation_clips[c.id] = c
else:
if animations_e is not None:
LOG.info('Exporting default animations from:%s', input_filename)
for n in nodes:
c = Dae2AnimationClip(x, scale, upaxis_rotate, None, name_map, animations, nodes, n)
if c.anim:
animation_clips[c.id] = c
# FX COLLADA elements are:
#
# library_effects - supported
# instance_effect - supported
# library_materials - supported
# instance_material - supported
# library_images - supported
# instance_image - not supported
# Images have to be read before effects and materials
images_e = collada_e.find(tag('library_images'))
if images_e is not None:
for x in images_e.findall(tag('image')):
i = Dae2Image(x, url_handler, name_map)
images[i.id] = i
effects_e = collada_e.find(tag('library_effects'))
if effects_e is not None:
for x in effects_e.iter(tag('image')):
i = Dae2Image(x, url_handler, name_map)
images[i.id] = i
for x in effects_e.findall(tag('effect')):
e = Dae2Effect(x, url_handler, name_map, effect_names)
effects[e.id] = e
else:
LOG.info('Collada file without:library_effects:%s', input_filename)
# json.AddObject("default")
# json.AddString("type", "lambert")
materials_e = collada_e.find(tag('library_materials'))
if materials_e is not None:
for x in materials_e.findall(tag('material')):
m = Dae2Material(x, name_map)
materials[m.id] = m
else:
LOG.info('Collada file without:library_materials:%s', input_filename)
# json.AddObject("default")
# json.AddString("effect", "default")
# Physics COLLADA elements are:
#
# library_force_fields - not supported
# instance_force_field - not supported
# library_physics_materials - supported
# instance_physics_material - supported
# library_physics_models - supported
# instance_physics_model - supported
# library_physics_scenes - supported
# instance_physics_scene - not supported
# instance_rigid_body - supported
# instance_rigid_constraint - not supported
physics_materials_e = collada_e.find(tag('library_physics_materials'))
if physics_materials_e is not None:
for x in physics_materials_e.findall(tag('physics_material')):
m = Dae2PhysicsMaterial(x, name_map)
physics_materials[m.id] = m
physics_models_e = collada_e.find(tag('library_physics_models'))
if physics_models_e is not None:
for x in physics_models_e.findall(tag('physics_model')):
m = Dae2PhysicsModel(x, geometries_e, physics_bodies, name_map)
physics_models[m.id] = m
physics_scenes_e = collada_e.find(tag('library_physics_scenes'))
if physics_scenes_e is not None:
        physics_scene_e = physics_scenes_e.findall(tag('physics_scene'))
        if physics_scene_e:  # findall() returns a list, never None; test for non-empty
if len(physics_scene_e) > 1:
LOG.warning('Collada file with more than 1:physics_scene:%s', input_filename)
for x in physics_scene_e[0].findall(tag('instance_physics_model')):
i = Dae2InstancePhysicsModel(x)
physics_nodes[i.name] = i
# Drop reference to the etree
collada_e = None
# Process asset...
for _, node in nodes.iteritems():
node.process(nodes)
for _, geometry in geometries.iteritems():
geometry.process(definitions_asset, nodes, options.nvtristrip, materials, effects)
# Create JSON...
json_asset = JsonAsset()
def _attach(asset_type):
if options.include_types is not None:
return asset_type in options.include_types
if options.exclude_types is not None:
return asset_type not in options.exclude_types
return True
# By default attach images map
if _attach('images'):
for _, image in images.iteritems():
image.attach(json_asset)
if _attach('effects'):
for _, effect in effects.iteritems():
effect.attach(json_asset, definitions_asset)
if _attach('materials'):
for _, material in materials.iteritems():
material.attach(json_asset, definitions_asset, name_map)
if _attach('geometries'):
for _, geometry in geometries.iteritems():
geometry.attach(json_asset)
if _attach('lights'):
for _, light in lights.iteritems():
light.attach(json_asset, definitions_asset)
if _attach('nodes'):
for _, node in nodes.iteritems():
node.attach(json_asset, url_handler, name_map)
if _attach('animations'):
for _, animation_clip in animation_clips.iteritems():
animation_clip.attach(json_asset, name_map)
if _attach('physicsmaterials'):
for _, physics_material in physics_materials.iteritems():
physics_material.attach(json_asset)
if _attach('physicsnodes'):
for _, physics_node in physics_nodes.iteritems():
physics_node.attach(json_asset, physics_bodies, name_map, node_map)
if _attach('physicsmodels'):
for _, physics_model in physics_models.iteritems():
physics_model.attach(json_asset)
if not options.keep_unused_images:
remove_unreferenced_images(json_asset)
# Write JSON...
try:
standard_json_out(json_asset, output_filename, options)
except IOError as e:
LOG.error('Failed processing: %s', output_filename)
LOG.error(' >> %s', e)
exit(3)
return json_asset
# pylint: enable=R0914
|
IOError
|
dataset/ETHPy150Open turbulenz/turbulenz_tools/turbulenz_tools/tools/dae2json.py/parse
|
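The parse routine above depends on ElementTree's two lookup behaviors: find() yields an Element or None, while findall() always yields a list, possibly empty. A minimal standard-library sketch (the XML snippet is made up):

import xml.etree.ElementTree as ET

collada = ET.fromstring(
    '<COLLADA><library_geometries><geometry id="g0"/></library_geometries></COLLADA>')

geometries_e = collada.find('library_geometries')     # Element or None
if geometries_e is not None:                          # correct presence test for find()
    geometry_list = geometries_e.findall('geometry')  # always a list, possibly empty
    if geometry_list:                                 # truthiness is the right test here
        print('first geometry id:', geometry_list[0].get('id'))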
1,438
|
def set_task_flags(self, task_flags):
try:
flags = int(task_flags)
except (__HOLE__, TypeError):
flags = self.words_to_flags(task_flags, "TASK_FLAG_")
self.task.SetTaskFlags(flags)
|
ValueError
|
dataset/ETHPy150Open tjguk/winsys/winsys/scheduled_tasks.py/Task.set_task_flags
|
1,439
|
def _intercept(self, env, start_response):
import figleaf
try:
import cPickle as pickle
except __HOLE__:
import pickle
coverage = figleaf.get_info()
s = pickle.dumps(coverage)
start_response("200 OK", [('Content-type', 'application/x-pickle')])
return [s]
|
ImportError
|
dataset/ETHPy150Open rtyler/Spawning/spawning/spawning_child.py/FigleafCoverage._intercept
|
1,440
|
def send_status_to_controller(self):
try:
child_status = {'pid':os.getpid()}
if self.server:
child_status['concurrent_requests'] = \
self.server.outstanding_requests
else:
child_status['error'] = 'Starting...'
body = json.dumps(child_status)
import urllib2
urllib2.urlopen(self.controller_url, body)
except (KeyboardInterrupt, __HOLE__,
eventlet.greenthread.greenlet.GreenletExit):
raise
except Exception, e:
# we really don't want exceptions here to stop read_pipe_and_die
pass
|
SystemExit
|
dataset/ETHPy150Open rtyler/Spawning/spawning/spawning_child.py/ChildStatus.send_status_to_controller
|
1,441
|
def warn_controller_of_imminent_death(controller_pid):
# The controller responds to a SIGUSR1 by kicking off a new child process.
try:
os.kill(controller_pid, signal.SIGUSR1)
except __HOLE__, e:
if not e.errno == errno.ESRCH:
raise
|
OSError
|
dataset/ETHPy150Open rtyler/Spawning/spawning/spawning_child.py/warn_controller_of_imminent_death
|
1,442
|
def serve_from_child(sock, config, controller_pid):
threads = config.get('threadpool_workers', 0)
wsgi_application = spawning.util.named(config['app_factory'])(config)
if config.get('coverage'):
wsgi_application = FigleafCoverage(wsgi_application)
if config.get('sysinfo'):
wsgi_application = SystemInfo(wsgi_application)
if threads >= 1:
# proxy calls of the application through tpool
wsgi_application = tpool_wsgi(wsgi_application)
elif threads != 1:
print "(%s) not using threads, installing eventlet cooperation monkeypatching" % (
os.getpid(), )
eventlet.patcher.monkey_patch(all=False, socket=True)
host, port = sock.getsockname()
access_log_file = config.get('access_log_file')
if access_log_file is not None:
access_log_file = open(access_log_file, 'a')
max_age = 0
if config.get('max_age'):
max_age = int(config.get('max_age'))
server_event = eventlet.event.Event()
# the status object wants to have a reference to the server object
if config.get('status_port'):
def send_server_to_status(server_event):
server = server_event.wait()
get_statusobj().server = server
eventlet.spawn(send_server_to_status, server_event)
http_version = config.get('no_keepalive') and 'HTTP/1.0' or 'HTTP/1.1'
try:
wsgi_args = (sock, wsgi_application)
wsgi_kwargs = {'log' : access_log_file, 'server_event' : server_event, 'max_http_version' : http_version}
if config.get('no_keepalive'):
wsgi_kwargs.update({'keepalive' : False})
if max_age:
wsgi_kwargs.update({'timeout_value' : True})
eventlet.with_timeout(max_age, eventlet.wsgi.server, *wsgi_args,
**wsgi_kwargs)
warn_controller_of_imminent_death(controller_pid)
else:
eventlet.wsgi.server(*wsgi_args, **wsgi_kwargs)
except __HOLE__:
# controller probably doesn't know that we got killed by a SIGINT
warn_controller_of_imminent_death(controller_pid)
except ExitChild:
pass # parent killed us, it already knows we're dying
## Set a deadman timer to violently kill the process if it doesn't die after
## some long timeout.
signal.signal(signal.SIGALRM, deadman_timeout)
signal.alarm(config['deadman_timeout'])
## Once we get here, we just need to handle outstanding sockets, not
## accept any new sockets, so we should close the server socket.
sock.close()
server = server_event.wait()
last_outstanding = None
while server.outstanding_requests:
if last_outstanding != server.outstanding_requests:
print "(%s) %s requests remaining, waiting... (timeout after %s)" % (
os.getpid(), server.outstanding_requests, config['deadman_timeout'])
last_outstanding = server.outstanding_requests
eventlet.sleep(0.1)
print "(%s) *** Child exiting: all requests completed at %s" % (
os.getpid(), time.asctime())
|
KeyboardInterrupt
|
dataset/ETHPy150Open rtyler/Spawning/spawning/spawning_child.py/serve_from_child
|
1,443
|
def test_empty(self):
# Raise an ImportWarning if sys.meta_path is empty.
module_name = 'nothing'
try:
del sys.modules[module_name]
except __HOLE__:
pass
with util.import_state(meta_path=[]):
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter('always')
self.assertIsNone(importlib._bootstrap._find_module('nothing',
None))
self.assertEqual(len(w), 1)
self.assertTrue(issubclass(w[-1].category, ImportWarning))
|
KeyError
|
dataset/ETHPy150Open amrdraz/kodr/app/brython/www/src/Lib/test/test_importlib/import_/test_meta_path.py/CallingOrder.test_empty
|
1,444
|
def __getattr__(self, attr):
try:
return self[attr]
except __HOLE__:
raise AttributeError(r"'JsonDict' object has no attribute '%s'" % attr)
|
KeyError
|
dataset/ETHPy150Open michaelliao/sinaweibopy/weibo.py/JsonDict.__getattr__
|
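A minimal Python 3 sketch of the dict-to-attribute bridge the record above masks, with the KeyError translated as the label indicates (the demo values are illustrative):

class JsonDict(dict):
    """dict whose keys are also readable as attributes."""
    def __getattr__(self, attr):
        try:
            return self[attr]
        except KeyError:
            # Translate so hasattr()/getattr() see the expected exception type
            raise AttributeError("'JsonDict' object has no attribute %r" % attr)

d = JsonDict(status='ok')
print(d.status)                      # 'ok', served by __getattr__
print(getattr(d, 'missing', None))   # None, because AttributeError was raised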
1,445
|
def get_provider_or_404(provider_id):
try:
return current_app.extensions['social'].providers[provider_id]
except __HOLE__:
abort(404)
|
KeyError
|
dataset/ETHPy150Open mattupstate/flask-social/flask_social/utils.py/get_provider_or_404
|
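The lookup-or-404 idiom above in runnable form; a sketch assuming Flask is installed (the provider registry here is illustrative):

from flask import Flask, abort

app = Flask(__name__)
PROVIDERS = {'github': object()}   # illustrative registry

def get_provider_or_404(provider_id):
    try:
        return PROVIDERS[provider_id]
    except KeyError:
        abort(404)                  # raises NotFound, rendered as a 404 response

@app.route('/login/<provider_id>')
def login(provider_id):
    get_provider_or_404(provider_id)
    return 'ok'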
1,446
|
def request(self, kvp):
"""process OAI-PMH request"""
kvpout = {'service': 'CSW', 'version': '2.0.2', 'mode': 'oaipmh'}
LOGGER.debug('Incoming kvp: %s' % kvp)
if 'verb' in kvp:
if 'metadataprefix' in kvp:
self.metadata_prefix = kvp['metadataprefix']
try:
kvpout['outputschema'] = self._get_metadata_prefix(kvp['metadataprefix'])
except __HOLE__:
kvpout['outputschema'] = kvp['metadataprefix']
else:
self.metadata_prefix = 'csw-record'
LOGGER.info('metadataPrefix: %s' % self.metadata_prefix)
if kvp['verb'] in ['ListRecords', 'ListIdentifiers', 'GetRecord']:
kvpout['request'] = 'GetRecords'
kvpout['resulttype'] = 'results'
kvpout['typenames'] = 'csw:Record'
kvpout['elementsetname'] = 'full'
if kvp['verb'] in ['Identify', 'ListMetadataFormats', 'ListSets']:
kvpout['request'] = 'GetCapabilities'
elif kvp['verb'] == 'GetRecord':
kvpout['request'] = 'GetRecordById'
if 'identifier' in kvp:
kvpout['id'] = kvp['identifier']
if ('outputschema' in kvpout and
kvp['metadataprefix'] == 'oai_dc'): # just use default DC
del kvpout['outputschema']
elif kvp['verb'] in ['ListRecords', 'ListIdentifiers']:
if 'resumptiontoken' in kvp:
kvpout['startposition'] = kvp['resumptiontoken']
if ('outputschema' in kvpout and
kvp['verb'] == 'ListIdentifiers'): # simple output only
pass #del kvpout['outputschema']
if ('outputschema' in kvpout and
kvp['metadataprefix'] in ['dc', 'oai_dc']): # just use default DC
del kvpout['outputschema']
start = end = None
LOGGER.info('Scanning temporal parameters')
if 'from' in kvp:
start = 'dc:date >= %s' % kvp['from']
if 'until' in kvp:
end = 'dc:date <= %s' % kvp['until']
if any([start is not None, end is not None]):
if all([start is not None, end is not None]):
time_query = '%s and %s' % (start, end)
elif end is None:
time_query = start
elif start is None:
time_query = end
kvpout['constraintlanguage'] = 'CQL_TEXT'
kvpout['constraint'] = time_query
LOGGER.debug('Resulting parameters: %s' % kvpout)
return kvpout
|
KeyError
|
dataset/ETHPy150Open geopython/pycsw/pycsw/oaipmh.py/OAIPMH.request
|
1,447
|
def _get_metadata_prefix(self, prefix):
"""Convenience function to return metadataPrefix as CSW outputschema"""
try:
outputschema = self.metadata_formats[prefix]['namespace']
except __HOLE__:
outputschema = prefix
return outputschema
|
KeyError
|
dataset/ETHPy150Open geopython/pycsw/pycsw/oaipmh.py/OAIPMH._get_metadata_prefix
|
1,448
|
def encode_uid(pk):
try:
from django.utils.http import urlsafe_base64_encode
from django.utils.encoding import force_bytes
return urlsafe_base64_encode(force_bytes(pk)).decode()
except __HOLE__:
from django.utils.http import int_to_base36
return int_to_base36(pk)
|
ImportError
|
dataset/ETHPy150Open sunscrapers/djoser/djoser/utils.py/encode_uid
|
1,449
|
def decode_uid(pk):
try:
from django.utils.http import urlsafe_base64_decode
from django.utils.encoding import force_text
return force_text(urlsafe_base64_decode(pk))
except __HOLE__:
from django.utils.http import base36_to_int
return base36_to_int(pk)
|
ImportError
|
dataset/ETHPy150Open sunscrapers/djoser/djoser/utils.py/decode_uid
|
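Both djoser helpers above follow the same shim: try the newer import, fall back on ImportError. A self-contained sketch of the idiom on plain Python 3 (functools.cache landed in 3.9):

try:
    from functools import cache            # Python 3.9+
except ImportError:
    from functools import lru_cache        # older interpreters
    cache = lru_cache(maxsize=None)

@cache
def fib(n):
    return n if n < 2 else fib(n - 1) + fib(n - 2)

print(fib(30))   # 832040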
1,450
|
def UserModelString():
try:
return settings.AUTH_USER_MODEL
except __HOLE__:
return 'auth.User'
|
AttributeError
|
dataset/ETHPy150Open macropin/django-registration/registration/users.py/UserModelString
|
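The same settings-with-default lookup can be spelled with getattr, which absorbs the AttributeError internally; a sketch assuming Django is installed and configured (behavior is equivalent, so the choice is stylistic):

from django.conf import settings

def user_model_string():
    # getattr with a default replaces the try/except AttributeError form
    return getattr(settings, 'AUTH_USER_MODEL', 'auth.User')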
1,451
|
def visit(self, node):
if node is None:
return None
if type(node) is tuple:
return tuple([self.visit(n) for n in node])
try:
self.blame_stack.append((node.lineno, node.col_offset,))
info = True
except __HOLE__:
info = False
visitor = getattr(self, 'visit_%s' % node.__class__.__name__, None)
if visitor is None:
raise Exception('Unhandled node type %r' % type(node))
ret = visitor(node)
if info:
self.blame_stack.pop()
return ret
|
AttributeError
|
dataset/ETHPy150Open timonwong/OmniMarkupPreviewer/OmniMarkupLib/Renderers/libs/python2/genshi/template/astutil.py/ASTCodeGenerator.visit
|
1,452
|
def _clone(self, node):
clone = node.__class__()
for name in getattr(clone, '_attributes', ()):
try:
setattr(clone, name, getattr(node, name))
except AttributeError:
pass
for name in clone._fields:
try:
value = getattr(node, name)
except __HOLE__:
pass
else:
if value is None:
pass
elif isinstance(value, list):
value = [self.visit(x) for x in value]
elif isinstance(value, tuple):
value = tuple(self.visit(x) for x in value)
else:
value = self.visit(value)
setattr(clone, name, value)
return clone
|
AttributeError
|
dataset/ETHPy150Open timonwong/OmniMarkupPreviewer/OmniMarkupLib/Renderers/libs/python2/genshi/template/astutil.py/ASTTransformer._clone
|
1,453
|
@inlineCallbacks
def _callback(self):
"""
This will be called repeatedly every `self.interval` seconds.
`self.subscriptions` contain tuples of (obj, args, kwargs) for
each subscribing object.
If overloading, this callback is expected to handle all
subscriptions when it is triggered. It should not return
anything and should not traceback on poorly designed hooks.
The callback should ideally work under @inlineCallbacks so it
can yield appropriately.
The _hook_key, which is passed down through the handler via
kwargs is used here to identify which hook method to call.
"""
for store_key, (obj, args, kwargs) in self.subscriptions.items():
hook_key = yield kwargs.pop("_hook_key", "at_tick")
if not obj or not obj.pk:
# object was deleted between calls
self.remove(store_key)
continue
try:
yield _GA(obj, hook_key)(*args, **kwargs)
except __HOLE__:
log_trace()
self.remove(store_key)
except Exception:
log_trace()
finally:
# make sure to re-store
kwargs["_hook_key"] = hook_key
|
ObjectDoesNotExist
|
dataset/ETHPy150Open evennia/evennia/evennia/scripts/tickerhandler.py/Ticker._callback
|
1,454
|
def _store_key(self, obj, interval, idstring=""):
"""
Tries to create a store_key for the object. Returns a tuple
(isdb, store_key) where isdb is a boolean True if obj was a
database object, False otherwise.
Args:
obj (Object): Subscribing object.
interval (int): Ticker interval
idstring (str, optional): Additional separator between
different subscription types.
"""
if hasattr(obj, "db_key"):
# create a store_key using the database representation
objkey = pack_dbobj(obj)
isdb = True
else:
# non-db object, look for a property "key" on it, otherwise
# use its memory location.
try:
objkey = _GA(obj, "key")
except __HOLE__:
objkey = id(obj)
isdb = False
        # return isdb and store_key
return isdb, (objkey, interval, idstring)
|
AttributeError
|
dataset/ETHPy150Open evennia/evennia/evennia/scripts/tickerhandler.py/TickerHandler._store_key
|
1,455
|
def __getitem__(self, key):
try:
return getattr(self, key)
except __HOLE__:
raise KeyError(key)
|
AttributeError
|
dataset/ETHPy150Open letsencrypt/letsencrypt/acme/acme/jose/util.py/ImmutableMap.__getitem__
|
1,456
|
def __getattr__(self, name):
try:
return self._items[name]
except __HOLE__:
raise AttributeError(name)
|
KeyError
|
dataset/ETHPy150Open letsencrypt/letsencrypt/acme/acme/jose/util.py/frozendict.__getattr__
|
1,457
|
def validate ( self, object, name, value ):
""" Validates that a specified value is valid for this trait.
"""
if type(value) is int:
return value
elif type(value) is long:
return int(value)
try:
int_value = operator.index( value )
except __HOLE__:
pass
else:
return int(int_value)
self.error( object, name, value )
|
TypeError
|
dataset/ETHPy150Open enthought/traits/traits/trait_types.py/BaseInt.validate
|
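The masked branch above leans on operator.index, which returns an object's integer interpretation or raises TypeError when it has none; a minimal Python 3 sketch:

import operator

class Size:
    def __init__(self, n):
        self._n = n
    def __index__(self):            # opt in to integer contexts
        return self._n

print(operator.index(Size(7)))      # 7
try:
    operator.index(3.5)             # floats do not define __index__
except TypeError as err:
    print('rejected:', err)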
1,458
|
def validate ( self, object, name, value ):
""" Validates that the values is a valid list.
"""
if not isinstance( value, list ):
try:
# Should work for all iterables as well as strings (which do
# not define an __iter__ method)
value = list( value )
except (ValueError, __HOLE__):
value = [ value ]
return super( CList, self ).validate( object, name, value )
|
TypeError
|
dataset/ETHPy150Open enthought/traits/traits/trait_types.py/CList.validate
|
1,459
|
def validate ( self, object, name, value ):
""" Validates that the values is a valid list.
"""
if not isinstance( value, set ):
try:
# Should work for all iterables as well as strings (which do
# not define an __iter__ method)
value = set( value )
except ( ValueError, __HOLE__ ):
value = set( [ value ] )
return super( CSet, self ).validate( object, name, value )
|
TypeError
|
dataset/ETHPy150Open enthought/traits/traits/trait_types.py/CSet.validate
|
1,460
|
def get_one(self, context, name):
"""
Returns a function if it is registered, the context is ignored.
"""
try:
return self.functions[name]
except __HOLE__:
raise FunctionNotFound(name)
|
KeyError
|
dataset/ETHPy150Open stepank/pyws/src/pyws/functions/managers.py/FixedFunctionManager.get_one
|
1,461
|
def _test_impl(self, data, ref):
res = []
while True:
tok = self.lexer.token()
if not tok:
break
res.append(tok.type)
if is_string(ref):
ref = split(ref)
try:
self.assertEqual(res, ref)
except __HOLE__:
e = extract_exception()
cnt = 0
for i, j in zip(res, ref):
if not i == j:
break
cnt += 1
print("Break at index %d" % cnt)
raise e
|
AssertionError
|
dataset/ETHPy150Open cournape/Bento/bento/parser/tests/test_lexer.py/TestLexer._test_impl
|
1,462
|
@register.tag
def featured_projects(parser, token):
try:
tag_name, number = token.split_contents()
except __HOLE__:
raise template.TemplateSyntaxError("%r tag requires a single argument" % token.contents.split()[0])
try:
int(number)
except ValueError:
raise template.TemplateSyntaxError("%r tag's argument should be an integer" % tag_name)
return FeaturedProjectNode(number)
|
ValueError
|
dataset/ETHPy150Open python/raspberryio/raspberryio/project/templatetags/project_tags.py/featured_projects
|
1,463
|
@classmethod
def super_key(cls, supertable, default=None):
"""
Get the name of the key for a super-entity
@param supertable: the super-entity table
"""
if supertable is None and default:
return default
if isinstance(supertable, str):
supertable = cls.table(supertable)
try:
return supertable._id.name
except __HOLE__:
pass
raise SyntaxError("No id-type key found in %s" % supertable._tablename)
# -------------------------------------------------------------------------
|
AttributeError
|
dataset/ETHPy150Open sahana/eden/modules/s3/s3model.py/S3Model.super_key
|
1,464
|
def increment_filename(filename):
dirname = os.path.dirname(filename)
root, ext = os.path.splitext(os.path.basename(filename))
result = None
try:
root_start, root_number_end = rx_numbered.match(root).groups()
except __HOLE__:
pass
else:
result = "%s%s" % (root_start, str(int(root_number_end) + 1))
if not result:
root_start, root_end = root[:-1], root[-1]
if root_end in ("z", "Z"):
root_end = "%sa" % root_end
else:
root_end = chr(ord(root_end) + 1)
result = "%s%s" % (root_start, root_end)
return "%(dirname)s%(sep)s%(root)s%(ext)s" % {
'dirname': dirname,
'sep': dirname and "/" or "",
'root': result,
'ext': ext
}
|
AttributeError
|
dataset/ETHPy150Open Starou/SimpleIDML/src/simple_idml/utils.py/increment_filename
|
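The masked exception above arises from calling .groups() on the None that re.match returns on failure; a sketch of the same probe-by-AttributeError style with an illustrative pattern:

import re

rx_numbered = re.compile(r'(.*?)(\d+)$')  # illustrative stand-in for the source regex

def next_name(root):
    try:
        start, number = rx_numbered.match(root).groups()  # AttributeError if no match
    except AttributeError:
        return root + '1'
    return '%s%d' % (start, int(number) + 1)

print(next_name('shot12'))  # shot13
print(next_name('shot'))    # shot1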
1,465
|
def hexascii(self, target_data, byte, offset):
color = "green"
for (fp_i, data_i) in iterator(target_data):
diff_count = 0
for (fp_j, data_j) in iterator(target_data):
if fp_i == fp_j:
continue
try:
if data_i[offset] != data_j[offset]:
diff_count += 1
except __HOLE__ as e:
diff_count += 1
if diff_count == len(target_data)-1:
color = "red"
elif diff_count > 0:
color = "blue"
break
hexbyte = self.colorize("%.2X" % ord(byte), color)
if byte not in string.printable or byte in string.whitespace:
byte = "."
asciibyte = self.colorize(byte, color)
return (hexbyte, asciibyte)
|
IndexError
|
dataset/ETHPy150Open devttys0/binwalk/src/binwalk/modules/hexdiff.py/HexDiff.hexascii
|
1,466
|
@classmethod
def setupClass(cls):
global numpy
global assert_equal
global assert_almost_equal
try:
import numpy
from numpy.testing import assert_equal,assert_almost_equal
except __HOLE__:
raise SkipTest('NumPy not available.')
|
ImportError
|
dataset/ETHPy150Open gkno/gkno_launcher/src/networkx/linalg/tests/test_spectrum.py/TestSpectrum.setupClass
|
1,467
|
def get_name(self):
"""Returns a name of this instance"""
try:
docrule_name = self.doccode.get_title()
except (KeyError, __HOLE__):
docrule_name = 'No name given'
return unicode(docrule_name)
|
AttributeError
|
dataset/ETHPy150Open adlibre/Adlibre-DMS/adlibre_dms/apps/dms_plugins/models.py/DoccodePluginMapping.get_name
|
1,468
|
def to_dot(self):
"""Render the dot for the recorded execution."""
try:
import pygraphviz as pgv
except __HOLE__:
warnings.warn('Extra requirements for "trace" are not available.')
return
graph = pgv.AGraph(directed=True, strict=False)
hanging = set()
for node_id, node_name in self.nodes.items():
shape = 'box'
if node_id in self.activities:
shape = 'ellipse'
finish_id = 'finish-%s' % node_id
color, fontcolor = 'black', 'black'
if node_id in self.errors:
color, fontcolor = 'red', 'red'
graph.add_node(node_id, label=node_name, shape=shape, width=0.8,
color=color, fontcolor=fontcolor)
if node_id in self.results or node_id in self.errors:
if node_id in self.errors:
rlabel = str(self.errors[node_id])
else:
rlabel = short_repr.repr(self.results[node_id])
rlabel = ' ' + '\l '.join(rlabel.split('\n')) # Left align
graph.add_node(finish_id, label='', shape='point', width=0.1, color=color)
graph.add_edge(node_id, finish_id, arrowhead='none', penwidth=3, fontsize=8,
color=color, fontcolor=fontcolor, label=' ' + rlabel)
else:
hanging.add(node_id)
levels = ['l%s' % i for i in range(len(self.levels))]
for l in levels:
graph.add_node(l, shape='point', label='', width=0.1, style='invis')
if levels:
start = levels[0]
for l in levels[1:]:
graph.add_edge(start, l, style='invis')
start = l
for l_id, l in zip(levels, self.levels):
if isinstance(l, list):
graph.add_subgraph([l_id] + l, rank='same')
else:
graph.add_subgraph([l_id, 'finish-%s' % l], rank='same')
for from_node, to_nodes in self.deps.items():
if from_node in hanging:
hanging.remove(from_node)
color = 'black'
style = ''
if from_node in self.errors:
color = 'red'
from_node = 'finish-%s' % from_node
elif from_node in self.results:
from_node = 'finish-%s' % from_node
else:
style = 'dotted'
for to_node in to_nodes:
graph.add_edge(from_node, to_node, color=color, style=style)
if hanging:
for node_id in hanging:
finish_id = 'finish-%s' % node_id
graph.add_node(finish_id, label='', shape='point', width=0.1, style='invis')
graph.add_edge(node_id, finish_id, style='dotted', arrowhead='none')
# l_id is the last level here
graph.add_subgraph([l_id] + ['finish-%s' % h for h in hanging], rank='same')
for node_id in self.nodes:
retries = self.timeouts[node_id]
if retries:
graph.add_edge(node_id, node_id, label=' %s' % retries, color='orange',
fontcolor='orange', fontsize=8)
return graph
|
ImportError
|
dataset/ETHPy150Open severb/flowy/flowy/tracer.py/ExecutionTracer.to_dot
|
1,469
|
def prompt_user(query, results):
try:
response = None
        short_responses = 0
        while not response:
            response = input("{}\nYour Response: ".format(query))
            if not response or len(response) < 5:
                short_responses += 1
                # Do not ask more than twice to avoid annoying the user
                if short_responses > 2:
break
print("Please enter at least a sentence.")
results['prompts'][query] = response
return response
except __HOLE__:
# Hack for windows:
results['prompts'][query] = 'KeyboardInterrupt'
try:
print("Exiting Hint") # Second I/O will get KeyboardInterrupt
return ''
except KeyboardInterrupt:
return ''
|
KeyboardInterrupt
|
dataset/ETHPy150Open Cal-CS-61A-Staff/ok-client/client/protocols/hinting.py/prompt_user
|
1,470
|
def get_embed(url, max_width=None, finder=None):
# Check database
try:
return Embed.objects.get(url=url, max_width=max_width)
except Embed.DoesNotExist:
pass
# Get/Call finder
if not finder:
finder = get_default_finder()
embed_dict = finder(url, max_width)
# Make sure width and height are valid integers before inserting into database
try:
embed_dict['width'] = int(embed_dict['width'])
except (__HOLE__, ValueError):
embed_dict['width'] = None
try:
embed_dict['height'] = int(embed_dict['height'])
except (TypeError, ValueError):
embed_dict['height'] = None
# Make sure html field is valid
if 'html' not in embed_dict or not embed_dict['html']:
embed_dict['html'] = ''
# Create database record
embed, created = Embed.objects.get_or_create(
url=url,
max_width=max_width,
defaults=embed_dict,
)
# Save
embed.last_updated = datetime.now()
embed.save()
return embed
|
TypeError
|
dataset/ETHPy150Open torchbox/wagtail/wagtail/wagtailembeds/embeds.py/get_embed
|
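The paired (TypeError, ValueError) guards above generalize into a small coercion helper; a sketch (the helper name is illustrative):

def to_int_or_none(value):
    """int(value), or None when value is missing or malformed."""
    try:
        return int(value)
    except (TypeError, ValueError):  # None -> TypeError, 'abc' -> ValueError
        return None

print(to_int_or_none('640'))   # 640
print(to_int_or_none(None))    # None
print(to_int_or_none('auto'))  # None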
1,471
|
def testLogic(self):
x = 0 or None
self.assertEqual(x, None, "0 or None should be None not %s" % repr(x) )
x = None and None
self.assertEqual(x, None, "None or None should be None not %s" % repr(x) )
x = False or None
self.assertEqual(x, None, "False or None should be None not %s" % repr(x) )
self.assertTrue((1 or 2) is 1, "(1 or 2) is 1")
self.assertTrue((0 or 2) is 2, "(0 or 2) is 2")
self.assertTrue((False or 0) is 0, "(False or 0) is 0")
self.assertTrue((0 or False) is False, "(0 or False) is False")
self.assertTrue((0 and 2) is 0, "(0 and 2) is 0")
self.assertTrue((1 and 2) is 2, "(1 and 2) is 2")
self.assertTrue((2 and 1) is 1, "(2 and 1) is 1")
self.assertTrue(([] and 2) == [], "([] and 2) == []")
try:
self.assertTrue(({} and 2) == {}, "({} and 2) == {}")
except:
self.fail("Unexpected error on '({} and 2) == {}'")
try:
self.assertTrue((0 or False or {} or []) == [], "((0 or False or {} or []) == []")
except:
self.fail("Unexpected error on '(0 or False or {} or []) == []'")
f = None
try:
self.assertTrue((f and f.test()) == None, "(f and f.test()) == None")
except:
self.fail("Unexpected error on '(f and f.test()) == None'")
self.assertTrue(bool(None) is False, "bool(None) is False")
self.assertTrue(bool(False) is False, "bool(False) is False")
self.assertTrue(bool(0) is False, "bool(0) is False")
self.assertTrue(bool(0.0) is False, "bool(0.0) is False")
self.assertTrue(bool('') is False, "bool('') is False")
self.assertTrue(bool([]) is False, "bool('') is False")
self.assertTrue(bool({}) is False, "bool('') is False")
self.assertTrue(not None is True, "not None is True")
self.assertTrue(not False is True, "not False is True")
self.assertTrue(not 0 is True, "not 0 is True")
self.assertTrue(not 0.0 is True, "not 0.0 is True")
self.assertTrue(not '' is True, "not '' is True")
self.assertTrue(not [] is True, "not '' is True")
self.assertTrue(not {} is True, "not '' is True")
self.assertTrue(bool(1) is True, "bool('') is True")
self.assertTrue(bool(1.0) is True, "bool('') is True")
self.assertTrue(bool('a') is True, "bool('') is True")
self.assertTrue(bool([1]) is True, "bool('') is True")
self.assertTrue(bool({'a':1}) is True, "bool('') is True")
self.assertTrue(not 1 is False, "not '' is False")
self.assertTrue(not 1.0 is False, "not '' is False")
self.assertTrue(not 'a' is False, "not '' is False")
self.assertTrue(not [1] is False, "not '' is False")
self.assertTrue(not {'a':1} is False, "not '' is False")
d = {'hello': 5}
d2 = d or {}
try:
tst = d == d2
self.assertTrue(tst, "#297 -non-empty object or {} should return the object")
except __HOLE__:
self.fail("#297 TypeError should not have been thrown")
d = {}
d2 = d or 5
try:
tst = d2 == 5
self.assertTrue(tst, "#297 'empty object or 5' should return 5")
except TypeError:
self.fail("#297 TypeError should not have been thrown")
|
TypeError
|
dataset/ETHPy150Open anandology/pyjamas/examples/libtest/BoolTest.py/BoolTest.testLogic
|
1,472
|
def createsuperuser(username=None, email=None, password=None):
"""
Helper function for creating a superuser from the command line. All
arguments are optional and will be prompted-for if invalid or not given.
"""
try:
import pwd
except ImportError:
default_username = ''
else:
# Determine the current system user's username, to use as a default.
default_username = pwd.getpwuid(os.getuid())[0].replace(' ', '').lower()
# Determine whether the default username is taken, so we don't display
# it as an option.
if default_username:
try:
User.objects.get(username=default_username)
except User.DoesNotExist:
pass
else:
default_username = ''
try:
while 1:
if not username:
input_msg = 'Username'
if default_username:
input_msg += ' (Leave blank to use %r)' % default_username
username = raw_input(input_msg + ': ')
if default_username and username == '':
username = default_username
if not username.isalnum():
sys.stderr.write("Error: That username is invalid. Use only letters, digits and underscores.\n")
username = None
continue
try:
User.objects.get(username=username)
except User.DoesNotExist:
break
else:
sys.stderr.write("Error: That username is already taken.\n")
username = None
while 1:
if not email:
email = raw_input('E-mail address: ')
try:
validators.isValidEmail(email, None)
except validators.ValidationError:
sys.stderr.write("Error: That e-mail address is invalid.\n")
email = None
else:
break
while 1:
if not password:
password = getpass.getpass()
password2 = getpass.getpass('Password (again): ')
if password != password2:
sys.stderr.write("Error: Your passwords didn't match.\n")
password = None
continue
if password.strip() == '':
sys.stderr.write("Error: Blank passwords aren't allowed.\n")
password = None
continue
break
except __HOLE__:
sys.stderr.write("\nOperation cancelled.\n")
sys.exit(1)
u = User.objects.create_user(username, email, password)
u.is_staff = True
u.is_active = True
u.is_superuser = True
u.save()
print "Superuser created successfully."
|
KeyboardInterrupt
|
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-0.96/django/contrib/auth/create_superuser.py/createsuperuser
|
1,473
|
def process_request(self, request):
locale, path = utils.strip_path(request.path_info)
if localeurl_settings.USE_SESSION and not locale:
slocale = request.session.get('django_language')
if slocale and utils.supported_language(slocale):
locale = slocale
if localeurl_settings.USE_ACCEPT_LANGUAGE and not locale:
accept_lang_header = request.META.get('HTTP_ACCEPT_LANGUAGE', '')
header_langs = parse_accept_lang_header(accept_lang_header)
accept_langs = [
l for l in
(utils.supported_language(lang[0]) for lang in header_langs)
if l
]
if accept_langs:
locale = accept_langs[0]
locale_path = utils.locale_path(path, locale)
# locale case might be different in the two paths, that doesn't require
# a redirect (besides locale they'll be identical anyway)
if locale_path.lower() != request.path_info.lower():
locale_url = utils.add_script_prefix(locale_path)
qs = request.META.get("QUERY_STRING", "")
if qs:
# Force this to remain a byte-string by encoding locale_path
# first to avoid Unicode tainting - downstream will need to
# handle the job of handling in-the-wild character encodings:
locale_url = "%s?%s" % (locale_path.encode("utf-8"), qs)
redirect_class = HttpResponsePermanentRedirect
if not localeurl_settings.LOCALE_REDIRECT_PERMANENT:
redirect_class = HttpResponseRedirect
# @@@ iri_to_uri for Django 1.0; 1.1+ do it in HttpResp...Redirect
return redirect_class(iri_to_uri(locale_url))
request.path_info = path
if not locale:
try:
locale = request.LANGUAGE_CODE
except __HOLE__:
locale = settings.LANGUAGE_CODE
translation.activate(locale)
request.LANGUAGE_CODE = translation.get_language()
|
AttributeError
|
dataset/ETHPy150Open carljm/django-localeurl/localeurl/middleware.py/LocaleURLMiddleware.process_request
|
1,474
|
def main():
# bring our logging stuff up as early as possible
debug = ('-d' in sys.argv or '--debug' in sys.argv)
_init_logger(debug)
extension_mgr = _init_extensions()
baseline_formatters = [f.name for f in filter(lambda x:
hasattr(x.plugin,
'_accepts_baseline'),
extension_mgr.formatters)]
# now do normal startup
parser = argparse.ArgumentParser(
description='Bandit - a Python source code security analyzer',
formatter_class=argparse.RawDescriptionHelpFormatter
)
parser.add_argument(
'targets', metavar='targets', type=str, nargs='+',
help='source file(s) or directory(s) to be tested'
)
parser.add_argument(
'-r', '--recursive', dest='recursive',
action='store_true', help='find and process files in subdirectories'
)
parser.add_argument(
'-a', '--aggregate', dest='agg_type',
action='store', default='file', type=str,
choices=['file', 'vuln'],
help='aggregate output by vulnerability (default) or by filename'
)
parser.add_argument(
'-n', '--number', dest='context_lines',
action='store', default=3, type=int,
help='maximum number of code lines to output for each issue'
)
parser.add_argument(
'-c', '--configfile', dest='config_file',
action='store', default=None, type=str,
help='optional config file to use for selecting plugins and '
'overriding defaults'
)
parser.add_argument(
'-p', '--profile', dest='profile',
action='store', default=None, type=str,
help='profile to use (defaults to executing all tests)'
)
parser.add_argument(
'-t', '--tests', dest='tests',
action='store', default=None, type=str,
help='comma-separated list of test IDs to run'
)
parser.add_argument(
'-s', '--skip', dest='skips',
action='store', default=None, type=str,
help='comma-separated list of test IDs to skip'
)
parser.add_argument(
'-l', '--level', dest='severity', action='count',
default=1, help='report only issues of a given severity level or '
'higher (-l for LOW, -ll for MEDIUM, -lll for HIGH)'
)
parser.add_argument(
'-i', '--confidence', dest='confidence', action='count',
default=1, help='report only issues of a given confidence level or '
'higher (-i for LOW, -ii for MEDIUM, -iii for HIGH)'
)
output_format = 'screen' if sys.stdout.isatty() else 'txt'
parser.add_argument(
'-f', '--format', dest='output_format', action='store',
default=output_format, help='specify output format',
choices=sorted(extension_mgr.formatter_names)
)
parser.add_argument(
'-o', '--output', dest='output_file', action='store',
default=None, help='write report to filename'
)
parser.add_argument(
'-v', '--verbose', dest='verbose', action='store_true',
help='output extra information like excluded and included files'
)
parser.add_argument(
'-d', '--debug', dest='debug', action='store_true',
help='turn on debug mode'
)
parser.add_argument(
'--ignore-nosec', dest='ignore_nosec', action='store_true',
help='do not skip lines with # nosec comments'
)
parser.add_argument(
'-x', '--exclude', dest='excluded_paths', action='store',
default='', help='comma-separated list of paths to exclude from scan '
'(note that these are in addition to the excluded '
'paths provided in the config file)'
)
parser.add_argument(
'-b', '--baseline', dest='baseline', action='store',
default=None, help='path of a baseline report to compare against '
'(only JSON-formatted files are accepted)'
)
parser.add_argument(
'--ini', dest='ini_path', action='store', default=None,
help='path to a .bandit file that supplies command line arguments'
)
parser.add_argument(
'--version', action='version',
version='%(prog)s {version}'.format(version=bandit.__version__)
)
parser.set_defaults(debug=False)
parser.set_defaults(verbose=False)
parser.set_defaults(ignore_nosec=False)
plugin_info = ["%s\t%s" % (a[0], a[1].name) for a in
six.iteritems(extension_mgr.plugins_by_id)]
blacklist_info = []
for a in six.iteritems(extension_mgr.blacklist):
for b in a[1]:
blacklist_info.append('%s\t%s' % (b['id'], b['name']))
plugin_list = '\n\t'.join(sorted(set(plugin_info + blacklist_info)))
parser.epilog = ('The following tests were discovered and'
' loaded:\n\t{0}\n'.format(plugin_list))
# setup work - parse arguments, and initialize BanditManager
args = parser.parse_args()
try:
b_conf = b_config.BanditConfig(config_file=args.config_file)
except utils.ConfigError as e:
logger.error(e)
sys.exit(2)
# Handle .bandit files in projects to pass cmdline args from file
ini_options = _get_options_from_ini(args.ini_path, args.targets)
if ini_options:
# prefer command line, then ini file
args.excluded_paths = _log_option_source(args.excluded_paths,
ini_options.get('exclude'),
'excluded paths')
args.skips = _log_option_source(args.skips, ini_options.get('skips'),
'skipped tests')
args.tests = _log_option_source(args.tests, ini_options.get('tests'),
'selected tests')
# TODO(tmcpeak): any other useful options to pass from .bandit?
# if the log format string was set in the options, reinitialize
if b_conf.get_option('log_format'):
log_format = b_conf.get_option('log_format')
_init_logger(debug, log_format=log_format)
try:
profile = _get_profile(b_conf, args.profile, args.config_file)
_log_info(args, profile)
profile['include'].update(args.tests.split(',') if args.tests else [])
profile['exclude'].update(args.skips.split(',') if args.skips else [])
extension_mgr.validate_profile(profile)
except (utils.ProfileNotFound, __HOLE__) as e:
logger.error(e)
sys.exit(2)
b_mgr = b_manager.BanditManager(b_conf, args.agg_type, args.debug,
profile=profile, verbose=args.verbose,
ignore_nosec=args.ignore_nosec)
if args.baseline is not None:
try:
with open(args.baseline) as bl:
data = bl.read()
b_mgr.populate_baseline(data)
except IOError:
logger.warning("Could not open baseline report: %s", args.baseline)
sys.exit(2)
if args.output_format not in baseline_formatters:
logger.warning('Baseline must be used with one of the following '
'formats: ' + str(baseline_formatters))
sys.exit(2)
if args.output_format != "json":
if args.config_file:
logger.info("using config: %s", args.config_file)
logger.info("running on Python %d.%d.%d", sys.version_info.major,
sys.version_info.minor, sys.version_info.micro)
# initiate file discovery step within Bandit Manager
b_mgr.discover_files(args.targets, args.recursive, args.excluded_paths)
if not b_mgr.b_ts.tests:
logger.error('No tests would be run, please check the profile.')
sys.exit(2)
# initiate execution of tests within Bandit Manager
b_mgr.run_tests()
logger.debug(b_mgr.b_ma)
logger.debug(b_mgr.metrics)
# trigger output of results by Bandit Manager
sev_level = constants.RANKING[args.severity - 1]
conf_level = constants.RANKING[args.confidence - 1]
b_mgr.output_results(args.context_lines,
sev_level,
conf_level,
args.output_file,
args.output_format)
# return an exit code of 1 if there are results, 0 otherwise
if b_mgr.results_count(sev_filter=sev_level, conf_filter=conf_level) > 0:
sys.exit(1)
else:
sys.exit(0)
|
ValueError
|
dataset/ETHPy150Open openstack/bandit/bandit/cli/main.py/main
|
1,475
|
def handle(self, fn_name, action, *args, **kwds):
self.parent.calls.append((self, fn_name, args, kwds))
if action is None:
return None
elif action == "return self":
return self
elif action == "return response":
res = MockResponse(200, "OK", {}, "")
return res
elif action == "return request":
return Request("http://blah/")
elif action.startswith("error"):
code = action[action.rfind(" ")+1:]
try:
code = int(code)
except __HOLE__:
pass
res = MockResponse(200, "OK", {}, "")
return self.parent.error("http", args[0], res, code, "", {})
elif action == "raise":
raise urllib2.URLError("blah")
assert False
|
ValueError
|
dataset/ETHPy150Open francelabs/datafari/windows/python/Lib/test/test_urllib2.py/MockHandler.handle
|
1,476
|
def test_file(self):
import rfc822, socket
h = urllib2.FileHandler()
o = h.parent = MockOpener()
TESTFN = test_support.TESTFN
urlpath = sanepathname2url(os.path.abspath(TESTFN))
towrite = "hello, world\n"
urls = [
"file://localhost%s" % urlpath,
"file://%s" % urlpath,
"file://%s%s" % (socket.gethostbyname('localhost'), urlpath),
]
try:
localaddr = socket.gethostbyname(socket.gethostname())
except socket.gaierror:
localaddr = ''
if localaddr:
urls.append("file://%s%s" % (localaddr, urlpath))
for url in urls:
f = open(TESTFN, "wb")
try:
try:
f.write(towrite)
finally:
f.close()
r = h.file_open(Request(url))
try:
data = r.read()
headers = r.info()
respurl = r.geturl()
finally:
r.close()
stats = os.stat(TESTFN)
modified = rfc822.formatdate(stats.st_mtime)
finally:
os.remove(TESTFN)
self.assertEqual(data, towrite)
self.assertEqual(headers["Content-type"], "text/plain")
self.assertEqual(headers["Content-length"], "13")
self.assertEqual(headers["Last-modified"], modified)
self.assertEqual(respurl, url)
for url in [
"file://localhost:80%s" % urlpath,
"file:///file_does_not_exist.txt",
"file://%s:80%s/%s" % (socket.gethostbyname('localhost'),
os.getcwd(), TESTFN),
"file://somerandomhost.ontheinternet.com%s/%s" %
(os.getcwd(), TESTFN),
]:
try:
f = open(TESTFN, "wb")
try:
f.write(towrite)
finally:
f.close()
self.assertRaises(urllib2.URLError,
h.file_open, Request(url))
finally:
os.remove(TESTFN)
h = urllib2.FileHandler()
o = h.parent = MockOpener()
# XXXX why does // mean ftp (and /// mean not ftp!), and where
# is file: scheme specified? I think this is really a bug, and
# what was intended was to distinguish between URLs like:
# file:/blah.txt (a file)
# file://localhost/blah.txt (a file)
# file:///blah.txt (a file)
# file://ftp.example.com/blah.txt (an ftp URL)
for url, ftp in [
("file://ftp.example.com//foo.txt", True),
("file://ftp.example.com///foo.txt", False),
# XXXX bug: fails with OSError, should be URLError
("file://ftp.example.com/foo.txt", False),
("file://somehost//foo/something.txt", True),
("file://localhost//foo/something.txt", False),
]:
req = Request(url)
try:
h.file_open(req)
# XXXX remove OSError when bug fixed
except (urllib2.URLError, __HOLE__):
self.assertTrue(not ftp)
else:
self.assertTrue(o.req is req)
self.assertEqual(req.type, "ftp")
self.assertEqual(req.type == "ftp", ftp)
|
OSError
|
dataset/ETHPy150Open francelabs/datafari/windows/python/Lib/test/test_urllib2.py/HandlerTests.test_file
|
1,477
|
def test_redirect(self):
from_url = "http://example.com/a.html"
to_url = "http://example.com/b.html"
h = urllib2.HTTPRedirectHandler()
o = h.parent = MockOpener()
# ordinary redirect behaviour
for code in 301, 302, 303, 307:
for data in None, "blah\nblah\n":
method = getattr(h, "http_error_%s" % code)
req = Request(from_url, data)
req.add_header("Nonsense", "viking=withhold")
req.timeout = socket._GLOBAL_DEFAULT_TIMEOUT
if data is not None:
req.add_header("Content-Length", str(len(data)))
req.add_unredirected_header("Spam", "spam")
try:
method(req, MockFile(), code, "Blah",
MockHeaders({"location": to_url}))
except urllib2.HTTPError:
# 307 in response to POST requires user OK
self.assertEqual(code, 307)
self.assertIsNotNone(data)
self.assertEqual(o.req.get_full_url(), to_url)
try:
self.assertEqual(o.req.get_method(), "GET")
except __HOLE__:
self.assertTrue(not o.req.has_data())
# now it's a GET, there should not be headers regarding content
# (possibly dragged from before being a POST)
headers = [x.lower() for x in o.req.headers]
self.assertNotIn("content-length", headers)
self.assertNotIn("content-type", headers)
self.assertEqual(o.req.headers["Nonsense"],
"viking=withhold")
self.assertNotIn("Spam", o.req.headers)
self.assertNotIn("Spam", o.req.unredirected_hdrs)
# loop detection
req = Request(from_url)
req.timeout = socket._GLOBAL_DEFAULT_TIMEOUT
def redirect(h, req, url=to_url):
h.http_error_302(req, MockFile(), 302, "Blah",
MockHeaders({"location": url}))
# Note that the *original* request shares the same record of
# redirections with the sub-requests caused by the redirections.
# detect infinite loop redirect of a URL to itself
req = Request(from_url, origin_req_host="example.com")
count = 0
req.timeout = socket._GLOBAL_DEFAULT_TIMEOUT
try:
while 1:
redirect(h, req, "http://example.com/")
count = count + 1
except urllib2.HTTPError:
# don't stop until max_repeats, because cookies may introduce state
self.assertEqual(count, urllib2.HTTPRedirectHandler.max_repeats)
# detect endless non-repeating chain of redirects
req = Request(from_url, origin_req_host="example.com")
count = 0
req.timeout = socket._GLOBAL_DEFAULT_TIMEOUT
try:
while 1:
redirect(h, req, "http://example.com/%d" % count)
count = count + 1
except urllib2.HTTPError:
self.assertEqual(count,
urllib2.HTTPRedirectHandler.max_redirections)
|
AttributeError
|
dataset/ETHPy150Open francelabs/datafari/windows/python/Lib/test/test_urllib2.py/HandlerTests.test_redirect
|
1,478
|
def test_HTTPError_interface_call(self):
"""
        Issue 15701 - HTTPError interface has info method available from URLError.
"""
err = urllib2.HTTPError(msg='something bad happened', url=None,
code=None, hdrs='Content-Length:42', fp=None)
self.assertTrue(hasattr(err, 'reason'))
assert hasattr(err, 'reason')
assert hasattr(err, 'info')
assert callable(err.info)
try:
err.info()
except __HOLE__:
self.fail("err.info() failed")
self.assertEqual(err.info(), "Content-Length:42")
|
AttributeError
|
dataset/ETHPy150Open francelabs/datafari/windows/python/Lib/test/test_urllib2.py/RequestTests.test_HTTPError_interface_call
|
1,479
|
def __init__(self, addr, conf, log, fd=None):
if fd is None:
try:
st = os.stat(addr)
except __HOLE__ as e:
if e.args[0] != errno.ENOENT:
raise
else:
if stat.S_ISSOCK(st.st_mode):
os.remove(addr)
else:
raise ValueError("%r is not a socket" % addr)
super(UnixSocket, self).__init__(addr, conf, log, fd=fd)
|
OSError
|
dataset/ETHPy150Open chalasr/Flask-P2P/venv/lib/python2.7/site-packages/gunicorn/sock.py/UnixSocket.__init__
|
1,480
|
def process_request(self, request):
try:
auth = request.GET.get('auth')
except __HOLE__:
# Django can throw an IOError when trying to read the GET
# data.
return
if auth is None or (request.user and request.user.is_authenticated()):
return
user = authenticate(auth=auth)
if user and user.is_active:
login(request, user)
msg = _lazy(u'You have been automatically logged in.')
messages.success(request, msg)
|
IOError
|
dataset/ETHPy150Open mozilla/kitsune/kitsune/users/middleware.py/TokenLoginMiddleware.process_request
|
1,481
|
def chunk(self):
if not self.iter:
return b""
try:
return six.next(self.iter)
except __HOLE__:
self.iter = None
return b""
|
StopIteration
|
dataset/ETHPy150Open RoseOu/flasky/venv/lib/python2.7/site-packages/gunicorn/http/unreader.py/IterUnreader.chunk
|
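A Python 3 sketch of the drain-until-exhausted pattern above, without the six shim (the sentinel handling mirrors the record):

class IterUnreader:
    def __init__(self, iterable):
        self.iter = iter(iterable)

    def chunk(self):
        if self.iter is None:
            return b""
        try:
            return next(self.iter)      # six.next() in the py2/py3 original
        except StopIteration:
            self.iter = None            # remember exhaustion for later calls
            return b""

u = IterUnreader([b"ab", b"cd"])
print(u.chunk(), u.chunk(), u.chunk())  # b'ab' b'cd' b''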
1,482
|
def __getitem__(self, key):
# This is a workaround to make TaskResource non-iterable
# over simple index-based iteration
try:
int(key)
raise StopIteration
except __HOLE__:
pass
if key not in self._data:
self._data[key] = self._deserialize(key, None)
return self._data.get(key)
|
ValueError
|
dataset/ETHPy150Open robgolding63/tasklib/tasklib/task.py/TaskResource.__getitem__
|
1,483
|
def __bool__(self):
if self._result_cache is not None:
return bool(self._result_cache)
try:
next(iter(self))
except __HOLE__:
return False
return True
|
StopIteration
|
dataset/ETHPy150Open robgolding63/tasklib/tasklib/task.py/TaskQuerySet.__bool__
|
1,484
|
def test_options(self):
finish = rdbms_conf.DATABASES['sqlitee'].OPTIONS.set_for_testing({'nonsensical': None})
try:
self.client.get(reverse('rdbms:api_tables', args=['sqlitee', self.database]))
except __HOLE__, e:
assert_true('nonsensical' in str(e), e)
finish()
|
TypeError
|
dataset/ETHPy150Open cloudera/hue/apps/rdbms/src/rdbms/tests.py/TestAPI.test_options
|
1,485
|
def _class_count(objects):
"""List the most common object classes"""
totals = {}
for obj in objects:
try:
cls = obj.__class__
except __HOLE__:
cls = type(obj)
name = "%s.%s" % (cls.__module__, cls.__name__)
try:
totals[name].append(obj)
except KeyError:
totals[name] = [obj]
totals = totals.items()
totals.sort(lambda a,b: cmp(len(a[1]),len(b[1])))
totals = totals[-20:] # Is this a reasonable filter?
return totals
|
AttributeError
|
dataset/ETHPy150Open marineam/nagcat/python/nagcat/monitor_api.py/_class_count
|
1,486
|
def main():
while True:
task = socket.recv_json()
# Convert the JSON dict we got into a Task object.
try:
task = to_task(task)
except __HOLE__ as e:
# sisyphus is only exposed to internal components within Galah.
# This is a very serious error signifying a problem with Galah's
# logic.
logger.exception("Error converting request to Task object.")
socket.send_json({
"success": False,
"error_string": str(e)
})
continue
logger.info("Received request for task %s.", task.name)
# Check if this is a recognized command.
if task.name not in task_list.keys():
# Same as above, this is a serious error signifying a problem with
# Galah's logic.
logger.error("Unknown task '%s'.", task.name)
socket.send_json({
"success": False,
"error_string": "Unknown task '%s'" % task.name
})
continue
# All is good, place the task in the queue
task_queue.put(task)
socket.send_json({"success": True})
|
RuntimeError
|
dataset/ETHPy150Open ucrcsedept/galah/galah/sisyphus/sisyphus.py/main
|
1,487
|
def check_proxy_setting():
"""
If the environmental variable 'HTTP_PROXY' is set, it will most likely be
in one of these forms:
proxyhost:8080
http://proxyhost:8080
urlllib2 requires the proxy URL to start with 'http://'
This routine does that, and returns the transport for xmlrpc.
"""
try:
http_proxy = os.environ['HTTP_PROXY']
except __HOLE__:
return
if not http_proxy.startswith('http://'):
match = re.match('(http://)?([-_\.A-Za-z]+):(\d+)', http_proxy)
#if not match:
# raise Exception('Proxy format not recognised: [%s]' % http_proxy)
os.environ['HTTP_PROXY'] = 'http://%s:%s' % (match.group(2),
match.group(3))
return
|
KeyError
|
dataset/ETHPy150Open cakebread/yolk/yolk/pypi.py/check_proxy_setting
|
1,488
|
def get_xmlrpc_server(self):
"""
Returns PyPI's XML-RPC server instance
"""
check_proxy_setting()
if os.environ.has_key('XMLRPC_DEBUG'):
debug = 1
else:
debug = 0
try:
return xmlrpclib.Server(XML_RPC_SERVER, transport=ProxyTransport(), verbose=debug)
except __HOLE__:
self.logger("ERROR: Can't connect to XML-RPC server: %s" \
% XML_RPC_SERVER)
|
IOError
|
dataset/ETHPy150Open cakebread/yolk/yolk/pypi.py/CheeseShop.get_xmlrpc_server
|
1,489
|
def rfc3339(date, utc=False, use_system_timezone=True):
'''
Return a string formatted according to the :RFC:`3339`. If called with
`utc=True`, it normalizes `date` to the UTC date. If `date` does not have
any timezone information, uses the local timezone::
>>> d = datetime.datetime(2008, 4, 2, 20)
>>> rfc3339(d, utc=True, use_system_timezone=False)
'2008-04-02T20:00:00Z'
>>> rfc3339(d) # doctest: +ELLIPSIS
'2008-04-02T20:00:00...'
    If called with `use_system_timezone=False` don't use the local timezone if
    `date` does not have timezone information and consider the offset to UTC
to be zero::
>>> rfc3339(d, use_system_timezone=False)
'2008-04-02T20:00:00+00:00'
`date` must be a `datetime.datetime`, `datetime.date` or a timestamp as
returned by `time.time()`::
>>> rfc3339(0, utc=True, use_system_timezone=False)
'1970-01-01T00:00:00Z'
>>> rfc3339(datetime.date(2008, 9, 6), utc=True,
... use_system_timezone=False)
'2008-09-06T00:00:00Z'
>>> rfc3339(datetime.date(2008, 9, 6),
... use_system_timezone=False)
'2008-09-06T00:00:00+00:00'
>>> rfc3339('foo bar')
Traceback (most recent call last):
...
    TypeError: expected datetime, got str instead
'''
# Check if `date` is a timestamp.
try:
if utc:
return _utc_string(datetime.datetime.utcfromtimestamp(date))
else:
date = datetime.datetime.fromtimestamp(date)
except __HOLE__:
pass
if isinstance(date, datetime.date):
utcoffset = _utc_offset(date, use_system_timezone)
if utc:
if not isinstance(date, datetime.datetime):
date = datetime.datetime(*date.timetuple()[:3])
return _utc_string(date + datetime.timedelta(seconds=utcoffset))
else:
return date.strftime('%Y-%m-%dT%H:%M:%S') + _timezone(utcoffset)
else:
        raise TypeError('expected %s, got %s instead' %
                        (datetime.datetime.__name__, date.__class__.__name__))
|
TypeError
|
dataset/ETHPy150Open eugenkiss/Simblin/simblin/lib/rfc3339.py/rfc3339
|
1,490
|
def run_mercurial_command(hg_command):
hg_executable = os.environ.get("HG", "hg")
hg_command_tuple = hg_command.split()
hg_command_tuple.insert(0, hg_executable)
# If you install your own mercurial version in your home
# hg_executable does not always have execution permission.
if not os.access(hg_executable, os.X_OK):
hg_command_tuple.insert(0, sys.executable)
try:
hg_subprocess = Popen(hg_command_tuple, stdout=PIPE, stderr=PIPE)
except __HOLE__ as e:
print("Can't find the hg executable!", file=sys.stderr)
print(e)
sys.exit(1)
hg_out, hg_err = hg_subprocess.communicate()
if len(hg_err) > 0:
raise MercurialRuntimeError(hg_err)
return hg_out
|
OSError
|
dataset/ETHPy150Open rizar/attention-lvcsr/libs/Theano/theano/misc/hooks/check_whitespace.py/run_mercurial_command
|
1,491
|
def delete(self):
dir = os.path.dirname(os.path.join(settings.MEDIA_ROOT, self.fileobject.name))
try: self.fileobject.delete()
except __HOLE__: pass
try: os.rmdir(dir)
except OSError: pass
return Model.delete(self)
|
OSError
|
dataset/ETHPy150Open peterkuma/fileshackproject/fileshack/models.py/Item.delete
|
1,492
|
def name(self):
try:
return os.path.basename(self.fileobject.name)
except (OSError,__HOLE__):
return None
|
ValueError
|
dataset/ETHPy150Open peterkuma/fileshackproject/fileshack/models.py/Item.name
|
1,493
|
def as_scalar(x, name=None):
from ..tensor import TensorType, scalar_from_tensor
if isinstance(x, gof.Apply):
if len(x.outputs) != 1:
raise ValueError("It is ambiguous which output of a multi-output"
" Op has to be fetched.", x)
else:
x = x.outputs[0]
if isinstance(x, Variable):
if isinstance(x.type, Scalar):
return x
elif isinstance(x.type, TensorType) and x.ndim == 0:
return scalar_from_tensor(x)
else:
raise TypeError("Variable type field must be a Scalar.", x, x.type)
try:
return constant(x)
except __HOLE__:
raise TypeError("Cannot convert %s to Scalar" % x, type(x))
|
TypeError
|
dataset/ETHPy150Open rizar/attention-lvcsr/libs/Theano/theano/scalar/basic.py/as_scalar
|
1,494
|
def dtype_specs(self):
try:
# To help debug dtype/typenum problem, here is code to get
# the list of numpy typenum. This list change between 32
# and 64 bit platform and probably also also between
# Windows and Linux.
# NOTE: equivalent type on a platform can have different typenum.
# This is the source of all dtype/typenum problem found up to
# now, as Theano always expect the exact typenum that
# correspond to our supported dtype.
"""
for dtype in ['int8', 'uint8', 'short', 'ushort', 'intc', 'uintc',
'longlong', 'ulonglong', 'single', 'double',
'longdouble', 'csingle', 'cdouble', 'clongdouble',
'float32', 'float64', 'int8', 'int16', 'int32',
'int64', 'uint8', 'uint16', 'uint32', 'uint64',
'complex64', 'complex128', 'float', 'double',
'int', 'uint']:
print(dtype, np.zeros(1, dtype=dtype).dtype.num)
"""
return { # dtype: (py_type, c_type, cls_name)
'float16': (numpy.float16, 'npy_float16', 'Float16'),
'float32': (numpy.float32, 'npy_float32', 'Float32'),
'float64': (numpy.float64, 'npy_float64', 'Float64'),
'complex128': (numpy.complex128, 'theano_complex128',
'Complex128'),
'complex64': (numpy.complex64, 'theano_complex64', 'Complex64'),
'uint8': (numpy.uint8, 'npy_uint8', 'UInt8'),
'int8': (numpy.int8, 'npy_int8', 'Int8'),
'uint16': (numpy.uint16, 'npy_uint16', 'UInt16'),
'int16': (numpy.int16, 'npy_int16', 'Int16'),
'uint32': (numpy.uint32, 'npy_uint32', 'UInt32'),
'int32': (numpy.int32, 'npy_int32', 'Int32'),
'uint64': (numpy.uint64, 'npy_uint64', 'UInt64'),
'int64': (numpy.int64, 'npy_int64', 'Int64')
}[self.dtype]
except __HOLE__:
raise TypeError("Unsupported dtype for %s: %s" % (
self.__class__.__name__, self.dtype))
|
KeyError
|
dataset/ETHPy150Open rizar/attention-lvcsr/libs/Theano/theano/scalar/basic.py/Scalar.dtype_specs
|
1,495
|
def init_name(self):
"""
Return a readable string representation of self.fgraph.
"""
try:
rval = self.name
except __HOLE__:
if 0:
l = []
for n in self.fgraph.toposort():
if hasattr(n.op, "name") and n.op.name is not None:
v = n.op.name
if v.startswith("Composite"):
v = v[len("Composite"):]
else:
v = n.op.__class__.__name__
l.append(v)
rval = "Composite{" + ",".join(l) + "}"
else:
for i, r in enumerate(self.fgraph.inputs):
r.name = 'i%i' % i
for i, r in enumerate(self.fgraph.outputs):
r.name = 'o%i' % i
io = set(self.fgraph.inputs + self.fgraph.outputs)
for i, r in enumerate(self.fgraph.variables):
if r not in io and len(r.clients) > 1:
r.name = 't%i' % i
rval = "Composite{%s}" % ', '.join([pprint(output) for output
in self.fgraph.outputs])
self.name = rval
|
AttributeError
|
dataset/ETHPy150Open rizar/attention-lvcsr/libs/Theano/theano/scalar/basic.py/Composite.init_name
|
1,496
|
def __getitem__(self, idx):
"""Return :class:`Mat` block with row and column given by ``idx``
or a given row of blocks."""
try:
i, j = idx
return self.blocks[i][j]
except __HOLE__:
return self.blocks[idx]
|
TypeError
|
dataset/ETHPy150Open OP2/PyOP2/pyop2/petsc_base.py/Mat.__getitem__
|
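The masked branch above tells m[i, j] apart from m[i] by letting tuple unpacking fail; a minimal sketch over nested lists (the class here is illustrative, not PETSc):

class Blocks:
    def __init__(self, rows):
        self.rows = rows

    def __getitem__(self, idx):
        try:
            i, j = idx              # works when idx is a 2-tuple: m[i, j]
        except TypeError:           # a bare int cannot be unpacked
            return self.rows[idx]   # m[i] -> whole row
        return self.rows[i][j]

m = Blocks([[1, 2], [3, 4]])
print(m[0, 1])  # 2
print(m[1])     # [3, 4]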
1,497
|
@collective
def _solve(self, A, x, b):
self._set_parameters()
# Set up the operator only if it has changed
if not self.getOperators()[0] == A.handle:
self.setOperators(A.handle)
if self.parameters['pc_type'] == 'fieldsplit' and A.sparsity.shape != (1, 1):
ises = A.sparsity.toset.field_ises
fises = [(str(i), iset) for i, iset in enumerate(ises)]
self.getPC().setFieldSplitIS(*fises)
if self.parameters['plot_convergence']:
self.reshist = []
def monitor(ksp, its, norm):
self.reshist.append(norm)
debug("%3d KSP Residual norm %14.12e" % (its, norm))
self.setMonitor(monitor)
# Not using super here since the MRO would call base.Solver.solve
with timed_region("PETSc Krylov solver"):
with b.vec_ro as bv:
with x.vec as xv:
PETSc.KSP.solve(self, bv, xv)
if self.parameters['plot_convergence']:
self.cancelMonitor()
try:
import pylab
pylab.semilogy(self.reshist)
pylab.title('Convergence history')
pylab.xlabel('Iteration')
pylab.ylabel('Residual norm')
pylab.savefig('%sreshist_%04d.png' %
(self.parameters['plot_prefix'], self._count))
except __HOLE__:
warning("pylab not available, not plotting convergence history.")
r = self.getConvergedReason()
debug("Converged reason: %s" % self._reasons[r])
debug("Iterations: %s" % self.getIterationNumber())
debug("Residual norm: %s" % self.getResidualNorm())
if r < 0:
msg = "KSP Solver failed to converge in %d iterations: %s (Residual norm: %e)" \
% (self.getIterationNumber(), self._reasons[r], self.getResidualNorm())
if self.parameters['error_on_nonconvergence']:
raise RuntimeError(msg)
else:
warning(msg)
|
ImportError
|
dataset/ETHPy150Open OP2/PyOP2/pyop2/petsc_base.py/Solver._solve
|
1,498
|
def string_to_datetime(datetimestring, date_formats=None):
date_formats = date_formats or [
'%Y-%m-%d %H:%M:%S', '%Y-%m-%d %H:%M:%S.%f', '%Y-%m-%dT%H:%M:%S.%fZ',
'%Y-%m-%dT%H:%M:%S.%f', "%Y-%m-%dT%H:%M:%SZ", "%Y-%m-%dT%H:%M:%S"]
for dateformat in date_formats:
try:
return datetime.datetime.strptime(datetimestring, dateformat)
except __HOLE__:
continue
    else:
        # A bare 'raise' here has no active exception on Python 3; raise explicitly
        raise ValueError('%r does not match any of the accepted formats %r'
                         % (datetimestring, date_formats))
|
ValueError
|
dataset/ETHPy150Open openstack/python-barbicanclient/functionaltests/utils.py/string_to_datetime
|
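Usage of the format-probing loop above, assuming strptime raises ValueError for every non-matching format (the wrapper is illustrative):

import datetime

def parse_any(s, formats=('%Y-%m-%d %H:%M:%S', '%Y-%m-%dT%H:%M:%SZ')):
    for fmt in formats:
        try:
            return datetime.datetime.strptime(s, fmt)
        except ValueError:
            continue                 # try the next format
    raise ValueError('%r matches none of %r' % (s, formats))

print(parse_any('2016-04-01T12:30:00Z'))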
1,499
|
def isImageLibAvailable():
try:
from ConvertPackage.ConvertFile import convertFile
return True
except __HOLE__:
return False
|
ImportError
|
dataset/ETHPy150Open jiaweihli/manga_downloader/src/util.py/isImageLibAvailable
|