| Unnamed: 0 (int64, 0–10k) | function (string, 79–138k chars) | label (20 classes) | info (string, 42–261 chars) |
|---|---|---|---|
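Each row below gives an integer index, a Python function whose masked exception type is replaced by the `__HOLE__` placeholder, a `label` naming that exception class, and an `info` path into the ETHPy150Open corpus. As a minimal sketch of how such a table might be consumed, assuming the rows are exported to a CSV file with these column names (the file name below is hypothetical):

import pandas as pd

# Hypothetical export of the table above; the dump does not specify the
# actual storage format.
df = pd.read_csv("ethpy150_exception_labels.csv")

# Distribution of the 20 exception classes used as labels.
print(df["label"].value_counts())

# Every masked function should contain the __HOLE__ placeholder.
assert df["function"].str.contains("__HOLE__", regex=False).all()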
7,800
|
def backup(self, target, skipDirs = False):
    target = self._normpath(target)
    try:
        sb = os.lstat(target)
    except __HOLE__:
        sb = None
    if sb:
        path = os.path.dirname(target)
        name = os.path.basename(target)
        tmpfd, tmpname = tempfile.mkstemp(name, '.ct', path)
        os.close(tmpfd)
        os.unlink(tmpname)
        if not stat.S_ISDIR(sb.st_mode):
            self._backup(target, tmpname, sb)
            os.link(target, tmpname)
        elif not skipDirs:
            self._backdir(target, sb)
|
OSError
|
dataset/ETHPy150Open sassoftware/conary/conary/local/journal.py/JobJournal.backup
|
7,801
|
def commit(self):
    for kind, entry in self:
        if kind == JOURNAL_ENTRY_BACKUP:
            os.unlink(self.root + entry.new())
        elif kind == JOURNAL_ENTRY_TRYCLEANUPDIR:
            # XXX would be nice to avoid the try/except here with some C
            try:
                os.rmdir(self.root + entry.new())
            except __HOLE__:
                pass
    self.close()
|
OSError
|
dataset/ETHPy150Open sassoftware/conary/conary/local/journal.py/JobJournal.commit
|
7,802
|
def revert(self):
    for kind, entry in self:
        try:
            if kind == JOURNAL_ENTRY_BACKUP:
                what = "restore"
                path = self.root + entry.old()
                os.rename(self.root + entry.new(), path)
                os.chown(path, entry.inode.uid(), entry.inode.gid())
                os.chmod(path, entry.inode.perms())
                os.utime(path, (entry.inode.mtime(), entry.inode.mtime()))
            elif kind == JOURNAL_ENTRY_RENAME:
                what = "restore"
                os.rename(self.root + entry.new(), self.root + entry.old())
            elif kind == JOURNAL_ENTRY_CREATE:
                what = "remove"
                os.unlink(self.root + entry.new())
            elif kind == JOURNAL_ENTRY_MKDIR:
                what = "remove"
                os.rmdir(self.root + entry.new())
            elif kind == JOURNAL_ENTRY_REMOVE:
                pass
            elif kind == JOURNAL_ENTRY_BACKDIR:
                pass
            elif kind == JOURNAL_ENTRY_TRYCLEANUPDIR:
                pass
            else:
                self.callback.warning('unknown journal entry %d', kind)
        except __HOLE__, e:
            self.callback.warning('could not %s file %s: %s',
                                  what, self.root + entry.new(), e.strerror)
    self.close()
|
OSError
|
dataset/ETHPy150Open sassoftware/conary/conary/local/journal.py/JobJournal.revert
|
7,803
|
def constantStrEqual(str1, str2):
    '''Do a constant-time comparison of str1 and str2, returning **True**
    if they are equal, **False** otherwise.
    :param str str1: first string to compare
    :param str str2: second string to compare
    :returns: **bool** **True** if str1 == str2, **False** otherwise
    '''
    try:
        from hmac import compare_digest
        return compare_digest(str1, str2)
    except __HOLE__:
        pass
    if len(str1) != len(str2):
        # we've already failed at this point, but loop anyway
        res = 1
        comp1 = bytearray(str2)
        comp2 = bytearray(str2)
    else:
        res = 0
        comp1 = bytearray(str1)
        comp2 = bytearray(str2)
    for a, b in izip(comp1, comp2):
        res |= a ^ b
    return res == 0
|
ImportError
|
dataset/ETHPy150Open nskinkel/oppy/oppy/crypto/util.py/constantStrEqual
|
7,804
|
def validCertTime(cert):
    '''Verify that TLS certificate *cert*'s time is not earlier than
    cert.notBefore and not later than cert.notAfter.
    :param OpenSSL.crypto.X509 cert: TLS Certificate to verify times of
    :returns: **bool** **True** if cert.notBefore < now < cert.notAfter,
        **False** otherwise
    '''
    now = datetime.now()
    try:
        validAfter = datetime.strptime(cert.get_notBefore(), '%Y%m%d%H%M%SZ')
        validUntil = datetime.strptime(cert.get_notAfter(), '%Y%m%d%H%M%SZ')
        return validAfter < now < validUntil
    except __HOLE__:
        return False
|
ValueError
|
dataset/ETHPy150Open nskinkel/oppy/oppy/crypto/util.py/validCertTime
|
7,805
|
def MapIdentifiers(seqs, pattern):
    rx = re.compile(pattern)
    for k, s in seqs.items():
        try:
            nk = rx.search(k).groups()[0]
        except __HOLE__:
            raise ValueError(
                "identifier can not be parsed from '%s' "
                "pattern='%s'" % (k, pattern))
        del seqs[k]
        seqs[nk] = s
|
AttributeError
|
dataset/ETHPy150Open CGATOxford/cgat/scripts/diff_fasta.py/MapIdentifiers
|
7,806
|
def main(argv=None):
if argv is None:
argv = sys.argv
parser = E.OptionParser(version="%prog version: $Id$",
usage=globals()["__doc__"])
parser.add_option(
"-s", "--correct-gap-shift", dest="correct_shift",
action="store_true",
help="correct gap length shifts in alignments. "
"Requires alignlib_lite.py [%default]")
parser.add_option(
"-1", "--pattern1", dest="pattern1", type="string",
help="pattern to extract identifier from in identifiers1. "
"[%default]")
parser.add_option(
"-2", "--pattern2", dest="pattern2", type="string",
help="pattern to extract identifier from in identifiers2. "
"[%default]")
parser.add_option(
"-o", "--output-section", dest="output", type="choice",
action="append",
choices=("diff", "missed", "seqdiff"),
help="what to output [%default]")
parser.set_defaults(correct_shift=False,
pattern1="(\S+)",
pattern2="(\S+)",
output=[])
(options, args) = E.Start(parser)
if len(args) != 2:
raise ValueError("two files needed to compare.")
if options.correct_shift:
try:
import alignlib_lite
except __HOLE__:
raise ImportError(
"option --correct-shift requires alignlib_lite.py_ "
"but alignlib not found")
seqs1 = dict([
(x.title, x.sequence) for x in FastaIterator.iterate(
IOTools.openFile(args[0], "r"))])
seqs2 = dict([
(x.title, x.sequence) for x in FastaIterator.iterate(
IOTools.openFile(args[1], "r"))])
if not seqs1:
raise ValueError("first file %s is empty." % (args[0]))
if not seqs2:
raise ValueError("second file %s is empty." % (args[1]))
MapIdentifiers(seqs1, options.pattern1)
MapIdentifiers(seqs2, options.pattern2)
nsame = 0
nmissed1 = 0
nmissed2 = 0
ndiff = 0
ndiff_first = 0
ndiff_last = 0
ndiff_prefix = 0
ndiff_selenocysteine = 0
ndiff_masked = 0
nfixed = 0
found2 = {}
write_missed1 = "missed" in options.output
write_missed2 = "missed" in options.output
write_seqdiff = "seqdiff" in options.output
write_diff = "diff" in options.output or write_seqdiff
for k in seqs1:
if k not in seqs2:
nmissed1 += 1
if write_missed1:
options.stdout.write("---- %s ---- %s\n" % (k, "missed1"))
continue
found2[k] = 1
s1 = seqs1[k].upper()
s2 = seqs2[k].upper()
m = min(len(s1), len(s2))
if s1 == s2:
nsame += 1
else:
status = "other"
ndiff += 1
if s1[1:] == s2[1:]:
ndiff_first += 1
status = "first"
elif s1[:m] == s2[:m]:
ndiff_prefix += 1
status = "prefix"
elif s1[:-1] == s2[:-1]:
ndiff_last += 1
status = "last"
else:
if len(s1) == len(s2):
# get all differences: the first and last residues
# can be different for peptide sequences when
# comparing my translations with ensembl peptides.
differences = []
for x in range(1, len(s1) - 1):
if s1[x] != s2[x]:
differences.append((s1[x], s2[x]))
l = len(differences)
# check for Selenocysteins
if len(filter(lambda x: x[0] == "U" or x[1] == "U",
differences)) == l:
ndiff_selenocysteine += 1
status = "selenocysteine"
# check for masked residues
elif len(filter(lambda x: x[0] in "NX" or x[1] in "NX",
differences)) == l:
ndiff_masked += 1
status = "masked"
# correct for different gap lengths
if options.correct_shift:
map_a2b = alignlib_lite.py_makeAlignmentVector()
a, b = 0, 0
keep = False
x = 0
while x < m and not (a == len(s1) and b == len(s2)):
try:
if s1[a] != s2[b]:
while s1[a] == "N" and s2[b] != "N":
a += 1
while s1[a] != "N" and s2[b] == "N":
b += 1
if s1[a] != s2[b]:
break
except IndexError:
print "# index error for %s: x=%i, a=%i, b=%i, l1=%i, l2=%i" % (k, x, a, b, len(s1), len(s2))
break
a += 1
b += 1
map_a2b.addPairExplicit(a, b, 0.0)
# check if we have reached the end:
else:
keep = True
nfixed += 1
f = alignlib_lite.py_AlignmentFormatEmissions(map_a2b)
print "fix\t%s\t%s" % (k, str(f))
if not keep:
print "# warning: not fixable: %s" % k
if write_diff:
options.stdout.write("---- %s ---- %s\n" % (k, status))
if write_seqdiff:
options.stdout.write("< %s\n> %s\n" % (seqs1[k], seqs2[k]))
for k in seqs2.keys():
if k not in found2:
nmissed2 += 1
if write_missed2:
options.stdout.write("---- %s ---- %s\n" % (k, "missed2"))
options.stdlog.write("""# Legend:
# seqs1: number of sequences in set 1
# seqs2: number of sequences in set 2
# same: number of identical sequences
# diff: number of sequences with differences
# nmissed1: sequences in set 1 that are not found in set 2
# nmissed2: sequences in set 2 that are not found in set 1
# Type of sequence differences
# first: only the first residue is different
# last: only the last residue is different
# prefix: one sequence is prefix of the other
# selenocysteine: difference due to selenocysteines
# masked: difference due to masked residues
# fixed: fixed differences
# other: other differences
""")
E.info("seqs1=%i, seqs2=%i, same=%i, ndiff=%i, nmissed1=%i, nmissed2=%i" %
(len(seqs1), len(seqs2), nsame, ndiff, nmissed1, nmissed2))
E.info(
"ndiff=%i: first=%i, last=%i, prefix=%i, selenocysteine=%i, masked=%i, fixed=%i, other=%i" %
(ndiff, ndiff_first, ndiff_last, ndiff_prefix,
ndiff_selenocysteine, ndiff_masked, nfixed,
ndiff - ndiff_first - ndiff_last - ndiff_prefix -
ndiff_selenocysteine - ndiff_masked - nfixed))
E.Stop()
|
ImportError
|
dataset/ETHPy150Open CGATOxford/cgat/scripts/diff_fasta.py/main
|
7,807
|
@classmethod
def load_config_file(cls, filename, filetype=None, paths=None):
    if not paths:
        paths = ['.']
    if filetype is None:
        if (filename.lower().endswith('.yaml') or
                filename.lower().endswith('.yml')):
            filetype = 'yaml'
        elif filename.lower().endswith('.json'):
            filetype = 'json'
        elif (filename.lower().endswith('.conf')
                or filename.lower().endswith('.ini')):
            filetype = 'ini'
        else:
            filetype = 'yaml'
    data = cls._load_file(filename, paths)
    if data is None:
        raise ConfigurationError(
            "Cannot find or read config file: %s" % filename)
    try:
        loader = getattr(cls, "_load_%s_config" % filetype)
    except __HOLE__:
        raise ConfigurationError("Unknown config file type: %s" % filetype)
    return loader(data, filename=filename)
|
AttributeError
|
dataset/ETHPy150Open openstack/stacktach-winchester/winchester/config.py/ConfigManager.load_config_file
|
7,808
|
@classmethod
def _lookup_type(cls, type_):
    try:
        return cls._TYPES[type_]
    except __HOLE__:
        return cls._UNKNOWN_TYPE
|
KeyError
|
dataset/ETHPy150Open osrg/ryu/ryu/lib/packet/ospf.py/_TypeDisp._lookup_type
|
7,809
|
def serialize_tail(self):
    head = bytearray(struct.pack(self._PACK_STR, self.flags, 0,
                                 len(self.links)))
    try:
        return head + reduce(lambda a, b: a + b,
                             (link.serialize() for link in self.links))
    except __HOLE__:
        return head
|
TypeError
|
dataset/ETHPy150Open osrg/ryu/ryu/lib/packet/ospf.py/RouterLSA.serialize_tail
|
7,810
|
def serialize_tail(self):
    head = bytearray(struct.pack(self._PACK_STR,
                                 addrconv.ipv4.text_to_bin(self.mask),
                                 self.hello_interval, self.options, self.priority,
                                 self.dead_interval,
                                 addrconv.ipv4.text_to_bin(self.designated_router),
                                 addrconv.ipv4.text_to_bin(self.backup_router)))
    try:
        return head + reduce(lambda a, b: a + b,
                             (addrconv.ipv4.text_to_bin(
                                 n) for n in self.neighbors))
    except __HOLE__:
        return head
|
TypeError
|
dataset/ETHPy150Open osrg/ryu/ryu/lib/packet/ospf.py/OSPFHello.serialize_tail
|
7,811
|
def serialize_tail(self):
    flags = ((self.i_flag & 0x1) << 2) ^ \
            ((self.m_flag & 0x1) << 1) ^ \
            (self.ms_flag & 0x1)
    head = bytearray(struct.pack(self._PACK_STR, self.mtu,
                                 self.options, flags,
                                 self.sequence_number))
    try:
        return head + reduce(lambda a, b: a + b,
                             (hdr.serialize() for hdr in self.lsa_headers))
    except __HOLE__:
        return head
|
TypeError
|
dataset/ETHPy150Open osrg/ryu/ryu/lib/packet/ospf.py/OSPFDBDesc.serialize_tail
|
7,812
|
def serialize_tail(self):
    head = bytearray(struct.pack(self._PACK_STR, len(self.lsas)))
    try:
        return head + reduce(lambda a, b: a + b,
                             (lsa.serialize() for lsa in self.lsas))
    except __HOLE__:
        return head
|
TypeError
|
dataset/ETHPy150Open osrg/ryu/ryu/lib/packet/ospf.py/OSPFLSUpd.serialize_tail
|
7,813
|
def check_SOA_condition(self):
    """We need to check if the domain is the root domain in a zone.
    If the domain is the root domain, it will have an soa, but the
    master domain will have no soa (or it will have a a different
    soa).
    """
    try:
        self.domain
    except __HOLE__:
        return # Validation will fail eventually
    if not self.domain.soa:
        return
    root_domain = self.domain.soa.root_domain
    if root_domain is None:
        return
    if self.fqdn == root_domain.name:
        raise ValidationError("You cannot create a CNAME who's left hand "
                              "side is at the same level as an SOA")
|
ObjectDoesNotExist
|
dataset/ETHPy150Open rtucker-mozilla/mozilla_inventory/mozdns/cname/models.py/CNAME.check_SOA_condition
|
7,814
|
def validate_response(self, value, model_instance):
    """
    Validation method to ensure that the provider's response is valid for
    the given resource.
    The oEmbed 1.0 spec permits three classes of errors:
    - 404 if the provider has no resource for the requested url
    - 401 if the specified URL contains a private (non-public) resource
    - 501 if the provider cannot return a response in the requested format
    Though all HTTP errors are raised, friendly error messages are only
    provided for each of these permitted scenarios.
    """
    object_name = model_instance._meta.verbose_name
    try:
        model_instance.oembed_retrieve(suppress_http_errors=False)
    except __HOLE__, e:
        if e.code == 404:
            message = _('%s not found' % object_name)
        elif e.code == 401:
            message = _((
                'This %s is marked as private; you should link to it '
                'directly to allow the provider to regulate access control'
            ) % object_name)
        elif e.code == 501:
            message = _(
                '%s not available in the requested format' % object_name
            )
        else:
            message = _(e.msg)
        raise ValidationError(message)
    except URLError:
        raise ValidationError(_('Unable to contact server'))
|
HTTPError
|
dataset/ETHPy150Open chuckharmston/django-tumblelog/tumblelog/fields.py/OEmbedURLField.validate_response
|
7,815
|
def on_request(self, request):
if not self.ssl or self.bridge:
return request
try:
record, remaining = tls.parse_tls(request)
message = record.messages[0]
if not self.clienthello_handled:
self.clienthello_handled = True
hello = message.obj
# Force a full handshake by preventing session resumption by emptying
# session ID and SessionTicket extension. Otherwise a CCS will follow
# a ServerHello normally.
hello.session_id = []
for ext in hello.extension_list:
if ext.type == Extension.TYPE.SESSIONTICKET:
ext.raw_data = []
return record.to_bytes()
if self.injected_server:
# OpenSSL after the EarlyCCS fix should send a fatal alert
# unexpected_message (10). Some other libraries send a close_notify (0)
# so we accept that as well. Morever, if the client doesn't like the TLS
# protocol version chosen by the server (regardless of whether early
# CCS is injected), the client will send a fatal alert
# protocol_version (70).
if not (
isinstance(message, tls.types.Alert) and
((message.description == Alert.DESCRIPTION.UNEXPECTED_MESSAGE and
message.level == Alert.LEVEL.FATAL) or
(message.description == Alert.DESCRIPTION.PROTOCOL_VERSION and
message.level == Alert.LEVEL.FATAL) or
message.description == Alert.DESCRIPTION.CLOSE_NOTIFY)):
self.log(
logging.CRITICAL,
"Client is vulnerable to Early CCS attack!")
self.connection.vuln_notify(util.vuln.VULN_EARLY_CCS)
self.log_attack_event()
self.connection.close()
else:
self.log(
logging.DEBUG,
"Client not vulnerable to early CCS")
self.log_attack_event(success=False)
except __HOLE__:
# Failed to parse TLS, this is probably due to a short read of a TLS
# record.
pass
return request
|
ValueError
|
dataset/ETHPy150Open google/nogotofail/nogotofail/mitm/connection/handlers/connection/ccs.py/EarlyCCS.on_request
|
7,816
|
def on_response(self, response):
if not self.ssl or self.bridge or self.injected_server:
return response
response = self.buffer + response
self.buffer = ""
try:
index = 0
while index < len(response):
record, size = TlsRecord.from_stream(response[index:])
version = record.version
for i, message in enumerate(record.messages):
# Inject the CCS right after the ServerHello
if (isinstance(message, tls.types.HandshakeMessage)
and message.type == HandshakeMessage.TYPE.SERVER_HELLO):
response = (response[:index] +
self._inject_ccs(record, i) +
response[index+size:])
self.injected_server = True
return response
index += size
except __HOLE__:
# Failed to parse TLS, this is probably due to a short read of a TLS
# record. Buffer the response to try and get more data.
self.buffer = response
# But don't buffer too much, give up after 16k.
if len(self.buffer) > 2**14:
response = self.buffer
self.buffer = ""
return self.buffer
return ""
return response
|
ValueError
|
dataset/ETHPy150Open google/nogotofail/nogotofail/mitm/connection/handlers/connection/ccs.py/EarlyCCS.on_response
|
7,817
|
def save_score(self, line):
    "save a Score: line"
    self.blast_score = float(line.split()[2])
    s = line.split()[7]
    if s[0] == 'e':
        s = '1' + s
    try:
        self.e_value = -math.log(float(s)) / math.log(10.0)
    except (__HOLE__, OverflowError):
        self.e_value = 300.
|
ValueError
|
dataset/ETHPy150Open cjlee112/pygr/pygr/parse_blast.py/BlastHitParser.save_score
|
7,818
|
def delete(self, request, *args, **kwargs):
try:
node, user = self.get_object()
if node.remove_contributor(user, None, log=False):
update_admin_log(
user_id=self.request.user.id,
object_id=node.pk,
object_repr='Contributor',
message='User {} removed from node {}.'.format(
user.pk, node.pk
),
action_flag=CONTRIBUTOR_REMOVED
)
# Log invisibly on the OSF.
osf_log = NodeLog(
action=NodeLog.CONTRIB_REMOVED,
user=None,
params={
'project': node.parent_id,
'node': node.pk,
'contributors': user.pk
},
date=datetime.utcnow(),
should_hide=True,
)
osf_log.save()
except __HOLE__:
return page_not_found(
request,
AttributeError(
'{} with id "{}" not found.'.format(
self.context_object_name.title(),
kwargs.get('node_id')
)
)
)
return redirect(reverse_node(self.kwargs.get('node_id')))
|
AttributeError
|
dataset/ETHPy150Open CenterForOpenScience/osf.io/admin/nodes/views.py/NodeRemoveContributorView.delete
|
7,819
|
def delete(self, request, *args, **kwargs):
try:
node = self.get_object()
flag = None
osf_flag = None
message = None
if node.is_deleted:
node.is_deleted = False
node.deleted_date = None
flag = NODE_RESTORED
message = 'Node {} restored.'.format(node.pk)
osf_flag = NodeLog.NODE_CREATED
elif not node.is_registration:
node.is_deleted = True
node.deleted_date = datetime.utcnow()
flag = NODE_REMOVED
message = 'Node {} removed.'.format(node.pk)
osf_flag = NodeLog.NODE_REMOVED
node.save()
if flag is not None:
update_admin_log(
user_id=self.request.user.id,
object_id=node.pk,
object_repr='Node',
message=message,
action_flag=flag
)
if osf_flag is not None:
# Log invisibly on the OSF.
osf_log = NodeLog(
action=osf_flag,
user=None,
params={
'project': node.parent_id,
},
date=datetime.utcnow(),
should_hide=True,
)
osf_log.save()
except __HOLE__:
return page_not_found(
request,
AttributeError(
'{} with id "{}" not found.'.format(
self.context_object_name.title(),
kwargs.get('guid')
)
)
)
return redirect(reverse_node(self.kwargs.get('guid')))
|
AttributeError
|
dataset/ETHPy150Open CenterForOpenScience/osf.io/admin/nodes/views.py/NodeDeleteView.delete
|
7,820
|
def description(self, value):
    """Fetches the translated description for the given datatype.
    The given value will be converted to a `numpy.dtype` object, matched
    against the supported datatypes and the description will be translated
    into the preferred language. (Usually a settings dialog should be
    available to change the language).
    If the conversion fails or no match can be found, `None` will be returned.
    Args:
        value (type|numpy.dtype): Any object or type.
    Returns:
        str: The translated description of the datatype
        None: If no match could be found or an error occured during convertion.
    """
    # lists, tuples, dicts refer to numpy.object types and
    # return a 'text' description - working as intended or bug?
    try:
        value = np.dtype(value)
    except __HOLE__, e:
        return None
    for (dtype, string) in self._all:
        if dtype == value:
            return string
    # no match found return given value
    return None
|
TypeError
|
dataset/ETHPy150Open datalyze-solutions/pandas-qt/pandasqt/models/SupportedDtypes.py/SupportedDtypesTranslator.description
|
7,821
|
def tupleAt(self, index):
    """Gets the tuple (datatype, description) at the given position out of all supported types.
    Args:
        index (int): An index to access the list of supported datatypes.
    Returns:
        tuple: A tuple consisting of the (datatype, description) will be
            returned, if the index is valid. If not, an empty tuple is returned.
    """
    try:
        return self._all[index]
    except __HOLE__, e:
        return ()
|
IndexError
|
dataset/ETHPy150Open datalyze-solutions/pandas-qt/pandasqt/models/SupportedDtypes.py/SupportedDtypesTranslator.tupleAt
|
7,822
|
def create_initial_parameterizations(self):
self.begin_operation('(Step 2 of 7) Forming initial chart parameterizations...')
new_uv_indices = numpy.zeros(shape=(len(self.all_vert_indices), 3), dtype=numpy.int32)
new_uvs = []
new_uvs_offset = 0
for (face, facedata) in self.facegraph.nodes_iter(data=True):
border_edges = facedata['edges']
chart_tris = self.all_vert_indices[facedata['tris']]
unique_verts = set(chain.from_iterable(chart_tris))
border_verts = set(chain.from_iterable(border_edges))
interior_verts = list(unique_verts.difference(border_verts))
bordergraph = nx.from_edgelist(border_edges)
bigcycle = list(super_cycle(bordergraph))
boundary_path = []
for i in range(len(bigcycle)-1):
boundary_path.append((bigcycle[i], bigcycle[i+1]))
boundary_path.append((bigcycle[len(bigcycle)-1], bigcycle[0]))
assert(len(boundary_path) == len(border_edges))
total_dist = 0
for (v1, v2) in boundary_path:
total_dist += v3dist(self.all_vertices[v1], self.all_vertices[v2])
vert2uv = {}
curangle = 0
for edge in boundary_path:
angle = v3dist(self.all_vertices[edge[0]], self.all_vertices[edge[1]]) / total_dist
curangle += angle * 2 * math.pi
x, y = (math.sin(curangle) + 1) / 2.0, (math.cos(curangle) + 1.0) / 2.0
vert2uv[edge[0]] = (x,y)
if len(interior_verts) > 0:
vert2idx = {}
for i, v in enumerate(interior_verts):
vert2idx[v] = i
A = numpy.zeros(shape=(len(interior_verts), len(interior_verts)), dtype=numpy.float32)
Bu = numpy.zeros(len(interior_verts), dtype=numpy.float32)
Bv = numpy.zeros(len(interior_verts), dtype=numpy.float32)
sumu = numpy.zeros(len(interior_verts), dtype=numpy.float32)
for edge in self.vertexgraph.subgraph(unique_verts).edges_iter():
v1, v2 = edge
if v1 in border_verts and v2 in border_verts:
continue
edgelen = v3dist(self.all_vertices[v1], self.all_vertices[v2])
if v1 in border_verts:
Bu[vert2idx[v2]] += edgelen * vert2uv[v1][0]
Bv[vert2idx[v2]] += edgelen * vert2uv[v1][1]
sumu[vert2idx[v2]] += edgelen
elif v2 in border_verts:
Bu[vert2idx[v1]] += edgelen * vert2uv[v2][0]
Bv[vert2idx[v1]] += edgelen * vert2uv[v2][1]
sumu[vert2idx[v1]] += edgelen
else:
A[vert2idx[v1]][vert2idx[v2]] = -1 * edgelen
A[vert2idx[v2]][vert2idx[v1]] = -1 * edgelen
sumu[vert2idx[v1]] += edgelen
sumu[vert2idx[v2]] += edgelen
Bu.shape = (len(Bu), 1)
Bv.shape = (len(Bv), 1)
sumu.shape = (len(sumu), 1)
A /= sumu
Bu /= sumu
Bv /= sumu
try: numpy.fill_diagonal(A, 1)
except __HOLE__:
for i in xrange(len(A)):
A[i][i] = 1
interior_us = numpy.linalg.solve(A, Bu)
interior_vs = numpy.linalg.solve(A, Bv)
for (i, (u, v)) in enumerate(zip(interior_us, interior_vs)):
vert2uv[interior_verts[i]] = (u[0], v[0])
new_uvs.append(vert2uv.values())
newvert2idx = dict(zip(vert2uv.keys(), range(new_uvs_offset, new_uvs_offset + len(vert2uv))))
for tri in facedata['tris']:
for i, v in enumerate(self.all_vert_indices[tri]):
new_uv_indices[tri][i] = newvert2idx[v]
new_uvs_offset += len(vert2uv)
self.facegraph.node[face]['vert2uvidx'] = newvert2idx
self.new_uvs = numpy.concatenate(new_uvs)
self.new_uv_indices = new_uv_indices
self.end_operation()
|
AttributeError
|
dataset/ETHPy150Open pycollada/meshtool/meshtool/filters/simplify_filters/sander_simplify.py/SanderSimplify.create_initial_parameterizations
|
7,823
|
def FilterGenerator():
    class SandlerSimplificationFilter(SimplifyFilter):
        def __init__(self):
            super(SandlerSimplificationFilter, self).__init__('sander_simplify', 'Simplifies the mesh based on sandler, et al. method.')
            self.arguments.append(FileArgument('pm_file', 'Where to save the progressive mesh stream'))
        def apply(self, mesh, pm_file):
            try:
                pmout = open(pm_file, 'w')
            except __HOLE__:
                pmout = pm_file
            s = SanderSimplify(mesh, pmout)
            if USE_IPDB:
                with launch_ipdb_on_exception():
                    mesh = s.simplify()
            else:
                mesh = s.simplify()
            return mesh
    return SandlerSimplificationFilter()
|
TypeError
|
dataset/ETHPy150Open pycollada/meshtool/meshtool/filters/simplify_filters/sander_simplify.py/FilterGenerator
|
7,824
|
def validate_shipping_address(view):
    @wraps(view)
    def func(request, checkout):
        if checkout.email is None or checkout.shipping_address is None:
            return redirect('checkout:shipping-address')
        try:
            checkout.shipping_address.full_clean()
        except __HOLE__:
            return redirect('checkout:shipping-address')
        return view(request, checkout)
    return func
|
ValidationError
|
dataset/ETHPy150Open mirumee/saleor/saleor/checkout/views/validators.py/validate_shipping_address
|
7,825
|
@csrf_exempt
@has_request_variables
def lookup_endpoints_for_user(request, email=REQ()):
    try:
        return json_response(realm_for_email(email).deployment.endpoints)
    except __HOLE__:
        return json_error("Cannot determine endpoint for user.", status=404)
|
AttributeError
|
dataset/ETHPy150Open zulip/zulip/zilencer/views.py/lookup_endpoints_for_user
|
7,826
|
def account_deployment_dispatch(request, **kwargs):
    sso_unknown_email = False
    if request.method == 'POST':
        email = request.POST['username']
        realm = realm_for_email(email)
        try:
            return HttpResponseRedirect(realm.deployment.base_site_url)
        except __HOLE__:
            # No deployment found for this user/email
            sso_unknown_email = True
    template_response = django_login_page(request, **kwargs)
    template_response.context_data['desktop_sso_dispatch'] = True
    template_response.context_data['desktop_sso_unknown_email'] = sso_unknown_email
    return template_response
|
AttributeError
|
dataset/ETHPy150Open zulip/zulip/zilencer/views.py/account_deployment_dispatch
|
7,827
|
def iter_format_modules(lang):
    """
    Does the heavy lifting of finding format modules.
    """
    if check_for_language(lang):
        format_locations = []
        for path in CUSTOM_FORMAT_MODULE_PATHS:
            format_locations.append(path + '.%s')
        format_locations.append('django.conf.locale.%s')
        locale = to_locale(lang)
        locales = [locale]
        if '_' in locale:
            locales.append(locale.split('_')[0])
        for location in format_locations:
            for loc in locales:
                try:
                    yield import_module('.formats', location % loc)
                except __HOLE__:
                    pass
|
ImportError
|
dataset/ETHPy150Open bitmazk/django-libs/django_libs/format_utils.py/iter_format_modules
|
7,828
|
def get_format(format_type, lang=None, use_l10n=None):
"""
For a specific format type, returns the format for the current
language (locale), defaults to the format in the settings.
format_type is the name of the format, e.g. 'DATE_FORMAT'
If use_l10n is provided and is not None, that will force the value to
be localized (or not), overriding the value of settings.USE_L10N.
"""
format_type = str_encode(format_type)
if use_l10n or (use_l10n is None and settings.USE_L10N):
if lang is None:
lang = get_language()
cache_key = (format_type, lang)
try:
cached = _format_cache[cache_key]
if cached is not None:
return cached
else:
# Return the general setting by default
return getattr(settings, format_type)
except KeyError:
for module in get_format_modules(lang):
try:
val = getattr(module, format_type)
for iso_input in ISO_INPUT_FORMATS.get(format_type, ()):
if iso_input not in val:
if isinstance(val, tuple):
val = list(val)
val.append(iso_input)
_format_cache[cache_key] = val
return val
except __HOLE__:
pass
_format_cache[cache_key] = None
return getattr(settings, format_type)
|
AttributeError
|
dataset/ETHPy150Open bitmazk/django-libs/django_libs/format_utils.py/get_format
|
7,829
|
def tzname(self, dt):
    try:
        return smart_unicode(time.tzname[self._isdst(dt)],
                             DEFAULT_LOCALE_ENCODING)
    except __HOLE__:
        return None
|
UnicodeDecodeError
|
dataset/ETHPy150Open CollabQ/CollabQ/vendor/django/utils/tzinfo.py/LocalTimezone.tzname
|
7,830
|
def _isdst(self, dt):
    tt = (dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second, dt.weekday(), 0, -1)
    try:
        stamp = time.mktime(tt)
    except (OverflowError, __HOLE__):
        # 32 bit systems can't handle dates after Jan 2038, and certain
        # systems can't handle dates before ~1901-12-01:
        #
        # >>> time.mktime((1900, 1, 13, 0, 0, 0, 0, 0, 0))
        # OverflowError: mktime argument out of range
        # >>> time.mktime((1850, 1, 13, 0, 0, 0, 0, 0, 0))
        # ValueError: year out of range
        #
        # In this case, we fake the date, because we only care about the
        # DST flag.
        tt = (2037,) + tt[1:]
        stamp = time.mktime(tt)
    tt = time.localtime(stamp)
    return tt.tm_isdst > 0
|
ValueError
|
dataset/ETHPy150Open CollabQ/CollabQ/vendor/django/utils/tzinfo.py/LocalTimezone._isdst
|
7,831
|
def main():
    args = docopt.docopt('\n'.join(__doc__.split('\n')[2:]),
                         version=const.VERSION)
    logging.basicConfig(
        level=logging.DEBUG if args['--verbose'] else logging.INFO,
        stream=sys.stdout,
    )
    conf = config.new_context_from_file(args['--config-file'], section='imap')
    if conf is None:
        return 1
    try:
        imap_account = imap_cli.connect(**conf)
        imap_cli.change_dir(imap_account,
                            args['--from'] or const.DEFAULT_DIRECTORY,
                            read_only=False)
        copy(imap_account, args['<mail_id>'], args['<dest>'])
        if args['--delete']:
            flag.flag(imap_account, args['<mail_id>'], [const.FLAG_DELETED])
            imap_account.expunge()
        imap_cli.disconnect(imap_account)
    except __HOLE__:
        log.info('Interrupt by user, exiting')
    return 0
|
KeyboardInterrupt
|
dataset/ETHPy150Open Gentux/imap-cli/imap_cli/copy.py/main
|
7,832
|
def string_to_dict(var_string, allow_kv=True):
    """Returns a dictionary given a string with yaml or json syntax.
    If data is not present in a key: value format, then it return
    an empty dictionary.
    Attempts processing string by 3 different methods in order:
    1. as JSON 2. as YAML 3. as custom key=value syntax
    Throws an error if all of these fail in the standard ways."""
    # try:
    #     # Accept all valid "key":value types of json
    #     return_dict = json.loads(var_string)
    #     assert type(return_dict) is dict
    # except (TypeError, AttributeError, ValueError, AssertionError):
    try:
        # Accept all JSON and YAML
        return_dict = yaml.load(var_string)
        assert type(return_dict) is dict
    except (AttributeError, yaml.YAMLError, __HOLE__):
        # if these fail, parse by key=value syntax
        try:
            assert allow_kv
            return_dict = parse_kv(var_string)
        except:
            raise exc.TowerCLIError(
                'failed to parse some of the extra '
                'variables.\nvariables: \n%s' % var_string
            )
    return return_dict
|
AssertionError
|
dataset/ETHPy150Open ansible/tower-cli/lib/tower_cli/utils/parser.py/string_to_dict
|
7,833
|
def register_function(lib, item, ignore_errors):
    # A function may not exist, if these bindings are used with an older or
    # incompatible version of libclang.so.
    try:
        func = getattr(lib, item[0])
    except __HOLE__ as e:
        msg = str(e) + ". Please ensure that your python bindings are "\
              "compatible with your libclang.so version."
        if ignore_errors:
            return
        raise LibclangError(msg)
    if len(item) >= 2:
        func.argtypes = item[1]
    if len(item) >= 3:
        func.restype = item[2]
    if len(item) == 4:
        func.errcheck = item[3]
|
AttributeError
|
dataset/ETHPy150Open punchagan/cinspect/cinspect/vendor/clang/cindex.py/register_function
|
7,834
|
def get_cindex_library(self):
    try:
        library = cdll.LoadLibrary(self.get_filename())
    except __HOLE__ as e:
        msg = str(e) + ". To provide a path to libclang use " \
              "Config.set_library_path() or " \
              "Config.set_library_file()."
        raise LibclangError(msg)
    return library
|
OSError
|
dataset/ETHPy150Open punchagan/cinspect/cinspect/vendor/clang/cindex.py/Config.get_cindex_library
|
7,835
|
def function_exists(self, name):
    try:
        getattr(self.lib, name)
    except __HOLE__:
        return False
    return True
|
AttributeError
|
dataset/ETHPy150Open punchagan/cinspect/cinspect/vendor/clang/cindex.py/Config.function_exists
|
7,836
|
def inject_op(self):
    try:
        op = self.plan[self.counter]
    except __HOLE__:
        pass
    else:
        self.counter += 1
        if callable(op):
            op(self)
        else:
            method = op[0]
            getattr(self, method)(*op[1:])
|
KeyError
|
dataset/ETHPy150Open StyleShare/flask-volatile/volatiletests/transaction.py/TestCache.inject_op
|
7,837
|
@property
def config_path(self):
    try:
        ret = zsh.getvalue('POWERLINE_CONFIG_PATHS')
    except __HOLE__:
        return None
    else:
        if isinstance(ret, (unicode, str, bytes)):
            return [
                path
                for path in ret.split((b':' if isinstance(ret, bytes) else ':'))
                if path
            ]
        else:
            return ret
|
IndexError
|
dataset/ETHPy150Open powerline/powerline/powerline/bindings/zsh/__init__.py/Args.config_path
|
7,838
|
@staticmethod
def __getitem__(key):
    try:
        return string(zsh.getvalue(key))
    except __HOLE__ as e:
        raise KeyError(*e.args)
|
IndexError
|
dataset/ETHPy150Open powerline/powerline/powerline/bindings/zsh/__init__.py/Environment.__getitem__
|
7,839
|
@staticmethod
def get(key, default=None):
    try:
        return string(zsh.getvalue(key))
    except __HOLE__:
        return default
|
IndexError
|
dataset/ETHPy150Open powerline/powerline/powerline/bindings/zsh/__init__.py/Environment.get
|
7,840
|
@staticmethod
def __contains__(key):
    try:
        zsh.getvalue(key)
        return True
    except __HOLE__:
        return False
|
IndexError
|
dataset/ETHPy150Open powerline/powerline/powerline/bindings/zsh/__init__.py/Environment.__contains__
|
7,841
|
def __str__(self):
parser_state = u(zsh_expand('${(%):-%_}'))
shortened_path = u(zsh_expand('${(%):-%~}'))
try:
mode = u(zsh.getvalue('_POWERLINE_MODE'))
except __HOLE__:
mode = None
try:
default_mode = u(zsh.getvalue('_POWERLINE_DEFAULT_MODE'))
except IndexError:
default_mode = None
segment_info = {
'args': self.args,
'environ': environ,
'client_id': 1,
'local_theme': self.theme,
'parser_state': parser_state,
'shortened_path': shortened_path,
'mode': mode,
'default_mode': default_mode,
}
try:
zle_rprompt_indent = zsh.getvalue('ZLE_RPROMPT_INDENT')
except IndexError:
zle_rprompt_indent = 1
r = ''
if self.above:
for line in self.powerline.render_above_lines(
width=zsh.columns() - zle_rprompt_indent,
segment_info=segment_info,
):
if line:
r += line + '\n'
r += self.powerline.render(
width=zsh.columns(),
side=self.side,
segment_info=segment_info,
mode=mode,
)
if type(r) is not str:
if type(r) is bytes:
return r.decode(get_preferred_output_encoding(), 'replace')
else:
return r.encode(get_preferred_output_encoding(), 'replace')
return r
|
IndexError
|
dataset/ETHPy150Open powerline/powerline/powerline/bindings/zsh/__init__.py/Prompt.__str__
|
7,842
|
def set_prompt(powerline, psvar, side, theme, above=False):
    try:
        savedps = zsh.getvalue(psvar)
    except __HOLE__:
        savedps = None
    zpyvar = 'ZPYTHON_POWERLINE_' + psvar
    prompt = Prompt(powerline, side, theme, psvar, savedps, above)
    zsh.setvalue(zpyvar, None)
    zsh.set_special_string(zpyvar, prompt)
    zsh.setvalue(psvar, '${' + zpyvar + '}')
    return ref(prompt)
|
IndexError
|
dataset/ETHPy150Open powerline/powerline/powerline/bindings/zsh/__init__.py/set_prompt
|
7,843
|
def dePlist(self, node, interesting_keys=None):
"""
Given a DOM node, convert the plist (fragment) it refers to and
return the corresponding Python data structure.
If interesting_keys is a list, "dict" keys will be filtered so that
only those nominated are returned (for ALL descendant dicts). Numeric
keys aren't filtered.
"""
ik = interesting_keys
dtype = node.nodeName
if dtype == 'string':
return self.getText(node)
elif dtype == 'integer':
try:
return int(self.getText(node))
except ValueError:
raise iPhotoLibraryError, \
"Corrupted Library; unexpected value '%s' for integer" % \
self.getText(node)
elif dtype == 'real':
try:
return float(self.getText(node))
except __HOLE__:
raise iPhotoLibraryError, \
"Corrupted Library; unexpected value '%s' for real" % \
self.getText(node)
elif dtype == 'array':
return [self.dePlist(c, ik) for c in node.childNodes \
if c.nodeType == Node.ELEMENT_NODE]
elif dtype == 'dict':
d = {}
last_key = None
for c in node.childNodes:
if c.nodeType != Node.ELEMENT_NODE:
continue
# TODO: catch out-of-order keys/values
if c.nodeName == 'key':
last_key = self.getText(c)
else: # value
if interesting_keys: # check to see if we're interested
if last_key not in interesting_keys \
and not last_key.isdigit():
continue # nope.
d[intern(str(last_key))] = self.dePlist(c, ik)
return d
elif dtype == 'true':
return True
elif dtype == 'false':
return False
elif dtype == 'data':
return base64.decodestring(self.getText(c))
elif dtype == 'date':
return self.appleDate(self.getText(c))
else:
raise Exception, "Don't know what a %s is." % dtype
|
ValueError
|
dataset/ETHPy150Open BMorearty/exportiphoto/exportiphoto.py/iPhotoLibrary.dePlist
|
7,844
|
def copyImage(self, imageId, folderName, folderDate):
"""
Copy an image from the library to a folder in the dest_dir. The
name of the folder is based on folderName and folderDate; if
folderDate is None, it's only based upon the folderName.
If use_metadata is True, also write the image metadata from the library
to the copy. If use_faces is True, faces will be saved as keywords.
"""
try:
image = self.images[imageId]
except __HOLE__:
raise iPhotoLibraryError, "Can't find image #%s" % imageId
if not os.path.exists(folderName):
try:
if not self.test:
os.makedirs(folderName)
except OSError, why:
raise iPhotoLibraryError, \
"Can't create %s: %s" % (folderName, why[1])
self.status(" Created %s\n" % folderName)
#Unedited images only have ImagePath, edited images have both ImagePath and OriginalPath,
#except for some corrupted iPhoto libraries, where some images only have OriginalPath.
#Trying to satisfy both conditions with this nested logic.
if self.originals:
if "OriginalPath" in image:
mFilePath = image["OriginalPath"]
else:
mFilePath = image["ImagePath"]
else:
if not "ImagePath" in image:
mFilePath = image["OriginalPath"]
else:
mFilePath = image["ImagePath"]
basename = os.path.basename(mFilePath)
# Deconflict ouput filenames
tFilePath = os.path.join(folderName, basename)
if self.deconflict:
j = 1
while tFilePath in self.output_files:
tFilePath = os.path.join(folderName, "%02d_"%j + basename)
j += 1
self.output_files.add(tFilePath)
# Skip unchanged files, unless we're writing metadata.
if not self.use_metadata and os.path.exists(tFilePath):
mStat = os.stat(mFilePath)
tStat = os.stat(tFilePath)
if not self.ignore_time_delta and abs(tStat[stat.ST_MTIME] - mStat[stat.ST_MTIME]) <= 10:
self.status("-")
return
if tStat[stat.ST_SIZE] == mStat[stat.ST_SIZE]:
self.status("-")
return
if not self.test and os.path.exists(mFilePath):
shutil.copy2(mFilePath, tFilePath)
md_written = False
if self.use_metadata:
md_written = self.writePhotoMD(imageId, tFilePath)
if md_written:
self.status("+")
else:
self.status(".")
|
KeyError
|
dataset/ETHPy150Open BMorearty/exportiphoto/exportiphoto.py/iPhotoLibrary.copyImage
|
7,845
|
def writePhotoMD(self, imageId, filePath=None):
"""
Write the metadata from the library for imageId to filePath.
If filePath is None, write it to the photo in the library.
If use_faces is True, iPhoto face names will be written to
keywords.
"""
try:
image = self.images[imageId]
except KeyError:
raise iPhotoLibraryError, "Can't find image #%s" % imageId
if not filePath:
if self.originals:
if "OriginalPath" in image:
mFilePath = image["OriginalPath"]
else:
mFilePath = image["ImagePath"]
else:
if not "ImagePath" in image:
mFilePath = image["OriginalPath"]
else:
mFilePath = image["ImagePath"]
caption = image.get("Caption", None)
rating = image.get("Rating", None)
comment = image.get("Comment", None)
keywords = set([self.keywords[k] for k in image.get("Keywords", [])])
if self.use_faces:
keywords.update([self.faces[f['face key']]
for f in image.get("Faces", [])
if self.faces.has_key(f['face key'])]
)
if caption or comment or rating or keywords:
try:
md = pyexiv2.ImageMetadata(filePath)
md.read()
if caption:
md["Iptc.Application2.Headline"] = [caption]
if rating:
md["Xmp.xmp.Rating"] = rating
if comment:
md["Iptc.Application2.Caption"] = [comment]
if keywords:
md["Iptc.Application2.Keywords"] = list(keywords)
if not self.test:
md.write(preserve_timestamps=True)
return True
except __HOLE__, why:
self.status("\nProblem setting metadata (%s) on %s\n" % (
unicode(why.__str__(), errors='replace'), filePath
))
return False
|
IOError
|
dataset/ETHPy150Open BMorearty/exportiphoto/exportiphoto.py/iPhotoLibrary.writePhotoMD
|
7,846
|
def appleDate(self, text):
    try:
        return datetime.utcfromtimestamp(self.apple_epoch + float(text))
    except (__HOLE__, TypeError):
        raise iPhotoLibraryError, \
            "Corrupted Library; unexpected value '%s' for date" % text
|
ValueError
|
dataset/ETHPy150Open BMorearty/exportiphoto/exportiphoto.py/iPhotoLibrary.appleDate
|
7,847
|
def read(self, amt=None, decode_content=None, cache_content=False):
"""
Similar to :meth:`httplib.HTTPResponse.read`, but with two additional
parameters: ``decode_content`` and ``cache_content``.
:param amt:
How much of the content to read. If specified, decoding and caching
is skipped because we can't decode partial content nor does it make
sense to cache partial content as the full response.
:param decode_content:
If True, will attempt to decode the body based on the
'content-encoding' header. (Overridden if ``amt`` is set.)
:param cache_content:
If True, will save the returned data such that the same result is
returned despite of the state of the underlying file object. This
is useful if you want the ``.data`` property to continue working
after having ``.read()`` the file object. (Overridden if ``amt`` is
set.)
"""
# Note: content-encoding value should be case-insensitive, per RFC 2616
# Section 3.5
content_encoding = self.headers.get('content-encoding', '').lower()
decoder = self.CONTENT_DECODERS.get(content_encoding)
if decode_content is None:
decode_content = self._decode_content
if self._fp is None:
return
try:
if amt is None:
# cStringIO doesn't like amt=None
data = self._fp.read()
else:
return self._fp.read(amt)
try:
if decode_content and decoder:
data = decoder(data)
except (__HOLE__, zlib.error):
raise DecodeError("Received response with content-encoding: %s, but "
"failed to decode it." % content_encoding)
if cache_content:
self._body = data
return data
finally:
if self._original_response and self._original_response.isclosed():
self.release_conn()
|
IOError
|
dataset/ETHPy150Open Esri/geoprocessing-tools-for-hadoop/requests/packages/urllib3/response.py/HTTPResponse.read
|
7,848
|
def compare_xml(doc1, doc2):
"""
Helper function to compare two XML files. It will parse both once again
and write them in a canonical fashion.
"""
try:
doc1.seek(0, 0)
except __HOLE__:
pass
try:
doc2.seek(0, 0)
except AttributeError:
pass
obj1 = etree.parse(doc1)
obj2 = etree.parse(doc2)
# Remove comments from both.
for c in obj1.getroot().xpath("//comment()"):
p = c.getparent()
p.remove(c)
for c in obj2.getroot().xpath("//comment()"):
p = c.getparent()
p.remove(c)
remove_empty_tags(obj1.getroot())
remove_empty_tags(obj2.getroot())
buf = io.BytesIO()
obj1.write_c14n(buf)
buf.seek(0, 0)
str1 = buf.read().decode()
str1 = [_i.strip() for _i in str1.splitlines() if _i.strip()]
buf = io.BytesIO()
obj2.write_c14n(buf)
buf.seek(0, 0)
str2 = buf.read().decode()
str2 = [_i.strip() for _i in str2.splitlines() if _i.strip()]
unified_diff = difflib.unified_diff(str1, str2)
err_msg = "\n".join(unified_diff)
if err_msg:
msg = "Strings are not equal.\n"
raise AssertionError(msg + err_msg)
|
AttributeError
|
dataset/ETHPy150Open trungdong/prov/prov/tests/test_xml.py/compare_xml
|
7,849
|
def test_fileplugin_icon_uppercase(self):
    page = api.create_page('testpage', 'nav_playground.html', 'en')
    body = page.placeholders.get(slot="body")
    plugin = File(
        plugin_type='FilePlugin',
        placeholder=body,
        position=1,
        language=settings.LANGUAGE_CODE,
    )
    # This try/except block allows older and newer versions of the
    # djangocms-file plugin to work here.
    try:
        plugin.file.save("UPPERCASE.JPG", SimpleUploadedFile(
            "UPPERCASE.jpg", b"content"), False)
    except __HOLE__: # catches 'RelatedObjectDoesNotExist'
        plugin.source.save("UPPERCASE.JPG", SimpleUploadedFile(
            "UPPERCASE.jpg", b"content"), False)
    plugin.add_root(instance=plugin)
    self.assertNotEquals(plugin.get_icon_url().find('jpg'), -1)
|
ObjectDoesNotExist
|
dataset/ETHPy150Open divio/django-cms/cms/tests/test_plugins.py/FileSystemPluginTests.test_fileplugin_icon_uppercase
|
7,850
|
def __getattr__(self, attr, g=thread.get_ident):
    try:
        return self.__dict__['__objs'][g()][attr]
    except __HOLE__:
        raise AttributeError(
            "No variable %s defined for the thread %s"
            % (attr, g()))
|
KeyError
|
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/Paste-2.0.1/paste/util/threadinglocal.py/local.__getattr__
|
7,851
|
def __delattr__(self, attr, g=thread.get_ident):
    try:
        del self.__dict__['__objs'][g()][attr]
    except __HOLE__:
        raise AttributeError(
            "No variable %s defined for thread %s"
            % (attr, g()))
|
KeyError
|
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/Paste-2.0.1/paste/util/threadinglocal.py/local.__delattr__
|
7,852
|
def _create_function(self, request, full_url, headers):
    lambda_backend = self.get_lambda_backend(full_url)
    spec = json.loads(request.body.decode('utf-8'))
    try:
        fn = lambda_backend.create_function(spec)
    except __HOLE__ as e:
        return 400, headers, json.dumps({"Error": {"Code": e.args[0], "Message": e.args[1]}})
    else:
        config = fn.get_configuration()
        return 201, headers, json.dumps(config)
|
ValueError
|
dataset/ETHPy150Open spulec/moto/moto/awslambda/responses.py/LambdaResponse._create_function
|
7,853
|
def wealth_tester(expname):
'''
Checks the TotalWealth of each Behavior Type in the wealth file.
'''
try:
wealthfile = open(expname + 'wealth.csv', 'r')
except __HOLE__:
print "Check the wealth file."
try:
transactionfile = open(expname + '.csv')
except IOError:
print "Error in passing the csv file"
fout = open('ultimateoutput.txt', 'a')
#find the average stockprice
#indices for readability
#time = 0
#transactionnum = 1
#price = 2
#volume = 3
#totalprice = 0
#numtrans = 0
#skipfirstlines = 0
#for line in transactionfile:
# if skipfirstlines == 0 or skipfirstlines == 1:
# skipfirstlines += 1
# continue
# splitline = line.split(';')
# totalprice = totalprice + (int(float(splitline[price])) * int(splitline[volume]))
# numtrans = numtrans + int(splitline[volume])
# avgtrans = totalprice / numtrans
avgtrans = 500 #This replaces the actual average finder
totalwealth = dict()
#indices for readability
time = 0
id = 1
behavior = 2
wealth = 3
stock = 4
for line in wealthfile:
splitline = line.split(';')
agent = splitline[behavior]
#print agent
if agent in totalwealth.keys():
#add wealth
totalwealth[agent] = totalwealth[agent] + int(splitline[wealth])
#add stock x avg transaction price
totalwealth[agent] = totalwealth[agent] + (int(splitline[stock]) * avgtrans)
else:
totalwealth[agent] = 0
#add wealth
totalwealth[agent] = totalwealth[agent] + int(splitline[wealth])
#add stock x avg transaction price
totalwealth[agent] = totalwealth[agent] + (int(splitline[stock]) * avgtrans)
#Order them top to bottom
agents = [key for key, dummy in sorted(totalwealth.items(), key = lambda arg: arg[1], reverse = True)]
#print agents
#for agent in agents:
# print totalwealth[agent]
#print totalwealth
#print type(totalwealth.keys())
print totalwealth
fout.write(expname + '\n')
fout.write('Agent1: ' + agents[0] + " has a total wealth of " + str(totalwealth[agents[0]]) + '\n')
fout.write('Agent2: ' + agents[1] + " has a total wealth of " + str(totalwealth[agents[1]]) + '\n')
wealthfile.close()
fout.close()
return totalwealth
|
IOError
|
dataset/ETHPy150Open jcbagneris/fms/fms/contrib/coleman/checksuccess.py/wealth_tester
|
7,854
|
def import_user():
    try:
        from flask.ext.login import current_user
        return current_user
    except __HOLE__:
        raise ImportError(
            'User argument not passed and Flask-Login current_user could not be imported.')
|
ImportError
|
dataset/ETHPy150Open raddevon/flask-permissions/flask_permissions/decorators.py/import_user
|
7,855
|
def __enter__(self):
    try:
        self.exc = None
        self.control = ControlHandler()
        self.logfile = self.cache._upload_open(self.archive, mode='r+')
        getLogger().debug("Found state file for %s", self.archive)
    except __HOLE__ as e:
        if e.errno == errno.ENOENT:
            getLogger().debug("Creating state file for %s", self.archive)
            self.logfile = self.cache._upload_open(self.archive, mode='w+')
        else:
            raise e
    # update keys from file
    upload = self.cache._validate_upload(self.logfile)
    self.uri = upload.get('uri', self.uri)
    self.keys = upload.get('keys', self.keys)
    if self._paused is True:
        self._paused = False
    self.cache._write_upload(
        self.uri, self.capsule, self.logfile,
        self.exc, self._paused)
    return self
|
IOError
|
dataset/ETHPy150Open longaccess/longaccess-client/lacli/upload.py/UploadState.__enter__
|
7,856
|
def _check_hex(dummy_option, opt, value):
    """
    Checks if a value is given in a decimal integer of hexadecimal reppresentation.
    Returns the converted value or rises an exception on error.
    """
    try:
        if value.lower().startswith("0x"):
            return int(value, 16)
        else:
            return int(value)
    except __HOLE__:
        raise OptionValueError(
            "option {0:s}: invalid integer or hexadecimal value: {1:s}.".format(opt, value))
# function _check_bool
###############################################################################
|
ValueError
|
dataset/ETHPy150Open tpircher/pycrc/crc_opt.py/_check_hex
|
7,857
|
def test06():
    try:
        _ = MyInfra.server.wibble
        assert False, "failed to raise AttributeError when accessing a bad attr"
    except __HOLE__, _:
        pass
|
AttributeError
|
dataset/ETHPy150Open haxsaw/actuator/src/tests/infra_tests.py/test06
|
7,858
|
def test12():
    try:
        _ = MyInfra.server['wow']
        assert False, "Should not have been allowed to perform keyed access on the server"
    except __HOLE__, _:
        pass
|
TypeError
|
dataset/ETHPy150Open haxsaw/actuator/src/tests/infra_tests.py/test12
|
7,859
|
def test118():
    #this is just ensuring we throw if a component derived class fails to
    #implement fix_arguments()
    class MissedMethod1(AbstractModelingEntity):
        def __init__(self, name, arg1, arg2):
            super(MissedMethod1, self).__init__(name)
            self.arg1 = arg1
            self.arg2 = arg2
    comp = MissedMethod1("oops!", 1, 2)
    try:
        comp.fix_arguments()
        assert False, "fix_arguments should have thrown an exception"
    except __HOLE__, _:
        assert True
    except Exception, e:
        assert False, "got an unexpected exception: '%s'" % e.message
|
TypeError
|
dataset/ETHPy150Open haxsaw/actuator/src/tests/infra_tests.py/test118
|
7,860
|
def test130():
    class BadRefClass(AbstractModelReference):
        pass
    class Test130(InfraModel):
        ref_class = BadRefClass
        grid = MultiResource(Server("grid", mem="8GB"))
    inst = Test130("t130")
    try:
        _ = inst.grid[1]
        assert False, "Should have raised a TypeError about _get_item_ref_obj()"
    except __HOLE__, e:
        assert "get_item_ref_obj" in e.message
|
TypeError
|
dataset/ETHPy150Open haxsaw/actuator/src/tests/infra_tests.py/test130
|
7,861
|
def test132():
    class Test132(InfraModel):
        server = Server("dummy", mem="8GB", no_key=5)
    inst = Test132("t132")
    try:
        inst.server.no_key[2]
        assert False, "We were allowed to use a key on a non-collection attribute"
    except __HOLE__, e:
        assert "keyed" in e.message
|
TypeError
|
dataset/ETHPy150Open haxsaw/actuator/src/tests/infra_tests.py/test132
|
7,862
|
def test150():
    from actuator.infra import IPAddressable
    class NoAdminIP(IPAddressable):
        pass
    s = NoAdminIP()
    try:
        _ = s.get_ip()
        raise False, "Should not have been able to call get_ip()"
    except __HOLE__, e:
        assert "Not implemented" in e.message
|
TypeError
|
dataset/ETHPy150Open haxsaw/actuator/src/tests/infra_tests.py/test150
|
7,863
|
def getUserId(self, s):
"""Returns the user ID of a given name or hostmask."""
if ircutils.isUserHostmask(s):
try:
return self._hostmaskCache[s]
except KeyError:
ids = {}
for (id, user) in self.users.items():
x = user.checkHostmask(s)
if x:
ids[id] = x
if len(ids) == 1:
id = list(ids.keys())[0]
self._hostmaskCache[s] = id
try:
self._hostmaskCache[id].add(s)
except __HOLE__:
self._hostmaskCache[id] = set([s])
return id
elif len(ids) == 0:
raise KeyError(s)
else:
log.error('Multiple matches found in user database. '
'Removing the offending hostmasks.')
for (id, hostmask) in ids.items():
log.error('Removing %q from user %s.', hostmask, id)
self.users[id].removeHostmask(hostmask)
raise DuplicateHostmask('Ids %r matched.' % ids)
else: # Not a hostmask, must be a name.
s = s.lower()
try:
return self._nameCache[s]
except KeyError:
for (id, user) in self.users.items():
if s == user.name.lower():
self._nameCache[s] = id
self._nameCache[id] = s
return id
else:
raise KeyError(s)
|
KeyError
|
dataset/ETHPy150Open ProgVal/Limnoria/src/ircdb.py/UsersDictionary.getUserId
|
7,864
|
def getUserFromNick(self, network, nick):
    """Return a user given its nick."""
    for user in self.users.values():
        try:
            if nick in user.nicks[network]:
                return user
        except __HOLE__:
            pass
    return None
|
KeyError
|
dataset/ETHPy150Open ProgVal/Limnoria/src/ircdb.py/UsersDictionary.getUserFromNick
|
7,865
|
def hasUser(self, id):
    """Returns the database has a user given its id, name, or hostmask."""
    try:
        self.getUser(id)
        return True
    except __HOLE__:
        return False
|
KeyError
|
dataset/ETHPy150Open ProgVal/Limnoria/src/ircdb.py/UsersDictionary.hasUser
|
7,866
|
def setUser(self, user, flush=True):
    """Sets a user (given its id) to the IrcUser given it."""
    self.nextId = max(self.nextId, user.id)
    try:
        if self.getUserId(user.name) != user.id:
            raise DuplicateHostmask(user.name)
    except __HOLE__:
        pass
    for hostmask in user.hostmasks:
        for (i, u) in self.items():
            if i == user.id:
                continue
            elif u.checkHostmask(hostmask):
                # We used to remove the hostmask here, but it's not
                # appropriate for us both to remove the hostmask and to
                # raise an exception. So instead, we'll raise an
                # exception, but be nice and give the offending hostmask
                # back at the same time.
                raise DuplicateHostmask(hostmask)
            for otherHostmask in u.hostmasks:
                if ircutils.hostmaskPatternEqual(hostmask, otherHostmask):
                    raise DuplicateHostmask(hostmask)
    self.invalidateCache(user.id)
    self.users[user.id] = user
    if flush:
        self.flush()
|
KeyError
|
dataset/ETHPy150Open ProgVal/Limnoria/src/ircdb.py/UsersDictionary.setUser
|
7,867
|
def checkIgnored(hostmask, recipient='', users=users, channels=channels):
    """checkIgnored(hostmask, recipient='') -> True/False
    Checks if the user is ignored by the recipient of the message.
    """
    try:
        id = users.getUserId(hostmask)
        user = users.getUser(id)
        if user._checkCapability('owner'):
            # Owners shouldn't ever be ignored.
            return False
        elif user.ignore:
            log.debug('Ignoring %s due to their IrcUser ignore flag.', hostmask)
            return True
    except __HOLE__:
        # If there's no user...
        if conf.supybot.defaultIgnore():
            log.debug('Ignoring %s due to conf.supybot.defaultIgnore',
                      hostmask)
            return True
    if ignores.checkIgnored(hostmask):
        log.debug('Ignoring %s due to ignore database.', hostmask)
        return True
    if ircutils.isChannel(recipient):
        channel = channels.getChannel(recipient)
        if channel.checkIgnored(hostmask):
            log.debug('Ignoring %s due to the channel ignores.', hostmask)
            return True
    return False
|
KeyError
|
dataset/ETHPy150Open ProgVal/Limnoria/src/ircdb.py/checkIgnored
|
7,868
|
def _checkCapabilityForUnknownUser(capability, users=users, channels=channels,
                                   ignoreDefaultAllow=False):
    if isChannelCapability(capability):
        (channel, capability) = fromChannelCapability(capability)
        try:
            c = channels.getChannel(channel)
            if capability in c.capabilities:
                return c._checkCapability(capability)
            else:
                return _x(capability, (not ignoreDefaultAllow) and c.defaultAllow)
        except __HOLE__:
            pass
    defaultCapabilities = conf.supybot.capabilities()
    if capability in defaultCapabilities:
        return defaultCapabilities.check(capability)
    elif ignoreDefaultAllow:
        return _x(capability, False)
    else:
        return _x(capability, conf.supybot.capabilities.default())
|
KeyError
|
dataset/ETHPy150Open ProgVal/Limnoria/src/ircdb.py/_checkCapabilityForUnknownUser
|
7,869
|
def checkCapability(hostmask, capability, users=users, channels=channels,
ignoreOwner=False, ignoreChannelOp=False,
ignoreDefaultAllow=False):
"""Checks that the user specified by name/hostmask has the capability given.
``users`` and ``channels`` default to ``ircdb.users`` and
``ircdb.channels``.
``ignoreOwner``, ``ignoreChannelOp``, and ``ignoreDefaultAllow`` are
used to override default behavior of the capability system in special
cases (actually, in the AutoMode plugin):
* ``ignoreOwner`` disables the behavior "owners have all capabilites"
* ``ignoreChannelOp`` disables the behavior "channel ops have all
channel capabilities"
* ``ignoreDefaultAllow`` disables the behavior "if a user does not have
a capability or the associated anticapability, then they have the
capability"
"""
if world.testing and (not isinstance(hostmask, str) or
'@' not in hostmask or
'__no_testcap__' not in hostmask.split('@')[1]):
return _x(capability, True)
try:
u = users.getUser(hostmask)
if u.secure and not u.checkHostmask(hostmask, useAuth=False):
raise KeyError
except KeyError:
# Raised when no hostmasks match.
return _checkCapabilityForUnknownUser(capability, users=users,
channels=channels, ignoreDefaultAllow=ignoreDefaultAllow)
except ValueError as e:
# Raised when multiple hostmasks match.
log.warning('%s: %s', hostmask, e)
return _checkCapabilityForUnknownUser(capability, users=users,
channels=channels, ignoreDefaultAllow=ignoreDefaultAllow)
if capability in u.capabilities:
try:
return u._checkCapability(capability, ignoreOwner)
except __HOLE__:
pass
if isChannelCapability(capability):
(channel, capability) = fromChannelCapability(capability)
if not ignoreChannelOp:
try:
chanop = makeChannelCapability(channel, 'op')
if u._checkCapability(chanop):
return _x(capability, True)
except KeyError:
pass
c = channels.getChannel(channel)
if capability in c.capabilities:
return c._checkCapability(capability)
elif not ignoreDefaultAllow:
return _x(capability, c.defaultAllow)
else:
return False
defaultCapabilities = conf.supybot.capabilities()
if capability in defaultCapabilities:
return defaultCapabilities.check(capability)
elif ignoreDefaultAllow:
return _x(capability, False)
else:
return _x(capability, conf.supybot.capabilities.default())
|
KeyError
|
dataset/ETHPy150Open ProgVal/Limnoria/src/ircdb.py/checkCapability
|
7,870
|
def on_message(self, raw_message):
message = json.loads(raw_message)
event_type = message.get("type")
try:
event_class = self.event_callbacks[event_type]
except __HOLE__:
print("I don't know how to process '%s' events!" % (
event_type))
return
event = event_class(**message)
self.app.widget_event(event)
|
KeyError
|
dataset/ETHPy150Open ejeschke/ginga/ginga/web/pgw/PgHelp.py/ApplicationHandler.on_message
|
7,871
|
def mkdirs(folder_path):
try:
makedirs(folder_path)
except __HOLE__ as exc:
if exc.errno == errno.EEXIST and path.isdir(folder_path):
pass
else:
raise
|
OSError
|
dataset/ETHPy150Open cloudify-cosmo/cloudify-manager/rest-service/manager_rest/utils.py/mkdirs
|
7,872
|
def save_request_content_to_file(request, archive_target_path, url_key,
data_type='unknown'):
"""
Retrieves the file specified by the request to the local machine.
:param request: the request received by the rest client
:param archive_target_path: the target of the archive
:param data_type: the kind of the data (e.g. 'blueprint')
:param url_key: if the data is passed as a url to an online resource, the
url_key specifies what header points to the requested url.
:return: None
"""
if url_key in request.args:
if request.data or 'Transfer-Encoding' in request.headers:
raise manager_exceptions.BadParametersError(
"Can't pass both a {0} URL via query parameters "
"and {0} data via the request body at the same time"
.format(data_type))
data_url = request.args[url_key]
try:
with contextlib.closing(urlopen(data_url)) as urlf:
with open(archive_target_path, 'w') as f:
f.write(urlf.read())
except URLError:
raise manager_exceptions.ParamUrlNotFoundError(
"URL {0} not found - can't download {1} archive"
.format(data_url, data_type))
except __HOLE__:
raise manager_exceptions.BadParametersError(
"URL {0} is malformed - can't download {1} archive"
.format(data_url, data_type))
elif 'Transfer-Encoding' in request.headers:
with open(archive_target_path, 'w') as f:
for buffered_chunked in chunked.decode(request.input_stream):
f.write(buffered_chunked)
else:
if not request.data:
raise manager_exceptions.BadParametersError(
'Missing {0} archive in request body or '
'"{1}" in query parameters'.format(data_type,
url_key))
uploaded_file_data = request.data
with open(archive_target_path, 'w') as f:
f.write(uploaded_file_data)
|
ValueError
|
dataset/ETHPy150Open cloudify-cosmo/cloudify-manager/rest-service/manager_rest/utils.py/save_request_content_to_file
|
7,873
|
def confirm_changes(changes):
changes = list(changes)
if changes:
for i, (description, func) in enumerate(changes):
print(u'{}. {}'.format(i, description))
print('List the changes you don\'t want to happen.')
print('Just hit enter if changes are okay.')
reverted = None
while reverted is None:
try:
reverted = [int(x.strip()) for x in click.prompt('> ').split()]
except __HOLE__:
print('Invalid input.')
print('If you want to undo number 1 and 2, enter: 1 2')
for i in reverted:
del changes[i]
return changes
|
ValueError
|
dataset/ETHPy150Open untitaker/watdo/watdo/cli.py/confirm_changes
|
7,874
|
def launch_editor(cfg, all_tasks=False, calendar=None):
tmpfile = tempfile.NamedTemporaryFile(dir=cfg['tmppath'], delete=False)
try:
with tmpfile as f:
tasks = model.walk_calendars(cfg['path'])
def task_filter():
for task in tasks:
if calendar is not None and task.calendar != calendar:
continue
if not all_tasks and task.done:
continue
yield task
header = u'// Showing {status} tasks from {calendar}'.format(
status=(u'all' if all_tasks else u'pending'),
calendar=(u'all calendars' if calendar is None else u'@{}'
.format(calendar))
)
old_ids = editor.generate_tmpfile(f, task_filter(), header)
new_ids = None
while new_ids is None:
cmd = cfg['editor'] + ' ' + tmpfile.name
print('>>> {}'.format(cmd))
subprocess.call(cmd, shell=True)
with open(tmpfile.name, 'rb') as f:
try:
new_ids = editor.parse_tmpfile(f)
changes = editor.get_changes(old_ids, new_ids)
if cfg['confirmation']:
changes = confirm_changes(changes)
make_changes(changes, cfg)
except (__HOLE__, CliError) as e:
print(e)
click.confirm('Do you want to edit again? '
'Otherwise changes will be discarded.',
default=True, abort=True)
else:
break
finally:
os.remove(tmpfile.name)
|
ValueError
|
dataset/ETHPy150Open untitaker/watdo/watdo/cli.py/launch_editor
|
7,875
|
def check_importable(dist, attr, value):
try:
ep = pkg_resources.EntryPoint.parse('x='+value)
assert not ep.extras
except (TypeError,ValueError,AttributeError,__HOLE__):
raise DistutilsSetupError(
"%r must be importable 'module:attrs' string (got %r)"
% (attr,value)
)
|
AssertionError
|
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/setuptools-0.6c11/setuptools/dist.py/check_importable
|
7,876
|
def assert_string_list(dist, attr, value):
"""Verify that value is a string list or None"""
try:
assert ''.join(value)!=value
except (TypeError,ValueError,AttributeError,__HOLE__):
raise DistutilsSetupError(
"%r must be a list of strings (got %r)" % (attr,value)
)
|
AssertionError
|
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/setuptools-0.6c11/setuptools/dist.py/assert_string_list
|
7,877
|
def check_extras(dist, attr, value):
"""Verify that extras_require mapping is valid"""
try:
for k,v in value.items():
list(pkg_resources.parse_requirements(v))
except (TypeError,__HOLE__,AttributeError):
raise DistutilsSetupError(
"'extras_require' must be a dictionary whose values are "
"strings or lists of strings containing valid project/version "
"requirement specifiers."
)
|
ValueError
|
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/setuptools-0.6c11/setuptools/dist.py/check_extras
|
7,878
|
def check_requirements(dist, attr, value):
"""Verify that install_requires is a valid requirements list"""
try:
list(pkg_resources.parse_requirements(value))
except (TypeError,__HOLE__):
raise DistutilsSetupError(
"%r must be a string or list of strings "
"containing valid project/version requirement specifiers" % (attr,)
)
|
ValueError
|
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/setuptools-0.6c11/setuptools/dist.py/check_requirements
|
7,879
|
def check_entry_points(dist, attr, value):
"""Verify that entry_points map is parseable"""
try:
pkg_resources.EntryPoint.parse_map(value)
except __HOLE__, e:
raise DistutilsSetupError(e)
|
ValueError
|
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/setuptools-0.6c11/setuptools/dist.py/check_entry_points
|
7,880
|
def check_package_data(dist, attr, value):
"""Verify that value is a dictionary of package names to glob lists"""
if isinstance(value,dict):
for k,v in value.items():
if not isinstance(k,str): break
try: iter(v)
except __HOLE__:
break
else:
return
raise DistutilsSetupError(
attr+" must be a dictionary mapping package names to lists of "
"wildcard patterns"
)
|
TypeError
|
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/setuptools-0.6c11/setuptools/dist.py/check_package_data
|
7,881
|
def fetch_build_egg(self, req):
"""Fetch an egg needed for building"""
try:
cmd = self._egg_fetcher
except __HOLE__:
from setuptools.command.easy_install import easy_install
dist = self.__class__({'script_args':['easy_install']})
dist.parse_config_files()
opts = dist.get_option_dict('easy_install')
keep = (
'find_links', 'site_dirs', 'index_url', 'optimize',
'site_dirs', 'allow_hosts'
)
for key in opts.keys():
if key not in keep:
del opts[key] # don't use any other settings
if self.dependency_links:
links = self.dependency_links[:]
if 'find_links' in opts:
links = opts['find_links'][1].split() + links
opts['find_links'] = ('setup', links)
cmd = easy_install(
dist, args=["x"], install_dir=os.curdir, exclude_scripts=True,
always_copy=False, build_directory=None, editable=False,
upgrade=False, multi_version=True, no_report = True
)
cmd.ensure_finalized()
self._egg_fetcher = cmd
return cmd.easy_install(req)
|
AttributeError
|
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/setuptools-0.6c11/setuptools/dist.py/Distribution.fetch_build_egg
|
7,882
|
def _exclude_misc(self,name,value):
"""Handle 'exclude()' for list/tuple attrs without a special handler"""
if not isinstance(value,sequence):
raise DistutilsSetupError(
"%s: setting must be a list or tuple (%r)" % (name, value)
)
try:
old = getattr(self,name)
except __HOLE__:
raise DistutilsSetupError(
"%s: No such distribution setting" % name
)
if old is not None and not isinstance(old,sequence):
raise DistutilsSetupError(
name+": this setting cannot be changed via include/exclude"
)
elif old:
setattr(self,name,[item for item in old if item not in value])
|
AttributeError
|
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/setuptools-0.6c11/setuptools/dist.py/Distribution._exclude_misc
|
7,883
|
def _include_misc(self,name,value):
"""Handle 'include()' for list/tuple attrs without a special handler"""
if not isinstance(value,sequence):
raise DistutilsSetupError(
"%s: setting must be a list (%r)" % (name, value)
)
try:
old = getattr(self,name)
except __HOLE__:
raise DistutilsSetupError(
"%s: No such distribution setting" % name
)
if old is None:
setattr(self,name,value)
elif not isinstance(old,sequence):
raise DistutilsSetupError(
name+": this setting cannot be changed via include/exclude"
)
else:
setattr(self,name,old+[item for item in value if item not in old])
|
AttributeError
|
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/setuptools-0.6c11/setuptools/dist.py/Distribution._include_misc
|
7,884
|
def _find_method(obj, func):
if obj:
try:
func_self = six.get_method_self(func)
except __HOLE__: # func has no __self__
pass
else:
if func_self is obj:
return six.get_method_function(func).__name__
raise ValueError("Function %s is not a method of: %s" % (func, obj))
|
AttributeError
|
dataset/ETHPy150Open scrapy/scrapy/scrapy/utils/reqser.py/_find_method
|
7,885
|
def _get_method(obj, name):
name = str(name)
try:
return getattr(obj, name)
except __HOLE__:
raise ValueError("Method %r not found in: %s" % (name, obj))
|
AttributeError
|
dataset/ETHPy150Open scrapy/scrapy/scrapy/utils/reqser.py/_get_method
|
7,886
|
def _setup_server(self, config, core):
try:
network.Server(
self.hostname, self.port,
protocol=session.MpdSession,
protocol_kwargs={
'config': config,
'core': core,
'uri_map': self.uri_map,
},
max_connections=config['mpd']['max_connections'],
timeout=config['mpd']['connection_timeout'])
except __HOLE__ as error:
raise exceptions.FrontendError(
'MPD server startup failed: %s' %
encoding.locale_decode(error))
logger.info('MPD server running at [%s]:%s', self.hostname, self.port)
|
IOError
|
dataset/ETHPy150Open mopidy/mopidy/mopidy/mpd/actor.py/MpdFrontend._setup_server
|
7,887
|
def login(self, request, user_name, password):
try:
auth_model_class = import_string(settings.AUTH_USER_MODEL)
except (__HOLE__, AttributeError), e:
raise ImproperlyConfigured, \
'Failed to import %s: "%s".' % (settings.AUTH_USER_MODEL, e)
user = auth_model_class.get_by_user_name(user_name)
if user is None:
return False
if user.check_password(password):
return self.store_user(user)
return False
|
ImportError
|
dataset/ETHPy150Open IanLewis/kay/kay/auth/backends/datastore.py/DatastoreBackend.login
|
7,888
|
def test_login_or_logout(self, client, username=''):
from cookielib import Cookie
args = [None, None, '', None, None, '/', None, None, 86400, None, None,
None, None]
try:
auth_model_class = import_string(settings.AUTH_USER_MODEL)
except (ImportError, __HOLE__), e:
raise ImproperlyConfigured, \
'Failed to import %s: "%s".' % (settings.AUTH_USER_MODEL, e)
user = auth_model_class.get_by_user_name(username)
session_store = import_string(settings.SESSION_STORE)()
data = None
for cookie in client.cookie_jar:
if cookie.name == settings.COOKIE_NAME:
data = cookie.value
if data is None:
session = session_store.new()
else:
session = session_store.get(data)
if user:
session['_user'] = user.key()
elif session.has_key('_user'):
del session['_user']
session_store.save(session)
data = "\"%s\"" % session_store.get_data(session)
client.cookie_jar.set_cookie(Cookie(1, settings.COOKIE_NAME,
data,
*args))
|
AttributeError
|
dataset/ETHPy150Open IanLewis/kay/kay/auth/backends/datastore.py/DatastoreBackend.test_login_or_logout
|
7,889
|
def validate_environment(self):
try:
import tornado.web # noqa
except __HOLE__ as e:
raise exceptions.ExtensionError('tornado library not found', e)
|
ImportError
|
dataset/ETHPy150Open mopidy/mopidy/mopidy/http/__init__.py/Extension.validate_environment
|
7,890
|
def save(self,*args,**kwargs):
try:
self.full_clean()
except __HOLE__ as e:
raise e
super(Alias, self).save(*args, **kwargs)
|
ValidationError
|
dataset/ETHPy150Open kapsiry/sikteeri/services/models.py/Alias.save
|
7,891
|
def _format_external_gateway_info(router):
try:
return jsonutils.dumps(router['external_gateway_info'])
except (__HOLE__, KeyError):
return ''
|
TypeError
|
dataset/ETHPy150Open openstack/python-neutronclient/neutronclient/neutron/v2_0/router.py/_format_external_gateway_info
|
7,892
|
@query
def __contains__(self, key):
"""Tests whether the given ``key`` exists.
:param key: the key
:returns: ``True`` if the ``key`` exists or ``False``
:rtype: :class:`bool`
.. note::
It is directly mapped to Redis :redis:`HEXISTS` command.
"""
try:
encoded_key = self.key_type.encode(key)
except __HOLE__:
return False
exists = self.session.client.hexists(self.key, encoded_key)
return bool(exists)
|
TypeError
|
dataset/ETHPy150Open dahlia/sider/sider/hash.py/Hash.__contains__
|
7,893
|
def setdefault(self, key, default=None):
"""Sets the given ``default`` value to the ``key``
if it doesn't exist and then returns the current value
of the ``key``.
For example, the following code is::
val = hash.setdefault('key', 'set this if not exist')
equivalent to::
try:
val = hash['key']
except KeyError:
val = hash['key'] = 'set this if not exist'
except :meth:`setdefault()` method is an atomic operation.
:param key: the key to get or set
:param default: the value to be set if the ``key``
doesn't exist
:raises exceptions.TypeError:
when the given ``key`` is not acceptable by its
:attr:`key_type` or the given ``default`` value
is not acceptable by its :attr:`value_type`
.. note::
This method internally uses Redis :redis:`HSETNX`
command which is atomic.
"""
if self.session.transaction is not None:
self.session.mark_query()
try:
val = self[key]
except __HOLE__:
self.session.mark_manipulative([self.key])
self[key] = val = default
return val
encoded_key = self.key_type.encode(key)
encoded_val = self.value_type.encode(default)
result = [None]
def block(pipe):
ok = pipe.hsetnx(self.key, encoded_key, encoded_val)
if ok:
result[0] = default
else:
value = pipe.hget(self.key, encoded_key)
result[0] = self.value_type.decode(value)
pipe.multi()
self.session.client.transaction(block, self.key)
return result[0]
|
KeyError
|
dataset/ETHPy150Open dahlia/sider/sider/hash.py/Hash.setdefault
|
7,894
|
def main(argv=sys.argv):
if len(argv) != 2:
usage(argv)
config_uri = argv[1]
settings = get_appsettings(config_uri)
engine = create_engine('pyshop', settings, scoped=False)
config = Configurator(settings=settings)
config.end()
from pyshop.models import (User, Group,
Classifier, Package, Release, ReleaseFile)
session = DBSession()
try:
from IPython import embed
from IPython.config.loader import Config
cfg = Config()
cfg.InteractiveShellEmbed.confirm_exit = False
embed(config=cfg, banner1="Welcome to PyShop shell.")
except __HOLE__:
import code
code.interact("pyshop shell", local=locals())
|
ImportError
|
dataset/ETHPy150Open mardiros/pyshop/pyshop/bin/shell.py/main
|
7,895
|
def state(self, id=None):
if not id:
id = '.{0}'.format(_uuid())
# adds a leading dot to make use of stateconf's namespace feature.
try:
return self.get_all_decls()[id]
except __HOLE__:
self.get_all_decls()[id] = s = StateDeclaration(id)
self.decls.append(s)
return s
|
KeyError
|
dataset/ETHPy150Open saltstack/salt/salt/utils/pydsl.py/Sls.state
|
7,896
|
def __call__(self, check=True):
sls = Sls.get_render_stack()[-1]
if self._id in sls.get_all_decls():
last_func = sls.last_func()
if last_func and self._mods[-1]._func is not last_func:
raise PyDslError(
'Cannot run state({0}: {1}) that is required by a runtime '
'state({2}: {3}), at compile time.'.format(
self._mods[-1]._name, self._id,
last_func.mod, last_func.mod._state_id
)
)
sls.get_all_decls().pop(self._id)
sls.decls.remove(self)
self._mods[0]._func._remove_auto_require()
for m in self._mods:
try:
sls.funcs.remove(m._func)
except __HOLE__:
pass
result = HighState.get_active().state.functions['state.high'](
{self._id: self._repr()}
)
if not isinstance(result, dict):
# A list is an error
raise PyDslError(
'An error occurred while running highstate: {0}'.format(
'; '.join(result)
)
)
result = sorted(six.iteritems(result), key=lambda t: t[1]['__run_num__'])
if check:
for k, v in result:
if not v['result']:
import pprint
raise PyDslError(
'Failed executing low state at compile time:\n{0}'
.format(pprint.pformat({k: v}))
)
return result
|
ValueError
|
dataset/ETHPy150Open saltstack/salt/salt/utils/pydsl.py/StateDeclaration.__call__
|
7,897
|
def set_object_attribute(obj, key, value):
field_type = None
try:
field_type, _, _, _ = obj._meta.get_field_by_name(key)
except FieldDoesNotExist:
raise FieldDoesNotExist("Error in fixed field name in "
"get_fixed_fields. Field %s "
"doesn't exist." % key)
if isinstance(field_type, ForeignKey):
if not value:
# explicit None for empty strings
value = None
else:
model = field_type.related_field.model
try:
value = model.objects.get(name=value)
except model.DoesNotExist:
raise ValidationError({key: wrap_error(value)})
elif field_type.choices:
try:
value = get_value_from_choices(value, field_type.choices)
except __HOLE__:
pass # Delegate validation to the model
setattr(obj, key, value)
|
StopIteration
|
dataset/ETHPy150Open NeuroVault/NeuroVault/neurovault/apps/statmaps/image_metadata.py/set_object_attribute
|
7,898
|
def save_metadata(collection, metadata):
image_obj_list = []
image_obj_errors = defaultdict(list)
metadata_list = convert_to_list(metadata)
metadata_dict = list_to_dict(metadata_list,
key=lambda x: x['Filename'])
image_obj_list = collection.basecollectionitem_set.instance_of(Image).all()
image_obj_dict = list_to_dict(image_obj_list,
key=file_basename)
pairs = pair_data_and_objects(metadata_dict,
image_obj_dict)
for data, image_obj in pairs:
try:
set_object_data(image_obj, data)
image_obj.full_clean()
except __HOLE__ as e:
image_obj_errors[file_basename(image_obj)].append(
prepare_messages(image_obj, e.message_dict))
if image_obj_errors:
raise MetadataGridValidationError(image_obj_errors)
for image_obj in image_obj_list:
image_obj.is_valid = True
image_obj.save()
return metadata_list
|
ValidationError
|
dataset/ETHPy150Open NeuroVault/NeuroVault/neurovault/apps/statmaps/image_metadata.py/save_metadata
|
7,899
|
def handle_post_metadata(request, collection, success_message):
metadata = json.loads(request.body)
try:
metadata_list = save_metadata(collection, metadata)
except __HOLE__ as e:
return error_response(e)
messages.success(request,
success_message,
extra_tags='alert-success')
return {'data': metadata_list, 'status': 200}
|
ValidationError
|
dataset/ETHPy150Open NeuroVault/NeuroVault/neurovault/apps/statmaps/image_metadata.py/handle_post_metadata
|