| Unnamed: 0 (int64, 0-10k) | function (string, 79-138k chars) | label (string, 20 classes) | info (string, 42-261 chars) |
|---|---|---|---|
800
|
def __call__(self, query, view):
for arg_name, filter_field in self._filter_fields.items():
try:
arg_value = flask.request.args[arg_name]
except __HOLE__:
continue
try:
query = query.filter(filter_field(view, arg_value))
except ApiError as e:
raise e.update({'source': {'parameter': arg_name}})
return query
|
KeyError
|
dataset/ETHPy150Open 4Catalyzer/flask-resty/flask_resty/filtering.py/Filtering.__call__
|
801
|
def itercomplement(ta, tb, strict):
# coerce rows to tuples to ensure hashable and comparable
ita = (tuple(row) for row in iter(ta))
itb = (tuple(row) for row in iter(tb))
ahdr = tuple(next(ita))
next(itb) # ignore b fields
yield ahdr
try:
a = next(ita)
except StopIteration:
pass
else:
try:
b = next(itb)
except StopIteration:
yield a
for row in ita:
yield row
else:
# we want the elements in a that are not in b
while True:
if b is None or Comparable(a) < Comparable(b):
yield a
try:
a = next(ita)
except __HOLE__:
break
elif a == b:
try:
a = next(ita)
except StopIteration:
break
if not strict:
try:
b = next(itb)
except StopIteration:
b = None
else:
try:
b = next(itb)
except StopIteration:
b = None
|
StopIteration
|
dataset/ETHPy150Open alimanfoo/petl/petl/transform/setops.py/itercomplement
|
802
|
def iterintersection(a, b):
ita = iter(a)
itb = iter(b)
ahdr = next(ita)
next(itb) # ignore b header
yield tuple(ahdr)
try:
a = tuple(next(ita))
b = tuple(next(itb))
while True:
if Comparable(a) < Comparable(b):
a = tuple(next(ita))
elif a == b:
yield a
a = tuple(next(ita))
b = tuple(next(itb))
else:
b = tuple(next(itb))
except __HOLE__:
pass
|
StopIteration
|
dataset/ETHPy150Open alimanfoo/petl/petl/transform/setops.py/iterintersection
|
803
|
def main(options, args):
logger = log.get_logger("example2", options=options)
if options.toolkit is None:
logger.error("Please choose a GUI toolkit with -t option")
# decide our toolkit, then import
ginga_toolkit.use(options.toolkit)
viewer = FitsViewer(logger)
viewer.top.resize(700, 540)
if len(args) > 0:
viewer.load_file(viewer.viewer1, args[0])
viewer.top.show()
viewer.top.raise_()
try:
viewer.mainloop()
except __HOLE__:
print("Terminating viewer...")
if viewer.top is not None:
viewer.top.close()
|
KeyboardInterrupt
|
dataset/ETHPy150Open ejeschke/ginga/ginga/examples/gw/shared_canvas.py/main
|
804
|
def _to_node(self, host):
try:
password = \
host['operatingSystem']['passwords'][0]['password']
except (__HOLE__, KeyError):
password = None
hourlyRecurringFee = host.get('billingItem', {}).get(
'hourlyRecurringFee', 0)
recurringFee = host.get('billingItem', {}).get('recurringFee', 0)
recurringMonths = host.get('billingItem', {}).get('recurringMonths', 0)
createDate = host.get('createDate', None)
# When machine is launching it gets state halted
# we change this to pending
state = NODE_STATE_MAP.get(host['powerState']['keyName'],
NodeState.UNKNOWN)
if not password and state == NodeState.UNKNOWN:
state = NODE_STATE_MAP['INITIATING']
public_ips = []
private_ips = []
if 'primaryIpAddress' in host:
public_ips.append(host['primaryIpAddress'])
if 'primaryBackendIpAddress' in host:
private_ips.append(host['primaryBackendIpAddress'])
image = host.get('operatingSystem', {}).get('softwareLicense', {}) \
.get('softwareDescription', {}) \
.get('longDescription', None)
return Node(
id=host['id'],
name=host['fullyQualifiedDomainName'],
state=state,
public_ips=public_ips,
private_ips=private_ips,
driver=self,
extra={
'hostname': host['hostname'],
'fullyQualifiedDomainName': host['fullyQualifiedDomainName'],
'password': password,
'maxCpu': host.get('maxCpu', None),
'datacenter': host.get('datacenter', {}).get('longName', None),
'maxMemory': host.get('maxMemory', None),
'image': image,
'hourlyRecurringFee': hourlyRecurringFee,
'recurringFee': recurringFee,
'recurringMonths': recurringMonths,
'created': createDate,
}
)
|
IndexError
|
dataset/ETHPy150Open apache/libcloud/libcloud/compute/drivers/softlayer.py/SoftLayerNodeDriver._to_node
|
805
|
def _matches(self, item):
try:
iter(item)
return True
except __HOLE__:
return False
|
TypeError
|
dataset/ETHPy150Open drslump/pyshould/pyshould/matchers.py/IsIterable._matches
|
806
|
def _matches(self, item):
# support passing a context manager result
if isinstance(item, ContextManagerResult):
# Python <2.7 may provide a non exception value
if isinstance(item.exc_value, Exception):
self.thrown = item.exc_value
elif item.exc_type is not None:
try:
self.thrown = item.exc_type(*item.exc_value)
except __HOLE__:
self.thrown = item.exc_type(item.exc_value)
else:
return False
else:
try:
# support passing arguments by feeding a tuple instead of a callable
if not callable(item) and getattr(item, '__getitem__', False):
item[0](*item[1:])
else:
item()
return False
except:
# This should capture any kind of raised value
import sys
self.thrown = sys.exc_info()[1]
# Fail if we have defined an expected error type
if self.expected and not isinstance(self.thrown, self.expected):
return False
# Apply message filters
if self.message:
return self.message == str(self.thrown)
elif self.regex:
return re.match(self.regex, str(self.thrown))
return True
|
TypeError
|
dataset/ETHPy150Open drslump/pyshould/pyshould/matchers.py/RaisesError._matches
|
807
|
def _matches(self, item):
# support passing arguments by feeding a tuple instead of a callable
if not callable(item) and getattr(item, '__getitem__', False):
func = item[0]
params = item[1:]
else:
func = item
params = []
try:
before = self.watcher()
except TypeError:
before = self.watcher
# keep a snapshot of the value in case it's mutable
from copy import deepcopy
self.before = deepcopy(before)
func(*params)
try:
self.after = self.watcher()
except __HOLE__:
self.after = self.watcher
try:
hc.assert_that(self.after, hc.equal_to(self.before))
self.changed = False
except AssertionError:
self.changed = True
return self.changed
|
TypeError
|
dataset/ETHPy150Open drslump/pyshould/pyshould/matchers.py/Changes._matches
|
808
|
def _matches(self, item):
self.error = None
try:
result = self.callback(item)
# Returning an expectation assumes it's correct (no failure raised)
from .expectation import Expectation
return isinstance(result, Expectation) or bool(result)
except __HOLE__:
# Just forward assertion failures
raise
except Exception as ex:
self.error = str(ex)
return False
|
AssertionError
|
dataset/ETHPy150Open drslump/pyshould/pyshould/matchers.py/Callback._matches
|
809
|
def _matches(self, sequence):
self.order_seq = None
try:
seq = list(sequence)
if self.matcher_all.matches(seq):
self.order_seq = [i for i in seq if self.matcher_any.matches([i])]
return self.matcher_order.matches(self.order_seq)
else:
return False
except __HOLE__:
return False
|
TypeError
|
dataset/ETHPy150Open drslump/pyshould/pyshould/matchers.py/IsSequenceContainingEveryInOrderSparse._matches
|
810
|
def module_exists(module_name):
try:
__import__(module_name)
except __HOLE__:
return False
else:
return True
|
ImportError
|
dataset/ETHPy150Open rzeka/QLDS-Manager/qldsmanager/manager.py/module_exists
|
811
|
def is_checked_out(context):
try:
return context['object'].is_checked_out()
except __HOLE__:
# Might not have permissions
return False
|
KeyError
|
dataset/ETHPy150Open mayan-edms/mayan-edms/mayan/apps/checkouts/links.py/is_checked_out
|
812
|
def is_not_checked_out(context):
try:
return not context['object'].is_checked_out()
except __HOLE__:
# Might not have permissions
return True
|
KeyError
|
dataset/ETHPy150Open mayan-edms/mayan-edms/mayan/apps/checkouts/links.py/is_not_checked_out
|
813
|
@functions.CallOnce
def SetupLogger():
"""Configure logging for OpenHTF."""
record_logger = logging.getLogger(RECORD_LOGGER)
record_logger.propagate = False
record_logger.setLevel(logging.DEBUG)
record_logger.addHandler(logging.StreamHandler(stream=sys.stdout))
logger = logging.getLogger(LOGGER_PREFIX)
logger.propagate = False
logger.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
if LOGFILE:
try:
cur_time = str(util.TimeMillis())
file_handler = logging.FileHandler('%s.%s' % (LOGFILE, cur_time))
file_handler.setFormatter(formatter)
file_handler.setLevel(DEFAULT_LOGFILE_LEVEL.upper())
file_handler.addFilter(MAC_FILTER)
logger.addHandler(file_handler)
except __HOLE__ as exception:
print ('Failed to set up log file due to error: %s. '
'Continuing anyway.' % exception)
if not QUIET:
console_handler = logging.StreamHandler(stream=sys.stderr)
console_handler.setFormatter(formatter)
console_handler.setLevel(DEFAULT_LEVEL.upper())
console_handler.addFilter(MAC_FILTER)
logger.addHandler(console_handler)
|
IOError
|
dataset/ETHPy150Open google/openhtf/openhtf/util/logs.py/SetupLogger
|
814
|
def pick(self): # <3>
try:
return self._items.pop()
except __HOLE__:
raise LookupError('pick from empty BingoCage') # <4>
|
IndexError
|
dataset/ETHPy150Open fluentpython/example-code/05-1class-func/bingocall.py/BingoCage.pick
|
815
|
def autocomplete():
"""Command and option completion for the main option parser (and options)
and its subcommands (and options).
Enable by sourcing one of the completion shell scripts (bash or zsh).
"""
# Don't complete if user hasn't sourced bash_completion file.
if 'PIP_AUTO_COMPLETE' not in os.environ:
return
cwords = os.environ['COMP_WORDS'].split()[1:]
cword = int(os.environ['COMP_CWORD'])
try:
current = cwords[cword - 1]
except IndexError:
current = ''
subcommands = [cmd for cmd, summary in get_summaries()]
options = []
# subcommand
try:
subcommand_name = [w for w in cwords if w in subcommands][0]
except __HOLE__:
subcommand_name = None
parser = create_main_parser()
# subcommand options
if subcommand_name:
# special case: 'help' subcommand has no options
if subcommand_name == 'help':
sys.exit(1)
# special case: list locally installed dists for uninstall command
if subcommand_name == 'uninstall' and not current.startswith('-'):
installed = []
lc = current.lower()
for dist in get_installed_distributions(local_only=True):
if dist.key.startswith(lc) and dist.key not in cwords[1:]:
installed.append(dist.key)
# if there are no dists installed, fall back to option completion
if installed:
for dist in installed:
print(dist)
sys.exit(1)
subcommand = commands_dict[subcommand_name]()
options += [(opt.get_opt_string(), opt.nargs)
for opt in subcommand.parser.option_list_all
if opt.help != optparse.SUPPRESS_HELP]
# filter out previously specified options from available options
prev_opts = [x.split('=')[0] for x in cwords[1:cword - 1]]
options = [(x, v) for (x, v) in options if x not in prev_opts]
# filter options by current input
options = [(k, v) for k, v in options if k.startswith(current)]
for option in options:
opt_label = option[0]
# append '=' to options which require args
if option[1]:
opt_label += '='
print(opt_label)
else:
# show main parser options only when necessary
if current.startswith('-') or current.startswith('--'):
opts = [i.option_list for i in parser.option_groups]
opts.append(parser.option_list)
opts = (o for it in opts for o in it)
subcommands += [i.get_opt_string() for i in opts
if i.help != optparse.SUPPRESS_HELP]
print(' '.join([x for x in subcommands if x.startswith(current)]))
sys.exit(1)
|
IndexError
|
dataset/ETHPy150Open anzev/hedwig/build/pip/pip/__init__.py/autocomplete
|
816
|
def _open_usb_handle(**kwargs):
"""Open a UsbHandle subclass, based on configuration.
If configuration 'remote_usb' is set, use it to connect to remote usb,
otherwise attempt to connect locally.'remote_usb' is set to usb type,
EtherSync or other.
Example of Cambrionix unit in config:
remote_usb: ethersync
ethersync:
mac_addr: 78:a5:04:ca:91:66
plug_port: 5
Args:
**kwargs: Arguments to pass to respective handle's Open() method.
Returns:
Instance of UsbHandle.
"""
serial = None
remote_usb = conf.remote_usb
if remote_usb:
if remote_usb.strip() == 'ethersync':
device = conf.ethersync
try:
mac_addr = device['mac_addr']
port = device['plug_port']
except (__HOLE__,TypeError):
raise ValueError('Ethersync needs mac_addr and plug_port to be set')
else:
ethersync = cambrionix.EtherSync(mac_addr)
serial = ethersync.GetUSBSerial(port)
return local_usb.LibUsbHandle.Open(serial_number=serial, **kwargs)
# pylint: disable=too-few-public-methods
|
KeyError
|
dataset/ETHPy150Open google/openhtf/openhtf/plugs/usb/__init__.py/_open_usb_handle
|
817
|
def __getitem__(self, index):
"""Support slices."""
try:
return deque.__getitem__(self, index)
except __HOLE__:
return type(self)(islice(self, index.start, index.stop, index.step))
|
TypeError
|
dataset/ETHPy150Open klen/graphite-beacon/graphite_beacon/alerts.py/sliceable_deque.__getitem__
|
818
|
def __call__(self, value):
try:
super(URLValidator, self).__call__(value)
except __HOLE__ as e:
# Trivial case failed. Try for possible IDN domain
if value:
value = force_text(value)
scheme, netloc, path, query, fragment = urlsplit(value)
try:
netloc = netloc.encode('idna').decode('ascii') # IDN -> ACE
except UnicodeError: # invalid domain part
raise e
url = urlunsplit((scheme, netloc, path, query, fragment))
super(URLValidator, self).__call__(url)
else:
raise
else:
url = value
|
ValidationError
|
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.5/django/core/validators.py/URLValidator.__call__
|
819
|
def validate_integer(value):
try:
int(value)
except (__HOLE__, TypeError):
raise ValidationError('')
|
ValueError
|
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.5/django/core/validators.py/validate_integer
|
820
|
def __call__(self, value):
try:
super(EmailValidator, self).__call__(value)
except __HOLE__ as e:
# Trivial case failed. Try for possible IDN domain-part
if value and '@' in value:
parts = value.split('@')
try:
parts[-1] = parts[-1].encode('idna').decode('ascii')
except UnicodeError:
raise e
super(EmailValidator, self).__call__('@'.join(parts))
else:
raise
|
ValidationError
|
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.5/django/core/validators.py/EmailValidator.__call__
|
821
|
def validate_ipv46_address(value):
try:
validate_ipv4_address(value)
except __HOLE__:
try:
validate_ipv6_address(value)
except ValidationError:
raise ValidationError(_('Enter a valid IPv4 or IPv6 address.'), code='invalid')
|
ValidationError
|
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.5/django/core/validators.py/validate_ipv46_address
|
822
|
def ip_address_validators(protocol, unpack_ipv4):
"""
Depending on the given parameters returns the appropriate validators for
the GenericIPAddressField.
This code is here, because it is exactly the same for the model and the form field.
"""
if protocol != 'both' and unpack_ipv4:
raise ValueError(
"You can only use `unpack_ipv4` if `protocol` is set to 'both'")
try:
return ip_address_validator_map[protocol.lower()]
except __HOLE__:
raise ValueError("The protocol '%s' is unknown. Supported: %s"
% (protocol, list(ip_address_validator_map)))
|
KeyError
|
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.5/django/core/validators.py/ip_address_validators
|
823
|
def _normalize_unicode(self, s):
if not isinstance(s, unicode):
temp = s # this is to help with debugging
try:
s = unicode(temp, self.fs_encoding)
except __HOLE__:
# Not all filesystems support encoding input that
# we throw. We assume UTF-8 is a sufficiently powerful
# "catch-all" to decode things that don't match.
s = unicode(temp, 'utf-8')
return s
|
UnicodeDecodeError
|
dataset/ETHPy150Open memsql/memsql-loader/memsql_loader/vendor/glob2/impl.py/Globber._normalize_unicode
|
824
|
def _load_module(path):
"""Code to load create user module. Copied off django-browserid."""
i = path.rfind('.')
module, attr = path[:i], path[i + 1:]
try:
mod = import_module(module)
except ImportError:
raise ImproperlyConfigured('Error importing CAN_LOGIN_AS'
' function.')
except __HOLE__:
raise ImproperlyConfigured('Error importing CAN_LOGIN_AS'
' function. Is CAN_LOGIN_AS a'
' string?')
try:
can_login_as = getattr(mod, attr)
except AttributeError:
raise ImproperlyConfigured('Module {0} does not define a {1} '
'function.'.format(module, attr))
return can_login_as
|
ValueError
|
dataset/ETHPy150Open stochastic-technologies/django-loginas/loginas/views.py/_load_module
|
825
|
def SetRenderWindow(self,w):
""" SetRenderWindow(w: vtkRenderWindow)
Set a new render window to QVTKViewWidget and initialize the
interactor as well
"""
if w == self.mRenWin:
return
if self.mRenWin:
if self.mRenWin.GetMapped():
self.mRenWin.Finalize()
self.mRenWin = w
if self.mRenWin:
self.mRenWin.Register(None)
if system.systemType=='Linux':
try:
vp = '_%s_void_p' % (hex(int(QtGui.QX11Info.display()))[2:])
except __HOLE__:
#This was change for PyQt4.2
if isinstance(QtGui.QX11Info.display(),QtGui.Display):
display = sip.unwrapinstance(QtGui.QX11Info.display())
vp = '_%s_void_p' % (hex(display)[2:])
self.mRenWin.SetDisplayId(vp)
if not self.mRenWin.GetMapped():
self.mRenWin.GetInteractor().Initialize()
system.XDestroyWindow(self.mRenWin.GetGenericDisplayId(),
self.mRenWin.GetGenericWindowId())
self.mRenWin.Finalize()
self.mRenWin.SetWindowInfo(str(int(self.winId())))
else:
self.mRenWin.SetWindowInfo(str(int(self.winId())))
if self.isVisible():
self.mRenWin.Start()
|
TypeError
|
dataset/ETHPy150Open VisTrails/VisTrails/contrib/titan/vtkviewcell.py/QVTKViewWidget.SetRenderWindow
|
826
|
def import_module(self, name):
try:
modname = 'kivy.modules.{0}'.format(name)
module = __import__(name=modname)
module = sys.modules[modname]
except __HOLE__:
try:
module = __import__(name=name)
module = sys.modules[name]
except ImportError:
Logger.exception('Modules: unable to import <%s>' % name)
raise
# basic check on module
if not hasattr(module, 'start'):
Logger.warning('Modules: Module <%s> missing start() function' %
name)
return
if not hasattr(module, 'stop'):
err = 'Modules: Module <%s> missing stop() function' % name
Logger.warning(err)
return
self.mods[name]['module'] = module
|
ImportError
|
dataset/ETHPy150Open kivy/kivy/kivy/modules/__init__.py/ModuleBase.import_module
|
827
|
def _configure_module(self, name):
if 'module' not in self.mods[name]:
try:
self.import_module(name)
except __HOLE__:
return
# convert configuration like:
# -m mjpegserver:port=8080,fps=8
# and pass it in context.config token
config = dict()
args = Config.get('modules', name)
if args != '':
values = Config.get('modules', name).split(',')
for value in values:
x = value.split('=', 1)
if len(x) == 1:
config[x[0]] = True
else:
config[x[0]] = x[1]
self.mods[name]['context'].config = config
# call configure if module have one
if hasattr(self.mods[name]['module'], 'configure'):
self.mods[name]['module'].configure(config)
|
ImportError
|
dataset/ETHPy150Open kivy/kivy/kivy/modules/__init__.py/ModuleBase._configure_module
|
828
|
def hashret(self):
if self.nh == 58 and isinstance(self.payload, _ICMPv6):
if self.payload.type < 128:
return self.payload.payload.hashret()
elif (self.payload.type in [133,134,135,136,144,145]):
return struct.pack("B", self.nh)+self.payload.hashret()
nh = self.nh
sd = self.dst
ss = self.src
if self.nh == 43 and isinstance(self.payload, IPv6ExtHdrRouting):
# With routing header, the destination is the last
# address of the IPv6 list if segleft > 0
nh = self.payload.nh
try:
sd = self.addresses[-1]
except __HOLE__:
sd = '::1'
# TODO: big bug with ICMPv6 error messages as the destination of IPerror6
# could be anything from the original list ...
if 1:
sd = inet_pton(socket.AF_INET6, sd)
for a in self.addresses:
a = inet_pton(socket.AF_INET6, a)
sd = strxor(sd, a)
sd = inet_ntop(socket.AF_INET6, sd)
if self.nh == 44 and isinstance(self.payload, IPv6ExtHdrFragment):
nh = self.payload.nh
if self.nh == 0 and isinstance(self.payload, IPv6ExtHdrHopByHop):
nh = self.payload.nh
if self.nh == 60 and isinstance(self.payload, IPv6ExtHdrDestOpt):
foundhao = None
for o in self.payload.options:
if isinstance(o, HAO):
foundhao = o
if foundhao:
nh = self.payload.nh # XXX what if another extension follows ?
ss = foundhao.hoa
if conf.checkIPsrc and conf.checkIPaddr:
sd = inet_pton(socket.AF_INET6, sd)
ss = inet_pton(socket.AF_INET6, self.src)
return struct.pack("B",nh)+self.payload.hashret()
else:
return struct.pack("B", nh)+self.payload.hashret()
|
IndexError
|
dataset/ETHPy150Open phaethon/scapy/scapy/layers/inet6.py/IPv6.hashret
|
829
|
def handle(self, request, data):
try:
rules = json.loads(data["rules"])
new_mapping = api.keystone.mapping_create(
request,
data["id"],
rules=rules)
messages.success(request,
_("Mapping created successfully."))
return new_mapping
except exceptions.Conflict:
msg = _('Mapping ID "%s" is already used.') % data["id"]
messages.error(request, msg)
except (__HOLE__, ValueError):
msg = _("Unable to create mapping. Rules has malformed JSON data.")
messages.error(request, msg)
except Exception:
exceptions.handle(request,
_("Unable to create mapping."))
return False
|
TypeError
|
dataset/ETHPy150Open openstack/horizon/openstack_dashboard/dashboards/identity/mappings/forms.py/CreateMappingForm.handle
|
830
|
def handle(self, request, data):
try:
rules = json.loads(data["rules"])
api.keystone.mapping_update(
request,
data['id'],
rules=rules)
messages.success(request,
_("Mapping updated successfully."))
return True
except (TypeError, __HOLE__):
msg = _("Unable to update mapping. Rules has malformed JSON data.")
messages.error(request, msg)
except Exception:
exceptions.handle(request,
_('Unable to update mapping.'))
|
ValueError
|
dataset/ETHPy150Open openstack/horizon/openstack_dashboard/dashboards/identity/mappings/forms.py/UpdateMappingForm.handle
|
831
|
def __init__(self, to, on_delete=None, related_name=None, related_query_name=None,
limit_choices_to=None, parent_link=False, to_field=None,
db_constraint=True, **kwargs):
try:
to._meta.model_name
except __HOLE__:
assert isinstance(to, six.string_types), (
"%s(%r) is invalid. First parameter to ForeignKey must be "
"either a model, a model name, or the string %r" % (
self.__class__.__name__, to,
RECURSIVE_RELATIONSHIP_CONSTANT,
)
)
else:
# For backwards compatibility purposes, we need to *try* and set
# the to_field during FK construction. It won't be guaranteed to
# be correct until contribute_to_class is called. Refs #12190.
to_field = to_field or (to._meta.pk and to._meta.pk.name)
if on_delete is None:
warnings.warn(
"on_delete will be a required arg for %s in Django 2.0. Set "
"it to models.CASCADE on models and in existing migrations "
"if you want to maintain the current default behavior. "
"See https://docs.djangoproject.com/en/%s/ref/models/fields/"
"#django.db.models.ForeignKey.on_delete" % (
self.__class__.__name__,
get_docs_version(),
),
RemovedInDjango20Warning, 2)
on_delete = CASCADE
elif not callable(on_delete):
warnings.warn(
"The signature for {0} will change in Django 2.0. "
"Pass to_field='{1}' as a kwarg instead of as an arg.".format(
self.__class__.__name__,
on_delete,
),
RemovedInDjango20Warning, 2)
on_delete, to_field = to_field, on_delete
kwargs['rel'] = self.rel_class(
self, to, to_field,
related_name=related_name,
related_query_name=related_query_name,
limit_choices_to=limit_choices_to,
parent_link=parent_link,
on_delete=on_delete,
)
kwargs['db_index'] = kwargs.get('db_index', True)
super(ForeignKey, self).__init__(
to, on_delete, from_fields=['self'], to_fields=[to_field], **kwargs)
self.db_constraint = db_constraint
|
AttributeError
|
dataset/ETHPy150Open django/django/django/db/models/fields/related.py/ForeignKey.__init__
|
832
|
def __init__(self, to, related_name=None, related_query_name=None,
limit_choices_to=None, symmetrical=None, through=None,
through_fields=None, db_constraint=True, db_table=None,
swappable=True, **kwargs):
try:
to._meta
except __HOLE__:
assert isinstance(to, six.string_types), (
"%s(%r) is invalid. First parameter to ManyToManyField must be "
"either a model, a model name, or the string %r" %
(self.__class__.__name__, to, RECURSIVE_RELATIONSHIP_CONSTANT)
)
# Class names must be ASCII in Python 2.x, so we forcibly coerce it
# here to break early if there's a problem.
to = str(to)
if symmetrical is None:
symmetrical = (to == RECURSIVE_RELATIONSHIP_CONSTANT)
if through is not None:
assert db_table is None, (
"Cannot specify a db_table if an intermediary model is used."
)
kwargs['rel'] = self.rel_class(
self, to,
related_name=related_name,
related_query_name=related_query_name,
limit_choices_to=limit_choices_to,
symmetrical=symmetrical,
through=through,
through_fields=through_fields,
db_constraint=db_constraint,
)
self.has_null_arg = 'null' in kwargs
super(ManyToManyField, self).__init__(**kwargs)
self.db_table = db_table
self.swappable = swappable
|
AttributeError
|
dataset/ETHPy150Open django/django/django/db/models/fields/related.py/ManyToManyField.__init__
|
833
|
def in_debug(self, state):
try:
return state.protocol.properties.debug
except __HOLE__:
return False
|
AttributeError
|
dataset/ETHPy150Open Evgenus/protocyt/protocyt/classes.py/Compound.in_debug
|
834
|
def setCopyableState(self, state):
self.__dict__.update(state)
self._activationListeners = []
try:
dataFile = file(self.getFileName(), "rb")
data = dataFile.read()
dataFile.close()
except __HOLE__:
recent = 0
else:
newself = jelly.unjelly(banana.decode(data))
recent = (newself.timestamp == self.timestamp)
if recent:
self._cbGotUpdate(newself.__dict__)
self._wasCleanWhenLoaded = 1
else:
self.remote.callRemote('getStateToPublish').addCallbacks(self._cbGotUpdate)
|
IOError
|
dataset/ETHPy150Open kuri65536/python-for-android/python-modules/twisted/twisted/spread/publish.py/RemotePublished.setCopyableState
|
835
|
def get_database(dialect):
"""
Returns requested database package with modules that provide additional functionality.
:param string dialect: (required). Database dialect name.
"""
aliases = {
('mysql',): 'mysql',
('sqlite',): 'sqlite',
('pgsql', 'postgres', 'postgresql'): 'postgresql',
}
dialect = next((aliases[alias] for alias in aliases if dialect in alias), dialect)
names = [str(mod[1]) for mod in pkgutil.iter_modules([os.path.join(os.path.dirname(__file__), dialect)])]
try:
return __import__('{0}'.format(dialect), globals(), level=1, fromlist=names)
except __HOLE__:
raise DatabaseError(
current=dialect,
allowed=[name for _, name, is_pkg in pkgutil.iter_modules([os.path.dirname(__file__)]) if is_pkg])
|
ImportError
|
dataset/ETHPy150Open maxtepkeev/architect/architect/databases/utilities.py/get_database
|
836
|
def clean(self):
super(FirstFieldRequiredFormSet, self).clean()
count = 0
for form in self.forms:
try:
if form.cleaned_data and not form.cleaned_data.get('DELETE', False):
count += 1
break
except __HOLE__:
pass
if count < 1:
raise forms.ValidationError(_('Please fill in at least one form'))
|
AttributeError
|
dataset/ETHPy150Open scieloorg/scielo-manager/scielomanager/journalmanager/forms.py/FirstFieldRequiredFormSet.clean
|
837
|
def main(config):
# load ContextCreators from config file, run their input functions, and pass the result into the initialization function
# init() all context creators specified by the user with their arguments
# import them according to their fully-specified class names in the config file
# it's up to the user to specify context creators which extract both negative and positive examples (if that's what they want)
# Chris - working - we want to hit every token
interesting_tokens = experiment_utils.import_and_call_function(config['interesting_tokens'])
print "INTERESTING TOKENS: ", interesting_tokens
logger.info('The number of interesting tokens is: ' + str(len(interesting_tokens)))
workers = config['workers']
# Note: context creators currently create their own interesting tokens internally (interesting tokens controls the index of the context creator)
logger.info('building the context creators...')
train_context_creators = experiment_utils.build_objects(config['context_creators'])
# get the contexts for all of our interesting words (may be +,- or, multi-class)
logger.info('mapping the training contexts over the interesting tokens in train...')
train_contexts = experiment_utils.map_contexts(interesting_tokens, train_context_creators, workers=workers)
# load and parse the test data
logger.info('mapping the training contexts over the interesting tokens in test...')
test_context_creator = experiment_utils.build_objects(config['testing'])
test_contexts = experiment_utils.map_contexts(interesting_tokens, [test_context_creator])
min_total = config['filters']['min_total']
# filter token contexts based on the user-specified filter criteria
logger.info('filtering the contexts by the total number of available instances...')
train_contexts = experiment_utils.filter_contexts(train_contexts, min_total=min_total)
test_contexts = experiment_utils.filter_contexts(test_contexts, min_total=min_total)
# make sure the test_context and train_context keys are in sync
experiment_utils.sync_keys(train_contexts, test_contexts)
# test_contexts = filter_contexts(test_contexts, min_total=min_total)
assert set(test_contexts.keys()) == set(train_contexts.keys())
# extract the 'tag' attribute into the y-value for classification
# tags may need to be converted to be consistent with the training data
wmt_binary_classes = {u'BAD': 0, u'OK': 1}
train_context_tags = experiment_utils.tags_from_contexts(train_contexts)
train_context_tags = {k: np.array([wmt_binary_classes[v] for v in val]) for k, val in train_context_tags.items()}
test_contexts = experiment_utils.convert_tagset(wmt_binary_classes, test_contexts)
test_tags_actual = experiment_utils.tags_from_contexts(test_contexts)
# all of the feature extraction should be parallelizable
# note that a feature extractor MUST be able to parse the context exchange format, or it should throw an error:
# { 'token': <token>, index: <idx>, 'source': [<source toks>]', 'target': [<target toks>], 'tag': <tag>}
feature_extractors = experiment_utils.build_feature_extractors(config['feature_extractors'])
logger.info('mapping the feature extractors over the contexts for test...')
test_context_features = experiment_utils.token_contexts_to_features_categorical(test_contexts, feature_extractors, workers=workers)
logger.info('mapping the feature extractors over the contexts for train...')
train_context_features = experiment_utils.token_contexts_to_features_categorical(train_contexts, feature_extractors, workers=workers)
# flatten so that we can properly binarize the features
all_values = experiment_utils.flatten(test_context_features.values())
all_values.extend(experiment_utils.flatten(train_context_features.values()))
binarizers = experiment_utils.fit_binarizers(all_values)
test_context_features = {k: [experiment_utils.binarize(v, binarizers) for v in val] for k, val in test_context_features.items()}
train_context_features = {k: [experiment_utils.binarize(v, binarizers) for v in val] for k, val in train_context_features.items()}
# BEGIN LEARNING
classifier_type = experiment_utils.import_class(config['learning']['classifier']['module'])
# train the classifier for each token
classifier_map = learning_utils.token_classifiers(train_context_features, train_context_tags, classifier_type)
# classify the test instances
# TODO: output a file in WMT format
# WORKING - dump the output in WMT format
logger.info('classifying the test instances')
test_predictions = {}
for key, features in test_context_features.iteritems():
try:
classifier = classifier_map[key]
predictions = classifier.predict(features)
test_predictions[key] = predictions
except __HOLE__ as e:
print(key + " - is NOT in the classifier map")
raise
#### put the rest of the code into a separate 'evaluate' function that reads the WMT files
# create the performance report for each word in the test data that we had a classifier for
# TODO: Working - evaluate based on the format
f1_map = {}
for token, predicted in test_predictions.iteritems():
logger.info("Evaluating results for token = " + token)
actual = test_tags_actual[token]
print 'Actual: ', actual
print 'Predicted: ', predicted
logger.info("\ttotal instances: " + str(len(predicted)))
f1_map[token] = weighted_fmeasure(actual, predicted)
logger.info('Printing the map of f1 scores by token: ')
print(f1_map)
|
KeyError
|
dataset/ETHPy150Open qe-team/marmot/examples/word_level_quality_estimation/wmt_word_level_experiment.py/main
|
838
|
def load_backend(backend_name):
"""
Return a database backend's "base" module given a fully qualified database
backend name, or raise an error if it doesn't exist.
"""
# This backend was renamed in Django 1.9.
if backend_name == 'django.db.backends.postgresql_psycopg2':
backend_name = 'django.db.backends.postgresql'
try:
return import_module('%s.base' % backend_name)
except __HOLE__ as e_user:
# The database backend wasn't found. Display a helpful error message
# listing all possible (built-in) database backends.
backend_dir = os.path.join(os.path.dirname(upath(__file__)), 'backends')
try:
builtin_backends = [
name for _, name, ispkg in pkgutil.iter_modules([npath(backend_dir)])
if ispkg and name not in {'base', 'dummy', 'postgresql_psycopg2'}
]
except EnvironmentError:
builtin_backends = []
if backend_name not in ['django.db.backends.%s' % b for b in
builtin_backends]:
backend_reprs = map(repr, sorted(builtin_backends))
error_msg = ("%r isn't an available database backend.\n"
"Try using 'django.db.backends.XXX', where XXX "
"is one of:\n %s\nError was: %s" %
(backend_name, ", ".join(backend_reprs), e_user))
raise ImproperlyConfigured(error_msg)
else:
# If there's some other error, this must be an error in Django
raise
|
ImportError
|
dataset/ETHPy150Open django/django/django/db/utils.py/load_backend
|
839
|
def ensure_defaults(self, alias):
"""
Puts the defaults into the settings dictionary for a given connection
where no settings is provided.
"""
try:
conn = self.databases[alias]
except __HOLE__:
raise ConnectionDoesNotExist("The connection %s doesn't exist" % alias)
conn.setdefault('ATOMIC_REQUESTS', False)
conn.setdefault('AUTOCOMMIT', True)
conn.setdefault('ENGINE', 'django.db.backends.dummy')
if conn['ENGINE'] == 'django.db.backends.' or not conn['ENGINE']:
conn['ENGINE'] = 'django.db.backends.dummy'
conn.setdefault('CONN_MAX_AGE', 0)
conn.setdefault('OPTIONS', {})
conn.setdefault('TIME_ZONE', None)
for setting in ['NAME', 'USER', 'PASSWORD', 'HOST', 'PORT']:
conn.setdefault(setting, '')
|
KeyError
|
dataset/ETHPy150Open django/django/django/db/utils.py/ConnectionHandler.ensure_defaults
|
840
|
def prepare_test_settings(self, alias):
"""
Makes sure the test settings are available in the 'TEST' sub-dictionary.
"""
try:
conn = self.databases[alias]
except __HOLE__:
raise ConnectionDoesNotExist("The connection %s doesn't exist" % alias)
test_settings = conn.setdefault('TEST', {})
for key in ['CHARSET', 'COLLATION', 'NAME', 'MIRROR']:
test_settings.setdefault(key, None)
|
KeyError
|
dataset/ETHPy150Open django/django/django/db/utils.py/ConnectionHandler.prepare_test_settings
|
841
|
def close_all(self):
for alias in self:
try:
connection = getattr(self._connections, alias)
except __HOLE__:
continue
connection.close()
|
AttributeError
|
dataset/ETHPy150Open django/django/django/db/utils.py/ConnectionHandler.close_all
|
842
|
def _router_func(action):
def _route_db(self, model, **hints):
chosen_db = None
for router in self.routers:
try:
method = getattr(router, action)
except __HOLE__:
# If the router doesn't have a method, skip to the next one.
pass
else:
chosen_db = method(model, **hints)
if chosen_db:
return chosen_db
instance = hints.get('instance')
if instance is not None and instance._state.db:
return instance._state.db
return DEFAULT_DB_ALIAS
return _route_db
|
AttributeError
|
dataset/ETHPy150Open django/django/django/db/utils.py/ConnectionRouter._router_func
|
843
|
def allow_relation(self, obj1, obj2, **hints):
for router in self.routers:
try:
method = router.allow_relation
except __HOLE__:
# If the router doesn't have a method, skip to the next one.
pass
else:
allow = method(obj1, obj2, **hints)
if allow is not None:
return allow
return obj1._state.db == obj2._state.db
|
AttributeError
|
dataset/ETHPy150Open django/django/django/db/utils.py/ConnectionRouter.allow_relation
|
844
|
def allow_migrate(self, db, app_label, **hints):
for router in self.routers:
try:
method = router.allow_migrate
except __HOLE__:
# If the router doesn't have a method, skip to the next one.
continue
allow = method(db, app_label, **hints)
if allow is not None:
return allow
return True
|
AttributeError
|
dataset/ETHPy150Open django/django/django/db/utils.py/ConnectionRouter.allow_migrate
|
845
|
def diff(self, revisions, include_files=[], exclude_patterns=[],
extra_args=[]):
"""
Performs a diff across all modified files in a Plastic workspace
Parent diffs are not supported (the second value in the tuple).
"""
# TODO: use 'files'
changenum = None
tip = revisions['tip']
if tip.startswith(self.REVISION_CHANGESET_PREFIX):
logging.debug('Doing a diff against changeset %s', tip)
try:
changenum = str(int(
tip[len(self.REVISION_CHANGESET_PREFIX):]))
except __HOLE__:
pass
else:
logging.debug('Doing a diff against branch %s', tip)
if not getattr(self.options, 'branch', None):
self.options.branch = tip
diff_entries = execute(
['cm', 'diff', tip, '--format={status} {path} rev:revid:{revid} '
'rev:revid:{parentrevid} src:{srccmpath} '
'dst:{dstcmpath}{newline}'],
split_lines=True)
logging.debug('Got files: %s', diff_entries)
diff = self._process_diffs(diff_entries)
return {
'diff': diff,
'changenum': changenum,
}
|
ValueError
|
dataset/ETHPy150Open reviewboard/rbtools/rbtools/clients/plastic.py/PlasticClient.diff
|
846
|
def __init__(self, reactor, proc, name, fileno, forceReadHack=False):
"""
Initialize, specifying a Process instance to connect to.
"""
abstract.FileDescriptor.__init__(self, reactor)
fdesc.setNonBlocking(fileno)
self.proc = proc
self.name = name
self.fd = fileno
if not stat.S_ISFIFO(os.fstat(self.fileno()).st_mode):
# If the fd is not a pipe, then the read hack is never
# applicable. This case arises when ProcessWriter is used by
# StandardIO and stdout is redirected to a normal file.
self.enableReadHack = False
elif forceReadHack:
self.enableReadHack = True
else:
# Detect if this fd is actually a write-only fd. If it's
# valid to read, don't try to detect closing via read.
# This really only means that we cannot detect a TTY's write
# pipe being closed.
try:
os.read(self.fileno(), 0)
except __HOLE__:
# It's a write-only pipe end, enable hack
self.enableReadHack = True
if self.enableReadHack:
self.startReading()
|
OSError
|
dataset/ETHPy150Open twisted/twisted/twisted/internet/process.py/ProcessWriter.__init__
|
847
|
def reapProcess(self):
"""
Try to reap a process (without blocking) via waitpid.
This is called when sigchild is caught or a Process object loses its
"connection" (stdout is closed) This ought to result in reaping all
zombie processes, since it will be called twice as often as it needs
to be.
(Unfortunately, this is a slightly experimental approach, since
UNIX has no way to be really sure that your process is going to
go away w/o blocking. I don't want to block.)
"""
try:
try:
pid, status = os.waitpid(self.pid, os.WNOHANG)
except __HOLE__ as e:
if e.errno == errno.ECHILD:
# no child process
pid = None
else:
raise
except:
log.msg('Failed to reap %d:' % self.pid)
log.err()
pid = None
if pid:
self.processEnded(status)
unregisterReapProcessHandler(pid, self)
|
OSError
|
dataset/ETHPy150Open twisted/twisted/twisted/internet/process.py/_BaseProcess.reapProcess
|
848
|
def signalProcess(self, signalID):
"""
Send the given signal C{signalID} to the process. It'll translate a
few signals ('HUP', 'STOP', 'INT', 'KILL', 'TERM') from a string
representation to its int value, otherwise it'll pass directly the
value provided
@type signalID: C{str} or C{int}
"""
if signalID in ('HUP', 'STOP', 'INT', 'KILL', 'TERM'):
signalID = getattr(signal, 'SIG%s' % (signalID,))
if self.pid is None:
raise ProcessExitedAlready()
try:
os.kill(self.pid, signalID)
except __HOLE__ as e:
if e.errno == errno.ESRCH:
raise ProcessExitedAlready()
else:
raise
|
OSError
|
dataset/ETHPy150Open twisted/twisted/twisted/internet/process.py/_BaseProcess.signalProcess
|
849
|
def _fallbackFDImplementation(self):
"""
Fallback implementation where either the resource module can inform us
about the upper bound of how many FDs to expect, or where we just guess
a constant maximum if there is no resource module.
All possible file descriptors from 0 to that upper bound are returned
with no attempt to exclude invalid file descriptor values.
"""
try:
import resource
except __HOLE__:
maxfds = 1024
else:
# OS-X reports 9223372036854775808. That's a lot of fds to close.
# OS-X should get the /dev/fd implementation instead, so mostly
# this check probably isn't necessary.
maxfds = min(1024, resource.getrlimit(resource.RLIMIT_NOFILE)[1])
return range(maxfds)
|
ImportError
|
dataset/ETHPy150Open twisted/twisted/twisted/internet/process.py/_FDDetector._fallbackFDImplementation
|
850
|
def blitz(expr,local_dict=None, global_dict=None,check_size=1,verbose=0,**kw):
# this could call inline, but making a copy of the
# code here is more efficient for several reasons.
global function_catalog
# this grabs the local variables from the *previous* call
# frame -- that is the locals from the function that called
# inline.
call_frame = sys._getframe().f_back
if local_dict is None:
local_dict = call_frame.f_locals
if global_dict is None:
global_dict = call_frame.f_globals
# 1. Check the sizes of the arrays and make sure they are compatible.
# This is expensive, so unsetting the check_size flag can save a lot
# of time. It also can cause core-dumps if the sizes of the inputs
# aren't compatible.
if check_size and not size_check.check_expr(expr,local_dict,global_dict):
raise ValueError("inputs failed to pass size check.")
# 2. try local cache
try:
results = apply(function_cache[expr],(local_dict,global_dict))
return results
except:
pass
try:
results = attempt_function_call(expr,local_dict,global_dict)
# 3. build the function
except ValueError:
# This section is pretty much the only difference
# between blitz and inline
ast = parser.suite(expr)
ast_list = ast.tolist()
expr_code = ast_to_blitz_expr(ast_list)
arg_names = ast_tools.harvest_variables(ast_list)
module_dir = global_dict.get('__file__',None)
func = inline_tools.compile_function(expr_code,arg_names,local_dict,
global_dict,module_dir,
compiler='gcc',auto_downcast=1,
verbose=verbose,
type_converters=converters.blitz,
**kw)
function_catalog.add_function(expr,func,module_dir)
try:
results = attempt_function_call(expr,local_dict,global_dict)
except __HOLE__:
warnings.warn('compilation failed. Executing as python code',
BlitzWarning)
exec(expr, global_dict, local_dict)
|
ValueError
|
dataset/ETHPy150Open scipy/scipy/scipy/weave/blitz_tools.py/blitz
|
851
|
def get_absolute_url(self):
"""
Return the URL of the parent object, if it has one.
This method mainly exists to support cache mechanisms (e.g. refreshing a Varnish cache), and assist in debugging.
"""
if not self.parent_id or not self.parent_type_id:
return None
try:
return self.parent.get_absolute_url()
except __HOLE__:
return None
|
AttributeError
|
dataset/ETHPy150Open edoburu/django-fluent-contents/fluent_contents/models/db.py/Placeholder.get_absolute_url
|
852
|
def get_absolute_url(self):
"""
Return the URL of the parent object, if it has one.
This method mainly exists to refreshing cache mechanisms.
"""
# Allows quick debugging, and cache refreshes.
parent = self.parent
try:
return parent.get_absolute_url()
except __HOLE__:
return None
|
AttributeError
|
dataset/ETHPy150Open edoburu/django-fluent-contents/fluent_contents/models/db.py/ContentItem.get_absolute_url
|
853
|
def move_to_placeholder(self, placeholder, sort_order=None):
"""
.. versionadded: 1.0.2 Move this content item to a new placeholder.
The object is saved afterwards.
"""
# Transfer parent
self.placeholder = placeholder
self.parent_type = placeholder.parent_type
self.parent_id = placeholder.parent_id
try:
# Copy cache property set by GenericForeignKey (_meta.virtual_fields[0].cache_attr)
setattr(self, '_parent_cache', placeholder._parent_cache)
except __HOLE__:
pass
if sort_order is not None:
self.sort_order = sort_order
self.save()
|
AttributeError
|
dataset/ETHPy150Open edoburu/django-fluent-contents/fluent_contents/models/db.py/ContentItem.move_to_placeholder
|
854
|
def error(self, obj, name, value):
"""Returns a descriptive error string."""
# pylint: disable=E1101
right = left = '='
if self.exclude_high is True:
right = ''
if self.exclude_low is True:
left = ''
if self.low is None and self.high is None:
info = "an int"
elif self.low is not None and self.high is not None:
info = "%s <%s an integer <%s %s"% (self.low, left,
right, self.high)
elif self.low is not None:
info = "a float with a value >%s %s"% (left, self.low)
else: # self.high is not None
info = "a float with a value <%s %s"% (right, self.high)
vtype = type(value)
msg = "Variable '%s' must be %s, but a value of %s %s was specified." % \
(name, info, value, vtype)
try:
obj.raise_exception(msg, ValueError)
except __HOLE__:
raise ValueError(msg)
|
AttributeError
|
dataset/ETHPy150Open OpenMDAO/OpenMDAO-Framework/openmdao.main/src/openmdao/main/datatypes/int.py/Int.error
|
855
|
def read(self):
# TODO: check the lengths?
try:
key_length = self.read_int()
except struct.error:
return None
try:
key = self._read()
assert self.file.read(1) == b'\t'
val_length = self.read_int()
val = self._read()
assert self.file.read(1) == b'\n'
except (__HOLE__, struct.error):
raise struct.error("EOF before complete pair read")
return key, val
|
StopIteration
|
dataset/ETHPy150Open dougalsutherland/py-sdm/sdm/typedbytes_utils.py/TypedbytesSequenceFileStreamingInput.read
|
856
|
def _file_is_seekable(f):
try:
f.tell()
except __HOLE__ as e:
if e.errno == errno.ESPIPE and e.strerror == 'Illegal seek':
return False
raise
except AttributeError:
return False
else:
return True
|
IOError
|
dataset/ETHPy150Open dougalsutherland/py-sdm/sdm/typedbytes_utils.py/_file_is_seekable
|
857
|
def database_dex_analyze_view(self, request, database_id):
import json
import random
from dbaas_laas.provider import LaaSProvider
from util import get_credentials_for
from util.laas import get_group_name
from dbaas_credentials.models import CredentialType
import os
import string
from datetime import datetime, timedelta
def generate_random_string(length, stringset=string.ascii_letters + string.digits):
return ''.join([stringset[i % len(stringset)]
for i in [ord(x) for x in os.urandom(length)]])
database = Database.objects.get(id=database_id)
if database.status != Database.ALIVE or not database.database_status.is_alive:
self.message_user(
request, "Database is not alive cannot be analyzed", level=messages.ERROR)
url = reverse('admin:logical_database_changelist')
return HttpResponseRedirect(url)
if database.is_beeing_used_elsewhere():
self.message_user(
request, "Database cannot be analyzed because it is in use by another task.", level=messages.ERROR)
url = reverse('admin:logical_database_changelist')
return HttpResponseRedirect(url)
credential = get_credentials_for(environment=database.environment,
credential_type=CredentialType.LAAS)
db_name = database.name
environment = database.environment
endpoint = credential.endpoint
username = credential.user
password = credential.password
lognit_environment = credential.get_parameter_by_name(
'lognit_environment')
provider = LaaSProvider()
group_name = get_group_name(database)
today = (datetime.now()).strftime('%Y%m%d')
yesterday = (datetime.now() - timedelta(days=1)).strftime('%Y%m%d')
uri = "group:{} text:query date:[{} TO {}] time:[000000 TO 235959]".format(
group_name, yesterday, today)
parsed_logs = ''
database_logs = provider.get_logs_for_group(
environment, lognit_environment, uri)
try:
database_logs = json.loads(database_logs)
except Exception, e:
pass
else:
for database_log in database_logs:
try:
items = database_log['items']
except __HOLE__, e:
pass
else:
parsed_logs = "\n".join(
(item['message'] for item in items))
arq_path = Configuration.get_by_name(
'database_clone_dir') + '/' + database.name + generate_random_string(20) + '.txt'
arq = open(arq_path, 'w')
arq.write(parsed_logs)
arq.close()
uri = 'mongodb://{}:{}@{}:{}/admin'.format(database.databaseinfra.user,
database.databaseinfra.password,
database.databaseinfra.instances.all()[
0].address,
database.databaseinfra.instances.all()[0].port)
old_stdout = sys.stdout
sys.stdout = mystdout = StringIO()
md = dex.Dex(db_uri=uri, verbose=False, namespaces_list=[],
slowms=0, check_indexes=True, timeout=0)
md.analyze_logfile(arq_path)
sys.stdout = old_stdout
dexanalyzer = loads(
mystdout.getvalue().replace("\"", "&&").replace("'", "\"").replace("&&", "'"))
os.remove(arq_path)
import ast
final_mask = """<div>"""
print dexanalyzer['results']
for result in dexanalyzer['results']:
final_mask += "<h3> Collection: " + result['namespace'] + "</h3>"
final_mask += \
"""<li> Query: """ +\
str(ast.literal_eval(result['queryMask'])['$query']) +\
"""</li>""" +\
"""<li> Index: """ +\
result['recommendation']['index'] +\
"""</li>""" +\
"""<li> Command: """ +\
result['recommendation']['shellCommand'] +\
"""</li>"""
final_mask += """<br>"""
final_mask += """</ul> </div>"""
return render_to_response("logical/database/dex_analyze.html", locals(), context_instance=RequestContext(request))
|
KeyError
|
dataset/ETHPy150Open globocom/database-as-a-service/dbaas/logical/admin/database.py/DatabaseAdmin.database_dex_analyze_view
|
858
|
def __init__(self, host=None, port=None, **kwargs):
"""Initialize a connection to the network socket.
Kwargs:
host - optionally override the default network host (default is local machine)
port - optionally override the default network port (default is 50001)
log_mode - optionally record or print logs from the network source
Raises:
DataSourceError if the socket connection cannot be opened.
"""
super(NetworkDataSource, self).__init__(**kwargs)
self.host = host or socket.gethostbyname(socket.gethostname())
self.port = port or self.DEFAULT_PORT
self.port = int(self.port)
try:
self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.socket.connect((self.host, self.port))
except (__HOLE__, socket.error) as e:
raise DataSourceError("Unable to open socket connection at "
"%s:%s: %s" % (self.host,self.port, e))
else:
LOG.debug("Opened socket connection at %s:%s", self.host, self.port)
|
OSError
|
dataset/ETHPy150Open openxc/openxc-python/openxc/sources/network.py/NetworkDataSource.__init__
|
859
|
def test_get_flattened_index(self):
self.assertEqual(slice(4,5,None), get_flattened_index(4, (10,)))
self.assertEqual(slice(9,10,None), get_flattened_index(-1, (10,)))
self.assertEqual(slice(90,100,1), get_flattened_index(-1, (10,10)))
try:
self.assertEqual(0, get_flattened_index(10, (10,10)))
except __HOLE__ as err:
# Some versions of numpy have slightly different messages, so as
# long as it is an index error, we are fine.
pass
else:
self.fail('Should get an Indexerror')
self.assertEqual(slice(22,23,None), get_flattened_index((2,2), (10,10)))
self.assertEqual(slice(42,63, 10), get_flattened_index((slice(4,7),2), (10,10)))
self.assertEqual(slice(40,61,10), get_flattened_index((slice(4,7),0), (10,10)))
self.assertEqual(slice(4, 11, 2), get_flattened_index(slice(4,11,2), (20,)))
self.assertEqual(slice(40,50,1),
get_flattened_index(slice(4,5,2), (20,10)))
self.assertEqual(slice(1,2,None), get_flattened_index(1, (5,)))
self.assertEqual(slice(6,7,None), get_flattened_index([1,2], (3,4)))
self.assertEqual(slice(62,63,None), get_flattened_index([-1,-1], (9,7)))
self.assertEqual(slice(3, 25, 7), get_flattened_index([slice(None),3], (4,7)))
self.assertEqual(slice(48,49,None), get_flattened_index(-2, (50,)))
self.assertEqual(slice(3, 44, 5), get_flattened_index(slice(3,-3,5), (50,)))
|
IndexError
|
dataset/ETHPy150Open OpenMDAO/OpenMDAO-Framework/openmdao.main/src/openmdao/main/test/test_array_helpers.py/TestcaseArrayHelpers.test_get_flattened_index
|
860
|
def tree_entries_from_data(data):
"""Reads the binary representation of a tree and returns tuples of Tree items
:param data: data block with tree data (as bytes)
:return: list(tuple(binsha, mode, tree_relative_path), ...)"""
ord_zero = ord('0')
space_ord = ord(' ')
len_data = len(data)
i = 0
out = list()
while i < len_data:
mode = 0
# read mode
# Some git versions truncate the leading 0, some don't
# The type will be extracted from the mode later
while byte_ord(data[i]) != space_ord:
# move existing mode integer up one level being 3 bits
# and add the actual ordinal value of the character
mode = (mode << 3) + (byte_ord(data[i]) - ord_zero)
i += 1
# END while reading mode
# byte is space now, skip it
i += 1
# parse name, it is NULL separated
ns = i
while byte_ord(data[i]) != 0:
i += 1
# END while not reached NULL
# default encoding for strings in git is utf8
# Only use the respective unicode object if the byte stream was encoded
name = data[ns:i]
try:
name = name.decode(defenc)
except __HOLE__:
pass
# END handle encoding
# byte is NULL, get next 20
i += 1
sha = data[i:i + 20]
i = i + 20
out.append((sha, mode, name))
# END for each byte in data stream
return out
|
UnicodeDecodeError
|
dataset/ETHPy150Open gitpython-developers/GitPython/git/objects/fun.py/tree_entries_from_data
|
861
|
def _find_by_name(tree_data, name, is_dir, start_at):
"""return data entry matching the given name and tree mode
or None.
Before the item is returned, the respective data item is set
None in the tree_data list to mark it done"""
try:
item = tree_data[start_at]
if item and item[2] == name and S_ISDIR(item[1]) == is_dir:
tree_data[start_at] = None
return item
except __HOLE__:
pass
# END exception handling
for index, item in enumerate(tree_data):
if item and item[2] == name and S_ISDIR(item[1]) == is_dir:
tree_data[index] = None
return item
# END if item matches
# END for each item
return None
|
IndexError
|
dataset/ETHPy150Open gitpython-developers/GitPython/git/objects/fun.py/_find_by_name
|
862
|
def spawn(df, d, limit):
enum = df.get_enum()
rnd = random(enum)
rndmask = (rnd<limit).nonzero()[0]
for e in rndmask:
l = df.get_edge_length(e)
if l<d:
continue
try:
df.split_edge(e)
except __HOLE__:
pass
|
ValueError
|
dataset/ETHPy150Open inconvergent/differential-line/modules/growth.py/spawn
|
863
|
def spawn_curl(df, limit, prob_spawn=1.0):
enum = df.get_enum()
ind_curv = {}
tot_curv = 0
max_curv = -100000
for e in xrange(enum):
try:
t = df.get_edge_curvature(e)
ind_curv[e] = t
tot_curv += t
max_curv = max(max_curv, t)
except ValueError:
pass
ne = len(ind_curv)
for r,(e,t) in zip(random(ne),ind_curv.iteritems()):
if r<t/max_curv*prob_spawn:
#if t>2*limit or r<t/max_curv:
#if r<sqrt(t):
#if True:
try:
df.split_edge(e, minimum_length=limit)
except __HOLE__:
pass
|
ValueError
|
dataset/ETHPy150Open inconvergent/differential-line/modules/growth.py/spawn_curl
|
864
|
def spawn_short(df, short, long):
enum = df.get_enum()
for e in xrange(enum):
l = df.get_edge_length(e)
if l>long:
try:
df.split_edge(e, minimum_length=short)
except __HOLE__:
pass
|
ValueError
|
dataset/ETHPy150Open inconvergent/differential-line/modules/growth.py/spawn_short
|
865
|
def collapse(df, d, limit):
enum = df.get_enum()
rnd = random(enum)
rndmask = (rnd<limit).nonzero()[0]
for e in rndmask:
l = df.get_edge_length(e)
if l<d:
try:
df.collapse_edge(e)
except __HOLE__:
pass
|
ValueError
|
dataset/ETHPy150Open inconvergent/differential-line/modules/growth.py/collapse
|
866
|
@register.filter
def blockers(user):
"""Returns list of people blocking user."""
try:
return Relationship.objects.get_blockers_for_user(user)
except __HOLE__:
return []
|
AttributeError
|
dataset/ETHPy150Open nathanborror/django-basic-apps/basic/relationships/templatetags/relationships.py/blockers
|
867
|
@register.filter
def friends(user):
"""Returns people user is following sans people blocking user."""
try:
return Relationship.objects.get_friends_for_user(user)
except __HOLE__:
return []
|
AttributeError
|
dataset/ETHPy150Open nathanborror/django-basic-apps/basic/relationships/templatetags/relationships.py/friends
|
868
|
@register.filter
def followers(user):
"""Returns people following user."""
try:
return Relationship.objects.get_followers_for_user(user)
except __HOLE__:
pass
|
AttributeError
|
dataset/ETHPy150Open nathanborror/django-basic-apps/basic/relationships/templatetags/relationships.py/followers
|
869
|
@register.filter
def fans(user):
"""Returns people following user but user isn't following."""
try:
return Relationship.objects.get_fans_for_user(user)
except __HOLE__:
pass
# Comparing two users.
|
AttributeError
|
dataset/ETHPy150Open nathanborror/django-basic-apps/basic/relationships/templatetags/relationships.py/fans
|
870
|
@register.filter
def follows(from_user, to_user):
"""Returns ``True`` if the first user follows the second, ``False`` otherwise. Example: {% if user|follows:person %}{% endif %}"""
try:
relationship = Relationship.objects.get_relationship(from_user, to_user)
if relationship and not relationship.is_blocked:
return True
else:
return False
except __HOLE__:
return False
|
AttributeError
|
dataset/ETHPy150Open nathanborror/django-basic-apps/basic/relationships/templatetags/relationships.py/follows
|
871
|
@register.filter
def get_relationship(from_user, to_user):
"""Get relationship between two users."""
try:
return Relationship.objects.get_relationship(from_user, to_user)
except __HOLE__:
return None
# get_relationship templatetag.
|
AttributeError
|
dataset/ETHPy150Open nathanborror/django-basic-apps/basic/relationships/templatetags/relationships.py/get_relationship
|
872
|
def encode_7bit(self, encoder=None):
""".. versionadded:: 0.3.12
Forces the message into 7-bit encoding such that it can be sent to SMTP
servers that do not support the ``8BITMIME`` extension.
If the ``encoder`` function is not given, this function is relatively
cheap and will just check the message body for 8-bit characters
(raising :py:exc:`UnicodeDecodeError` if any are found). Otherwise,
this method can be very expensive. It will parse the entire message
into MIME parts in order to encode parts that are not 7-bit.
:param encoder: Optional function from :mod:`email.encoders` used to
encode MIME parts that are not 7-bit.
:raises: UnicodeDecodeError
"""
try:
self.message.decode('ascii')
except __HOLE__:
if not encoder:
raise
self._encode_parts(encoder)
|
UnicodeDecodeError
|
dataset/ETHPy150Open slimta/python-slimta/slimta/envelope.py/Envelope.encode_7bit
|
873
|
def _childParser_mucUser(self, element):
"""
Parse the MUC user extension element.
"""
for child in element.elements():
if child.uri != NS_MUC_USER:
continue
elif child.name == 'status':
try:
value = int(child.getAttribute('code'))
statusCode = STATUS_CODE.lookupByValue(value)
except (__HOLE__, ValueError):
continue
self.mucStatuses.add(statusCode)
elif child.name == 'item':
if child.hasAttribute('jid'):
self.entity = jid.JID(child['jid'])
self.nick = child.getAttribute('nick')
self.affiliation = child.getAttribute('affiliation')
self.role = child.getAttribute('role')
for reason in child.elements(NS_MUC_ADMIN, 'reason'):
self.reason = unicode(reason)
# TODO: destroy
|
TypeError
|
dataset/ETHPy150Open ralphm/wokkel/wokkel/muc.py/UserPresence._childParser_mucUser
|
874
|
def remove(self, name):
""" Remove a column of data.
Args:
name (str) : name of the column to remove
Returns:
None
.. note::
If the column name does not exist, a warning is issued.
"""
try:
self.column_names.remove(name)
del self.data[name]
except (__HOLE__, KeyError):
import warnings
warnings.warn("Unable to find column '%s' in data source" % name)
|
ValueError
|
dataset/ETHPy150Open bokeh/bokeh/bokeh/models/sources.py/ColumnDataSource.remove
|
875
|
def __init__(self, server, pid):
self._pid = pid
self.server = IDroneModelServer(server)
self._created = time.time()
#don't set self._process
try:
try: #re-constructing, can cause problems with this
if IKittNullProcess.providedBy(self.process.process):
raise InvalidProcess("Invalid PID (%s)" % pid)
except __HOLE__:
if isinstance(self.process, NullProcess):
raise InvalidProcess("Invalid PID (%s)" % pid)
raise #re-raise do avoid ending up in a pickle, literally
except InvalidProcess:
if config.HOSTNAME == self.server.hostname:
AppProcess.delete(self) #make sure we are invalid
raise InvalidProcess("Invalid PID (%s)" % pid)
except IOError: #linux and solaris kitt.proc.LiveProcess use files
AppProcess.delete(self) #make sure we are invalid
raise InvalidProcess("Invalid PID (%s)" % pid)
except:
err('wtf happened here .. seriously i do not know!!!')
AppProcess.delete(self) #make sure we are invalid
raise
|
AttributeError
|
dataset/ETHPy150Open OrbitzWorldwide/droned/droned/lib/droned/models/app.py/AppProcess.__init__
|
876
|
def __init__(self, server, app, label):
try:
if not IDroneModelServer.providedBy(server):
e = '%s is not a L{IDroneModelServer} provider' % str(server)
raise AssertionError(e)
if not IDroneModelApp.providedBy(app):
e = '%s is not a L{IDroneModelAppVersion} provider' % \
str(appversion)
raise AssertionError(e)
except __HOLE__:
AppInstance.delete(self)
raise
#internal information
self.shouldBeRunning = False
#model information
self._label = label
self._app = IDroneModelApp(app)
self._server = IDroneModelServer(server)
#serializable information
self.info = {}
#volitile data, unserializable
self.context = {}
|
AssertionError
|
dataset/ETHPy150Open OrbitzWorldwide/droned/droned/lib/droned/models/app.py/AppInstance.__init__
|
877
|
@classmethod
def setupClass(cls):
global pydot
try:
import pydot
import dot_parser
except __HOLE__:
raise SkipTest('pydot not available.')
|
ImportError
|
dataset/ETHPy150Open gkno/gkno_launcher/src/networkx/drawing/tests/test_pydot.py/TestPydot.setupClass
|
878
|
def _check_aug_version(self):
""" Checks that we have recent enough version of libaugeas.
If augeas version is recent enough, it will support case insensitive
regexp matching"""
self.aug.set("/test/path/testing/arg", "aRgUMeNT")
try:
matches = self.aug.match(
"/test//*[self::arg=~regexp('argument', 'i')]")
except __HOLE__:
self.aug.remove("/test/path")
return False
self.aug.remove("/test/path")
return matches
|
RuntimeError
|
dataset/ETHPy150Open letsencrypt/letsencrypt/certbot-apache/certbot_apache/configurator.py/ApacheConfigurator._check_aug_version
|
879
|
def _copy_create_ssl_vhost_skeleton(self, avail_fp, ssl_fp):
"""Copies over existing Vhost with IfModule mod_ssl.c> skeleton.
:param str avail_fp: Pointer to the original available non-ssl vhost
:param str ssl_fp: Full path where the new ssl_vhost will reside.
A new file is created on the filesystem.
"""
# First register the creation so that it is properly removed if
# configuration is rolled back
self.reverter.register_file_creation(False, ssl_fp)
sift = False
try:
with open(avail_fp, "r") as orig_file:
with open(ssl_fp, "w") as new_file:
new_file.write("<IfModule mod_ssl.c>\n")
for line in orig_file:
if self._sift_line(line):
if not sift:
new_file.write(
"# Some rewrite rules in this file were "
"were disabled on your HTTPS site,\n"
"# because they have the potential to "
"create redirection loops.\n")
sift = True
new_file.write("# " + line)
else:
new_file.write(line)
new_file.write("</IfModule>\n")
except __HOLE__:
logger.fatal("Error writing/reading to file in make_vhost_ssl")
raise errors.PluginError("Unable to write/read in make_vhost_ssl")
if sift:
reporter = zope.component.getUtility(interfaces.IReporter)
reporter.add_message(
"Some rewrite rules copied from {0} were disabled in the "
"vhost for your HTTPS site located at {1} because they have "
"the potential to create redirection loops.".format(avail_fp,
ssl_fp),
reporter.MEDIUM_PRIORITY)
|
IOError
|
dataset/ETHPy150Open letsencrypt/letsencrypt/certbot-apache/certbot_apache/configurator.py/ApacheConfigurator._copy_create_ssl_vhost_skeleton
|
880
|
def enhance(self, domain, enhancement, options=None):
"""Enhance configuration.
:param str domain: domain to enhance
:param str enhancement: enhancement type defined in
:const:`~certbot.constants.ENHANCEMENTS`
:param options: options for the enhancement
See :const:`~certbot.constants.ENHANCEMENTS`
documentation for appropriate parameter.
:raises .errors.PluginError: If Enhancement is not supported, or if
there is any other problem with the enhancement.
"""
try:
func = self._enhance_func[enhancement]
except __HOLE__:
raise errors.PluginError(
"Unsupported enhancement: {0}".format(enhancement))
try:
func(self.choose_vhost(domain), options)
except errors.PluginError:
logger.warn("Failed %s for %s", enhancement, domain)
raise
|
KeyError
|
dataset/ETHPy150Open letsencrypt/letsencrypt/certbot-apache/certbot_apache/configurator.py/ApacheConfigurator.enhance
|
881
|
def is_site_enabled(self, avail_fp):
"""Checks to see if the given site is enabled.
.. todo:: fix hardcoded sites-enabled, check os.path.samefile
:param str avail_fp: Complete file path of available site
:returns: Success
:rtype: bool
"""
enabled_dir = os.path.join(self.parser.root, "sites-enabled")
if not os.path.isdir(enabled_dir):
error_msg = ("Directory '{0}' does not exist. Please ensure "
"that the values for --apache-handle-sites and "
"--apache-server-root are correct for your "
"environment.".format(enabled_dir))
raise errors.ConfigurationError(error_msg)
for entry in os.listdir(enabled_dir):
try:
if filecmp.cmp(avail_fp, os.path.join(enabled_dir, entry)):
return True
except __HOLE__:
pass
return False
|
OSError
|
dataset/ETHPy150Open letsencrypt/letsencrypt/certbot-apache/certbot_apache/configurator.py/ApacheConfigurator.is_site_enabled
|
882
|
@classmethod
def register_self(cls, **kwargs):
registry = get_module_registry()
def resolve_type(t):
if type(t) == tuple:
return registry.get_descriptor_by_name(*t).module
elif type(t) == type:
return t
else:
assert False, ("Unknown type " + str(type(t)))
registry.add_module(cls, **kwargs)
try:
ips = cls.input_ports
except AttributeError:
pass
else:
for (port_name, types) in ips:
registry.add_input_port(cls,
port_name,
list(resolve_type(t) for t in types))
try:
ops = cls.output_ports
except __HOLE__:
pass
else:
for (port_name, types) in ops:
registry.add_output_port(cls,
port_name,
list(resolve_type(t) for t in types))
|
AttributeError
|
dataset/ETHPy150Open VisTrails/VisTrails/contrib/vtksnl/inspectors.py/vtkBaseInspector.register_self
|
883
|
def compute(self):
vtk_object = None
if self.has_input("SetInputConnection0"):
ic = self.get_input("SetInputConnection0")
port_object = ic.vtkInstance
ix = port_object.GetIndex()
producer = port_object.GetProducer()
try:
vtk_object = producer.GetOutput()
except __HOLE__:
raise ModuleError(self,
"expected a module that supports GetOutput")
elif self.has_input("SetInput"):
port_object = self.get_input("SetInput")
if hasattr(port_object, "vtkInstance"):
vtk_object = port_object.vtkInstance
else:
raise ModuleError(self, "expected a vtk module")
if vtk_object:
self.auto_set_results(vtk_object)
|
AttributeError
|
dataset/ETHPy150Open VisTrails/VisTrails/contrib/vtksnl/inspectors.py/vtkDataSetInspector.compute
|
884
|
def compute(self):
vtk_object = None
if self.has_input("SetInputConnection0"):
ic = self.get_input("SetInputConnection0")
port_object = ic.vtkInstance
ix = port_object.GetIndex()
producer = port_object.GetProducer()
try:
vtk_object = producer.GetOutput()
except __HOLE__:
raise ModuleError(self,
"expected a module that supports GetOutput")
elif self.has_input("SetInput"):
port_object = self.get_input("SetInput")
if hasattr(port_object, "vtkInstance"):
vtk_object = port_object.vtkInstance
else:
raise ModuleError(self, "expected a vtk module")
if vtk_object:
self.auto_set_results(vtk_object)
|
AttributeError
|
dataset/ETHPy150Open VisTrails/VisTrails/contrib/vtksnl/inspectors.py/vtkPolyDataInspector.compute
|
885
|
@treeio_login_required
def settings_view(request, response_format='html'):
"Settings view"
user = request.user.profile
# default permissions
try:
conf = ModuleSetting.get_for_module(
'treeio.core', 'default_permissions', user=user)[0]
default_permissions = conf.value
except:
default_permissions = settings.HARDTREE_DEFAULT_PERMISSIONS
# default perspective
try:
conf = ModuleSetting.get_for_module(
'treeio.core', 'default_perspective', user=user)[0]
default_perspective = Perspective.objects.get(pk=long(conf.value))
except:
default_perspective = None
# language
language = getattr(settings, 'HARDTREE_LANGUAGES_DEFAULT', '')
try:
conf = ModuleSetting.get('language', user=user)[0]
language = conf.value
except __HOLE__:
pass
all_languages = getattr(
settings, 'HARDTREE_LANGUAGES', [('en', 'English')])
# time zone
default_timezone = settings.HARDTREE_SERVER_DEFAULT_TIMEZONE
try:
conf = ModuleSetting.get('default_timezone')[0]
default_timezone = conf.value
except:
pass
try:
conf = ModuleSetting.get('default_timezone', user=user)[0]
default_timezone = conf.value
except:
default_timezone = getattr(
settings, 'HARDTREE_SERVER_TIMEZONE')[default_timezone][0]
all_timezones = getattr(settings, 'HARDTREE_SERVER_TIMEZONE')
# email notifications e.g. new task assigned to you
email_notifications = getattr(
settings, 'HARDTREE_ALLOW_EMAIL_NOTIFICATIONS', False)
try:
conf = ModuleSetting.get('email_notifications', user=user)[0]
email_notifications = conf.value
except:
pass
try:
ns = NotificationSetting.objects.get(owner=user, enabled=True)
notifications_for_modules = [m.title for m in ns.modules.all()]
except NotificationSetting.DoesNotExist:
notifications_for_modules = []
return render_to_response('account/settings_view',
{
'default_permissions': default_permissions,
'default_perspective': default_perspective,
'language': language,
'all_languages': all_languages,
'default_timezone': default_timezone,
'all_timezones': all_timezones,
'email_notifications': email_notifications,
'notifications_for_modules': notifications_for_modules,
},
context_instance=RequestContext(request), response_format=response_format)
|
IndexError
|
dataset/ETHPy150Open treeio/treeio/treeio/account/views.py/settings_view
|
886
|
def if_file_get_content(value):
"""
if value is the path to a local file, read the content and return it
otherwise just return value
"""
file_path = path.abspath(value)
if path.isfile(file_path):
try:
f = open(file_path, 'rU')
except __HOLE__:
pass
else:
content = f.read()
f.close()
return content
if sys.platform == 'win32':
# in windows, sys.argv is encoded in windows-1252
return value.decode('windows-1252').encode('UTF-8')
else:
# all others use the same encoding for stdin and argv
return recode_input(value)
|
IOError
|
dataset/ETHPy150Open cloudControl/cctrl/cctrl/addonoptionhelpers.py/if_file_get_content
|
887
|
def main():
"""! This function drives command line tool 'mbedhtrun' which is using DefaultTestSelector
@details 1. Create DefaultTestSelector object and pass command line parameters
2. Call default test execution function run() to start test instrumentation
"""
freeze_support()
result = -2
test_selector = DefaultTestSelector(init_host_test_cli_params())
try:
result = test_selector.execute()
except (__HOLE__, SystemExit):
test_selector.finish()
result = -3
raise
else:
test_selector.finish()
return result
|
KeyboardInterrupt
|
dataset/ETHPy150Open ARMmbed/htrun/mbed_host_tests/mbedhtrun.py/main
|
888
|
def post(self):
# Extract useful information from POST request and store it as a dictionary data structure
self.message = dict(self.request.arguments)
# Extract value of url, action and state request parameters from request body
# Request parameters values by default is []
self.url = self.get_body_argument("url", default=[])
self.action = self.get_body_argument("action", default=[])
self.state = self.get_body_argument("state", default=[])
# Validate url parameter
try:
if not self.url:
pass
elif self.get_component("url_manager").IsURL(self.url):
pass
else:
raise exceptions.InvalidUrlReference(400)
except exceptions.InvalidUrlReference:
raise tornado.web.HTTPError(400, "exceptions.Invalid URL")
# Validate action parameter
try:
if (self.action in self.VALID_ACTIONS) or (not self.action):
pass
else:
raise exceptions.InvalidActionReference(400)
except exceptions.InvalidActionReference:
raise tornado.web.HTTPError(400, "exceptions.Invalid action")
# Validate state parameter
try:
if (self.state == "on") or (self.state == "off") or (not self.state):
pass
else:
raise exceptions.InvalidActionReference(400)
except exceptions.InvalidActionReference:
raise tornado.web.HTTPError(400, "exceptions.Invalid action state")
# If received message is valid, send it to proxy PnH Handler and log this event
try:
if not self.message:
raise exceptions.InvalidMessageReference
else:
# TO DO: send message to proxy handler and verify if event registered in log file
self.application.Core.write_event(json.dumps(self.message), 'a')
except exceptions.InvalidMessageReference:
raise tornado.web.HTTPError(412, "Empty message")
except __HOLE__ as e:
cprint("\n")
cprint("I/O error at event writing: ({0}): {1}".format(e.errno, e.strerror))
cprint("\n")
|
IOError
|
dataset/ETHPy150Open owtf/owtf/framework/interface/api_handlers.py/PlugnhackHandler.post
|
889
|
def clean(self):
cleaned_data = super(AbstractSinglePreferenceForm, self).clean()
try:
self.instance.name, self.instance.section = cleaned_data['name'], cleaned_data['section']
except __HOLE__: # changelist form
pass
try:
self.instance.preference
except NotFoundInRegistry:
raise ValidationError(NotFoundInRegistry.detail_default)
return self.cleaned_data
|
KeyError
|
dataset/ETHPy150Open EliotBerriot/django-dynamic-preferences/dynamic_preferences/forms.py/AbstractSinglePreferenceForm.clean
|
890
|
def clean(self):
cleaned_data = super(AbstractSinglePreferenceForm, self).clean()
try:
self.instance.name, self.instance.section = cleaned_data['name'], cleaned_data['section']
except __HOLE__: # changelist form
pass
i = cleaned_data.get('instance')
if i:
self.instance.instance = i
try:
self.instance.preference
except NotFoundInRegistry:
raise ValidationError(NotFoundInRegistry.detail_default)
return self.cleaned_data
|
KeyError
|
dataset/ETHPy150Open EliotBerriot/django-dynamic-preferences/dynamic_preferences/forms.py/SinglePerInstancePreferenceForm.clean
|
891
|
def _pshell(cmd, cwd=None):
'''
Execute the desired powershell command and ensure that it returns data
in json format and load that into python
'''
if 'convertto-json' not in cmd.lower():
cmd = ' '.join([cmd, '| ConvertTo-Json'])
log.debug('PSGET: {0}'.format(cmd))
ret = __salt__['cmd.shell'](cmd, shell='powershell', cwd=cwd)
try:
ret = json.loads(ret, strict=False)
except __HOLE__:
log.debug('Json not returned')
return ret
|
ValueError
|
dataset/ETHPy150Open saltstack/salt/salt/modules/win_psget.py/_pshell
|
892
|
def getKeyPassphrase(self, keyId, prompt, errorMessage = None):
if errorMessage:
print errorMessage
keyDesc = "conary:pgp:%s" % keyId
try:
import keyutils
# We only initialize keyring if keyutils is not None
keyring = keyutils.KEY_SPEC_SESSION_KEYRING
except __HOLE__:
keyutils = None
# If the passphrase was invalidated, we don't want to be stuck; so, if
# the caller did set an error message, we will not try to use keyutils
if keyutils and not errorMessage:
keyId = keyutils.request_key(keyDesc, keyring)
if keyId is not None:
return keyutils.read_key(keyId)
print
print prompt
passPhrase = getpass.getpass("Passphrase: ")
if keyutils:
keyutils.add_key(keyDesc, passPhrase, keyring)
return passPhrase
|
ImportError
|
dataset/ETHPy150Open sassoftware/conary/conary/callbacks.py/KeyCacheCallback.getKeyPassphrase
|
893
|
def __getattr__(self, k):
try:
return getattr(settings, k)
except __HOLE__:
if k in self.defaults:
return self.defaults[k]
raise ImproperlyConfigured("django-secure requires %s setting." % k)
|
AttributeError
|
dataset/ETHPy150Open carljm/django-secure/djangosecure/conf.py/Configuration.__getattr__
|
894
|
def read_long_description(readme_file):
""" Read package long description from README file """
try:
import pypandoc
except (__HOLE__, OSError) as e:
print('No pypandoc or pandoc: %s' % (e,))
if is_py3:
fh = open(readme_file, encoding='utf-8')
else:
fh = open(readme_file)
long_description = fh.read()
fh.close()
return long_description
else:
return pypandoc.convert(readme_file, 'rst')
|
ImportError
|
dataset/ETHPy150Open walkr/oi/setup.py/read_long_description
|
895
|
def test_double_start(handler, framework):
try:
txaio.start_logging()
except __HOLE__:
assert False, "shouldn't get exception"
|
RuntimeError
|
dataset/ETHPy150Open crossbario/txaio/test/test_logging.py/test_double_start
|
896
|
def test_invalid_level(framework):
try:
txaio.start_logging(level='foo')
assert False, "should get exception"
except __HOLE__ as e:
assert 'Invalid log level' in str(e)
|
RuntimeError
|
dataset/ETHPy150Open crossbario/txaio/test/test_logging.py/test_invalid_level
|
897
|
def generate(env):
"""Add Builders and construction variables for Ghostscript to an
Environment."""
global GhostscriptAction
# The following try-except block enables us to use the Tool
# in standalone mode (without the accompanying pdf.py),
# whenever we need an explicit call of gs via the Gs()
# Builder ...
try:
if GhostscriptAction is None:
GhostscriptAction = SCons.Action.Action('$GSCOM', '$GSCOMSTR')
import pdf
pdf.generate(env)
bld = env['BUILDERS']['PDF']
bld.add_action('.ps', GhostscriptAction)
except __HOLE__, e:
pass
gsbuilder = SCons.Builder.Builder(action = SCons.Action.Action('$GSCOM', '$GSCOMSTR'))
env['BUILDERS']['Gs'] = gsbuilder
env['GS'] = gs
env['GSFLAGS'] = SCons.Util.CLVar('-dNOPAUSE -dBATCH -sDEVICE=pdfwrite')
env['GSCOM'] = '$GS $GSFLAGS -sOutputFile=$TARGET $SOURCES'
|
ImportError
|
dataset/ETHPy150Open kayhayen/Nuitka/nuitka/build/inline_copy/lib/scons-2.3.2/SCons/Tool/gs.py/generate
|
898
|
def handle(self, **options):
connection = connections[options['database']]
try:
connection.client.runshell()
except __HOLE__:
# Note that we're assuming OSError means that the client program
# isn't installed. There's a possibility OSError would be raised
# for some other reason, in which case this error message would be
# inaccurate. Still, this message catches the common case.
raise CommandError(
'You appear not to have the %r program installed or on your path.' %
connection.client.executable_name
)
|
OSError
|
dataset/ETHPy150Open django/django/django/core/management/commands/dbshell.py/Command.handle
|
899
|
def getTestCaseNames(self, testCaseClass):
"""Override to select with selector, unless
config.getTestCaseNamesCompat is True
"""
if self.config.getTestCaseNamesCompat:
return unittest.TestLoader.getTestCaseNames(self, testCaseClass)
def wanted(attr, cls=testCaseClass, sel=self.selector):
item = getattr(cls, attr, None)
# MONKEYPATCH: replace this:
#if not ismethod(item):
# return False
# return sel.wantMethod(item)
# With:
if ismethod(item):
return sel.wantMethod(item)
# static method or something. If this is a static method, we
# can't get the class information, and we have to treat it
# as a function. Thus, we will miss things like class
# attributes for test selection
if isfunction(item):
return sel.wantFunction(item)
return False
# END MONKEYPATCH
cases = list(filter(wanted, dir(testCaseClass)))
for base in testCaseClass.__bases__:
for case in self.getTestCaseNames(base):
if case not in cases:
cases.append(case)
# add runTest if nothing else picked
if not cases and hasattr(testCaseClass, 'runTest'):
cases = ['runTest']
if self.sortTestMethodsUsing:
try:
cases.sort(key=self.sortTestMethodsUsing)
except __HOLE__: # Takes care of things trying to use old cmp functions.
cases.sort(key=functools.cmp_to_key(self.sortTestMethodsUsing))
return cases
##########################################################################
# Apply monkeypatch here
|
TypeError
|
dataset/ETHPy150Open ipython/ipython-py3k/IPython/testing/nosepatch.py/getTestCaseNames
|