| Unnamed: 0 (int64, 0-10k) | function (string, lengths 79-138k) | label (string, 20 classes) | info (string, lengths 42-261) |
|---|---|---|---|
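Each row below pairs a Python function in which one exception class has been replaced by the token `__HOLE__` (the `function` column) with the masked class name (the `label` column) and the snippet's file path in the ETH Py150 Open corpus (the `info` column). As a minimal sketch of how a row reads back into its original form (the `fill_hole` helper and dict-style row access are illustrative assumptions, not part of the dataset), the label simply substitutes for the hole:

```python
# Hypothetical helper (not part of the dataset): rebuild one row's original
# source by substituting the masked exception class back into __HOLE__.
# Assumes a row is available as a dict with 'function' and 'label' keys.
def fill_hole(row):
    return row['function'].replace('__HOLE__', row['label'])

# Usage with row 700 from the table below:
row = {
    'function': ("def get(self, key, default=None):\n"
                 "    try:\n"
                 "        return self[key]\n"
                 "    except __HOLE__:\n"
                 "        return default"),
    'label': 'KeyError',
}
print(fill_hole(row))
```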
700
|
def get(self, key, default=None):
try:
return self[key]
except __HOLE__:
return default
|
KeyError
|
dataset/ETHPy150Open wcong/ants/ants/utils/datatypes.py/MergeDict.get
|
701
|
def run_examples():
exceptions = []
failures = 0
for input_command, expected_output, exit_code in INPUTS_AND_OUTPUTS:
print '\t', input_command,
remove_all_pycs(os.path.abspath(os.path.dirname(__file__)))
try:
process = subprocess.Popen(input_command,
stdout=subprocess.PIPE,
shell=True)
out = process.communicate()[0]+'\n'
out |should_be.equal_to| expected_output
process.wait() |should_be.equal_to| exit_code
print '- OK'
except __HOLE__, e:
print '- FAIL'
print e
failures += 1
return failures
|
AssertionError
|
dataset/ETHPy150Open hltbra/pycukes/specs/console_examples/run_examples.py/run_examples
|
702
|
def is_ffi_instance(obj):
# Compiled FFI modules have a member, ffi, which is an instance of
# CompiledFFI, which behaves similarly to an instance of cffi.FFI. In
# order to simplify handling a CompiledFFI object, we treat them as
# if they're cffi.FFI instances for typing and lowering purposes.
try:
return obj in _ffi_instances or isinstance(obj, cffi.FFI)
except __HOLE__: # Unhashable type possible
return False
|
TypeError
|
dataset/ETHPy150Open numba/numba/numba/typing/cffi_utils.py/is_ffi_instance
|
703
|
def is_cffi_func(obj):
"""Check whether the obj is a CFFI function"""
try:
return ffi.typeof(obj).kind == 'function'
except __HOLE__:
try:
return obj in _ool_func_types
except:
return False
|
TypeError
|
dataset/ETHPy150Open numba/numba/numba/typing/cffi_utils.py/is_cffi_func
|
704
|
def previous_current_next(items):
"""
From http://www.wordaligned.org/articles/zippy-triples-served-with-python
Creates an iterator which returns (previous, current, next) triples,
with ``None`` filling in when there is no previous or next
available.
"""
extend = itertools.chain([None], items, [None])
previous, current, next = itertools.tee(extend, 3)
try:
current.next()
next.next()
next.next()
except __HOLE__:
pass
return itertools.izip(previous, current, next)
#@register.filter
|
StopIteration
|
dataset/ETHPy150Open kylef-archive/lithium/lithium/forum/templatetags/forum.py/previous_current_next
|
705
|
def quoteaddr(addr):
"""Quote a subset of the email addresses defined by RFC 821.
Should be able to handle anything rfc822.parseaddr can handle.
"""
m = (None, None)
try:
m = email.utils.parseaddr(addr)[1]
except __HOLE__:
pass
if m == (None, None): # Indicates parse failure or AttributeError
# something weird here.. punt -ddm
return "<%s>" % addr
elif m is None:
# the sender wants an empty return address
return "<>"
else:
return "<%s>" % m
|
AttributeError
|
dataset/ETHPy150Open deanhiller/databus/webapp/play1.3.x/python/Lib/smtplib.py/quoteaddr
|
706
|
def connect(self, host='localhost', port = 0):
"""Connect to a host on a given port.
If the hostname ends with a colon (`:') followed by a number, and
there is no port specified, that suffix will be stripped off and the
number interpreted as the port number to use.
Note: This method is automatically invoked by __init__, if a host is
specified during instantiation.
"""
if not port and (host.find(':') == host.rfind(':')):
i = host.rfind(':')
if i >= 0:
host, port = host[:i], host[i+1:]
try: port = int(port)
except __HOLE__:
raise socket.error, "nonnumeric port"
if not port: port = self.default_port
if self.debuglevel > 0: print>>stderr, 'connect:', (host, port)
self.sock = self._get_socket(host, port, self.timeout)
(code, msg) = self.getreply()
if self.debuglevel > 0: print>>stderr, "connect:", msg
return (code, msg)
|
ValueError
|
dataset/ETHPy150Open deanhiller/databus/webapp/play1.3.x/python/Lib/smtplib.py/SMTP.connect
|
707
|
def getreply(self):
"""Get a reply from the server.
Returns a tuple consisting of:
- server response code (e.g. '250', or such, if all goes well)
Note: returns -1 if it can't read response code.
- server response string corresponding to response code (multiline
responses are converted to a single, multiline string).
Raises SMTPServerDisconnected if end-of-file is reached.
"""
resp=[]
if self.file is None:
self.file = self.sock.makefile('rb')
while 1:
line = self.file.readline()
if line == '':
self.close()
raise SMTPServerDisconnected("Connection unexpectedly closed")
if self.debuglevel > 0: print>>stderr, 'reply:', repr(line)
resp.append(line[4:].strip())
code=line[:3]
# Check that the error code is syntactically correct.
# Don't attempt to read a continuation line if it is broken.
try:
errcode = int(code)
except __HOLE__:
errcode = -1
break
# Check if multiline response.
if line[3:4]!="-":
break
errmsg = "\n".join(resp)
if self.debuglevel > 0:
print>>stderr, 'reply: retcode (%s); Msg: %s' % (errcode,errmsg)
return errcode, errmsg
|
ValueError
|
dataset/ETHPy150Open deanhiller/databus/webapp/play1.3.x/python/Lib/smtplib.py/SMTP.getreply
|
708
|
def __init__(self, resultfunc, downloadid, peerid, ip, port, rawserver,
encrypted = False):
self.resultfunc = resultfunc
self.downloadid = downloadid
self.peerid = peerid
self.ip = ip
self.port = port
self.encrypted = encrypted
self.closed = False
self.buffer = ''
self.read = self._read
self.write = self._write
try:
self.connection = rawserver.start_connection((ip, port), self)
if encrypted:
self._dc = not(CRYPTO_OK and CHECK_PEER_ID_ENCRYPTED)
self.encrypter = Crypto(True, disable_crypto = self._dc)
self.write(self.encrypter.pubkey+self.encrypter.padding())
else:
self.encrypter = None
self.write(chr(len(protocol_name)) + protocol_name +
(chr(0) * 8) + downloadid)
except socketerror:
self.answer(False)
except __HOLE__:
self.answer(False)
self.next_len, self.next_func = 1+len(protocol_name), self.read_header
|
IOError
|
dataset/ETHPy150Open Cclleemm/FriendlyTorrent/src/tornado/BitTornado/BT1/NatCheck.py/NatCheck.__init__
|
709
|
def answer(self, result):
self.closed = True
try:
self.connection.close()
except __HOLE__:
pass
self.resultfunc(result, self.downloadid, self.peerid, self.ip, self.port)
|
AttributeError
|
dataset/ETHPy150Open Cclleemm/FriendlyTorrent/src/tornado/BitTornado/BT1/NatCheck.py/NatCheck.answer
|
710
|
def __getattr__(self, attr):
if attr not in self.defaults.keys():
raise AttributeError("Invalid HEDWIG setting: '%s'" % attr)
try:
# Check if present in user settings
val = self.user_settings[attr]
except __HOLE__:
# Fall back to defaults
val = self.defaults[attr]
# Cache the result
setattr(self, attr, val)
return val
|
KeyError
|
dataset/ETHPy150Open ofpiyush/hedwig-py/hedwig/core/settings.py/Settings.__getattr__
|
711
|
def process_request(self, req, resp):
# req.stream corresponds to the WSGI wsgi.input environ variable,
# and allows you to read bytes from the request body.
#
# See also: PEP 3333
if req.content_length in (None, 0):
# Nothing to do
return
body = req.stream.read()
if not body:
raise falcon.HTTPBadRequest('Empty request body',
'A valid JSON document is required.')
try:
req.context['doc'] = json.loads(body.decode('utf-8'))
except (ValueError, __HOLE__):
raise falcon.HTTPError(falcon.HTTP_753,
'Malformed JSON',
'Could not decode the request body. The '
'JSON was incorrect or not encoded as '
'UTF-8.')
|
UnicodeDecodeError
|
dataset/ETHPy150Open falconry/falcon/tests/test_example.py/JSONTranslator.process_request
|
712
|
@falcon.before(max_body(64 * 1024))
def on_post(self, req, resp, user_id):
try:
doc = req.context['doc']
except __HOLE__:
raise falcon.HTTPBadRequest(
'Missing thing',
'A thing must be submitted in the request body.')
proper_thing = self.db.add_thing(doc)
resp.status = falcon.HTTP_201
resp.location = '/%s/things/%s' % (user_id, proper_thing['id'])
# Configure your WSGI server to load "things.app" (app is a WSGI callable)
|
KeyError
|
dataset/ETHPy150Open falconry/falcon/tests/test_example.py/ThingsResource.on_post
|
713
|
def __init__(self):
# initialize OptionsLoaderMiddleware
from django.conf import settings
self.loaders = []
for loader_path in getattr(settings, 'OPTIONS_LOADERS', getattr(settings, 'options_loaders', [])) :
try:
mw_module, mw_classname = loader_path.rsplit('.', 1)
except ValueError:
raise ImproperlyConfigured('%s isn\'t a options loader module' % loader_path)
try:
mod = import_module(mw_module)
except __HOLE__, e:
raise ImproperlyConfigured('Error importing options loader %s: "%s"' % (mw_module, e))
try:
mw_class = getattr(mod, mw_classname)
except AttributeError:
raise ImproperlyConfigured('Options loader module "%s" does not define a "%s" class' % (mw_module, mw_classname))
assert hasattr(mw_class, 'load_options') or hasattr(mw_class, 'unload_options'), 'Class provided in OPTIONS_LOADERS "%s" has not load_options or unload_options class methods'
self.loaders.append(mw_class)
|
ImportError
|
dataset/ETHPy150Open joke2k/django-options/django_options/middleware.py/OptionsLoaderMiddleware.__init__
|
714
|
@lazyproperty
def core_properties(self):
"""
Instance of |CoreProperties| holding the read/write Dublin Core
document properties for this presentation. Creates a default core
properties part if one is not present (not common).
"""
try:
return self.part_related_by(RT.CORE_PROPERTIES)
except __HOLE__:
core_props = CoreProperties.default()
self.relate_to(core_props, RT.CORE_PROPERTIES)
return core_props
|
KeyError
|
dataset/ETHPy150Open scanny/python-pptx/pptx/package.py/Package.core_properties
|
715
|
@classmethod
def parse(cls, header, updates_to=None, type=None):
"""
Parse the header, returning a CacheControl object.
The object is bound to the request or response object
``updates_to``, if that is given.
"""
if updates_to:
props = cls.update_dict()
props.updated = updates_to
else:
props = {}
for match in token_re.finditer(header):
name = match.group(1)
value = match.group(2) or match.group(3) or None
if value:
try:
value = int(value)
except __HOLE__:
pass
props[name] = value
obj = cls(props, type=type)
if updates_to:
props.updated_args = (obj,)
return obj
|
ValueError
|
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/webob-1.1.1/webob/cachecontrol.py/CacheControl.parse
|
716
|
@property
def certificate(self):
"""
Retrieves the certificate used to sign the bounce message.
TODO: Cache the certificate based on the cert URL so we don't have to
retrieve it for each bounce message. *We would need to do it in a
secure way so that the cert couldn't be overwritten in the cache*
"""
if not hasattr(self, '_certificate'):
cert_url = self._get_cert_url()
# Only load certificates from a certain domain?
# Without some kind of trusted domain check, any old joe could
# craft a bounce message and sign it using his own certificate
# and we would happily load and verify it.
if not cert_url:
self._certificate = None
return self._certificate
try:
import requests
except ImportError:
raise ImproperlyConfigured("requests is required for bounce message verification.")
try:
import M2Crypto
except __HOLE__:
raise ImproperlyConfigured("M2Crypto is required for bounce message verification.")
# We use requests because it verifies the https certificate
# when retrieving the signing certificate. If https was somehow
# hijacked then all bets are off.
response = requests.get(cert_url)
if response.status_code != 200:
logger.warning('Could not download certificate from %s: "%s"', cert_url, response.status_code)
self._certificate = None
return self._certificate
# Handle errors loading the certificate.
# If the certificate is invalid then return
# false as we couldn't verify the message.
try:
self._certificate = M2Crypto.X509.load_cert_string(response.content)
except M2Crypto.X509.X509Error as e:
logger.warning('Could not load certificate from %s: "%s"', cert_url, e)
self._certificate = None
return self._certificate
|
ImportError
|
dataset/ETHPy150Open django-ses/django-ses/django_ses/utils.py/BounceMessageVerifier.certificate
|
717
|
def display_available_branches():
"""Displays available branches."""
branches = get_branches()
if not branches:
print(colored.red('No branches available'))
return
branch_col = len(max([b.name for b in branches], key=len)) + 1
for branch in branches:
try:
branch_is_selected = (branch.name == get_current_branch_name())
except __HOLE__:
branch_is_selected = False
marker = '*' if branch_is_selected else ' '
color = colored.green if branch_is_selected else colored.yellow
pub = '(published)' if branch.is_published else '(unpublished)'
print(columns(
[colored.red(marker), 2],
[color(branch.name), branch_col],
[black(pub), 14]
))
|
TypeError
|
dataset/ETHPy150Open kennethreitz/legit/legit/cli.py/display_available_branches
|
718
|
def move_to_tre_server(self, fnm):
source = os.path.join(conf.get(self.backend_name, 'blob.datadir'), fnm)
target = os.path.join(conf.get('general', 'tre_data_folder'), fnm)
try:
os.symlink(os.path.abspath(source), os.path.abspath(target))
except __HOLE__, ex:
if ex.errno == 17:
pass
|
OSError
|
dataset/ETHPy150Open livenson/vcdm/src/vcdm/backends/blob/localdisk.py/POSIXBlob.move_to_tre_server
|
719
|
def run(self):
"""Main run loop."""
self._timer_init()
self.log.info("Starting the run loop at %sHz", self.HZ)
start_time = time.time()
loops = 0
secs_per_tick = self.secs_per_tick
self.next_tick_time = time.time()
try:
while self.done is False:
time.sleep(0.001)
self.get_from_queue()
if self.next_tick_time <= time.time(): # todo change this
self.timer_tick()
self.next_tick_time += secs_per_tick
loops += 1
self._do_shutdown()
self.log.info("Target loop rate: %s Hz", self.HZ)
self.log.info("Actual loop rate: %s Hz",
loops / (time.time() - start_time))
except __HOLE__:
self.shutdown()
|
KeyboardInterrupt
|
dataset/ETHPy150Open missionpinball/mpf/mpf/media_controller/core/media_controller.py/MediaController.run
|
720
|
def bcp_hello(self, **kwargs):
"""Processes an incoming BCP 'hello' command."""
try:
if LooseVersion(kwargs['version']) == (
LooseVersion(version.__bcp_version__)):
self.send('hello', version=version.__bcp_version__)
else:
self.send('hello', version='unknown protocol version')
except __HOLE__:
self.log.warning("Received invalid 'version' parameter with "
"'hello'")
|
KeyError
|
dataset/ETHPy150Open missionpinball/mpf/mpf/media_controller/core/media_controller.py/MediaController.bcp_hello
|
721
|
def bcp_player_variable(self, name, value, prev_value, change, player_num,
**kwargs):
"""Processes an incoming BCP 'player_variable' command."""
try:
self.player_list[int(player_num)-1][name] = value
except (__HOLE__, KeyError):
pass
|
IndexError
|
dataset/ETHPy150Open missionpinball/mpf/mpf/media_controller/core/media_controller.py/MediaController.bcp_player_variable
|
722
|
def bcp_player_score(self, value, prev_value, change, player_num,
**kwargs):
"""Processes an incoming BCP 'player_score' command."""
try:
self.player_list[int(player_num)-1]['score'] = int(value)
except (IndexError, __HOLE__):
pass
|
KeyError
|
dataset/ETHPy150Open missionpinball/mpf/mpf/media_controller/core/media_controller.py/MediaController.bcp_player_score
|
723
|
def bcp_player_turn_start(self, player_num, **kwargs):
"""Processes an incoming BCP 'player_turn_start' command."""
self.log.debug("bcp_player_turn_start")
if ((self.player and self.player.number != player_num) or
not self.player):
try:
self.player = self.player_list[int(player_num)-1]
except __HOLE__:
self.log.error('Received player turn start for player %s, but '
'only %s player(s) exist',
player_num, len(self.player_list))
|
IndexError
|
dataset/ETHPy150Open missionpinball/mpf/mpf/media_controller/core/media_controller.py/MediaController.bcp_player_turn_start
|
724
|
def get_debug_status(self, debug_path):
if self.options['loglevel'] > 10 or self.options['consoleloglevel'] > 10:
return True
class_, module = debug_path.split('|')
try:
if module in self.active_debugger[class_]:
return True
else:
return False
except __HOLE__:
return False
|
KeyError
|
dataset/ETHPy150Open missionpinball/mpf/mpf/media_controller/core/media_controller.py/MediaController.get_debug_status
|
725
|
def _set_machine_var(self, name, value):
try:
prev_value = self.machine_vars[name]
except __HOLE__:
prev_value = None
self.machine_vars[name] = value
try:
change = value-prev_value
except TypeError:
if prev_value != value:
change = True
else:
change = False
if change:
self.log.debug("Setting machine_var '%s' to: %s, (prior: %s, "
"change: %s)", name, value, prev_value,
change)
self.events.post('machine_var_' + name,
value=value,
prev_value=prev_value,
change=change)
if self.machine_var_monitor:
for callback in self.monitors['machine_var']:
callback(name=name, value=self.vars[name],
prev_value=prev_value, change=change)
|
KeyError
|
dataset/ETHPy150Open missionpinball/mpf/mpf/media_controller/core/media_controller.py/MediaController._set_machine_var
|
726
|
def __eq__(self, other):
"""Check if the properties and bodies of this Message and another Message are the same
Received messages may contain a 'delivery_info' attribute, which isn't compared.
"""
try:
return super(Message, self).__eq__(other) and self.body == other.body
except __HOLE__:
return False
|
AttributeError
|
dataset/ETHPy150Open veegee/amqpy/amqpy/message.py/Message.__eq__
|
727
|
@classmethod
def resolve_alias(cls, heading):
""""""
titled_heading = heading.title()
try:
return cls.ALIASES[titled_heading]
except __HOLE__:
return heading
|
KeyError
|
dataset/ETHPy150Open KristoforMaynard/SublimeAutoDocstring/docstring_styles.py/Section.resolve_alias
|
728
|
def exists(path):
try:
return os.path.exists(path)
except __HOLE__:
return False
|
IOError
|
dataset/ETHPy150Open comodit/synapse-agent/synapse/resources/files-plugin/unix-files.py/exists
|
729
|
def is_file(path):
try:
return os.path.isfile(path)
except __HOLE__:
return False
|
IOError
|
dataset/ETHPy150Open comodit/synapse-agent/synapse/resources/files-plugin/unix-files.py/is_file
|
730
|
def create_folders(path):
try:
# Recursive mkdirs if dir path is not complete
os.makedirs(path)
except __HOLE__:
#Already exists, no prob !
pass
except Exception as err:
# Another problem
raise ResourceException('Failed when creating folders: %s' % err)
|
OSError
|
dataset/ETHPy150Open comodit/synapse-agent/synapse/resources/files-plugin/unix-files.py/create_folders
|
731
|
def update_meta(path, owner, group, filemode):
if not os.path.exists(path):
raise ResourceException('This path does not exist.')
ownerid = get_owner_id(owner)
groupid = get_group_id(group)
octfilemode = int(filemode, 8)
try:
os.chmod(path, octfilemode)
os.chown(path, ownerid, groupid)
except __HOLE__ as err:
raise ResourceException(err)
|
ValueError
|
dataset/ETHPy150Open comodit/synapse-agent/synapse/resources/files-plugin/unix-files.py/update_meta
|
732
|
def delete(path):
try:
os.remove(path)
except __HOLE__:
log.debug('File %s does not exist', path)
|
OSError
|
dataset/ETHPy150Open comodit/synapse-agent/synapse/resources/files-plugin/unix-files.py/delete
|
733
|
def owner(path):
if not os.path.exists(path):
raise ResourceException('File does not exist.')
si = os.stat(path)
uid = si.st_uid
try:
return pwd.getpwuid(uid).pw_name
except __HOLE__ as err:
raise ResourceException(err)
|
KeyError
|
dataset/ETHPy150Open comodit/synapse-agent/synapse/resources/files-plugin/unix-files.py/owner
|
734
|
def get_owner_id(name):
try:
return pwd.getpwnam(name).pw_uid
except __HOLE__ as err:
raise ResourceException(err)
|
KeyError
|
dataset/ETHPy150Open comodit/synapse-agent/synapse/resources/files-plugin/unix-files.py/get_owner_id
|
735
|
def group(path):
if not os.path.exists(path):
raise ResourceException('File does not exist.')
si = os.stat(path)
gid = si.st_gid
try:
return grp.getgrgid(gid).gr_name
except __HOLE__ as err:
raise ResourceException(err)
|
KeyError
|
dataset/ETHPy150Open comodit/synapse-agent/synapse/resources/files-plugin/unix-files.py/group
|
736
|
def get_group_id(name):
try:
return grp.getgrnam(name).gr_gid
except __HOLE__ as err:
raise ResourceException(err)
|
KeyError
|
dataset/ETHPy150Open comodit/synapse-agent/synapse/resources/files-plugin/unix-files.py/get_group_id
|
737
|
def get_readme():
try:
return open(os.path.join(os.path.dirname(__file__), 'README.rst')).read()
except __HOLE__:
return ''
|
IOError
|
dataset/ETHPy150Open Atomidata/django-audit-log/setup.py/get_readme
|
738
|
def http_emitter(message, log, agentConfig, endpoint):
"Send payload"
url = agentConfig['sd_url']
log.debug('http_emitter: attempting postback to ' + url)
# Post back the data
try:
payload = json.dumps(message)
except __HOLE__:
message = remove_control_chars(message)
payload = json.dumps(message)
#zipped = zlib.compress(payload)
zipped = payload
log.debug("payload_size=%d, compressed_size=%d, compression_ratio=%.3f"
% (len(payload), len(zipped), float(len(payload))/float(len(zipped))))
agentKey = message.get('agentKey', None)
if not agentKey:
raise Exception("The http emitter requires an agent key")
url = "{0}/intake/{1}?agent_key={2}".format(url, endpoint, agentKey)
try:
headers = post_headers(agentConfig, zipped)
r = requests.post(url, data=zipped, timeout=5, headers=headers)
r.raise_for_status()
if r.status_code >= 200 and r.status_code < 205:
log.debug("Payload accepted")
except Exception:
log.exception("Unable to post payload.")
try:
log.error("Received status code: {0}".format(r.status_code))
except Exception:
pass
|
UnicodeDecodeError
|
dataset/ETHPy150Open serverdensity/sd-agent/emitter.py/http_emitter
|
739
|
def assert_series_equal(left, right, check_names=True, **kwargs):
"""Backwards compatibility wrapper for
``pandas.util.testing.assert_series_equal``
Examples
--------
>>> import pandas as pd
>>> s = pd.Series(list('abc'), name='a')
>>> s2 = pd.Series(list('abc'), name='b')
>>> assert_series_equal(s, s2) # doctest: +ELLIPSIS
Traceback (most recent call last):
...
AssertionError: ...
>>> assert_series_equal(s, s2, check_names=False)
See Also
--------
pandas.util.testing.assert_series_equal
"""
try:
return tm.assert_series_equal(left, right, check_names=check_names,
**kwargs)
except __HOLE__:
if check_names:
assert left.name == right.name
return tm.assert_series_equal(left, right, **kwargs)
|
TypeError
|
dataset/ETHPy150Open blaze/blaze/blaze/compatibility.py/assert_series_equal
|
740
|
def _load_github_hooks(github_url='https://api.github.com'):
"""Request GitHub's IP block from their API.
Return the IP network.
If we detect a rate-limit error, raise an error message stating when
the rate limit will reset.
If something else goes wrong, raise a generic 503.
"""
try:
resp = requests.get(github_url + '/meta')
if resp.status_code == 200:
return resp.json()['hooks']
else:
if resp.headers.get('X-RateLimit-Remaining') == '0':
reset_ts = int(resp.headers['X-RateLimit-Reset'])
reset_string = time.strftime('%a, %d %b %Y %H:%M:%S GMT',
time.gmtime(reset_ts))
raise ServiceUnavailable('Rate limited from GitHub until ' +
reset_string)
else:
raise ServiceUnavailable('Error reaching GitHub')
except (__HOLE__, ValueError, requests.exceptions.ConnectionError):
raise ServiceUnavailable('Error reaching GitHub')
# So we don't get rate limited
|
KeyError
|
dataset/ETHPy150Open nickfrostatx/flask-hookserver/flask_hookserver.py/_load_github_hooks
|
741
|
def create_event(message_type=None, routing_key='everybody', **kwargs):
'''
Create an event in VictorOps. Designed for use in states.
The following parameters are required:
:param message_type: One of the following values: INFO, WARNING, ACKNOWLEDGEMENT, CRITICAL, RECOVERY.
The following parameters are optional:
:param routing_key: The key for where messages should be routed. By default, sent to
'everyone' route.
:param entity_id: The name of alerting entity. If not provided, a random name will be assigned.
:param timestamp: Timestamp of the alert in seconds since epoch. Defaults to the
time the alert is received at VictorOps.
:param timestamp_fmt The date format for the timestamp parameter.
:param state_start_time: The time this entity entered its current state
(seconds since epoch). Defaults to the time alert is received.
:param state_start_time_fmt: The date format for the timestamp parameter.
:param state_message: Any additional status information from the alert item.
:param entity_is_host: Used within VictorOps to select the appropriate
display format for the incident.
:param entity_display_name: Used within VictorOps to display a human-readable name for the entity.
:param ack_message: A user entered comment for the acknowledgment.
:param ack_author: The user that acknowledged the incident.
:return: A dictionary with result, entity_id, and message if result was failure.
CLI Example:
.. code-block:: yaml
salt myminion victorops.create_event message_type='CRITICAL' routing_key='everyone' \
entity_id='hostname/diskspace'
salt myminion victorops.create_event message_type='ACKNOWLEDGEMENT' routing_key='everyone' \
entity_id='hostname/diskspace' ack_message='Acknowledged' ack_author='username'
salt myminion victorops.create_event message_type='RECOVERY' routing_key='everyone' \
entity_id='hostname/diskspace'
The following parameters are required:
message_type
'''
keyword_args = {'entity_id': str,
'state_message': str,
'entity_is_host': bool,
'entity_display_name': str,
'ack_message': str,
'ack_author': str
}
data = {}
if not message_type:
raise SaltInvocationError('Required argument "message_type" is missing.')
if message_type.upper() not in ['INFO', 'WARNING', 'ACKNOWLEDGEMENT', 'CRITICAL', 'RECOVERY']:
raise SaltInvocationError('"message_type" must be INFO, WARNING, ACKNOWLEDGEMENT, CRITICAL, or RECOVERY.')
data['message_type'] = message_type
data['monitoring_tool'] = 'SaltStack'
if 'timestamp' in kwargs:
timestamp_fmt = kwargs.get('timestamp_fmt', '%Y-%m-%dT%H:%M:%S')
try:
timestamp = datetime.datetime.strptime(kwargs['timestamp'], timestamp_fmt)
data['timestamp'] = int(time.mktime(timestamp.timetuple()))
except (TypeError, ValueError):
raise SaltInvocationError('Date string could not be parsed: {0}, {1}'.format(
kwargs['timestamp'], timestamp_fmt)
)
if 'state_start_time' in kwargs:
state_start_time_fmt = kwargs.get('state_start_time_fmt', '%Y-%m-%dT%H:%M:%S')
try:
state_start_time = datetime.datetime.strptime(kwargs['state_start_time'], state_start_time_fmt)
data['state_start_time'] = int(time.mktime(state_start_time.timetuple()))
except (__HOLE__, ValueError):
raise SaltInvocationError('Date string could not be parsed: {0}, {1}'.format(
kwargs['state_start_time'], state_start_time_fmt)
)
for kwarg in keyword_args:
if kwarg in kwargs:
if isinstance(kwargs[kwarg], keyword_args[kwarg]):
data[kwarg] = kwargs[kwarg]
else:
# Should this faile on the wrong type.
log.error('Wrong type, skipping {0}'.format(kwarg))
status, result = _query(action='alert',
routing_key=routing_key,
data=json.dumps(data),
method='POST'
)
return result
|
TypeError
|
dataset/ETHPy150Open saltstack/salt/salt/modules/victorops.py/create_event
|
742
|
def chunks(self, chunk_size=None):
"""
Read the file and yield chucks of ``chunk_size`` bytes (defaults to
``UploadedFile.DEFAULT_CHUNK_SIZE``).
"""
if not chunk_size:
chunk_size = self.DEFAULT_CHUNK_SIZE
try:
self.seek(0)
except (__HOLE__, UnsupportedOperation):
pass
while True:
data = self.read(chunk_size)
if not data:
break
yield data
|
AttributeError
|
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/Django-1.6.10/django/core/files/base.py/File.chunks
|
743
|
def read_utf8(self):
try:
import codecs
f = codecs.open(self.filepath, encoding='utf_8', mode='r')
except __HOLE__ as ioe:
sys.stderr.write("Glue Plugin Error: Unable to open file for read with read_utf8() method.")
raise ioe
try:
return f.read()
except Exception as e:
sys.stderr.write("Glue Plugin Error: Unable to read the file with UTF-8 encoding using the read_utf8() method.")
raise e
finally:
f.close()
#------------------------------------------------------------------------------
# [ FileWriter class ] - write to local files
#------------------------------------------------------------------------------
|
IOError
|
dataset/ETHPy150Open chrissimpkins/glue/GlueIO.py/FileReader.read_utf8
|
744
|
def write_utf8(self, text):
try:
import codecs
f = codecs.open(self.filepath, encoding='utf_8', mode='w')
except __HOLE__ as ioe:
sys.stderr.write("Glue Plugin Error: Unable to open file for write with the write_utf8() method.")
raise ioe
try:
f.write(text)
except Exception as e:
sys.stderr.write("Glue Plugin Error: Unable to write UTF-8 encoded text to file with the write_utf8() method.")
raise e
finally:
f.close()
|
IOError
|
dataset/ETHPy150Open chrissimpkins/glue/GlueIO.py/FileWriter.write_utf8
|
745
|
def postproc(traj, results, idx):
get_root_logger().info(idx)
if isinstance(traj.v_storage_service, (LockWrapper, ReferenceWrapper)):
traj.f_load_skeleton()
if isinstance(traj.v_storage_service, (QueueStorageServiceSender, PipeStorageServiceSender)):
try:
traj.f_load()
raise RuntimeError('Should not load')
except __HOLE__:
pass
if len(results) <= 4 and len(traj) == 4:
return {'x':[1,2], 'y':[1,2]}
if len(results) <= 6 and len(traj) == 6:
traj.f_expand({'x':[2,3], 'y':[0,1]})
|
NotImplementedError
|
dataset/ETHPy150Open SmokinCaterpillar/pypet/pypet/tests/integration/pipeline_test.py/postproc
|
746
|
def vacuum(self, i, namespace, duration):
"""
Trim metrics that are older than settings.FULL_DURATION and
purge old metrics.
"""
begin = time()
# Discover assigned metrics
unique_metrics = list(self.redis_conn.smembers(namespace + 'unique_metrics'))
keys_per_processor = len(unique_metrics) / settings.ROOMBA_PROCESSES
assigned_max = i * keys_per_processor
assigned_min = assigned_max - keys_per_processor
assigned_keys = range(assigned_min, assigned_max)
# Compile assigned metrics
assigned_metrics = [unique_metrics[index] for index in assigned_keys]
euthanized = 0
blocked = 0
for i in xrange(len(assigned_metrics)):
self.check_if_parent_is_alive()
pipe = self.redis_conn.pipeline()
now = time()
key = assigned_metrics[i]
try:
# WATCH the key
pipe.watch(key)
# Everything below NEEDS to happen before another datapoint
# comes in. If your data has a very small resolution (<.1s),
# this technique may not suit you.
raw_series = pipe.get(key)
unpacker = Unpacker(use_list = False)
unpacker.feed(raw_series)
timeseries = sorted([unpacked for unpacked in unpacker])
# Put pipe back in multi mode
pipe.multi()
# There's one value. Purge if it's too old
try:
if not isinstance(timeseries[0], TupleType):
if timeseries[0] < now - duration:
pipe.delete(key)
pipe.srem(namespace + 'unique_metrics', key)
pipe.execute()
euthanized += 1
continue
except __HOLE__:
continue
# Check if the last value is too old and purge
if timeseries[-1][0] < now - duration:
pipe.delete(key)
pipe.srem(namespace + 'unique_metrics', key)
pipe.execute()
euthanized += 1
continue
# Remove old datapoints and duplicates from timeseries
temp = set()
temp_add = temp.add
delta = now - duration
trimmed = [
tuple for tuple in timeseries
if tuple[0] > delta
and tuple[0] not in temp
and not temp_add(tuple[0])
]
# Purge if everything was deleted, set key otherwise
if len(trimmed) > 0:
# Serialize and turn key back into not-an-array
btrimmed = packb(trimmed)
if len(trimmed) <= 15:
value = btrimmed[1:]
elif len(trimmed) <= 65535:
value = btrimmed[3:]
else:
value = btrimmed[5:]
pipe.set(key, value)
else:
pipe.delete(key)
pipe.srem(namespace + 'unique_metrics', key)
euthanized += 1
pipe.execute()
except WatchError:
blocked += 1
assigned_metrics.append(key)
except Exception as e:
# If something bad happens, zap the key and hope it goes away
pipe.delete(key)
pipe.srem(namespace + 'unique_metrics', key)
pipe.execute()
euthanized += 1
logger.info(e)
logger.info("Euthanizing " + key)
finally:
pipe.reset()
logger.info('operated on %s in %f seconds' % (namespace, time() - begin))
logger.info('%s keyspace is %d' % (namespace, (len(assigned_metrics) - euthanized)))
logger.info('blocked %d times' % blocked)
logger.info('euthanized %d geriatric keys' % euthanized)
if (time() - begin < 30):
logger.info('sleeping due to low run time...')
sleep(10)
|
IndexError
|
dataset/ETHPy150Open etsy/skyline/src/horizon/roomba.py/Roomba.vacuum
|
747
|
def _cursor(self):
if self.connection is None:
## The following is the same as in django.db.backends.sqlite3.base ##
settings_dict = self.settings_dict
if not settings_dict['NAME']:
raise ImproperlyConfigured("Please fill out the database NAME in the settings module before using the database.")
kwargs = {
'database': settings_dict['NAME'],
'detect_types': Database.PARSE_DECLTYPES | Database.PARSE_COLNAMES,
}
kwargs.update(settings_dict['OPTIONS'])
self.connection = Database.connect(**kwargs)
# Register extract, date_trunc, and regexp functions.
self.connection.create_function("django_extract", 2, _sqlite_extract)
self.connection.create_function("django_date_trunc", 2, _sqlite_date_trunc)
self.connection.create_function("regexp", 2, _sqlite_regexp)
connection_created.send(sender=self.__class__, connection=self)
## From here on, customized for GeoDjango ##
# Enabling extension loading on the SQLite connection.
try:
self.connection.enable_load_extension(True)
except __HOLE__:
raise ImproperlyConfigured('The pysqlite library does not support C extension loading. '
'Both SQLite and pysqlite must be configured to allow '
'the loading of extensions to use SpatiaLite.'
)
# Loading the SpatiaLite library extension on the connection, and returning
# the created cursor.
cur = self.connection.cursor(factory=SQLiteCursorWrapper)
try:
cur.execute("SELECT load_extension(%s)", (self.spatialite_lib,))
except Exception, msg:
raise ImproperlyConfigured('Unable to load the SpatiaLite library extension '
'"%s" because: %s' % (self.spatialite_lib, msg))
return cur
else:
return self.connection.cursor(factory=SQLiteCursorWrapper)
|
AttributeError
|
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.2/django/contrib/gis/db/backends/spatialite/base.py/DatabaseWrapper._cursor
|
748
|
def validate(self, value, model_instance):
super(JSONField, self).validate(value, model_instance)
try:
json.dumps(value)
except __HOLE__:
raise exceptions.ValidationError(
self.error_messages['invalid'],
code='invalid',
params={'value': value},
)
|
TypeError
|
dataset/ETHPy150Open django/django/django/contrib/postgres/fields/jsonb.py/JSONField.validate
|
749
|
def as_sql(self, compiler, connection):
key_transforms = [self.key_name]
previous = self.lhs
while isinstance(previous, KeyTransform):
key_transforms.insert(0, previous.key_name)
previous = previous.lhs
lhs, params = compiler.compile(previous)
if len(key_transforms) > 1:
return "{} #> %s".format(lhs), [key_transforms] + params
try:
int(self.key_name)
except __HOLE__:
lookup = "'%s'" % self.key_name
else:
lookup = "%s" % self.key_name
return "%s -> %s" % (lhs, lookup), params
|
ValueError
|
dataset/ETHPy150Open django/django/django/contrib/postgres/fields/jsonb.py/KeyTransform.as_sql
|
750
|
def __getitem__(self, coord):
x, y = coord
if isinstance(x, slice) or isinstance(y, slice):
if isinstance(x, slice):
x_indices = x.indices(self.width)
else:
x_indices = [x]
if isinstance(y, slice):
y_indices = y.indices(self.height)
else:
y_indices = [y]
try:
wrap_x, wrap_y = self._wrap_functions
ret = []
for y_index in range(*y_indices):
nodes_y = self.nodes[ wrap_y(y_index) ]
for x_index in range(*x_indices):
ret.append( nodes_y[ wrap_x(x_index) ] )
except IndexError:
raise IndexError("Slice out of range")
return ret
x, y = self.wrap(coord)
if x < 0 or y < 0:
raise IndexError("coordinate out of range")
try:
return self.nodes[y][x]
except __HOLE__:
raise IndexError("coordinate out of range")
|
IndexError
|
dataset/ETHPy150Open PythonProgramming/Beginning-Game-Development-with-Python-and-Pygame/gameobjects/grid.py/Grid.__getitem__
|
751
|
def get_layout_by_name(layout_name):
""" Get a layout.
Parameters
----------
layout_name : str
a valid layout name
Returns
-------
layout_str : str
the layout as a string
Raises
------
KeyError
if the layout_name is not known
See Also
--------
get_available_layouts
"""
# decode and return this layout
try:
return zlib.decompress(base64.decodebytes(__layouts.__dict__[layout_name].encode())).decode()
except __HOLE__ as ke:
# This happens if layout_name is not a valid key in the __dict__.
# I.e. if the layout_name is not available.
# The error message would be to terse "KeyError: 'non_existing_layout'",
# thus reraise as ValueError with appropriate error message.
raise ValueError("Layout: '%s' is not known." % ke.args)
|
KeyError
|
dataset/ETHPy150Open ASPP/pelita/pelita/layout.py/get_layout_by_name
|
752
|
def __new__(cls):
global undefined
try:
return undefined
except __HOLE__:
undefined = super().__new__(cls)
return undefined
|
NameError
|
dataset/ETHPy150Open tailhook/pyzza/pyzza/abc.py/Undefined.__new__
|
753
|
@classmethod
def init_conn(cls):
try:
hive
except __HOLE__:
raise DbError('Hive client module not found/loaded. Please make sure all dependencies are installed\n')
cls.conn = cls.conn()
cls.cursor = cls.conn.cursor()
cls.conn_initialized = True
return cls
|
NameError
|
dataset/ETHPy150Open appnexus/schema-tool/schematool/db/_hive.py/HiveDb.init_conn
|
754
|
@classmethod
def _check_geo_field(cls, opts, lookup):
"""
Utility for checking the given lookup with the given model options.
The lookup is a string either specifying the geographic field, e.g.
'point, 'the_geom', or a related lookup on a geographic field like
'address__point'.
If a GeometryField exists according to the given lookup on the model
options, it will be returned. Otherwise returns None.
"""
# This takes into account the situation where the lookup is a
# lookup to a related geographic field, e.g., 'address__point'.
field_list = lookup.split(LOOKUP_SEP)
# Reversing so list operates like a queue of related lookups,
# and popping the top lookup.
field_list.reverse()
fld_name = field_list.pop()
try:
geo_fld = opts.get_field(fld_name)
# If the field list is still around, then it means that the
# lookup was for a geometry field across a relationship --
# thus we keep on getting the related model options and the
# model field associated with the next field in the list
# until there's no more left.
while len(field_list):
opts = geo_fld.rel.to._meta
geo_fld = opts.get_field(field_list.pop())
except (FieldDoesNotExist, __HOLE__):
return False
# Finally, make sure we got a Geographic field and return.
if isinstance(geo_fld, GeometryField):
return geo_fld
else:
return False
|
AttributeError
|
dataset/ETHPy150Open CollabQ/CollabQ/vendor/django/contrib/gis/db/models/sql/where.py/GeoWhereNode._check_geo_field
|
755
|
def start_new_thread(function, args, kwargs={}):
"""Dummy implementation of _thread.start_new_thread().
Compatibility is maintained by making sure that ``args`` is a
tuple and ``kwargs`` is a dictionary. If an exception is raised
and it is SystemExit (which can be done by _thread.exit()) it is
caught and nothing is done; all other exceptions are printed out
by using traceback.print_exc().
If the executed function calls interrupt_main the KeyboardInterrupt will be
raised when the function returns.
"""
if type(args) != type(tuple()):
raise TypeError("2nd arg must be a tuple")
if type(kwargs) != type(dict()):
raise TypeError("3rd arg must be a dict")
global _main
_main = False
try:
function(*args, **kwargs)
except __HOLE__:
pass
except:
import traceback
traceback.print_exc()
_main = True
global _interrupt
if _interrupt:
_interrupt = False
raise KeyboardInterrupt
|
SystemExit
|
dataset/ETHPy150Open amrdraz/kodr/app/brython/www/src/Lib/_dummy_thread.py/start_new_thread
|
756
|
def test_is_iterable_failure(self):
try:
assert_that(123).is_iterable()
fail('should have raised error')
except __HOLE__ as ex:
assert_that(str(ex)).is_equal_to('Expected iterable, but was not.')
|
AssertionError
|
dataset/ETHPy150Open ActivisionGameScience/assertpy/tests/test_collection.py/TestCollection.test_is_iterable_failure
|
757
|
def test_is_not_iterable_failure(self):
try:
assert_that(['a','b','c']).is_not_iterable()
fail('should have raised error')
except __HOLE__ as ex:
assert_that(str(ex)).is_equal_to('Expected not iterable, but was.')
|
AssertionError
|
dataset/ETHPy150Open ActivisionGameScience/assertpy/tests/test_collection.py/TestCollection.test_is_not_iterable_failure
|
758
|
def test_is_subset_of_failure(self):
try:
assert_that(['a','b','c']).is_subset_of(['a','b'])
fail('should have raised error')
except __HOLE__ as ex:
assert_that(str(ex)).is_equal_to("Expected <['a', 'b', 'c']> to be subset of ['a', 'b'], but <c> was missing.")
|
AssertionError
|
dataset/ETHPy150Open ActivisionGameScience/assertpy/tests/test_collection.py/TestCollection.test_is_subset_of_failure
|
759
|
def test_is_subset_of_failure(self):
try:
assert_that(set([1,2,3])).is_subset_of(set([1,2]))
fail('should have raised error')
except __HOLE__ as ex:
assert_that(str(ex)).is_equal_to('Expected <%s> to be subset of [1, 2], but <3> was missing.' % set([1,2,3]))
|
AssertionError
|
dataset/ETHPy150Open ActivisionGameScience/assertpy/tests/test_collection.py/TestCollection.test_is_subset_of_failure
|
760
|
def test_is_subset_of_bad_val_failure(self):
try:
assert_that(123).is_subset_of(1234)
fail('should have raised error')
except __HOLE__ as ex:
assert_that(str(ex)).is_equal_to('val is not iterable')
|
TypeError
|
dataset/ETHPy150Open ActivisionGameScience/assertpy/tests/test_collection.py/TestCollection.test_is_subset_of_bad_val_failure
|
761
|
def test_is_subset_of_bad_arg_failure(self):
try:
assert_that(['a','b','c']).is_subset_of()
fail('should have raised error')
except __HOLE__ as ex:
assert_that(str(ex)).is_equal_to('one or more superset args must be given')
|
ValueError
|
dataset/ETHPy150Open ActivisionGameScience/assertpy/tests/test_collection.py/TestCollection.test_is_subset_of_bad_arg_failure
|
762
|
def releaseLicenseForRenderNode(self, licenseName, renderNode):
"""
:licenseName:
:renderNode: render node object expected
"""
if "&" not in licenseName:
licenseName += "&"
for licName in licenseName.split("&"):
if len(licName):
try:
lic = self.licenses[licName]
try:
if renderNode in lic.currentUsingRenderNodes:
rnId = lic.currentUsingRenderNodes.index(renderNode)
del lic.currentUsingRenderNodes[rnId]
lic.release()
except IndexError:
print "Cannot release license %s for renderNode %s" % (licName, renderNode)
except __HOLE__:
print "License %s not found" % licName
|
KeyError
|
dataset/ETHPy150Open mikrosimage/OpenRenderManagement/src/octopus/dispatcher/licenses/licensemanager.py/LicenseManager.releaseLicenseForRenderNode
|
763
|
def reserveLicenseForRenderNode(self, licenseName, renderNode):
if "&" not in licenseName:
licenseName += "&"
globalsuccess = True
liclist = []
for licName in licenseName.split("&"):
if len(licName):
try:
lic = self.licenses[licName]
success = lic.reserve()
if success:
lic.currentUsingRenderNodes.append(renderNode)
liclist.append(lic)
else:
# if only one reservation fails, the whole reservation fails
globalsuccess = False
except __HOLE__:
print("License %r not found" % licName)
globalsuccess = False
# in case of reservation failure, release the already reserved licenses, if any
if not globalsuccess:
for lic in liclist:
rnId = lic.currentUsingRenderNodes.index(renderNode)
del lic.currentUsingRenderNodes[rnId]
lic.release()
return globalsuccess
|
KeyError
|
dataset/ETHPy150Open mikrosimage/OpenRenderManagement/src/octopus/dispatcher/licenses/licensemanager.py/LicenseManager.reserveLicenseForRenderNode
|
764
|
def setMaxLicensesNumber(self, licenseName, number):
try:
lic = self.licenses[licenseName]
if lic.maximum != number:
lic.setMaxNumber(number)
except __HOLE__:
self.licenses[licenseName] = LicenseManager.License(licenseName, number)
print "License %r not found... Creating new entry" % licenseName
|
KeyError
|
dataset/ETHPy150Open mikrosimage/OpenRenderManagement/src/octopus/dispatcher/licenses/licensemanager.py/LicenseManager.setMaxLicensesNumber
|
765
|
def create_size(self, photosize):
if self.size_exists(photosize):
return
try:
im = Image.open(self.image.storage.open(self.image.name))
except IOError:
return
# Save the original format
im_format = im.format
# Apply effect if found
if self.effect is not None:
im = self.effect.pre_process(im)
elif photosize.effect is not None:
im = photosize.effect.pre_process(im)
# Rotate if found & necessary
if 'Image Orientation' in self.EXIF() and \
self.EXIF().get('Image Orientation').values[0] in IMAGE_EXIF_ORIENTATION_MAP:
im = im.transpose(
IMAGE_EXIF_ORIENTATION_MAP[self.EXIF().get('Image Orientation').values[0]])
# Resize/crop image
if im.size != photosize.size and photosize.size != (0, 0):
im = self.resize_image(im, photosize)
# Apply watermark if found
if photosize.watermark is not None:
im = photosize.watermark.post_process(im)
# Apply effect if found
if self.effect is not None:
im = self.effect.post_process(im)
elif photosize.effect is not None:
im = photosize.effect.post_process(im)
# Save file
im_filename = getattr(self, "get_%s_filename" % photosize.name)()
try:
buffer = BytesIO()
if im_format != 'JPEG':
im.save(buffer, im_format)
else:
im.save(buffer, 'JPEG', quality=int(photosize.quality),
optimize=True)
buffer_contents = ContentFile(buffer.getvalue())
self.image.storage.save(im_filename, buffer_contents)
except __HOLE__ as e:
if self.image.storage.exists(im_filename):
self.image.storage.delete(im_filename)
raise e
|
IOError
|
dataset/ETHPy150Open jdriscoll/django-photologue/photologue/models.py/ImageModel.create_size
|
766
|
def create_sample(self):
try:
im = Image.open(SAMPLE_IMAGE_PATH)
except __HOLE__:
raise IOError(
'Photologue was unable to open the sample image: %s.' % SAMPLE_IMAGE_PATH)
im = self.process(im)
buffer = BytesIO()
im.save(buffer, 'JPEG', quality=90, optimize=True)
buffer_contents = ContentFile(buffer.getvalue())
default_storage.save(self.sample_filename(), buffer_contents)
|
IOError
|
dataset/ETHPy150Open jdriscoll/django-photologue/photologue/models.py/BaseEffect.create_sample
|
767
|
def pre_process(self, im):
if self.transpose_method != '':
method = getattr(Image, self.transpose_method)
im = im.transpose(method)
if im.mode != 'RGB' and im.mode != 'RGBA':
return im
for name in ['Color', 'Brightness', 'Contrast', 'Sharpness']:
factor = getattr(self, name.lower())
if factor != 1.0:
im = getattr(ImageEnhance, name)(im).enhance(factor)
for name in self.filters.split('->'):
image_filter = getattr(ImageFilter, name.upper(), None)
if image_filter is not None:
try:
im = im.filter(image_filter)
except __HOLE__:
pass
return im
|
ValueError
|
dataset/ETHPy150Open jdriscoll/django-photologue/photologue/models.py/PhotoEffect.pre_process
|
768
|
def match_iter(self, req, body, sizes):
"""\
This skips sizes because there's its not part of the iter api.
"""
for line in req.body:
if b'\n' in line[:-1]:
raise AssertionError("Embedded new line: %r" % line)
if line != body[:len(line)]:
raise AssertionError("Invalid body data read: %r != %r" % (
line, body[:len(line)]))
body = body[len(line):]
if len(body):
raise AssertionError("Failed to read entire body: %r" % body)
try:
data = six.next(iter(req.body))
raise AssertionError("Read data after body finished: %r" % data)
except __HOLE__:
pass
# Construct a series of test cases from the permutations of
# send, size, and match functions.
|
StopIteration
|
dataset/ETHPy150Open benoitc/gunicorn/tests/treq.py/request.match_iter
|
769
|
def get_heartbeat(self, worker):
try:
heartbeat = worker.heartbeats[-1]
except __HOLE__:
return
# Check for timezone settings
if getattr(settings, "USE_TZ", False):
return aware_tstamp(heartbeat)
return datetime.fromtimestamp(heartbeat)
|
IndexError
|
dataset/ETHPy150Open celery/django-celery/djcelery/snapshot.py/Camera.get_heartbeat
|
770
|
def _get_model(model_identifier):
"""
Helper to look up a model from an "app_label.model_name" string.
"""
try:
return apps.get_model(model_identifier)
except (LookupError, __HOLE__):
raise base.DeserializationError("Invalid model identifier: '%s'" % model_identifier)
|
TypeError
|
dataset/ETHPy150Open django/django/django/core/serializers/python.py/_get_model
|
771
|
@when(u'paasta mark-for-deployments is run against the repo')
def step_paasta_mark_for_deployments_when(context):
fake_args = mock.MagicMock(
deploy_group='test_cluster.test_instance',
service='fake_deployments_json_service',
git_url=context.test_git_repo_dir,
commit=context.expected_commit
)
context.force_bounce_timestamp = format_timestamp(datetime.utcnow())
with contextlib.nested(
mock.patch('paasta_tools.utils.format_timestamp', autosepc=True,
return_value=context.force_bounce_timestamp),
mock.patch('paasta_tools.cli.cmds.mark_for_deployment.validate_service_name', autospec=True,
return_value=True),
) as (
mock_format_timestamp,
mock_validate_service_name,
):
try:
paasta_mark_for_deployment(fake_args)
except __HOLE__:
pass
|
SystemExit
|
dataset/ETHPy150Open Yelp/paasta/general_itests/steps/deployments_json_steps.py/step_paasta_mark_for_deployments_when
|
772
|
@when(u'paasta stop is run against the repo')
def step_paasta_stop_when(context):
fake_args = mock.MagicMock(
clusters='test_cluster',
instance='test_instance',
soa_dir='fake_soa_configs',
service='fake_deployments_json_service',
)
context.force_bounce_timestamp = format_timestamp(datetime.utcnow())
with contextlib.nested(
mock.patch('paasta_tools.cli.cmds.start_stop_restart.utils.get_git_url', autospec=True,
return_value=context.test_git_repo_dir),
mock.patch('paasta_tools.utils.format_timestamp', autospec=True,
return_value=context.force_bounce_timestamp),
) as (
mock_get_git_url,
mock_get_timestamp,
):
try:
paasta_stop(fake_args)
except __HOLE__:
pass
|
SystemExit
|
dataset/ETHPy150Open Yelp/paasta/general_itests/steps/deployments_json_steps.py/step_paasta_stop_when
|
773
|
@when(u'we generate deployments.json for that service')
def step_impl_when(context):
context.deployments_file = os.path.join('fake_soa_configs', 'fake_deployments_json_service', 'deployments.json')
try:
os.remove(context.deployments_file)
except __HOLE__:
pass
fake_args = mock.MagicMock(
service='fake_deployments_json_service',
soa_dir='fake_soa_configs',
verbose=True,
)
with contextlib.nested(
mock.patch('paasta_tools.generate_deployments_for_service.get_git_url', autospec=True,
return_value=context.test_git_repo_dir),
mock.patch('paasta_tools.generate_deployments_for_service.parse_args',
autospec=True, return_value=fake_args),
) as (
mock_get_git_url,
mock_parse_args,
):
generate_deployments_for_service.main()
|
OSError
|
dataset/ETHPy150Open Yelp/paasta/general_itests/steps/deployments_json_steps.py/step_impl_when
|
774
|
def dset_sheet(dataset, ws, freeze_panes=True):
"""Completes given worksheet from given Dataset."""
_package = dataset._package(dicts=False)
for i, sep in enumerate(dataset._separators):
_offset = i
_package.insert((sep[0] + _offset), (sep[1],))
for i, row in enumerate(_package):
row_number = i + 1
for j, col in enumerate(row):
col_idx = get_column_letter(j + 1)
# bold headers
if (row_number == 1) and dataset.headers:
# ws.cell('%s%s'%(col_idx, row_number)).value = unicode(
# '%s' % col, errors='ignore')
ws.cell('%s%s'%(col_idx, row_number)).value = unicode(col)
style = ws.get_style('%s%s' % (col_idx, row_number))
style.font.bold = True
if freeze_panes:
# As already done in #53, but after Merge lost:
# Export Freeze only after first Line
ws.freeze_panes = 'A2'
# bold separators
elif len(row) < dataset.width:
ws.cell('%s%s'%(col_idx, row_number)).value = unicode(
'%s' % col, errors='ignore')
style = ws.get_style('%s%s' % (col_idx, row_number))
style.font.bold = True
# wrap the rest
else:
try:
if '\n' in col:
ws.cell('%s%s'%(col_idx, row_number)).value = unicode(
'%s' % col, errors='ignore')
style = ws.get_style('%s%s' % (col_idx, row_number))
style.alignment.wrap_text
else:
ws.cell('%s%s'%(col_idx, row_number)).value = unicode(
'%s' % col, errors='ignore')
except __HOLE__:
ws.cell('%s%s'%(col_idx, row_number)).value = unicode(col)
|
TypeError
|
dataset/ETHPy150Open kennethreitz/tablib/tablib/formats/_xlsx.py/dset_sheet
|
775
|
def main():
# setup packages
setuptools.setup(
version=VERSION,
author='Anton Gavrik',
name='polaris-gslb',
description=('A versatile Global Server Load Balancing(GSLB) '
'solution, DNS-based traffic manager.'),
packages = setuptools.find_packages('.'),
install_requires=[
'pyyaml',
'python-memcached',
'python-daemon-3K'
],
license='BSD 3-Clause',
url='https://github.com/polaris-gslb/polaris-gslb',
download_url=('https://github.com/polaris-gslb/polaris-gslb/tarball/v{}'
.format(VERSION)),
classifiers=[
'Programming Language :: Python :: 3',
]
)
# use value from POLARIS_INSTALL_PREFIX env if set
try:
install_prefix = os.environ['POLARIS_INSTALL_PREFIX']
except __HOLE__:
install_prefix = os.path.join(os.sep, 'opt', 'polaris')
# determine the directory where setup.py is located
pwd = os.path.abspath(
os.path.split(inspect.getfile(inspect.currentframe()))[0])
print('Creating directory topology...')
for path in [
os.path.join(install_prefix, 'etc'),
os.path.join(install_prefix, 'bin'),
os.path.join(install_prefix, 'run'),
]:
try:
os.makedirs(path)
except FileExistsError:
continue
print('Copying dist configuration and executables...')
for dirname in [ 'etc', 'bin' ]:
copy_files(os.path.join(pwd, dirname),
os.path.join(install_prefix, dirname))
print('Creating /etc/default/polaris...')
py3_bin = which('python3')
py3_path = ''
if py3_bin is None:
print('Unable to find Python3 executable in the $PATH, '
'add the path manually to /etc/default/polaris')
else:
py3_path = os.path.split(py3_bin)[0]
with open(os.path.join(os.sep, 'etc', 'default', 'polaris'), 'w') as f:
f.write('export PATH=$PATH:{}\n'.format(py3_path))
f.write('export POLARIS_INSTALL_PREFIX={}\n'
.format(install_prefix))
|
KeyError
|
dataset/ETHPy150Open polaris-gslb/polaris-gslb/setup.py/main
|
776
|
def _initialize(self, settings_module):
"""
Initialize the settings from a given settings_module
settings_module - path to settings module
"""
#Get the global settings values and assign them as self attributes
self.settings_list = []
for setting in dir(global_settings):
#Only get upper case settings
if setting == setting.upper():
setattr(self, setting, getattr(global_settings, setting))
self.settings_list.append(setting)
#If a settings module was passed in, import it, and grab settings from it
#Overwrite global settings with theses
if settings_module is not None:
self.SETTINGS_MODULE = settings_module
#Try to import the settings module
try:
mod = import_module(self.SETTINGS_MODULE)
except __HOLE__:
error_message = "Could not import settings at {0}".format(self.SETTINGS_MODULE)
log.exception(error_message)
raise ImportError(error_message)
#Grab uppercased settings as set them as self attrs
for setting in dir(mod):
if setting == setting.upper():
if setting == "INSTALLED_APPS":
self.INSTALLED_APPS += getattr(mod, setting)
else:
setattr(self, setting, getattr(mod, setting))
self.settings_list.append(setting)
#If PATH_SETTINGS is in the settings file, extend the system path to include it
if hasattr(self, "PATH_SETTINGS"):
for path in self.PATH_SETTINGS:
sys.path.extend(getattr(self,path))
self.settings_list = list(set(self.settings_list))
|
ImportError
|
dataset/ETHPy150Open VikParuchuri/percept/percept/conf/base.py/Settings._initialize
|
777
|
def _setup(self):
"""
Perform initial setup of the settings class, such as getting the settings module and setting the settings
"""
settings_module = None
#Get the settings module from the environment variables
try:
settings_module = os.environ[global_settings.MODULE_VARIABLE]
except __HOLE__:
error_message = "Settings not properly configured. Cannot find the environment variable {0}".format(global_settings.MODULE_VARIABLE)
log.exception(error_message)
self._initialize(settings_module)
self._configure_logging()
|
KeyError
|
dataset/ETHPy150Open VikParuchuri/percept/percept/conf/base.py/Settings._setup
|
778
|
@misc.disallow_when_frozen(FrozenNode)
def disassociate(self):
"""Removes this node from its parent (if any).
:returns: occurences of this node that were removed from its parent.
"""
occurrences = 0
if self.parent is not None:
p = self.parent
self.parent = None
# Remove all instances of this node from its parent.
while True:
try:
p._children.remove(self)
except __HOLE__:
break
else:
occurrences += 1
return occurrences
|
ValueError
|
dataset/ETHPy150Open openstack/taskflow/taskflow/types/tree.py/Node.disassociate
|
779
|
def dump_packet(data):
def is_ascii(data):
if byte2int(data) >= 65 and byte2int(data) <= 122: #data.isalnum():
return data
return '.'
try:
print "packet length %d" % len(data)
print "method call[1]: %s" % sys._getframe(1).f_code.co_name
print "method call[2]: %s" % sys._getframe(2).f_code.co_name
print "method call[3]: %s" % sys._getframe(3).f_code.co_name
print "method call[4]: %s" % sys._getframe(4).f_code.co_name
print "method call[5]: %s" % sys._getframe(5).f_code.co_name
print "-" * 88
except __HOLE__: pass
dump_data = [data[i:i+16] for i in xrange(len(data)) if i%16 == 0]
for d in dump_data:
print ' '.join(map(lambda x:"%02X" % byte2int(x), d)) + \
' ' * (16 - len(d)) + ' ' * 2 + \
' '.join(map(lambda x:"%s" % is_ascii(x), d))
print "-" * 88
print ""
|
ValueError
|
dataset/ETHPy150Open uwdata/termite-data-server/web2py/gluon/contrib/pymysql/connections.py/dump_packet
|
780
|
def unpack_int24(n):
try:
return struct.unpack('B',n[0])[0] + (struct.unpack('B', n[1])[0] << 8) +\
(struct.unpack('B',n[2])[0] << 16)
except __HOLE__:
return n[0] + (n[1] << 8) + (n[2] << 16)
|
TypeError
|
dataset/ETHPy150Open uwdata/termite-data-server/web2py/gluon/contrib/pymysql/connections.py/unpack_int24
|
781
|
def unpack_int32(n):
try:
return struct.unpack('B',n[0])[0] + (struct.unpack('B', n[1])[0] << 8) +\
(struct.unpack('B',n[2])[0] << 16) + (struct.unpack('B', n[3])[0] << 24)
except __HOLE__:
return n[0] + (n[1] << 8) + (n[2] << 16) + (n[3] << 24)
|
TypeError
|
dataset/ETHPy150Open uwdata/termite-data-server/web2py/gluon/contrib/pymysql/connections.py/unpack_int32
|
782
|
def unpack_int64(n):
try:
return struct.unpack('B',n[0])[0] + (struct.unpack('B', n[1])[0]<<8) +\
(struct.unpack('B',n[2])[0] << 16) + (struct.unpack('B',n[3])[0]<<24)+\
(struct.unpack('B',n[4])[0] << 32) + (struct.unpack('B',n[5])[0]<<40)+\
(struct.unpack('B',n[6])[0] << 48) + (struct.unpack('B',n[7])[0]<<56)
except __HOLE__:
return n[0] + (n[1] << 8) + (n[2] << 16) + (n[3] << 24) +\
(n[4] << 32) + (n[5] << 40) + (n[6] << 48) + (n[7] << 56)
|
TypeError
|
dataset/ETHPy150Open uwdata/termite-data-server/web2py/gluon/contrib/pymysql/connections.py/unpack_int64
|
783
|
def send_email(request, context, template_name, cls=None, to_email=None):
"""
Sends an e-mail based on a Django template.
"""
# Imports moved into function because it was causing installer to fail
# because django may not be installed when installer is running
# MOLLY-244
from django.template import Context, RequestContext, loader
from django.core.mail import EmailMessage
from django.conf import settings
if to_email is not None:
pass
elif cls and hasattr(cls.conf, 'to_email'):
to_email = cls.conf.to_email
else:
to_email = ('%s <%s>' % admin for admin in settings.MANAGERS)
if cls and hasattr(cls.conf, 'from_email'):
from_email = cls.conf.from_email
else:
from_email = settings.DEFAULT_FROM_EMAIL
if request:
email_context = RequestContext(request, {
'from_email': from_email,
'to_email': to_email,
})
email_context.update({
'session_key': request.session.session_key,
'devid': request.device.devid,
'ua': request.META.get('HTTP_USER_AGENT'),
'lon': request.session.get('geolocation:location', (None, None))[0],
'lat': request.session.get('geolocation:location', (None, None))[1],
'host': request.META.get('HTTP_HOST'),
'request': request,
})
else:
email_context = Context(request, {
'from_email': from_email,
'to_email': to_email,
})
email_context.update(context)
template = loader.get_template(template_name)
email = template.render(email_context)
headers, last_header = {}, None
headers_section, body = email.split('\n\n', 1)
for header in headers_section.split('\n'):
if header.startswith(' '):
headers[last_header] += ' ' + header.strip()
else:
try:
key, value = header.split(': ', 1)
headers[key] = value
last_header = key
except __HOLE__:
# if the header line isn't in the form Key: Value
headers[last_header] += ' ' + header.strip()
subject = headers.pop('Subject', '[no subject]')
from_email = headers.pop('from_email', from_email)
if 'to_email' in headers:
to_email = (e.strip() for e in headers.pop('to_email').split(';'))
email = EmailMessage(
subject = subject,
body = body,
from_email = from_email,
to = to_email,
headers = headers,
)
email.send()
|
ValueError
|
dataset/ETHPy150Open mollyproject/mollyproject/molly/utils/__init__.py/send_email
|
784
|
def _create_cache(backend, **kwargs):
try:
# Try to get the CACHES entry for the given backend name first
try:
conf = settings.CACHES[backend]
except __HOLE__:
try:
# Trying to import the given backend, in case it's a dotted path
import_string(backend)
except ImportError as e:
raise InvalidCacheBackendError("Could not find backend '%s': %s" % (
backend, e))
location = kwargs.pop('LOCATION', '')
params = kwargs
else:
params = conf.copy()
params.update(kwargs)
backend = params.pop('BACKEND')
location = params.pop('LOCATION', '')
backend_cls = import_string(backend)
except ImportError as e:
raise InvalidCacheBackendError(
"Could not find backend '%s': %s" % (backend, e))
return backend_cls(location, params)
|
KeyError
|
dataset/ETHPy150Open django/django/django/core/cache/__init__.py/_create_cache
|
785
|
def __getitem__(self, alias):
try:
return self._caches.caches[alias]
except AttributeError:
self._caches.caches = {}
except __HOLE__:
pass
if alias not in settings.CACHES:
raise InvalidCacheBackendError(
"Could not find config for '%s' in settings.CACHES" % alias
)
cache = _create_cache(alias)
self._caches.caches[alias] = cache
return cache
|
KeyError
|
dataset/ETHPy150Open django/django/django/core/cache/__init__.py/CacheHandler.__getitem__
|
786
|
def setup_logging(conf):
"""
Sets up the logging options for a log with supplied name
:param conf: a cfg.ConfOpts object
"""
if conf.log_config:
# Use a logging configuration file for all settings...
if os.path.exists(conf.log_config):
logging.config.fileConfig(conf.log_config)
return
else:
raise RuntimeError("Unable to locate specified logging "
"config file: %s" % conf.log_config)
root_logger = logging.root
if conf.debug:
root_logger.setLevel(logging.DEBUG)
elif conf.verbose:
root_logger.setLevel(logging.INFO)
else:
root_logger.setLevel(logging.WARNING)
formatter = logging.Formatter(conf.log_format, conf.log_date_format)
if conf.use_syslog:
try:
facility = getattr(logging.handlers.SysLogHandler,
conf.syslog_log_facility)
except __HOLE__:
raise ValueError(_("Invalid syslog facility"))
handler = logging.handlers.SysLogHandler(address='/dev/log',
facility=facility)
elif conf.log_file:
logfile = conf.log_file
if conf.log_dir:
logfile = os.path.join(conf.log_dir, logfile)
handler = logging.handlers.WatchedFileHandler(logfile)
else:
handler = logging.StreamHandler(sys.stdout)
handler.setFormatter(formatter)
root_logger.addHandler(handler)
|
AttributeError
|
dataset/ETHPy150Open rcbops/glance-buildpackage/glance/common/config.py/setup_logging
|
787
|
def load_paste_app(conf, app_name=None):
"""
Builds and returns a WSGI app from a paste config file.
We assume the last config file specified in the supplied ConfigOpts
object is the paste config file.
:param conf: a cfg.ConfigOpts object
:param app_name: name of the application to load
:raises RuntimeError when config file cannot be located or application
cannot be loaded from config file
"""
if app_name is None:
app_name = conf.prog
# append the deployment flavor to the application name,
# in order to identify the appropriate paste pipeline
app_name += _get_deployment_flavor(conf)
conf_file = _get_deployment_config_file(conf)
try:
# Setup logging early
setup_logging(conf)
logger = logging.getLogger(app_name)
app = wsgi.paste_deploy_app(conf_file, app_name, conf)
# Log the options used when starting if we're in debug mode...
if conf.debug:
conf.log_opt_values(logging.getLogger(app_name), logging.DEBUG)
return app
except (LookupError, __HOLE__), e:
raise RuntimeError("Unable to load %(app_name)s from "
"configuration file %(conf_file)s."
"\nGot: %(e)r" % locals())
|
ImportError
|
dataset/ETHPy150Open rcbops/glance-buildpackage/glance/common/config.py/load_paste_app
|
788
|
def get_value(obj,key,create = False):
key_fragments = key.split(".")
current_dict = obj
last_dict = None
last_key = None
for key_fragment in key_fragments:
try:
if create and not key_fragment in current_dict:
current_dict[key_fragment] = {}
except TypeError:
if last_dict:
last_dict[last_key] = {key_fragment : {}}
current_dict = last_dict[last_key]
else:
raise KeyError
last_key = key_fragment
last_dict = current_dict
try:
current_dict = current_dict[key_fragment]
except __HOLE__:
raise KeyError
return current_dict
|
TypeError
|
dataset/ETHPy150Open adewes/blitzdb/blitzdb/helpers.py/get_value
|
789
|
def set_value(obj,key,value,overwrite = True):
key_fragments = key.split('.')
current_dict = obj
last_dict = None
last_key = None
for key_fragment in key_fragments:
try:
if not key_fragment in current_dict:
current_dict[key_fragment] = {}
except __HOLE__:
if last_dict:
last_dict[last_key] = {key_fragment : {}}
current_dict = last_dict[last_key]
else:
raise
last_dict = current_dict
last_key = key_fragment
current_dict = current_dict[key_fragment]
if (not overwrite) and key_fragments[-1] in last_dict:
return last_dict[key_fragments[-1]]
last_dict[key_fragments[-1]] = value
return last_dict[key_fragments[-1]]
|
TypeError
|
dataset/ETHPy150Open adewes/blitzdb/blitzdb/helpers.py/set_value
|
790
|
def delete_value(obj,key):
key_fragments = key.split('.')
current_dict = obj
last_dict = None
for key_fragment in key_fragments:
try:
if not key_fragment in current_dict:
return
except __HOLE__:
return
last_dict = current_dict
current_dict = current_dict[key_fragment]
if key_fragments[-1] in last_dict:
del last_dict[key_fragments[-1]]
|
TypeError
|
dataset/ETHPy150Open adewes/blitzdb/blitzdb/helpers.py/delete_value
|
791
|
def lxml(self):
"""Get an lxml etree if possible."""
if ('html' not in self.mimetype and 'xml' not in self.mimetype):
raise AttributeError('Not an HTML/XML response')
from lxml import etree
try:
from lxml.html import fromstring
except __HOLE__:
fromstring = etree.HTML
if self.mimetype=='text/html':
return fromstring(self.data)
return etree.XML(self.data)
|
ImportError
|
dataset/ETHPy150Open GeekTrainer/Flask/Work/Trivia - Module 5/env/Lib/site-packages/werkzeug/contrib/testtools.py/ContentAccessors.lxml
|
792
|
def json(self):
"""Get the result of simplejson.loads if possible."""
if 'json' not in self.mimetype:
raise AttributeError('Not a JSON response')
try:
from simplejson import loads
except __HOLE__:
from json import loads
return loads(self.data)
|
ImportError
|
dataset/ETHPy150Open GeekTrainer/Flask/Work/Trivia - Module 5/env/Lib/site-packages/werkzeug/contrib/testtools.py/ContentAccessors.json
|
793
|
def ring_new(self, element):
if isinstance(element, PolyElement):
if self == element.ring:
return element
elif isinstance(self.domain, PolynomialRing) and self.domain.ring == element.ring:
return self.ground_new(element)
else:
raise NotImplementedError("conversion")
elif isinstance(element, string_types):
raise NotImplementedError("parsing")
elif isinstance(element, dict):
return self.from_dict(element)
elif isinstance(element, list):
try:
return self.from_terms(element)
except __HOLE__:
return self.from_list(element)
elif isinstance(element, Expr):
return self.from_expr(element)
else:
return self.ground_new(element)
|
ValueError
|
dataset/ETHPy150Open sympy/sympy/sympy/polys/rings.py/PolyRing.ring_new
|
794
|
def index(self, gen):
"""Compute index of ``gen`` in ``self.gens``. """
if gen is None:
i = 0
elif isinstance(gen, int):
i = gen
if 0 <= i and i < self.ngens:
pass
elif -self.ngens <= i and i <= -1:
i = -i - 1
else:
raise ValueError("invalid generator index: %s" % gen)
elif isinstance(gen, self.dtype):
try:
i = self.gens.index(gen)
except __HOLE__:
raise ValueError("invalid generator: %s" % gen)
elif isinstance(gen, string_types):
try:
i = self.symbols.index(gen)
except ValueError:
raise ValueError("invalid generator: %s" % gen)
else:
raise ValueError("expected a polynomial generator, an integer, a string or None, got %s" % gen)
return i
|
ValueError
|
dataset/ETHPy150Open sympy/sympy/sympy/polys/rings.py/PolyRing.index
|
795
|
def parse_body(self):
try:
js = json.loads(self.body)
if js[js.keys()[0]]['response_type'] == "ERROR":
raise RimuHostingException(js[js.keys()[0]]['human_readable_message'])
return js[js.keys()[0]]
except __HOLE__:
raise RimuHostingException('Could not parse body: %s' % (self.body))
except KeyError:
raise RimuHostingException('Could not parse body: %s' % (self.body))
|
ValueError
|
dataset/ETHPy150Open infincia/AEServmon/libcloud/drivers/rimuhosting.py/RimuHostingResponse.parse_body
|
796
|
@staticmethod
def load(path, name):
cluster_path = os.path.join(path, name)
filename = os.path.join(cluster_path, 'cluster.conf')
with open(filename, 'r') as f:
data = yaml.load(f)
try:
install_dir = None
if 'install_dir' in data:
install_dir = data['install_dir']
repository.validate(install_dir)
if install_dir is None and 'cassandra_dir' in data:
install_dir = data['cassandra_dir']
repository.validate(install_dir)
if common.isDse(install_dir):
cluster = DseCluster(path, data['name'], install_dir=install_dir, create_directory=False)
else:
cluster = Cluster(path, data['name'], install_dir=install_dir, create_directory=False)
node_list = data['nodes']
seed_list = data['seeds']
if 'partitioner' in data:
cluster.partitioner = data['partitioner']
if 'config_options' in data:
cluster._config_options = data['config_options']
if 'dse_config_options' in data:
cluster._dse_config_options = data['dse_config_options']
if 'log_level' in data:
cluster.__log_level = data['log_level']
if 'use_vnodes' in data:
cluster.use_vnodes = data['use_vnodes']
if 'datadirs' in data:
cluster.data_dir_count = int(data['datadirs'])
except __HOLE__ as k:
raise common.LoadError("Error Loading " + filename + ", missing property:" + k)
for node_name in node_list:
cluster.nodes[node_name] = Node.load(cluster_path, node_name, cluster)
for seed in seed_list:
cluster.seeds.append(seed)
return cluster
|
KeyError
|
dataset/ETHPy150Open pcmanus/ccm/ccmlib/cluster_factory.py/ClusterFactory.load
|
797
|
def run(self):
"""
Return True in the case we succeed in running, False otherwise.
This means we can use several processors and have one or the other work.
"""
if not self.extension in self.supported_extensions:
return self.refuse()
self.accept() # We accept now so the run method can discard
try:
self._run()
except __HOLE__ as e:
msg = 'Could not call external processor {0}: {1}'.format(self.__class__.__name__, e)
if self.critical:
logger.critical(msg)
raise ExternalFailure(self.__class__.__name__, e)
else:
logger.info(msg)
self.refuse()
|
OSError
|
dataset/ETHPy150Open koenbok/Cactus/cactus/static/external/__init__.py/External.run
|
798
|
def add_to_app(self, app, **kwargs):
""" Instead of adding a route to the flask application this will
include and load routes similar, same as in the
:py:class:`flask_via.Via` class.abs
.. versionchanged:: 2014.05.08
* ``url_prefix`` now injected into kwargs when loading in routes
.. versionchanged:: 2014.05.19
* ``endpoint`` now injects into kwargs when loading in routes
Arguments
---------
app : flask.app.Flask
Flask application instance
\*\*kwargs
Arbitrary keyword arguments passed in to ``init_app``
"""
# Routes name can be configured by setting VIA_ROUTES_NAME
if not self.routes_name:
self.routes_name = app.config.get('VIA_ROUTES_NAME', 'routes')
# Inject url_prefix into kwargs
if self.url_prefix:
# This allows us to chain url prefix's when multiple includes
# are called
try:
url_prefix = kwargs['url_prefix']
except __HOLE__:
url_prefix = ''
finally:
kwargs['url_prefix'] = url_prefix + self.url_prefix
# Inject endpoint into kwagrs
if self.endpoint is not None:
# This allows us to add a endpoint prefix to routes included
try:
endpoint = kwargs['endpoint']
except KeyError:
endpoint = ''
finally:
kwargs['endpoint'] = endpoint + self.endpoint + '.'
# Get the routes
routes = self.include(self.routes_module, self.routes_name)
# Load the routes
self.load(app, routes, **kwargs)
|
KeyError
|
dataset/ETHPy150Open thisissoon/Flask-Via/flask_via/routers/__init__.py/Include.add_to_app
|
799
|
def get_arg_clause(self, view, arg_value):
field = self.get_field(view)
try:
value = field.deserialize(arg_value)
except __HOLE__ as e:
errors = (
self.format_validation_error(message)
for message, path in utils.iter_validation_errors(e.messages)
)
raise ApiError(400, *errors)
return self.get_filter_clause(view, value)
|
ValidationError
|
dataset/ETHPy150Open 4Catalyzer/flask-resty/flask_resty/filtering.py/FilterFieldBase.get_arg_clause
|