| Unnamed: 0 (int64, 0–10k) | function (string, lengths 79–138k) | label (string, 20 classes) | info (string, lengths 42–261) |
|---|---|---|---|
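Each row below pairs a `function` whose raised exception class has been masked as `__HOLE__` with that class name in `label` and the source path in `info`. A minimal sketch of restoring the original source from a row follows; the plain-string row layout is an assumption read off this dump, not part of any official loader:

```python
# Minimal sketch, assuming each row exposes the masked function source and
# its exception label as plain strings (as in the rows below).
def restore_source(function_src, label):
    """Substitute the masked exception class back into the function body."""
    return function_src.replace("__HOLE__", label)

# Row 300, for example, restores to "except KeyError:" at the masked site.
print(restore_source("except __HOLE__:", "KeyError"))
```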
300
|
def get_title(self):
    """
    Return the string literal that is used in the template.
    The title is used in the admin screens.
    """
    try:
        return extract_literal(self.meta_kwargs['title'])
    except __HOLE__:
        slot = self.get_slot()
        if slot is not None:
            return slot.replace('_', ' ').title()
        return None
|
KeyError
|
dataset/ETHPy150Open edoburu/django-fluent-contents/fluent_contents/templatetags/fluent_contents_tags.py/PagePlaceholderNode.get_title
|
301
|
def get_role(self):
    """
    Return the string literal that is used in the template.
    The role can be "main", "sidebar" or "related", or shortened to "m", "s", "r".
    """
    try:
        return extract_literal(self.meta_kwargs['role'])
    except __HOLE__:
        return None
|
KeyError
|
dataset/ETHPy150Open edoburu/django-fluent-contents/fluent_contents/templatetags/fluent_contents_tags.py/PagePlaceholderNode.get_role
|
302
|
def get_fallback_language(self):
    """
    Return whether to use the fallback language.
    """
    try:
        # Note: currently not supporting strings yet.
        return extract_literal_bool(self.kwargs['fallback']) or None
    except __HOLE__:
        return False
|
KeyError
|
dataset/ETHPy150Open edoburu/django-fluent-contents/fluent_contents/templatetags/fluent_contents_tags.py/PagePlaceholderNode.get_fallback_language
|
303
|
def _get_placeholder_arg(arg_name, placeholder):
    """
    Validate and return the Placeholder object that the template variable points to.
    """
    if placeholder is None:
        raise RuntimeWarning(u"placeholder object is None")
    elif isinstance(placeholder, Placeholder):
        return placeholder
    elif isinstance(placeholder, Manager):
        manager = placeholder
        try:
            parent_object = manager.instance  # read RelatedManager code
        except __HOLE__:
            parent_object = None
        try:
            placeholder = manager.all()[0]
            if parent_object is not None:
                placeholder.parent = parent_object  # Fill GFK cache
            return placeholder
        except IndexError:
            raise RuntimeWarning(u"No placeholders found for query '{0}.all.0'".format(arg_name))
    else:
        raise ValueError(u"The field '{0}' does not refer to a placeholder object!".format(arg_name))
|
AttributeError
|
dataset/ETHPy150Open edoburu/django-fluent-contents/fluent_contents/templatetags/fluent_contents_tags.py/_get_placeholder_arg
|
304
|
def get_bug_list(self):
    """Returns a list of bugs associated with this review request."""
    if self.bugs_closed == "":
        return []
    bugs = list(set(re.split(r"[, ]+", self.bugs_closed)))
    # First try a numeric sort, to show the best results for the majority
    # case of bug trackers with numeric IDs. If that fails, sort
    # alphabetically.
    try:
        bugs.sort(key=int)
    except __HOLE__:
        bugs.sort()
    return bugs
|
ValueError
|
dataset/ETHPy150Open reviewboard/reviewboard/reviewboard/reviews/models/base_review_request_details.py/BaseReviewRequestDetails.get_bug_list
|
305
|
def available():
    try:
        import av
        from PIL import Image
    except __HOLE__:
        return False
    else:
        return True
|
ImportError
|
dataset/ETHPy150Open soft-matter/pims/pims/pyav_reader.py/available
|
306
|
def test_wanted_dirs(self):
    # _candidate_tempdir_list contains the expected directories
    # Make sure the interesting environment variables are all set.
    with test_support.EnvironmentVarGuard() as env:
        for envname in 'TMPDIR', 'TEMP', 'TMP':
            dirname = os.getenv(envname)
            if not dirname:
                env.set(envname, os.path.abspath(envname))
        cand = tempfile._candidate_tempdir_list()
        for envname in 'TMPDIR', 'TEMP', 'TMP':
            dirname = os.getenv(envname)
            if not dirname: raise ValueError
            self.assert_(dirname in cand)
        try:
            dirname = os.getcwd()
        except (__HOLE__, os.error):
            dirname = os.curdir
        self.assert_(dirname in cand)
        # Not practical to try to verify the presence of OS-specific
        # paths in this list.
|
AttributeError
|
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/test/test_tempfile.py/test__candidate_tempdir_list.test_wanted_dirs
|
307
|
def test_noinherit(self):
    # _mkstemp_inner file handles are not inherited by child processes
    if not has_spawnl:
        return  # ugh, can't use TestSkipped.
    if test_support.verbose:
        v = "v"
    else:
        v = "q"
    file = self.do_create()
    fd = "%d" % file.fd
    try:
        me = __file__
    except __HOLE__:
        me = sys.argv[0]
    # We have to exec something, so that FD_CLOEXEC will take
    # effect. The core of this test is therefore in
    # tf_inherit_check.py, which see.
    tester = os.path.join(os.path.dirname(os.path.abspath(me)),
                          "tf_inherit_check.py")
    # On Windows a spawn* /path/ with embedded spaces shouldn't be quoted,
    # but an arg with embedded spaces should be decorated with double
    # quotes on each end
    if sys.platform in ('win32',):
        decorated = '"%s"' % sys.executable
        tester = '"%s"' % tester
    else:
        decorated = sys.executable
    retval = os.spawnl(os.P_WAIT, sys.executable, decorated, tester, v, fd)
    self.failIf(retval < 0,
                "child process caught fatal signal %d" % -retval)
    self.failIf(retval > 0, "child process reports failure %d" % retval)
|
NameError
|
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/test/test_tempfile.py/test__mkstemp_inner.test_noinherit
|
308
|
def _Exists(self):
    """Returns true if the disk exists."""
    cmd = util.GcloudCommand(self, 'compute', 'disks', 'describe', self.name)
    stdout, _, _ = cmd.Issue(suppress_warning=True)
    try:
        json.loads(stdout)
    except __HOLE__:
        return False
    return True
|
ValueError
|
dataset/ETHPy150Open GoogleCloudPlatform/PerfKitBenchmarker/perfkitbenchmarker/providers/gcp/gce_disk.py/GceDisk._Exists
|
309
|
def _get_key_value(string):
    """Return the (key, value) as a tuple from a string."""
    # Normally all properties look like this:
    #   Unique Identifier: 600508B1001CE4ACF473EE9C826230FF
    #   Disk Name: /dev/sda
    #   Mount Points: None
    key = ''
    value = ''
    try:
        key, value = string.split(':')
    except __HOLE__:
        # This handles the case when the property of a logical drive
        # returned is as follows. Here we cannot split by ':' because
        # the disk id has a colon in it. So if this is about a disk,
        # then strip it accordingly.
        #   Mirror Group 0: physicaldrive 6I:1:5
        string = string.lstrip(' ')
        if string.startswith('physicaldrive'):
            fields = string.split(' ')
            key = fields[0]
            value = fields[1]
        else:
            # TODO(rameshg87): Check if this ever occurs.
            return None, None
    return key.lstrip(' ').rstrip(' '), value.lstrip(' ').rstrip(' ')
|
ValueError
|
dataset/ETHPy150Open openstack/proliantutils/proliantutils/hpssa/objects.py/_get_key_value
|
310
|
def _hpssacli(*args, **kwargs):
    """Wrapper function for executing the hpssacli command.

    :param args: args to be provided to the hpssacli command
    :param kwargs: kwargs to be sent to processutils except the
        following:
        - dont_transform_to_hpssa_exception - Set to True if this
          method shouldn't transform other exceptions to hpssa
          exceptions. This is useful when the return code from hpssacli
          is useful for analysis.
    :returns: a tuple containing the stdout and stderr after running
        the process.
    :raises: HPSSAOperationError, if some error was encountered and
        dont_transform_to_hpssa_exception was set to False.
    :raises: OSError or processutils.ProcessExecutionError if execution
        failed and dont_transform_to_hpssa_exception was set to True.
    """
    dont_transform_to_hpssa_exception = kwargs.get(
        'dont_transform_to_hpssa_exception', False)
    kwargs.pop('dont_transform_to_hpssa_exception', None)
    try:
        stdout, stderr = processutils.execute("hpssacli",
                                              *args, **kwargs)
    except (__HOLE__, processutils.ProcessExecutionError) as e:
        if not dont_transform_to_hpssa_exception:
            raise exception.HPSSAOperationError(reason=e)
        else:
            raise
    return stdout, stderr
|
OSError
|
dataset/ETHPy150Open openstack/proliantutils/proliantutils/hpssa/objects.py/_hpssacli
|
311
|
def __init__(self, id, properties, parent):
    """Constructor for a LogicalDrive object."""
    # Strip off 'Logical Drive' before storing it in id
    self.id = id[15:]
    self.parent = parent
    self.properties = properties
    # 'string_to_bytes' takes care of converting any returned
    # (like 500MB, 25GB) unit of storage space to bytes (Integer value).
    # It requires space to be stripped.
    size = self.properties['Size'].replace(' ', '')
    try:
        # TODO(rameshg87): Reduce the disk size by 1 to make sure Ironic
        # has enough space to write a config drive. Remove this when
        # Ironic doesn't need it.
        self.size_gb = int(strutils.string_to_bytes(size,
                                                    return_int=True) /
                           (1024*1024*1024)) - 1
    except __HOLE__:
        msg = ("hpssacli returned unknown size '%(size)s' for logical "
               "disk '%(logical_disk)s' of RAID array '%(array)s' in "
               "controller '%(controller)s'." %
               {'size': size, 'logical_disk': self.id,
                'array': self.parent.id,
                'controller': self.parent.parent.id})
        raise exception.HPSSAOperationError(reason=msg)
    self.raid_level = self.properties.get('Fault Tolerance')
    # For RAID levels (like 5+0 and 6+0), HPSSA names them differently.
    # Check if we have mapping stored, otherwise use the same.
    raid_level_mapping = constants.RAID_LEVEL_HPSSA_TO_INPUT_MAPPING
    self.raid_level = raid_level_mapping.get(self.raid_level,
                                             self.raid_level)
    self.volume_name = self.properties.get('Logical Drive Label')
    # Trim down the WWN to 16 digits (8 bytes) so that it matches
    # lsblk output in Linux.
    wwn = self.properties.get('Unique Identifier')
    if wwn:
        wwn = '0x' + wwn[:16].lower()
    self.wwn = wwn
|
ValueError
|
dataset/ETHPy150Open openstack/proliantutils/proliantutils/hpssa/objects.py/LogicalDrive.__init__
|
312
|
def __init__(self, id, properties, parent):
    """Constructor for a PhysicalDrive object."""
    self.parent = parent
    self.properties = properties
    # Strip off physicaldrive before storing it in id
    self.id = id[14:]
    size = self.properties['Size'].replace(' ', '')
    # 'string_to_bytes' takes care of converting any returned
    # (like 500MB, 25GB) unit of storage space to bytes (Integer value).
    # It requires space to be stripped.
    try:
        self.size_gb = int(strutils.string_to_bytes(size,
                                                    return_int=True) /
                           (1024*1024*1024))
    except __HOLE__:
        msg = ("hpssacli returned unknown size '%(size)s' for physical "
               "disk '%(physical_disk)s' of controller "
               "'%(controller)s'." %
               {'size': size, 'physical_disk': self.id,
                'controller': self.parent.id})
        raise exception.HPSSAOperationError(reason=msg)
    ssa_interface = self.properties['Interface Type']
    self.interface_type = constants.get_interface_type(ssa_interface)
    self.disk_type = constants.get_disk_type(ssa_interface)
    self.model = self.properties.get('Model')
    self.firmware = self.properties.get('Firmware Revision')
|
ValueError
|
dataset/ETHPy150Open openstack/proliantutils/proliantutils/hpssa/objects.py/PhysicalDrive.__init__
|
313
|
def insert_stats(self, stats):
    try:
        with self._cursor() as c:
            ri = stats['run_info']
            try:
                c.execute("""INSERT INTO run_info VALUES (?, ?, ?, ?, ?, ?, ?, ?)""",
                          [ri['id'], int(float(ri['timestamp'])), ri['machine'], ri['user'],
                           ri['version'], ri['buildroot'], ri['outcome'], ri['cmd_line']])
            except KeyError as e:
                raise StatsDBError('Failed to insert stats. Key {} not found in RunInfo: {}'.format(
                    e.args[0], str(ri)))
            rid = ri['id']
            for table in ['cumulative_timings', 'self_timings']:
                timings = stats[table]
                for timing in timings:
                    try:
                        c.execute("""INSERT INTO {} VALUES (?, ?, ?)""".format(table),
                                  [rid, timing['label'], self._to_ms(timing['timing'])])
                    except KeyError as e:
                        raise StatsDBError('Failed to insert stats. Key {} not found in timing: {}'.format(
                            e.args[0], str(timing)))
    except __HOLE__ as e:
        raise StatsDBError('Failed to insert stats. Key {} not found in stats object.'.format(
            e.args[0]))
|
KeyError
|
dataset/ETHPy150Open pantsbuild/pants/src/python/pants/stats/statsdb.py/StatsDB.insert_stats
|
314
|
def _is_valid_name(self, project_name):
    def _module_exists(module_name):
        try:
            import_module(module_name)
            return True
        except __HOLE__:
            return False

    if not re.search(r'^[_a-zA-Z]\w*$', project_name):
        print('Error: Project names must begin with a letter and contain'
              ' only\nletters, numbers and underscores')
    elif exists(project_name):
        print('Error: Directory %r already exists' % project_name)
    elif _module_exists(project_name):
        print('Error: Module %r already exists' % project_name)
    else:
        return True
    return False
|
ImportError
|
dataset/ETHPy150Open scrapy/scrapy/scrapy/commands/startproject.py/Command._is_valid_name
|
315
|
def main(options):
    client = get_client(options.host, options.port, options.user, options.password)

    def compute_signature(index):
        signature = index["ns"]
        for key in index["key"]:
            try:
                signature += "%s_%s" % (key, int(index["key"][key]))
            except __HOLE__:
                signature += "%s_%s" % (key, index["key"][key])
        return signature

    def report_redundant_indexes(current_db):
        print "Checking DB: %s" % current_db.name
        indexes = current_db.system.indexes.find()
        index_map = {}
        for index in indexes:
            signature = compute_signature(index)
            index_map[signature] = index
        for signature in index_map.keys():
            for other_sig in index_map.keys():
                if signature == other_sig:
                    continue
                if other_sig.startswith(signature):
                    print "Index %s[%s] may be redundant with %s[%s]" % (
                        index_map[signature]["ns"],
                        index_map[signature]["name"],
                        index_map[other_sig]["ns"],
                        index_map[other_sig]["name"])

    databases = []
    if options.database:
        databases.append(options.database)
    else:
        databases = client.database_names()
    for db in databases:
        report_redundant_indexes(client[db])
|
ValueError
|
dataset/ETHPy150Open jwilder/mongodb-tools/mongodbtools/redundant_indexes.py/main
|
316
|
def _handle_request(self, request):
    res = webob.Response()
    res_content_type = None

    path = request.path
    if path.startswith(self._webpath):
        path = path[len(self._webpath):]
    routepath, func = self.find_route(path)
    if routepath:
        content = func()
        if isinstance(content, six.text_type):
            res.text = content
        elif isinstance(content, six.binary_type):
            res.body = content
        res.content_type = func._cfg['content-type']
        return res

    if request.path == self._webpath + '/api.spore':
        res.body = spore.getdesc(self, request.host_url)
        res.content_type = 'application/json'
        return res

    try:
        msg = None
        error_status = 500
        protocol = self._select_protocol(request)
    except ClientSideError as e:
        error_status = e.code
        msg = e.faultstring
        protocol = None
    except Exception as e:
        msg = ("Unexpected error while selecting protocol: %s" % str(e))
        log.exception(msg)
        protocol = None
        error_status = 500

    if protocol is None:
        if not msg:
            msg = ("None of the following protocols can handle this "
                   "request : %s" % ','.join((
                       p.name for p in self.protocols)))
        res.status = error_status
        res.content_type = 'text/plain'
        try:
            res.text = u(msg)
        except __HOLE__:
            res.text = msg
        log.error(msg)
        return res

    request.calls = []
    request.client_errorcount = 0
    request.client_last_status_code = None
    request.server_errorcount = 0

    try:
        context = None
        if hasattr(protocol, 'prepare_response_body'):
            prepare_response_body = protocol.prepare_response_body
        else:
            prepare_response_body = default_prepare_response_body
        body = prepare_response_body(request, (
            self._do_call(protocol, context)
            for context in protocol.iter_calls(request)))
        if isinstance(body, six.text_type):
            res.text = body
        else:
            res.body = body
        if len(request.calls) == 1:
            if hasattr(protocol, 'get_response_status'):
                res.status = protocol.get_response_status(request)
            else:
                if request.client_errorcount == 1:
                    res.status = request.client_last_status_code
                elif request.client_errorcount:
                    res.status = 400
                elif request.server_errorcount:
                    res.status = 500
                else:
                    res.status = 200
        else:
            res.status = protocol.get_response_status(request)
        res_content_type = protocol.get_response_contenttype(request)
    except ClientSideError as e:
        request.server_errorcount += 1
        res.status = e.code
        res.text = e.faultstring
    except Exception:
        infos = wsme.api.format_exception(sys.exc_info(), self._debug)
        request.server_errorcount += 1
        res.text = protocol.encode_error(context, infos)
        res.status = 500

    if res_content_type is None:
        # Attempt to correctly guess what content-type we should return.
        ctypes = [ct for ct in protocol.content_types if ct]
        if ctypes:
            res_content_type = request.accept.best_match(ctypes)

    # If not we will attempt to convert the body to an accepted
    # output format.
    if res_content_type is None:
        if "text/html" in request.accept:
            res.text = self._html_format(res.body, protocol.content_types)
            res_content_type = "text/html"

    # TODO should we consider the encoding asked by
    # the web browser ?
    res.headers['Content-Type'] = "%s; charset=UTF-8" % res_content_type
    return res
|
TypeError
|
dataset/ETHPy150Open openstack/wsme/wsme/root.py/WSRoot._handle_request
|
317
|
def print_(object_):
    import threading
    import sys

    # START OF CRITICAL SECTION
    __builtin__.__GIL__.acquire()
    try:
        import multiprocessing
        if multiprocessing.current_process().name == 'MainProcess':
            sys.stdout.write("<%s:%s> : %s\n" % (multiprocessing.current_process().name, threading.current_thread().name, object_))
        else:
            sys.stdout.write("<PID #%d> : %s\n" % (multiprocessing.current_process().pid, object_))
    except __HOLE__:
        sys.stdout.write("<%s> : %s\n" % (threading.current_thread().name, object_))
    sys.stdout.flush()
    __builtin__.__GIL__.release()
    # END OF CRITICAL SECTION
    return None

# Classes
|
ImportError
|
dataset/ETHPy150Open ikotler/pythonect/pythonect/internal/lang.py/print_
|
318
|
def _is_valid(report):
    """
    checks if this meets the preconditions for being allowed in the cache
    """
    try:
        return (
            report.request.domain
            and report.request.couch_user._id
            and report.request.get_full_path().startswith(
                '/a/{domain}/'.format(domain=report.request.domain)
            )
        )
    except __HOLE__:
        return False
|
AttributeError
|
dataset/ETHPy150Open dimagi/commcare-hq/corehq/apps/reports/cache.py/_is_valid
|
319
|
def _secure_open_write(filename, fmode):
    # We only want to write to this file, so open it in write only mode
    flags = os.O_WRONLY

    # os.O_CREAT | os.O_EXCL will fail if the file already exists, so we only
    # will open *new* files.
    # We specify this because we want to ensure that the mode we pass is the
    # mode of the file.
    flags |= os.O_CREAT | os.O_EXCL

    # Do not follow symlinks to prevent someone from making a symlink that
    # we follow and insecurely open a cache file.
    if hasattr(os, "O_NOFOLLOW"):
        flags |= os.O_NOFOLLOW

    # On Windows we'll mark this file as binary
    if hasattr(os, "O_BINARY"):
        flags |= os.O_BINARY

    # Before we open our file, we want to delete any existing file that is
    # there
    try:
        os.remove(filename)
    except (IOError, __HOLE__):
        # The file must not exist already, so we can just skip ahead to opening
        pass

    # Open our file, the use of os.O_CREAT | os.O_EXCL will ensure that if a
    # race condition happens between the os.remove and this line, that an
    # error will be raised. Because we utilize a lockfile this should only
    # happen if someone is attempting to attack us.
    fd = os.open(filename, flags, fmode)
    try:
        return os.fdopen(fd, "wb")
    except:
        # An error occurred wrapping our FD in a file object
        os.close(fd)
        raise
|
OSError
|
dataset/ETHPy150Open anzev/hedwig/build/pip/pip/_vendor/cachecontrol/caches/file_cache.py/_secure_open_write
|
320
|
def set(self, key, value):
    name = self._fn(key)

    # Make sure the directory exists
    try:
        os.makedirs(os.path.dirname(name), self.dirmode)
    except (IOError, __HOLE__):
        pass

    with self.lock_class(name) as lock:
        # Write our actual file
        with _secure_open_write(lock.path, self.filemode) as fh:
            fh.write(value)
|
OSError
|
dataset/ETHPy150Open anzev/hedwig/build/pip/pip/_vendor/cachecontrol/caches/file_cache.py/FileCache.set
|
321
|
def test_accessors_not_setup(self):
    info = TorInfo(self.protocol)
    self.assertTrue(info.__dict__['_setup'] is False)
    self.assertRaises(TypeError, len, info)
    dir(info)
    try:
        info[0]
        self.fail("Should have raised TypeError")
    except __HOLE__:
        pass
|
TypeError
|
dataset/ETHPy150Open meejah/txtorcon/test/test_torinfo.py/InfoTests.test_accessors_not_setup
|
322
|
def test_with_arg(self):
    self.protocol.answers.append('''info/names=
multi/path/arg/* a documentation string
''')
    info = TorInfo(self.protocol)
    self.assertTrue(hasattr(info, 'multi'))
    self.assertTrue(hasattr(getattr(info, 'multi'), 'path'))
    self.assertTrue(
        hasattr(getattr(getattr(info, 'multi'), 'path'), 'arg')
    )

    # FIXME should have a test that "really" goes out through
    # TorControlProtocol instance for this stuff...
    # TorControlProtocol now strips the OK line...
    self.protocol.answers.append('multi/path/arg/quux=\nbar\nbaz\nquux')
    try:
        info.multi.path.arg()
        self.assertTrue(False)
    except __HOLE__:
        pass
    d = info.multi.path.arg('quux')
    d.addCallback(CheckAnswer(self, 'bar\nbaz\nquux'))
    return d
|
TypeError
|
dataset/ETHPy150Open meejah/txtorcon/test/test_torinfo.py/InfoTests.test_with_arg
|
323
|
def test_with_arg_error(self):
    self.protocol.answers.append('''info/names=
multi/no-arg docstring
''')
    info = TorInfo(self.protocol)
    try:
        info.multi.no_arg('an argument')
        self.assertTrue(False)
    except __HOLE__:
        pass
|
TypeError
|
dataset/ETHPy150Open meejah/txtorcon/test/test_torinfo.py/InfoTests.test_with_arg_error
|
324
|
def __init__(self, reactor):
    self.reactor = reactor
    self.logger = Logger()
    self.parser = Parser.Text(reactor)
    try:
        for name in self.functions:
            self.callback['text'][name] = Command.Text.callback[name]
    except __HOLE__:
        raise RuntimeError('The code does not have an implementation for "%s", please code it!' % name)
|
KeyError
|
dataset/ETHPy150Open Exa-Networks/exabgp/lib/exabgp/reactor/api/api.py/API.__init__
|
325
|
def clean(self, value):
    super(EEPersonalIdentificationCode, self).clean(value)
    if value in EMPTY_VALUES:
        return ''
    match = re.match(idcode, value)
    if not match:
        raise ValidationError(self.error_messages['invalid_format'])
    century, year, month, day, check = map(int, match.groups())
    if check != self.ee_checksum(value[:10]):
        raise ValidationError(self.error_messages['invalid'])
    # Century digit also encodes gender:
    #   1 - male born in 18xx
    #   2 - female born in 18xx
    #   3 - male born in 19xx
    #   ...
    year += 1800 + 100 * ((century - 1) // 2)
    try:
        date(year, month, day)
    except __HOLE__:
        raise ValidationError(self.error_messages['invalid'])
    return value
|
ValueError
|
dataset/ETHPy150Open django/django-localflavor/localflavor/ee/forms.py/EEPersonalIdentificationCode.clean
|
326
|
def __next__(self, full=False):
    if self.max_items is not None:
        if self.count >= self.max_items:
            raise StopIteration
    try:
        item = six.next(self._iter)
        self.count += 1
        if 'timestamp' in item:
            item['timestamp'] = parse_timestamp(item['timestamp'])
        if full:
            return item
        if type(self.return_values) is tuple:
            return tuple((item[i] for i in self.return_values))
        elif self.return_values is None:
            return item
        else:
            return item[self.return_values]
    except __HOLE__:
        if self.last:
            raise StopIteration
        self.load_chunk()
        return List.__next__(self, full=full)
|
StopIteration
|
dataset/ETHPy150Open mwclient/mwclient/mwclient/listing.py/List.__next__
|
327
|
def handle(self, *args, **options):
    update_all = options.get('all')
    recalculate = options.get('recalculate')

    if update_all:
        if recalculate:
            raise CommandError('--recalculate cannot be used with --all')
        q = ReviewRequest.objects.all()
    else:
        pks = []
        for arg in args:
            try:
                pks.append(int(arg))
            except __HOLE__:
                raise CommandError('%s is not a valid review request ID'
                                   % arg)
        if not pks:
            raise CommandError(
                'One or more review request IDs must be provided.')
        q = ReviewRequest.objects.filter(pk__in=pks)

    q.update(issue_open_count=None,
             issue_resolved_count=None,
             issue_dropped_count=None)

    if not update_all and recalculate:
        if int(options['verbosity']) > 1:
            root_logger = logging.getLogger('')
            root_logger.setLevel(logging.DEBUG)

        # Load each review request. The issue counters will recalculate,
        # and output will be shown if verbosity > 1.
        list(q)

    if update_all:
        self.stdout.write('All issue counts reset.')
    else:
        self.stdout.write('Issue counts for review request(s) %s reset.'
                          % ', '.join(args))
|
ValueError
|
dataset/ETHPy150Open reviewboard/reviewboard/reviewboard/reviews/management/commands/reset-issue-counts.py/Command.handle
|
328
|
@types.convert(image={"type": "glance_image"},
               flavor={"type": "nova_flavor"})
@validation.image_valid_on_flavor("flavor", "image")
@validation.valid_command("command")
@validation.number("port", minval=1, maxval=65535, nullable=True,
                   integer_only=True)
@validation.external_network_exists("floating_network")
@validation.required_services(consts.Service.NOVA, consts.Service.CINDER)
@validation.required_openstack(users=True)
@scenario.configure(context={"cleanup": ["nova", "cinder"],
                             "keypair": {}, "allow_ssh": {}})
def boot_runcommand_delete(self, image, flavor,
                           username,
                           password=None,
                           command=None,
                           volume_args=None,
                           floating_network=None,
                           port=22,
                           use_floating_ip=True,
                           force_delete=False,
                           wait_for_ping=True,
                           max_log_length=None,
                           **kwargs):
    """Boot a server, run script specified in command and delete server.

    Example Script in samples/tasks/support/instance_dd_test.sh

    The script to be executed is provided like command['remote_path'] or
    command['local_path'] and interpreter in command['interpreter']
    respectively.

    :param image: glance image name to use for the vm
    :param flavor: VM flavor name
    :param username: ssh username on server, str
    :param password: Password on SSH authentication
    :param command: Command-specifying dictionary that either specifies
        remote command path via `remote_path' (can be uploaded from a
        local file specified by `local_path`), an inline script via
        `script_inline' or a local script file path using `script_file'.
        Both `script_file' and `local_path' are checked to be accessible
        by the `file_exists' validator code.

        The `script_inline' and `script_file' both require an
        `interpreter' value to specify the interpreter script should be
        run with.

        Note that any of `interpreter' and `remote_path' can be an array
        prefixed with environment variables and suffixed with args for
        the `interpreter' command. `remote_path's last component must be
        a path to a command to execute (also upload destination if a
        `local_path' is given). Uploading an interpreter is possible
        but requires that `remote_path' and `interpreter' path do match.

        Examples::

            # Run a `local_script.pl' file sending it to a remote
            # Perl interpreter
            command = {
                "script_file": "local_script.pl",
                "interpreter": "/usr/bin/perl"
            }

            # Run an inline script sending it to a remote interpreter
            command = {
                "script_inline": "echo 'Hello, World!'",
                "interpreter": "/bin/sh"
            }

            # Run a remote command
            command = {
                "remote_path": "/bin/false"
            }

            # Copy a local command and run it
            command = {
                "remote_path": "/usr/local/bin/fio",
                "local_path": "/home/foobar/myfiodir/bin/fio"
            }

            # Copy a local command and run it with environment variable
            command = {
                "remote_path": ["HOME=/root", "/usr/local/bin/fio"],
                "local_path": "/home/foobar/myfiodir/bin/fio"
            }

            # Run an inline script sending it to a remote interpreter
            command = {
                "script_inline": "echo \"Hello, ${NAME:-World}\"",
                "interpreter": ["NAME=Earth", "/bin/sh"]
            }

            # Run an inline script sending it to an uploaded remote
            # interpreter
            command = {
                "script_inline": "echo \"Hello, ${NAME:-World}\"",
                "interpreter": ["NAME=Earth", "/tmp/sh"],
                "remote_path": "/tmp/sh",
                "local_path": "/home/user/work/cve/sh-1.0/bin/sh"
            }

    :param volume_args: volume args for booting server from volume
    :param floating_network: external network name, for floating ip
    :param port: ssh port for SSH connection
    :param use_floating_ip: bool, floating or fixed IP for SSH connection
    :param force_delete: whether to use force_delete for servers
    :param wait_for_ping: whether to check connectivity on server creation
    :param **kwargs: extra arguments for booting the server
    :param max_log_length: The number of tail nova console-log lines user
        would like to retrieve
    :returns: dictionary with keys `data' and `errors':
        data: dict, JSON output from the script
        errors: str, raw data from the script's stderr stream
    """
    if volume_args:
        volume = self._create_volume(volume_args["size"], imageRef=None)
        kwargs["block_device_mapping"] = {"vdrally": "%s:::1" % volume.id}

    server, fip = self._boot_server_with_fip(
        image, flavor, use_floating_ip=use_floating_ip,
        floating_network=floating_network,
        key_name=self.context["user"]["keypair"]["name"],
        **kwargs)
    try:
        if wait_for_ping:
            self._wait_for_ping(fip["ip"])

        code, out, err = self._run_command(
            fip["ip"], port, username, password, command=command)
        if code:
            raise exceptions.ScriptError(
                "Error running command %(command)s. "
                "Error %(code)s: %(error)s" % {
                    "command": command, "code": code, "error": err})

        try:
            data = json.loads(out)
        except __HOLE__ as e:
            raise exceptions.ScriptError(
                "Command %(command)s has not output valid JSON: %(error)s."
                " Output: %(output)s" % {
                    "command": command, "error": str(e), "output": out})
    except (exceptions.TimeoutException,
            exceptions.SSHTimeout):
        console_logs = self._get_server_console_output(server,
                                                       max_log_length)
        LOG.debug("VM console logs:\n%s", console_logs)
        raise
    finally:
        self._delete_server_with_fip(server, fip,
                                     force_delete=force_delete)

    return {"data": data, "errors": err}
|
ValueError
|
dataset/ETHPy150Open openstack/rally/rally/plugins/openstack/scenarios/vm/vmtasks.py/VMTasks.boot_runcommand_delete
|
329
|
def do_load(self, line):
    """Load a backup based on its number per `list`.
    """
    try:
        i = int(line)
        filename = self.get_filenames()[i]
    except (ValueError, __HOLE__):
        print('\x1b[31;1mBad backup number!\x1b[0m')
        print('\x1b[32;1mPick one of these:\x1b[0m')
        self.do_list('')
    else:
        if subprocess.call(['dropdb', self.dbname]) == 0:
            if subprocess.call(['createdb', self.dbname]) == 0:
                subprocess.call('psql {} < {}/{}'.format(self.dbname, self.root, filename),
                                shell=True)
|
KeyError
|
dataset/ETHPy150Open gratipay/gratipay.com/bin/snapper.py/Snapper.do_load
|
330
|
def run(infile, options, report_step=10000):

    options.tablename = quoteTableName(
        options.tablename, backend=options.backend)

    if options.map:
        m = {}
        for x in options.map:
            f, t = x.split(":")
            m[f] = t
        options.map = m
    else:
        options.map = {}

    existing_tables = set()
    quick_import_separator = "\t"

    if options.database_backend == "postgres":
        import psycopg2
        raise NotImplementedError("needs refactoring for commandline options")
        dbhandle = psycopg2.connect(options.psql_connection)
        error = psycopg2.Error
        options.null = "NULL"
        options.string_value = "'%s'"
        options.text = "TEXT"
        options.index = "TEXT"
        if options.insert_quick:
            raise ValueError("quick import not implemented.")

    elif options.database_backend == "mysql":
        import MySQLdb
        dbhandle = MySQLdb.connect(host=options.database_host,
                                   user=options.database_username,
                                   passwd=options.database_password,
                                   port=options.database_port,
                                   db=options.database_name)
        error = Exception
        options.null = "NULL"
        options.string_value = "%s"
        options.text = "TEXT"
        options.index = "VARCHAR(40)"
        if options.insert_quick:
            raise ValueError("quick import not implemented.")

    elif options.backend == "sqlite":
        import sqlite3
        dbhandle = sqlite3.connect(options.database_name)
        try:
            os.chmod(options.database_name, 0664)
        except OSError, msg:
            E.warn("could not change permissions of database: %s" % msg)

        # Avoid the following error:
        # sqlite3.ProgrammingError: You must not use 8-bit bytestrings
        # unless you use a text_factory that can interpret 8-bit
        # bytestrings (like text_factory = str). It is highly
        # recommended that you instead just switch your application
        # to Unicode strings
        # Note: might be better to make csv2db unicode aware.
        dbhandle.text_factory = str

        error = sqlite3.OperationalError
        options.insert_many = True  # False
        options.null = None  # "NULL"
        options.text = "TEXT"
        options.index = "TEXT"
        options.string_value = "%s"  # "'%s'"

        statement = "SELECT name FROM sqlite_master WHERE type='table'"
        cc = executewait(dbhandle, statement, error, options.retry)
        existing_tables = set([x[0] for x in cc])
        cc.close()

        # use , as separator
        quick_import_statement = \
            "sqlite3 %s '.import %%s %s'" % \
            (options.database, options.tablename)
        quick_import_separator = "|"

    if options.header is not None:
        options.header = [x.strip() for x in options.header.split(",")]

    if options.utf:
        reader = CSV.UnicodeDictReader(infile,
                                       dialect=options.dialect,
                                       fieldnames=options.header)
    else:
        reader = CSV.DictReader(infile,
                                dialect=options.dialect,
                                fieldnames=options.header)

    if options.replace_header:
        try:
            reader.next()
        except StopIteration:
            pass

    E.info("reading %i columns to guess column types" % options.guess_size)

    rows = []
    for row in reader:
        if None in row:
            raise ValueError(
                "undefined columns in input file at row: %s" % row)
        try:
            rows.append(IOTools.convertDictionary(row, map=options.map))
        except TypeError, msg:
            E.warn(
                "incomplete line? Type error in conversion: "
                "'%s' with data: %s" % (msg, str(row)))
        except __HOLE__, msg:
            E.warn(
                "incomplete line? Type error in conversion: "
                "'%s' with data: %s" % (msg, str(row)))
        if len(rows) >= options.guess_size:
            break

    E.info("read %i rows for type guessing" % len(rows))
    E.info("creating table")

    if len(rows) == 0:
        if options.allow_empty:
            if not reader.fieldnames:
                E.warn("no data - no table created")
            else:
                # create empty table and exit
                take, map_column2type, ignored = createTable(
                    dbhandle,
                    error,
                    options.tablename,
                    options,
                    retry=options.retry,
                    headers=reader.fieldnames,
                    ignore_empty=options.ignore_empty,
                    ignore_columns=options.ignore_columns,
                    rename_columns=options.rename_columns,
                    lowercase=options.lowercase,
                    ignore_duplicates=options.ignore_duplicates,
                    indices=options.indices,
                    first_column=options.first_column,
                    existing_tables=existing_tables,
                    append=options.append)
                E.info("empty table created")
            return
        else:
            raise ValueError("empty table")
    else:
        take, map_column2type, ignored = createTable(
            dbhandle,
            error,
            options.tablename,
            options,
            rows=rows,
            retry=options.retry,
            headers=reader.fieldnames,
            ignore_empty=options.ignore_empty,
            ignore_columns=options.ignore_columns,
            rename_columns=options.rename_columns,
            lowercase=options.lowercase,
            ignore_duplicates=options.ignore_duplicates,
            indices=options.indices,
            first_column=options.first_column,
            existing_tables=existing_tables,
            append=options.append)

    def row_iter(rows, reader):
        for row in rows:
            yield quoteRow(row, take, map_column2type,
                           options.missing_values,
                           null=options.null,
                           string_value=options.string_value)
        for data in reader:
            yield quoteRow(IOTools.convertDictionary(data, map=options.map),
                           take,
                           map_column2type,
                           options.missing_values,
                           null=options.null,
                           string_value=options.string_value)

    ninput = 0

    E.info("inserting data")

    if options.insert_quick:
        E.info("using quick insert")

        outfile, filename = tempfile.mkstemp()
        E.debug("dumping data into %s" % filename)

        for d in row_iter(rows, reader):
            ninput += 1
            os.write(outfile, quick_import_separator.join(
                [str(d[x]) for x in take]) + "\n")
            if ninput % report_step == 0:
                E.info("iteration %i\n" % ninput)

        os.close(outfile)

        statement = quick_import_statement % filename
        E.debug(statement)

        # infinite loop possible
        while 1:
            retcode = E.run(statement, cwd=os.getcwd(), close_fds=True)
            if retcode != 0:
                E.warn("import error using statement: %s" % statement)
                if not options.retry:
                    raise ValueError(
                        "import error using statement: %s" % statement)
                time.sleep(5)
                continue
            break

        os.remove(filename)

        # there is no way to insert NULL values into sqlite. The only
        # solution is to update all colums.
        for column in take:
            executewait(dbhandle,
                        "UPDATE %s SET %s = NULL WHERE %s = 'None'" % (
                            options.tablename, column, column),
                        error,
                        options.retry)

    elif options.insert_many:
        data = []
        for d in row_iter(rows, reader):
            ninput += 1
            data.append([d[x] for x in take])
            if ninput % report_step == 0:
                E.info("iteration %i" % ninput)

        statement = "INSERT INTO %s VALUES (%s)" % (
            options.tablename, ",".join("?" * len(take)))

        E.info("inserting %i rows" % len(data))
        E.debug("multiple insert:\n# %s" % statement)

        while 1:
            try:
                dbhandle.executemany(statement, data)
            except error, msg:
                E.warn("import failed: msg=%s, statement=\n  %s" %
                       (msg, statement))
                # TODO: check for database locked msg
                if not options.retry:
                    raise error(msg)
                if not re.search("locked", str(msg)):
                    raise error(msg)
                time.sleep(5)
                continue
            break

    else:
        # insert line by line (could not figure out how to do bulk loading
        # with subprocess and COPY FROM STDIN)
        statement = "INSERT INTO %s VALUES (%%(%s)s)" % (options.tablename,
                                                         ')s, %('.join(take))
        # output data used for guessing:
        for d in row_iter(rows, reader):
            ninput += 1
            E.debug("single insert:\n# %s" % (statement % d))
            cc = executewait(dbhandle, statement, error,
                             retry=options.retry,
                             args=d)
            cc.close()
            if ninput % report_step == 0:
                E.info("iteration %i" % ninput)

    E.info("building indices")
    nindex = 0
    for index in options.indices:
        nindex += 1
        try:
            statement = "CREATE INDEX %s_index%i ON %s (%s)" % (
                options.tablename, nindex, options.tablename, index)
            cc = executewait(dbhandle, statement, error, options.retry)
            cc.close()
            E.info("added index on column %s" % (index))
        except error, msg:
            E.info("adding index on column %s failed: %s" % (index, msg))

    statement = "SELECT COUNT(*) FROM %s" % (options.tablename)
    cc = executewait(dbhandle, statement, error, options.retry)
    result = cc.fetchone()
    cc.close()

    noutput = result[0]
    E.info("ninput=%i, noutput=%i, nskipped_columns=%i" %
           (ninput, noutput, len(ignored)))

    dbhandle.commit()
|
ValueError
|
dataset/ETHPy150Open CGATOxford/cgat/CGAT/CSV2DB.py/run
|
331
|
def test_auth(self):
    # client without global auth set
    hsc = HubstorageClient(endpoint=self.hsclient.endpoint)
    self.assertEqual(hsc.auth, None)

    # check no-auth access
    try:
        hsc.push_job(self.projectid, self.spidername)
    except HTTPError as exc:
        self.assertTrue(exc.response.status_code, 401)
    else:
        self.assertTrue(False, '401 not raised')

    try:
        hsc.get_project(self.projectid).push_job(self.spidername)
    except HTTPError as exc:
        self.assertTrue(exc.response.status_code, 401)
    else:
        self.assertTrue(False, '401 not raised')

    try:
        hsc.get_job((self.projectid, 1, 1)).items.list()
    except __HOLE__ as exc:
        self.assertTrue(exc.response.status_code, 401)
    else:
        self.assertTrue(False, '401 not raised')

    try:
        hsc.get_project(self.projectid).get_job((self.projectid, 1, 1)).items.list()
    except HTTPError as exc:
        self.assertTrue(exc.response.status_code, 401)
    else:
        self.assertTrue(False, '401 not raised')

    # create project with auth
    auth = self.hsclient.auth
    project = hsc.get_project(self.projectid, auth)
    self.assertEqual(project.auth, auth)
    job = project.push_job(self.spidername)
    samejob = project.get_job(job.key)
    self.assertEqual(samejob.key, job.key)
|
HTTPError
|
dataset/ETHPy150Open scrapinghub/python-hubstorage/tests/test_project.py/ProjectTest.test_auth
|
332
|
def filter(self, userinfo, user_info_claims=None):
    """
    Return only those claims that are asked for.
    It's a best effort task; if essential claims are not present
    no error is flagged.

    :param userinfo: A dictionary containing the available user info.
    :param user_info_claims: A dictionary specifying the asked for claims
    :return: A dictionary of filtered claims.
    """
    if user_info_claims is None:
        return copy.copy(userinfo)
    else:
        result = {}
        missing = []
        optional = []
        for key, restr in user_info_claims.items():
            try:
                result[key] = userinfo[key]
            except __HOLE__:
                if restr == {"essential": True}:
                    missing.append(key)
                else:
                    optional.append(key)
        return result
|
KeyError
|
dataset/ETHPy150Open rohe/pyoidc/src/oic/utils/userinfo/__init__.py/UserInfo.filter
|
333
|
def __call__(self, userid, client_id, user_info_claims=None, **kwargs):
    try:
        return self.filter(self.db[userid], user_info_claims)
    except __HOLE__:
        return {}
|
KeyError
|
dataset/ETHPy150Open rohe/pyoidc/src/oic/utils/userinfo/__init__.py/UserInfo.__call__
|
334
|
def get_most_significant_input_dimensions(self, which_indices=None):
    """
    Determine which dimensions should be plotted.

    Returns the top three most significant input dimensions. If there
    are fewer than three dimensions, the non-existent dimensions are
    labeled as None, so for a one dimensional input this returns
    (0, None, None).

    :param which_indices: force the indices to be the given indices.
    :type which_indices: int or tuple(int,int) or tuple(int,int,int)
    """
    if which_indices is None:
        which_indices = np.argsort(self.input_sensitivity())[::-1][:3]
    try:
        input_1, input_2, input_3 = which_indices
    except ValueError:
        # which_indices is a tuple or an int
        try:
            input_3 = None
            input_1, input_2 = which_indices
        except TypeError:
            # which_indices is an int
            input_1, input_2 = which_indices, None
    except __HOLE__:
        # which_indices was a list or array-like with only one int
        input_1, input_2 = which_indices[0], None
    return input_1, input_2, input_3
|
ValueError
|
dataset/ETHPy150Open SheffieldML/GPy/GPy/kern/src/kern.py/Kern.get_most_significant_input_dimensions
|
335
|
def set_editor(self, editor):
    try:
        self.setHtml(editor.to_html())
    except (TypeError, __HOLE__):
        self.setHtml('<center>No preview available...</center>')
        self._editor = None
        self.hide_requested.emit()
    else:
        if self._editor is not None and editor != self._editor:
            try:
                self._editor.textChanged.disconnect(self._on_text_changed)
            except TypeError:
                pass
        editor.textChanged.connect(self._on_text_changed)
        self._editor = proxy(editor)
        self.show_requested.emit()
|
AttributeError
|
dataset/ETHPy150Open OpenCobolIDE/OpenCobolIDE/open_cobol_ide/extlibs/pyqode/core/widgets/preview.py/HtmlPreviewWidget.set_editor
|
336
|
def _update_preview(self):
    try:
        # remember cursor/scrollbar position
        p = self.textCursor().position()
        v = self.verticalScrollBar().value()

        # display new html
        self.setHtml(self._editor.to_html())

        # restore cursor/scrollbar position
        c = self.textCursor()
        c.setPosition(p)
        self.setTextCursor(c)
        self.verticalScrollBar().setValue(v)
    except (TypeError, __HOLE__):
        self.setHtml('<center>No preview available...</center>')
        self.hide_requested.emit()
|
AttributeError
|
dataset/ETHPy150Open OpenCobolIDE/OpenCobolIDE/open_cobol_ide/extlibs/pyqode/core/widgets/preview.py/HtmlPreviewWidget._update_preview
|
337
|
@web.authenticated
@gen.coroutine
def get(self):
    app = self.application
    capp = app.capp

    try:
        broker = Broker(capp.connection().as_uri(include_password=True),
                        http_api=app.options.broker_api)
    except __HOLE__:
        self.write({})
        return

    queue_names = ControlHandler.get_active_queue_names()
    queues = yield broker.queues(queue_names)

    data = defaultdict(int)
    for queue in queues:
        data[queue['name']] = queue.get('messages', 0)
    self.write(data)
|
NotImplementedError
|
dataset/ETHPy150Open mher/flower/flower/views/monitor.py/BrokerMonitor.get
|
338
|
def run_only_if_bernhard_is_available(func):
    try:
        import bernhard
    except __HOLE__:
        bernhard = None
    pred = lambda: bernhard is not None
    return run_only(func, pred)
|
ImportError
|
dataset/ETHPy150Open BrightcoveOS/Diamond/src/diamond/handler/test/testriemann.py/run_only_if_bernhard_is_available
|
339
|
def ext_pillar(minion_id, pillar, profile=None):
    '''
    Read pillar data from Confidant via its API.
    '''
    if profile is None:
        profile = {}
    # default to returning failure
    ret = {
        'credentials_result': False,
        'credentials': None,
        'credentials_metadata': None
    }
    profile_data = copy.deepcopy(profile)
    if profile_data.get('disabled', False):
        ret['result'] = True
        return ret
    token_version = profile_data.get('token_version', 1)
    try:
        url = profile_data['url']
        auth_key = profile_data['auth_key']
        auth_context = profile_data['auth_context']
        role = auth_context['from']
    except (KeyError, __HOLE__):
        msg = ('profile has undefined url, auth_key or auth_context')
        log.debug(msg)
        return ret
    region = profile_data.get('region', 'us-east-1')
    token_duration = profile_data.get('token_duration', 60)
    retries = profile_data.get('retries', 5)
    token_cache_file = profile_data.get('token_cache_file')
    backoff = profile_data.get('backoff', 1)
    client = confidant.client.ConfidantClient(
        url,
        auth_key,
        auth_context,
        token_lifetime=token_duration,
        token_version=token_version,
        token_cache_file=token_cache_file,
        region=region,
        retries=retries,
        backoff=backoff
    )
    try:
        data = client.get_service(
            role,
            decrypt_blind=True
        )
    except confidant.client.TokenCreationError:
        return ret
    if not data['result']:
        return ret
    ret = confidant.formatter.combined_credential_pair_format(data)
    ret['credentials_result'] = True
    return ret
|
TypeError
|
dataset/ETHPy150Open saltstack/salt/salt/pillar/confidant.py/ext_pillar
|
340
|
def search(request):
    params = request.GET
    q = params.get('q', '')
    page = params.get('page', 1)
    fmt = params.get('format', 'html')
    raw = params.get('raw', False)
    online = params.get('online', False)
    page_size = params.get('page_size', 20)

    try:
        page = int(page)
        page_size = int(page_size)
    except __HOLE__:
        raise Http404

    # summon can't return results for pages > 50 with page size of 20
    max_pages = 50
    if page > max_pages:
        raise Http404

    api = summon.Summon(settings.SUMMON_ID, settings.SUMMON_SECRET_KEY)
    kwargs = {
        "hl": False,
        "pn": page,
        "ps": page_size,
        "fq": ['SourceType:("Library Catalog")'],
        "ff": [
            'ContentType,or',
            'Author,or',
            'Language,or',
            'Genre,or',
            'Institution,or',
            'Discipline,or',
            'SubjectTerms,or',
            'TemporalSubjectTerms,or',
            'GeographicLocations,or',
        ],
        "ho": "t",
        "light": "t",
        "raw": raw,
    }

    q, kwargs = _filter_by_pubdate(q, kwargs)

    # add to the query if they want online resources only
    if online:
        if q:
            q += " AND"
        q += " lccallnum:('gw electronic' OR 'shared+electronic'" + \
             " OR 'e-resources' OR 'e-govpub' OR 'streaming')"

    # add selected facets to the query
    for facet in params.getlist('facet'):
        if ':' not in facet:
            continue
        facet_field, facet_value = facet.split(':', 1)
        if 'fvf' not in kwargs:
            kwargs['fvf'] = []

        # TODO: maybe summoner should do these escapes somehow?
        def escape(m):
            return '\\' + m.group(1)
        facet_value = re.sub('([,:\()${}])', escape, facet_value)
        kwargs['fvf'].append('%s,%s,false' % (facet_field, facet_value))

        # add Library facet field if user is faceting by Institution
        if facet_field == 'Institution' and 'Library,or' not in kwargs['ff']:
            kwargs['ff'].append('Library,or')

    if fmt == "html":
        kwargs['for_template'] = True

    # catch and retry up to MAX_ATTEMPTS times if we get errors
    # from the Summon API
    retries = 0
    while True:
        try:
            search_results = api.search(q, **kwargs)
            break
        except requests.HTTPError as error:
            retries += 1
            if retries <= settings.SER_SOL_API_MAX_ATTEMPTS:
                time.sleep(1)
                continue
            else:
                logger.exception('unable to search Summon (3 tries): %s' %
                                 error)
                return error500(request)

    if not raw:
        search_results = _remove_facets(search_results)
        search_results = _reorder_facets(search_results)
        search_results = _remove_active_facets(request, search_results)
        search_results = _format_facets(request, search_results)

    # json-ld
    if fmt == "json":
        return HttpResponse(
            json.dumps(search_results, indent=2),
            content_type='application/json'
        )
    # raw summon results
    elif raw:
        if settings.DEBUG:
            return HttpResponse(
                json.dumps(search_results, indent=2),
                content_type='application/json'
            )
        else:
            # don't serve as a Summon proxy in production!
            return HttpResponse('Unauthorized', status=401)
    # default to a regular html web page
    else:
        # TODO: pull out page link generation so it can be tested
        # how many pagination links to display
        page_links = 5
        # the first page number in the pagination links
        page_range_start = ((page - 1) / page_links) * page_links + 1
        # the last page number + 1 in the pagination links
        # this is used in a range() below so it is offset by 1
        page_range_end = page_range_start + page_links
        # don't display page links that we can't get
        actual_pages = search_results['totalResults'] / page_size + 1
        if actual_pages <= page_range_end:
            page_range_end = actual_pages + 1
        # build page links as a list of tuples (page number, url)
        page_range = []
        page_query = request.GET.copy()
        for n in range(page_range_start, page_range_end):
            page_query['page'] = n
            page_range.append((n, page_query.urlencode()))
        # create links for going to the next set of page results
        next_page_range = prev_page_range = None
        if page_range_end - 1 < actual_pages \
                and page_range_end - 1 < max_pages:
            page_query['page'] = page_range_end
            next_page_range = page_query.urlencode()
        if page_range_start > page_links:
            page_query['page'] = page_range_start - 1
            prev_page_range = page_query.urlencode()
        return render(request, 'search.html', {
            "q": params.get('q'),
            "original_facets": params.getlist('facet'),
            "active_facets": _get_active_facets(request),
            "page": page,
            "page_range": page_range,
            "next_page_range": next_page_range,
            "prev_page_range": prev_page_range,
            "search_results": search_results,
            "debug": settings.DEBUG,
            "online": online,
            "json_url": request.get_full_path() + "&format=json",
            "raw_url": request.get_full_path() + "&raw=true",
            "max_subjects": settings.MAX_SUBJECTS,
        })
|
ValueError
|
dataset/ETHPy150Open gwu-libraries/launchpad/lp/ui/views.py/search
|
341
|
def get_format_args(fstr):
    """
    Turn a format string into two lists of arguments referenced by the
    format string. One is positional arguments, and the other is named
    arguments. Each element of the list includes the name and the
    nominal type of the field.

    # >>> get_format_args("{noun} is {1:d} years old{punct}")
    # ([(1, <type 'int'>)], [('noun', <type 'str'>), ('punct', <type 'str'>)])

    # XXX: Py3k
    >>> get_format_args("{noun} is {1:d} years old{punct}") == \
        ([(1, int)], [('noun', str), ('punct', str)])
    True
    """
    # TODO: memoize
    formatter = Formatter()
    fargs, fkwargs, _dedup = [], [], set()

    def _add_arg(argname, type_char='s'):
        if argname not in _dedup:
            _dedup.add(argname)
            argtype = _TYPE_MAP.get(type_char, str)  # TODO: unicode
            try:
                fargs.append((int(argname), argtype))
            except __HOLE__:
                fkwargs.append((argname, argtype))

    for lit, fname, fspec, conv in formatter.parse(fstr):
        if fname is not None:
            type_char = fspec[-1:]
            fname_list = re.split('[.[]', fname)
            if len(fname_list) > 1:
                raise ValueError('encountered compound format arg: %r' % fname)
            try:
                base_fname = fname_list[0]
                assert base_fname
            except (IndexError, AssertionError):
                raise ValueError('encountered anonymous positional argument')
            _add_arg(fname, type_char)
            for sublit, subfname, _, _ in formatter.parse(fspec):
                # TODO: positional and anon args not allowed here.
                if subfname is not None:
                    _add_arg(subfname)
    return fargs, fkwargs
|
ValueError
|
dataset/ETHPy150Open mahmoud/boltons/boltons/formatutils.py/get_format_args
|
342
|
def __format__(self, fmt):
    value = self.get_value()
    pt = fmt[-1:]  # presentation type
    type_conv = _TYPE_MAP.get(pt, str)
    try:
        return value.__format__(fmt)
    except (ValueError, __HOLE__):
        # TODO: this may be overkill
        return type_conv(value).__format__(fmt)

# end formatutils.py
|
TypeError
|
dataset/ETHPy150Open mahmoud/boltons/boltons/formatutils.py/DeferredValue.__format__
|
343
|
def getStream(self):
    """Gets the resource as stream.

    @see: L{IApplicationResource.getStream}
    """
    try:
        ds = DownloadStream(file(self._sourceFile, 'rb'),
                            self.getMIMEType(),
                            self.getFilename())
        length = str(getsize(self._sourceFile))
        ds.setParameter('Content-Length', length)
        ds.setCacheTime(self._cacheTime)
        return ds
    except __HOLE__:
        # Log the exception using the application error handler
        class Error(IErrorEvent):
            def getThrowable(self):
                return self.e

        self.getApplication().getErrorHandler().terminalError(Error())
        return None
|
IOError
|
dataset/ETHPy150Open rwl/muntjac/muntjac/terminal/file_resource.py/FileResource.getStream
|
344
|
def getOptPort(rets, f_target, l_period=1, naLower=None, naUpper=None, lNagDebug=0):
    """
    @summary Returns the Markowitz optimum portfolio for a specific return.
    @param rets: Daily returns of the various stocks (using returnize1)
    @param f_target: Target return, i.e. 0.04 = 4% per period
    @param l_period: Period to compress the returns to, e.g. 7 = weekly
    @param naLower: List of floats which corresponds to lower portfolio% for each stock
    @param naUpper: List of floats which corresponds to upper portfolio% for each stock
    @return tuple: (weights of portfolio, min possible return, max possible return)
    """

    # Attempt to import library
    try:
        pass
        import nagint as nag
    except ImportError:
        print 'Could not import NAG library'
        print 'make sure nagint.so is in your python path'
        return ([], 0, 0)

    # Get number of stocks
    lStocks = rets.shape[1]

    # If period != 1 we need to restructure the data
    if (l_period != 1):
        rets = getReindexedRets(rets, l_period)

    # Calculate means and covariance
    naAvgRets = np.average(rets, axis=0)
    naCov = np.cov(rets, rowvar=False)

    # Special case for None == f_target:
    # simply return average returns and cov
    if (f_target is None):
        return naAvgRets, np.std(rets, axis=0)

    # Calculate upper and lower limits of variables as well as constraints
    if (naUpper is None):
        naUpper = np.ones(lStocks)  # max portfolio % is 1
    if (naLower is None):
        naLower = np.zeros(lStocks)  # min is 0, set negative for shorting

    # Two extra constraints for linear conditions:
    # result = desired return, and sum of weights = 1
    naUpper = np.append(naUpper, [f_target, 1.0])
    naLower = np.append(naLower, [f_target, 1.0])

    # Initial estimate of portfolio
    naInitial = np.array([1.0/lStocks]*lStocks)

    # Set up constraints matrix,
    # composed of expected returns in row one, unity row in row two
    naConstraints = np.vstack((naAvgRets, np.ones(lStocks)))

    # Get portfolio weights, last entry in array is actually variance
    try:
        naReturn = nag.optPort(naConstraints, naLower, naUpper,
                               naCov, naInitial, lNagDebug)
    except __HOLE__:
        print 'NAG Runtime error with target: %.02lf' % (f_target)
        return (naInitial, sqrt(naCov[0][0]))
        # return semi-junk to not mess up the rest of the plot

    # Calculate stdev of entire portfolio to return;
    # what NAG returns is slightly different
    fPortDev = np.std(np.dot(rets, naReturn[0, 0:-1]))

    # Show difference between above stdev and sqrt NAG covariance,
    # possibly not taking correlation into account
    # print fPortDev / sqrt(naReturn[0, -1])

    # Return weights and stdDev of portfolio.
    # note again the last value of naReturn is NAG's reported variance
    return (naReturn[0, 0:-1], fPortDev)
|
RuntimeError
|
dataset/ETHPy150Open QuantSoftware/QuantSoftwareToolkit/QSTK/qstkutil/tsutil.py/getOptPort
|
345
|
def OptPort(naData, fTarget, naLower=None, naUpper=None, naExpected=None, s_type="long"):
    """
    @summary Returns the Markowitz optimum portfolio for a specific return.
    @param naData: Daily returns of the various stocks (using returnize1)
    @param fTarget: Target return, i.e. 0.04 = 4% per period
    @param lPeriod: Period to compress the returns to, e.g. 7 = weekly
    @param naLower: List of floats which corresponds to lower portfolio% for each stock
    @param naUpper: List of floats which corresponds to upper portfolio% for each stock
    @return tuple: (weights of portfolio, min possible return, max possible return)
    """

    # Attempt to import library
    try:
        pass
        from cvxopt import matrix
        from cvxopt.blas import dot
        from cvxopt.solvers import qp, options
    except __HOLE__:
        print 'Could not import CVX library'
        raise

    # Get number of stocks
    length = naData.shape[1]
    b_error = False

    naLower = deepcopy(naLower)
    naUpper = deepcopy(naUpper)
    naExpected = deepcopy(naExpected)

    # Assuming AvgReturns as the expected returns if parameter is not specified
    if (naExpected == None):
        naExpected = np.average(naData, axis=0)

    na_signs = np.sign(naExpected)
    indices, = np.where(na_signs == 0)
    na_signs[indices] = 1
    if s_type == "long":
        na_signs = np.ones(len(na_signs))
    elif s_type == "short":
        na_signs = np.ones(len(na_signs))*(-1)

    naData = na_signs*naData
    naExpected = na_signs*naExpected

    # Covariance matrix of the Data Set
    naCov = np.cov(naData, rowvar=False)

    # If length is one, just return 100% single symbol
    if length == 1:
        return (list(na_signs), np.std(naData, axis=0)[0], False)
    if length == 0:
        return ([], [0], False)

    # If we have 0/1 "free" equity we can't optimize
    # We just use limits since we are stuck with 0 degrees of freedom

    # Special case for None == fTarget, simply return average returns and cov
    if (fTarget is None):
        return (naExpected, np.std(naData, axis=0), b_error)

    # Upper bound of the Weights of a equity, If not specified, assumed to be 1.
    if (naUpper is None):
        naUpper = np.ones(length)

    # Lower bound of the Weights of a equity, If not specified assumed to be 0 (No shorting case)
    if (naLower is None):
        naLower = np.zeros(length)

    if sum(naLower) == 1:
        fPortDev = np.std(np.dot(naData, naLower))
        return (naLower, fPortDev, False)

    if sum(naUpper) == 1:
        fPortDev = np.std(np.dot(naData, naUpper))
        return (naUpper, fPortDev, False)

    naFree = naUpper != naLower
    if naFree.sum() <= 1:
        lnaPortfolios = naUpper.copy()

        # If there is 1 free we need to modify it to make the total
        # add up to 1
        if naFree.sum() == 1:
            f_rest = naUpper[~naFree].sum()
            lnaPortfolios[naFree] = 1.0 - f_rest

        lnaPortfolios = na_signs * lnaPortfolios
        fPortDev = np.std(np.dot(naData, lnaPortfolios))
        return (lnaPortfolios, fPortDev, False)

    # Double the covariance of the diagonal elements for calculating risk.
    for i in range(length):
        naCov[i][i] = 2*naCov[i][i]

    # Note, returns are modified to all be long from here on out
    (fMin, fMax) = getRetRange(False, naLower, naUpper, naExpected, "long")
    # print (fTarget, fMin, fMax)
    if fTarget < fMin or fTarget > fMax:
        print "Target not possible", fTarget, fMin, fMax
        b_error = True

    naLower = naLower*(-1)

    # Setting up the parameters for the CVXOPT Library, it takes inputs in
    # Matrix format. The Risk minimization problem is a standard Quadratic
    # Programming problem according to the Markowitz Theory.
    S = matrix(naCov)
    # pbar = matrix(naExpected)
    naLower.shape = (length, 1)
    naUpper.shape = (length, 1)
    naExpected.shape = (1, length)
    zeo = matrix(0.0, (length, 1))
    I = np.eye(length)
    minusI = -1*I
    G = matrix(np.vstack((I, minusI)))
    h = matrix(np.vstack((naUpper, naLower)))
    ones = matrix(1.0, (1, length))
    A = matrix(np.vstack((naExpected, ones)))
    b = matrix([float(fTarget), 1.0])

    # Optional Settings for CVXOPT
    options['show_progress'] = False
    options['abstol'] = 1e-25
    options['reltol'] = 1e-24
    options['feastol'] = 1e-25

    # Optimization Calls
    # Optimal Portfolio
    try:
        lnaPortfolios = qp(S, -zeo, G, h, A, b)['x']
    except:
        b_error = True

    if b_error == True:
        print "Optimization not Possible"
        na_port = naLower*-1
        if sum(na_port) < 1:
            if sum(naUpper) == 1:
                na_port = naUpper
            else:
                i = 0
                while (sum(na_port) < 1 and i < 25):
                    naOrder = naUpper - na_port
                    i = i + 1
                    indices = np.where(naOrder > 0)
                    na_port[indices] = na_port[indices] + (1 - sum(na_port))/len(indices[0])
                    naOrder = naUpper - na_port
                    indices = np.where(naOrder < 0)
                    na_port[indices] = naUpper[indices]
        lnaPortfolios = matrix(na_port)

    lnaPortfolios = (na_signs.reshape(-1, 1) * lnaPortfolios).reshape(-1)
    # Expected Return of the Portfolio
    # lfReturn = dot(pbar, lnaPortfolios)

    # Risk of the portfolio
    fPortDev = np.std(np.dot(naData, lnaPortfolios))
    return (lnaPortfolios, fPortDev, b_error)
|
ImportError
|
dataset/ETHPy150Open QuantSoftware/QuantSoftwareToolkit/QSTK/qstkutil/tsutil.py/OptPort
|
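A standalone sketch of the optional-dependency pattern used by OptPort above: cvxopt is imported inside the function, so the ImportError that fills __HOLE__ only surfaces when the solver is actually needed. The function name solve_min_risk is hypothetical; qp is the real cvxopt.solvers entry point.

def solve_min_risk(P, q, G, h, A, b):
    try:
        from cvxopt.solvers import qp
    except ImportError:
        print('Could not import CVX library')
        raise
    return qp(P, q, G, h, A, b)['x']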
346
|
def get_uid_gid(username, groupname=None):
"""Try to change UID and GID to the provided values.
The parameters are given as names like 'nobody' not integer.
May raise KeyError.
Src: http://mail.mems-exchange.org/durusmail/quixote-users/4940/1/
"""
try:
uid, default_grp = pwd.getpwnam(username)[2:4]
except __HOLE__:
raise KeyError("Couldn't get user id for user %s" % (username,))
if groupname is None:
gid = default_grp
else:
try:
gid = grp.getgrnam(groupname)[2]
except KeyError:
raise KeyError("Couldn't get group id for group %s" % (groupname,))
return (uid, gid)
|
KeyError
|
dataset/ETHPy150Open cloudera/hue/desktop/core/src/desktop/lib/daemon_utils.py/get_uid_gid
|
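For reference, pwd.getpwnam and grp.getgrnam both raise KeyError for unknown names, which is why KeyError is the masked exception above. A small Unix-only sketch; the fallback user 'nobody' is an assumption that holds on most systems.

import pwd

def lookup_uid(username, fallback='nobody'):
    try:
        return pwd.getpwnam(username).pw_uid
    except KeyError:
        # Unknown user: fall back instead of crashing.
        return pwd.getpwnam(fallback).pw_uid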
347
|
def finalize_options(self):
if self.match is None:
raise DistutilsOptionError(
"Must specify one or more (comma-separated) match patterns "
"(e.g. '.zip' or '.egg')"
)
if self.keep is None:
raise DistutilsOptionError("Must specify number of files to keep")
try:
self.keep = int(self.keep)
except __HOLE__:
raise DistutilsOptionError("--keep must be an integer")
if isinstance(self.match, basestring):
self.match = [
convert_path(p.strip()) for p in self.match.split(',')
]
self.set_undefined_options('bdist',('dist_dir', 'dist_dir'))
|
ValueError
|
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/setuptools-0.6c11/setuptools/command/rotate.py/rotate.finalize_options
|
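int() raises ValueError on non-numeric strings, which is the exception masked above. The same validation idiom, sketched without the distutils machinery (parse_keep is hypothetical):

def parse_keep(raw):
    try:
        return int(raw)
    except ValueError:
        raise SystemExit('--keep must be an integer')

parse_keep('3') returns 3; parse_keep('three') exits with the message.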
348
|
@register.function
@jinja2.contextfunction
def breadcrumbs(context, items=list(), add_default=True, crumb_size=40):
"""
show a list of breadcrumbs. If url is None, it won't be a link.
Accepts: [(url, label)]
"""
if add_default:
app = context['request'].APP
crumbs = [(urlresolvers.reverse('home'), page_name(app))]
else:
crumbs = []
# add user-defined breadcrumbs
if items:
try:
crumbs += items
except __HOLE__:
crumbs.append(items)
crumbs = [(url, truncate(label, crumb_size)) for (url, label) in crumbs]
c = {'breadcrumbs': crumbs}
t = get_env().get_template('amo/breadcrumbs.html').render(c)
return jinja2.Markup(t)
|
TypeError
|
dataset/ETHPy150Open mozilla/addons-server/src/olympia/amo/helpers.py/breadcrumbs
|
349
|
@register.function
@jinja2.contextfunction
def impala_breadcrumbs(context, items=list(), add_default=True, crumb_size=40):
"""
show a list of breadcrumbs. If url is None, it won't be a link.
Accepts: [(url, label)]
"""
if add_default:
base_title = page_name(context['request'].APP)
crumbs = [(urlresolvers.reverse('home'), base_title)]
else:
crumbs = []
# add user-defined breadcrumbs
if items:
try:
crumbs += items
except __HOLE__:
crumbs.append(items)
crumbs = [(url, truncate(label, crumb_size)) for (url, label) in crumbs]
c = {'breadcrumbs': crumbs, 'has_home': add_default}
t = get_env().get_template('amo/impala/breadcrumbs.html').render(c)
return jinja2.Markup(t)
|
TypeError
|
dataset/ETHPy150Open mozilla/addons-server/src/olympia/amo/helpers.py/impala_breadcrumbs
|
350
|
@register.filter
def is_choice_field(value):
try:
return isinstance(value.field.widget, CheckboxInput)
except __HOLE__:
pass
|
AttributeError
|
dataset/ETHPy150Open mozilla/addons-server/src/olympia/amo/helpers.py/is_choice_field
|
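The filter above is a duck-typing probe: a value without a .field attribute makes the getattr chain raise AttributeError, and the filter silently returns None. The same pattern without Django imports; both classes here are hypothetical stand-ins.

class FakeBoundField(object):
    class field(object):
        widget = 'checkbox'

def is_checkbox(value):
    try:
        return value.field.widget == 'checkbox'
    except AttributeError:
        return False

assert is_checkbox(FakeBoundField()) is True
assert is_checkbox('not a field') is False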
351
|
@register.function
@jinja2.contextfunction
def remora_url(context, url, lang=None, app=None, prefix=''):
"""Wrapper for urlresolvers.remora_url"""
if lang is None:
_lang = context['LANG']
if _lang:
lang = to_locale(_lang).replace('_', '-')
if app is None:
try:
app = context['APP'].short
except (AttributeError, __HOLE__):
pass
return urlresolvers.remora_url(url=url, lang=lang, app=app, prefix=prefix)
|
KeyError
|
dataset/ETHPy150Open mozilla/addons-server/src/olympia/amo/helpers.py/remora_url
|
352
|
@register.function
@jinja2.contextfunction
def hasOneToOne(context, obj, attr):
try:
getattr(obj, attr)
return True
except __HOLE__:
return False
|
ObjectDoesNotExist
|
dataset/ETHPy150Open mozilla/addons-server/src/olympia/amo/helpers.py/hasOneToOne
|
353
|
def recv_reply(self):
code = None
message_lines = []
incomplete = True
input = self.recv_buffer
while incomplete:
start_i = 0
while start_i is not None:
match = reply_line_pattern.match(input, start_i)
if match:
if code and code != match.group(2):
raise BadReply(match.group(1))
code = match.group(2)
message_lines.append(match.group(4))
self.recv_buffer = input[match.end(0):]
if match.group(3) != b'-':
incomplete = False
start_i = None
else:
start_i = match.end(0)
else:
match = line_pattern.match(input, start_i)
if match:
self.recv_buffer = input[match.end(0):]
message_lines.append(match.group(1))
raise BadReply(b'\r\n'.join(message_lines))
else:
start_i = None
if incomplete:
self.buffered_recv()
input = self.recv_buffer
body = b'\r\n'.join(message_lines)
try:
return code.decode('ascii'), body.decode('utf-8')
except __HOLE__:
raise BadReply(b'\r\n'.join(message_lines))
|
UnicodeDecodeError
|
dataset/ETHPy150Open slimta/python-slimta/slimta/smtp/io.py/IO.recv_reply
|
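bytes.decode raises UnicodeDecodeError on malformed input, which is what recv_reply converts into a BadReply above. The decode-or-reject core, in isolation:

def decode_reply(raw_bytes):
    try:
        return raw_bytes.decode('utf-8')
    except UnicodeDecodeError:
        raise ValueError('reply is not valid UTF-8: %r' % raw_bytes)

decode_reply(b'250 OK') returns '250 OK'; decode_reply(b'\xff') raises.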
354
|
def __eq__(self, other):
"""'Deep, sparse compare.
Deeply compare two entities, following the non-None attributes of the
non-persisted object, if possible.
"""
if other is self:
return True
elif not self.__class__ == other.__class__:
return False
if id(self) in _recursion_stack:
return True
_recursion_stack.add(id(self))
try:
# pick the entity that's not SA persisted as the source
try:
self_key = sa.orm.attributes.instance_state(self).key
except sa.orm.exc.NO_STATE:
self_key = None
if other is None:
a = self
b = other
elif self_key is not None:
a = other
b = self
else:
a = self
b = other
for attr in list(a.__dict__):
if attr.startswith('_'):
continue
value = getattr(a, attr)
try:
# handle lazy loader errors
battr = getattr(b, attr)
except (__HOLE__, sa_exc.UnboundExecutionError):
return False
if hasattr(value, '__iter__'):
if hasattr(value, '__getitem__') and not hasattr(
value, 'keys'):
if list(value) != list(battr):
return False
else:
if set(value) != set(battr):
return False
else:
if value is not None and value != battr:
return False
return True
finally:
_recursion_stack.remove(id(self))
|
AttributeError
|
dataset/ETHPy150Open RoseOu/flasky/venv/lib/python2.7/site-packages/sqlalchemy/testing/entities.py/ComparableEntity.__eq__
|
355
|
def __init__(self, key=None, api_url=None, version=None,
center=None, zoom=None, dom_id='map',
kml_urls=[], polylines=None, polygons=None, markers=None,
template='gis/google/google-map.js',
js_module='geodjango',
extra_context={}):
# The Google Maps API Key defined in the settings will be used
# if not passed in as a parameter. The use of an API key is
# _required_.
if not key:
try:
self.key = settings.GOOGLE_MAPS_API_KEY
except __HOLE__:
raise GoogleMapException('Google Maps API Key not found (try adding GOOGLE_MAPS_API_KEY to your settings).')
else:
self.key = key
# Getting the Google Maps API version, defaults to using the latest ("2.x"),
# this is not necessarily the most stable.
if not version:
self.version = getattr(settings, 'GOOGLE_MAPS_API_VERSION', '2.x')
else:
self.version = version
# Can specify the API URL in the `api_url` keyword.
if not api_url:
self.api_url = getattr(settings, 'GOOGLE_MAPS_URL', GOOGLE_MAPS_URL) % self.version
else:
self.api_url = api_url
# Setting the DOM id of the map, the load function, the JavaScript
# template, and the KML URLs array.
self.dom_id = dom_id
self.extra_context = extra_context
self.js_module = js_module
self.template = template
self.kml_urls = kml_urls
# Does the user want any GMarker, GPolygon, and/or GPolyline overlays?
overlay_info = [[GMarker, markers, 'markers'],
[GPolygon, polygons, 'polygons'],
[GPolyline, polylines, 'polylines']]
for overlay_class, overlay_list, varname in overlay_info:
setattr(self, varname, [])
if overlay_list:
for overlay in overlay_list:
if isinstance(overlay, overlay_class):
getattr(self, varname).append(overlay)
else:
getattr(self, varname).append(overlay_class(overlay))
# If GMarker, GPolygons, and/or GPolylines are used the zoom will be
# automatically calculated via the Google Maps API. If both a zoom
# level and a center coordinate are provided with polygons/polylines,
# no automatic determination will occur.
self.calc_zoom = False
if self.polygons or self.polylines or self.markers:
if center is None or zoom is None:
self.calc_zoom = True
# Defaults for the zoom level and center coordinates if the zoom
# is not automatically calculated.
if zoom is None: zoom = 4
self.zoom = zoom
if center is None: center = (0, 0)
self.center = center
|
AttributeError
|
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.5/django/contrib/gis/maps/google/gmap.py/GoogleMap.__init__
|
356
|
def _get_config_errors(request, cache=True):
"""Returns a list of (confvar, err_msg) tuples."""
global _CONFIG_ERROR_LIST
if not cache or _CONFIG_ERROR_LIST is None:
error_list = [ ]
for module in appmanager.DESKTOP_MODULES:
# Get the config_validator() function
try:
validator = getattr(module.conf, CONFIG_VALIDATOR)
except __HOLE__:
continue
if not callable(validator):
LOG.warn("Auto config validation: %s.%s is not a function" %
(module.conf.__name__, CONFIG_VALIDATOR))
continue
try:
for confvar, error in validator(request.user):
error = {
'name': confvar if isinstance(confvar, str) else confvar.get_fully_qualifying_key(),
'message': error,
}
if isinstance(confvar, BoundConfig):
error['value'] = confvar.get()
error_list.append(error)
except Exception, ex:
LOG.exception("Error in config validation by %s: %s" % (module.nice_name, ex))
_CONFIG_ERROR_LIST = error_list
return _CONFIG_ERROR_LIST
|
AttributeError
|
dataset/ETHPy150Open cloudera/hue/desktop/core/src/desktop/views.py/_get_config_errors
|
357
|
def parse_ivorn(ivorn):
"""
Takes an IVORN of the form
ivo://authorityID/resourceKey#local_ID
and returns (authorityID, resourceKey, local_ID). Raises if that isn't
possible.
Refer to the IVOA Identifiers Recommendation (1.12) for justification, but
note that the document is not as clear or unambiguous as one might hope. We
have assumed that anything which is not explicitly permitted is forbidden
in the authority and the resource name, while anything which would be
permitted in an RFC-3986 URI is permitted in the fragment.
"""
try:
return IVORN_RE.match(ivorn).groups()
except __HOLE__ as e:
log.debug("Failed to parse as IVORN: ", str(e))
raise Exception("Invalid IVORN: %s" % (ivorn,))
|
AttributeError
|
dataset/ETHPy150Open jdswinbank/Comet/comet/utility/voevent.py/parse_ivorn
|
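re.match returns None when the pattern does not match, so calling .groups() on the result raises AttributeError, the exception masked above. A sketch with a deliberately simplified pattern; the regex here is illustrative, not the real IVORN_RE.

import re

SIMPLE_IVORN_RE = re.compile(r'ivo://([^/#]+)/([^#]*)#?(.*)')

def parse_simple_ivorn(ivorn):
    try:
        return SIMPLE_IVORN_RE.match(ivorn).groups()
    except AttributeError:
        raise ValueError('Invalid IVORN: %s' % ivorn)

parse_simple_ivorn('ivo://auth/key#local') returns ('auth', 'key', 'local').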
358
|
def __getattr__(self, name):
try:
return self.providers[name]
except __HOLE__:
msg = "'_SocialState' object has no attribute '%s'" % name
raise AttributeError(msg)
|
KeyError
|
dataset/ETHPy150Open mattupstate/flask-social/flask_social/core.py/_SocialState.__getattr__
|
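__getattr__ must raise AttributeError rather than KeyError for missing names, otherwise hasattr() and getattr() with a default stop working; that translation is exactly what the method above performs. A minimal standalone version of the same pattern:

class ProviderBag(object):
    def __init__(self, **providers):
        self.providers = providers

    def __getattr__(self, name):
        try:
            return self.providers[name]
        except KeyError:
            raise AttributeError("'ProviderBag' object has no attribute %r" % name)

bag = ProviderBag(github='gh-client')
assert bag.github == 'gh-client'
assert getattr(bag, 'missing', None) is None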
359
|
def __init__(self):
# grammar.app_context: [grammar instances with given app_context field]
self.global_grammars = []
self.active_global_grammars = []
self.local_grammars = {}
self.active_local_grammars = {}
self.triggered = {
'word': {
'before': [],
'after': [],
},
'match': {
'before': [],
'after': [],
},
'command': {
'before': [],
'after': [],
}
}
try:
self.process_contexts = _locals.GLOBAL_CONTEXTS
except __HOLE__:
self.process_contexts = {}
|
AttributeError
|
dataset/ETHPy150Open evfredericksen/pynacea/pynhost/pynhost/grammarhandler.py/GrammarHandler.__init__
|
360
|
def load_grammars_from_module(self, module):
clsmembers = inspect.getmembers(sys.modules[module.__name__], inspect.isclass)
for member in clsmembers:
# screen for objects with grammarbase.GrammarBase ancestor
class_hierarchy = inspect.getmro(member[1])
if len(class_hierarchy) > 2 and class_hierarchy[-2] == grammarbase.GrammarBase:
grammar = self.initialize_grammar(member[1])
app_pattern = grammar.app_context
if grammar.app_context != '':
app_pattern = re.compile(grammar.app_context)
try:
self.local_grammars[app_pattern].append(grammar)
except __HOLE__:
self.local_grammars[app_pattern] = [grammar]
else:
self.global_grammars.append(grammar)
|
KeyError
|
dataset/ETHPy150Open evfredericksen/pynacea/pynhost/pynhost/grammarhandler.py/GrammarHandler.load_grammars_from_module
|
361
|
def set_active_grammars(self):
try:
self.active_global_grammars = utilities.filter_grammar_list(self.global_grammars, self.process_contexts)
except __HOLE__:
self.active_global_grammars = []
self.active_local_grammars = {}
self.active_global_grammars.sort(reverse=True)
for app_pattern, grammar_list in self.local_grammars.items():
active_list = utilities.filter_grammar_list(grammar_list, self.process_contexts)
self.active_local_grammars[app_pattern] = active_list + self.active_global_grammars
self.active_local_grammars[app_pattern].sort(reverse=True)
|
KeyError
|
dataset/ETHPy150Open evfredericksen/pynacea/pynhost/pynhost/grammarhandler.py/GrammarHandler.set_active_grammars
|
362
|
def __init__(self, connexion, graph, logger,
separator = '@@@', has_root = False
):
"""initialize a Redisgraph instance
connexion: a redis connexion
graph: a graph name (string)
logger: a logger
separator: field separator; must not appear in any node name
(default @@@) (string)
has_root: flag to set if nodes with no predecessors must be linked
to a fictitious "root" node (default False) (boolean)
"""
self.connexion = connexion #the redis connexion
self.graph = graph #the name of the graph
self.logger = logger #the logger
self.separator = separator #the key separator
self.has_root = has_root #flag for handling of node
# with no predecessor
self.root = 'RO_' + self.separator + '_OT' #name of the root
# (ultimate predecessor)
self.transactions = {} #dictionary of transactions
self.re_check_separator = re.compile(separator, 0)
#compiled regular expression used to check
#that separator is not in the key
#(warn about possible key collision)
self.legacy_mode = False #small flag for a legacy mode
#(set when lua scripting is not supported by python-redis and/or redis-server)
#a small lua script to handle the has_root option
# first arg is the node name, second arg is the node predecessors key
self.lua_handle_no_predecessor = ("""
local rootismember = redis.call('SISMEMBER', ARGV[2], '%(ROOT)s')
local card = redis.call('SCARD', ARGV[2])
if card == 0
then redis.call('SADD', ARGV[2], '%(ROOT)s')
redis.call('SADD', '%(ROOTSUCC)s', ARGV[1])
end
if card > 1 and rootismember == 1
then redis.call('SREM', ARGV[2], '%(ROOT)s')
redis.call('SREM', '%(ROOTSUCC)s', ARGV[1])
end
""" % {
'ROOTSUCC' : self._gen_key(self.root, ['successors', ]),
'ROOT' : self.root
}
)
#we try to register the script
try:
self._handle_no_predecessor_script = self.connexion.register_script(
self.lua_handle_no_predecessor)
except redis.exceptions.ResponseError:
#server is too old for lua scripting
self.legacy_mode = True
self.logger.info('redis server < 2.6.0, using legacy mode, no atomicity')
except __HOLE__:
#redis-python is too old for lua scripting
self.legacy_mode = True
self.logger.info('redis-python < 2.7.0, using legacy mode, no atomicity')
#if we are not in legacy mode, we try to register other lua scripts
if not self.legacy_mode:
#a small lua script to remove all the attributes of a given node
# the single arg is the node key (graph name + separator + node name)
self.lua_remove_all_attributs = ("""
local attributs = redis.call('SMEMBERS', ARGV[1]..'%(sep)sattributs_list')
for i, attribut in ipairs(attributs) do
redis.call('DEL', ARGV[1]..'%(sep)sattribut%(sep)s'..attribut)
end
redis.call('DEL', ARGV[1]..'%(sep)sattributs_list')
""" % {
'sep' : self.separator
}
)
#we register the script
#<graph name><sep><node_name><sep><variable_name>[<sep><other>]*
self.remove_all_attributes_script = self.connexion.register_script(
self.lua_remove_all_attributs)
#a small lua script to remove a node from its predecessors and successors
# first arg is the node name
self.lua_remove_node_successors_predecessors = ("""
local predecessors = redis.call('SMEMBERS', '%(graph)s%(sep)s'..ARGV[1]..'%(sep)spredecessors')
local successors = redis.call('SMEMBERS', '%(graph)s%(sep)s'..ARGV[1]..'%(sep)ssuccessors')
for i, predecessor in ipairs(predecessors) do
redis.call('SREM', '%(graph)s%(sep)s'..predecessor..'%(sep)ssuccessors', ARGV[1])
end
for i, successor in ipairs(successors) do
local predecessors_key = '%(graph)s%(sep)s'..successor..'%(sep)spredecessors'
redis.call('SREM', predecessors_key, ARGV[1])
end
""" % {
'sep' : self.separator,
'graph' : self.graph
}
)
self.lua_handle_root_remove = ("""
for i, successor in ipairs(successors) do
local predecessors_key = '%(graph)s%(sep)s'..successor..'%(sep)spredecessors'
local card = redis.call('SCARD', predecessors_key)
if card == 0
then redis.call('SADD', predecessors_key, '%(ROOT)s')
redis.call('SADD', '%(ROOTSUCC)s', successor)
end
end
""" % {
'sep' : self.separator,
'graph' : self.graph,
'ROOTSUCC' : self._gen_key(self.root, ['successors', ]),
'ROOT' : self.root
}
)
if has_root:
#we register the script
self.remove_node_successors_predecessors_script = self.connexion.register_script(
self.lua_remove_node_successors_predecessors +
self.lua_handle_root_remove
)
else:
#we register the script
self.remove_node_successors_predecessors_script = self.connexion.register_script(
self.lua_remove_node_successors_predecessors)
|
AttributeError
|
dataset/ETHPy150Open kakwa/pygraph_redis/pygraph_redis/directed_graph.py/Directed_graph.__init__
|
363
|
@classmethod
def _loadUserModule(cls, userModule):
"""
Imports and returns the module object represented by the given module descriptor.
:type userModule: ModuleDescriptor
"""
if not userModule.belongsToToil:
userModule = userModule.localize()
if userModule.dirPath not in sys.path:
sys.path.append(userModule.dirPath)
try:
return importlib.import_module(userModule.name)
except __HOLE__:
logger.error('Failed to import user module %r from sys.path=%r', userModule, sys.path)
raise
|
ImportError
|
dataset/ETHPy150Open BD2KGenomics/toil/src/toil/job.py/Job._loadUserModule
|
364
|
def run(self, fileStore):
#Unpickle the service
userModule = self._loadUserModule(self.serviceModule)
service = self._unpickle( userModule, BytesIO( self.pickledService ) )
#Start the service
startCredentials = service.start(fileStore)
try:
#The start credentials must be communicated to processes connecting to
#the service, to do this while the run method is running we
#cheat and set the return value promise within the run method
self._setReturnValuesForPromises(startCredentials, fileStore.jobStore)
self._rvs = {} # Set this to avoid the return values being updated after the
#run method has completed!
#Now flag that the service is running jobs can connect to it
logger.debug("Removing the start jobStoreID to indicate that establishment of the service")
assert self.jobWrapper.startJobStoreID != None
if fileStore.jobStore.fileExists(self.jobWrapper.startJobStoreID):
fileStore.jobStore.deleteFile(self.jobWrapper.startJobStoreID)
assert not fileStore.jobStore.fileExists(self.jobWrapper.startJobStoreID)
#Now block until we are told to stop, which is indicated by the removal
#of a file
assert self.jobWrapper.terminateJobStoreID != None
while True:
# Check for the terminate signal
if not fileStore.jobStore.fileExists(self.jobWrapper.terminateJobStoreID):
logger.debug("Detected that the terminate jobStoreID has been removed so exiting")
if not fileStore.jobStore.fileExists(self.jobWrapper.errorJobStoreID):
raise RuntimeError("Detected the error jobStoreID has been removed so exiting with an error")
break
# Check the service's status and exit if failed or complete
try:
if not service.check():
logger.debug("The service has finished okay, exiting")
break
except __HOLE__:
logger.debug("Detected termination of the service")
raise
time.sleep(fileStore.jobStore.config.servicePollingInterval) #Avoid excessive polling
#Now kill the service
#service.stop(fileStore)
# Remove link to the jobWrapper
self.jobWrapper = None
logger.debug("Service is done")
finally:
# The stop function is always called
service.stop(fileStore)
|
RuntimeError
|
dataset/ETHPy150Open BD2KGenomics/toil/src/toil/job.py/ServiceJob.run
|
365
|
def render(self, context):
if 'forloop' in context:
parentloop = context['forloop']
else:
parentloop = {}
context.push()
try:
values = self.sequence.resolve(context, True)
except VariableDoesNotExist:
values = []
if values is None:
values = []
if not hasattr(values, '__len__'):
values = list(values)
len_values = len(values)
if len_values < 1:
context.pop()
return self.nodelist_empty.render(context)
nodelist = NodeList()
if self.is_reversed:
values = reversed(values)
unpack = len(self.loopvars) > 1
# Create a forloop value in the context. We'll update counters on each
# iteration just below.
loop_dict = context['forloop'] = {'parentloop': parentloop}
for i, item in enumerate(values):
# Shortcuts for current loop iteration number.
loop_dict['counter0'] = i
loop_dict['counter'] = i+1
# Reverse counter iteration numbers.
loop_dict['revcounter'] = len_values - i
loop_dict['revcounter0'] = len_values - i - 1
# Boolean values designating first and last times through loop.
loop_dict['first'] = (i == 0)
loop_dict['last'] = (i == len_values - 1)
pop_context = False
if unpack:
# If there are multiple loop variables, unpack the item into
# them.
try:
unpacked_vars = dict(zip(self.loopvars, item))
except __HOLE__:
pass
else:
pop_context = True
context.update(unpacked_vars)
else:
context[self.loopvars[0]] = item
for node in self.nodelist_loop:
nodelist.append(node.render(context))
if pop_context:
# The loop variables were pushed on to the context so pop them
# off again. This is necessary because the tag lets the length
# of loopvars differ to the length of each set of items and we
# don't want to leave any vars from the previous loop on the
# context.
context.pop()
context.pop()
return nodelist.render(context)
|
TypeError
|
dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/_internal/django/template/defaulttags.py/ForNode.render
|
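dict(zip(self.loopvars, item)) raises TypeError when item is not iterable, which is how the for-tag detects an item that cannot be unpacked into multiple loop variables. The core of that idiom, stripped of the template context handling:

def unpack_or_single(names, item):
    try:
        return dict(zip(names, item))
    except TypeError:
        # item is not iterable: bind it to the first name only.
        return {names[0]: item}

assert unpack_or_single(['k', 'v'], ('a', 1)) == {'k': 'a', 'v': 1}
assert unpack_or_single(['x'], 42) == {'x': 42}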
366
|
def render(self, context):
if not include_is_allowed(self.filepath):
if settings.DEBUG:
return "[Didn't have permission to include file]"
else:
return '' # Fail silently for invalid includes.
try:
fp = open(self.filepath, 'r')
output = fp.read()
fp.close()
except __HOLE__:
output = ''
if self.parsed:
try:
t = Template(output, name=self.filepath)
return t.render(context)
except TemplateSyntaxError, e:
if settings.DEBUG:
return "[Included template had syntax error: %s]" % e
else:
return '' # Fail silently for invalid included templates.
return output
|
IOError
|
dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/_internal/django/template/defaulttags.py/SsiNode.render
|
367
|
def render(self, context):
try:
value = self.val_expr.resolve(context)
maxvalue = self.max_expr.resolve(context)
max_width = int(self.max_width.resolve(context))
except VariableDoesNotExist:
return ''
except ValueError:
raise TemplateSyntaxError("widthratio final argument must be a number")
try:
value = float(value)
maxvalue = float(maxvalue)
ratio = (value / maxvalue) * max_width
except (__HOLE__, ZeroDivisionError):
return ''
return str(int(round(ratio)))
|
ValueError
|
dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/_internal/django/template/defaulttags.py/WidthRatioNode.render
|
368
|
def mkdirs(path):
try:
makedirs(path)
except __HOLE__ as err:
if err.errno != EEXIST:
raise
|
OSError
|
dataset/ETHPy150Open openstack/python-swiftclient/swiftclient/service.py/mkdirs
|
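mkdirs above ignores only EEXIST and re-raises everything else, the standard race-free "make sure the directory exists" idiom; on Python 3.2+ os.makedirs(path, exist_ok=True) gives the same effect. The pattern, self-contained:

import os
from errno import EEXIST

def ensure_dir(path):
    try:
        os.makedirs(path)
    except OSError as err:
        if err.errno != EEXIST:
            raise  # permission and other real errors still propagate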
369
|
def __init__(self, path, body, headers):
self._path = path
self._body = body
self._actual_read = 0
self._content_length = None
self._actual_md5 = None
self._expected_etag = headers.get('etag')
if ('x-object-manifest' not in headers
and 'x-static-large-object' not in headers):
self._actual_md5 = md5()
if 'content-length' in headers:
try:
self._content_length = int(headers.get('content-length'))
except __HOLE__:
raise SwiftError('content-length header must be an integer')
|
ValueError
|
dataset/ETHPy150Open openstack/python-swiftclient/swiftclient/service.py/_SwiftReader.__init__
|
370
|
def _download_object_job(self, conn, container, obj, options):
out_file = options['out_file']
results_dict = {}
req_headers = split_headers(options['header'], '')
pseudodir = False
path = join(container, obj) if options['yes_all'] else obj
path = path.lstrip(os_path_sep)
options['skip_identical'] = (options['skip_identical'] and
out_file != '-')
if options['prefix'] and options['remove_prefix']:
path = path[len(options['prefix']):].lstrip('/')
if options['out_directory']:
path = os.path.join(options['out_directory'], path)
if options['skip_identical']:
filename = out_file if out_file else path
try:
fp = open(filename, 'rb')
except IOError:
pass
else:
with fp:
md5sum = md5()
while True:
data = fp.read(65536)
if not data:
break
md5sum.update(data)
req_headers['If-None-Match'] = md5sum.hexdigest()
try:
start_time = time()
get_args = {'resp_chunk_size': 65536,
'headers': req_headers,
'response_dict': results_dict}
if options['skip_identical']:
# Assume the file is a large object; if we're wrong, the query
# string is ignored and the If-None-Match header will trigger
# the behavior we want
get_args['query_string'] = 'multipart-manifest=get'
try:
headers, body = conn.get_object(container, obj, **get_args)
except ClientException as e:
if not options['skip_identical']:
raise
if e.http_status != 304: # Only handling Not Modified
raise
headers = results_dict['headers']
if 'x-object-manifest' in headers:
# DLO: most likely it has more than one page worth of
# segments and we have an empty file locally
body = []
elif config_true_value(headers.get('x-static-large-object')):
# SLO: apparently we have a copy of the manifest locally?
# provide no chunking data to force a fresh download
body = [b'[]']
else:
# Normal object: let it bubble up
raise
if options['skip_identical']:
if config_true_value(headers.get('x-static-large-object')) or \
'x-object-manifest' in headers:
# The request was chunked, so stitch it back together
chunk_data = self._get_chunk_data(conn, container, obj,
headers, b''.join(body))
else:
chunk_data = None
if chunk_data is not None:
if self._is_identical(chunk_data, filename):
raise ClientException('Large object is identical',
http_status=304)
# Large objects are different; start the real download
del get_args['query_string']
get_args['response_dict'].clear()
headers, body = conn.get_object(container, obj, **get_args)
headers_receipt = time()
obj_body = _SwiftReader(path, body, headers)
no_file = options['no_download']
if out_file == "-" and not no_file:
res = {
'action': 'download_object',
'container': container,
'object': obj,
'path': path,
'pseudodir': pseudodir,
'contents': obj_body
}
return res
fp = None
try:
content_type = headers.get('content-type')
if (content_type and
content_type.split(';', 1)[0] == 'text/directory'):
make_dir = not no_file and out_file != "-"
if make_dir and not isdir(path):
mkdirs(path)
else:
make_dir = not (no_file or out_file)
if make_dir:
dirpath = dirname(path)
if dirpath and not isdir(dirpath):
mkdirs(dirpath)
if not no_file:
if out_file:
fp = open(out_file, 'wb')
else:
if basename(path):
fp = open(path, 'wb')
else:
pseudodir = True
for chunk in obj_body:
if fp is not None:
fp.write(chunk)
finish_time = time()
finally:
bytes_read = obj_body.bytes_read()
if fp is not None:
fp.close()
if 'x-object-meta-mtime' in headers and not no_file:
try:
mtime = float(headers['x-object-meta-mtime'])
except __HOLE__:
pass # no real harm; couldn't trust it anyway
else:
if options['out_file']:
utime(options['out_file'], (mtime, mtime))
else:
utime(path, (mtime, mtime))
res = {
'action': 'download_object',
'success': True,
'container': container,
'object': obj,
'path': path,
'pseudodir': pseudodir,
'start_time': start_time,
'finish_time': finish_time,
'headers_receipt': headers_receipt,
'auth_end_time': conn.auth_end_time,
'read_length': bytes_read,
'attempts': conn.attempts,
'response_dict': results_dict
}
return res
except Exception as err:
traceback, err_time = report_traceback()
logger.exception(err)
res = {
'action': 'download_object',
'container': container,
'object': obj,
'success': False,
'error': err,
'traceback': traceback,
'error_timestamp': err_time,
'response_dict': results_dict,
'path': path,
'pseudodir': pseudodir,
'attempts': conn.attempts
}
return res
|
ValueError
|
dataset/ETHPy150Open openstack/python-swiftclient/swiftclient/service.py/SwiftService._download_object_job
|
371
|
def _submit_page_downloads(self, container, page_generator, options):
try:
list_page = next(page_generator)
except __HOLE__:
return None
if list_page["success"]:
objects = [o["name"] for o in list_page["listing"]]
if options["shuffle"]:
shuffle(objects)
o_downs = [
self.thread_manager.object_dd_pool.submit(
self._download_object_job, container, obj, options
) for obj in objects
]
return o_downs
else:
raise list_page["error"]
|
StopIteration
|
dataset/ETHPy150Open openstack/python-swiftclient/swiftclient/service.py/SwiftService._submit_page_downloads
|
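next() on an exhausted iterator raises StopIteration, which the method above turns into a plain None so that callers can treat "no more pages" as an ordinary value. The pattern in isolation:

def next_page(pages):
    try:
        return next(pages)
    except StopIteration:
        return None

gen = iter([{'listing': [1, 2]}])
assert next_page(gen) == {'listing': [1, 2]}
assert next_page(gen) is None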
372
|
def upload(self, container, objects, options=None):
"""
Upload a list of objects to a given container.
:param container: The container (or pseudo-folder path) to put the
uploads into.
:param objects: A list of file/directory names (strings) or
SwiftUploadObject instances containing a source for the
created object, an object name, and an options dict
(can be None) to override the options for that
individual upload operation::
[
'/path/to/file',
SwiftUploadObject('/path', object_name='obj1'),
...
]
The options dict is as described below.
The SwiftUploadObject source may be one of:
* A file-like object (with a read method)
* A string containing the path to a local
file or directory
* None, to indicate that we want an empty object
:param options: A dictionary containing options to override the global
options specified during the service object creation.
These options are applied to all upload operations
performed by this call, unless overridden on a per
object basis. Possible options are given below::
{
'meta': [],
'header': [],
'segment_size': None,
'use_slo': False,
'segment_container': None,
'leave_segments': False,
'changed': None,
'skip_identical': False,
'fail_fast': False,
'dir_marker': False # Only for None sources
}
:returns: A generator for returning the results of the uploads.
:raises: SwiftError
:raises: ClientException
"""
if options is not None:
options = dict(self._options, **options)
else:
options = self._options
try:
segment_size = int(0 if options['segment_size'] is None else
options['segment_size'])
except __HOLE__:
raise SwiftError('Segment size should be an integer value')
# In case we have a pseudo-folder path for the <container> arg, derive
# the container name from the top path and prepend the rest to
# the object name (same as passing --object-name).
container, _sep, pseudo_folder = container.partition('/')
# Try to create the container, just in case it doesn't exist. If this
# fails, it might just be because the user doesn't have container PUT
# permissions, so we'll ignore any error. If there's really a problem,
# it'll surface on the first object PUT.
policy_header = {}
_header = split_headers(options["header"])
if POLICY in _header:
policy_header[POLICY] = \
_header[POLICY]
create_containers = [
self.thread_manager.container_pool.submit(
self._create_container_job, container, headers=policy_header)
]
# wait for first container job to complete before possibly attempting
# segment container job because segment container job may attempt
# to HEAD the first container
for r in interruptable_as_completed(create_containers):
res = r.result()
yield res
if segment_size:
seg_container = container + '_segments'
if options['segment_container']:
seg_container = options['segment_container']
if seg_container != container:
if not policy_header:
# Since no storage policy was specified on the command
# line, rather than just letting swift pick the default
# storage policy, we'll try to create the segments
# container with the same policy as the upload container
create_containers = [
self.thread_manager.container_pool.submit(
self._create_container_job, seg_container,
policy_source=container
)
]
else:
create_containers = [
self.thread_manager.container_pool.submit(
self._create_container_job, seg_container,
headers=policy_header
)
]
for r in interruptable_as_completed(create_containers):
res = r.result()
yield res
# We maintain a results queue here and a separate thread to monitor
# the futures because we want to get results back from potential
# segment uploads too
rq = Queue()
file_jobs = {}
upload_objects = self._make_upload_objects(objects, pseudo_folder)
for upload_object in upload_objects:
s = upload_object.source
o = upload_object.object_name
o_opts = upload_object.options
details = {'action': 'upload', 'container': container}
if o_opts is not None:
object_options = deepcopy(options)
object_options.update(o_opts)
else:
object_options = options
if hasattr(s, 'read'):
# We've got a file like object to upload to o
file_future = self.thread_manager.object_uu_pool.submit(
self._upload_object_job, container, s, o, object_options
)
details['file'] = s
details['object'] = o
file_jobs[file_future] = details
elif s is not None:
# We've got a path to upload to o
details['path'] = s
details['object'] = o
if isdir(s):
dir_future = self.thread_manager.object_uu_pool.submit(
self._create_dir_marker_job, container, o,
object_options, path=s
)
file_jobs[dir_future] = details
else:
try:
stat(s)
file_future = \
self.thread_manager.object_uu_pool.submit(
self._upload_object_job, container, s, o,
object_options, results_queue=rq
)
file_jobs[file_future] = details
except OSError as err:
# Avoid tying up threads with jobs that will fail
traceback, err_time = report_traceback()
logger.exception(err)
res = {
'action': 'upload_object',
'container': container,
'object': o,
'success': False,
'error': err,
'traceback': traceback,
'error_timestamp': err_time,
'path': s
}
rq.put(res)
else:
# Create an empty object (as a dir marker if is_dir)
details['file'] = None
details['object'] = o
if object_options['dir_marker']:
dir_future = self.thread_manager.object_uu_pool.submit(
self._create_dir_marker_job, container, o,
object_options
)
file_jobs[dir_future] = details
else:
file_future = self.thread_manager.object_uu_pool.submit(
self._upload_object_job, container, StringIO(),
o, object_options
)
file_jobs[file_future] = details
# Start a thread to watch for upload results
Thread(
target=self._watch_futures, args=(file_jobs, rq)
).start()
# yield results as they become available, including those from
# segment uploads.
res = get_from_queue(rq)
cancelled = False
while res is not None:
yield res
if not res['success']:
if not cancelled and options['fail_fast']:
cancelled = True
for f in file_jobs:
f.cancel()
res = get_from_queue(rq)
|
ValueError
|
dataset/ETHPy150Open openstack/python-swiftclient/swiftclient/service.py/SwiftService.upload
|
373
|
def _is_identical(self, chunk_data, path):
try:
fp = open(path, 'rb')
except __HOLE__:
return False
with fp:
for chunk in chunk_data:
to_read = chunk['bytes']
md5sum = md5()
while to_read:
data = fp.read(min(65536, to_read))
if not data:
return False
md5sum.update(data)
to_read -= len(data)
if md5sum.hexdigest() != chunk['hash']:
return False
# Each chunk is verified; check that we're at the end of the file
return not fp.read(1)
|
IOError
|
dataset/ETHPy150Open openstack/python-swiftclient/swiftclient/service.py/SwiftService._is_identical
|
374
|
def _upload_object_job(self, conn, container, source, obj, options,
results_queue=None):
if obj.startswith('./') or obj.startswith('.\\'):
obj = obj[2:]
if obj.startswith('/'):
obj = obj[1:]
res = {
'action': 'upload_object',
'container': container,
'object': obj
}
if hasattr(source, 'read'):
stream = source
path = None
else:
path = source
res['path'] = path
try:
if path is not None:
put_headers = {'x-object-meta-mtime': "%f" % getmtime(path)}
else:
put_headers = {'x-object-meta-mtime': "%f" % round(time())}
res['headers'] = put_headers
# We need to HEAD all objects now in case we're overwriting a
# manifest object and need to delete the old segments
# ourselves.
old_manifest = None
old_slo_manifest_paths = []
new_slo_manifest_paths = set()
segment_size = int(0 if options['segment_size'] is None
else options['segment_size'])
if (options['changed'] or options['skip_identical']
or not options['leave_segments']):
try:
headers = conn.head_object(container, obj)
is_slo = config_true_value(
headers.get('x-static-large-object'))
if options['skip_identical'] or (
is_slo and not options['leave_segments']):
chunk_data = self._get_chunk_data(
conn, container, obj, headers)
if options['skip_identical'] and self._is_identical(
chunk_data, path):
res.update({
'success': True,
'status': 'skipped-identical'
})
return res
cl = int(headers.get('content-length'))
mt = headers.get('x-object-meta-mtime')
if (path is not None and options['changed']
and cl == getsize(path)
and mt == put_headers['x-object-meta-mtime']):
res.update({
'success': True,
'status': 'skipped-changed'
})
return res
if not options['leave_segments']:
old_manifest = headers.get('x-object-manifest')
if is_slo:
for old_seg in chunk_data:
seg_path = old_seg['name'].lstrip('/')
if isinstance(seg_path, text_type):
seg_path = seg_path.encode('utf-8')
old_slo_manifest_paths.append(seg_path)
except ClientException as err:
if err.http_status != 404:
traceback, err_time = report_traceback()
logger.exception(err)
res.update({
'success': False,
'error': err,
'traceback': traceback,
'error_timestamp': err_time
})
return res
# Merge the command line header options to the put_headers
put_headers.update(split_headers(options['header'], ''))
# Don't do segment job if object is not big enough, and never do
# a segment job if we're reading from a stream - we may fail if we
# go over the single object limit, but this gives us a nice way
# to create objects from memory
if (path is not None and segment_size
and (getsize(path) > segment_size)):
res['large_object'] = True
seg_container = container + '_segments'
if options['segment_container']:
seg_container = options['segment_container']
full_size = getsize(path)
segment_futures = []
segment_pool = self.thread_manager.segment_pool
segment = 0
segment_start = 0
while segment_start < full_size:
if segment_start + segment_size > full_size:
segment_size = full_size - segment_start
if options['use_slo']:
segment_name = '%s/slo/%s/%s/%s/%08d' % (
obj, put_headers['x-object-meta-mtime'],
full_size, options['segment_size'], segment
)
else:
segment_name = '%s/%s/%s/%s/%08d' % (
obj, put_headers['x-object-meta-mtime'],
full_size, options['segment_size'], segment
)
seg = segment_pool.submit(
self._upload_segment_job, path, container,
segment_name, segment_start, segment_size, segment,
obj, options, results_queue=results_queue
)
segment_futures.append(seg)
segment += 1
segment_start += segment_size
segment_results = []
errors = False
exceptions = []
for f in interruptable_as_completed(segment_futures):
try:
r = f.result()
if not r['success']:
errors = True
segment_results.append(r)
except Exception as err:
traceback, err_time = report_traceback()
logger.exception(err)
errors = True
exceptions.append((err, traceback, err_time))
if errors:
err = ClientException(
'Aborting manifest creation '
'because not all segments could be uploaded. %s/%s'
% (container, obj))
res.update({
'success': False,
'error': err,
'exceptions': exceptions,
'segment_results': segment_results
})
return res
res['segment_results'] = segment_results
if options['use_slo']:
segment_results.sort(key=lambda di: di['segment_index'])
for seg in segment_results:
seg_loc = seg['segment_location'].lstrip('/')
if isinstance(seg_loc, text_type):
seg_loc = seg_loc.encode('utf-8')
new_slo_manifest_paths.add(seg_loc)
manifest_data = json.dumps([
{
'path': d['segment_location'],
'etag': d['segment_etag'],
'size_bytes': d['segment_size']
} for d in segment_results
])
put_headers['x-static-large-object'] = 'true'
mr = {}
conn.put_object(
container, obj, manifest_data,
headers=put_headers,
query_string='multipart-manifest=put',
response_dict=mr
)
res['manifest_response_dict'] = mr
else:
new_object_manifest = '%s/%s/%s/%s/%s/' % (
quote(seg_container.encode('utf8')),
quote(obj.encode('utf8')),
put_headers['x-object-meta-mtime'], full_size,
options['segment_size'])
if old_manifest and old_manifest.rstrip('/') == \
new_object_manifest.rstrip('/'):
old_manifest = None
put_headers['x-object-manifest'] = new_object_manifest
mr = {}
conn.put_object(
container, obj, '', content_length=0,
headers=put_headers,
response_dict=mr
)
res['manifest_response_dict'] = mr
else:
res['large_object'] = False
obr = {}
if path is not None:
content_length = getsize(path)
contents = LengthWrapper(open(path, 'rb'),
content_length,
md5=options['checksum'])
else:
content_length = None
contents = ReadableToIterable(stream,
md5=options['checksum'])
etag = conn.put_object(
container, obj, contents,
content_length=content_length, headers=put_headers,
response_dict=obr
)
res['response_dict'] = obr
if (options['checksum'] and
etag and etag != contents.get_md5sum()):
raise SwiftError('Object upload verification failed: '
'md5 mismatch, local {0} != remote {1} '
'(remote object has not been removed)'
.format(contents.get_md5sum(), etag))
if old_manifest or old_slo_manifest_paths:
drs = []
delobjsmap = {}
if old_manifest:
scontainer, sprefix = old_manifest.split('/', 1)
sprefix = sprefix.rstrip('/') + '/'
delobjsmap[scontainer] = []
for part in self.list(scontainer, {'prefix': sprefix}):
if not part["success"]:
raise part["error"]
delobjsmap[scontainer].extend(
seg['name'] for seg in part['listing'])
if old_slo_manifest_paths:
for seg_to_delete in old_slo_manifest_paths:
if seg_to_delete in new_slo_manifest_paths:
continue
scont, sobj = \
seg_to_delete.split(b'/', 1)
delobjs_cont = delobjsmap.get(scont, [])
delobjs_cont.append(sobj)
delobjsmap[scont] = delobjs_cont
del_segs = []
for dscont, dsobjs in delobjsmap.items():
for dsobj in dsobjs:
del_seg = self.thread_manager.segment_pool.submit(
self._delete_segment, dscont, dsobj,
results_queue=results_queue
)
del_segs.append(del_seg)
for del_seg in interruptable_as_completed(del_segs):
drs.append(del_seg.result())
res['segment_delete_results'] = drs
# return dict for printing
res.update({
'success': True,
'status': 'uploaded',
'attempts': conn.attempts})
return res
except __HOLE__ as err:
traceback, err_time = report_traceback()
logger.exception(err)
if err.errno == ENOENT:
error = SwiftError('Local file %r not found' % path, exc=err)
else:
error = err
res.update({
'success': False,
'error': error,
'traceback': traceback,
'error_timestamp': err_time
})
except Exception as err:
traceback, err_time = report_traceback()
logger.exception(err)
res.update({
'success': False,
'error': err,
'traceback': traceback,
'error_timestamp': err_time
})
return res
# Delete related methods
#
|
OSError
|
dataset/ETHPy150Open openstack/python-swiftclient/swiftclient/service.py/SwiftService._upload_object_job
|
375
|
@task
def serve(port=8000):
"""Simple HTTP server for the docs"""
import os
from SimpleHTTPServer import SimpleHTTPRequestHandler
import SocketServer
os.chdir("./build/dirhtml")
httpd = SocketServer.TCPServer(("", port), SimpleHTTPRequestHandler)
try:
print "Serving documentation on %d" % port
httpd.serve_forever()
except __HOLE__:
pass
|
KeyboardInterrupt
|
dataset/ETHPy150Open armstrong/docs.armstrongcms.org/fabfile.py/serve
|
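serve_forever blocks until the process is interrupted, and catching KeyboardInterrupt turns Ctrl-C into a clean exit instead of a traceback. A Python 3.6+ equivalent of the same loop, assuming http.server in place of the Python 2 SimpleHTTPServer used above:

import http.server
import socketserver

def serve(port=8000):
    handler = http.server.SimpleHTTPRequestHandler
    with socketserver.TCPServer(('', port), handler) as httpd:
        try:
            print('Serving documentation on %d' % port)
            httpd.serve_forever()
        except KeyboardInterrupt:
            pass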
376
|
def connect(self, obj, name, callback, user_arg=None, weak_args=None, user_args=None):
"""
:param obj: the object sending a signal
:type obj: object
:param name: the signal to listen for, typically a string
:type name: signal name
:param callback: the function to call when that signal is sent
:type callback: function
:param user_arg: deprecated additional argument to callback (appended
after the arguments passed when the signal is
emitted). If None no arguments will be added.
Don't use this argument, use user_args instead.
:param weak_args: additional arguments passed to the callback
(before any arguments passed when the signal
is emitted and before any user_args).
These arguments are stored as weak references
(but converted back into their original value
before passing them to callback) to prevent
any objects referenced (indirectly) from
weak_args from being kept alive just because
they are referenced by this signal handler.
Use this argument only as a keyword argument,
since user_arg might be removed in the future.
:type weak_args: iterable
:param user_args: additional arguments to pass to the callback,
(before any arguments passed when the signal
is emitted but after any weak_args).
Use this argument only as a keyword argument,
since user_arg might be removed in the future.
:type user_args: iterable
When a matching signal is sent, callback will be called. The
arguments it receives will be the user_args passed at connect
time (as individual arguments) followed by all the positional
parameters sent with the signal.
As an example of using weak_args, consider the following snippet:
>>> import urwid
>>> debug = urwid.Text('')
>>> def handler(widget, newtext):
... debug.set_text("Edit widget changed to %s" % newtext)
>>> edit = urwid.Edit('')
>>> key = urwid.connect_signal(edit, 'change', handler)
If you now build some interface using "edit" and "debug", the
"debug" widget will show whatever you type in the "edit" widget.
However, if you remove all references to the "debug" widget, it
will still be kept alive by the signal handler. This is because the
signal handler is a closure that (implicitly) references the
"edit" widget. If you want to allow the "debug" widget to be
garbage collected, you can create a "fake" or "weak" closure
(it's not really a closure, since it doesn't reference any
outside variables, so it's just a dynamic function):
>>> debug = urwid.Text('')
>>> def handler(weak_debug, widget, newtext):
... weak_debug.set_text("Edit widget changed to %s" % newtext)
>>> edit = urwid.Edit('')
>>> key = urwid.connect_signal(edit, 'change', handler, weak_args=[debug])
Here the weak_debug parameter in handler is the value passed
in the weak_args list to connect_signal. Note that the
weak_debug value passed is not a weak reference anymore; the
signals code transparently dereferences the weakref parameter
before passing it to handler.
Returns a key associated by this signal handler, which can be
used to disconnect the signal later on using
urwid.disconnect_signal_by_key. Alternatively, the signal
handler can also be disconnected by calling
urwid.disconnect_signal, which doesn't need this key.
"""
sig_cls = obj.__class__
if name not in self._supported.get(sig_cls, []):
raise NameError, "No such signal %r for object %r" % \
(name, obj)
# Just generate an arbitrary (but unique) key
key = Key()
signals = setdefaultattr(obj, self._signal_attr, {})
handlers = signals.setdefault(name, [])
# Remove the signal handler when any of the weakref'd arguments
# are garbage collected. Note that this means that the handlers
# dictionary can be modified _at any time_, so it should never
# be iterated directly (e.g. iterate only over .keys() and
# .items(), never over .iterkeys(), .iteritems() or the object
# itself).
# We let the callback keep a weakref to the object as well, to
# prevent a circular reference between the handler and the
# object (via the weakrefs, which keep strong references to
# their callbacks) from existing.
obj_weak = weakref.ref(obj)
def weakref_callback(weakref):
o = obj_weak()
if o:
try:
del getattr(o, self._signal_attr, {})[name][key]
except __HOLE__:
pass
user_args = self._prepare_user_args(weak_args, user_args, weakref_callback)
handlers.append((key, callback, user_arg, user_args))
return key
|
KeyError
|
dataset/ETHPy150Open AnyMesh/anyMesh-Python/example/urwid/signals.py/Signals.connect
|
377
|
@property
def organization(self):
"""Name of the organization/employer."""
try:
# For v1 of gdata ("service" modules)?
return self.entry.organization.org_name.text
except __HOLE__:
# For v3 of gdata ("client" modules)?
return self.entry.organization.name.text
|
AttributeError
|
dataset/ETHPy150Open vinitkumar/googlecl/src/googlecl/contacts/__init__.py/ContactsEntryToStringWrapper.organization
|
378
|
@property
# Overrides Base's title. "name" will still give name of contact.
def title(self):
"""Title of contact in organization."""
try:
# For v1 of gdata ("service" modules)?
return self.entry.organization.org_title.text
except __HOLE__:
# For v3 of gdata ("client" modules)?
return self.entry.organization.title.text
|
AttributeError
|
dataset/ETHPy150Open vinitkumar/googlecl/src/googlecl/contacts/__init__.py/ContactsEntryToStringWrapper.title
|
379
|
def main():
try:
logFileDir = sys.argv[1]
except IndexError:
print 'Usage: %s <log file directory>' % sys.argv[0]
sys.exit(1)
spy.findLCMModulesInSysPath()
catalogThread = CatalogThread(logFileDir, 'DECKLINK_VIDEO_CAPTURE')
catalogThread.start()
serverThread = ServerThread(catalogThread.utimeMap)
serverThread.start()
try:
while True:
time.sleep(1)
except __HOLE__:
pass
|
KeyboardInterrupt
|
dataset/ETHPy150Open RobotLocomotion/director/src/python/scripts/videoLogServer.py/main
|
380
|
def get_code_dir(self):
#Rationale for the default code directory location:
# PEP 3147
# http://www.python.org/dev/peps/pep-3147/
#
# Which standardizes the __pycache__ directory as a place to put
# compilation artifacts for python programs
source_dir, source_file = os.path.split(inspect.getsourcefile(self.fn))
candidate = os.path.join(source_dir, '__pycache__', source_file, self.__name__)
if os.path.exists(candidate):
return candidate
try:
os.makedirs(candidate)
return candidate
except __HOLE__:
#Fallback!
#Can't create a directory where the source file lives
#(Maybe the source file is in a system directory)
#Let's put it in a tempdir which we know will be writable
candidate = os.path.join(tempfile.gettempdir(),
'copperhead-cache-uid%s' % os.getuid(),
source_file, self.__name__)
if os.path.exists(candidate):
return candidate
#No check here to ensure this succeeds - fatal error if it fails
os.makedirs(candidate)
return candidate
|
OSError
|
dataset/ETHPy150Open bryancatanzaro/copperhead/copperhead/runtime/cufunction.py/CuFunction.get_code_dir
|
381
|
def on_get_value(self, rowref, column):
fname = os.path.join(self.dirname, rowref)
try:
filestat = os.stat(fname)
except __HOLE__:
return None
mode = filestat.st_mode
if column == 0:
if stat.S_ISDIR(mode):
return folderpb
else:
return filepb
elif column == 1:
return rowref
elif column == 2:
return filestat.st_size
elif column == 3:
return oct(stat.S_IMODE(mode))
return time.ctime(filestat.st_mtime)
|
OSError
|
dataset/ETHPy150Open anandology/pyjamas/pygtkweb/demos/049-filelisting-gtm.py/FileListModel.on_get_value
|
382
|
def on_iter_next(self, rowref):
try:
i = self.files.index(rowref)+1
return self.files[i]
except __HOLE__:
return None
|
IndexError
|
dataset/ETHPy150Open anandology/pyjamas/pygtkweb/demos/049-filelisting-gtm.py/FileListModel.on_iter_next
|
383
|
def on_iter_nth_child(self, rowref, n):
if rowref:
return None
try:
return self.files[n]
except __HOLE__:
return None
|
IndexError
|
dataset/ETHPy150Open anandology/pyjamas/pygtkweb/demos/049-filelisting-gtm.py/FileListModel.on_iter_nth_child
|
384
|
def mkdir_p(path):
try:
os.makedirs(path)
except __HOLE__ as e:
if e.errno != errno.EEXIST or not os.path.isdir(path):
raise
|
OSError
|
dataset/ETHPy150Open msanders/cider/cider/_sh.py/mkdir_p
|
385
|
def read_config(path, fallback=None):
is_json = os.path.splitext(path)[1] == ".json"
try:
with open(path, "r") as f:
contents = f.read() or "{}"
return json.loads(contents) if is_json else yaml.load(contents)
except __HOLE__ as e:
if fallback is not None and e.errno == errno.ENOENT:
return fallback
raise e
except (JSONDecodeError, yaml.parser.ParserError) as e:
raise ParserError(e, path)
|
IOError
|
dataset/ETHPy150Open msanders/cider/cider/_sh.py/read_config
|
386
|
def get_credentials():
try:
with open('metadata') as cred_file:
creds = yaml.safe_load(cred_file)
creds['opsmgr']
creds['opsmgr']['url']
creds['opsmgr']['username']
creds['opsmgr']['password']
except __HOLE__ as e:
print >> sys.stderr, 'metadata file is missing a value:', e.message
sys.exit(1)
except IOError as e:
print >> sys.stderr, 'Not a Concourse PCF pool resource.'
print >> sys.stderr, 'Execute this from within the pool repository root, after a successful claim/acquire.'
sys.exit(1)
return creds
|
KeyError
|
dataset/ETHPy150Open cf-platform-eng/tile-generator/lib/opsmgr.py/get_credentials
|
387
|
def get_config(self, credit_card):
setting_name_base = 'LIVE' if not self.test_mode else 'TEST'
setting_names = ['%s_%s' % (setting_name_base, CARD_NAMES[credit_card.card_type]),
setting_name_base]
for name in setting_names:
try:
config_dict = self.config[name]
except __HOLE__:
continue
return Config(config_dict)
raise KeyError("Couldn't find key %s in config %s" % (' or '.join(setting_names), self.config))
|
KeyError
|
dataset/ETHPy150Open agiliq/merchant/billing/gateways/global_iris_gateway.py/GlobalIrisBase.get_config
|
388
|
def register_user(register_user):
url = BH_URL + "/api/v1/user"
try:
response = requests.post(url,
data=register_user.to_JSON(),
headers=json_headers)
response.raise_for_status()
# Return our username on a successful response
return register_user.username
except ConnectionError as error:
print("Looks like there's a connection error. Please try again later")
except __HOLE__ as error:
if response.status_code == 409:
print(response.text)
else:
print(error)
print("Please try again...")
return None
|
HTTPError
|
dataset/ETHPy150Open rcaloras/bashhub-client/bashhub/rest_client.py/register_user
|
389
|
def login_user(login_form):
url = BH_URL + "/api/v1/login"
try:
response = requests.post(url,
data=login_form.to_JSON(),
headers=json_headers)
response.raise_for_status()
login_response_json = json.dumps(response.json())
return LoginResponse.from_JSON(login_response_json).access_token
except ConnectionError as error:
print("Looks like there's a connection error. Please try again later")
return None
except __HOLE__ as error:
if response.status_code in (409, 401):
print(response.text)
else:
print(error)
print("Please try again...")
return None
|
HTTPError
|
dataset/ETHPy150Open rcaloras/bashhub-client/bashhub/rest_client.py/login_user
|
390
|
def register_system(register_system):
url = BH_URL + "/api/v1/system"
headers = {'content-type': 'application/json'}
try:
response = requests.post(url,
data=register_system.to_JSON(),
headers=json_auth_headers())
response.raise_for_status()
return register_system.name
except ConnectionError as error:
print("Looks like there's a connection error. Please try again later")
except __HOLE__ as error:
if response.status_code == 409:
print(response.text)
else:
print(error)
print("Please try again...")
return None
|
HTTPError
|
dataset/ETHPy150Open rcaloras/bashhub-client/bashhub/rest_client.py/register_system
|
391
|
def get_system_information(mac):
url = BH_URL + '/api/v1/system'
payload = {'mac': mac}
try:
response = requests.get(url,
params=payload,
headers=json_auth_headers())
response.raise_for_status()
system_json = json.dumps(response.json())
return System.from_JSON(system_json)
except ConnectionError as error:
print("Looks like there's a connection error. Please try again later")
except __HOLE__ as error:
return None
|
HTTPError
|
dataset/ETHPy150Open rcaloras/bashhub-client/bashhub/rest_client.py/get_system_information
|
392
|
def get_command(uuid):
    url = BH_URL + "/api/v1/command/{0}".format(uuid)
    try:
        response = requests.get(url, headers=json_auth_headers())
        response.raise_for_status()
        json_command = json.dumps(response.json())
        return Command.from_JSON(json_command)
    except ConnectionError as error:
        print("Looks like there's a connection error. Please try again later")
    except __HOLE__ as error:
        print(error)
        print("Please try again...")
    return None
|
HTTPError
|
dataset/ETHPy150Open rcaloras/bashhub-client/bashhub/rest_client.py/get_command
|
393
|
def delete_command(uuid):
    url = BH_URL + "/api/v1/command/{0}".format(uuid)
    try:
        response = requests.delete(url, headers=base_auth_headers())
        response.raise_for_status()
        return uuid
    except ConnectionError as error:
        pass
    except __HOLE__ as error:
        print(error)
    return None
|
HTTPError
|
dataset/ETHPy150Open rcaloras/bashhub-client/bashhub/rest_client.py/delete_command
|
394
|
@property
def tagdict(self):
    """return a dict converted from this string interpreted as a tag-string

    .. code-block:: py

        >>> from pprint import pprint
        >>> dict_ = IrcString('aaa=bbb;ccc;example.com/ddd=eee').tagdict
        >>> pprint({str(k): str(v) for k, v in dict_.items()})
        {'aaa': 'bbb', 'ccc': 'None', 'example.com/ddd': 'eee'}
    """
    tagdict = getattr(self, '_tagdict', None)
    if tagdict is None:
        try:
            self._tagdict = tags.decode(self)
        except __HOLE__:
            self._tagdict = {}
    return self._tagdict
|
ValueError
|
dataset/ETHPy150Open gawel/irc3/irc3/utils.py/IrcString.tagdict
|
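tags.decode above is irc3's own IRCv3 message-tag parser; when the string is not a valid tag-string it raises the masked exception and tagdict quietly degrades to an empty dict. A rough standalone sketch of what such a decoder does (not the library's actual implementation):

def decode_tags(tagstring):
    # 'key=value' pairs separated by ';'; a bare key maps to None.
    if not tagstring:
        raise ValueError('empty tag-string')
    result = {}
    for item in tagstring.split(';'):
        key, _, value = item.partition('=')
        result[key] = value or None
    return result

print(decode_tags('aaa=bbb;ccc'))  # {'aaa': 'bbb', 'ccc': None}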
395
|
def maybedotted(name):
    """Resolve dotted names:

    .. code-block:: python

        >>> maybedotted('irc3.config')
        <module 'irc3.config' from '...'>
        >>> maybedotted('irc3.utils.IrcString')
        <class 'irc3.utils.IrcString'>

    ..
    """
    if not name:
        raise LookupError(
            'Not able to resolve %s' % name)
    if not hasattr(name, '__name__'):
        try:
            mod = importlib.import_module(name)
        except __HOLE__:
            attr = None
            if '.' in name:
                names = name.split('.')
                attr = names.pop(-1)
                try:
                    mod = maybedotted('.'.join(names))
                except LookupError:
                    attr = None
                else:
                    attr = getattr(mod, attr, None)
            if attr is not None:
                return attr
            raise LookupError(
                'Not able to resolve %s' % name)
        else:
            return mod
    return name
|
ImportError
|
dataset/ETHPy150Open gawel/irc3/irc3/utils.py/maybedotted
|
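The ImportError branch in maybedotted is what lets one dotted path name either a module or an attribute inside a module. A condensed sketch of the same two-step resolution using only the standard library:

import importlib

def resolve(name):
    # First try the whole name as a module import; if that fails,
    # split off the last segment and look it up as an attribute of
    # the parent module instead.
    try:
        return importlib.import_module(name)
    except ImportError:
        module_name, _, attr = name.rpartition('.')
        return getattr(importlib.import_module(module_name), attr)

print(resolve('os.path'))       # the module itself
print(resolve('os.path.join'))  # the join function inside it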
396
|
def info(self):
    '''Returns the number of completed Adobe payloads,
    and the AdobeCode of the most recently completed payload.'''
    last_adobecode = ""
    logfile = self.get_current_log()
    if logfile:
        if self.kind in ['CS6', 'CS5']:
            regex = r'END TIMER :: \[Payload Operation :\{'
        elif self.kind in ['CS3', 'CS4']:
            if self.operation == 'install':
                regex = r'Closed PCD cache session payload with ID'
            else:
                regex = r'Closed CAPS session for removal of payload'
        else:
            if self.operation == 'install':
                regex = r'Completing installation for payload at '
            else:
                regex = r'Physical payload uninstall result '
        cmd = ['/usr/bin/grep', '-E', regex, logfile]
        proc = subprocess.Popen(cmd, bufsize=-1,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE)
        (output, dummy_err) = proc.communicate()
        if output:
            lines = str(output).splitlines()
            completed_payloads = len(lines)
            if (logfile not in self.payload_count
                    or completed_payloads > self.payload_count[logfile]):
                # record number of completed payloads
                self.payload_count[logfile] = completed_payloads
                # now try to get the AdobeCode of the most recently
                # completed payload.
                # this isn't 100% accurate, but it's mostly for show
                # anyway...
                regex = re.compile(r'[^{]*(\{[A-Fa-f0-9-]+\})')
                lines.reverse()
                for line in lines:
                    m = regex.match(line)
                    try:
                        last_adobecode = m.group(1)
                        break
                    except (IndexError, __HOLE__):
                        pass
    total_completed_payloads = 0
    for key in self.payload_count.keys():
        total_completed_payloads += self.payload_count[key]
    return (total_completed_payloads, last_adobecode)

# dmg helper
# we need this instead of the one in munkicommon because the Adobe stuff
# needs the dmgs mounted under /Volumes. We can merge this later (or not).
|
AttributeError
|
dataset/ETHPy150Open munki/munki/code/client/munkilib/adobeutils.py/AdobeInstallProgressMonitor.info
|
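The masked handler in info() exists because re.match returns None when the pattern does not match, so m.group(1) fails on the None itself rather than on a missing group. A small demonstration with the same AdobeCode pattern (the log lines are made up for illustration):

import re

regex = re.compile(r'[^{]*(\{[A-Fa-f0-9-]+\})')
for line in ['no payload code here', 'END TIMER {3F023171-ABCD-4a02}']:
    m = regex.match(line)
    try:
        print(m.group(1))
    except (IndexError, AttributeError):
        # m is None for non-matching lines, so .group() has no target.
        print('no AdobeCode found')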
397
|
def killStupidProcesses():
    '''A nasty bit of hackery to get Adobe CS5 AAMEE packages to install
    when at the loginwindow.'''
    stupid_processes = ["Adobe AIR Installer",
                        "Adobe AIR Application Installer",
                        "InstallAdobeHelp",
                        "open -a /Library/Application Support/Adobe/"
                        "SwitchBoard/SwitchBoard.app",
                        "/bin/bash /Library/Application Support/Adobe/"
                        "SwitchBoard/SwitchBoard.app/Contents/MacOS/"
                        "switchboard.sh"]
    for procname in stupid_processes:
        pid = utils.getPIDforProcessName(procname)
        if pid:
            if pid not in secondsToLive:
                secondsToLive[pid] = 30
            else:
                secondsToLive[pid] = secondsToLive[pid] - 1
                if secondsToLive[pid] == 0:
                    # it's been running too long; kill it
                    munkicommon.log("Killing PID %s: %s" % (pid, procname))
                    try:
                        os.kill(int(pid), 9)
                    except __HOLE__:
                        pass
                    # remove this PID from our list
                    del secondsToLive[pid]
                    # only kill one process per invocation
                    return
|
OSError
|
dataset/ETHPy150Open munki/munki/code/client/munkilib/adobeutils.py/killStupidProcesses
|
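The os.kill call in killStupidProcesses can race with the process exiting on its own, in which case the kernel reports "no such process" and Python raises the masked exception; the handler simply ignores it. A minimal illustration (PID 999999 is assumed not to exist on the machine running this):

import os
import signal

try:
    os.kill(999999, signal.SIGKILL)
except OSError:
    # ESRCH: the target process is already gone, which is fine here.
    # (Python 3 raises ProcessLookupError, an OSError subclass.)
    pass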
398
|
def runAdobeInstallTool(
        cmd, number_of_payloads=0, killAdobeAIR=False, payloads=None,
        kind="CS5", operation="install"):
    '''An abstraction of the tasks for running Adobe Setup,
    AdobeUberInstaller, AdobeUberUninstaller, AdobeDeploymentManager, etc'''
    # initialize an AdobeInstallProgressMonitor object.
    progress_monitor = AdobeInstallProgressMonitor(
        kind=kind, operation=operation)
    if munkicommon.munkistatusoutput and not number_of_payloads:
        # indeterminate progress bar
        munkistatus.percent(-1)
    proc = subprocess.Popen(cmd, shell=False, bufsize=1,
                            stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    old_payload_completed_count = 0
    payloadname = ""
    while proc.poll() is None:
        time.sleep(1)
        (payload_completed_count, adobe_code) = progress_monitor.info()
        if payload_completed_count > old_payload_completed_count:
            old_payload_completed_count = payload_completed_count
            if adobe_code and payloads:
                matched_payloads = [payload for payload in payloads
                                    if payload.get('AdobeCode') == adobe_code]
                if matched_payloads:
                    payloadname = matched_payloads[0].get('display_name')
                else:
                    payloadname = adobe_code
                payloadinfo = " - " + payloadname
            else:
                payloadinfo = ""
            if number_of_payloads:
                munkicommon.display_status_minor(
                    'Completed payload %s of %s%s' %
                    (payload_completed_count, number_of_payloads,
                     payloadinfo))
            else:
                munkicommon.display_status_minor(
                    'Completed payload %s%s',
                    payload_completed_count, payloadinfo)
            if munkicommon.munkistatusoutput:
                munkistatus.percent(
                    getPercent(payload_completed_count, number_of_payloads))
        # Adobe AIR Installer workaround/hack
        # CSx installs at the loginwindow hang when Adobe AIR is installed.
        # So we check for this and kill the process. Ugly.
        # Hopefully we can disable this in the future.
        if killAdobeAIR:
            if (not munkicommon.getconsoleuser() or
                    munkicommon.getconsoleuser() == u"loginwindow"):
                # we're at the loginwindow.
                killStupidProcesses()
    # run of tool completed
    retcode = proc.poll()
    # check output for errors
    output = proc.stdout.readlines()
    for line in output:
        line = line.rstrip("\n")
        if line.startswith("Error"):
            munkicommon.display_error(line)
        if line.startswith("Exit Code:"):
            if retcode == 0:
                try:
                    retcode = int(line[11:])
                except (ValueError, __HOLE__):
                    retcode = -1
    if retcode != 0 and retcode != 8:
        munkicommon.display_error(
            'Adobe Setup error: %s: %s', retcode, adobeSetupError(retcode))
    else:
        if munkicommon.munkistatusoutput:
            munkistatus.percent(100)
        munkicommon.display_status_minor('Done.')
    return retcode
|
TypeError
|
dataset/ETHPy150Open munki/munki/code/client/munkilib/adobeutils.py/runAdobeInstallTool
|
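The retcode parsing at the end of runAdobeInstallTool names two exception types because int() fails differently depending on its argument: a non-numeric string raises ValueError, while a non-string such as None raises the masked type. A quick illustration:

for raw in ['7', 'not-a-number', None]:
    try:
        print(int(raw))
    except (ValueError, TypeError):
        # int('not-a-number') -> ValueError; int(None) -> TypeError.
        print(-1)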
399
|
def writefile(stringdata, path):
    '''Writes string data to path.
    Returns the path on success, empty string on failure.'''
    try:
        fileobject = open(path, mode='w', buffering=1)
        print >> fileobject, stringdata.encode('UTF-8')
        fileobject.close()
        return path
    except (__HOLE__, IOError):
        munkicommon.display_error("Couldn't write %s" % stringdata)
        return ""
|
OSError
|
dataset/ETHPy150Open munki/munki/code/client/munkilib/adobeutils.py/writefile
|
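writefile catches OSError and IOError together because, on Python 2, file operations can raise either depending on where the failure happens; on Python 3 IOError is simply an alias of OSError, so the pair still works. A short sketch of the same pattern (the path is deliberately unwritable to trigger the handler):

try:
    fileobject = open('/nonexistent-dir/out.txt', mode='w')
    fileobject.write('data\n')
    fileobject.close()
except (OSError, IOError):
    print("Couldn't write")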