| Unnamed: 0 (int64, 0–10k) | function (string, 79–138k chars) | label (string, 20 classes) | info (string, 42–261 chars) |
|---|---|---|---|
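Each row below pairs a Python function in which the exception class of one `except` clause has been masked with `__HOLE__` (the `function` column) with the name of the masked class (the `label` column, one of 20 exception types) and the source location it was extracted from (the `info` column). A minimal sketch of loading and inspecting such an export, assuming it has been saved as a hypothetical `exception_dataset.csv` with these four columns:

```python
import pandas as pd

# Hypothetical file name and format; this dump's actual storage format is
# not stated, so CSV is only an assumption for illustration.
df = pd.read_csv("exception_dataset.csv")

print(df.columns.tolist())         # ['Unnamed: 0', 'function', 'label', 'info']
print(df["label"].value_counts())  # frequency of each of the 20 exception classes

# Every function should contain the __HOLE__ marker at the masked except clause.
print(df["function"].str.contains("__HOLE__").all())
```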
200
|
def main():
"""Run the tests."""
parsed_args = _PARSER.parse_args()
if parsed_args.test_target and parsed_args.test_path:
raise Exception('At most one of test_path and test_target '
'should be specified.')
if parsed_args.test_path and '.' in parsed_args.test_path:
raise Exception('The delimiter in test_path should be a slash (/)')
if parsed_args.test_target and '/' in parsed_args.test_target:
raise Exception('The delimiter in test_target should be a dot (.)')
if parsed_args.test_target:
all_test_targets = [parsed_args.test_target]
else:
all_test_targets = _get_all_test_targets(
test_path=parsed_args.test_path)
# Prepare tasks.
task_to_taskspec = {}
tasks = []
for test_target in all_test_targets:
test = TestingTaskSpec(
test_target, parsed_args.generate_coverage_report)
task = TaskThread(test.run, name=test_target)
task_to_taskspec[task] = test
tasks.append(task)
task_execution_failed = False
try:
_execute_tasks(tasks)
except Exception:
task_execution_failed = True
for task in tasks:
if task.exception:
log(str(task.exception))
print ''
print '+------------------+'
print '| SUMMARY OF TESTS |'
print '+------------------+'
print ''
# Check we ran all tests as expected.
total_count = 0
total_errors = 0
total_failures = 0
for task in tasks:
spec = task_to_taskspec[task]
if not task.finished:
print 'CANCELED %s' % spec.test_target
test_count = 0
elif 'No tests were run' in str(task.exception):
print 'ERROR %s: No tests found.' % spec.test_target
test_count = 0
elif task.exception:
exc_str = str(task.exception).decode('utf-8')
print exc_str[exc_str.find('=') : exc_str.rfind('-')]
tests_failed_regex_match = re.search(
r'Test suite failed: ([0-9]+) tests run, ([0-9]+) errors, '
'([0-9]+) failures',
str(task.exception))
try:
test_count = int(tests_failed_regex_match.group(1))
errors = int(tests_failed_regex_match.group(2))
failures = int(tests_failed_regex_match.group(3))
total_errors += errors
total_failures += failures
print 'FAILED %s: %s errors, %s failures' % (
spec.test_target, errors, failures)
except __HOLE__:
# There was an internal error, and the tests did not run. (The
# error message did not match `tests_failed_regex_match`.)
test_count = 0
print ''
print '------------------------------------------------------'
print ' WARNING: FAILED TO RUN TESTS.'
print ''
print ' This is most likely due to an import error.'
print '------------------------------------------------------'
else:
try:
tests_run_regex_match = re.search(
r'Ran ([0-9]+) tests? in ([0-9\.]+)s', task.output)
test_count = int(tests_run_regex_match.group(1))
test_time = float(tests_run_regex_match.group(2))
print ('SUCCESS %s: %d tests (%.1f secs)' %
(spec.test_target, test_count, test_time))
except Exception:
print (
'An unexpected error occurred. '
'Task output:\n%s' % task.output)
total_count += test_count
print ''
if total_count == 0:
raise Exception('WARNING: No tests were run.')
else:
print 'Ran %s test%s in %s test class%s.' % (
total_count, '' if total_count == 1 else 's',
len(tasks), '' if len(tasks) == 1 else 'es')
if total_errors or total_failures:
print '(%s ERRORS, %s FAILURES)' % (total_errors, total_failures)
else:
print 'All tests passed.'
if task_execution_failed:
raise Exception('Task execution failed.')
elif total_errors or total_failures:
raise Exception(
'%s errors, %s failures' % (total_errors, total_failures))
|
AttributeError
|
dataset/ETHPy150Open oppia/oppia/scripts/backend_tests.py/main
|
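The `label` column names the exception class that was masked out of the `function` column, so a row can be restored to (approximately) its original source by substituting the label back in. A small sketch, using row 200 above as the example; the `unmask` helper and the `row` variable are hypothetical:

```python
def unmask(function_source, label):
    """Substitute the masked exception class back into the snippet.

    For row 200 above, label == 'AttributeError', so the lone
    'except __HOLE__:' clause becomes 'except AttributeError:'.
    """
    return function_source.replace("__HOLE__", label)


# Hypothetical usage with the fields of a single row:
# restored = unmask(row["function"], row["label"])
# The result should match the file referenced in row["info"], up to the
# indentation and blank lines lost in this export.
```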
201
|
def __init__(self, result_string, active_mode=False):
    """Set 'active_mode' to True to parse results in
    an Active Learning context."""
    self.raw_output = result_string
    result_list = []
    # TODO: Something more robust than whitespace splitting
    # to handle modes like --audit ?
    for token in result_string.split():
        try:
            result = float(token)
            result_list.append(result)
        except __HOLE__:
            # Ignore tokens that can't be made into floats (like tags)
            logging.debug("Ignoring non-float token {}".format(token))
    self.value_list = result_list
    if result_list:
        self.prediction = result_list[0]
    else:
        self.prediction = None
    if active_mode:
        if len(result_list) > 1:
            self.importance = result_list[1]
        else:
            self.importance = 0.
|
ValueError
|
dataset/ETHPy150Open mokelly/wabbit_wappa/wabbit_wappa/__init__.py/VWResult.__init__
|
202
|
def post(self, request, *args, **kwargs):
"""Handler for HTTP POST requests."""
context = self.get_context_data(**kwargs)
workflow = context[self.context_object_name]
try:
# Check for the VALIDATE_STEP* headers, if they are present
# and valid integers, return validation results as JSON,
# otherwise proceed normally.
validate_step_start = int(self.request.META.get(
'HTTP_X_HORIZON_VALIDATE_STEP_START', ''))
validate_step_end = int(self.request.META.get(
'HTTP_X_HORIZON_VALIDATE_STEP_END', ''))
except __HOLE__:
# No VALIDATE_STEP* headers, or invalid values. Just proceed
# with normal workflow handling for POSTs.
pass
else:
# There are valid VALIDATE_STEP* headers, so only do validation
# for the specified steps and return results.
data = self.validate_steps(request, workflow,
validate_step_start,
validate_step_end)
return http.HttpResponse(json.dumps(data),
content_type="application/json")
if not workflow.is_valid():
return self.render_to_response(context)
try:
success = workflow.finalize()
except forms.ValidationError:
return self.render_to_response(context)
except Exception:
success = False
exceptions.handle(request)
if success:
msg = workflow.format_status_message(workflow.success_message)
messages.success(request, msg)
else:
msg = workflow.format_status_message(workflow.failure_message)
messages.error(request, msg)
if "HTTP_X_HORIZON_ADD_TO_FIELD" in self.request.META:
field_id = self.request.META["HTTP_X_HORIZON_ADD_TO_FIELD"]
data = [self.get_object_id(workflow.object),
self.get_object_display(workflow.object)]
response = http.HttpResponse(json.dumps(data))
response["X-Horizon-Add-To-Field"] = field_id
return response
next_url = self.request.REQUEST.get(workflow.redirect_param_name, None)
return shortcuts.redirect(next_url or workflow.get_success_url())
|
ValueError
|
dataset/ETHPy150Open CiscoSystems/avos/horizon/workflows/views.py/WorkflowView.post
|
203
|
def getcallargs(func, *positional, **named):
"""Get the mapping of arguments to values.
A dict is returned, with keys the function argument names (including the
names of the * and ** arguments, if any), and values the respective bound
values from 'positional' and 'named'."""
args, varargs, varkw, defaults = getargspec(func)
f_name = func.__name__
arg2value = {}
# The following closures are basically because of tuple parameter unpacking.
assigned_tuple_params = []
def assign(arg, value):
if isinstance(arg, str):
arg2value[arg] = value
else:
assigned_tuple_params.append(arg)
value = iter(value)
for i, subarg in enumerate(arg):
try:
subvalue = next(value)
except __HOLE__:
raise ValueError('need more than %d %s to unpack' %
(i, 'values' if i > 1 else 'value'))
assign(subarg,subvalue)
try:
next(value)
except StopIteration:
pass
else:
raise ValueError('too many values to unpack')
def is_assigned(arg):
if isinstance(arg,str):
return arg in arg2value
return arg in assigned_tuple_params
if ismethod(func) and func.im_self is not None:
# implicit 'self' (or 'cls' for classmethods) argument
positional = (func.im_self,) + positional
num_pos = len(positional)
num_total = num_pos + len(named)
num_args = len(args)
num_defaults = len(defaults) if defaults else 0
for arg, value in zip(args, positional):
assign(arg, value)
if varargs:
if num_pos > num_args:
assign(varargs, positional[-(num_pos-num_args):])
else:
assign(varargs, ())
elif 0 < num_args < num_pos:
raise TypeError('%s() takes %s %d %s (%d given)' % (
f_name, 'at most' if defaults else 'exactly', num_args,
'arguments' if num_args > 1 else 'argument', num_total))
elif num_args == 0 and num_total:
if varkw:
if num_pos:
# XXX: We should use num_pos, but Python also uses num_total:
raise TypeError('%s() takes exactly 0 arguments '
'(%d given)' % (f_name, num_total))
else:
raise TypeError('%s() takes no arguments (%d given)' %
(f_name, num_total))
for arg in args:
if isinstance(arg, str) and arg in named:
if is_assigned(arg):
raise TypeError("%s() got multiple values for keyword "
"argument '%s'" % (f_name, arg))
else:
assign(arg, named.pop(arg))
if defaults: # fill in any missing values with the defaults
for arg, value in zip(args[-num_defaults:], defaults):
if not is_assigned(arg):
assign(arg, value)
if varkw:
assign(varkw, named)
elif named:
unexpected = next(iter(named))
if isinstance(unexpected, unicode):
unexpected = unexpected.encode(sys.getdefaultencoding(), 'replace')
raise TypeError("%s() got an unexpected keyword argument '%s'" %
(f_name, unexpected))
unassigned = num_args - len([arg for arg in args if is_assigned(arg)])
if unassigned:
num_required = num_args - num_defaults
raise TypeError('%s() takes %s %d %s (%d given)' % (
f_name, 'at least' if defaults else 'exactly', num_required,
'arguments' if num_required > 1 else 'argument', num_total))
return arg2value
|
StopIteration
|
dataset/ETHPy150Open GrahamDumpleton/wrapt/src/wrapt/arguments.py/getcallargs
|
204
|
def highlight_syntax(src, lang, linenums=False):
    """Pass code to the [Pygments](http://pygments.pocoo.org/) highliter
    with optional line numbers. The output should then be styled with CSS
    to your liking. No styles are applied by default - only styling hooks
    (i.e.: <span class="k">).
    """
    src = src.strip('\n')
    if not lang:
        lexer = TextLexer()
    else:
        try:
            lexer = get_lexer_by_name(lang, stripall=True)
        except __HOLE__:
            lexer = TextLexer()
    formatter = HtmlFormatter(linenos=linenums, tab_length=TAB_LENGTH)
    html = highlight(src, lexer, formatter)
    if lang:
        open_code = OPEN_CODE % (LANG_TAG % (lang, ), )
    else:
        open_code = OPEN_CODE % u''
    html = html.replace('<div class="highlight"><pre>', open_code, 1)
    html = html.replace('</pre></div>', CLOSE_CODE)
    return html
|
ValueError
|
dataset/ETHPy150Open lucuma/Clay/clay/markdown_ext/md_fencedcode.py/highlight_syntax
|
205
|
def to_representation(self, value):
    # Create a dict from the GEOSGeometry when the value is not previously
    # serialized from the spatial db.
    try:
        return {'type': value.geom_type, 'coordinates': value.coords}
    # Value is already serialized as geojson, kml, etc.
    except __HOLE__:
        return value
|
AttributeError
|
dataset/ETHPy150Open bkg/django-spillway/spillway/fields.py/GeometryField.to_representation
|
206
|
def run(self, context):
    cmd = "echo $$>{dir}/pidfile; exec {dir}/recentfling.sh -i {}; rm {dir}/pidfile"
    cmd = cmd.format(self.loops, dir=self.device.working_directory)
    try:
        self.output = self.device.execute(cmd, timeout=120)
    except __HOLE__:
        self._kill_recentfling()
        raise
|
KeyboardInterrupt
|
dataset/ETHPy150Open ARM-software/workload-automation/wlauto/workloads/recentfling/__init__.py/Recentfling.run
|
207
|
def create_from_exception(self, exception=None, traceback=None, **kwargs):
"""
Creates an error log from an exception.
"""
if not exception:
exc_type, exc_value, traceback = sys.exc_info()
elif not traceback:
warnings.warn('Using just the ``exception`` argument is deprecated, send ``traceback`` in addition.', DeprecationWarning)
exc_type, exc_value, traceback = sys.exc_info()
else:
exc_type = exception.__class__
exc_value = exception
def to_unicode(f):
if isinstance(f, dict):
nf = dict()
for k, v in f.iteritems():
nf[str(k)] = to_unicode(v)
f = nf
elif isinstance(f, (list, tuple)):
f = [to_unicode(f) for f in f]
else:
try:
f = smart_unicode(f)
except (UnicodeEncodeError, __HOLE__):
f = '(Error decoding value)'
return f
reporter = ExceptionReporter(None, exc_type, exc_value, traceback)
frames = reporter.get_traceback_frames()
data = kwargs.pop('data', {}) or {}
data['exc'] = base64.b64encode(pickle.dumps(map(to_unicode, [exc_type.__class__.__module__, exc_value.args, frames])).encode('zlib'))
tb_message = '\n'.join(traceback_mod.format_exception(exc_type, exc_value, traceback))
kwargs.setdefault('message', to_unicode(exc_value))
return self._create(
class_name=exc_type.__name__,
traceback=tb_message,
data=data,
**kwargs
)
|
UnicodeDecodeError
|
dataset/ETHPy150Open dcramer/django-db-log/djangodblog/manager.py/DBLogManager.create_from_exception
|
208
|
def _on_path_loaded(self, path):
    if os.path.normpath(path) != self._root_path:
        return
    try:
        self.setModel(self._fs_model_proxy)
        file_root_index = self._fs_model_source.setRootPath(
            self._root_path)
        root_index = self._fs_model_proxy.mapFromSource(file_root_index)
        self.setRootIndex(root_index)
        if not os.path.ismount(self._root_path):
            self.expandToDepth(0)
        if self._hide_extra_colums:
            self.setHeaderHidden(True)
            for i in range(1, 4):
                self.hideColumn(i)
        if self._path_to_select:
            self.select_path(self._path_to_select)
            self._path_to_select = None
    except __HOLE__:
        # wrapped C/C++ object of type FileSystemTreeView has been deleted
        return
|
RuntimeError
|
dataset/ETHPy150Open OpenCobolIDE/OpenCobolIDE/open_cobol_ide/extlibs/pyqode/core/widgets/filesystem_treeview.py/FileSystemTreeView._on_path_loaded
|
209
|
def _paste(self, sources, destination, copy):
"""
Copies the files listed in ``sources`` to destination. Source are
removed if copy is set to False.
"""
for src in sources:
debug('%s <%s> to <%s>' % (
'copying' if copy else 'cutting', src, destination))
perform_copy = True
ext = os.path.splitext(src)[1]
original = os.path.splitext(os.path.split(src)[1])[0]
filename, status = QtWidgets.QInputDialog.getText(
self.tree_view, _('Copy'), _('New name:'),
QtWidgets.QLineEdit.Normal, original)
if filename == '' or not status:
return
filename = filename + ext
final_dest = os.path.join(destination, filename)
if os.path.exists(final_dest):
rep = QtWidgets.QMessageBox.question(
self.tree_view, _('File exists'),
_('File <%s> already exists. Do you want to erase it?') %
final_dest,
QtWidgets.QMessageBox.Yes | QtWidgets.QMessageBox.No,
QtWidgets.QMessageBox.No)
if rep == QtWidgets.QMessageBox.No:
perform_copy = False
if not perform_copy:
continue
try:
if os.path.isfile(src):
shutil.copy(src, final_dest)
else:
shutil.copytree(src, final_dest)
except (IOError, __HOLE__) as e:
QtWidgets.QMessageBox.warning(
self.tree_view, _('Failed to copy file'), str(e))
_logger().exception('failed to copy %s to %s', src,
destination)
else:
debug('file copied %s', src)
if not copy:
debug('removing source (cut operation)')
if os.path.isfile(src):
os.remove(src)
else:
shutil.rmtree(src)
|
OSError
|
dataset/ETHPy150Open OpenCobolIDE/OpenCobolIDE/open_cobol_ide/extlibs/pyqode/core/widgets/filesystem_treeview.py/FileSystemHelper._paste
|
210
|
def delete(self):
    """
    Deletes the selected items.
    """
    urls = self.selected_urls()
    rep = QtWidgets.QMessageBox.question(
        self.tree_view, _('Confirm delete'),
        _('Are you sure about deleting the selected files?'),
        QtWidgets.QMessageBox.Yes | QtWidgets.QMessageBox.No,
        QtWidgets.QMessageBox.Yes)
    if rep == QtWidgets.QMessageBox.Yes:
        deleted_files = []
        for fn in urls:
            try:
                if os.path.isfile(fn):
                    os.remove(fn)
                    deleted_files.append(fn)
                else:
                    files = self._get_files(fn)
                    shutil.rmtree(fn)
                    deleted_files += files
            except __HOLE__ as e:
                QtWidgets.QMessageBox.warning(
                    self.tree_view, _('Failed to remove %s') % fn, str(e))
                _logger().exception('failed to remove %s', fn)
        self.tree_view.files_deleted.emit(deleted_files)
        for d in deleted_files:
            debug('%s removed', d)
            self.tree_view.file_deleted.emit(os.path.normpath(d))
|
OSError
|
dataset/ETHPy150Open OpenCobolIDE/OpenCobolIDE/open_cobol_ide/extlibs/pyqode/core/widgets/filesystem_treeview.py/FileSystemHelper.delete
|
211
|
def create_directory(self):
    """
    Creates a directory under the selected directory (if the selected item
    is a file, the parent directory is used).
    """
    src = self.get_current_path()
    name, status = QtWidgets.QInputDialog.getText(
        self.tree_view, _('Create directory'), _('Name:'),
        QtWidgets.QLineEdit.Normal, '')
    if status:
        fatal_names = ['.', '..']
        for i in fatal_names:
            if i == name:
                QtWidgets.QMessageBox.critical(
                    self.tree_view, _("Error"), _("Wrong directory name"))
                return
        if os.path.isfile(src):
            src = os.path.dirname(src)
        try:
            os.makedirs(os.path.join(src, name), exist_ok=True)
        except __HOLE__ as e:
            QtWidgets.QMessageBox.warning(
                self.tree_view, _('Failed to create directory'),
                _('Failed to create directory: %s'), str(e))
|
OSError
|
dataset/ETHPy150Open OpenCobolIDE/OpenCobolIDE/open_cobol_ide/extlibs/pyqode/core/widgets/filesystem_treeview.py/FileSystemHelper.create_directory
|
212
|
def create_file(self):
    """
    Creates a file under the current directory.
    """
    src = self.get_current_path()
    name, status = QtWidgets.QInputDialog.getText(
        self.tree_view, _('Create new file'), _('File name:'),
        QtWidgets.QLineEdit.Normal, '')
    if status:
        fatal_names = ['.', '..', os.sep]
        for i in fatal_names:
            if i == name:
                QtWidgets.QMessageBox.critical(
                    self.tree_view, _("Error"), _("Wrong directory name"))
                return
        if os.path.isfile(src):
            src = os.path.dirname(src)
        path = os.path.join(src, name)
        try:
            with open(path, 'w'):
                pass
        except __HOLE__ as e:
            QtWidgets.QMessageBox.warning(
                self.tree_view, _('Failed to create new file'),
                _('Failed to create file: %s') % str(e))
        else:
            self.tree_view.file_created.emit(os.path.normpath(path))
|
OSError
|
dataset/ETHPy150Open OpenCobolIDE/OpenCobolIDE/open_cobol_ide/extlibs/pyqode/core/widgets/filesystem_treeview.py/FileSystemHelper.create_file
|
213
|
def connect(dbhandle, attach=None):
    """attempt to connect to database.
    If `dbhandle` is an existing connection to a database,
    it will be returned unchanged. Otherwise, this method
    will attempt to establish a connection.
    Arguments
    ---------
    dbhandle : object or string
        A database handle or a connection string.
    Returns
    -------
    dbhandle : object
        A DB-API2 conforming database handle
    """
    if type(dbhandle) is str:
        try:
            import sqlite3
        except __HOLE__:
            raise ValueError(
                "If an sqlite database location is passed"
                " directly the sqlite3 module must be installed")
        dbhandle = sqlite3.connect(dbhandle)
    cc = dbhandle.cursor()
    if attach is not None:
        if type(attach) is str:
            db_execute(cc, attach)
        elif isinstance(attach, (tuple, list)):
            for attach_statement in attach:
                db_execute(cc, attach_statement)
    return dbhandle
|
ImportError
|
dataset/ETHPy150Open CGATOxford/cgat/CGAT/Database.py/connect
|
214
|
def load_assets(self):
from frappe.modules import get_module_path, scrub
import os
page_name = scrub(self.name)
path = os.path.join(get_module_path(self.module), 'page', page_name)
# script
fpath = os.path.join(path, page_name + '.js')
if os.path.exists(fpath):
with open(fpath, 'r') as f:
self.script = render_include(f.read())
# css
fpath = os.path.join(path, page_name + '.css')
if os.path.exists(fpath):
with open(fpath, 'r') as f:
self.style = unicode(f.read(), "utf-8")
# html as js template
for fname in os.listdir(path):
if fname.endswith(".html"):
with open(os.path.join(path, fname), 'r') as f:
template = unicode(f.read(), "utf-8")
if "<!-- jinja -->" in template:
context = frappe._dict({})
try:
out = frappe.get_attr("{app}.{module}.page.{page}.{page}.get_context".format(
app = frappe.local.module_app[scrub(self.module)],
module = scrub(self.module),
page = page_name
))(context)
if out:
context = out
except (AttributeError, __HOLE__):
pass
template = frappe.render_template(template, context)
self.script = html_to_js_template(fname, template) + self.script
if frappe.lang != 'en':
from frappe.translate import get_lang_js
self.script += get_lang_js("page", self.name)
for path in get_code_files_via_hooks("page_js", self.name):
js = get_js(path)
if js:
self.script += "\n\n" + js
|
ImportError
|
dataset/ETHPy150Open frappe/frappe/frappe/core/doctype/page/page.py/Page.load_assets
|
215
|
def getDefault(self, key, context=None):
    #print "getting default for key", key, self.defaults
    # 1) Check on the request
    current = self.defaults.get(key, None)
    if current is None:
        # 2) Check on the session
        if context is not None:
            sessionDefaults = context.locate(iformless.IFormDefaults)
            if sessionDefaults is not self:
                current = sessionDefaults.getDefault(key)
                if current is not None:
                    return current
        # 3) Ask the Binding instance for the default values
        try:
            configurable = context.locate(iformless.IConfigurable)
        except __HOLE__:
            return ''
        return configurable.getDefault(context.locate(inevow.IData))
    return current
|
KeyError
|
dataset/ETHPy150Open twisted/nevow/formless/formutils.py/FormDefaults.getDefault
|
216
|
def calculatePostURL(context, data):
    postLocation = inevow.ICurrentSegments(context)[-1]
    if postLocation == '':
        postLocation = '.'
    try:
        configurableKey = context.locate(iformless.IConfigurableKey)
    except __HOLE__:
        #print "IConfigurableKey was not remembered when calculating full binding name for %s in node %s" % (configurable, context.key)
        configurableKey = ''
    bindingName = context.key
    return "%s/freeform_post!%s!%s" % (postLocation, configurableKey, bindingName)
|
KeyError
|
dataset/ETHPy150Open twisted/nevow/formless/formutils.py/calculatePostURL
|
217
|
def applyHistory(self, axis_object):
    string_count = {}
    for cp in self.__command_list:
        log_string = cp.log_string
        try:
            count = string_count[log_string]
            string_count[log_string] += 1
        except __HOLE__:
            string_count[log_string] = count = 1
        if count < 5:
            print log_string
        elif count == 5:
            print log_string + "\n<<< More of these surpressed.>>> "
        getattr(axis_object, cp.name)(*cp.args, **cp.kwargs)
|
KeyError
|
dataset/ETHPy150Open hoytak/lazyrunner/lazyrunner/pmodule/axisproxy.py/AxisProxy.applyHistory
|
218
|
def main():
    from sys import argv
    if len(argv) < 4:
        print "%s <Filter Name> <run|rerun|visualize> <osmdb_file> [<filter args> ...]" % argv[0]
        print "Filters:"
        for k,v in globals().items():
            if type(v) == type and issubclass(v,OSMDBFilter):
                print " -- %s" % k
        exit()
    filter_cls, mode, osmdb_file = argv[1:4]
    try:
        f = globals()[filter_cls]()
    except __HOLE__, e:
        raise Exception("Filter not found.")
    db = OSMDB(osmdb_file)
    if len(argv) > 4:
        extra = argv[4:]
    else:
        extra = []
    if mode == 'run':
        f.run(db, *extra)
    elif mode == 'rerun':
        f.rerun(db, *extra)
    elif mode == 'visualize':
        f.visualize(db, *extra)
    else:
        raise Exception("Unknown mode.")
|
KeyError
|
dataset/ETHPy150Open bmander/graphserver/pygs/graphserver/ext/osm/osmfilters.py/main
|
219
|
def handle(self, *args, **options): # NoQA
    """
    Execute the command.
    """
    # Load the settings
    self.require_settings(args, options)
    # Load your AWS credentials from ~/.aws/credentials
    self.load_credentials()
    try:
        # Tail the available logs
        all_logs = self.zappa.fetch_logs(self.lambda_name)
        self.print_logs(all_logs)
        # Keep polling, and print any new logs.
        while True:
            all_logs_again = self.zappa.fetch_logs(self.lambda_name)
            new_logs = []
            for log in all_logs_again:
                if log not in all_logs:
                    new_logs.append(log)
            self.print_logs(new_logs)
            all_logs = all_logs + new_logs
    except __HOLE__:
        # Die gracefully
        try:
            sys.exit(0)
        except SystemExit:
            os._exit(0)
        return
|
KeyboardInterrupt
|
dataset/ETHPy150Open Miserlou/django-zappa/django_zappa/management/commands/tail.py/Command.handle
|
220
|
def main():
try:
signal.signal(signal.SIGTSTP, signal.SIG_IGN) # ignore CTRL+Z
signal.signal(signal.SIGINT, signal_handler) # custom CTRL+C handler
except AttributeError: # OS Does not support some signals, probably windows
pass
if len(sys.argv) == 1:
usage()
# defaults
dnsrecord = 'A'
count = 10
timeout = 5
quiet = False
verbose = False
dnsserver = '8.8.8.8'
dst_port = 53
src_port = 0
src_ip = None
hostname = 'wikipedia.org'
try:
opts, args = getopt.getopt(sys.argv[1:], "qhc:s:t:w:vp:P:S:",
["help", "output=", "count=", "server=", "quiet", "type=", "wait=", "verbose",
"port", "dstport=", "srcip="])
except getopt.GetoptError as err:
# print help information and exit:
print(err) # will print something like "option -a not recognized"
usage()
if args and len(args) == 1:
hostname = args[0]
else:
usage()
for o, a in opts:
if o in ("-h", "--help"):
usage()
elif o in ("-c", "--count"):
count = int(a)
elif o in ("-v", "--verbose"):
verbose = True
elif o in ("-s", "--server"):
dnsserver = a
elif o in ("-p", "--port"):
dst_port = int(a)
elif o in ("-q", "--quiet"):
quiet = True
verbose = False
elif o in ("-w", "--wait"):
timeout = int(a)
elif o in ("-t", "--type"):
dnsrecord = a
elif o in ("-P", "--srcport"):
src_port = int(a)
if src_port < 1024:
print("WARNING: Source ports below 1024 are only available to superuser")
elif o in ("-S", "--srcip"):
src_ip = a
else:
usage()
# check if we have a valid dns server address
try:
ipaddress.ip_address(dnsserver)
except ValueError: # so it is not a valid IPv4 or IPv6 address, so try to resolve host name
try:
dnsserver = socket.getaddrinfo(dnsserver, port=None)[1][4][0]
except __HOLE__:
print('Error: cannot resolve hostname:', dnsserver)
exit(1)
resolver = dns.resolver.Resolver()
resolver.nameservers = [dnsserver]
resolver.timeout = timeout
resolver.lifetime = timeout
resolver.port = dst_port
resolver.retry_servfail = 0
response_time = []
i = 0
print("%s DNS: %s:%d, hostname: %s, rdatatype: %s" % (__PROGNAME__, dnsserver, dst_port, hostname, dnsrecord))
for i in range(count):
if should_stop:
break
try:
stime = time.time()
answers = resolver.query(hostname, dnsrecord, source_port=src_port, source=src_ip)
etime = time.time()
except dns.resolver.NoNameservers as e:
if not quiet:
print("No response to dns request")
if verbose:
print("error:", e)
exit(1)
except dns.resolver.NXDOMAIN as e:
if not quiet:
print("Hostname does not exist")
if verbose:
print("Error:", e)
exit(1)
except dns.resolver.Timeout:
if not quiet:
print("Request timeout")
pass
except dns.resolver.NoAnswer:
if not quiet:
print("No answer")
pass
else:
elapsed = (etime - stime) * 1000 # convert to milliseconds
response_time.append(elapsed)
if not quiet:
print(
"%d bytes from %s: seq=%-3d time=%3.3f ms" % (
len(str(answers.rrset)), dnsserver, i, elapsed))
if verbose:
print(answers.rrset)
r_sent = i + 1
r_received = len(response_time)
r_lost = r_sent - r_received
r_lost_percent = (100 * r_lost) / r_sent
if response_time:
r_min = min(response_time)
r_max = max(response_time)
r_avg = sum(response_time) / r_received
r_stddev = stdev(response_time)
else:
r_min = 0
r_max = 0
r_avg = 0
r_stddev = 0
print('\n--- %s dnsping statistics ---' % dnsserver)
print('%d requests transmitted, %d responses received, %3.0f%% lost' % (r_sent, r_received, r_lost_percent))
print('min=%3.3f ms, avg=%3.3f ms, max=%3.3f ms, stddev=%3.3f ms' % (r_min, r_avg, r_max, r_stddev))
|
OSError
|
dataset/ETHPy150Open farrokhi/dnstools/dnsping.py/main
|
221
|
def get_caller(skip=0, get_dump=False):
# this whole thing fails if we're not in a valid directory
try:
cwd = os.getcwd()
except __HOLE__ as e:
return '~could not get caller (current directory not valid)~'
stack = inspect.stack(3)
if len(inspect.stack()) < 3 + skip:
return 'top'
output = None
last_caller = None
for record in stack[2 + skip:]:
caller = record[3]
frame = record[0]
try:
if 'self' in frame.f_locals:
try:
output = '%s->%s' % (frame.f_locals['self'].name, caller)
except Exception as e:
pass
finally:
if output is None:
output = '%s->%s' % (
frame.f_locals['self'].__class__.__name__, caller)
else:
last_caller = caller
if get_dump:
output = output + "\n" + \
"\t".join(dump(frame.f_locals['self'], 'self'))
except Exception as e:
pass
if output is not None:
return output
if output is None:
return last_caller
|
OSError
|
dataset/ETHPy150Open mono/bockbuild/bockbuild/util/util.py/get_caller
|
222
|
def delete(path):
trace('deleting %s' % path)
if not os.path.isabs(path):
raise BockbuildException('Relative paths are not allowed: %s' % path)
if not os.path.lexists(path):
raise CommandException('Invalid path to rm: %s' % path)
if os.getcwd() == path:
raise BockbuildException(
'Will not delete current directory: %s' % path)
# get the dir out of the way so that we don't have to deal with
# inconsistent state if we fail
if os.path.isfile(path):
os.remove(path)
return
# directory removal
if os.path.islink(path):
os.unlink(path)
return
orig_path = path
unprotect_dir(path, recursive=True)
path = path + '.deleting'
if os.path.exists(path):
delete(path)
shutil.move(orig_path, path)
for x in range(1, 5):
try:
if os.path.isfile(path) or os.path.islink(path):
os.remove(path)
elif os.path.isdir(path):
shutil.rmtree(path, ignore_errors=False)
except __HOLE__ as e:
pass
finally:
if not os.path.exists(path):
break
warn('retrying delete of %s' % path)
# try to sabotage whoever else is writing in the directory...
protect_dir(path, recursive=True)
time.sleep(1)
unprotect_dir(path, recursive=True)
if os.path.exists(path):
error('Deleting failed: %s' % orig_path)
|
OSError
|
dataset/ETHPy150Open mono/bockbuild/bockbuild/util/util.py/delete
|
223
|
def is_running(pid):
    try:
        kill(pid, 0)
    except __HOLE__ as error:
        if error.errno == ESRCH:
            return False
    return True
|
OSError
|
dataset/ETHPy150Open circuits/circuits/tests/app/test_daemon.py/is_running
|
224
|
def test_all_instantiable(self):
    """Test if all the TVTK classes can be instantiated"""
    errors = []
    for name in self.names:
        klass = getattr(vtk, name)
        tvtk_name = get_tvtk_name(name)
        tvtk_klass = getattr(tvtk, tvtk_name, None)
        if hasattr(klass, '__bases__') and tvtk_klass is not None:
            try:
                klass()
            except (__HOLE__, NotImplementedError):
                # These classes are abstract and can't/shouldn't
                # be instantiated.
                pass
            else:
                try:
                    tvtk_klass()
                except TraitError:
                    errors.append(traceback.format_exc())
    if len(errors) > 0:
        message = "Not all classes could be instantiated:\n{0}\n"
        raise AssertionError(message.format(''.join(errors)))
|
TypeError
|
dataset/ETHPy150Open enthought/mayavi/tvtk/tests/test_tvtk.py/TestTVTKModule.test_all_instantiable
|
225
|
def latest(name,
rev='HEAD',
target=None,
branch=None,
user=None,
update_head=True,
force_checkout=False,
force_clone=False,
force_fetch=False,
force_reset=False,
submodules=False,
bare=False,
mirror=False,
remote='origin',
fetch_tags=True,
depth=None,
identity=None,
https_user=None,
https_pass=None,
onlyif=False,
unless=False,
**kwargs):
'''
Make sure the repository is cloned to the given directory and is
up-to-date.
name
Address of the remote repository as passed to "git clone"
rev : HEAD
The remote branch, tag, or revision ID to checkout after clone / before
update. If specified, then Salt will also ensure that the tracking
branch is set to ``<remote>/<rev>``, unless ``rev`` refers to a tag or
SHA1, in which case Salt will ensure that the tracking branch is unset.
If ``rev`` is not specified, it will be assumed to be ``HEAD``, and
Salt will not manage the tracking branch at all.
.. versionchanged:: 2015.8.0
If not specified, ``rev`` now defaults to the remote repository's
HEAD.
target
Name of the target directory where repository is about to be cloned
branch
Name of the branch into which to checkout the specified rev. If not
specified, then Salt will not care what branch is being used locally
and will just use whatever branch is currently there.
.. note::
If not specified, this means that the local branch name will not be
changed if the repository is reset to another branch/tag/SHA1.
.. versionadded:: 2015.8.0
user
User under which to run git commands. By default, commands are run by
the user under which the minion is running.
.. versionadded:: 0.17.0
update_head : True
If set to ``False``, then the remote repository will be fetched (if
necessary) to ensure that the commit to which ``rev`` points exists in
the local checkout, but no changes will be made to the local HEAD.
.. versionadded:: 2015.8.3
force : False
.. deprecated:: 2015.8.0
Use ``force_clone`` instead. For earlier Salt versions, ``force``
must be used.
force_checkout : False
When checking out the local branch, the state will fail if there are
unwritten changes. Set this argument to ``True`` to discard unwritten
changes when checking out.
force_clone : False
If the ``target`` directory exists and is not a git repository, then
this state will fail. Set this argument to ``True`` to remove the
contents of the target directory and clone the repo into it.
force_fetch : False
If a fetch needs to be performed, non-fast-forward fetches will cause
this state to fail. Set this argument to ``True`` to force the fetch
even if it is a non-fast-forward update.
.. versionadded:: 2015.8.0
force_reset : False
If the update is not a fast-forward, this state will fail. Set this
argument to ``True`` to force a hard-reset to the remote revision in
these cases.
submodules : False
Update submodules on clone or branch change
bare : False
Set to ``True`` if the repository is to be a bare clone of the remote
repository.
.. note:
Setting this option to ``True`` is incompatible with the ``rev``
argument.
mirror
Set to ``True`` if the repository is to be a mirror of the remote
repository. This implies that ``bare`` set to ``True``, and thus is
incompatible with ``rev``.
remote : origin
Git remote to use. If this state needs to clone the repo, it will clone
it using this value as the initial remote name. If the repository
already exists, and a remote by this name is not present, one will be
added.
remote_name
.. deprecated:: 2015.8.0
Use ``remote`` instead. For earlier Salt versions, ``remote_name``
must be used.
fetch_tags : True
If ``True``, then when a fetch is performed all tags will be fetched,
even those which are not reachable by any branch on the remote.
depth
Defines depth in history when git a clone is needed in order to ensure
latest. E.g. ``depth: 1`` is useful when deploying from a repository
with a long history. Use rev to specify branch. This is not compatible
with tags or revision IDs.
identity
Path to a private key to use for ssh URLs. This can be either a single
string, or a list of strings. For example:
.. code-block:: yaml
# Single key
git@github.com:user/repo.git:
git.latest:
- user: deployer
- identity: /home/deployer/.ssh/id_rsa
# Two keys
git@github.com:user/repo.git:
git.latest:
- user: deployer
- identity:
- /home/deployer/.ssh/id_rsa
- /home/deployer/.ssh/id_rsa_alternate
If multiple keys are specified, they will be tried one-by-one in order
for each git command which needs to authenticate.
.. warning::
Unless Salt is invoked from the minion using ``salt-call``, the
key(s) must be passphraseless. For greater security with
passphraseless private keys, see the `sshd(8)`_ manpage for
information on securing the keypair from the remote side in the
``authorized_keys`` file.
.. _`sshd(8)`: http://www.man7.org/linux/man-pages/man8/sshd.8.html#AUTHORIZED_KEYS_FILE%20FORMAT
.. versionchanged:: 2015.8.7
Salt will no longer attempt to use passphrase-protected keys unless
invoked from the minion using ``salt-call``, to prevent blocking
waiting for user input.
Key can be specified as a SaltStack file server URL, eg. salt://location/identity_file
.. versionadded:: 2016.3.0
https_user
HTTP Basic Auth username for HTTPS (only) clones
.. versionadded:: 2015.5.0
https_pass
HTTP Basic Auth password for HTTPS (only) clones
.. versionadded:: 2015.5.0
onlyif
A command to run as a check, run the named command only if the command
passed to the ``onlyif`` option returns true
unless
A command to run as a check, only run the named command if the command
passed to the ``unless`` option returns false
.. note::
Clashing ID declarations can be avoided when including different
branches from the same git repository in the same sls file by using the
``name`` declaration. The example below checks out the ``gh-pages``
and ``gh-pages-prod`` branches from the same repository into separate
directories. The example also sets up the ``ssh_known_hosts`` ssh key
required to perform the git checkout.
.. code-block:: yaml
gitlab.example.com:
ssh_known_hosts:
- present
- user: root
- enc: ecdsa
- fingerprint: 4e:94:b0:54:c1:5b:29:a2:70:0e:e1:a3:51:ee:ee:e3
git-website-staging:
git.latest:
- name: ssh://git@gitlab.example.com:user/website.git
- rev: gh-pages
- target: /usr/share/nginx/staging
- identity: /root/.ssh/website_id_rsa
- require:
- pkg: git
- ssh_known_hosts: gitlab.example.com
git-website-staging:
git.latest:
- name: ssh://git@gitlab.example.com:user/website.git
- rev: gh-pages
- target: /usr/share/nginx/staging
- identity: salt://website/id_rsa
- require:
- pkg: git
- ssh_known_hosts: gitlab.example.com
.. versionadded:: 2016.3.0
git-website-prod:
git.latest:
- name: ssh://git@gitlab.example.com:user/website.git
- rev: gh-pages-prod
- target: /usr/share/nginx/prod
- identity: /root/.ssh/website_id_rsa
- require:
- pkg: git
- ssh_known_hosts: gitlab.example.com
'''
ret = {'name': name, 'result': True, 'comment': '', 'changes': {}}
kwargs = salt.utils.clean_kwargs(**kwargs)
always_fetch = kwargs.pop('always_fetch', False)
force = kwargs.pop('force', False)
remote_name = kwargs.pop('remote_name', False)
if kwargs:
return _fail(
ret,
salt.utils.invalid_kwargs(kwargs, raise_exc=False)
)
if always_fetch:
salt.utils.warn_until(
'Nitrogen',
'The \'always_fetch\' argument to the git.latest state no longer '
'has any effect, see the 2015.8.0 release notes for details.'
)
if force:
salt.utils.warn_until(
'Nitrogen',
'The \'force\' argument to the git.latest state has been '
'deprecated, please use \'force_clone\' instead.'
)
force_clone = force
if remote_name:
salt.utils.warn_until(
'Nitrogen',
'The \'remote_name\' argument to the git.latest state has been '
'deprecated, please use \'remote\' instead.'
)
remote = remote_name
if not remote:
return _fail(ret, '\'remote\' argument is required')
if not target:
return _fail(ret, '\'target\' argument is required')
if not rev:
return _fail(
ret,
'\'{0}\' is not a valid value for the \'rev\' argument'.format(rev)
)
# Ensure that certain arguments are strings to ensure that comparisons work
if not isinstance(rev, six.string_types):
rev = str(rev)
if target is not None:
if not isinstance(target, six.string_types):
target = str(target)
if not os.path.isabs(target):
return _fail(
ret,
'target \'{0}\' is not an absolute path'.format(target)
)
if branch is not None and not isinstance(branch, six.string_types):
branch = str(branch)
if user is not None and not isinstance(user, six.string_types):
user = str(user)
if remote is not None and not isinstance(remote, six.string_types):
remote = str(remote)
if identity is not None:
if isinstance(identity, six.string_types):
identity = [identity]
elif not isinstance(identity, list):
return _fail(ret, 'identity must be either a list or a string')
for ident_path in identity:
if 'salt://' in ident_path:
try:
ident_path = __salt__['cp.cache_file'](ident_path)
except IOError as exc:
log.error(
'Failed to cache {0}: {1}'.format(ident_path, exc)
)
return _fail(
ret,
'identity \'{0}\' does not exist.'.format(
ident_path
)
)
if not os.path.isabs(ident_path):
return _fail(
ret,
'identity \'{0}\' is not an absolute path'.format(
ident_path
)
)
if https_user is not None and not isinstance(https_user, six.string_types):
https_user = str(https_user)
if https_pass is not None and not isinstance(https_pass, six.string_types):
https_pass = str(https_pass)
if os.path.isfile(target):
return _fail(
ret,
'Target \'{0}\' exists and is a regular file, cannot proceed'
.format(target)
)
try:
desired_fetch_url = salt.utils.url.add_http_basic_auth(
name,
https_user,
https_pass,
https_only=True
)
except ValueError as exc:
return _fail(ret, exc.__str__())
redacted_fetch_url = \
salt.utils.url.redact_http_basic_auth(desired_fetch_url)
if mirror:
bare = True
# Check to make sure rev and mirror/bare are not both in use
if rev != 'HEAD' and bare:
return _fail(ret, ('\'rev\' is not compatible with the \'mirror\' and '
'\'bare\' arguments'))
run_check_cmd_kwargs = {'runas': user}
if 'shell' in __grains__:
run_check_cmd_kwargs['shell'] = __grains__['shell']
# check if git.latest should be applied
cret = mod_run_check(
run_check_cmd_kwargs, onlyif, unless
)
if isinstance(cret, dict):
ret.update(cret)
return ret
refspecs = [
'refs/heads/*:refs/remotes/{0}/*'.format(remote),
'+refs/tags/*:refs/tags/*'
] if fetch_tags else []
log.info('Checking remote revision for {0}'.format(name))
try:
all_remote_refs = __salt__['git.remote_refs'](
name,
heads=False,
tags=False,
user=user,
identity=identity,
https_user=https_user,
https_pass=https_pass,
ignore_retcode=False)
except CommandExecutionError as exc:
return _fail(
ret,
'Failed to check remote refs: {0}'.format(_strip_exc(exc))
)
if bare:
remote_rev = None
remote_rev_type = None
else:
if rev == 'HEAD':
if 'HEAD' in all_remote_refs:
remote_rev = all_remote_refs['HEAD']
# Just go with whatever the upstream currently is
desired_upstream = None
remote_rev_type = 'sha1'
else:
# Empty remote repo
remote_rev = None
remote_rev_type = None
elif 'refs/heads/' + rev in all_remote_refs:
remote_rev = all_remote_refs['refs/heads/' + rev]
desired_upstream = '/'.join((remote, rev))
remote_rev_type = 'branch'
elif 'refs/tags/' + rev + '^{}' in all_remote_refs:
# Annotated tag
remote_rev = all_remote_refs['refs/tags/' + rev + '^{}']
desired_upstream = False
remote_rev_type = 'tag'
elif 'refs/tags/' + rev in all_remote_refs:
# Non-annotated tag
remote_rev = all_remote_refs['refs/tags/' + rev]
desired_upstream = False
remote_rev_type = 'tag'
else:
if len(rev) <= 40 \
and all(x in string.hexdigits for x in rev):
# git ls-remote did not find the rev, and because it's a
# hex string <= 40 chars we're going to assume that the
# desired rev is a SHA1
rev = rev.lower()
remote_rev = rev
desired_upstream = False
remote_rev_type = 'sha1'
else:
remote_rev = None
remote_rev_type = None
# For the comment field of the state return dict, the remote location
# (and short-sha1, if rev is not a sha1) is referenced several times,
# determine it once here and reuse the value below.
if remote_rev_type == 'sha1':
if rev == 'HEAD':
remote_loc = 'remote HEAD (' + remote_rev[:7] + ')'
else:
remote_loc = remote_rev[:7]
elif remote_rev is not None:
remote_loc = '{0} ({1})'.format(
desired_upstream if remote_rev_type == 'branch' else rev,
remote_rev[:7]
)
else:
# Shouldn't happen but log a warning here for future
# troubleshooting purposes in the event we find a corner case.
log.warning(
'Unable to determine remote_loc. rev is %s, remote_rev is '
'%s, remove_rev_type is %s, desired_upstream is %s, and bare '
'is%s set',
rev,
remote_rev,
remote_rev_type,
desired_upstream,
' not' if not bare else ''
)
remote_loc = None
if remote_rev is None and not bare:
if rev != 'HEAD':
# A specific rev is desired, but that rev doesn't exist on the
# remote repo.
return _fail(
ret,
'No revision matching \'{0}\' exists in the remote '
'repository'.format(rev)
)
git_ver = _LooseVersion(__salt__['git.version'](versioninfo=False))
check = 'refs' if bare else '.git'
gitdir = os.path.join(target, check)
comments = []
if os.path.isdir(gitdir) or __salt__['git.is_worktree'](target):
# Target directory is a git repository or git worktree
try:
all_local_branches = __salt__['git.list_branches'](
target, user=user)
all_local_tags = __salt__['git.list_tags'](target, user=user)
local_rev, local_branch = _get_local_rev_and_branch(target, user)
if not bare and remote_rev is None and local_rev is not None:
return _fail(
ret,
'Remote repository is empty, cannot update from a '
'non-empty to an empty repository'
)
# Base rev and branch are the ones from which any reset or merge
# will take place. If the branch is not being specified, the base
# will be the "local" rev and branch, i.e. those we began with
# before this state was run. If a branch is being specified and it
# both exists and is not the one with which we started, then we'll
# be checking that branch out first, and it instead becomes our
# base. The base branch and rev will be used below in comparisons
# to determine what changes to make.
base_rev = local_rev
base_branch = local_branch
if _need_branch_change(branch, local_branch):
if branch not in all_local_branches:
# We're checking out a new branch, so the base_rev and
# remote_rev will be identical.
base_rev = remote_rev
else:
base_branch = branch
# Desired branch exists locally and is not the current
# branch. We'll be performing a checkout to that branch
# eventually, but before we do that we need to find the
# current SHA1.
try:
base_rev = __salt__['git.rev_parse'](
target,
branch + '^{commit}',
user=user,
ignore_retcode=True)
except CommandExecutionError as exc:
return _fail(
ret,
'Unable to get position of local branch \'{0}\': '
'{1}'.format(branch, _strip_exc(exc)),
comments
)
remotes = __salt__['git.remotes'](target,
user=user,
redact_auth=False)
revs_match = _revs_equal(local_rev, remote_rev, remote_rev_type)
if remote_rev_type == 'sha1' \
and base_rev is not None \
and base_rev.startswith(remote_rev):
# Either we're already checked out to the branch we need and it
# is up-to-date, or the branch to which we need to switch is
# on the same SHA1 as the desired remote revision. Either way,
# we know we have the remote rev present already and no fetch
# will be needed.
has_remote_rev = True
else:
has_remote_rev = False
if remote_rev is not None:
try:
__salt__['git.rev_parse'](
target,
remote_rev + '^{commit}',
ignore_retcode=True)
except CommandExecutionError:
# Local checkout doesn't have the remote_rev
pass
else:
# The object might exist enough to get a rev-parse to
# work, while the local ref could have been
# deleted/changed/force updated. Do some further sanity
# checks to determine if we really do have the
# remote_rev.
if remote_rev_type == 'branch':
if remote in remotes:
try:
# Do a rev-parse on <remote>/<rev> to get
# the local SHA1 for it, so we can compare
# it to the remote_rev SHA1.
local_copy = __salt__['git.rev_parse'](
target,
desired_upstream,
user=user,
ignore_retcode=True)
except CommandExecutionError:
pass
else:
# If the SHA1s don't match, then the remote
# branch was force-updated, and we need to
# fetch to update our local copy the ref
# for the remote branch. If they do match,
# then we have the remote_rev and don't
# need to fetch.
if local_copy == remote_rev:
has_remote_rev = True
elif remote_rev_type == 'tag':
if rev in all_local_tags:
try:
local_tag_sha1 = __salt__['git.rev_parse'](
target,
rev + '^{commit}',
user=user,
ignore_retcode=True)
except CommandExecutionError:
# Shouldn't happen if the tag exists
# locally but account for this just in
# case.
local_tag_sha1 = None
if local_tag_sha1 == remote_rev:
has_remote_rev = True
else:
if not force_reset:
# SHA1 of tag on remote repo is
# different than local tag. Unless
# we're doing a hard reset then we
# don't need to proceed as we know that
# the fetch will update the tag and the
# only way to make the state succeed is
# to reset the branch to point at the
# tag's new location.
return _fail(
ret,
'\'{0}\' is a tag, but the remote '
'SHA1 for this tag ({1}) doesn\'t '
'match the local SHA1 ({2}). Set '
'\'force_reset\' to True to force '
'this update.'.format(
rev,
_short_sha(remote_rev),
_short_sha(local_tag_sha1)
)
)
elif remote_rev_type == 'sha1':
has_remote_rev = True
if not has_remote_rev:
# Either the remote rev could not be found with git
# ls-remote (in which case we won't know more until
# fetching) or we're going to be checking out a new branch
# and don't have to worry about fast-forwarding.
fast_forward = None
else:
# Remote rev already present
if (not revs_match and not update_head) \
and (branch is None or branch == local_branch):
ret['comment'] = remote_loc.capitalize() \
if rev == 'HEAD' \
else remote_loc
ret['comment'] += (
' is already present and local HEAD ({0}) does not '
'match, but update_head=False. HEAD has not been '
'updated locally.'.format(local_rev[:7])
)
return ret
if base_rev is None:
# If we're here, the remote_rev exists in the local
# checkout but there is still no HEAD locally. A possible
# reason for this is that an empty repository existed there
# and a remote was added and fetched, but the repository
# was not fast-forwarded. Regardless, going from no HEAD to
# a locally-present rev is considered a fast-forward update.
fast_forward = True
else:
fast_forward = __salt__['git.merge_base'](
target,
refs=[base_rev, remote_rev],
is_ancestor=True,
user=user,
ignore_retcode=True)
if fast_forward is False:
if not force_reset:
return _not_fast_forward(
ret,
base_rev,
remote_rev,
branch,
local_branch,
comments)
merge_action = 'hard-reset'
elif fast_forward is True:
merge_action = 'fast-forwarded'
else:
merge_action = 'updated'
if base_branch is None:
# No local branch, no upstream tracking branch
upstream = None
else:
try:
upstream = __salt__['git.rev_parse'](
target,
base_branch + '@{upstream}',
opts=['--abbrev-ref'],
user=user,
ignore_retcode=True)
except CommandExecutionError:
# There is a local branch but the rev-parse command
# failed, so that means there is no upstream tracking
# branch. This could be because it is just not set, or
# because the branch was checked out to a SHA1 or tag
# instead of a branch. Set upstream to False to make a
# distinction between the case above where there is no
# local_branch (when the local checkout is an empty
# repository).
upstream = False
if remote in remotes:
fetch_url = remotes[remote]['fetch']
else:
log.debug(
'Remote \'{0}\' not found in git checkout at {1}'
.format(remote, target)
)
fetch_url = None
if remote_rev is not None and desired_fetch_url != fetch_url:
if __opts__['test']:
actions = [
'Remote \'{0}\' would be changed from {1} to {2}'
.format(
remote,
salt.utils.url.redact_http_basic_auth(fetch_url),
redacted_fetch_url
)
]
if not has_remote_rev:
actions.append('Remote would be fetched')
if not revs_match:
if update_head:
ret['changes']['revision'] = {
'old': local_rev, 'new': remote_rev
}
if fast_forward is False:
ret['changes']['forced update'] = True
actions.append(
'Repository would be {0} to {1}'.format(
merge_action,
_short_sha(remote_rev)
)
)
if ret['changes']:
return _neutral_test(ret, _format_comments(actions))
else:
if not revs_match and not update_head:
# Repo content would not be modified but the remote
# URL would be modified, so we can't just say that
# the repo is up-to-date, we need to inform the
# user of the actions taken.
ret['comment'] = _format_comments(actions)
return ret
return _uptodate(ret,
target,
_format_comments(actions))
# The fetch_url for the desired remote does not match the
# specified URL (or the remote does not exist), so set the
# remote URL.
__salt__['git.remote_set'](target,
url=name,
remote=remote,
user=user,
https_user=https_user,
https_pass=https_pass)
comments.append(
'Remote \'{0}\' changed from {1} to {2}'.format(
remote,
salt.utils.url.redact_http_basic_auth(fetch_url),
redacted_fetch_url
)
)
if remote_rev is not None:
if __opts__['test']:
actions = []
if not has_remote_rev:
actions.append(
'Remote \'{0}\' would be fetched'.format(remote)
)
if (not revs_match) \
and (update_head or (branch is not None
and branch != local_branch)):
ret['changes']['revision'] = {
'old': local_rev, 'new': remote_rev
}
if _need_branch_change(branch, local_branch):
if branch not in all_local_branches:
actions.append(
'New branch \'{0}\' would be checked '
'out, with {1} as a starting '
'point'.format(branch, remote_loc)
)
if desired_upstream:
actions.append(
'Tracking branch would be set to {0}'
.format(desired_upstream)
)
else:
actions.append(
'Branch \'{0}\' would be checked out '
'and {1} to {2}'.format(
branch,
merge_action,
_short_sha(remote_rev)
)
)
else:
if not revs_match:
if update_head:
if fast_forward is True:
actions.append(
'Repository would be fast-forwarded from '
'{0} to {1}'.format(
_short_sha(local_rev),
_short_sha(remote_rev)
)
)
else:
actions.append(
'Repository would be {0} from {1} to {2}'
.format(
'hard-reset'
if force_reset and has_remote_rev
else 'updated',
_short_sha(local_rev),
_short_sha(remote_rev)
)
)
else:
actions.append(
'Local HEAD ({0}) does not match {1} but '
'update_head=False, HEAD would not be '
'updated locally'.format(
local_rev[:7],
remote_loc
)
)
# Check if upstream needs changing
if not upstream and desired_upstream:
actions.append(
'Tracking branch would be set to {0}'.format(
desired_upstream
)
)
elif upstream and desired_upstream is False:
actions.append(
'Tracking branch would be unset'
)
elif desired_upstream and upstream != desired_upstream:
actions.append(
'Tracking branch would be '
'updated to {0}'.format(desired_upstream)
)
if ret['changes']:
return _neutral_test(ret, _format_comments(actions))
else:
formatted_actions = _format_comments(actions)
if not revs_match \
and not update_head \
and formatted_actions:
ret['comment'] = formatted_actions
return ret
return _uptodate(ret,
target,
_format_comments(actions))
if not upstream and desired_upstream:
upstream_action = (
'Tracking branch was set to {0}'.format(
desired_upstream
)
)
branch_opts = _get_branch_opts(
branch,
local_branch,
all_local_branches,
desired_upstream,
git_ver)
elif upstream and desired_upstream is False:
# If the remote_rev is a tag or SHA1, and there is an
# upstream tracking branch, we will unset it. However, we
# can only do this if the git version is 1.8.0 or newer, as
# the --unset-upstream option was not added until that
# version.
if git_ver >= _LooseVersion('1.8.0'):
upstream_action = 'Tracking branch was unset'
branch_opts = ['--unset-upstream']
else:
branch_opts = None
elif desired_upstream and upstream != desired_upstream:
upstream_action = (
'Tracking branch was updated to {0}'.format(
desired_upstream
)
)
branch_opts = _get_branch_opts(
branch,
local_branch,
all_local_branches,
desired_upstream,
git_ver)
else:
branch_opts = None
if not has_remote_rev:
try:
fetch_changes = __salt__['git.fetch'](
target,
remote=remote,
force=force_fetch,
refspecs=refspecs,
user=user,
identity=identity)
except CommandExecutionError as exc:
return _failed_fetch(ret, exc, comments)
else:
if fetch_changes:
comments.append(
'{0} was fetched, resulting in updated '
'refs'.format(name)
)
try:
__salt__['git.rev_parse'](
target,
remote_rev + '^{commit}',
ignore_retcode=True)
except CommandExecutionError as exc:
return _fail(
ret,
'Fetch did not successfully retrieve rev \'{0}\' '
'from {1}: {2}'.format(rev, name, exc)
)
if (not revs_match and not update_head) \
and (branch is None or branch == local_branch):
# Rev now exists locally (was fetched), and since we're
# not updating HEAD we'll just exit here.
ret['comment'] = remote_loc.capitalize() \
if rev == 'HEAD' \
else remote_loc
ret['comment'] += (
' is already present and local HEAD ({0}) does not '
'match, but update_head=False. HEAD has not been '
'updated locally.'.format(local_rev[:7])
)
return ret
# Now that we've fetched, check again whether or not
# the update is a fast-forward.
if base_rev is None:
fast_forward = True
else:
fast_forward = __salt__['git.merge_base'](
target,
refs=[base_rev, remote_rev],
is_ancestor=True,
user=user)
if fast_forward is False and not force_reset:
return _not_fast_forward(
ret,
base_rev,
remote_rev,
branch,
local_branch,
comments)
if _need_branch_change(branch, local_branch):
local_changes = __salt__['git.status'](target,
user=user)
if local_changes and not force_checkout:
return _fail(
ret,
'Local branch \'{0}\' has uncommitted '
'changes. Set \'force_checkout\' to True to '
'discard them and proceed.'.format(local_branch)
)
# TODO: Maybe re-retrieve all_local_branches to handle
# the corner case where the destination branch was
# added to the local checkout during a fetch that takes
# a long time to complete.
if branch not in all_local_branches:
if rev == 'HEAD':
checkout_rev = remote_rev
else:
checkout_rev = desired_upstream \
if desired_upstream \
else rev
checkout_opts = ['-b', branch]
else:
checkout_rev = branch
checkout_opts = []
__salt__['git.checkout'](target,
checkout_rev,
force=force_checkout,
opts=checkout_opts,
user=user)
if '-b' in checkout_opts:
comments.append(
'New branch \'{0}\' was checked out, with {1} '
'as a starting point'.format(
branch,
remote_loc
)
)
else:
comments.append(
'\'{0}\' was checked out'.format(checkout_rev)
)
if fast_forward is False:
__salt__['git.reset'](
target,
opts=['--hard', remote_rev],
user=user
)
ret['changes']['forced update'] = True
comments.append(
'Repository was hard-reset to {0}'.format(remote_loc)
)
if branch_opts is not None:
__salt__['git.branch'](
target,
opts=branch_opts,
user=user)
comments.append(upstream_action)
# Fast-forward to the desired revision
if fast_forward is True \
and not _revs_equal(base_rev,
remote_rev,
remote_rev_type):
if desired_upstream or rev == 'HEAD':
# Check first to see if we are on a branch before
# trying to merge changes. (The call to
# git.symbolic_ref will only return output if HEAD
# points to a branch.)
if __salt__['git.symbolic_ref'](target,
'HEAD',
opts=['--quiet'],
ignore_retcode=True):
merge_rev = remote_rev if rev == 'HEAD' \
else desired_upstream
if git_ver >= _LooseVersion('1.8.1.6'):
# --ff-only added in version 1.8.1.6. It's not
# 100% necessary, but if we can use it, we'll
# ensure that the merge doesn't go through if
# not a fast-forward. Granted, the logic that
# gets us to this point shouldn't allow us to
# attempt this merge if it's not a
# fast-forward, but it's an extra layer of
# protection.
merge_opts = ['--ff-only']
else:
merge_opts = []
__salt__['git.merge'](
target,
rev=merge_rev,
opts=merge_opts,
user=user
)
comments.append(
'Repository was fast-forwarded to {0}'
.format(remote_loc)
)
else:
return _fail(
ret,
'Unable to fast-forward, HEAD is detached',
comments
)
else:
# Update is a fast forward, but we cannot merge to that
# commit so we'll reset to it.
__salt__['git.reset'](
target,
opts=['--hard',
remote_rev if rev == 'HEAD' else rev],
user=user
)
comments.append(
'Repository was reset to {0} (fast-forward)'
.format(rev)
)
# TODO: Figure out how to add submodule update info to
# test=True return data, and changes dict.
if submodules:
try:
__salt__['git.submodule'](
target,
'update',
opts=['--init', '--recursive'],
user=user,
identity=identity)
except CommandExecutionError as exc:
return _failed_submodule_update(ret, exc, comments)
elif bare:
if __opts__['test']:
msg = (
'Bare repository at {0} would be fetched'
.format(target)
)
if ret['changes']:
return _neutral_test(ret, msg)
else:
return _uptodate(ret, target, msg)
try:
fetch_changes = __salt__['git.fetch'](
target,
remote=remote,
force=force_fetch,
refspecs=refspecs,
user=user,
identity=identity)
except CommandExecutionError as exc:
return _failed_fetch(ret, exc, comments)
else:
comments.append(
'Bare repository at {0} was fetched{1}'.format(
target,
', resulting in updated refs'
if fetch_changes
else ''
)
)
try:
new_rev = __salt__['git.revision'](
cwd=target,
user=user,
ignore_retcode=True)
except CommandExecutionError:
new_rev = None
except Exception as exc:
log.error(
'Unexpected exception in git.latest state',
exc_info=True
)
if isinstance(exc, CommandExecutionError):
msg = _strip_exc(exc)
else:
msg = str(exc)
return _fail(ret, msg, comments)
if not bare and not _revs_equal(new_rev,
remote_rev,
remote_rev_type):
return _fail(ret, 'Failed to update repository', comments)
if local_rev != new_rev:
log.info(
'Repository {0} updated: {1} => {2}'.format(
target, local_rev, new_rev)
)
ret['comment'] = _format_comments(comments)
ret['changes']['revision'] = {'old': local_rev, 'new': new_rev}
else:
return _uptodate(ret, target, _format_comments(comments))
else:
if os.path.isdir(target):
if force_clone:
# Clone is required, and target directory exists, but the
# ``force`` option is enabled, so we need to clear out its
# contents to proceed.
if __opts__['test']:
ret['changes']['forced clone'] = True
ret['changes']['new'] = name + ' => ' + target
return _neutral_test(
ret,
'Target directory {0} exists. Since force_clone=True, '
'the contents of {0} would be deleted, and {1} would '
'be cloned into this directory.'.format(target, name)
)
log.debug(
'Removing contents of {0} to clone repository {1} in its '
'place (force_clone=True set in git.latest state)'
.format(target, name)
)
try:
if os.path.islink(target):
os.unlink(target)
else:
salt.utils.rm_rf(target)
except __HOLE__ as exc:
return _fail(
ret,
'Unable to remove {0}: {1}'.format(target, exc),
comments
)
else:
ret['changes']['forced clone'] = True
# Clone is required, but target dir exists and is non-empty. We
# can't proceed.
elif os.listdir(target):
return _fail(
ret,
'Target \'{0}\' exists, is non-empty and is not a git '
'repository. Set the \'force_clone\' option to True to '
'remove this directory\'s contents and proceed with '
'cloning the remote repository'.format(target)
)
log.debug(
'Target {0} is not found, \'git clone\' is required'.format(target)
)
if __opts__['test']:
ret['changes']['new'] = name + ' => ' + target
return _neutral_test(
ret,
'Repository {0} would be cloned to {1}'.format(
name, target
)
)
try:
clone_opts = ['--mirror'] if mirror else ['--bare'] if bare else []
if remote != 'origin':
clone_opts.extend(['--origin', remote])
if depth is not None:
clone_opts.extend(['--depth', str(depth)])
# We're cloning a fresh repo, there is no local branch or revision
local_branch = local_rev = None
try:
__salt__['git.clone'](target,
name,
user=user,
opts=clone_opts,
identity=identity,
https_user=https_user,
https_pass=https_pass)
except CommandExecutionError as exc:
msg = 'Clone failed: {0}'.format(_strip_exc(exc))
return _fail(ret, msg, comments)
ret['changes']['new'] = name + ' => ' + target
comments.append(
'{0} cloned to {1}{2}'.format(
name,
target,
' as mirror' if mirror
else ' as bare repository' if bare
else ''
)
)
if not bare:
if not remote_rev:
if rev != 'HEAD':
# No HEAD means the remote repo is empty, which means
# our new clone will also be empty. This state has
# failed, since a rev was specified but no matching rev
# exists on the remote host.
msg = (
'{{0}} was cloned but is empty, so {0}/{1} '
'cannot be checked out'.format(remote, rev)
)
log.error(msg.format(name))
return _fail(ret, msg.format('Repository'), comments)
else:
if remote_rev_type == 'tag' \
and rev not in __salt__['git.list_tags'](
target, user=user):
return _fail(
ret,
'Revision \'{0}\' does not exist in clone'
.format(rev),
comments
)
if branch is not None:
if branch not in \
__salt__['git.list_branches'](target,
user=user):
if rev == 'HEAD':
checkout_rev = remote_rev
else:
checkout_rev = desired_upstream \
if desired_upstream \
else rev
__salt__['git.checkout'](target,
checkout_rev,
opts=['-b', branch],
user=user)
comments.append(
'Branch \'{0}\' checked out, with {1} '
'as a starting point'.format(
branch,
remote_loc
)
)
local_rev, local_branch = \
_get_local_rev_and_branch(target, user)
if not _revs_equal(local_rev, remote_rev, remote_rev_type):
__salt__['git.reset'](
target,
opts=['--hard', remote_rev],
user=user
)
comments.append(
'Repository was reset to {0}'.format(remote_loc)
)
try:
upstream = __salt__['git.rev_parse'](
target,
local_branch + '@{upstream}',
opts=['--abbrev-ref'],
user=user,
ignore_retcode=True)
except CommandExecutionError:
upstream = False
if not upstream and desired_upstream:
upstream_action = (
'Tracking branch was set to {0}'.format(
desired_upstream
)
)
branch_opts = _get_branch_opts(
branch,
local_branch,
__salt__['git.list_branches'](target, user=user),
desired_upstream,
git_ver)
elif upstream and desired_upstream is False:
# If the remote_rev is a tag or SHA1, and there is an
# upstream tracking branch, we will unset it. However,
# we can only do this if the git version is 1.8.0 or
# newer, as the --unset-upstream option was not added
# until that version.
if git_ver >= _LooseVersion('1.8.0'):
upstream_action = 'Tracking branch was unset'
branch_opts = ['--unset-upstream']
else:
branch_opts = None
elif desired_upstream and upstream != desired_upstream:
upstream_action = (
'Tracking branch was updated to {0}'.format(
desired_upstream
)
)
branch_opts = _get_branch_opts(
branch,
local_branch,
__salt__['git.list_branches'](target, user=user),
desired_upstream,
git_ver)
else:
branch_opts = None
if branch_opts is not None:
__salt__['git.branch'](
target,
opts=branch_opts,
user=user)
comments.append(upstream_action)
if submodules and remote_rev:
try:
__salt__['git.submodule'](target,
'update',
opts=['--init', '--recursive'],
user=user,
identity=identity)
except CommandExecutionError as exc:
return _failed_submodule_update(ret, exc, comments)
try:
new_rev = __salt__['git.revision'](
cwd=target,
user=user,
ignore_retcode=True)
except CommandExecutionError:
new_rev = None
except Exception as exc:
log.error(
'Unexpected exception in git.latest state',
exc_info=True
)
if isinstance(exc, CommandExecutionError):
msg = _strip_exc(exc)
else:
msg = str(exc)
return _fail(ret, msg, comments)
msg = _format_comments(comments)
log.info(msg)
ret['comment'] = msg
if new_rev is not None:
ret['changes']['revision'] = {'old': None, 'new': new_rev}
return ret
|
OSError
|
dataset/ETHPy150Open saltstack/salt/salt/states/git.py/latest
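Note: the fast-forward merge in the state above is gated on the installed git version via LooseVersion comparisons (aliased as _LooseVersion in the Salt source). A minimal standalone sketch of that gate, using only the standard library; the helper name is illustrative and not part of the original module.

# Sketch of the version gate used for the --ff-only merge option above.
from distutils.version import LooseVersion

def merge_opts_for(git_ver_str):
    """Return the merge options the state would use for a given git version."""
    return ['--ff-only'] if LooseVersion(git_ver_str) >= LooseVersion('1.8.1.6') else []

assert merge_opts_for('2.7.4') == ['--ff-only']
assert merge_opts_for('1.7.1') == []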
|
226
|
def present(name,
force=False,
bare=True,
template=None,
separate_git_dir=None,
shared=None,
user=None):
'''
Ensure that a repository exists in the given directory
.. warning::
If the minion has Git 2.5 or later installed, ``name`` points to a
worktree_, and ``force`` is set to ``True``, then the worktree will be
deleted. This has been corrected in Salt 2015.8.0.
name
Path to the directory
.. versionchanged:: 2015.8.0
This path must now be absolute
force : False
If ``True``, and if ``name`` points to an existing directory which does
not contain a git repository, then the contents of that directory will
be recursively removed and a new repository will be initialized in its
place.
bare : True
If ``True``, and a repository must be initialized, then the repository
will be a bare repository.
.. note::
This differs from the default behavior of :py:func:`git.init
<salt.modules.git.init>`, make sure to set this value to ``False``
if a bare repo is not desired.
template
If a new repository is initialized, this argument will specify an
alternate `template directory`_
.. versionadded:: 2015.8.0
separate_git_dir
If a new repository is initialized, this argument will specify an
alternate ``$GIT_DIR``
.. versionadded:: 2015.8.0
shared
Set sharing permissions on git repo. See `git-init(1)`_ for more
details.
.. versionadded:: 2015.5.0
user
User under which to run git commands. By default, commands are run by
the user under which the minion is running.
.. versionadded:: 0.17.0
.. _`git-init(1)`: http://git-scm.com/docs/git-init
.. _`worktree`: http://git-scm.com/docs/git-worktree
'''
ret = {'name': name, 'result': True, 'comment': '', 'changes': {}}
# If the named directory is a git repo return True
if os.path.isdir(name):
if bare and os.path.isfile(os.path.join(name, 'HEAD')):
return ret
elif not bare and \
(os.path.isdir(os.path.join(name, '.git')) or
__salt__['git.is_worktree'](name)):
return ret
# Directory exists and is not a git repo, if force is set destroy the
# directory and recreate, otherwise throw an error
elif force:
# Directory exists, and the ``force`` option is enabled, so we need
# to clear out its contents to proceed.
if __opts__['test']:
ret['changes']['new'] = name
ret['changes']['forced init'] = True
return _neutral_test(
ret,
'Target directory {0} exists. Since force=True, the '
'contents of {0} would be deleted, and a {1}repository '
'would be initialized in its place.'
.format(name, 'bare ' if bare else '')
)
log.debug(
'Removing contents of {0} to initialize {1}repository in its '
'place (force=True set in git.present state)'
.format(name, 'bare ' if bare else '')
)
try:
if os.path.islink(name):
os.unlink(name)
else:
salt.utils.rm_rf(name)
except __HOLE__ as exc:
return _fail(
ret,
'Unable to remove {0}: {1}'.format(name, exc)
)
else:
ret['changes']['forced init'] = True
elif os.listdir(name):
return _fail(
ret,
'Target \'{0}\' exists, is non-empty, and is not a git '
'repository. Set the \'force\' option to True to remove '
'this directory\'s contents and proceed with initializing a '
'repository'.format(name)
)
# Run test is set
if __opts__['test']:
ret['changes']['new'] = name
return _neutral_test(
ret,
'New {0}repository would be created'.format(
'bare ' if bare else ''
)
)
__salt__['git.init'](cwd=name,
bare=bare,
template=template,
separate_git_dir=separate_git_dir,
shared=shared,
user=user)
actions = [
'Initialized {0}repository in {1}'.format(
'bare ' if bare else '',
name
)
]
if template:
actions.append('Template directory set to {0}'.format(template))
if separate_git_dir:
actions.append('Gitdir set to {0}'.format(separate_git_dir))
message = '. '.join(actions)
if len(actions) > 1:
message += '.'
log.info(message)
ret['changes']['new'] = name
ret['comment'] = message
return ret
|
OSError
|
dataset/ETHPy150Open saltstack/salt/salt/states/git.py/present
|
227
|
def detached(name,
ref,
target=None,
remote='origin',
user=None,
force_clone=False,
force_checkout=False,
fetch_remote=True,
hard_reset=False,
submodules=False,
identity=None,
https_user=None,
https_pass=None,
onlyif=False,
unless=False,
**kwargs):
'''
.. versionadded:: 2016.3.0
Make sure a repository is cloned to the given target directory and is
a detached HEAD checkout of the commit ID resolved from ``ref``.
name
Address of the remote repository.
ref
The branch, tag, or commit ID to checkout after clone.
If a branch or tag is specified it will be resolved to a commit ID
and checked out.
target
        Name of the target directory into which the repository will be cloned.
remote : origin
Git remote to use. If this state needs to clone the repo, it will clone
it using this value as the initial remote name. If the repository
already exists, and a remote by this name is not present, one will be
added.
user
User under which to run git commands. By default, commands are run by
the user under which the minion is running.
force_clone : False
If the ``target`` directory exists and is not a git repository, then
this state will fail. Set this argument to ``True`` to remove the
contents of the target directory and clone the repo into it.
force_checkout : False
When checking out the revision ID, the state will fail if there are
unwritten changes. Set this argument to ``True`` to discard unwritten
changes when checking out.
fetch_remote : True
If ``False`` a fetch will not be performed and only local refs
will be reachable.
hard_reset : False
If ``True`` a hard reset will be performed before the checkout and any
uncommitted modifications to the working directory will be discarded.
Untracked files will remain in place.
.. note::
Changes resulting from a hard reset will not trigger requisites.
submodules : False
Update submodules
identity
        A path on the minion server to a private key to use over SSH.
        The key can be specified as a SaltStack file server URL,
        e.g. salt://location/identity_file
https_user
HTTP Basic Auth username for HTTPS (only) clones
https_pass
HTTP Basic Auth password for HTTPS (only) clones
onlyif
A command to run as a check, run the named command only if the command
passed to the ``onlyif`` option returns true
unless
A command to run as a check, only run the named command if the command
passed to the ``unless`` option returns false
'''
ret = {'name': name, 'result': True, 'comment': '', 'changes': {}}
kwargs = salt.utils.clean_kwargs(**kwargs)
if kwargs:
return _fail(
ret,
salt.utils.invalid_kwargs(kwargs, raise_exc=False)
)
if not ref:
return _fail(
ret,
'\'{0}\' is not a valid value for the \'ref\' argument'.format(ref)
)
if not target:
return _fail(
ret,
            '\'{0}\' is not a valid value for the \'target\' argument'.format(target)
)
# Ensure that certain arguments are strings to ensure that comparisons work
if not isinstance(ref, six.string_types):
ref = str(ref)
if target is not None:
if not isinstance(target, six.string_types):
target = str(target)
if not os.path.isabs(target):
return _fail(
ret,
'Target \'{0}\' is not an absolute path'.format(target)
)
if user is not None and not isinstance(user, six.string_types):
user = str(user)
if remote is not None and not isinstance(remote, six.string_types):
remote = str(remote)
if identity is not None:
if isinstance(identity, six.string_types):
identity = [identity]
elif not isinstance(identity, list):
return _fail(ret, 'Identity must be either a list or a string')
for ident_path in identity:
if 'salt://' in ident_path:
try:
ident_path = __salt__['cp.cache_file'](ident_path)
except IOError as exc:
log.error(
'Failed to cache {0}: {1}'.format(ident_path, exc)
)
return _fail(
ret,
'Identity \'{0}\' does not exist.'.format(
ident_path
)
)
if not os.path.isabs(ident_path):
return _fail(
ret,
'Identity \'{0}\' is not an absolute path'.format(
ident_path
)
)
if https_user is not None and not isinstance(https_user, six.string_types):
https_user = str(https_user)
if https_pass is not None and not isinstance(https_pass, six.string_types):
https_pass = str(https_pass)
if os.path.isfile(target):
return _fail(
ret,
'Target \'{0}\' exists and is a regular file, cannot proceed'
.format(target)
)
try:
desired_fetch_url = salt.utils.url.add_http_basic_auth(
name,
https_user,
https_pass,
https_only=True
)
except __HOLE__ as exc:
return _fail(ret, exc.__str__())
redacted_fetch_url = salt.utils.url.redact_http_basic_auth(desired_fetch_url)
# Check if onlyif or unless conditions match
run_check_cmd_kwargs = {'runas': user}
if 'shell' in __grains__:
run_check_cmd_kwargs['shell'] = __grains__['shell']
cret = mod_run_check(
run_check_cmd_kwargs, onlyif, unless
)
if isinstance(cret, dict):
ret.update(cret)
return ret
# Determine if supplied ref is a hash
remote_ref_type = 'ref'
if len(ref) <= 40 \
and all(x in string.hexdigits for x in ref):
ref = ref.lower()
remote_ref_type = 'hash'
comments = []
hash_exists_locally = False
local_commit_id = None
gitdir = os.path.join(target, '.git')
if os.path.isdir(gitdir) or __salt__['git.is_worktree'](target):
# Target directory is a git repository or git worktree
local_commit_id = _get_local_rev_and_branch(target, user)[0]
        if remote_ref_type == 'hash' and __salt__['git.describe'](ref):
# The ref is a hash and it exists locally so skip to checkout
hash_exists_locally = True
else:
# Check that remote is present and set to correct url
remotes = __salt__['git.remotes'](target,
user=user,
redact_auth=False)
if remote in remotes and name in remotes[remote]['fetch']:
pass
else:
# The fetch_url for the desired remote does not match the
# specified URL (or the remote does not exist), so set the
# remote URL.
current_fetch_url = None
if remote in remotes:
current_fetch_url = remotes[remote]['fetch']
if __opts__['test']:
return _neutral_test(
ret,
'Remote {0} would be set to {1}'.format(
remote, name
)
)
__salt__['git.remote_set'](target,
url=name,
remote=remote,
user=user,
https_user=https_user,
https_pass=https_pass)
comments.append(
'Remote {0} updated from \'{1}\' to \'{2}\''.format(
remote,
str(current_fetch_url),
name
)
)
else:
# Clone repository
if os.path.isdir(target):
if force_clone:
# Clone is required, and target directory exists, but the
# ``force`` option is enabled, so we need to clear out its
# contents to proceed.
if __opts__['test']:
return _neutral_test(
ret,
'Target directory {0} exists. Since force_clone=True, '
'the contents of {0} would be deleted, and {1} would '
'be cloned into this directory.'.format(target, name)
)
log.debug(
'Removing contents of {0} to clone repository {1} in its '
'place (force_clone=True set in git.detached state)'
.format(target, name)
)
try:
if os.path.islink(target):
os.unlink(target)
else:
salt.utils.rm_rf(target)
except OSError as exc:
return _fail(
ret,
'Unable to remove {0}: {1}'.format(target, exc),
comments
)
else:
ret['changes']['forced clone'] = True
elif os.listdir(target):
# Clone is required, but target dir exists and is non-empty. We
# can't proceed.
return _fail(
ret,
'Target \'{0}\' exists, is non-empty and is not a git '
'repository. Set the \'force_clone\' option to True to '
'remove this directory\'s contents and proceed with '
'cloning the remote repository'.format(target)
)
log.debug(
'Target {0} is not found, \'git clone\' is required'.format(target)
)
if __opts__['test']:
return _neutral_test(
ret,
'Repository {0} would be cloned to {1}'.format(
name, target
)
)
try:
clone_opts = ['--no-checkout']
if remote != 'origin':
clone_opts.extend(['--origin', remote])
__salt__['git.clone'](target,
name,
user=user,
opts=clone_opts,
identity=identity,
https_user=https_user,
https_pass=https_pass)
comments.append(
'{0} cloned to {1}'.format(
name,
target
)
)
except Exception as exc:
log.error(
'Unexpected exception in git.detached state',
exc_info=True
)
if isinstance(exc, CommandExecutionError):
msg = _strip_exc(exc)
else:
msg = str(exc)
return _fail(ret, msg, comments)
# Repository exists and is ready for fetch/checkout
refspecs = [
'refs/heads/*:refs/remotes/{0}/*'.format(remote),
'+refs/tags/*:refs/tags/*'
]
if hash_exists_locally or fetch_remote is False:
pass
else:
# Fetch refs from remote
if __opts__['test']:
return _neutral_test(
ret,
'Repository remote {0} would be fetched'.format(
remote
)
)
try:
fetch_changes = __salt__['git.fetch'](
target,
remote=remote,
force=True,
refspecs=refspecs,
user=user,
identity=identity)
except CommandExecutionError as exc:
msg = 'Fetch failed'
msg += ':\n\n' + str(exc)
return _fail(ret, msg, comments)
else:
if fetch_changes:
comments.append(
'Remote {0} was fetched, resulting in updated '
'refs'.format(remote)
)
    # Get refs and check out
checkout_commit_id = ''
    if remote_ref_type == 'hash':
if __salt__['git.describe'](ref):
checkout_commit_id = ref
else:
return _fail(
ret,
'Ref does not exist: {0}'.format(ref)
)
else:
try:
all_remote_refs = __salt__['git.remote_refs'](
target,
user=user,
identity=identity,
https_user=https_user,
https_pass=https_pass,
ignore_retcode=False)
if 'refs/remotes/'+remote+'/'+ref in all_remote_refs:
checkout_commit_id = all_remote_refs['refs/remotes/'+remote+'/'+ref]
elif 'refs/tags/'+ref in all_remote_refs:
checkout_commit_id = all_remote_refs['refs/tags/'+ref]
else:
return _fail(
ret,
'Ref {0} does not exist'.format(ref)
)
except CommandExecutionError as exc:
return _fail(
ret,
'Failed to list refs for {0}: {1}'.format(remote, _strip_exc(exc))
)
if hard_reset:
if __opts__['test']:
return _neutral_test(
ret,
'Hard reset to HEAD would be performed on {0}'.format(
target
)
)
__salt__['git.reset'](
target,
opts=['--hard', 'HEAD'],
user=user
)
comments.append(
'Repository was reset to HEAD before checking out ref'
)
# TODO: implement clean function for git module and add clean flag
if checkout_commit_id == local_commit_id:
new_rev = None
else:
if __opts__['test']:
ret['changes']['HEAD'] = {'old': local_commit_id, 'new': checkout_commit_id}
return _neutral_test(
ret,
'Commit ID {0} would be checked out at {1}'.format(
checkout_commit_id,
target
)
)
__salt__['git.checkout'](target,
checkout_commit_id,
force=force_checkout,
user=user)
comments.append(
'Commit ID {0} was checked out at {1}'.format(
checkout_commit_id,
target
)
)
try:
new_rev = __salt__['git.revision'](
cwd=target,
user=user,
ignore_retcode=True)
except CommandExecutionError:
new_rev = None
if submodules:
__salt__['git.submodule'](target,
'update',
opts=['--init', '--recursive'],
user=user,
identity=identity)
comments.append(
'Submodules were updated'
)
if new_rev is not None:
ret['changes']['HEAD'] = {'old': local_commit_id, 'new': new_rev}
else:
comments.append("Already checked out at correct revision")
msg = _format_comments(comments)
log.info(msg)
ret['comment'] = msg
return ret
|
ValueError
|
dataset/ETHPy150Open saltstack/salt/salt/states/git.py/detached
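Note: the detached state above decides whether the supplied ref is a commit hash by checking length and hex digits. A self-contained sketch of that check follows; the function name is illustrative, and the non-empty precondition mirrors the state's earlier validation of ref.

# Sketch of the "is this ref a commit hash?" test used above.
import string

def looks_like_hash(ref):
    # ref is assumed non-empty (the state validates that before this point)
    return len(ref) <= 40 and all(c in string.hexdigits for c in ref)

assert looks_like_hash('a94a8fe5ccb19ba61c4c0873d391e987982fbbd3')
assert not looks_like_hash('develop')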
|
228
|
def config_set(name,
value=None,
multivar=None,
repo=None,
user=None,
**kwargs):
'''
.. versionadded:: 2014.7.0
.. versionchanged:: 2015.8.0
Renamed from ``git.config`` to ``git.config_set``. For earlier
versions, use ``git.config``.
Ensure that a config value is set to the desired value(s)
name
Name of the git config value to set
value
Set a single value for the config item
multivar
Set multiple values for the config item
.. note::
The order matters here, if the same parameters are set but in a
different order, they will be removed and replaced in the order
specified.
.. versionadded:: 2015.8.0
repo
Location of the git repository for which the config value should be
set. Required unless ``global`` is set to ``True``.
user
Optional name of a user as whom `git config` will be run
global : False
If ``True``, this will set a global git config option
.. versionchanged:: 2015.8.0
Option renamed from ``is_global`` to ``global``. For earlier
versions, use ``is_global``.
**Local Config Example:**
.. code-block:: yaml
# Single value
mylocalrepo:
git.config_set:
- name: user.email
- value: foo@bar.net
- repo: /path/to/repo
# Multiple values
mylocalrepo:
git.config_set:
- name: mysection.myattribute
- multivar:
- foo
- bar
- baz
- repo: /path/to/repo
**Global Config Example (User ``foo``):**
.. code-block:: yaml
mylocalrepo:
git.config_set:
- name: user.name
- value: Foo Bar
- user: foo
- global: True
'''
ret = {'name': name,
'changes': {},
'result': True,
'comment': ''}
if value is not None and multivar is not None:
return _fail(
ret,
'Only one of \'value\' and \'multivar\' is permitted'
)
# Sanitize kwargs and make sure that no invalid ones were passed. This
# allows us to accept 'global' as an argument to this function without
# shadowing global(), while also not allowing unwanted arguments to be
# passed.
kwargs = salt.utils.clean_kwargs(**kwargs)
global_ = kwargs.pop('global', False)
is_global = kwargs.pop('is_global', False)
if kwargs:
return _fail(
ret,
salt.utils.invalid_kwargs(kwargs, raise_exc=False)
)
if is_global:
salt.utils.warn_until(
'Nitrogen',
'The \'is_global\' argument to the git.config_set state has been '
'deprecated, please use \'global\' instead.'
)
global_ = is_global
if not global_ and not repo:
return _fail(
ret,
'Non-global config options require the \'repo\' argument to be '
'set'
)
if not isinstance(name, six.string_types):
name = str(name)
if value is not None:
if not isinstance(value, six.string_types):
value = str(value)
value_comment = '\'' + value + '\''
desired = [value]
if multivar is not None:
if not isinstance(multivar, list):
try:
multivar = multivar.split(',')
except __HOLE__:
multivar = str(multivar).split(',')
else:
new_multivar = []
for item in multivar:
if isinstance(item, six.string_types):
new_multivar.append(item)
else:
new_multivar.append(str(item))
multivar = new_multivar
value_comment = multivar
desired = multivar
# Get current value
pre = __salt__['git.config_get'](
cwd=repo,
key=name,
user=user,
ignore_retcode=True,
**{'all': True, 'global': global_}
)
if desired == pre:
ret['comment'] = '{0}\'{1}\' is already set to {2}'.format(
'Global key ' if global_ else '',
name,
value_comment
)
return ret
if __opts__['test']:
ret['changes'] = {'old': pre, 'new': desired}
msg = '{0}\'{1}\' would be {2} {3}'.format(
'Global key ' if global_ else '',
name,
'added as' if pre is None else 'set to',
value_comment
)
return _neutral_test(ret, msg)
try:
# Set/update config value
post = __salt__['git.config_set'](
cwd=repo,
key=name,
value=value,
multivar=multivar,
user=user,
**{'global': global_}
)
except CommandExecutionError as exc:
return _fail(
ret,
'Failed to set {0}\'{1}\' to {2}: {3}'.format(
'global key ' if global_ else '',
name,
value_comment,
_strip_exc(exc)
)
)
if pre != post:
ret['changes'][name] = {'old': pre, 'new': post}
if post != desired:
return _fail(
ret,
'Failed to set {0}\'{1}\' to {2}'.format(
'global key ' if global_ else '',
name,
value_comment
)
)
ret['comment'] = '{0}\'{1}\' was {2} {3}'.format(
'Global key ' if global_ else '',
name,
'added as' if pre is None else 'set to',
value_comment
)
return ret
|
AttributeError
|
dataset/ETHPy150Open saltstack/salt/salt/states/git.py/config_set
|
229
|
def get_object(self, queryset=None):
"""
Returns the object the view is displaying.
By default this requires `self.queryset` and a `pk` or `slug` argument
in the URLconf, but subclasses can override this to return any object.
"""
# Use a custom queryset if provided; this is required for subclasses
# like DateDetailView
if queryset is None:
queryset = self.get_queryset()
# Next, try looking up by primary key.
pk = self.kwargs.get(self.pk_url_kwarg, None)
slug = self.kwargs.get(self.slug_url_kwarg, None)
if pk is not None:
queryset = queryset.filter(pk=pk)
# Next, try looking up by slug.
elif slug is not None:
slug_field = self.get_slug_field()
queryset = queryset.filter(**{slug_field: slug})
# If none of those are defined, it's an error.
else:
raise AttributeError("Generic detail view %s must be called with "
"either an object pk or a slug."
% self.__class__.__name__)
try:
# Get the single item from the filtered queryset
obj = queryset.get()
except __HOLE__:
raise Http404(_("No %(verbose_name)s found matching the query") %
{'verbose_name': queryset.model._meta.verbose_name})
return obj
|
ObjectDoesNotExist
|
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/Django-1.6.10/django/views/generic/detail.py/SingleObjectMixin.get_object
|
230
|
def _get_image_from_file(dir_path, image_file):
"""
Get an instance of PIL.Image from the given file.
@param {String} dir_path - The directory containing the image file
@param {String} image_file - The filename of the image file within dir_path
@return {PIL.Image} An instance of the image file as a PIL Image, or None
if the functionality is not available. This could be because PIL is not
present, or because it can't process the given file type.
"""
# Save ourselves the effort if PIL is not present, and return None now
if not PIL_ENABLED:
return None
# Put together full path
path = os.path.join(dir_path, image_file)
# Try to read the image
img = None
try:
img = Image.open(path)
except __HOLE__ as exptn:
print('Error loading image file %s: %s' % (path, exptn))
# Return image or None
return img
|
IOError
|
dataset/ETHPy150Open unwitting/imageme/imageme.py/_get_image_from_file
|
231
|
def _get_src_from_image(img, fallback_image_file):
"""
    Get base-64 encoded data as a string for the given image. Fall back to
    returning fallback_image_file if the image data cannot be obtained or img is None.
@param {Image} img - The PIL Image to get src data for
@param {String} fallback_image_file - The filename of the image file,
to be used when image data capture fails
@return {String} The base-64 encoded image data string, or path to the file
itself if not supported.
"""
# If the image is None, then we can't process, so we should return the
# path to the file itself
if img is None:
return fallback_image_file
# Target format should be the same as the original image format, unless it's
# a TIF/TIFF, which can't be displayed by most browsers; we convert these
# to jpeg
target_format = img.format
if target_format.lower() in ['tif', 'tiff']:
target_format = 'JPEG'
# If we have an actual Image, great - put together the base64 image string
try:
bytesio = io.BytesIO()
img.save(bytesio, target_format)
byte_value = bytesio.getvalue()
b64 = base64.b64encode(byte_value)
return 'data:image/%s;base64,%s' % (target_format.lower(), b64)
except __HOLE__ as exptn:
print('IOError while saving image bytes: %s' % exptn)
return fallback_image_file
|
IOError
|
dataset/ETHPy150Open unwitting/imageme/imageme.py/_get_src_from_image
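Note: a standalone sketch of the data-URI encoding performed above, assuming Pillow is installed; it builds a tiny image in memory instead of reading a file. On Python 3 the base64-encoded bytes must be decoded before string interpolation, which the sketch does explicitly.

import base64
import io

from PIL import Image

img = Image.new('RGB', (4, 4), color=(255, 0, 0))
buf = io.BytesIO()
img.save(buf, 'PNG')
b64 = base64.b64encode(buf.getvalue()).decode('ascii')
src = 'data:image/png;base64,%s' % b64
print(src[:40])  # e.g. data:image/png;base64,iVBORw0KGgo...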
|
232
|
def _get_thumbnail_image_from_file(dir_path, image_file):
"""
Get a PIL.Image from the given image file which has been scaled down to
THUMBNAIL_WIDTH wide.
@param {String} dir_path - The directory containing the image file
@param {String} image_file - The filename of the image file within dir_path
@return {PIL.Image} An instance of the thumbnail as a PIL Image, or None
if the functionality is not available. See _get_image_from_file for
details.
"""
# Get image
img = _get_image_from_file(dir_path, image_file)
# If it's not supported, exit now
if img is None:
return None
if img.format.lower() == 'gif':
return None
# Get image dimensions
img_width, img_height = img.size
# We need to perform a resize - first, work out the scale ratio to take the
# image width to THUMBNAIL_WIDTH (THUMBNAIL_WIDTH:img_width ratio)
scale_ratio = THUMBNAIL_WIDTH / float(img_width)
# Work out target image height based on the scale ratio
target_height = int(scale_ratio * img_height)
# Perform the resize
try:
img.thumbnail((THUMBNAIL_WIDTH, target_height), resample=RESAMPLE)
except __HOLE__ as exptn:
print('WARNING: IOError when thumbnailing %s/%s: %s' % (
dir_path, image_file, exptn
))
return None
# Return the resized image
return img
|
IOError
|
dataset/ETHPy150Open unwitting/imageme/imageme.py/_get_thumbnail_image_from_file
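Note: a worked example of the scaling arithmetic above. THUMBNAIL_WIDTH is a module-level constant assumed here for illustration; a 1600x900 source with THUMBNAIL_WIDTH = 200 yields a 200x112 thumbnail (the height is truncated to an integer).

THUMBNAIL_WIDTH = 200                               # assumed constant, as referenced above
img_width, img_height = 1600, 900
scale_ratio = THUMBNAIL_WIDTH / float(img_width)    # 0.125
target_height = int(scale_ratio * img_height)       # int(112.5) == 112
assert (THUMBNAIL_WIDTH, target_height) == (200, 112)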
|
233
|
def _run_server():
"""
Run the image server. This is blocking. Will handle user KeyboardInterrupt
and other exceptions appropriately and return control once the server is
stopped.
@return {None}
"""
# Get the port to run on
port = _get_server_port()
# Configure allow_reuse_address to make re-runs of the script less painful -
# if this is not True then waiting for the address to be freed after the
# last run can block a subsequent run
SocketServer.TCPServer.allow_reuse_address = True
# Create the server instance
server = SocketServer.TCPServer(
('', port),
SimpleHTTPServer.SimpleHTTPRequestHandler
)
# Print out before actually running the server (cheeky / optimistic, however
# you want to look at it)
print('Your images are at http://127.0.0.1:%d/%s' % (
port,
INDEX_FILE_NAME
))
# Try to run the server
try:
# Run it - this call blocks until the server is killed
server.serve_forever()
except __HOLE__:
# This is the expected way of the server being killed, since imageMe is
# intended for ad-hoc running from command line
print('User interrupted, stopping')
except Exception as exptn:
# Catch everything else - this will handle shutdowns via other signals
# and faults actually starting the server in the first place
print(exptn)
print('Unhandled exception in server, stopping')
|
KeyboardInterrupt
|
dataset/ETHPy150Open unwitting/imageme/imageme.py/_run_server
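Note: the function above uses the Python 2 module names SocketServer and SimpleHTTPServer. A rough Python 3 equivalent of the same setup is sketched below; the port constant stands in for _get_server_port().

import http.server
import socketserver

PORT = 8000  # stand-in for _get_server_port()
socketserver.TCPServer.allow_reuse_address = True
server = socketserver.TCPServer(('', PORT), http.server.SimpleHTTPRequestHandler)
try:
    server.serve_forever()
except KeyboardInterrupt:
    print('User interrupted, stopping')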
|
234
|
@classmethod
def unpack (cls, data, negotiated):
try:
if cls.cached:
if data == cls.previous:
return cls.cached
# # This code may mess with the cached data
# elif cls.previous and data.startswith(cls.previous):
# attributes = Attributes()
# for key in cls.cached:
# attributes[key] = cls.cached[key]
# attributes.parse(data[len(cls.previous):],negotiated)
else:
attributes = cls().parse(data,negotiated)
else:
attributes = cls().parse(data,negotiated)
if Attribute.CODE.AS_PATH in attributes and Attribute.CODE.AS4_PATH in attributes:
attributes.merge_attributes()
if Attribute.CODE.MP_REACH_NLRI not in attributes and Attribute.CODE.MP_UNREACH_NLRI not in attributes:
cls.previous = data
cls.cached = attributes
else:
cls.previous = ''
cls.cached = None
return attributes
except __HOLE__:
raise Notify(3,2,data)
|
IndexError
|
dataset/ETHPy150Open Exa-Networks/exabgp/lib/exabgp/bgp/message/update/attribute/attributes.py/Attributes.unpack
|
235
|
def sameValuesAs (self, other):
# we sort based on packed values since the items do not
# necessarily implement __cmp__
def sorter (x, y):
return cmp(x.pack(), y.pack())
try:
for key in set(self.iterkeys()).union(set(other.iterkeys())):
if (key == Attribute.CODE.MP_REACH_NLRI or key == Attribute.CODE.MP_UNREACH_NLRI):
continue
sval = self[key]
oval = other[key]
# In the case where the attribute is Communities or
# extended communities, we want to compare values independently of their order
if isinstance(sval, Communities):
if not isinstance(oval, Communities):
return False
sval = sorted(sval,sorter)
oval = sorted(oval,sorter)
if cmp(sval,oval) != 0:
return False
return True
except __HOLE__:
return False
|
KeyError
|
dataset/ETHPy150Open Exa-Networks/exabgp/lib/exabgp/bgp/message/update/attribute/attributes.py/Attributes.sameValuesAs
|
236
|
def count_tied_groups(x, use_missing=False):
"""
Counts the number of tied values.
Parameters
----------
x : sequence
Sequence of data on which to counts the ties
use_missing : bool, optional
Whether to consider missing values as tied.
Returns
-------
count_tied_groups : dict
Returns a dictionary (nb of ties: nb of groups).
Examples
--------
>>> from scipy.stats import mstats
>>> z = [0, 0, 0, 2, 2, 2, 3, 3, 4, 5, 6]
>>> mstats.count_tied_groups(z)
{2: 1, 3: 2}
In the above example, the ties were 0 (3x), 2 (3x) and 3 (2x).
>>> z = np.ma.array([0, 0, 1, 2, 2, 2, 3, 3, 4, 5, 6])
>>> mstats.count_tied_groups(z)
{2: 2, 3: 1}
>>> z[[1,-1]] = np.ma.masked
>>> mstats.count_tied_groups(z, use_missing=True)
{2: 2, 3: 1}
"""
nmasked = ma.getmask(x).sum()
# We need the copy as find_repeats will overwrite the initial data
data = ma.compressed(x).copy()
(ties, counts) = find_repeats(data)
nties = {}
if len(ties):
nties = dict(zip(np.unique(counts), itertools.repeat(1)))
nties.update(dict(zip(*find_repeats(counts))))
if nmasked and use_missing:
try:
nties[nmasked] += 1
except __HOLE__:
nties[nmasked] = 1
return nties
|
KeyError
|
dataset/ETHPy150Open scipy/scipy/scipy/stats/mstats_basic.py/count_tied_groups
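Note: the KeyError fallback above is the classic "increment or initialise" bookkeeping pattern. For comparison, a sketch of the same step with collections.Counter, which supplies missing keys as 0; the starting counts are illustrative.

from collections import Counter

nties = Counter({2: 1, 3: 1})   # e.g. one pair and one triple already counted
nmasked = 2
nties[nmasked] += 1             # no KeyError handling needed with Counter
assert dict(nties) == {2: 2, 3: 1}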
|
237
|
def __init__(self,
source,
include_regex='^%include\s"([^"]+)"',
max_nest_level=100,
output=None):
"""
Create a new ``Includer`` object.
:Parameters:
source : file or str
The source to be read and expanded. May be an open file-like
object, a path name, or a URL string.
include_regex : str
Regular expression defining the include syntax. Must contain a
single parenthetical group that can be used to extract the
included file or URL.
max_nest_level : int
Maximum include nesting level. Exceeding this level will cause
``Includer`` to throw an ``IncludeError``.
output : str or file
A string (path name) or file-like object to which to save the
expanded output.
:raise IncludeError: On error
"""
if isinstance(source, str):
f, is_url, name = self.__open(source, None, False)
else:
# Assume file-like object.
f = source
is_url = False
try:
name = source.name
except __HOLE__:
name = None
self.closed = False
self.mode = None
self.__include_pattern = re.compile(include_regex)
self.__name = name
if output == None:
from cStringIO import StringIO
output = StringIO()
self.__maxnest = max_nest_level
self.__nested = 0
self.__process_includes(f, name, is_url, output)
self.__f = output
self.__f.seek(0)
|
AttributeError
|
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/grizzled/grizzled/file/includer.py/Includer.__init__
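Note: a short sketch of the default include_regex in action. The single capture group is what Includer extracts as the included path or URL; the sample input line is illustrative.

import re

include_pattern = re.compile(r'^%include\s"([^"]+)"')
match = include_pattern.match('%include "common/header.txt"')
assert match and match.group(1) == 'common/header.txt'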
|
238
|
def _make_container_root(name):
'''
Make the container root directory
'''
path = _root(name)
if os.path.exists(path):
__context__['retcode'] = salt.defaults.exitcodes.SALT_BUILD_FAIL
raise CommandExecutionError(
'Container {0} already exists'.format(name)
)
else:
try:
os.makedirs(path)
return path
except __HOLE__ as exc:
raise CommandExecutionError(
'Unable to make container root directory {0}: {1}'
.format(name, exc)
)
|
OSError
|
dataset/ETHPy150Open saltstack/salt/salt/modules/nspawn.py/_make_container_root
|
239
|
def _build_failed(dst, name):
try:
__context__['retcode'] = salt.defaults.exitcodes.SALT_BUILD_FAIL
shutil.rmtree(dst)
except __HOLE__ as exc:
if exc.errno != errno.ENOENT:
raise CommandExecutionError(
'Unable to cleanup container root dir {0}'.format(dst)
)
raise CommandExecutionError(
'Container {0} failed to build'.format(name)
)
|
OSError
|
dataset/ETHPy150Open saltstack/salt/salt/modules/nspawn.py/_build_failed
|
240
|
def _ensure_systemd(version):
'''
Raises an exception if the systemd version is not greater than the
passed version.
'''
try:
version = int(version)
except ValueError:
raise CommandExecutionError('Invalid version \'{0}\''.format(version))
try:
installed = _sd_version()
log.debug('nspawn: detected systemd {0}'.format(installed))
except (__HOLE__, ValueError):
raise CommandExecutionError('nspawn: Unable to get systemd version')
if installed < version:
raise CommandExecutionError(
'This function requires systemd >= {0} '
'(Detected version: {1}).'.format(version, installed)
)
|
IndexError
|
dataset/ETHPy150Open saltstack/salt/salt/modules/nspawn.py/_ensure_systemd
|
241
|
@_ensure_exists
def pid(name):
'''
Returns the PID of a container
name
Container name
CLI Example:
.. code-block:: bash
salt myminion nspawn.pid arch1
'''
try:
return int(info(name).get('PID'))
except (TypeError, __HOLE__) as exc:
raise CommandExecutionError(
'Unable to get PID for container \'{0}\': {1}'.format(name, exc)
)
|
ValueError
|
dataset/ETHPy150Open saltstack/salt/salt/modules/nspawn.py/pid
|
242
|
def bootstrap_container(name, dist=None, version=None):
'''
    Bootstrap a container from package servers. If dist is None, a container of
    the same OS as the minion will be created; otherwise the needed
    bootstrapping tools will need to be available on the host.
CLI Example:
.. code-block:: bash
salt myminion nspawn.bootstrap_container <name>
'''
if not dist:
dist = __grains__['os'].lower()
log.debug(
'nspawn.bootstrap: no dist provided, defaulting to \'{0}\''
.format(dist)
)
try:
return globals()['_bootstrap_{0}'.format(dist)](name, version=version)
except __HOLE__:
raise CommandExecutionError('Unsupported distribution "{0}"'.format(dist))
|
KeyError
|
dataset/ETHPy150Open saltstack/salt/salt/modules/nspawn.py/bootstrap_container
|
243
|
def bootstrap_salt(name,
config=None,
approve_key=True,
install=True,
pub_key=None,
priv_key=None,
bootstrap_url=None,
force_install=False,
unconditional_install=False,
bootstrap_delay=None,
bootstrap_args=None,
bootstrap_shell=None):
'''
    Bootstrap a container from package servers. If dist is None, a container of
    the same OS as the minion will be created; otherwise the needed
    bootstrapping tools will need to be available on the host.
CLI Example::
salt '*' nspawn.bootstrap_salt arch1
'''
if bootstrap_delay is not None:
try:
time.sleep(bootstrap_delay)
except __HOLE__:
# Bad input, but assume since a value was passed that
# a delay was desired, and sleep for 5 seconds
time.sleep(5)
c_info = info(name)
if not c_info:
return None
# default set here as we cannot set them
# in def as it can come from a chain of procedures.
if bootstrap_args:
        # Custom bootstrap args can be fully customized, and the user could
        # have inserted the placeholder for the config directory.
        # For example, some salt bootstrap scripts do not use -c at all.
if '{0}' not in bootstrap_args:
bootstrap_args += ' -c {0}'
else:
bootstrap_args = '-c {0}'
if not bootstrap_shell:
bootstrap_shell = 'sh'
orig_state = _ensure_running(name)
if not orig_state:
return orig_state
if not force_install:
needs_install = _needs_install(name)
else:
needs_install = True
seeded = retcode(name, 'test -e \'{0}\''.format(SEED_MARKER)) == 0
tmp = tempfile.mkdtemp()
if seeded and not unconditional_install:
ret = True
else:
ret = False
cfg_files = __salt__['seed.mkconfig'](
config, tmp=tmp, id_=name, approve_key=approve_key,
pub_key=pub_key, priv_key=priv_key)
if needs_install or force_install or unconditional_install:
if install:
rstr = __salt__['test.rand_str']()
configdir = '/tmp/.c_{0}'.format(rstr)
run(name,
'install -m 0700 -d {0}'.format(configdir),
python_shell=False)
bs_ = __salt__['config.gather_bootstrap_script'](
bootstrap=bootstrap_url)
dest_dir = os.path.join('/tmp', rstr)
for cmd in [
'mkdir -p {0}'.format(dest_dir),
'chmod 700 {0}'.format(dest_dir),
]:
if run_stdout(name, cmd):
log.error(
('tmpdir {0} creation'
' failed ({1}').format(dest_dir, cmd))
return False
copy_to(name,
bs_,
'{0}/bootstrap.sh'.format(dest_dir),
makedirs=True)
copy_to(name, cfg_files['config'],
os.path.join(configdir, 'minion'))
copy_to(name, cfg_files['privkey'],
os.path.join(configdir, 'minion.pem'))
copy_to(name, cfg_files['pubkey'],
os.path.join(configdir, 'minion.pub'))
bootstrap_args = bootstrap_args.format(configdir)
cmd = ('{0} {2}/bootstrap.sh {1}'
.format(bootstrap_shell,
bootstrap_args.replace("'", "''"),
dest_dir))
# log ASAP the forged bootstrap command which can be wrapped
# out of the output in case of unexpected problem
            log.info('Running {0} in container \'{1}\''
.format(cmd, name))
ret = retcode(name, cmd, output_loglevel='info',
use_vt=True) == 0
else:
ret = False
else:
minion_config = salt.config.minion_config(cfg_files['config'])
pki_dir = minion_config['pki_dir']
copy_to(name, cfg_files['config'], '/etc/salt/minion')
copy_to(name, cfg_files['privkey'], os.path.join(pki_dir, 'minion.pem'))
copy_to(name, cfg_files['pubkey'], os.path.join(pki_dir, 'minion.pub'))
run(name,
'salt-call --local service.enable salt-minion',
python_shell=False)
ret = True
shutil.rmtree(tmp)
if orig_state == 'stopped':
stop(name)
# mark seeded upon successful install
if ret:
run(name,
'touch \'{0}\''.format(SEED_MARKER),
python_shell=False)
return ret
|
TypeError
|
dataset/ETHPy150Open saltstack/salt/salt/modules/nspawn.py/bootstrap_salt
|
244
|
def list_all():
'''
Lists all nspawn containers
CLI Example:
.. code-block:: bash
salt myminion nspawn.list_all
'''
ret = []
if _sd_version() >= 219:
for line in _machinectl('list-images')['stdout'].splitlines():
try:
ret.append(line.split()[0])
except __HOLE__:
continue
else:
rootdir = _root()
try:
for dirname in os.listdir(rootdir):
if os.path.isdir(os.path.join(rootdir, dirname)):
ret.append(dirname)
except OSError:
pass
return ret
|
IndexError
|
dataset/ETHPy150Open saltstack/salt/salt/modules/nspawn.py/list_all
|
245
|
def list_running():
'''
Lists running nspawn containers
.. note::
``nspawn.list`` also works to list running containers
CLI Example:
.. code-block:: bash
salt myminion nspawn.list_running
salt myminion nspawn.list
'''
ret = []
for line in _machinectl('list')['stdout'].splitlines():
try:
ret.append(line.split()[0])
except __HOLE__:
pass
return sorted(ret)
# 'machinectl list' shows only running containers, so allow this to work as an
# alias to nspawn.list_running
|
IndexError
|
dataset/ETHPy150Open saltstack/salt/salt/modules/nspawn.py/list_running
|
246
|
@_ensure_exists
def state(name):
'''
Return state of container (running or stopped)
CLI Example:
.. code-block:: bash
salt myminion nspawn.state <name>
'''
try:
cmd = 'show {0} --property=State'.format(name)
return _machinectl(cmd, ignore_retcode=True)['stdout'].split('=')[-1]
except __HOLE__:
return 'stopped'
|
IndexError
|
dataset/ETHPy150Open saltstack/salt/salt/modules/nspawn.py/state
|
247
|
def info(name, **kwargs):
'''
Return info about a container
.. note::
The container must be running for ``machinectl`` to gather information
about it. If the container is stopped, then this function will start
it.
start : False
If ``True``, then the container will be started to retrieve the info. A
``Started`` key will be in the return data if the container was
started.
CLI Example:
.. code-block:: bash
salt myminion nspawn.info arch1
salt myminion nspawn.info arch1 force_start=False
'''
kwargs = salt.utils.clean_kwargs(**kwargs)
start_ = kwargs.pop('start', False)
if kwargs:
salt.utils.invalid_kwargs(kwargs)
if not start_:
_ensure_running(name)
elif name not in list_running():
start(name)
# Have to parse 'machinectl status' here since 'machinectl show' doesn't
# contain IP address info or OS info. *shakes fist angrily*
c_info = _machinectl('status {0}'.format(name))
if c_info['retcode'] != 0:
raise CommandExecutionError(
'Unable to get info for container \'{0}\''.format(name)
)
# Better human-readable names. False means key should be ignored.
key_name_map = {
'Iface': 'Network Interface',
'Leader': 'PID',
'Service': False,
'Since': 'Running Since',
}
ret = {}
kv_pair = re.compile(r'^\s+([A-Za-z]+): (.+)$')
tree = re.compile(r'[|`]')
lines = c_info['stdout'].splitlines()
multiline = False
cur_key = None
for idx in range(len(lines)):
match = kv_pair.match(lines[idx])
if match:
key, val = match.groups()
# Get a better key name if one exists
key = key_name_map.get(key, key)
if key is False:
continue
elif key == 'PID':
try:
val = val.split()[0]
except __HOLE__:
pass
cur_key = key
if multiline:
multiline = False
ret[key] = val
else:
if cur_key is None:
continue
if tree.search(lines[idx]):
# We've reached the process tree, bail out
break
if multiline:
ret[cur_key].append(lines[idx].strip())
else:
ret[cur_key] = [ret[key], lines[idx].strip()]
multiline = True
return ret
|
IndexError
|
dataset/ETHPy150Open saltstack/salt/salt/modules/nspawn.py/info
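Note: a sketch of the key/value parsing performed above on a typical 'machinectl status' line; the sample line and the one-entry rename map are illustrative rather than captured output.

import re

kv_pair = re.compile(r'^\s+([A-Za-z]+): (.+)$')
line = '         Leader: 12345 (systemd)'
key, val = kv_pair.match(line).groups()
key = {'Leader': 'PID'}.get(key, key)   # same renaming idea as key_name_map
val = val.split()[0]                    # keep just the PID number
assert (key, val) == ('PID', '12345')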
|
248
|
@_ensure_exists
def remove(name, stop=False):
'''
Remove the named container
.. warning::
This function will remove all data associated with the container. It
will not, however, remove the btrfs subvolumes created by pulling
container images (:mod:`nspawn.pull_raw
<salt.modules.nspawn.pull_raw>`, :mod:`nspawn.pull_tar
<salt.modules.nspawn.pull_tar>`, :mod:`nspawn.pull_dkr
<salt.modules.nspawn.pull_dkr>`).
stop : False
If ``True``, the container will be destroyed even if it is
running/frozen.
CLI Examples:
.. code-block:: bash
salt '*' nspawn.remove foo
salt '*' nspawn.remove foo stop=True
'''
if not stop and state(name) != 'stopped':
raise CommandExecutionError(
'Container \'{0}\' is not stopped'.format(name)
)
def _failed_remove(name, exc):
raise CommandExecutionError(
'Unable to remove container \'{0}\': {1}'.format(name, exc)
)
if _sd_version() >= 219:
ret = _machinectl('remove {0}'.format(name))
if ret['retcode'] != 0:
__context__['retcode'] = salt.defaults.exitcodes.EX_UNAVAILABLE
_failed_remove(name, ret['stderr'])
else:
try:
shutil.rmtree(os.path.join(_root(), name))
except __HOLE__ as exc:
_failed_remove(name, exc)
return True
# Compatibility between LXC and nspawn
|
OSError
|
dataset/ETHPy150Open saltstack/salt/salt/modules/nspawn.py/remove
|
249
|
@_ensure_exists
def copy_to(name, source, dest, overwrite=False, makedirs=False):
'''
Copy a file from the host into a container
name
Container name
source
File to be copied to the container
dest
Destination on the container. Must be an absolute path.
overwrite : False
        Unless this option is set to ``True``, an error will be raised if a
        file already exists at the location specified by the ``dest`` argument.
makedirs : False
Create the parent directory on the container if it does not already
exist.
CLI Example:
.. code-block:: bash
salt 'minion' nspawn.copy_to /tmp/foo /root/foo
'''
path = source
try:
if source.startswith('salt://'):
cached_source = __salt__['cp.cache_file'](source)
if not cached_source:
raise CommandExecutionError(
'Unable to cache {0}'.format(source)
)
path = cached_source
except __HOLE__:
raise SaltInvocationError('Invalid source file {0}'.format(source))
if _sd_version() >= 219:
# TODO: Use machinectl copy-to
pass
return __salt__['container_resource.copy_to'](
name,
path,
dest,
container_type=__virtualname__,
exec_driver=EXEC_DRIVER,
overwrite=overwrite,
makedirs=makedirs)
|
AttributeError
|
dataset/ETHPy150Open saltstack/salt/salt/modules/nspawn.py/copy_to
|
250
|
def _pull_image(pull_type, image, name, **kwargs):
'''
Common logic for machinectl pull-* commands
'''
_ensure_systemd(219)
if exists(name):
raise SaltInvocationError(
'Container \'{0}\' already exists'.format(name)
)
if pull_type in ('raw', 'tar'):
valid_kwargs = ('verify',)
elif pull_type == 'dkr':
valid_kwargs = ('index',)
else:
raise SaltInvocationError(
'Unsupported image type \'{0}\''.format(pull_type)
)
kwargs = salt.utils.clean_kwargs(**kwargs)
bad_kwargs = dict(
[(x, y) for x, y in six.iteritems(salt.utils.clean_kwargs(**kwargs))
if x not in valid_kwargs]
)
if bad_kwargs:
salt.utils.invalid_kwargs(bad_kwargs)
pull_opts = []
if pull_type in ('raw', 'tar'):
verify = kwargs.get('verify', False)
if not verify:
pull_opts.append('--verify=no')
else:
def _bad_verify():
raise SaltInvocationError(
'\'verify\' must be one of the following: '
'signature, checksum'
)
try:
verify = verify.lower()
except __HOLE__:
_bad_verify()
else:
if verify not in ('signature', 'checksum'):
_bad_verify()
pull_opts.append('--verify={0}'.format(verify))
elif pull_type == 'dkr':
# No need to validate the index URL, machinectl will take care of this
# for us.
if 'index' in kwargs:
pull_opts.append('--dkr-index-url={0}'.format(kwargs['index']))
cmd = 'pull-{0} {1} {2} {3}'.format(
pull_type, ' '.join(pull_opts), image, name
)
result = _machinectl(cmd, use_vt=True)
if result['retcode'] != 0:
msg = 'Error occurred pulling image. Stderr from the pull command ' \
'(if any) follows: '
if result['stderr']:
msg += '\n\n{0}'.format(result['stderr'])
raise CommandExecutionError(msg)
return True
|
AttributeError
|
dataset/ETHPy150Open saltstack/salt/salt/modules/nspawn.py/_pull_image
|
251
|
def _password_cmd(self):
if self.password:
try:
p = subprocess.Popen(["sshpass"], stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
p.communicate()
except __HOLE__:
raise errors.AnsibleError("to use the 'ssh' connection type with passwords, you must install the sshpass program")
(self.rfd, self.wfd) = os.pipe()
return ["sshpass", "-d%d" % self.rfd]
return []
|
OSError
|
dataset/ETHPy150Open open-cloud/xos/xos/synchronizers/vcpe/steps/ansible_test/xos.py/Connection._password_cmd
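Note: the probe above spawns sshpass and catches OSError to detect whether the program is installed. On Python 3.3+ a simpler check is available; this is a sketch of an alternative, not the original code's approach.

import shutil

if shutil.which('sshpass') is None:
    raise RuntimeError(
        "to use the 'ssh' connection type with passwords, install the sshpass program")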
|
252
|
def not_in_host_file(self, host):
if 'USER' in os.environ:
user_host_file = os.path.expandvars("~${USER}/.ssh/known_hosts")
else:
user_host_file = "~/.ssh/known_hosts"
user_host_file = os.path.expanduser(user_host_file)
host_file_list = []
host_file_list.append(user_host_file)
host_file_list.append("/etc/ssh/ssh_known_hosts")
host_file_list.append("/etc/ssh/ssh_known_hosts2")
hfiles_not_found = 0
for hf in host_file_list:
if not os.path.exists(hf):
hfiles_not_found += 1
continue
try:
host_fh = open(hf)
except __HOLE__, e:
hfiles_not_found += 1
continue
else:
data = host_fh.read()
host_fh.close()
for line in data.split("\n"):
if line is None or " " not in line:
continue
tokens = line.split()
if tokens[0].find(self.HASHED_KEY_MAGIC) == 0:
# this is a hashed known host entry
try:
(kn_salt,kn_host) = tokens[0][len(self.HASHED_KEY_MAGIC):].split("|",2)
hash = hmac.new(kn_salt.decode('base64'), digestmod=sha1)
hash.update(host)
if hash.digest() == kn_host.decode('base64'):
return False
except:
# invalid hashed host key, skip it
continue
else:
# standard host file entry
if host in tokens[0]:
return False
if (hfiles_not_found == len(host_file_list)):
vvv("EXEC previous known host file not found for %s" % host)
return True
|
IOError
|
dataset/ETHPy150Open open-cloud/xos/xos/synchronizers/vcpe/steps/ansible_test/xos.py/Connection.not_in_host_file
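Note: the hashed known_hosts comparison above follows the OpenSSH format |1|<b64 salt>|<b64 HMAC-SHA1(salt, hostname)>: an entry matches a hostname when recomputing the HMAC with the stored salt reproduces the stored digest. A self-contained sketch with illustrative helper names follows.

import base64
import hmac
import os
from hashlib import sha1

HASHED_KEY_MAGIC = '|1|'

def make_hashed_entry(host, salt=None):
    salt = salt or os.urandom(20)
    digest = hmac.new(salt, host.encode(), sha1).digest()
    return (HASHED_KEY_MAGIC + base64.b64encode(salt).decode() +
            '|' + base64.b64encode(digest).decode())

def entry_matches(entry, host):
    kn_salt, kn_host = entry[len(HASHED_KEY_MAGIC):].split('|', 2)
    digest = hmac.new(base64.b64decode(kn_salt), host.encode(), sha1).digest()
    return digest == base64.b64decode(kn_host)

entry = make_hashed_entry('example.com')
assert entry_matches(entry, 'example.com')
assert not entry_matches(entry, 'example.org')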
|
253
|
def load_template_source(template_name, template_dirs=None):
for filepath in get_template_sources(template_name, template_dirs):
try:
return (open(filepath).read(), filepath)
except __HOLE__:
pass
raise TemplateDoesNotExist, template_name
|
IOError
|
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-0.96/django/template/loaders/app_directories.py/load_template_source
|
254
|
def stash(self, storage, url):
"""Stores the uploaded file in a temporary storage location."""
result = {}
if self.is_valid():
upload = self.cleaned_data['upload']
name = storage.save(upload.name, upload)
result['filename'] = os.path.basename(name)
try:
result['url'] = storage.url(name)
except __HOLE__:
result['url'] = None
result['stored'] = serialize_upload(name, storage, url)
return result
|
NotImplementedError
|
dataset/ETHPy150Open caktus/django-sticky-uploads/stickyuploads/forms.py/UploadForm.stash
|
255
|
def get_dtd_element(self, name):
try:
return getattr(self.dtd, identifier(name))
except __HOLE__:
raise ValidationError("No element: %s" % (name,))
|
AttributeError
|
dataset/ETHPy150Open kdart/pycopia/WWW/pycopia/WWW/XHTML.py/FlowMixin.get_dtd_element
|
256
|
def new_image(self, _imagefile, _alt=None, **kwargs):
check_flag(kwargs, "ismap")
if Image is not None:
try:
im = Image.open(_imagefile)
except __HOLE__:
pass
else:
x, y = im.size
kwargs["width"] = str(x)
kwargs["height"] = str(y)
try:
im.close()
except:
pass
del im
kwargs["src"] = _imagefile # XXX adjust for server alias?
if _alt:
kwargs["alt"] = _alt
img = self.dtd.Img(**kwargs)
self.append(img)
return img
|
IOError
|
dataset/ETHPy150Open kdart/pycopia/WWW/pycopia/WWW/XHTML.py/FlowMixin.new_image
|
257
|
def next_id(self, name):
try:
self._COUNTERS[name].next()
return str(self._COUNTERS[name])
except __HOLE__:
ctr = self._COUNTERS[name] = _Counter(name)
return str(ctr)
# helpers for adding specific elements
|
KeyError
|
dataset/ETHPy150Open kdart/pycopia/WWW/pycopia/WWW/XHTML.py/XHTMLDocument.next_id
|
258
|
def new_inline(self, _name, _obj, **attribs):
_obj = create_POM(_obj, self.dtd)
try:
ilmc = getattr(self.dtd, _name)
except __HOLE__:
raise ValidationError("%s: not valid for this DTD." % (_name,))
Inline = get_class(self.dtd, "Inline%s" % (_name,), (InlineMixin, ilmc))
il = Inline(**attribs)
il._init(self.dtd)
if _obj:
il.append(_obj)
self.append(il)
return il
|
AttributeError
|
dataset/ETHPy150Open kdart/pycopia/WWW/pycopia/WWW/XHTML.py/InlineMixin.new_inline
|
259
|
def set_cell(self, col, row, val):
val = check_object(val)
for inter in range(row - len(self._t_rows)):
newrow = self.dtd.Tr()
self._t_rows.append(newrow)
for inter in range(col):
newrow.append(self.dtd.Td())
r = self._t_rows[row-1]
while 1:
try:
td = r[col-1]
except __HOLE__:
r.append(self.dtd.Td())
else:
break
td.append(val)
return td
|
IndexError
|
dataset/ETHPy150Open kdart/pycopia/WWW/pycopia/WWW/XHTML.py/TableMixin.set_cell
|
260
|
def _check_node(self, container, node):
if isinstance(node, (self.dtd.Input, self.dtd.Select, self.dtd.Textarea)):
try:
l = container[node.name]
except __HOLE__:
l = container[node.name] = []
l.append(node)
raise XMLVisitorContinue
|
KeyError
|
dataset/ETHPy150Open kdart/pycopia/WWW/pycopia/WWW/XHTML.py/FormMixin._check_node
|
261
|
def warn(message, category=None, stacklevel=1):
"""Issue a warning, or maybe ignore it or raise an exception."""
# Check if message is already a Warning object
if isinstance(message, Warning):
category = message.__class__
# Check category argument
if category is None:
category = UserWarning
assert issubclass(category, Warning)
# Get context information
try:
caller = sys._getframe(stacklevel)
except ValueError:
globals = sys.__dict__
lineno = 1
else:
globals = caller.f_globals
lineno = caller.f_lineno
if '__name__' in globals:
module = globals['__name__']
else:
module = "<string>"
filename = globals.get('__file__')
if filename:
fnl = filename.lower()
if fnl.endswith((".pyc", ".pyo")):
filename = filename[:-1]
else:
if module == "__main__":
try:
filename = sys.argv[0]
except __HOLE__:
# embedded interpreters don't have sys.argv, see bug #839151
filename = '__main__'
if not filename:
filename = module
registry = globals.setdefault("__warningregistry__", {})
warn_explicit(message, category, filename, lineno, module, registry,
globals)
|
AttributeError
|
dataset/ETHPy150Open babble/babble/include/jython/Lib/warnings.py/warn
|
262
|
def showwarning(message, category, filename, lineno, file=None):
"""Hook to write a warning to a file; replace if you like."""
if file is None:
file = sys.stderr
try:
file.write(formatwarning(message, category, filename, lineno))
except __HOLE__:
pass # the file (probably stderr) is invalid - this warning gets lost.
|
IOError
|
dataset/ETHPy150Open babble/babble/include/jython/Lib/warnings.py/showwarning
|
263
|
def _setoption(arg):
import re
parts = arg.split(':')
if len(parts) > 5:
raise _OptionError("too many fields (max 5): %r" % (arg,))
while len(parts) < 5:
parts.append('')
action, message, category, module, lineno = [s.strip()
for s in parts]
action = _getaction(action)
message = re.escape(message)
category = _getcategory(category)
module = re.escape(module)
if module:
module = module + '$'
if lineno:
try:
lineno = int(lineno)
if lineno < 0:
raise ValueError
except (__HOLE__, OverflowError):
raise _OptionError("invalid lineno %r" % (lineno,))
else:
lineno = 0
filterwarnings(action, message, category, module, lineno)
# Helper for _setoption()
|
ValueError
|
dataset/ETHPy150Open babble/babble/include/jython/Lib/warnings.py/_setoption
|
264
|
def _getcategory(category):
import re
if not category:
return Warning
if re.match("^[a-zA-Z0-9_]+$", category):
try:
cat = eval(category)
except __HOLE__:
raise _OptionError("unknown warning category: %r" % (category,))
else:
i = category.rfind(".")
module = category[:i]
klass = category[i+1:]
try:
m = __import__(module, None, None, [klass])
except ImportError:
raise _OptionError("invalid module name: %r" % (module,))
try:
cat = getattr(m, klass)
except AttributeError:
raise _OptionError("unknown warning category: %r" % (category,))
if not issubclass(cat, Warning):
raise _OptionError("invalid warning category: %r" % (category,))
return cat
# Module initialization
|
NameError
|
dataset/ETHPy150Open babble/babble/include/jython/Lib/warnings.py/_getcategory
|
265
|
def test_len_cycles(self):
N = 20
items = [RefCycle() for i in range(N)]
s = WeakSet(items)
del items
it = iter(s)
try:
next(it)
except __HOLE__:
pass
gc.collect()
n1 = len(s)
del it
gc.collect()
n2 = len(s)
# one item may be kept alive inside the iterator
self.assertIn(n1, (0, 1))
self.assertEqual(n2, 0)
|
StopIteration
|
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/test/test_weakset.py/TestWeakSet.test_len_cycles
|
266
|
@unittest.skipIf(test_support.is_jython, "GarbageCollection not deterministic in Jython")
def test_len_race(self):
# Extended sanity checks for len() in the face of cyclic collection
self.addCleanup(gc.set_threshold, *gc.get_threshold())
for th in range(1, 100):
N = 20
gc.collect(0)
gc.set_threshold(th, th, th)
items = [RefCycle() for i in range(N)]
s = WeakSet(items)
del items
# All items will be collected at next garbage collection pass
it = iter(s)
try:
next(it)
except __HOLE__:
pass
n1 = len(s)
del it
n2 = len(s)
self.assertGreaterEqual(n1, 0)
self.assertLessEqual(n1, N)
self.assertGreaterEqual(n2, 0)
self.assertLessEqual(n2, n1)
|
StopIteration
|
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/test/test_weakset.py/TestWeakSet.test_len_race
|
267
|
@classmethod
def _hashable_item(cls, item):
key, value = item
try:
hash(value)
except __HOLE__:
return key, cls._HASHABLE
return key, value
|
TypeError
|
dataset/ETHPy150Open abusesa/abusehelper/abusehelper/core/config.py/HashableFrozenDict._hashable_item
|
268
|
def flatten(obj):
"""
>>> list(flatten([1, 2]))
[1, 2]
>>> list(flatten([[1, [2, 3]], 4]))
[1, 2, 3, 4]
>>> list(flatten([xrange(1, 3), xrange(3, 5)]))
[1, 2, 3, 4]
>>> list(flatten(list))
[]
"""
queue = collections.deque([obj])
while queue:
obj = queue.popleft()
if callable(obj):
queue.appendleft(obj())
continue
try:
iterable = iter(obj)
except __HOLE__:
yield obj
else:
queue.extendleft(reversed(list(iterable)))
|
TypeError
|
dataset/ETHPy150Open abusesa/abusehelper/abusehelper/core/config.py/flatten
|
269
|
def load_configs(path, name="configs"):
abspath = os.path.abspath(path)
dirname, filename = os.path.split(abspath)
sys_path = list(sys.path)
argv_dir = os.path.dirname(os.path.abspath(sys.argv[0]))
with _workdir(dirname):
try:
try:
sys.path.remove(argv_dir)
except __HOLE__:
pass
sys.path.insert(0, dirname)
module = _load_config_module(abspath)
try:
config_attr = getattr(module, name)
except AttributeError:
raise ImportError("no {0!r} defined in module {1!r}".format(name, filename))
return tuple(flatten(config_attr))
finally:
sys.path[:] = sys_path
|
ValueError
|
dataset/ETHPy150Open abusesa/abusehelper/abusehelper/core/config.py/load_configs
|
270
|
def is_valid_ipv6_address(ip_str):
"""
Ensure we have a valid IPv6 address.
Args:
ip_str: A string, the IPv6 address.
Returns:
A boolean, True if this is a valid IPv6 address.
"""
from django.core.validators import validate_ipv4_address
symbols_re = re.compile(r'^[0-9a-fA-F:.]+$')
if not symbols_re.match(ip_str):
return False
# We need to have at least one ':'.
if ':' not in ip_str:
return False
# We can only have one '::' shortener.
if ip_str.count('::') > 1:
return False
# '::' should be encompassed by start, digits or end.
if ':::' in ip_str:
return False
# A single colon can neither start nor end an address.
if ((ip_str.startswith(':') and not ip_str.startswith('::')) or
(ip_str.endswith(':') and not ip_str.endswith('::'))):
return False
# We can never have more than 7 ':' (1::2:3:4:5:6:7:8 is invalid)
if ip_str.count(':') > 7:
return False
# If we have no concatenation, we need to have 8 fields with 7 ':'.
if '::' not in ip_str and ip_str.count(':') != 7:
# We might have an IPv4 mapped address.
if ip_str.count('.') != 3:
return False
ip_str = _explode_shorthand_ip_string(ip_str)
# Now that we have that all squared away, let's check that each of the
# hextets are between 0x0 and 0xFFFF.
for hextet in ip_str.split(':'):
if hextet.count('.') == 3:
# If we have an IPv4 mapped address, the IPv4 portion has to
# be at the end of the IPv6 portion.
if not ip_str.split(':')[-1] == hextet:
return False
try:
validate_ipv4_address(hextet)
except ValidationError:
return False
else:
try:
# a value error here means that we got a bad hextet,
# something like 0xzzzz
if int(hextet, 16) < 0x0 or int(hextet, 16) > 0xFFFF:
return False
except __HOLE__:
return False
return True
|
ValueError
|
dataset/ETHPy150Open django/django/django/utils/ipv6.py/is_valid_ipv6_address
|
271
|
def thread_local_property(name):
'''Creates a thread local ``property``.'''
name = '_thread_local_' + name
def fget(self):
try:
return getattr(self, name).value
except __HOLE__:
return None
def fset(self, value):
getattr(self, name).value = value
return property(fget=fget, fset=fset)
|
AttributeError
|
dataset/ETHPy150Open dossier/dossier.web/dossier/web/config.py/thread_local_property
|
272
|
def global_config(name):
try:
return yakonfig.get_global_config(name)
except __HOLE__:
return {}
|
KeyError
|
dataset/ETHPy150Open dossier/dossier.web/dossier/web/config.py/global_config
|
273
|
def _reference_list(cmd, references):
""" Return a list of the values in the references mapping whose
keys appear in the command
Parameters
----------
cmd : string. A template command
references : a mapping from tags to substitution objects
Returns
-------
A list of the unique values in references that appear in the command
Examples
--------
>>> cmd = '{g} - {r} + {g}'
>>> references = {'g' : g_object, 'r' : r_object, 'i' : i_object}
>>> _reference_list(cmd, references)
[g_object, r_object]
Raises
------
KeyError: if tags in the command aren't in the reference mapping
"""
try:
return list(set(references[m.group('tag')]
for m in TAG_RE.finditer(cmd)))
except __HOLE__:
raise KeyError("Tags from command not in reference mapping")
|
KeyError
|
dataset/ETHPy150Open glue-viz/glue/glue/core/parse.py/_reference_list
|
274
|
def solveset(f, symbol=None, domain=S.Complexes):
"""Solves a given inequality or equation with set as output
Parameters
==========
f : Expr or a relational.
The target equation or inequality
symbol : Symbol
The variable for which the equation is solved
domain : Set
The domain over which the equation is solved
Returns
=======
Set
A set of values for `symbol` for which `f` is True or is equal to
zero. An `EmptySet` is returned if `f` is False or nonzero.
A `ConditionSet` is returned as unsolved object if algorithms
to evaluate complete solution are not yet implemented.
`solveset` claims to be complete in the solution set that it returns.
Raises
======
NotImplementedError
The algorithms to solve inequalities in complex domain are
not yet implemented.
ValueError
The input is not valid.
RuntimeError
It is a bug, please report to the github issue tracker.
Notes
=====
Python interprets 0 and 1 as False and True, respectively, but
in this function they refer to solutions of an expression. So 0 and 1
return the Domain and EmptySet, respectively, while True and False
return the opposite (as they are assumed to be solutions of relational
expressions).
See Also
========
solveset_real: solver for real domain
solveset_complex: solver for complex domain
Examples
========
>>> from sympy import exp, sin, Symbol, pprint, S
>>> from sympy.solvers.solveset import solveset, solveset_real
* The default domain is complex. Not specifying a domain will lead
to the solving of the equation in the complex domain (and this
is not affected by the assumptions on the symbol):
>>> x = Symbol('x')
>>> pprint(solveset(exp(x) - 1, x), use_unicode=False)
{2*n*I*pi | n in Integers()}
>>> x = Symbol('x', real=True)
>>> pprint(solveset(exp(x) - 1, x), use_unicode=False)
{2*n*I*pi | n in Integers()}
* If you want to use `solveset` to solve the equation in the
real domain, provide a real domain. (Using `solveset\_real`
does this automatically.)
>>> R = S.Reals
>>> x = Symbol('x')
>>> solveset(exp(x) - 1, x, R)
{0}
>>> solveset_real(exp(x) - 1, x)
{0}
The solution is mostly unaffected by assumptions on the symbol,
but there may be some slight difference:
>>> pprint(solveset(sin(x)/x,x), use_unicode=False)
({2*n*pi | n in Integers()} \ {0}) U ({2*n*pi + pi | n in Integers()} \ {0})
>>> p = Symbol('p', positive=True)
>>> pprint(solveset(sin(p)/p, p), use_unicode=False)
{2*n*pi | n in Integers()} U {2*n*pi + pi | n in Integers()}
* Inequalities can be solved over the real domain only. Use of a complex
domain leads to a NotImplementedError.
>>> solveset(exp(x) > 1, x, R)
(0, oo)
"""
f = sympify(f)
if f is S.true:
return domain
if f is S.false:
return S.EmptySet
if not isinstance(f, (Expr, Number)):
raise ValueError("%s is not a valid SymPy expression" % (f))
free_symbols = f.free_symbols
if not free_symbols:
b = Eq(f, 0)
if b is S.true:
return domain
elif b is S.false:
return S.EmptySet
else:
raise NotImplementedError(filldedent('''
relationship between value and 0 is unknown: %s''' % b))
if symbol is None:
if len(free_symbols) == 1:
symbol = free_symbols.pop()
else:
raise ValueError(filldedent('''
The independent variable must be specified for a
multivariate equation.'''))
elif not getattr(symbol, 'is_Symbol', False):
raise ValueError('A Symbol must be given, not type %s: %s' %
(type(symbol), symbol))
if isinstance(f, Eq):
from sympy.core import Add
f = Add(f.lhs, - f.rhs, evaluate=False)
elif f.is_Relational:
if not domain.is_subset(S.Reals):
raise NotImplementedError(filldedent('''
Inequalities in the complex domain are
not supported. Try the real domain by
setting domain=S.Reals'''))
try:
result = solve_univariate_inequality(
f, symbol, relational=False) - _invalid_solutions(
f, symbol, domain)
except __HOLE__:
result = ConditionSet(symbol, f, domain)
return result
return _solveset(f, symbol, domain, _check=True)
|
NotImplementedError
|
dataset/ETHPy150Open sympy/sympy/sympy/solvers/solveset.py/solveset
|
275
|
def linsolve(system, *symbols):
r"""
Solve system of N linear equations with M variables, which
means both under- and overdetermined systems are supported.
The possible number of solutions is zero, one or infinite.
Zero solutions throws a ValueError, whereas infinite
solutions are represented parametrically in terms of given
symbols. For unique solution a FiniteSet of ordered tuple
is returned.
All Standard input formats are supported:
For the given set of Equations, the respective input types
are given below:
.. math:: 3x + 2y - z = 1
.. math:: 2x - 2y + 4z = -2
.. math:: 2x - y + 2z = 0
* Augmented Matrix Form, `system` given below:
::
[3 2 -1 1]
system = [2 -2 4 -2]
[2 -1 2 0]
* List Of Equations Form
`system = [3x + 2y - z - 1, 2x - 2y + 4z + 2, 2x - y + 2z]`
* Input A & b Matrix Form (from Ax = b) are given as below:
::
[3 2 -1 ] [ 1 ]
A = [2 -2 4 ] b = [ -2 ]
[2 -1 2 ] [ 0 ]
`system = (A, b)`
Symbols to solve for should be given as input in all the
cases either in an iterable or as comma separated arguments.
This is done to maintain consistency in returning solutions
in the form of variable input by the user.
The algorithm used here is Gauss-Jordan elimination, which
results, after elimination, in a row echelon form matrix.
Returns
=======
A FiniteSet of ordered tuple of values of `symbols` for which
the `system` has solution.
Please note that general FiniteSet is unordered, the solution
returned here is not simply a FiniteSet of solutions, rather
it is a FiniteSet of ordered tuple, i.e. the first & only
argument to FiniteSet is a tuple of solutions, which is ordered,
& hence the returned solution is ordered.
Also note that solution could also have been returned as an
ordered tuple, FiniteSet is just a wrapper `{}` around
the tuple. It has no other significance except for
the fact it is just used to maintain a consistent output
format throughout the solveset.
Returns EmptySet(), if the linear system is inconsistent.
Raises
======
ValueError
The input is not valid.
The symbols are not given.
Examples
========
>>> from sympy import Matrix, S, linsolve, symbols
>>> x, y, z = symbols("x, y, z")
>>> A = Matrix([[1, 2, 3], [4, 5, 6], [7, 8, 10]])
>>> b = Matrix([3, 6, 9])
>>> A
Matrix([
[1, 2, 3],
[4, 5, 6],
[7, 8, 10]])
>>> b
Matrix([
[3],
[6],
[9]])
>>> linsolve((A, b), [x, y, z])
{(-1, 2, 0)}
* Parametric Solution: In case the system is underdetermined, the function
will return parametric solution in terms of the given symbols.
Free symbols in the system are returned as it is. For e.g. in the system
below, `z` is returned as the solution for variable z, which means z is a
free symbol, i.e. it can take arbitrary values.
>>> A = Matrix([[1, 2, 3], [4, 5, 6], [7, 8, 9]])
>>> b = Matrix([3, 6, 9])
>>> linsolve((A, b), [x, y, z])
{(z - 1, -2*z + 2, z)}
* List of Equations as input
>>> Eqns = [3*x + 2*y - z - 1, 2*x - 2*y + 4*z + 2, - x + S(1)/2*y - z]
>>> linsolve(Eqns, x, y, z)
{(1, -2, -2)}
* Augmented Matrix as input
>>> aug = Matrix([[2, 1, 3, 1], [2, 6, 8, 3], [6, 8, 18, 5]])
>>> aug
Matrix([
[2, 1, 3, 1],
[2, 6, 8, 3],
[6, 8, 18, 5]])
>>> linsolve(aug, x, y, z)
{(3/10, 2/5, 0)}
* Solve for symbolic coefficients
>>> a, b, c, d, e, f = symbols('a, b, c, d, e, f')
>>> eqns = [a*x + b*y - c, d*x + e*y - f]
>>> linsolve(eqns, x, y)
{((-b*f + c*e)/(a*e - b*d), (a*f - c*d)/(a*e - b*d))}
* A degenerate system returns solution as set of given
symbols.
>>> system = Matrix(([0,0,0], [0,0,0], [0,0,0]))
>>> linsolve(system, x, y)
{(x, y)}
* For an empty system linsolve returns empty set
>>> linsolve([ ], x)
EmptySet()
"""
if not system:
return S.EmptySet
if not symbols:
raise ValueError('Symbols must be given, for which solution of the '
'system is to be found.')
if hasattr(symbols[0], '__iter__'):
symbols = symbols[0]
try:
sym = symbols[0].is_Symbol
except __HOLE__:
sym = False
if not sym:
raise ValueError('Symbols or iterable of symbols must be given as '
'second argument, not type %s: %s' % (type(symbols[0]), symbols[0]))
# 1). Augmented Matrix input Form
if isinstance(system, Matrix):
A, b = system[:, :-1], system[:, -1:]
elif hasattr(system, '__iter__'):
# 2). A & b as input Form
if len(system) == 2 and system[0].is_Matrix:
A, b = system[0], system[1]
# 3). List of equations Form
if not system[0].is_Matrix:
A, b = linear_eq_to_matrix(system, symbols)
else:
raise ValueError("Invalid arguments")
# Solve using Gauss-Jordan elimination
try:
sol, params, free_syms = A.gauss_jordan_solve(b, freevar=True)
except ValueError:
# No solution
return EmptySet()
# Replace free parameters with free symbols
solution = []
if params:
for s in sol:
for k, v in enumerate(params):
s = s.xreplace({v: symbols[free_syms[k]]})
solution.append(simplify(s))
else:
for s in sol:
solution.append(simplify(s))
# Return solutions
solution = FiniteSet(tuple(solution))
return solution
|
AttributeError
|
dataset/ETHPy150Open sympy/sympy/sympy/solvers/solveset.py/linsolve
|
276
|
def delete_shelf_tab(shelf_name, confirm=True):
"""The python version of the original mel script of Maya
:param shelf_name: The name of the shelf to delete
"""
try:
shelf_top_level_path = pm.melGlobals['gShelfTopLevel']
except KeyError:
# not in GUI mode
return
shelf_top_level = pm.windows.tabLayout(shelf_top_level_path, e=1)
if len(shelf_top_level.children()) < 0:
return
if confirm:
# before doing anything ask it
response = pm.confirmDialog(
title='Delete Shelf?',
message='Delete %s?' % shelf_name,
button=['Yes', 'No'],
defaultButton='No',
cancelButton='No',
dismissString='No'
)
if response == 'No':
return
# update the preferences
shelf_number = -1
number_of_shelves = pm.optionVar['numShelves']
for i in range(1, number_of_shelves + 1):
if pm.optionVar['shelfName%s' % i] == shelf_name:
shelf_number = i
break
if shelf_number == -1:
# there should be no shelf with this name
return
# offset shelves
for i in range(shelf_number, number_of_shelves):
pm.optionVar['shelfLoad%s' % i] = pm.optionVar['shelfLoad%s' % (i + 1)]
pm.optionVar['shelfName%s' % i] = pm.optionVar['shelfName%s' % (i + 1)]
pm.optionVar['shelfFile%s' % i] = pm.optionVar['shelfFile%s' % (i + 1)]
pm.optionVar.pop('shelfLoad%s' % number_of_shelves)
number_of_shelves -= 1
pm.optionVar['numShelves'] = number_of_shelves
pm.windows.deleteUI('%s|%s' % (shelf_top_level_path, shelf_name), layout=1)
# remove the shelf mel file from user folders
for path in pm.internalVar(userShelfDir=1).split(os.path.pathsep):
shelf_file_name = 'shelf_%s.mel' % shelf_name
shelf_file_full_path = os.path.join(path, shelf_file_name)
deleted_file_name = '%s.deleted' % shelf_file_name
deleted_file_full_path = os.path.join(path, deleted_file_name)
try:
os.remove(deleted_file_full_path)
except __HOLE__:
pass
try:
os.remove(shelf_file_full_path)
break
except OSError:
pass
# Make sure the new active shelf tab has buttons
pm.mel.eval('shelfTabChange();')
|
OSError
|
dataset/ETHPy150Open eoyilmaz/anima/anima/env/mayaEnv/auxiliary.py/delete_shelf_tab
|
277
|
def create_hud(self, hud_name):
"""creates HUD
"""
self.remove_hud(hud_name)
try:
# create our HUD
pm.headsUpDisplay(
hud_name,
section=7,
block=1,
ao=1,
blockSize="medium",
labelFontSize="large",
dfs="large",
command=self.get_hud_data,
atr=1
)
except __HOLE__:
# there is another HUD in that position remove it
pm.headsUpDisplay(removePosition=(7, 1))
self.create_hud(hud_name)
|
RuntimeError
|
dataset/ETHPy150Open eoyilmaz/anima/anima/env/mayaEnv/auxiliary.py/Playblaster.create_hud
|
278
|
def set_view_options(self):
"""set view options for playblast
"""
active_panel = self.get_active_panel()
# turn all show/hide display options off except for polygons and
# surfaces
pm.modelEditor(active_panel, e=1, allObjects=False)
pm.modelEditor(active_panel, e=1, manipulators=False)
pm.modelEditor(active_panel, e=1, grid=False)
pm.modelEditor(active_panel, e=1, polymeshes=True)
pm.modelEditor(active_panel, e=1, nurbsSurfaces=True)
pm.modelEditor(active_panel, e=1, subdivSurfaces=True)
pm.modelEditor(active_panel, e=1,
pluginObjects=('gpuCacheDisplayFilter', True))
pm.modelEditor(active_panel, e=1, dynamics=True)
pm.modelEditor(active_panel, e=1, nParticles=True)
pm.modelEditor(active_panel, e=1, nCloths=True)
pm.modelEditor(active_panel, e=1, fluids=True)
pm.modelEditor(active_panel, e=1, nParticles=True)
pm.modelEditor(active_panel, e=1, planes=True)
pm.modelEditor(active_panel, e=1, imagePlane=True)
# turn all hud displays off
hud_flags = pm.headsUpDisplay(lh=1)
for flag in hud_flags:
pm.headsUpDisplay(flag, e=1, vis=0)
# set camera options for playblast
for camera in pm.ls(type='camera'):
try:
camera.setAttr('overscan', 1)
except __HOLE__:
pass
try:
camera.setAttr('filmFit', 1)
except RuntimeError:
pass
try:
camera.setAttr('displayFilmGate', 1)
except RuntimeError:
pass
try:
camera.setAttr('displayResolution', 0)
except RuntimeError:
pass
|
RuntimeError
|
dataset/ETHPy150Open eoyilmaz/anima/anima/env/mayaEnv/auxiliary.py/Playblaster.set_view_options
|
279
|
def restore_user_options(self):
"""restores user options
"""
active_panel = self.get_active_panel()
for flag, value in self.user_view_options['display_flags'].items():
pm.modelEditor(active_panel, **{'e': 1, flag: value})
# reassign original hud display options
for hud, value in self.user_view_options['huds'].items():
if pm.headsUpDisplay(hud, q=1, ex=1):
pm.headsUpDisplay(hud, e=1, vis=value)
# reassign original camera options
for camera in pm.ls(type='camera'):
camera_name = camera.name()
camera_flags = self.user_view_options['camera_flags'][camera_name]
for attr, value in camera_flags.items():
try:
camera.setAttr(attr, value)
except __HOLE__:
pass
self.remove_hud(self.hud_name)
|
RuntimeError
|
dataset/ETHPy150Open eoyilmaz/anima/anima/env/mayaEnv/auxiliary.py/Playblaster.restore_user_options
|
280
|
def playblast_simple(self, extra_playblast_options=None):
"""Does a simple playblast
:param extra_playblast_options: A dictionary for extra playblast
options.
:return: A string showing the path of the resultant movie file
"""
playblast_options = copy.copy(self.global_playblast_options)
playblast_options['sequenceTime'] = False
playblast_options['percent'] = 50
if extra_playblast_options:
playblast_options.update(extra_playblast_options)
# find some audio
audio_node = self.get_audio_node()
if audio_node:
playblast_options['sound'] = audio_node
playblast_options['useTraxSounds'] = False
else:
playblast_options['useTraxSounds'] = True
# width height
if 'wh' not in playblast_options:
# get project resolution
# use half HD by default
width = 1920
height = 1080
if self.version:
project = self.version.task.project
# get the resolution
imf = project.image_format
width = int(imf.width)
height = int(imf.height)
playblast_options['wh'] = (width, height)
# output path
if 'filename' not in playblast_options:
if self.version:
# use version.base_name
filename = os.path.splitext(self.version.filename)[0]
else:
# use the current scene name
filename = os.path.splitext(
os.path.basename(
pm.sceneName()
)
)[0]
# also render to temp
playblast_options['filename'] = \
os.path.join(tempfile.gettempdir(), filename)
result = []
try:
self.store_user_options()
self.set_view_options()
self.create_hud(self.hud_name)
import pprint
pprint.pprint(playblast_options)
# update all cameras in the scene to have correct film back
for cam in pm.ls(type='camera'):
try:
cam.verticalFilmAperture.set(
cam.horizontalFilmAperture.get() *
float(playblast_options['wh'][1]) /
float(playblast_options['wh'][0])
)
except __HOLE__:
pass
result = [pm.playblast(**playblast_options)]
finally:
self.restore_user_options()
return result
|
AttributeError
|
dataset/ETHPy150Open eoyilmaz/anima/anima/env/mayaEnv/auxiliary.py/Playblaster.playblast_simple
|
281
|
@classmethod
def upload_output(cls, version, output_file_full_path):
"""sets the given file as the output of the given version, also
generates a thumbnail and a web version if it is a movie file
:param version: The stalker version instance
:param output_file_full_path: the path of the media file
"""
from stalker import Version
if not isinstance(version, Version):
raise RuntimeError('version should be a stalker version instance!')
hires_extension = '.mov'
webres_extension = '.webm'
thumbnail_extension = '.png'
output_file_name = os.path.basename(output_file_full_path)
hires_output_file_name = '%s%s' % (
os.path.splitext(output_file_name)[0],
hires_extension
)
webres_output_file_name = '%s%s' % (
os.path.splitext(output_file_name)[0],
webres_extension
)
thumbnail_output_file_name = '%s%s' % (
os.path.splitext(output_file_name)[0],
thumbnail_extension
)
task = version.task
hires_path = os.path.join(
task.absolute_path, 'Outputs', 'Stalker_Pyramid',
hires_output_file_name
)
webres_path = os.path.join(
task.absolute_path, 'Outputs', 'Stalker_Pyramid', 'ForWeb',
webres_output_file_name
)
thumbnail_path = os.path.join(
task.absolute_path, 'Outputs', 'Stalker_Pyramid', 'Thumbnail',
thumbnail_output_file_name
)
# create folders
try:
os.makedirs(os.path.dirname(hires_path))
except OSError:
pass
try:
os.makedirs(os.path.dirname(webres_path))
except OSError:
pass
try:
os.makedirs(os.path.dirname(thumbnail_path))
except OSError:
pass
shutil.copy(output_file_full_path, hires_path)
# generate the web version
from anima.utils import MediaManager
m = MediaManager()
temp_web_version_full_path = \
m.generate_media_for_web(output_file_full_path)
try:
shutil.copy(temp_web_version_full_path, webres_path)
except IOError:
pass
temp_thumbnail_full_path = \
m.generate_thumbnail(output_file_full_path)
try:
# also upload thumbnail
shutil.copy(temp_thumbnail_full_path, thumbnail_path)
except __HOLE__:
pass
project = task.project
repo = project.repository
from stalker import db, Link
# try to find a file with the same name assigned to the version as
# output
found = None
hires_os_independent_path = repo.to_os_independent_path(hires_path)
for output in version.outputs:
if output.full_path == hires_os_independent_path:
found = True
break
# if we found a file with the same name as the output, just overwrite
# it
if not found:
l_hires = Link(
full_path=repo.to_os_independent_path(hires_path),
original_filename=hires_output_file_name
)
l_for_web = Link(
full_path=repo.to_os_independent_path(webres_path),
original_filename=hires_output_file_name
)
l_hires.thumbnail = l_for_web
version.outputs.append(l_hires)
l_thumb = Link(
full_path=repo.to_os_independent_path(thumbnail_path),
original_filename=hires_output_file_name
)
l_for_web.thumbnail = l_thumb
db.DBSession.add_all([l_hires, l_for_web, l_thumb])
db.DBSession.commit()
return hires_path
|
IOError
|
dataset/ETHPy150Open eoyilmaz/anima/anima/env/mayaEnv/auxiliary.py/Playblaster.upload_output
|
282
|
def export_alembic_from_cache_node(handles=0, step=1):
"""exports alembic caches by looking at the current scene and try to find
transform nodes which has an attribute called "cacheable"
:param int handles: An integer that shows the desired handles from start
and end.
"""
import os
# get cacheable nodes in the current scene
cacheable_nodes = get_cacheable_nodes()
# stop if there are no cacheable nodes in the scene
if not cacheable_nodes:
return
# load Abc plugin first
if not pm.pluginInfo('AbcExport', q=1, l=1):
pm.loadPlugin('AbcExport')
from anima.ui.progress_dialog import ProgressDialogManager
pdm = ProgressDialogManager()
cacheable_nodes.sort(key=lambda x: x.getAttr('cacheable'))
caller = pdm.register(len(cacheable_nodes), 'Exporting Alembic Caches')
start_frame = pm.playbackOptions(q=1, ast=1)
end_frame = pm.playbackOptions(q=1, aet=1)
current_file_full_path = pm.sceneName()
current_file_path = os.path.dirname(current_file_full_path)
current_file_name = os.path.basename(current_file_full_path)
# export alembic caches
previous_cacheable_attr_value = ''
i = 1
for cacheable_node in cacheable_nodes:
cacheable_attr_value = cacheable_node.getAttr('cacheable')
if cacheable_attr_value == previous_cacheable_attr_value:
i += 1
else:
i = 1
# hide any child node that has "rig" or "proxy" or "low" in its name
wrong_node_names = ['rig', 'proxy', 'low']
hidden_nodes = []
for child in pm.ls(cacheable_node.getChildren(), type='transform'):
if any([n in child.name().split(':')[-1].lower() for n in wrong_node_names]):
if child.v.get() is True and not child.v.isLocked():
child.v.set(False)
hidden_nodes.append(child)
output_path = os.path.join(
current_file_path,
'Outputs/alembic/%s%i/' % (cacheable_attr_value, i)
)
output_filename = '%s_%i_%i_%s%i%s' % (
os.path.splitext(current_file_name)[0],
start_frame, end_frame, cacheable_attr_value, i, '.abc'
)
output_full_path = \
os.path.join(output_path, output_filename).replace('\\', '/')
try:
os.makedirs(os.path.dirname(output_full_path))
except __HOLE__:
pass
command = 'AbcExport -j "-frameRange %s %s -step %s -ro ' \
'-stripNamespaces -uvWrite -worldSpace -eulerFilter ' \
'-writeVisibility -root %s -file %s";'
# use a temp file to export the cache
# and then move it in to place
temp_cache_file_path = \
tempfile.mktemp(suffix='.abc').replace('\\', '/')
pm.mel.eval(
command % (
int(start_frame - handles),
int(end_frame + handles),
step,
cacheable_node.fullPath(),
temp_cache_file_path
)
)
# move in to place
shutil.move(temp_cache_file_path, output_full_path)
previous_cacheable_attr_value = cacheable_attr_value
# reveal any previously hidden nodes
for node in hidden_nodes:
node.v.set(True)
caller.step()
# noinspection PyStatementEffect
|
OSError
|
dataset/ETHPy150Open eoyilmaz/anima/anima/env/mayaEnv/auxiliary.py/export_alembic_from_cache_node
|
283
|
def unsetup(self):
"""deletes the barn door setup
"""
try:
pm.delete(self.light.attr(self.message_storage_attr_name).inputs())
except __HOLE__:
pass
pm.scriptJob(
k=int(self.light.getAttr(self.custom_data_storage_attr_name))
)
|
AttributeError
|
dataset/ETHPy150Open eoyilmaz/anima/anima/env/mayaEnv/auxiliary.py/BarnDoorSimulator.unsetup
|
284
|
def match_hierarchy(source, target):
"""Matches the objects in two different hierarchy by looking at their
names.
Returns a dictionary where you can look up for matches by using the object
name.
"""
source_nodes = source.listRelatives(
ad=1,
type=(pm.nt.Mesh, pm.nt.NurbsSurface)
)
target_nodes = target.listRelatives(
ad=1,
type=(pm.nt.Mesh, pm.nt.NurbsSurface)
)
source_node_names = []
target_node_names = []
lut = {
'match': [],
'no_match': []
}
for node in source_nodes:
name = node.name().split(':')[-1].split('|')[-1]
source_node_names.append(name)
for node in target_nodes:
name = node.name().split(':')[-1].split('|')[-1]
target_node_names.append(name)
for i, target_node in enumerate(target_nodes):
target_node_name = target_node_names[i]
try:
tmp_target_node_name = target_node_name
if target_node_name.endswith('Deformed'):
tmp_target_node_name = \
target_node_name.replace('Deformed', '')
index = source_node_names.index(tmp_target_node_name)
except __HOLE__:
lut['no_match'].append(target_node)
else:
lut['match'].append((source_nodes[index], target_nodes[i]))
return lut
|
ValueError
|
dataset/ETHPy150Open eoyilmaz/anima/anima/env/mayaEnv/auxiliary.py/match_hierarchy
|
285
|
def occurrences_after(self, after=None):
"""
returns a generator that produces occurrences after the datetime
``after``. Includes all of the persisted Occurrences.
"""
if after is None:
after = timezone.now()
occ_replacer = OccurrenceReplacer(self.occurrence_set.all())
generator = self._occurrences_after_generator(after)
trickies = list(self.occurrence_set.filter(original_start__lte=after, start__gte=after).order_by('start'))
while True:
try:
nxt = next(generator)
except __HOLE__:
nxt = None
if (len(trickies) > 0 and (nxt is None or nxt.start > trickies[0].start)):
yield trickies.pop(0)
if (nxt is None):
raise StopIteration
yield occ_replacer.get_occurrence(nxt)
|
StopIteration
|
dataset/ETHPy150Open llazzaro/django-scheduler/schedule/models/events.py/Event.occurrences_after
|
286
|
@property
def effective_start(self):
if self.pk and self.end_recurring_period:
occ_generator = self._occurrences_after_generator(self.start)
try:
return next(occ_generator).start
except __HOLE__:
pass
elif self.pk:
return self.start
return None
|
StopIteration
|
dataset/ETHPy150Open llazzaro/django-scheduler/schedule/models/events.py/Event.effective_start
|
287
|
def build(master=None, initialcolor=None, initfile=None, ignore=None,
dbfile=None):
# create all output widgets
s = Switchboard(not ignore and initfile)
# defer to the command line chosen color database, falling back to the one
# in the .pynche file.
if dbfile is None:
dbfile = s.optiondb().get('DBFILE')
# find a parseable color database
colordb = None
files = RGB_TXT[:]
if dbfile is None:
dbfile = files.pop()
while colordb is None:
try:
colordb = ColorDB.get_colordb(dbfile)
except (__HOLE__, IOError):
pass
if colordb is None:
if not files:
break
dbfile = files.pop(0)
if not colordb:
usage(1, 'No color database file found, see the -d option.')
s.set_colordb(colordb)
# create the application window decorations
app = PyncheWidget(__version__, s, master=master)
w = app.window()
# these built-in viewers live inside the main Pynche window
s.add_view(StripViewer(s, w))
s.add_view(ChipViewer(s, w))
s.add_view(TypeinViewer(s, w))
# get the initial color as components and set the color on all views. if
# there was no initial color given on the command line, use the one that's
# stored in the option database
if initialcolor is None:
optiondb = s.optiondb()
red = optiondb.get('RED')
green = optiondb.get('GREEN')
blue = optiondb.get('BLUE')
# but if there wasn't any stored in the database, use grey50
if red is None or blue is None or green is None:
red, green, blue = initial_color('grey50', colordb)
else:
red, green, blue = initial_color(initialcolor, colordb)
s.update_views(red, green, blue)
return app, s
|
KeyError
|
dataset/ETHPy150Open francelabs/datafari/windows/python/Tools/pynche/Main.py/build
|
288
|
def run(app, s):
try:
app.start()
except __HOLE__:
pass
|
KeyboardInterrupt
|
dataset/ETHPy150Open francelabs/datafari/windows/python/Tools/pynche/Main.py/run
|
289
|
def __init__(self, apps):
try:
apps = apps.items()
except __HOLE__:
pass
# Sort the apps by len(path), descending
apps.sort()
apps.reverse()
# The path_prefix strings must start, but not end, with a slash.
# Use "" instead of "/".
self.apps = [(p.rstrip("/"), a) for p, a in apps]
|
AttributeError
|
dataset/ETHPy150Open cloudera/hue/desktop/core/src/desktop/lib/wsgiserver.py/WSGIPathInfoDispatcher.__init__
|
290
|
def _parse_request(self):
# HTTP/1.1 connections are persistent by default. If a client
# requests a page, then idles (leaves the connection open),
# then rfile.readline() will raise socket.error("timed out").
# Note that it does this based on the value given to settimeout(),
# and doesn't need the client to request or acknowledge the close
# (although your TCP stack might suffer for it: cf Apache's history
# with FIN_WAIT_2).
request_line = self.rfile.readline()
# Set started_request to True so communicate() knows to send 408
# from here on out.
self.started_request = True
if not request_line:
# Force self.ready = False so the connection will close.
self.ready = False
return
if request_line == "\r\n":
# RFC 2616 sec 4.1: "...if the server is reading the protocol
# stream at the beginning of a message and receives a CRLF
# first, it should ignore the CRLF."
# But only ignore one leading line! else we enable a DoS.
request_line = self.rfile.readline()
if not request_line:
self.ready = False
return
environ = self.environ
try:
method, path, req_protocol = request_line.strip().split(" ", 2)
except __HOLE__:
self.simple_response(400, "Malformed Request-Line")
return
environ["REQUEST_METHOD"] = method
# path may be an abs_path (including "http://host.domain.tld");
scheme, location, path, params, qs, frag = urlparse(path)
if frag:
self.simple_response("400 Bad Request",
"Illegal #fragment in Request-URI.")
return
if scheme:
environ["wsgi.url_scheme"] = scheme
if params:
path = path + ";" + params
environ["SCRIPT_NAME"] = ""
# Unquote the path+params (e.g. "/this%20path" -> "this path").
# http://www.w3.org/Protocols/rfc2616/rfc2616-sec5.html#sec5.1.2
#
# But note that "...a URI must be separated into its components
# before the escaped characters within those components can be
# safely decoded." http://www.ietf.org/rfc/rfc2396.txt, sec 2.4.2
atoms = [unquote(x) for x in quoted_slash.split(path)]
path = "%2F".join(atoms)
environ["PATH_INFO"] = path
# Note that, like wsgiref and most other WSGI servers,
# we unquote the path but not the query string.
environ["QUERY_STRING"] = qs
# Compare request and server HTTP protocol versions, in case our
# server does not support the requested protocol. Limit our output
# to min(req, server). We want the following output:
# request server actual written supported response
# protocol protocol response protocol feature set
# a 1.0 1.0 1.0 1.0
# b 1.0 1.1 1.1 1.0
# c 1.1 1.0 1.0 1.0
# d 1.1 1.1 1.1 1.1
# Notice that, in (b), the response will be "HTTP/1.1" even though
# the client only understands 1.0. RFC 2616 10.5.6 says we should
# only return 505 if the _major_ version is different.
rp = int(req_protocol[5]), int(req_protocol[7])
server_protocol = environ["ACTUAL_SERVER_PROTOCOL"]
sp = int(server_protocol[5]), int(server_protocol[7])
if sp[0] != rp[0]:
self.simple_response("505 HTTP Version Not Supported")
return
# Bah. "SERVER_PROTOCOL" is actually the REQUEST protocol.
environ["SERVER_PROTOCOL"] = req_protocol
self.response_protocol = "HTTP/%s.%s" % min(rp, sp)
# If the Request-URI was an absoluteURI, use its location atom.
if location:
environ["SERVER_NAME"] = location
# then all the http headers
try:
self.read_headers()
except ValueError, ex:
self.simple_response("400 Bad Request", repr(ex.args))
return
mrbs = self.max_request_body_size
if mrbs and int(environ.get("CONTENT_LENGTH", 0)) > mrbs:
self.simple_response("413 Request Entity Too Large")
return
# Persistent connection support
if self.response_protocol == "HTTP/1.1":
# Both server and client are HTTP/1.1
if environ.get("HTTP_CONNECTION", "") == "close":
self.close_connection = True
else:
# Either the server or client (or both) are HTTP/1.0
if environ.get("HTTP_CONNECTION", "") != "Keep-Alive":
self.close_connection = True
# Transfer-Encoding support
te = None
if self.response_protocol == "HTTP/1.1":
te = environ.get("HTTP_TRANSFER_ENCODING")
if te:
te = [x.strip().lower() for x in te.split(",") if x.strip()]
self.chunked_read = False
if te:
for enc in te:
if enc == "chunked":
self.chunked_read = True
else:
# Note that, even if we see "chunked", we must reject
# if there is an extension we don't recognize.
self.simple_response("501 Unimplemented")
self.close_connection = True
return
# From PEP 333:
# "Servers and gateways that implement HTTP 1.1 must provide
# transparent support for HTTP 1.1's "expect/continue" mechanism.
# This may be done in any of several ways:
# 1. Respond to requests containing an Expect: 100-continue request
# with an immediate "100 Continue" response, and proceed normally.
# 2. Proceed with the request normally, but provide the application
# with a wsgi.input stream that will send the "100 Continue"
# response if/when the application first attempts to read from
# the input stream. The read request must then remain blocked
# until the client responds.
# 3. Wait until the client decides that the server does not support
# expect/continue, and sends the request body on its own.
# (This is suboptimal, and is not recommended.)
#
# We used to do 3, but are now doing 1. Maybe we'll do 2 someday,
# but it seems like it would be a big slowdown for such a rare case.
if environ.get("HTTP_EXPECT", "") == "100-continue":
self.simple_response(100)
self.ready = True
|
ValueError
|
dataset/ETHPy150Open cloudera/hue/desktop/core/src/desktop/lib/wsgiserver.py/HTTPRequest._parse_request
|
291
|
def send_headers(self):
"""Assert, process, and send the HTTP response message-headers."""
hkeys = [key.lower() for key, value in self.outheaders]
status = int(self.status[:3])
if status == 413:
# Request Entity Too Large. Close conn to avoid garbage.
self.close_connection = True
elif "content-length" not in hkeys:
# "All 1xx (informational), 204 (no content),
# and 304 (not modified) responses MUST NOT
# include a message-body." So no point chunking.
if status < 200 or status in (204, 205, 304):
pass
else:
if (self.response_protocol == 'HTTP/1.1'
and self.environ["REQUEST_METHOD"] != 'HEAD'):
# Use the chunked transfer-coding
self.chunked_write = True
self.outheaders.append(("Transfer-Encoding", "chunked"))
else:
# Closing the conn is the only way to determine len.
self.close_connection = True
if "connection" not in hkeys:
if self.response_protocol == 'HTTP/1.1':
# Both server and client are HTTP/1.1 or better
if self.close_connection:
self.outheaders.append(("Connection", "close"))
else:
# Server and/or client are HTTP/1.0
if not self.close_connection:
self.outheaders.append(("Connection", "Keep-Alive"))
if (not self.close_connection) and (not self.chunked_read):
# Read any remaining request body data on the socket.
# "If an origin server receives a request that does not include an
# Expect request-header field with the "100-continue" expectation,
# the request includes a request body, and the server responds
# with a final status code before reading the entire request body
# from the transport connection, then the server SHOULD NOT close
# the transport connection until it has read the entire request,
# or until the client closes the connection. Otherwise, the client
# might not reliably receive the response message. However, this
# requirement is not be construed as preventing a server from
# defending itself against denial-of-service attacks, or from
# badly broken client implementations."
size = self.rfile.maxlen - self.rfile.bytes_read
if size > 0:
self.rfile.read(size)
if "date" not in hkeys:
self.outheaders.append(("Date", rfc822.formatdate()))
if "server" not in hkeys:
self.outheaders.append(("Server", self.environ['SERVER_SOFTWARE']))
buf = [self.environ['ACTUAL_SERVER_PROTOCOL'], " ", self.status, "\r\n"]
try:
buf += [k + ": " + v + "\r\n" for k, v in self.outheaders]
except __HOLE__:
if not isinstance(k, str):
raise TypeError("WSGI response header key %r is not a string.")
if not isinstance(v, str):
raise TypeError("WSGI response header value %r is not a string.")
else:
raise
buf.append("\r\n")
self.wfile.sendall("".join(buf))
|
TypeError
|
dataset/ETHPy150Open cloudera/hue/desktop/core/src/desktop/lib/wsgiserver.py/HTTPRequest.send_headers
|
292
|
def _safe_call(self, is_reader, call, *args, **kwargs):
"""Wrap the given call with SSL error-trapping.
is_reader: if False EOF errors will be raised. If True, EOF errors
will return "" (to emulate normal sockets).
"""
start = time.time()
while True:
try:
return call(*args, **kwargs)
except SSL.WantReadError:
# Sleep and try again. This is dangerous, because it means
# the rest of the stack has no way of differentiating
# between a "new handshake" error and "client dropped".
# Note this isn't an endless loop: there's a timeout below.
time.sleep(self.ssl_retry)
except SSL.WantWriteError:
time.sleep(self.ssl_retry)
except SSL.SysCallError, e:
if is_reader and e.args == (-1, 'Unexpected EOF'):
return ""
errnum = e.args[0]
if is_reader and errnum in socket_errors_to_ignore:
return ""
raise socket.error(errnum)
except SSL.Error, e:
if is_reader and e.args == (-1, 'Unexpected EOF'):
return ""
thirdarg = None
try:
thirdarg = e.args[0][0][2]
except __HOLE__:
pass
if thirdarg == 'http request':
# The client is talking HTTP to an HTTPS server.
raise NoSSLError()
raise FatalSSLAlert(*e.args)
if time.time() - start > self.ssl_timeout:
raise socket.timeout("timed out")
|
IndexError
|
dataset/ETHPy150Open cloudera/hue/desktop/core/src/desktop/lib/wsgiserver.py/SSL_fileobject._safe_call
|
293
|
def communicate(self):
"""Read each request and respond appropriately."""
try:
while True:
# (re)set req to None so that if something goes wrong in
# the RequestHandlerClass constructor, the error doesn't
# get written to the previous request.
req = None
req = self.RequestHandlerClass(self.wfile, self.environ,
self.wsgi_app)
# This order of operations should guarantee correct pipelining.
req.parse_request()
if not req.ready:
# Something went wrong in the parsing (and the server has
# probably already made a simple_response). Return and
# let the conn close.
return
req.respond()
if req.close_connection:
return
except socket.error, e:
errnum = e.args[0]
if errnum == 'timed out':
# Don't send a 408 if there is no outstanding request; only
# if we're in the middle of a request.
# See http://www.cherrypy.org/ticket/853
if req and req.started_request and not req.sent_headers:
req.simple_response("408 Request Timeout")
elif errnum not in socket_errors_to_ignore:
if req and not req.sent_headers:
req.simple_response("500 Internal Server Error",
format_exc())
return
except (KeyboardInterrupt, __HOLE__):
raise
except FatalSSLAlert, e:
# Close the connection.
return
except NoSSLError:
if req and not req.sent_headers:
# Unwrap our wfile
req.wfile = CP_fileobject(self.socket._sock, "wb", -1)
req.simple_response("400 Bad Request",
"The client sent a plain HTTP request, but "
"this server only speaks HTTPS on this port.")
self.linger = True
except Exception, e:
if req and not req.sent_headers:
req.simple_response("500 Internal Server Error", format_exc())
|
SystemExit
|
dataset/ETHPy150Open cloudera/hue/desktop/core/src/desktop/lib/wsgiserver.py/HTTPConnection.communicate
|
294
|
def run(self):
try:
self.ready = True
while True:
try:
conn = self.server.requests.get()
if conn is _SHUTDOWNREQUEST:
return
self.conn = conn
try:
conn.communicate()
finally:
conn.close()
self.conn = None
except Exception, ex:
LOG.exception('WSGI (%s) error: %s' % (self, ex))
except (__HOLE__, SystemExit), exc:
self.server.interrupt = exc
return
|
KeyboardInterrupt
|
dataset/ETHPy150Open cloudera/hue/desktop/core/src/desktop/lib/wsgiserver.py/WorkerThread.run
|
295
|
def stop(self, timeout=5):
# Must shut down threads here so the code that calls
# this method can know when all threads are stopped.
for worker in self._threads:
self._queue.put(_SHUTDOWNREQUEST)
# Don't join currentThread (when stop is called inside a request).
current = threading.currentThread()
while self._threads:
worker = self._threads.pop()
if worker is not current and worker.isAlive():
try:
if timeout is None or timeout < 0:
worker.join()
else:
worker.join(timeout)
if worker.isAlive():
# We exhausted the timeout.
# Forcibly shut down the socket.
c = worker.conn
if c and not c.rfile.closed:
if SSL and isinstance(c.socket, SSL.ConnectionType):
# pyOpenSSL.socket.shutdown takes no args
c.socket.shutdown()
else:
c.socket.shutdown(socket.SHUT_RD)
worker.join()
except (__HOLE__,
# Ignore repeated Ctrl-C.
# See http://www.cherrypy.org/ticket/691.
KeyboardInterrupt), exc1:
pass
|
AssertionError
|
dataset/ETHPy150Open cloudera/hue/desktop/core/src/desktop/lib/wsgiserver.py/ThreadPool.stop
|
296
|
def bind_server(self):
# We don't have to trap KeyboardInterrupt or SystemExit here,
# because cherrpy.server already does so, calling self.stop() for us.
# If you're using this server with another framework, you should
# trap those exceptions in whatever code block calls start().
self._interrupt = None
# Select the appropriate socket
if isinstance(self.bind_addr, basestring):
# AF_UNIX socket
# So we can reuse the socket...
try:
os.unlink(self.bind_addr)
except IOError:
pass
# So everyone can access the socket...
try:
os.chmod(self.bind_addr, 0777)
except __HOLE__:
pass
info = [(socket.AF_UNIX, socket.SOCK_STREAM, 0, "", self.bind_addr)]
else:
# AF_INET or AF_INET6 socket
# Get the correct address family for our host (allows IPv6 addresses)
host, port = self.bind_addr
try:
info = socket.getaddrinfo(host, port, socket.AF_UNSPEC,
socket.SOCK_STREAM, 0, socket.AI_PASSIVE)
except socket.gaierror:
# Probably a DNS issue. Assume IPv4.
info = [(socket.AF_INET, socket.SOCK_STREAM, 0, "", self.bind_addr)]
self.socket = None
msg = "No socket could be created"
for res in info:
af, socktype, proto, canonname, sa = res
try:
self._bind(af, socktype, proto)
except socket.error, msg:
if self.socket:
self.socket.close()
self.socket = None
continue
break
if not self.socket:
raise socket.error, msg
|
IOError
|
dataset/ETHPy150Open cloudera/hue/desktop/core/src/desktop/lib/wsgiserver.py/CherryPyWSGIServer.bind_server
|
297
|
def _bind(self, family, type, proto=0):
"""Create (or recreate) the actual socket object."""
self.socket = socket.socket(family, type, proto)
prevent_socket_inheritance(self.socket)
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
if self.nodelay:
self.socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
if self.ssl_certificate and self.ssl_private_key:
if SSL is None:
raise ImportError("You must install pyOpenSSL to use HTTPS.")
# See http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/442473
ctx = SSL.Context(SSL.SSLv23_METHOD)
if self.ssl_password_cb is not None:
ctx.set_passwd_cb(self.ssl_password_cb)
ctx.set_cipher_list(self.ssl_cipher_list)
try:
ctx.use_privatekey_file(self.ssl_private_key)
ctx.use_certificate_file(self.ssl_certificate)
if self.ssl_certificate_chain:
ctx.use_certificate_chain_file(self.ssl_certificate_chain)
except Exception, ex:
logging.exception('SSL key and certificate could not be found or have a problem')
raise ex
ctx.set_options(SSL.OP_NO_SSLv2 | SSL.OP_NO_SSLv3)
self.socket = SSLConnection(ctx, self.socket)
self.populate_ssl_environ()
# If listening on the IPV6 any address ('::' = IN6ADDR_ANY),
# activate dual-stack. See http://www.cherrypy.org/ticket/871.
if (not isinstance(self.bind_addr, basestring)
and self.bind_addr[0] == '::' and family == socket.AF_INET6):
try:
self.socket.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 0)
except (__HOLE__, socket.error):
# Apparently, the socket option is not available in
# this machine's TCP stack
pass
self.socket.bind(self.bind_addr)
|
AttributeError
|
dataset/ETHPy150Open cloudera/hue/desktop/core/src/desktop/lib/wsgiserver.py/CherryPyWSGIServer._bind
|
298
|
def main():
r = "json"
u = "changeme"
p = "changeme"
f = "marcuz"
t = "44**********"
msg = {'reqtype': r, 'api_secret': p, 'from': f, 'to': t, 'api_key': u}
verify = copy.deepcopy(msg)
try:
if sys.argv[1] == 'verify':
verify['number'] = sys.argv[2]
verify['brand'] = sys.argv[3]
elif sys.argv[1] == 'verify/check':
verify['request_id'] = sys.argv[2]
verify['code'] = sys.argv[3]
elif sys.argv[1] == 'verify/search':
verify['request_id'] = sys.argv[2]
elif sys.argv[1] == 'verify/control':
verify['request_id'] = sys.argv[2]
verify['cmd'] = sys.argv[3]
else:
sys.exit('Request not supported: %s' % sys.argv[1])
except __HOLE__:
print('Missing parameters:')
print('%s verify number brand' % sys.argv[0])
print('%s verify/check request_id code' % sys.argv[0])
print('%s verify/search request_id' % sys.argv[0])
print('%s verify/control request_id cmd' % sys.argv[0])
sys.exit()
verify['type'] = sys.argv[1]
r = NexmoVerify(verify)
print("Details: %s") % (r.get_details())
print r.send_request()
|
IndexError
|
dataset/ETHPy150Open marcuz/libpynexmo/demos/simple/nexmo_verify.py/main
|
299
|
def __init__(self, tag_name, as_var, parent_expr, slot_expr, **kwargs):
super(PagePlaceholderNode, self).__init__(tag_name, as_var, parent_expr, slot_expr, **kwargs)
self.slot_expr = slot_expr
# Move some arguments outside the regular "kwargs"
# because they don't need to be parsed as variables.
# Those are the remaining non-functional args for CMS admin page.
self.meta_kwargs = {}
for arg in self.allowed_meta_kwargs:
try:
self.meta_kwargs[arg] = kwargs.pop(arg)
except __HOLE__:
pass
|
KeyError
|
dataset/ETHPy150Open edoburu/django-fluent-contents/fluent_contents/templatetags/fluent_contents_tags.py/PagePlaceholderNode.__init__
|