_id stringlengths 2 7 | title stringlengths 1 88 | partition stringclasses 3
values | text stringlengths 75 19.8k | language stringclasses 1
value | meta_information dict |
|---|---|---|---|---|---|
def join_dicts(*dicts):
    """Merge any number of dicts into one, dispatching on value type.

    Each value is merged into the output via the module-level JOINERS
    registry (a mapping of type -> join function).

    :param dicts: dicts to merge, in order
    :return: the merged dict
    :raises KeyError: if a value's type has no registered joiner
    """
    out_dict = {}
    for d in dicts:
        # .items() works on both Python 2 and 3 (iteritems is py2-only).
        for k, v in d.items():
            if type(v) not in JOINERS:
                raise KeyError('Invalid type in dict: {}'.format(type(v)))
            JOINERS[type(v)](out_dict, k, v)
    return out_dict
"resource": ""
} |
def env_to_dict(env, pathsep=os.pathsep):
    '''
    Convert a dict containing environment variables into a standard dict.
    Variables containing multiple values will be split into a list based on
    the argument passed to pathsep.

    :param env: Environment dict like os.environ.data
    :param pathsep: Path separator used to split variables
    :return: dict with multi-valued variables split into lists
    '''
    out_dict = {}
    # .items() works on both Python 2 and 3 (iteritems is py2-only).
    for k, v in env.items():
        if pathsep in v:
            out_dict[k] = v.split(pathsep)
        else:
            out_dict[k] = v
    return out_dict
"resource": ""
} |
def dict_to_env(d, pathsep=os.pathsep):
    '''
    Convert a python dict to a dict containing valid environment variable
    values.

    :param d: Dict to convert to an env dict
    :param pathsep: Path separator used to join lists(default os.pathsep)
    :return: dict with list values joined into pathsep-delimited strings
    :raises TypeError: if a value is neither a list nor a string
    '''
    out_env = {}
    # .items() works on both Python 2 and 3 (iteritems is py2-only).
    for k, v in d.items():
        if isinstance(v, list):
            out_env[k] = pathsep.join(v)
        elif isinstance(v, string_types):
            out_env[k] = v
        else:
            raise TypeError('{} not a valid env var type'.format(type(v)))
    return out_env
"resource": ""
} |
def expand_envvars(env):
    '''
    Expand all environment variables in an environment dict.

    :param env: Environment dict
    :return: new dict with $VAR references substituted from *env* itself
    '''
    out_env = {}
    # .items() works on both Python 2 and 3 (iteritems is py2-only).
    for k, v in env.items():
        out_env[k] = Template(v).safe_substitute(env)
    # Expand twice to make sure we expand everything we possibly can
    for k, v in out_env.items():
        out_env[k] = Template(v).safe_substitute(out_env)
    return out_env
"resource": ""
} |
def get_store_env_tmp():
    '''Return an unused random filepath in the system temp directory.

    Candidate names look like ``envstoreNNN`` (NNN = random value from 9
    random bits, zero-padded to 3 digits). Retries in a loop rather than
    recursing, so repeated collisions cannot exhaust the recursion limit.
    '''
    tempdir = tempfile.gettempdir()
    temp_name = 'envstore{0:0>3d}'
    while True:
        temp_path = unipath(tempdir, temp_name.format(random.getrandbits(9)))
        if not os.path.exists(temp_path):
            return temp_path
"resource": ""
} |
def store_env(path=None):
    '''Encode current environment as yaml and store in path or a temporary
    file. Return the path to the stored environment.

    :param path: destination file path (default: get_store_env_tmp())
    :return: the path written to
    '''
    path = path or get_store_env_tmp()
    # dict(os.environ) is portable; os.environ.data is Python-2 only.
    env_dict = yaml.safe_dump(dict(os.environ), default_flow_style=False)
    with open(path, 'w') as f:
        f.write(env_dict)
    return path
"resource": ""
} |
def upstream_url(self, uri):
    """Return the URL to the upstream data source for the given URI.

    The configured upstream base (self.application.options.upstream) is
    prefixed onto *uri*. The original ignored the *uri* argument and
    always used self.request.uri; since the only visible caller passes
    self.request.uri, honoring the parameter is behavior-compatible.
    """
    return self.application.options.upstream + uri
"resource": ""
} |
def make_upstream_request(self):
    """Build the tornado HTTPRequest used to call the upstream service,
    mirroring the incoming request's method, headers and body."""
    incoming = self.request
    target = self.upstream_url(incoming.uri)
    return tornado.httpclient.HTTPRequest(
        target,
        method=incoming.method,
        headers=incoming.headers,
        body=incoming.body or None,
    )
"resource": ""
} |
def ttl(self, response):
    """Returns time to live in seconds. 0 means no caching.

    Criteria:
    - response code 200
    - read-only method (GET, HEAD, OPTIONS)
    Plus http headers:
    - cache-control: private/public, no-cache, no-store, max-age,
      s-maxage, must-revalidate, proxy-revalidate
    - expires: Thu, 01 Dec 1983 20:00:00 GMT
    - pragma: no-cache (=cache-control: no-cache)
    See http://www.mobify.com/blog/beginners-guide-to-http-cache-headers/

    Returns None when no header gives an explicit lifetime.
    """
    # Only successful responses to read-only methods are cacheable.
    if response.code != 200:
        return 0
    if self.request.method not in ['GET', 'HEAD', 'OPTIONS']:
        return 0
    try:
        if self.request.headers['pragma'] == 'no-cache':
            return 0
    except KeyError:
        pass
    try:
        cache_control = self.request.headers['cache-control']
        # Any of these directives forbids caching by this proxy.
        # (The original used str.find(), whose result is truthy for any
        # position except 0 -- substring membership is the correct test.)
        for option in ['private', 'no-cache', 'no-store', 'must-revalidate', 'proxy-revalidate']:
            if option in cache_control:
                return 0
        # Further parsing to get an explicit ttl; s-maxage wins for
        # shared caches. (The original fell into dead code referencing
        # an unbound 'ttl' variable here.)
        options = parse_cache_control(cache_control)
        if 's-maxage' in options:
            return int(options['s-maxage'])
        if 'max-age' in options:
            return int(options['max-age'])
        # No explicit age: fall through to the Expires header.
    except KeyError:
        pass
    try:
        expires = self.request.headers['expires']
        # Expires dates end with a timezone token (e.g. 'GMT'); %Z
        # accepts it (the original format string omitted it and the
        # resulting ValueError was uncaught).
        return time.mktime(time.strptime(expires, '%a, %d %b %Y %H:%M:%S %Z')) - time.time()
    except (KeyError, ValueError):
        pass
"resource": ""
} |
def manifest():
    """Guarantee the existence of a basic MANIFEST.in.

    manifest doc: http://docs.python.org/distutils/sourcedist.html#manifest

    `options.paved.dist.manifest.include`: set of files (or globs) to include with the `include` directive.
    `options.paved.dist.manifest.recursive_include`: set of files (or globs) to include with the `recursive-include` directive.
    `options.paved.dist.manifest.prune`: set of files (or globs) to exclude with the `prune` directive.
    `options.paved.dist.manifest.include_sphinx_docroot`: True -> sphinx docroot is added as `graft`
    `options.paved.dist.manifest.exclude_sphinx_builddir`: True -> sphinx builddir is added as `prune`
    """
    prune = options.paved.dist.manifest.prune
    graft = set()
    if options.paved.dist.manifest.include_sphinx_docroot:
        # Ship the sphinx source tree with the sdist...
        docroot = options.get('docroot', 'docs')
        graft.update([docroot])
        if options.paved.dist.manifest.exclude_sphinx_builddir:
            # ...but keep generated build output out of it.
            builddir = docroot + '/' + options.get("builddir", ".build")
            prune.update([builddir])
    # Rewrite MANIFEST.in from scratch: graft, include, recursive-include,
    # then prune (order matters to distutils).
    with open(options.paved.cwd / 'MANIFEST.in', 'w') as fo:
        for item in graft:
            fo.write('graft %s\n' % item)
        for item in options.paved.dist.manifest.include:
            fo.write('include %s\n' % item)
        for item in options.paved.dist.manifest.recursive_include:
            fo.write('recursive-include %s\n' % item)
        for item in prune:
            fo.write('prune %s\n' % item)
"resource": ""
} |
def format_pathname(pathname, max_length):
    """
    Shorten *pathname* to at most *max_length* characters.

    The tail of the path (the most identifying part of a file path) is
    kept; a truncated head is replaced by three dots.

    :param str pathname: Pathname to format
    :param int max_length: Maximum length of result pathname (> 3)
    :return: Formatted pathname
    :rtype: str
    :raises ValueError: If *max_length* is not larger than 3
    """
    if max_length <= 3:
        raise ValueError("max length must be larger than 3")
    if len(pathname) <= max_length:
        return pathname
    tail = pathname[len(pathname) - (max_length - 3):]
    return "..." + tail
"resource": ""
} |
def format_uuid(uuid, max_length=10):
    """
    Shorten a UUID string to at most *max_length* characters.

    The head of the UUID (its most identifying part) is kept; a truncated
    tail is replaced by three dots. With the default *max_length* of 10
    the result keeps the first 7 characters, which is usually still
    unique within a collection of UUIDs.

    :param str uuid: UUID to format
    :param int max_length: Maximum length of result string (> 3)
    :return: Formatted UUID
    :rtype: str
    :raises ValueError: If *max_length* is not larger than 3
    """
    if max_length <= 3:
        raise ValueError("max length must be larger than 3")
    if len(uuid) <= max_length:
        return uuid
    return uuid[:max_length - 3] + "..."
"resource": ""
} |
def paginate_update(update):
    """
    Return pagination context for an event update.

    Finds the updates immediately after ('next') and before ('previous')
    *update* by publication time within the same event.

    :param update: the Update being displayed
    :return: dict with 'next', 'previous' (Update or None) and 'event'
    """
    from happenings.models import Update
    time = update.pub_time
    event = update.event
    try:
        next = Update.objects.filter(
            event=event,
            pub_time__gt=time
        ).order_by('pub_time').only('title')[0]
    except IndexError:
        # No later update. (The original bare 'except:' also swallowed
        # database errors.)
        next = None
    try:
        previous = Update.objects.filter(
            event=event,
            pub_time__lt=time
        ).order_by('-pub_time').only('title')[0]
    except IndexError:
        previous = None
    return {'next': next, 'previous': previous, 'event': event}
"resource": ""
} |
def notify_client(
        notifier_uri,
        client_id,
        status_code,
        message=None):
    """
    Notify the client of the result of handling a request.

    The POSTed payload contains the *client_id* (by which the notifier
    service identifies the client) and a *result* whose response always
    carries *status_code*, plus *message* when one is given.

    If the notifier service does not exist or returns a non-201 status,
    an error line is written to stderr.
    """
    response_body = {"status_code": status_code}
    if message is not None:
        response_body["message"] = message
    payload = {
        "client_id": client_id,
        "result": {"response": response_body},
    }
    response = requests.post(notifier_uri, json=payload)
    if response.status_code != 201:
        sys.stderr.write("failed to notify client: {}\n".format(payload))
        sys.stderr.flush()
"resource": ""
} |
def setting(self, name_hyphen):
    """
    Retrieve the value of the setting named *name_hyphen*.

    Values starting with ``$`` are treated as references to environment
    variables and the variable's value is returned; it is an error if the
    corresponding environment variable is not set. A value starting with
    an escaped dollar (backslash-dollar) is returned with the backslash
    stripped.

    :raises UserFeedback: if the setting does not exist or the referenced
        environment variable is unset
    """
    if name_hyphen in self._instance_settings:
        # Settings are stored as (helpstring, value) tuples.
        value = self._instance_settings[name_hyphen][1]
    else:
        msg = "No setting named '%s'" % name_hyphen
        raise UserFeedback(msg)
    if hasattr(value, 'startswith') and value.startswith("$"):
        env_var = value.lstrip("$")
        if env_var in os.environ:
            return os.getenv(env_var)
        else:
            msg = "'%s' is not defined in your environment" % env_var
            raise UserFeedback(msg)
    elif hasattr(value, 'startswith') and value.startswith(r"\$"):
        # Raw strings avoid the invalid "\$" escape sequence the original
        # used (a SyntaxWarning on modern Python).
        return value.replace(r"\$", "$")
    else:
        return value
"resource": ""
} |
q265215 | Plugin._update_settings | validation | def _update_settings(self, new_settings, enforce_helpstring=True):
"""
This method does the work of updating settings. Can be passed with
enforce_helpstring = False which you may want if allowing end users to
add arbitrary metadata via the settings system.
Preferable to use update_settings (without leading _) in code to do the
right thing and always have docstrings.
"""
for raw_setting_name, value in six.iteritems(new_settings):
setting_name = raw_setting_name.replace("_", "-")
setting_already_exists = setting_name in self._instance_settings
value_is_list_len_2 = isinstance(value, list) and len(value) == 2
treat_as_tuple = not setting_already_exists and value_is_list_len_2
if isinstance(value, tuple) or treat_as_tuple:
self._instance_settings[setting_name] = value
else:
if setting_name not in self._instance_settings:
if enforce_helpstring:
msg = "You must specify param '%s' as a tuple of (helpstring, value)"
raise InternalCashewException(msg % setting_name)
else:
# Create entry with blank helpstring.
self._instance_settings[setting_name] = ('', value,)
else:
# Save inherited helpstring, replace default value.
orig = self._instance_settings[setting_name]
self._instance_settings[setting_name] = (orig[0], value,) | python | {
"resource": ""
} |
def settings_and_attributes(self):
    """Return one dict combining setting values with instance attributes."""
    combined = self.setting_values()
    combined.update(self.__dict__)
    # Internal bookkeeping attributes are not part of the public view.
    for internal in ("_instance_settings", "aliases"):
        del combined[internal]
    return combined
"resource": ""
} |
def get_reference_to_class(cls, class_or_class_name):
    """
    Resolve *class_or_class_name* to a class object.

    Accepts either a class (returned unchanged) or a string. Strings of
    the form "module:ClassName" are imported and looked up; any other
    string is resolved among locally-known classes.

    :raises InternalCashewException: for any other argument type
    """
    if isinstance(class_or_class_name, type):
        return class_or_class_name
    if isinstance(class_or_class_name, string_types):
        if ":" not in class_or_class_name:
            return cls.load_class_from_locals(class_or_class_name)
        mod_name, class_name = class_or_class_name.split(":")
        if mod_name not in sys.modules:
            __import__(mod_name)
        return sys.modules[mod_name].__dict__[class_name]
    raise InternalCashewException(
        "Unexpected Type '%s'" % type(class_or_class_name))
"resource": ""
} |
def check_docstring(cls):
    """
    Asserts that the class has a docstring, returning it if successful.

    When the 'max-docstring-length' class setting is set, each docstring
    line is also checked against that limit.

    :raises InternalCashewException: if the docstring is missing or a
        line exceeds the configured maximum length
    """
    docstring = inspect.getdoc(cls)
    if not docstring:
        # Base-class breadcrumb trail makes the offending plugin easy to find.
        breadcrumbs = " -> ".join(t.__name__ for t in inspect.getmro(cls)[:-1][::-1])
        msg = "docstring required for plugin '%s' (%s, defined in %s)"
        args = (cls.__name__, breadcrumbs, cls.__module__)
        raise InternalCashewException(msg % args)
    max_line_length = cls._class_settings.get('max-docstring-length')
    if max_line_length:
        for i, line in enumerate(docstring.splitlines()):
            if len(line) > max_line_length:
                msg = "docstring line %s of %s is %s chars too long"
                args = (i, cls.__name__, len(line) - max_line_length)
                # Consistent with the missing-docstring case above
                # (the original raised a bare Exception here).
                raise InternalCashewException(msg % args)
    return docstring
"resource": ""
} |
def resourcePath(self, relative_path):
    """Get absolute path to resource, works for dev and for PyInstaller.

    PyInstaller unpacks bundled resources into a temp folder and exposes
    it as sys._MEIPASS; in development the module's own directory is used.
    """
    from os import path
    import sys
    base_path = getattr(sys, '_MEIPASS', None)
    if base_path is None:
        base_path = path.dirname(path.abspath(__file__))
    return path.join(base_path, relative_path)
"resource": ""
} |
def addLogbook(self, physDef="LCLS", mccDef="MCC", initialInstance=False):
    '''Add new block of logbook selection windows. Only 5 allowed.

    :param physDef: Physics logbook program to pre-select
    :param mccDef: MCC logbook program to pre-select
    :param initialInstance: True only for the first menu, which owns the
        "+" button that spawns additional menus
    '''
    if self.logMenuCount < 5:
        self.logMenus.append(LogSelectMenu(self.logui.multiLogLayout, initialInstance))
        # Populate both logbook types with their program lists and defaults.
        self.logMenus[-1].addLogbooks(self.logTypeList[1], self.physics_programs, physDef)
        self.logMenus[-1].addLogbooks(self.logTypeList[0], self.mcc_programs, mccDef)
        self.logMenus[-1].show()
        self.logMenuCount += 1
        if initialInstance:
            # Initial logbook menu can add additional menus, all others can only remove themselves.
            QObject.connect(self.logMenus[-1].logButton, SIGNAL("clicked()"), self.addLogbook)
        else:
            from functools import partial
            # Bind this specific menu instance to its own remove action.
            QObject.connect(self.logMenus[-1].logButton, SIGNAL("clicked()"), partial(self.removeLogbook, self.logMenus[-1]))
"resource": ""
} |
def removeLogbook(self, menu=None):
    '''Remove one logbook menu set; the first menu always remains.'''
    if menu is None or self.logMenuCount <= 1:
        return
    menu.removeMenu()
    self.logMenus.remove(menu)
    self.logMenuCount -= 1
"resource": ""
} |
def selectedLogs(self):
    '''Return the selected logbooks, de-duplicated, as (mcc, physics) lists.'''
    mcclogs = []
    physlogs = []
    for menu in self.logMenus:
        log_type = menu.selectedType()
        program = menu.selectedProgram()
        if log_type == "MCC" and program not in mcclogs:
            mcclogs.append(program)
        elif log_type == "Physics" and program not in physlogs:
            physlogs.append(program)
    return mcclogs, physlogs
"resource": ""
} |
def acceptedUser(self, logType):
    '''Verify entered user name is on accepted MCC logbook list.

    Physics logbooks accept any non-empty user name; for MCC the name is
    checked against the MCC elog user-list web service. If the service
    cannot be reached, the user is asked to confirm the entered name.

    :param logType: "MCC" or "Physics"
    :return: True if the user name is approved (or manually confirmed)
    '''
    from urllib2 import urlopen, URLError, HTTPError
    import json
    isApproved = False
    userName = str(self.logui.userName.text())
    if userName == "":
        return False  # Must have a user name to submit entry
    if logType == "MCC":
        networkFault = False
        data = []
        log_url = "https://mccelog.slac.stanford.edu/elog/dev/mgibbs/dev_json_user_list.php/?username=" + userName
        try:
            # 5 second timeout so the GUI does not hang on a dead server.
            data = urlopen(log_url, None, 5).read()
            data = json.loads(data)
        except URLError as error:
            print("URLError: " + str(error.reason))
            networkFault = True
        # NOTE(review): HTTPError subclasses URLError, so this second
        # handler is unreachable as ordered -- confirm intended order.
        except HTTPError as error:
            print("HTTPError: " + str(error.reason))
            networkFault = True
        # If network fails, ask user to verify
        if networkFault:
            msgBox = QMessageBox()
            msgBox.setText("Cannot connect to MCC Log Server!")
            msgBox.setInformativeText("Use entered User name anyway?")
            msgBox.setStandardButtons(QMessageBox.Ok | QMessageBox.Cancel)
            msgBox.setDefaultButton(QMessageBox.Ok)
            if msgBox.exec_() == QMessageBox.Ok:
                isApproved = True
        # Non-empty service response means the user is on the list.
        if data != [] and (data is not None):
            isApproved = True
    else:
        isApproved = True
    return isApproved
"resource": ""
} |
def prettify(self, elem):
    """Parse xml elements for pretty printing"""
    from xml.etree import ElementTree
    from re import sub
    # NOTE(review): on Python 3, tostring(elem, 'utf-8') returns bytes,
    # which would make the str-pattern sub() below raise TypeError --
    # confirm this runs under Python 2 (or pass encoding='unicode').
    rawString = ElementTree.tostring(elem, 'utf-8')
    # Lookahead inserts a newline *before* each opening tag (the original
    # comment said "after each closing tag", which the regex does not do).
    parsedString = sub(r'(?=<[^/].*>)', '\n', rawString)
    # Drop the leading newline added before the first tag.
    return parsedString[1:]
"resource": ""
} |
def prepareImages(self, fileName, logType):
    """Convert supplied QPixmap object to image file.

    For png captures, writes fileName.png (and for Physics logs also a
    .ps copy plus a 500x450 thumbnail). Other capture types are copied
    to fileName.gif, with a png thumbnail for Physics logs.
    """
    import subprocess
    if self.imageType == "png":
        self.imagePixmap.save(fileName + ".png", "PNG", -1)
        if logType == "Physics":
            # NOTE(review): shell=True with an interpolated file name is
            # fine for internally generated paths but unsafe for any
            # user-controlled input.
            makePostScript = "convert " + fileName + ".png " + fileName + ".ps"
            process = subprocess.Popen(makePostScript, shell=True)
            process.wait()
            thumbnailPixmap = self.imagePixmap.scaled(500, 450, Qt.KeepAspectRatio)
            thumbnailPixmap.save(fileName + ".png", "PNG", -1)
    else:
        renameImage = "cp " + self.image + " " + fileName + ".gif"
        process = subprocess.Popen(renameImage, shell=True)
        process.wait()
        if logType == "Physics":
            thumbnailPixmap = self.imagePixmap.scaled(500, 450, Qt.KeepAspectRatio)
            thumbnailPixmap.save(fileName + ".png", "PNG", -1)
"resource": ""
} |
def submitEntry(self):
    """Process user inputs and submit logbook entry when user clicks Submit button.

    Builds XML metadata (and image attachments, if any) for each selected
    MCC and Physics logbook, pushes them out, and closes the dialog with
    the overall success status.
    """
    # logType = self.logui.logType.currentText()
    mcclogs, physlogs = self.selectedLogs()
    success = True
    if mcclogs != []:
        # MCC submissions require a validated user name.
        if not self.acceptedUser("MCC"):
            QMessageBox().warning(self, "Invalid User", "Please enter a valid user name!")
            return
        fileName = self.xmlSetup("MCC", mcclogs)
        if fileName is None:
            return
        if not self.imagePixmap.isNull():
            self.prepareImages(fileName, "MCC")
        success = self.sendToLogbook(fileName, "MCC")
    if physlogs != []:
        for i in range(len(physlogs)):
            fileName = self.xmlSetup("Physics", physlogs[i])
            if fileName is None:
                return
            if not self.imagePixmap.isNull():
                self.prepareImages(fileName, "Physics")
            success_phys = self.sendToLogbook(fileName, "Physics", physlogs[i])
            # Every submission must succeed for overall success.
            success = success and success_phys
    self.done(success)
"resource": ""
} |
def sendToLogbook(self, fileName, logType, location=None):
    '''Process log information and push to selected logbooks.

    MCC entries are posted via the external xml2elog tool; Physics
    entries are copied into the logbook drop directory for pickup by a
    cron job.

    :param fileName: base file path of the prepared entry (no extension)
    :param logType: "MCC" or "Physics"
    :param location: Physics logbook location (used to build the path)
    :return: True on success, False otherwise
    '''
    import subprocess
    success = True
    if logType == "MCC":
        fileString = ""
        if not self.imagePixmap.isNull():
            fileString = fileName + "." + self.imageType
        # NOTE(review): shell=True with interpolated paths -- safe only
        # for internally generated file names.
        logcmd = "xml2elog " + fileName + ".xml " + fileString
        process = subprocess.Popen(logcmd, shell=True)
        process.wait()
        if process.returncode != 0:
            success = False
    else:
        from shutil import copy
        path = "/u1/" + location.lower() + "/physics/logbook/data/"  # Prod path
        # path = "/home/softegr/alverson/log_test/"  # Dev path
        try:
            if not self.imagePixmap.isNull():
                copy(fileName + ".png", path)
                if self.imageType == "png":
                    copy(fileName + ".ps", path)
                else:
                    copy(fileName + "." + self.imageType, path)
            # Copy .xml file last to ensure images will be picked up by cron job
            # print("Copying file " + fileName + " to path " + path)
            copy(fileName + ".xml", path)
        except IOError as error:
            print(error)
            success = False
    return success
"resource": ""
} |
def setupUI(self):
    '''Create graphical objects for menus.

    Builds one row: a "Log Type" combo, a "Program" combo, and a small
    round +/- button, all assembled into self._logSelectLayout.
    '''
    # Labels stay fixed-size; combo boxes expand horizontally.
    labelSizePolicy = QSizePolicy(QSizePolicy.Fixed, QSizePolicy.Fixed)
    labelSizePolicy.setHorizontalStretch(0)
    labelSizePolicy.setVerticalStretch(0)
    menuSizePolicy = QSizePolicy(QSizePolicy.Expanding, QSizePolicy.Fixed)
    menuSizePolicy.setHorizontalStretch(0)
    menuSizePolicy.setVerticalStretch(0)
    # --- "Log Type" label + combo box ---
    logTypeLayout = QHBoxLayout()
    logTypeLayout.setSpacing(0)
    typeLabel = QLabel("Log Type:")
    typeLabel.setMinimumSize(QSize(65, 0))
    typeLabel.setMaximumSize(QSize(65, 16777215))
    typeLabel.setSizePolicy(labelSizePolicy)
    logTypeLayout.addWidget(typeLabel)
    self.logType = QComboBox(self)
    self.logType.setMinimumSize(QSize(100, 0))
    self.logType.setMaximumSize(QSize(150, 16777215))
    menuSizePolicy.setHeightForWidth(self.logType.sizePolicy().hasHeightForWidth())
    self.logType.setSizePolicy(menuSizePolicy)
    logTypeLayout.addWidget(self.logType)
    logTypeLayout.setStretch(1, 6)
    # --- "Program" label + combo box ---
    programLayout = QHBoxLayout()
    programLayout.setSpacing(0)
    programLabel = QLabel("Program:")
    programLabel.setMinimumSize(QSize(60, 0))
    programLabel.setMaximumSize(QSize(60, 16777215))
    programLabel.setSizePolicy(labelSizePolicy)
    programLayout.addWidget(programLabel)
    self.programName = QComboBox(self)
    self.programName.setMinimumSize(QSize(100, 0))
    self.programName.setMaximumSize(QSize(150, 16777215))
    menuSizePolicy.setHeightForWidth(self.programName.sizePolicy().hasHeightForWidth())
    self.programName.setSizePolicy(menuSizePolicy)
    programLayout.addWidget(self.programName)
    programLayout.setStretch(1, 6)
    # Initial instance allows adding additional menus, all following menus can only remove themselves.
    if self.initialInstance:
        self.logButton = QPushButton("+", self)
        self.logButton.setToolTip("Add logbook")
    else:
        self.logButton = QPushButton("-")
        self.logButton.setToolTip("Remove logbook")
    self.logButton.setMinimumSize(QSize(16, 16))  # 24x24
    self.logButton.setMaximumSize(QSize(16, 16))  # 24x24
    self.logButton.setObjectName("roundButton")
    # self.logButton.setAutoFillBackground(True)
    # region = QRegion(QRect(self.logButton.x()+15, self.logButton.y()+14, 20, 20), QRegion.Ellipse)
    # self.logButton.setMask(region)
    self.logButton.setStyleSheet("QPushButton {border-radius: 8px;}")
    # Assemble the full row: type selector | program selector | +/- button.
    self._logSelectLayout = QHBoxLayout()
    self._logSelectLayout.setSpacing(6)
    self._logSelectLayout.addLayout(logTypeLayout)
    self._logSelectLayout.addLayout(programLayout)
    self._logSelectLayout.addWidget(self.logButton)
    self._logSelectLayout.setStretch(0, 6)
    self._logSelectLayout.setStretch(1, 6)
"resource": ""
} |
def show(self):
    '''Display this menu by attaching its layout to the parent layout,
    then hook up the event signal connections.'''
    row_layout = self._logSelectLayout
    self.parent.addLayout(row_layout)
    self.menuCount = self.menuCount + 1
    self._connectSlots()
"resource": ""
} |
def addLogbooks(self, type=None, logs=None, default=""):
    '''Add or change list of logbooks.

    Entries are stored as ``logList[type] == [programs, default]``.

    :param type: logbook type name (e.g. "MCC"); ignored if None
    :param logs: list of program names to add under that type
    :param default: program to auto-select upon menu creation
    '''
    # Avoid the original's mutable default argument (logs=[]).
    logs = logs or []
    if type is None or len(logs) == 0:
        return
    if type in self.logList:
        programs = self.logList.get(type)[0]
        for logbook in logs:
            if logbook not in programs:
                programs.append(logbook)
    else:
        # New type: store the program list plus its default selection.
        # (The original's 'len(...) > 1' branch was unreachable and its
        # body used '==' -- a no-op comparison -- instead of '='; the
        # net effect was always [logs, default], reproduced here.)
        self.logList[type] = [logs, default]
    self.logType.clear()
    self.logType.addItems(list(self.logList.keys()))
    self.changeLogType()
"resource": ""
} |
def removeLogbooks(self, type=None, logs=None):
    '''Remove unwanted logbooks from list.

    :param type: logbook type to prune; ignored if unknown or None
    :param logs: program names to remove; an empty list or "All" removes
        the whole type
    '''
    # Avoid the original's mutable default argument (logs=[]).
    logs = [] if logs is None else logs
    if type is None or type not in self.logList:
        return
    if len(logs) == 0 or logs == "All":
        del self.logList[type]
    else:
        # Program names live in logList[type][0]; the original tested
        # membership in the outer [programs, default] pair, so removals
        # never matched.
        programs = self.logList[type][0]
        for logbook in logs:
            if logbook in programs:
                programs.remove(logbook)
    self.changeLogType()
"resource": ""
} |
def changeLogType(self):
    '''Populate log program list to correspond with log type selection.'''
    logType = self.selectedType()
    # Guard before indexing: the original indexed logList.get(logType)[0]
    # first, which raises TypeError (None[0]) for an unknown type.
    if logType not in self.logList:
        return
    programs = self.logList.get(logType)[0]
    default = self.logList.get(logType)[1]
    self.programName.clear()
    self.programName.addItems(programs)
    self.programName.setCurrentIndex(programs.index(default))
"resource": ""
} |
def addMenu(self):
    '''Add menus to parent gui.'''
    # NOTE(review): self.logSelectLayout does not match the attribute
    # created in setupUI (self._logSelectLayout) -- confirm intended name.
    self.parent.multiLogLayout.addLayout(self.logSelectLayout)
    # NOTE(review): logType and programName are undefined names in this
    # scope; this call would raise NameError if reached -- needs fixing
    # at the call site or here.
    self.getPrograms(logType, programName)
"resource": ""
} |
def removeLayout(self, layout):
    '''Iteratively remove graphical objects from layout.

    Widgets are scheduled for deletion; nested sub-layouts are cleared
    recursively. Iterates in reverse so takeAt indices stay valid.
    '''
    for cnt in reversed(range(layout.count())):
        item = layout.takeAt(cnt)
        widget = item.widget()
        if widget is not None:
            widget.deleteLater()
        else:
            # Sub-layout encountered: iterate recursively. (The original
            # used a bare triple-quoted string here, which is a no-op
            # expression, not a comment.)
            self.removeLayout(item.layout())
"resource": ""
} |
def addlabel(ax=None, toplabel=None, xlabel=None, ylabel=None, zlabel=None, clabel=None, cb=None, windowlabel=None, fig=None, axes=None):
    """Adds labels to a plot.

    :param ax: deprecated alias for *axes*
    :param toplabel: title text
    :param xlabel: x-axis label text
    :param ylabel: y-axis label text
    :param zlabel: z-axis label text (3D axes only)
    :param clabel: colorbar label text (requires *cb*)
    :param cb: colorbar instance to label
    :param windowlabel: window title (requires *fig*)
    :param fig: figure to operate on (default: current figure)
    :param axes: axes to operate on
    """
    if (axes is None) and (ax is not None):
        axes = ax
    if (windowlabel is not None) and (fig is not None):
        fig.canvas.set_window_title(windowlabel)
    if fig is None:
        fig = _plt.gcf()
    if fig is not None and axes is None:
        # NOTE(review): get_axes() returns a *list* of axes, but the
        # set_title/set_xlabel calls below expect a single axes object,
        # so this fallback looks like it would raise -- confirm intent.
        axes = fig.get_axes()
    if axes == []:
        logger.error('No axes found!')
    if axes is not None:
        if toplabel is not None:
            axes.set_title(toplabel)
        if xlabel is not None:
            axes.set_xlabel(xlabel)
        if ylabel is not None:
            axes.set_ylabel(ylabel)
        if zlabel is not None:
            axes.set_zlabel(zlabel)
    if (clabel is not None) or (cb is not None):
        if (clabel is not None) and (cb is not None):
            cb.set_label(clabel)
        else:
            # Both pieces are required to label a colorbar; report which
            # one is missing.
            if clabel is None:
                logger.error('Missing colorbar label')
            else:
                logger.error('Missing colorbar instance')
"resource": ""
} |
def linkcode_resolve(domain, info):
    """
    Determine the GitHub URL corresponding to a Python object
    (sphinx.ext.linkcode hook).

    :param domain: sphinx domain; only 'py' is supported
    :param info: dict with 'module' and 'fullname' keys
    :return: source URL string, or None if the object cannot be resolved
    """
    if domain != 'py':
        return None
    modname = info['module']
    fullname = info['fullname']
    submod = sys.modules.get(modname)
    if submod is None:
        return None
    obj = submod
    for part in fullname.split('.'):
        try:
            obj = getattr(obj, part)
        except AttributeError:
            # Dotted path does not resolve (the original's bare except
            # also hid unrelated errors).
            return None
    try:
        fn = inspect.getsourcefile(obj)
    except TypeError:
        # inspect raises TypeError for built-in / C-level objects.
        fn = None
    if not fn:
        return None
    try:
        source, lineno = inspect.getsourcelines(obj)
    except (OSError, TypeError):
        lineno = None
    if lineno:
        linespec = "#L%d-L%d" % (lineno, lineno + len(source) - 1)
    else:
        linespec = ""
    fn = relpath(fn, start=dirname(scisalt.__file__))
    # Dev builds link to master; releases link to their version tag.
    if 'dev' in scisalt.__version__:
        return "http://github.com/joelfrederico/SciSalt/blob/master/scisalt/%s%s" % (
            fn, linespec)
    else:
        return "http://github.com/joelfrederico/SciSalt/blob/v%s/scisalt/%s%s" % (
            scisalt.__version__, fn, linespec)
"resource": ""
} |
def syncdb(args):
    """Update the database with model schema. Shorthand for `paver manage syncdb`.

    Extra positional args are forwarded to the management command; with
    no args, ``syncdb --noinput`` is run. Fixtures configured in
    options.paved.django.syncdb.fixtures are loaded afterwards.
    """
    # NOTE(review): the truthiness test uses *args* but the join uses
    # *options.args* -- confirm these are intentionally the same values.
    cmd = args and 'syncdb %s' % ' '.join(options.args) or 'syncdb --noinput'
    call_manage(cmd)
    for fixture in options.paved.django.syncdb.fixtures:
        call_manage("loaddata %s" % fixture)
"resource": ""
} |
def start(info):
    """Run the dev server.

    Uses `django_extensions <http://pypi.python.org/pypi/django-extensions/0.5>`, if
    available, to provide `runserver_plus`.
    Set the command to use with `options.paved.django.runserver`
    Set the port to use with `options.paved.django.runserver_port`
    """
    cmd = options.paved.django.runserver
    if cmd == 'runserver_plus':
        try:
            import django_extensions
        except ImportError:
            # Fall back gracefully when the extension is not installed.
            info("Could not import django_extensions. Using default runserver.")
            cmd = 'runserver'
    port = options.paved.django.runserver_port
    if port:
        cmd = '%s %s' % (cmd, port)
    call_manage(cmd)
"resource": ""
} |
def schema(args):
    """Run South's schemamigration command.

    Extra positional args are forwarded to the command.
    """
    try:
        import south
        # NOTE(review): call_manage is inside the try, so an ImportError
        # raised while running the command itself would be misreported
        # as "Could not import south" -- confirm intended scope.
        cmd = args and 'schemamigration %s' % ' '.join(options.args) or 'schemamigration'
        call_manage(cmd)
    except ImportError:
        error('Could not import south.')
"resource": ""
} |
def validate(cls, definition):
    '''
    This static method validates a BioMapMapper definition.
    It returns None on success and throws an exception otherwise.

    :param definition: parsed JSON definition to check
    :raises jsonschema.ValidationError: if the JSON schema is violated
    :raises AssertionError: if a cross-field constraint is violated
        (note: assertions are stripped under ``python -O``)
    '''
    schema_path = os.path.join(os.path.dirname(__file__),
                               '../../schema/mapper_definition_schema.json')
    with open(schema_path, 'r') as jsonfp:
        schema = json.load(jsonfp)
    # Validation of JSON schema
    jsonschema.validate(definition, schema)
    # Validation of JSON properties relations
    assert definition['main_key'] in definition['supported_keys'], \
        '\'main_key\' must be contained in \'supported_keys\''
    assert set(definition.get('list_valued_keys', [])) <= set(definition['supported_keys']), \
        '\'list_valued_keys\' must be a subset of \'supported_keys\''
    assert set(definition.get('disjoint', [])) <= set(definition.get('list_valued_keys', [])), \
        '\'disjoint\' must be a subset of \'list_valued_keys\''
    assert set(definition.get('key_synonyms', {}).values()) <= set(definition['supported_keys']), \
        '\'The values of the \'key_synonyms\' mapping must be in \'supported_keys\''
"resource": ""
} |
def map(self, ID_s,
        FROM=None,
        TO=None,
        target_as_set=False,
        no_match_sub=None):
    '''
    The main method of this class and the essence of the package.
    It allows to "map" stuff.

    Args:
        ID_s: Nested lists with strings as leafs (plain strings also possible)
        FROM (str): Origin key for the mapping (default: main key)
        TO (str): Destination key for the mapping (default: main key)
        target_as_set (bool): Whether to summarize the output as a set (removes duplicates)
        no_match_sub: Object representing the status of an ID not being able to be matched
            (default: None)
    Returns:
        Mapping: a mapping object capturing the result of the mapping request
        (or ID_s unchanged when FROM == TO; or a single mapped value when
        a plain string was passed in)
    '''
    def io_mode(ID_s):
        '''
        Handles the input/output modalities of the mapping.
        Returns (ID_s, unlist_return, list_of_lists).
        '''
        unlist_return = False
        list_of_lists = False
        if isinstance(ID_s, str):
            # Single ID: wrap it so the core mapping sees a list, and
            # unwrap the result on the way out.
            ID_s = [ID_s]
            unlist_return = True
        elif isinstance(ID_s, list):
            if len(ID_s) > 0 and isinstance(ID_s[0], list):
                # assuming ID_s is a list of lists of ID strings
                list_of_lists = True
        return ID_s, unlist_return, list_of_lists

    # interpret input
    if FROM == TO:
        # Identity mapping: nothing to translate.
        return ID_s
    ID_s, unlist_return, list_of_lists = io_mode(ID_s)
    # map consistent with interpretation of input
    if list_of_lists:
        # Recurse one nesting level down, preserving structure.
        mapped_ids = [self.map(ID, FROM, TO, target_as_set, no_match_sub) for ID in ID_s]
    else:
        mapped_ids = self._map(ID_s, FROM, TO, target_as_set, no_match_sub)
    # return consistent with interpretation of input
    if unlist_return:
        return mapped_ids[0]
    return Mapping(ID_s, mapped_ids)
"resource": ""
} |
q265242 | Mapper.get_all | validation | def get_all(self, key=None):
'''
Returns all data entries for a particular key. Default is the main key.
Args:
key (str): key whose values to return (default: main key)
Returns:
List of all data entries for the key
'''
key = self.definition.main_key if key is None else key
key = self.definition.key_synonyms.get(key, key)
entries = self._get_all(key)
if key in self.definition.scalar_nonunique_keys:
return set(entries)
return entries | python | {
"resource": ""
} |
q265243 | FieldCutter.line | validation | def line(self, line):
"""Returns list of strings split by input delimeter
Argument:
line - Input line to cut
"""
# Remove empty strings in case of multiple instances of delimiter
return [x for x in re.split(self.delimiter, line.rstrip()) if x != ''] | python | {
"resource": ""
} |
q265244 | Messages.get_message | validation | def get_message(self, message_id):
"""
Get Existing Message
http://dev.wheniwork.com/#get-existing-message
"""
url = "/2/messages/%s" % message_id
return self.message_from_json(self._get_resource(url)["message"]) | python | {
"resource": ""
} |
q265245 | Messages.create_message | validation | def create_message(self, params={}):
"""
Creates a message
http://dev.wheniwork.com/#create/update-message
"""
url = "/2/messages/"
body = params
data = self._post_resource(url, body)
return self.message_from_json(data["message"]) | python | {
"resource": ""
} |
q265246 | Messages.update_message | validation | def update_message(self, message):
"""
Modify an existing message.
http://dev.wheniwork.com/#create/update-message
"""
url = "/2/messages/%s" % message.message_id
data = self._put_resource(url, message.json_data())
return self.message_from_json(data) | python | {
"resource": ""
} |
q265247 | Messages.delete_messages | validation | def delete_messages(self, messages):
"""
Delete existing messages.
http://dev.wheniwork.com/#delete-existing-message
"""
url = "/2/messages/?%s" % urlencode([('ids', ",".join(messages))])
data = self._delete_resource(url)
return data | python | {
"resource": ""
} |
q265248 | Sites.get_site | validation | def get_site(self, site_id):
"""
Returns site data.
http://dev.wheniwork.com/#get-existing-site
"""
url = "/2/sites/%s" % site_id
return self.site_from_json(self._get_resource(url)["site"]) | python | {
"resource": ""
} |
q265249 | Sites.get_sites | validation | def get_sites(self):
"""
Returns a list of sites.
http://dev.wheniwork.com/#listing-sites
"""
url = "/2/sites"
data = self._get_resource(url)
sites = []
for entry in data['sites']:
sites.append(self.site_from_json(entry))
return sites | python | {
"resource": ""
} |
q265250 | Sites.create_site | validation | def create_site(self, params={}):
"""
Creates a site
http://dev.wheniwork.com/#create-update-site
"""
url = "/2/sites/"
body = params
data = self._post_resource(url, body)
return self.site_from_json(data["site"]) | python | {
"resource": ""
} |
q265251 | admin_link_move_up | validation | def admin_link_move_up(obj, link_text='up'):
"""Returns a link to a view that moves the passed in object up in rank.
:param obj:
Object to move
:param link_text:
Text to display in the link. Defaults to "up"
:returns:
HTML link code to view for moving the object
"""
if obj.rank == 1:
return ''
content_type = ContentType.objects.get_for_model(obj)
link = reverse('awl-rankedmodel-move', args=(content_type.id, obj.id,
obj.rank - 1))
return '<a href="%s">%s</a>' % (link, link_text) | python | {
"resource": ""
} |
q265252 | admin_link_move_down | validation | def admin_link_move_down(obj, link_text='down'):
"""Returns a link to a view that moves the passed in object down in rank.
:param obj:
Object to move
:param link_text:
Text to display in the link. Defaults to "down"
:returns:
HTML link code to view for moving the object
"""
if obj.rank == obj.grouped_filter().count():
return ''
content_type = ContentType.objects.get_for_model(obj)
link = reverse('awl-rankedmodel-move', args=(content_type.id, obj.id,
obj.rank + 1))
return '<a href="%s">%s</a>' % (link, link_text) | python | {
"resource": ""
} |
q265253 | showfig | validation | def showfig(fig, aspect="auto"):
"""
Shows a figure with a typical orientation so that x and y axes are set up as expected.
"""
ax = fig.gca()
# Swap y axis if needed
alim = list(ax.axis())
if alim[3] < alim[2]:
temp = alim[2]
alim[2] = alim[3]
alim[3] = temp
ax.axis(alim)
ax.set_aspect(aspect)
fig.show() | python | {
"resource": ""
} |
q265254 | _setup_index | validation | def _setup_index(index):
"""Shifts indicies as needed to account for one based indexing
Positive indicies need to be reduced by one to match with zero based
indexing.
Zero is not a valid input, and as such will throw a value error.
Arguments:
index - index to shift
"""
index = int(index)
if index > 0:
index -= 1
elif index == 0:
# Zero indicies should not be allowed by default.
raise ValueError
return index | python | {
"resource": ""
} |
q265255 | Cutter.cut | validation | def cut(self, line):
"""Returns selected positions from cut input source in desired
arrangement.
Argument:
line - input to cut
"""
result = []
line = self.line(line)
for i, field in enumerate(self.positions):
try:
index = _setup_index(field)
try:
result += line[index]
except IndexError:
result.append(self.invalid_pos)
except ValueError:
result.append(str(field))
except TypeError:
result.extend(self._cut_range(line, int(field[0]), i))
return ''.join(result) | python | {
"resource": ""
} |
q265256 | Cutter._setup_positions | validation | def _setup_positions(self, positions):
"""Processes positions to account for ranges
Arguments:
positions - list of positions and/or ranges to process
"""
updated_positions = []
for i, position in enumerate(positions):
ranger = re.search(r'(?P<start>-?\d*):(?P<end>\d*)', position)
if ranger:
if i > 0:
updated_positions.append(self.separator)
start = group_val(ranger.group('start'))
end = group_val(ranger.group('end'))
if start and end:
updated_positions.extend(self._extendrange(start, end + 1))
# Since the number of positions on a line is unknown,
# send input to cause exception that can be caught and call
# _cut_range helper function
elif ranger.group('start'):
updated_positions.append([start])
else:
updated_positions.extend(self._extendrange(1, end + 1))
else:
updated_positions.append(positions[i])
try:
if int(position) and int(positions[i+1]):
updated_positions.append(self.separator)
except (ValueError, IndexError):
pass
return updated_positions | python | {
"resource": ""
} |
q265257 | Cutter._cut_range | validation | def _cut_range(self, line, start, current_position):
"""Performs cut for range from start position to end
Arguments:
line - input to cut
start - start of range
current_position - current position in main cut function
"""
result = []
try:
for j in range(start, len(line)):
index = _setup_index(j)
try:
result.append(line[index])
except IndexError:
result.append(self.invalid_pos)
finally:
result.append(self.separator)
result.append(line[-1])
except IndexError:
pass
try:
int(self.positions[current_position+1])
result.append(self.separator)
except (ValueError, IndexError):
pass
return result | python | {
"resource": ""
} |
q265258 | Cutter._extendrange | validation | def _extendrange(self, start, end):
"""Creates list of values in a range with output delimiters.
Arguments:
start - range start
end - range end
"""
range_positions = []
for i in range(start, end):
if i != 0:
range_positions.append(str(i))
if i < end:
range_positions.append(self.separator)
return range_positions | python | {
"resource": ""
} |
q265259 | lock_file | validation | def lock_file(filename):
"""Locks the file by writing a '.lock' file.
Returns True when the file is locked and
False when the file was locked already"""
lockfile = "%s.lock"%filename
if isfile(lockfile):
return False
else:
with open(lockfile, "w"):
pass
return True | python | {
"resource": ""
} |
q265260 | unlock_file | validation | def unlock_file(filename):
"""Unlocks the file by remove a '.lock' file.
Returns True when the file is unlocked and
False when the file was unlocked already"""
lockfile = "%s.lock"%filename
if isfile(lockfile):
os.remove(lockfile)
return True
else:
return False | python | {
"resource": ""
} |
q265261 | cmd_init_push_to_cloud | validation | def cmd_init_push_to_cloud(args):
"""Initiate the local catalog and push it the cloud"""
(lcat, ccat) = (args.local_catalog, args.cloud_catalog)
logging.info("[init-push-to-cloud]: %s => %s"%(lcat, ccat))
if not isfile(lcat):
args.error("[init-push-to-cloud] The local catalog does not exist: %s"%lcat)
if isfile(ccat):
args.error("[init-push-to-cloud] The cloud catalog already exist: %s"%ccat)
(lmeta, cmeta) = ("%s.lrcloud"%lcat, "%s.lrcloud"%ccat)
if isfile(lmeta):
args.error("[init-push-to-cloud] The local meta-data already exist: %s"%lmeta)
if isfile(cmeta):
args.error("[init-push-to-cloud] The cloud meta-data already exist: %s"%cmeta)
#Let's "lock" the local catalog
logging.info("Locking local catalog: %s"%(lcat))
if not lock_file(lcat):
raise RuntimeError("The catalog %s is locked!"%lcat)
#Copy catalog from local to cloud, which becomes the new "base" changeset
util.copy(lcat, ccat)
# Write meta-data both to local and cloud
mfile = MetaFile(lmeta)
utcnow = datetime.utcnow().strftime(DATETIME_FORMAT)[:-4]
mfile['catalog']['hash'] = hashsum(lcat)
mfile['catalog']['modification_utc'] = utcnow
mfile['catalog']['filename'] = lcat
mfile['last_push']['filename'] = ccat
mfile['last_push']['hash'] = hashsum(lcat)
mfile['last_push']['modification_utc'] = utcnow
mfile.flush()
mfile = MetaFile(cmeta)
mfile['changeset']['is_base'] = True
mfile['changeset']['hash'] = hashsum(lcat)
mfile['changeset']['modification_utc'] = utcnow
mfile['changeset']['filename'] = basename(ccat)
mfile.flush()
#Let's copy Smart Previews
if not args.no_smart_previews:
copy_smart_previews(lcat, ccat, local2cloud=True)
#Finally,let's unlock the catalog files
logging.info("Unlocking local catalog: %s"%(lcat))
unlock_file(lcat)
logging.info("[init-push-to-cloud]: Success!") | python | {
"resource": ""
} |
q265262 | cmd_init_pull_from_cloud | validation | def cmd_init_pull_from_cloud(args):
"""Initiate the local catalog by downloading the cloud catalog"""
(lcat, ccat) = (args.local_catalog, args.cloud_catalog)
logging.info("[init-pull-from-cloud]: %s => %s"%(ccat, lcat))
if isfile(lcat):
args.error("[init-pull-from-cloud] The local catalog already exist: %s"%lcat)
if not isfile(ccat):
args.error("[init-pull-from-cloud] The cloud catalog does not exist: %s"%ccat)
(lmeta, cmeta) = ("%s.lrcloud"%lcat, "%s.lrcloud"%ccat)
if isfile(lmeta):
args.error("[init-pull-from-cloud] The local meta-data already exist: %s"%lmeta)
if not isfile(cmeta):
args.error("[init-pull-from-cloud] The cloud meta-data does not exist: %s"%cmeta)
#Let's "lock" the local catalog
logging.info("Locking local catalog: %s"%(lcat))
if not lock_file(lcat):
raise RuntimeError("The catalog %s is locked!"%lcat)
#Copy base from cloud to local
util.copy(ccat, lcat)
#Apply changesets
cloudDAG = ChangesetDAG(ccat)
path = cloudDAG.path(cloudDAG.root.hash, cloudDAG.leafs[0].hash)
util.apply_changesets(args, path, lcat)
# Write meta-data both to local and cloud
mfile = MetaFile(lmeta)
utcnow = datetime.utcnow().strftime(DATETIME_FORMAT)[:-4]
mfile['catalog']['hash'] = hashsum(lcat)
mfile['catalog']['modification_utc'] = utcnow
mfile['catalog']['filename'] = lcat
mfile['last_push']['filename'] = cloudDAG.leafs[0].mfile['changeset']['filename']
mfile['last_push']['hash'] = cloudDAG.leafs[0].mfile['changeset']['hash']
mfile['last_push']['modification_utc'] = cloudDAG.leafs[0].mfile['changeset']['modification_utc']
mfile.flush()
#Let's copy Smart Previews
if not args.no_smart_previews:
copy_smart_previews(lcat, ccat, local2cloud=False)
#Finally, let's unlock the catalog files
logging.info("Unlocking local catalog: %s"%(lcat))
unlock_file(lcat)
logging.info("[init-pull-from-cloud]: Success!") | python | {
"resource": ""
} |
q265263 | ChangesetDAG.path | validation | def path(self, a_hash, b_hash):
"""Return nodes in the path between 'a' and 'b' going from
parent to child NOT including 'a' """
def _path(a, b):
if a is b:
return [a]
else:
assert len(a.children) == 1
return [a] + _path(a.children[0], b)
a = self.nodes[a_hash]
b = self.nodes[b_hash]
return _path(a, b)[1:] | python | {
"resource": ""
} |
q265264 | _rindex | validation | def _rindex(mylist: Sequence[T], x: T) -> int:
"""Index of the last occurrence of x in the sequence."""
return len(mylist) - mylist[::-1].index(x) - 1 | python | {
"resource": ""
} |
q265265 | create_admin | validation | def create_admin(username='admin', email='admin@admin.com', password='admin'):
"""Create and save an admin user.
:param username:
Admin account's username. Defaults to 'admin'
:param email:
Admin account's email address. Defaults to 'admin@admin.com'
:param password:
Admin account's password. Defaults to 'admin'
:returns:
Django user with staff and superuser privileges
"""
admin = User.objects.create_user(username, email, password)
admin.is_staff = True
admin.is_superuser = True
admin.save()
return admin | python | {
"resource": ""
} |
q265266 | messages_from_response | validation | def messages_from_response(response):
"""Returns a list of the messages from the django MessageMiddleware
package contained within the given response. This is to be used during
unit testing when trying to see if a message was set properly in a view.
:param response: HttpResponse object, likely obtained through a
test client.get() or client.post() call
:returns: a list of tuples (message_string, message_level), one for each
message in the response context
"""
messages = []
if hasattr(response, 'context') and response.context and \
'messages' in response.context:
messages = response.context['messages']
elif hasattr(response, 'cookies'):
# no "context" set-up or no messages item, check for message info in
# the cookies
morsel = response.cookies.get('messages')
if not morsel:
return []
# use the decoder in the CookieStore to process and get a list of
# messages
from django.contrib.messages.storage.cookie import CookieStorage
store = CookieStorage(FakeRequest())
messages = store._decode(morsel.value)
else:
return []
return [(m.message, m.level) for m in messages] | python | {
"resource": ""
} |
q265267 | AdminToolsMixin.authorize | validation | def authorize(self):
"""Authenticates the superuser account via the web login."""
response = self.client.login(username=self.USERNAME,
password=self.PASSWORD)
self.assertTrue(response)
self.authed = True | python | {
"resource": ""
} |
q265268 | AdminToolsMixin.authed_get | validation | def authed_get(self, url, response_code=200, headers={}, follow=False):
"""Does a django test client ``get`` against the given url after
logging in the admin first.
:param url:
URL to fetch
:param response_code:
Expected response code from the URL fetch. This value is
asserted. Defaults to 200
:param headers:
Optional dictionary of headers to send in the request
:param follow:
When True, the get call will follow any redirect requests.
Defaults to False.
:returns:
Django testing ``Response`` object
"""
if not self.authed:
self.authorize()
response = self.client.get(url, follow=follow, **headers)
self.assertEqual(response_code, response.status_code)
return response | python | {
"resource": ""
} |
q265269 | AdminToolsMixin.authed_post | validation | def authed_post(self, url, data, response_code=200, follow=False,
headers={}):
"""Does a django test client ``post`` against the given url after
logging in the admin first.
:param url:
URL to fetch
:param data:
Dictionary to form contents to post
:param response_code:
Expected response code from the URL fetch. This value is
asserted. Defaults to 200
:param headers:
Optional dictionary of headers to send in with the request
:returns:
Django testing ``Response`` object
"""
if not self.authed:
self.authorize()
response = self.client.post(url, data, follow=follow, **headers)
self.assertEqual(response_code, response.status_code)
return response | python | {
"resource": ""
} |
q265270 | AdminToolsMixin.field_value | validation | def field_value(self, admin_model, instance, field_name):
"""Returns the value displayed in the column on the web interface for
a given instance.
:param admin_model:
Instance of a :class:`admin.ModelAdmin` object that is responsible
for displaying the change list
:param instance:
Object instance that is the row in the admin change list
:field_name:
Name of the field/column to fetch
"""
_, _, value = lookup_field(field_name, instance, admin_model)
return value | python | {
"resource": ""
} |
q265271 | Imshow_Slider_Array.imgmax | validation | def imgmax(self):
"""
Highest value of input image.
"""
if not hasattr(self, '_imgmax'):
imgmax = _np.max(self.images[0])
for img in self.images:
imax = _np.max(img)
if imax > imgmax:
imgmax = imax
self._imgmax = imgmax
return self._imgmax | python | {
"resource": ""
} |
q265272 | Imshow_Slider_Array.imgmin | validation | def imgmin(self):
"""
Lowest value of input image.
"""
if not hasattr(self, '_imgmin'):
imgmin = _np.min(self.images[0])
for img in self.images:
imin = _np.min(img)
if imin > imgmin:
imgmin = imin
self._imgmin = imgmin
return _np.min(self.image) | python | {
"resource": ""
} |
q265273 | spawn | validation | def spawn(func, *args, **kwargs):
""" spawns a greenlet that does not print exceptions to the screen.
if you use this function you MUST use this module's join or joinall otherwise the exception will be lost """
return gevent.spawn(wrap_uncaught_greenlet_exceptions(func), *args, **kwargs) | python | {
"resource": ""
} |
q265274 | _usage | validation | def _usage(prog_name=os.path.basename(sys.argv[0])):
'''Returns usage string with no trailing whitespace.'''
spacer = ' ' * len('usage: ')
usage = prog_name + ' -b LIST [-S SEPARATOR] [file ...]\n' \
+ spacer + prog_name + ' -c LIST [-S SEPERATOR] [file ...]\n' \
+ spacer + prog_name \
+ ' -f LIST [-d DELIM] [-e] [-S SEPERATOR] [-s] [file ...]'
# Return usage message with trailing whitespace removed.
return "usage: " + usage.rstrip() | python | {
"resource": ""
} |
q265275 | _parse_args | validation | def _parse_args(args):
"""Setup argparser to process arguments and generate help"""
# parser uses custom usage string, with 'usage: ' removed, as it is
# added automatically via argparser.
parser = argparse.ArgumentParser(description="Remove and/or rearrange "
+ "sections from each line of a file(s).",
usage=_usage()[len('usage: '):])
parser.add_argument('-b', "--bytes", action='store', type=lst, default=[],
help="Bytes to select")
parser.add_argument('-c', "--chars", action='store', type=lst, default=[],
help="Character to select")
parser.add_argument('-f', "--fields", action='store', type=lst, default=[],
help="Fields to select")
parser.add_argument('-d', "--delimiter", action='store', default="\t",
help="Sets field delimiter(default is TAB)")
parser.add_argument('-e', "--regex", action='store_true',
help='Enable regular expressions to be used as input '+
'delimiter')
parser.add_argument('-s', '--skip', action='store_true',
help="Skip lines that do not contain input delimiter.")
parser.add_argument('-S', "--separator", action='store', default="\t",
help="Sets field separator for output.")
parser.add_argument('file', nargs='*', default="-",
help="File(s) to cut")
return parser.parse_args(args) | python | {
"resource": ""
} |
q265276 | open_s3 | validation | def open_s3(bucket):
"""
Opens connection to S3 returning bucket and key
"""
conn = boto.connect_s3(options.paved.s3.access_id, options.paved.s3.secret)
try:
bucket = conn.get_bucket(bucket)
except boto.exception.S3ResponseError:
bucket = conn.create_bucket(bucket)
return bucket | python | {
"resource": ""
} |
q265277 | upload_s3 | validation | def upload_s3(file_path, bucket_name, file_key, force=False, acl='private'):
"""Upload a local file to S3.
"""
file_path = path(file_path)
bucket = open_s3(bucket_name)
if file_path.isdir():
# Upload the contents of the dir path.
paths = file_path.listdir()
paths_keys = list(zip(paths, ['%s/%s' % (file_key, p.name) for p in paths]))
else:
# Upload just the given file path.
paths_keys = [(file_path, file_key)]
for p, k in paths_keys:
headers = {}
s3_key = bucket.get_key(k)
if not s3_key:
from boto.s3.key import Key
s3_key = Key(bucket, k)
content_type = mimetypes.guess_type(p)[0]
if content_type:
headers['Content-Type'] = content_type
file_size = p.stat().st_size
file_data = p.bytes()
file_md5, file_md5_64 = s3_key.get_md5_from_hexdigest(hashlib.md5(file_data).hexdigest())
# Check the hash.
if s3_key.etag:
s3_md5 = s3_key.etag.replace('"', '')
if s3_md5 == file_md5:
info('Hash is the same. Skipping %s' % file_path)
continue
elif not force:
# Check if file on S3 is older than local file.
s3_datetime = datetime.datetime(*time.strptime(
s3_key.last_modified, '%a, %d %b %Y %H:%M:%S %Z')[0:6])
local_datetime = datetime.datetime.utcfromtimestamp(p.stat().st_mtime)
if local_datetime < s3_datetime:
info("File %s hasn't been modified since last " \
"being uploaded" % (file_key))
continue
# File is newer, let's process and upload
info("Uploading %s..." % (file_key))
try:
s3_key.set_contents_from_string(file_data, headers, policy=acl, replace=True, md5=(file_md5, file_md5_64))
except Exception as e:
error("Failed: %s" % e)
raise | python | {
"resource": ""
} |
q265278 | download_s3 | validation | def download_s3(bucket_name, file_key, file_path, force=False):
"""Download a remote file from S3.
"""
file_path = path(file_path)
bucket = open_s3(bucket_name)
file_dir = file_path.dirname()
file_dir.makedirs()
s3_key = bucket.get_key(file_key)
if file_path.exists():
file_data = file_path.bytes()
file_md5, file_md5_64 = s3_key.get_md5_from_hexdigest(hashlib.md5(file_data).hexdigest())
# Check the hash.
try:
s3_md5 = s3_key.etag.replace('"', '')
except KeyError:
pass
else:
if s3_md5 == file_md5:
info('Hash is the same. Skipping %s' % file_path)
return
elif not force:
# Check if file on S3 is older than local file.
s3_datetime = datetime.datetime(*time.strptime(
s3_key.last_modified, '%a, %d %b %Y %H:%M:%S %Z')[0:6])
local_datetime = datetime.datetime.utcfromtimestamp(file_path.stat().st_mtime)
if s3_datetime < local_datetime:
info("File at %s is less recent than the local version." % (file_key))
return
# If it is newer, let's process and upload
info("Downloading %s..." % (file_key))
try:
with open(file_path, 'w') as fo:
s3_key.get_contents_to_file(fo)
except Exception as e:
error("Failed: %s" % e)
raise | python | {
"resource": ""
} |
q265279 | create_ical | validation | def create_ical(request, slug):
""" Creates an ical .ics file for an event using python-card-me. """
event = get_object_or_404(Event, slug=slug)
# convert dates to datetimes.
# when we change code to datetimes, we won't have to do this.
start = event.start_date
start = datetime.datetime(start.year, start.month, start.day)
if event.end_date:
end = event.end_date
end = datetime.datetime(end.year, end.month, end.day)
else:
end = start
cal = card_me.iCalendar()
cal.add('method').value = 'PUBLISH'
vevent = cal.add('vevent')
vevent.add('dtstart').value = start
vevent.add('dtend').value = end
vevent.add('dtstamp').value = datetime.datetime.now()
vevent.add('summary').value = event.name
response = HttpResponse(cal.serialize(), content_type='text/calendar')
response['Filename'] = 'filename.ics'
response['Content-Disposition'] = 'attachment; filename=filename.ics'
return response | python | {
"resource": ""
} |
q265280 | event_all_comments_list | validation | def event_all_comments_list(request, slug):
"""
Returns a list view of all comments for a given event.
Combines event comments and update comments in one list.
"""
event = get_object_or_404(Event, slug=slug)
comments = event.all_comments
page = int(request.GET.get('page', 99999)) # feed empty page by default to push to last page
is_paginated = False
if comments:
paginator = Paginator(comments, 50) # Show 50 comments per page
try:
comments = paginator.page(page)
except EmptyPage:
# If page is out of range (e.g. 9999), deliver last page of results.
comments = paginator.page(paginator.num_pages)
is_paginated = comments.has_other_pages()
return render(request, 'happenings/event_comments.html', {
"event": event,
"comment_list": comments,
"object_list": comments,
"page_obj": comments,
"page": page,
"is_paginated": is_paginated,
"key": key
}) | python | {
"resource": ""
} |
q265281 | event_update_list | validation | def event_update_list(request, slug):
"""
Returns a list view of updates for a given event.
If the event is over, it will be in chronological order.
If the event is upcoming or still going,
it will be in reverse chronological order.
"""
event = get_object_or_404(Event, slug=slug)
updates = Update.objects.filter(event__slug=slug)
if event.recently_ended():
# if the event is over, use chronological order
updates = updates.order_by('id')
else:
# if not, use reverse chronological
updates = updates.order_by('-id')
return render(request, 'happenings/updates/update_list.html', {
'event': event,
'object_list': updates,
}) | python | {
"resource": ""
} |
q265282 | video_list | validation | def video_list(request, slug):
"""
Displays list of videos for given event.
"""
event = get_object_or_404(Event, slug=slug)
return render(request, 'video/video_list.html', {
'event': event,
'video_list': event.eventvideo_set.all()
}) | python | {
"resource": ""
} |
q265283 | add_event | validation | def add_event(request):
""" Public form to add an event. """
form = AddEventForm(request.POST or None)
if form.is_valid():
instance = form.save(commit=False)
instance.sites = settings.SITE_ID
instance.submitted_by = request.user
instance.approved = True
instance.slug = slugify(instance.name)
instance.save()
messages.success(request, 'Your event has been added.')
return HttpResponseRedirect(reverse('events_index'))
return render(request, 'happenings/event_form.html', {
'form': form,
'form_title': 'Add an event'
}) | python | {
"resource": ""
} |
q265284 | add_memory | validation | def add_memory(request, slug):
""" Adds a memory to an event. """
event = get_object_or_404(Event, slug=slug)
form = MemoryForm(request.POST or None, request.FILES or None)
if form.is_valid():
instance = form.save(commit=False)
instance.user = request.user
instance.event = event
instance.save()
msg = "Your thoughts were added. "
if request.FILES:
photo_list = request.FILES.getlist('photos')
photo_count = len(photo_list)
for upload_file in photo_list:
process_upload(upload_file, instance, form, event, request)
if photo_count > 1:
msg += "{} images were added and should appear soon.".format(photo_count)
else:
msg += "{} image was added and should appear soon.".format(photo_count)
messages.success(request, msg)
return HttpResponseRedirect('../')
return render(request, 'happenings/add_memories.html', {'form': form, 'event': event}) | python | {
"resource": ""
} |
q265285 | SketchRunner.__register_library | validation | def __register_library(self, module_name: str, attr: str, fallback: str = None):
"""Inserts Interpreter Library of imports into sketch in a very non-consensual way"""
# Import the module Named in the string
try:
module = importlib.import_module(module_name)
# If module is not found it checks if an alternative is is listed
# If it is then it substitutes it, just so that the code can run
except ImportError:
if fallback is not None:
module = importlib.import_module(fallback)
self.__logger.warn(module_name + " not available: Replaced with " + fallback)
else:
self.__logger.warn(module_name + " not available: No Replacement Specified")
# Cram the module into the __sketch in the form of module -> "attr"
# AKA the same as `import module as attr`
if not attr in dir(self.__sketch):
setattr(self.__sketch, attr, module)
else:
self.__logger.warn(attr +" could not be imported as it's label is already used in the sketch") | python | {
"resource": ""
} |
q265286 | EllipseBeam.set_moments | validation | def set_moments(self, sx, sxp, sxxp):
"""
Sets the beam moments directly.
Parameters
----------
sx : float
Beam moment where :math:`\\text{sx}^2 = \\langle x^2 \\rangle`.
sxp : float
Beam moment where :math:`\\text{sxp}^2 = \\langle x'^2 \\rangle`.
sxxp : float
Beam moment where :math:`\\text{sxxp} = \\langle x x' \\rangle`.
"""
self._sx = sx
self._sxp = sxp
self._sxxp = sxxp
emit = _np.sqrt(sx**2 * sxp**2 - sxxp**2)
self._store_emit(emit=emit) | python | {
"resource": ""
} |
q265287 | EllipseBeam.set_Courant_Snyder | validation | def set_Courant_Snyder(self, beta, alpha, emit=None, emit_n=None):
"""
Sets the beam moments indirectly using Courant-Snyder parameters.
Parameters
----------
beta : float
Courant-Snyder parameter :math:`\\beta`.
alpha : float
Courant-Snyder parameter :math:`\\alpha`.
emit : float
Beam emittance :math:`\\epsilon`.
emit_n : float
Normalized beam emittance :math:`\\gamma \\epsilon`.
"""
self._store_emit(emit=emit, emit_n=emit_n)
self._sx = _np.sqrt(beta*self.emit)
self._sxp = _np.sqrt((1+alpha**2)/beta*self.emit)
self._sxxp = -alpha*self.emit | python | {
"resource": ""
} |
q265288 | normalize_slice | validation | def normalize_slice(slice_obj, length):
"""
Given a slice object, return appropriate values for use in the range function
:param slice_obj: The slice object or integer provided in the `[]` notation
:param length: For negative indexing we need to know the max length of the object.
"""
if isinstance(slice_obj, slice):
start, stop, step = slice_obj.start, slice_obj.stop, slice_obj.step
if start is None:
start = 0
if stop is None:
stop = length
if step is None:
step = 1
if start < 0:
start += length
if stop < 0:
stop += length
elif isinstance(slice_obj, int):
start = slice_obj
if start < 0:
start += length
stop = start + 1
step = 1
else:
raise TypeError
if (0 <= start <= length) and (0 <= stop <= length):
return start, stop, step
raise IndexError | python | {
"resource": ""
} |
q265289 | BaseValidator.error | validation | def error(self, error_code, value, **kwargs):
"""
Helper to add error to messages field. It fills placeholder with extra call parameters
or values from message_value map.

:param error_code: Error code to use
:type error_code: str
:param value: Value checked
:param kwargs: Map of values to use in placeholders
"""
# Allow error codes to be remapped; fall back to the code itself.
code = self.error_code_map.get(error_code, error_code)
try:
message = Template(self.error_messages[code])
except KeyError:
# No message registered under the mapped code: retry with the original one.
message = Template(self.error_messages[error_code])
# Mask the offending value when this validator is marked hidden.
placeholders = {"value": self.hidden_value if self.hidden else value}
placeholders.update(kwargs)
placeholders.update(self.message_values)
# safe_substitute leaves unknown placeholders intact instead of raising.
self.messages[code] = message.safe_substitute(placeholders) | python | {
"resource": ""
} |
q265290 | copy | validation | def copy(src, dst):
"""File copy that support compress and decompress of zip files"""
# Whether source / destination are zip archives, judged by extension.
(szip, dzip) = (src.endswith(".zip"), dst.endswith(".zip"))
logging.info("Copy: %s => %s"%(src, dst))
if szip and dzip:#If both zipped, we can simply use copy
shutil.copy2(src, dst)
elif szip:
# Decompress: the archive must hold exactly one member, which is
# extracted to a temp dir and then moved over dst.
with zipfile.ZipFile(src, mode='r') as z:
tmpdir = tempfile.mkdtemp()
try:
z.extractall(tmpdir)
if len(z.namelist()) != 1:
raise RuntimeError("The zip file '%s' should only have one "\
"compressed file"%src)
tmpfile = join(tmpdir,z.namelist()[0])
try:
# Remove any stale destination; ignore if it does not exist.
os.remove(dst)
except OSError:
pass
shutil.move(tmpfile, dst)
finally:
# Always clean up the scratch directory.
shutil.rmtree(tmpdir, ignore_errors=True)
elif dzip:
# Compress: store src as the single member of a new archive.
with zipfile.ZipFile(dst, mode='w', compression=ZIP_DEFLATED) as z:
z.write(src, arcname=basename(src))
else:#None of them are zipped
shutil.copy2(src, dst) | python | {
"resource": ""
} |
q265291 | apply_changesets | validation | def apply_changesets(args, changesets, catalog):
"""Apply to the 'catalog' the changesets in the metafile list 'changesets'"""
tmpdir = tempfile.mkdtemp()
tmp_patch = join(tmpdir, "tmp.patch")
tmp_lcat = join(tmpdir, "tmp.lcat")
for node in changesets:
# NOTE(review): bare `remove` is presumably a module-level helper that
# tolerates a missing file -- confirm; os.remove would raise on the first pass.
remove(tmp_patch)
copy(node.mfile['changeset']['filename'], tmp_patch)
logging.info("mv %s %s"%(catalog, tmp_lcat))
# Move the current catalog aside so the patch command can rebuild it in place.
shutil.move(catalog, tmp_lcat)
# Substitute the $in1/$patch/$out placeholders of the configured command.
cmd = args.patch_cmd.replace("$in1", tmp_lcat)\
.replace("$patch", tmp_patch)\
.replace("$out", catalog)
logging.info("Patch: %s"%cmd)
# Runs through the shell; patch_cmd is assumed to be trusted configuration.
subprocess.check_call(cmd, shell=True)
shutil.rmtree(tmpdir, ignore_errors=True) | python | {
"resource": ""
} |
q265292 | AddEventForm.clean | validation | def clean(self):
"""
Validate that an event with this name on this date does not exist.
"""
cleaned = super(EventForm, self).clean()
if Event.objects.filter(name=cleaned['name'], start_date=cleaned['start_date']).count():
raise forms.ValidationError(u'This event appears to be in the database already.')
return cleaned | python | {
"resource": ""
} |
q265293 | loop_in_background | validation | def loop_in_background(interval, callback):
"""
When entering the context, spawns a greenlet that sleeps for `interval` seconds between `callback` executions.
When leaving the context stops the greenlet.
The yielded object is the `GeventLoop` object so the loop can be stopped from within the context.
For example:
```
with loop_in_background(60.0, purge_cache) as purge_cache_job:
...
...
if should_stop_cache():
purge_cache_job.stop()
```
"""
loop = GeventLoop(interval, callback)
loop.start()
try:
yield loop
finally:
# The loop may already have been stopped from inside the context;
# only stop one that is still running.
if loop.has_started():
loop.stop() | python | {
"resource": ""
} |
q265294 | GeventLoopBase._loop | validation | def _loop(self):
"""Main loop - used internally."""
while True:
try:
# Route unexpected callback exceptions through the shared handler.
with uncaught_greenlet_exception_context():
self._loop_callback()
except gevent.GreenletExit:
# kill() raises GreenletExit inside this greenlet: exit quietly.
break
# Sleep between iterations; wait() returns True once stop() sets the event.
if self._stop_event.wait(self._interval):
break
# Loop finished (stopped or killed): reset internal state.
self._clear() | python | {
"resource": ""
} |
q265295 | GeventLoopBase.start | validation | def start(self):
"""
Starts the loop. Calling a running loop is an error.
"""
assert not self.has_started(), "called start() on an active GeventLoop"
# Fresh event per run; _loop polls it to know when to exit.
self._stop_event = Event()
# note that we don't use safe_greenlets.spawn because we take care of it in _loop by ourselves
self._greenlet = gevent.spawn(self._loop) | python | {
"resource": ""
} |
q265296 | GeventLoopBase.kill | validation | def kill(self):
"""Kills the running loop and waits till it gets killed."""
assert self.has_started(), "called kill() on a non-active GeventLoop"
# Signal a cooperative stop first, then forcibly kill the greenlet
# (gevent's Greenlet.kill blocks until the greenlet is dead by default).
self._stop_event.set()
self._greenlet.kill()
self._clear() | python | {
"resource": ""
} |
q265297 | NonUniformImage | validation | def NonUniformImage(x, y, z, ax=None, fig=None, cmap=None, alpha=None, scalex=True, scaley=True, add_cbar=True, **kwargs):
"""
Used to plot a set of coordinates.
Parameters
----------
x, y : :class:`numpy.ndarray`
1-D ndarrays of lengths N and M, respectively, specifying pixel centers
z : :class:`numpy.ndarray`
An (M, N) ndarray or masked array of values to be colormapped, or a (M, N, 3) RGB array, or a (M, N, 4) RGBA array.
ax : :class:`matplotlib.axes.Axes`, optional
The axis to plot to.
fig : :class:`matplotlib.figure.Figure`, optional
The figure to plot to.
cmap : :class:`matplotlib.colors.Colormap`, optional
The colormap to use.
alpha : float, optional
The transparency to use.
scalex : bool, optional
To set the x limits to available data
scaley : bool, optional
To set the y limits to available data
add_cbar : bool, optional
Whether ot add a colorbar or not.
Returns
-------
img : :class:`matplotlib.image.NonUniformImage`
Object representing the :class:`matplotlib.image.NonUniformImage`.
"""
if ax is None and fig is None:
fig, ax = _setup_axes()
elif ax is None:
ax = fig.gca()
elif fig is None:
fig = ax.get_figure()
norm = kwargs.get('norm', None)
im = _mplim.NonUniformImage(ax, **kwargs)
vmin = kwargs.pop('vmin', _np.min(z))
vmax = kwargs.pop('vmax', _np.max(z))
# im.set_clim(vmin=vmin, vmax=vmax)
if cmap is not None:
im.set_cmap(cmap)
m = _cm.ScalarMappable(cmap=im.get_cmap(), norm=norm)
m.set_array(z)
if add_cbar:
cax, cb = _cb(ax=ax, im=m, fig=fig)
if alpha is not None:
im.set_alpha(alpha)
im.set_data(x, y, z)
ax.images.append(im)
if scalex:
xmin = min(x)
xmax = max(x)
ax.set_xlim(xmin, xmax)
if scaley:
ymin = min(y)
ymax = max(y)
ax.set_ylim(ymin, ymax)
return _SI(im=im, cb=cb, cax=cax) | python | {
"resource": ""
} |
q265298 | LatexFixer._sentence_to_interstitial_spacing | validation | def _sentence_to_interstitial_spacing(self):
"""Fix common spacing errors caused by LaTeX's habit
of using an inter-sentence space after any full stop."""
# Characters following an abbreviation that show its period did not end a sentence.
not_sentence_end_chars = [' ']
abbreviations = ['i.e.', 'e.g.', ' v.',
' w.', ' wh.']
titles = ['Prof.', 'Mr.', 'Mrs.', 'Messrs.',
'Mmes.', 'Msgr.', 'Ms.', 'Fr.', 'Rev.',
'St.', 'Dr.', 'Lieut.', 'Lt.', 'Capt.',
'Cptn.', 'Sgt.', 'Sjt.', 'Gen.', 'Hon.',
'Cpl.', 'L-Cpl.', 'Pvt.', 'Dvr.', 'Gnr.',
'Spr.', 'Col.', 'Lt-Col', 'Lt-Gen.', 'Mx.']
# "i.e. " -> "i.e.\ " : backslash-space forces a normal interword space.
for abbrev in abbreviations:
for x in not_sentence_end_chars:
self._str_replacement(abbrev + x, abbrev + '\ ')
# "Dr. Smith" -> "Dr.~Smith" : a tie keeps the title on the same line.
for title in titles:
for x in not_sentence_end_chars:
self._str_replacement(title + x, title + '~') | python | {
"resource": ""
} |
q265299 | LatexFixer._hyphens_to_dashes | validation | def _hyphens_to_dashes(self):
"""Transform hyphens to various kinds of dashes"""
problematic_hyphens = [(r'-([.,!)])', r'---\1'),
(r'(?<=\d)-(?=\d)', '--'),
(r'(?<=\s)-(?=\s)', '---')]
for problem_case in problematic_hyphens:
self._regex_replacement(*problem_case) | python | {
"resource": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.