| _id (string, 2-7 chars) | title (string, 1-88 chars) | partition (3 classes) | text (string, 31-13.1k chars) | language (1 class) | meta_information (dict) |
|---|---|---|---|---|---|
q264100 | Repo.find_matching_files | validation | def find_matching_files(self, includes):
"""
For various actions we need files that match patterns
"""
if len(includes) == 0:
return []
files = [f['relativepath'] for f in self.package['resources']]
includes = r'|'.join([fnmatch.translate(x) for x in includes])
# Match both the file name as well the path..
| python | {
"resource": ""
} |
q264101 | Repo.run | validation | def run(self, cmd, *args):
"""
Run a specific command using the manager
"""
if self.manager is None:
raise Exception("Fatal internal error: Missing | python | {
"resource": ""
} |
q264102 | Repo.get_resource | validation | def get_resource(self, p):
"""
Get metadata for a given file
"""
for r in self.package['resources']:
| python | {
"resource": ""
} |
q264103 | RepoManagerBase.lookup | validation | def lookup(self, username=None, reponame=None, key=None):
"""
Lookup all available repos
"""
if key is None:
key = self.key(username, reponame)
| python | {
"resource": ""
} |
q264104 | RepoManagerBase.rootdir | validation | def rootdir(self, username, reponame, create=True):
"""
Working directory for the repo
"""
path = os.path.join(self.workspace,
'datasets',
username,
| python | {
"resource": ""
} |
q264105 | RepoManagerBase.add | validation | def add(self, repo):
"""
Add repo to the internal lookup table...
"""
key = self.key(repo.username, repo.reponame)
| python | {
"resource": ""
} |
q264106 | lookup | validation | def lookup(username, reponame):
"""
Lookup a repo based on username and reponame
"""
mgr = plugins_get_mgr()
# XXX This should be generalized to all repo managers.
repomgr = mgr.get(what='repomanager', name='git')
| python | {
"resource": ""
} |
q264107 | shellcmd | validation | def shellcmd(repo, args):
"""
Run a shell command within the repo's context
Parameters
----------
repo: Repository object
args: Shell | python | {
"resource": ""
} |
q264108 | datapackage_exists | validation | def datapackage_exists(repo):
"""
Check if the datapackage exists...
"""
| python | {
"resource": ""
} |
q264109 | bootstrap_datapackage | validation | def bootstrap_datapackage(repo, force=False,
options=None, noinput=False):
"""
Create the datapackage file..
"""
print("Bootstrapping datapackage")
# get the directory
tsprefix = datetime.now().date().isoformat()
# Initial data package json
package = OrderedDict([
('title', ''),
('description', ''),
('username', repo.username),
('reponame', repo.reponame),
('name', str(repo)),
('title', ""),
('description', ""),
('keywords', []),
('resources', []),
('creator', getpass.getuser()),
| python | {
"resource": ""
} |
q264110 | init | validation | def init(username, reponame, setup,
force=False, options=None,
noinput=False):
"""
Initialize an empty repository with datapackage.json
Parameters
----------
username: Name of the user
reponame: Name of the repo
setup: Specify the 'configuration' (git only, git+s3 backend etc)
force: Force creation of the files
options: Dictionary with content of dgit.json, if available.
noinput: Automatic operation with no human interaction
"""
mgr = plugins_get_mgr()
repomgr = mgr.get(what='repomanager', name='git')
backendmgr = None
if setup == 'git+s3':
backendmgr = mgr.get(what='backend', name='s3')
repo = repomgr.init(username, reponame, force, backendmgr)
# | python | {
"resource": ""
} |
q264111 | annotate_metadata_data | validation | def annotate_metadata_data(repo, task, patterns=["*"], size=0):
"""
Update metadata with the content of the files
"""
mgr = plugins_get_mgr()
keys = mgr.search('representation')['representation']
representations = [mgr.get_by_key('representation', k) for k in keys]
matching_files = repo.find_matching_files(patterns)
package = repo.package
rootdir = repo.rootdir
files = package['resources']
for f in files:
relativepath = f['relativepath']
if relativepath in matching_files:
path = os.path.join(rootdir, relativepath)
if task == 'preview':
| python | {
"resource": ""
} |
q264112 | annotate_metadata_code | validation | def annotate_metadata_code(repo, files):
"""
Update metadata with the commit information
"""
package = repo.package
package['code'] = []
for p in files:
matching_files = glob2.glob("**/{}".format(p))
for f in matching_files:
| python | {
"resource": ""
} |
q264113 | annotate_metadata_action | validation | def annotate_metadata_action(repo):
"""
Update metadata with the action history
"""
package = repo.package
print("Including history of actions")
with cd(repo.rootdir):
filename = ".dgit/log.json"
if os.path.exists(filename):
history = open(filename).readlines()
actions = []
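# The log appears to be line-delimited JSON; parse each action record independently.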
for a in history:
try:
| python | {
"resource": ""
} |
q264114 | annotate_metadata_platform | validation | def annotate_metadata_platform(repo):
"""
Update metadata with host information
"""
print("Added platform information")
package = repo.package
mgr = plugins_get_mgr()
| python | {
"resource": ""
} |
q264115 | annotate_metadata_dependencies | validation | def annotate_metadata_dependencies(repo):
"""
Collect information from the dependent repos
"""
options = repo.options
if 'dependencies' not in options:
print("No dependencies")
return []
repos = []
dependent_repos = options['dependencies']
for d in dependent_repos:
if "/" not in d:
print("Invalid dependency specification")
(username, reponame) = d.split("/")
try:
repos.append(repo.manager.lookup(username, reponame))
except:
| python | {
"resource": ""
} |
q264116 | post | validation | def post(repo, args=[]):
"""
Post to metadata server
Parameters
----------
repo: Repository object (result of lookup)
"""
mgr = plugins_get_mgr()
keys = mgr.search(what='metadata')
keys = keys['metadata']
if len(keys) == 0:
return
# Incorporate pipeline information...
if 'pipeline' in repo.options:
for name, details in repo.options['pipeline'].items():
patterns = details['files']
matching_files = repo.find_matching_files(patterns)
matching_files.sort()
details['files'] = matching_files
for i, f in enumerate(matching_files):
r = repo.get_resource(f)
if 'pipeline' not in r:
r['pipeline'] = []
r['pipeline'].append(name + " [Step {}]".format(i))
if 'metadata-management' in repo.options:
print("Collecting all the required metadata to post")
metadata = repo.options['metadata-management']
# Add data repo history
if 'include-data-history' in metadata and metadata['include-data-history']:
repo.package['history'] = get_history(repo.rootdir)
# Add action history
if 'include-action-history' in metadata and metadata['include-action-history']:
annotate_metadata_action(repo)
# Add data repo history
if 'include-preview' in metadata:
annotate_metadata_data(repo,
task='preview',
patterns=metadata['include-preview']['files'],
size=metadata['include-preview']['length'])
if (('include-schema' in metadata) and metadata['include-schema']):
annotate_metadata_data(repo, task='schema')
if 'include-code-history' in metadata:
annotate_metadata_code(repo, files=metadata['include-code-history'])
if 'include-platform' in metadata:
annotate_metadata_platform(repo)
if 'include-validation' in metadata:
annotate_metadata_validation(repo)
if 'include-dependencies' in metadata:
annotate_metadata_dependencies(repo)
history = repo.package.get('history',None)
if (('include-tab-diffs' | python | {
"resource": ""
} |
q264117 | plugins_show | validation | def plugins_show(what=None, name=None, version=None, details=False):
"""
Show details of available plugins
Parameters
----------
what: Class of plugins e.g., backend
name: Name of the plugin e.g., s3
| python | {
"resource": ""
} |
q264118 | PluginManager.discover_all_plugins | validation | def discover_all_plugins(self):
"""
Load all plugins from dgit extension
"""
for | python | {
"resource": ""
} |
q264119 | PluginManager.register | validation | def register(self, what, obj):
"""
Registering a plugin
Parameters
----------
what: Nature of the plugin (backend, instrumentation, repo)
obj: Instance of the plugin
"""
# print("Registering pattern", name, pattern)
name = obj.name | python | {
"resource": ""
} |
q264120 | PluginManager.search | validation | def search(self, what, name=None, version=None):
"""
Search for a plugin
"""
filtered = {}
# The search may be for a full scan (what is None) or for a
# specific class of plugins
if what is None:
whats = list(self.plugins.keys())
else:
if what not in self.plugins:
raise Exception("Unknown class of plugins")
| python | {
"resource": ""
} |
q264121 | instantiate | validation | def instantiate(repo, validator_name=None, filename=None, rulesfiles=None):
"""
Instantiate the validation specification
"""
default_validators = repo.options.get('validator', {})
validators = {}
if validator_name is not None:
# Handle the case where a validator is specified..
if validator_name in default_validators:
validators = {
validator_name : default_validators[validator_name]
}
else:
validators = {
validator_name : {
'files': [],
'rules': {},
'rules-files': []
}
}
else:
validators = default_validators
#=========================================
# Insert the file names
#=========================================
if filename is not None:
matching_files = repo.find_matching_files([filename])
if len(matching_files) == 0:
print("Filename could not be found", filename)
raise Exception("Invalid filename pattern")
for v in validators:
validators[v]['files'] = matching_files
else:
# Instantiate the files from the patterns specified
for v in validators:
if 'files' not in validators[v]:
validators[v]['files'] = []
elif len(validators[v]['files']) > 0:
| python | {
"resource": ""
} |
q264122 | validate | validation | def validate(repo,
validator_name=None,
filename=None,
rulesfiles=None,
args=[]):
"""
Validate the content of the files for consistency. Validators can
look as deeply as needed into the files. dgit treats them all as
black boxes.
Parameters
----------
repo: Repository object
validator_name: Name of validator, if any. If none, then all validators specified in dgit.json will be included.
filename: Pattern that specifies files that must be processed by the validators selected. If none, then the default specification in dgit.json is used.
rulesfiles: Pattern specifying the files that contain the rules that the validators will use
Returns
-------
status: A list of dictionaries, each with target file processed, rules file applied, status of the validation and any error message.
""" | python | {
"resource": ""
} |
q264123 | LocalBackend.url_is_valid | validation | def url_is_valid(self, url):
"""
Check if a URL exists
"""
# Check if the file system path exists...
if url.startswith("file://"):
| python | {
"resource": ""
} |
q264124 | BasicMetadata.post | validation | def post(self, repo):
"""
Post to the metadata server
Parameters
----------
repo
"""
datapackage = repo.package
url = self.url
token = self.token
headers = {
'Authorization': 'Token {}'.format(token),
'Content-Type': 'application/json'
}
try:
r = requests.post(url,
| python | {
"resource": ""
} |
q264125 | get_module_class | validation | def get_module_class(class_path):
"""
imports and returns module class from ``path.to.module.Class``
argument
"""
mod_name, cls_name = class_path.rsplit('.', 1)
| python | {
"resource": ""
} |
q264126 | find_executable_files | validation | def find_executable_files():
"""
Find at most 5 executables that are responsible for this repo.
"""
files = glob.glob("*") + glob.glob("*/*") + glob.glob('*/*/*')
files = filter(lambda f: os.path.isfile(f), files)
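# Mask of the owner, group, and other execute permission bits.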
executable = stat.S_IEXEC | stat.S_IXGRP | stat.S_IXOTH
final = []
for filename in files:
if os.path.isfile(filename):
| python | {
"resource": ""
} |
q264127 | auto_get_repo | validation | def auto_get_repo(autooptions, debug=False):
"""
Automatically get repo
Parameters
----------
autooptions: dgit.json content
"""
# plugin manager
pluginmgr = plugins_get_mgr()
# get the repo manager
repomgr = pluginmgr.get(what='repomanager', name='git')
repo = None
try:
if debug:
print("Looking repo")
repo = repomgr.lookup(username=autooptions['username'],
reponame=autooptions['reponame'])
except:
# Clone the repo
try:
print("Checking and cloning if the dataset exists on backend")
url = autooptions['remoteurl']
if debug:
print("Doesnt exist. trying to clone: {}".format(url))
common_clone(url)
repo = repomgr.lookup(username=autooptions['username'],
reponame=autooptions['reponame'])
if debug:
print("Cloning successful")
except:
# traceback.print_exc()
yes = input("Repo doesnt exist. Should | python | {
"resource": ""
} |
q264128 | get_files_to_commit | validation | def get_files_to_commit(autooptions):
"""
Look through the local directory to pick up files to check
"""
workingdir = autooptions['working-directory']
includes = autooptions['track']['includes']
excludes = autooptions['track']['excludes']
# transform glob patterns to regular expressions
# print("Includes ", includes)
includes = r'|'.join([fnmatch.translate(x) for x in includes])
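# r'$.' can never match anything, so an empty exclude list excludes nothing.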
excludes = r'|'.join([fnmatch.translate(x) for x in excludes]) or r'$.'
matched_files = []
for root, dirs, files in os.walk(workingdir):
# print("Looking at ", files)
# exclude dirs
# dirs[:] = [os.path.join(root, d) for d in dirs]
| python | {
"resource": ""
} |
q264129 | auto_add | validation | def auto_add(repo, autooptions, files):
"""
Cleanup the paths and add
"""
# Get the mappings and keys.
mapping = { ".": "" }
if (('import' in autooptions) and
('directory-mapping' in autooptions['import'])):
mapping = autooptions['import']['directory-mapping']
# Apply the longest prefix first...
keys = mapping.keys()
keys = sorted(keys, key=lambda k: len(k), reverse=True)
count = 0
params = []
for f in files:
# Find the destination
relativepath = f
for k in keys:
| python | {
"resource": ""
} |
q264130 | Api.pull_stream | validation | def pull_stream(self, uri, **kwargs):
"""
This will try to pull in a stream from an external source. Once a
stream has been successfully pulled it is assigned a 'local stream
name' which can be used to access the stream from the EMS.
:param uri: The URI of the external stream. Can be RTMP, RTSP or
unicast/multicast (d) mpegts
:type uri: str
:param keepAlive: If keepAlive is set to 1, the server will attempt to
reestablish connection with a stream source after a connection has
been lost. The reconnect will be attempted once every second
(default: 1 true)
:type keepAlive: int
:param localStreamName: If provided, the stream will be given this
name. Otherwise, a fallback technique is used to determine the stream
name (based on the URI)
:type localStreamName: str
:param forceTcp: If 1 and if the stream is RTSP, a TCP connection will
be forced. Otherwise the transport mechanism will be negotiated
(UDP or TCP) (default: 1 true)
:type forceTcp: int
:param tcUrl: When specified, this value will be used to set the TC URL
in the initial RTMP connect invoke
:type tcUrl: str
:param pageUrl: When specified, this value will be used to set the
originating web page address in the initial RTMP connect invoke
:type pageUrl: str
:param swfUrl: When specified, this value will be used to set the
originating swf URL in the initial RTMP connect invoke
:type swfUrl: str
:param rangeStart: For RTSP and RTMP connections. A value from which
the playback should start expressed in seconds. There are 2 special
values: -2 and -1. For more information, please read about
start/len parameters here:
http://livedocs.adobe.com/flashmediaserver/3.0/hpdocs/help.html?content=00000185.html
:type rangeStart: int
:param rangeEnd: The length in seconds for the playback. -1 is a
special value. For more information, please read about start/len
parameters here:
http://livedocs.adobe.com/flashmediaserver/3.0/hpdocs/help.html?content=00000185.html
:type rangeEnd: int
:param ttl: Sets the IP_TTL (time to live) option on the socket
:type ttl: int
:param tos: Sets the IP_TOS (Type of Service) option on the socket
:type tos: int
:param rtcpDetectionInterval: How much time (in seconds) should the
server wait for RTCP packets before declaring the RTSP stream as a
RTCP-less stream
:type | python | {
"resource": ""
} |
q264131 | Api.record | validation | def record(self, localStreamName, pathToFile, **kwargs):
"""
Records any inbound stream. The record command allows users to record
a stream that may not yet exist. When a new stream is brought into
the server, it is checked against a list of streams to be recorded.
Streams can be recorded as FLV files, MPEG-TS files or as MP4 files.
:param localStreamName: The name of the stream to be used as input
for recording.
:type localStreamName: str
:param pathToFile: Specify path and file name to write to.
:type pathToFile: str
:param type: `ts`, `mp4` or `flv`
:type type: str
:param overwrite: If false, when a file already exists for the stream
name, a new file will be created with the next appropriate number
appended. If 1 (true), files with the same name will be
overwritten.
:type overwrite: int
:param keepAlive: If 1 (true), the server will restart recording every
time the stream becomes available again.
:type keepAlive: int
:param chunkLength: If non-zero the record command will start a new
recording file after ChunkLength seconds have elapsed.
:type chunkLength: int
:param waitForIDR: This is used if | python | {
"resource": ""
} |
q264132 | Api.create_ingest_point | validation | def create_ingest_point(self, privateStreamName, publicStreamName):
"""
Creates an RTMP ingest point, which mandates that streams pushed into
the EMS have a target stream name which matches one Ingest Point
privateStreamName.
:param privateStreamName: The name that RTMP Target Stream Names must
match.
:type privateStreamName: str
:param publicStreamName: The name that is used to access the stream
pushed to the privateStreamName. The publicStreamName becomes the
streams localStreamName.
:type publicStreamName: str
| python | {
"resource": ""
} |
q264133 | instantiate | validation | def instantiate(repo, name=None, filename=None):
"""
Instantiate the generator and filename specification
"""
default_transformers = repo.options.get('transformer', {})
# If a name is specified, then look up the options from dgit.json
# if specified. Otherwise it is initialized to an empty list of
# files.
transformers = {}
if name is not None:
# Handle the case where a generator is specified..
if name in default_transformers:
transformers = {
name : default_transformers[name]
}
else:
transformers = {
name : {
'files': [],
}
}
else:
transformers = default_transformers
#=========================================
# Map the filename patterns to list of files
#=========================================
# Instantiate the files from the patterns specified
input_matching_files = None
if filename is not None:
input_matching_files = repo.find_matching_files([filename])
for t in transformers:
for k in transformers[t]:
if "files" not in k:
continue
if k == "files" and | python | {
"resource": ""
} |
q264134 | GitRepoManager._run | validation | def _run(self, cmd):
"""
Helper function to run commands
Parameters
----------
cmd : list
Arguments to git command
"""
# This is here in case the .gitconfig is not accessible for
# some reason.
environ = os.environ.copy()
environ['GIT_COMMITTER_NAME'] = self.fullname
environ['GIT_COMMITTER_EMAIL'] = self.email
environ['GIT_AUTHOR_NAME'] = self.fullname
| python | {
"resource": ""
} |
q264135 | GitRepoManager._run_generic_command | validation | def _run_generic_command(self, repo, cmd):
"""
Run a generic command within the repo. Assumes that you are
in the repo's root directory
"""
result = None
with cd(repo.rootdir):
# Don't use sh. It does not collect the stdout of all
| python | {
"resource": ""
} |
q264136 | GitRepoManager.init | validation | def init(self, username, reponame, force, backend=None):
"""
Initialize a Git repo
Parameters
----------
username, reponame : The repo is identified by the tuple (username, reponame)
force: force initialization of the repo even if exists
backend: backend that must be used for this (e.g. s3)
"""
key = self.key(username, reponame)
# In local filesystem-based server, add a repo
server_repodir = self.server_rootdir(username,
reponame,
create=False)
# Force cleanup if needed
if os.path.exists(server_repodir) and not force:
raise RepositoryExists()
if os.path.exists(server_repodir):
shutil.rmtree(server_repodir)
os.makedirs(server_repodir)
# Initialize the repo
with cd(server_repodir):
git.init(".", "--bare")
if backend is not None:
backend.init_repo(server_repodir)
# Now clone the filesystem-based repo
repodir = self.rootdir(username, reponame, create=False)
# Prepare it if needed
| python | {
"resource": ""
} |
q264137 | GitRepoManager.delete | validation | def delete(self, repo, args=[]):
"""
Delete files from the repo
"""
result = None
with cd(repo.rootdir):
try:
cmd = ['rm'] + list(args)
result = {
'status': 'success',
'message': self._run(cmd)
}
| python | {
"resource": ""
} |
q264138 | GitRepoManager.drop | validation | def drop(self, repo, args=[]):
"""
Cleanup the repo
"""
# Clean up the rootdir
rootdir = repo.rootdir
if os.path.exists(rootdir):
print("Cleaning repo directory: {}".format(rootdir))
shutil.rmtree(rootdir)
# Cleanup the local version of the repo (this could be on
# the server, etc.)
server_repodir = self.server_rootdir_from_repo(repo,
| python | {
"resource": ""
} |
q264139 | GitRepoManager.permalink | validation | def permalink(self, repo, path):
"""
Get the permalink to the command that generated the dataset
"""
if not os.path.exists(path):
# print("Path does not exist", path)
return (None, None)
# Get this directory
cwd = os.getcwd()
# Find the root of the repo and cd into that directory..
if os.path.isfile(path):
os.chdir(os.path.dirname(path))
rootdir = self._run(["rev-parse", "--show-toplevel"])
if "fatal" in rootdir:
# print("fatal", rootdir)
return (None, None)
os.chdir(rootdir)
# print("Rootdir = ", rootdir)
# Now find relative path
relpath = os.path.relpath(path, rootdir)
# print("relpath = ", relpath)
# Get the last commit for this file
#3764cc2600b221ac7d7497de3d0dbcb4cffa2914
sha1 = self._run(["log", "-n", "1", "--format=format:%H", relpath])
# print("sha1 = ", sha1)
# Get the repo URL
| python | {
"resource": ""
} |
q264140 | GitRepoManager.add_files | validation | def add_files(self, repo, files):
"""
Add files to the repo
"""
rootdir = repo.rootdir
for f in files:
relativepath = f['relativepath']
sourcepath = f['localfullpath']
if sourcepath is None:
# This can happen if the relative path is a URL
continue #
# Prepare the target path
targetpath = os.path.join(rootdir, relativepath)
try:
| python | {
"resource": ""
} |
q264141 | Invoice.send | validation | def send(self, send_email=True):
"""Marks the invoice as sent in Holvi
If send_email is False then the invoice is *not* automatically emailed to the recipient
and you must take care of sending the invoice yourself.
"""
url = str(self.api.base_url + '{code}/status/').format(code=self.code) # six.u messes this up
| python | {
"resource": ""
} |
q264142 | Invoice.to_holvi_dict | validation | def to_holvi_dict(self):
"""Convert our Python object to JSON acceptable to Holvi API"""
self._jsondata["items"] = []
for item in self.items:
self._jsondata["items"].append(item.to_holvi_dict())
self._jsondata["issue_date"] = self.issue_date.isoformat()
| python | {
"resource": ""
} |
q264143 | api_call_action | validation | def api_call_action(func):
"""
API wrapper documentation
"""
def _inner(*args, **kwargs): | python | {
"resource": ""
} |
q264144 | Order.save | validation | def save(self):
"""Saves this order to Holvi, returns a tuple with the order itself and checkout_uri"""
if self.code:
raise HolviError("Orders cannot be updated")
send_json = self.to_holvi_dict()
send_json.update({
'pool': self.api.connection.pool
})
url = six.u(self.api.base_url + "order/") | python | {
"resource": ""
} |
q264145 | untokenize | validation | def untokenize(tokens):
"""Return source code based on tokens.
This is like tokenize.untokenize(), but it preserves spacing between
tokens. So if the original source code had multiple spaces between
some tokens or if escaped newlines were used, those things will be
reflected by untokenize().
"""
text = ''
previous_line = ''
last_row = 0
last_column = -1
last_non_whitespace_token_type = None
for (token_type, token_string, start, end, line) in tokens:
if TOKENIZE_HAS_ENCODING and token_type == tokenize.ENCODING:
| python | {
"resource": ""
} |
q264146 | init | validation | def init(globalvars=None, show=False):
"""
Load profile INI
"""
global config
profileini = getprofileini()
if os.path.exists(profileini):
config = configparser.ConfigParser()
config.read(profileini)
mgr = plugins_get_mgr()
mgr.update_configs(config)
if show:
for source in config:
print("[%s] :" %(source))
for k | python | {
"resource": ""
} |
q264147 | update | validation | def update(globalvars):
"""
Update the profile
"""
global config
profileini = getprofileini()
config = configparser.ConfigParser()
config.read(profileini)
defaults = {}
if globalvars is not None:
defaults = {a[0]: a[1] for a in globalvars }
# Generic variables to be captured...
generic_configs = [{
'name': 'User',
'nature': 'generic',
'description': "General information",
'variables': ['user.email', 'user.name',
'user.fullname'],
'defaults': {
'user.email': {
'value': defaults.get('user.email',''),
'description': "Email address",
'validator': EmailValidator()
},
'user.fullname': {
'value': defaults.get('user.fullname',''),
'description': "Full Name",
'validator': NonEmptyValidator()
},
'user.name': {
'value': defaults.get('user.name', getpass.getuser()),
'description': "Name",
'validator': NonEmptyValidator()
},
}
}]
# Gather configuration requirements from all plugins
mgr = plugins_get_mgr()
extra_configs = mgr.gather_configs()
allconfigs = generic_configs + extra_configs
# Read the existing config and update the defaults
for c in allconfigs:
name = c['name']
for v in c['variables']:
try:
c['defaults'][v]['value'] = config[name][v]
except:
continue
for c in allconfigs:
print("")
print(c['description'])
print("==================")
if len(c['variables']) == 0:
print("Nothing to do. Enabled by default")
continue
name = c['name']
config[name] = {}
config[name]['nature'] = c['nature']
for v in c['variables']:
# defaults
value = ''
| python | {
"resource": ""
} |
q264148 | S3Backend.init_repo | validation | def init_repo(self, gitdir):
"""
Insert hook into the repo
"""
hooksdir = os.path.join(gitdir, 'hooks')
content = postreceive_template % {
'client': self.client,
'bucket': self.bucket,
's3cfg': self.s3cfg,
'prefix': self.prefix
| python | {
"resource": ""
} |
q264149 | compute_sha256 | validation | def compute_sha256(filename):
"""
Try the library. If it doesn't work, use the command line.
"""
try:
h = sha256()
fd = open(filename, 'rb')
while True:
| python | {
"resource": ""
} |
q264150 | run | validation | def run(cmd):
"""
Run a shell command
"""
cmd = [pipes.quote(c) for c in cmd]
cmd = " ".join(cmd)
cmd += "; exit 0"
# print("Running {} in {}".format(cmd, os.getcwd()))
try:
output = subprocess.check_output(cmd,
stderr=subprocess.STDOUT,
| python | {
"resource": ""
} |
q264151 | get_tree | validation | def get_tree(gitdir="."):
"""
Get the commit history for a given dataset
"""
cmd = ["git", "log", "--all", "--branches", '--pretty=format:{ "commit": "%H", "abbreviated_commit": "%h", "tree": "%T", "abbreviated_tree": "%t", "parent": "%P", "abbreviated_parent": "%p", "refs": "%d", "encoding": "%e", "subject": "%s", "sanitized_subject_line": "%f", "commit_notes": "", "author": { "name": "%aN", "email": "%aE", "date": "%ai" }, "commiter": { "name": "%cN", "email": "%cE", "date": "%ci" }},']
output = run(cmd)
lines = output.split("\n")
content = ""
history = []
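# Commit subjects may contain quotes or span lines; accumulate lines until the quotes balance before parsing the JSON.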
for l in lines:
try:
revisedcontent = content + l
if revisedcontent.count('"') % 2 == 0:
j = json.loads(revisedcontent[:-1])
if "Notes added by" in j['subject']:
content = ""
continue
| python | {
"resource": ""
} |
q264152 | get_diffs | validation | def get_diffs(history):
"""
Look at files and compute the diffs intelligently
"""
# First get all possible representations
mgr = plugins_get_mgr()
keys = mgr.search('representation')['representation']
representations = [mgr.get_by_key('representation', k) for k in keys]
for i in range(len(history)):
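# Compare each commit with its successor; the last commit has none, so skip it.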
if i+1 > len(history) - 1:
continue
prev = history[i]
curr = history[i+1]
#print(prev['subject'], "==>", curr['subject'])
#print(curr['changes'])
for c in curr['changes']:
path = c['path']
# Skip the metadata file
if c['path'].endswith('datapackage.json'):
continue
# Find a handler for this kind of file...
handler = None
for r in representations:
if r.can_process(path):
handler = r
break
if handler is None:
continue
# print(path, "being handled by", handler)
v1_hex = prev['commit']
v2_hex = curr['commit']
temp1 = tempfile.mkdtemp(prefix="dgit-diff-")
try:
for h in [v1_hex, v2_hex]:
filename = '{}/{}/checkout.tar'.format(temp1, h)
try:
os.makedirs(os.path.dirname(filename))
except:
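# The directory may already exist from an earlier pass; ignore the error.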
pass
extractcmd = ['git', 'archive', '-o', filename, h, path]
output = run(extractcmd)
if 'fatal' in output:
raise Exception("File not present in commit")
| python | {
"resource": ""
} |
q264153 | SSHClient.wait | validation | def wait(self, cmd, raise_on_error=True):
"""
Execute command and wait for it to finish. Proceed with caution:
if you run a command that causes a prompt, this will hang
"""
_, stdout, stderr = self.exec_command(cmd)
stdout.channel.recv_exit_status()
output = stdout.read()
if self.interactive:
| python | {
"resource": ""
} |
q264154 | SSHClient.sudo | validation | def sudo(self, password=None):
"""
Enter sudo mode
"""
if self.username == 'root':
raise ValueError('Already root user')
password = self.validate_password(password)
stdin, stdout, stderr = self.exec_command('sudo | python | {
"resource": ""
} |
q264155 | SSHClient.apt | validation | def apt(self, package_names, raise_on_error=False):
"""
Install specified packages using apt-get. -y options are
automatically used. Waits for command to finish.
Parameters
----------
package_names: list-like of str
| python | {
"resource": ""
} |
q264156 | SSHClient.pip | validation | def pip(self, package_names, raise_on_error=True):
"""
Install specified python packages using pip. -U option added
Waits for command to finish.
Parameters
----------
package_names: list-like of str
raise_on_error: bool, default True
If True then raise ValueError if stderr is not empty | python | {
"resource": ""
} |
q264157 | SSHClient.pip_r | validation | def pip_r(self, requirements, raise_on_error=True):
"""
Install all requirements contained in the given file path
Waits for command to finish.
Parameters
----------
requirements: str
Path to requirements.txt
raise_on_error: bool, default | python | {
"resource": ""
} |
q264158 | stitch_macro | validation | def stitch_macro(path, output_folder=None):
"""Create fiji-macros for stitching all channels and z-stacks for a well.
Parameters
----------
path : string
Well path.
output_folder : string
Folder to store images. If not given well path is used.
Returns
-------
output_files, macros : tuple
Tuple with filenames and macros for stitched well.
"""
output_folder = output_folder or path
debug('stitching ' + path + ' to ' + output_folder)
fields = glob(_pattern(path, _field))
# assume we have rectangle of fields
xs = [attribute(field, 'X') for field in fields]
ys = [attribute(field, 'Y') for field in fields]
x_min, x_max = min(xs), max(xs)
y_min, y_max = min(ys), max(ys)
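# The number of distinct X (column) and Y (row) positions gives the grid dimensions.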
fields_column = len(set(xs))
fields_row = len(set(ys))
# assume all fields are the same
# and get properties from images in first field
images = glob(_pattern(fields[0], _image))
# assume attributes are the same on all images
attr = attributes(images[0])
# find all channels and z-stacks
channels = []
z_stacks = []
for image in images:
channel = attribute_as_str(image, 'C')
if channel not in channels:
channels.append(channel)
z = attribute_as_str(image, 'Z')
if z not in z_stacks:
z_stacks.append(z)
debug('channels ' + str(channels))
debug('z-stacks ' + str(z_stacks))
# create macro
_, extension = os.path.splitext(images[-1])
if extension == '.tif':
# assume .ome.tif
extension = '.ome.tif'
macros = []
output_files = []
for Z in z_stacks:
for C in channels:
filenames = os.path.join(
_field + '--X{xx}--Y{yy}',
_image + '--L' + attr.L +
'--S' + attr.S +
'--U' + attr.U +
'--V' + attr.V +
| python | {
"resource": ""
} |
q264159 | compress | validation | def compress(images, delete_tif=False, folder=None):
"""Lossless compression. Save images as PNG and TIFF tags to json. Can be
reversed with `decompress`. Will run with multiprocessing, where the
number of workers is decided by ``leicaexperiment.experiment._pools``.
Parameters
----------
images : list of filenames
Images to lossless compress.
delete_tif : bool
Whether to delete original images.
folder : string
Where to store images. Basename will be kept.
Returns
-------
list of filenames
List of compressed files.
"""
| python | {
"resource": ""
} |
q264160 | compress_blocking | validation | def compress_blocking(image, delete_tif=False, folder=None, force=False):
"""Lossless compression. Save image as PNG and TIFF tags to json. Process
can be reversed with `decompress`.
Parameters
----------
image : string
TIF-image which should be compressed lossless.
delete_tif : bool
Whether to delete original images.
force : bool
Whether to compress even if .png already exists.
Returns
-------
string
Filename of compressed image, or empty string if compress failed.
"""
debug('compressing {}'.format(image))
try:
new_filename, extension = os.path.splitext(image)
# remove last occurrence of .ome
new_filename = new_filename.rsplit('.ome', 1)[0]
# if compressed file should be put in specified folder
if folder:
basename = os.path.basename(new_filename)
new_filename = os.path.join(folder, basename + '.png')
else:
new_filename = new_filename + '.png'
# check if png exists
if os.path.isfile(new_filename) and not force:
| python | {
"resource": ""
} |
q264161 | _set_path | validation | def _set_path(self, path):
"Set self.path, self.dirname and self.basename."
import os.path
self.path = os.path.abspath(path)
| python | {
"resource": ""
} |
q264162 | Experiment.images | validation | def images(self):
"List of paths to images."
tifs = _pattern(self._image_path, extension='tif')
pngs = _pattern(self._image_path, extension='png')
| python | {
"resource": ""
} |
q264163 | Experiment.image | validation | def image(self, well_row, well_column, field_row, field_column):
"""Get path of specified image.
Parameters
----------
well_row : int
Starts at 0. Same as --V in files.
well_column : int
Starts at 0. Same as --U in files.
field_row : int
Starts at 0. Same as --Y in files.
field_column : int
Starts at 0. Same as --X in files.
Returns
-------
string
| python | {
"resource": ""
} |
q264164 | Experiment.well_images | validation | def well_images(self, well_row, well_column):
"""Get list of paths to images in specified well.
Parameters
----------
well_row : int
Starts at 0. Same as --V in files.
well_column : int
Starts at 0. Same as --U in files.
Returns
-------
list of strings
Paths to images or empty list if no images | python | {
"resource": ""
} |
q264165 | Experiment.stitch | validation | def stitch(self, folder=None):
"""Stitches all wells in experiment with ImageJ. Stitched images are
saved in experiment root.
Images which already exist are skipped during stitching.
Parameters
----------
folder : string
Where to store stitched images. Defaults to experiment path.
Returns
-------
list
Filenames of stitched images. Files which already exist before
stitching are also returned.
"""
debug('stitching ' + self.__str__())
if not folder:
folder = self.path
# create list of macros and files
macros = []
files = []
for well in self.wells:
f,m = stitch_macro(well, folder)
macros.extend(m)
files.extend(f)
| python | {
"resource": ""
} |
q264166 | Experiment.compress | validation | def compress(self, delete_tif=False, folder=None):
"""Lossless compress all images in experiment to PNG. If folder is
omitted, images will not be moved.
Images which already exist as PNG are skipped.
Parameters
----------
folder : string
Where to store PNGs. Defaults to the folder they are in.
delete_tif : bool
If set to truthy value, ome.tifs will be deleted after compression.
Returns
| python | {
"resource": ""
} |
q264167 | Experiment.field_metadata | validation | def field_metadata(self, well_row=0, well_column=0,
field_row=0, field_column=0):
"""Get OME-XML metadata of given field.
Parameters
----------
well_row : int
Y well coordinate. Same as --V in files.
well_column : int
X well coordinate. Same as --U in files.
field_row : int
Y field coordinate. Same as --Y in files.
field_column : int
X field coordinate. Same as --X in files.
| python | {
"resource": ""
} |
q264168 | Experiment.stitch_coordinates | validation | def stitch_coordinates(self, well_row=0, well_column=0):
"""Get a list of stitch coordinates for the given well.
Parameters
----------
well_row : int
Y well coordinate. Same as --V in files.
well_column : int
X well coordinate. Same as --U in files.
Returns
-------
(xs, ys, attr) : tuples with float and collections.OrderedDict
Tuple of x's, y's and attributes.
"""
well = [w for w in self.wells
if attribute(w, 'u') == well_column and
attribute(w, 'v') == well_row]
if len(well) == 1:
well = well[0]
tile = os.path.join(well, 'TileConfiguration.registered.txt')
with open(tile) as f:
data = [x.strip()
for l in f.readlines()
| python | {
"resource": ""
} |
q264169 | Droplets.create | validation | def create(self, name, region, size, image, ssh_keys=None,
backups=None, ipv6=None, private_networking=None, wait=True):
"""
Create a new droplet
Parameters
----------
name: str
Name of new droplet
region: str
slug for region (e.g., sfo1, nyc1)
size: str
slug for droplet size (e.g., 512mb, 1024mb)
image: int or str
image id (e.g., 12352) or slug (e.g., 'ubuntu-14-04-x64')
ssh_keys: list, optional
default SSH keys to be added on creation
this is highly recommended for ssh access
backups: bool, optional
whether automated backups should be enabled for the Droplet.
Automated backups can only be enabled when the Droplet is created.
ipv6: bool, optional
whether IPv6 is enabled on the Droplet
private_networking: bool, optional
whether private networking is enabled for the Droplet. Private
networking is currently only available in certain regions
wait: bool, default True
if True then block until creation is complete
| python | {
"resource": ""
} |
q264170 | Droplets.get | validation | def get(self, id):
"""
Retrieve a droplet by id
Parameters
----------
id: int
droplet id
Returns
-------
droplet: DropletActions | python | {
"resource": ""
} |
q264171 | DropletActions.restore | validation | def restore(self, image, wait=True):
"""
Restore this droplet with given image id
A Droplet restoration will rebuild an image using a backup image.
The image ID that is passed in must be a backup of the current Droplet
| python | {
"resource": ""
} |
q264172 | DropletActions.rebuild | validation | def rebuild(self, image, wait=True):
"""
Rebuild this droplet with given image id
Parameters
----------
image: int or str
int for image id and str for image slug
wait: bool, default True
| python | {
"resource": ""
} |
q264173 | DropletActions.rename | validation | def rename(self, name, wait=True):
"""
Change the name of this droplet
Parameters
----------
name: str
New name for the droplet
wait: bool, default True
Whether to block until the pending action is completed
| python | {
"resource": ""
} |
q264174 | DropletActions.change_kernel | validation | def change_kernel(self, kernel_id, wait=True):
"""
Change the kernel of this droplet
Parameters
----------
kernel_id: int
Can be retrieved from output of self.kernels()
wait: bool, default True
Whether to block until the pending action is completed
| python | {
"resource": ""
} |
q264175 | DropletActions.delete | validation | def delete(self, wait=True):
"""
Delete this droplet
Parameters
----------
wait: bool, default True
Whether to block until the pending action is completed
"""
| python | {
"resource": ""
} |
q264176 | DropletActions.wait | validation | def wait(self):
"""
wait for all actions to complete on a droplet
"""
interval_seconds = 5
while True:
actions = self.actions()
slept = False
| python | {
"resource": ""
} |
q264177 | DropletActions.connect | validation | def connect(self, interactive=False):
"""
Open SSH connection to droplet
Parameters
----------
interactive: bool, default False
If True then SSH client will prompt for password when necessary
and also print | python | {
"resource": ""
} |
q264178 | RestAPI.send_request | validation | def send_request(self, kind, resource, url_components, **kwargs):
"""
Send a request to the REST API
Parameters
----------
kind: str, {get, delete, put, post, head}
resource: str
url_components: list or tuple to be appended to the request URL
Notes
-----
kwargs contain request parameters to be sent as | python | {
"resource": ""
} |
q264179 | RestAPI.format_parameters | validation | def format_parameters(self, **kwargs):
"""
Properly formats array types
"""
req_data = {}
for k, v in kwargs.items():
| python | {
"resource": ""
} |
q264180 | DigitalOceanAPI.format_request_url | validation | def format_request_url(self, resource, *args):
"""create request url for resource"""
return | python | {
"resource": ""
} |
q264181 | Resource.send_request | validation | def send_request(self, kind, url_components, **kwargs):
"""
Send a request for this resource to the API
Parameters
----------
kind: str, {'get', 'delete', 'put', 'post', 'head'}
"""
| python | {
"resource": ""
} |
q264182 | ResourceCollection.list | validation | def list(self, url_components=()):
"""
Send list request for all members of a collection
"""
| python | {
"resource": ""
} |
q264183 | MutableCollection.get | validation | def get(self, id, **kwargs):
"""
Get single unit of collection
| python | {
"resource": ""
} |
q264184 | ImageActions.transfer | validation | def transfer(self, region):
"""
Transfer this image to given region
Parameters
----------
region: str
region slug to transfer to (e.g., sfo1, nyc1)
"""
| python | {
"resource": ""
} |
q264185 | Images.get | validation | def get(self, id):
"""id or slug"""
info = super(Images, self).get(id)
| python | {
"resource": ""
} |
q264186 | Keys.update | validation | def update(self, id, name):
"""id or fingerprint"""
return | python | {
"resource": ""
} |
q264187 | Domains.create | validation | def create(self, name, ip_address):
"""
Creates a new domain
Parameters
----------
name: str
new domain name
ip_address: str
IP address | python | {
"resource": ""
} |
q264188 | Domains.records | validation | def records(self, name):
"""
Get a list of all domain records for the given domain name
Parameters
----------
name: str
| python | {
"resource": ""
} |
q264189 | DomainRecords.rename | validation | def rename(self, id, name):
"""
Change the name of this domain record
Parameters
----------
id: int
domain record id
name: str
| python | {
"resource": ""
} |
q264190 | DomainRecords.get | validation | def get(self, id, **kwargs):
"""
Retrieve a single domain record given the id
"""
| python | {
"resource": ""
} |
q264191 | FogBugz.logon | validation | def logon(self, username, password):
"""
Logs the user on to FogBugz.
Returns None for a successful login.
"""
if self._token:
self.logoff()
try:
response = self.__makerequest(
'logon', email=username, password=password)
except FogBugzAPIError:
| python | {
"resource": ""
} |
q264192 | chop | validation | def chop(list_, n):
"Chop list_ into n chunks. Returns a list."
# could look into itertools also, might be implemented there
size = len(list_)
each = size // n
if each == 0:
| python | {
"resource": ""
} |
q264193 | get_first | validation | def get_first():
"""
return first droplet
"""
client = po.connect() # this depends on the DIGITALOCEAN_API_KEY envvar
all_droplets = client.droplets.list()
| python | {
"resource": ""
} |
q264194 | take_snapshot | validation | def take_snapshot(droplet, name):
"""
Take a snapshot of a droplet
Parameters
----------
name: str
name for snapshot
"""
print "powering off"
droplet.power_off()
droplet.wait() # wait for pending actions to complete | python | {
"resource": ""
} |
q264195 | ManagedResource.allowed_operations | validation | def allowed_operations(self):
"""Retrieves the allowed operations for this request."""
if self.slug is not None:
| python | {
"resource": ""
} |
q264196 | ManagedResource.assert_operations | validation | def assert_operations(self, *args):
"""Assets if the requested operations are allowed in this context."""
| python | {
"resource": ""
} |
q264197 | ManagedResource.make_response | validation | def make_response(self, data=None):
"""Fills the response object from the passed data."""
if data is not None:
# Prepare the data for transmission.
data = self.prepare(data)
| python | {
"resource": ""
} |
q264198 | ManagedResource.get | validation | def get(self, request, response):
"""Processes a `GET` request."""
# Ensure we're allowed to read the resource.
self.assert_operations('read')
# Delegate to `read` to retrieve the items.
items = self.read()
# if self.slug is not None and not items:
# # Requested a specific resource but nothing is returned.
# # Attempt to resolve by changing what we understand as
# # a slug to a path.
| python | {
"resource": ""
} |
q264199 | ManagedResource.post | validation | def post(self, request, response):
"""Processes a `POST` request."""
if self.slug is not None:
# Don't know what to do an item access.
raise http.exceptions.NotImplemented()
# Ensure we're allowed to create a resource.
self.assert_operations('create')
# Deserialize and clean the incoming object.
data | python | {
"resource": ""
} |