_id stringlengths 2 7 | title stringlengths 1 88 | partition stringclasses 3
values | text stringlengths 75 19.8k | language stringclasses 1
value | meta_information dict |
|---|---|---|---|---|---|
def find_matching_files(self, includes):
    """
    Return repo resource paths matching any of the glob patterns.

    Patterns are matched against both the bare file name and the
    full relative path; duplicates are removed.
    """
    if not includes:
        return []

    candidates = [r['relativepath'] for r in self.package['resources']]
    pattern = r'|'.join(fnmatch.translate(p) for p in includes)

    matched = set()
    for path in candidates:
        # Match both the file name as well the path..
        if re.match(pattern, os.path.basename(path)) or re.match(pattern, path):
            matched.add(path)
    return list(matched)
"resource": ""
} |
def run(self, cmd, *args):
    """
    Dispatch a command to the repository manager.

    Raises an Exception when no manager is attached or when the
    manager does not implement the requested command.
    """
    if self.manager is None:
        raise Exception("Fatal internal error: Missing repository manager")
    if cmd not in dir(self.manager):
        raise Exception("Fatal internal error: Invalid command {} being run".format(cmd))
    # The repo itself is always the first argument to the manager call.
    return getattr(self.manager, cmd)(self, *args)
"resource": ""
} |
def get_resource(self, p):
    """
    Return the metadata dict for the resource at relative path *p*.

    Side effect: stamps the matching entry with its absolute
    'localfullpath' before returning.  Raises when no resource
    has the given relative path.
    """
    for resource in self.package['resources']:
        if resource['relativepath'] != p:
            continue
        resource['localfullpath'] = os.path.join(self.rootdir, p)
        return resource
    raise Exception("Invalid path")
"resource": ""
} |
def lookup(self, username=None, reponame=None, key=None):
    """
    Return the repo registered under *key*, or under the key derived
    from (username, reponame) when *key* is None.

    Raises UnknownRepository when the key is not registered.
    """
    lookup_key = key if key is not None else self.key(username, reponame)
    if lookup_key not in self.repos:
        raise UnknownRepository()
    return self.repos[lookup_key]
"resource": ""
} |
def rootdir(self, username, reponame, create=True):
    """
    Return the working directory for the repo, optionally creating it.

    Parameters
    ----------
    username, reponame: identify the repository
    create: when True, create the directory tree if it is missing

    Returns
    -------
    path: <workspace>/datasets/<username>/<reponame>
    """
    path = os.path.join(self.workspace,
                        'datasets',
                        username,
                        reponame)
    if create:
        # exist_ok replaces the old bare "except: pass", which also
        # swallowed real errors such as permission failures.
        os.makedirs(path, exist_ok=True)
    return path
"resource": ""
} |
def add(self, repo):
    """
    Register *repo* in the internal lookup table and return its key.

    Side effect: stamps the computed key onto the repo object itself.
    """
    repo.key = self.key(repo.username, repo.reponame)
    self.repos[repo.key] = repo
    return repo.key
"resource": ""
} |
def lookup(username, reponame):
    """
    Resolve (username, reponame) to a repo via the git repo manager.
    """
    # XXX This should be generalized to all repo managers.
    mgr = plugins_get_mgr()
    gitmgr = mgr.get(what='repomanager', name='git')
    return gitmgr.lookup(username=username, reponame=reponame)
"resource": ""
} |
def shellcmd(repo, args):
    """
    Run a shell command from inside the repo's root directory.

    Parameters
    ----------
    repo: Repository object
    args: Shell command
    """
    with cd(repo.rootdir):
        return run(args)
"resource": ""
} |
def datapackage_exists(repo):
    """
    Return True when the repo already has a datapackage.json file.
    """
    return os.path.exists(os.path.join(repo.rootdir, "datapackage.json"))
"resource": ""
} |
def bootstrap_datapackage(repo, force=False,
                          options=None, noinput=False):
    """
    Build the initial datapackage.json content for *repo* and write it
    to a temporary file.

    Parameters
    ----------
    repo: Repository object (provides username/reponame/remoteurl)
    force: unused here; kept for interface compatibility
    options: dict with 'title' and 'description' (e.g. dgit.json content)
    noinput: when True and no options are given, raise instead of
        prompting interactively

    Returns
    -------
    filename: path of the temporary file holding the JSON document

    Raises
    ------
    IncompleteParameters: when noinput is set but no options supplied
    """
    print("Bootstrapping datapackage")

    # Initial data package json.  Fix: 'title' and 'description'
    # appeared twice in the original pair list (the duplicates only
    # re-set the same first-position entries, so removing them
    # preserves both ordering and values).  The unused 'tsprefix'
    # variable was also dropped.
    package = OrderedDict([
        ('title', ''),
        ('description', ''),
        ('username', repo.username),
        ('reponame', repo.reponame),
        ('name', str(repo)),
        ('keywords', []),
        ('resources', []),
        ('creator', getpass.getuser()),
        ('createdat', datetime.now().isoformat()),
        ('remote-url', repo.remoteurl)
    ])

    if options is not None:
        package['title'] = options['title']
        package['description'] = options['description']
    else:
        if noinput:
            raise IncompleteParameters("Option field with title and description")
        for var in ['title', 'description']:
            value = ''
            while value in ['', None]:
                value = input('Your Repo ' + var.title() + ": ")
                if len(value) == 0:
                    print("{} cannot be empty. Please re-enter.".format(var.title()))
            package[var] = value

    # Now store the package...  Fix: close the mkstemp descriptor,
    # which the original leaked (it opened the file separately).
    (handle, filename) = tempfile.mkstemp()
    os.close(handle)
    with open(filename, 'w') as fd:
        fd.write(json.dumps(package, indent=4))

    repo.package = package
    return filename
"resource": ""
} |
def init(username, reponame, setup,
         force=False, options=None,
         noinput=False):
    """
    Initialize an empty repository with datapackage.json

    Parameters
    ----------
    username: Name of the user
    reponame: Name of the repo
    setup: Specify the 'configuration' (git only, git+s3 backend etc)
    force: Force creation of the files
    options: Dictionary with content of dgit.json, if available.
    noinput: Automatic operation with no human interaction
    """
    mgr = plugins_get_mgr()
    repomgr = mgr.get(what='repomanager', name='git')
    # Only the git+s3 setup uses a storage backend; plain git does not.
    backendmgr = None
    if setup == 'git+s3':
        backendmgr = mgr.get(what='backend', name='s3')
    repo = repomgr.init(username, reponame, force, backendmgr)

    # Now bootstrap the datapackage.json metadata file and copy it in...

    # Insert a gitignore with .dgit directory in the repo. This
    # directory will be used to store partial results
    # NOTE(review): the mkstemp fd 'handle' is never closed here —
    # this leaks a file descriptor per call.
    (handle, gitignore) = tempfile.mkstemp()
    with open(gitignore, 'w') as fd:
        fd.write(".dgit")

    # Try to bootstrap. If you cant, cleanup and return
    try:
        filename = bootstrap_datapackage(repo, force, options, noinput)
    except Exception as e:
        # Roll back: drop the half-created repo and remove the temp file.
        repomgr.drop(repo,[])
        os.unlink(gitignore)
        raise e

    # Stage both generated files into the new repository.
    repo.run('add_files',
             [
                 {
                     'relativepath': 'datapackage.json',
                     'localfullpath': filename,
                 },
                 {
                     'relativepath': '.gitignore',
                     'localfullpath': gitignore,
                 },
             ])

    # Cleanup temp files
    os.unlink(filename)
    os.unlink(gitignore)

    # Commit the bootstrap content as the repo's first commit.
    args = ['-a', '-m', 'Bootstrapped the repo']
    repo.run('commit', args)
    return repo
"resource": ""
} |
def annotate_metadata_data(repo, task, patterns=["*"], size=0):
    """
    Update metadata with the content of the files.

    Parameters
    ----------
    repo: Repository object
    task: 'preview' attaches the first *size* characters of each
        matching file as the resource's 'content'; 'schema' attaches a
        schema from the first representation plugin that can process it.
    patterns: glob patterns selecting which resources to annotate
    size: preview length in characters (used only for task='preview')
    """
    mgr = plugins_get_mgr()
    keys = mgr.search('representation')['representation']
    representations = [mgr.get_by_key('representation', k) for k in keys]

    matching_files = repo.find_matching_files(patterns)
    package = repo.package
    rootdir = repo.rootdir
    for f in package['resources']:
        relativepath = f['relativepath']
        if relativepath not in matching_files:
            continue
        path = os.path.join(rootdir, relativepath)
        if task == 'preview':
            print("Adding preview for ", relativepath)
            # Fix: read only what is needed and close the handle; the
            # original leaked the file object and read the whole file.
            with open(path) as fd:
                f['content'] = fd.read(size)
        elif task == 'schema':
            # First representation plugin that can handle the file wins.
            for r in representations:
                if r.can_process(path):
                    print("Adding schema for ", path)
                    f['schema'] = r.get_schema(path)
                    break
"resource": ""
} |
def annotate_metadata_code(repo, files):
    """
    Record commit/permalink information for the scripts that produced
    the data in package['code'].
    """
    package = repo.package
    package['code'] = []
    for pattern in files:
        # Recursive match anywhere below the current directory.
        for path in glob2.glob("**/{}".format(pattern)):
            fullpath = os.path.abspath(path)
            print("Add commit data for {}".format(path))
            entry = OrderedDict([
                ('script', path),
                ('permalink', repo.manager.permalink(repo, fullpath)),
                ('mimetypes', mimetypes.guess_type(fullpath)[0]),
                ('sha256', compute_sha256(fullpath))
            ])
            package['code'].append(entry)
"resource": ""
} |
def annotate_metadata_action(repo):
    """
    Update metadata with the action history.

    Reads .dgit/log.json (one JSON record per line) from the repo root
    and stores the parsed records in package['actions'].  Records with
    a missing or None 'code' field get a "..." placeholder; malformed
    lines are skipped.
    """
    package = repo.package
    print("Including history of actions")
    with cd(repo.rootdir):
        filename = ".dgit/log.json"
        if os.path.exists(filename):
            # Fix: close the log file instead of leaking the handle.
            with open(filename) as fd:
                history = fd.readlines()
            actions = []
            for line in history:
                try:
                    a = json.loads(line)
                    for x in ['code']:
                        if x not in a or a[x] is None:
                            a[x] = "..."
                    actions.append(a)
                except (ValueError, TypeError, KeyError):
                    # Narrowed from a bare except: skip only records that
                    # fail to parse or are not dict-shaped.
                    pass
            package['actions'] = actions
"resource": ""
} |
def annotate_metadata_platform(repo):
    """
    Attach host/platform information gathered by the platform
    instrumentation plugin to package['platform'].
    """
    print("Added platform information")
    mgr = plugins_get_mgr()
    platform_plugin = mgr.get(what='instrumentation', name='platform')
    repo.package['platform'] = platform_plugin.get_metadata()
"resource": ""
} |
def annotate_metadata_dependencies(repo):
    """
    Collect information from the dependent repos.

    Populates package['dependencies'] with {username, reponame} dicts
    for every resolvable dependency listed in repo.options.

    Returns
    -------
    [] when no dependencies are configured; None otherwise (the
    original return behaviour, preserved for callers).
    """
    options = repo.options
    if 'dependencies' not in options:
        print("No dependencies")
        return []

    repos = []
    for d in options['dependencies']:
        if "/" not in d:
            # Fix: previously fell through to split() and crashed with
            # a ValueError; now the bad entry is reported and skipped.
            print("Invalid dependency specification")
            continue
        (username, reponame) = d.split("/")
        try:
            repos.append(repo.manager.lookup(username, reponame))
        except Exception:
            print("Repository does not exist. Please create one", d)

    package = repo.package
    package['dependencies'] = []
    for r in repos:
        package['dependencies'].append({
            'username': r.username,
            'reponame': r.reponame,
        })
"resource": ""
} |
def post(repo, args=[]):
    """
    Post the repository's metadata (datapackage) to every configured
    metadata server plugin.

    Parameters
    ----------
    repo: Repository object (result of lookup)
    args: unused; kept for interface compatibility
    """
    mgr = plugins_get_mgr()
    keys = mgr.search(what='metadata')
    keys = keys['metadata']
    if len(keys) == 0:
        # No metadata plugins configured; nothing to post.
        return

    # Incorporate pipeline information...
    if 'pipeline' in repo.options:
        for name, details in repo.options['pipeline'].items():
            patterns = details['files']
            matching_files = repo.find_matching_files(patterns)
            matching_files.sort()
            details['files'] = matching_files
            # Tag each matched resource with its step in the pipeline.
            for i, f in enumerate(matching_files):
                r = repo.get_resource(f)
                if 'pipeline' not in r:
                    r['pipeline'] = []
                r['pipeline'].append(name + " [Step {}]".format(i))

    if 'metadata-management' in repo.options:
        print("Collecting all the required metadata to post")
        metadata = repo.options['metadata-management']

        # Add data repo history
        if 'include-data-history' in metadata and metadata['include-data-history']:
            repo.package['history'] = get_history(repo.rootdir)

        # Add action history
        if 'include-action-history' in metadata and metadata['include-action-history']:
            annotate_metadata_action(repo)

        # Add data repo history
        if 'include-preview' in metadata:
            annotate_metadata_data(repo,
                                   task='preview',
                                   patterns=metadata['include-preview']['files'],
                                   size=metadata['include-preview']['length'])

        if (('include-schema' in metadata) and metadata['include-schema']):
            annotate_metadata_data(repo, task='schema')

        if 'include-code-history' in metadata:
            annotate_metadata_code(repo, files=metadata['include-code-history'])

        if 'include-platform' in metadata:
            annotate_metadata_platform(repo)

        if 'include-validation' in metadata:
            annotate_metadata_validation(repo)

        if 'include-dependencies' in metadata:
            annotate_metadata_dependencies(repo)

        # Tab diffs only make sense when data history was collected.
        history = repo.package.get('history', None)
        if (('include-tab-diffs' in metadata) and
            metadata['include-tab-diffs'] and
            history is not None):
            annotate_metadata_diffs(repo)

    # Insert options as well
    repo.package['config'] = repo.options

    try:
        for k in keys:
            # print("Key", k)
            metadatamgr = mgr.get_by_key('metadata', k)
            url = metadatamgr.url
            o = urlparse(url)
            print("Posting to ", o.netloc)
            response = metadatamgr.post(repo)
            if isinstance(response, str):
                # Plugins may return an error string instead of a response.
                print("Error while posting:", response)
            elif response.status_code in [400]:
                content = response.json()
                print("Error while posting:")
                # NOTE(review): this 'k' shadows the plugin-key loop
                # variable above — harmless only because nothing reads
                # the outer 'k' afterwards.
                for k in content:
                    print(" ", k, "- ", ",".join(content[k]))
    except NetworkError as e:
        print("Unable to reach metadata server!")
    except NetworkInvalidConfiguration as e:
        print("Invalid network configuration in the INI file")
        print(e.message)
    except Exception as e:
        print("Could not post. Unknown error")
        print(e)
"resource": ""
} |
def plugins_show(what=None, name=None, version=None, details=False):
    """
    Display details of the available plugins.

    Parameters
    ----------
    what: Class of plugins e.g., backend
    name: Name of the plugin e.g., s3
    version: Version of the plugin
    details: Should details be shown?
    """
    # 'global' is unnecessary for a read-only access of pluginmgr.
    return pluginmgr.show(what, name, version, details)
"resource": ""
} |
def discover_all_plugins(self):
    """
    Load and register every plugin advertised on the 'dgit.plugins'
    entry point.
    """
    for entry_point in pkg_resources.iter_entry_points('dgit.plugins'):
        module = entry_point.load()
        module.setup(self)
"resource": ""
} |
def register(self, what, obj):
    """
    Register a plugin instance under its (name, version) key.

    Params
    ------
    what: Nature of the plugin (backend, instrumentation, repo)
    obj: Instance of the plugin
    """
    # Disabled plugins are silently ignored.
    if obj.enable == 'n':
        return
    self.plugins[what][Key(obj.name, obj.version)] = obj
"resource": ""
} |
def search(self, what, name=None, version=None):
    """
    Return {plugin-class: [keys]} for enabled plugins matching the
    optional name/version filters.  A None *what* scans every class.
    """
    if what is None:
        whats = list(self.plugins.keys())
    else:
        if what not in self.plugins:
            raise Exception("Unknown class of plugins")
        whats = [what]

    filtered = {}
    for category in whats:
        filtered.setdefault(category, [])
        for key, plugin in self.plugins[category].items():
            (k_name, k_version) = key
            if name is not None and k_name != name:
                continue
            if version is not None and k_version != version:
                continue
            if plugin.enable == 'n':
                continue
            filtered[category].append(key)
    # print(filtered)
    return filtered
"resource": ""
} |
def instantiate(repo, validator_name=None, filename=None, rulesfiles=None):
    """
    Instantiate the validation specification.

    Expands the validator spec from the repo options (or the command
    line overrides) into concrete lists of target files and rule files.

    Parameters
    ----------
    repo: Repository object
    validator_name: restrict to a single validator; a name missing from
        the defaults gets an empty specification
    filename: file pattern overriding every validator's 'files'
    rulesfiles: rules-file pattern overriding every validator's
        'rules-files'

    Returns
    -------
    validators: dict mapping validator name -> spec with resolved
        'files' and 'rules-files' lists

    Raises
    ------
    Exception: when an override pattern matches nothing
    """
    default_validators = repo.options.get('validator', {})

    validators = {}
    if validator_name is not None:
        # Handle the case validator is specified..
        if validator_name in default_validators:
            validators = {
                validator_name : default_validators[validator_name]
            }
        else:
            # Unknown validator: start from an empty specification.
            validators = {
                validator_name : {
                    'files': [],
                    'rules': {},
                    'rules-files': []
                }
            }
    else:
        validators = default_validators

    #=========================================
    # Insert the file names
    #=========================================
    if filename is not None:
        # Command-line pattern overrides every validator's file list.
        matching_files = repo.find_matching_files([filename])
        if len(matching_files) == 0:
            print("Filename could not be found", filename)
            raise Exception("Invalid filename pattern")
        for v in validators:
            validators[v]['files'] = matching_files
    else:
        # Instantiate the files from the patterns specified
        for v in validators:
            if 'files' not in validators[v]:
                validators[v]['files'] = []
            elif len(validators[v]['files']) > 0:
                matching_files = repo.find_matching_files(validators[v]['files'])
                validators[v]['files'] = matching_files

    #=========================================
    # Insert the rules files..
    #=========================================
    if rulesfiles is not None:
        # Command lines...
        matching_files = repo.find_matching_files([rulesfiles])
        if len(matching_files) == 0:
            # NOTE(review): 'v' here is left over from the loops above —
            # this line raises NameError when 'validators' is empty.
            print("Could not find matching rules files ({}) for {}".format(rulesfiles,v))
            raise Exception("Invalid rules")
        for v in validators:
            validators[v]['rules-files'] = matching_files
    else:
        # Instantiate the files from the patterns specified
        for v in validators:
            if 'rules-files' not in validators[v]:
                validators[v]['rules-files'] = []
            else:
                rulesfiles = validators[v]['rules-files']
                matching_files = repo.find_matching_files(rulesfiles)
                validators[v]['rules-files'] = matching_files

    return validators
"resource": ""
} |
def validate(repo,
             validator_name=None,
             filename=None,
             rulesfiles=None,
             args=[]):
    """
    Validate the content of the files for consistency.

    Validators can look as deeply as needed into the files; dgit
    treats them all as black boxes.

    Parameters
    ----------
    repo: Repository object
    validator_name: Name of validator, if any. If none, all validators
        specified in dgit.json are included.
    filename: Pattern selecting files to validate; defaults to the
        dgit.json specification.
    rulesfiles: Pattern selecting the rules files validators will use.

    Returns
    -------
    A list of dicts, each with the target file processed, the rules
    file applied, the validation status and any error message.
    """
    mgr = plugins_get_mgr()

    # Expand the specification into full file paths.
    validator_specs = instantiate(repo, validator_name, filename, rulesfiles)

    # Run every matching validator plugin over its resolved spec.
    allresults = []
    for vname, spec in validator_specs.items():
        for key in mgr.search(what='validator', name=vname)['validator']:
            plugin = mgr.get_by_key('validator', key)
            allresults.extend(plugin.evaluate(repo, spec, args))
    return allresults
"resource": ""
} |
def url_is_valid(self, url):
    """
    Return True when the URL refers to an existing filesystem path.

    Accepts plain paths as well as file:// URLs.
    """
    # Strip the file:// scheme before testing the path.
    path = url.replace("file://", "") if url.startswith("file://") else url
    return os.path.exists(path)
"resource": ""
} |
def post(self, repo):
    """
    Post the repo's datapackage to the metadata server.

    Parameters
    ----------
    repo: Repository object whose .package is serialized as JSON

    Returns
    -------
    The requests response object.

    Raises
    ------
    NetworkError: on any failure to reach the server.
    """
    headers = {
        'Authorization': 'Token {}'.format(self.token),
        'Content-Type': 'application/json'
    }
    try:
        return requests.post(self.url,
                             data=json.dumps(repo.package),
                             headers=headers)
    except Exception as e:
        # Fix: the unreachable 'return ""' after the raise was removed,
        # and the original cause is now chained for easier debugging.
        raise NetworkError() from e
"resource": ""
} |
def get_module_class(class_path):
    """
    Import and return the class named by a dotted
    ``path.to.module.Class`` string.

    Raises EvoStreamException when the module cannot be imported.
    """
    module_path, class_name = class_path.rsplit('.', 1)
    try:
        module = import_module(module_path)
    except ImportError as ex:
        raise EvoStreamException('Error importing module %s: '
                                 '"%s"' % (module_path, ex))
    return getattr(module, class_name)
"resource": ""
} |
def find_executable_files():
    """
    Return up to 5 executable files (searched up to three directory
    levels deep from the current directory) that are likely
    responsible for this repo.
    """
    candidates = glob.glob("*") + glob.glob("*/*") + glob.glob('*/*/*')
    executable = stat.S_IEXEC | stat.S_IXGRP | stat.S_IXOTH
    final = []
    for filename in candidates:
        if not os.path.isfile(filename):
            continue
        if os.stat(filename).st_mode & executable:
            final.append(filename)
            # Fix: the old check ('> 5' after append) returned 6 files
            # although the docstring promises a maximum of 5.
            if len(final) == 5:
                break
    return final
"resource": ""
} |
def auto_get_repo(autooptions, debug=False):
    """
    Automatically get repo

    Tries, in order: local lookup, cloning from the remote backend,
    and finally (after interactive confirmation) creating a new repo.

    Parameters
    ----------
    autooptions: dgit.json content
    debug: print progress information
    """
    # plugin manager
    pluginmgr = plugins_get_mgr()

    # get the repo manager
    repomgr = pluginmgr.get(what='repomanager', name='git')

    repo = None
    try:
        if debug:
            print("Looking repo")
        repo = repomgr.lookup(username=autooptions['username'],
                              reponame=autooptions['reponame'])
    except:
        # Clone the repo
        try:
            print("Checking and cloning if the dataset exists on backend")
            url = autooptions['remoteurl']
            if debug:
                print("Doesnt exist. trying to clone: {}".format(url))
            common_clone(url)
            repo = repomgr.lookup(username=autooptions['username'],
                                  reponame=autooptions['reponame'])
            if debug:
                print("Cloning successful")
        except:
            # traceback.print_exc()
            # Last resort: ask the user before creating a brand new repo.
            yes = input("Repo doesnt exist. Should I create one? [yN]")
            if yes == 'y':
                setup = "git"
                # s3-backed remotes need the git+s3 setup.
                if autooptions['remoteurl'].startswith('s3://'):
                    setup = 'git+s3'
                repo = common_init(username=autooptions['username'],
                                   reponame=autooptions['reponame'],
                                   setup=setup,
                                   force=True,
                                   options=autooptions)
                if debug:
                    print("Successfully inited repo")
            else:
                raise Exception("Cannot load repo")

    # Remember the configuration on the repo object for later stages.
    repo.options = autooptions

    return repo
"resource": ""
} |
def get_files_to_commit(autooptions):
    """
    Walk the configured working directory and return the files that
    match the include patterns and are not excluded.
    """
    workingdir = autooptions['working-directory']
    include_patterns = autooptions['track']['includes']
    exclude_patterns = autooptions['track']['excludes']

    # Glob patterns -> one combined regular expression each.
    include_re = r'|'.join(fnmatch.translate(p) for p in include_patterns)
    # '$.' never matches anything, so an empty exclude list excludes nothing.
    exclude_re = r'|'.join(fnmatch.translate(p) for p in exclude_patterns) or r'$.'

    matched = []
    for root, dirs, files in os.walk(workingdir):
        # Prune excluded directories in place so walk skips them.
        dirs[:] = [d for d in dirs if not re.match(exclude_re, d)]
        for f in files:
            if re.match(exclude_re, f):
                continue
            if re.match(include_re, f):
                matched.append(os.path.join(root, f))
    return matched
"resource": ""
} |
def auto_add(repo, autooptions, files):
    """
    Map local file paths to repo-relative paths and add them.

    Parameters
    ----------
    repo: Repository object
    autooptions: dgit.json content; may carry an
        import/directory-mapping dict that relocates path prefixes
    files: list of local file paths to add

    Returns
    -------
    The number of files added (as accumulated by files_add).
    """
    # Get the mappings and keys.
    mapping = { ".": "" }
    if (('import' in autooptions) and
        ('directory-mapping' in autooptions['import'])):
        mapping = autooptions['import']['directory-mapping']

    # Apply the longest prefix first so nested mappings win.
    keys = sorted(mapping.keys(), key=len, reverse=True)

    count = 0
    for f in files:
        # Find the destination
        relativepath = f
        for k in keys:
            prefix = k + "/"
            if f.startswith(prefix):
                # Fix: rewrite only the leading prefix; str.replace()
                # used to rewrite every occurrence in the path.
                relativepath = mapping[k] + f[len(prefix):]
                break
        # Now add to repository
        count += files_add(repo=repo,
                           args=[f],
                           targetdir=os.path.dirname(relativepath))
    return count
"resource": ""
} |
def pull_stream(self, uri, **kwargs):
    """
    Pull in a stream from an external source.

    Once a stream has been successfully pulled it is assigned a
    'local stream name' which can be used to access it from the EMS.

    :param uri: The URI of the external stream. Can be RTMP, RTSP or
        unicast/multicast (d) mpegts
    :type uri: str

    All optional keyword arguments (keepAlive, localStreamName,
    forceTcp, tcUrl, pageUrl, swfUrl, rangeStart, rangeEnd, ttl, tos,
    rtcpDetectionInterval, emulateUserAgent, isAudio, audioCodecBytes,
    spsBytes, ppsBytes, ssmIp, httpProxy) are forwarded verbatim to
    the EMS protocol layer.

    :link: http://docs.evostream.com/ems_api_definition/pullstream
    """
    return self.protocol.execute('pullStream', uri=uri, **kwargs)
"resource": ""
} |
def record(self, localStreamName, pathToFile, **kwargs):
    """
    Record any inbound stream.

    The record command allows users to record a stream that may not
    yet exist; when a new stream is brought into the server it is
    checked against the list of streams to be recorded.  Streams can
    be recorded as FLV, MPEG-TS or MP4 files.

    :param localStreamName: The name of the stream used as recording input.
    :type localStreamName: str
    :param pathToFile: Path and file name to write to.
    :type pathToFile: str

    Optional keyword arguments (type, overwrite, keepAlive,
    chunkLength, waitForIDR, winQtCompat, dateFolderStructure) are
    forwarded verbatim to the EMS protocol layer.

    :link: http://docs.evostream.com/ems_api_definition/record
    """
    return self.protocol.execute('record',
                                 localStreamName=localStreamName,
                                 pathToFile=pathToFile,
                                 **kwargs)
"resource": ""
} |
def create_ingest_point(self, privateStreamName, publicStreamName):
    """
    Create an RTMP ingest point.

    This mandates that streams pushed into the EMS have a target
    stream name matching one ingest point's privateStreamName.

    :param privateStreamName: The name RTMP target stream names must match.
    :type privateStreamName: str
    :param publicStreamName: The name used to access the stream pushed
        to privateStreamName; it becomes the stream's localStreamName.
    :type publicStreamName: str

    :link: http://docs.evostream.com/ems_api_definition/createingestpoint
    """
    return self.protocol.execute('createIngestPoint',
                                 privateStreamName=privateStreamName,
                                 publicStreamName=publicStreamName)
"resource": ""
} |
def instantiate(repo, name=None, filename=None):
    """
    Instantiate the generator and filename specification

    Expands each transformer's file patterns (from repo options or the
    command line) into concrete file lists.

    Parameters
    ----------
    repo: Repository object
    name: restrict to one transformer; an unknown name gets an empty spec
    filename: pattern overriding each transformer's 'files' entry

    Returns
    -------
    transformers: dict mapping transformer name -> spec with resolved
        file lists
    """
    default_transformers = repo.options.get('transformer', {})

    # If a name is specified, then lookup the options from dgit.json
    # if specfied. Otherwise it is initialized to an empty list of
    # files.
    transformers = {}
    if name is not None:
        # Handle the case generator is specified..
        if name in default_transformers:
            transformers = {
                name : default_transformers[name]
            }
        else:
            transformers = {
                name : {
                    'files': [],
                }
            }
    else:
        transformers = default_transformers

    #=========================================
    # Map the filename patterns to list of files
    #=========================================
    # Instantiate the files from the patterns specified
    input_matching_files = None
    if filename is not None:
        input_matching_files = repo.find_matching_files([filename])

    for t in transformers:
        for k in transformers[t]:
            # NOTE: this is a substring test on the KEY NAME — it
            # selects every key whose name contains "files" (e.g.
            # 'files', 'rules-files'), not a membership test on values.
            if "files" not in k:
                continue
            if k == "files" and input_matching_files is not None:
                # Use the files specified on the command line..
                transformers[t][k] = input_matching_files
            else:
                # Try to match the specification
                if transformers[t][k] is None or len(transformers[t][k]) == 0:
                    transformers[t][k] = []
                else:
                    matching_files = repo.find_matching_files(transformers[t][k])
                    transformers[t][k] = matching_files

    return transformers
"resource": ""
} |
def _run(self, cmd):
    """
    Run a git command and return its combined stdout+stderr as text.

    Parameters
    ----------
    cmd : list
        Arguments to git command

    Notes
    -----
    The author/committer identity is forced through the environment in
    case ~/.gitconfig is not accessible.  The trailing "; exit 0"
    deliberately suppresses git's exit status; callers inspect the
    textual output instead.
    """
    # Fix: pipes.quote is deprecated and removed in Python 3.13;
    # shlex.quote is the same function under its supported name.
    import shlex

    environ = os.environ.copy()
    environ['GIT_COMMITTER_NAME'] = self.fullname
    environ['GIT_COMMITTER_EMAIL'] = self.email
    environ['GIT_AUTHOR_NAME'] = self.fullname
    environ['GIT_AUTHOR_EMAIL'] = self.email

    cmd = " ".join(['/usr/bin/git'] + [shlex.quote(c) for c in cmd])
    cmd += "; exit 0"
    # print("Running cmd", cmd)
    try:
        output = subprocess.check_output(cmd,
                                         stderr=subprocess.STDOUT,
                                         shell=True,
                                         env=environ)
    except subprocess.CalledProcessError as e:
        output = e.output

    # print("Output of command", output)
    return output.decode('utf-8').strip()
"resource": ""
} |
def _run_generic_command(self, repo, cmd):
    """
    Run a generic command within the repo. Assumes that you are
    in the repo's root directory

    Returns a dict with 'cmd', 'status' and 'message' (the command
    output on success, the error text otherwise).
    """
    result = None
    with cd(repo.rootdir):
        # Dont use sh. It is not collecting the stdout of all
        # child processes.
        output = self._run(cmd)
        try:
            result = {
                'cmd': cmd,
                'status': 'success',
                'message': output,
            }
        except Exception as e:
            # NOTE(review): constructing a dict literal cannot raise,
            # so this error branch is effectively dead code.
            result = {
                'cmd': cmd,
                'status': 'error',
                'message': str(e)
            }
    return result
"resource": ""
} |
def init(self, username, reponame, force, backend=None):
    """
    Initialize a Git repo

    Parameters
    ----------
    username, reponame : Repo name is tuple (name, reponame)
    force: force initialization of the repo even if exists
    backend: backend that must be used for this (e.g. s3)
    """
    # NOTE(review): 'key' is computed but never used in this method.
    key = self.key(username, reponame)

    # In local filesystem-based server, add a repo
    server_repodir = self.server_rootdir(username,
                                         reponame,
                                         create=False)

    # Force cleanup if needed
    if os.path.exists(server_repodir) and not force:
        raise RepositoryExists()

    if os.path.exists(server_repodir):
        shutil.rmtree(server_repodir)
    os.makedirs(server_repodir)

    # Initialize the repo as a bare 'server-side' repository.
    with cd(server_repodir):
        git.init(".", "--bare")

    if backend is not None:
        backend.init_repo(server_repodir)

    # Now clone the filesystem-based repo
    repodir = self.rootdir(username, reponame, create=False)

    # Prepare it if needed
    if os.path.exists(repodir) and not force:
        raise Exception("Local repo already exists")
    if os.path.exists(repodir):
        shutil.rmtree(repodir)
    os.makedirs(repodir)

    # Now clone...
    with cd(os.path.dirname(repodir)):
        git.clone(server_repodir, '--no-hardlinks')

    # The remote URL is the backend URL when a backend is present,
    # otherwise the local bare repository path.
    url = server_repodir
    if backend is not None:
        url = backend.url(username, reponame)

    repo = Repo(username, reponame)
    repo.manager = self
    repo.remoteurl = url
    repo.rootdir = self.rootdir(username, reponame)

    # Register the new repo in the in-memory lookup table.
    self.add(repo)
    return repo
"resource": ""
} |
def delete(self, repo, args=[]):
    """
    Remove files from the repo via 'git rm'.

    Returns a {'status', 'message'} dict; 'message' carries the git
    output on success or the error text on failure.
    """
    with cd(repo.rootdir):
        try:
            output = self._run(['rm'] + list(args))
            return {
                'status': 'success',
                'message': output
            }
        except Exception as e:
            return {
                'status': 'error',
                'message': str(e)
            }
"resource": ""
} |
def drop(self, repo, args=[]):
    """
    Cleanup the repo

    Removes both the local working copy and the local bare 'server'
    repository, then delegates bookkeeping to the base class.
    """
    # Clean up the rootdir
    rootdir = repo.rootdir
    if os.path.exists(rootdir):
        print("Cleaning repo directory: {}".format(rootdir))
        shutil.rmtree(rootdir)

    # Cleanup the local version of the repo (this could be on
    # the server etc.
    server_repodir = self.server_rootdir_from_repo(repo,
                                                   create=False)
    if os.path.exists(server_repodir):
        print("Cleaning data from local git 'server': {}".format(server_repodir))
        shutil.rmtree(server_repodir)

    # Drop from the in-memory lookup table as well.
    super(GitRepoManager, self).drop(repo)

    return {
        'status': 'success',
        'message': "successful cleanup"
    }
"resource": ""
} |
def permalink(self, repo, path):
    """
    Get the permalink to command that generated the dataset

    Parameters
    ----------
    repo: Repository object (unused except for interface parity)
    path: path to a file inside a git checkout

    Returns
    -------
    (relpath, permalink): the repo-relative path and a web permalink
    built from the remote URL and last commit sha; (None, None) when
    the path does not exist or is not inside a recognizable repo.

    Note: temporarily changes the process working directory and
    restores it before returning.
    """
    if not os.path.exists(path):
        # print("Path does not exist", path)
        return (None, None)

    # Get this directory
    cwd = os.getcwd()

    # Find the root of the repo and cd into that directory..
    if os.path.isfile(path):
        os.chdir(os.path.dirname(path))

    rootdir = self._run(["rev-parse", "--show-toplevel"])
    if "fatal" in rootdir:
        # Not inside a git work tree.
        # print("fatal", rootdir)
        return (None, None)

    os.chdir(rootdir)
    # print("Rootdir = ", rootdir)

    # Now find relative path
    relpath = os.path.relpath(path, rootdir)
    # print("relpath = ", relpath)

    # Get the last commit for this file
    #3764cc2600b221ac7d7497de3d0dbcb4cffa2914
    sha1 = self._run(["log", "-n", "1", "--format=format:%H", relpath])
    # print("sha1 = ", sha1)

    # Get the repo URL
    #git@gitlab.com:pingali/simple-regression.git
    #https://gitlab.com/kanban_demo/test_project.git
    remoteurl = self._run(["config", "--get", "remote.origin.url"])
    # print("remoteurl = ", remoteurl)

    # Go back to the original directory...
    os.chdir(cwd)

    # Now match it against two possible formats of the remote url
    # Examples
    #https://help.github.com/articles/getting-permanent-links-to-files/
    #https://github.com/github/hubot/blob/ed25584f5ac2520a6c28547ffd0961c7abd7ea49/README.md
    #https://gitlab.com/pingali/simple-regression/blob/3764cc2600b221ac7d7497de3d0dbcb4cffa2914/model.py
    #https://github.com/pingali/dgit/blob/ff91b5d04b2978cad0bf9b006d1b0a16d18a778e/README.rst
    #https://gitlab.com/kanban_demo/test_project/blob/b004677c23b3a31eb7b5588a5194857b2c8b2b95/README.md
    m = re.search('^git@([^:\/]+):([^/]+)/([^/]+)', remoteurl)
    if m is None:
        m = re.search('^https://([^:/]+)/([^/]+)/([^/]+)', remoteurl)
    if m is not None:
        domain = m.group(1)
        username = m.group(2)
        project = m.group(3)
        # Drop a trailing .git from the project name.
        if project.endswith(".git"):
            project = project[:-4]
        permalink = "https://{}/{}/{}/blob/{}/{}".format(domain, username, project,
                                                         sha1, relpath)
        # print("permalink = ", permalink)
        return (relpath, permalink)
    else:
        # Remote URL in an unrecognized format.
        return (None, None)
"resource": ""
} |
def add_files(self, repo, files):
    """
    Copy files into the repo working tree and `git add` them.

    Parameters
    ----------
    repo: Repo object with a `rootdir` attribute.
    files: list of dicts with 'relativepath' (destination, relative to
        the repo root) and 'localfullpath' (source; None for URL-backed
        resources, which are skipped).
    """
    rootdir = repo.rootdir
    for f in files:
        relativepath = f['relativepath']
        sourcepath = f['localfullpath']
        if sourcepath is None:
            # This can happen if the relative path is a URL
            continue
        # Prepare the target path. Fix: replace the bare `except: pass`
        # around makedirs with exist_ok so real errors (e.g. permission
        # denied) surface here instead of at copyfile below.
        targetpath = os.path.join(rootdir, relativepath)
        os.makedirs(os.path.dirname(targetpath), exist_ok=True)
        print("Updating: {}".format(relativepath))
        shutil.copyfile(sourcepath, targetpath)
        with cd(repo.rootdir):
            self._run(['add', relativepath])
"resource": ""
} |
def send(self, send_email=True):
    """Mark this invoice as sent in Holvi.

    When `send_email` is False the invoice is *not* automatically emailed
    to the recipient and you must deliver it yourself.
    """
    # six.u mangles the format string, so build it with plain str()
    endpoint = str(self.api.base_url + '{code}/status/').format(code=self.code)
    self.api.connection.make_put(endpoint, {
        'mark_as_sent': True,
        'send_email': send_email,
    })
"resource": ""
} |
def to_holvi_dict(self):
    """Serialize this invoice into the JSON shape the Holvi API accepts."""
    data = self._jsondata
    data["items"] = [item.to_holvi_dict() for item in self.items]
    data["issue_date"] = self.issue_date.isoformat()
    data["due_date"] = self.due_date.isoformat()
    data["receiver"] = self.receiver.to_holvi_dict()
    # Only keys the API understands may be sent back.
    return {key: value for (key, value) in data.items()
            if key in self._valid_keys}
"resource": ""
} |
def api_call_action(func):
    """
    Decorator for API-call actions.

    Wraps `func` without altering its behaviour. Uses functools.wraps
    instead of hand-copying __name__/__doc__, so __module__, __qualname__,
    __dict__ and __wrapped__ are preserved as well.
    """
    import functools

    @functools.wraps(func)
    def _inner(*args, **kwargs):
        return func(*args, **kwargs)
    return _inner
"resource": ""
} |
def save(self):
    """Save this order to Holvi; returns (checkout_uri, order)."""
    if self.code:
        # Holvi exposes no update endpoint for existing orders.
        raise HolviError("Orders cannot be updated")
    payload = self.to_holvi_dict()
    payload['pool'] = self.api.connection.pool
    endpoint = six.u(self.api.base_url + "order/")
    response = self.api.connection.make_post(endpoint, payload)
    # Maybe slightly ugly but I don't want to basically reimplement all
    # but uri formation of the api method
    order_code = response["details_uri"].split("/")[-2]
    return (response["checkout_uri"], self.api.get_order(order_code))
"resource": ""
} |
def untokenize(tokens):
    """Return source code based on tokens.

    This is like tokenize.untokenize(), but it preserves spacing between
    tokens. So if the original source code had multiple spaces between
    some tokens or if escaped newlines were used, those things will be
    reflected by untokenize().
    """
    # NOTE(review): TOKENIZE_HAS_ENCODING and WHITESPACE_TOKENS are
    # module-level constants defined elsewhere in this file.
    text = ''
    previous_line = ''
    last_row = 0
    last_column = -1
    last_non_whitespace_token_type = None
    for (token_type, token_string, start, end, line) in tokens:
        # Skip the ENCODING pseudo-token (emitted first by Python 3
        # tokenizers); it carries no source text.
        if TOKENIZE_HAS_ENCODING and token_type == tokenize.ENCODING:
            continue
        (start_row, start_column) = start
        (end_row, end_column) = end
        # Preserve escaped newlines: when the next token starts on a new
        # row and the previous physical line ended with a backslash
        # continuation, re-emit the suffix the tokenizer dropped.
        if (
            last_non_whitespace_token_type != tokenize.COMMENT and
            start_row > last_row and
            previous_line.endswith(('\\\n', '\\\r\n', '\\\r'))
        ):
            text += previous_line[len(previous_line.rstrip(' \t\n\r\\')):]
        # Preserve spacing: copy the original inter-token text verbatim
        # from the current physical line.
        if start_row > last_row:
            last_column = 0
        if start_column > last_column:
            text += line[last_column:start_column]
        text += token_string
        previous_line = line
        last_row = end_row
        last_column = end_column
        # Comments/whitespace must not count as the "last real token"
        # for the escaped-newline check above.
        if token_type not in WHITESPACE_TOKENS:
            last_non_whitespace_token_type = token_type
    return text
"resource": ""
} |
def init(globalvars=None, show=False):
    """
    Load the profile INI file, creating it when missing.

    Parameters
    ----------
    globalvars: optional list of (name, value) pairs used as defaults
        when a new profile must be created.
    show: when True, dump the loaded configuration to stdout.
    """
    global config
    profileini = getprofileini()
    if not os.path.exists(profileini):
        print("Profile does not exist. So creating one")
        if not show:
            update(globalvars)
    else:
        config = configparser.ConfigParser()
        config.read(profileini)
        mgr = plugins_get_mgr()
        mgr.update_configs(config)
        if show:
            # Dump every section and its key/value pairs
            for source in config:
                print("[%s] :" %(source))
                for k in config[source]:
                    print(" %s : %s" % (k, config[source][k]))
    print("Complete init")
"resource": ""
} |
def update(globalvars):
    """
    Interactively update the profile INI.

    Parameters
    ----------
    globalvars : list of (name, value) pairs or None
        Pre-seeded defaults used to fill in variable values before
        prompting the user.
    """
    global config
    profileini = getprofileini()
    config = configparser.ConfigParser()
    config.read(profileini)
    defaults = {}
    if globalvars is not None:
        defaults = {a[0]: a[1] for a in globalvars }
    # Generic variables to be captured...
    generic_configs = [{
        'name': 'User',
        'nature': 'generic',
        'description': "General information",
        'variables': ['user.email', 'user.name',
                      'user.fullname'],
        'defaults': {
            'user.email': {
                'value': defaults.get('user.email',''),
                'description': "Email address",
                'validator': EmailValidator()
            },
            'user.fullname': {
                'value': defaults.get('user.fullname',''),
                'description': "Full Name",
                'validator': NonEmptyValidator()
            },
            'user.name': {
                'value': defaults.get('user.name', getpass.getuser()),
                'description': "Name",
                'validator': NonEmptyValidator()
            },
        }
    }]
    # Gather configuration requirements from all plugins
    mgr = plugins_get_mgr()
    extra_configs = mgr.gather_configs()
    allconfigs = generic_configs + extra_configs
    # Read the existing config and update the defaults
    for c in allconfigs:
        name = c['name']
        for v in c['variables']:
            try:
                c['defaults'][v]['value'] = config[name][v]
            except:
                # Section/key missing in the existing profile; keep the
                # built-in default.
                continue
    # Prompt for every variable of every config section
    for c in allconfigs:
        print("")
        print(c['description'])
        print("==================")
        if len(c['variables']) == 0:
            print("Nothing to do. Enabled by default")
            continue
        name = c['name']
        config[name] = {}
        config[name]['nature'] = c['nature']
        for v in c['variables']:
            # defaults
            value = ''
            description = v + " "
            helptext = ""
            validator = None
            # Look up pre-set values
            if v in c['defaults']:
                value = c['defaults'][v].get('value','')
                helptext = c['defaults'][v].get("description","")
                validator = c['defaults'][v].get('validator',None)
            if helptext != "":
                description += "(" + helptext + ")"
            # Get user input.. re-prompt until the validator accepts.
            while True:
                choice = input_with_default(description, value)
                if validator is not None:
                    if validator.is_valid(choice):
                        break
                    else:
                        print("Invalid input. Expected input is {}".format(validator.message))
                else:
                    break
            config[name][v] = choice
            # A section disabled with enable=n needs no further answers.
            if v == 'enable' and choice == 'n':
                break
    with open(profileini, 'w') as fd:
        config.write(fd)
    print("Updated profile file:", config)
"resource": ""
} |
def init_repo(self, gitdir):
    """
    Install the post-receive hook into a git repository so that pushes
    are mirrored to S3.
    """
    hook_path = os.path.join(gitdir, 'hooks', 'post-receive')
    # Fill the hook template with this backend's S3 configuration.
    hook_body = postreceive_template % {
        'client': self.client,
        'bucket': self.bucket,
        's3cfg': self.s3cfg,
        'prefix': self.prefix
    }
    with open(hook_path, 'w') as fd:
        fd.write(hook_body)
    self.make_hook_executable(hook_path)
    print("Wrote to", hook_path)
"resource": ""
} |
def compute_sha256(filename):
    """
    Compute the SHA-256 digest of a file.

    Tries the hashlib library first; if that fails for any reason, falls
    back to the `sha256sum` command line tool.

    Fixes two bugs in the original: the EOF check compared a bytes
    buffer against "" (never true, since the file is opened in 'rb'),
    and `buf.encode('utf-8')` was called on bytes (AttributeError) — so
    the hashlib path always failed and silently shelled out.
    """
    try:
        h = sha256()
        with open(filename, 'rb') as fd:
            while True:
                buf = fd.read(0x1000000)
                if not buf:          # b'' at EOF
                    break
                h.update(buf)
        return h.hexdigest()
    except Exception:
        output = run(["sha256sum", "-b", filename])
        return output.split(" ")[0]
"resource": ""
} |
def run(cmd):
    """
    Run a shell command and return its combined stdout/stderr as a
    stripped string. Never raises on non-zero exit status.

    Parameters
    ----------
    cmd: list of command arguments; each is shell-quoted individually.
    """
    # shlex.quote is the supported replacement for pipes.quote (the
    # pipes module is deprecated and removed in Python 3.13).
    import shlex
    cmd = " ".join(shlex.quote(c) for c in cmd)
    # Force a zero exit status so check_output still captures output
    # when the command itself fails.
    cmd += "; exit 0"
    try:
        output = subprocess.check_output(cmd,
                                         stderr=subprocess.STDOUT,
                                         shell=True)
    except subprocess.CalledProcessError as e:
        output = e.output
    output = output.decode('utf-8')
    output = output.strip()
    return output
"resource": ""
} |
def get_tree(gitdir="."):
    """
    Get the commit history for a given dataset.

    Parameters
    ----------
    gitdir : str
        Path of the git repository. NOTE(review): currently unused —
        `run` executes in the current working directory; confirm intent.

    Returns
    -------
    list of dicts, oldest commit first; each carries the commit metadata
    emitted by the --pretty format plus a 'changes' list merged in from
    get_change(). Raises when a commit has no matching change record.
    """
    cmd = ["git", "log", "--all", "--branches", '--pretty=format:{ "commit": "%H", "abbreviated_commit": "%h", "tree": "%T", "abbreviated_tree": "%t", "parent": "%P", "abbreviated_parent": "%p", "refs": "%d", "encoding": "%e", "subject": "%s", "sanitized_subject_line": "%f", "commit_notes": "", "author": { "name": "%aN", "email": "%aE", "date": "%ai" }, "commiter": { "name": "%cN", "email": "%cE", "date": "%ci" }},']
    output = run(cmd)
    lines = output.split("\n")
    content = ""
    history = []
    # Re-assemble records: one JSON record can span several lines (e.g.
    # multi-line subjects), so accumulate until the quotes balance.
    for l in lines:
        try:
            revisedcontent = content + l
            if revisedcontent.count('"') % 2 == 0:
                # [:-1] strips the trailing comma from the format string
                j = json.loads(revisedcontent[:-1])
                if "Notes added by" in j['subject']:
                    # Skip git-notes bookkeeping commits
                    content = ""
                    continue
                history.append(j)
                content = ""
            else:
                content = revisedcontent
        except Exception as e:
            print("Error while parsing record")
            print(revisedcontent)
            content = ""
    # Order by time. First commit first...
    history.reverse()
    # Attach the per-commit change list from git notes/metadata
    changes = get_change()
    for i in range(len(history)):
        abbrev_commit = history[i]['abbreviated_commit']
        if abbrev_commit not in changes:
            raise Exception("Missing changes for " + abbrev_commit)
        history[i]['changes'] = changes[abbrev_commit]['changes']
    return history
"resource": ""
} |
def get_diffs(history):
    """
    Annotate each change in `history` with a content diff, using
    whichever registered 'representation' plugin can handle the file.

    Parameters
    ----------
    history: list of commit dicts (as produced by get_tree), each with
        a 'changes' list; matching entries gain a 'diff' key in place.

    Fixes a resource leak: the original removed the scratch directory
    only on error paths, never after a successful diff.
    """
    # First get all possible representations
    mgr = plugins_get_mgr()
    keys = mgr.search('representation')['representation']
    representations = [mgr.get_by_key('representation', k) for k in keys]

    for i in range(len(history)):
        if i+1 > len(history) - 1:
            continue

        prev = history[i]
        curr = history[i+1]

        for c in curr['changes']:
            path = c['path']

            # Skip the metadata file
            if c['path'].endswith('datapackage.json'):
                continue

            # Find a handler for this kind of file...
            handler = next((r for r in representations if r.can_process(path)),
                           None)
            if handler is None:
                continue

            # Check out the two versions of the file into a scratch dir
            v1_hex = prev['commit']
            v2_hex = curr['commit']

            temp1 = tempfile.mkdtemp(prefix="dgit-diff-")
            try:
                for h in [v1_hex, v2_hex]:
                    filename = '{}/{}/checkout.tar'.format(temp1, h)
                    os.makedirs(os.path.dirname(filename), exist_ok=True)
                    extractcmd = ['git', 'archive', '-o', filename, h, path]
                    output = run(extractcmd)
                    if 'fatal' in output:
                        raise Exception("File not present in commit")
                    with cd(os.path.dirname(filename)):
                        output = run(['tar', 'xvf', 'checkout.tar'])
                        if 'fatal' in output:
                            raise Exception("Could not extract checkout")

                path1 = os.path.join(temp1, v1_hex, path)
                path2 = os.path.join(temp1, v2_hex, path)
                if os.path.exists(path1) and os.path.exists(path2):
                    # Now call the handler
                    c['diff'] = handler.get_diff(path1, path2)
            except Exception:
                # Best-effort: a failed diff for one file should not
                # abort the whole traversal.
                pass
            finally:
                # Always clean the scratch directory (success or error).
                shutil.rmtree(temp1, ignore_errors=True)
"resource": ""
} |
def wait(self, cmd, raise_on_error=True):
    """
    Run `cmd` over SSH and block until it finishes. Proceed with caution:
    a command that prompts for input will hang here.
    """
    _, stdout, stderr = self.exec_command(cmd)
    # Block until the remote process exits
    stdout.channel.recv_exit_status()
    output = stdout.read()
    errors = stderr.read()
    if self.interactive:
        print(output)
        print(errors)
    if errors and raise_on_error:
        raise ValueError(errors)
    return output
"resource": ""
} |
def sudo(self, password=None):
    """
    Elevate this session to root via `sudo su`.

    Raises
    ------
    ValueError when already connected as root, or when sudo writes
    anything to stderr.
    """
    if self.username == 'root':
        raise ValueError('Already root user')
    password = self.validate_password(password)
    stdin, _, stderr = self.exec_command('sudo su')
    # Feed the password to the sudo prompt
    stdin.write("%s\n" % password)
    stdin.flush()
    errors = stderr.read()
    if errors:
        raise ValueError(errors)
"resource": ""
} |
def apt(self, package_names, raise_on_error=False):
    """
    Install packages with apt-get (non-interactive, -y).
    Waits for the command to finish.

    Parameters
    ----------
    package_names: str or list-like of str
    raise_on_error: bool, default False
        If True then raise ValueError if stderr is not empty;
        debconf often gives tty error.
    """
    # Fix: `basestring` is Python-2-only and raises NameError on
    # Python 3; a plain str check covers the single-package case.
    if isinstance(package_names, str):
        package_names = [package_names]
    cmd = "apt-get install -y %s" % (' '.join(package_names))
    return self.wait(cmd, raise_on_error=raise_on_error)
"resource": ""
} |
def pip(self, package_names, raise_on_error=True):
    """
    Install python packages with `pip install -U`.
    Waits for the command to finish.

    Parameters
    ----------
    package_names: str or list-like of str
    raise_on_error: bool, default True
        If True then raise ValueError if stderr is not empty.
    """
    # Fix: `basestring` is Python-2-only and raises NameError on
    # Python 3; a plain str check covers the single-package case.
    if isinstance(package_names, str):
        package_names = [package_names]
    cmd = "pip install -U %s" % (' '.join(package_names))
    return self.wait(cmd, raise_on_error=raise_on_error)
"resource": ""
} |
def pip_r(self, requirements, raise_on_error=True):
    """
    Install every requirement listed in the given file path.
    Waits for the command to finish.

    Parameters
    ----------
    requirements: str
        Path to requirements.txt on the remote host.
    raise_on_error: bool, default True
        If True then raise ValueError if stderr is not empty.
    """
    return self.wait("pip install -r %s" % requirements,
                     raise_on_error=raise_on_error)
"resource": ""
} |
def stitch_macro(path, output_folder=None):
    """Create fiji-macros for stitching all channels and z-stacks for a well.

    Parameters
    ----------
    path : string
        Well path.
    output_folder : string
        Folder to store images. If not given well path is used.

    Returns
    -------
    output_files, macros : tuple
        Tuple with filenames and macros for stitched well. Images that
        already exist are listed in output_files but get no macro.
    """
    output_folder = output_folder or path
    debug('stitching ' + path + ' to ' + output_folder)
    fields = glob(_pattern(path, _field))
    # assume we have rectangle of fields
    xs = [attribute(field, 'X') for field in fields]
    ys = [attribute(field, 'Y') for field in fields]
    # NOTE(review): x_max/y_max are computed but unused below.
    x_min, x_max = min(xs), max(xs)
    y_min, y_max = min(ys), max(ys)
    fields_column = len(set(xs))
    fields_row = len(set(ys))
    # assume all fields are the same
    # and get properties from images in first field
    images = glob(_pattern(fields[0], _image))
    # assume attributes are the same on all images
    attr = attributes(images[0])
    # find all channels and z-stacks (preserving first-seen order)
    channels = []
    z_stacks = []
    for image in images:
        channel = attribute_as_str(image, 'C')
        if channel not in channels:
            channels.append(channel)
        z = attribute_as_str(image, 'Z')
        if z not in z_stacks:
            z_stacks.append(z)
    debug('channels ' + str(channels))
    debug('z-stacks ' + str(z_stacks))
    # create macro
    _, extension = os.path.splitext(images[-1])
    if extension == '.tif':
        # assume .ome.tif
        extension = '.ome.tif'
    macros = []
    output_files = []
    # One stitch job per (z-stack, channel) combination
    for Z in z_stacks:
        for C in channels:
            # Filename pattern with {xx}/{yy} placeholders that the
            # ImageJ grid-stitching macro expands per field position.
            filenames = os.path.join(
                    _field + '--X{xx}--Y{yy}',
                    _image + '--L' + attr.L +
                    '--S' + attr.S +
                    '--U' + attr.U +
                    '--V' + attr.V +
                    '--J' + attr.J +
                    '--E' + attr.E +
                    '--O' + attr.O +
                    '--X{xx}--Y{yy}' +
                    '--T' + attr.T +
                    '--Z' + Z +
                    '--C' + C +
                    extension)
            debug('filenames ' + filenames)
            cur_attr = attributes(filenames)._asdict()
            f = 'stitched--U{U}--V{V}--C{C}--Z{Z}.png'.format(**cur_attr)
            output = os.path.join(output_folder, f)
            debug('output ' + output)
            output_files.append(output)
            if os.path.isfile(output):
                # file already exists
                print('leicaexperiment stitched file already'
                      ' exists {}'.format(output))
                continue
            macros.append(fijibin.macro.stitch(path, filenames,
                                               fields_column, fields_row,
                                               output_filename=output,
                                               x_start=x_min, y_start=y_min))
    return (output_files, macros)
"resource": ""
} |
def compress(images, delete_tif=False, folder=None):
    """Lossless compression. Save images as PNG and TIFF tags to json.
    Can be reversed with `decompress`. Runs in multiprocessing, where
    number of workers is decided by ``leicaexperiment.experiment._pools``.

    Parameters
    ----------
    images : str or list of filenames
        Images to lossless compress.
    delete_tif : bool
        Whether to delete original images.
    folder : string
        Where to store images. Basename will be kept.

    Returns
    -------
    list of filenames
        List of compressed files.
    """
    # Use isinstance instead of `type(...) == str` (idiomatic, and also
    # accepts str subclasses).
    if isinstance(images, str):
        # only one image
        return [compress_blocking(images, delete_tif, folder)]

    # Copy: the `images` property may change while we loop over it.
    filenames = copy(images)

    return Parallel(n_jobs=_pools)(delayed(compress_blocking)
                    (image=image, delete_tif=delete_tif, folder=folder)
                    for image in filenames)
"resource": ""
} |
def compress_blocking(image, delete_tif=False, folder=None, force=False):
    """Lossless compression. Save image as PNG and TIFF tags to json. Process
    can be reversed with `decompress`.

    Parameters
    ----------
    image : string
        TIF-image which should be compressed lossless.
    delete_tif : bool
        Whether to delete original images.
    force : bool
        Whether to compress even if .png already exists.

    Returns
    -------
    string
        Filename of compressed image (or the pre-existing PNG), or empty
        string if compress failed.
    """
    debug('compressing {}'.format(image))
    try:
        new_filename, extension = os.path.splitext(image)
        # remove last occurrence of .ome
        new_filename = new_filename.rsplit('.ome', 1)[0]

        # if compressed file should be put in specified folder
        if folder:
            basename = os.path.basename(new_filename)
            new_filename = os.path.join(folder, basename + '.png')
        else:
            new_filename = new_filename + '.png'

        # Already compressed: report and return the existing PNG.
        # Fix: the original appended to an undefined global
        # `compressed_images`, raising an uncaught NameError here.
        if os.path.isfile(new_filename) and not force:
            msg = "Aborting compress, PNG already" \
                  " exists: {}".format(new_filename)
            print('leicaexperiment {}'.format(msg))
            return new_filename

        if extension != '.tif':
            msg = "Aborting compress, not a TIFF: {}".format(image)
            raise AssertionError(msg)

        # open image, load and close file pointer
        img = Image.open(image)
        fptr = img.fp  # keep file pointer, for closing
        img.load()  # load img-data before switching mode, also closes fp

        # get tags and save them as json
        tags = img.tag.as_dict()
        with open(new_filename[:-4] + '.json', 'w') as f:
            if img.mode == 'P':
                # keep palette
                tags['palette'] = img.getpalette()
            json.dump(tags, f)

        # check if image is palette-mode
        if img.mode == 'P':
            # switch to luminance to keep data intact
            debug('palette-mode switched to luminance')
            img.mode = 'L'
        if img.mode == 'I;16':
            # https://github.com/python-pillow/Pillow/issues/1099
            img = img.convert(mode='I')

        # compress/save
        debug('saving to {}'.format(new_filename))
        img.save(new_filename)

        fptr.close()  # windows bug Pillow
        if delete_tif:
            os.remove(image)

    except (IOError, AssertionError) as e:
        # print error - continue
        print('leicaexperiment {}'.format(e))
        return ''

    return new_filename
"resource": ""
} |
q264161 | _set_path | validation | def _set_path(self, path):
"Set self.path, self.dirname and self.basename."
import os.path
self.path = os.path.abspath(path)
self.dirname = os.path.dirname(path)
self.basename = os.path.basename(path) | python | {
"resource": ""
} |
def images(self):
    "List of paths to images (tifs first, then pngs)."
    found = []
    for ext in ('tif', 'png'):
        found.extend(glob(_pattern(self._image_path, extension=ext)))
    return found
"resource": ""
} |
def image(self, well_row, well_column, field_row, field_column):
    """Get path of specified image.

    Parameters
    ----------
    well_row : int
        Starts at 0. Same as --U in files.
    well_column : int
        Starts at 0. Same as --V in files.
    field_row : int
        Starts at 0. Same as --Y in files.
    field_column : int
        Starts at 0. Same as --X in files.

    Returns
    -------
    string
        Path to image or empty string if image is not found.
    """
    for candidate in self.images:
        if (attribute(candidate, 'u') == well_column and
                attribute(candidate, 'v') == well_row and
                attribute(candidate, 'x') == field_column and
                attribute(candidate, 'y') == field_row):
            return candidate
    return ''
"resource": ""
} |
def well_images(self, well_row, well_column):
    """Get list of paths to images in specified well.

    Parameters
    ----------
    well_row : int
        Starts at 0. Same as --V in files.
    well_column : int
        Starts at 0. Same as --U in files.

    Returns
    -------
    list of strings
        Paths to images, or empty list if no images are found.
    """
    return [img for img in self.images
            if attribute(img, 'u') == well_column
            and attribute(img, 'v') == well_row]
"resource": ""
} |
def stitch(self, folder=None):
    """Stitches all wells in experiment with ImageJ. Stitched images are
    saved in experiment root.

    Images which already exists are omitted stitching.

    Parameters
    ----------
    folder : string
        Where to store stitched images. Defaults to experiment path.

    Returns
    -------
    list
        Filenames of stitched images. Files which already exists before
        stitching are also returned.
    """
    debug('stitching ' + self.__str__())
    if not folder:
        folder = self.path
    # create list of macros and files
    macros = []
    files = []
    for well in self.wells:
        f,m = stitch_macro(well, folder)
        macros.extend(m)
        files.extend(f)
    # Run the macros in parallel batches: chop() splits the work into
    # `_pools` chunks, one per worker, and each worker runs its chunk
    # of macros against its chunk of expected output files.
    chopped_arguments = zip(chop(macros, _pools), chop(files, _pools))
    chopped_filenames = Parallel(n_jobs=_pools)(delayed(fijibin.macro.run)
                            (macro=arg[0], output_files=arg[1])
                            for arg in chopped_arguments)
    # flatten the per-worker filename lists back into one list
    return [f for list_ in chopped_filenames for f in list_]
"resource": ""
} |
def compress(self, delete_tif=False, folder=None):
    """Lossless compress all images in this experiment to PNG.

    Images which already exist as PNG are skipped. If `folder` is
    omitted, images are not moved.

    Parameters
    ----------
    folder : string
        Where to store PNGs. Defaults to the folder the images are in.
    delete_tif : bool
        If set to truthy value, ome.tifs are deleted after compression.

    Returns
    -------
    list
        Filenames of PNG images, including files that already existed
        before compression.
    """
    # Delegates to the module-level compress() for the actual work.
    return compress(self.images, delete_tif=delete_tif, folder=folder)
"resource": ""
} |
def field_metadata(self, well_row=0, well_column=0,
                   field_row=0, field_column=0):
    """Get OME-XML metadata of given field.

    Parameters
    ----------
    well_row : int
        Y well coordinate. Same as --V in files.
    well_column : int
        X well coordinate. Same as --U in files.
    field_row : int
        Y field coordinate. Same as --Y in files.
    field_column : int
        X field coordinate. Same as --X in files.

    Returns
    -------
    lxml.objectify.ObjectifiedElement
        lxml object of OME-XML found in slide/chamber/field/metadata.
        Implicitly returns None when the field is not found.
    """
    def condition(path):
        # Match the field directory whose U/V/X/Y attributes equal the
        # requested coordinates.
        attrs = attributes(path)
        return (attrs.u == well_column and attrs.v == well_row
                and attrs.x == field_column and attrs.y == field_row)

    field = [f for f in self.fields if condition(f)]
    if field:
        field = field[0]
        filename = _pattern(field, 'metadata',
                            _image, extension='*.ome.xml')
        # Assumes exactly one metadata file exists per field;
        # raises IndexError if the glob comes back empty.
        filename = glob(filename)[0]  # resolve, assume found
        return objectify.parse(filename).getroot()
"resource": ""
} |
def stitch_coordinates(self, well_row=0, well_column=0):
    """Get a list of stitch coordinates for the given well.

    Parameters
    ----------
    well_row : int
        Y well coordinate. Same as --V in files.
    well_column : int
        X well coordinate. Same as --U in files.

    Returns
    -------
    (xs, ys, attr) : tuples with float and collections.OrderedDict
        Tuple of x's, y's and attributes. Implicitly returns None when
        the well is not found (or matches more than once).
    """
    well = [w for w in self.wells
            if attribute(w, 'u') == well_column and
            attribute(w, 'v') == well_row]

    if len(well) == 1:
        well = well[0]
        tile = os.path.join(well, 'TileConfiguration.registered.txt')

        with open(tile) as f:
            # Keep only "image--" rows and split each on ';' into a
            # flat list: [name, _, "(x, y)", name, _, "(x, y)", ...]
            data = [x.strip()
                    for l in f.readlines()
                    if l[0:7] == 'image--'
                    for x in l.split(';')]  # flat list

        # Every third item is the "(x, y)" tuple literal.
        coordinates = (ast.literal_eval(x) for x in data[2::3])
        # flatten into (x1, y1, x2, y2, ...)
        coordinates = sum(coordinates, ())

        # Every third item starting at 0 is the image filename.
        attr = tuple(attributes(x) for x in data[0::3])

        return coordinates[0::2], coordinates[1::2], attr
    else:
        print('leicaexperiment stitch_coordinates'
              '({}, {}) Well not found'.format(well_row, well_column))
"resource": ""
} |
def create(self, name, region, size, image, ssh_keys=None,
           backups=None, ipv6=None, private_networking=None, wait=True):
    """
    Create a new droplet.

    Parameters
    ----------
    name: str
        Name of new droplet
    region: str
        slug for region (e.g., sfo1, nyc1)
    size: str
        slug for droplet size (e.g., 512mb, 1024mb)
    image: int or str
        image id (e.g., 12352) or slug (e.g., 'ubuntu-14-04-x64')
    ssh_keys: list, optional
        default SSH keys to be added on creation;
        highly recommended for ssh access
    backups: bool, optional
        whether automated backups should be enabled for the Droplet
        (can only be enabled at creation time)
    ipv6: bool, optional
        whether IPv6 is enabled on the Droplet
    private_networking: bool, optional
        whether private networking is enabled for the Droplet
        (only available in certain regions)
    wait: bool, default True
        if True then block until creation is complete
    """
    if ssh_keys and not isinstance(ssh_keys, (list, tuple)):
        raise TypeError("ssh_keys must be a list")
    resp = self.post(name=name, region=region, size=size, image=image,
                     ssh_keys=ssh_keys,
                     private_networking=private_networking,
                     backups=backups, ipv6=ipv6)
    droplet_id = resp[self.singular]['id']
    droplet = self.get(droplet_id)
    if wait:
        droplet.wait()
        # HACK sometimes the IP address doesn't return correctly
        droplet = self.get(droplet_id)
    return droplet
"resource": ""
} |
def get(self, id):
    """
    Retrieve a droplet by id.

    Parameters
    ----------
    id: int
        droplet id

    Returns
    -------
    droplet: DropletActions
    """
    return DropletActions(self.api, self, **self._get_droplet_info(id))
"resource": ""
} |
def restore(self, image, wait=True):
    """
    Rebuild this droplet from one of its own backup images; embedded
    SSH keys are left intact.

    Parameters
    ----------
    image: int or str
        int for image id and str for image slug; must be a backup of
        this droplet
    wait: bool, default True
        Whether to block until the pending action is completed
    """
    kwargs = {'image': image, 'wait': wait}
    return self._action('restore', **kwargs)
"resource": ""
} |
def rebuild(self, image, wait=True):
    """
    Rebuild this droplet from the given image.

    Parameters
    ----------
    image: int or str
        int for image id and str for image slug
    wait: bool, default True
        Whether to block until the pending action is completed
    """
    kwargs = {'image': image, 'wait': wait}
    return self._action('rebuild', **kwargs)
"resource": ""
} |
q264173 | DropletActions.rename | validation | def rename(self, name, wait=True):
"""
Change the name of this droplet
Parameters
----------
name: str
New name for the droplet
wait: bool, default True
Whether to block until the pending action is completed
Raises
------
APIError if region does not support private networking
"""
return self._action('rename', name=name, wait=wait) | python | {
"resource": ""
} |
def change_kernel(self, kernel_id, wait=True):
    """
    Change the kernel of this droplet.

    Parameters
    ----------
    kernel_id: int
        Can be retrieved from output of self.kernels()
    wait: bool, default True
        Whether to block until the pending action is completed

    Raises
    ------
    APIError if region does not support private networking
    """
    kwargs = {'kernel': kernel_id, 'wait': wait}
    return self._action('change_kernel', **kwargs)
"resource": ""
} |
def delete(self, wait=True):
    """
    Delete this droplet.

    Parameters
    ----------
    wait: bool, default True
        Whether to block until the pending action is completed
    """
    response = self.parent.delete(self.id)
    if wait:
        self.wait()
    return response
"resource": ""
} |
def wait(self):
    """
    Block until no action on this droplet is 'in-progress', polling
    every few seconds.
    """
    poll_seconds = 5
    while True:
        pending = [a for a in self.actions()
                   if a['status'] == 'in-progress']
        if not pending:
            return
        # n.b. gevent will monkey patch
        time.sleep(poll_seconds)
"resource": ""
} |
def connect(self, interactive=False):
    """
    Open an SSH connection to this droplet.

    Parameters
    ----------
    interactive: bool, default False
        If True the SSH client prompts for passwords when necessary and
        prints command output to the console.
    """
    from poseidon.ssh import SSHClient
    return SSHClient(self.ip_address, interactive=interactive)
"resource": ""
} |
def send_request(self, kind, resource, url_components, **kwargs):
    """
    Send a request to the REST API.

    Parameters
    ----------
    kind: str, {get, delete, put, post, head}
        Name of the ``requests`` module function to invoke.
    resource: str
        API resource name appended to the base URL.
    url_components: list or tuple to be appended to the request URL

    Returns
    -------
    Decoded response payload as produced by ``self.get_response``.

    Raises
    ------
    APIError
        On any HTTP status >= 300; the response's 'message' field (if
        present) becomes the error message.

    Notes
    -----
    kwargs contain request parameters to be sent as request data
    """
    url = self.format_request_url(resource, *url_components)
    # Resolve e.g. kind='get' to requests.get at call time.
    meth = getattr(requests, kind)
    headers = self.get_request_headers()
    req_data = self.format_parameters(**kwargs)
    response = meth(url, headers=headers, data=req_data)
    data = self.get_response(response)
    if response.status_code >= 300:
        # pop() so the message is not duplicated in the APIError kwargs.
        msg = data.pop('message', 'API request returned error')
        raise APIError(msg, response.status_code, **data)
    return data
"resource": ""
} |
def format_parameters(self, **kwargs):
    """
    Properly formats array types.

    List/tuple values get a '[]' suffix appended to their key, per the
    API's array-parameter convention; scalar values pass through as-is.
    """
    formatted = {}
    for key, value in kwargs.items():
        suffix = '[]' if isinstance(value, (list, tuple)) else ''
        formatted[key + suffix] = value
    return formatted
"resource": ""
} |
def format_request_url(self, resource, *args):
    """Build the request URL: <api_url>/<api_version>/<resource>/<args...>."""
    components = [self.api_url, self.api_version, resource]
    components.extend(str(part) for part in args)
    return '/'.join(components)
"resource": ""
} |
def send_request(self, kind, url_components, **kwargs):
    """
    Send a request for this resource to the API.

    Parameters
    ----------
    kind: str, {'get', 'delete', 'put', 'post', 'head'}

    The resource's own path is injected so callers only supply the
    trailing URL components.
    """
    path = self.resource_path
    return self.api.send_request(kind, path, url_components, **kwargs)
"resource": ""
} |
def list(self, url_components=()):
    """
    Send list request for all members of a collection.

    Returns the payload stored under ``self.result_key``, or an empty
    list when that key is absent from the response.
    """
    response = self.get(url_components)
    return response.get(self.result_key, [])
"resource": ""
} |
def get(self, id, **kwargs):
    """
    Get single unit of collection.

    Delegates to the parent class's ``get`` with a one-element URL tuple
    and unwraps the payload under ``self.singular`` (None when absent).
    """
    return (super(MutableCollection, self).get((id,), **kwargs)
            .get(self.singular, None))
"resource": ""
} |
def transfer(self, region):
    """
    Transfer this image to given region.

    Parameters
    ----------
    region: str
        region slug to transfer to (e.g., sfo1, nyc1)
    """
    response = self.post(type='transfer', region=region)
    action = response['action']
    # Re-fetch through the parent collection so the caller gets a fresh
    # image record for the transfer target.
    return self.parent.get(action['resource_id'])
"resource": ""
} |
def get(self, id):
    """
    Retrieve a single image by numeric id or slug.

    Wraps the raw image info in an ImageActions helper so per-image
    actions (e.g. transfer) can be invoked on the result.
    """
    info = super(Images, self).get(id)
    return ImageActions(self.api, parent=self, **info)
"resource": ""
} |
def update(self, id, name):
    """
    Rename an SSH key.

    Parameters
    ----------
    id: int or str
        key id or fingerprint
    name: str
        new name for the key
    """
    return super(Keys, self).update(id, name=name)
"resource": ""
} |
def create(self, name, ip_address):
    """
    Creates a new domain.

    Parameters
    ----------
    name: str
        new domain name
    ip_address: str
        IP address for the new domain

    Returns the created domain record, or None when the response lacks
    the expected key.
    """
    response = self.post(name=name, ip_address=ip_address)
    return response.get(self.singular, None)
"resource": ""
} |
def records(self, name):
    """
    Get a domain-records accessor for the given domain name.

    Parameters
    ----------
    name: str
        domain name

    Returns None when the domain does not exist.
    """
    if not self.get(name):
        return None
    return DomainRecords(self.api, name)
"resource": ""
} |
def rename(self, id, name):
    """
    Change the name of this domain record.

    Parameters
    ----------
    id: int
        domain record id
    name: str
        new name of record
    """
    # Delegate to the generic update, then unwrap the record payload.
    return super(DomainRecords, self).update(id, name=name)[self.singular]
"resource": ""
} |
def get(self, id, **kwargs):
    """
    Retrieve a single domain record given the id.
    """
    # Thin passthrough kept so the subclass carries its own docstring.
    return super(DomainRecords, self).get(id, **kwargs)
"resource": ""
} |
def logon(self, username, password):
    """
    Logs the user on to FogBugz.

    Returns None for a successful login.

    Raises
    ------
    FogBugzLogonError
        When the underlying API request fails.
    """
    # A re-logon replaces any existing session: drop the old token first.
    if self._token:
        self.logoff()
    try:
        response = self.__makerequest(
            'logon', email=username, password=password)
    except FogBugzAPIError:
        # sys.exc_info()[1] instead of 'except ... as e' keeps this file
        # importable under both Python 2 and Python 3 syntax.
        e = sys.exc_info()[1]
        raise FogBugzLogonError(e)
    self._token = response.token.string
    # BeautifulSoup may hand back a CData node; normalize it to bytes.
    if type(self._token) == CData:
        self._token = self._token.encode('utf-8')
"resource": ""
} |
def chop(list_, n):
    """Chop list_ into n chunks. Returns a list.

    Chunks are of size ``len(list_) // n``; the final chunk absorbs any
    remainder. If there are fewer items than chunks, the whole list is
    returned as a single chunk.
    """
    size = len(list_)
    each = size // n
    if each == 0:
        # Too few items to give every chunk at least one element.
        return [list_]
    chunks = [list_[i * each:(i + 1) * each] for i in range(n - 1)]
    # The last worker takes the remainder along with its share.
    chunks.append(list_[(n - 1) * each:])
    return chunks
"resource": ""
} |
def get_first():
    """
    Return the first droplet registered with the account.

    Relies on the DIGITALOCEAN_API_KEY environment variable for auth.
    """
    client = po.connect()
    droplets = client.droplets.list()
    # NOTE(review): assumes at least one droplet exists; IndexError otherwise.
    first_id = droplets[0]['id']
    return client.droplets.get(first_id)
"resource": ""
} |
q264194 | take_snapshot | validation | def take_snapshot(droplet, name):
"""
Take a snapshot of a droplet
Parameters
----------
name: str
name for snapshot
"""
print "powering off"
droplet.power_off()
droplet.wait() # wait for pending actions to complete
print "taking snapshot"
droplet.take_snapshot(name)
droplet.wait()
snapshots = droplet.snapshots()
print "Current snapshots"
print snapshots | python | {
"resource": ""
} |
def allowed_operations(self):
    """Retrieve the operations permitted for this request.

    Detail requests (slug present) and list requests carry separate
    permission sets on the resource meta.
    """
    meta = self.meta
    if self.slug is None:
        return meta.list_allowed_operations
    return meta.detail_allowed_operations
"resource": ""
} |
def assert_operations(self, *args):
    """Raise Forbidden unless every requested operation is allowed here."""
    allowed = set(self.allowed_operations)
    if not allowed.issuperset(args):
        raise http.exceptions.Forbidden()
"resource": ""
} |
def make_response(self, data=None):
    """Fill the response object from the passed data.

    A None payload leaves the response untouched.
    """
    if data is None:
        return
    # Prepare the data for transmission, then serialize it out.
    prepared = self.prepare(data)
    self.response.write(prepared, serialize=True)
"resource": ""
} |
def get(self, request, response):
    """Processes a `GET` request.

    Reads the requested item(s), paginates non-string collections, and
    responds 404 when nothing is found.
    """
    # Ensure we're allowed to read the resource.
    self.assert_operations('read')
    # Delegate to `read` to retrieve the items.
    items = self.read()
    # if self.slug is not None and not items:
    #     # Requested a specific resource but nothing is returned.
    #     # Attempt to resolve by changing what we understand as
    #     # a slug to a path.
    #     self.path = self.path + self.slug if self.path else self.slug
    #     self.slug = None
    #     # Attempt to retrieve the resource again.
    #     items = self.read()
    # Ensure that if we have a slug and still no items that a 404
    # is raised appropriately.
    # NOTE(review): as written this 404s on *any* empty result, including
    # an empty collection with no slug -- confirm that is intended.
    if not items:
        raise http.exceptions.NotFound()
    # Only paginate real (non-string) collections.
    if (isinstance(items, Iterable)
            and not isinstance(items, six.string_types)) and items:
        # Paginate over the collection.
        items = pagination.paginate(self.request, self.response, items)
    # Build the response object.
    self.make_response(items)
"resource": ""
} |
def post(self, request, response):
    """Processes a `POST` request: create a new item in the collection."""
    # Item-level POST (slug present) has no defined meaning here.
    if self.slug is not None:
        raise http.exceptions.NotImplemented()
    # Ensure we're allowed to create a resource.
    self.assert_operations('create')
    # Deserialize and clean the incoming representation.
    body = self.request.read(deserialize=True)
    data = self._clean(None, body)
    # Delegate to `create` to build the item, then respond 201 Created.
    item = self.create(data)
    self.response.status = http.client.CREATED
    self.make_response(item)
"resource": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.