code
stringlengths 75
104k
| code_sememe
stringlengths 47
309k
| token_type
stringlengths 215
214k
| code_dependency
stringlengths 75
155k
|
|---|---|---|---|
def register(**kwargs):
"""Decorator for registering API function.
Does not do any check or validation.
"""
def decorator(func):
_CALL_REGISTRY[kwargs.get(API_SYM, func.__name__)] = func
return func
return decorator
|
def function[register, parameter[]]:
constant[Decorator for registering API function.
Does not do any check or validation.
]
def function[decorator, parameter[func]]:
call[name[_CALL_REGISTRY]][call[name[kwargs].get, parameter[name[API_SYM], name[func].__name__]]] assign[=] name[func]
return[name[func]]
return[name[decorator]]
|
keyword[def] identifier[register] (** identifier[kwargs] ):
literal[string]
keyword[def] identifier[decorator] ( identifier[func] ):
identifier[_CALL_REGISTRY] [ identifier[kwargs] . identifier[get] ( identifier[API_SYM] , identifier[func] . identifier[__name__] )]= identifier[func]
keyword[return] identifier[func]
keyword[return] identifier[decorator]
|
def register(**kwargs):
"""Decorator for registering API function.
Does not do any check or validation.
"""
def decorator(func):
_CALL_REGISTRY[kwargs.get(API_SYM, func.__name__)] = func
return func
return decorator
|
def resolver(*for_resolve, attr_package='__package_for_resolve_deco__'):
""" Resolve dotted names in function arguments
Usage:
>>> @resolver('obj')
>>> def func(param, obj):
>>> assert isinstance(param, str)
>>> assert not isinstance(obj, str)
>>>
>>> func('os.path', 'sys.exit')
"""
def decorator(func):
spec = inspect.getargspec(func).args
if set(for_resolve) - set(spec):
raise ValueError('bad arguments')
@wraps(func)
def wrapper(*args, **kwargs):
args = list(args)
if args and attr_package:
package = getattr(args[0], attr_package, None)
else:
package = None
for item in for_resolve:
n = spec.index(item)
if n >= len(args):
continue
if n is not None and isinstance(args[n], str):
args[n] = resolve(args[n], package)
for kw, value in kwargs.copy().items():
if kw in for_resolve and isinstance(value, str):
kwargs[kw] = resolve(value, package)
return func(*args, **kwargs)
return wrapper
return decorator
|
def function[resolver, parameter[]]:
constant[ Resolve dotted names in function arguments
Usage:
>>> @resolver('obj')
>>> def func(param, obj):
>>> assert isinstance(param, str)
>>> assert not isinstance(obj, str)
>>>
>>> func('os.path', 'sys.exit')
]
def function[decorator, parameter[func]]:
variable[spec] assign[=] call[name[inspect].getargspec, parameter[name[func]]].args
if binary_operation[call[name[set], parameter[name[for_resolve]]] - call[name[set], parameter[name[spec]]]] begin[:]
<ast.Raise object at 0x7da2043440a0>
def function[wrapper, parameter[]]:
variable[args] assign[=] call[name[list], parameter[name[args]]]
if <ast.BoolOp object at 0x7da204344850> begin[:]
variable[package] assign[=] call[name[getattr], parameter[call[name[args]][constant[0]], name[attr_package], constant[None]]]
for taget[name[item]] in starred[name[for_resolve]] begin[:]
variable[n] assign[=] call[name[spec].index, parameter[name[item]]]
if compare[name[n] greater_or_equal[>=] call[name[len], parameter[name[args]]]] begin[:]
continue
if <ast.BoolOp object at 0x7da204347ac0> begin[:]
call[name[args]][name[n]] assign[=] call[name[resolve], parameter[call[name[args]][name[n]], name[package]]]
for taget[tuple[[<ast.Name object at 0x7da204346f80>, <ast.Name object at 0x7da204345f30>]]] in starred[call[call[name[kwargs].copy, parameter[]].items, parameter[]]] begin[:]
if <ast.BoolOp object at 0x7da204344820> begin[:]
call[name[kwargs]][name[kw]] assign[=] call[name[resolve], parameter[name[value], name[package]]]
return[call[name[func], parameter[<ast.Starred object at 0x7da204346080>]]]
return[name[wrapper]]
return[name[decorator]]
|
keyword[def] identifier[resolver] (* identifier[for_resolve] , identifier[attr_package] = literal[string] ):
literal[string]
keyword[def] identifier[decorator] ( identifier[func] ):
identifier[spec] = identifier[inspect] . identifier[getargspec] ( identifier[func] ). identifier[args]
keyword[if] identifier[set] ( identifier[for_resolve] )- identifier[set] ( identifier[spec] ):
keyword[raise] identifier[ValueError] ( literal[string] )
@ identifier[wraps] ( identifier[func] )
keyword[def] identifier[wrapper] (* identifier[args] ,** identifier[kwargs] ):
identifier[args] = identifier[list] ( identifier[args] )
keyword[if] identifier[args] keyword[and] identifier[attr_package] :
identifier[package] = identifier[getattr] ( identifier[args] [ literal[int] ], identifier[attr_package] , keyword[None] )
keyword[else] :
identifier[package] = keyword[None]
keyword[for] identifier[item] keyword[in] identifier[for_resolve] :
identifier[n] = identifier[spec] . identifier[index] ( identifier[item] )
keyword[if] identifier[n] >= identifier[len] ( identifier[args] ):
keyword[continue]
keyword[if] identifier[n] keyword[is] keyword[not] keyword[None] keyword[and] identifier[isinstance] ( identifier[args] [ identifier[n] ], identifier[str] ):
identifier[args] [ identifier[n] ]= identifier[resolve] ( identifier[args] [ identifier[n] ], identifier[package] )
keyword[for] identifier[kw] , identifier[value] keyword[in] identifier[kwargs] . identifier[copy] (). identifier[items] ():
keyword[if] identifier[kw] keyword[in] identifier[for_resolve] keyword[and] identifier[isinstance] ( identifier[value] , identifier[str] ):
identifier[kwargs] [ identifier[kw] ]= identifier[resolve] ( identifier[value] , identifier[package] )
keyword[return] identifier[func] (* identifier[args] ,** identifier[kwargs] )
keyword[return] identifier[wrapper]
keyword[return] identifier[decorator]
|
def resolver(*for_resolve, attr_package='__package_for_resolve_deco__'):
""" Resolve dotted names in function arguments
Usage:
>>> @resolver('obj')
>>> def func(param, obj):
>>> assert isinstance(param, str)
>>> assert not isinstance(obj, str)
>>>
>>> func('os.path', 'sys.exit')
"""
def decorator(func):
spec = inspect.getargspec(func).args
if set(for_resolve) - set(spec):
raise ValueError('bad arguments') # depends on [control=['if'], data=[]]
@wraps(func)
def wrapper(*args, **kwargs):
args = list(args)
if args and attr_package:
package = getattr(args[0], attr_package, None) # depends on [control=['if'], data=[]]
else:
package = None
for item in for_resolve:
n = spec.index(item)
if n >= len(args):
continue # depends on [control=['if'], data=[]]
if n is not None and isinstance(args[n], str):
args[n] = resolve(args[n], package) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['item']]
for (kw, value) in kwargs.copy().items():
if kw in for_resolve and isinstance(value, str):
kwargs[kw] = resolve(value, package) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return func(*args, **kwargs)
return wrapper
return decorator
|
def set(self, instance, items, prices=None, specs=None, hidden=None, **kw):
"""Set/Assign Analyses to this AR
:param items: List of Analysis objects/brains, AnalysisService
objects/brains and/or Analysis Service uids
:type items: list
:param prices: Mapping of AnalysisService UID -> price
:type prices: dict
:param specs: List of AnalysisService UID -> Result Range mappings
:type specs: list
:param hidden: List of AnalysisService UID -> Hidden mappings
:type hidden: list
:returns: list of new assigned Analyses
"""
# This setter returns a list of new set Analyses
new_analyses = []
# Current assigned analyses
analyses = instance.objectValues("Analysis")
# Analyses which are in a non-open state must be retained, except those
# that are in a registered state (the sample has not been received)
non_open_analyses = filter(lambda an: not an.isOpen(), analyses)
non_open_analyses = filter(
lambda an: api.get_workflow_status_of(an) != "registered",
non_open_analyses)
# Prevent removing all analyses
#
# N.B.: Non-open analyses are rendered disabled in the HTML form.
# Therefore, their UIDs are not included in the submitted UIDs.
if not items and not non_open_analyses:
logger.warn("Not allowed to remove all Analyses from AR.")
return new_analyses
# Bail out if the items is not a list type
if not isinstance(items, (list, tuple)):
raise TypeError(
"Items parameter must be a tuple or list, got '{}'".format(
type(items)))
# Bail out if the AR is inactive
if not api.is_active(instance):
raise Unauthorized("Inactive ARs can not be modified"
.format(AddAnalysis))
# Bail out if the user has not the right permission
if not check_permission(AddAnalysis, instance):
raise Unauthorized("You do not have the '{}' permission"
.format(AddAnalysis))
# Convert the items to a valid list of AnalysisServices
services = filter(None, map(self._to_service, items))
# Calculate dependencies
# FIXME Infinite recursion error possible here, if the formula includes
# the Keyword of the Service that includes the Calculation
dependencies = map(lambda s: s.getServiceDependencies(), services)
dependencies = list(itertools.chain.from_iterable(dependencies))
# Merge dependencies and services
services = set(services + dependencies)
# Modify existing AR specs with new form values of selected analyses.
self._update_specs(instance, specs)
# Create a mapping of Service UID -> Hidden status
if hidden is None:
hidden = []
hidden = dict(map(lambda d: (d.get("uid"), d.get("hidden")), hidden))
# Ensure we have a prices dictionary
if prices is None:
prices = dict()
# CREATE/MODIFY ANALYSES
for service in services:
service_uid = api.get_uid(service)
keyword = service.getKeyword()
# Create the Analysis if it doesn't exist
if shasattr(instance, keyword):
analysis = instance._getOb(keyword)
else:
analysis = create_analysis(instance, service)
new_analyses.append(analysis)
# set the hidden status
analysis.setHidden(hidden.get(service_uid, False))
# Set the price of the Analysis
analysis.setPrice(prices.get(service_uid, service.getPrice()))
# DELETE ANALYSES
# Service UIDs
service_uids = map(api.get_uid, services)
# Analyses IDs to delete
delete_ids = []
# Assigned Attachments
assigned_attachments = []
for analysis in analyses:
service_uid = analysis.getServiceUID()
# Skip if the Service is selected
if service_uid in service_uids:
continue
# Skip non-open Analyses
if analysis in non_open_analyses:
continue
# Remember assigned attachments
# https://github.com/senaite/senaite.core/issues/1025
assigned_attachments.extend(analysis.getAttachment())
analysis.setAttachment([])
# If it is assigned to a worksheet, unassign it before deletion.
worksheet = analysis.getWorksheet()
if worksheet:
worksheet.removeAnalysis(analysis)
# Unset the partition reference
# TODO Remove in >v1.3.0 - This is kept for backwards-compatibility
part = analysis.getSamplePartition()
if part:
# From this partition, remove the reference to the current
# analysis that is going to be removed to prevent inconsistent
# states (Sample Partitions referencing to Analyses that do not
# exist anymore
an_uid = api.get_uid(analysis)
part_ans = part.getAnalyses() or []
part_ans = filter(
lambda an: api.get_uid(an) != an_uid, part_ans)
part.setAnalyses(part_ans)
# Unset the Analysis-to-Partition reference
analysis.setSamplePartition(None)
delete_ids.append(analysis.getId())
if delete_ids:
# Note: subscriber might promote the AR
instance.manage_delObjects(ids=delete_ids)
# Remove orphaned attachments
for attachment in assigned_attachments:
# only delete attachments which are no further linked
if not attachment.getLinkedAnalyses():
logger.info(
"Deleting attachment: {}".format(attachment.getId()))
attachment_id = api.get_id(attachment)
api.get_parent(attachment).manage_delObjects(attachment_id)
return new_analyses
|
def function[set, parameter[self, instance, items, prices, specs, hidden]]:
constant[Set/Assign Analyses to this AR
:param items: List of Analysis objects/brains, AnalysisService
objects/brains and/or Analysis Service uids
:type items: list
:param prices: Mapping of AnalysisService UID -> price
:type prices: dict
:param specs: List of AnalysisService UID -> Result Range mappings
:type specs: list
:param hidden: List of AnalysisService UID -> Hidden mappings
:type hidden: list
:returns: list of new assigned Analyses
]
variable[new_analyses] assign[=] list[[]]
variable[analyses] assign[=] call[name[instance].objectValues, parameter[constant[Analysis]]]
variable[non_open_analyses] assign[=] call[name[filter], parameter[<ast.Lambda object at 0x7da1b1d65fc0>, name[analyses]]]
variable[non_open_analyses] assign[=] call[name[filter], parameter[<ast.Lambda object at 0x7da1b1d64c10>, name[non_open_analyses]]]
if <ast.BoolOp object at 0x7da1b1d66bf0> begin[:]
call[name[logger].warn, parameter[constant[Not allowed to remove all Analyses from AR.]]]
return[name[new_analyses]]
if <ast.UnaryOp object at 0x7da1b1d66a40> begin[:]
<ast.Raise object at 0x7da1b1d672e0>
if <ast.UnaryOp object at 0x7da1b1d65b10> begin[:]
<ast.Raise object at 0x7da1b1d651e0>
if <ast.UnaryOp object at 0x7da1b1d67430> begin[:]
<ast.Raise object at 0x7da1b1d669b0>
variable[services] assign[=] call[name[filter], parameter[constant[None], call[name[map], parameter[name[self]._to_service, name[items]]]]]
variable[dependencies] assign[=] call[name[map], parameter[<ast.Lambda object at 0x7da1b1d66230>, name[services]]]
variable[dependencies] assign[=] call[name[list], parameter[call[name[itertools].chain.from_iterable, parameter[name[dependencies]]]]]
variable[services] assign[=] call[name[set], parameter[binary_operation[name[services] + name[dependencies]]]]
call[name[self]._update_specs, parameter[name[instance], name[specs]]]
if compare[name[hidden] is constant[None]] begin[:]
variable[hidden] assign[=] list[[]]
variable[hidden] assign[=] call[name[dict], parameter[call[name[map], parameter[<ast.Lambda object at 0x7da1b1d67940>, name[hidden]]]]]
if compare[name[prices] is constant[None]] begin[:]
variable[prices] assign[=] call[name[dict], parameter[]]
for taget[name[service]] in starred[name[services]] begin[:]
variable[service_uid] assign[=] call[name[api].get_uid, parameter[name[service]]]
variable[keyword] assign[=] call[name[service].getKeyword, parameter[]]
if call[name[shasattr], parameter[name[instance], name[keyword]]] begin[:]
variable[analysis] assign[=] call[name[instance]._getOb, parameter[name[keyword]]]
call[name[analysis].setHidden, parameter[call[name[hidden].get, parameter[name[service_uid], constant[False]]]]]
call[name[analysis].setPrice, parameter[call[name[prices].get, parameter[name[service_uid], call[name[service].getPrice, parameter[]]]]]]
variable[service_uids] assign[=] call[name[map], parameter[name[api].get_uid, name[services]]]
variable[delete_ids] assign[=] list[[]]
variable[assigned_attachments] assign[=] list[[]]
for taget[name[analysis]] in starred[name[analyses]] begin[:]
variable[service_uid] assign[=] call[name[analysis].getServiceUID, parameter[]]
if compare[name[service_uid] in name[service_uids]] begin[:]
continue
if compare[name[analysis] in name[non_open_analyses]] begin[:]
continue
call[name[assigned_attachments].extend, parameter[call[name[analysis].getAttachment, parameter[]]]]
call[name[analysis].setAttachment, parameter[list[[]]]]
variable[worksheet] assign[=] call[name[analysis].getWorksheet, parameter[]]
if name[worksheet] begin[:]
call[name[worksheet].removeAnalysis, parameter[name[analysis]]]
variable[part] assign[=] call[name[analysis].getSamplePartition, parameter[]]
if name[part] begin[:]
variable[an_uid] assign[=] call[name[api].get_uid, parameter[name[analysis]]]
variable[part_ans] assign[=] <ast.BoolOp object at 0x7da1b1d67af0>
variable[part_ans] assign[=] call[name[filter], parameter[<ast.Lambda object at 0x7da1b1d67880>, name[part_ans]]]
call[name[part].setAnalyses, parameter[name[part_ans]]]
call[name[analysis].setSamplePartition, parameter[constant[None]]]
call[name[delete_ids].append, parameter[call[name[analysis].getId, parameter[]]]]
if name[delete_ids] begin[:]
call[name[instance].manage_delObjects, parameter[]]
for taget[name[attachment]] in starred[name[assigned_attachments]] begin[:]
if <ast.UnaryOp object at 0x7da1b2346ec0> begin[:]
call[name[logger].info, parameter[call[constant[Deleting attachment: {}].format, parameter[call[name[attachment].getId, parameter[]]]]]]
variable[attachment_id] assign[=] call[name[api].get_id, parameter[name[attachment]]]
call[call[name[api].get_parent, parameter[name[attachment]]].manage_delObjects, parameter[name[attachment_id]]]
return[name[new_analyses]]
|
keyword[def] identifier[set] ( identifier[self] , identifier[instance] , identifier[items] , identifier[prices] = keyword[None] , identifier[specs] = keyword[None] , identifier[hidden] = keyword[None] ,** identifier[kw] ):
literal[string]
identifier[new_analyses] =[]
identifier[analyses] = identifier[instance] . identifier[objectValues] ( literal[string] )
identifier[non_open_analyses] = identifier[filter] ( keyword[lambda] identifier[an] : keyword[not] identifier[an] . identifier[isOpen] (), identifier[analyses] )
identifier[non_open_analyses] = identifier[filter] (
keyword[lambda] identifier[an] : identifier[api] . identifier[get_workflow_status_of] ( identifier[an] )!= literal[string] ,
identifier[non_open_analyses] )
keyword[if] keyword[not] identifier[items] keyword[and] keyword[not] identifier[non_open_analyses] :
identifier[logger] . identifier[warn] ( literal[string] )
keyword[return] identifier[new_analyses]
keyword[if] keyword[not] identifier[isinstance] ( identifier[items] ,( identifier[list] , identifier[tuple] )):
keyword[raise] identifier[TypeError] (
literal[string] . identifier[format] (
identifier[type] ( identifier[items] )))
keyword[if] keyword[not] identifier[api] . identifier[is_active] ( identifier[instance] ):
keyword[raise] identifier[Unauthorized] ( literal[string]
. identifier[format] ( identifier[AddAnalysis] ))
keyword[if] keyword[not] identifier[check_permission] ( identifier[AddAnalysis] , identifier[instance] ):
keyword[raise] identifier[Unauthorized] ( literal[string]
. identifier[format] ( identifier[AddAnalysis] ))
identifier[services] = identifier[filter] ( keyword[None] , identifier[map] ( identifier[self] . identifier[_to_service] , identifier[items] ))
identifier[dependencies] = identifier[map] ( keyword[lambda] identifier[s] : identifier[s] . identifier[getServiceDependencies] (), identifier[services] )
identifier[dependencies] = identifier[list] ( identifier[itertools] . identifier[chain] . identifier[from_iterable] ( identifier[dependencies] ))
identifier[services] = identifier[set] ( identifier[services] + identifier[dependencies] )
identifier[self] . identifier[_update_specs] ( identifier[instance] , identifier[specs] )
keyword[if] identifier[hidden] keyword[is] keyword[None] :
identifier[hidden] =[]
identifier[hidden] = identifier[dict] ( identifier[map] ( keyword[lambda] identifier[d] :( identifier[d] . identifier[get] ( literal[string] ), identifier[d] . identifier[get] ( literal[string] )), identifier[hidden] ))
keyword[if] identifier[prices] keyword[is] keyword[None] :
identifier[prices] = identifier[dict] ()
keyword[for] identifier[service] keyword[in] identifier[services] :
identifier[service_uid] = identifier[api] . identifier[get_uid] ( identifier[service] )
identifier[keyword] = identifier[service] . identifier[getKeyword] ()
keyword[if] identifier[shasattr] ( identifier[instance] , identifier[keyword] ):
identifier[analysis] = identifier[instance] . identifier[_getOb] ( identifier[keyword] )
keyword[else] :
identifier[analysis] = identifier[create_analysis] ( identifier[instance] , identifier[service] )
identifier[new_analyses] . identifier[append] ( identifier[analysis] )
identifier[analysis] . identifier[setHidden] ( identifier[hidden] . identifier[get] ( identifier[service_uid] , keyword[False] ))
identifier[analysis] . identifier[setPrice] ( identifier[prices] . identifier[get] ( identifier[service_uid] , identifier[service] . identifier[getPrice] ()))
identifier[service_uids] = identifier[map] ( identifier[api] . identifier[get_uid] , identifier[services] )
identifier[delete_ids] =[]
identifier[assigned_attachments] =[]
keyword[for] identifier[analysis] keyword[in] identifier[analyses] :
identifier[service_uid] = identifier[analysis] . identifier[getServiceUID] ()
keyword[if] identifier[service_uid] keyword[in] identifier[service_uids] :
keyword[continue]
keyword[if] identifier[analysis] keyword[in] identifier[non_open_analyses] :
keyword[continue]
identifier[assigned_attachments] . identifier[extend] ( identifier[analysis] . identifier[getAttachment] ())
identifier[analysis] . identifier[setAttachment] ([])
identifier[worksheet] = identifier[analysis] . identifier[getWorksheet] ()
keyword[if] identifier[worksheet] :
identifier[worksheet] . identifier[removeAnalysis] ( identifier[analysis] )
identifier[part] = identifier[analysis] . identifier[getSamplePartition] ()
keyword[if] identifier[part] :
identifier[an_uid] = identifier[api] . identifier[get_uid] ( identifier[analysis] )
identifier[part_ans] = identifier[part] . identifier[getAnalyses] () keyword[or] []
identifier[part_ans] = identifier[filter] (
keyword[lambda] identifier[an] : identifier[api] . identifier[get_uid] ( identifier[an] )!= identifier[an_uid] , identifier[part_ans] )
identifier[part] . identifier[setAnalyses] ( identifier[part_ans] )
identifier[analysis] . identifier[setSamplePartition] ( keyword[None] )
identifier[delete_ids] . identifier[append] ( identifier[analysis] . identifier[getId] ())
keyword[if] identifier[delete_ids] :
identifier[instance] . identifier[manage_delObjects] ( identifier[ids] = identifier[delete_ids] )
keyword[for] identifier[attachment] keyword[in] identifier[assigned_attachments] :
keyword[if] keyword[not] identifier[attachment] . identifier[getLinkedAnalyses] ():
identifier[logger] . identifier[info] (
literal[string] . identifier[format] ( identifier[attachment] . identifier[getId] ()))
identifier[attachment_id] = identifier[api] . identifier[get_id] ( identifier[attachment] )
identifier[api] . identifier[get_parent] ( identifier[attachment] ). identifier[manage_delObjects] ( identifier[attachment_id] )
keyword[return] identifier[new_analyses]
|
def set(self, instance, items, prices=None, specs=None, hidden=None, **kw):
"""Set/Assign Analyses to this AR
:param items: List of Analysis objects/brains, AnalysisService
objects/brains and/or Analysis Service uids
:type items: list
:param prices: Mapping of AnalysisService UID -> price
:type prices: dict
:param specs: List of AnalysisService UID -> Result Range mappings
:type specs: list
:param hidden: List of AnalysisService UID -> Hidden mappings
:type hidden: list
:returns: list of new assigned Analyses
"""
# This setter returns a list of new set Analyses
new_analyses = []
# Current assigned analyses
analyses = instance.objectValues('Analysis')
# Analyses which are in a non-open state must be retained, except those
# that are in a registered state (the sample has not been received)
non_open_analyses = filter(lambda an: not an.isOpen(), analyses)
non_open_analyses = filter(lambda an: api.get_workflow_status_of(an) != 'registered', non_open_analyses)
# Prevent removing all analyses
#
# N.B.: Non-open analyses are rendered disabled in the HTML form.
# Therefore, their UIDs are not included in the submitted UIDs.
if not items and (not non_open_analyses):
logger.warn('Not allowed to remove all Analyses from AR.')
return new_analyses # depends on [control=['if'], data=[]]
# Bail out if the items is not a list type
if not isinstance(items, (list, tuple)):
raise TypeError("Items parameter must be a tuple or list, got '{}'".format(type(items))) # depends on [control=['if'], data=[]]
# Bail out if the AR is inactive
if not api.is_active(instance):
raise Unauthorized('Inactive ARs can not be modified'.format(AddAnalysis)) # depends on [control=['if'], data=[]]
# Bail out if the user has not the right permission
if not check_permission(AddAnalysis, instance):
raise Unauthorized("You do not have the '{}' permission".format(AddAnalysis)) # depends on [control=['if'], data=[]]
# Convert the items to a valid list of AnalysisServices
services = filter(None, map(self._to_service, items))
# Calculate dependencies
# FIXME Infinite recursion error possible here, if the formula includes
# the Keyword of the Service that includes the Calculation
dependencies = map(lambda s: s.getServiceDependencies(), services)
dependencies = list(itertools.chain.from_iterable(dependencies))
# Merge dependencies and services
services = set(services + dependencies)
# Modify existing AR specs with new form values of selected analyses.
self._update_specs(instance, specs)
# Create a mapping of Service UID -> Hidden status
if hidden is None:
hidden = [] # depends on [control=['if'], data=['hidden']]
hidden = dict(map(lambda d: (d.get('uid'), d.get('hidden')), hidden))
# Ensure we have a prices dictionary
if prices is None:
prices = dict() # depends on [control=['if'], data=['prices']]
# CREATE/MODIFY ANALYSES
for service in services:
service_uid = api.get_uid(service)
keyword = service.getKeyword()
# Create the Analysis if it doesn't exist
if shasattr(instance, keyword):
analysis = instance._getOb(keyword) # depends on [control=['if'], data=[]]
else:
analysis = create_analysis(instance, service)
new_analyses.append(analysis)
# set the hidden status
analysis.setHidden(hidden.get(service_uid, False))
# Set the price of the Analysis
analysis.setPrice(prices.get(service_uid, service.getPrice())) # depends on [control=['for'], data=['service']]
# DELETE ANALYSES
# Service UIDs
service_uids = map(api.get_uid, services)
# Analyses IDs to delete
delete_ids = []
# Assigned Attachments
assigned_attachments = []
for analysis in analyses:
service_uid = analysis.getServiceUID()
# Skip if the Service is selected
if service_uid in service_uids:
continue # depends on [control=['if'], data=[]]
# Skip non-open Analyses
if analysis in non_open_analyses:
continue # depends on [control=['if'], data=[]]
# Remember assigned attachments
# https://github.com/senaite/senaite.core/issues/1025
assigned_attachments.extend(analysis.getAttachment())
analysis.setAttachment([])
# If it is assigned to a worksheet, unassign it before deletion.
worksheet = analysis.getWorksheet()
if worksheet:
worksheet.removeAnalysis(analysis) # depends on [control=['if'], data=[]]
# Unset the partition reference
# TODO Remove in >v1.3.0 - This is kept for backwards-compatibility
part = analysis.getSamplePartition()
if part:
# From this partition, remove the reference to the current
# analysis that is going to be removed to prevent inconsistent
# states (Sample Partitions referencing to Analyses that do not
# exist anymore
an_uid = api.get_uid(analysis)
part_ans = part.getAnalyses() or []
part_ans = filter(lambda an: api.get_uid(an) != an_uid, part_ans)
part.setAnalyses(part_ans) # depends on [control=['if'], data=[]]
# Unset the Analysis-to-Partition reference
analysis.setSamplePartition(None)
delete_ids.append(analysis.getId()) # depends on [control=['for'], data=['analysis']]
if delete_ids:
# Note: subscriber might promote the AR
instance.manage_delObjects(ids=delete_ids) # depends on [control=['if'], data=[]]
# Remove orphaned attachments
for attachment in assigned_attachments:
# only delete attachments which are no further linked
if not attachment.getLinkedAnalyses():
logger.info('Deleting attachment: {}'.format(attachment.getId()))
attachment_id = api.get_id(attachment)
api.get_parent(attachment).manage_delObjects(attachment_id) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['attachment']]
return new_analyses
|
def close(self, status=STATUS_NORMAL, reason=six.b(""), timeout=3):
"""
Close Websocket object
status: status code to send. see STATUS_XXX.
reason: the reason to close. This must be string.
timeout: timeout until receive a close frame.
If None, it will wait forever until receive a close frame.
"""
if self.connected:
if status < 0 or status >= ABNF.LENGTH_16:
raise ValueError("code is invalid range")
try:
self.connected = False
self.send(struct.pack('!H', status) +
reason, ABNF.OPCODE_CLOSE)
sock_timeout = self.sock.gettimeout()
self.sock.settimeout(timeout)
start_time = time.time()
while timeout is None or time.time() - start_time < timeout:
try:
frame = self.recv_frame()
if frame.opcode != ABNF.OPCODE_CLOSE:
continue
if isEnabledForError():
recv_status = struct.unpack("!H", frame.data[0:2])[0]
if recv_status != STATUS_NORMAL:
error("close status: " + repr(recv_status))
break
except:
break
self.sock.settimeout(sock_timeout)
self.sock.shutdown(socket.SHUT_RDWR)
except:
pass
self.shutdown()
|
def function[close, parameter[self, status, reason, timeout]]:
constant[
Close Websocket object
status: status code to send. see STATUS_XXX.
reason: the reason to close. This must be string.
timeout: timeout until receive a close frame.
If None, it will wait forever until receive a close frame.
]
if name[self].connected begin[:]
if <ast.BoolOp object at 0x7da20c6c77f0> begin[:]
<ast.Raise object at 0x7da20c6c5f00>
<ast.Try object at 0x7da20c6c67a0>
call[name[self].shutdown, parameter[]]
|
keyword[def] identifier[close] ( identifier[self] , identifier[status] = identifier[STATUS_NORMAL] , identifier[reason] = identifier[six] . identifier[b] ( literal[string] ), identifier[timeout] = literal[int] ):
literal[string]
keyword[if] identifier[self] . identifier[connected] :
keyword[if] identifier[status] < literal[int] keyword[or] identifier[status] >= identifier[ABNF] . identifier[LENGTH_16] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[try] :
identifier[self] . identifier[connected] = keyword[False]
identifier[self] . identifier[send] ( identifier[struct] . identifier[pack] ( literal[string] , identifier[status] )+
identifier[reason] , identifier[ABNF] . identifier[OPCODE_CLOSE] )
identifier[sock_timeout] = identifier[self] . identifier[sock] . identifier[gettimeout] ()
identifier[self] . identifier[sock] . identifier[settimeout] ( identifier[timeout] )
identifier[start_time] = identifier[time] . identifier[time] ()
keyword[while] identifier[timeout] keyword[is] keyword[None] keyword[or] identifier[time] . identifier[time] ()- identifier[start_time] < identifier[timeout] :
keyword[try] :
identifier[frame] = identifier[self] . identifier[recv_frame] ()
keyword[if] identifier[frame] . identifier[opcode] != identifier[ABNF] . identifier[OPCODE_CLOSE] :
keyword[continue]
keyword[if] identifier[isEnabledForError] ():
identifier[recv_status] = identifier[struct] . identifier[unpack] ( literal[string] , identifier[frame] . identifier[data] [ literal[int] : literal[int] ])[ literal[int] ]
keyword[if] identifier[recv_status] != identifier[STATUS_NORMAL] :
identifier[error] ( literal[string] + identifier[repr] ( identifier[recv_status] ))
keyword[break]
keyword[except] :
keyword[break]
identifier[self] . identifier[sock] . identifier[settimeout] ( identifier[sock_timeout] )
identifier[self] . identifier[sock] . identifier[shutdown] ( identifier[socket] . identifier[SHUT_RDWR] )
keyword[except] :
keyword[pass]
identifier[self] . identifier[shutdown] ()
|
def close(self, status=STATUS_NORMAL, reason=six.b(''), timeout=3):
"""
Close Websocket object
status: status code to send. see STATUS_XXX.
reason: the reason to close. This must be string.
timeout: timeout until receive a close frame.
If None, it will wait forever until receive a close frame.
"""
if self.connected:
if status < 0 or status >= ABNF.LENGTH_16:
raise ValueError('code is invalid range') # depends on [control=['if'], data=[]]
try:
self.connected = False
self.send(struct.pack('!H', status) + reason, ABNF.OPCODE_CLOSE)
sock_timeout = self.sock.gettimeout()
self.sock.settimeout(timeout)
start_time = time.time()
while timeout is None or time.time() - start_time < timeout:
try:
frame = self.recv_frame()
if frame.opcode != ABNF.OPCODE_CLOSE:
continue # depends on [control=['if'], data=[]]
if isEnabledForError():
recv_status = struct.unpack('!H', frame.data[0:2])[0]
if recv_status != STATUS_NORMAL:
error('close status: ' + repr(recv_status)) # depends on [control=['if'], data=['recv_status']] # depends on [control=['if'], data=[]]
break # depends on [control=['try'], data=[]]
except:
break # depends on [control=['except'], data=[]] # depends on [control=['while'], data=[]]
self.sock.settimeout(sock_timeout)
self.sock.shutdown(socket.SHUT_RDWR) # depends on [control=['try'], data=[]]
except:
pass # depends on [control=['except'], data=[]]
self.shutdown() # depends on [control=['if'], data=[]]
|
def default_option(self, fn, optstr):
"""Make an entry in the options_table for fn, with value optstr"""
if fn not in self.lsmagic():
error("%s is not a magic function" % fn)
self.options_table[fn] = optstr
|
def function[default_option, parameter[self, fn, optstr]]:
constant[Make an entry in the options_table for fn, with value optstr]
if compare[name[fn] <ast.NotIn object at 0x7da2590d7190> call[name[self].lsmagic, parameter[]]] begin[:]
call[name[error], parameter[binary_operation[constant[%s is not a magic function] <ast.Mod object at 0x7da2590d6920> name[fn]]]]
call[name[self].options_table][name[fn]] assign[=] name[optstr]
|
keyword[def] identifier[default_option] ( identifier[self] , identifier[fn] , identifier[optstr] ):
literal[string]
keyword[if] identifier[fn] keyword[not] keyword[in] identifier[self] . identifier[lsmagic] ():
identifier[error] ( literal[string] % identifier[fn] )
identifier[self] . identifier[options_table] [ identifier[fn] ]= identifier[optstr]
|
def default_option(self, fn, optstr):
"""Make an entry in the options_table for fn, with value optstr"""
if fn not in self.lsmagic():
error('%s is not a magic function' % fn) # depends on [control=['if'], data=['fn']]
self.options_table[fn] = optstr
|
def getWindow(title, exact=False):
"""Return Window object if 'title' or its part found in visible windows titles, else return None
Return only 1 window found first
Args:
title: unicode string
exact (bool): True if search only exact match
"""
titles = getWindows()
hwnd = titles.get(title, None)
if not hwnd and not exact:
for k, v in titles.items():
if title in k:
hwnd = v
break
if hwnd:
return Window(hwnd)
else:
return None
|
def function[getWindow, parameter[title, exact]]:
constant[Return Window object if 'title' or its part found in visible windows titles, else return None
Return only 1 window found first
Args:
title: unicode string
exact (bool): True if search only exact match
]
variable[titles] assign[=] call[name[getWindows], parameter[]]
variable[hwnd] assign[=] call[name[titles].get, parameter[name[title], constant[None]]]
if <ast.BoolOp object at 0x7da18dc07070> begin[:]
for taget[tuple[[<ast.Name object at 0x7da20c76de40>, <ast.Name object at 0x7da20c76d180>]]] in starred[call[name[titles].items, parameter[]]] begin[:]
if compare[name[title] in name[k]] begin[:]
variable[hwnd] assign[=] name[v]
break
if name[hwnd] begin[:]
return[call[name[Window], parameter[name[hwnd]]]]
|
keyword[def] identifier[getWindow] ( identifier[title] , identifier[exact] = keyword[False] ):
literal[string]
identifier[titles] = identifier[getWindows] ()
identifier[hwnd] = identifier[titles] . identifier[get] ( identifier[title] , keyword[None] )
keyword[if] keyword[not] identifier[hwnd] keyword[and] keyword[not] identifier[exact] :
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[titles] . identifier[items] ():
keyword[if] identifier[title] keyword[in] identifier[k] :
identifier[hwnd] = identifier[v]
keyword[break]
keyword[if] identifier[hwnd] :
keyword[return] identifier[Window] ( identifier[hwnd] )
keyword[else] :
keyword[return] keyword[None]
|
def getWindow(title, exact=False):
"""Return Window object if 'title' or its part found in visible windows titles, else return None
Return only 1 window found first
Args:
title: unicode string
exact (bool): True if search only exact match
"""
titles = getWindows()
hwnd = titles.get(title, None)
if not hwnd and (not exact):
for (k, v) in titles.items():
if title in k:
hwnd = v
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
if hwnd:
return Window(hwnd) # depends on [control=['if'], data=[]]
else:
return None
|
def near_to_position(self, position, max_distance):
'''Returns true iff the record is within max_distance of the given position.
Note: chromosome name not checked, so that's up to you to do first.'''
end = self.ref_end_pos()
return self.POS <= position <= end or abs(position - self.POS) <= max_distance or abs(position - end) <= max_distance
|
def function[near_to_position, parameter[self, position, max_distance]]:
constant[Returns true iff the record is within max_distance of the given position.
Note: chromosome name not checked, so that's up to you to do first.]
variable[end] assign[=] call[name[self].ref_end_pos, parameter[]]
return[<ast.BoolOp object at 0x7da1b1da0640>]
|
keyword[def] identifier[near_to_position] ( identifier[self] , identifier[position] , identifier[max_distance] ):
literal[string]
identifier[end] = identifier[self] . identifier[ref_end_pos] ()
keyword[return] identifier[self] . identifier[POS] <= identifier[position] <= identifier[end] keyword[or] identifier[abs] ( identifier[position] - identifier[self] . identifier[POS] )<= identifier[max_distance] keyword[or] identifier[abs] ( identifier[position] - identifier[end] )<= identifier[max_distance]
|
def near_to_position(self, position, max_distance):
"""Returns true iff the record is within max_distance of the given position.
Note: chromosome name not checked, so that's up to you to do first."""
end = self.ref_end_pos()
return self.POS <= position <= end or abs(position - self.POS) <= max_distance or abs(position - end) <= max_distance
|
def create_identity_from_private_key(self, label: str, pwd: str, private_key: str) -> Identity:
"""
This interface is used to create identity based on given label, password and private key.
:param label: a label for identity.
:param pwd: a password which will be used to encrypt and decrypt the private key.
:param private_key: a private key in the form of string.
:return: if succeed, an Identity object will be returned.
"""
salt = get_random_hex_str(16)
identity = self.__create_identity(label, pwd, salt, private_key)
return identity
|
def function[create_identity_from_private_key, parameter[self, label, pwd, private_key]]:
constant[
This interface is used to create identity based on given label, password and private key.
:param label: a label for identity.
:param pwd: a password which will be used to encrypt and decrypt the private key.
:param private_key: a private key in the form of string.
:return: if succeed, an Identity object will be returned.
]
variable[salt] assign[=] call[name[get_random_hex_str], parameter[constant[16]]]
variable[identity] assign[=] call[name[self].__create_identity, parameter[name[label], name[pwd], name[salt], name[private_key]]]
return[name[identity]]
|
keyword[def] identifier[create_identity_from_private_key] ( identifier[self] , identifier[label] : identifier[str] , identifier[pwd] : identifier[str] , identifier[private_key] : identifier[str] )-> identifier[Identity] :
literal[string]
identifier[salt] = identifier[get_random_hex_str] ( literal[int] )
identifier[identity] = identifier[self] . identifier[__create_identity] ( identifier[label] , identifier[pwd] , identifier[salt] , identifier[private_key] )
keyword[return] identifier[identity]
|
def create_identity_from_private_key(self, label: str, pwd: str, private_key: str) -> Identity:
"""
This interface is used to create identity based on given label, password and private key.
:param label: a label for identity.
:param pwd: a password which will be used to encrypt and decrypt the private key.
:param private_key: a private key in the form of string.
:return: if succeed, an Identity object will be returned.
"""
salt = get_random_hex_str(16)
identity = self.__create_identity(label, pwd, salt, private_key)
return identity
|
def _convert_to_indexer(self, obj, axis=None, is_setter=False):
""" much simpler as we only have to deal with our valid types """
if axis is None:
axis = self.axis or 0
# make need to convert a float key
if isinstance(obj, slice):
return self._convert_slice_indexer(obj, axis)
elif is_float(obj):
return self._convert_scalar_indexer(obj, axis)
try:
self._validate_key(obj, axis)
return obj
except ValueError:
raise ValueError("Can only index by location with "
"a [{types}]".format(types=self._valid_types))
|
def function[_convert_to_indexer, parameter[self, obj, axis, is_setter]]:
constant[ much simpler as we only have to deal with our valid types ]
if compare[name[axis] is constant[None]] begin[:]
variable[axis] assign[=] <ast.BoolOp object at 0x7da18c4ccca0>
if call[name[isinstance], parameter[name[obj], name[slice]]] begin[:]
return[call[name[self]._convert_slice_indexer, parameter[name[obj], name[axis]]]]
<ast.Try object at 0x7da18c4cf2b0>
|
keyword[def] identifier[_convert_to_indexer] ( identifier[self] , identifier[obj] , identifier[axis] = keyword[None] , identifier[is_setter] = keyword[False] ):
literal[string]
keyword[if] identifier[axis] keyword[is] keyword[None] :
identifier[axis] = identifier[self] . identifier[axis] keyword[or] literal[int]
keyword[if] identifier[isinstance] ( identifier[obj] , identifier[slice] ):
keyword[return] identifier[self] . identifier[_convert_slice_indexer] ( identifier[obj] , identifier[axis] )
keyword[elif] identifier[is_float] ( identifier[obj] ):
keyword[return] identifier[self] . identifier[_convert_scalar_indexer] ( identifier[obj] , identifier[axis] )
keyword[try] :
identifier[self] . identifier[_validate_key] ( identifier[obj] , identifier[axis] )
keyword[return] identifier[obj]
keyword[except] identifier[ValueError] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] . identifier[format] ( identifier[types] = identifier[self] . identifier[_valid_types] ))
|
def _convert_to_indexer(self, obj, axis=None, is_setter=False):
""" much simpler as we only have to deal with our valid types """
if axis is None:
axis = self.axis or 0 # depends on [control=['if'], data=['axis']]
# make need to convert a float key
if isinstance(obj, slice):
return self._convert_slice_indexer(obj, axis) # depends on [control=['if'], data=[]]
elif is_float(obj):
return self._convert_scalar_indexer(obj, axis) # depends on [control=['if'], data=[]]
try:
self._validate_key(obj, axis)
return obj # depends on [control=['try'], data=[]]
except ValueError:
raise ValueError('Can only index by location with a [{types}]'.format(types=self._valid_types)) # depends on [control=['except'], data=[]]
|
def process_packets(self, transaction_id=None, invoked_method=None,
timeout=None):
"""Wait for packets and process them as needed.
:param transaction_id: int, Wait until the result of this
transaction ID is recieved.
:param invoked_method: int, Wait until this method is invoked
by the server.
:param timeout: int, The time to wait for a result from the server.
Note: This is the timeout used by this method only,
the connection timeout is still used when reading
packets.
Raises :exc:`RTMPError` on error.
Raises :exc:`RTMPTimeoutError` on timeout.
Usage::
>>> @conn.invoke_handler
... def add(x, y):
... return x + y
>>> @conn.process_packets()
"""
start = time()
while self.connected and transaction_id not in self._invoke_results:
if timeout and (time() - start) >= timeout:
raise RTMPTimeoutError("Timeout")
packet = self.read_packet()
if packet.type == PACKET_TYPE_INVOKE:
try:
decoded = decode_amf(packet.body)
except AMFError:
continue
try:
method, transaction_id_, obj = decoded[:3]
args = decoded[3:]
except ValueError:
continue
if method == "_result":
if len(args) > 0:
result = args[0]
else:
result = None
self._invoke_results[transaction_id_] = result
else:
handler = self._invoke_handlers.get(method)
if handler:
res = handler(*args)
if res is not None:
self.call("_result", res,
transaction_id=transaction_id_)
if method == invoked_method:
self._invoke_args[invoked_method] = args
break
if transaction_id_ == 1.0:
self._connect_result = packet
else:
self.handle_packet(packet)
else:
self.handle_packet(packet)
if transaction_id:
result = self._invoke_results.pop(transaction_id, None)
return result
if invoked_method:
args = self._invoke_args.pop(invoked_method, None)
return args
|
def function[process_packets, parameter[self, transaction_id, invoked_method, timeout]]:
constant[Wait for packets and process them as needed.
:param transaction_id: int, Wait until the result of this
transaction ID is recieved.
:param invoked_method: int, Wait until this method is invoked
by the server.
:param timeout: int, The time to wait for a result from the server.
Note: This is the timeout used by this method only,
the connection timeout is still used when reading
packets.
Raises :exc:`RTMPError` on error.
Raises :exc:`RTMPTimeoutError` on timeout.
Usage::
>>> @conn.invoke_handler
... def add(x, y):
... return x + y
>>> @conn.process_packets()
]
variable[start] assign[=] call[name[time], parameter[]]
while <ast.BoolOp object at 0x7da18f09f400> begin[:]
if <ast.BoolOp object at 0x7da18f09fcd0> begin[:]
<ast.Raise object at 0x7da18f09c4c0>
variable[packet] assign[=] call[name[self].read_packet, parameter[]]
if compare[name[packet].type equal[==] name[PACKET_TYPE_INVOKE]] begin[:]
<ast.Try object at 0x7da18f09e140>
<ast.Try object at 0x7da18f00f400>
if compare[name[method] equal[==] constant[_result]] begin[:]
if compare[call[name[len], parameter[name[args]]] greater[>] constant[0]] begin[:]
variable[result] assign[=] call[name[args]][constant[0]]
call[name[self]._invoke_results][name[transaction_id_]] assign[=] name[result]
if compare[name[transaction_id_] equal[==] constant[1.0]] begin[:]
name[self]._connect_result assign[=] name[packet]
if name[transaction_id] begin[:]
variable[result] assign[=] call[name[self]._invoke_results.pop, parameter[name[transaction_id], constant[None]]]
return[name[result]]
if name[invoked_method] begin[:]
variable[args] assign[=] call[name[self]._invoke_args.pop, parameter[name[invoked_method], constant[None]]]
return[name[args]]
|
keyword[def] identifier[process_packets] ( identifier[self] , identifier[transaction_id] = keyword[None] , identifier[invoked_method] = keyword[None] ,
identifier[timeout] = keyword[None] ):
literal[string]
identifier[start] = identifier[time] ()
keyword[while] identifier[self] . identifier[connected] keyword[and] identifier[transaction_id] keyword[not] keyword[in] identifier[self] . identifier[_invoke_results] :
keyword[if] identifier[timeout] keyword[and] ( identifier[time] ()- identifier[start] )>= identifier[timeout] :
keyword[raise] identifier[RTMPTimeoutError] ( literal[string] )
identifier[packet] = identifier[self] . identifier[read_packet] ()
keyword[if] identifier[packet] . identifier[type] == identifier[PACKET_TYPE_INVOKE] :
keyword[try] :
identifier[decoded] = identifier[decode_amf] ( identifier[packet] . identifier[body] )
keyword[except] identifier[AMFError] :
keyword[continue]
keyword[try] :
identifier[method] , identifier[transaction_id_] , identifier[obj] = identifier[decoded] [: literal[int] ]
identifier[args] = identifier[decoded] [ literal[int] :]
keyword[except] identifier[ValueError] :
keyword[continue]
keyword[if] identifier[method] == literal[string] :
keyword[if] identifier[len] ( identifier[args] )> literal[int] :
identifier[result] = identifier[args] [ literal[int] ]
keyword[else] :
identifier[result] = keyword[None]
identifier[self] . identifier[_invoke_results] [ identifier[transaction_id_] ]= identifier[result]
keyword[else] :
identifier[handler] = identifier[self] . identifier[_invoke_handlers] . identifier[get] ( identifier[method] )
keyword[if] identifier[handler] :
identifier[res] = identifier[handler] (* identifier[args] )
keyword[if] identifier[res] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[call] ( literal[string] , identifier[res] ,
identifier[transaction_id] = identifier[transaction_id_] )
keyword[if] identifier[method] == identifier[invoked_method] :
identifier[self] . identifier[_invoke_args] [ identifier[invoked_method] ]= identifier[args]
keyword[break]
keyword[if] identifier[transaction_id_] == literal[int] :
identifier[self] . identifier[_connect_result] = identifier[packet]
keyword[else] :
identifier[self] . identifier[handle_packet] ( identifier[packet] )
keyword[else] :
identifier[self] . identifier[handle_packet] ( identifier[packet] )
keyword[if] identifier[transaction_id] :
identifier[result] = identifier[self] . identifier[_invoke_results] . identifier[pop] ( identifier[transaction_id] , keyword[None] )
keyword[return] identifier[result]
keyword[if] identifier[invoked_method] :
identifier[args] = identifier[self] . identifier[_invoke_args] . identifier[pop] ( identifier[invoked_method] , keyword[None] )
keyword[return] identifier[args]
|
def process_packets(self, transaction_id=None, invoked_method=None, timeout=None):
"""Wait for packets and process them as needed.
:param transaction_id: int, Wait until the result of this
transaction ID is recieved.
:param invoked_method: int, Wait until this method is invoked
by the server.
:param timeout: int, The time to wait for a result from the server.
Note: This is the timeout used by this method only,
the connection timeout is still used when reading
packets.
Raises :exc:`RTMPError` on error.
Raises :exc:`RTMPTimeoutError` on timeout.
Usage::
>>> @conn.invoke_handler
... def add(x, y):
... return x + y
>>> @conn.process_packets()
"""
start = time()
while self.connected and transaction_id not in self._invoke_results:
if timeout and time() - start >= timeout:
raise RTMPTimeoutError('Timeout') # depends on [control=['if'], data=[]]
packet = self.read_packet()
if packet.type == PACKET_TYPE_INVOKE:
try:
decoded = decode_amf(packet.body) # depends on [control=['try'], data=[]]
except AMFError:
continue # depends on [control=['except'], data=[]]
try:
(method, transaction_id_, obj) = decoded[:3]
args = decoded[3:] # depends on [control=['try'], data=[]]
except ValueError:
continue # depends on [control=['except'], data=[]]
if method == '_result':
if len(args) > 0:
result = args[0] # depends on [control=['if'], data=[]]
else:
result = None
self._invoke_results[transaction_id_] = result # depends on [control=['if'], data=[]]
else:
handler = self._invoke_handlers.get(method)
if handler:
res = handler(*args)
if res is not None:
self.call('_result', res, transaction_id=transaction_id_) # depends on [control=['if'], data=['res']] # depends on [control=['if'], data=[]]
if method == invoked_method:
self._invoke_args[invoked_method] = args
break # depends on [control=['if'], data=['invoked_method']]
if transaction_id_ == 1.0:
self._connect_result = packet # depends on [control=['if'], data=[]]
else:
self.handle_packet(packet) # depends on [control=['if'], data=[]]
else:
self.handle_packet(packet) # depends on [control=['while'], data=[]]
if transaction_id:
result = self._invoke_results.pop(transaction_id, None)
return result # depends on [control=['if'], data=[]]
if invoked_method:
args = self._invoke_args.pop(invoked_method, None)
return args # depends on [control=['if'], data=[]]
|
def filename_add_custom_url_params(filename, request):
""" Adds custom url parameters to filename string
:param filename: Initial filename
:type filename: str
:param request: OGC-type request with specified bounding box, cloud coverage for specific product.
:type request: OgcRequest or GeopediaRequest
:return: Filename with custom url parameters in the name
:rtype: str
"""
if hasattr(request, 'custom_url_params') and request.custom_url_params is not None:
for param, value in sorted(request.custom_url_params.items(),
key=lambda parameter_item: parameter_item[0].value):
filename = '_'.join([filename, param.value, str(value)])
return filename
|
def function[filename_add_custom_url_params, parameter[filename, request]]:
constant[ Adds custom url parameters to filename string
:param filename: Initial filename
:type filename: str
:param request: OGC-type request with specified bounding box, cloud coverage for specific product.
:type request: OgcRequest or GeopediaRequest
:return: Filename with custom url parameters in the name
:rtype: str
]
if <ast.BoolOp object at 0x7da1b18e7730> begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b18e7dc0>, <ast.Name object at 0x7da1b18e6b00>]]] in starred[call[name[sorted], parameter[call[name[request].custom_url_params.items, parameter[]]]]] begin[:]
variable[filename] assign[=] call[constant[_].join, parameter[list[[<ast.Name object at 0x7da1b18e6860>, <ast.Attribute object at 0x7da1b18e61a0>, <ast.Call object at 0x7da1b18e6e00>]]]]
return[name[filename]]
|
keyword[def] identifier[filename_add_custom_url_params] ( identifier[filename] , identifier[request] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[request] , literal[string] ) keyword[and] identifier[request] . identifier[custom_url_params] keyword[is] keyword[not] keyword[None] :
keyword[for] identifier[param] , identifier[value] keyword[in] identifier[sorted] ( identifier[request] . identifier[custom_url_params] . identifier[items] (),
identifier[key] = keyword[lambda] identifier[parameter_item] : identifier[parameter_item] [ literal[int] ]. identifier[value] ):
identifier[filename] = literal[string] . identifier[join] ([ identifier[filename] , identifier[param] . identifier[value] , identifier[str] ( identifier[value] )])
keyword[return] identifier[filename]
|
def filename_add_custom_url_params(filename, request):
""" Adds custom url parameters to filename string
:param filename: Initial filename
:type filename: str
:param request: OGC-type request with specified bounding box, cloud coverage for specific product.
:type request: OgcRequest or GeopediaRequest
:return: Filename with custom url parameters in the name
:rtype: str
"""
if hasattr(request, 'custom_url_params') and request.custom_url_params is not None:
for (param, value) in sorted(request.custom_url_params.items(), key=lambda parameter_item: parameter_item[0].value):
filename = '_'.join([filename, param.value, str(value)]) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
return filename
|
def check_namespace(namespace_id):
"""
Verify that a namespace ID is well-formed
>>> check_namespace(123)
False
>>> check_namespace(None)
False
>>> check_namespace('')
False
>>> check_namespace('abcd')
True
>>> check_namespace('Abcd')
False
>>> check_namespace('a+bcd')
False
>>> check_namespace('.abcd')
False
>>> check_namespace('abcdabcdabcdabcdabcd')
False
>>> check_namespace('abcdabcdabcdabcdabc')
True
"""
if type(namespace_id) not in [str, unicode]:
return False
if not is_namespace_valid(namespace_id):
return False
return True
|
def function[check_namespace, parameter[namespace_id]]:
constant[
Verify that a namespace ID is well-formed
>>> check_namespace(123)
False
>>> check_namespace(None)
False
>>> check_namespace('')
False
>>> check_namespace('abcd')
True
>>> check_namespace('Abcd')
False
>>> check_namespace('a+bcd')
False
>>> check_namespace('.abcd')
False
>>> check_namespace('abcdabcdabcdabcdabcd')
False
>>> check_namespace('abcdabcdabcdabcdabc')
True
]
if compare[call[name[type], parameter[name[namespace_id]]] <ast.NotIn object at 0x7da2590d7190> list[[<ast.Name object at 0x7da20c6aa830>, <ast.Name object at 0x7da20c6aace0>]]] begin[:]
return[constant[False]]
if <ast.UnaryOp object at 0x7da20c6aa6b0> begin[:]
return[constant[False]]
return[constant[True]]
|
keyword[def] identifier[check_namespace] ( identifier[namespace_id] ):
literal[string]
keyword[if] identifier[type] ( identifier[namespace_id] ) keyword[not] keyword[in] [ identifier[str] , identifier[unicode] ]:
keyword[return] keyword[False]
keyword[if] keyword[not] identifier[is_namespace_valid] ( identifier[namespace_id] ):
keyword[return] keyword[False]
keyword[return] keyword[True]
|
def check_namespace(namespace_id):
"""
Verify that a namespace ID is well-formed
>>> check_namespace(123)
False
>>> check_namespace(None)
False
>>> check_namespace('')
False
>>> check_namespace('abcd')
True
>>> check_namespace('Abcd')
False
>>> check_namespace('a+bcd')
False
>>> check_namespace('.abcd')
False
>>> check_namespace('abcdabcdabcdabcdabcd')
False
>>> check_namespace('abcdabcdabcdabcdabc')
True
"""
if type(namespace_id) not in [str, unicode]:
return False # depends on [control=['if'], data=[]]
if not is_namespace_valid(namespace_id):
return False # depends on [control=['if'], data=[]]
return True
|
def query(self, req) -> ResponseQuery:
"""Return the last tx count"""
v = encode_number(self.txCount)
return ResponseQuery(code=CodeTypeOk, value=v, height=self.last_block_height)
|
def function[query, parameter[self, req]]:
constant[Return the last tx count]
variable[v] assign[=] call[name[encode_number], parameter[name[self].txCount]]
return[call[name[ResponseQuery], parameter[]]]
|
keyword[def] identifier[query] ( identifier[self] , identifier[req] )-> identifier[ResponseQuery] :
literal[string]
identifier[v] = identifier[encode_number] ( identifier[self] . identifier[txCount] )
keyword[return] identifier[ResponseQuery] ( identifier[code] = identifier[CodeTypeOk] , identifier[value] = identifier[v] , identifier[height] = identifier[self] . identifier[last_block_height] )
|
def query(self, req) -> ResponseQuery:
"""Return the last tx count"""
v = encode_number(self.txCount)
return ResponseQuery(code=CodeTypeOk, value=v, height=self.last_block_height)
|
def diff(self):
"""
Only report difference in content between two directories
"""
self._copyfiles = False
self._updatefiles = False
self._purge = False
self._creatdirs = False
self._updatefiles = False
self.log('Difference of directory %s from %s\n' %
(self._dir2, self._dir1))
self._diff(self._dir1, self._dir2)
|
def function[diff, parameter[self]]:
constant[
Only report difference in content between two directories
]
name[self]._copyfiles assign[=] constant[False]
name[self]._updatefiles assign[=] constant[False]
name[self]._purge assign[=] constant[False]
name[self]._creatdirs assign[=] constant[False]
name[self]._updatefiles assign[=] constant[False]
call[name[self].log, parameter[binary_operation[constant[Difference of directory %s from %s
] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b255d060>, <ast.Attribute object at 0x7da1b255d990>]]]]]
call[name[self]._diff, parameter[name[self]._dir1, name[self]._dir2]]
|
keyword[def] identifier[diff] ( identifier[self] ):
literal[string]
identifier[self] . identifier[_copyfiles] = keyword[False]
identifier[self] . identifier[_updatefiles] = keyword[False]
identifier[self] . identifier[_purge] = keyword[False]
identifier[self] . identifier[_creatdirs] = keyword[False]
identifier[self] . identifier[_updatefiles] = keyword[False]
identifier[self] . identifier[log] ( literal[string] %
( identifier[self] . identifier[_dir2] , identifier[self] . identifier[_dir1] ))
identifier[self] . identifier[_diff] ( identifier[self] . identifier[_dir1] , identifier[self] . identifier[_dir2] )
|
def diff(self):
"""
Only report difference in content between two directories
"""
self._copyfiles = False
self._updatefiles = False
self._purge = False
self._creatdirs = False
self._updatefiles = False
self.log('Difference of directory %s from %s\n' % (self._dir2, self._dir1))
self._diff(self._dir1, self._dir2)
|
def describe_reserved_db_instances_offerings(ReservedDBInstancesOfferingId=None, DBInstanceClass=None, Duration=None, ProductDescription=None, OfferingType=None, MultiAZ=None, Filters=None, MaxRecords=None, Marker=None):
"""
Lists available reserved DB instance offerings.
See also: AWS API Documentation
Examples
This example lists information for all reserved DB instance offerings for the specified DB instance class, duration, product, offering type, and availability zone settings.
Expected Output:
:example: response = client.describe_reserved_db_instances_offerings(
ReservedDBInstancesOfferingId='string',
DBInstanceClass='string',
Duration='string',
ProductDescription='string',
OfferingType='string',
MultiAZ=True|False,
Filters=[
{
'Name': 'string',
'Values': [
'string',
]
},
],
MaxRecords=123,
Marker='string'
)
:type ReservedDBInstancesOfferingId: string
:param ReservedDBInstancesOfferingId: The offering identifier filter value. Specify this parameter to show only the available offering that matches the specified reservation identifier.
Example: 438012d3-4052-4cc7-b2e3-8d3372e0e706
:type DBInstanceClass: string
:param DBInstanceClass: The DB instance class filter value. Specify this parameter to show only the available offerings matching the specified DB instance class.
:type Duration: string
:param Duration: Duration filter value, specified in years or seconds. Specify this parameter to show only reservations for this duration.
Valid Values: 1 | 3 | 31536000 | 94608000
:type ProductDescription: string
:param ProductDescription: Product description filter value. Specify this parameter to show only the available offerings matching the specified product description.
:type OfferingType: string
:param OfferingType: The offering type filter value. Specify this parameter to show only the available offerings matching the specified offering type.
Valid Values: 'Partial Upfront' | 'All Upfront' | 'No Upfront'
:type MultiAZ: boolean
:param MultiAZ: The Multi-AZ filter value. Specify this parameter to show only the available offerings matching the specified Multi-AZ parameter.
:type Filters: list
:param Filters: This parameter is not currently supported.
(dict) --This type is not currently supported.
Name (string) -- [REQUIRED]This parameter is not currently supported.
Values (list) -- [REQUIRED]This parameter is not currently supported.
(string) --
:type MaxRecords: integer
:param MaxRecords: The maximum number of records to include in the response. If more than the MaxRecords value is available, a pagination token called a marker is included in the response so that the following results can be retrieved.
Default: 100
Constraints: Minimum 20, maximum 100.
:type Marker: string
:param Marker: An optional pagination token provided by a previous request. If this parameter is specified, the response includes only records beyond the marker, up to the value specified by MaxRecords .
:rtype: dict
:return: {
'Marker': 'string',
'ReservedDBInstancesOfferings': [
{
'ReservedDBInstancesOfferingId': 'string',
'DBInstanceClass': 'string',
'Duration': 123,
'FixedPrice': 123.0,
'UsagePrice': 123.0,
'CurrencyCode': 'string',
'ProductDescription': 'string',
'OfferingType': 'string',
'MultiAZ': True|False,
'RecurringCharges': [
{
'RecurringChargeAmount': 123.0,
'RecurringChargeFrequency': 'string'
},
]
},
]
}
"""
pass
|
def function[describe_reserved_db_instances_offerings, parameter[ReservedDBInstancesOfferingId, DBInstanceClass, Duration, ProductDescription, OfferingType, MultiAZ, Filters, MaxRecords, Marker]]:
constant[
Lists available reserved DB instance offerings.
See also: AWS API Documentation
Examples
This example lists information for all reserved DB instance offerings for the specified DB instance class, duration, product, offering type, and availability zone settings.
Expected Output:
:example: response = client.describe_reserved_db_instances_offerings(
ReservedDBInstancesOfferingId='string',
DBInstanceClass='string',
Duration='string',
ProductDescription='string',
OfferingType='string',
MultiAZ=True|False,
Filters=[
{
'Name': 'string',
'Values': [
'string',
]
},
],
MaxRecords=123,
Marker='string'
)
:type ReservedDBInstancesOfferingId: string
:param ReservedDBInstancesOfferingId: The offering identifier filter value. Specify this parameter to show only the available offering that matches the specified reservation identifier.
Example: 438012d3-4052-4cc7-b2e3-8d3372e0e706
:type DBInstanceClass: string
:param DBInstanceClass: The DB instance class filter value. Specify this parameter to show only the available offerings matching the specified DB instance class.
:type Duration: string
:param Duration: Duration filter value, specified in years or seconds. Specify this parameter to show only reservations for this duration.
Valid Values: 1 | 3 | 31536000 | 94608000
:type ProductDescription: string
:param ProductDescription: Product description filter value. Specify this parameter to show only the available offerings matching the specified product description.
:type OfferingType: string
:param OfferingType: The offering type filter value. Specify this parameter to show only the available offerings matching the specified offering type.
Valid Values: 'Partial Upfront' | 'All Upfront' | 'No Upfront'
:type MultiAZ: boolean
:param MultiAZ: The Multi-AZ filter value. Specify this parameter to show only the available offerings matching the specified Multi-AZ parameter.
:type Filters: list
:param Filters: This parameter is not currently supported.
(dict) --This type is not currently supported.
Name (string) -- [REQUIRED]This parameter is not currently supported.
Values (list) -- [REQUIRED]This parameter is not currently supported.
(string) --
:type MaxRecords: integer
:param MaxRecords: The maximum number of records to include in the response. If more than the MaxRecords value is available, a pagination token called a marker is included in the response so that the following results can be retrieved.
Default: 100
Constraints: Minimum 20, maximum 100.
:type Marker: string
:param Marker: An optional pagination token provided by a previous request. If this parameter is specified, the response includes only records beyond the marker, up to the value specified by MaxRecords .
:rtype: dict
:return: {
'Marker': 'string',
'ReservedDBInstancesOfferings': [
{
'ReservedDBInstancesOfferingId': 'string',
'DBInstanceClass': 'string',
'Duration': 123,
'FixedPrice': 123.0,
'UsagePrice': 123.0,
'CurrencyCode': 'string',
'ProductDescription': 'string',
'OfferingType': 'string',
'MultiAZ': True|False,
'RecurringCharges': [
{
'RecurringChargeAmount': 123.0,
'RecurringChargeFrequency': 'string'
},
]
},
]
}
]
pass
|
keyword[def] identifier[describe_reserved_db_instances_offerings] ( identifier[ReservedDBInstancesOfferingId] = keyword[None] , identifier[DBInstanceClass] = keyword[None] , identifier[Duration] = keyword[None] , identifier[ProductDescription] = keyword[None] , identifier[OfferingType] = keyword[None] , identifier[MultiAZ] = keyword[None] , identifier[Filters] = keyword[None] , identifier[MaxRecords] = keyword[None] , identifier[Marker] = keyword[None] ):
literal[string]
keyword[pass]
|
def describe_reserved_db_instances_offerings(ReservedDBInstancesOfferingId=None, DBInstanceClass=None, Duration=None, ProductDescription=None, OfferingType=None, MultiAZ=None, Filters=None, MaxRecords=None, Marker=None):
"""
Lists available reserved DB instance offerings.
See also: AWS API Documentation
Examples
This example lists information for all reserved DB instance offerings for the specified DB instance class, duration, product, offering type, and availability zone settings.
Expected Output:
:example: response = client.describe_reserved_db_instances_offerings(
ReservedDBInstancesOfferingId='string',
DBInstanceClass='string',
Duration='string',
ProductDescription='string',
OfferingType='string',
MultiAZ=True|False,
Filters=[
{
'Name': 'string',
'Values': [
'string',
]
},
],
MaxRecords=123,
Marker='string'
)
:type ReservedDBInstancesOfferingId: string
:param ReservedDBInstancesOfferingId: The offering identifier filter value. Specify this parameter to show only the available offering that matches the specified reservation identifier.
Example: 438012d3-4052-4cc7-b2e3-8d3372e0e706
:type DBInstanceClass: string
:param DBInstanceClass: The DB instance class filter value. Specify this parameter to show only the available offerings matching the specified DB instance class.
:type Duration: string
:param Duration: Duration filter value, specified in years or seconds. Specify this parameter to show only reservations for this duration.
Valid Values: 1 | 3 | 31536000 | 94608000
:type ProductDescription: string
:param ProductDescription: Product description filter value. Specify this parameter to show only the available offerings matching the specified product description.
:type OfferingType: string
:param OfferingType: The offering type filter value. Specify this parameter to show only the available offerings matching the specified offering type.
Valid Values: 'Partial Upfront' | 'All Upfront' | 'No Upfront'
:type MultiAZ: boolean
:param MultiAZ: The Multi-AZ filter value. Specify this parameter to show only the available offerings matching the specified Multi-AZ parameter.
:type Filters: list
:param Filters: This parameter is not currently supported.
(dict) --This type is not currently supported.
Name (string) -- [REQUIRED]This parameter is not currently supported.
Values (list) -- [REQUIRED]This parameter is not currently supported.
(string) --
:type MaxRecords: integer
:param MaxRecords: The maximum number of records to include in the response. If more than the MaxRecords value is available, a pagination token called a marker is included in the response so that the following results can be retrieved.
Default: 100
Constraints: Minimum 20, maximum 100.
:type Marker: string
:param Marker: An optional pagination token provided by a previous request. If this parameter is specified, the response includes only records beyond the marker, up to the value specified by MaxRecords .
:rtype: dict
:return: {
'Marker': 'string',
'ReservedDBInstancesOfferings': [
{
'ReservedDBInstancesOfferingId': 'string',
'DBInstanceClass': 'string',
'Duration': 123,
'FixedPrice': 123.0,
'UsagePrice': 123.0,
'CurrencyCode': 'string',
'ProductDescription': 'string',
'OfferingType': 'string',
'MultiAZ': True|False,
'RecurringCharges': [
{
'RecurringChargeAmount': 123.0,
'RecurringChargeFrequency': 'string'
},
]
},
]
}
"""
pass
|
def random_ucast_ip():
"""
Function to generate a random unicast ip address
:return:
A unicast IP Address
"""
first_octet = str(__random.randrange(1, 224))
def get_other_octetes():
return str(__random.randrange(0, 255))
return '{first_octet}.{second_octet}.{third_octet}.{fourth_octet}'.format(first_octet=first_octet,
second_octet=get_other_octetes(),
third_octet=get_other_octetes(),
fourth_octet=get_other_octetes())
|
def function[random_ucast_ip, parameter[]]:
constant[
Function to generate a random unicast ip address
:return:
A unicast IP Address
]
variable[first_octet] assign[=] call[name[str], parameter[call[name[__random].randrange, parameter[constant[1], constant[224]]]]]
def function[get_other_octetes, parameter[]]:
return[call[name[str], parameter[call[name[__random].randrange, parameter[constant[0], constant[255]]]]]]
return[call[constant[{first_octet}.{second_octet}.{third_octet}.{fourth_octet}].format, parameter[]]]
|
keyword[def] identifier[random_ucast_ip] ():
literal[string]
identifier[first_octet] = identifier[str] ( identifier[__random] . identifier[randrange] ( literal[int] , literal[int] ))
keyword[def] identifier[get_other_octetes] ():
keyword[return] identifier[str] ( identifier[__random] . identifier[randrange] ( literal[int] , literal[int] ))
keyword[return] literal[string] . identifier[format] ( identifier[first_octet] = identifier[first_octet] ,
identifier[second_octet] = identifier[get_other_octetes] (),
identifier[third_octet] = identifier[get_other_octetes] (),
identifier[fourth_octet] = identifier[get_other_octetes] ())
|
def random_ucast_ip():
"""
Function to generate a random unicast ip address
:return:
A unicast IP Address
"""
first_octet = str(__random.randrange(1, 224))
def get_other_octetes():
return str(__random.randrange(0, 255))
return '{first_octet}.{second_octet}.{third_octet}.{fourth_octet}'.format(first_octet=first_octet, second_octet=get_other_octetes(), third_octet=get_other_octetes(), fourth_octet=get_other_octetes())
|
def changed_bit_pos(a, b):
"""
Return the index of the first bit that changed between `a` an `b`.
Return None if there are no changed bits.
"""
c = a ^ b
n = 0
while c > 0:
if c & 1 == 1:
return n
c >>= 1
n += 1
return None
|
def function[changed_bit_pos, parameter[a, b]]:
constant[
Return the index of the first bit that changed between `a` an `b`.
Return None if there are no changed bits.
]
variable[c] assign[=] binary_operation[name[a] <ast.BitXor object at 0x7da2590d6b00> name[b]]
variable[n] assign[=] constant[0]
while compare[name[c] greater[>] constant[0]] begin[:]
if compare[binary_operation[name[c] <ast.BitAnd object at 0x7da2590d6b60> constant[1]] equal[==] constant[1]] begin[:]
return[name[n]]
<ast.AugAssign object at 0x7da1b1c59c00>
<ast.AugAssign object at 0x7da1b1c58580>
return[constant[None]]
|
keyword[def] identifier[changed_bit_pos] ( identifier[a] , identifier[b] ):
literal[string]
identifier[c] = identifier[a] ^ identifier[b]
identifier[n] = literal[int]
keyword[while] identifier[c] > literal[int] :
keyword[if] identifier[c] & literal[int] == literal[int] :
keyword[return] identifier[n]
identifier[c] >>= literal[int]
identifier[n] += literal[int]
keyword[return] keyword[None]
|
def changed_bit_pos(a, b):
"""
Return the index of the first bit that changed between `a` an `b`.
Return None if there are no changed bits.
"""
c = a ^ b
n = 0
while c > 0:
if c & 1 == 1:
return n # depends on [control=['if'], data=[]]
c >>= 1
n += 1 # depends on [control=['while'], data=['c']]
return None
|
def check_permissions(self, request):
""" Retrieves the controlled object and perform the permissions check. """
obj = (
hasattr(self, 'get_controlled_object') and self.get_controlled_object() or
hasattr(self, 'get_object') and self.get_object() or getattr(self, 'object', None)
)
user = request.user
# Get the permissions to check
perms = self.get_required_permissions(self)
# Check permissions
has_permissions = self.perform_permissions_check(user, obj, perms)
if not has_permissions and not user.is_authenticated:
return HttpResponseRedirect('{}?{}={}'.format(
resolve_url(self.login_url),
self.redirect_field_name,
urlquote(request.get_full_path())
))
elif not has_permissions:
raise PermissionDenied
|
def function[check_permissions, parameter[self, request]]:
constant[ Retrieves the controlled object and perform the permissions check. ]
variable[obj] assign[=] <ast.BoolOp object at 0x7da1b11786a0>
variable[user] assign[=] name[request].user
variable[perms] assign[=] call[name[self].get_required_permissions, parameter[name[self]]]
variable[has_permissions] assign[=] call[name[self].perform_permissions_check, parameter[name[user], name[obj], name[perms]]]
if <ast.BoolOp object at 0x7da1b1179ea0> begin[:]
return[call[name[HttpResponseRedirect], parameter[call[constant[{}?{}={}].format, parameter[call[name[resolve_url], parameter[name[self].login_url]], name[self].redirect_field_name, call[name[urlquote], parameter[call[name[request].get_full_path, parameter[]]]]]]]]]
|
keyword[def] identifier[check_permissions] ( identifier[self] , identifier[request] ):
literal[string]
identifier[obj] =(
identifier[hasattr] ( identifier[self] , literal[string] ) keyword[and] identifier[self] . identifier[get_controlled_object] () keyword[or]
identifier[hasattr] ( identifier[self] , literal[string] ) keyword[and] identifier[self] . identifier[get_object] () keyword[or] identifier[getattr] ( identifier[self] , literal[string] , keyword[None] )
)
identifier[user] = identifier[request] . identifier[user]
identifier[perms] = identifier[self] . identifier[get_required_permissions] ( identifier[self] )
identifier[has_permissions] = identifier[self] . identifier[perform_permissions_check] ( identifier[user] , identifier[obj] , identifier[perms] )
keyword[if] keyword[not] identifier[has_permissions] keyword[and] keyword[not] identifier[user] . identifier[is_authenticated] :
keyword[return] identifier[HttpResponseRedirect] ( literal[string] . identifier[format] (
identifier[resolve_url] ( identifier[self] . identifier[login_url] ),
identifier[self] . identifier[redirect_field_name] ,
identifier[urlquote] ( identifier[request] . identifier[get_full_path] ())
))
keyword[elif] keyword[not] identifier[has_permissions] :
keyword[raise] identifier[PermissionDenied]
|
def check_permissions(self, request):
""" Retrieves the controlled object and perform the permissions check. """
obj = hasattr(self, 'get_controlled_object') and self.get_controlled_object() or (hasattr(self, 'get_object') and self.get_object()) or getattr(self, 'object', None)
user = request.user
# Get the permissions to check
perms = self.get_required_permissions(self)
# Check permissions
has_permissions = self.perform_permissions_check(user, obj, perms)
if not has_permissions and (not user.is_authenticated):
return HttpResponseRedirect('{}?{}={}'.format(resolve_url(self.login_url), self.redirect_field_name, urlquote(request.get_full_path()))) # depends on [control=['if'], data=[]]
elif not has_permissions:
raise PermissionDenied # depends on [control=['if'], data=[]]
|
def get_object(self):
""" Returns the OrganizationUser object based on the primary keys for both
the organization and the organization user.
"""
if hasattr(self, "organization_user"):
return self.organization_user
organization_pk = self.kwargs.get("organization_pk", None)
user_pk = self.kwargs.get("user_pk", None)
self.organization_user = get_object_or_404(
self.get_user_model().objects.select_related(),
user__pk=user_pk,
organization__pk=organization_pk,
)
return self.organization_user
|
def function[get_object, parameter[self]]:
constant[ Returns the OrganizationUser object based on the primary keys for both
the organization and the organization user.
]
if call[name[hasattr], parameter[name[self], constant[organization_user]]] begin[:]
return[name[self].organization_user]
variable[organization_pk] assign[=] call[name[self].kwargs.get, parameter[constant[organization_pk], constant[None]]]
variable[user_pk] assign[=] call[name[self].kwargs.get, parameter[constant[user_pk], constant[None]]]
name[self].organization_user assign[=] call[name[get_object_or_404], parameter[call[call[name[self].get_user_model, parameter[]].objects.select_related, parameter[]]]]
return[name[self].organization_user]
|
keyword[def] identifier[get_object] ( identifier[self] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ):
keyword[return] identifier[self] . identifier[organization_user]
identifier[organization_pk] = identifier[self] . identifier[kwargs] . identifier[get] ( literal[string] , keyword[None] )
identifier[user_pk] = identifier[self] . identifier[kwargs] . identifier[get] ( literal[string] , keyword[None] )
identifier[self] . identifier[organization_user] = identifier[get_object_or_404] (
identifier[self] . identifier[get_user_model] (). identifier[objects] . identifier[select_related] (),
identifier[user__pk] = identifier[user_pk] ,
identifier[organization__pk] = identifier[organization_pk] ,
)
keyword[return] identifier[self] . identifier[organization_user]
|
def get_object(self):
""" Returns the OrganizationUser object based on the primary keys for both
the organization and the organization user.
"""
if hasattr(self, 'organization_user'):
return self.organization_user # depends on [control=['if'], data=[]]
organization_pk = self.kwargs.get('organization_pk', None)
user_pk = self.kwargs.get('user_pk', None)
self.organization_user = get_object_or_404(self.get_user_model().objects.select_related(), user__pk=user_pk, organization__pk=organization_pk)
return self.organization_user
|
def __find_reports(self, report_type, usage_page, usage_id = 0):
"Find input report referencing HID usage control/data item"
if not self.is_opened():
raise HIDError("Device must be opened")
#
results = list()
if usage_page:
for report_id in self.report_set.get( report_type, set() ):
#build report object, gathering usages matching report_id
report_obj = HidReport(self, report_type, report_id)
if get_full_usage_id(usage_page, usage_id) in report_obj:
results.append( report_obj )
else:
#all (any one)
for report_id in self.report_set.get(report_type, set()):
report_obj = HidReport(self, report_type, report_id)
results.append( report_obj )
return results
|
def function[__find_reports, parameter[self, report_type, usage_page, usage_id]]:
constant[Find input report referencing HID usage control/data item]
if <ast.UnaryOp object at 0x7da1b065b040> begin[:]
<ast.Raise object at 0x7da1b0659c90>
variable[results] assign[=] call[name[list], parameter[]]
if name[usage_page] begin[:]
for taget[name[report_id]] in starred[call[name[self].report_set.get, parameter[name[report_type], call[name[set], parameter[]]]]] begin[:]
variable[report_obj] assign[=] call[name[HidReport], parameter[name[self], name[report_type], name[report_id]]]
if compare[call[name[get_full_usage_id], parameter[name[usage_page], name[usage_id]]] in name[report_obj]] begin[:]
call[name[results].append, parameter[name[report_obj]]]
return[name[results]]
|
keyword[def] identifier[__find_reports] ( identifier[self] , identifier[report_type] , identifier[usage_page] , identifier[usage_id] = literal[int] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[is_opened] ():
keyword[raise] identifier[HIDError] ( literal[string] )
identifier[results] = identifier[list] ()
keyword[if] identifier[usage_page] :
keyword[for] identifier[report_id] keyword[in] identifier[self] . identifier[report_set] . identifier[get] ( identifier[report_type] , identifier[set] ()):
identifier[report_obj] = identifier[HidReport] ( identifier[self] , identifier[report_type] , identifier[report_id] )
keyword[if] identifier[get_full_usage_id] ( identifier[usage_page] , identifier[usage_id] ) keyword[in] identifier[report_obj] :
identifier[results] . identifier[append] ( identifier[report_obj] )
keyword[else] :
keyword[for] identifier[report_id] keyword[in] identifier[self] . identifier[report_set] . identifier[get] ( identifier[report_type] , identifier[set] ()):
identifier[report_obj] = identifier[HidReport] ( identifier[self] , identifier[report_type] , identifier[report_id] )
identifier[results] . identifier[append] ( identifier[report_obj] )
keyword[return] identifier[results]
|
def __find_reports(self, report_type, usage_page, usage_id=0):
"""Find input report referencing HID usage control/data item"""
if not self.is_opened():
raise HIDError('Device must be opened') # depends on [control=['if'], data=[]] #
results = list()
if usage_page:
for report_id in self.report_set.get(report_type, set()): #build report object, gathering usages matching report_id
report_obj = HidReport(self, report_type, report_id)
if get_full_usage_id(usage_page, usage_id) in report_obj:
results.append(report_obj) # depends on [control=['if'], data=['report_obj']] # depends on [control=['for'], data=['report_id']] # depends on [control=['if'], data=[]]
else: #all (any one)
for report_id in self.report_set.get(report_type, set()):
report_obj = HidReport(self, report_type, report_id)
results.append(report_obj) # depends on [control=['for'], data=['report_id']]
return results
|
def get_builder_toplevel(self, builder):
"""Get the toplevel widget from a Gtk.Builder file.
The main view implementation first searches for the widget named as
self.toplevel_name (which defaults to "main". If this is missing, or not
a Gtk.Window, the first toplevel window found in the Gtk.Builder is
used.
"""
toplevel = builder.get_object(self.toplevel_name)
if not GObject.type_is_a(toplevel, Gtk.Window):
toplevel = None
if toplevel is None:
toplevel = get_first_builder_window(builder)
return toplevel
|
def function[get_builder_toplevel, parameter[self, builder]]:
constant[Get the toplevel widget from a Gtk.Builder file.
The main view implementation first searches for the widget named as
self.toplevel_name (which defaults to "main". If this is missing, or not
a Gtk.Window, the first toplevel window found in the Gtk.Builder is
used.
]
variable[toplevel] assign[=] call[name[builder].get_object, parameter[name[self].toplevel_name]]
if <ast.UnaryOp object at 0x7da1b1077970> begin[:]
variable[toplevel] assign[=] constant[None]
if compare[name[toplevel] is constant[None]] begin[:]
variable[toplevel] assign[=] call[name[get_first_builder_window], parameter[name[builder]]]
return[name[toplevel]]
|
keyword[def] identifier[get_builder_toplevel] ( identifier[self] , identifier[builder] ):
literal[string]
identifier[toplevel] = identifier[builder] . identifier[get_object] ( identifier[self] . identifier[toplevel_name] )
keyword[if] keyword[not] identifier[GObject] . identifier[type_is_a] ( identifier[toplevel] , identifier[Gtk] . identifier[Window] ):
identifier[toplevel] = keyword[None]
keyword[if] identifier[toplevel] keyword[is] keyword[None] :
identifier[toplevel] = identifier[get_first_builder_window] ( identifier[builder] )
keyword[return] identifier[toplevel]
|
def get_builder_toplevel(self, builder):
"""Get the toplevel widget from a Gtk.Builder file.
The main view implementation first searches for the widget named as
self.toplevel_name (which defaults to "main". If this is missing, or not
a Gtk.Window, the first toplevel window found in the Gtk.Builder is
used.
"""
toplevel = builder.get_object(self.toplevel_name)
if not GObject.type_is_a(toplevel, Gtk.Window):
toplevel = None # depends on [control=['if'], data=[]]
if toplevel is None:
toplevel = get_first_builder_window(builder) # depends on [control=['if'], data=['toplevel']]
return toplevel
|
def getsockopt(self, level, optname, *args, **kwargs):
"""get the value of a given socket option
the values for ``level`` and ``optname`` will usually come from
constants in the standard library ``socket`` module. consult the unix
manpage ``getsockopt(2)`` for more information.
:param level: the level of the requested socket option
:type level: int
:param optname: the specific socket option requested
:type optname: int
:param buflen:
the length of the buffer to use to collect the raw value of the
socket option. if provided, the buffer is returned as a string and
it is not parsed.
:type buflen: int
:returns: a string of the socket option's value
"""
return self._sock.getsockopt(level, optname, *args, **kwargs)
|
def function[getsockopt, parameter[self, level, optname]]:
constant[get the value of a given socket option
the values for ``level`` and ``optname`` will usually come from
constants in the standard library ``socket`` module. consult the unix
manpage ``getsockopt(2)`` for more information.
:param level: the level of the requested socket option
:type level: int
:param optname: the specific socket option requested
:type optname: int
:param buflen:
the length of the buffer to use to collect the raw value of the
socket option. if provided, the buffer is returned as a string and
it is not parsed.
:type buflen: int
:returns: a string of the socket option's value
]
return[call[name[self]._sock.getsockopt, parameter[name[level], name[optname], <ast.Starred object at 0x7da18bc70490>]]]
|
keyword[def] identifier[getsockopt] ( identifier[self] , identifier[level] , identifier[optname] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[return] identifier[self] . identifier[_sock] . identifier[getsockopt] ( identifier[level] , identifier[optname] ,* identifier[args] ,** identifier[kwargs] )
|
def getsockopt(self, level, optname, *args, **kwargs):
"""get the value of a given socket option
the values for ``level`` and ``optname`` will usually come from
constants in the standard library ``socket`` module. consult the unix
manpage ``getsockopt(2)`` for more information.
:param level: the level of the requested socket option
:type level: int
:param optname: the specific socket option requested
:type optname: int
:param buflen:
the length of the buffer to use to collect the raw value of the
socket option. if provided, the buffer is returned as a string and
it is not parsed.
:type buflen: int
:returns: a string of the socket option's value
"""
return self._sock.getsockopt(level, optname, *args, **kwargs)
|
async def restrict_chat_member(self, chat_id: typing.Union[base.Integer, base.String],
user_id: base.Integer,
until_date: typing.Union[base.Integer, None] = None,
can_send_messages: typing.Union[base.Boolean, None] = None,
can_send_media_messages: typing.Union[base.Boolean, None] = None,
can_send_other_messages: typing.Union[base.Boolean, None] = None,
can_add_web_page_previews: typing.Union[base.Boolean, None] = None) -> base.Boolean:
"""
Use this method to restrict a user in a supergroup.
The bot must be an administrator in the supergroup for this to work and must have the appropriate admin rights.
Pass True for all boolean parameters to lift restrictions from a user.
Source: https://core.telegram.org/bots/api#restrictchatmember
:param chat_id: Unique identifier for the target chat or username of the target supergroup
:type chat_id: :obj:`typing.Union[base.Integer, base.String]`
:param user_id: Unique identifier of the target user
:type user_id: :obj:`base.Integer`
:param until_date: Date when restrictions will be lifted for the user, unix time
:type until_date: :obj:`typing.Union[base.Integer, None]`
:param can_send_messages: Pass True, if the user can send text messages, contacts, locations and venues
:type can_send_messages: :obj:`typing.Union[base.Boolean, None]`
:param can_send_media_messages: Pass True, if the user can send audios, documents, photos, videos,
video notes and voice notes, implies can_send_messages
:type can_send_media_messages: :obj:`typing.Union[base.Boolean, None]`
:param can_send_other_messages: Pass True, if the user can send animations, games, stickers and
use inline bots, implies can_send_media_messages
:type can_send_other_messages: :obj:`typing.Union[base.Boolean, None]`
:param can_add_web_page_previews: Pass True, if the user may add web page previews to their messages,
implies can_send_media_messages
:type can_add_web_page_previews: :obj:`typing.Union[base.Boolean, None]`
:return: Returns True on success
:rtype: :obj:`base.Boolean`
"""
until_date = prepare_arg(until_date)
payload = generate_payload(**locals())
result = await self.request(api.Methods.RESTRICT_CHAT_MEMBER, payload)
return result
|
<ast.AsyncFunctionDef object at 0x7da1b1736cb0>
|
keyword[async] keyword[def] identifier[restrict_chat_member] ( identifier[self] , identifier[chat_id] : identifier[typing] . identifier[Union] [ identifier[base] . identifier[Integer] , identifier[base] . identifier[String] ],
identifier[user_id] : identifier[base] . identifier[Integer] ,
identifier[until_date] : identifier[typing] . identifier[Union] [ identifier[base] . identifier[Integer] , keyword[None] ]= keyword[None] ,
identifier[can_send_messages] : identifier[typing] . identifier[Union] [ identifier[base] . identifier[Boolean] , keyword[None] ]= keyword[None] ,
identifier[can_send_media_messages] : identifier[typing] . identifier[Union] [ identifier[base] . identifier[Boolean] , keyword[None] ]= keyword[None] ,
identifier[can_send_other_messages] : identifier[typing] . identifier[Union] [ identifier[base] . identifier[Boolean] , keyword[None] ]= keyword[None] ,
identifier[can_add_web_page_previews] : identifier[typing] . identifier[Union] [ identifier[base] . identifier[Boolean] , keyword[None] ]= keyword[None] )-> identifier[base] . identifier[Boolean] :
literal[string]
identifier[until_date] = identifier[prepare_arg] ( identifier[until_date] )
identifier[payload] = identifier[generate_payload] (** identifier[locals] ())
identifier[result] = keyword[await] identifier[self] . identifier[request] ( identifier[api] . identifier[Methods] . identifier[RESTRICT_CHAT_MEMBER] , identifier[payload] )
keyword[return] identifier[result]
|
async def restrict_chat_member(self, chat_id: typing.Union[base.Integer, base.String], user_id: base.Integer, until_date: typing.Union[base.Integer, None]=None, can_send_messages: typing.Union[base.Boolean, None]=None, can_send_media_messages: typing.Union[base.Boolean, None]=None, can_send_other_messages: typing.Union[base.Boolean, None]=None, can_add_web_page_previews: typing.Union[base.Boolean, None]=None) -> base.Boolean:
"""
Use this method to restrict a user in a supergroup.
The bot must be an administrator in the supergroup for this to work and must have the appropriate admin rights.
Pass True for all boolean parameters to lift restrictions from a user.
Source: https://core.telegram.org/bots/api#restrictchatmember
:param chat_id: Unique identifier for the target chat or username of the target supergroup
:type chat_id: :obj:`typing.Union[base.Integer, base.String]`
:param user_id: Unique identifier of the target user
:type user_id: :obj:`base.Integer`
:param until_date: Date when restrictions will be lifted for the user, unix time
:type until_date: :obj:`typing.Union[base.Integer, None]`
:param can_send_messages: Pass True, if the user can send text messages, contacts, locations and venues
:type can_send_messages: :obj:`typing.Union[base.Boolean, None]`
:param can_send_media_messages: Pass True, if the user can send audios, documents, photos, videos,
video notes and voice notes, implies can_send_messages
:type can_send_media_messages: :obj:`typing.Union[base.Boolean, None]`
:param can_send_other_messages: Pass True, if the user can send animations, games, stickers and
use inline bots, implies can_send_media_messages
:type can_send_other_messages: :obj:`typing.Union[base.Boolean, None]`
:param can_add_web_page_previews: Pass True, if the user may add web page previews to their messages,
implies can_send_media_messages
:type can_add_web_page_previews: :obj:`typing.Union[base.Boolean, None]`
:return: Returns True on success
:rtype: :obj:`base.Boolean`
"""
until_date = prepare_arg(until_date)
payload = generate_payload(**locals())
result = await self.request(api.Methods.RESTRICT_CHAT_MEMBER, payload)
return result
|
def merge_coords(objs, compat='minimal', join='outer', priority_arg=None,
indexes=None):
"""Merge coordinate variables.
See merge_core below for argument descriptions. This works similarly to
merge_core, except everything we don't worry about whether variables are
coordinates or not.
"""
_assert_compat_valid(compat)
coerced = coerce_pandas_values(objs)
aligned = deep_align(coerced, join=join, copy=False, indexes=indexes)
expanded = expand_variable_dicts(aligned)
priority_vars = _get_priority_vars(aligned, priority_arg, compat=compat)
variables = merge_variables(expanded, priority_vars, compat=compat)
assert_unique_multiindex_level_names(variables)
return variables
|
def function[merge_coords, parameter[objs, compat, join, priority_arg, indexes]]:
constant[Merge coordinate variables.
See merge_core below for argument descriptions. This works similarly to
merge_core, except everything we don't worry about whether variables are
coordinates or not.
]
call[name[_assert_compat_valid], parameter[name[compat]]]
variable[coerced] assign[=] call[name[coerce_pandas_values], parameter[name[objs]]]
variable[aligned] assign[=] call[name[deep_align], parameter[name[coerced]]]
variable[expanded] assign[=] call[name[expand_variable_dicts], parameter[name[aligned]]]
variable[priority_vars] assign[=] call[name[_get_priority_vars], parameter[name[aligned], name[priority_arg]]]
variable[variables] assign[=] call[name[merge_variables], parameter[name[expanded], name[priority_vars]]]
call[name[assert_unique_multiindex_level_names], parameter[name[variables]]]
return[name[variables]]
|
keyword[def] identifier[merge_coords] ( identifier[objs] , identifier[compat] = literal[string] , identifier[join] = literal[string] , identifier[priority_arg] = keyword[None] ,
identifier[indexes] = keyword[None] ):
literal[string]
identifier[_assert_compat_valid] ( identifier[compat] )
identifier[coerced] = identifier[coerce_pandas_values] ( identifier[objs] )
identifier[aligned] = identifier[deep_align] ( identifier[coerced] , identifier[join] = identifier[join] , identifier[copy] = keyword[False] , identifier[indexes] = identifier[indexes] )
identifier[expanded] = identifier[expand_variable_dicts] ( identifier[aligned] )
identifier[priority_vars] = identifier[_get_priority_vars] ( identifier[aligned] , identifier[priority_arg] , identifier[compat] = identifier[compat] )
identifier[variables] = identifier[merge_variables] ( identifier[expanded] , identifier[priority_vars] , identifier[compat] = identifier[compat] )
identifier[assert_unique_multiindex_level_names] ( identifier[variables] )
keyword[return] identifier[variables]
|
def merge_coords(objs, compat='minimal', join='outer', priority_arg=None, indexes=None):
"""Merge coordinate variables.
See merge_core below for argument descriptions. This works similarly to
merge_core, except everything we don't worry about whether variables are
coordinates or not.
"""
_assert_compat_valid(compat)
coerced = coerce_pandas_values(objs)
aligned = deep_align(coerced, join=join, copy=False, indexes=indexes)
expanded = expand_variable_dicts(aligned)
priority_vars = _get_priority_vars(aligned, priority_arg, compat=compat)
variables = merge_variables(expanded, priority_vars, compat=compat)
assert_unique_multiindex_level_names(variables)
return variables
|
def brancher( # noqa: E302
self, branches=None, all_branches=False, tags=None, all_tags=False
):
"""Generator that iterates over specified revisions.
Args:
branches (list): a list of branches to iterate over.
all_branches (bool): iterate over all available branches.
tags (list): a list of tags to iterate over.
all_tags (bool): iterate over all available tags.
Yields:
str: the display name for the currently selected tree, it could be:
- a git revision identifier
- empty string it there is no branches to iterate over
- "Working Tree" if there are uncommited changes in the SCM repo
"""
if not any([branches, all_branches, tags, all_tags]):
yield ""
return
saved_tree = self.tree
revs = []
scm = self.scm
if self.scm.is_dirty():
from dvc.scm.tree import WorkingTree
self.tree = WorkingTree(self.root_dir)
yield "Working Tree"
if all_branches:
branches = scm.list_branches()
if all_tags:
tags = scm.list_tags()
if branches is None:
revs.extend([scm.active_branch()])
else:
revs.extend(branches)
if tags is not None:
revs.extend(tags)
# NOTE: it might be a good idea to wrap this loop in try/finally block
# to don't leave the tree on some unexpected branch after the
# `brancher()`, but this could cause problems on exception handling
# code which might expect the tree on which exception was raised to
# stay in place. This behavior is a subject to change.
for rev in revs:
self.tree = scm.get_tree(rev)
yield rev
self.tree = saved_tree
|
def function[brancher, parameter[self, branches, all_branches, tags, all_tags]]:
constant[Generator that iterates over specified revisions.
Args:
branches (list): a list of branches to iterate over.
all_branches (bool): iterate over all available branches.
tags (list): a list of tags to iterate over.
all_tags (bool): iterate over all available tags.
Yields:
str: the display name for the currently selected tree, it could be:
- a git revision identifier
- empty string it there is no branches to iterate over
- "Working Tree" if there are uncommited changes in the SCM repo
]
if <ast.UnaryOp object at 0x7da1b1f19750> begin[:]
<ast.Yield object at 0x7da1b1f1bfd0>
return[None]
variable[saved_tree] assign[=] name[self].tree
variable[revs] assign[=] list[[]]
variable[scm] assign[=] name[self].scm
if call[name[self].scm.is_dirty, parameter[]] begin[:]
from relative_module[dvc.scm.tree] import module[WorkingTree]
name[self].tree assign[=] call[name[WorkingTree], parameter[name[self].root_dir]]
<ast.Yield object at 0x7da1b1f19600>
if name[all_branches] begin[:]
variable[branches] assign[=] call[name[scm].list_branches, parameter[]]
if name[all_tags] begin[:]
variable[tags] assign[=] call[name[scm].list_tags, parameter[]]
if compare[name[branches] is constant[None]] begin[:]
call[name[revs].extend, parameter[list[[<ast.Call object at 0x7da1b1f1af50>]]]]
if compare[name[tags] is_not constant[None]] begin[:]
call[name[revs].extend, parameter[name[tags]]]
for taget[name[rev]] in starred[name[revs]] begin[:]
name[self].tree assign[=] call[name[scm].get_tree, parameter[name[rev]]]
<ast.Yield object at 0x7da20c6aa0e0>
name[self].tree assign[=] name[saved_tree]
|
keyword[def] identifier[brancher] (
identifier[self] , identifier[branches] = keyword[None] , identifier[all_branches] = keyword[False] , identifier[tags] = keyword[None] , identifier[all_tags] = keyword[False]
):
literal[string]
keyword[if] keyword[not] identifier[any] ([ identifier[branches] , identifier[all_branches] , identifier[tags] , identifier[all_tags] ]):
keyword[yield] literal[string]
keyword[return]
identifier[saved_tree] = identifier[self] . identifier[tree]
identifier[revs] =[]
identifier[scm] = identifier[self] . identifier[scm]
keyword[if] identifier[self] . identifier[scm] . identifier[is_dirty] ():
keyword[from] identifier[dvc] . identifier[scm] . identifier[tree] keyword[import] identifier[WorkingTree]
identifier[self] . identifier[tree] = identifier[WorkingTree] ( identifier[self] . identifier[root_dir] )
keyword[yield] literal[string]
keyword[if] identifier[all_branches] :
identifier[branches] = identifier[scm] . identifier[list_branches] ()
keyword[if] identifier[all_tags] :
identifier[tags] = identifier[scm] . identifier[list_tags] ()
keyword[if] identifier[branches] keyword[is] keyword[None] :
identifier[revs] . identifier[extend] ([ identifier[scm] . identifier[active_branch] ()])
keyword[else] :
identifier[revs] . identifier[extend] ( identifier[branches] )
keyword[if] identifier[tags] keyword[is] keyword[not] keyword[None] :
identifier[revs] . identifier[extend] ( identifier[tags] )
keyword[for] identifier[rev] keyword[in] identifier[revs] :
identifier[self] . identifier[tree] = identifier[scm] . identifier[get_tree] ( identifier[rev] )
keyword[yield] identifier[rev]
identifier[self] . identifier[tree] = identifier[saved_tree]
|
def brancher(self, branches=None, all_branches=False, tags=None, all_tags=False): # noqa: E302
'Generator that iterates over specified revisions.\n\n Args:\n branches (list): a list of branches to iterate over.\n all_branches (bool): iterate over all available branches.\n tags (list): a list of tags to iterate over.\n all_tags (bool): iterate over all available tags.\n\n Yields:\n str: the display name for the currently selected tree, it could be:\n - a git revision identifier\n - empty string it there is no branches to iterate over\n - "Working Tree" if there are uncommited changes in the SCM repo\n '
if not any([branches, all_branches, tags, all_tags]):
yield ''
return # depends on [control=['if'], data=[]]
saved_tree = self.tree
revs = []
scm = self.scm
if self.scm.is_dirty():
from dvc.scm.tree import WorkingTree
self.tree = WorkingTree(self.root_dir)
yield 'Working Tree' # depends on [control=['if'], data=[]]
if all_branches:
branches = scm.list_branches() # depends on [control=['if'], data=[]]
if all_tags:
tags = scm.list_tags() # depends on [control=['if'], data=[]]
if branches is None:
revs.extend([scm.active_branch()]) # depends on [control=['if'], data=[]]
else:
revs.extend(branches)
if tags is not None:
revs.extend(tags) # depends on [control=['if'], data=['tags']]
# NOTE: it might be a good idea to wrap this loop in try/finally block
# to don't leave the tree on some unexpected branch after the
# `brancher()`, but this could cause problems on exception handling
# code which might expect the tree on which exception was raised to
# stay in place. This behavior is a subject to change.
for rev in revs:
self.tree = scm.get_tree(rev)
yield rev # depends on [control=['for'], data=['rev']]
self.tree = saved_tree
|
def otp(ctx, access_code):
"""
Manage OTP Application.
The YubiKey provides two keyboard-based slots which can each be configured
with a credential. Several credential types are supported.
A slot configuration may be write-protected with an access code. This
prevents the configuration to be overwritten without the access code
provided. Mode switching the YubiKey is not possible when a slot is
configured with an access code.
Examples:
\b
Swap the configurations between the two slots:
$ ykman otp swap
\b
Program a random challenge-response credential to slot 2:
$ ykman otp chalresp --generate 2
\b
Program a Yubico OTP credential to slot 2, using the serial as public id:
$ ykman otp yubiotp 1 --serial-public-id
\b
Program a random 38 characters long static password to slot 2:
$ ykman otp static --generate 2 --length 38
"""
ctx.obj['controller'] = OtpController(ctx.obj['dev'].driver)
if access_code is not None:
if access_code == '':
access_code = click.prompt(
'Enter access code', show_default=False, err=True)
try:
access_code = parse_access_code_hex(access_code)
except Exception as e:
ctx.fail('Failed to parse access code: ' + str(e))
ctx.obj['controller'].access_code = access_code
|
def function[otp, parameter[ctx, access_code]]:
constant[
Manage OTP Application.
The YubiKey provides two keyboard-based slots which can each be configured
with a credential. Several credential types are supported.
A slot configuration may be write-protected with an access code. This
prevents the configuration to be overwritten without the access code
provided. Mode switching the YubiKey is not possible when a slot is
configured with an access code.
Examples:
Swap the configurations between the two slots:
$ ykman otp swap
Program a random challenge-response credential to slot 2:
$ ykman otp chalresp --generate 2
Program a Yubico OTP credential to slot 2, using the serial as public id:
$ ykman otp yubiotp 1 --serial-public-id
Program a random 38 characters long static password to slot 2:
$ ykman otp static --generate 2 --length 38
]
call[name[ctx].obj][constant[controller]] assign[=] call[name[OtpController], parameter[call[name[ctx].obj][constant[dev]].driver]]
if compare[name[access_code] is_not constant[None]] begin[:]
if compare[name[access_code] equal[==] constant[]] begin[:]
variable[access_code] assign[=] call[name[click].prompt, parameter[constant[Enter access code]]]
<ast.Try object at 0x7da1b23440a0>
call[name[ctx].obj][constant[controller]].access_code assign[=] name[access_code]
|
keyword[def] identifier[otp] ( identifier[ctx] , identifier[access_code] ):
literal[string]
identifier[ctx] . identifier[obj] [ literal[string] ]= identifier[OtpController] ( identifier[ctx] . identifier[obj] [ literal[string] ]. identifier[driver] )
keyword[if] identifier[access_code] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[access_code] == literal[string] :
identifier[access_code] = identifier[click] . identifier[prompt] (
literal[string] , identifier[show_default] = keyword[False] , identifier[err] = keyword[True] )
keyword[try] :
identifier[access_code] = identifier[parse_access_code_hex] ( identifier[access_code] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[ctx] . identifier[fail] ( literal[string] + identifier[str] ( identifier[e] ))
identifier[ctx] . identifier[obj] [ literal[string] ]. identifier[access_code] = identifier[access_code]
|
def otp(ctx, access_code):
"""
Manage OTP Application.
The YubiKey provides two keyboard-based slots which can each be configured
with a credential. Several credential types are supported.
A slot configuration may be write-protected with an access code. This
prevents the configuration to be overwritten without the access code
provided. Mode switching the YubiKey is not possible when a slot is
configured with an access code.
Examples:
\x08
Swap the configurations between the two slots:
$ ykman otp swap
\x08
Program a random challenge-response credential to slot 2:
$ ykman otp chalresp --generate 2
\x08
Program a Yubico OTP credential to slot 2, using the serial as public id:
$ ykman otp yubiotp 1 --serial-public-id
\x08
Program a random 38 characters long static password to slot 2:
$ ykman otp static --generate 2 --length 38
"""
ctx.obj['controller'] = OtpController(ctx.obj['dev'].driver)
if access_code is not None:
if access_code == '':
access_code = click.prompt('Enter access code', show_default=False, err=True) # depends on [control=['if'], data=['access_code']]
try:
access_code = parse_access_code_hex(access_code) # depends on [control=['try'], data=[]]
except Exception as e:
ctx.fail('Failed to parse access code: ' + str(e)) # depends on [control=['except'], data=['e']] # depends on [control=['if'], data=['access_code']]
ctx.obj['controller'].access_code = access_code
|
def get_manhole_factory(namespace, **passwords):
"""Get a Manhole Factory
"""
realm = manhole_ssh.TerminalRealm()
realm.chainedProtocolFactory.protocolFactory = (
lambda _: EnhancedColoredManhole(namespace)
)
p = portal.Portal(realm)
p.registerChecker(
checkers.InMemoryUsernamePasswordDatabaseDontUse(**passwords)
)
return manhole_ssh.ConchFactory(p)
|
def function[get_manhole_factory, parameter[namespace]]:
constant[Get a Manhole Factory
]
variable[realm] assign[=] call[name[manhole_ssh].TerminalRealm, parameter[]]
name[realm].chainedProtocolFactory.protocolFactory assign[=] <ast.Lambda object at 0x7da18f722320>
variable[p] assign[=] call[name[portal].Portal, parameter[name[realm]]]
call[name[p].registerChecker, parameter[call[name[checkers].InMemoryUsernamePasswordDatabaseDontUse, parameter[]]]]
return[call[name[manhole_ssh].ConchFactory, parameter[name[p]]]]
|
keyword[def] identifier[get_manhole_factory] ( identifier[namespace] ,** identifier[passwords] ):
literal[string]
identifier[realm] = identifier[manhole_ssh] . identifier[TerminalRealm] ()
identifier[realm] . identifier[chainedProtocolFactory] . identifier[protocolFactory] =(
keyword[lambda] identifier[_] : identifier[EnhancedColoredManhole] ( identifier[namespace] )
)
identifier[p] = identifier[portal] . identifier[Portal] ( identifier[realm] )
identifier[p] . identifier[registerChecker] (
identifier[checkers] . identifier[InMemoryUsernamePasswordDatabaseDontUse] (** identifier[passwords] )
)
keyword[return] identifier[manhole_ssh] . identifier[ConchFactory] ( identifier[p] )
|
def get_manhole_factory(namespace, **passwords):
"""Get a Manhole Factory
"""
realm = manhole_ssh.TerminalRealm()
realm.chainedProtocolFactory.protocolFactory = lambda _: EnhancedColoredManhole(namespace)
p = portal.Portal(realm)
p.registerChecker(checkers.InMemoryUsernamePasswordDatabaseDontUse(**passwords))
return manhole_ssh.ConchFactory(p)
|
def close(self, using=None, **kwargs):
"""
Closes the index in elasticsearch.
Any additional keyword arguments will be passed to
``Elasticsearch.indices.close`` unchanged.
"""
return self._get_connection(using).indices.close(index=self._name, **kwargs)
|
def function[close, parameter[self, using]]:
constant[
Closes the index in elasticsearch.
Any additional keyword arguments will be passed to
``Elasticsearch.indices.close`` unchanged.
]
return[call[call[name[self]._get_connection, parameter[name[using]]].indices.close, parameter[]]]
|
keyword[def] identifier[close] ( identifier[self] , identifier[using] = keyword[None] ,** identifier[kwargs] ):
literal[string]
keyword[return] identifier[self] . identifier[_get_connection] ( identifier[using] ). identifier[indices] . identifier[close] ( identifier[index] = identifier[self] . identifier[_name] ,** identifier[kwargs] )
|
def close(self, using=None, **kwargs):
"""
Closes the index in elasticsearch.
Any additional keyword arguments will be passed to
``Elasticsearch.indices.close`` unchanged.
"""
return self._get_connection(using).indices.close(index=self._name, **kwargs)
|
def _valid_date(self):
"""Check and return a valid query date."""
date = self._parse_date(self.date)
if not date:
exit_after_echo(INVALID_DATE)
try:
date = datetime.strptime(date, '%Y%m%d')
except ValueError:
exit_after_echo(INVALID_DATE)
# A valid query date should within 50 days.
offset = date - datetime.today()
if offset.days not in range(-1, 50):
exit_after_echo(INVALID_DATE)
return datetime.strftime(date, '%Y-%m-%d')
|
def function[_valid_date, parameter[self]]:
constant[Check and return a valid query date.]
variable[date] assign[=] call[name[self]._parse_date, parameter[name[self].date]]
if <ast.UnaryOp object at 0x7da18c4cc850> begin[:]
call[name[exit_after_echo], parameter[name[INVALID_DATE]]]
<ast.Try object at 0x7da18c4cd1e0>
variable[offset] assign[=] binary_operation[name[date] - call[name[datetime].today, parameter[]]]
if compare[name[offset].days <ast.NotIn object at 0x7da2590d7190> call[name[range], parameter[<ast.UnaryOp object at 0x7da18c4cd3f0>, constant[50]]]] begin[:]
call[name[exit_after_echo], parameter[name[INVALID_DATE]]]
return[call[name[datetime].strftime, parameter[name[date], constant[%Y-%m-%d]]]]
|
keyword[def] identifier[_valid_date] ( identifier[self] ):
literal[string]
identifier[date] = identifier[self] . identifier[_parse_date] ( identifier[self] . identifier[date] )
keyword[if] keyword[not] identifier[date] :
identifier[exit_after_echo] ( identifier[INVALID_DATE] )
keyword[try] :
identifier[date] = identifier[datetime] . identifier[strptime] ( identifier[date] , literal[string] )
keyword[except] identifier[ValueError] :
identifier[exit_after_echo] ( identifier[INVALID_DATE] )
identifier[offset] = identifier[date] - identifier[datetime] . identifier[today] ()
keyword[if] identifier[offset] . identifier[days] keyword[not] keyword[in] identifier[range] (- literal[int] , literal[int] ):
identifier[exit_after_echo] ( identifier[INVALID_DATE] )
keyword[return] identifier[datetime] . identifier[strftime] ( identifier[date] , literal[string] )
|
def _valid_date(self):
"""Check and return a valid query date."""
date = self._parse_date(self.date)
if not date:
exit_after_echo(INVALID_DATE) # depends on [control=['if'], data=[]]
try:
date = datetime.strptime(date, '%Y%m%d') # depends on [control=['try'], data=[]]
except ValueError:
exit_after_echo(INVALID_DATE) # depends on [control=['except'], data=[]]
# A valid query date should within 50 days.
offset = date - datetime.today()
if offset.days not in range(-1, 50):
exit_after_echo(INVALID_DATE) # depends on [control=['if'], data=[]]
return datetime.strftime(date, '%Y-%m-%d')
|
def windows_from_blocksize(self, blocksize_xy=512):
"""Create rasterio.windows.Window instances with given size which fully cover the raster.
Arguments:
blocksize_xy {int or list of two int} -- Size of the window. If one integer is given it defines
the width and height of the window. If a list of two integers if given the first defines the
width and the second the height.
Returns:
None -- But the attributes ``windows``, ``windows_row`` and ``windows_col`` are updated.
"""
meta = self._get_template_for_given_resolution(self.dst_res, "meta")
width = meta["width"]
height = meta["height"]
blocksize_wins = windows_from_blocksize(blocksize_xy, width, height)
self.windows = np.array([win[1] for win in blocksize_wins])
self.windows_row = np.array([win[0][0] for win in blocksize_wins])
self.windows_col = np.array([win[0][1] for win in blocksize_wins])
return self
|
def function[windows_from_blocksize, parameter[self, blocksize_xy]]:
constant[Create rasterio.windows.Window instances with given size which fully cover the raster.
Arguments:
blocksize_xy {int or list of two int} -- Size of the window. If one integer is given it defines
the width and height of the window. If a list of two integers if given the first defines the
width and the second the height.
Returns:
None -- But the attributes ``windows``, ``windows_row`` and ``windows_col`` are updated.
]
variable[meta] assign[=] call[name[self]._get_template_for_given_resolution, parameter[name[self].dst_res, constant[meta]]]
variable[width] assign[=] call[name[meta]][constant[width]]
variable[height] assign[=] call[name[meta]][constant[height]]
variable[blocksize_wins] assign[=] call[name[windows_from_blocksize], parameter[name[blocksize_xy], name[width], name[height]]]
name[self].windows assign[=] call[name[np].array, parameter[<ast.ListComp object at 0x7da1b059cb80>]]
name[self].windows_row assign[=] call[name[np].array, parameter[<ast.ListComp object at 0x7da1b055e3e0>]]
name[self].windows_col assign[=] call[name[np].array, parameter[<ast.ListComp object at 0x7da1b055ce50>]]
return[name[self]]
|
keyword[def] identifier[windows_from_blocksize] ( identifier[self] , identifier[blocksize_xy] = literal[int] ):
literal[string]
identifier[meta] = identifier[self] . identifier[_get_template_for_given_resolution] ( identifier[self] . identifier[dst_res] , literal[string] )
identifier[width] = identifier[meta] [ literal[string] ]
identifier[height] = identifier[meta] [ literal[string] ]
identifier[blocksize_wins] = identifier[windows_from_blocksize] ( identifier[blocksize_xy] , identifier[width] , identifier[height] )
identifier[self] . identifier[windows] = identifier[np] . identifier[array] ([ identifier[win] [ literal[int] ] keyword[for] identifier[win] keyword[in] identifier[blocksize_wins] ])
identifier[self] . identifier[windows_row] = identifier[np] . identifier[array] ([ identifier[win] [ literal[int] ][ literal[int] ] keyword[for] identifier[win] keyword[in] identifier[blocksize_wins] ])
identifier[self] . identifier[windows_col] = identifier[np] . identifier[array] ([ identifier[win] [ literal[int] ][ literal[int] ] keyword[for] identifier[win] keyword[in] identifier[blocksize_wins] ])
keyword[return] identifier[self]
|
def windows_from_blocksize(self, blocksize_xy=512):
"""Create rasterio.windows.Window instances with given size which fully cover the raster.
Arguments:
blocksize_xy {int or list of two int} -- Size of the window. If one integer is given it defines
the width and height of the window. If a list of two integers if given the first defines the
width and the second the height.
Returns:
None -- But the attributes ``windows``, ``windows_row`` and ``windows_col`` are updated.
"""
meta = self._get_template_for_given_resolution(self.dst_res, 'meta')
width = meta['width']
height = meta['height']
blocksize_wins = windows_from_blocksize(blocksize_xy, width, height)
self.windows = np.array([win[1] for win in blocksize_wins])
self.windows_row = np.array([win[0][0] for win in blocksize_wins])
self.windows_col = np.array([win[0][1] for win in blocksize_wins])
return self
|
def getUserByNumber(self, base, uidNumber):
""" search for a user in LDAP and return its DN and uid """
res = self.query(base, "uidNumber="+str(uidNumber), ['uid'])
if len(res) > 1:
raise InputError(uidNumber, "Multiple users found. Expecting one.")
return res[0][0], res[0][1]['uid'][0]
|
def function[getUserByNumber, parameter[self, base, uidNumber]]:
constant[ search for a user in LDAP and return its DN and uid ]
variable[res] assign[=] call[name[self].query, parameter[name[base], binary_operation[constant[uidNumber=] + call[name[str], parameter[name[uidNumber]]]], list[[<ast.Constant object at 0x7da1b10aea10>]]]]
if compare[call[name[len], parameter[name[res]]] greater[>] constant[1]] begin[:]
<ast.Raise object at 0x7da1b10ae470>
return[tuple[[<ast.Subscript object at 0x7da1b10af100>, <ast.Subscript object at 0x7da1b10ad420>]]]
|
keyword[def] identifier[getUserByNumber] ( identifier[self] , identifier[base] , identifier[uidNumber] ):
literal[string]
identifier[res] = identifier[self] . identifier[query] ( identifier[base] , literal[string] + identifier[str] ( identifier[uidNumber] ),[ literal[string] ])
keyword[if] identifier[len] ( identifier[res] )> literal[int] :
keyword[raise] identifier[InputError] ( identifier[uidNumber] , literal[string] )
keyword[return] identifier[res] [ literal[int] ][ literal[int] ], identifier[res] [ literal[int] ][ literal[int] ][ literal[string] ][ literal[int] ]
|
def getUserByNumber(self, base, uidNumber):
""" search for a user in LDAP and return its DN and uid """
res = self.query(base, 'uidNumber=' + str(uidNumber), ['uid'])
if len(res) > 1:
raise InputError(uidNumber, 'Multiple users found. Expecting one.') # depends on [control=['if'], data=[]]
return (res[0][0], res[0][1]['uid'][0])
|
def businesstime_hours(self, d1, d2):
"""
Returns a datetime.timedelta of business hours between d1 and d2,
based on the length of the businessday
"""
open_hours = self.open_hours.seconds / 3600
btd = self.businesstimedelta(d1, d2)
btd_hours = btd.seconds / 3600
return datetime.timedelta(hours=(btd.days * open_hours + btd_hours))
|
def function[businesstime_hours, parameter[self, d1, d2]]:
constant[
Returns a datetime.timedelta of business hours between d1 and d2,
based on the length of the businessday
]
variable[open_hours] assign[=] binary_operation[name[self].open_hours.seconds / constant[3600]]
variable[btd] assign[=] call[name[self].businesstimedelta, parameter[name[d1], name[d2]]]
variable[btd_hours] assign[=] binary_operation[name[btd].seconds / constant[3600]]
return[call[name[datetime].timedelta, parameter[]]]
|
keyword[def] identifier[businesstime_hours] ( identifier[self] , identifier[d1] , identifier[d2] ):
literal[string]
identifier[open_hours] = identifier[self] . identifier[open_hours] . identifier[seconds] / literal[int]
identifier[btd] = identifier[self] . identifier[businesstimedelta] ( identifier[d1] , identifier[d2] )
identifier[btd_hours] = identifier[btd] . identifier[seconds] / literal[int]
keyword[return] identifier[datetime] . identifier[timedelta] ( identifier[hours] =( identifier[btd] . identifier[days] * identifier[open_hours] + identifier[btd_hours] ))
|
def businesstime_hours(self, d1, d2):
"""
Returns a datetime.timedelta of business hours between d1 and d2,
based on the length of the businessday
"""
open_hours = self.open_hours.seconds / 3600
btd = self.businesstimedelta(d1, d2)
btd_hours = btd.seconds / 3600
return datetime.timedelta(hours=btd.days * open_hours + btd_hours)
|
def max_pairs(shape):
"""[DEPRECATED] Compute the maximum number of record pairs possible."""
if not isinstance(shape, (tuple, list)):
x = get_length(shape)
n = int(x * (x - 1) / 2)
elif (isinstance(shape, (tuple, list)) and len(shape) == 1):
x = get_length(shape[0])
n = int(x * (x - 1) / 2)
else:
n = numpy.prod([get_length(xi) for xi in shape])
return n
|
def function[max_pairs, parameter[shape]]:
constant[[DEPRECATED] Compute the maximum number of record pairs possible.]
if <ast.UnaryOp object at 0x7da18f58c0d0> begin[:]
variable[x] assign[=] call[name[get_length], parameter[name[shape]]]
variable[n] assign[=] call[name[int], parameter[binary_operation[binary_operation[name[x] * binary_operation[name[x] - constant[1]]] / constant[2]]]]
return[name[n]]
|
keyword[def] identifier[max_pairs] ( identifier[shape] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[shape] ,( identifier[tuple] , identifier[list] )):
identifier[x] = identifier[get_length] ( identifier[shape] )
identifier[n] = identifier[int] ( identifier[x] *( identifier[x] - literal[int] )/ literal[int] )
keyword[elif] ( identifier[isinstance] ( identifier[shape] ,( identifier[tuple] , identifier[list] )) keyword[and] identifier[len] ( identifier[shape] )== literal[int] ):
identifier[x] = identifier[get_length] ( identifier[shape] [ literal[int] ])
identifier[n] = identifier[int] ( identifier[x] *( identifier[x] - literal[int] )/ literal[int] )
keyword[else] :
identifier[n] = identifier[numpy] . identifier[prod] ([ identifier[get_length] ( identifier[xi] ) keyword[for] identifier[xi] keyword[in] identifier[shape] ])
keyword[return] identifier[n]
|
def max_pairs(shape):
"""[DEPRECATED] Compute the maximum number of record pairs possible."""
if not isinstance(shape, (tuple, list)):
x = get_length(shape)
n = int(x * (x - 1) / 2) # depends on [control=['if'], data=[]]
elif isinstance(shape, (tuple, list)) and len(shape) == 1:
x = get_length(shape[0])
n = int(x * (x - 1) / 2) # depends on [control=['if'], data=[]]
else:
n = numpy.prod([get_length(xi) for xi in shape])
return n
|
def readWindowsFile(wfile):
""""
reading file with windows
wfile File containing window info
"""
window_file = wfile+'.wnd'
assert os.path.exists(window_file), '%s is missing.'%window_file
rv = SP.loadtxt(window_file)
return rv
|
def function[readWindowsFile, parameter[wfile]]:
constant["
reading file with windows
wfile File containing window info
]
variable[window_file] assign[=] binary_operation[name[wfile] + constant[.wnd]]
assert[call[name[os].path.exists, parameter[name[window_file]]]]
variable[rv] assign[=] call[name[SP].loadtxt, parameter[name[window_file]]]
return[name[rv]]
|
keyword[def] identifier[readWindowsFile] ( identifier[wfile] ):
literal[string]
identifier[window_file] = identifier[wfile] + literal[string]
keyword[assert] identifier[os] . identifier[path] . identifier[exists] ( identifier[window_file] ), literal[string] % identifier[window_file]
identifier[rv] = identifier[SP] . identifier[loadtxt] ( identifier[window_file] )
keyword[return] identifier[rv]
|
def readWindowsFile(wfile):
""""
reading file with windows
wfile File containing window info
"""
window_file = wfile + '.wnd'
assert os.path.exists(window_file), '%s is missing.' % window_file
rv = SP.loadtxt(window_file)
return rv
|
def load_library(self,libname):
"""Given the name of a library, load it."""
paths = self.getpaths(libname)
for path in paths:
if os.path.exists(path):
return self.load(path)
raise ImportError("%s not found." % libname)
|
def function[load_library, parameter[self, libname]]:
constant[Given the name of a library, load it.]
variable[paths] assign[=] call[name[self].getpaths, parameter[name[libname]]]
for taget[name[path]] in starred[name[paths]] begin[:]
if call[name[os].path.exists, parameter[name[path]]] begin[:]
return[call[name[self].load, parameter[name[path]]]]
<ast.Raise object at 0x7da1b27b6290>
|
keyword[def] identifier[load_library] ( identifier[self] , identifier[libname] ):
literal[string]
identifier[paths] = identifier[self] . identifier[getpaths] ( identifier[libname] )
keyword[for] identifier[path] keyword[in] identifier[paths] :
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[path] ):
keyword[return] identifier[self] . identifier[load] ( identifier[path] )
keyword[raise] identifier[ImportError] ( literal[string] % identifier[libname] )
|
def load_library(self, libname):
"""Given the name of a library, load it."""
paths = self.getpaths(libname)
for path in paths:
if os.path.exists(path):
return self.load(path) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['path']]
raise ImportError('%s not found.' % libname)
|
def _RunUserDefinedFunctions_(config, data, histObj, position, namespace=__name__):
"""
Return a single updated data record and history object after running user-defined functions
:param dict config: DWM configuration (see DataDictionary)
:param dict data: single record (dictionary) to which user-defined functions should be applied
:param dict histObj: History object to which changes should be appended
:param string position: position name of which function set from config should be run
:param namespace: namespace of current working script; must be passed if using user-defined functions
"""
udfConfig = config['userDefinedFunctions']
if position in udfConfig:
posConfig = udfConfig[position]
for udf in posConfig.keys():
posConfigUDF = posConfig[udf]
data, histObj = getattr(sys.modules[namespace], posConfigUDF)(data=data, histObj=histObj)
return data, histObj
|
def function[_RunUserDefinedFunctions_, parameter[config, data, histObj, position, namespace]]:
constant[
Return a single updated data record and history object after running user-defined functions
:param dict config: DWM configuration (see DataDictionary)
:param dict data: single record (dictionary) to which user-defined functions should be applied
:param dict histObj: History object to which changes should be appended
:param string position: position name of which function set from config should be run
:param namespace: namespace of current working script; must be passed if using user-defined functions
]
variable[udfConfig] assign[=] call[name[config]][constant[userDefinedFunctions]]
if compare[name[position] in name[udfConfig]] begin[:]
variable[posConfig] assign[=] call[name[udfConfig]][name[position]]
for taget[name[udf]] in starred[call[name[posConfig].keys, parameter[]]] begin[:]
variable[posConfigUDF] assign[=] call[name[posConfig]][name[udf]]
<ast.Tuple object at 0x7da1b0af5810> assign[=] call[call[name[getattr], parameter[call[name[sys].modules][name[namespace]], name[posConfigUDF]]], parameter[]]
return[tuple[[<ast.Name object at 0x7da1b0af5c60>, <ast.Name object at 0x7da1b0af7910>]]]
|
keyword[def] identifier[_RunUserDefinedFunctions_] ( identifier[config] , identifier[data] , identifier[histObj] , identifier[position] , identifier[namespace] = identifier[__name__] ):
literal[string]
identifier[udfConfig] = identifier[config] [ literal[string] ]
keyword[if] identifier[position] keyword[in] identifier[udfConfig] :
identifier[posConfig] = identifier[udfConfig] [ identifier[position] ]
keyword[for] identifier[udf] keyword[in] identifier[posConfig] . identifier[keys] ():
identifier[posConfigUDF] = identifier[posConfig] [ identifier[udf] ]
identifier[data] , identifier[histObj] = identifier[getattr] ( identifier[sys] . identifier[modules] [ identifier[namespace] ], identifier[posConfigUDF] )( identifier[data] = identifier[data] , identifier[histObj] = identifier[histObj] )
keyword[return] identifier[data] , identifier[histObj]
|
def _RunUserDefinedFunctions_(config, data, histObj, position, namespace=__name__):
"""
Return a single updated data record and history object after running user-defined functions
:param dict config: DWM configuration (see DataDictionary)
:param dict data: single record (dictionary) to which user-defined functions should be applied
:param dict histObj: History object to which changes should be appended
:param string position: position name of which function set from config should be run
:param namespace: namespace of current working script; must be passed if using user-defined functions
"""
udfConfig = config['userDefinedFunctions']
if position in udfConfig:
posConfig = udfConfig[position]
for udf in posConfig.keys():
posConfigUDF = posConfig[udf]
(data, histObj) = getattr(sys.modules[namespace], posConfigUDF)(data=data, histObj=histObj) # depends on [control=['for'], data=['udf']] # depends on [control=['if'], data=['position', 'udfConfig']]
return (data, histObj)
|
def init(self):
'''
Initialize the device.
Parameters of visa.ResourceManager().open_resource()
'''
super(Visa, self).init()
backend = self._init.get('backend', '') # Empty string means std. backend (NI VISA)
rm = visa.ResourceManager(backend)
try:
logger.info('BASIL VISA TL with %s backend found the following devices: %s', backend, ", ".join(rm.list_resources()))
except NotImplementedError: # some backends do not always implement the list_resources function
logger.info('BASIL VISA TL with %s backend', backend)
self._resource = rm.open_resource(**{key: value for key, value in self._init.items() if key not in ("backend",)})
|
def function[init, parameter[self]]:
constant[
Initialize the device.
Parameters of visa.ResourceManager().open_resource()
]
call[call[name[super], parameter[name[Visa], name[self]]].init, parameter[]]
variable[backend] assign[=] call[name[self]._init.get, parameter[constant[backend], constant[]]]
variable[rm] assign[=] call[name[visa].ResourceManager, parameter[name[backend]]]
<ast.Try object at 0x7da1b050f0a0>
name[self]._resource assign[=] call[name[rm].open_resource, parameter[]]
|
keyword[def] identifier[init] ( identifier[self] ):
literal[string]
identifier[super] ( identifier[Visa] , identifier[self] ). identifier[init] ()
identifier[backend] = identifier[self] . identifier[_init] . identifier[get] ( literal[string] , literal[string] )
identifier[rm] = identifier[visa] . identifier[ResourceManager] ( identifier[backend] )
keyword[try] :
identifier[logger] . identifier[info] ( literal[string] , identifier[backend] , literal[string] . identifier[join] ( identifier[rm] . identifier[list_resources] ()))
keyword[except] identifier[NotImplementedError] :
identifier[logger] . identifier[info] ( literal[string] , identifier[backend] )
identifier[self] . identifier[_resource] = identifier[rm] . identifier[open_resource] (**{ identifier[key] : identifier[value] keyword[for] identifier[key] , identifier[value] keyword[in] identifier[self] . identifier[_init] . identifier[items] () keyword[if] identifier[key] keyword[not] keyword[in] ( literal[string] ,)})
|
def init(self):
"""
Initialize the device.
Parameters of visa.ResourceManager().open_resource()
"""
super(Visa, self).init()
backend = self._init.get('backend', '') # Empty string means std. backend (NI VISA)
rm = visa.ResourceManager(backend)
try:
logger.info('BASIL VISA TL with %s backend found the following devices: %s', backend, ', '.join(rm.list_resources())) # depends on [control=['try'], data=[]]
except NotImplementedError: # some backends do not always implement the list_resources function
logger.info('BASIL VISA TL with %s backend', backend) # depends on [control=['except'], data=[]]
self._resource = rm.open_resource(**{key: value for (key, value) in self._init.items() if key not in ('backend',)})
|
def base_boxes(self):
"""
Get the list of vagrant base boxes
"""
return sorted(list(set([name for name, provider in self._box_list()])))
|
def function[base_boxes, parameter[self]]:
constant[
Get the list of vagrant base boxes
]
return[call[name[sorted], parameter[call[name[list], parameter[call[name[set], parameter[<ast.ListComp object at 0x7da1b0062da0>]]]]]]]
|
keyword[def] identifier[base_boxes] ( identifier[self] ):
literal[string]
keyword[return] identifier[sorted] ( identifier[list] ( identifier[set] ([ identifier[name] keyword[for] identifier[name] , identifier[provider] keyword[in] identifier[self] . identifier[_box_list] ()])))
|
def base_boxes(self):
"""
Get the list of vagrant base boxes
"""
return sorted(list(set([name for (name, provider) in self._box_list()])))
|
def _do_layout(self):
"""Layout sizers"""
dialog_main_sizer = wx.BoxSizer(wx.HORIZONTAL)
upper_sizer = wx.BoxSizer(wx.HORIZONTAL)
lower_sizer = wx.FlexGridSizer(2, 1, 5, 0)
lower_sizer.AddGrowableRow(0)
lower_sizer.AddGrowableCol(0)
button_sizer = wx.BoxSizer(wx.HORIZONTAL)
upper_sizer.Add(self.codetext_ctrl, 1, wx.EXPAND, 0)
lower_sizer.Add(self.result_ctrl, 1, wx.EXPAND, 0)
lower_sizer.Add(button_sizer, 1, wx.EXPAND, 0)
button_sizer.Add(self.apply_button, 1, wx.EXPAND, 0)
self.upper_panel.SetSizer(upper_sizer)
self.lower_panel.SetSizer(lower_sizer)
sash_50 = int(round((config["window_size"][1] - 100) * 0.5))
self.splitter.SplitHorizontally(self.upper_panel,
self.lower_panel, sash_50)
dialog_main_sizer.Add(self.splitter, 1, wx.EXPAND, 0)
self.SetSizer(dialog_main_sizer)
self.Layout()
|
def function[_do_layout, parameter[self]]:
constant[Layout sizers]
variable[dialog_main_sizer] assign[=] call[name[wx].BoxSizer, parameter[name[wx].HORIZONTAL]]
variable[upper_sizer] assign[=] call[name[wx].BoxSizer, parameter[name[wx].HORIZONTAL]]
variable[lower_sizer] assign[=] call[name[wx].FlexGridSizer, parameter[constant[2], constant[1], constant[5], constant[0]]]
call[name[lower_sizer].AddGrowableRow, parameter[constant[0]]]
call[name[lower_sizer].AddGrowableCol, parameter[constant[0]]]
variable[button_sizer] assign[=] call[name[wx].BoxSizer, parameter[name[wx].HORIZONTAL]]
call[name[upper_sizer].Add, parameter[name[self].codetext_ctrl, constant[1], name[wx].EXPAND, constant[0]]]
call[name[lower_sizer].Add, parameter[name[self].result_ctrl, constant[1], name[wx].EXPAND, constant[0]]]
call[name[lower_sizer].Add, parameter[name[button_sizer], constant[1], name[wx].EXPAND, constant[0]]]
call[name[button_sizer].Add, parameter[name[self].apply_button, constant[1], name[wx].EXPAND, constant[0]]]
call[name[self].upper_panel.SetSizer, parameter[name[upper_sizer]]]
call[name[self].lower_panel.SetSizer, parameter[name[lower_sizer]]]
variable[sash_50] assign[=] call[name[int], parameter[call[name[round], parameter[binary_operation[binary_operation[call[call[name[config]][constant[window_size]]][constant[1]] - constant[100]] * constant[0.5]]]]]]
call[name[self].splitter.SplitHorizontally, parameter[name[self].upper_panel, name[self].lower_panel, name[sash_50]]]
call[name[dialog_main_sizer].Add, parameter[name[self].splitter, constant[1], name[wx].EXPAND, constant[0]]]
call[name[self].SetSizer, parameter[name[dialog_main_sizer]]]
call[name[self].Layout, parameter[]]
|
keyword[def] identifier[_do_layout] ( identifier[self] ):
literal[string]
identifier[dialog_main_sizer] = identifier[wx] . identifier[BoxSizer] ( identifier[wx] . identifier[HORIZONTAL] )
identifier[upper_sizer] = identifier[wx] . identifier[BoxSizer] ( identifier[wx] . identifier[HORIZONTAL] )
identifier[lower_sizer] = identifier[wx] . identifier[FlexGridSizer] ( literal[int] , literal[int] , literal[int] , literal[int] )
identifier[lower_sizer] . identifier[AddGrowableRow] ( literal[int] )
identifier[lower_sizer] . identifier[AddGrowableCol] ( literal[int] )
identifier[button_sizer] = identifier[wx] . identifier[BoxSizer] ( identifier[wx] . identifier[HORIZONTAL] )
identifier[upper_sizer] . identifier[Add] ( identifier[self] . identifier[codetext_ctrl] , literal[int] , identifier[wx] . identifier[EXPAND] , literal[int] )
identifier[lower_sizer] . identifier[Add] ( identifier[self] . identifier[result_ctrl] , literal[int] , identifier[wx] . identifier[EXPAND] , literal[int] )
identifier[lower_sizer] . identifier[Add] ( identifier[button_sizer] , literal[int] , identifier[wx] . identifier[EXPAND] , literal[int] )
identifier[button_sizer] . identifier[Add] ( identifier[self] . identifier[apply_button] , literal[int] , identifier[wx] . identifier[EXPAND] , literal[int] )
identifier[self] . identifier[upper_panel] . identifier[SetSizer] ( identifier[upper_sizer] )
identifier[self] . identifier[lower_panel] . identifier[SetSizer] ( identifier[lower_sizer] )
identifier[sash_50] = identifier[int] ( identifier[round] (( identifier[config] [ literal[string] ][ literal[int] ]- literal[int] )* literal[int] ))
identifier[self] . identifier[splitter] . identifier[SplitHorizontally] ( identifier[self] . identifier[upper_panel] ,
identifier[self] . identifier[lower_panel] , identifier[sash_50] )
identifier[dialog_main_sizer] . identifier[Add] ( identifier[self] . identifier[splitter] , literal[int] , identifier[wx] . identifier[EXPAND] , literal[int] )
identifier[self] . identifier[SetSizer] ( identifier[dialog_main_sizer] )
identifier[self] . identifier[Layout] ()
|
def _do_layout(self):
"""Layout sizers"""
dialog_main_sizer = wx.BoxSizer(wx.HORIZONTAL)
upper_sizer = wx.BoxSizer(wx.HORIZONTAL)
lower_sizer = wx.FlexGridSizer(2, 1, 5, 0)
lower_sizer.AddGrowableRow(0)
lower_sizer.AddGrowableCol(0)
button_sizer = wx.BoxSizer(wx.HORIZONTAL)
upper_sizer.Add(self.codetext_ctrl, 1, wx.EXPAND, 0)
lower_sizer.Add(self.result_ctrl, 1, wx.EXPAND, 0)
lower_sizer.Add(button_sizer, 1, wx.EXPAND, 0)
button_sizer.Add(self.apply_button, 1, wx.EXPAND, 0)
self.upper_panel.SetSizer(upper_sizer)
self.lower_panel.SetSizer(lower_sizer)
sash_50 = int(round((config['window_size'][1] - 100) * 0.5))
self.splitter.SplitHorizontally(self.upper_panel, self.lower_panel, sash_50)
dialog_main_sizer.Add(self.splitter, 1, wx.EXPAND, 0)
self.SetSizer(dialog_main_sizer)
self.Layout()
|
def get_list_key(self, data, key, header_lines=2):
"""Get the list of a key elements.
Each element is a tuple (key=None, description, type=None).
Note that the tuple's element can differ depending on the key.
:param data: the data to proceed
:param key: the key
"""
return super(NumpydocTools, self).get_list_key(data, key, header_lines=header_lines)
|
def function[get_list_key, parameter[self, data, key, header_lines]]:
constant[Get the list of a key elements.
Each element is a tuple (key=None, description, type=None).
Note that the tuple's element can differ depending on the key.
:param data: the data to proceed
:param key: the key
]
return[call[call[name[super], parameter[name[NumpydocTools], name[self]]].get_list_key, parameter[name[data], name[key]]]]
|
keyword[def] identifier[get_list_key] ( identifier[self] , identifier[data] , identifier[key] , identifier[header_lines] = literal[int] ):
literal[string]
keyword[return] identifier[super] ( identifier[NumpydocTools] , identifier[self] ). identifier[get_list_key] ( identifier[data] , identifier[key] , identifier[header_lines] = identifier[header_lines] )
|
def get_list_key(self, data, key, header_lines=2):
"""Get the list of a key elements.
Each element is a tuple (key=None, description, type=None).
Note that the tuple's element can differ depending on the key.
:param data: the data to proceed
:param key: the key
"""
return super(NumpydocTools, self).get_list_key(data, key, header_lines=header_lines)
|
def _handle_iorder(self, state):
'''
Take a state and apply the iorder system
'''
if self.opts['state_auto_order']:
for name in state:
for s_dec in state[name]:
if not isinstance(s_dec, six.string_types):
# PyDSL OrderedDict?
continue
if not isinstance(state[name], dict):
# Include's or excludes as lists?
continue
if not isinstance(state[name][s_dec], list):
# Bad syntax, let the verify seq pick it up later on
continue
found = False
if s_dec.startswith('_'):
continue
for arg in state[name][s_dec]:
if isinstance(arg, dict):
if arg:
if next(six.iterkeys(arg)) == 'order':
found = True
if not found:
if not isinstance(state[name][s_dec], list):
# quite certainly a syntax error, managed elsewhere
continue
state[name][s_dec].append(
{'order': self.iorder}
)
self.iorder += 1
return state
|
def function[_handle_iorder, parameter[self, state]]:
constant[
Take a state and apply the iorder system
]
if call[name[self].opts][constant[state_auto_order]] begin[:]
for taget[name[name]] in starred[name[state]] begin[:]
for taget[name[s_dec]] in starred[call[name[state]][name[name]]] begin[:]
if <ast.UnaryOp object at 0x7da20cabd480> begin[:]
continue
if <ast.UnaryOp object at 0x7da20cabd6c0> begin[:]
continue
if <ast.UnaryOp object at 0x7da20cabebf0> begin[:]
continue
variable[found] assign[=] constant[False]
if call[name[s_dec].startswith, parameter[constant[_]]] begin[:]
continue
for taget[name[arg]] in starred[call[call[name[state]][name[name]]][name[s_dec]]] begin[:]
if call[name[isinstance], parameter[name[arg], name[dict]]] begin[:]
if name[arg] begin[:]
if compare[call[name[next], parameter[call[name[six].iterkeys, parameter[name[arg]]]]] equal[==] constant[order]] begin[:]
variable[found] assign[=] constant[True]
if <ast.UnaryOp object at 0x7da20cabcee0> begin[:]
if <ast.UnaryOp object at 0x7da20cabc5e0> begin[:]
continue
call[call[call[name[state]][name[name]]][name[s_dec]].append, parameter[dictionary[[<ast.Constant object at 0x7da1b2347790>], [<ast.Attribute object at 0x7da1b2347730>]]]]
<ast.AugAssign object at 0x7da1b2346d70>
return[name[state]]
|
keyword[def] identifier[_handle_iorder] ( identifier[self] , identifier[state] ):
literal[string]
keyword[if] identifier[self] . identifier[opts] [ literal[string] ]:
keyword[for] identifier[name] keyword[in] identifier[state] :
keyword[for] identifier[s_dec] keyword[in] identifier[state] [ identifier[name] ]:
keyword[if] keyword[not] identifier[isinstance] ( identifier[s_dec] , identifier[six] . identifier[string_types] ):
keyword[continue]
keyword[if] keyword[not] identifier[isinstance] ( identifier[state] [ identifier[name] ], identifier[dict] ):
keyword[continue]
keyword[if] keyword[not] identifier[isinstance] ( identifier[state] [ identifier[name] ][ identifier[s_dec] ], identifier[list] ):
keyword[continue]
identifier[found] = keyword[False]
keyword[if] identifier[s_dec] . identifier[startswith] ( literal[string] ):
keyword[continue]
keyword[for] identifier[arg] keyword[in] identifier[state] [ identifier[name] ][ identifier[s_dec] ]:
keyword[if] identifier[isinstance] ( identifier[arg] , identifier[dict] ):
keyword[if] identifier[arg] :
keyword[if] identifier[next] ( identifier[six] . identifier[iterkeys] ( identifier[arg] ))== literal[string] :
identifier[found] = keyword[True]
keyword[if] keyword[not] identifier[found] :
keyword[if] keyword[not] identifier[isinstance] ( identifier[state] [ identifier[name] ][ identifier[s_dec] ], identifier[list] ):
keyword[continue]
identifier[state] [ identifier[name] ][ identifier[s_dec] ]. identifier[append] (
{ literal[string] : identifier[self] . identifier[iorder] }
)
identifier[self] . identifier[iorder] += literal[int]
keyword[return] identifier[state]
|
def _handle_iorder(self, state):
"""
Take a state and apply the iorder system
"""
if self.opts['state_auto_order']:
for name in state:
for s_dec in state[name]:
if not isinstance(s_dec, six.string_types):
# PyDSL OrderedDict?
continue # depends on [control=['if'], data=[]]
if not isinstance(state[name], dict):
# Include's or excludes as lists?
continue # depends on [control=['if'], data=[]]
if not isinstance(state[name][s_dec], list):
# Bad syntax, let the verify seq pick it up later on
continue # depends on [control=['if'], data=[]]
found = False
if s_dec.startswith('_'):
continue # depends on [control=['if'], data=[]]
for arg in state[name][s_dec]:
if isinstance(arg, dict):
if arg:
if next(six.iterkeys(arg)) == 'order':
found = True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['arg']]
if not found:
if not isinstance(state[name][s_dec], list):
# quite certainly a syntax error, managed elsewhere
continue # depends on [control=['if'], data=[]]
state[name][s_dec].append({'order': self.iorder})
self.iorder += 1 # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['s_dec']] # depends on [control=['for'], data=['name']] # depends on [control=['if'], data=[]]
return state
|
def write_results(self, filename):
"""Writes samples, model stats, acceptance fraction, and random state
to the given file.
Parameters
-----------
filename : str
The file to write to. The file is opened using the ``io`` class
in an an append state.
"""
with self.io(filename, 'a') as fp:
# write samples
fp.write_samples(self.samples, self.model.variable_params,
last_iteration=self.niterations)
# write stats
fp.write_samples(self.model_stats,
last_iteration=self.niterations)
# write accpetance
fp.write_acceptance_fraction(self._sampler.acceptance_fraction)
# write random state
fp.write_random_state(state=self._sampler.random_state)
|
def function[write_results, parameter[self, filename]]:
constant[Writes samples, model stats, acceptance fraction, and random state
to the given file.
Parameters
-----------
filename : str
The file to write to. The file is opened using the ``io`` class
in an an append state.
]
with call[name[self].io, parameter[name[filename], constant[a]]] begin[:]
call[name[fp].write_samples, parameter[name[self].samples, name[self].model.variable_params]]
call[name[fp].write_samples, parameter[name[self].model_stats]]
call[name[fp].write_acceptance_fraction, parameter[name[self]._sampler.acceptance_fraction]]
call[name[fp].write_random_state, parameter[]]
|
keyword[def] identifier[write_results] ( identifier[self] , identifier[filename] ):
literal[string]
keyword[with] identifier[self] . identifier[io] ( identifier[filename] , literal[string] ) keyword[as] identifier[fp] :
identifier[fp] . identifier[write_samples] ( identifier[self] . identifier[samples] , identifier[self] . identifier[model] . identifier[variable_params] ,
identifier[last_iteration] = identifier[self] . identifier[niterations] )
identifier[fp] . identifier[write_samples] ( identifier[self] . identifier[model_stats] ,
identifier[last_iteration] = identifier[self] . identifier[niterations] )
identifier[fp] . identifier[write_acceptance_fraction] ( identifier[self] . identifier[_sampler] . identifier[acceptance_fraction] )
identifier[fp] . identifier[write_random_state] ( identifier[state] = identifier[self] . identifier[_sampler] . identifier[random_state] )
|
def write_results(self, filename):
"""Writes samples, model stats, acceptance fraction, and random state
to the given file.
Parameters
-----------
filename : str
The file to write to. The file is opened using the ``io`` class
in an an append state.
"""
with self.io(filename, 'a') as fp:
# write samples
fp.write_samples(self.samples, self.model.variable_params, last_iteration=self.niterations)
# write stats
fp.write_samples(self.model_stats, last_iteration=self.niterations)
# write accpetance
fp.write_acceptance_fraction(self._sampler.acceptance_fraction)
# write random state
fp.write_random_state(state=self._sampler.random_state) # depends on [control=['with'], data=['fp']]
|
def read(filename, encoding='utf-8'):
"""
Read text from file ('filename')
Return text and encoding
"""
text, encoding = decode( open(filename, 'rb').read() )
return text, encoding
|
def function[read, parameter[filename, encoding]]:
constant[
Read text from file ('filename')
Return text and encoding
]
<ast.Tuple object at 0x7da1b21d7d00> assign[=] call[name[decode], parameter[call[call[name[open], parameter[name[filename], constant[rb]]].read, parameter[]]]]
return[tuple[[<ast.Name object at 0x7da18f09cd90>, <ast.Name object at 0x7da18f09ec20>]]]
|
keyword[def] identifier[read] ( identifier[filename] , identifier[encoding] = literal[string] ):
literal[string]
identifier[text] , identifier[encoding] = identifier[decode] ( identifier[open] ( identifier[filename] , literal[string] ). identifier[read] ())
keyword[return] identifier[text] , identifier[encoding]
|
def read(filename, encoding='utf-8'):
"""
Read text from file ('filename')
Return text and encoding
"""
(text, encoding) = decode(open(filename, 'rb').read())
return (text, encoding)
|
def start(self, network_name, trunk_name, trunk_type):
"""Starts DHCP server process.
in network_name of type str
Name of internal network DHCP server should attach to.
in trunk_name of type str
Name of internal network trunk.
in trunk_type of type str
Type of internal network trunk.
raises :class:`OleErrorFail`
Failed to start the process.
"""
if not isinstance(network_name, basestring):
raise TypeError("network_name can only be an instance of type basestring")
if not isinstance(trunk_name, basestring):
raise TypeError("trunk_name can only be an instance of type basestring")
if not isinstance(trunk_type, basestring):
raise TypeError("trunk_type can only be an instance of type basestring")
self._call("start",
in_p=[network_name, trunk_name, trunk_type])
|
def function[start, parameter[self, network_name, trunk_name, trunk_type]]:
constant[Starts DHCP server process.
in network_name of type str
Name of internal network DHCP server should attach to.
in trunk_name of type str
Name of internal network trunk.
in trunk_type of type str
Type of internal network trunk.
raises :class:`OleErrorFail`
Failed to start the process.
]
if <ast.UnaryOp object at 0x7da20e9b0e50> begin[:]
<ast.Raise object at 0x7da20e9b0c40>
if <ast.UnaryOp object at 0x7da20e9b0040> begin[:]
<ast.Raise object at 0x7da20e9b21d0>
if <ast.UnaryOp object at 0x7da20e9b3490> begin[:]
<ast.Raise object at 0x7da20e9b0f40>
call[name[self]._call, parameter[constant[start]]]
|
keyword[def] identifier[start] ( identifier[self] , identifier[network_name] , identifier[trunk_name] , identifier[trunk_type] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[network_name] , identifier[basestring] ):
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[trunk_name] , identifier[basestring] ):
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[trunk_type] , identifier[basestring] ):
keyword[raise] identifier[TypeError] ( literal[string] )
identifier[self] . identifier[_call] ( literal[string] ,
identifier[in_p] =[ identifier[network_name] , identifier[trunk_name] , identifier[trunk_type] ])
|
def start(self, network_name, trunk_name, trunk_type):
"""Starts DHCP server process.
in network_name of type str
Name of internal network DHCP server should attach to.
in trunk_name of type str
Name of internal network trunk.
in trunk_type of type str
Type of internal network trunk.
raises :class:`OleErrorFail`
Failed to start the process.
"""
if not isinstance(network_name, basestring):
raise TypeError('network_name can only be an instance of type basestring') # depends on [control=['if'], data=[]]
if not isinstance(trunk_name, basestring):
raise TypeError('trunk_name can only be an instance of type basestring') # depends on [control=['if'], data=[]]
if not isinstance(trunk_type, basestring):
raise TypeError('trunk_type can only be an instance of type basestring') # depends on [control=['if'], data=[]]
self._call('start', in_p=[network_name, trunk_name, trunk_type])
|
def encode_invocation_params(params):
""" Returns a list of paramaters meant to be passed to JSON-RPC endpoints. """
final_params = []
for p in params:
if isinstance(p, bool):
final_params.append({'type': ContractParameterTypes.BOOLEAN.value, 'value': p})
elif isinstance(p, int):
final_params.append({'type': ContractParameterTypes.INTEGER.value, 'value': p})
elif is_hash256(p):
final_params.append({'type': ContractParameterTypes.HASH256.value, 'value': p})
elif is_hash160(p):
final_params.append({'type': ContractParameterTypes.HASH160.value, 'value': p})
elif isinstance(p, bytearray):
final_params.append({'type': ContractParameterTypes.BYTE_ARRAY.value, 'value': p})
elif isinstance(p, str):
final_params.append({'type': ContractParameterTypes.STRING.value, 'value': p})
elif isinstance(p, list):
innerp = encode_invocation_params(p)
final_params.append({'type': ContractParameterTypes.ARRAY.value, 'value': innerp})
return final_params
|
def function[encode_invocation_params, parameter[params]]:
constant[ Returns a list of paramaters meant to be passed to JSON-RPC endpoints. ]
variable[final_params] assign[=] list[[]]
for taget[name[p]] in starred[name[params]] begin[:]
if call[name[isinstance], parameter[name[p], name[bool]]] begin[:]
call[name[final_params].append, parameter[dictionary[[<ast.Constant object at 0x7da20c6ab1c0>, <ast.Constant object at 0x7da20c6a8100>], [<ast.Attribute object at 0x7da20c6abb80>, <ast.Name object at 0x7da20c6a9cf0>]]]]
return[name[final_params]]
|
keyword[def] identifier[encode_invocation_params] ( identifier[params] ):
literal[string]
identifier[final_params] =[]
keyword[for] identifier[p] keyword[in] identifier[params] :
keyword[if] identifier[isinstance] ( identifier[p] , identifier[bool] ):
identifier[final_params] . identifier[append] ({ literal[string] : identifier[ContractParameterTypes] . identifier[BOOLEAN] . identifier[value] , literal[string] : identifier[p] })
keyword[elif] identifier[isinstance] ( identifier[p] , identifier[int] ):
identifier[final_params] . identifier[append] ({ literal[string] : identifier[ContractParameterTypes] . identifier[INTEGER] . identifier[value] , literal[string] : identifier[p] })
keyword[elif] identifier[is_hash256] ( identifier[p] ):
identifier[final_params] . identifier[append] ({ literal[string] : identifier[ContractParameterTypes] . identifier[HASH256] . identifier[value] , literal[string] : identifier[p] })
keyword[elif] identifier[is_hash160] ( identifier[p] ):
identifier[final_params] . identifier[append] ({ literal[string] : identifier[ContractParameterTypes] . identifier[HASH160] . identifier[value] , literal[string] : identifier[p] })
keyword[elif] identifier[isinstance] ( identifier[p] , identifier[bytearray] ):
identifier[final_params] . identifier[append] ({ literal[string] : identifier[ContractParameterTypes] . identifier[BYTE_ARRAY] . identifier[value] , literal[string] : identifier[p] })
keyword[elif] identifier[isinstance] ( identifier[p] , identifier[str] ):
identifier[final_params] . identifier[append] ({ literal[string] : identifier[ContractParameterTypes] . identifier[STRING] . identifier[value] , literal[string] : identifier[p] })
keyword[elif] identifier[isinstance] ( identifier[p] , identifier[list] ):
identifier[innerp] = identifier[encode_invocation_params] ( identifier[p] )
identifier[final_params] . identifier[append] ({ literal[string] : identifier[ContractParameterTypes] . identifier[ARRAY] . identifier[value] , literal[string] : identifier[innerp] })
keyword[return] identifier[final_params]
|
def encode_invocation_params(params):
""" Returns a list of paramaters meant to be passed to JSON-RPC endpoints. """
final_params = []
for p in params:
if isinstance(p, bool):
final_params.append({'type': ContractParameterTypes.BOOLEAN.value, 'value': p}) # depends on [control=['if'], data=[]]
elif isinstance(p, int):
final_params.append({'type': ContractParameterTypes.INTEGER.value, 'value': p}) # depends on [control=['if'], data=[]]
elif is_hash256(p):
final_params.append({'type': ContractParameterTypes.HASH256.value, 'value': p}) # depends on [control=['if'], data=[]]
elif is_hash160(p):
final_params.append({'type': ContractParameterTypes.HASH160.value, 'value': p}) # depends on [control=['if'], data=[]]
elif isinstance(p, bytearray):
final_params.append({'type': ContractParameterTypes.BYTE_ARRAY.value, 'value': p}) # depends on [control=['if'], data=[]]
elif isinstance(p, str):
final_params.append({'type': ContractParameterTypes.STRING.value, 'value': p}) # depends on [control=['if'], data=[]]
elif isinstance(p, list):
innerp = encode_invocation_params(p)
final_params.append({'type': ContractParameterTypes.ARRAY.value, 'value': innerp}) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['p']]
return final_params
|
def _get_var_type_code(self, coltype):
'''Determines the two-character type code for a given variable type
Parameters
----------
coltype : type or np.dtype
The type of the variable
Returns
-------
str
The variable type code for the given type'''
if type(coltype) is np.dtype:
var_type = coltype.kind + str(coltype.itemsize)
return var_type
else:
if coltype is np.int64:
return 'i8'
elif coltype is np.int32:
return 'i4'
elif coltype is np.int16:
return 'i2'
elif coltype is np.int8:
return 'i1'
elif coltype is np.uint64:
return 'u8'
elif coltype is np.uint32:
return 'u4'
elif coltype is np.uint16:
return 'u2'
elif coltype is np.uint8:
return 'u1'
elif coltype is np.float64:
return 'f8'
elif coltype is np.float32:
return 'f4'
elif issubclass(coltype, basestring):
return 'S1'
else:
raise TypeError('Unknown Variable Type' + str(coltype))
|
def function[_get_var_type_code, parameter[self, coltype]]:
constant[Determines the two-character type code for a given variable type
Parameters
----------
coltype : type or np.dtype
The type of the variable
Returns
-------
str
The variable type code for the given type]
if compare[call[name[type], parameter[name[coltype]]] is name[np].dtype] begin[:]
variable[var_type] assign[=] binary_operation[name[coltype].kind + call[name[str], parameter[name[coltype].itemsize]]]
return[name[var_type]]
|
keyword[def] identifier[_get_var_type_code] ( identifier[self] , identifier[coltype] ):
literal[string]
keyword[if] identifier[type] ( identifier[coltype] ) keyword[is] identifier[np] . identifier[dtype] :
identifier[var_type] = identifier[coltype] . identifier[kind] + identifier[str] ( identifier[coltype] . identifier[itemsize] )
keyword[return] identifier[var_type]
keyword[else] :
keyword[if] identifier[coltype] keyword[is] identifier[np] . identifier[int64] :
keyword[return] literal[string]
keyword[elif] identifier[coltype] keyword[is] identifier[np] . identifier[int32] :
keyword[return] literal[string]
keyword[elif] identifier[coltype] keyword[is] identifier[np] . identifier[int16] :
keyword[return] literal[string]
keyword[elif] identifier[coltype] keyword[is] identifier[np] . identifier[int8] :
keyword[return] literal[string]
keyword[elif] identifier[coltype] keyword[is] identifier[np] . identifier[uint64] :
keyword[return] literal[string]
keyword[elif] identifier[coltype] keyword[is] identifier[np] . identifier[uint32] :
keyword[return] literal[string]
keyword[elif] identifier[coltype] keyword[is] identifier[np] . identifier[uint16] :
keyword[return] literal[string]
keyword[elif] identifier[coltype] keyword[is] identifier[np] . identifier[uint8] :
keyword[return] literal[string]
keyword[elif] identifier[coltype] keyword[is] identifier[np] . identifier[float64] :
keyword[return] literal[string]
keyword[elif] identifier[coltype] keyword[is] identifier[np] . identifier[float32] :
keyword[return] literal[string]
keyword[elif] identifier[issubclass] ( identifier[coltype] , identifier[basestring] ):
keyword[return] literal[string]
keyword[else] :
keyword[raise] identifier[TypeError] ( literal[string] + identifier[str] ( identifier[coltype] ))
|
def _get_var_type_code(self, coltype):
"""Determines the two-character type code for a given variable type
Parameters
----------
coltype : type or np.dtype
The type of the variable
Returns
-------
str
The variable type code for the given type"""
if type(coltype) is np.dtype:
var_type = coltype.kind + str(coltype.itemsize)
return var_type # depends on [control=['if'], data=[]]
elif coltype is np.int64:
return 'i8' # depends on [control=['if'], data=[]]
elif coltype is np.int32:
return 'i4' # depends on [control=['if'], data=[]]
elif coltype is np.int16:
return 'i2' # depends on [control=['if'], data=[]]
elif coltype is np.int8:
return 'i1' # depends on [control=['if'], data=[]]
elif coltype is np.uint64:
return 'u8' # depends on [control=['if'], data=[]]
elif coltype is np.uint32:
return 'u4' # depends on [control=['if'], data=[]]
elif coltype is np.uint16:
return 'u2' # depends on [control=['if'], data=[]]
elif coltype is np.uint8:
return 'u1' # depends on [control=['if'], data=[]]
elif coltype is np.float64:
return 'f8' # depends on [control=['if'], data=[]]
elif coltype is np.float32:
return 'f4' # depends on [control=['if'], data=[]]
elif issubclass(coltype, basestring):
return 'S1' # depends on [control=['if'], data=[]]
else:
raise TypeError('Unknown Variable Type' + str(coltype))
|
def neighbors(self, **kwargs):
"""list[dict]: A list of dictionary items describing the operational
state of LLDP.
"""
urn = "{urn:brocade.com:mgmt:brocade-lldp-ext}"
result = []
has_more = ''
last_ifindex = ''
rbridge_id = None
if 'rbridge_id' in kwargs:
rbridge_id = kwargs.pop('rbridge_id')
while (has_more == '') or (has_more == 'true'):
request_lldp = self.get_lldp_neighbors_request(last_ifindex,
rbridge_id)
lldp_result = self._callback(request_lldp, 'get')
has_more = lldp_result.find('%shas-more' % urn).text
for item in lldp_result.findall('%slldp-neighbor-detail' % urn):
local_int_name = item.find('%slocal-interface-name' % urn).text
local_int_mac = item.find('%slocal-interface-mac' % urn).text
last_ifindex = item.find(
'%slocal-interface-ifindex' % urn).text
remote_int_name = item.find(
'%sremote-interface-name' % urn).text
remote_int_mac = item.find('%sremote-interface-mac' % urn).text
remote_chas_id = item.find('%sremote-chassis-id' % urn).text
try:
remote_sys_name = item.find(
'%sremote-system-name' % urn).text
except AttributeError:
remote_sys_name = ''
try:
remote_sys_desc = item.find('%sremote-system-description' %
urn).text
except AttributeError:
remote_sys_desc = ''
try:
remote_mgmt_addr = item.find(
'%sremote-management-address' %
urn).text
except AttributeError:
remote_mgmt_addr = ''
if 'Fo ' in local_int_name:
local_int_name = local_int_name.replace(
'Fo ',
'FortyGigabitEthernet '
)
if 'Te ' in local_int_name:
local_int_name = local_int_name.replace(
'Te ',
'TenGigabitEthernet '
)
item_results = {'local-int-name': local_int_name,
'local-int-mac': local_int_mac,
'remote-int-name': remote_int_name,
'remote-int-mac': remote_int_mac,
'remote-chassis-id': remote_chas_id,
'remote-system-name': remote_sys_name,
'remote-system-description': remote_sys_desc,
'remote-management-address': remote_mgmt_addr}
result.append(item_results)
return result
|
def function[neighbors, parameter[self]]:
constant[list[dict]: A list of dictionary items describing the operational
state of LLDP.
]
variable[urn] assign[=] constant[{urn:brocade.com:mgmt:brocade-lldp-ext}]
variable[result] assign[=] list[[]]
variable[has_more] assign[=] constant[]
variable[last_ifindex] assign[=] constant[]
variable[rbridge_id] assign[=] constant[None]
if compare[constant[rbridge_id] in name[kwargs]] begin[:]
variable[rbridge_id] assign[=] call[name[kwargs].pop, parameter[constant[rbridge_id]]]
while <ast.BoolOp object at 0x7da18f58f670> begin[:]
variable[request_lldp] assign[=] call[name[self].get_lldp_neighbors_request, parameter[name[last_ifindex], name[rbridge_id]]]
variable[lldp_result] assign[=] call[name[self]._callback, parameter[name[request_lldp], constant[get]]]
variable[has_more] assign[=] call[name[lldp_result].find, parameter[binary_operation[constant[%shas-more] <ast.Mod object at 0x7da2590d6920> name[urn]]]].text
for taget[name[item]] in starred[call[name[lldp_result].findall, parameter[binary_operation[constant[%slldp-neighbor-detail] <ast.Mod object at 0x7da2590d6920> name[urn]]]]] begin[:]
variable[local_int_name] assign[=] call[name[item].find, parameter[binary_operation[constant[%slocal-interface-name] <ast.Mod object at 0x7da2590d6920> name[urn]]]].text
variable[local_int_mac] assign[=] call[name[item].find, parameter[binary_operation[constant[%slocal-interface-mac] <ast.Mod object at 0x7da2590d6920> name[urn]]]].text
variable[last_ifindex] assign[=] call[name[item].find, parameter[binary_operation[constant[%slocal-interface-ifindex] <ast.Mod object at 0x7da2590d6920> name[urn]]]].text
variable[remote_int_name] assign[=] call[name[item].find, parameter[binary_operation[constant[%sremote-interface-name] <ast.Mod object at 0x7da2590d6920> name[urn]]]].text
variable[remote_int_mac] assign[=] call[name[item].find, parameter[binary_operation[constant[%sremote-interface-mac] <ast.Mod object at 0x7da2590d6920> name[urn]]]].text
variable[remote_chas_id] assign[=] call[name[item].find, parameter[binary_operation[constant[%sremote-chassis-id] <ast.Mod object at 0x7da2590d6920> name[urn]]]].text
<ast.Try object at 0x7da18f58f3d0>
<ast.Try object at 0x7da18f58ce20>
<ast.Try object at 0x7da18f58d450>
if compare[constant[Fo ] in name[local_int_name]] begin[:]
variable[local_int_name] assign[=] call[name[local_int_name].replace, parameter[constant[Fo ], constant[FortyGigabitEthernet ]]]
if compare[constant[Te ] in name[local_int_name]] begin[:]
variable[local_int_name] assign[=] call[name[local_int_name].replace, parameter[constant[Te ], constant[TenGigabitEthernet ]]]
variable[item_results] assign[=] dictionary[[<ast.Constant object at 0x7da18bc734c0>, <ast.Constant object at 0x7da18bc73250>, <ast.Constant object at 0x7da18bc71150>, <ast.Constant object at 0x7da18bc71ba0>, <ast.Constant object at 0x7da18bc70fa0>, <ast.Constant object at 0x7da18bc72350>, <ast.Constant object at 0x7da18bc71c00>, <ast.Constant object at 0x7da18bc73940>], [<ast.Name object at 0x7da18bc71540>, <ast.Name object at 0x7da18bc70eb0>, <ast.Name object at 0x7da18bc731f0>, <ast.Name object at 0x7da18bc704c0>, <ast.Name object at 0x7da18bc73880>, <ast.Name object at 0x7da18bc70520>, <ast.Name object at 0x7da18bc72d10>, <ast.Name object at 0x7da18bc736a0>]]
call[name[result].append, parameter[name[item_results]]]
return[name[result]]
|
keyword[def] identifier[neighbors] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[urn] = literal[string]
identifier[result] =[]
identifier[has_more] = literal[string]
identifier[last_ifindex] = literal[string]
identifier[rbridge_id] = keyword[None]
keyword[if] literal[string] keyword[in] identifier[kwargs] :
identifier[rbridge_id] = identifier[kwargs] . identifier[pop] ( literal[string] )
keyword[while] ( identifier[has_more] == literal[string] ) keyword[or] ( identifier[has_more] == literal[string] ):
identifier[request_lldp] = identifier[self] . identifier[get_lldp_neighbors_request] ( identifier[last_ifindex] ,
identifier[rbridge_id] )
identifier[lldp_result] = identifier[self] . identifier[_callback] ( identifier[request_lldp] , literal[string] )
identifier[has_more] = identifier[lldp_result] . identifier[find] ( literal[string] % identifier[urn] ). identifier[text]
keyword[for] identifier[item] keyword[in] identifier[lldp_result] . identifier[findall] ( literal[string] % identifier[urn] ):
identifier[local_int_name] = identifier[item] . identifier[find] ( literal[string] % identifier[urn] ). identifier[text]
identifier[local_int_mac] = identifier[item] . identifier[find] ( literal[string] % identifier[urn] ). identifier[text]
identifier[last_ifindex] = identifier[item] . identifier[find] (
literal[string] % identifier[urn] ). identifier[text]
identifier[remote_int_name] = identifier[item] . identifier[find] (
literal[string] % identifier[urn] ). identifier[text]
identifier[remote_int_mac] = identifier[item] . identifier[find] ( literal[string] % identifier[urn] ). identifier[text]
identifier[remote_chas_id] = identifier[item] . identifier[find] ( literal[string] % identifier[urn] ). identifier[text]
keyword[try] :
identifier[remote_sys_name] = identifier[item] . identifier[find] (
literal[string] % identifier[urn] ). identifier[text]
keyword[except] identifier[AttributeError] :
identifier[remote_sys_name] = literal[string]
keyword[try] :
identifier[remote_sys_desc] = identifier[item] . identifier[find] ( literal[string] %
identifier[urn] ). identifier[text]
keyword[except] identifier[AttributeError] :
identifier[remote_sys_desc] = literal[string]
keyword[try] :
identifier[remote_mgmt_addr] = identifier[item] . identifier[find] (
literal[string] %
identifier[urn] ). identifier[text]
keyword[except] identifier[AttributeError] :
identifier[remote_mgmt_addr] = literal[string]
keyword[if] literal[string] keyword[in] identifier[local_int_name] :
identifier[local_int_name] = identifier[local_int_name] . identifier[replace] (
literal[string] ,
literal[string]
)
keyword[if] literal[string] keyword[in] identifier[local_int_name] :
identifier[local_int_name] = identifier[local_int_name] . identifier[replace] (
literal[string] ,
literal[string]
)
identifier[item_results] ={ literal[string] : identifier[local_int_name] ,
literal[string] : identifier[local_int_mac] ,
literal[string] : identifier[remote_int_name] ,
literal[string] : identifier[remote_int_mac] ,
literal[string] : identifier[remote_chas_id] ,
literal[string] : identifier[remote_sys_name] ,
literal[string] : identifier[remote_sys_desc] ,
literal[string] : identifier[remote_mgmt_addr] }
identifier[result] . identifier[append] ( identifier[item_results] )
keyword[return] identifier[result]
|
def neighbors(self, **kwargs):
"""list[dict]: A list of dictionary items describing the operational
state of LLDP.
"""
urn = '{urn:brocade.com:mgmt:brocade-lldp-ext}'
result = []
has_more = ''
last_ifindex = ''
rbridge_id = None
if 'rbridge_id' in kwargs:
rbridge_id = kwargs.pop('rbridge_id') # depends on [control=['if'], data=['kwargs']]
while has_more == '' or has_more == 'true':
request_lldp = self.get_lldp_neighbors_request(last_ifindex, rbridge_id)
lldp_result = self._callback(request_lldp, 'get')
has_more = lldp_result.find('%shas-more' % urn).text
for item in lldp_result.findall('%slldp-neighbor-detail' % urn):
local_int_name = item.find('%slocal-interface-name' % urn).text
local_int_mac = item.find('%slocal-interface-mac' % urn).text
last_ifindex = item.find('%slocal-interface-ifindex' % urn).text
remote_int_name = item.find('%sremote-interface-name' % urn).text
remote_int_mac = item.find('%sremote-interface-mac' % urn).text
remote_chas_id = item.find('%sremote-chassis-id' % urn).text
try:
remote_sys_name = item.find('%sremote-system-name' % urn).text # depends on [control=['try'], data=[]]
except AttributeError:
remote_sys_name = '' # depends on [control=['except'], data=[]]
try:
remote_sys_desc = item.find('%sremote-system-description' % urn).text # depends on [control=['try'], data=[]]
except AttributeError:
remote_sys_desc = '' # depends on [control=['except'], data=[]]
try:
remote_mgmt_addr = item.find('%sremote-management-address' % urn).text # depends on [control=['try'], data=[]]
except AttributeError:
remote_mgmt_addr = '' # depends on [control=['except'], data=[]]
if 'Fo ' in local_int_name:
local_int_name = local_int_name.replace('Fo ', 'FortyGigabitEthernet ') # depends on [control=['if'], data=['local_int_name']]
if 'Te ' in local_int_name:
local_int_name = local_int_name.replace('Te ', 'TenGigabitEthernet ') # depends on [control=['if'], data=['local_int_name']]
item_results = {'local-int-name': local_int_name, 'local-int-mac': local_int_mac, 'remote-int-name': remote_int_name, 'remote-int-mac': remote_int_mac, 'remote-chassis-id': remote_chas_id, 'remote-system-name': remote_sys_name, 'remote-system-description': remote_sys_desc, 'remote-management-address': remote_mgmt_addr}
result.append(item_results) # depends on [control=['for'], data=['item']] # depends on [control=['while'], data=[]]
return result
|
def _apply_pre_prepare(self, pre_prepare: PrePrepare):
"""
Applies (but not commits) requests of the PrePrepare
to the ledger and state
"""
reqs = []
idx = 0
rejects = []
invalid_indices = []
suspicious = False
# 1. apply each request
for req_key in pre_prepare.reqIdr:
req = self.requests[req_key].finalised
try:
self.processReqDuringBatch(req,
pre_prepare.ppTime)
except (InvalidClientMessageException, UnknownIdentifier, SuspiciousPrePrepare) as ex:
self.logger.warning('{} encountered exception {} while processing {}, '
'will reject'.format(self, ex, req))
rejects.append((req.key, Reject(req.identifier, req.reqId, ex)))
invalid_indices.append(idx)
if isinstance(ex, SuspiciousPrePrepare):
suspicious = True
finally:
reqs.append(req)
idx += 1
# 2. call callback for the applied batch
if self.isMaster:
three_pc_batch = ThreePcBatch.from_pre_prepare(pre_prepare,
state_root=self.stateRootHash(pre_prepare.ledgerId,
to_str=False),
txn_root=self.txnRootHash(pre_prepare.ledgerId,
to_str=False),
primaries=[],
valid_digests=self.get_valid_req_ids_from_all_requests(
reqs, invalid_indices))
self.node.onBatchCreated(three_pc_batch)
return reqs, invalid_indices, rejects, suspicious
|
def function[_apply_pre_prepare, parameter[self, pre_prepare]]:
constant[
Applies (but not commits) requests of the PrePrepare
to the ledger and state
]
variable[reqs] assign[=] list[[]]
variable[idx] assign[=] constant[0]
variable[rejects] assign[=] list[[]]
variable[invalid_indices] assign[=] list[[]]
variable[suspicious] assign[=] constant[False]
for taget[name[req_key]] in starred[name[pre_prepare].reqIdr] begin[:]
variable[req] assign[=] call[name[self].requests][name[req_key]].finalised
<ast.Try object at 0x7da1b16c0d00>
<ast.AugAssign object at 0x7da1b16c3400>
if name[self].isMaster begin[:]
variable[three_pc_batch] assign[=] call[name[ThreePcBatch].from_pre_prepare, parameter[name[pre_prepare]]]
call[name[self].node.onBatchCreated, parameter[name[three_pc_batch]]]
return[tuple[[<ast.Name object at 0x7da1b1735c00>, <ast.Name object at 0x7da1b1734370>, <ast.Name object at 0x7da1b1735d20>, <ast.Name object at 0x7da1b1737cd0>]]]
|
keyword[def] identifier[_apply_pre_prepare] ( identifier[self] , identifier[pre_prepare] : identifier[PrePrepare] ):
literal[string]
identifier[reqs] =[]
identifier[idx] = literal[int]
identifier[rejects] =[]
identifier[invalid_indices] =[]
identifier[suspicious] = keyword[False]
keyword[for] identifier[req_key] keyword[in] identifier[pre_prepare] . identifier[reqIdr] :
identifier[req] = identifier[self] . identifier[requests] [ identifier[req_key] ]. identifier[finalised]
keyword[try] :
identifier[self] . identifier[processReqDuringBatch] ( identifier[req] ,
identifier[pre_prepare] . identifier[ppTime] )
keyword[except] ( identifier[InvalidClientMessageException] , identifier[UnknownIdentifier] , identifier[SuspiciousPrePrepare] ) keyword[as] identifier[ex] :
identifier[self] . identifier[logger] . identifier[warning] ( literal[string]
literal[string] . identifier[format] ( identifier[self] , identifier[ex] , identifier[req] ))
identifier[rejects] . identifier[append] (( identifier[req] . identifier[key] , identifier[Reject] ( identifier[req] . identifier[identifier] , identifier[req] . identifier[reqId] , identifier[ex] )))
identifier[invalid_indices] . identifier[append] ( identifier[idx] )
keyword[if] identifier[isinstance] ( identifier[ex] , identifier[SuspiciousPrePrepare] ):
identifier[suspicious] = keyword[True]
keyword[finally] :
identifier[reqs] . identifier[append] ( identifier[req] )
identifier[idx] += literal[int]
keyword[if] identifier[self] . identifier[isMaster] :
identifier[three_pc_batch] = identifier[ThreePcBatch] . identifier[from_pre_prepare] ( identifier[pre_prepare] ,
identifier[state_root] = identifier[self] . identifier[stateRootHash] ( identifier[pre_prepare] . identifier[ledgerId] ,
identifier[to_str] = keyword[False] ),
identifier[txn_root] = identifier[self] . identifier[txnRootHash] ( identifier[pre_prepare] . identifier[ledgerId] ,
identifier[to_str] = keyword[False] ),
identifier[primaries] =[],
identifier[valid_digests] = identifier[self] . identifier[get_valid_req_ids_from_all_requests] (
identifier[reqs] , identifier[invalid_indices] ))
identifier[self] . identifier[node] . identifier[onBatchCreated] ( identifier[three_pc_batch] )
keyword[return] identifier[reqs] , identifier[invalid_indices] , identifier[rejects] , identifier[suspicious]
|
def _apply_pre_prepare(self, pre_prepare: PrePrepare):
"""
Applies (but not commits) requests of the PrePrepare
to the ledger and state
"""
reqs = []
idx = 0
rejects = []
invalid_indices = []
suspicious = False
# 1. apply each request
for req_key in pre_prepare.reqIdr:
req = self.requests[req_key].finalised
try:
self.processReqDuringBatch(req, pre_prepare.ppTime) # depends on [control=['try'], data=[]]
except (InvalidClientMessageException, UnknownIdentifier, SuspiciousPrePrepare) as ex:
self.logger.warning('{} encountered exception {} while processing {}, will reject'.format(self, ex, req))
rejects.append((req.key, Reject(req.identifier, req.reqId, ex)))
invalid_indices.append(idx)
if isinstance(ex, SuspiciousPrePrepare):
suspicious = True # depends on [control=['if'], data=[]] # depends on [control=['except'], data=['ex']]
finally:
reqs.append(req)
idx += 1 # depends on [control=['for'], data=['req_key']]
# 2. call callback for the applied batch
if self.isMaster:
three_pc_batch = ThreePcBatch.from_pre_prepare(pre_prepare, state_root=self.stateRootHash(pre_prepare.ledgerId, to_str=False), txn_root=self.txnRootHash(pre_prepare.ledgerId, to_str=False), primaries=[], valid_digests=self.get_valid_req_ids_from_all_requests(reqs, invalid_indices))
self.node.onBatchCreated(three_pc_batch) # depends on [control=['if'], data=[]]
return (reqs, invalid_indices, rejects, suspicious)
|
def copy_from(self, src, dest):
"""
copy a file or a directory from container or image to host system.
:param src: str, path to a file or a directory within container or image
:param dest: str, path to a file or a directory on host system
:return: None
"""
logger.debug("copying %s from host to container at %s", src, dest)
cmd = ["machinectl", "--no-pager", "copy-from", self.name, src, dest]
run_cmd(cmd)
|
def function[copy_from, parameter[self, src, dest]]:
constant[
copy a file or a directory from container or image to host system.
:param src: str, path to a file or a directory within container or image
:param dest: str, path to a file or a directory on host system
:return: None
]
call[name[logger].debug, parameter[constant[copying %s from host to container at %s], name[src], name[dest]]]
variable[cmd] assign[=] list[[<ast.Constant object at 0x7da1b12f1ba0>, <ast.Constant object at 0x7da1b12f0460>, <ast.Constant object at 0x7da1b12f3b50>, <ast.Attribute object at 0x7da1b12f2800>, <ast.Name object at 0x7da1b12f1540>, <ast.Name object at 0x7da1b12f0fa0>]]
call[name[run_cmd], parameter[name[cmd]]]
|
keyword[def] identifier[copy_from] ( identifier[self] , identifier[src] , identifier[dest] ):
literal[string]
identifier[logger] . identifier[debug] ( literal[string] , identifier[src] , identifier[dest] )
identifier[cmd] =[ literal[string] , literal[string] , literal[string] , identifier[self] . identifier[name] , identifier[src] , identifier[dest] ]
identifier[run_cmd] ( identifier[cmd] )
|
def copy_from(self, src, dest):
"""
copy a file or a directory from container or image to host system.
:param src: str, path to a file or a directory within container or image
:param dest: str, path to a file or a directory on host system
:return: None
"""
logger.debug('copying %s from host to container at %s', src, dest)
cmd = ['machinectl', '--no-pager', 'copy-from', self.name, src, dest]
run_cmd(cmd)
|
def apply_motion_tracks(self, tracks, accuracy=0.004):
"""
Similar to click but press the screen for the given time interval and then release
Args:
tracks (:py:obj:`list`): list of :py:class:`poco.utils.track.MotionTrack` object
accuracy (:py:obj:`float`): motion accuracy for each motion steps in normalized coordinate metrics.
"""
if not tracks:
raise ValueError('Please provide at least one track. Got {}'.format(repr(tracks)))
tb = MotionTrackBatch(tracks)
return self.agent.input.applyMotionEvents(tb.discretize(accuracy))
|
def function[apply_motion_tracks, parameter[self, tracks, accuracy]]:
constant[
Similar to click but press the screen for the given time interval and then release
Args:
tracks (:py:obj:`list`): list of :py:class:`poco.utils.track.MotionTrack` object
accuracy (:py:obj:`float`): motion accuracy for each motion steps in normalized coordinate metrics.
]
if <ast.UnaryOp object at 0x7da2044c0af0> begin[:]
<ast.Raise object at 0x7da2044c1300>
variable[tb] assign[=] call[name[MotionTrackBatch], parameter[name[tracks]]]
return[call[name[self].agent.input.applyMotionEvents, parameter[call[name[tb].discretize, parameter[name[accuracy]]]]]]
|
keyword[def] identifier[apply_motion_tracks] ( identifier[self] , identifier[tracks] , identifier[accuracy] = literal[int] ):
literal[string]
keyword[if] keyword[not] identifier[tracks] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[repr] ( identifier[tracks] )))
identifier[tb] = identifier[MotionTrackBatch] ( identifier[tracks] )
keyword[return] identifier[self] . identifier[agent] . identifier[input] . identifier[applyMotionEvents] ( identifier[tb] . identifier[discretize] ( identifier[accuracy] ))
|
def apply_motion_tracks(self, tracks, accuracy=0.004):
"""
Similar to click but press the screen for the given time interval and then release
Args:
tracks (:py:obj:`list`): list of :py:class:`poco.utils.track.MotionTrack` object
accuracy (:py:obj:`float`): motion accuracy for each motion steps in normalized coordinate metrics.
"""
if not tracks:
raise ValueError('Please provide at least one track. Got {}'.format(repr(tracks))) # depends on [control=['if'], data=[]]
tb = MotionTrackBatch(tracks)
return self.agent.input.applyMotionEvents(tb.discretize(accuracy))
|
def _ReadEventDataIntoEvent(self, event):
"""Reads the data into the event.
This function is intended to offer backwards compatible event behavior.
Args:
event (EventObject): event.
"""
if self._storage_type != definitions.STORAGE_TYPE_SESSION:
return
event_data_identifier = event.GetEventDataIdentifier()
if event_data_identifier:
lookup_key = event_data_identifier.CopyToString()
event_data = self._event_data[lookup_key]
for attribute_name, attribute_value in event_data.GetAttributes():
setattr(event, attribute_name, attribute_value)
|
def function[_ReadEventDataIntoEvent, parameter[self, event]]:
constant[Reads the data into the event.
This function is intended to offer backwards compatible event behavior.
Args:
event (EventObject): event.
]
if compare[name[self]._storage_type not_equal[!=] name[definitions].STORAGE_TYPE_SESSION] begin[:]
return[None]
variable[event_data_identifier] assign[=] call[name[event].GetEventDataIdentifier, parameter[]]
if name[event_data_identifier] begin[:]
variable[lookup_key] assign[=] call[name[event_data_identifier].CopyToString, parameter[]]
variable[event_data] assign[=] call[name[self]._event_data][name[lookup_key]]
for taget[tuple[[<ast.Name object at 0x7da18c4ce710>, <ast.Name object at 0x7da18c4cf190>]]] in starred[call[name[event_data].GetAttributes, parameter[]]] begin[:]
call[name[setattr], parameter[name[event], name[attribute_name], name[attribute_value]]]
|
keyword[def] identifier[_ReadEventDataIntoEvent] ( identifier[self] , identifier[event] ):
literal[string]
keyword[if] identifier[self] . identifier[_storage_type] != identifier[definitions] . identifier[STORAGE_TYPE_SESSION] :
keyword[return]
identifier[event_data_identifier] = identifier[event] . identifier[GetEventDataIdentifier] ()
keyword[if] identifier[event_data_identifier] :
identifier[lookup_key] = identifier[event_data_identifier] . identifier[CopyToString] ()
identifier[event_data] = identifier[self] . identifier[_event_data] [ identifier[lookup_key] ]
keyword[for] identifier[attribute_name] , identifier[attribute_value] keyword[in] identifier[event_data] . identifier[GetAttributes] ():
identifier[setattr] ( identifier[event] , identifier[attribute_name] , identifier[attribute_value] )
|
def _ReadEventDataIntoEvent(self, event):
"""Reads the data into the event.
This function is intended to offer backwards compatible event behavior.
Args:
event (EventObject): event.
"""
if self._storage_type != definitions.STORAGE_TYPE_SESSION:
return # depends on [control=['if'], data=[]]
event_data_identifier = event.GetEventDataIdentifier()
if event_data_identifier:
lookup_key = event_data_identifier.CopyToString()
event_data = self._event_data[lookup_key]
for (attribute_name, attribute_value) in event_data.GetAttributes():
setattr(event, attribute_name, attribute_value) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
|
def params_size(num_components, component_params_size, name=None):
"""Number of `params` needed to create a `MixtureSameFamily` distribution.
Arguments:
num_components: Number of component distributions in the mixture
distribution.
component_params_size: Number of parameters needed to create a single
component distribution.
name: The name to use for the op to compute the number of parameters
(if such an op needs to be created).
Returns:
params_size: The number of parameters needed to create the mixture
distribution.
"""
with tf.compat.v1.name_scope(name, 'MixtureSameFamily_params_size',
[num_components, component_params_size]):
num_components = tf.convert_to_tensor(
value=num_components, name='num_components', dtype_hint=tf.int32)
component_params_size = tf.convert_to_tensor(
value=component_params_size, name='component_params_size')
num_components = dist_util.prefer_static_value(num_components)
component_params_size = dist_util.prefer_static_value(
component_params_size)
return num_components + num_components * component_params_size
|
def function[params_size, parameter[num_components, component_params_size, name]]:
constant[Number of `params` needed to create a `MixtureSameFamily` distribution.
Arguments:
num_components: Number of component distributions in the mixture
distribution.
component_params_size: Number of parameters needed to create a single
component distribution.
name: The name to use for the op to compute the number of parameters
(if such an op needs to be created).
Returns:
params_size: The number of parameters needed to create the mixture
distribution.
]
with call[name[tf].compat.v1.name_scope, parameter[name[name], constant[MixtureSameFamily_params_size], list[[<ast.Name object at 0x7da1b03a3070>, <ast.Name object at 0x7da1b03a31f0>]]]] begin[:]
variable[num_components] assign[=] call[name[tf].convert_to_tensor, parameter[]]
variable[component_params_size] assign[=] call[name[tf].convert_to_tensor, parameter[]]
variable[num_components] assign[=] call[name[dist_util].prefer_static_value, parameter[name[num_components]]]
variable[component_params_size] assign[=] call[name[dist_util].prefer_static_value, parameter[name[component_params_size]]]
return[binary_operation[name[num_components] + binary_operation[name[num_components] * name[component_params_size]]]]
|
keyword[def] identifier[params_size] ( identifier[num_components] , identifier[component_params_size] , identifier[name] = keyword[None] ):
literal[string]
keyword[with] identifier[tf] . identifier[compat] . identifier[v1] . identifier[name_scope] ( identifier[name] , literal[string] ,
[ identifier[num_components] , identifier[component_params_size] ]):
identifier[num_components] = identifier[tf] . identifier[convert_to_tensor] (
identifier[value] = identifier[num_components] , identifier[name] = literal[string] , identifier[dtype_hint] = identifier[tf] . identifier[int32] )
identifier[component_params_size] = identifier[tf] . identifier[convert_to_tensor] (
identifier[value] = identifier[component_params_size] , identifier[name] = literal[string] )
identifier[num_components] = identifier[dist_util] . identifier[prefer_static_value] ( identifier[num_components] )
identifier[component_params_size] = identifier[dist_util] . identifier[prefer_static_value] (
identifier[component_params_size] )
keyword[return] identifier[num_components] + identifier[num_components] * identifier[component_params_size]
|
def params_size(num_components, component_params_size, name=None):
"""Number of `params` needed to create a `MixtureSameFamily` distribution.
Arguments:
num_components: Number of component distributions in the mixture
distribution.
component_params_size: Number of parameters needed to create a single
component distribution.
name: The name to use for the op to compute the number of parameters
(if such an op needs to be created).
Returns:
params_size: The number of parameters needed to create the mixture
distribution.
"""
with tf.compat.v1.name_scope(name, 'MixtureSameFamily_params_size', [num_components, component_params_size]):
num_components = tf.convert_to_tensor(value=num_components, name='num_components', dtype_hint=tf.int32)
component_params_size = tf.convert_to_tensor(value=component_params_size, name='component_params_size')
num_components = dist_util.prefer_static_value(num_components)
component_params_size = dist_util.prefer_static_value(component_params_size)
return num_components + num_components * component_params_size # depends on [control=['with'], data=[]]
|
def run(self):
"""
each plugin has to implement this method -- it is used to run the plugin actually
response from plugin is kept and used in json result response
"""
user_params = None
build_json = get_build_json()
git_url = os.environ['SOURCE_URI']
git_ref = os.environ.get('SOURCE_REF', None)
image = os.environ['OUTPUT_IMAGE']
self.target_registry = os.environ.get('OUTPUT_REGISTRY', None)
self.reactor_env = None
git_commit_depth = None
git_branch = None
try:
user_params = os.environ['USER_PARAMS']
user_data = self.validate_user_data(user_params)
git_commit_depth = user_data.get('git_commit_depth', None)
git_branch = user_data.get('git_branch', None)
self.plugins_json = self.get_plugins_with_user_data(user_params, user_data)
# if we get the USER_PARAMS, we'd better get the REACTOR_CONFIG too
reactor_config_map = os.environ['REACTOR_CONFIG']
self.reactor_env = read_yaml(reactor_config_map, 'schemas/config.json')
except KeyError:
try:
self.plugins_json = os.environ['ATOMIC_REACTOR_PLUGINS']
except KeyError:
raise RuntimeError("No plugin configuration found!")
self.plugins_json = json.loads(self.plugins_json)
# validate json before performing any changes
read_yaml(json.dumps(self.plugins_json), 'schemas/plugins.json')
input_json = {
'source': {
'provider': 'git',
'uri': git_url,
'provider_params': {
'git_commit': git_ref,
'git_commit_depth': git_commit_depth,
'git_branch': git_branch,
},
},
'image': image,
'openshift_build_selflink': build_json.get('metadata', {}).get('selfLink', None)
}
input_json.update(self.plugins_json)
self.log.debug("build json: %s", input_json)
self.remove_plugins_without_parameters()
# make sure the final json is valid
read_yaml(json.dumps(self.plugins_json), 'schemas/plugins.json')
return input_json
|
def function[run, parameter[self]]:
constant[
each plugin has to implement this method -- it is used to run the plugin actually
response from plugin is kept and used in json result response
]
variable[user_params] assign[=] constant[None]
variable[build_json] assign[=] call[name[get_build_json], parameter[]]
variable[git_url] assign[=] call[name[os].environ][constant[SOURCE_URI]]
variable[git_ref] assign[=] call[name[os].environ.get, parameter[constant[SOURCE_REF], constant[None]]]
variable[image] assign[=] call[name[os].environ][constant[OUTPUT_IMAGE]]
name[self].target_registry assign[=] call[name[os].environ.get, parameter[constant[OUTPUT_REGISTRY], constant[None]]]
name[self].reactor_env assign[=] constant[None]
variable[git_commit_depth] assign[=] constant[None]
variable[git_branch] assign[=] constant[None]
<ast.Try object at 0x7da18eb559f0>
name[self].plugins_json assign[=] call[name[json].loads, parameter[name[self].plugins_json]]
call[name[read_yaml], parameter[call[name[json].dumps, parameter[name[self].plugins_json]], constant[schemas/plugins.json]]]
variable[input_json] assign[=] dictionary[[<ast.Constant object at 0x7da1b0e9d510>, <ast.Constant object at 0x7da1b0e9fee0>, <ast.Constant object at 0x7da1b0e9d360>], [<ast.Dict object at 0x7da1b0e9ffa0>, <ast.Name object at 0x7da1b0e9ff10>, <ast.Call object at 0x7da1b0e9fe20>]]
call[name[input_json].update, parameter[name[self].plugins_json]]
call[name[self].log.debug, parameter[constant[build json: %s], name[input_json]]]
call[name[self].remove_plugins_without_parameters, parameter[]]
call[name[read_yaml], parameter[call[name[json].dumps, parameter[name[self].plugins_json]], constant[schemas/plugins.json]]]
return[name[input_json]]
|
keyword[def] identifier[run] ( identifier[self] ):
literal[string]
identifier[user_params] = keyword[None]
identifier[build_json] = identifier[get_build_json] ()
identifier[git_url] = identifier[os] . identifier[environ] [ literal[string] ]
identifier[git_ref] = identifier[os] . identifier[environ] . identifier[get] ( literal[string] , keyword[None] )
identifier[image] = identifier[os] . identifier[environ] [ literal[string] ]
identifier[self] . identifier[target_registry] = identifier[os] . identifier[environ] . identifier[get] ( literal[string] , keyword[None] )
identifier[self] . identifier[reactor_env] = keyword[None]
identifier[git_commit_depth] = keyword[None]
identifier[git_branch] = keyword[None]
keyword[try] :
identifier[user_params] = identifier[os] . identifier[environ] [ literal[string] ]
identifier[user_data] = identifier[self] . identifier[validate_user_data] ( identifier[user_params] )
identifier[git_commit_depth] = identifier[user_data] . identifier[get] ( literal[string] , keyword[None] )
identifier[git_branch] = identifier[user_data] . identifier[get] ( literal[string] , keyword[None] )
identifier[self] . identifier[plugins_json] = identifier[self] . identifier[get_plugins_with_user_data] ( identifier[user_params] , identifier[user_data] )
identifier[reactor_config_map] = identifier[os] . identifier[environ] [ literal[string] ]
identifier[self] . identifier[reactor_env] = identifier[read_yaml] ( identifier[reactor_config_map] , literal[string] )
keyword[except] identifier[KeyError] :
keyword[try] :
identifier[self] . identifier[plugins_json] = identifier[os] . identifier[environ] [ literal[string] ]
keyword[except] identifier[KeyError] :
keyword[raise] identifier[RuntimeError] ( literal[string] )
identifier[self] . identifier[plugins_json] = identifier[json] . identifier[loads] ( identifier[self] . identifier[plugins_json] )
identifier[read_yaml] ( identifier[json] . identifier[dumps] ( identifier[self] . identifier[plugins_json] ), literal[string] )
identifier[input_json] ={
literal[string] :{
literal[string] : literal[string] ,
literal[string] : identifier[git_url] ,
literal[string] :{
literal[string] : identifier[git_ref] ,
literal[string] : identifier[git_commit_depth] ,
literal[string] : identifier[git_branch] ,
},
},
literal[string] : identifier[image] ,
literal[string] : identifier[build_json] . identifier[get] ( literal[string] ,{}). identifier[get] ( literal[string] , keyword[None] )
}
identifier[input_json] . identifier[update] ( identifier[self] . identifier[plugins_json] )
identifier[self] . identifier[log] . identifier[debug] ( literal[string] , identifier[input_json] )
identifier[self] . identifier[remove_plugins_without_parameters] ()
identifier[read_yaml] ( identifier[json] . identifier[dumps] ( identifier[self] . identifier[plugins_json] ), literal[string] )
keyword[return] identifier[input_json]
|
def run(self):
"""
each plugin has to implement this method -- it is used to run the plugin actually
response from plugin is kept and used in json result response
"""
user_params = None
build_json = get_build_json()
git_url = os.environ['SOURCE_URI']
git_ref = os.environ.get('SOURCE_REF', None)
image = os.environ['OUTPUT_IMAGE']
self.target_registry = os.environ.get('OUTPUT_REGISTRY', None)
self.reactor_env = None
git_commit_depth = None
git_branch = None
try:
user_params = os.environ['USER_PARAMS']
user_data = self.validate_user_data(user_params)
git_commit_depth = user_data.get('git_commit_depth', None)
git_branch = user_data.get('git_branch', None)
self.plugins_json = self.get_plugins_with_user_data(user_params, user_data)
# if we get the USER_PARAMS, we'd better get the REACTOR_CONFIG too
reactor_config_map = os.environ['REACTOR_CONFIG']
self.reactor_env = read_yaml(reactor_config_map, 'schemas/config.json') # depends on [control=['try'], data=[]]
except KeyError:
try:
self.plugins_json = os.environ['ATOMIC_REACTOR_PLUGINS'] # depends on [control=['try'], data=[]]
except KeyError:
raise RuntimeError('No plugin configuration found!') # depends on [control=['except'], data=[]] # depends on [control=['except'], data=[]]
self.plugins_json = json.loads(self.plugins_json)
# validate json before performing any changes
read_yaml(json.dumps(self.plugins_json), 'schemas/plugins.json')
input_json = {'source': {'provider': 'git', 'uri': git_url, 'provider_params': {'git_commit': git_ref, 'git_commit_depth': git_commit_depth, 'git_branch': git_branch}}, 'image': image, 'openshift_build_selflink': build_json.get('metadata', {}).get('selfLink', None)}
input_json.update(self.plugins_json)
self.log.debug('build json: %s', input_json)
self.remove_plugins_without_parameters()
# make sure the final json is valid
read_yaml(json.dumps(self.plugins_json), 'schemas/plugins.json')
return input_json
|
def init_layout(self):
""" Initialize the layout of the toolkit widget.
This method is called during the bottom-up pass. This method
should initialize the layout of the widget. The child widgets
will be fully initialized and layed out when this is called.
"""
widget = self.widget
for child_widget in self.child_widgets():
widget.addSubview(child_widget)
|
def function[init_layout, parameter[self]]:
constant[ Initialize the layout of the toolkit widget.
This method is called during the bottom-up pass. This method
should initialize the layout of the widget. The child widgets
will be fully initialized and layed out when this is called.
]
variable[widget] assign[=] name[self].widget
for taget[name[child_widget]] in starred[call[name[self].child_widgets, parameter[]]] begin[:]
call[name[widget].addSubview, parameter[name[child_widget]]]
|
keyword[def] identifier[init_layout] ( identifier[self] ):
literal[string]
identifier[widget] = identifier[self] . identifier[widget]
keyword[for] identifier[child_widget] keyword[in] identifier[self] . identifier[child_widgets] ():
identifier[widget] . identifier[addSubview] ( identifier[child_widget] )
|
def init_layout(self):
""" Initialize the layout of the toolkit widget.
This method is called during the bottom-up pass. This method
should initialize the layout of the widget. The child widgets
will be fully initialized and layed out when this is called.
"""
widget = self.widget
for child_widget in self.child_widgets():
widget.addSubview(child_widget) # depends on [control=['for'], data=['child_widget']]
|
def get_stats(package):
"""
Fetch raw statistics of a package, no corrections are made to this
data. You should use get_corrected_stats().
"""
grand_total = 0
if '==' in package:
package, version = package.split('==')
try:
package = normalize(package)
version = None
except ValueError:
raise RuntimeError('No such module or package %r' % package)
# Count downloads
total, releases = count_downloads(
package,
json=True,
version=version,
)
result = {
'version': version,
'releases': releases,
}
grand_total += total
return result, grand_total, version
|
def function[get_stats, parameter[package]]:
constant[
Fetch raw statistics of a package, no corrections are made to this
data. You should use get_corrected_stats().
]
variable[grand_total] assign[=] constant[0]
if compare[constant[==] in name[package]] begin[:]
<ast.Tuple object at 0x7da1b14088e0> assign[=] call[name[package].split, parameter[constant[==]]]
<ast.Try object at 0x7da1b140a6b0>
<ast.Tuple object at 0x7da1b1409f30> assign[=] call[name[count_downloads], parameter[name[package]]]
variable[result] assign[=] dictionary[[<ast.Constant object at 0x7da1b1409240>, <ast.Constant object at 0x7da1b1409270>], [<ast.Name object at 0x7da1b14092a0>, <ast.Name object at 0x7da1b14092d0>]]
<ast.AugAssign object at 0x7da1b1408550>
return[tuple[[<ast.Name object at 0x7da1b1409750>, <ast.Name object at 0x7da1b140a530>, <ast.Name object at 0x7da1b140a440>]]]
|
keyword[def] identifier[get_stats] ( identifier[package] ):
literal[string]
identifier[grand_total] = literal[int]
keyword[if] literal[string] keyword[in] identifier[package] :
identifier[package] , identifier[version] = identifier[package] . identifier[split] ( literal[string] )
keyword[try] :
identifier[package] = identifier[normalize] ( identifier[package] )
identifier[version] = keyword[None]
keyword[except] identifier[ValueError] :
keyword[raise] identifier[RuntimeError] ( literal[string] % identifier[package] )
identifier[total] , identifier[releases] = identifier[count_downloads] (
identifier[package] ,
identifier[json] = keyword[True] ,
identifier[version] = identifier[version] ,
)
identifier[result] ={
literal[string] : identifier[version] ,
literal[string] : identifier[releases] ,
}
identifier[grand_total] += identifier[total]
keyword[return] identifier[result] , identifier[grand_total] , identifier[version]
|
def get_stats(package):
"""
Fetch raw statistics of a package, no corrections are made to this
data. You should use get_corrected_stats().
"""
grand_total = 0
if '==' in package:
(package, version) = package.split('==') # depends on [control=['if'], data=['package']]
try:
package = normalize(package)
version = None # depends on [control=['try'], data=[]]
except ValueError:
raise RuntimeError('No such module or package %r' % package) # depends on [control=['except'], data=[]]
# Count downloads
(total, releases) = count_downloads(package, json=True, version=version)
result = {'version': version, 'releases': releases}
grand_total += total
return (result, grand_total, version)
|
def update_keywords(self):
"""653 Free Keywords."""
for field in record_get_field_instances(self.record, '653', ind1='1'):
subs = field_get_subfields(field)
new_subs = []
if 'a' in subs:
for val in subs['a']:
new_subs.extend([('9', 'author'), ('a', val)])
new_field = create_field(subfields=new_subs, ind1='1')
record_replace_field(
self.record, '653', new_field, field_position_global=field[4])
|
def function[update_keywords, parameter[self]]:
constant[653 Free Keywords.]
for taget[name[field]] in starred[call[name[record_get_field_instances], parameter[name[self].record, constant[653]]]] begin[:]
variable[subs] assign[=] call[name[field_get_subfields], parameter[name[field]]]
variable[new_subs] assign[=] list[[]]
if compare[constant[a] in name[subs]] begin[:]
for taget[name[val]] in starred[call[name[subs]][constant[a]]] begin[:]
call[name[new_subs].extend, parameter[list[[<ast.Tuple object at 0x7da20c6aa800>, <ast.Tuple object at 0x7da20c6a98d0>]]]]
variable[new_field] assign[=] call[name[create_field], parameter[]]
call[name[record_replace_field], parameter[name[self].record, constant[653], name[new_field]]]
|
keyword[def] identifier[update_keywords] ( identifier[self] ):
literal[string]
keyword[for] identifier[field] keyword[in] identifier[record_get_field_instances] ( identifier[self] . identifier[record] , literal[string] , identifier[ind1] = literal[string] ):
identifier[subs] = identifier[field_get_subfields] ( identifier[field] )
identifier[new_subs] =[]
keyword[if] literal[string] keyword[in] identifier[subs] :
keyword[for] identifier[val] keyword[in] identifier[subs] [ literal[string] ]:
identifier[new_subs] . identifier[extend] ([( literal[string] , literal[string] ),( literal[string] , identifier[val] )])
identifier[new_field] = identifier[create_field] ( identifier[subfields] = identifier[new_subs] , identifier[ind1] = literal[string] )
identifier[record_replace_field] (
identifier[self] . identifier[record] , literal[string] , identifier[new_field] , identifier[field_position_global] = identifier[field] [ literal[int] ])
|
def update_keywords(self):
"""653 Free Keywords."""
for field in record_get_field_instances(self.record, '653', ind1='1'):
subs = field_get_subfields(field)
new_subs = []
if 'a' in subs:
for val in subs['a']:
new_subs.extend([('9', 'author'), ('a', val)]) # depends on [control=['for'], data=['val']] # depends on [control=['if'], data=['subs']]
new_field = create_field(subfields=new_subs, ind1='1')
record_replace_field(self.record, '653', new_field, field_position_global=field[4]) # depends on [control=['for'], data=['field']]
|
def GetList(self):
"""Get Info on Current List
This is run in __init__ so you don't
have to run it again.
Access from self.schema
"""
# Build Request
soap_request = soap('GetList')
soap_request.add_parameter('listName', self.listName)
self.last_request = str(soap_request)
# Send Request
response = self._session.post(url=self._url('Lists'),
headers=self._headers('GetList'),
data=str(soap_request),
verify=self._verify_ssl,
timeout=self.timeout)
# Parse Response
if response.status_code == 200:
envelope = etree.fromstring(response.text.encode('utf-8'), parser=etree.XMLParser(huge_tree=self.huge_tree))
_list = envelope[0][0][0][0]
info = {key: value for (key, value) in _list.items()}
for row in _list[0].getchildren():
self.fields.append({key: value for (key, value) in row.items()})
for setting in _list[1].getchildren():
self.regional_settings[
setting.tag.strip('{http://schemas.microsoft.com/sharepoint/soap/}')] = setting.text
for setting in _list[2].getchildren():
self.server_settings[
setting.tag.strip('{http://schemas.microsoft.com/sharepoint/soap/}')] = setting.text
fields = envelope[0][0][0][0][0]
else:
raise Exception("ERROR:", response.status_code, response.text)
|
def function[GetList, parameter[self]]:
constant[Get Info on Current List
This is run in __init__ so you don't
have to run it again.
Access from self.schema
]
variable[soap_request] assign[=] call[name[soap], parameter[constant[GetList]]]
call[name[soap_request].add_parameter, parameter[constant[listName], name[self].listName]]
name[self].last_request assign[=] call[name[str], parameter[name[soap_request]]]
variable[response] assign[=] call[name[self]._session.post, parameter[]]
if compare[name[response].status_code equal[==] constant[200]] begin[:]
variable[envelope] assign[=] call[name[etree].fromstring, parameter[call[name[response].text.encode, parameter[constant[utf-8]]]]]
variable[_list] assign[=] call[call[call[call[name[envelope]][constant[0]]][constant[0]]][constant[0]]][constant[0]]
variable[info] assign[=] <ast.DictComp object at 0x7da1b12bb820>
for taget[name[row]] in starred[call[call[name[_list]][constant[0]].getchildren, parameter[]]] begin[:]
call[name[self].fields.append, parameter[<ast.DictComp object at 0x7da1b12b8be0>]]
for taget[name[setting]] in starred[call[call[name[_list]][constant[1]].getchildren, parameter[]]] begin[:]
call[name[self].regional_settings][call[name[setting].tag.strip, parameter[constant[{http://schemas.microsoft.com/sharepoint/soap/}]]]] assign[=] name[setting].text
for taget[name[setting]] in starred[call[call[name[_list]][constant[2]].getchildren, parameter[]]] begin[:]
call[name[self].server_settings][call[name[setting].tag.strip, parameter[constant[{http://schemas.microsoft.com/sharepoint/soap/}]]]] assign[=] name[setting].text
variable[fields] assign[=] call[call[call[call[call[name[envelope]][constant[0]]][constant[0]]][constant[0]]][constant[0]]][constant[0]]
|
keyword[def] identifier[GetList] ( identifier[self] ):
literal[string]
identifier[soap_request] = identifier[soap] ( literal[string] )
identifier[soap_request] . identifier[add_parameter] ( literal[string] , identifier[self] . identifier[listName] )
identifier[self] . identifier[last_request] = identifier[str] ( identifier[soap_request] )
identifier[response] = identifier[self] . identifier[_session] . identifier[post] ( identifier[url] = identifier[self] . identifier[_url] ( literal[string] ),
identifier[headers] = identifier[self] . identifier[_headers] ( literal[string] ),
identifier[data] = identifier[str] ( identifier[soap_request] ),
identifier[verify] = identifier[self] . identifier[_verify_ssl] ,
identifier[timeout] = identifier[self] . identifier[timeout] )
keyword[if] identifier[response] . identifier[status_code] == literal[int] :
identifier[envelope] = identifier[etree] . identifier[fromstring] ( identifier[response] . identifier[text] . identifier[encode] ( literal[string] ), identifier[parser] = identifier[etree] . identifier[XMLParser] ( identifier[huge_tree] = identifier[self] . identifier[huge_tree] ))
identifier[_list] = identifier[envelope] [ literal[int] ][ literal[int] ][ literal[int] ][ literal[int] ]
identifier[info] ={ identifier[key] : identifier[value] keyword[for] ( identifier[key] , identifier[value] ) keyword[in] identifier[_list] . identifier[items] ()}
keyword[for] identifier[row] keyword[in] identifier[_list] [ literal[int] ]. identifier[getchildren] ():
identifier[self] . identifier[fields] . identifier[append] ({ identifier[key] : identifier[value] keyword[for] ( identifier[key] , identifier[value] ) keyword[in] identifier[row] . identifier[items] ()})
keyword[for] identifier[setting] keyword[in] identifier[_list] [ literal[int] ]. identifier[getchildren] ():
identifier[self] . identifier[regional_settings] [
identifier[setting] . identifier[tag] . identifier[strip] ( literal[string] )]= identifier[setting] . identifier[text]
keyword[for] identifier[setting] keyword[in] identifier[_list] [ literal[int] ]. identifier[getchildren] ():
identifier[self] . identifier[server_settings] [
identifier[setting] . identifier[tag] . identifier[strip] ( literal[string] )]= identifier[setting] . identifier[text]
identifier[fields] = identifier[envelope] [ literal[int] ][ literal[int] ][ literal[int] ][ literal[int] ][ literal[int] ]
keyword[else] :
keyword[raise] identifier[Exception] ( literal[string] , identifier[response] . identifier[status_code] , identifier[response] . identifier[text] )
|
def GetList(self):
"""Get Info on Current List
This is run in __init__ so you don't
have to run it again.
Access from self.schema
"""
# Build Request
soap_request = soap('GetList')
soap_request.add_parameter('listName', self.listName)
self.last_request = str(soap_request)
# Send Request
response = self._session.post(url=self._url('Lists'), headers=self._headers('GetList'), data=str(soap_request), verify=self._verify_ssl, timeout=self.timeout)
# Parse Response
if response.status_code == 200:
envelope = etree.fromstring(response.text.encode('utf-8'), parser=etree.XMLParser(huge_tree=self.huge_tree))
_list = envelope[0][0][0][0]
info = {key: value for (key, value) in _list.items()}
for row in _list[0].getchildren():
self.fields.append({key: value for (key, value) in row.items()}) # depends on [control=['for'], data=['row']]
for setting in _list[1].getchildren():
self.regional_settings[setting.tag.strip('{http://schemas.microsoft.com/sharepoint/soap/}')] = setting.text # depends on [control=['for'], data=['setting']]
for setting in _list[2].getchildren():
self.server_settings[setting.tag.strip('{http://schemas.microsoft.com/sharepoint/soap/}')] = setting.text # depends on [control=['for'], data=['setting']]
fields = envelope[0][0][0][0][0] # depends on [control=['if'], data=[]]
else:
raise Exception('ERROR:', response.status_code, response.text)
|
def build_data_access(host, port, database_name, collection_name):
"""
Create data access gateway.
:param host: The database server to connect to.
:type host: str
:param port: Database port.
:type port: int
:param database_name: Database name.
:type database_name: str
:param collection_name: Name of the collection with Sacred runs.
:type collection_name: str
"""
return PyMongoDataAccess("mongodb://%s:%d" % (host, port),
database_name, collection_name)
|
def function[build_data_access, parameter[host, port, database_name, collection_name]]:
constant[
Create data access gateway.
:param host: The database server to connect to.
:type host: str
:param port: Database port.
:type port: int
:param database_name: Database name.
:type database_name: str
:param collection_name: Name of the collection with Sacred runs.
:type collection_name: str
]
return[call[name[PyMongoDataAccess], parameter[binary_operation[constant[mongodb://%s:%d] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da204345360>, <ast.Name object at 0x7da2043469b0>]]], name[database_name], name[collection_name]]]]
|
keyword[def] identifier[build_data_access] ( identifier[host] , identifier[port] , identifier[database_name] , identifier[collection_name] ):
literal[string]
keyword[return] identifier[PyMongoDataAccess] ( literal[string] %( identifier[host] , identifier[port] ),
identifier[database_name] , identifier[collection_name] )
|
def build_data_access(host, port, database_name, collection_name):
"""
Create data access gateway.
:param host: The database server to connect to.
:type host: str
:param port: Database port.
:type port: int
:param database_name: Database name.
:type database_name: str
:param collection_name: Name of the collection with Sacred runs.
:type collection_name: str
"""
return PyMongoDataAccess('mongodb://%s:%d' % (host, port), database_name, collection_name)
|
def start_monitor(self):
"""Start the monitor."""
stdout_file, stderr_file = self.new_log_files("monitor")
process_info = ray.services.start_monitor(
self._redis_address,
stdout_file=stdout_file,
stderr_file=stderr_file,
autoscaling_config=self._ray_params.autoscaling_config,
redis_password=self._ray_params.redis_password)
assert ray_constants.PROCESS_TYPE_MONITOR not in self.all_processes
self.all_processes[ray_constants.PROCESS_TYPE_MONITOR] = [process_info]
|
def function[start_monitor, parameter[self]]:
constant[Start the monitor.]
<ast.Tuple object at 0x7da1b23459f0> assign[=] call[name[self].new_log_files, parameter[constant[monitor]]]
variable[process_info] assign[=] call[name[ray].services.start_monitor, parameter[name[self]._redis_address]]
assert[compare[name[ray_constants].PROCESS_TYPE_MONITOR <ast.NotIn object at 0x7da2590d7190> name[self].all_processes]]
call[name[self].all_processes][name[ray_constants].PROCESS_TYPE_MONITOR] assign[=] list[[<ast.Name object at 0x7da18eb57610>]]
|
keyword[def] identifier[start_monitor] ( identifier[self] ):
literal[string]
identifier[stdout_file] , identifier[stderr_file] = identifier[self] . identifier[new_log_files] ( literal[string] )
identifier[process_info] = identifier[ray] . identifier[services] . identifier[start_monitor] (
identifier[self] . identifier[_redis_address] ,
identifier[stdout_file] = identifier[stdout_file] ,
identifier[stderr_file] = identifier[stderr_file] ,
identifier[autoscaling_config] = identifier[self] . identifier[_ray_params] . identifier[autoscaling_config] ,
identifier[redis_password] = identifier[self] . identifier[_ray_params] . identifier[redis_password] )
keyword[assert] identifier[ray_constants] . identifier[PROCESS_TYPE_MONITOR] keyword[not] keyword[in] identifier[self] . identifier[all_processes]
identifier[self] . identifier[all_processes] [ identifier[ray_constants] . identifier[PROCESS_TYPE_MONITOR] ]=[ identifier[process_info] ]
|
def start_monitor(self):
"""Start the monitor."""
(stdout_file, stderr_file) = self.new_log_files('monitor')
process_info = ray.services.start_monitor(self._redis_address, stdout_file=stdout_file, stderr_file=stderr_file, autoscaling_config=self._ray_params.autoscaling_config, redis_password=self._ray_params.redis_password)
assert ray_constants.PROCESS_TYPE_MONITOR not in self.all_processes
self.all_processes[ray_constants.PROCESS_TYPE_MONITOR] = [process_info]
|
def upload(self, f):
"""Upload a file to the Puush account.
Parameters:
* f: The file. Either a path to a file or a file-like object.
"""
if hasattr(f, 'read'):
needs_closing = False
else:
f = open(f, 'rb')
needs_closing = True
# The Puush server can't handle non-ASCII filenames.
# The official Puush desktop app actually substitutes ? for
# non-ISO-8859-1 characters, which helps some Unicode filenames,
# but some are still let through and encounter server errors.
# Try uploading a file named åäö.txt through the desktop app -
# it won't work. It's better to let this Python API do that,
# however, with the behavior probably intended in the desktop app.
filename = os.path.basename(f.name).encode('ascii', 'replace')
filename = filename.decode('ascii') # Requests doesn't like bytes
md5 = md5_file(f)
data = {
'z': 'meaningless',
'c': md5
}
files = {
'f': (filename, f)
}
res = self._api_request('up', data=data, files=files)[0]
if res[0] == '-1':
raise PuushError("File upload failed.")
elif res[0] == '-3':
raise PuushError("File upload failed: hash didn't match with "
"the file the server received.")
if needs_closing:
f.close()
_, url, id, size = res
now = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
return self._File(id, url, filename, now, 0)
|
def function[upload, parameter[self, f]]:
constant[Upload a file to the Puush account.
Parameters:
* f: The file. Either a path to a file or a file-like object.
]
if call[name[hasattr], parameter[name[f], constant[read]]] begin[:]
variable[needs_closing] assign[=] constant[False]
variable[filename] assign[=] call[call[name[os].path.basename, parameter[name[f].name]].encode, parameter[constant[ascii], constant[replace]]]
variable[filename] assign[=] call[name[filename].decode, parameter[constant[ascii]]]
variable[md5] assign[=] call[name[md5_file], parameter[name[f]]]
variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da1b1668d00>, <ast.Constant object at 0x7da1b1669810>], [<ast.Constant object at 0x7da1b1668070>, <ast.Name object at 0x7da1b1669720>]]
variable[files] assign[=] dictionary[[<ast.Constant object at 0x7da1b1668fa0>], [<ast.Tuple object at 0x7da1b1669180>]]
variable[res] assign[=] call[call[name[self]._api_request, parameter[constant[up]]]][constant[0]]
if compare[call[name[res]][constant[0]] equal[==] constant[-1]] begin[:]
<ast.Raise object at 0x7da1b1669480>
if name[needs_closing] begin[:]
call[name[f].close, parameter[]]
<ast.Tuple object at 0x7da1b1668c10> assign[=] name[res]
variable[now] assign[=] call[call[name[datetime].now, parameter[]].strftime, parameter[constant[%Y-%m-%d %H:%M:%S]]]
return[call[name[self]._File, parameter[name[id], name[url], name[filename], name[now], constant[0]]]]
|
keyword[def] identifier[upload] ( identifier[self] , identifier[f] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[f] , literal[string] ):
identifier[needs_closing] = keyword[False]
keyword[else] :
identifier[f] = identifier[open] ( identifier[f] , literal[string] )
identifier[needs_closing] = keyword[True]
identifier[filename] = identifier[os] . identifier[path] . identifier[basename] ( identifier[f] . identifier[name] ). identifier[encode] ( literal[string] , literal[string] )
identifier[filename] = identifier[filename] . identifier[decode] ( literal[string] )
identifier[md5] = identifier[md5_file] ( identifier[f] )
identifier[data] ={
literal[string] : literal[string] ,
literal[string] : identifier[md5]
}
identifier[files] ={
literal[string] :( identifier[filename] , identifier[f] )
}
identifier[res] = identifier[self] . identifier[_api_request] ( literal[string] , identifier[data] = identifier[data] , identifier[files] = identifier[files] )[ literal[int] ]
keyword[if] identifier[res] [ literal[int] ]== literal[string] :
keyword[raise] identifier[PuushError] ( literal[string] )
keyword[elif] identifier[res] [ literal[int] ]== literal[string] :
keyword[raise] identifier[PuushError] ( literal[string]
literal[string] )
keyword[if] identifier[needs_closing] :
identifier[f] . identifier[close] ()
identifier[_] , identifier[url] , identifier[id] , identifier[size] = identifier[res]
identifier[now] = identifier[datetime] . identifier[now] (). identifier[strftime] ( literal[string] )
keyword[return] identifier[self] . identifier[_File] ( identifier[id] , identifier[url] , identifier[filename] , identifier[now] , literal[int] )
|
def upload(self, f):
"""Upload a file to the Puush account.
Parameters:
* f: The file. Either a path to a file or a file-like object.
"""
if hasattr(f, 'read'):
needs_closing = False # depends on [control=['if'], data=[]]
else:
f = open(f, 'rb')
needs_closing = True
# The Puush server can't handle non-ASCII filenames.
# The official Puush desktop app actually substitutes ? for
# non-ISO-8859-1 characters, which helps some Unicode filenames,
# but some are still let through and encounter server errors.
# Try uploading a file named åäö.txt through the desktop app -
# it won't work. It's better to let this Python API do that,
# however, with the behavior probably intended in the desktop app.
filename = os.path.basename(f.name).encode('ascii', 'replace')
filename = filename.decode('ascii') # Requests doesn't like bytes
md5 = md5_file(f)
data = {'z': 'meaningless', 'c': md5}
files = {'f': (filename, f)}
res = self._api_request('up', data=data, files=files)[0]
if res[0] == '-1':
raise PuushError('File upload failed.') # depends on [control=['if'], data=[]]
elif res[0] == '-3':
raise PuushError("File upload failed: hash didn't match with the file the server received.") # depends on [control=['if'], data=[]]
if needs_closing:
f.close() # depends on [control=['if'], data=[]]
(_, url, id, size) = res
now = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
return self._File(id, url, filename, now, 0)
|
def get_file(self):
"""stub"""
if self.has_file_asset():
return self._get_asset_content(
Id(self.my_osid_object._my_map['fileId']['assetId']),
self.my_osid_object._my_map['fileId']['assetContentTypeId']).get_data()
raise IllegalState()
|
def function[get_file, parameter[self]]:
constant[stub]
if call[name[self].has_file_asset, parameter[]] begin[:]
return[call[call[name[self]._get_asset_content, parameter[call[name[Id], parameter[call[call[name[self].my_osid_object._my_map][constant[fileId]]][constant[assetId]]]], call[call[name[self].my_osid_object._my_map][constant[fileId]]][constant[assetContentTypeId]]]].get_data, parameter[]]]
<ast.Raise object at 0x7da204621c60>
|
keyword[def] identifier[get_file] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[has_file_asset] ():
keyword[return] identifier[self] . identifier[_get_asset_content] (
identifier[Id] ( identifier[self] . identifier[my_osid_object] . identifier[_my_map] [ literal[string] ][ literal[string] ]),
identifier[self] . identifier[my_osid_object] . identifier[_my_map] [ literal[string] ][ literal[string] ]). identifier[get_data] ()
keyword[raise] identifier[IllegalState] ()
|
def get_file(self):
"""stub"""
if self.has_file_asset():
return self._get_asset_content(Id(self.my_osid_object._my_map['fileId']['assetId']), self.my_osid_object._my_map['fileId']['assetContentTypeId']).get_data() # depends on [control=['if'], data=[]]
raise IllegalState()
|
def list_node(self, tab_level=-1):
"""
Lists the current Node and its children.
Usage::
>>> node_a = AbstractCompositeNode("MyNodeA")
>>> node_b = AbstractCompositeNode("MyNodeB", node_a)
>>> node_c = AbstractCompositeNode("MyNodeC", node_a)
>>> print node_a.list_node()
|----'MyNodeA'
|----'MyNodeB'
|----'MyNodeC'
:param tab_level: Tab level.
:type tab_level: int
:return: Node listing.
:rtype: unicode
"""
output = ""
tab_level += 1
for i in range(tab_level):
output += "\t"
output += "|----'{0}'\n".format(self.name)
for child in self.__children:
output += child.list_node(tab_level)
tab_level -= 1
return output
|
def function[list_node, parameter[self, tab_level]]:
constant[
Lists the current Node and its children.
Usage::
>>> node_a = AbstractCompositeNode("MyNodeA")
>>> node_b = AbstractCompositeNode("MyNodeB", node_a)
>>> node_c = AbstractCompositeNode("MyNodeC", node_a)
>>> print node_a.list_node()
|----'MyNodeA'
|----'MyNodeB'
|----'MyNodeC'
:param tab_level: Tab level.
:type tab_level: int
:return: Node listing.
:rtype: unicode
]
variable[output] assign[=] constant[]
<ast.AugAssign object at 0x7da204963f70>
for taget[name[i]] in starred[call[name[range], parameter[name[tab_level]]]] begin[:]
<ast.AugAssign object at 0x7da204962110>
<ast.AugAssign object at 0x7da204963a00>
for taget[name[child]] in starred[name[self].__children] begin[:]
<ast.AugAssign object at 0x7da204961a80>
<ast.AugAssign object at 0x7da204962830>
return[name[output]]
|
keyword[def] identifier[list_node] ( identifier[self] , identifier[tab_level] =- literal[int] ):
literal[string]
identifier[output] = literal[string]
identifier[tab_level] += literal[int]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[tab_level] ):
identifier[output] += literal[string]
identifier[output] += literal[string] . identifier[format] ( identifier[self] . identifier[name] )
keyword[for] identifier[child] keyword[in] identifier[self] . identifier[__children] :
identifier[output] += identifier[child] . identifier[list_node] ( identifier[tab_level] )
identifier[tab_level] -= literal[int]
keyword[return] identifier[output]
|
def list_node(self, tab_level=-1):
"""
Lists the current Node and its children.
Usage::
>>> node_a = AbstractCompositeNode("MyNodeA")
>>> node_b = AbstractCompositeNode("MyNodeB", node_a)
>>> node_c = AbstractCompositeNode("MyNodeC", node_a)
>>> print node_a.list_node()
|----'MyNodeA'
|----'MyNodeB'
|----'MyNodeC'
:param tab_level: Tab level.
:type tab_level: int
:return: Node listing.
:rtype: unicode
"""
output = ''
tab_level += 1
for i in range(tab_level):
output += '\t' # depends on [control=['for'], data=[]]
output += "|----'{0}'\n".format(self.name)
for child in self.__children:
output += child.list_node(tab_level) # depends on [control=['for'], data=['child']]
tab_level -= 1
return output
|
def save_value(self, key, value):
"""
Save an arbitrary, serializable `value` under `key`.
:param str key: A string identifier under which to store the value.
:param value: A serializable value
:return:
"""
with self.save_stream(key) as s:
s.write(value)
|
def function[save_value, parameter[self, key, value]]:
constant[
Save an arbitrary, serializable `value` under `key`.
:param str key: A string identifier under which to store the value.
:param value: A serializable value
:return:
]
with call[name[self].save_stream, parameter[name[key]]] begin[:]
call[name[s].write, parameter[name[value]]]
|
keyword[def] identifier[save_value] ( identifier[self] , identifier[key] , identifier[value] ):
literal[string]
keyword[with] identifier[self] . identifier[save_stream] ( identifier[key] ) keyword[as] identifier[s] :
identifier[s] . identifier[write] ( identifier[value] )
|
def save_value(self, key, value):
"""
Save an arbitrary, serializable `value` under `key`.
:param str key: A string identifier under which to store the value.
:param value: A serializable value
:return:
"""
with self.save_stream(key) as s:
s.write(value) # depends on [control=['with'], data=['s']]
|
def progressbar(stream, prefix='Loading: ', width=0.5, **options):
""" Generator filter to print a progress bar. """
size = len(stream)
if not size:
return stream
if 'width' not in options:
if width <= 1:
width = round(shutil.get_terminal_size()[0] * width)
options['width'] = width
with ProgressBar(max=size, prefix=prefix, **options) as b:
b.set(0)
for i, x in enumerate(stream, 1):
yield x
b.set(i)
|
def function[progressbar, parameter[stream, prefix, width]]:
constant[ Generator filter to print a progress bar. ]
variable[size] assign[=] call[name[len], parameter[name[stream]]]
if <ast.UnaryOp object at 0x7da1b1600400> begin[:]
return[name[stream]]
if compare[constant[width] <ast.NotIn object at 0x7da2590d7190> name[options]] begin[:]
if compare[name[width] less_or_equal[<=] constant[1]] begin[:]
variable[width] assign[=] call[name[round], parameter[binary_operation[call[call[name[shutil].get_terminal_size, parameter[]]][constant[0]] * name[width]]]]
call[name[options]][constant[width]] assign[=] name[width]
with call[name[ProgressBar], parameter[]] begin[:]
call[name[b].set, parameter[constant[0]]]
for taget[tuple[[<ast.Name object at 0x7da1b1603370>, <ast.Name object at 0x7da1b1603580>]]] in starred[call[name[enumerate], parameter[name[stream], constant[1]]]] begin[:]
<ast.Yield object at 0x7da1b1601c90>
call[name[b].set, parameter[name[i]]]
|
keyword[def] identifier[progressbar] ( identifier[stream] , identifier[prefix] = literal[string] , identifier[width] = literal[int] ,** identifier[options] ):
literal[string]
identifier[size] = identifier[len] ( identifier[stream] )
keyword[if] keyword[not] identifier[size] :
keyword[return] identifier[stream]
keyword[if] literal[string] keyword[not] keyword[in] identifier[options] :
keyword[if] identifier[width] <= literal[int] :
identifier[width] = identifier[round] ( identifier[shutil] . identifier[get_terminal_size] ()[ literal[int] ]* identifier[width] )
identifier[options] [ literal[string] ]= identifier[width]
keyword[with] identifier[ProgressBar] ( identifier[max] = identifier[size] , identifier[prefix] = identifier[prefix] ,** identifier[options] ) keyword[as] identifier[b] :
identifier[b] . identifier[set] ( literal[int] )
keyword[for] identifier[i] , identifier[x] keyword[in] identifier[enumerate] ( identifier[stream] , literal[int] ):
keyword[yield] identifier[x]
identifier[b] . identifier[set] ( identifier[i] )
|
def progressbar(stream, prefix='Loading: ', width=0.5, **options):
""" Generator filter to print a progress bar. """
size = len(stream)
if not size:
return stream # depends on [control=['if'], data=[]]
if 'width' not in options:
if width <= 1:
width = round(shutil.get_terminal_size()[0] * width) # depends on [control=['if'], data=['width']]
options['width'] = width # depends on [control=['if'], data=['options']]
with ProgressBar(max=size, prefix=prefix, **options) as b:
b.set(0)
for (i, x) in enumerate(stream, 1):
yield x
b.set(i) # depends on [control=['for'], data=[]] # depends on [control=['with'], data=['b']]
|
def analyzers_mapping(cls):
"""
Return instance of itself where all used properties are set to
:class:`FuncInfo`.
This method is used by the database, which map all the properties
defined here to itself, runs the functions as new processes and stores
the result in itself. Because it knows how many properties are there,
it may also track the progress, which is then transported to the
frontend and displayed in form of progress bar.
Returns:
obj: :class:`Model` instance.
"""
import analyzers
return cls(
title_tags=_compose_func(analyzers.get_title_tags),
place_tags=_compose_func(
analyzers.get_place_tags,
lambda req_info: (req_info.index, req_info.domain)
),
lang_tags=_compose_func(analyzers.get_lang_tags),
keyword_tags=_compose_func(analyzers.get_keyword_tags),
# yep, authors of webpage are actually publishers
publisher_tags=_compose_func(analyzers.get_author_tags),
annotation_tags=_compose_func(analyzers.get_annotation_tags),
creation_dates=_compose_func(
analyzers.get_creation_date_tags,
lambda req_info: (req_info.url, req_info.domain)
),
)
|
def function[analyzers_mapping, parameter[cls]]:
constant[
Return instance of itself where all used properties are set to
:class:`FuncInfo`.
This method is used by the database, which map all the properties
defined here to itself, runs the functions as new processes and stores
the result in itself. Because it knows how many properties are there,
it may also track the progress, which is then transported to the
frontend and displayed in form of progress bar.
Returns:
obj: :class:`Model` instance.
]
import module[analyzers]
return[call[name[cls], parameter[]]]
|
keyword[def] identifier[analyzers_mapping] ( identifier[cls] ):
literal[string]
keyword[import] identifier[analyzers]
keyword[return] identifier[cls] (
identifier[title_tags] = identifier[_compose_func] ( identifier[analyzers] . identifier[get_title_tags] ),
identifier[place_tags] = identifier[_compose_func] (
identifier[analyzers] . identifier[get_place_tags] ,
keyword[lambda] identifier[req_info] :( identifier[req_info] . identifier[index] , identifier[req_info] . identifier[domain] )
),
identifier[lang_tags] = identifier[_compose_func] ( identifier[analyzers] . identifier[get_lang_tags] ),
identifier[keyword_tags] = identifier[_compose_func] ( identifier[analyzers] . identifier[get_keyword_tags] ),
identifier[publisher_tags] = identifier[_compose_func] ( identifier[analyzers] . identifier[get_author_tags] ),
identifier[annotation_tags] = identifier[_compose_func] ( identifier[analyzers] . identifier[get_annotation_tags] ),
identifier[creation_dates] = identifier[_compose_func] (
identifier[analyzers] . identifier[get_creation_date_tags] ,
keyword[lambda] identifier[req_info] :( identifier[req_info] . identifier[url] , identifier[req_info] . identifier[domain] )
),
)
|
def analyzers_mapping(cls):
"""
Return instance of itself where all used properties are set to
:class:`FuncInfo`.
This method is used by the database, which map all the properties
defined here to itself, runs the functions as new processes and stores
the result in itself. Because it knows how many properties are there,
it may also track the progress, which is then transported to the
frontend and displayed in form of progress bar.
Returns:
obj: :class:`Model` instance.
"""
import analyzers
# yep, authors of webpage are actually publishers
return cls(title_tags=_compose_func(analyzers.get_title_tags), place_tags=_compose_func(analyzers.get_place_tags, lambda req_info: (req_info.index, req_info.domain)), lang_tags=_compose_func(analyzers.get_lang_tags), keyword_tags=_compose_func(analyzers.get_keyword_tags), publisher_tags=_compose_func(analyzers.get_author_tags), annotation_tags=_compose_func(analyzers.get_annotation_tags), creation_dates=_compose_func(analyzers.get_creation_date_tags, lambda req_info: (req_info.url, req_info.domain)))
|
def get_fields_and_fragment_names(
context: ValidationContext,
cached_fields_and_fragment_names: Dict,
parent_type: Optional[GraphQLNamedType],
selection_set: SelectionSetNode,
) -> Tuple[NodeAndDefCollection, List[str]]:
"""Get fields and referenced fragment names
Given a selection set, return the collection of fields (a mapping of response name
to field nodes and definitions) as well as a list of fragment names referenced via
fragment spreads.
"""
cached = cached_fields_and_fragment_names.get(selection_set)
if not cached:
node_and_defs: NodeAndDefCollection = {}
fragment_names: Dict[str, bool] = {}
collect_fields_and_fragment_names(
context, parent_type, selection_set, node_and_defs, fragment_names
)
cached = (node_and_defs, list(fragment_names))
cached_fields_and_fragment_names[selection_set] = cached
return cached
|
def function[get_fields_and_fragment_names, parameter[context, cached_fields_and_fragment_names, parent_type, selection_set]]:
constant[Get fields and referenced fragment names
Given a selection set, return the collection of fields (a mapping of response name
to field nodes and definitions) as well as a list of fragment names referenced via
fragment spreads.
]
variable[cached] assign[=] call[name[cached_fields_and_fragment_names].get, parameter[name[selection_set]]]
if <ast.UnaryOp object at 0x7da1b22adff0> begin[:]
<ast.AnnAssign object at 0x7da1b22aee60>
<ast.AnnAssign object at 0x7da1b22ae470>
call[name[collect_fields_and_fragment_names], parameter[name[context], name[parent_type], name[selection_set], name[node_and_defs], name[fragment_names]]]
variable[cached] assign[=] tuple[[<ast.Name object at 0x7da1b22e9f90>, <ast.Call object at 0x7da1b22e96f0>]]
call[name[cached_fields_and_fragment_names]][name[selection_set]] assign[=] name[cached]
return[name[cached]]
|
keyword[def] identifier[get_fields_and_fragment_names] (
identifier[context] : identifier[ValidationContext] ,
identifier[cached_fields_and_fragment_names] : identifier[Dict] ,
identifier[parent_type] : identifier[Optional] [ identifier[GraphQLNamedType] ],
identifier[selection_set] : identifier[SelectionSetNode] ,
)-> identifier[Tuple] [ identifier[NodeAndDefCollection] , identifier[List] [ identifier[str] ]]:
literal[string]
identifier[cached] = identifier[cached_fields_and_fragment_names] . identifier[get] ( identifier[selection_set] )
keyword[if] keyword[not] identifier[cached] :
identifier[node_and_defs] : identifier[NodeAndDefCollection] ={}
identifier[fragment_names] : identifier[Dict] [ identifier[str] , identifier[bool] ]={}
identifier[collect_fields_and_fragment_names] (
identifier[context] , identifier[parent_type] , identifier[selection_set] , identifier[node_and_defs] , identifier[fragment_names]
)
identifier[cached] =( identifier[node_and_defs] , identifier[list] ( identifier[fragment_names] ))
identifier[cached_fields_and_fragment_names] [ identifier[selection_set] ]= identifier[cached]
keyword[return] identifier[cached]
|
def get_fields_and_fragment_names(context: ValidationContext, cached_fields_and_fragment_names: Dict, parent_type: Optional[GraphQLNamedType], selection_set: SelectionSetNode) -> Tuple[NodeAndDefCollection, List[str]]:
"""Get fields and referenced fragment names
Given a selection set, return the collection of fields (a mapping of response name
to field nodes and definitions) as well as a list of fragment names referenced via
fragment spreads.
"""
cached = cached_fields_and_fragment_names.get(selection_set)
if not cached:
node_and_defs: NodeAndDefCollection = {}
fragment_names: Dict[str, bool] = {}
collect_fields_and_fragment_names(context, parent_type, selection_set, node_and_defs, fragment_names)
cached = (node_and_defs, list(fragment_names))
cached_fields_and_fragment_names[selection_set] = cached # depends on [control=['if'], data=[]]
return cached
|
def _create_doc_summary(self, obj, fullname, refrole):
"""Create a paragraph containing the object's one-sentence docstring
summary with a link to further documentation.
The paragrah should be inserted into the ``desc`` node's
``desc_content``.
"""
summary_text = extract_docstring_summary(get_docstring(obj))
summary_text = summary_text.strip()
# Strip the last "." because the linked ellipses take its place
if summary_text.endswith('.'):
summary_text = summary_text.rstrip('.')
content_node_p = nodes.paragraph(text=summary_text)
content_node_p += self._create_api_details_link(fullname, refrole)
return content_node_p
|
def function[_create_doc_summary, parameter[self, obj, fullname, refrole]]:
constant[Create a paragraph containing the object's one-sentence docstring
summary with a link to further documentation.
The paragrah should be inserted into the ``desc`` node's
``desc_content``.
]
variable[summary_text] assign[=] call[name[extract_docstring_summary], parameter[call[name[get_docstring], parameter[name[obj]]]]]
variable[summary_text] assign[=] call[name[summary_text].strip, parameter[]]
if call[name[summary_text].endswith, parameter[constant[.]]] begin[:]
variable[summary_text] assign[=] call[name[summary_text].rstrip, parameter[constant[.]]]
variable[content_node_p] assign[=] call[name[nodes].paragraph, parameter[]]
<ast.AugAssign object at 0x7da1b2325f60>
return[name[content_node_p]]
|
keyword[def] identifier[_create_doc_summary] ( identifier[self] , identifier[obj] , identifier[fullname] , identifier[refrole] ):
literal[string]
identifier[summary_text] = identifier[extract_docstring_summary] ( identifier[get_docstring] ( identifier[obj] ))
identifier[summary_text] = identifier[summary_text] . identifier[strip] ()
keyword[if] identifier[summary_text] . identifier[endswith] ( literal[string] ):
identifier[summary_text] = identifier[summary_text] . identifier[rstrip] ( literal[string] )
identifier[content_node_p] = identifier[nodes] . identifier[paragraph] ( identifier[text] = identifier[summary_text] )
identifier[content_node_p] += identifier[self] . identifier[_create_api_details_link] ( identifier[fullname] , identifier[refrole] )
keyword[return] identifier[content_node_p]
|
def _create_doc_summary(self, obj, fullname, refrole):
"""Create a paragraph containing the object's one-sentence docstring
summary with a link to further documentation.
The paragrah should be inserted into the ``desc`` node's
``desc_content``.
"""
summary_text = extract_docstring_summary(get_docstring(obj))
summary_text = summary_text.strip()
# Strip the last "." because the linked ellipses take its place
if summary_text.endswith('.'):
summary_text = summary_text.rstrip('.') # depends on [control=['if'], data=[]]
content_node_p = nodes.paragraph(text=summary_text)
content_node_p += self._create_api_details_link(fullname, refrole)
return content_node_p
|
def ToVegaMag(self, wave, flux, **kwargs):
"""Convert to ``vegamag``.
.. math::
\\textnormal{vegamag} = -2.5 \\; \\log(\\frac{\\textnormal{photlam}}{f_{\\textnormal{Vega}}})
where :math:`f_{\\textnormal{Vega}}` is the flux of
:ref:`pysynphot-vega-spec` resampled at given wavelength values
and converted to ``photlam``.
Parameters
----------
wave, flux : number or array_like
Wavelength and flux values to be used for conversion.
kwargs : dict
Extra keywords (not used).
Returns
-------
result : number or array_like
Converted values.
"""
from . import spectrum
resampled = spectrum.Vega.resample(wave)
normalized = flux / resampled._fluxtable
return -2.5 * N.log10(normalized)
|
def function[ToVegaMag, parameter[self, wave, flux]]:
constant[Convert to ``vegamag``.
.. math::
\textnormal{vegamag} = -2.5 \; \log(\frac{\textnormal{photlam}}{f_{\textnormal{Vega}}})
where :math:`f_{\textnormal{Vega}}` is the flux of
:ref:`pysynphot-vega-spec` resampled at given wavelength values
and converted to ``photlam``.
Parameters
----------
wave, flux : number or array_like
Wavelength and flux values to be used for conversion.
kwargs : dict
Extra keywords (not used).
Returns
-------
result : number or array_like
Converted values.
]
from relative_module[None] import module[spectrum]
variable[resampled] assign[=] call[name[spectrum].Vega.resample, parameter[name[wave]]]
variable[normalized] assign[=] binary_operation[name[flux] / name[resampled]._fluxtable]
return[binary_operation[<ast.UnaryOp object at 0x7da2041d8cd0> * call[name[N].log10, parameter[name[normalized]]]]]
|
keyword[def] identifier[ToVegaMag] ( identifier[self] , identifier[wave] , identifier[flux] ,** identifier[kwargs] ):
literal[string]
keyword[from] . keyword[import] identifier[spectrum]
identifier[resampled] = identifier[spectrum] . identifier[Vega] . identifier[resample] ( identifier[wave] )
identifier[normalized] = identifier[flux] / identifier[resampled] . identifier[_fluxtable]
keyword[return] - literal[int] * identifier[N] . identifier[log10] ( identifier[normalized] )
|
def ToVegaMag(self, wave, flux, **kwargs):
"""Convert to ``vegamag``.
.. math::
\\textnormal{vegamag} = -2.5 \\; \\log(\\frac{\\textnormal{photlam}}{f_{\\textnormal{Vega}}})
where :math:`f_{\\textnormal{Vega}}` is the flux of
:ref:`pysynphot-vega-spec` resampled at given wavelength values
and converted to ``photlam``.
Parameters
----------
wave, flux : number or array_like
Wavelength and flux values to be used for conversion.
kwargs : dict
Extra keywords (not used).
Returns
-------
result : number or array_like
Converted values.
"""
from . import spectrum
resampled = spectrum.Vega.resample(wave)
normalized = flux / resampled._fluxtable
return -2.5 * N.log10(normalized)
|
def get_all_deferred_code_breakpoints(self):
"""
Returns a list of deferred code breakpoints.
@rtype: tuple of (int, str, callable, bool)
@return: Tuple containing the following elements:
- Process ID where to set the breakpoint.
- Label pointing to the address where to set the breakpoint.
- Action callback for the breakpoint.
- C{True} of the breakpoint is one-shot, C{False} otherwise.
"""
result = []
for pid, deferred in compat.iteritems(self.__deferredBP):
for (label, (action, oneshot)) in compat.iteritems(deferred):
result.add( (pid, label, action, oneshot) )
return result
|
def function[get_all_deferred_code_breakpoints, parameter[self]]:
constant[
Returns a list of deferred code breakpoints.
@rtype: tuple of (int, str, callable, bool)
@return: Tuple containing the following elements:
- Process ID where to set the breakpoint.
- Label pointing to the address where to set the breakpoint.
- Action callback for the breakpoint.
- C{True} of the breakpoint is one-shot, C{False} otherwise.
]
variable[result] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b08ca7a0>, <ast.Name object at 0x7da1b08c9120>]]] in starred[call[name[compat].iteritems, parameter[name[self].__deferredBP]]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b08cb520>, <ast.Tuple object at 0x7da1b08c9fc0>]]] in starred[call[name[compat].iteritems, parameter[name[deferred]]]] begin[:]
call[name[result].add, parameter[tuple[[<ast.Name object at 0x7da1b08c90f0>, <ast.Name object at 0x7da1b08cba30>, <ast.Name object at 0x7da1b08c86a0>, <ast.Name object at 0x7da1b08c82e0>]]]]
return[name[result]]
|
keyword[def] identifier[get_all_deferred_code_breakpoints] ( identifier[self] ):
literal[string]
identifier[result] =[]
keyword[for] identifier[pid] , identifier[deferred] keyword[in] identifier[compat] . identifier[iteritems] ( identifier[self] . identifier[__deferredBP] ):
keyword[for] ( identifier[label] ,( identifier[action] , identifier[oneshot] )) keyword[in] identifier[compat] . identifier[iteritems] ( identifier[deferred] ):
identifier[result] . identifier[add] (( identifier[pid] , identifier[label] , identifier[action] , identifier[oneshot] ))
keyword[return] identifier[result]
|
def get_all_deferred_code_breakpoints(self):
"""
Returns a list of deferred code breakpoints.
@rtype: tuple of (int, str, callable, bool)
@return: Tuple containing the following elements:
- Process ID where to set the breakpoint.
- Label pointing to the address where to set the breakpoint.
- Action callback for the breakpoint.
- C{True} of the breakpoint is one-shot, C{False} otherwise.
"""
result = []
for (pid, deferred) in compat.iteritems(self.__deferredBP):
for (label, (action, oneshot)) in compat.iteritems(deferred):
result.add((pid, label, action, oneshot)) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]]
return result
|
def check_guest_exist(check_index=0):
"""Check guest exist in database.
:param check_index: The parameter index of userid(s), default as 0
"""
def outer(f):
@six.wraps(f)
def inner(self, *args, **kw):
userids = args[check_index]
if isinstance(userids, list):
# convert all userids to upper case
userids = [uid.upper() for uid in userids]
new_args = (args[:check_index] + (userids,) +
args[check_index + 1:])
else:
# convert the userid to upper case
userids = userids.upper()
new_args = (args[:check_index] + (userids,) +
args[check_index + 1:])
userids = [userids]
self._vmops.check_guests_exist_in_db(userids)
return f(self, *new_args, **kw)
return inner
return outer
|
def function[check_guest_exist, parameter[check_index]]:
constant[Check guest exist in database.
:param check_index: The parameter index of userid(s), default as 0
]
def function[outer, parameter[f]]:
def function[inner, parameter[self]]:
variable[userids] assign[=] call[name[args]][name[check_index]]
if call[name[isinstance], parameter[name[userids], name[list]]] begin[:]
variable[userids] assign[=] <ast.ListComp object at 0x7da2043453c0>
variable[new_args] assign[=] binary_operation[binary_operation[call[name[args]][<ast.Slice object at 0x7da2043472b0>] + tuple[[<ast.Name object at 0x7da204344c40>]]] + call[name[args]][<ast.Slice object at 0x7da204347e50>]]
call[name[self]._vmops.check_guests_exist_in_db, parameter[name[userids]]]
return[call[name[f], parameter[name[self], <ast.Starred object at 0x7da1b2345840>]]]
return[name[inner]]
return[name[outer]]
|
keyword[def] identifier[check_guest_exist] ( identifier[check_index] = literal[int] ):
literal[string]
keyword[def] identifier[outer] ( identifier[f] ):
@ identifier[six] . identifier[wraps] ( identifier[f] )
keyword[def] identifier[inner] ( identifier[self] ,* identifier[args] ,** identifier[kw] ):
identifier[userids] = identifier[args] [ identifier[check_index] ]
keyword[if] identifier[isinstance] ( identifier[userids] , identifier[list] ):
identifier[userids] =[ identifier[uid] . identifier[upper] () keyword[for] identifier[uid] keyword[in] identifier[userids] ]
identifier[new_args] =( identifier[args] [: identifier[check_index] ]+( identifier[userids] ,)+
identifier[args] [ identifier[check_index] + literal[int] :])
keyword[else] :
identifier[userids] = identifier[userids] . identifier[upper] ()
identifier[new_args] =( identifier[args] [: identifier[check_index] ]+( identifier[userids] ,)+
identifier[args] [ identifier[check_index] + literal[int] :])
identifier[userids] =[ identifier[userids] ]
identifier[self] . identifier[_vmops] . identifier[check_guests_exist_in_db] ( identifier[userids] )
keyword[return] identifier[f] ( identifier[self] ,* identifier[new_args] ,** identifier[kw] )
keyword[return] identifier[inner]
keyword[return] identifier[outer]
|
def check_guest_exist(check_index=0):
"""Check guest exist in database.
:param check_index: The parameter index of userid(s), default as 0
"""
def outer(f):
@six.wraps(f)
def inner(self, *args, **kw):
userids = args[check_index]
if isinstance(userids, list):
# convert all userids to upper case
userids = [uid.upper() for uid in userids]
new_args = args[:check_index] + (userids,) + args[check_index + 1:] # depends on [control=['if'], data=[]]
else:
# convert the userid to upper case
userids = userids.upper()
new_args = args[:check_index] + (userids,) + args[check_index + 1:]
userids = [userids]
self._vmops.check_guests_exist_in_db(userids)
return f(self, *new_args, **kw)
return inner
return outer
|
def valid_file(value):
"""
Check if given file exists and is a regular file.
Args:
value (str): path to the file.
Raises:
argparse.ArgumentTypeError: if not valid.
Returns:
str: original value argument.
"""
if not value:
raise argparse.ArgumentTypeError("'' is not a valid file path")
elif not os.path.exists(value):
raise argparse.ArgumentTypeError(
"%s is not a valid file path" % value)
elif os.path.isdir(value):
raise argparse.ArgumentTypeError(
"%s is a directory, not a regular file" % value)
return value
|
def function[valid_file, parameter[value]]:
constant[
Check if given file exists and is a regular file.
Args:
value (str): path to the file.
Raises:
argparse.ArgumentTypeError: if not valid.
Returns:
str: original value argument.
]
if <ast.UnaryOp object at 0x7da1b20d6170> begin[:]
<ast.Raise object at 0x7da1b20d52a0>
return[name[value]]
|
keyword[def] identifier[valid_file] ( identifier[value] ):
literal[string]
keyword[if] keyword[not] identifier[value] :
keyword[raise] identifier[argparse] . identifier[ArgumentTypeError] ( literal[string] )
keyword[elif] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[value] ):
keyword[raise] identifier[argparse] . identifier[ArgumentTypeError] (
literal[string] % identifier[value] )
keyword[elif] identifier[os] . identifier[path] . identifier[isdir] ( identifier[value] ):
keyword[raise] identifier[argparse] . identifier[ArgumentTypeError] (
literal[string] % identifier[value] )
keyword[return] identifier[value]
|
def valid_file(value):
"""
Check if given file exists and is a regular file.
Args:
value (str): path to the file.
Raises:
argparse.ArgumentTypeError: if not valid.
Returns:
str: original value argument.
"""
if not value:
raise argparse.ArgumentTypeError("'' is not a valid file path") # depends on [control=['if'], data=[]]
elif not os.path.exists(value):
raise argparse.ArgumentTypeError('%s is not a valid file path' % value) # depends on [control=['if'], data=[]]
elif os.path.isdir(value):
raise argparse.ArgumentTypeError('%s is a directory, not a regular file' % value) # depends on [control=['if'], data=[]]
return value
|
def amplify_ground_shaking(T, vs30, gmvs):
"""
:param T: period
:param vs30: velocity
:param gmvs: ground motion values for the current site in units of g
"""
gmvs[gmvs > MAX_GMV] = MAX_GMV # accelerations > 5g are absurd
interpolator = interpolate.interp1d(
[0, 0.1, 0.2, 0.3, 0.4, 5],
[(760 / vs30)**0.35,
(760 / vs30)**0.35,
(760 / vs30)**0.25,
(760 / vs30)**0.10,
(760 / vs30)**-0.05,
(760 / vs30)**-0.05],
) if T <= 0.3 else interpolate.interp1d(
[0, 0.1, 0.2, 0.3, 0.4, 5],
[(760 / vs30)**0.65,
(760 / vs30)**0.65,
(760 / vs30)**0.60,
(760 / vs30)**0.53,
(760 / vs30)**0.45,
(760 / vs30)**0.45],
)
return interpolator(gmvs) * gmvs
|
def function[amplify_ground_shaking, parameter[T, vs30, gmvs]]:
constant[
:param T: period
:param vs30: velocity
:param gmvs: ground motion values for the current site in units of g
]
call[name[gmvs]][compare[name[gmvs] greater[>] name[MAX_GMV]]] assign[=] name[MAX_GMV]
variable[interpolator] assign[=] <ast.IfExp object at 0x7da18f09c070>
return[binary_operation[call[name[interpolator], parameter[name[gmvs]]] * name[gmvs]]]
|
keyword[def] identifier[amplify_ground_shaking] ( identifier[T] , identifier[vs30] , identifier[gmvs] ):
literal[string]
identifier[gmvs] [ identifier[gmvs] > identifier[MAX_GMV] ]= identifier[MAX_GMV]
identifier[interpolator] = identifier[interpolate] . identifier[interp1d] (
[ literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] ],
[( literal[int] / identifier[vs30] )** literal[int] ,
( literal[int] / identifier[vs30] )** literal[int] ,
( literal[int] / identifier[vs30] )** literal[int] ,
( literal[int] / identifier[vs30] )** literal[int] ,
( literal[int] / identifier[vs30] )**- literal[int] ,
( literal[int] / identifier[vs30] )**- literal[int] ],
) keyword[if] identifier[T] <= literal[int] keyword[else] identifier[interpolate] . identifier[interp1d] (
[ literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] ],
[( literal[int] / identifier[vs30] )** literal[int] ,
( literal[int] / identifier[vs30] )** literal[int] ,
( literal[int] / identifier[vs30] )** literal[int] ,
( literal[int] / identifier[vs30] )** literal[int] ,
( literal[int] / identifier[vs30] )** literal[int] ,
( literal[int] / identifier[vs30] )** literal[int] ],
)
keyword[return] identifier[interpolator] ( identifier[gmvs] )* identifier[gmvs]
|
def amplify_ground_shaking(T, vs30, gmvs):
"""
:param T: period
:param vs30: velocity
:param gmvs: ground motion values for the current site in units of g
"""
gmvs[gmvs > MAX_GMV] = MAX_GMV # accelerations > 5g are absurd
interpolator = interpolate.interp1d([0, 0.1, 0.2, 0.3, 0.4, 5], [(760 / vs30) ** 0.35, (760 / vs30) ** 0.35, (760 / vs30) ** 0.25, (760 / vs30) ** 0.1, (760 / vs30) ** (-0.05), (760 / vs30) ** (-0.05)]) if T <= 0.3 else interpolate.interp1d([0, 0.1, 0.2, 0.3, 0.4, 5], [(760 / vs30) ** 0.65, (760 / vs30) ** 0.65, (760 / vs30) ** 0.6, (760 / vs30) ** 0.53, (760 / vs30) ** 0.45, (760 / vs30) ** 0.45])
return interpolator(gmvs) * gmvs
|
def most_energetic(df):
"""Grab most energetic particle from mc_tracks dataframe."""
idx = df.groupby(['event_id'])['energy'].transform(max) == df['energy']
return df[idx].reindex()
|
def function[most_energetic, parameter[df]]:
constant[Grab most energetic particle from mc_tracks dataframe.]
variable[idx] assign[=] compare[call[call[call[name[df].groupby, parameter[list[[<ast.Constant object at 0x7da18f812a70>]]]]][constant[energy]].transform, parameter[name[max]]] equal[==] call[name[df]][constant[energy]]]
return[call[call[name[df]][name[idx]].reindex, parameter[]]]
|
keyword[def] identifier[most_energetic] ( identifier[df] ):
literal[string]
identifier[idx] = identifier[df] . identifier[groupby] ([ literal[string] ])[ literal[string] ]. identifier[transform] ( identifier[max] )== identifier[df] [ literal[string] ]
keyword[return] identifier[df] [ identifier[idx] ]. identifier[reindex] ()
|
def most_energetic(df):
"""Grab most energetic particle from mc_tracks dataframe."""
idx = df.groupby(['event_id'])['energy'].transform(max) == df['energy']
return df[idx].reindex()
|
def clean_dateobject_to_string(x):
"""Convert a Pandas Timestamp object or datetime object
to 'YYYY-MM-DD' string
Parameters
----------
x : str, list, tuple, numpy.ndarray, pandas.DataFrame
A Pandas Timestamp object or datetime object,
or an array of these objects
Returns
-------
y : str, list, tuple, numpy.ndarray, pandas.DataFrame
A string 'YYYY-MM-DD' or array of date strings.
Example
-------
The function aims to convert a string as follows
Timestamp('2014-09-23 00:00:00') => '2014-09-23'
datetime.datetime(2014,9,23,0,0) => '2014-09-23'
Code Example
------------
print(clean_dateobject_to_string(pd.Timestamp('2014-09-23 00:00:00')))
'2014-09-23'
print(clean_dateobject_to_string(datetime(2014,9,23,0,0)))
'2014-09-23'
Behavior
--------
- If it is not an object with strftime function the None is return
"""
import numpy as np
import pandas as pd
def proc_elem(e):
try:
return e.strftime("%Y-%m-%d")
except Exception as e:
print(e)
return None
def proc_list(x):
return [proc_elem(e) for e in x]
def proc_ndarray(x):
tmp = proc_list(list(x.reshape((x.size,))))
return np.array(tmp).reshape(x.shape)
# transform string, list/tuple, numpy array, pandas dataframe
if "strftime" in dir(x):
return proc_elem(x)
elif isinstance(x, (list, tuple)):
return proc_list(x)
elif isinstance(x, np.ndarray):
return proc_ndarray(x)
elif isinstance(x, pd.DataFrame):
return pd.DataFrame(proc_ndarray(x.values),
columns=x.columns,
index=x.index)
else:
return None
|
def function[clean_dateobject_to_string, parameter[x]]:
constant[Convert a Pandas Timestamp object or datetime object
to 'YYYY-MM-DD' string
Parameters
----------
x : str, list, tuple, numpy.ndarray, pandas.DataFrame
A Pandas Timestamp object or datetime object,
or an array of these objects
Returns
-------
y : str, list, tuple, numpy.ndarray, pandas.DataFrame
A string 'YYYY-MM-DD' or array of date strings.
Example
-------
The function aims to convert a string as follows
Timestamp('2014-09-23 00:00:00') => '2014-09-23'
datetime.datetime(2014,9,23,0,0) => '2014-09-23'
Code Example
------------
print(clean_dateobject_to_string(pd.Timestamp('2014-09-23 00:00:00')))
'2014-09-23'
print(clean_dateobject_to_string(datetime(2014,9,23,0,0)))
'2014-09-23'
Behavior
--------
- If it is not an object with strftime function the None is return
]
import module[numpy] as alias[np]
import module[pandas] as alias[pd]
def function[proc_elem, parameter[e]]:
<ast.Try object at 0x7da20c6c6860>
def function[proc_list, parameter[x]]:
return[<ast.ListComp object at 0x7da20c6c5570>]
def function[proc_ndarray, parameter[x]]:
variable[tmp] assign[=] call[name[proc_list], parameter[call[name[list], parameter[call[name[x].reshape, parameter[tuple[[<ast.Attribute object at 0x7da20c6c41f0>]]]]]]]]
return[call[call[name[np].array, parameter[name[tmp]]].reshape, parameter[name[x].shape]]]
if compare[constant[strftime] in call[name[dir], parameter[name[x]]]] begin[:]
return[call[name[proc_elem], parameter[name[x]]]]
|
keyword[def] identifier[clean_dateobject_to_string] ( identifier[x] ):
literal[string]
keyword[import] identifier[numpy] keyword[as] identifier[np]
keyword[import] identifier[pandas] keyword[as] identifier[pd]
keyword[def] identifier[proc_elem] ( identifier[e] ):
keyword[try] :
keyword[return] identifier[e] . identifier[strftime] ( literal[string] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[print] ( identifier[e] )
keyword[return] keyword[None]
keyword[def] identifier[proc_list] ( identifier[x] ):
keyword[return] [ identifier[proc_elem] ( identifier[e] ) keyword[for] identifier[e] keyword[in] identifier[x] ]
keyword[def] identifier[proc_ndarray] ( identifier[x] ):
identifier[tmp] = identifier[proc_list] ( identifier[list] ( identifier[x] . identifier[reshape] (( identifier[x] . identifier[size] ,))))
keyword[return] identifier[np] . identifier[array] ( identifier[tmp] ). identifier[reshape] ( identifier[x] . identifier[shape] )
keyword[if] literal[string] keyword[in] identifier[dir] ( identifier[x] ):
keyword[return] identifier[proc_elem] ( identifier[x] )
keyword[elif] identifier[isinstance] ( identifier[x] ,( identifier[list] , identifier[tuple] )):
keyword[return] identifier[proc_list] ( identifier[x] )
keyword[elif] identifier[isinstance] ( identifier[x] , identifier[np] . identifier[ndarray] ):
keyword[return] identifier[proc_ndarray] ( identifier[x] )
keyword[elif] identifier[isinstance] ( identifier[x] , identifier[pd] . identifier[DataFrame] ):
keyword[return] identifier[pd] . identifier[DataFrame] ( identifier[proc_ndarray] ( identifier[x] . identifier[values] ),
identifier[columns] = identifier[x] . identifier[columns] ,
identifier[index] = identifier[x] . identifier[index] )
keyword[else] :
keyword[return] keyword[None]
|
def clean_dateobject_to_string(x):
"""Convert a Pandas Timestamp object or datetime object
to 'YYYY-MM-DD' string
Parameters
----------
x : str, list, tuple, numpy.ndarray, pandas.DataFrame
A Pandas Timestamp object or datetime object,
or an array of these objects
Returns
-------
y : str, list, tuple, numpy.ndarray, pandas.DataFrame
A string 'YYYY-MM-DD' or array of date strings.
Example
-------
The function aims to convert a string as follows
Timestamp('2014-09-23 00:00:00') => '2014-09-23'
datetime.datetime(2014,9,23,0,0) => '2014-09-23'
Code Example
------------
print(clean_dateobject_to_string(pd.Timestamp('2014-09-23 00:00:00')))
'2014-09-23'
print(clean_dateobject_to_string(datetime(2014,9,23,0,0)))
'2014-09-23'
Behavior
--------
- If it is not an object with strftime function the None is return
"""
import numpy as np
import pandas as pd
def proc_elem(e):
try:
return e.strftime('%Y-%m-%d') # depends on [control=['try'], data=[]]
except Exception as e:
print(e)
return None # depends on [control=['except'], data=['e']]
def proc_list(x):
return [proc_elem(e) for e in x]
def proc_ndarray(x):
tmp = proc_list(list(x.reshape((x.size,))))
return np.array(tmp).reshape(x.shape)
# transform string, list/tuple, numpy array, pandas dataframe
if 'strftime' in dir(x):
return proc_elem(x) # depends on [control=['if'], data=[]]
elif isinstance(x, (list, tuple)):
return proc_list(x) # depends on [control=['if'], data=[]]
elif isinstance(x, np.ndarray):
return proc_ndarray(x) # depends on [control=['if'], data=[]]
elif isinstance(x, pd.DataFrame):
return pd.DataFrame(proc_ndarray(x.values), columns=x.columns, index=x.index) # depends on [control=['if'], data=[]]
else:
return None
|
def write_input(self, output_dir, make_dir_if_not_present=True,
write_cif=False, write_path_cif=False,
write_endpoint_inputs=False):
"""
NEB inputs has a special directory structure where inputs are in 00,
01, 02, ....
Args:
output_dir (str): Directory to output the VASP input files
make_dir_if_not_present (bool): Set to True if you want the
directory (and the whole path) to be created if it is not
present.
write_cif (bool): If true, writes a cif along with each POSCAR.
write_path_cif (bool): If true, writes a cif for each image.
write_endpoint_inputs (bool): If true, writes input files for
running endpoint calculations.
"""
output_dir = Path(output_dir)
if make_dir_if_not_present and not output_dir.exists():
output_dir.mkdir(parents=True)
self.incar.write_file(str(output_dir / 'INCAR'))
self.kpoints.write_file(str(output_dir / 'KPOINTS'))
self.potcar.write_file(str(output_dir / 'POTCAR'))
for i, p in enumerate(self.poscars):
d = output_dir / str(i).zfill(2)
if not d.exists():
d.mkdir(parents=True)
p.write_file(str(d / 'POSCAR'))
if write_cif:
p.structure.to(filename=str(d / '{}.cif'.format(i)))
if write_endpoint_inputs:
end_point_param = MITRelaxSet(
self.structures[0],
user_incar_settings=self.user_incar_settings)
for image in ['00', str(len(self.structures) - 1).zfill(2)]:
end_point_param.incar.write_file(
str(output_dir / image / 'INCAR'))
end_point_param.kpoints.write_file(
str(output_dir / image / 'KPOINTS'))
end_point_param.potcar.write_file(
str(output_dir / image / 'POTCAR'))
if write_path_cif:
sites = set()
l = self.structures[0].lattice
for site in chain(*(s.sites for s in self.structures)):
sites.add(
PeriodicSite(site.species, site.frac_coords, l))
nebpath = Structure.from_sites(sorted(sites))
nebpath.to(filename=str(output_dir / 'path.cif'))
|
def function[write_input, parameter[self, output_dir, make_dir_if_not_present, write_cif, write_path_cif, write_endpoint_inputs]]:
constant[
NEB inputs has a special directory structure where inputs are in 00,
01, 02, ....
Args:
output_dir (str): Directory to output the VASP input files
make_dir_if_not_present (bool): Set to True if you want the
directory (and the whole path) to be created if it is not
present.
write_cif (bool): If true, writes a cif along with each POSCAR.
write_path_cif (bool): If true, writes a cif for each image.
write_endpoint_inputs (bool): If true, writes input files for
running endpoint calculations.
]
variable[output_dir] assign[=] call[name[Path], parameter[name[output_dir]]]
if <ast.BoolOp object at 0x7da18dc98eb0> begin[:]
call[name[output_dir].mkdir, parameter[]]
call[name[self].incar.write_file, parameter[call[name[str], parameter[binary_operation[name[output_dir] / constant[INCAR]]]]]]
call[name[self].kpoints.write_file, parameter[call[name[str], parameter[binary_operation[name[output_dir] / constant[KPOINTS]]]]]]
call[name[self].potcar.write_file, parameter[call[name[str], parameter[binary_operation[name[output_dir] / constant[POTCAR]]]]]]
for taget[tuple[[<ast.Name object at 0x7da18ede4760>, <ast.Name object at 0x7da18ede5810>]]] in starred[call[name[enumerate], parameter[name[self].poscars]]] begin[:]
variable[d] assign[=] binary_operation[name[output_dir] / call[call[name[str], parameter[name[i]]].zfill, parameter[constant[2]]]]
if <ast.UnaryOp object at 0x7da18f722860> begin[:]
call[name[d].mkdir, parameter[]]
call[name[p].write_file, parameter[call[name[str], parameter[binary_operation[name[d] / constant[POSCAR]]]]]]
if name[write_cif] begin[:]
call[name[p].structure.to, parameter[]]
if name[write_endpoint_inputs] begin[:]
variable[end_point_param] assign[=] call[name[MITRelaxSet], parameter[call[name[self].structures][constant[0]]]]
for taget[name[image]] in starred[list[[<ast.Constant object at 0x7da207f03a30>, <ast.Call object at 0x7da207f03190>]]] begin[:]
call[name[end_point_param].incar.write_file, parameter[call[name[str], parameter[binary_operation[binary_operation[name[output_dir] / name[image]] / constant[INCAR]]]]]]
call[name[end_point_param].kpoints.write_file, parameter[call[name[str], parameter[binary_operation[binary_operation[name[output_dir] / name[image]] / constant[KPOINTS]]]]]]
call[name[end_point_param].potcar.write_file, parameter[call[name[str], parameter[binary_operation[binary_operation[name[output_dir] / name[image]] / constant[POTCAR]]]]]]
if name[write_path_cif] begin[:]
variable[sites] assign[=] call[name[set], parameter[]]
variable[l] assign[=] call[name[self].structures][constant[0]].lattice
for taget[name[site]] in starred[call[name[chain], parameter[<ast.Starred object at 0x7da18f723280>]]] begin[:]
call[name[sites].add, parameter[call[name[PeriodicSite], parameter[name[site].species, name[site].frac_coords, name[l]]]]]
variable[nebpath] assign[=] call[name[Structure].from_sites, parameter[call[name[sorted], parameter[name[sites]]]]]
call[name[nebpath].to, parameter[]]
|
keyword[def] identifier[write_input] ( identifier[self] , identifier[output_dir] , identifier[make_dir_if_not_present] = keyword[True] ,
identifier[write_cif] = keyword[False] , identifier[write_path_cif] = keyword[False] ,
identifier[write_endpoint_inputs] = keyword[False] ):
literal[string]
identifier[output_dir] = identifier[Path] ( identifier[output_dir] )
keyword[if] identifier[make_dir_if_not_present] keyword[and] keyword[not] identifier[output_dir] . identifier[exists] ():
identifier[output_dir] . identifier[mkdir] ( identifier[parents] = keyword[True] )
identifier[self] . identifier[incar] . identifier[write_file] ( identifier[str] ( identifier[output_dir] / literal[string] ))
identifier[self] . identifier[kpoints] . identifier[write_file] ( identifier[str] ( identifier[output_dir] / literal[string] ))
identifier[self] . identifier[potcar] . identifier[write_file] ( identifier[str] ( identifier[output_dir] / literal[string] ))
keyword[for] identifier[i] , identifier[p] keyword[in] identifier[enumerate] ( identifier[self] . identifier[poscars] ):
identifier[d] = identifier[output_dir] / identifier[str] ( identifier[i] ). identifier[zfill] ( literal[int] )
keyword[if] keyword[not] identifier[d] . identifier[exists] ():
identifier[d] . identifier[mkdir] ( identifier[parents] = keyword[True] )
identifier[p] . identifier[write_file] ( identifier[str] ( identifier[d] / literal[string] ))
keyword[if] identifier[write_cif] :
identifier[p] . identifier[structure] . identifier[to] ( identifier[filename] = identifier[str] ( identifier[d] / literal[string] . identifier[format] ( identifier[i] )))
keyword[if] identifier[write_endpoint_inputs] :
identifier[end_point_param] = identifier[MITRelaxSet] (
identifier[self] . identifier[structures] [ literal[int] ],
identifier[user_incar_settings] = identifier[self] . identifier[user_incar_settings] )
keyword[for] identifier[image] keyword[in] [ literal[string] , identifier[str] ( identifier[len] ( identifier[self] . identifier[structures] )- literal[int] ). identifier[zfill] ( literal[int] )]:
identifier[end_point_param] . identifier[incar] . identifier[write_file] (
identifier[str] ( identifier[output_dir] / identifier[image] / literal[string] ))
identifier[end_point_param] . identifier[kpoints] . identifier[write_file] (
identifier[str] ( identifier[output_dir] / identifier[image] / literal[string] ))
identifier[end_point_param] . identifier[potcar] . identifier[write_file] (
identifier[str] ( identifier[output_dir] / identifier[image] / literal[string] ))
keyword[if] identifier[write_path_cif] :
identifier[sites] = identifier[set] ()
identifier[l] = identifier[self] . identifier[structures] [ literal[int] ]. identifier[lattice]
keyword[for] identifier[site] keyword[in] identifier[chain] (*( identifier[s] . identifier[sites] keyword[for] identifier[s] keyword[in] identifier[self] . identifier[structures] )):
identifier[sites] . identifier[add] (
identifier[PeriodicSite] ( identifier[site] . identifier[species] , identifier[site] . identifier[frac_coords] , identifier[l] ))
identifier[nebpath] = identifier[Structure] . identifier[from_sites] ( identifier[sorted] ( identifier[sites] ))
identifier[nebpath] . identifier[to] ( identifier[filename] = identifier[str] ( identifier[output_dir] / literal[string] ))
|
def write_input(self, output_dir, make_dir_if_not_present=True, write_cif=False, write_path_cif=False, write_endpoint_inputs=False):
"""
NEB inputs has a special directory structure where inputs are in 00,
01, 02, ....
Args:
output_dir (str): Directory to output the VASP input files
make_dir_if_not_present (bool): Set to True if you want the
directory (and the whole path) to be created if it is not
present.
write_cif (bool): If true, writes a cif along with each POSCAR.
write_path_cif (bool): If true, writes a cif for each image.
write_endpoint_inputs (bool): If true, writes input files for
running endpoint calculations.
"""
output_dir = Path(output_dir)
if make_dir_if_not_present and (not output_dir.exists()):
output_dir.mkdir(parents=True) # depends on [control=['if'], data=[]]
self.incar.write_file(str(output_dir / 'INCAR'))
self.kpoints.write_file(str(output_dir / 'KPOINTS'))
self.potcar.write_file(str(output_dir / 'POTCAR'))
for (i, p) in enumerate(self.poscars):
d = output_dir / str(i).zfill(2)
if not d.exists():
d.mkdir(parents=True) # depends on [control=['if'], data=[]]
p.write_file(str(d / 'POSCAR'))
if write_cif:
p.structure.to(filename=str(d / '{}.cif'.format(i))) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
if write_endpoint_inputs:
end_point_param = MITRelaxSet(self.structures[0], user_incar_settings=self.user_incar_settings)
for image in ['00', str(len(self.structures) - 1).zfill(2)]:
end_point_param.incar.write_file(str(output_dir / image / 'INCAR'))
end_point_param.kpoints.write_file(str(output_dir / image / 'KPOINTS'))
end_point_param.potcar.write_file(str(output_dir / image / 'POTCAR')) # depends on [control=['for'], data=['image']] # depends on [control=['if'], data=[]]
if write_path_cif:
sites = set()
l = self.structures[0].lattice
for site in chain(*(s.sites for s in self.structures)):
sites.add(PeriodicSite(site.species, site.frac_coords, l)) # depends on [control=['for'], data=['site']]
nebpath = Structure.from_sites(sorted(sites))
nebpath.to(filename=str(output_dir / 'path.cif')) # depends on [control=['if'], data=[]]
|
def assert_datetime_about_now(actual, msg_fmt="{msg}"):
"""Fail if a datetime object is not within 5 seconds of the local time.
>>> assert_datetime_about_now(datetime.now())
>>> assert_datetime_about_now(datetime(1900, 1, 1, 12, 0, 0))
Traceback (most recent call last):
...
AssertionError: datetime.datetime(1900, 1, 1, 12, 0) is not close to current date/time
The following msg_fmt arguments are supported:
* msg - the default error message
* actual - datetime object to check
* now - current datetime that was tested against
"""
now = datetime.now()
if actual is None:
msg = "None is not a valid date/time"
fail(msg_fmt.format(msg=msg, actual=actual, now=now))
lower_bound = now - timedelta(seconds=_EPSILON_SECONDS)
upper_bound = now + timedelta(seconds=_EPSILON_SECONDS)
if not lower_bound <= actual <= upper_bound:
msg = "{!r} is not close to current date/time".format(actual)
fail(msg_fmt.format(msg=msg, actual=actual, now=now))
|
def function[assert_datetime_about_now, parameter[actual, msg_fmt]]:
constant[Fail if a datetime object is not within 5 seconds of the local time.
>>> assert_datetime_about_now(datetime.now())
>>> assert_datetime_about_now(datetime(1900, 1, 1, 12, 0, 0))
Traceback (most recent call last):
...
AssertionError: datetime.datetime(1900, 1, 1, 12, 0) is not close to current date/time
The following msg_fmt arguments are supported:
* msg - the default error message
* actual - datetime object to check
* now - current datetime that was tested against
]
variable[now] assign[=] call[name[datetime].now, parameter[]]
if compare[name[actual] is constant[None]] begin[:]
variable[msg] assign[=] constant[None is not a valid date/time]
call[name[fail], parameter[call[name[msg_fmt].format, parameter[]]]]
variable[lower_bound] assign[=] binary_operation[name[now] - call[name[timedelta], parameter[]]]
variable[upper_bound] assign[=] binary_operation[name[now] + call[name[timedelta], parameter[]]]
if <ast.UnaryOp object at 0x7da1b0ca50f0> begin[:]
variable[msg] assign[=] call[constant[{!r} is not close to current date/time].format, parameter[name[actual]]]
call[name[fail], parameter[call[name[msg_fmt].format, parameter[]]]]
|
keyword[def] identifier[assert_datetime_about_now] ( identifier[actual] , identifier[msg_fmt] = literal[string] ):
literal[string]
identifier[now] = identifier[datetime] . identifier[now] ()
keyword[if] identifier[actual] keyword[is] keyword[None] :
identifier[msg] = literal[string]
identifier[fail] ( identifier[msg_fmt] . identifier[format] ( identifier[msg] = identifier[msg] , identifier[actual] = identifier[actual] , identifier[now] = identifier[now] ))
identifier[lower_bound] = identifier[now] - identifier[timedelta] ( identifier[seconds] = identifier[_EPSILON_SECONDS] )
identifier[upper_bound] = identifier[now] + identifier[timedelta] ( identifier[seconds] = identifier[_EPSILON_SECONDS] )
keyword[if] keyword[not] identifier[lower_bound] <= identifier[actual] <= identifier[upper_bound] :
identifier[msg] = literal[string] . identifier[format] ( identifier[actual] )
identifier[fail] ( identifier[msg_fmt] . identifier[format] ( identifier[msg] = identifier[msg] , identifier[actual] = identifier[actual] , identifier[now] = identifier[now] ))
|
def assert_datetime_about_now(actual, msg_fmt='{msg}'):
"""Fail if a datetime object is not within 5 seconds of the local time.
>>> assert_datetime_about_now(datetime.now())
>>> assert_datetime_about_now(datetime(1900, 1, 1, 12, 0, 0))
Traceback (most recent call last):
...
AssertionError: datetime.datetime(1900, 1, 1, 12, 0) is not close to current date/time
The following msg_fmt arguments are supported:
* msg - the default error message
* actual - datetime object to check
* now - current datetime that was tested against
"""
now = datetime.now()
if actual is None:
msg = 'None is not a valid date/time'
fail(msg_fmt.format(msg=msg, actual=actual, now=now)) # depends on [control=['if'], data=['actual']]
lower_bound = now - timedelta(seconds=_EPSILON_SECONDS)
upper_bound = now + timedelta(seconds=_EPSILON_SECONDS)
if not lower_bound <= actual <= upper_bound:
msg = '{!r} is not close to current date/time'.format(actual)
fail(msg_fmt.format(msg=msg, actual=actual, now=now)) # depends on [control=['if'], data=[]]
|
def remove_event_detect(channel):
"""
:param channel: the channel based on the numbering system you have specified
(:py:attr:`GPIO.BOARD`, :py:attr:`GPIO.BCM` or :py:attr:`GPIO.SUNXI`).
"""
_check_configured(channel, direction=IN)
pin = get_gpio_pin(_mode, channel)
event.remove_edge_detect(pin)
|
def function[remove_event_detect, parameter[channel]]:
constant[
:param channel: the channel based on the numbering system you have specified
(:py:attr:`GPIO.BOARD`, :py:attr:`GPIO.BCM` or :py:attr:`GPIO.SUNXI`).
]
call[name[_check_configured], parameter[name[channel]]]
variable[pin] assign[=] call[name[get_gpio_pin], parameter[name[_mode], name[channel]]]
call[name[event].remove_edge_detect, parameter[name[pin]]]
|
keyword[def] identifier[remove_event_detect] ( identifier[channel] ):
literal[string]
identifier[_check_configured] ( identifier[channel] , identifier[direction] = identifier[IN] )
identifier[pin] = identifier[get_gpio_pin] ( identifier[_mode] , identifier[channel] )
identifier[event] . identifier[remove_edge_detect] ( identifier[pin] )
|
def remove_event_detect(channel):
"""
:param channel: the channel based on the numbering system you have specified
(:py:attr:`GPIO.BOARD`, :py:attr:`GPIO.BCM` or :py:attr:`GPIO.SUNXI`).
"""
_check_configured(channel, direction=IN)
pin = get_gpio_pin(_mode, channel)
event.remove_edge_detect(pin)
|
def get_arguments():
"""Get the command line arguments"""
import argparse
parser = argparse.ArgumentParser(
description='Plot spectral responses for a set of satellite imagers')
parser.add_argument("--platform_name", '-p', nargs='*',
help="The Platform name",
type=str, required=True)
parser.add_argument("--sensor", '-s', nargs='*',
help="The sensor/instrument name",
type=str, required=True)
parser.add_argument("-x", "--xlimits", nargs=2,
help=("x-axis boundaries for plot"),
default=None, type=float)
parser.add_argument("-y", "--ylimits", nargs=2,
help=("y-axis boundaries for plot"),
default=None, type=float)
parser.add_argument("-t", "--minimum_response",
help=("Minimum response: Any response lower than " +
"this will be ignored when plotting"),
default=0.015, type=float)
parser.add_argument("-no_platform_name_in_legend", help=("No platform name in legend"),
action='store_true')
parser.add_argument("--title", help=("Plot title"),
default=None, type=str)
parser.add_argument("--wavelength_resolution",
help=("The step in wavelength (nanometers) when scanning\n" +
" the spectral range trying to find bands"),
default=0.005, type=float)
parser.add_argument("-o", "--filename", help=("Output plot file name"),
default=None, type=str)
parser.add_argument(
"-v", '--verbose', help=("Turn logging on"), action='store_true')
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument("--bandname", '-b',
help="The sensor band name", type=str)
group.add_argument("--wavelength", "-w", type=float,
help='the approximate spectral wavelength in micron')
group.add_argument("--range", "-r", nargs='*',
help="The wavelength range for the plot",
default=[None, None], type=float)
return parser.parse_args()
|
def function[get_arguments, parameter[]]:
constant[Get the command line arguments]
import module[argparse]
variable[parser] assign[=] call[name[argparse].ArgumentParser, parameter[]]
call[name[parser].add_argument, parameter[constant[--platform_name], constant[-p]]]
call[name[parser].add_argument, parameter[constant[--sensor], constant[-s]]]
call[name[parser].add_argument, parameter[constant[-x], constant[--xlimits]]]
call[name[parser].add_argument, parameter[constant[-y], constant[--ylimits]]]
call[name[parser].add_argument, parameter[constant[-t], constant[--minimum_response]]]
call[name[parser].add_argument, parameter[constant[-no_platform_name_in_legend]]]
call[name[parser].add_argument, parameter[constant[--title]]]
call[name[parser].add_argument, parameter[constant[--wavelength_resolution]]]
call[name[parser].add_argument, parameter[constant[-o], constant[--filename]]]
call[name[parser].add_argument, parameter[constant[-v], constant[--verbose]]]
variable[group] assign[=] call[name[parser].add_mutually_exclusive_group, parameter[]]
call[name[group].add_argument, parameter[constant[--bandname], constant[-b]]]
call[name[group].add_argument, parameter[constant[--wavelength], constant[-w]]]
call[name[group].add_argument, parameter[constant[--range], constant[-r]]]
return[call[name[parser].parse_args, parameter[]]]
|
keyword[def] identifier[get_arguments] ():
literal[string]
keyword[import] identifier[argparse]
identifier[parser] = identifier[argparse] . identifier[ArgumentParser] (
identifier[description] = literal[string] )
identifier[parser] . identifier[add_argument] ( literal[string] , literal[string] , identifier[nargs] = literal[string] ,
identifier[help] = literal[string] ,
identifier[type] = identifier[str] , identifier[required] = keyword[True] )
identifier[parser] . identifier[add_argument] ( literal[string] , literal[string] , identifier[nargs] = literal[string] ,
identifier[help] = literal[string] ,
identifier[type] = identifier[str] , identifier[required] = keyword[True] )
identifier[parser] . identifier[add_argument] ( literal[string] , literal[string] , identifier[nargs] = literal[int] ,
identifier[help] =( literal[string] ),
identifier[default] = keyword[None] , identifier[type] = identifier[float] )
identifier[parser] . identifier[add_argument] ( literal[string] , literal[string] , identifier[nargs] = literal[int] ,
identifier[help] =( literal[string] ),
identifier[default] = keyword[None] , identifier[type] = identifier[float] )
identifier[parser] . identifier[add_argument] ( literal[string] , literal[string] ,
identifier[help] =( literal[string] +
literal[string] ),
identifier[default] = literal[int] , identifier[type] = identifier[float] )
identifier[parser] . identifier[add_argument] ( literal[string] , identifier[help] =( literal[string] ),
identifier[action] = literal[string] )
identifier[parser] . identifier[add_argument] ( literal[string] , identifier[help] =( literal[string] ),
identifier[default] = keyword[None] , identifier[type] = identifier[str] )
identifier[parser] . identifier[add_argument] ( literal[string] ,
identifier[help] =( literal[string] +
literal[string] ),
identifier[default] = literal[int] , identifier[type] = identifier[float] )
identifier[parser] . identifier[add_argument] ( literal[string] , literal[string] , identifier[help] =( literal[string] ),
identifier[default] = keyword[None] , identifier[type] = identifier[str] )
identifier[parser] . identifier[add_argument] (
literal[string] , literal[string] , identifier[help] =( literal[string] ), identifier[action] = literal[string] )
identifier[group] = identifier[parser] . identifier[add_mutually_exclusive_group] ( identifier[required] = keyword[True] )
identifier[group] . identifier[add_argument] ( literal[string] , literal[string] ,
identifier[help] = literal[string] , identifier[type] = identifier[str] )
identifier[group] . identifier[add_argument] ( literal[string] , literal[string] , identifier[type] = identifier[float] ,
identifier[help] = literal[string] )
identifier[group] . identifier[add_argument] ( literal[string] , literal[string] , identifier[nargs] = literal[string] ,
identifier[help] = literal[string] ,
identifier[default] =[ keyword[None] , keyword[None] ], identifier[type] = identifier[float] )
keyword[return] identifier[parser] . identifier[parse_args] ()
|
def get_arguments():
"""Get the command line arguments"""
import argparse
parser = argparse.ArgumentParser(description='Plot spectral responses for a set of satellite imagers')
parser.add_argument('--platform_name', '-p', nargs='*', help='The Platform name', type=str, required=True)
parser.add_argument('--sensor', '-s', nargs='*', help='The sensor/instrument name', type=str, required=True)
parser.add_argument('-x', '--xlimits', nargs=2, help='x-axis boundaries for plot', default=None, type=float)
parser.add_argument('-y', '--ylimits', nargs=2, help='y-axis boundaries for plot', default=None, type=float)
parser.add_argument('-t', '--minimum_response', help='Minimum response: Any response lower than ' + 'this will be ignored when plotting', default=0.015, type=float)
parser.add_argument('-no_platform_name_in_legend', help='No platform name in legend', action='store_true')
parser.add_argument('--title', help='Plot title', default=None, type=str)
parser.add_argument('--wavelength_resolution', help='The step in wavelength (nanometers) when scanning\n' + ' the spectral range trying to find bands', default=0.005, type=float)
parser.add_argument('-o', '--filename', help='Output plot file name', default=None, type=str)
parser.add_argument('-v', '--verbose', help='Turn logging on', action='store_true')
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument('--bandname', '-b', help='The sensor band name', type=str)
group.add_argument('--wavelength', '-w', type=float, help='the approximate spectral wavelength in micron')
group.add_argument('--range', '-r', nargs='*', help='The wavelength range for the plot', default=[None, None], type=float)
return parser.parse_args()
|
def contained_in(filename, directory):
"""Test if a file is located within the given directory."""
filename = os.path.normcase(os.path.abspath(filename))
directory = os.path.normcase(os.path.abspath(directory))
return os.path.commonprefix([filename, directory]) == directory
|
def function[contained_in, parameter[filename, directory]]:
constant[Test if a file is located within the given directory.]
variable[filename] assign[=] call[name[os].path.normcase, parameter[call[name[os].path.abspath, parameter[name[filename]]]]]
variable[directory] assign[=] call[name[os].path.normcase, parameter[call[name[os].path.abspath, parameter[name[directory]]]]]
return[compare[call[name[os].path.commonprefix, parameter[list[[<ast.Name object at 0x7da207f009a0>, <ast.Name object at 0x7da207f020e0>]]]] equal[==] name[directory]]]
|
keyword[def] identifier[contained_in] ( identifier[filename] , identifier[directory] ):
literal[string]
identifier[filename] = identifier[os] . identifier[path] . identifier[normcase] ( identifier[os] . identifier[path] . identifier[abspath] ( identifier[filename] ))
identifier[directory] = identifier[os] . identifier[path] . identifier[normcase] ( identifier[os] . identifier[path] . identifier[abspath] ( identifier[directory] ))
keyword[return] identifier[os] . identifier[path] . identifier[commonprefix] ([ identifier[filename] , identifier[directory] ])== identifier[directory]
|
def contained_in(filename, directory):
"""Test if a file is located within the given directory."""
filename = os.path.normcase(os.path.abspath(filename))
directory = os.path.normcase(os.path.abspath(directory))
return os.path.commonprefix([filename, directory]) == directory
|
def get_content(self, params=None):
"""
Returns the raw byte content of a given Filelink
*returns* [Bytes]
```python
from filestack import Client
client = Client('API_KEY')
filelink = client.upload(filepath='/path/to/file/foo.jpg')
byte_content = filelink.get_content()
```
"""
if params:
CONTENT_DOWNLOAD_SCHEMA.check(params)
response = utils.make_call(CDN_URL, 'get',
handle=self.handle,
params=params,
security=self.security,
transform_url=(self.url if isinstance(self, filestack.models.Transform) else None))
return response.content
|
def function[get_content, parameter[self, params]]:
constant[
Returns the raw byte content of a given Filelink
*returns* [Bytes]
```python
from filestack import Client
client = Client('API_KEY')
filelink = client.upload(filepath='/path/to/file/foo.jpg')
byte_content = filelink.get_content()
```
]
if name[params] begin[:]
call[name[CONTENT_DOWNLOAD_SCHEMA].check, parameter[name[params]]]
variable[response] assign[=] call[name[utils].make_call, parameter[name[CDN_URL], constant[get]]]
return[name[response].content]
|
keyword[def] identifier[get_content] ( identifier[self] , identifier[params] = keyword[None] ):
literal[string]
keyword[if] identifier[params] :
identifier[CONTENT_DOWNLOAD_SCHEMA] . identifier[check] ( identifier[params] )
identifier[response] = identifier[utils] . identifier[make_call] ( identifier[CDN_URL] , literal[string] ,
identifier[handle] = identifier[self] . identifier[handle] ,
identifier[params] = identifier[params] ,
identifier[security] = identifier[self] . identifier[security] ,
identifier[transform_url] =( identifier[self] . identifier[url] keyword[if] identifier[isinstance] ( identifier[self] , identifier[filestack] . identifier[models] . identifier[Transform] ) keyword[else] keyword[None] ))
keyword[return] identifier[response] . identifier[content]
|
def get_content(self, params=None):
"""
Returns the raw byte content of a given Filelink
*returns* [Bytes]
```python
from filestack import Client
client = Client('API_KEY')
filelink = client.upload(filepath='/path/to/file/foo.jpg')
byte_content = filelink.get_content()
```
"""
if params:
CONTENT_DOWNLOAD_SCHEMA.check(params) # depends on [control=['if'], data=[]]
response = utils.make_call(CDN_URL, 'get', handle=self.handle, params=params, security=self.security, transform_url=self.url if isinstance(self, filestack.models.Transform) else None)
return response.content
|
def config_examples(dest, user_dir):
""" Copy the example workflows to a directory.
\b
DEST: Path to which the examples should be copied.
"""
examples_path = Path(lightflow.__file__).parents[1] / 'examples'
if examples_path.exists():
dest_path = Path(dest).resolve()
if not user_dir:
dest_path = dest_path / 'examples'
if dest_path.exists():
if not click.confirm('Directory already exists. Overwrite existing files?',
default=True, abort=True):
return
else:
dest_path.mkdir()
for example_file in examples_path.glob('*.py'):
shutil.copy(str(example_file), str(dest_path / example_file.name))
click.echo('Copied examples to {}'.format(str(dest_path)))
else:
click.echo('The examples source path does not exist')
|
def function[config_examples, parameter[dest, user_dir]]:
constant[ Copy the example workflows to a directory.
DEST: Path to which the examples should be copied.
]
variable[examples_path] assign[=] binary_operation[call[call[name[Path], parameter[name[lightflow].__file__]].parents][constant[1]] / constant[examples]]
if call[name[examples_path].exists, parameter[]] begin[:]
variable[dest_path] assign[=] call[call[name[Path], parameter[name[dest]]].resolve, parameter[]]
if <ast.UnaryOp object at 0x7da1b1028880> begin[:]
variable[dest_path] assign[=] binary_operation[name[dest_path] / constant[examples]]
if call[name[dest_path].exists, parameter[]] begin[:]
if <ast.UnaryOp object at 0x7da1b1028b80> begin[:]
return[None]
for taget[name[example_file]] in starred[call[name[examples_path].glob, parameter[constant[*.py]]]] begin[:]
call[name[shutil].copy, parameter[call[name[str], parameter[name[example_file]]], call[name[str], parameter[binary_operation[name[dest_path] / name[example_file].name]]]]]
call[name[click].echo, parameter[call[constant[Copied examples to {}].format, parameter[call[name[str], parameter[name[dest_path]]]]]]]
|
keyword[def] identifier[config_examples] ( identifier[dest] , identifier[user_dir] ):
literal[string]
identifier[examples_path] = identifier[Path] ( identifier[lightflow] . identifier[__file__] ). identifier[parents] [ literal[int] ]/ literal[string]
keyword[if] identifier[examples_path] . identifier[exists] ():
identifier[dest_path] = identifier[Path] ( identifier[dest] ). identifier[resolve] ()
keyword[if] keyword[not] identifier[user_dir] :
identifier[dest_path] = identifier[dest_path] / literal[string]
keyword[if] identifier[dest_path] . identifier[exists] ():
keyword[if] keyword[not] identifier[click] . identifier[confirm] ( literal[string] ,
identifier[default] = keyword[True] , identifier[abort] = keyword[True] ):
keyword[return]
keyword[else] :
identifier[dest_path] . identifier[mkdir] ()
keyword[for] identifier[example_file] keyword[in] identifier[examples_path] . identifier[glob] ( literal[string] ):
identifier[shutil] . identifier[copy] ( identifier[str] ( identifier[example_file] ), identifier[str] ( identifier[dest_path] / identifier[example_file] . identifier[name] ))
identifier[click] . identifier[echo] ( literal[string] . identifier[format] ( identifier[str] ( identifier[dest_path] )))
keyword[else] :
identifier[click] . identifier[echo] ( literal[string] )
|
def config_examples(dest, user_dir):
""" Copy the example workflows to a directory.
\x08
DEST: Path to which the examples should be copied.
"""
examples_path = Path(lightflow.__file__).parents[1] / 'examples'
if examples_path.exists():
dest_path = Path(dest).resolve()
if not user_dir:
dest_path = dest_path / 'examples' # depends on [control=['if'], data=[]]
if dest_path.exists():
if not click.confirm('Directory already exists. Overwrite existing files?', default=True, abort=True):
return # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
dest_path.mkdir()
for example_file in examples_path.glob('*.py'):
shutil.copy(str(example_file), str(dest_path / example_file.name)) # depends on [control=['for'], data=['example_file']]
click.echo('Copied examples to {}'.format(str(dest_path))) # depends on [control=['if'], data=[]]
else:
click.echo('The examples source path does not exist')
|
def target_path (self):
""" Computes the target path that should be used for
target with these properties.
Returns a tuple of
- the computed path
- if the path is relative to build directory, a value of
'true'.
"""
if not self.target_path_:
# The <location> feature can be used to explicitly
# change the location of generated targets
l = self.get ('<location>')
if l:
computed = l[0]
is_relative = False
else:
p = self.as_path()
if hash_maybe:
p = hash_maybe(p)
# Really, an ugly hack. Boost regression test system requires
# specific target paths, and it seems that changing it to handle
# other directory layout is really hard. For that reason,
# we teach V2 to do the things regression system requires.
# The value o '<location-prefix>' is predended to the path.
prefix = self.get ('<location-prefix>')
if prefix:
if len (prefix) > 1:
raise AlreadyDefined ("Two <location-prefix> properties specified: '%s'" % prefix)
computed = os.path.join(prefix[0], p)
else:
computed = p
if not computed:
computed = "."
is_relative = True
self.target_path_ = (computed, is_relative)
return self.target_path_
|
def function[target_path, parameter[self]]:
constant[ Computes the target path that should be used for
target with these properties.
Returns a tuple of
- the computed path
- if the path is relative to build directory, a value of
'true'.
]
if <ast.UnaryOp object at 0x7da1b1f76440> begin[:]
variable[l] assign[=] call[name[self].get, parameter[constant[<location>]]]
if name[l] begin[:]
variable[computed] assign[=] call[name[l]][constant[0]]
variable[is_relative] assign[=] constant[False]
name[self].target_path_ assign[=] tuple[[<ast.Name object at 0x7da1b1f77d00>, <ast.Name object at 0x7da1b1f75390>]]
return[name[self].target_path_]
|
keyword[def] identifier[target_path] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[target_path_] :
identifier[l] = identifier[self] . identifier[get] ( literal[string] )
keyword[if] identifier[l] :
identifier[computed] = identifier[l] [ literal[int] ]
identifier[is_relative] = keyword[False]
keyword[else] :
identifier[p] = identifier[self] . identifier[as_path] ()
keyword[if] identifier[hash_maybe] :
identifier[p] = identifier[hash_maybe] ( identifier[p] )
identifier[prefix] = identifier[self] . identifier[get] ( literal[string] )
keyword[if] identifier[prefix] :
keyword[if] identifier[len] ( identifier[prefix] )> literal[int] :
keyword[raise] identifier[AlreadyDefined] ( literal[string] % identifier[prefix] )
identifier[computed] = identifier[os] . identifier[path] . identifier[join] ( identifier[prefix] [ literal[int] ], identifier[p] )
keyword[else] :
identifier[computed] = identifier[p]
keyword[if] keyword[not] identifier[computed] :
identifier[computed] = literal[string]
identifier[is_relative] = keyword[True]
identifier[self] . identifier[target_path_] =( identifier[computed] , identifier[is_relative] )
keyword[return] identifier[self] . identifier[target_path_]
|
def target_path(self):
""" Computes the target path that should be used for
target with these properties.
Returns a tuple of
- the computed path
- if the path is relative to build directory, a value of
'true'.
"""
if not self.target_path_:
# The <location> feature can be used to explicitly
# change the location of generated targets
l = self.get('<location>')
if l:
computed = l[0]
is_relative = False # depends on [control=['if'], data=[]]
else:
p = self.as_path()
if hash_maybe:
p = hash_maybe(p) # depends on [control=['if'], data=[]]
# Really, an ugly hack. Boost regression test system requires
# specific target paths, and it seems that changing it to handle
# other directory layout is really hard. For that reason,
# we teach V2 to do the things regression system requires.
# The value o '<location-prefix>' is predended to the path.
prefix = self.get('<location-prefix>')
if prefix:
if len(prefix) > 1:
raise AlreadyDefined("Two <location-prefix> properties specified: '%s'" % prefix) # depends on [control=['if'], data=[]]
computed = os.path.join(prefix[0], p) # depends on [control=['if'], data=[]]
else:
computed = p
if not computed:
computed = '.' # depends on [control=['if'], data=[]]
is_relative = True
self.target_path_ = (computed, is_relative) # depends on [control=['if'], data=[]]
return self.target_path_
|
def delete_repository(self, repository, params=None):
"""
Removes a shared file system repository.
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/modules-snapshots.html>`_
:arg repository: A comma-separated list of repository names
:arg master_timeout: Explicit operation timeout for connection to master
node
:arg timeout: Explicit operation timeout
"""
if repository in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument 'repository'.")
return self.transport.perform_request('DELETE',
_make_path('_snapshot', repository), params=params)
|
def function[delete_repository, parameter[self, repository, params]]:
constant[
Removes a shared file system repository.
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/modules-snapshots.html>`_
:arg repository: A comma-separated list of repository names
:arg master_timeout: Explicit operation timeout for connection to master
node
:arg timeout: Explicit operation timeout
]
if compare[name[repository] in name[SKIP_IN_PATH]] begin[:]
<ast.Raise object at 0x7da18f720880>
return[call[name[self].transport.perform_request, parameter[constant[DELETE], call[name[_make_path], parameter[constant[_snapshot], name[repository]]]]]]
|
keyword[def] identifier[delete_repository] ( identifier[self] , identifier[repository] , identifier[params] = keyword[None] ):
literal[string]
keyword[if] identifier[repository] keyword[in] identifier[SKIP_IN_PATH] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[return] identifier[self] . identifier[transport] . identifier[perform_request] ( literal[string] ,
identifier[_make_path] ( literal[string] , identifier[repository] ), identifier[params] = identifier[params] )
|
def delete_repository(self, repository, params=None):
"""
Removes a shared file system repository.
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/modules-snapshots.html>`_
:arg repository: A comma-separated list of repository names
:arg master_timeout: Explicit operation timeout for connection to master
node
:arg timeout: Explicit operation timeout
"""
if repository in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument 'repository'.") # depends on [control=['if'], data=[]]
return self.transport.perform_request('DELETE', _make_path('_snapshot', repository), params=params)
|
async def start(self, host=None, port=0, **kwargs):
"""
:py:func:`asyncio.coroutine`
Start server.
:param host: ip address to bind for listening.
:type host: :py:class:`str`
:param port: port number to bind for listening.
:type port: :py:class:`int`
:param kwargs: keyword arguments, they passed to
:py:func:`asyncio.start_server`
"""
self._start_server_extra_arguments = kwargs
self.connections = {}
self.server_host = host
self.server_port = port
self.server = await asyncio.start_server(
self.dispatcher,
host,
port,
ssl=self.ssl,
**self._start_server_extra_arguments,
)
for sock in self.server.sockets:
if sock.family in (socket.AF_INET, socket.AF_INET6):
host, port, *_ = sock.getsockname()
if not self.server_port:
self.server_port = port
if not self.server_host:
self.server_host = host
logger.info("serving on %s:%s", host, port)
|
<ast.AsyncFunctionDef object at 0x7da18f09ed40>
|
keyword[async] keyword[def] identifier[start] ( identifier[self] , identifier[host] = keyword[None] , identifier[port] = literal[int] ,** identifier[kwargs] ):
literal[string]
identifier[self] . identifier[_start_server_extra_arguments] = identifier[kwargs]
identifier[self] . identifier[connections] ={}
identifier[self] . identifier[server_host] = identifier[host]
identifier[self] . identifier[server_port] = identifier[port]
identifier[self] . identifier[server] = keyword[await] identifier[asyncio] . identifier[start_server] (
identifier[self] . identifier[dispatcher] ,
identifier[host] ,
identifier[port] ,
identifier[ssl] = identifier[self] . identifier[ssl] ,
** identifier[self] . identifier[_start_server_extra_arguments] ,
)
keyword[for] identifier[sock] keyword[in] identifier[self] . identifier[server] . identifier[sockets] :
keyword[if] identifier[sock] . identifier[family] keyword[in] ( identifier[socket] . identifier[AF_INET] , identifier[socket] . identifier[AF_INET6] ):
identifier[host] , identifier[port] ,* identifier[_] = identifier[sock] . identifier[getsockname] ()
keyword[if] keyword[not] identifier[self] . identifier[server_port] :
identifier[self] . identifier[server_port] = identifier[port]
keyword[if] keyword[not] identifier[self] . identifier[server_host] :
identifier[self] . identifier[server_host] = identifier[host]
identifier[logger] . identifier[info] ( literal[string] , identifier[host] , identifier[port] )
|
async def start(self, host=None, port=0, **kwargs):
"""
:py:func:`asyncio.coroutine`
Start server.
:param host: ip address to bind for listening.
:type host: :py:class:`str`
:param port: port number to bind for listening.
:type port: :py:class:`int`
:param kwargs: keyword arguments, they passed to
:py:func:`asyncio.start_server`
"""
self._start_server_extra_arguments = kwargs
self.connections = {}
self.server_host = host
self.server_port = port
self.server = await asyncio.start_server(self.dispatcher, host, port, ssl=self.ssl, **self._start_server_extra_arguments)
for sock in self.server.sockets:
if sock.family in (socket.AF_INET, socket.AF_INET6):
(host, port, *_) = sock.getsockname()
if not self.server_port:
self.server_port = port # depends on [control=['if'], data=[]]
if not self.server_host:
self.server_host = host # depends on [control=['if'], data=[]]
logger.info('serving on %s:%s', host, port) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['sock']]
|
def marshal_bson(
obj,
types=BSON_TYPES,
fields=None,
):
""" Recursively marshal a Python object to a BSON-compatible dict
that can be passed to PyMongo, Motor, etc...
Args:
obj: object, It's members can be nested Python
objects which will be converted to dictionaries
types: tuple-of-types, The BSON primitive types, typically
you would not change this
fields: None-list-of-str, Explicitly marshal only these fields
Returns:
dict
"""
return marshal_dict(
obj,
types,
fields=fields,
)
|
def function[marshal_bson, parameter[obj, types, fields]]:
constant[ Recursively marshal a Python object to a BSON-compatible dict
that can be passed to PyMongo, Motor, etc...
Args:
obj: object, It's members can be nested Python
objects which will be converted to dictionaries
types: tuple-of-types, The BSON primitive types, typically
you would not change this
fields: None-list-of-str, Explicitly marshal only these fields
Returns:
dict
]
return[call[name[marshal_dict], parameter[name[obj], name[types]]]]
|
keyword[def] identifier[marshal_bson] (
identifier[obj] ,
identifier[types] = identifier[BSON_TYPES] ,
identifier[fields] = keyword[None] ,
):
literal[string]
keyword[return] identifier[marshal_dict] (
identifier[obj] ,
identifier[types] ,
identifier[fields] = identifier[fields] ,
)
|
def marshal_bson(obj, types=BSON_TYPES, fields=None):
""" Recursively marshal a Python object to a BSON-compatible dict
that can be passed to PyMongo, Motor, etc...
Args:
obj: object, It's members can be nested Python
objects which will be converted to dictionaries
types: tuple-of-types, The BSON primitive types, typically
you would not change this
fields: None-list-of-str, Explicitly marshal only these fields
Returns:
dict
"""
return marshal_dict(obj, types, fields=fields)
|
def validate_feature_api(project, force=False):
"""Validate feature API"""
if not force and not project.on_pr():
raise SkippedValidationTest('Not on PR')
validator = FeatureApiValidator(project)
result = validator.validate()
if not result:
raise InvalidFeatureApi
|
def function[validate_feature_api, parameter[project, force]]:
constant[Validate feature API]
if <ast.BoolOp object at 0x7da18bcc9060> begin[:]
<ast.Raise object at 0x7da18bccb7c0>
variable[validator] assign[=] call[name[FeatureApiValidator], parameter[name[project]]]
variable[result] assign[=] call[name[validator].validate, parameter[]]
if <ast.UnaryOp object at 0x7da18bccb520> begin[:]
<ast.Raise object at 0x7da18bccb400>
|
keyword[def] identifier[validate_feature_api] ( identifier[project] , identifier[force] = keyword[False] ):
literal[string]
keyword[if] keyword[not] identifier[force] keyword[and] keyword[not] identifier[project] . identifier[on_pr] ():
keyword[raise] identifier[SkippedValidationTest] ( literal[string] )
identifier[validator] = identifier[FeatureApiValidator] ( identifier[project] )
identifier[result] = identifier[validator] . identifier[validate] ()
keyword[if] keyword[not] identifier[result] :
keyword[raise] identifier[InvalidFeatureApi]
|
def validate_feature_api(project, force=False):
"""Validate feature API"""
if not force and (not project.on_pr()):
raise SkippedValidationTest('Not on PR') # depends on [control=['if'], data=[]]
validator = FeatureApiValidator(project)
result = validator.validate()
if not result:
raise InvalidFeatureApi # depends on [control=['if'], data=[]]
|
def change_first_point_by_coords(self, x, y, max_distance=1e-4,
raise_if_too_far_away=True):
"""
Set the first point of the exterior to the given point based on its coordinates.
If multiple points are found, the closest one will be picked.
If no matching points are found, an exception is raised.
Note: This method does *not* work in-place.
Parameters
----------
x : number
X-coordinate of the point.
y : number
Y-coordinate of the point.
max_distance : None or number, optional
Maximum distance past which possible matches are ignored.
If ``None`` the distance limit is deactivated.
raise_if_too_far_away : bool, optional
Whether to raise an exception if the closest found point is too
far away (``True``) or simply return an unchanged copy if this
object (``False``).
Returns
-------
imgaug.Polygon
Copy of this polygon with the new point order.
"""
if len(self.exterior) == 0:
raise Exception("Cannot reorder polygon points, because it contains no points.")
closest_idx, closest_dist = self.find_closest_point_index(x=x, y=y, return_distance=True)
if max_distance is not None and closest_dist > max_distance:
if not raise_if_too_far_away:
return self.deepcopy()
closest_point = self.exterior[closest_idx, :]
raise Exception(
"Closest found point (%.9f, %.9f) exceeds max_distance of %.9f exceeded" % (
closest_point[0], closest_point[1], closest_dist)
)
return self.change_first_point_by_index(closest_idx)
|
def function[change_first_point_by_coords, parameter[self, x, y, max_distance, raise_if_too_far_away]]:
constant[
Set the first point of the exterior to the given point based on its coordinates.
If multiple points are found, the closest one will be picked.
If no matching points are found, an exception is raised.
Note: This method does *not* work in-place.
Parameters
----------
x : number
X-coordinate of the point.
y : number
Y-coordinate of the point.
max_distance : None or number, optional
Maximum distance past which possible matches are ignored.
If ``None`` the distance limit is deactivated.
raise_if_too_far_away : bool, optional
Whether to raise an exception if the closest found point is too
far away (``True``) or simply return an unchanged copy if this
object (``False``).
Returns
-------
imgaug.Polygon
Copy of this polygon with the new point order.
]
if compare[call[name[len], parameter[name[self].exterior]] equal[==] constant[0]] begin[:]
<ast.Raise object at 0x7da1b0237d00>
<ast.Tuple object at 0x7da1b0236a70> assign[=] call[name[self].find_closest_point_index, parameter[]]
if <ast.BoolOp object at 0x7da1b0237190> begin[:]
if <ast.UnaryOp object at 0x7da1b0237130> begin[:]
return[call[name[self].deepcopy, parameter[]]]
variable[closest_point] assign[=] call[name[self].exterior][tuple[[<ast.Name object at 0x7da1b0236f50>, <ast.Slice object at 0x7da1b0235660>]]]
<ast.Raise object at 0x7da1b0237bb0>
return[call[name[self].change_first_point_by_index, parameter[name[closest_idx]]]]
|
keyword[def] identifier[change_first_point_by_coords] ( identifier[self] , identifier[x] , identifier[y] , identifier[max_distance] = literal[int] ,
identifier[raise_if_too_far_away] = keyword[True] ):
literal[string]
keyword[if] identifier[len] ( identifier[self] . identifier[exterior] )== literal[int] :
keyword[raise] identifier[Exception] ( literal[string] )
identifier[closest_idx] , identifier[closest_dist] = identifier[self] . identifier[find_closest_point_index] ( identifier[x] = identifier[x] , identifier[y] = identifier[y] , identifier[return_distance] = keyword[True] )
keyword[if] identifier[max_distance] keyword[is] keyword[not] keyword[None] keyword[and] identifier[closest_dist] > identifier[max_distance] :
keyword[if] keyword[not] identifier[raise_if_too_far_away] :
keyword[return] identifier[self] . identifier[deepcopy] ()
identifier[closest_point] = identifier[self] . identifier[exterior] [ identifier[closest_idx] ,:]
keyword[raise] identifier[Exception] (
literal[string] %(
identifier[closest_point] [ literal[int] ], identifier[closest_point] [ literal[int] ], identifier[closest_dist] )
)
keyword[return] identifier[self] . identifier[change_first_point_by_index] ( identifier[closest_idx] )
|
def change_first_point_by_coords(self, x, y, max_distance=0.0001, raise_if_too_far_away=True):
"""
Set the first point of the exterior to the given point based on its coordinates.
If multiple points are found, the closest one will be picked.
If no matching points are found, an exception is raised.
Note: This method does *not* work in-place.
Parameters
----------
x : number
X-coordinate of the point.
y : number
Y-coordinate of the point.
max_distance : None or number, optional
Maximum distance past which possible matches are ignored.
If ``None`` the distance limit is deactivated.
raise_if_too_far_away : bool, optional
Whether to raise an exception if the closest found point is too
far away (``True``) or simply return an unchanged copy if this
object (``False``).
Returns
-------
imgaug.Polygon
Copy of this polygon with the new point order.
"""
if len(self.exterior) == 0:
raise Exception('Cannot reorder polygon points, because it contains no points.') # depends on [control=['if'], data=[]]
(closest_idx, closest_dist) = self.find_closest_point_index(x=x, y=y, return_distance=True)
if max_distance is not None and closest_dist > max_distance:
if not raise_if_too_far_away:
return self.deepcopy() # depends on [control=['if'], data=[]]
closest_point = self.exterior[closest_idx, :]
raise Exception('Closest found point (%.9f, %.9f) exceeds max_distance of %.9f exceeded' % (closest_point[0], closest_point[1], closest_dist)) # depends on [control=['if'], data=[]]
return self.change_first_point_by_index(closest_idx)
|
def vb_get_network_adapters(machine_name=None, machine=None):
'''
A valid machine_name or a machine is needed to make this work!
@param machine_name:
@type machine_name: str
@param machine:
@type machine: IMachine
@return: INetorkAdapter's converted to dicts
@rtype: [dict]
'''
if machine_name:
machine = vb_get_box().findMachine(machine_name)
network_adapters = []
for i in range(vb_get_max_network_slots()):
try:
inetwork_adapter = machine.getNetworkAdapter(i)
network_adapter = vb_xpcom_to_attribute_dict(
inetwork_adapter, 'INetworkAdapter'
)
network_adapter['properties'] = inetwork_adapter.getProperties('')
network_adapters.append(network_adapter)
except Exception:
pass
return network_adapters
|
def function[vb_get_network_adapters, parameter[machine_name, machine]]:
constant[
A valid machine_name or a machine is needed to make this work!
@param machine_name:
@type machine_name: str
@param machine:
@type machine: IMachine
@return: INetorkAdapter's converted to dicts
@rtype: [dict]
]
if name[machine_name] begin[:]
variable[machine] assign[=] call[call[name[vb_get_box], parameter[]].findMachine, parameter[name[machine_name]]]
variable[network_adapters] assign[=] list[[]]
for taget[name[i]] in starred[call[name[range], parameter[call[name[vb_get_max_network_slots], parameter[]]]]] begin[:]
<ast.Try object at 0x7da1b1ff2230>
return[name[network_adapters]]
|
keyword[def] identifier[vb_get_network_adapters] ( identifier[machine_name] = keyword[None] , identifier[machine] = keyword[None] ):
literal[string]
keyword[if] identifier[machine_name] :
identifier[machine] = identifier[vb_get_box] (). identifier[findMachine] ( identifier[machine_name] )
identifier[network_adapters] =[]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[vb_get_max_network_slots] ()):
keyword[try] :
identifier[inetwork_adapter] = identifier[machine] . identifier[getNetworkAdapter] ( identifier[i] )
identifier[network_adapter] = identifier[vb_xpcom_to_attribute_dict] (
identifier[inetwork_adapter] , literal[string]
)
identifier[network_adapter] [ literal[string] ]= identifier[inetwork_adapter] . identifier[getProperties] ( literal[string] )
identifier[network_adapters] . identifier[append] ( identifier[network_adapter] )
keyword[except] identifier[Exception] :
keyword[pass]
keyword[return] identifier[network_adapters]
|
def vb_get_network_adapters(machine_name=None, machine=None):
"""
A valid machine_name or a machine is needed to make this work!
@param machine_name:
@type machine_name: str
@param machine:
@type machine: IMachine
@return: INetorkAdapter's converted to dicts
@rtype: [dict]
"""
if machine_name:
machine = vb_get_box().findMachine(machine_name) # depends on [control=['if'], data=[]]
network_adapters = []
for i in range(vb_get_max_network_slots()):
try:
inetwork_adapter = machine.getNetworkAdapter(i)
network_adapter = vb_xpcom_to_attribute_dict(inetwork_adapter, 'INetworkAdapter')
network_adapter['properties'] = inetwork_adapter.getProperties('')
network_adapters.append(network_adapter) # depends on [control=['try'], data=[]]
except Exception:
pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['i']]
return network_adapters
|
def time_slice(self, t_from, t_to=None):
"""Return an new graph containing nodes and interactions present in [t_from, t_to].
Parameters
----------
t_from : snapshot id, mandatory
t_to : snapshot id, optional (default=None)
If None t_to will be set equal to t_from
Returns
-------
H : a DynGraph object
the graph described by interactions in [t_from, t_to]
Examples
--------
>>> G = dn.DynGraph()
>>> G.add_path([0,1,2,3], t=0)
>>> G.add_path([0,4,5,6], t=1)
>>> G.add_path([7,1,2,3], t=2)
>>> H = G.time_slice(0)
>>> H.interactions()
[(0, 1), (1, 2), (1, 3)]
>>> H = G.time_slice(0, 1)
>>> H.interactions()
[(0, 1), (1, 2), (1, 3), (0, 4), (4, 5), (5, 6)]
"""
# create new graph and copy subgraph into it
H = self.__class__()
if t_to is not None:
if t_to < t_from:
raise ValueError("Invalid range: t_to must be grater that t_from")
else:
t_to = t_from
for u, v, ts in self.interactions_iter():
I = t_to
F = t_from
for a, b in ts['t']:
if I <= a and b <= F:
H.add_interaction(u, v, a, b)
elif a <= I and F <= b:
H.add_interaction(u, v, I, F)
elif a <= I <= b and b <= F:
H.add_interaction(u, v, I, b)
elif I <= a <= F and F <= b:
H.add_interaction(u, v, a, F)
return H
|
def function[time_slice, parameter[self, t_from, t_to]]:
constant[Return an new graph containing nodes and interactions present in [t_from, t_to].
Parameters
----------
t_from : snapshot id, mandatory
t_to : snapshot id, optional (default=None)
If None t_to will be set equal to t_from
Returns
-------
H : a DynGraph object
the graph described by interactions in [t_from, t_to]
Examples
--------
>>> G = dn.DynGraph()
>>> G.add_path([0,1,2,3], t=0)
>>> G.add_path([0,4,5,6], t=1)
>>> G.add_path([7,1,2,3], t=2)
>>> H = G.time_slice(0)
>>> H.interactions()
[(0, 1), (1, 2), (1, 3)]
>>> H = G.time_slice(0, 1)
>>> H.interactions()
[(0, 1), (1, 2), (1, 3), (0, 4), (4, 5), (5, 6)]
]
variable[H] assign[=] call[name[self].__class__, parameter[]]
if compare[name[t_to] is_not constant[None]] begin[:]
if compare[name[t_to] less[<] name[t_from]] begin[:]
<ast.Raise object at 0x7da1b04778e0>
for taget[tuple[[<ast.Name object at 0x7da1b04d3fa0>, <ast.Name object at 0x7da1b04d0520>, <ast.Name object at 0x7da1b04d03d0>]]] in starred[call[name[self].interactions_iter, parameter[]]] begin[:]
variable[I] assign[=] name[t_to]
variable[F] assign[=] name[t_from]
for taget[tuple[[<ast.Name object at 0x7da1b04d2560>, <ast.Name object at 0x7da1b04d2b00>]]] in starred[call[name[ts]][constant[t]]] begin[:]
if <ast.BoolOp object at 0x7da1b04d2e30> begin[:]
call[name[H].add_interaction, parameter[name[u], name[v], name[a], name[b]]]
return[name[H]]
|
keyword[def] identifier[time_slice] ( identifier[self] , identifier[t_from] , identifier[t_to] = keyword[None] ):
literal[string]
identifier[H] = identifier[self] . identifier[__class__] ()
keyword[if] identifier[t_to] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[t_to] < identifier[t_from] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[else] :
identifier[t_to] = identifier[t_from]
keyword[for] identifier[u] , identifier[v] , identifier[ts] keyword[in] identifier[self] . identifier[interactions_iter] ():
identifier[I] = identifier[t_to]
identifier[F] = identifier[t_from]
keyword[for] identifier[a] , identifier[b] keyword[in] identifier[ts] [ literal[string] ]:
keyword[if] identifier[I] <= identifier[a] keyword[and] identifier[b] <= identifier[F] :
identifier[H] . identifier[add_interaction] ( identifier[u] , identifier[v] , identifier[a] , identifier[b] )
keyword[elif] identifier[a] <= identifier[I] keyword[and] identifier[F] <= identifier[b] :
identifier[H] . identifier[add_interaction] ( identifier[u] , identifier[v] , identifier[I] , identifier[F] )
keyword[elif] identifier[a] <= identifier[I] <= identifier[b] keyword[and] identifier[b] <= identifier[F] :
identifier[H] . identifier[add_interaction] ( identifier[u] , identifier[v] , identifier[I] , identifier[b] )
keyword[elif] identifier[I] <= identifier[a] <= identifier[F] keyword[and] identifier[F] <= identifier[b] :
identifier[H] . identifier[add_interaction] ( identifier[u] , identifier[v] , identifier[a] , identifier[F] )
keyword[return] identifier[H]
|
def time_slice(self, t_from, t_to=None):
"""Return an new graph containing nodes and interactions present in [t_from, t_to].
Parameters
----------
t_from : snapshot id, mandatory
t_to : snapshot id, optional (default=None)
If None t_to will be set equal to t_from
Returns
-------
H : a DynGraph object
the graph described by interactions in [t_from, t_to]
Examples
--------
>>> G = dn.DynGraph()
>>> G.add_path([0,1,2,3], t=0)
>>> G.add_path([0,4,5,6], t=1)
>>> G.add_path([7,1,2,3], t=2)
>>> H = G.time_slice(0)
>>> H.interactions()
[(0, 1), (1, 2), (1, 3)]
>>> H = G.time_slice(0, 1)
>>> H.interactions()
[(0, 1), (1, 2), (1, 3), (0, 4), (4, 5), (5, 6)]
"""
# create new graph and copy subgraph into it
H = self.__class__()
if t_to is not None:
if t_to < t_from:
raise ValueError('Invalid range: t_to must be grater that t_from') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['t_to']]
else:
t_to = t_from
for (u, v, ts) in self.interactions_iter():
I = t_to
F = t_from
for (a, b) in ts['t']:
if I <= a and b <= F:
H.add_interaction(u, v, a, b) # depends on [control=['if'], data=[]]
elif a <= I and F <= b:
H.add_interaction(u, v, I, F) # depends on [control=['if'], data=[]]
elif a <= I <= b and b <= F:
H.add_interaction(u, v, I, b) # depends on [control=['if'], data=[]]
elif I <= a <= F and F <= b:
H.add_interaction(u, v, a, F) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]]
return H
|
def napi_compare(left, ops, comparators, **kwargs):
"""Make pairwise comparisons of comparators."""
values = []
for op, right in zip(ops, comparators):
value = COMPARE[op](left, right)
values.append(value)
left = right
result = napi_and(values, **kwargs)
if isinstance(result, ndarray):
return result
else:
return bool(result)
|
def function[napi_compare, parameter[left, ops, comparators]]:
constant[Make pairwise comparisons of comparators.]
variable[values] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b277c430>, <ast.Name object at 0x7da1b277c460>]]] in starred[call[name[zip], parameter[name[ops], name[comparators]]]] begin[:]
variable[value] assign[=] call[call[name[COMPARE]][name[op]], parameter[name[left], name[right]]]
call[name[values].append, parameter[name[value]]]
variable[left] assign[=] name[right]
variable[result] assign[=] call[name[napi_and], parameter[name[values]]]
if call[name[isinstance], parameter[name[result], name[ndarray]]] begin[:]
return[name[result]]
|
keyword[def] identifier[napi_compare] ( identifier[left] , identifier[ops] , identifier[comparators] ,** identifier[kwargs] ):
literal[string]
identifier[values] =[]
keyword[for] identifier[op] , identifier[right] keyword[in] identifier[zip] ( identifier[ops] , identifier[comparators] ):
identifier[value] = identifier[COMPARE] [ identifier[op] ]( identifier[left] , identifier[right] )
identifier[values] . identifier[append] ( identifier[value] )
identifier[left] = identifier[right]
identifier[result] = identifier[napi_and] ( identifier[values] ,** identifier[kwargs] )
keyword[if] identifier[isinstance] ( identifier[result] , identifier[ndarray] ):
keyword[return] identifier[result]
keyword[else] :
keyword[return] identifier[bool] ( identifier[result] )
|
def napi_compare(left, ops, comparators, **kwargs):
"""Make pairwise comparisons of comparators."""
values = []
for (op, right) in zip(ops, comparators):
value = COMPARE[op](left, right)
values.append(value)
left = right # depends on [control=['for'], data=[]]
result = napi_and(values, **kwargs)
if isinstance(result, ndarray):
return result # depends on [control=['if'], data=[]]
else:
return bool(result)
|
def __filter(filterable, filter_, logic_operation='and'):
""" filtering DataFrame using filter_ key-value conditions applying logic_operation
only find rows strictly fitting the filter_ criterion"""
condition = []
if not filter_:
return filterable
elif filter_.get('type') == '__ANY__':
return filterable
else:
for key, value in filter_.items():
condition.append('{key} == "{value}"'.format(key=key, value=value))
try:
res = filterable.query(" {operation} ".format(operation=logic_operation).join(condition))
except pd.core.computation.ops.UndefinedVariableError:
return pd.DataFrame()
else:
return res
|
def function[__filter, parameter[filterable, filter_, logic_operation]]:
constant[ filtering DataFrame using filter_ key-value conditions applying logic_operation
only find rows strictly fitting the filter_ criterion]
variable[condition] assign[=] list[[]]
if <ast.UnaryOp object at 0x7da1b1a1ece0> begin[:]
return[name[filterable]]
<ast.Try object at 0x7da1b1a768f0>
|
keyword[def] identifier[__filter] ( identifier[filterable] , identifier[filter_] , identifier[logic_operation] = literal[string] ):
literal[string]
identifier[condition] =[]
keyword[if] keyword[not] identifier[filter_] :
keyword[return] identifier[filterable]
keyword[elif] identifier[filter_] . identifier[get] ( literal[string] )== literal[string] :
keyword[return] identifier[filterable]
keyword[else] :
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[filter_] . identifier[items] ():
identifier[condition] . identifier[append] ( literal[string] . identifier[format] ( identifier[key] = identifier[key] , identifier[value] = identifier[value] ))
keyword[try] :
identifier[res] = identifier[filterable] . identifier[query] ( literal[string] . identifier[format] ( identifier[operation] = identifier[logic_operation] ). identifier[join] ( identifier[condition] ))
keyword[except] identifier[pd] . identifier[core] . identifier[computation] . identifier[ops] . identifier[UndefinedVariableError] :
keyword[return] identifier[pd] . identifier[DataFrame] ()
keyword[else] :
keyword[return] identifier[res]
|
def __filter(filterable, filter_, logic_operation='and'):
""" filtering DataFrame using filter_ key-value conditions applying logic_operation
only find rows strictly fitting the filter_ criterion"""
condition = []
if not filter_:
return filterable # depends on [control=['if'], data=[]]
elif filter_.get('type') == '__ANY__':
return filterable # depends on [control=['if'], data=[]]
else:
for (key, value) in filter_.items():
condition.append('{key} == "{value}"'.format(key=key, value=value)) # depends on [control=['for'], data=[]]
try:
res = filterable.query(' {operation} '.format(operation=logic_operation).join(condition)) # depends on [control=['try'], data=[]]
except pd.core.computation.ops.UndefinedVariableError:
return pd.DataFrame() # depends on [control=['except'], data=[]]
else:
return res
|
def qry_coords(self):
'''Returns a pyfastaq.intervals.Interval object of the start and end coordinates in the query sequence'''
return pyfastaq.intervals.Interval(min(self.qry_start, self.qry_end), max(self.qry_start, self.qry_end))
|
def function[qry_coords, parameter[self]]:
constant[Returns a pyfastaq.intervals.Interval object of the start and end coordinates in the query sequence]
return[call[name[pyfastaq].intervals.Interval, parameter[call[name[min], parameter[name[self].qry_start, name[self].qry_end]], call[name[max], parameter[name[self].qry_start, name[self].qry_end]]]]]
|
keyword[def] identifier[qry_coords] ( identifier[self] ):
literal[string]
keyword[return] identifier[pyfastaq] . identifier[intervals] . identifier[Interval] ( identifier[min] ( identifier[self] . identifier[qry_start] , identifier[self] . identifier[qry_end] ), identifier[max] ( identifier[self] . identifier[qry_start] , identifier[self] . identifier[qry_end] ))
|
def qry_coords(self):
"""Returns a pyfastaq.intervals.Interval object of the start and end coordinates in the query sequence"""
return pyfastaq.intervals.Interval(min(self.qry_start, self.qry_end), max(self.qry_start, self.qry_end))
|
def get_file_history_2(self, path):
"""
Returns history of file as reversed list of ``Changeset`` objects for
which file at given ``path`` has been modified.
"""
self._get_filectx(path)
from dulwich.walk import Walker
include = [self.id]
walker = Walker(self.repository._repo.object_store, include,
paths=[path], max_entries=1)
return [self.repository.get_changeset(sha)
for sha in (x.commit.id for x in walker)]
|
def function[get_file_history_2, parameter[self, path]]:
constant[
Returns history of file as reversed list of ``Changeset`` objects for
which file at given ``path`` has been modified.
]
call[name[self]._get_filectx, parameter[name[path]]]
from relative_module[dulwich.walk] import module[Walker]
variable[include] assign[=] list[[<ast.Attribute object at 0x7da18bcc8550>]]
variable[walker] assign[=] call[name[Walker], parameter[name[self].repository._repo.object_store, name[include]]]
return[<ast.ListComp object at 0x7da18bccadd0>]
|
keyword[def] identifier[get_file_history_2] ( identifier[self] , identifier[path] ):
literal[string]
identifier[self] . identifier[_get_filectx] ( identifier[path] )
keyword[from] identifier[dulwich] . identifier[walk] keyword[import] identifier[Walker]
identifier[include] =[ identifier[self] . identifier[id] ]
identifier[walker] = identifier[Walker] ( identifier[self] . identifier[repository] . identifier[_repo] . identifier[object_store] , identifier[include] ,
identifier[paths] =[ identifier[path] ], identifier[max_entries] = literal[int] )
keyword[return] [ identifier[self] . identifier[repository] . identifier[get_changeset] ( identifier[sha] )
keyword[for] identifier[sha] keyword[in] ( identifier[x] . identifier[commit] . identifier[id] keyword[for] identifier[x] keyword[in] identifier[walker] )]
|
def get_file_history_2(self, path):
"""
Returns history of file as reversed list of ``Changeset`` objects for
which file at given ``path`` has been modified.
"""
self._get_filectx(path)
from dulwich.walk import Walker
include = [self.id]
walker = Walker(self.repository._repo.object_store, include, paths=[path], max_entries=1)
return [self.repository.get_changeset(sha) for sha in (x.commit.id for x in walker)]
|
def _connect(self):
"""
Attemps connection to the server
"""
self.logger.info("Attempting connection to %s:%s", self.server[0], self.server[1])
try:
self._open_socket()
peer = self.sock.getpeername()
self.logger.info("Connected to %s", str(peer))
# 5 second timeout to receive server banner
self.sock.setblocking(1)
self.sock.settimeout(5)
self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
banner = self.sock.recv(512)
if is_py3:
banner = banner.decode('latin-1')
if banner[0] == "#":
self.logger.debug("Banner: %s", banner.rstrip())
else:
raise ConnectionError("invalid banner from server")
except ConnectionError as e:
self.logger.error(str(e))
self.close()
raise
except (socket.error, socket.timeout) as e:
self.close()
self.logger.error("Socket error: %s" % str(e))
if str(e) == "timed out":
raise ConnectionError("no banner from server")
else:
raise ConnectionError(e)
self._connected = True
|
def function[_connect, parameter[self]]:
constant[
Attemps connection to the server
]
call[name[self].logger.info, parameter[constant[Attempting connection to %s:%s], call[name[self].server][constant[0]], call[name[self].server][constant[1]]]]
<ast.Try object at 0x7da1aff02d70>
name[self]._connected assign[=] constant[True]
|
keyword[def] identifier[_connect] ( identifier[self] ):
literal[string]
identifier[self] . identifier[logger] . identifier[info] ( literal[string] , identifier[self] . identifier[server] [ literal[int] ], identifier[self] . identifier[server] [ literal[int] ])
keyword[try] :
identifier[self] . identifier[_open_socket] ()
identifier[peer] = identifier[self] . identifier[sock] . identifier[getpeername] ()
identifier[self] . identifier[logger] . identifier[info] ( literal[string] , identifier[str] ( identifier[peer] ))
identifier[self] . identifier[sock] . identifier[setblocking] ( literal[int] )
identifier[self] . identifier[sock] . identifier[settimeout] ( literal[int] )
identifier[self] . identifier[sock] . identifier[setsockopt] ( identifier[socket] . identifier[SOL_SOCKET] , identifier[socket] . identifier[SO_KEEPALIVE] , literal[int] )
identifier[banner] = identifier[self] . identifier[sock] . identifier[recv] ( literal[int] )
keyword[if] identifier[is_py3] :
identifier[banner] = identifier[banner] . identifier[decode] ( literal[string] )
keyword[if] identifier[banner] [ literal[int] ]== literal[string] :
identifier[self] . identifier[logger] . identifier[debug] ( literal[string] , identifier[banner] . identifier[rstrip] ())
keyword[else] :
keyword[raise] identifier[ConnectionError] ( literal[string] )
keyword[except] identifier[ConnectionError] keyword[as] identifier[e] :
identifier[self] . identifier[logger] . identifier[error] ( identifier[str] ( identifier[e] ))
identifier[self] . identifier[close] ()
keyword[raise]
keyword[except] ( identifier[socket] . identifier[error] , identifier[socket] . identifier[timeout] ) keyword[as] identifier[e] :
identifier[self] . identifier[close] ()
identifier[self] . identifier[logger] . identifier[error] ( literal[string] % identifier[str] ( identifier[e] ))
keyword[if] identifier[str] ( identifier[e] )== literal[string] :
keyword[raise] identifier[ConnectionError] ( literal[string] )
keyword[else] :
keyword[raise] identifier[ConnectionError] ( identifier[e] )
identifier[self] . identifier[_connected] = keyword[True]
|
def _connect(self):
"""
Attemps connection to the server
"""
self.logger.info('Attempting connection to %s:%s', self.server[0], self.server[1])
try:
self._open_socket()
peer = self.sock.getpeername()
self.logger.info('Connected to %s', str(peer))
# 5 second timeout to receive server banner
self.sock.setblocking(1)
self.sock.settimeout(5)
self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
banner = self.sock.recv(512)
if is_py3:
banner = banner.decode('latin-1') # depends on [control=['if'], data=[]]
if banner[0] == '#':
self.logger.debug('Banner: %s', banner.rstrip()) # depends on [control=['if'], data=[]]
else:
raise ConnectionError('invalid banner from server') # depends on [control=['try'], data=[]]
except ConnectionError as e:
self.logger.error(str(e))
self.close()
raise # depends on [control=['except'], data=['e']]
except (socket.error, socket.timeout) as e:
self.close()
self.logger.error('Socket error: %s' % str(e))
if str(e) == 'timed out':
raise ConnectionError('no banner from server') # depends on [control=['if'], data=[]]
else:
raise ConnectionError(e) # depends on [control=['except'], data=['e']]
self._connected = True
|
def classes(self):
"""return all class nodes in the diagram"""
return [o for o in self.objects if isinstance(o.node, astroid.ClassDef)]
|
def function[classes, parameter[self]]:
constant[return all class nodes in the diagram]
return[<ast.ListComp object at 0x7da1b020ffa0>]
|
keyword[def] identifier[classes] ( identifier[self] ):
literal[string]
keyword[return] [ identifier[o] keyword[for] identifier[o] keyword[in] identifier[self] . identifier[objects] keyword[if] identifier[isinstance] ( identifier[o] . identifier[node] , identifier[astroid] . identifier[ClassDef] )]
|
def classes(self):
"""return all class nodes in the diagram"""
return [o for o in self.objects if isinstance(o.node, astroid.ClassDef)]
|
def handle_import(self, options):
"""
Gets the posts from either the provided URL or the path if it
is local.
"""
url = options.get("url")
if url is None:
raise CommandError("Usage is import_wordpress %s" % self.args)
try:
import feedparser
except ImportError:
raise CommandError("Could not import the feedparser library.")
feed = feedparser.parse(url)
# We use the minidom parser as well because feedparser won't
# interpret WXR comments correctly and ends up munging them.
# xml.dom.minidom is used simply to pull the comments when we
# get to them.
xml = parse(url)
xmlitems = xml.getElementsByTagName("item")
for (i, entry) in enumerate(feed["entries"]):
# Get a pointer to the right position in the minidom as well.
xmlitem = xmlitems[i]
content = linebreaks(self.wp_caption(entry.content[0]["value"]))
# Get the time struct of the published date if possible and
# the updated date if we can't.
pub_date = getattr(entry, "published_parsed", entry.updated_parsed)
if pub_date:
pub_date = datetime.fromtimestamp(mktime(pub_date))
pub_date -= timedelta(seconds=timezone)
# Tags and categories are all under "tags" marked with a scheme.
terms = defaultdict(set)
for item in getattr(entry, "tags", []):
terms[item.scheme].add(item.term)
if entry.wp_post_type == "post":
post = self.add_post(title=entry.title, content=content,
pub_date=pub_date, tags=terms["tag"],
categories=terms["category"],
old_url=entry.id)
# Get the comments from the xml doc.
for c in xmlitem.getElementsByTagName("wp:comment"):
name = self.get_text(c, "author")
email = self.get_text(c, "author_email")
url = self.get_text(c, "author_url")
body = self.get_text(c, "content")
pub_date = self.get_text(c, "date_gmt")
fmt = "%Y-%m-%d %H:%M:%S"
pub_date = datetime.strptime(pub_date, fmt)
pub_date -= timedelta(seconds=timezone)
self.add_comment(post=post, name=name, email=email,
body=body, website=url,
pub_date=pub_date)
elif entry.wp_post_type == "page":
old_id = getattr(entry, "wp_post_id")
parent_id = getattr(entry, "wp_post_parent")
self.add_page(title=entry.title, content=content,
tags=terms["tag"], old_id=old_id,
old_parent_id=parent_id)
|
def function[handle_import, parameter[self, options]]:
constant[
Gets the posts from either the provided URL or the path if it
is local.
]
variable[url] assign[=] call[name[options].get, parameter[constant[url]]]
if compare[name[url] is constant[None]] begin[:]
<ast.Raise object at 0x7da1b2346530>
<ast.Try object at 0x7da1b2346fe0>
variable[feed] assign[=] call[name[feedparser].parse, parameter[name[url]]]
variable[xml] assign[=] call[name[parse], parameter[name[url]]]
variable[xmlitems] assign[=] call[name[xml].getElementsByTagName, parameter[constant[item]]]
for taget[tuple[[<ast.Name object at 0x7da1b2345e70>, <ast.Name object at 0x7da1b2345060>]]] in starred[call[name[enumerate], parameter[call[name[feed]][constant[entries]]]]] begin[:]
variable[xmlitem] assign[=] call[name[xmlitems]][name[i]]
variable[content] assign[=] call[name[linebreaks], parameter[call[name[self].wp_caption, parameter[call[call[name[entry].content][constant[0]]][constant[value]]]]]]
variable[pub_date] assign[=] call[name[getattr], parameter[name[entry], constant[published_parsed], name[entry].updated_parsed]]
if name[pub_date] begin[:]
variable[pub_date] assign[=] call[name[datetime].fromtimestamp, parameter[call[name[mktime], parameter[name[pub_date]]]]]
<ast.AugAssign object at 0x7da204346a10>
variable[terms] assign[=] call[name[defaultdict], parameter[name[set]]]
for taget[name[item]] in starred[call[name[getattr], parameter[name[entry], constant[tags], list[[]]]]] begin[:]
call[call[name[terms]][name[item].scheme].add, parameter[name[item].term]]
if compare[name[entry].wp_post_type equal[==] constant[post]] begin[:]
variable[post] assign[=] call[name[self].add_post, parameter[]]
for taget[name[c]] in starred[call[name[xmlitem].getElementsByTagName, parameter[constant[wp:comment]]]] begin[:]
variable[name] assign[=] call[name[self].get_text, parameter[name[c], constant[author]]]
variable[email] assign[=] call[name[self].get_text, parameter[name[c], constant[author_email]]]
variable[url] assign[=] call[name[self].get_text, parameter[name[c], constant[author_url]]]
variable[body] assign[=] call[name[self].get_text, parameter[name[c], constant[content]]]
variable[pub_date] assign[=] call[name[self].get_text, parameter[name[c], constant[date_gmt]]]
variable[fmt] assign[=] constant[%Y-%m-%d %H:%M:%S]
variable[pub_date] assign[=] call[name[datetime].strptime, parameter[name[pub_date], name[fmt]]]
<ast.AugAssign object at 0x7da18f09fd00>
call[name[self].add_comment, parameter[]]
|
keyword[def] identifier[handle_import] ( identifier[self] , identifier[options] ):
literal[string]
identifier[url] = identifier[options] . identifier[get] ( literal[string] )
keyword[if] identifier[url] keyword[is] keyword[None] :
keyword[raise] identifier[CommandError] ( literal[string] % identifier[self] . identifier[args] )
keyword[try] :
keyword[import] identifier[feedparser]
keyword[except] identifier[ImportError] :
keyword[raise] identifier[CommandError] ( literal[string] )
identifier[feed] = identifier[feedparser] . identifier[parse] ( identifier[url] )
identifier[xml] = identifier[parse] ( identifier[url] )
identifier[xmlitems] = identifier[xml] . identifier[getElementsByTagName] ( literal[string] )
keyword[for] ( identifier[i] , identifier[entry] ) keyword[in] identifier[enumerate] ( identifier[feed] [ literal[string] ]):
identifier[xmlitem] = identifier[xmlitems] [ identifier[i] ]
identifier[content] = identifier[linebreaks] ( identifier[self] . identifier[wp_caption] ( identifier[entry] . identifier[content] [ literal[int] ][ literal[string] ]))
identifier[pub_date] = identifier[getattr] ( identifier[entry] , literal[string] , identifier[entry] . identifier[updated_parsed] )
keyword[if] identifier[pub_date] :
identifier[pub_date] = identifier[datetime] . identifier[fromtimestamp] ( identifier[mktime] ( identifier[pub_date] ))
identifier[pub_date] -= identifier[timedelta] ( identifier[seconds] = identifier[timezone] )
identifier[terms] = identifier[defaultdict] ( identifier[set] )
keyword[for] identifier[item] keyword[in] identifier[getattr] ( identifier[entry] , literal[string] ,[]):
identifier[terms] [ identifier[item] . identifier[scheme] ]. identifier[add] ( identifier[item] . identifier[term] )
keyword[if] identifier[entry] . identifier[wp_post_type] == literal[string] :
identifier[post] = identifier[self] . identifier[add_post] ( identifier[title] = identifier[entry] . identifier[title] , identifier[content] = identifier[content] ,
identifier[pub_date] = identifier[pub_date] , identifier[tags] = identifier[terms] [ literal[string] ],
identifier[categories] = identifier[terms] [ literal[string] ],
identifier[old_url] = identifier[entry] . identifier[id] )
keyword[for] identifier[c] keyword[in] identifier[xmlitem] . identifier[getElementsByTagName] ( literal[string] ):
identifier[name] = identifier[self] . identifier[get_text] ( identifier[c] , literal[string] )
identifier[email] = identifier[self] . identifier[get_text] ( identifier[c] , literal[string] )
identifier[url] = identifier[self] . identifier[get_text] ( identifier[c] , literal[string] )
identifier[body] = identifier[self] . identifier[get_text] ( identifier[c] , literal[string] )
identifier[pub_date] = identifier[self] . identifier[get_text] ( identifier[c] , literal[string] )
identifier[fmt] = literal[string]
identifier[pub_date] = identifier[datetime] . identifier[strptime] ( identifier[pub_date] , identifier[fmt] )
identifier[pub_date] -= identifier[timedelta] ( identifier[seconds] = identifier[timezone] )
identifier[self] . identifier[add_comment] ( identifier[post] = identifier[post] , identifier[name] = identifier[name] , identifier[email] = identifier[email] ,
identifier[body] = identifier[body] , identifier[website] = identifier[url] ,
identifier[pub_date] = identifier[pub_date] )
keyword[elif] identifier[entry] . identifier[wp_post_type] == literal[string] :
identifier[old_id] = identifier[getattr] ( identifier[entry] , literal[string] )
identifier[parent_id] = identifier[getattr] ( identifier[entry] , literal[string] )
identifier[self] . identifier[add_page] ( identifier[title] = identifier[entry] . identifier[title] , identifier[content] = identifier[content] ,
identifier[tags] = identifier[terms] [ literal[string] ], identifier[old_id] = identifier[old_id] ,
identifier[old_parent_id] = identifier[parent_id] )
|
def handle_import(self, options):
"""
Gets the posts from either the provided URL or the path if it
is local.
"""
url = options.get('url')
if url is None:
raise CommandError('Usage is import_wordpress %s' % self.args) # depends on [control=['if'], data=[]]
try:
import feedparser # depends on [control=['try'], data=[]]
except ImportError:
raise CommandError('Could not import the feedparser library.') # depends on [control=['except'], data=[]]
feed = feedparser.parse(url)
# We use the minidom parser as well because feedparser won't
# interpret WXR comments correctly and ends up munging them.
# xml.dom.minidom is used simply to pull the comments when we
# get to them.
xml = parse(url)
xmlitems = xml.getElementsByTagName('item')
for (i, entry) in enumerate(feed['entries']):
# Get a pointer to the right position in the minidom as well.
xmlitem = xmlitems[i]
content = linebreaks(self.wp_caption(entry.content[0]['value']))
# Get the time struct of the published date if possible and
# the updated date if we can't.
pub_date = getattr(entry, 'published_parsed', entry.updated_parsed)
if pub_date:
pub_date = datetime.fromtimestamp(mktime(pub_date))
pub_date -= timedelta(seconds=timezone) # depends on [control=['if'], data=[]]
# Tags and categories are all under "tags" marked with a scheme.
terms = defaultdict(set)
for item in getattr(entry, 'tags', []):
terms[item.scheme].add(item.term) # depends on [control=['for'], data=['item']]
if entry.wp_post_type == 'post':
post = self.add_post(title=entry.title, content=content, pub_date=pub_date, tags=terms['tag'], categories=terms['category'], old_url=entry.id)
# Get the comments from the xml doc.
for c in xmlitem.getElementsByTagName('wp:comment'):
name = self.get_text(c, 'author')
email = self.get_text(c, 'author_email')
url = self.get_text(c, 'author_url')
body = self.get_text(c, 'content')
pub_date = self.get_text(c, 'date_gmt')
fmt = '%Y-%m-%d %H:%M:%S'
pub_date = datetime.strptime(pub_date, fmt)
pub_date -= timedelta(seconds=timezone)
self.add_comment(post=post, name=name, email=email, body=body, website=url, pub_date=pub_date) # depends on [control=['for'], data=['c']] # depends on [control=['if'], data=[]]
elif entry.wp_post_type == 'page':
old_id = getattr(entry, 'wp_post_id')
parent_id = getattr(entry, 'wp_post_parent')
self.add_page(title=entry.title, content=content, tags=terms['tag'], old_id=old_id, old_parent_id=parent_id) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
|
def outputjson(self, obj):
"""
Serialize `obj` with JSON and output to the client
"""
self.header('Content-Type', 'application/json')
self.outputdata(json.dumps(obj).encode('ascii'))
|
def function[outputjson, parameter[self, obj]]:
constant[
Serialize `obj` with JSON and output to the client
]
call[name[self].header, parameter[constant[Content-Type], constant[application/json]]]
call[name[self].outputdata, parameter[call[call[name[json].dumps, parameter[name[obj]]].encode, parameter[constant[ascii]]]]]
|
keyword[def] identifier[outputjson] ( identifier[self] , identifier[obj] ):
literal[string]
identifier[self] . identifier[header] ( literal[string] , literal[string] )
identifier[self] . identifier[outputdata] ( identifier[json] . identifier[dumps] ( identifier[obj] ). identifier[encode] ( literal[string] ))
|
def outputjson(self, obj):
"""
Serialize `obj` with JSON and output to the client
"""
self.header('Content-Type', 'application/json')
self.outputdata(json.dumps(obj).encode('ascii'))
|
def elevate_element(node, adopt_name=None, adopt_attrs=None):
"""
This method serves a specialized function. It comes up most often when
working with block level elements that may not be contained within
paragraph elements, which are presented in the source document as
inline elements (inside a paragraph element).
It would be inappropriate to merely insert the block element at the
level of the parent, since this disorders the document by placing
the child out of place with its siblings. So this method will elevate
the node to the parent level and also create a new parent to adopt all
of the siblings after the elevated child.
The adopting parent node will have identical attributes and tag name
as the original parent unless specified otherwise.
"""
#These must be collected before modifying the xml
parent = node.getparent()
grandparent = parent.getparent()
child_index = parent.index(node)
parent_index = grandparent.index(parent)
#Get a list of the siblings
siblings = list(parent)[child_index+1:]
#Insert the node after the parent
grandparent.insert(parent_index+1, node)
#Only create the adoptive parent if there are siblings
if len(siblings) > 0 or node.tail is not None:
#Create the adoptive parent
if adopt_name is None:
adopt = etree.Element(parent.tag)
else:
adopt = etree.Element(adopt_name)
if adopt_attrs is None:
for key in parent.attrib.keys():
adopt.attrib[key] = parent.attrib[key]
else:
for key in adopt_attrs.keys():
adopt.attrib[key] = adopt_attrs[key]
#Insert the adoptive parent after the elevated child
grandparent.insert(grandparent.index(node)+1, adopt)
#Transfer the siblings to the adoptive parent
for sibling in siblings:
adopt.append(sibling)
#lxml's element.tail attribute presents a slight problem, requiring the
#following oddity
#Set the adoptive parent's text to the node.tail
if node.tail is not None:
adopt.text = node.tail
node.tail = None
|
def function[elevate_element, parameter[node, adopt_name, adopt_attrs]]:
constant[
This method serves a specialized function. It comes up most often when
working with block level elements that may not be contained within
paragraph elements, which are presented in the source document as
inline elements (inside a paragraph element).
It would be inappropriate to merely insert the block element at the
level of the parent, since this disorders the document by placing
the child out of place with its siblings. So this method will elevate
the node to the parent level and also create a new parent to adopt all
of the siblings after the elevated child.
The adopting parent node will have identical attributes and tag name
as the original parent unless specified otherwise.
]
variable[parent] assign[=] call[name[node].getparent, parameter[]]
variable[grandparent] assign[=] call[name[parent].getparent, parameter[]]
variable[child_index] assign[=] call[name[parent].index, parameter[name[node]]]
variable[parent_index] assign[=] call[name[grandparent].index, parameter[name[parent]]]
variable[siblings] assign[=] call[call[name[list], parameter[name[parent]]]][<ast.Slice object at 0x7da2054a7bb0>]
call[name[grandparent].insert, parameter[binary_operation[name[parent_index] + constant[1]], name[node]]]
if <ast.BoolOp object at 0x7da2054a5b10> begin[:]
if compare[name[adopt_name] is constant[None]] begin[:]
variable[adopt] assign[=] call[name[etree].Element, parameter[name[parent].tag]]
if compare[name[adopt_attrs] is constant[None]] begin[:]
for taget[name[key]] in starred[call[name[parent].attrib.keys, parameter[]]] begin[:]
call[name[adopt].attrib][name[key]] assign[=] call[name[parent].attrib][name[key]]
call[name[grandparent].insert, parameter[binary_operation[call[name[grandparent].index, parameter[name[node]]] + constant[1]], name[adopt]]]
for taget[name[sibling]] in starred[name[siblings]] begin[:]
call[name[adopt].append, parameter[name[sibling]]]
if compare[name[node].tail is_not constant[None]] begin[:]
name[adopt].text assign[=] name[node].tail
name[node].tail assign[=] constant[None]
|
keyword[def] identifier[elevate_element] ( identifier[node] , identifier[adopt_name] = keyword[None] , identifier[adopt_attrs] = keyword[None] ):
literal[string]
identifier[parent] = identifier[node] . identifier[getparent] ()
identifier[grandparent] = identifier[parent] . identifier[getparent] ()
identifier[child_index] = identifier[parent] . identifier[index] ( identifier[node] )
identifier[parent_index] = identifier[grandparent] . identifier[index] ( identifier[parent] )
identifier[siblings] = identifier[list] ( identifier[parent] )[ identifier[child_index] + literal[int] :]
identifier[grandparent] . identifier[insert] ( identifier[parent_index] + literal[int] , identifier[node] )
keyword[if] identifier[len] ( identifier[siblings] )> literal[int] keyword[or] identifier[node] . identifier[tail] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[adopt_name] keyword[is] keyword[None] :
identifier[adopt] = identifier[etree] . identifier[Element] ( identifier[parent] . identifier[tag] )
keyword[else] :
identifier[adopt] = identifier[etree] . identifier[Element] ( identifier[adopt_name] )
keyword[if] identifier[adopt_attrs] keyword[is] keyword[None] :
keyword[for] identifier[key] keyword[in] identifier[parent] . identifier[attrib] . identifier[keys] ():
identifier[adopt] . identifier[attrib] [ identifier[key] ]= identifier[parent] . identifier[attrib] [ identifier[key] ]
keyword[else] :
keyword[for] identifier[key] keyword[in] identifier[adopt_attrs] . identifier[keys] ():
identifier[adopt] . identifier[attrib] [ identifier[key] ]= identifier[adopt_attrs] [ identifier[key] ]
identifier[grandparent] . identifier[insert] ( identifier[grandparent] . identifier[index] ( identifier[node] )+ literal[int] , identifier[adopt] )
keyword[for] identifier[sibling] keyword[in] identifier[siblings] :
identifier[adopt] . identifier[append] ( identifier[sibling] )
keyword[if] identifier[node] . identifier[tail] keyword[is] keyword[not] keyword[None] :
identifier[adopt] . identifier[text] = identifier[node] . identifier[tail]
identifier[node] . identifier[tail] = keyword[None]
|
def elevate_element(node, adopt_name=None, adopt_attrs=None):
"""
This method serves a specialized function. It comes up most often when
working with block level elements that may not be contained within
paragraph elements, which are presented in the source document as
inline elements (inside a paragraph element).
It would be inappropriate to merely insert the block element at the
level of the parent, since this disorders the document by placing
the child out of place with its siblings. So this method will elevate
the node to the parent level and also create a new parent to adopt all
of the siblings after the elevated child.
The adopting parent node will have identical attributes and tag name
as the original parent unless specified otherwise.
"""
#These must be collected before modifying the xml
parent = node.getparent()
grandparent = parent.getparent()
child_index = parent.index(node)
parent_index = grandparent.index(parent)
#Get a list of the siblings
siblings = list(parent)[child_index + 1:]
#Insert the node after the parent
grandparent.insert(parent_index + 1, node)
#Only create the adoptive parent if there are siblings
if len(siblings) > 0 or node.tail is not None:
#Create the adoptive parent
if adopt_name is None:
adopt = etree.Element(parent.tag) # depends on [control=['if'], data=[]]
else:
adopt = etree.Element(adopt_name)
if adopt_attrs is None:
for key in parent.attrib.keys():
adopt.attrib[key] = parent.attrib[key] # depends on [control=['for'], data=['key']] # depends on [control=['if'], data=[]]
else:
for key in adopt_attrs.keys():
adopt.attrib[key] = adopt_attrs[key] # depends on [control=['for'], data=['key']]
#Insert the adoptive parent after the elevated child
grandparent.insert(grandparent.index(node) + 1, adopt) # depends on [control=['if'], data=[]]
#Transfer the siblings to the adoptive parent
for sibling in siblings:
adopt.append(sibling) # depends on [control=['for'], data=['sibling']]
#lxml's element.tail attribute presents a slight problem, requiring the
#following oddity
#Set the adoptive parent's text to the node.tail
if node.tail is not None:
adopt.text = node.tail
node.tail = None # depends on [control=['if'], data=[]]
|
def _find_matching_collections_externally(collections, record):
"""Find matching collections with percolator engine.
:param collections: set of collections where search
:param record: record to match
"""
index, doc_type = RecordIndexer().record_to_index(record)
body = {"doc": record.dumps()}
results = current_search_client.percolate(
index=index,
doc_type=doc_type,
allow_no_indices=True,
ignore_unavailable=True,
body=body
)
prefix_len = len('collection-')
for match in results['matches']:
collection_name = match['_id']
if collection_name.startswith('collection-'):
name = collection_name[prefix_len:]
if name in collections:
yield collections[name]['ancestors']
raise StopIteration
|
def function[_find_matching_collections_externally, parameter[collections, record]]:
constant[Find matching collections with percolator engine.
:param collections: set of collections where search
:param record: record to match
]
<ast.Tuple object at 0x7da1b0a71f60> assign[=] call[call[name[RecordIndexer], parameter[]].record_to_index, parameter[name[record]]]
variable[body] assign[=] dictionary[[<ast.Constant object at 0x7da1b0a72dd0>], [<ast.Call object at 0x7da1b0a72b00>]]
variable[results] assign[=] call[name[current_search_client].percolate, parameter[]]
variable[prefix_len] assign[=] call[name[len], parameter[constant[collection-]]]
for taget[name[match]] in starred[call[name[results]][constant[matches]]] begin[:]
variable[collection_name] assign[=] call[name[match]][constant[_id]]
if call[name[collection_name].startswith, parameter[constant[collection-]]] begin[:]
variable[name] assign[=] call[name[collection_name]][<ast.Slice object at 0x7da1b0af8700>]
if compare[name[name] in name[collections]] begin[:]
<ast.Yield object at 0x7da1b0a70100>
<ast.Raise object at 0x7da1b0a73dc0>
|
keyword[def] identifier[_find_matching_collections_externally] ( identifier[collections] , identifier[record] ):
literal[string]
identifier[index] , identifier[doc_type] = identifier[RecordIndexer] (). identifier[record_to_index] ( identifier[record] )
identifier[body] ={ literal[string] : identifier[record] . identifier[dumps] ()}
identifier[results] = identifier[current_search_client] . identifier[percolate] (
identifier[index] = identifier[index] ,
identifier[doc_type] = identifier[doc_type] ,
identifier[allow_no_indices] = keyword[True] ,
identifier[ignore_unavailable] = keyword[True] ,
identifier[body] = identifier[body]
)
identifier[prefix_len] = identifier[len] ( literal[string] )
keyword[for] identifier[match] keyword[in] identifier[results] [ literal[string] ]:
identifier[collection_name] = identifier[match] [ literal[string] ]
keyword[if] identifier[collection_name] . identifier[startswith] ( literal[string] ):
identifier[name] = identifier[collection_name] [ identifier[prefix_len] :]
keyword[if] identifier[name] keyword[in] identifier[collections] :
keyword[yield] identifier[collections] [ identifier[name] ][ literal[string] ]
keyword[raise] identifier[StopIteration]
|
def _find_matching_collections_externally(collections, record):
"""Find matching collections with percolator engine.
:param collections: set of collections where search
:param record: record to match
"""
(index, doc_type) = RecordIndexer().record_to_index(record)
body = {'doc': record.dumps()}
results = current_search_client.percolate(index=index, doc_type=doc_type, allow_no_indices=True, ignore_unavailable=True, body=body)
prefix_len = len('collection-')
for match in results['matches']:
collection_name = match['_id']
if collection_name.startswith('collection-'):
name = collection_name[prefix_len:]
if name in collections:
yield collections[name]['ancestors'] # depends on [control=['if'], data=['name', 'collections']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['match']]
raise StopIteration
|
def _format_name(self, name, surname, snake_case=False):
"""Format a first name and a surname into a cohesive string.
Note that either name or surname can be empty strings, and
formatting will still succeed.
:param str name: A first name.
:param str surname: A surname.
:param bool snake_case: If True, format the name as
"snake_case", also stripping diacritics if any. (default:
False)
:return str: The formatted name.
"""
if not name or not surname:
sep = ''
elif snake_case:
sep = '_'
else:
sep = ' '
if snake_case:
name = self._snakify_name(name)
surname = self._snakify_name(surname)
disp_name = '{}{}{}'.format(name, sep, surname)
return disp_name
|
def function[_format_name, parameter[self, name, surname, snake_case]]:
constant[Format a first name and a surname into a cohesive string.
Note that either name or surname can be empty strings, and
formatting will still succeed.
:param str name: A first name.
:param str surname: A surname.
:param bool snake_case: If True, format the name as
"snake_case", also stripping diacritics if any. (default:
False)
:return str: The formatted name.
]
if <ast.BoolOp object at 0x7da1b15f4b80> begin[:]
variable[sep] assign[=] constant[]
if name[snake_case] begin[:]
variable[name] assign[=] call[name[self]._snakify_name, parameter[name[name]]]
variable[surname] assign[=] call[name[self]._snakify_name, parameter[name[surname]]]
variable[disp_name] assign[=] call[constant[{}{}{}].format, parameter[name[name], name[sep], name[surname]]]
return[name[disp_name]]
|
keyword[def] identifier[_format_name] ( identifier[self] , identifier[name] , identifier[surname] , identifier[snake_case] = keyword[False] ):
literal[string]
keyword[if] keyword[not] identifier[name] keyword[or] keyword[not] identifier[surname] :
identifier[sep] = literal[string]
keyword[elif] identifier[snake_case] :
identifier[sep] = literal[string]
keyword[else] :
identifier[sep] = literal[string]
keyword[if] identifier[snake_case] :
identifier[name] = identifier[self] . identifier[_snakify_name] ( identifier[name] )
identifier[surname] = identifier[self] . identifier[_snakify_name] ( identifier[surname] )
identifier[disp_name] = literal[string] . identifier[format] ( identifier[name] , identifier[sep] , identifier[surname] )
keyword[return] identifier[disp_name]
|
def _format_name(self, name, surname, snake_case=False):
"""Format a first name and a surname into a cohesive string.
Note that either name or surname can be empty strings, and
formatting will still succeed.
:param str name: A first name.
:param str surname: A surname.
:param bool snake_case: If True, format the name as
"snake_case", also stripping diacritics if any. (default:
False)
:return str: The formatted name.
"""
if not name or not surname:
sep = '' # depends on [control=['if'], data=[]]
elif snake_case:
sep = '_' # depends on [control=['if'], data=[]]
else:
sep = ' '
if snake_case:
name = self._snakify_name(name)
surname = self._snakify_name(surname) # depends on [control=['if'], data=[]]
disp_name = '{}{}{}'.format(name, sep, surname)
return disp_name
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.