code
stringlengths 75
104k
| code_sememe
stringlengths 47
309k
| token_type
stringlengths 215
214k
| code_dependency
stringlengths 75
155k
|
|---|---|---|---|
def has_unchecked_field(self, locator, **kwargs):
"""
Checks if the page or current node has a radio button or checkbox with the given label,
value, or id, that is currently unchecked.
Args:
locator (str): The label, name, or id of an unchecked field.
**kwargs: Arbitrary keyword arguments for :class:`SelectorQuery`.
Returns:
bool: Whether it exists.
"""
kwargs["checked"] = False
return self.has_selector("field", locator, **kwargs)
|
def function[has_unchecked_field, parameter[self, locator]]:
constant[
Checks if the page or current node has a radio button or checkbox with the given label,
value, or id, that is currently unchecked.
Args:
locator (str): The label, name, or id of an unchecked field.
**kwargs: Arbitrary keyword arguments for :class:`SelectorQuery`.
Returns:
bool: Whether it exists.
]
call[name[kwargs]][constant[checked]] assign[=] constant[False]
return[call[name[self].has_selector, parameter[constant[field], name[locator]]]]
|
keyword[def] identifier[has_unchecked_field] ( identifier[self] , identifier[locator] ,** identifier[kwargs] ):
literal[string]
identifier[kwargs] [ literal[string] ]= keyword[False]
keyword[return] identifier[self] . identifier[has_selector] ( literal[string] , identifier[locator] ,** identifier[kwargs] )
|
def has_unchecked_field(self, locator, **kwargs):
"""
Checks if the page or current node has a radio button or checkbox with the given label,
value, or id, that is currently unchecked.
Args:
locator (str): The label, name, or id of an unchecked field.
**kwargs: Arbitrary keyword arguments for :class:`SelectorQuery`.
Returns:
bool: Whether it exists.
"""
kwargs['checked'] = False
return self.has_selector('field', locator, **kwargs)
|
def build(self, autobuilding=False):
"""Build wheels.
:param unpack: If True, replace the sdist we built from the with the
newly built wheel, in preparation for installation.
:return: True if all the wheels built correctly.
"""
assert self._wheel_dir or (autobuilding and self._cache_root)
# unpack sdists and constructs req set
self.requirement_set.prepare_files(self.finder)
reqset = self.requirement_set.requirements.values()
buildset = []
for req in reqset:
if req.is_wheel:
if not autobuilding:
logger.info(
'Skipping %s, due to already being wheel.', req.name)
elif req.editable:
if not autobuilding:
logger.info(
'Skipping bdist_wheel for %s, due to being editable',
req.name)
elif autobuilding and req.link and not req.link.is_artifact:
pass
elif autobuilding and not req.source_dir:
pass
else:
if autobuilding:
link = req.link
base, ext = link.splitext()
if pip.index.egg_info_matches(base, None, link) is None:
# Doesn't look like a package - don't autobuild a wheel
# because we'll have no way to lookup the result sanely
continue
if "binary" not in pip.index.fmt_ctl_formats(
self.finder.format_control,
pkg_resources.safe_name(req.name).lower()):
logger.info(
"Skipping bdist_wheel for %s, due to binaries "
"being disabled for it.", req.name)
continue
buildset.append(req)
if not buildset:
return True
# Build the wheels.
logger.info(
'Building wheels for collected packages: %s',
', '.join([req.name for req in buildset]),
)
with indent_log():
build_success, build_failure = [], []
for req in buildset:
if autobuilding:
output_dir = _cache_for_link(self._cache_root, req.link)
ensure_dir(output_dir)
else:
output_dir = self._wheel_dir
wheel_file = self._build_one(req, output_dir)
if wheel_file:
build_success.append(req)
if autobuilding:
# XXX: This is mildly duplicative with prepare_files,
# but not close enough to pull out to a single common
# method.
# The code below assumes temporary source dirs -
# prevent it doing bad things.
if req.source_dir and not os.path.exists(os.path.join(
req.source_dir, PIP_DELETE_MARKER_FILENAME)):
raise AssertionError(
"bad source dir - missing marker")
# Delete the source we built the wheel from
req.remove_temporary_source()
# set the build directory again - name is known from
# the work prepare_files did.
req.source_dir = req.build_location(
self.requirement_set.build_dir)
# Update the link for this.
req.link = pip.index.Link(
path_to_url(wheel_file), trusted=True)
assert req.link.is_wheel
# extract the wheel into the dir
unpack_url(
req.link, req.source_dir, None, False,
session=self.requirement_set.session)
else:
build_failure.append(req)
# notify success/failure
if build_success:
logger.info(
'Successfully built %s',
' '.join([req.name for req in build_success]),
)
if build_failure:
logger.info(
'Failed to build %s',
' '.join([req.name for req in build_failure]),
)
# Return True if all builds were successful
return len(build_failure) == 0
|
def function[build, parameter[self, autobuilding]]:
constant[Build wheels.
:param unpack: If True, replace the sdist we built from the with the
newly built wheel, in preparation for installation.
:return: True if all the wheels built correctly.
]
assert[<ast.BoolOp object at 0x7da18bc70220>]
call[name[self].requirement_set.prepare_files, parameter[name[self].finder]]
variable[reqset] assign[=] call[name[self].requirement_set.requirements.values, parameter[]]
variable[buildset] assign[=] list[[]]
for taget[name[req]] in starred[name[reqset]] begin[:]
if name[req].is_wheel begin[:]
if <ast.UnaryOp object at 0x7da18bc70610> begin[:]
call[name[logger].info, parameter[constant[Skipping %s, due to already being wheel.], name[req].name]]
if <ast.UnaryOp object at 0x7da18bc700d0> begin[:]
return[constant[True]]
call[name[logger].info, parameter[constant[Building wheels for collected packages: %s], call[constant[, ].join, parameter[<ast.ListComp object at 0x7da18bc73b20>]]]]
with call[name[indent_log], parameter[]] begin[:]
<ast.Tuple object at 0x7da1b2346710> assign[=] tuple[[<ast.List object at 0x7da1b2345420>, <ast.List object at 0x7da1b2347490>]]
for taget[name[req]] in starred[name[buildset]] begin[:]
if name[autobuilding] begin[:]
variable[output_dir] assign[=] call[name[_cache_for_link], parameter[name[self]._cache_root, name[req].link]]
call[name[ensure_dir], parameter[name[output_dir]]]
variable[wheel_file] assign[=] call[name[self]._build_one, parameter[name[req], name[output_dir]]]
if name[wheel_file] begin[:]
call[name[build_success].append, parameter[name[req]]]
if name[autobuilding] begin[:]
if <ast.BoolOp object at 0x7da18bc71570> begin[:]
<ast.Raise object at 0x7da18bc73340>
call[name[req].remove_temporary_source, parameter[]]
name[req].source_dir assign[=] call[name[req].build_location, parameter[name[self].requirement_set.build_dir]]
name[req].link assign[=] call[name[pip].index.Link, parameter[call[name[path_to_url], parameter[name[wheel_file]]]]]
assert[name[req].link.is_wheel]
call[name[unpack_url], parameter[name[req].link, name[req].source_dir, constant[None], constant[False]]]
if name[build_success] begin[:]
call[name[logger].info, parameter[constant[Successfully built %s], call[constant[ ].join, parameter[<ast.ListComp object at 0x7da20e9621d0>]]]]
if name[build_failure] begin[:]
call[name[logger].info, parameter[constant[Failed to build %s], call[constant[ ].join, parameter[<ast.ListComp object at 0x7da20e961630>]]]]
return[compare[call[name[len], parameter[name[build_failure]]] equal[==] constant[0]]]
|
keyword[def] identifier[build] ( identifier[self] , identifier[autobuilding] = keyword[False] ):
literal[string]
keyword[assert] identifier[self] . identifier[_wheel_dir] keyword[or] ( identifier[autobuilding] keyword[and] identifier[self] . identifier[_cache_root] )
identifier[self] . identifier[requirement_set] . identifier[prepare_files] ( identifier[self] . identifier[finder] )
identifier[reqset] = identifier[self] . identifier[requirement_set] . identifier[requirements] . identifier[values] ()
identifier[buildset] =[]
keyword[for] identifier[req] keyword[in] identifier[reqset] :
keyword[if] identifier[req] . identifier[is_wheel] :
keyword[if] keyword[not] identifier[autobuilding] :
identifier[logger] . identifier[info] (
literal[string] , identifier[req] . identifier[name] )
keyword[elif] identifier[req] . identifier[editable] :
keyword[if] keyword[not] identifier[autobuilding] :
identifier[logger] . identifier[info] (
literal[string] ,
identifier[req] . identifier[name] )
keyword[elif] identifier[autobuilding] keyword[and] identifier[req] . identifier[link] keyword[and] keyword[not] identifier[req] . identifier[link] . identifier[is_artifact] :
keyword[pass]
keyword[elif] identifier[autobuilding] keyword[and] keyword[not] identifier[req] . identifier[source_dir] :
keyword[pass]
keyword[else] :
keyword[if] identifier[autobuilding] :
identifier[link] = identifier[req] . identifier[link]
identifier[base] , identifier[ext] = identifier[link] . identifier[splitext] ()
keyword[if] identifier[pip] . identifier[index] . identifier[egg_info_matches] ( identifier[base] , keyword[None] , identifier[link] ) keyword[is] keyword[None] :
keyword[continue]
keyword[if] literal[string] keyword[not] keyword[in] identifier[pip] . identifier[index] . identifier[fmt_ctl_formats] (
identifier[self] . identifier[finder] . identifier[format_control] ,
identifier[pkg_resources] . identifier[safe_name] ( identifier[req] . identifier[name] ). identifier[lower] ()):
identifier[logger] . identifier[info] (
literal[string]
literal[string] , identifier[req] . identifier[name] )
keyword[continue]
identifier[buildset] . identifier[append] ( identifier[req] )
keyword[if] keyword[not] identifier[buildset] :
keyword[return] keyword[True]
identifier[logger] . identifier[info] (
literal[string] ,
literal[string] . identifier[join] ([ identifier[req] . identifier[name] keyword[for] identifier[req] keyword[in] identifier[buildset] ]),
)
keyword[with] identifier[indent_log] ():
identifier[build_success] , identifier[build_failure] =[],[]
keyword[for] identifier[req] keyword[in] identifier[buildset] :
keyword[if] identifier[autobuilding] :
identifier[output_dir] = identifier[_cache_for_link] ( identifier[self] . identifier[_cache_root] , identifier[req] . identifier[link] )
identifier[ensure_dir] ( identifier[output_dir] )
keyword[else] :
identifier[output_dir] = identifier[self] . identifier[_wheel_dir]
identifier[wheel_file] = identifier[self] . identifier[_build_one] ( identifier[req] , identifier[output_dir] )
keyword[if] identifier[wheel_file] :
identifier[build_success] . identifier[append] ( identifier[req] )
keyword[if] identifier[autobuilding] :
keyword[if] identifier[req] . identifier[source_dir] keyword[and] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[os] . identifier[path] . identifier[join] (
identifier[req] . identifier[source_dir] , identifier[PIP_DELETE_MARKER_FILENAME] )):
keyword[raise] identifier[AssertionError] (
literal[string] )
identifier[req] . identifier[remove_temporary_source] ()
identifier[req] . identifier[source_dir] = identifier[req] . identifier[build_location] (
identifier[self] . identifier[requirement_set] . identifier[build_dir] )
identifier[req] . identifier[link] = identifier[pip] . identifier[index] . identifier[Link] (
identifier[path_to_url] ( identifier[wheel_file] ), identifier[trusted] = keyword[True] )
keyword[assert] identifier[req] . identifier[link] . identifier[is_wheel]
identifier[unpack_url] (
identifier[req] . identifier[link] , identifier[req] . identifier[source_dir] , keyword[None] , keyword[False] ,
identifier[session] = identifier[self] . identifier[requirement_set] . identifier[session] )
keyword[else] :
identifier[build_failure] . identifier[append] ( identifier[req] )
keyword[if] identifier[build_success] :
identifier[logger] . identifier[info] (
literal[string] ,
literal[string] . identifier[join] ([ identifier[req] . identifier[name] keyword[for] identifier[req] keyword[in] identifier[build_success] ]),
)
keyword[if] identifier[build_failure] :
identifier[logger] . identifier[info] (
literal[string] ,
literal[string] . identifier[join] ([ identifier[req] . identifier[name] keyword[for] identifier[req] keyword[in] identifier[build_failure] ]),
)
keyword[return] identifier[len] ( identifier[build_failure] )== literal[int]
|
def build(self, autobuilding=False):
"""Build wheels.
:param unpack: If True, replace the sdist we built from the with the
newly built wheel, in preparation for installation.
:return: True if all the wheels built correctly.
"""
assert self._wheel_dir or (autobuilding and self._cache_root)
# unpack sdists and constructs req set
self.requirement_set.prepare_files(self.finder)
reqset = self.requirement_set.requirements.values()
buildset = []
for req in reqset:
if req.is_wheel:
if not autobuilding:
logger.info('Skipping %s, due to already being wheel.', req.name) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif req.editable:
if not autobuilding:
logger.info('Skipping bdist_wheel for %s, due to being editable', req.name) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif autobuilding and req.link and (not req.link.is_artifact):
pass # depends on [control=['if'], data=[]]
elif autobuilding and (not req.source_dir):
pass # depends on [control=['if'], data=[]]
else:
if autobuilding:
link = req.link
(base, ext) = link.splitext()
if pip.index.egg_info_matches(base, None, link) is None:
# Doesn't look like a package - don't autobuild a wheel
# because we'll have no way to lookup the result sanely
continue # depends on [control=['if'], data=[]]
if 'binary' not in pip.index.fmt_ctl_formats(self.finder.format_control, pkg_resources.safe_name(req.name).lower()):
logger.info('Skipping bdist_wheel for %s, due to binaries being disabled for it.', req.name)
continue # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
buildset.append(req) # depends on [control=['for'], data=['req']]
if not buildset:
return True # depends on [control=['if'], data=[]]
# Build the wheels.
logger.info('Building wheels for collected packages: %s', ', '.join([req.name for req in buildset]))
with indent_log():
(build_success, build_failure) = ([], [])
for req in buildset:
if autobuilding:
output_dir = _cache_for_link(self._cache_root, req.link)
ensure_dir(output_dir) # depends on [control=['if'], data=[]]
else:
output_dir = self._wheel_dir
wheel_file = self._build_one(req, output_dir)
if wheel_file:
build_success.append(req)
if autobuilding:
# XXX: This is mildly duplicative with prepare_files,
# but not close enough to pull out to a single common
# method.
# The code below assumes temporary source dirs -
# prevent it doing bad things.
if req.source_dir and (not os.path.exists(os.path.join(req.source_dir, PIP_DELETE_MARKER_FILENAME))):
raise AssertionError('bad source dir - missing marker') # depends on [control=['if'], data=[]]
# Delete the source we built the wheel from
req.remove_temporary_source()
# set the build directory again - name is known from
# the work prepare_files did.
req.source_dir = req.build_location(self.requirement_set.build_dir)
# Update the link for this.
req.link = pip.index.Link(path_to_url(wheel_file), trusted=True)
assert req.link.is_wheel
# extract the wheel into the dir
unpack_url(req.link, req.source_dir, None, False, session=self.requirement_set.session) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
build_failure.append(req) # depends on [control=['for'], data=['req']] # depends on [control=['with'], data=[]]
# notify success/failure
if build_success:
logger.info('Successfully built %s', ' '.join([req.name for req in build_success])) # depends on [control=['if'], data=[]]
if build_failure:
logger.info('Failed to build %s', ' '.join([req.name for req in build_failure])) # depends on [control=['if'], data=[]]
# Return True if all builds were successful
return len(build_failure) == 0
|
def public_notes(self, key, value):
"""Populate the ``public_notes`` key.
Also populates the ``curated`` and ``thesis_info`` keys through side effects.
"""
def _means_not_curated(public_note):
return public_note in [
'*Brief entry*',
'* Brief entry *',
'*Temporary entry*',
'* Temporary entry *',
'*Temporary record*',
'* Temporary record *',
]
public_notes = self.get('public_notes', [])
thesis_info = self.get('thesis_info', {})
source = force_single_element(value.get('9', ''))
for value in force_list(value):
for public_note in force_list(value.get('a')):
match = IS_DEFENSE_DATE.match(public_note)
if match:
try:
thesis_info['defense_date'] = normalize_date(match.group('defense_date'))
except ValueError:
public_notes.append({
'source': source,
'value': public_note,
})
elif _means_not_curated(public_note):
self['curated'] = False
else:
public_notes.append({
'source': source,
'value': public_note,
})
self['thesis_info'] = thesis_info
return public_notes
|
def function[public_notes, parameter[self, key, value]]:
constant[Populate the ``public_notes`` key.
Also populates the ``curated`` and ``thesis_info`` keys through side effects.
]
def function[_means_not_curated, parameter[public_note]]:
return[compare[name[public_note] in list[[<ast.Constant object at 0x7da18dc986d0>, <ast.Constant object at 0x7da18dc9ab90>, <ast.Constant object at 0x7da18dc9b0a0>, <ast.Constant object at 0x7da18dc98e50>, <ast.Constant object at 0x7da18dc9abf0>, <ast.Constant object at 0x7da18dc9a0e0>]]]]
variable[public_notes] assign[=] call[name[self].get, parameter[constant[public_notes], list[[]]]]
variable[thesis_info] assign[=] call[name[self].get, parameter[constant[thesis_info], dictionary[[], []]]]
variable[source] assign[=] call[name[force_single_element], parameter[call[name[value].get, parameter[constant[9], constant[]]]]]
for taget[name[value]] in starred[call[name[force_list], parameter[name[value]]]] begin[:]
for taget[name[public_note]] in starred[call[name[force_list], parameter[call[name[value].get, parameter[constant[a]]]]]] begin[:]
variable[match] assign[=] call[name[IS_DEFENSE_DATE].match, parameter[name[public_note]]]
if name[match] begin[:]
<ast.Try object at 0x7da18dc9a0b0>
call[name[self]][constant[thesis_info]] assign[=] name[thesis_info]
return[name[public_notes]]
|
keyword[def] identifier[public_notes] ( identifier[self] , identifier[key] , identifier[value] ):
literal[string]
keyword[def] identifier[_means_not_curated] ( identifier[public_note] ):
keyword[return] identifier[public_note] keyword[in] [
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
]
identifier[public_notes] = identifier[self] . identifier[get] ( literal[string] ,[])
identifier[thesis_info] = identifier[self] . identifier[get] ( literal[string] ,{})
identifier[source] = identifier[force_single_element] ( identifier[value] . identifier[get] ( literal[string] , literal[string] ))
keyword[for] identifier[value] keyword[in] identifier[force_list] ( identifier[value] ):
keyword[for] identifier[public_note] keyword[in] identifier[force_list] ( identifier[value] . identifier[get] ( literal[string] )):
identifier[match] = identifier[IS_DEFENSE_DATE] . identifier[match] ( identifier[public_note] )
keyword[if] identifier[match] :
keyword[try] :
identifier[thesis_info] [ literal[string] ]= identifier[normalize_date] ( identifier[match] . identifier[group] ( literal[string] ))
keyword[except] identifier[ValueError] :
identifier[public_notes] . identifier[append] ({
literal[string] : identifier[source] ,
literal[string] : identifier[public_note] ,
})
keyword[elif] identifier[_means_not_curated] ( identifier[public_note] ):
identifier[self] [ literal[string] ]= keyword[False]
keyword[else] :
identifier[public_notes] . identifier[append] ({
literal[string] : identifier[source] ,
literal[string] : identifier[public_note] ,
})
identifier[self] [ literal[string] ]= identifier[thesis_info]
keyword[return] identifier[public_notes]
|
def public_notes(self, key, value):
"""Populate the ``public_notes`` key.
Also populates the ``curated`` and ``thesis_info`` keys through side effects.
"""
def _means_not_curated(public_note):
return public_note in ['*Brief entry*', '* Brief entry *', '*Temporary entry*', '* Temporary entry *', '*Temporary record*', '* Temporary record *']
public_notes = self.get('public_notes', [])
thesis_info = self.get('thesis_info', {})
source = force_single_element(value.get('9', ''))
for value in force_list(value):
for public_note in force_list(value.get('a')):
match = IS_DEFENSE_DATE.match(public_note)
if match:
try:
thesis_info['defense_date'] = normalize_date(match.group('defense_date')) # depends on [control=['try'], data=[]]
except ValueError:
public_notes.append({'source': source, 'value': public_note}) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
elif _means_not_curated(public_note):
self['curated'] = False # depends on [control=['if'], data=[]]
else:
public_notes.append({'source': source, 'value': public_note}) # depends on [control=['for'], data=['public_note']] # depends on [control=['for'], data=['value']]
self['thesis_info'] = thesis_info
return public_notes
|
def _parametersToDefaults(self, parameters):
"""
Extract the defaults from C{parameters}, constructing a dictionary
mapping parameter names to default values, suitable for passing to
L{ListChangeParameter}.
@type parameters: C{list} of L{liveform.Parameter} or
L{liveform.ChoiceParameter}.
@rtype: C{dict}
"""
defaults = {}
for p in parameters:
if isinstance(p, liveform.ChoiceParameter):
selected = []
for choice in p.choices:
if choice.selected:
selected.append(choice.value)
defaults[p.name] = selected
else:
defaults[p.name] = p.default
return defaults
|
def function[_parametersToDefaults, parameter[self, parameters]]:
constant[
Extract the defaults from C{parameters}, constructing a dictionary
mapping parameter names to default values, suitable for passing to
L{ListChangeParameter}.
@type parameters: C{list} of L{liveform.Parameter} or
L{liveform.ChoiceParameter}.
@rtype: C{dict}
]
variable[defaults] assign[=] dictionary[[], []]
for taget[name[p]] in starred[name[parameters]] begin[:]
if call[name[isinstance], parameter[name[p], name[liveform].ChoiceParameter]] begin[:]
variable[selected] assign[=] list[[]]
for taget[name[choice]] in starred[name[p].choices] begin[:]
if name[choice].selected begin[:]
call[name[selected].append, parameter[name[choice].value]]
call[name[defaults]][name[p].name] assign[=] name[selected]
return[name[defaults]]
|
keyword[def] identifier[_parametersToDefaults] ( identifier[self] , identifier[parameters] ):
literal[string]
identifier[defaults] ={}
keyword[for] identifier[p] keyword[in] identifier[parameters] :
keyword[if] identifier[isinstance] ( identifier[p] , identifier[liveform] . identifier[ChoiceParameter] ):
identifier[selected] =[]
keyword[for] identifier[choice] keyword[in] identifier[p] . identifier[choices] :
keyword[if] identifier[choice] . identifier[selected] :
identifier[selected] . identifier[append] ( identifier[choice] . identifier[value] )
identifier[defaults] [ identifier[p] . identifier[name] ]= identifier[selected]
keyword[else] :
identifier[defaults] [ identifier[p] . identifier[name] ]= identifier[p] . identifier[default]
keyword[return] identifier[defaults]
|
def _parametersToDefaults(self, parameters):
"""
Extract the defaults from C{parameters}, constructing a dictionary
mapping parameter names to default values, suitable for passing to
L{ListChangeParameter}.
@type parameters: C{list} of L{liveform.Parameter} or
L{liveform.ChoiceParameter}.
@rtype: C{dict}
"""
defaults = {}
for p in parameters:
if isinstance(p, liveform.ChoiceParameter):
selected = []
for choice in p.choices:
if choice.selected:
selected.append(choice.value) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['choice']]
defaults[p.name] = selected # depends on [control=['if'], data=[]]
else:
defaults[p.name] = p.default # depends on [control=['for'], data=['p']]
return defaults
|
def _gripper_visualization(self):
"""
Do any needed visualization here. Overrides superclass implementations.
"""
# color the gripper site appropriately based on distance to nearest object
if self.gripper_visualization:
# find closest object
square_dist = lambda x: np.sum(
np.square(x - self.sim.data.get_site_xpos("grip_site"))
)
dists = np.array(list(map(square_dist, self.sim.data.site_xpos)))
dists[self.eef_site_id] = np.inf # make sure we don't pick the same site
dists[self.eef_cylinder_id] = np.inf
ob_dists = dists[
self.object_site_ids
] # filter out object sites we care about
min_dist = np.min(ob_dists)
ob_id = np.argmin(ob_dists)
ob_name = self.object_names[ob_id]
# set RGBA for the EEF site here
max_dist = 0.1
scaled = (1.0 - min(min_dist / max_dist, 1.)) ** 15
rgba = np.zeros(4)
rgba[0] = 1 - scaled
rgba[1] = scaled
rgba[3] = 0.5
self.sim.model.site_rgba[self.eef_site_id] = rgba
|
def function[_gripper_visualization, parameter[self]]:
constant[
Do any needed visualization here. Overrides superclass implementations.
]
if name[self].gripper_visualization begin[:]
variable[square_dist] assign[=] <ast.Lambda object at 0x7da207f99060>
variable[dists] assign[=] call[name[np].array, parameter[call[name[list], parameter[call[name[map], parameter[name[square_dist], name[self].sim.data.site_xpos]]]]]]
call[name[dists]][name[self].eef_site_id] assign[=] name[np].inf
call[name[dists]][name[self].eef_cylinder_id] assign[=] name[np].inf
variable[ob_dists] assign[=] call[name[dists]][name[self].object_site_ids]
variable[min_dist] assign[=] call[name[np].min, parameter[name[ob_dists]]]
variable[ob_id] assign[=] call[name[np].argmin, parameter[name[ob_dists]]]
variable[ob_name] assign[=] call[name[self].object_names][name[ob_id]]
variable[max_dist] assign[=] constant[0.1]
variable[scaled] assign[=] binary_operation[binary_operation[constant[1.0] - call[name[min], parameter[binary_operation[name[min_dist] / name[max_dist]], constant[1.0]]]] ** constant[15]]
variable[rgba] assign[=] call[name[np].zeros, parameter[constant[4]]]
call[name[rgba]][constant[0]] assign[=] binary_operation[constant[1] - name[scaled]]
call[name[rgba]][constant[1]] assign[=] name[scaled]
call[name[rgba]][constant[3]] assign[=] constant[0.5]
call[name[self].sim.model.site_rgba][name[self].eef_site_id] assign[=] name[rgba]
|
keyword[def] identifier[_gripper_visualization] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[gripper_visualization] :
identifier[square_dist] = keyword[lambda] identifier[x] : identifier[np] . identifier[sum] (
identifier[np] . identifier[square] ( identifier[x] - identifier[self] . identifier[sim] . identifier[data] . identifier[get_site_xpos] ( literal[string] ))
)
identifier[dists] = identifier[np] . identifier[array] ( identifier[list] ( identifier[map] ( identifier[square_dist] , identifier[self] . identifier[sim] . identifier[data] . identifier[site_xpos] )))
identifier[dists] [ identifier[self] . identifier[eef_site_id] ]= identifier[np] . identifier[inf]
identifier[dists] [ identifier[self] . identifier[eef_cylinder_id] ]= identifier[np] . identifier[inf]
identifier[ob_dists] = identifier[dists] [
identifier[self] . identifier[object_site_ids]
]
identifier[min_dist] = identifier[np] . identifier[min] ( identifier[ob_dists] )
identifier[ob_id] = identifier[np] . identifier[argmin] ( identifier[ob_dists] )
identifier[ob_name] = identifier[self] . identifier[object_names] [ identifier[ob_id] ]
identifier[max_dist] = literal[int]
identifier[scaled] =( literal[int] - identifier[min] ( identifier[min_dist] / identifier[max_dist] , literal[int] ))** literal[int]
identifier[rgba] = identifier[np] . identifier[zeros] ( literal[int] )
identifier[rgba] [ literal[int] ]= literal[int] - identifier[scaled]
identifier[rgba] [ literal[int] ]= identifier[scaled]
identifier[rgba] [ literal[int] ]= literal[int]
identifier[self] . identifier[sim] . identifier[model] . identifier[site_rgba] [ identifier[self] . identifier[eef_site_id] ]= identifier[rgba]
|
def _gripper_visualization(self):
"""
Do any needed visualization here. Overrides superclass implementations.
"""
# color the gripper site appropriately based on distance to nearest object
if self.gripper_visualization:
# find closest object
square_dist = lambda x: np.sum(np.square(x - self.sim.data.get_site_xpos('grip_site')))
dists = np.array(list(map(square_dist, self.sim.data.site_xpos)))
dists[self.eef_site_id] = np.inf # make sure we don't pick the same site
dists[self.eef_cylinder_id] = np.inf
ob_dists = dists[self.object_site_ids] # filter out object sites we care about
min_dist = np.min(ob_dists)
ob_id = np.argmin(ob_dists)
ob_name = self.object_names[ob_id]
# set RGBA for the EEF site here
max_dist = 0.1
scaled = (1.0 - min(min_dist / max_dist, 1.0)) ** 15
rgba = np.zeros(4)
rgba[0] = 1 - scaled
rgba[1] = scaled
rgba[3] = 0.5
self.sim.model.site_rgba[self.eef_site_id] = rgba # depends on [control=['if'], data=[]]
|
def get_target_from_spec(self, spec, relative_to=''):
"""Converts `spec` into an address and returns the result of `get_target`
:API: public
"""
return self.get_target(Address.parse(spec, relative_to=relative_to))
|
def function[get_target_from_spec, parameter[self, spec, relative_to]]:
constant[Converts `spec` into an address and returns the result of `get_target`
:API: public
]
return[call[name[self].get_target, parameter[call[name[Address].parse, parameter[name[spec]]]]]]
|
keyword[def] identifier[get_target_from_spec] ( identifier[self] , identifier[spec] , identifier[relative_to] = literal[string] ):
literal[string]
keyword[return] identifier[self] . identifier[get_target] ( identifier[Address] . identifier[parse] ( identifier[spec] , identifier[relative_to] = identifier[relative_to] ))
|
def get_target_from_spec(self, spec, relative_to=''):
"""Converts `spec` into an address and returns the result of `get_target`
:API: public
"""
return self.get_target(Address.parse(spec, relative_to=relative_to))
|
def receive_message(self, msg):
"""
Responds to messages from other participants.
"""
if isinstance(msg, Resolution):
return
paxos = self.paxos_instance
while msg:
if isinstance(msg, Resolution):
self.print_if_verbose("{} resolved value {}".format(self.network_uid, msg.value))
break
else:
self.print_if_verbose("{} <- {} <- {}".format(self.network_uid, msg.__class__.__name__, msg.from_uid))
msg = paxos.receive(msg)
# Todo: Make it optional not to announce resolution (without which it's hard to see final value).
do_announce_resolution = True
if msg and (do_announce_resolution or not isinstance(msg, Resolution)):
self.announce(msg)
self.setattrs_from_paxos(paxos)
|
def function[receive_message, parameter[self, msg]]:
constant[
Responds to messages from other participants.
]
if call[name[isinstance], parameter[name[msg], name[Resolution]]] begin[:]
return[None]
variable[paxos] assign[=] name[self].paxos_instance
while name[msg] begin[:]
if call[name[isinstance], parameter[name[msg], name[Resolution]]] begin[:]
call[name[self].print_if_verbose, parameter[call[constant[{} resolved value {}].format, parameter[name[self].network_uid, name[msg].value]]]]
break
call[name[self].setattrs_from_paxos, parameter[name[paxos]]]
|
keyword[def] identifier[receive_message] ( identifier[self] , identifier[msg] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[msg] , identifier[Resolution] ):
keyword[return]
identifier[paxos] = identifier[self] . identifier[paxos_instance]
keyword[while] identifier[msg] :
keyword[if] identifier[isinstance] ( identifier[msg] , identifier[Resolution] ):
identifier[self] . identifier[print_if_verbose] ( literal[string] . identifier[format] ( identifier[self] . identifier[network_uid] , identifier[msg] . identifier[value] ))
keyword[break]
keyword[else] :
identifier[self] . identifier[print_if_verbose] ( literal[string] . identifier[format] ( identifier[self] . identifier[network_uid] , identifier[msg] . identifier[__class__] . identifier[__name__] , identifier[msg] . identifier[from_uid] ))
identifier[msg] = identifier[paxos] . identifier[receive] ( identifier[msg] )
identifier[do_announce_resolution] = keyword[True]
keyword[if] identifier[msg] keyword[and] ( identifier[do_announce_resolution] keyword[or] keyword[not] identifier[isinstance] ( identifier[msg] , identifier[Resolution] )):
identifier[self] . identifier[announce] ( identifier[msg] )
identifier[self] . identifier[setattrs_from_paxos] ( identifier[paxos] )
|
def receive_message(self, msg):
"""
Responds to messages from other participants.
"""
if isinstance(msg, Resolution):
return # depends on [control=['if'], data=[]]
paxos = self.paxos_instance
while msg:
if isinstance(msg, Resolution):
self.print_if_verbose('{} resolved value {}'.format(self.network_uid, msg.value))
break # depends on [control=['if'], data=[]]
else:
self.print_if_verbose('{} <- {} <- {}'.format(self.network_uid, msg.__class__.__name__, msg.from_uid))
msg = paxos.receive(msg)
# Todo: Make it optional not to announce resolution (without which it's hard to see final value).
do_announce_resolution = True
if msg and (do_announce_resolution or not isinstance(msg, Resolution)):
self.announce(msg) # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]]
self.setattrs_from_paxos(paxos)
|
def colum_avg(self, state):
"""Toggle backgroundcolor"""
self.colum_avg_enabled = state > 0
if self.colum_avg_enabled:
self.return_max = lambda col_vals, index: col_vals[index]
else:
self.return_max = global_max
self.reset()
|
def function[colum_avg, parameter[self, state]]:
constant[Toggle backgroundcolor]
name[self].colum_avg_enabled assign[=] compare[name[state] greater[>] constant[0]]
if name[self].colum_avg_enabled begin[:]
name[self].return_max assign[=] <ast.Lambda object at 0x7da204344340>
call[name[self].reset, parameter[]]
|
keyword[def] identifier[colum_avg] ( identifier[self] , identifier[state] ):
literal[string]
identifier[self] . identifier[colum_avg_enabled] = identifier[state] > literal[int]
keyword[if] identifier[self] . identifier[colum_avg_enabled] :
identifier[self] . identifier[return_max] = keyword[lambda] identifier[col_vals] , identifier[index] : identifier[col_vals] [ identifier[index] ]
keyword[else] :
identifier[self] . identifier[return_max] = identifier[global_max]
identifier[self] . identifier[reset] ()
|
def colum_avg(self, state):
"""Toggle backgroundcolor"""
self.colum_avg_enabled = state > 0
if self.colum_avg_enabled:
self.return_max = lambda col_vals, index: col_vals[index] # depends on [control=['if'], data=[]]
else:
self.return_max = global_max
self.reset()
|
def get_request_setting(self, service_id, version_number, name):
"""Gets the specified Request Settings object."""
content = self._fetch("/service/%s/version/%d/request_settings/%s" % (service_id, version_number, name))
return FastlyRequestSetting(self, content)
|
def function[get_request_setting, parameter[self, service_id, version_number, name]]:
constant[Gets the specified Request Settings object.]
variable[content] assign[=] call[name[self]._fetch, parameter[binary_operation[constant[/service/%s/version/%d/request_settings/%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b0f42a10>, <ast.Name object at 0x7da1b0f40d60>, <ast.Name object at 0x7da1b0f40700>]]]]]
return[call[name[FastlyRequestSetting], parameter[name[self], name[content]]]]
|
keyword[def] identifier[get_request_setting] ( identifier[self] , identifier[service_id] , identifier[version_number] , identifier[name] ):
literal[string]
identifier[content] = identifier[self] . identifier[_fetch] ( literal[string] %( identifier[service_id] , identifier[version_number] , identifier[name] ))
keyword[return] identifier[FastlyRequestSetting] ( identifier[self] , identifier[content] )
|
def get_request_setting(self, service_id, version_number, name):
"""Gets the specified Request Settings object."""
content = self._fetch('/service/%s/version/%d/request_settings/%s' % (service_id, version_number, name))
return FastlyRequestSetting(self, content)
|
def serve_API(self, client, addr):
""" Read client request and call target SDK API"""
self.log_debug("(%s:%s) Handling new request from client." %
(addr[0], addr[1]))
results = None
try:
data = client.recv(4096)
data = bytes.decode(data)
# When client failed to send the data or quit before sending the
# data, server side would receive null data.
# In such case, server would not send back any info and just
# terminate this thread.
if not data:
self.log_warn("(%s:%s) Failed to receive data from client." %
(addr[0], addr[1]))
return
api_data = json.loads(data)
# API_data should be in the form [funcname, args_list, kwargs_dict]
if not isinstance(api_data, list) or len(api_data) != 3:
msg = ("(%s:%s) SDK server got wrong input: '%s' from client."
% (addr[0], addr[1], data))
results = self.construct_internal_error(msg)
return
# Check called API is supported by SDK
(func_name, api_args, api_kwargs) = api_data
self.log_debug("(%s:%s) Request func: %s, args: %s, kwargs: %s" %
(addr[0], addr[1], func_name, str(api_args),
str(api_kwargs)))
try:
api_func = getattr(self.sdkapi, func_name)
except AttributeError:
msg = ("(%s:%s) SDK server got wrong API name: %s from"
"client." % (addr[0], addr[1], func_name))
results = self.construct_api_name_error(msg)
return
# invoke target API function
return_data = api_func(*api_args, **api_kwargs)
except exception.SDKBaseException as e:
self.log_error("(%s:%s) %s" % (addr[0], addr[1],
traceback.format_exc()))
# get the error info from exception attribute
# All SDKbaseexception should eventually has a
# results attribute defined which can be used by
# sdkserver here
if e.results is None:
msg = ("(%s:%s) SDK server got exception without results "
"defined, error: %s" % (addr[0], addr[1],
e.format_message()))
results = self.construct_internal_error(msg)
else:
results = {'overallRC': e.results['overallRC'],
'modID': e.results['modID'],
'rc': e.results['rc'],
'rs': e.results['rs'],
'errmsg': e.format_message(),
'output': ''}
except Exception as e:
self.log_error("(%s:%s) %s" % (addr[0], addr[1],
traceback.format_exc()))
msg = ("(%s:%s) SDK server got unexpected exception: "
"%s" % (addr[0], addr[1], repr(e)))
results = self.construct_internal_error(msg)
else:
if return_data is None:
return_data = ''
results = {'overallRC': 0, 'modID': None,
'rc': 0, 'rs': 0,
'errmsg': '',
'output': return_data}
# Send back the final results
try:
if results is not None:
self.send_results(client, addr, results)
except Exception as e:
# This should not happen in normal case.
# A special case is the server side socket is closed/removed
# before the send() action.
self.log_error("(%s:%s) %s" % (addr[0], addr[1], repr(e)))
finally:
# Close the connection to make sure the thread socket got
# closed even when it got unexpected exceptions.
self.log_debug("(%s:%s) Finish handling request, closing "
"socket." % (addr[0], addr[1]))
client.close()
|
def function[serve_API, parameter[self, client, addr]]:
constant[ Read client request and call target SDK API]
call[name[self].log_debug, parameter[binary_operation[constant[(%s:%s) Handling new request from client.] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da20c993a00>, <ast.Subscript object at 0x7da20c990b20>]]]]]
variable[results] assign[=] constant[None]
<ast.Try object at 0x7da20c992b30>
<ast.Try object at 0x7da20e954a30>
|
keyword[def] identifier[serve_API] ( identifier[self] , identifier[client] , identifier[addr] ):
literal[string]
identifier[self] . identifier[log_debug] ( literal[string] %
( identifier[addr] [ literal[int] ], identifier[addr] [ literal[int] ]))
identifier[results] = keyword[None]
keyword[try] :
identifier[data] = identifier[client] . identifier[recv] ( literal[int] )
identifier[data] = identifier[bytes] . identifier[decode] ( identifier[data] )
keyword[if] keyword[not] identifier[data] :
identifier[self] . identifier[log_warn] ( literal[string] %
( identifier[addr] [ literal[int] ], identifier[addr] [ literal[int] ]))
keyword[return]
identifier[api_data] = identifier[json] . identifier[loads] ( identifier[data] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[api_data] , identifier[list] ) keyword[or] identifier[len] ( identifier[api_data] )!= literal[int] :
identifier[msg] =( literal[string]
%( identifier[addr] [ literal[int] ], identifier[addr] [ literal[int] ], identifier[data] ))
identifier[results] = identifier[self] . identifier[construct_internal_error] ( identifier[msg] )
keyword[return]
( identifier[func_name] , identifier[api_args] , identifier[api_kwargs] )= identifier[api_data]
identifier[self] . identifier[log_debug] ( literal[string] %
( identifier[addr] [ literal[int] ], identifier[addr] [ literal[int] ], identifier[func_name] , identifier[str] ( identifier[api_args] ),
identifier[str] ( identifier[api_kwargs] )))
keyword[try] :
identifier[api_func] = identifier[getattr] ( identifier[self] . identifier[sdkapi] , identifier[func_name] )
keyword[except] identifier[AttributeError] :
identifier[msg] =( literal[string]
literal[string] %( identifier[addr] [ literal[int] ], identifier[addr] [ literal[int] ], identifier[func_name] ))
identifier[results] = identifier[self] . identifier[construct_api_name_error] ( identifier[msg] )
keyword[return]
identifier[return_data] = identifier[api_func] (* identifier[api_args] ,** identifier[api_kwargs] )
keyword[except] identifier[exception] . identifier[SDKBaseException] keyword[as] identifier[e] :
identifier[self] . identifier[log_error] ( literal[string] %( identifier[addr] [ literal[int] ], identifier[addr] [ literal[int] ],
identifier[traceback] . identifier[format_exc] ()))
keyword[if] identifier[e] . identifier[results] keyword[is] keyword[None] :
identifier[msg] =( literal[string]
literal[string] %( identifier[addr] [ literal[int] ], identifier[addr] [ literal[int] ],
identifier[e] . identifier[format_message] ()))
identifier[results] = identifier[self] . identifier[construct_internal_error] ( identifier[msg] )
keyword[else] :
identifier[results] ={ literal[string] : identifier[e] . identifier[results] [ literal[string] ],
literal[string] : identifier[e] . identifier[results] [ literal[string] ],
literal[string] : identifier[e] . identifier[results] [ literal[string] ],
literal[string] : identifier[e] . identifier[results] [ literal[string] ],
literal[string] : identifier[e] . identifier[format_message] (),
literal[string] : literal[string] }
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[self] . identifier[log_error] ( literal[string] %( identifier[addr] [ literal[int] ], identifier[addr] [ literal[int] ],
identifier[traceback] . identifier[format_exc] ()))
identifier[msg] =( literal[string]
literal[string] %( identifier[addr] [ literal[int] ], identifier[addr] [ literal[int] ], identifier[repr] ( identifier[e] )))
identifier[results] = identifier[self] . identifier[construct_internal_error] ( identifier[msg] )
keyword[else] :
keyword[if] identifier[return_data] keyword[is] keyword[None] :
identifier[return_data] = literal[string]
identifier[results] ={ literal[string] : literal[int] , literal[string] : keyword[None] ,
literal[string] : literal[int] , literal[string] : literal[int] ,
literal[string] : literal[string] ,
literal[string] : identifier[return_data] }
keyword[try] :
keyword[if] identifier[results] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[send_results] ( identifier[client] , identifier[addr] , identifier[results] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[self] . identifier[log_error] ( literal[string] %( identifier[addr] [ literal[int] ], identifier[addr] [ literal[int] ], identifier[repr] ( identifier[e] )))
keyword[finally] :
identifier[self] . identifier[log_debug] ( literal[string]
literal[string] %( identifier[addr] [ literal[int] ], identifier[addr] [ literal[int] ]))
identifier[client] . identifier[close] ()
|
def serve_API(self, client, addr):
""" Read client request and call target SDK API"""
self.log_debug('(%s:%s) Handling new request from client.' % (addr[0], addr[1]))
results = None
try:
data = client.recv(4096)
data = bytes.decode(data)
# When client failed to send the data or quit before sending the
# data, server side would receive null data.
# In such case, server would not send back any info and just
# terminate this thread.
if not data:
self.log_warn('(%s:%s) Failed to receive data from client.' % (addr[0], addr[1]))
return # depends on [control=['if'], data=[]]
api_data = json.loads(data)
# API_data should be in the form [funcname, args_list, kwargs_dict]
if not isinstance(api_data, list) or len(api_data) != 3:
msg = "(%s:%s) SDK server got wrong input: '%s' from client." % (addr[0], addr[1], data)
results = self.construct_internal_error(msg)
return # depends on [control=['if'], data=[]]
# Check called API is supported by SDK
(func_name, api_args, api_kwargs) = api_data
self.log_debug('(%s:%s) Request func: %s, args: %s, kwargs: %s' % (addr[0], addr[1], func_name, str(api_args), str(api_kwargs)))
try:
api_func = getattr(self.sdkapi, func_name) # depends on [control=['try'], data=[]]
except AttributeError:
msg = '(%s:%s) SDK server got wrong API name: %s fromclient.' % (addr[0], addr[1], func_name)
results = self.construct_api_name_error(msg)
return # depends on [control=['except'], data=[]]
# invoke target API function
return_data = api_func(*api_args, **api_kwargs) # depends on [control=['try'], data=[]]
except exception.SDKBaseException as e:
self.log_error('(%s:%s) %s' % (addr[0], addr[1], traceback.format_exc()))
# get the error info from exception attribute
# All SDKbaseexception should eventually has a
# results attribute defined which can be used by
# sdkserver here
if e.results is None:
msg = '(%s:%s) SDK server got exception without results defined, error: %s' % (addr[0], addr[1], e.format_message())
results = self.construct_internal_error(msg) # depends on [control=['if'], data=[]]
else:
results = {'overallRC': e.results['overallRC'], 'modID': e.results['modID'], 'rc': e.results['rc'], 'rs': e.results['rs'], 'errmsg': e.format_message(), 'output': ''} # depends on [control=['except'], data=['e']]
except Exception as e:
self.log_error('(%s:%s) %s' % (addr[0], addr[1], traceback.format_exc()))
msg = '(%s:%s) SDK server got unexpected exception: %s' % (addr[0], addr[1], repr(e))
results = self.construct_internal_error(msg) # depends on [control=['except'], data=['e']]
else:
if return_data is None:
return_data = '' # depends on [control=['if'], data=['return_data']]
results = {'overallRC': 0, 'modID': None, 'rc': 0, 'rs': 0, 'errmsg': '', 'output': return_data}
# Send back the final results
try:
if results is not None:
self.send_results(client, addr, results) # depends on [control=['if'], data=['results']] # depends on [control=['try'], data=[]]
except Exception as e:
# This should not happen in normal case.
# A special case is the server side socket is closed/removed
# before the send() action.
self.log_error('(%s:%s) %s' % (addr[0], addr[1], repr(e))) # depends on [control=['except'], data=['e']]
finally:
# Close the connection to make sure the thread socket got
# closed even when it got unexpected exceptions.
self.log_debug('(%s:%s) Finish handling request, closing socket.' % (addr[0], addr[1]))
client.close()
|
def close(self):
"""Write the contents of the cache to disk (only if `update_cache`
parameter during the object initialization was not set to `False`) and
clear the in memory cache."""
if self._cache:
if self._update_cache:
self._write_cache_to_file()
self._cache = None
|
def function[close, parameter[self]]:
constant[Write the contents of the cache to disk (only if `update_cache`
parameter during the object initialization was not set to `False`) and
clear the in memory cache.]
if name[self]._cache begin[:]
if name[self]._update_cache begin[:]
call[name[self]._write_cache_to_file, parameter[]]
name[self]._cache assign[=] constant[None]
|
keyword[def] identifier[close] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_cache] :
keyword[if] identifier[self] . identifier[_update_cache] :
identifier[self] . identifier[_write_cache_to_file] ()
identifier[self] . identifier[_cache] = keyword[None]
|
def close(self):
"""Write the contents of the cache to disk (only if `update_cache`
parameter during the object initialization was not set to `False`) and
clear the in memory cache."""
if self._cache:
if self._update_cache:
self._write_cache_to_file() # depends on [control=['if'], data=[]]
self._cache = None # depends on [control=['if'], data=[]]
|
def get_properties(zos_obj):
"""Returns a lists of properties bound to the object `zos_obj`
@param zos_obj: ZOS API Python COM object
@return prop_get: list of properties that are only getters
@return prop_set: list of properties that are both getters and setters
"""
prop_get = set(zos_obj._prop_map_get_.keys())
prop_set = set(zos_obj._prop_map_put_.keys())
if prop_set.issubset(prop_get):
prop_get = prop_get.difference(prop_set)
else:
msg = 'Assumption all getters are also setters is incorrect!'
raise NotImplementedError(msg)
return list(prop_get), list(prop_set)
|
def function[get_properties, parameter[zos_obj]]:
constant[Returns a lists of properties bound to the object `zos_obj`
@param zos_obj: ZOS API Python COM object
@return prop_get: list of properties that are only getters
@return prop_set: list of properties that are both getters and setters
]
variable[prop_get] assign[=] call[name[set], parameter[call[name[zos_obj]._prop_map_get_.keys, parameter[]]]]
variable[prop_set] assign[=] call[name[set], parameter[call[name[zos_obj]._prop_map_put_.keys, parameter[]]]]
if call[name[prop_set].issubset, parameter[name[prop_get]]] begin[:]
variable[prop_get] assign[=] call[name[prop_get].difference, parameter[name[prop_set]]]
return[tuple[[<ast.Call object at 0x7da1b26af0a0>, <ast.Call object at 0x7da1b26ac880>]]]
|
keyword[def] identifier[get_properties] ( identifier[zos_obj] ):
literal[string]
identifier[prop_get] = identifier[set] ( identifier[zos_obj] . identifier[_prop_map_get_] . identifier[keys] ())
identifier[prop_set] = identifier[set] ( identifier[zos_obj] . identifier[_prop_map_put_] . identifier[keys] ())
keyword[if] identifier[prop_set] . identifier[issubset] ( identifier[prop_get] ):
identifier[prop_get] = identifier[prop_get] . identifier[difference] ( identifier[prop_set] )
keyword[else] :
identifier[msg] = literal[string]
keyword[raise] identifier[NotImplementedError] ( identifier[msg] )
keyword[return] identifier[list] ( identifier[prop_get] ), identifier[list] ( identifier[prop_set] )
|
def get_properties(zos_obj):
"""Returns a lists of properties bound to the object `zos_obj`
@param zos_obj: ZOS API Python COM object
@return prop_get: list of properties that are only getters
@return prop_set: list of properties that are both getters and setters
"""
prop_get = set(zos_obj._prop_map_get_.keys())
prop_set = set(zos_obj._prop_map_put_.keys())
if prop_set.issubset(prop_get):
prop_get = prop_get.difference(prop_set) # depends on [control=['if'], data=[]]
else:
msg = 'Assumption all getters are also setters is incorrect!'
raise NotImplementedError(msg)
return (list(prop_get), list(prop_set))
|
def __error_middleware(self, res, res_json):
"""
Middleware that raises an exception when HTTP statuscode is an error code.
"""
if(res.status_code in [400, 401, 402, 403, 404, 405, 406, 409]):
err_dict = res_json.get('error', {})
raise UpCloudAPIError(error_code=err_dict.get('error_code'),
error_message=err_dict.get('error_message'))
return res_json
|
def function[__error_middleware, parameter[self, res, res_json]]:
constant[
Middleware that raises an exception when HTTP statuscode is an error code.
]
if compare[name[res].status_code in list[[<ast.Constant object at 0x7da1b0e32e90>, <ast.Constant object at 0x7da1b0e317b0>, <ast.Constant object at 0x7da1b0e31570>, <ast.Constant object at 0x7da1b0e301f0>, <ast.Constant object at 0x7da1b0e32bc0>, <ast.Constant object at 0x7da1b0e33910>, <ast.Constant object at 0x7da1b0e33c10>, <ast.Constant object at 0x7da1b0e325f0>]]] begin[:]
variable[err_dict] assign[=] call[name[res_json].get, parameter[constant[error], dictionary[[], []]]]
<ast.Raise object at 0x7da1b0ef7f10>
return[name[res_json]]
|
keyword[def] identifier[__error_middleware] ( identifier[self] , identifier[res] , identifier[res_json] ):
literal[string]
keyword[if] ( identifier[res] . identifier[status_code] keyword[in] [ literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] ]):
identifier[err_dict] = identifier[res_json] . identifier[get] ( literal[string] ,{})
keyword[raise] identifier[UpCloudAPIError] ( identifier[error_code] = identifier[err_dict] . identifier[get] ( literal[string] ),
identifier[error_message] = identifier[err_dict] . identifier[get] ( literal[string] ))
keyword[return] identifier[res_json]
|
def __error_middleware(self, res, res_json):
"""
Middleware that raises an exception when HTTP statuscode is an error code.
"""
if res.status_code in [400, 401, 402, 403, 404, 405, 406, 409]:
err_dict = res_json.get('error', {})
raise UpCloudAPIError(error_code=err_dict.get('error_code'), error_message=err_dict.get('error_message')) # depends on [control=['if'], data=[]]
return res_json
|
def check_auth(user):
'''
Check if the user should or shouldn't be inside the system:
- If the user is staff or superuser: LOGIN GRANTED
- If the user has a Person and it is not "disabled": LOGIN GRANTED
- Elsewhere: LOGIN DENIED
'''
# Initialize authentication
auth = None
person = None
# Check if there is an user
if user:
# It means that Django accepted the user and it is active
if user.is_staff or user.is_superuser:
# This is an administrator, let it in
auth = user
else:
# It is a normal user, check if there is a person behind
person = getattr(user, "person", None)
if not person:
# Check if there is related one
person_related = getattr(user, "people", None)
if person_related:
# Must be only one
if person_related.count() == 1:
person = person_related.get()
if person and ((person.disabled is None) or (person.disabled > timezone.now())):
# There is a person, no disabled found or the found one is fine to log in
auth = user
# Return back the final decision
return auth
|
def function[check_auth, parameter[user]]:
constant[
Check if the user should or shouldn't be inside the system:
- If the user is staff or superuser: LOGIN GRANTED
- If the user has a Person and it is not "disabled": LOGIN GRANTED
- Elsewhere: LOGIN DENIED
]
variable[auth] assign[=] constant[None]
variable[person] assign[=] constant[None]
if name[user] begin[:]
if <ast.BoolOp object at 0x7da1b0ebf520> begin[:]
variable[auth] assign[=] name[user]
return[name[auth]]
|
keyword[def] identifier[check_auth] ( identifier[user] ):
literal[string]
identifier[auth] = keyword[None]
identifier[person] = keyword[None]
keyword[if] identifier[user] :
keyword[if] identifier[user] . identifier[is_staff] keyword[or] identifier[user] . identifier[is_superuser] :
identifier[auth] = identifier[user]
keyword[else] :
identifier[person] = identifier[getattr] ( identifier[user] , literal[string] , keyword[None] )
keyword[if] keyword[not] identifier[person] :
identifier[person_related] = identifier[getattr] ( identifier[user] , literal[string] , keyword[None] )
keyword[if] identifier[person_related] :
keyword[if] identifier[person_related] . identifier[count] ()== literal[int] :
identifier[person] = identifier[person_related] . identifier[get] ()
keyword[if] identifier[person] keyword[and] (( identifier[person] . identifier[disabled] keyword[is] keyword[None] ) keyword[or] ( identifier[person] . identifier[disabled] > identifier[timezone] . identifier[now] ())):
identifier[auth] = identifier[user]
keyword[return] identifier[auth]
|
def check_auth(user):
"""
Check if the user should or shouldn't be inside the system:
- If the user is staff or superuser: LOGIN GRANTED
- If the user has a Person and it is not "disabled": LOGIN GRANTED
- Elsewhere: LOGIN DENIED
"""
# Initialize authentication
auth = None
person = None
# Check if there is an user
if user:
# It means that Django accepted the user and it is active
if user.is_staff or user.is_superuser:
# This is an administrator, let it in
auth = user # depends on [control=['if'], data=[]]
else:
# It is a normal user, check if there is a person behind
person = getattr(user, 'person', None)
if not person:
# Check if there is related one
person_related = getattr(user, 'people', None)
if person_related:
# Must be only one
if person_related.count() == 1:
person = person_related.get() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if person and (person.disabled is None or person.disabled > timezone.now()):
# There is a person, no disabled found or the found one is fine to log in
auth = user # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# Return back the final decision
return auth
|
def _check_for_dyn_timed_auto_backup(self):
""" The method implements the timed storage feature.
The method re-initiating a new timed thread if the state-machine not already stored to backup
(what could be caused by the force_temp_storage_interval) or force the storing of the state-machine if there
is no new request for a timed backup. New timed backup request are intrinsically represented by
self._timer_request_time and initiated by the check_for_auto_backup-method.
The feature uses only one thread for each ModificationHistoryModel and lock to be thread save.
"""
current_time = time.time()
self.timer_request_lock.acquire()
# sm = self.state_machine_model.state_machine
# TODO check for self._timer_request_time is None to avoid and reset auto-backup in case and fix it better
if self._timer_request_time is None:
# logger.warning("timer_request is None")
return self.timer_request_lock.release()
if self.timed_temp_storage_interval < current_time - self._timer_request_time:
# logger.info("{0} Perform timed auto-backup of state-machine {1}.".format(time.time(),
# sm.state_machine_id))
self.check_for_auto_backup(force=True)
else:
duration_to_wait = self.timed_temp_storage_interval - (current_time - self._timer_request_time)
hard_limit_duration_to_wait = self.force_temp_storage_interval - (current_time - self.last_backup_time)
hard_limit_active = hard_limit_duration_to_wait < duration_to_wait
# logger.info('{2} restart_thread {0} time to go {1}, hard limit {3}'.format(sm.state_machine_id,
# duration_to_wait, time.time(),
# hard_limit_active))
if hard_limit_active:
self.set_timed_thread(hard_limit_duration_to_wait, self.check_for_auto_backup, True)
else:
self.set_timed_thread(duration_to_wait, self._check_for_dyn_timed_auto_backup)
self.timer_request_lock.release()
|
def function[_check_for_dyn_timed_auto_backup, parameter[self]]:
constant[ The method implements the timed storage feature.
The method re-initiating a new timed thread if the state-machine not already stored to backup
(what could be caused by the force_temp_storage_interval) or force the storing of the state-machine if there
is no new request for a timed backup. New timed backup request are intrinsically represented by
self._timer_request_time and initiated by the check_for_auto_backup-method.
The feature uses only one thread for each ModificationHistoryModel and lock to be thread save.
]
variable[current_time] assign[=] call[name[time].time, parameter[]]
call[name[self].timer_request_lock.acquire, parameter[]]
if compare[name[self]._timer_request_time is constant[None]] begin[:]
return[call[name[self].timer_request_lock.release, parameter[]]]
if compare[name[self].timed_temp_storage_interval less[<] binary_operation[name[current_time] - name[self]._timer_request_time]] begin[:]
call[name[self].check_for_auto_backup, parameter[]]
call[name[self].timer_request_lock.release, parameter[]]
|
keyword[def] identifier[_check_for_dyn_timed_auto_backup] ( identifier[self] ):
literal[string]
identifier[current_time] = identifier[time] . identifier[time] ()
identifier[self] . identifier[timer_request_lock] . identifier[acquire] ()
keyword[if] identifier[self] . identifier[_timer_request_time] keyword[is] keyword[None] :
keyword[return] identifier[self] . identifier[timer_request_lock] . identifier[release] ()
keyword[if] identifier[self] . identifier[timed_temp_storage_interval] < identifier[current_time] - identifier[self] . identifier[_timer_request_time] :
identifier[self] . identifier[check_for_auto_backup] ( identifier[force] = keyword[True] )
keyword[else] :
identifier[duration_to_wait] = identifier[self] . identifier[timed_temp_storage_interval] -( identifier[current_time] - identifier[self] . identifier[_timer_request_time] )
identifier[hard_limit_duration_to_wait] = identifier[self] . identifier[force_temp_storage_interval] -( identifier[current_time] - identifier[self] . identifier[last_backup_time] )
identifier[hard_limit_active] = identifier[hard_limit_duration_to_wait] < identifier[duration_to_wait]
keyword[if] identifier[hard_limit_active] :
identifier[self] . identifier[set_timed_thread] ( identifier[hard_limit_duration_to_wait] , identifier[self] . identifier[check_for_auto_backup] , keyword[True] )
keyword[else] :
identifier[self] . identifier[set_timed_thread] ( identifier[duration_to_wait] , identifier[self] . identifier[_check_for_dyn_timed_auto_backup] )
identifier[self] . identifier[timer_request_lock] . identifier[release] ()
|
def _check_for_dyn_timed_auto_backup(self):
""" The method implements the timed storage feature.
The method re-initiating a new timed thread if the state-machine not already stored to backup
(what could be caused by the force_temp_storage_interval) or force the storing of the state-machine if there
is no new request for a timed backup. New timed backup request are intrinsically represented by
self._timer_request_time and initiated by the check_for_auto_backup-method.
The feature uses only one thread for each ModificationHistoryModel and lock to be thread save.
"""
current_time = time.time()
self.timer_request_lock.acquire()
# sm = self.state_machine_model.state_machine
# TODO check for self._timer_request_time is None to avoid and reset auto-backup in case and fix it better
if self._timer_request_time is None:
# logger.warning("timer_request is None")
return self.timer_request_lock.release() # depends on [control=['if'], data=[]]
if self.timed_temp_storage_interval < current_time - self._timer_request_time:
# logger.info("{0} Perform timed auto-backup of state-machine {1}.".format(time.time(),
# sm.state_machine_id))
self.check_for_auto_backup(force=True) # depends on [control=['if'], data=[]]
else:
duration_to_wait = self.timed_temp_storage_interval - (current_time - self._timer_request_time)
hard_limit_duration_to_wait = self.force_temp_storage_interval - (current_time - self.last_backup_time)
hard_limit_active = hard_limit_duration_to_wait < duration_to_wait
# logger.info('{2} restart_thread {0} time to go {1}, hard limit {3}'.format(sm.state_machine_id,
# duration_to_wait, time.time(),
# hard_limit_active))
if hard_limit_active:
self.set_timed_thread(hard_limit_duration_to_wait, self.check_for_auto_backup, True) # depends on [control=['if'], data=[]]
else:
self.set_timed_thread(duration_to_wait, self._check_for_dyn_timed_auto_backup)
self.timer_request_lock.release()
|
def apply_T7(word):
'''If a VVV-sequence does not contain a potential /i/-final diphthong,
there is a syllable boundary between the second and third vowels, e.g.
[kau.an], [leu.an], [kiu.as].'''
T7 = ''
WORD = word.split('.')
for i, v in enumerate(WORD):
if contains_VVV(v):
for I, V in enumerate(v[::-1]):
if is_vowel(V):
WORD[i] = v[:I] + '.' + v[I:]
T7 = ' T7'
word = '.'.join(WORD)
return word, T7
|
def function[apply_T7, parameter[word]]:
constant[If a VVV-sequence does not contain a potential /i/-final diphthong,
there is a syllable boundary between the second and third vowels, e.g.
[kau.an], [leu.an], [kiu.as].]
variable[T7] assign[=] constant[]
variable[WORD] assign[=] call[name[word].split, parameter[constant[.]]]
for taget[tuple[[<ast.Name object at 0x7da1b0f40b20>, <ast.Name object at 0x7da1b0f40ac0>]]] in starred[call[name[enumerate], parameter[name[WORD]]]] begin[:]
if call[name[contains_VVV], parameter[name[v]]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b0f41f00>, <ast.Name object at 0x7da1b0f43eb0>]]] in starred[call[name[enumerate], parameter[call[name[v]][<ast.Slice object at 0x7da1b0f42590>]]]] begin[:]
if call[name[is_vowel], parameter[name[V]]] begin[:]
call[name[WORD]][name[i]] assign[=] binary_operation[binary_operation[call[name[v]][<ast.Slice object at 0x7da1b11e27d0>] + constant[.]] + call[name[v]][<ast.Slice object at 0x7da1b11e33a0>]]
variable[T7] assign[=] constant[ T7]
variable[word] assign[=] call[constant[.].join, parameter[name[WORD]]]
return[tuple[[<ast.Name object at 0x7da1b11109a0>, <ast.Name object at 0x7da1b1113220>]]]
|
keyword[def] identifier[apply_T7] ( identifier[word] ):
literal[string]
identifier[T7] = literal[string]
identifier[WORD] = identifier[word] . identifier[split] ( literal[string] )
keyword[for] identifier[i] , identifier[v] keyword[in] identifier[enumerate] ( identifier[WORD] ):
keyword[if] identifier[contains_VVV] ( identifier[v] ):
keyword[for] identifier[I] , identifier[V] keyword[in] identifier[enumerate] ( identifier[v] [::- literal[int] ]):
keyword[if] identifier[is_vowel] ( identifier[V] ):
identifier[WORD] [ identifier[i] ]= identifier[v] [: identifier[I] ]+ literal[string] + identifier[v] [ identifier[I] :]
identifier[T7] = literal[string]
identifier[word] = literal[string] . identifier[join] ( identifier[WORD] )
keyword[return] identifier[word] , identifier[T7]
|
def apply_T7(word):
"""If a VVV-sequence does not contain a potential /i/-final diphthong,
there is a syllable boundary between the second and third vowels, e.g.
[kau.an], [leu.an], [kiu.as]."""
T7 = ''
WORD = word.split('.')
for (i, v) in enumerate(WORD):
if contains_VVV(v):
for (I, V) in enumerate(v[::-1]):
if is_vowel(V):
WORD[i] = v[:I] + '.' + v[I:]
T7 = ' T7' # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
word = '.'.join(WORD)
return (word, T7)
|
def get_attributes(self, obj):
""" Get all object's attributes.
Sends multi-parameter info/config queries and returns the result as dictionary.
:param obj: requested object.
:returns: dictionary of <name, value> of all attributes returned by the query.
:rtype: dict of (str, str)
"""
return self._get_attributes('{}/{}'.format(self.session_url, obj.ref))
|
def function[get_attributes, parameter[self, obj]]:
constant[ Get all object's attributes.
Sends multi-parameter info/config queries and returns the result as dictionary.
:param obj: requested object.
:returns: dictionary of <name, value> of all attributes returned by the query.
:rtype: dict of (str, str)
]
return[call[name[self]._get_attributes, parameter[call[constant[{}/{}].format, parameter[name[self].session_url, name[obj].ref]]]]]
|
keyword[def] identifier[get_attributes] ( identifier[self] , identifier[obj] ):
literal[string]
keyword[return] identifier[self] . identifier[_get_attributes] ( literal[string] . identifier[format] ( identifier[self] . identifier[session_url] , identifier[obj] . identifier[ref] ))
|
def get_attributes(self, obj):
""" Get all object's attributes.
Sends multi-parameter info/config queries and returns the result as dictionary.
:param obj: requested object.
:returns: dictionary of <name, value> of all attributes returned by the query.
:rtype: dict of (str, str)
"""
return self._get_attributes('{}/{}'.format(self.session_url, obj.ref))
|
def process_needtables(app, doctree, fromdocname):
"""
Replace all needtables nodes with a tale of filtered noded.
:param app:
:param doctree:
:param fromdocname:
:return:
"""
env = app.builder.env
for node in doctree.traverse(Needtable):
if not app.config.needs_include_needs:
# Ok, this is really dirty.
# If we replace a node, docutils checks, if it will not lose any attributes.
# But this is here the case, because we are using the attribute "ids" of a node.
# However, I do not understand, why losing an attribute is such a big deal, so we delete everything
# before docutils claims about it.
for att in ('ids', 'names', 'classes', 'dupnames'):
node[att] = []
node.replace_self([])
continue
id = node.attributes["ids"][0]
current_needtable = env.need_all_needtables[id]
all_needs = env.needs_all_needs
if current_needtable["style"] == "" or current_needtable["style"].upper() not in ["TABLE", "DATATABLES"]:
if app.config.needs_table_style == "":
style = "DATATABLES"
else:
style = app.config.needs_table_style.upper()
else:
style = current_needtable["style"].upper()
# Prepare table
classes = ["NEEDS_{style}".format(style=style)]
content = nodes.table(classes=classes)
tgroup = nodes.tgroup()
# Define Table column width
# ToDo: Find a way to chosen to perfect width automatically.
for col in current_needtable["columns"]:
if col == "TITLE":
tgroup += nodes.colspec(colwidth=15)
else:
tgroup += nodes.colspec(colwidth=5)
node_columns = []
for col in current_needtable["columns"]:
header_name = col.title() if col != "ID" else col
header_name = header_name.replace("_", " ")
node_columns.append(nodes.entry('', nodes.paragraph('', header_name)))
tgroup += nodes.thead('', nodes.row(
'', *node_columns))
tbody = nodes.tbody()
tgroup += tbody
content += tgroup
all_needs = list(all_needs.values())
if current_needtable["sort_by"] is not None:
if current_needtable["sort_by"] == "id":
all_needs = sorted(all_needs, key=lambda node: node["id"])
elif current_needtable["sort_by"] == "status":
all_needs = sorted(all_needs, key=status_sorter)
# Perform filtering of needs
found_needs = procces_filters(all_needs, current_needtable)
for need_info in found_needs:
temp_need = need_info.copy()
if temp_need['is_need']:
row = nodes.row(classes=['need'])
prefix = ''
else:
row = nodes.row(classes=['need_part'])
temp_need['id'] = temp_need['id_complete']
prefix = app.config.needs_part_prefix
temp_need['title'] = temp_need['content']
for col in current_needtable["columns"]:
if col == "ID":
row += row_col_maker(app, fromdocname, env.needs_all_needs, temp_need, "id", make_ref=True,
prefix=prefix)
elif col == "TITLE":
row += row_col_maker(
app, fromdocname, env.needs_all_needs, temp_need, "title",
prefix=app.config.needs_part_prefix)
elif col == "INCOMING":
row += row_col_maker(app, fromdocname, env.needs_all_needs, temp_need,
"links_back", ref_lookup=True)
elif col == "OUTGOING":
row += row_col_maker(app, fromdocname, env.needs_all_needs, temp_need, "links", ref_lookup=True)
else:
row += row_col_maker(app, fromdocname, env.needs_all_needs, temp_need, col.lower())
tbody += row
# Need part rows
if current_needtable["show_parts"] and need_info['is_need']:
for key, part in need_info["parts"].items():
row = nodes.row(classes=['need_part'])
temp_part = part.copy() # The dict needs to be manipulated, so that row_col_maker() can be used
temp_part['docname'] = need_info['docname']
for col in current_needtable["columns"]:
if col == "ID":
temp_part['id'] = '.'.join([need_info['id'], part['id']])
row += row_col_maker(
app, fromdocname, env.needs_all_needs, temp_part, "id",
make_ref=True, prefix=app.config.needs_part_prefix)
elif col == "TITLE":
row += row_col_maker(
app, fromdocname, env.needs_all_needs, temp_part, "content",
prefix=app.config.needs_part_prefix)
elif col == "INCOMING":
row += row_col_maker(
app, fromdocname, env.needs_all_needs, temp_part, "links_back", ref_lookup=True)
else:
row += row_col_maker(
app, fromdocname, env.needs_all_needs, temp_part, col.lower())
tbody += row
if len(found_needs) == 0:
content.append(no_needs_found_paragraph())
# add filter information to output
if current_needtable["show_filters"]:
content.append(used_filter_paragraph(current_needtable))
node.replace_self(content)
|
def function[process_needtables, parameter[app, doctree, fromdocname]]:
constant[
Replace all needtables nodes with a tale of filtered noded.
:param app:
:param doctree:
:param fromdocname:
:return:
]
variable[env] assign[=] name[app].builder.env
for taget[name[node]] in starred[call[name[doctree].traverse, parameter[name[Needtable]]]] begin[:]
if <ast.UnaryOp object at 0x7da1b26ae860> begin[:]
for taget[name[att]] in starred[tuple[[<ast.Constant object at 0x7da1b26aeaa0>, <ast.Constant object at 0x7da1b26ae9b0>, <ast.Constant object at 0x7da1b26ad270>, <ast.Constant object at 0x7da1b26af1f0>]]] begin[:]
call[name[node]][name[att]] assign[=] list[[]]
call[name[node].replace_self, parameter[list[[]]]]
continue
variable[id] assign[=] call[call[name[node].attributes][constant[ids]]][constant[0]]
variable[current_needtable] assign[=] call[name[env].need_all_needtables][name[id]]
variable[all_needs] assign[=] name[env].needs_all_needs
if <ast.BoolOp object at 0x7da1b26aee00> begin[:]
if compare[name[app].config.needs_table_style equal[==] constant[]] begin[:]
variable[style] assign[=] constant[DATATABLES]
variable[classes] assign[=] list[[<ast.Call object at 0x7da1b26acc40>]]
variable[content] assign[=] call[name[nodes].table, parameter[]]
variable[tgroup] assign[=] call[name[nodes].tgroup, parameter[]]
for taget[name[col]] in starred[call[name[current_needtable]][constant[columns]]] begin[:]
if compare[name[col] equal[==] constant[TITLE]] begin[:]
<ast.AugAssign object at 0x7da1b1906290>
variable[node_columns] assign[=] list[[]]
for taget[name[col]] in starred[call[name[current_needtable]][constant[columns]]] begin[:]
variable[header_name] assign[=] <ast.IfExp object at 0x7da1b19071c0>
variable[header_name] assign[=] call[name[header_name].replace, parameter[constant[_], constant[ ]]]
call[name[node_columns].append, parameter[call[name[nodes].entry, parameter[constant[], call[name[nodes].paragraph, parameter[constant[], name[header_name]]]]]]]
<ast.AugAssign object at 0x7da1b26aca90>
variable[tbody] assign[=] call[name[nodes].tbody, parameter[]]
<ast.AugAssign object at 0x7da1b26ad6f0>
<ast.AugAssign object at 0x7da1b26af250>
variable[all_needs] assign[=] call[name[list], parameter[call[name[all_needs].values, parameter[]]]]
if compare[call[name[current_needtable]][constant[sort_by]] is_not constant[None]] begin[:]
if compare[call[name[current_needtable]][constant[sort_by]] equal[==] constant[id]] begin[:]
variable[all_needs] assign[=] call[name[sorted], parameter[name[all_needs]]]
variable[found_needs] assign[=] call[name[procces_filters], parameter[name[all_needs], name[current_needtable]]]
for taget[name[need_info]] in starred[name[found_needs]] begin[:]
variable[temp_need] assign[=] call[name[need_info].copy, parameter[]]
if call[name[temp_need]][constant[is_need]] begin[:]
variable[row] assign[=] call[name[nodes].row, parameter[]]
variable[prefix] assign[=] constant[]
for taget[name[col]] in starred[call[name[current_needtable]][constant[columns]]] begin[:]
if compare[name[col] equal[==] constant[ID]] begin[:]
<ast.AugAssign object at 0x7da1b26ac400>
<ast.AugAssign object at 0x7da18ede5570>
if <ast.BoolOp object at 0x7da18ede4340> begin[:]
for taget[tuple[[<ast.Name object at 0x7da18ede5930>, <ast.Name object at 0x7da18ede7460>]]] in starred[call[call[name[need_info]][constant[parts]].items, parameter[]]] begin[:]
variable[row] assign[=] call[name[nodes].row, parameter[]]
variable[temp_part] assign[=] call[name[part].copy, parameter[]]
call[name[temp_part]][constant[docname]] assign[=] call[name[need_info]][constant[docname]]
for taget[name[col]] in starred[call[name[current_needtable]][constant[columns]]] begin[:]
if compare[name[col] equal[==] constant[ID]] begin[:]
call[name[temp_part]][constant[id]] assign[=] call[constant[.].join, parameter[list[[<ast.Subscript object at 0x7da18ede7f70>, <ast.Subscript object at 0x7da18ede7dc0>]]]]
<ast.AugAssign object at 0x7da18ede7880>
<ast.AugAssign object at 0x7da18ede43d0>
if compare[call[name[len], parameter[name[found_needs]]] equal[==] constant[0]] begin[:]
call[name[content].append, parameter[call[name[no_needs_found_paragraph], parameter[]]]]
if call[name[current_needtable]][constant[show_filters]] begin[:]
call[name[content].append, parameter[call[name[used_filter_paragraph], parameter[name[current_needtable]]]]]
call[name[node].replace_self, parameter[name[content]]]
|
keyword[def] identifier[process_needtables] ( identifier[app] , identifier[doctree] , identifier[fromdocname] ):
literal[string]
identifier[env] = identifier[app] . identifier[builder] . identifier[env]
keyword[for] identifier[node] keyword[in] identifier[doctree] . identifier[traverse] ( identifier[Needtable] ):
keyword[if] keyword[not] identifier[app] . identifier[config] . identifier[needs_include_needs] :
keyword[for] identifier[att] keyword[in] ( literal[string] , literal[string] , literal[string] , literal[string] ):
identifier[node] [ identifier[att] ]=[]
identifier[node] . identifier[replace_self] ([])
keyword[continue]
identifier[id] = identifier[node] . identifier[attributes] [ literal[string] ][ literal[int] ]
identifier[current_needtable] = identifier[env] . identifier[need_all_needtables] [ identifier[id] ]
identifier[all_needs] = identifier[env] . identifier[needs_all_needs]
keyword[if] identifier[current_needtable] [ literal[string] ]== literal[string] keyword[or] identifier[current_needtable] [ literal[string] ]. identifier[upper] () keyword[not] keyword[in] [ literal[string] , literal[string] ]:
keyword[if] identifier[app] . identifier[config] . identifier[needs_table_style] == literal[string] :
identifier[style] = literal[string]
keyword[else] :
identifier[style] = identifier[app] . identifier[config] . identifier[needs_table_style] . identifier[upper] ()
keyword[else] :
identifier[style] = identifier[current_needtable] [ literal[string] ]. identifier[upper] ()
identifier[classes] =[ literal[string] . identifier[format] ( identifier[style] = identifier[style] )]
identifier[content] = identifier[nodes] . identifier[table] ( identifier[classes] = identifier[classes] )
identifier[tgroup] = identifier[nodes] . identifier[tgroup] ()
keyword[for] identifier[col] keyword[in] identifier[current_needtable] [ literal[string] ]:
keyword[if] identifier[col] == literal[string] :
identifier[tgroup] += identifier[nodes] . identifier[colspec] ( identifier[colwidth] = literal[int] )
keyword[else] :
identifier[tgroup] += identifier[nodes] . identifier[colspec] ( identifier[colwidth] = literal[int] )
identifier[node_columns] =[]
keyword[for] identifier[col] keyword[in] identifier[current_needtable] [ literal[string] ]:
identifier[header_name] = identifier[col] . identifier[title] () keyword[if] identifier[col] != literal[string] keyword[else] identifier[col]
identifier[header_name] = identifier[header_name] . identifier[replace] ( literal[string] , literal[string] )
identifier[node_columns] . identifier[append] ( identifier[nodes] . identifier[entry] ( literal[string] , identifier[nodes] . identifier[paragraph] ( literal[string] , identifier[header_name] )))
identifier[tgroup] += identifier[nodes] . identifier[thead] ( literal[string] , identifier[nodes] . identifier[row] (
literal[string] ,* identifier[node_columns] ))
identifier[tbody] = identifier[nodes] . identifier[tbody] ()
identifier[tgroup] += identifier[tbody]
identifier[content] += identifier[tgroup]
identifier[all_needs] = identifier[list] ( identifier[all_needs] . identifier[values] ())
keyword[if] identifier[current_needtable] [ literal[string] ] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[current_needtable] [ literal[string] ]== literal[string] :
identifier[all_needs] = identifier[sorted] ( identifier[all_needs] , identifier[key] = keyword[lambda] identifier[node] : identifier[node] [ literal[string] ])
keyword[elif] identifier[current_needtable] [ literal[string] ]== literal[string] :
identifier[all_needs] = identifier[sorted] ( identifier[all_needs] , identifier[key] = identifier[status_sorter] )
identifier[found_needs] = identifier[procces_filters] ( identifier[all_needs] , identifier[current_needtable] )
keyword[for] identifier[need_info] keyword[in] identifier[found_needs] :
identifier[temp_need] = identifier[need_info] . identifier[copy] ()
keyword[if] identifier[temp_need] [ literal[string] ]:
identifier[row] = identifier[nodes] . identifier[row] ( identifier[classes] =[ literal[string] ])
identifier[prefix] = literal[string]
keyword[else] :
identifier[row] = identifier[nodes] . identifier[row] ( identifier[classes] =[ literal[string] ])
identifier[temp_need] [ literal[string] ]= identifier[temp_need] [ literal[string] ]
identifier[prefix] = identifier[app] . identifier[config] . identifier[needs_part_prefix]
identifier[temp_need] [ literal[string] ]= identifier[temp_need] [ literal[string] ]
keyword[for] identifier[col] keyword[in] identifier[current_needtable] [ literal[string] ]:
keyword[if] identifier[col] == literal[string] :
identifier[row] += identifier[row_col_maker] ( identifier[app] , identifier[fromdocname] , identifier[env] . identifier[needs_all_needs] , identifier[temp_need] , literal[string] , identifier[make_ref] = keyword[True] ,
identifier[prefix] = identifier[prefix] )
keyword[elif] identifier[col] == literal[string] :
identifier[row] += identifier[row_col_maker] (
identifier[app] , identifier[fromdocname] , identifier[env] . identifier[needs_all_needs] , identifier[temp_need] , literal[string] ,
identifier[prefix] = identifier[app] . identifier[config] . identifier[needs_part_prefix] )
keyword[elif] identifier[col] == literal[string] :
identifier[row] += identifier[row_col_maker] ( identifier[app] , identifier[fromdocname] , identifier[env] . identifier[needs_all_needs] , identifier[temp_need] ,
literal[string] , identifier[ref_lookup] = keyword[True] )
keyword[elif] identifier[col] == literal[string] :
identifier[row] += identifier[row_col_maker] ( identifier[app] , identifier[fromdocname] , identifier[env] . identifier[needs_all_needs] , identifier[temp_need] , literal[string] , identifier[ref_lookup] = keyword[True] )
keyword[else] :
identifier[row] += identifier[row_col_maker] ( identifier[app] , identifier[fromdocname] , identifier[env] . identifier[needs_all_needs] , identifier[temp_need] , identifier[col] . identifier[lower] ())
identifier[tbody] += identifier[row]
keyword[if] identifier[current_needtable] [ literal[string] ] keyword[and] identifier[need_info] [ literal[string] ]:
keyword[for] identifier[key] , identifier[part] keyword[in] identifier[need_info] [ literal[string] ]. identifier[items] ():
identifier[row] = identifier[nodes] . identifier[row] ( identifier[classes] =[ literal[string] ])
identifier[temp_part] = identifier[part] . identifier[copy] ()
identifier[temp_part] [ literal[string] ]= identifier[need_info] [ literal[string] ]
keyword[for] identifier[col] keyword[in] identifier[current_needtable] [ literal[string] ]:
keyword[if] identifier[col] == literal[string] :
identifier[temp_part] [ literal[string] ]= literal[string] . identifier[join] ([ identifier[need_info] [ literal[string] ], identifier[part] [ literal[string] ]])
identifier[row] += identifier[row_col_maker] (
identifier[app] , identifier[fromdocname] , identifier[env] . identifier[needs_all_needs] , identifier[temp_part] , literal[string] ,
identifier[make_ref] = keyword[True] , identifier[prefix] = identifier[app] . identifier[config] . identifier[needs_part_prefix] )
keyword[elif] identifier[col] == literal[string] :
identifier[row] += identifier[row_col_maker] (
identifier[app] , identifier[fromdocname] , identifier[env] . identifier[needs_all_needs] , identifier[temp_part] , literal[string] ,
identifier[prefix] = identifier[app] . identifier[config] . identifier[needs_part_prefix] )
keyword[elif] identifier[col] == literal[string] :
identifier[row] += identifier[row_col_maker] (
identifier[app] , identifier[fromdocname] , identifier[env] . identifier[needs_all_needs] , identifier[temp_part] , literal[string] , identifier[ref_lookup] = keyword[True] )
keyword[else] :
identifier[row] += identifier[row_col_maker] (
identifier[app] , identifier[fromdocname] , identifier[env] . identifier[needs_all_needs] , identifier[temp_part] , identifier[col] . identifier[lower] ())
identifier[tbody] += identifier[row]
keyword[if] identifier[len] ( identifier[found_needs] )== literal[int] :
identifier[content] . identifier[append] ( identifier[no_needs_found_paragraph] ())
keyword[if] identifier[current_needtable] [ literal[string] ]:
identifier[content] . identifier[append] ( identifier[used_filter_paragraph] ( identifier[current_needtable] ))
identifier[node] . identifier[replace_self] ( identifier[content] )
|
def process_needtables(app, doctree, fromdocname):
"""
Replace all needtables nodes with a tale of filtered noded.
:param app:
:param doctree:
:param fromdocname:
:return:
"""
env = app.builder.env
for node in doctree.traverse(Needtable):
if not app.config.needs_include_needs:
# Ok, this is really dirty.
# If we replace a node, docutils checks, if it will not lose any attributes.
# But this is here the case, because we are using the attribute "ids" of a node.
# However, I do not understand, why losing an attribute is such a big deal, so we delete everything
# before docutils claims about it.
for att in ('ids', 'names', 'classes', 'dupnames'):
node[att] = [] # depends on [control=['for'], data=['att']]
node.replace_self([])
continue # depends on [control=['if'], data=[]]
id = node.attributes['ids'][0]
current_needtable = env.need_all_needtables[id]
all_needs = env.needs_all_needs
if current_needtable['style'] == '' or current_needtable['style'].upper() not in ['TABLE', 'DATATABLES']:
if app.config.needs_table_style == '':
style = 'DATATABLES' # depends on [control=['if'], data=[]]
else:
style = app.config.needs_table_style.upper() # depends on [control=['if'], data=[]]
else:
style = current_needtable['style'].upper()
# Prepare table
classes = ['NEEDS_{style}'.format(style=style)]
content = nodes.table(classes=classes)
tgroup = nodes.tgroup()
# Define Table column width
# ToDo: Find a way to chosen to perfect width automatically.
for col in current_needtable['columns']:
if col == 'TITLE':
tgroup += nodes.colspec(colwidth=15) # depends on [control=['if'], data=[]]
else:
tgroup += nodes.colspec(colwidth=5) # depends on [control=['for'], data=['col']]
node_columns = []
for col in current_needtable['columns']:
header_name = col.title() if col != 'ID' else col
header_name = header_name.replace('_', ' ')
node_columns.append(nodes.entry('', nodes.paragraph('', header_name))) # depends on [control=['for'], data=['col']]
tgroup += nodes.thead('', nodes.row('', *node_columns))
tbody = nodes.tbody()
tgroup += tbody
content += tgroup
all_needs = list(all_needs.values())
if current_needtable['sort_by'] is not None:
if current_needtable['sort_by'] == 'id':
all_needs = sorted(all_needs, key=lambda node: node['id']) # depends on [control=['if'], data=[]]
elif current_needtable['sort_by'] == 'status':
all_needs = sorted(all_needs, key=status_sorter) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# Perform filtering of needs
found_needs = procces_filters(all_needs, current_needtable)
for need_info in found_needs:
temp_need = need_info.copy()
if temp_need['is_need']:
row = nodes.row(classes=['need'])
prefix = '' # depends on [control=['if'], data=[]]
else:
row = nodes.row(classes=['need_part'])
temp_need['id'] = temp_need['id_complete']
prefix = app.config.needs_part_prefix
temp_need['title'] = temp_need['content']
for col in current_needtable['columns']:
if col == 'ID':
row += row_col_maker(app, fromdocname, env.needs_all_needs, temp_need, 'id', make_ref=True, prefix=prefix) # depends on [control=['if'], data=[]]
elif col == 'TITLE':
row += row_col_maker(app, fromdocname, env.needs_all_needs, temp_need, 'title', prefix=app.config.needs_part_prefix) # depends on [control=['if'], data=[]]
elif col == 'INCOMING':
row += row_col_maker(app, fromdocname, env.needs_all_needs, temp_need, 'links_back', ref_lookup=True) # depends on [control=['if'], data=[]]
elif col == 'OUTGOING':
row += row_col_maker(app, fromdocname, env.needs_all_needs, temp_need, 'links', ref_lookup=True) # depends on [control=['if'], data=[]]
else:
row += row_col_maker(app, fromdocname, env.needs_all_needs, temp_need, col.lower()) # depends on [control=['for'], data=['col']]
tbody += row
# Need part rows
if current_needtable['show_parts'] and need_info['is_need']:
for (key, part) in need_info['parts'].items():
row = nodes.row(classes=['need_part'])
temp_part = part.copy() # The dict needs to be manipulated, so that row_col_maker() can be used
temp_part['docname'] = need_info['docname']
for col in current_needtable['columns']:
if col == 'ID':
temp_part['id'] = '.'.join([need_info['id'], part['id']])
row += row_col_maker(app, fromdocname, env.needs_all_needs, temp_part, 'id', make_ref=True, prefix=app.config.needs_part_prefix) # depends on [control=['if'], data=[]]
elif col == 'TITLE':
row += row_col_maker(app, fromdocname, env.needs_all_needs, temp_part, 'content', prefix=app.config.needs_part_prefix) # depends on [control=['if'], data=[]]
elif col == 'INCOMING':
row += row_col_maker(app, fromdocname, env.needs_all_needs, temp_part, 'links_back', ref_lookup=True) # depends on [control=['if'], data=[]]
else:
row += row_col_maker(app, fromdocname, env.needs_all_needs, temp_part, col.lower()) # depends on [control=['for'], data=['col']]
tbody += row # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['need_info']]
if len(found_needs) == 0:
content.append(no_needs_found_paragraph()) # depends on [control=['if'], data=[]]
# add filter information to output
if current_needtable['show_filters']:
content.append(used_filter_paragraph(current_needtable)) # depends on [control=['if'], data=[]]
node.replace_self(content) # depends on [control=['for'], data=['node']]
|
def _find_zone_by_id(self, zone_id):
"""Return zone by id."""
if not self.zones:
return None
zone = list(filter(
lambda zone: zone.id == zone_id, self.zones))
return zone[0] if zone else None
|
def function[_find_zone_by_id, parameter[self, zone_id]]:
constant[Return zone by id.]
if <ast.UnaryOp object at 0x7da1b12cd210> begin[:]
return[constant[None]]
variable[zone] assign[=] call[name[list], parameter[call[name[filter], parameter[<ast.Lambda object at 0x7da1b12ccac0>, name[self].zones]]]]
return[<ast.IfExp object at 0x7da1b12cc400>]
|
keyword[def] identifier[_find_zone_by_id] ( identifier[self] , identifier[zone_id] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[zones] :
keyword[return] keyword[None]
identifier[zone] = identifier[list] ( identifier[filter] (
keyword[lambda] identifier[zone] : identifier[zone] . identifier[id] == identifier[zone_id] , identifier[self] . identifier[zones] ))
keyword[return] identifier[zone] [ literal[int] ] keyword[if] identifier[zone] keyword[else] keyword[None]
|
def _find_zone_by_id(self, zone_id):
"""Return zone by id."""
if not self.zones:
return None # depends on [control=['if'], data=[]]
zone = list(filter(lambda zone: zone.id == zone_id, self.zones))
return zone[0] if zone else None
|
def json_data(self):
"""Return json description of a participant."""
return {
"type": self.type,
"recruiter": self.recruiter_id,
"assignment_id": self.assignment_id,
"hit_id": self.hit_id,
"mode": self.mode,
"end_time": self.end_time,
"base_pay": self.base_pay,
"bonus": self.bonus,
"status": self.status,
}
|
def function[json_data, parameter[self]]:
constant[Return json description of a participant.]
return[dictionary[[<ast.Constant object at 0x7da1b04b6200>, <ast.Constant object at 0x7da1b04b4a90>, <ast.Constant object at 0x7da1b04b5210>, <ast.Constant object at 0x7da1b04b60e0>, <ast.Constant object at 0x7da1b04b4b50>, <ast.Constant object at 0x7da1b04b6fb0>, <ast.Constant object at 0x7da1b04b63e0>, <ast.Constant object at 0x7da1b04b67a0>, <ast.Constant object at 0x7da1b04b6530>], [<ast.Attribute object at 0x7da1b04b6020>, <ast.Attribute object at 0x7da1b04b6950>, <ast.Attribute object at 0x7da1b04b4e50>, <ast.Attribute object at 0x7da1b04b62c0>, <ast.Attribute object at 0x7da1b04b4dc0>, <ast.Attribute object at 0x7da1b04b5a50>, <ast.Attribute object at 0x7da1b04b5cf0>, <ast.Attribute object at 0x7da1b04b5690>, <ast.Attribute object at 0x7da1b04b5600>]]]
|
keyword[def] identifier[json_data] ( identifier[self] ):
literal[string]
keyword[return] {
literal[string] : identifier[self] . identifier[type] ,
literal[string] : identifier[self] . identifier[recruiter_id] ,
literal[string] : identifier[self] . identifier[assignment_id] ,
literal[string] : identifier[self] . identifier[hit_id] ,
literal[string] : identifier[self] . identifier[mode] ,
literal[string] : identifier[self] . identifier[end_time] ,
literal[string] : identifier[self] . identifier[base_pay] ,
literal[string] : identifier[self] . identifier[bonus] ,
literal[string] : identifier[self] . identifier[status] ,
}
|
def json_data(self):
"""Return json description of a participant."""
return {'type': self.type, 'recruiter': self.recruiter_id, 'assignment_id': self.assignment_id, 'hit_id': self.hit_id, 'mode': self.mode, 'end_time': self.end_time, 'base_pay': self.base_pay, 'bonus': self.bonus, 'status': self.status}
|
def execute_over_ssh(cmd, ssh, cwd=None, shell='bash'):
"""Excecute command on remote machine using SSH
:param cmd: Command to execute
:param ssh: Server to connect. Port is optional
:param cwd: current working directory
:return: None
"""
port = None
parts = ssh.split(':', 1)
if len(parts) > 1 and not parts[1].isdigit():
raise InvalidConfig(extra_body='Invalid port number on ssh config: {}'.format(parts[1]))
elif len(parts) > 1:
port = parts[1]
quoted_cmd = ' '.join([x.replace("'", """'"'"'""") for x in cmd.split(' ')])
remote_cmd = ' '.join([
' '.join(get_shell(shell)), # /usr/bin/env bash
' '.join([EXECUTE_SHELL_PARAM, "'", ' '.join((['cd', cwd, ';'] if cwd else []) + [quoted_cmd]), "'"])],
)
return ['ssh', parts[0]] + (['-p', port] if port else []) + ['-C'] + [remote_cmd]
|
def function[execute_over_ssh, parameter[cmd, ssh, cwd, shell]]:
constant[Excecute command on remote machine using SSH
:param cmd: Command to execute
:param ssh: Server to connect. Port is optional
:param cwd: current working directory
:return: None
]
variable[port] assign[=] constant[None]
variable[parts] assign[=] call[name[ssh].split, parameter[constant[:], constant[1]]]
if <ast.BoolOp object at 0x7da18f7235e0> begin[:]
<ast.Raise object at 0x7da18f721d80>
variable[quoted_cmd] assign[=] call[constant[ ].join, parameter[<ast.ListComp object at 0x7da18dc05f00>]]
variable[remote_cmd] assign[=] call[constant[ ].join, parameter[list[[<ast.Call object at 0x7da18dc04b20>, <ast.Call object at 0x7da18dc079a0>]]]]
return[binary_operation[binary_operation[binary_operation[list[[<ast.Constant object at 0x7da18dc05660>, <ast.Subscript object at 0x7da18dc053f0>]] + <ast.IfExp object at 0x7da18dc07a30>] + list[[<ast.Constant object at 0x7da18dc06a70>]]] + list[[<ast.Name object at 0x7da18dc06560>]]]]
|
keyword[def] identifier[execute_over_ssh] ( identifier[cmd] , identifier[ssh] , identifier[cwd] = keyword[None] , identifier[shell] = literal[string] ):
literal[string]
identifier[port] = keyword[None]
identifier[parts] = identifier[ssh] . identifier[split] ( literal[string] , literal[int] )
keyword[if] identifier[len] ( identifier[parts] )> literal[int] keyword[and] keyword[not] identifier[parts] [ literal[int] ]. identifier[isdigit] ():
keyword[raise] identifier[InvalidConfig] ( identifier[extra_body] = literal[string] . identifier[format] ( identifier[parts] [ literal[int] ]))
keyword[elif] identifier[len] ( identifier[parts] )> literal[int] :
identifier[port] = identifier[parts] [ literal[int] ]
identifier[quoted_cmd] = literal[string] . identifier[join] ([ identifier[x] . identifier[replace] ( literal[string] , literal[string] ) keyword[for] identifier[x] keyword[in] identifier[cmd] . identifier[split] ( literal[string] )])
identifier[remote_cmd] = literal[string] . identifier[join] ([
literal[string] . identifier[join] ( identifier[get_shell] ( identifier[shell] )),
literal[string] . identifier[join] ([ identifier[EXECUTE_SHELL_PARAM] , literal[string] , literal[string] . identifier[join] (([ literal[string] , identifier[cwd] , literal[string] ] keyword[if] identifier[cwd] keyword[else] [])+[ identifier[quoted_cmd] ]), literal[string] ])],
)
keyword[return] [ literal[string] , identifier[parts] [ literal[int] ]]+([ literal[string] , identifier[port] ] keyword[if] identifier[port] keyword[else] [])+[ literal[string] ]+[ identifier[remote_cmd] ]
|
def execute_over_ssh(cmd, ssh, cwd=None, shell='bash'):
"""Excecute command on remote machine using SSH
:param cmd: Command to execute
:param ssh: Server to connect. Port is optional
:param cwd: current working directory
:return: None
"""
port = None
parts = ssh.split(':', 1)
if len(parts) > 1 and (not parts[1].isdigit()):
raise InvalidConfig(extra_body='Invalid port number on ssh config: {}'.format(parts[1])) # depends on [control=['if'], data=[]]
elif len(parts) > 1:
port = parts[1] # depends on [control=['if'], data=[]]
quoted_cmd = ' '.join([x.replace("'", '\'"\'"\'') for x in cmd.split(' ')]) # /usr/bin/env bash
remote_cmd = ' '.join([' '.join(get_shell(shell)), ' '.join([EXECUTE_SHELL_PARAM, "'", ' '.join((['cd', cwd, ';'] if cwd else []) + [quoted_cmd]), "'"])])
return ['ssh', parts[0]] + (['-p', port] if port else []) + ['-C'] + [remote_cmd]
|
def register(self, *args):
"""Register a configurable component in the configuration schema
store"""
super(ConfigurableMeta, self).register(*args)
from hfos.database import configschemastore
# self.log('ADDING SCHEMA:')
# pprint(self.configschema)
configschemastore[self.name] = self.configschema
|
def function[register, parameter[self]]:
constant[Register a configurable component in the configuration schema
store]
call[call[name[super], parameter[name[ConfigurableMeta], name[self]]].register, parameter[<ast.Starred object at 0x7da1b0fe6920>]]
from relative_module[hfos.database] import module[configschemastore]
call[name[configschemastore]][name[self].name] assign[=] name[self].configschema
|
keyword[def] identifier[register] ( identifier[self] ,* identifier[args] ):
literal[string]
identifier[super] ( identifier[ConfigurableMeta] , identifier[self] ). identifier[register] (* identifier[args] )
keyword[from] identifier[hfos] . identifier[database] keyword[import] identifier[configschemastore]
identifier[configschemastore] [ identifier[self] . identifier[name] ]= identifier[self] . identifier[configschema]
|
def register(self, *args):
"""Register a configurable component in the configuration schema
store"""
super(ConfigurableMeta, self).register(*args)
from hfos.database import configschemastore
# self.log('ADDING SCHEMA:')
# pprint(self.configschema)
configschemastore[self.name] = self.configschema
|
def is_dynamic(self):
''' Return true if the route contains dynamic parts '''
if not self._static:
for token, value in self.tokens():
if token != 'TXT':
return True
self._static = True
return False
|
def function[is_dynamic, parameter[self]]:
constant[ Return true if the route contains dynamic parts ]
if <ast.UnaryOp object at 0x7da18dc05d80> begin[:]
for taget[tuple[[<ast.Name object at 0x7da18dc056c0>, <ast.Name object at 0x7da18dc07fa0>]]] in starred[call[name[self].tokens, parameter[]]] begin[:]
if compare[name[token] not_equal[!=] constant[TXT]] begin[:]
return[constant[True]]
name[self]._static assign[=] constant[True]
return[constant[False]]
|
keyword[def] identifier[is_dynamic] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[_static] :
keyword[for] identifier[token] , identifier[value] keyword[in] identifier[self] . identifier[tokens] ():
keyword[if] identifier[token] != literal[string] :
keyword[return] keyword[True]
identifier[self] . identifier[_static] = keyword[True]
keyword[return] keyword[False]
|
def is_dynamic(self):
""" Return true if the route contains dynamic parts """
if not self._static:
for (token, value) in self.tokens():
if token != 'TXT':
return True # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
self._static = True
return False
|
def validate(raw_schema, target=None, **kwargs):
"""
Given the python representation of a JSONschema as defined in the swagger
spec, validate that the schema complies to spec. If `target` is provided,
that target will be validated against the provided schema.
"""
schema = schema_validator(raw_schema, **kwargs)
if target is not None:
validate_object(target, schema=schema, **kwargs)
|
def function[validate, parameter[raw_schema, target]]:
constant[
Given the python representation of a JSONschema as defined in the swagger
spec, validate that the schema complies to spec. If `target` is provided,
that target will be validated against the provided schema.
]
variable[schema] assign[=] call[name[schema_validator], parameter[name[raw_schema]]]
if compare[name[target] is_not constant[None]] begin[:]
call[name[validate_object], parameter[name[target]]]
|
keyword[def] identifier[validate] ( identifier[raw_schema] , identifier[target] = keyword[None] ,** identifier[kwargs] ):
literal[string]
identifier[schema] = identifier[schema_validator] ( identifier[raw_schema] ,** identifier[kwargs] )
keyword[if] identifier[target] keyword[is] keyword[not] keyword[None] :
identifier[validate_object] ( identifier[target] , identifier[schema] = identifier[schema] ,** identifier[kwargs] )
|
def validate(raw_schema, target=None, **kwargs):
"""
Given the python representation of a JSONschema as defined in the swagger
spec, validate that the schema complies to spec. If `target` is provided,
that target will be validated against the provided schema.
"""
schema = schema_validator(raw_schema, **kwargs)
if target is not None:
validate_object(target, schema=schema, **kwargs) # depends on [control=['if'], data=['target']]
|
def search(self, index, query, **params):
"""
Performs a search query.
"""
if index is None:
index = 'search'
options = {}
if 'op' in params:
op = params.pop('op')
options['q.op'] = op
options.update(params)
url = self.solr_select_path(index, query, **options)
status, headers, data = self._request('GET', url)
self.check_http_code(status, [200])
if 'json' in headers['content-type']:
results = json.loads(bytes_to_str(data))
return self._normalize_json_search_response(results)
elif 'xml' in headers['content-type']:
return self._normalize_xml_search_response(data)
else:
raise ValueError("Could not decode search response")
|
def function[search, parameter[self, index, query]]:
constant[
Performs a search query.
]
if compare[name[index] is constant[None]] begin[:]
variable[index] assign[=] constant[search]
variable[options] assign[=] dictionary[[], []]
if compare[constant[op] in name[params]] begin[:]
variable[op] assign[=] call[name[params].pop, parameter[constant[op]]]
call[name[options]][constant[q.op]] assign[=] name[op]
call[name[options].update, parameter[name[params]]]
variable[url] assign[=] call[name[self].solr_select_path, parameter[name[index], name[query]]]
<ast.Tuple object at 0x7da207f9a710> assign[=] call[name[self]._request, parameter[constant[GET], name[url]]]
call[name[self].check_http_code, parameter[name[status], list[[<ast.Constant object at 0x7da207f98ca0>]]]]
if compare[constant[json] in call[name[headers]][constant[content-type]]] begin[:]
variable[results] assign[=] call[name[json].loads, parameter[call[name[bytes_to_str], parameter[name[data]]]]]
return[call[name[self]._normalize_json_search_response, parameter[name[results]]]]
|
keyword[def] identifier[search] ( identifier[self] , identifier[index] , identifier[query] ,** identifier[params] ):
literal[string]
keyword[if] identifier[index] keyword[is] keyword[None] :
identifier[index] = literal[string]
identifier[options] ={}
keyword[if] literal[string] keyword[in] identifier[params] :
identifier[op] = identifier[params] . identifier[pop] ( literal[string] )
identifier[options] [ literal[string] ]= identifier[op]
identifier[options] . identifier[update] ( identifier[params] )
identifier[url] = identifier[self] . identifier[solr_select_path] ( identifier[index] , identifier[query] ,** identifier[options] )
identifier[status] , identifier[headers] , identifier[data] = identifier[self] . identifier[_request] ( literal[string] , identifier[url] )
identifier[self] . identifier[check_http_code] ( identifier[status] ,[ literal[int] ])
keyword[if] literal[string] keyword[in] identifier[headers] [ literal[string] ]:
identifier[results] = identifier[json] . identifier[loads] ( identifier[bytes_to_str] ( identifier[data] ))
keyword[return] identifier[self] . identifier[_normalize_json_search_response] ( identifier[results] )
keyword[elif] literal[string] keyword[in] identifier[headers] [ literal[string] ]:
keyword[return] identifier[self] . identifier[_normalize_xml_search_response] ( identifier[data] )
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] )
|
def search(self, index, query, **params):
"""
Performs a search query.
"""
if index is None:
index = 'search' # depends on [control=['if'], data=['index']]
options = {}
if 'op' in params:
op = params.pop('op')
options['q.op'] = op # depends on [control=['if'], data=['params']]
options.update(params)
url = self.solr_select_path(index, query, **options)
(status, headers, data) = self._request('GET', url)
self.check_http_code(status, [200])
if 'json' in headers['content-type']:
results = json.loads(bytes_to_str(data))
return self._normalize_json_search_response(results) # depends on [control=['if'], data=[]]
elif 'xml' in headers['content-type']:
return self._normalize_xml_search_response(data) # depends on [control=['if'], data=[]]
else:
raise ValueError('Could not decode search response')
|
def check_species_object(species_name_or_object):
"""
Helper for validating user supplied species names or objects.
"""
if isinstance(species_name_or_object, Species):
return species_name_or_object
elif isinstance(species_name_or_object, str):
return find_species_by_name(species_name_or_object)
else:
raise ValueError("Unexpected type for species: %s : %s" % (
species_name_or_object, type(species_name_or_object)))
|
def function[check_species_object, parameter[species_name_or_object]]:
constant[
Helper for validating user supplied species names or objects.
]
if call[name[isinstance], parameter[name[species_name_or_object], name[Species]]] begin[:]
return[name[species_name_or_object]]
|
keyword[def] identifier[check_species_object] ( identifier[species_name_or_object] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[species_name_or_object] , identifier[Species] ):
keyword[return] identifier[species_name_or_object]
keyword[elif] identifier[isinstance] ( identifier[species_name_or_object] , identifier[str] ):
keyword[return] identifier[find_species_by_name] ( identifier[species_name_or_object] )
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] %(
identifier[species_name_or_object] , identifier[type] ( identifier[species_name_or_object] )))
|
def check_species_object(species_name_or_object):
"""
Helper for validating user supplied species names or objects.
"""
if isinstance(species_name_or_object, Species):
return species_name_or_object # depends on [control=['if'], data=[]]
elif isinstance(species_name_or_object, str):
return find_species_by_name(species_name_or_object) # depends on [control=['if'], data=[]]
else:
raise ValueError('Unexpected type for species: %s : %s' % (species_name_or_object, type(species_name_or_object)))
|
def _compute_distance(self, rup, dists, C):
"""
Compute the distance function, equation (9):
"""
mref = 3.6
rref = 1.0
rval = np.sqrt(dists.rhypo ** 2 + C['h'] ** 2)
return (C['c1'] + C['c2'] * (rup.mag - mref)) *\
np.log10(rval / rref) + C['c3'] * (rval - rref)
|
def function[_compute_distance, parameter[self, rup, dists, C]]:
constant[
Compute the distance function, equation (9):
]
variable[mref] assign[=] constant[3.6]
variable[rref] assign[=] constant[1.0]
variable[rval] assign[=] call[name[np].sqrt, parameter[binary_operation[binary_operation[name[dists].rhypo ** constant[2]] + binary_operation[call[name[C]][constant[h]] ** constant[2]]]]]
return[binary_operation[binary_operation[binary_operation[call[name[C]][constant[c1]] + binary_operation[call[name[C]][constant[c2]] * binary_operation[name[rup].mag - name[mref]]]] * call[name[np].log10, parameter[binary_operation[name[rval] / name[rref]]]]] + binary_operation[call[name[C]][constant[c3]] * binary_operation[name[rval] - name[rref]]]]]
|
keyword[def] identifier[_compute_distance] ( identifier[self] , identifier[rup] , identifier[dists] , identifier[C] ):
literal[string]
identifier[mref] = literal[int]
identifier[rref] = literal[int]
identifier[rval] = identifier[np] . identifier[sqrt] ( identifier[dists] . identifier[rhypo] ** literal[int] + identifier[C] [ literal[string] ]** literal[int] )
keyword[return] ( identifier[C] [ literal[string] ]+ identifier[C] [ literal[string] ]*( identifier[rup] . identifier[mag] - identifier[mref] ))* identifier[np] . identifier[log10] ( identifier[rval] / identifier[rref] )+ identifier[C] [ literal[string] ]*( identifier[rval] - identifier[rref] )
|
def _compute_distance(self, rup, dists, C):
"""
Compute the distance function, equation (9):
"""
mref = 3.6
rref = 1.0
rval = np.sqrt(dists.rhypo ** 2 + C['h'] ** 2)
return (C['c1'] + C['c2'] * (rup.mag - mref)) * np.log10(rval / rref) + C['c3'] * (rval - rref)
|
def alignment(self):
"""
The alignment of the type in bytes.
"""
if self._arch is None:
return NotImplemented
return self.size // self._arch.byte_width
|
def function[alignment, parameter[self]]:
constant[
The alignment of the type in bytes.
]
if compare[name[self]._arch is constant[None]] begin[:]
return[name[NotImplemented]]
return[binary_operation[name[self].size <ast.FloorDiv object at 0x7da2590d6bc0> name[self]._arch.byte_width]]
|
keyword[def] identifier[alignment] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_arch] keyword[is] keyword[None] :
keyword[return] identifier[NotImplemented]
keyword[return] identifier[self] . identifier[size] // identifier[self] . identifier[_arch] . identifier[byte_width]
|
def alignment(self):
"""
The alignment of the type in bytes.
"""
if self._arch is None:
return NotImplemented # depends on [control=['if'], data=[]]
return self.size // self._arch.byte_width
|
def parse_req_file(req_file, verbatim=False):
"""Take a file and return a dict of (requirement, versions, ignore) based
on the files requirements specs.
"""
req_list = []
requirements = req_file.readlines()
for requirement in requirements:
requirement_no_comments = requirement.split('#')[0].strip()
# if matching requirement line (Thing==1.2.3), update dict, continue
req_match = re.match(
r'\s*(?P<package>[^\s\[\]]+)(?P<extras>\[\S+\])?==(?P<version>\S+)',
requirement_no_comments
)
req_ignore = requirement.strip().endswith(' # norot')
if req_match:
req_list.append((req_match.group('package'),
req_match.group('version'),
req_ignore))
elif requirement_no_comments.startswith('-r'):
try:
base_dir = os.path.dirname(os.path.abspath(req_file.name))
except AttributeError:
print(
'Recursive requirements are not supported in URL based '
'lookups'
)
continue
# replace the -r and ensure there are no leading spaces
file_name = requirement_no_comments.replace('-r', '').strip()
new_path = os.path.join(base_dir, file_name)
try:
if verbatim:
req_list.append((None, requirement, req_ignore))
req_list.extend(
parse_req_file(
open(new_path),
verbatim=verbatim
)
)
except IOError:
print('Failed to import {}'.format(file_name))
elif verbatim:
req_list.append((None, requirement, req_ignore))
return req_list
|
def function[parse_req_file, parameter[req_file, verbatim]]:
constant[Take a file and return a dict of (requirement, versions, ignore) based
on the files requirements specs.
]
variable[req_list] assign[=] list[[]]
variable[requirements] assign[=] call[name[req_file].readlines, parameter[]]
for taget[name[requirement]] in starred[name[requirements]] begin[:]
variable[requirement_no_comments] assign[=] call[call[call[name[requirement].split, parameter[constant[#]]]][constant[0]].strip, parameter[]]
variable[req_match] assign[=] call[name[re].match, parameter[constant[\s*(?P<package>[^\s\[\]]+)(?P<extras>\[\S+\])?==(?P<version>\S+)], name[requirement_no_comments]]]
variable[req_ignore] assign[=] call[call[name[requirement].strip, parameter[]].endswith, parameter[constant[ # norot]]]
if name[req_match] begin[:]
call[name[req_list].append, parameter[tuple[[<ast.Call object at 0x7da20e9b1de0>, <ast.Call object at 0x7da20e9b14e0>, <ast.Name object at 0x7da20e9b10f0>]]]]
return[name[req_list]]
|
keyword[def] identifier[parse_req_file] ( identifier[req_file] , identifier[verbatim] = keyword[False] ):
literal[string]
identifier[req_list] =[]
identifier[requirements] = identifier[req_file] . identifier[readlines] ()
keyword[for] identifier[requirement] keyword[in] identifier[requirements] :
identifier[requirement_no_comments] = identifier[requirement] . identifier[split] ( literal[string] )[ literal[int] ]. identifier[strip] ()
identifier[req_match] = identifier[re] . identifier[match] (
literal[string] ,
identifier[requirement_no_comments]
)
identifier[req_ignore] = identifier[requirement] . identifier[strip] (). identifier[endswith] ( literal[string] )
keyword[if] identifier[req_match] :
identifier[req_list] . identifier[append] (( identifier[req_match] . identifier[group] ( literal[string] ),
identifier[req_match] . identifier[group] ( literal[string] ),
identifier[req_ignore] ))
keyword[elif] identifier[requirement_no_comments] . identifier[startswith] ( literal[string] ):
keyword[try] :
identifier[base_dir] = identifier[os] . identifier[path] . identifier[dirname] ( identifier[os] . identifier[path] . identifier[abspath] ( identifier[req_file] . identifier[name] ))
keyword[except] identifier[AttributeError] :
identifier[print] (
literal[string]
literal[string]
)
keyword[continue]
identifier[file_name] = identifier[requirement_no_comments] . identifier[replace] ( literal[string] , literal[string] ). identifier[strip] ()
identifier[new_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[base_dir] , identifier[file_name] )
keyword[try] :
keyword[if] identifier[verbatim] :
identifier[req_list] . identifier[append] (( keyword[None] , identifier[requirement] , identifier[req_ignore] ))
identifier[req_list] . identifier[extend] (
identifier[parse_req_file] (
identifier[open] ( identifier[new_path] ),
identifier[verbatim] = identifier[verbatim]
)
)
keyword[except] identifier[IOError] :
identifier[print] ( literal[string] . identifier[format] ( identifier[file_name] ))
keyword[elif] identifier[verbatim] :
identifier[req_list] . identifier[append] (( keyword[None] , identifier[requirement] , identifier[req_ignore] ))
keyword[return] identifier[req_list]
|
def parse_req_file(req_file, verbatim=False):
"""Take a file and return a dict of (requirement, versions, ignore) based
on the files requirements specs.
"""
req_list = []
requirements = req_file.readlines()
for requirement in requirements:
requirement_no_comments = requirement.split('#')[0].strip()
# if matching requirement line (Thing==1.2.3), update dict, continue
req_match = re.match('\\s*(?P<package>[^\\s\\[\\]]+)(?P<extras>\\[\\S+\\])?==(?P<version>\\S+)', requirement_no_comments)
req_ignore = requirement.strip().endswith(' # norot')
if req_match:
req_list.append((req_match.group('package'), req_match.group('version'), req_ignore)) # depends on [control=['if'], data=[]]
elif requirement_no_comments.startswith('-r'):
try:
base_dir = os.path.dirname(os.path.abspath(req_file.name)) # depends on [control=['try'], data=[]]
except AttributeError:
print('Recursive requirements are not supported in URL based lookups')
continue # depends on [control=['except'], data=[]]
# replace the -r and ensure there are no leading spaces
file_name = requirement_no_comments.replace('-r', '').strip()
new_path = os.path.join(base_dir, file_name)
try:
if verbatim:
req_list.append((None, requirement, req_ignore)) # depends on [control=['if'], data=[]]
req_list.extend(parse_req_file(open(new_path), verbatim=verbatim)) # depends on [control=['try'], data=[]]
except IOError:
print('Failed to import {}'.format(file_name)) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
elif verbatim:
req_list.append((None, requirement, req_ignore)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['requirement']]
return req_list
|
def status(self, value):
"""Set the workflow stage status."""
# FIXME(BM) This is currently a hack because workflow stages
# don't each have their own db entry.
pb_key = SchedulingObject.get_key(PB_KEY, self._pb_id)
stages = DB.get_hash_value(pb_key, 'workflow_stages')
stages = ast.literal_eval(stages)
stages[self._index]['status'] = value
DB.set_hash_value(pb_key, 'workflow_stages', stages)
|
def function[status, parameter[self, value]]:
constant[Set the workflow stage status.]
variable[pb_key] assign[=] call[name[SchedulingObject].get_key, parameter[name[PB_KEY], name[self]._pb_id]]
variable[stages] assign[=] call[name[DB].get_hash_value, parameter[name[pb_key], constant[workflow_stages]]]
variable[stages] assign[=] call[name[ast].literal_eval, parameter[name[stages]]]
call[call[name[stages]][name[self]._index]][constant[status]] assign[=] name[value]
call[name[DB].set_hash_value, parameter[name[pb_key], constant[workflow_stages], name[stages]]]
|
keyword[def] identifier[status] ( identifier[self] , identifier[value] ):
literal[string]
identifier[pb_key] = identifier[SchedulingObject] . identifier[get_key] ( identifier[PB_KEY] , identifier[self] . identifier[_pb_id] )
identifier[stages] = identifier[DB] . identifier[get_hash_value] ( identifier[pb_key] , literal[string] )
identifier[stages] = identifier[ast] . identifier[literal_eval] ( identifier[stages] )
identifier[stages] [ identifier[self] . identifier[_index] ][ literal[string] ]= identifier[value]
identifier[DB] . identifier[set_hash_value] ( identifier[pb_key] , literal[string] , identifier[stages] )
|
def status(self, value):
"""Set the workflow stage status."""
# FIXME(BM) This is currently a hack because workflow stages
# don't each have their own db entry.
pb_key = SchedulingObject.get_key(PB_KEY, self._pb_id)
stages = DB.get_hash_value(pb_key, 'workflow_stages')
stages = ast.literal_eval(stages)
stages[self._index]['status'] = value
DB.set_hash_value(pb_key, 'workflow_stages', stages)
|
def get_children(self, id_):
"""Gets the children of the given ``Id``.
arg: id (osid.id.Id): the ``Id`` to query
return: (osid.id.IdList) - the children of the ``id``
raise: NotFound - ``id`` is not found
raise: NullArgument - ``id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
id_list = []
for r in self._rls.get_relationships_by_genus_type_for_source(id_, self._relationship_type):
id_list.append(r.get_destination_id())
return IdList(id_list)
|
def function[get_children, parameter[self, id_]]:
constant[Gets the children of the given ``Id``.
arg: id (osid.id.Id): the ``Id`` to query
return: (osid.id.IdList) - the children of the ``id``
raise: NotFound - ``id`` is not found
raise: NullArgument - ``id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
]
variable[id_list] assign[=] list[[]]
for taget[name[r]] in starred[call[name[self]._rls.get_relationships_by_genus_type_for_source, parameter[name[id_], name[self]._relationship_type]]] begin[:]
call[name[id_list].append, parameter[call[name[r].get_destination_id, parameter[]]]]
return[call[name[IdList], parameter[name[id_list]]]]
|
keyword[def] identifier[get_children] ( identifier[self] , identifier[id_] ):
literal[string]
identifier[id_list] =[]
keyword[for] identifier[r] keyword[in] identifier[self] . identifier[_rls] . identifier[get_relationships_by_genus_type_for_source] ( identifier[id_] , identifier[self] . identifier[_relationship_type] ):
identifier[id_list] . identifier[append] ( identifier[r] . identifier[get_destination_id] ())
keyword[return] identifier[IdList] ( identifier[id_list] )
|
def get_children(self, id_):
"""Gets the children of the given ``Id``.
arg: id (osid.id.Id): the ``Id`` to query
return: (osid.id.IdList) - the children of the ``id``
raise: NotFound - ``id`` is not found
raise: NullArgument - ``id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
id_list = []
for r in self._rls.get_relationships_by_genus_type_for_source(id_, self._relationship_type):
id_list.append(r.get_destination_id()) # depends on [control=['for'], data=['r']]
return IdList(id_list)
|
def parse(self, method, endpoint, body):
''' calls parse on list or detail '''
if isinstance(body, dict): # request body was already parsed
return body
if endpoint == 'list':
return self.parse_list(body)
return self.parse_detail(body)
|
def function[parse, parameter[self, method, endpoint, body]]:
constant[ calls parse on list or detail ]
if call[name[isinstance], parameter[name[body], name[dict]]] begin[:]
return[name[body]]
if compare[name[endpoint] equal[==] constant[list]] begin[:]
return[call[name[self].parse_list, parameter[name[body]]]]
return[call[name[self].parse_detail, parameter[name[body]]]]
|
keyword[def] identifier[parse] ( identifier[self] , identifier[method] , identifier[endpoint] , identifier[body] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[body] , identifier[dict] ):
keyword[return] identifier[body]
keyword[if] identifier[endpoint] == literal[string] :
keyword[return] identifier[self] . identifier[parse_list] ( identifier[body] )
keyword[return] identifier[self] . identifier[parse_detail] ( identifier[body] )
|
def parse(self, method, endpoint, body):
""" calls parse on list or detail """
if isinstance(body, dict): # request body was already parsed
return body # depends on [control=['if'], data=[]]
if endpoint == 'list':
return self.parse_list(body) # depends on [control=['if'], data=[]]
return self.parse_detail(body)
|
def add_entity_errors(
self,
property_name,
direct_errors=None,
schema_errors=None
):
"""
Attach nested entity errors
Accepts a list errors coming from validators attached directly,
or a dict of errors produced by a nested schema.
:param property_name: str, property name
:param direct_errors: list, errors from validators attached directly
:param schema_errors: dict, errors from nested schema
:return: shiftschema.result.Result
"""
if direct_errors is None and schema_errors is None:
return self
# direct errors
if direct_errors is not None:
if property_name not in self.errors:
self.errors[property_name] = dict()
if 'direct' not in self.errors[property_name]:
self.errors[property_name]['direct'] = []
if type(direct_errors) is not list:
direct_errors = [direct_errors]
for error in direct_errors:
if not isinstance(error, Error):
err = 'Error must be of type {}'
raise x.InvalidErrorType(err.format(Error))
self.errors[property_name]['direct'].append(error)
# schema errors
if schema_errors is not None:
if isinstance(schema_errors, Result):
schema_errors = schema_errors.errors
if not schema_errors:
return self
if property_name not in self.errors:
self.errors[property_name] = dict()
if 'schema' not in self.errors[property_name]:
self.errors[property_name]['schema'] = schema_errors
else:
self.errors[property_name]['schema'] = self.merge_errors(
self.errors[property_name]['schema'],
schema_errors
)
return self
|
def function[add_entity_errors, parameter[self, property_name, direct_errors, schema_errors]]:
constant[
Attach nested entity errors
Accepts a list errors coming from validators attached directly,
or a dict of errors produced by a nested schema.
:param property_name: str, property name
:param direct_errors: list, errors from validators attached directly
:param schema_errors: dict, errors from nested schema
:return: shiftschema.result.Result
]
if <ast.BoolOp object at 0x7da20c6c5720> begin[:]
return[name[self]]
if compare[name[direct_errors] is_not constant[None]] begin[:]
if compare[name[property_name] <ast.NotIn object at 0x7da2590d7190> name[self].errors] begin[:]
call[name[self].errors][name[property_name]] assign[=] call[name[dict], parameter[]]
if compare[constant[direct] <ast.NotIn object at 0x7da2590d7190> call[name[self].errors][name[property_name]]] begin[:]
call[call[name[self].errors][name[property_name]]][constant[direct]] assign[=] list[[]]
if compare[call[name[type], parameter[name[direct_errors]]] is_not name[list]] begin[:]
variable[direct_errors] assign[=] list[[<ast.Name object at 0x7da20c6c60b0>]]
for taget[name[error]] in starred[name[direct_errors]] begin[:]
if <ast.UnaryOp object at 0x7da20c6c52a0> begin[:]
variable[err] assign[=] constant[Error must be of type {}]
<ast.Raise object at 0x7da20c6c6350>
call[call[call[name[self].errors][name[property_name]]][constant[direct]].append, parameter[name[error]]]
if compare[name[schema_errors] is_not constant[None]] begin[:]
if call[name[isinstance], parameter[name[schema_errors], name[Result]]] begin[:]
variable[schema_errors] assign[=] name[schema_errors].errors
if <ast.UnaryOp object at 0x7da20c6c4f10> begin[:]
return[name[self]]
if compare[name[property_name] <ast.NotIn object at 0x7da2590d7190> name[self].errors] begin[:]
call[name[self].errors][name[property_name]] assign[=] call[name[dict], parameter[]]
if compare[constant[schema] <ast.NotIn object at 0x7da2590d7190> call[name[self].errors][name[property_name]]] begin[:]
call[call[name[self].errors][name[property_name]]][constant[schema]] assign[=] name[schema_errors]
return[name[self]]
|
keyword[def] identifier[add_entity_errors] (
identifier[self] ,
identifier[property_name] ,
identifier[direct_errors] = keyword[None] ,
identifier[schema_errors] = keyword[None]
):
literal[string]
keyword[if] identifier[direct_errors] keyword[is] keyword[None] keyword[and] identifier[schema_errors] keyword[is] keyword[None] :
keyword[return] identifier[self]
keyword[if] identifier[direct_errors] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[property_name] keyword[not] keyword[in] identifier[self] . identifier[errors] :
identifier[self] . identifier[errors] [ identifier[property_name] ]= identifier[dict] ()
keyword[if] literal[string] keyword[not] keyword[in] identifier[self] . identifier[errors] [ identifier[property_name] ]:
identifier[self] . identifier[errors] [ identifier[property_name] ][ literal[string] ]=[]
keyword[if] identifier[type] ( identifier[direct_errors] ) keyword[is] keyword[not] identifier[list] :
identifier[direct_errors] =[ identifier[direct_errors] ]
keyword[for] identifier[error] keyword[in] identifier[direct_errors] :
keyword[if] keyword[not] identifier[isinstance] ( identifier[error] , identifier[Error] ):
identifier[err] = literal[string]
keyword[raise] identifier[x] . identifier[InvalidErrorType] ( identifier[err] . identifier[format] ( identifier[Error] ))
identifier[self] . identifier[errors] [ identifier[property_name] ][ literal[string] ]. identifier[append] ( identifier[error] )
keyword[if] identifier[schema_errors] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[isinstance] ( identifier[schema_errors] , identifier[Result] ):
identifier[schema_errors] = identifier[schema_errors] . identifier[errors]
keyword[if] keyword[not] identifier[schema_errors] :
keyword[return] identifier[self]
keyword[if] identifier[property_name] keyword[not] keyword[in] identifier[self] . identifier[errors] :
identifier[self] . identifier[errors] [ identifier[property_name] ]= identifier[dict] ()
keyword[if] literal[string] keyword[not] keyword[in] identifier[self] . identifier[errors] [ identifier[property_name] ]:
identifier[self] . identifier[errors] [ identifier[property_name] ][ literal[string] ]= identifier[schema_errors]
keyword[else] :
identifier[self] . identifier[errors] [ identifier[property_name] ][ literal[string] ]= identifier[self] . identifier[merge_errors] (
identifier[self] . identifier[errors] [ identifier[property_name] ][ literal[string] ],
identifier[schema_errors]
)
keyword[return] identifier[self]
|
def add_entity_errors(self, property_name, direct_errors=None, schema_errors=None):
"""
Attach nested entity errors
Accepts a list errors coming from validators attached directly,
or a dict of errors produced by a nested schema.
:param property_name: str, property name
:param direct_errors: list, errors from validators attached directly
:param schema_errors: dict, errors from nested schema
:return: shiftschema.result.Result
"""
if direct_errors is None and schema_errors is None:
return self # depends on [control=['if'], data=[]]
# direct errors
if direct_errors is not None:
if property_name not in self.errors:
self.errors[property_name] = dict() # depends on [control=['if'], data=['property_name']]
if 'direct' not in self.errors[property_name]:
self.errors[property_name]['direct'] = [] # depends on [control=['if'], data=[]]
if type(direct_errors) is not list:
direct_errors = [direct_errors] # depends on [control=['if'], data=[]]
for error in direct_errors:
if not isinstance(error, Error):
err = 'Error must be of type {}'
raise x.InvalidErrorType(err.format(Error)) # depends on [control=['if'], data=[]]
self.errors[property_name]['direct'].append(error) # depends on [control=['for'], data=['error']] # depends on [control=['if'], data=['direct_errors']]
# schema errors
if schema_errors is not None:
if isinstance(schema_errors, Result):
schema_errors = schema_errors.errors # depends on [control=['if'], data=[]]
if not schema_errors:
return self # depends on [control=['if'], data=[]]
if property_name not in self.errors:
self.errors[property_name] = dict() # depends on [control=['if'], data=['property_name']]
if 'schema' not in self.errors[property_name]:
self.errors[property_name]['schema'] = schema_errors # depends on [control=['if'], data=[]]
else:
self.errors[property_name]['schema'] = self.merge_errors(self.errors[property_name]['schema'], schema_errors) # depends on [control=['if'], data=['schema_errors']]
return self
|
def _physical_column(self, cube, column_name):
""" Return the SQLAlchemy Column object matching a given, possibly
qualified, column name (i.e.: 'table.column'). If no table is named,
the fact table is assumed. """
table_name = self.model.fact_table_name
if '.' in column_name:
table_name, column_name = column_name.split('.', 1)
table = cube._load_table(table_name)
if column_name not in table.columns:
raise BindingException('Column %r does not exist on table %r' % (
column_name, table_name), table=table_name,
column=column_name)
return table, table.columns[column_name]
|
def function[_physical_column, parameter[self, cube, column_name]]:
constant[ Return the SQLAlchemy Column object matching a given, possibly
qualified, column name (i.e.: 'table.column'). If no table is named,
the fact table is assumed. ]
variable[table_name] assign[=] name[self].model.fact_table_name
if compare[constant[.] in name[column_name]] begin[:]
<ast.Tuple object at 0x7da207f03790> assign[=] call[name[column_name].split, parameter[constant[.], constant[1]]]
variable[table] assign[=] call[name[cube]._load_table, parameter[name[table_name]]]
if compare[name[column_name] <ast.NotIn object at 0x7da2590d7190> name[table].columns] begin[:]
<ast.Raise object at 0x7da207f02830>
return[tuple[[<ast.Name object at 0x7da2044c21a0>, <ast.Subscript object at 0x7da2044c2500>]]]
|
keyword[def] identifier[_physical_column] ( identifier[self] , identifier[cube] , identifier[column_name] ):
literal[string]
identifier[table_name] = identifier[self] . identifier[model] . identifier[fact_table_name]
keyword[if] literal[string] keyword[in] identifier[column_name] :
identifier[table_name] , identifier[column_name] = identifier[column_name] . identifier[split] ( literal[string] , literal[int] )
identifier[table] = identifier[cube] . identifier[_load_table] ( identifier[table_name] )
keyword[if] identifier[column_name] keyword[not] keyword[in] identifier[table] . identifier[columns] :
keyword[raise] identifier[BindingException] ( literal[string] %(
identifier[column_name] , identifier[table_name] ), identifier[table] = identifier[table_name] ,
identifier[column] = identifier[column_name] )
keyword[return] identifier[table] , identifier[table] . identifier[columns] [ identifier[column_name] ]
|
def _physical_column(self, cube, column_name):
""" Return the SQLAlchemy Column object matching a given, possibly
qualified, column name (i.e.: 'table.column'). If no table is named,
the fact table is assumed. """
table_name = self.model.fact_table_name
if '.' in column_name:
(table_name, column_name) = column_name.split('.', 1) # depends on [control=['if'], data=['column_name']]
table = cube._load_table(table_name)
if column_name not in table.columns:
raise BindingException('Column %r does not exist on table %r' % (column_name, table_name), table=table_name, column=column_name) # depends on [control=['if'], data=['column_name']]
return (table, table.columns[column_name])
|
def credibleregions(self, probs):
""" Calculates the credible regions.
"""
return [brentq(lambda l: self.pdf[self.pdf > l].sum() - p, 0.0, 1.0) for p in probs]
|
def function[credibleregions, parameter[self, probs]]:
constant[ Calculates the credible regions.
]
return[<ast.ListComp object at 0x7da18bc70d00>]
|
keyword[def] identifier[credibleregions] ( identifier[self] , identifier[probs] ):
literal[string]
keyword[return] [ identifier[brentq] ( keyword[lambda] identifier[l] : identifier[self] . identifier[pdf] [ identifier[self] . identifier[pdf] > identifier[l] ]. identifier[sum] ()- identifier[p] , literal[int] , literal[int] ) keyword[for] identifier[p] keyword[in] identifier[probs] ]
|
def credibleregions(self, probs):
""" Calculates the credible regions.
"""
return [brentq(lambda l: self.pdf[self.pdf > l].sum() - p, 0.0, 1.0) for p in probs]
|
def implfuncs(self):
"""generator on all implemented functions"""
for f in self.body:
if (hasattr(f, '_ctype')
and isinstance(f._ctype, FuncType)
and hasattr(f, 'body')):
yield f
|
def function[implfuncs, parameter[self]]:
constant[generator on all implemented functions]
for taget[name[f]] in starred[name[self].body] begin[:]
if <ast.BoolOp object at 0x7da1b16484f0> begin[:]
<ast.Yield object at 0x7da20e9550f0>
|
keyword[def] identifier[implfuncs] ( identifier[self] ):
literal[string]
keyword[for] identifier[f] keyword[in] identifier[self] . identifier[body] :
keyword[if] ( identifier[hasattr] ( identifier[f] , literal[string] )
keyword[and] identifier[isinstance] ( identifier[f] . identifier[_ctype] , identifier[FuncType] )
keyword[and] identifier[hasattr] ( identifier[f] , literal[string] )):
keyword[yield] identifier[f]
|
def implfuncs(self):
"""generator on all implemented functions"""
for f in self.body:
if hasattr(f, '_ctype') and isinstance(f._ctype, FuncType) and hasattr(f, 'body'):
yield f # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['f']]
|
def delete_transaction_by_id(cls, transaction_id, **kwargs):
"""Delete Transaction
Delete an instance of Transaction by its ID.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.delete_transaction_by_id(transaction_id, async=True)
>>> result = thread.get()
:param async bool
:param str transaction_id: ID of transaction to delete. (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return cls._delete_transaction_by_id_with_http_info(transaction_id, **kwargs)
else:
(data) = cls._delete_transaction_by_id_with_http_info(transaction_id, **kwargs)
return data
|
def function[delete_transaction_by_id, parameter[cls, transaction_id]]:
constant[Delete Transaction
Delete an instance of Transaction by its ID.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.delete_transaction_by_id(transaction_id, async=True)
>>> result = thread.get()
:param async bool
:param str transaction_id: ID of transaction to delete. (required)
:return: None
If the method is called asynchronously,
returns the request thread.
]
call[name[kwargs]][constant[_return_http_data_only]] assign[=] constant[True]
if call[name[kwargs].get, parameter[constant[async]]] begin[:]
return[call[name[cls]._delete_transaction_by_id_with_http_info, parameter[name[transaction_id]]]]
|
keyword[def] identifier[delete_transaction_by_id] ( identifier[cls] , identifier[transaction_id] ,** identifier[kwargs] ):
literal[string]
identifier[kwargs] [ literal[string] ]= keyword[True]
keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ):
keyword[return] identifier[cls] . identifier[_delete_transaction_by_id_with_http_info] ( identifier[transaction_id] ,** identifier[kwargs] )
keyword[else] :
( identifier[data] )= identifier[cls] . identifier[_delete_transaction_by_id_with_http_info] ( identifier[transaction_id] ,** identifier[kwargs] )
keyword[return] identifier[data]
|
def delete_transaction_by_id(cls, transaction_id, **kwargs):
"""Delete Transaction
Delete an instance of Transaction by its ID.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.delete_transaction_by_id(transaction_id, async=True)
>>> result = thread.get()
:param async bool
:param str transaction_id: ID of transaction to delete. (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return cls._delete_transaction_by_id_with_http_info(transaction_id, **kwargs) # depends on [control=['if'], data=[]]
else:
data = cls._delete_transaction_by_id_with_http_info(transaction_id, **kwargs)
return data
|
def strategy(self, *names, **kwargs):
"""
StrategyDict wrapping method for adding a new strategy.
Parameters
----------
*names :
Positional arguments with all names (strings) that could be used to
call the strategy to be added, to be used both as key items and as
attribute names.
keep_name :
Boolean keyword-only parameter for choosing whether the ``__name__``
attribute of the decorated/wrapped function should be changed or kept.
Defaults to False (i.e., changes the name by default).
Returns
-------
A decorator/wrapper function to be used once on the new strategy to be
added.
Example
-------
Let's create a StrategyDict that knows its name:
>>> txt_proc = StrategyDict("txt_proc")
Add a first strategy ``swapcase``, using this method as a decorator
factory:
>>> @txt_proc.strategy("swapcase")
... def txt_proc(txt):
... return txt.swapcase()
Let's do it again, but wrapping the strategy functions inline. First two
strategies have multiple names, the last keeps the function name, which
would otherwise be replaced by the first given name:
>>> txt_proc.strategy("lower", "low")(lambda txt: txt.lower())
{(...): <function ... at 0x...>, (...): <function ... at 0x...>}
>>> txt_proc.strategy("upper", "up")(lambda txt: txt.upper())
{...}
>>> txt_proc.strategy("keep", keep_name=True)(lambda txt: txt)
{...}
We can now iterate through the strategies to call them or see their
function names
>>> sorted(st("Just a Test") for st in txt_proc)
['JUST A TEST', 'Just a Test', 'jUST A tEST', 'just a test']
>>> sorted(st.__name__ for st in txt_proc) # Just the first name
['<lambda>', 'lower', 'swapcase', 'upper']
Calling a single strategy:
>>> txt_proc.low("TeStInG")
'testing'
>>> txt_proc["upper"]("TeStInG")
'TESTING'
>>> txt_proc("TeStInG") # Default is the first: swapcase
'tEsTiNg'
>>> txt_proc.default("TeStInG")
'tEsTiNg'
>>> txt_proc.default = txt_proc.up # Manually changing the default
>>> txt_proc("TeStInG")
'TESTING'
Hint
----
Default strategy is the one stored as the ``default`` attribute, you can
change or remove it at any time. When removing all keys that are assigned
to the default strategy, the default attribute will be removed from the
StrategyDict instance as well. The first strategy added afterwards is the
one that will become the new default, unless the attribute is created or
changed manually.
"""
def decorator(func):
keep_name = kwargs.pop("keep_name", False)
if kwargs:
key = next(iter(kwargs))
raise TypeError("Unknown keyword argument '{}'".format(key))
if not keep_name:
func.__name__ = str(names[0])
self[names] = func
return self
return decorator
|
def function[strategy, parameter[self]]:
constant[
StrategyDict wrapping method for adding a new strategy.
Parameters
----------
*names :
Positional arguments with all names (strings) that could be used to
call the strategy to be added, to be used both as key items and as
attribute names.
keep_name :
Boolean keyword-only parameter for choosing whether the ``__name__``
attribute of the decorated/wrapped function should be changed or kept.
Defaults to False (i.e., changes the name by default).
Returns
-------
A decorator/wrapper function to be used once on the new strategy to be
added.
Example
-------
Let's create a StrategyDict that knows its name:
>>> txt_proc = StrategyDict("txt_proc")
Add a first strategy ``swapcase``, using this method as a decorator
factory:
>>> @txt_proc.strategy("swapcase")
... def txt_proc(txt):
... return txt.swapcase()
Let's do it again, but wrapping the strategy functions inline. First two
strategies have multiple names, the last keeps the function name, which
would otherwise be replaced by the first given name:
>>> txt_proc.strategy("lower", "low")(lambda txt: txt.lower())
{(...): <function ... at 0x...>, (...): <function ... at 0x...>}
>>> txt_proc.strategy("upper", "up")(lambda txt: txt.upper())
{...}
>>> txt_proc.strategy("keep", keep_name=True)(lambda txt: txt)
{...}
We can now iterate through the strategies to call them or see their
function names
>>> sorted(st("Just a Test") for st in txt_proc)
['JUST A TEST', 'Just a Test', 'jUST A tEST', 'just a test']
>>> sorted(st.__name__ for st in txt_proc) # Just the first name
['<lambda>', 'lower', 'swapcase', 'upper']
Calling a single strategy:
>>> txt_proc.low("TeStInG")
'testing'
>>> txt_proc["upper"]("TeStInG")
'TESTING'
>>> txt_proc("TeStInG") # Default is the first: swapcase
'tEsTiNg'
>>> txt_proc.default("TeStInG")
'tEsTiNg'
>>> txt_proc.default = txt_proc.up # Manually changing the default
>>> txt_proc("TeStInG")
'TESTING'
Hint
----
Default strategy is the one stored as the ``default`` attribute, you can
change or remove it at any time. When removing all keys that are assigned
to the default strategy, the default attribute will be removed from the
StrategyDict instance as well. The first strategy added afterwards is the
one that will become the new default, unless the attribute is created or
changed manually.
]
def function[decorator, parameter[func]]:
variable[keep_name] assign[=] call[name[kwargs].pop, parameter[constant[keep_name], constant[False]]]
if name[kwargs] begin[:]
variable[key] assign[=] call[name[next], parameter[call[name[iter], parameter[name[kwargs]]]]]
<ast.Raise object at 0x7da1b06c5420>
if <ast.UnaryOp object at 0x7da1b06c57e0> begin[:]
name[func].__name__ assign[=] call[name[str], parameter[call[name[names]][constant[0]]]]
call[name[self]][name[names]] assign[=] name[func]
return[name[self]]
return[name[decorator]]
|
keyword[def] identifier[strategy] ( identifier[self] ,* identifier[names] ,** identifier[kwargs] ):
literal[string]
keyword[def] identifier[decorator] ( identifier[func] ):
identifier[keep_name] = identifier[kwargs] . identifier[pop] ( literal[string] , keyword[False] )
keyword[if] identifier[kwargs] :
identifier[key] = identifier[next] ( identifier[iter] ( identifier[kwargs] ))
keyword[raise] identifier[TypeError] ( literal[string] . identifier[format] ( identifier[key] ))
keyword[if] keyword[not] identifier[keep_name] :
identifier[func] . identifier[__name__] = identifier[str] ( identifier[names] [ literal[int] ])
identifier[self] [ identifier[names] ]= identifier[func]
keyword[return] identifier[self]
keyword[return] identifier[decorator]
|
def strategy(self, *names, **kwargs):
"""
StrategyDict wrapping method for adding a new strategy.
Parameters
----------
*names :
Positional arguments with all names (strings) that could be used to
call the strategy to be added, to be used both as key items and as
attribute names.
keep_name :
Boolean keyword-only parameter for choosing whether the ``__name__``
attribute of the decorated/wrapped function should be changed or kept.
Defaults to False (i.e., changes the name by default).
Returns
-------
A decorator/wrapper function to be used once on the new strategy to be
added.
Example
-------
Let's create a StrategyDict that knows its name:
>>> txt_proc = StrategyDict("txt_proc")
Add a first strategy ``swapcase``, using this method as a decorator
factory:
>>> @txt_proc.strategy("swapcase")
... def txt_proc(txt):
... return txt.swapcase()
Let's do it again, but wrapping the strategy functions inline. First two
strategies have multiple names, the last keeps the function name, which
would otherwise be replaced by the first given name:
>>> txt_proc.strategy("lower", "low")(lambda txt: txt.lower())
{(...): <function ... at 0x...>, (...): <function ... at 0x...>}
>>> txt_proc.strategy("upper", "up")(lambda txt: txt.upper())
{...}
>>> txt_proc.strategy("keep", keep_name=True)(lambda txt: txt)
{...}
We can now iterate through the strategies to call them or see their
function names
>>> sorted(st("Just a Test") for st in txt_proc)
['JUST A TEST', 'Just a Test', 'jUST A tEST', 'just a test']
>>> sorted(st.__name__ for st in txt_proc) # Just the first name
['<lambda>', 'lower', 'swapcase', 'upper']
Calling a single strategy:
>>> txt_proc.low("TeStInG")
'testing'
>>> txt_proc["upper"]("TeStInG")
'TESTING'
>>> txt_proc("TeStInG") # Default is the first: swapcase
'tEsTiNg'
>>> txt_proc.default("TeStInG")
'tEsTiNg'
>>> txt_proc.default = txt_proc.up # Manually changing the default
>>> txt_proc("TeStInG")
'TESTING'
Hint
----
Default strategy is the one stored as the ``default`` attribute, you can
change or remove it at any time. When removing all keys that are assigned
to the default strategy, the default attribute will be removed from the
StrategyDict instance as well. The first strategy added afterwards is the
one that will become the new default, unless the attribute is created or
changed manually.
"""
def decorator(func):
keep_name = kwargs.pop('keep_name', False)
if kwargs:
key = next(iter(kwargs))
raise TypeError("Unknown keyword argument '{}'".format(key)) # depends on [control=['if'], data=[]]
if not keep_name:
func.__name__ = str(names[0]) # depends on [control=['if'], data=[]]
self[names] = func
return self
return decorator
|
def save_model(self):
'''
Saves all of the de-trending information to disk in an `npz` file
and saves the DVS as a `pdf`.
'''
# Save the data
log.info("Saving data to '%s.npz'..." % self.name)
d = dict(self.__dict__)
d.pop('_weights', None)
d.pop('_A', None)
d.pop('_B', None)
d.pop('_f', None)
d.pop('_mK', None)
d.pop('K', None)
d.pop('dvs', None)
d.pop('clobber', None)
d.pop('clobber_tpf', None)
d.pop('_mission', None)
d.pop('debug', None)
d.pop('transit_model', None)
d.pop('_transit_model', None)
np.savez(os.path.join(self.dir, self.name + '.npz'), **d)
# Save the DVS
pdf = PdfPages(os.path.join(self.dir, self.name + '.pdf'))
pdf.savefig(self.dvs.fig)
pl.close(self.dvs.fig)
d = pdf.infodict()
d['Title'] = 'EVEREST: %s de-trending of %s %d' % (
self.name, self._mission.IDSTRING, self.ID)
d['Author'] = 'Rodrigo Luger'
pdf.close()
|
def function[save_model, parameter[self]]:
constant[
Saves all of the de-trending information to disk in an `npz` file
and saves the DVS as a `pdf`.
]
call[name[log].info, parameter[binary_operation[constant[Saving data to '%s.npz'...] <ast.Mod object at 0x7da2590d6920> name[self].name]]]
variable[d] assign[=] call[name[dict], parameter[name[self].__dict__]]
call[name[d].pop, parameter[constant[_weights], constant[None]]]
call[name[d].pop, parameter[constant[_A], constant[None]]]
call[name[d].pop, parameter[constant[_B], constant[None]]]
call[name[d].pop, parameter[constant[_f], constant[None]]]
call[name[d].pop, parameter[constant[_mK], constant[None]]]
call[name[d].pop, parameter[constant[K], constant[None]]]
call[name[d].pop, parameter[constant[dvs], constant[None]]]
call[name[d].pop, parameter[constant[clobber], constant[None]]]
call[name[d].pop, parameter[constant[clobber_tpf], constant[None]]]
call[name[d].pop, parameter[constant[_mission], constant[None]]]
call[name[d].pop, parameter[constant[debug], constant[None]]]
call[name[d].pop, parameter[constant[transit_model], constant[None]]]
call[name[d].pop, parameter[constant[_transit_model], constant[None]]]
call[name[np].savez, parameter[call[name[os].path.join, parameter[name[self].dir, binary_operation[name[self].name + constant[.npz]]]]]]
variable[pdf] assign[=] call[name[PdfPages], parameter[call[name[os].path.join, parameter[name[self].dir, binary_operation[name[self].name + constant[.pdf]]]]]]
call[name[pdf].savefig, parameter[name[self].dvs.fig]]
call[name[pl].close, parameter[name[self].dvs.fig]]
variable[d] assign[=] call[name[pdf].infodict, parameter[]]
call[name[d]][constant[Title]] assign[=] binary_operation[constant[EVEREST: %s de-trending of %s %d] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b0e483d0>, <ast.Attribute object at 0x7da1b0e484f0>, <ast.Attribute object at 0x7da1b0e484c0>]]]
call[name[d]][constant[Author]] assign[=] constant[Rodrigo Luger]
call[name[pdf].close, parameter[]]
|
keyword[def] identifier[save_model] ( identifier[self] ):
literal[string]
identifier[log] . identifier[info] ( literal[string] % identifier[self] . identifier[name] )
identifier[d] = identifier[dict] ( identifier[self] . identifier[__dict__] )
identifier[d] . identifier[pop] ( literal[string] , keyword[None] )
identifier[d] . identifier[pop] ( literal[string] , keyword[None] )
identifier[d] . identifier[pop] ( literal[string] , keyword[None] )
identifier[d] . identifier[pop] ( literal[string] , keyword[None] )
identifier[d] . identifier[pop] ( literal[string] , keyword[None] )
identifier[d] . identifier[pop] ( literal[string] , keyword[None] )
identifier[d] . identifier[pop] ( literal[string] , keyword[None] )
identifier[d] . identifier[pop] ( literal[string] , keyword[None] )
identifier[d] . identifier[pop] ( literal[string] , keyword[None] )
identifier[d] . identifier[pop] ( literal[string] , keyword[None] )
identifier[d] . identifier[pop] ( literal[string] , keyword[None] )
identifier[d] . identifier[pop] ( literal[string] , keyword[None] )
identifier[d] . identifier[pop] ( literal[string] , keyword[None] )
identifier[np] . identifier[savez] ( identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[dir] , identifier[self] . identifier[name] + literal[string] ),** identifier[d] )
identifier[pdf] = identifier[PdfPages] ( identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[dir] , identifier[self] . identifier[name] + literal[string] ))
identifier[pdf] . identifier[savefig] ( identifier[self] . identifier[dvs] . identifier[fig] )
identifier[pl] . identifier[close] ( identifier[self] . identifier[dvs] . identifier[fig] )
identifier[d] = identifier[pdf] . identifier[infodict] ()
identifier[d] [ literal[string] ]= literal[string] %(
identifier[self] . identifier[name] , identifier[self] . identifier[_mission] . identifier[IDSTRING] , identifier[self] . identifier[ID] )
identifier[d] [ literal[string] ]= literal[string]
identifier[pdf] . identifier[close] ()
|
def save_model(self):
"""
Saves all of the de-trending information to disk in an `npz` file
and saves the DVS as a `pdf`.
"""
# Save the data
log.info("Saving data to '%s.npz'..." % self.name)
d = dict(self.__dict__)
d.pop('_weights', None)
d.pop('_A', None)
d.pop('_B', None)
d.pop('_f', None)
d.pop('_mK', None)
d.pop('K', None)
d.pop('dvs', None)
d.pop('clobber', None)
d.pop('clobber_tpf', None)
d.pop('_mission', None)
d.pop('debug', None)
d.pop('transit_model', None)
d.pop('_transit_model', None)
np.savez(os.path.join(self.dir, self.name + '.npz'), **d)
# Save the DVS
pdf = PdfPages(os.path.join(self.dir, self.name + '.pdf'))
pdf.savefig(self.dvs.fig)
pl.close(self.dvs.fig)
d = pdf.infodict()
d['Title'] = 'EVEREST: %s de-trending of %s %d' % (self.name, self._mission.IDSTRING, self.ID)
d['Author'] = 'Rodrigo Luger'
pdf.close()
|
def get_browser_datetime(webdriver):
"""
Get the current date/time on the web browser as a Python datetime object.
This date matches 'new Date();' when ran in JavaScript console.
Args:
webdriver: Selenium WebDriver instance
Returns:
datetime - Python datetime object.
Usage::
browser_datetime = WebUtils.get_browser_datetime(driver)
local_datetime = datetime.now()
print("Difference time difference between browser and your local machine is:",
local_datetime - browser_datetime)
"""
js_stmt = """
var wtf_get_date = new Date();
return {'month':wtf_get_date.getMonth(),
'day':wtf_get_date.getDate(),
'year':wtf_get_date.getFullYear(),
'hours':wtf_get_date.getHours(),
'minutes':wtf_get_date.getMinutes(),
'seconds':wtf_get_date.getSeconds(),
'milliseconds':wtf_get_date.getMilliseconds()};
"""
browser_date = webdriver.execute_script(js_stmt)
return datetime(int(browser_date['year']),
int(browser_date['month']) + 1, # javascript months start at 0
int(browser_date['day']),
int(browser_date['hours']),
int(browser_date['minutes']),
int(browser_date['seconds']),
int(browser_date['milliseconds']))
|
def function[get_browser_datetime, parameter[webdriver]]:
constant[
Get the current date/time on the web browser as a Python datetime object.
This date matches 'new Date();' when ran in JavaScript console.
Args:
webdriver: Selenium WebDriver instance
Returns:
datetime - Python datetime object.
Usage::
browser_datetime = WebUtils.get_browser_datetime(driver)
local_datetime = datetime.now()
print("Difference time difference between browser and your local machine is:",
local_datetime - browser_datetime)
]
variable[js_stmt] assign[=] constant[
var wtf_get_date = new Date();
return {'month':wtf_get_date.getMonth(),
'day':wtf_get_date.getDate(),
'year':wtf_get_date.getFullYear(),
'hours':wtf_get_date.getHours(),
'minutes':wtf_get_date.getMinutes(),
'seconds':wtf_get_date.getSeconds(),
'milliseconds':wtf_get_date.getMilliseconds()};
]
variable[browser_date] assign[=] call[name[webdriver].execute_script, parameter[name[js_stmt]]]
return[call[name[datetime], parameter[call[name[int], parameter[call[name[browser_date]][constant[year]]]], binary_operation[call[name[int], parameter[call[name[browser_date]][constant[month]]]] + constant[1]], call[name[int], parameter[call[name[browser_date]][constant[day]]]], call[name[int], parameter[call[name[browser_date]][constant[hours]]]], call[name[int], parameter[call[name[browser_date]][constant[minutes]]]], call[name[int], parameter[call[name[browser_date]][constant[seconds]]]], call[name[int], parameter[call[name[browser_date]][constant[milliseconds]]]]]]]
|
keyword[def] identifier[get_browser_datetime] ( identifier[webdriver] ):
literal[string]
identifier[js_stmt] = literal[string]
identifier[browser_date] = identifier[webdriver] . identifier[execute_script] ( identifier[js_stmt] )
keyword[return] identifier[datetime] ( identifier[int] ( identifier[browser_date] [ literal[string] ]),
identifier[int] ( identifier[browser_date] [ literal[string] ])+ literal[int] ,
identifier[int] ( identifier[browser_date] [ literal[string] ]),
identifier[int] ( identifier[browser_date] [ literal[string] ]),
identifier[int] ( identifier[browser_date] [ literal[string] ]),
identifier[int] ( identifier[browser_date] [ literal[string] ]),
identifier[int] ( identifier[browser_date] [ literal[string] ]))
|
def get_browser_datetime(webdriver):
"""
Get the current date/time on the web browser as a Python datetime object.
This date matches 'new Date();' when ran in JavaScript console.
Args:
webdriver: Selenium WebDriver instance
Returns:
datetime - Python datetime object.
Usage::
browser_datetime = WebUtils.get_browser_datetime(driver)
local_datetime = datetime.now()
print("Difference time difference between browser and your local machine is:",
local_datetime - browser_datetime)
"""
js_stmt = "\n var wtf_get_date = new Date();\n return {'month':wtf_get_date.getMonth(), \n 'day':wtf_get_date.getDate(), \n 'year':wtf_get_date.getFullYear(),\n 'hours':wtf_get_date.getHours(),\n 'minutes':wtf_get_date.getMinutes(),\n 'seconds':wtf_get_date.getSeconds(),\n 'milliseconds':wtf_get_date.getMilliseconds()};\n "
browser_date = webdriver.execute_script(js_stmt) # javascript months start at 0
return datetime(int(browser_date['year']), int(browser_date['month']) + 1, int(browser_date['day']), int(browser_date['hours']), int(browser_date['minutes']), int(browser_date['seconds']), int(browser_date['milliseconds']))
|
def _writeResponse(self, response):
"""
Serializes the response to JSON, and writes it to the transport.
"""
encoded = dumps(response, default=_default)
self.transport.write(encoded)
|
def function[_writeResponse, parameter[self, response]]:
constant[
Serializes the response to JSON, and writes it to the transport.
]
variable[encoded] assign[=] call[name[dumps], parameter[name[response]]]
call[name[self].transport.write, parameter[name[encoded]]]
|
keyword[def] identifier[_writeResponse] ( identifier[self] , identifier[response] ):
literal[string]
identifier[encoded] = identifier[dumps] ( identifier[response] , identifier[default] = identifier[_default] )
identifier[self] . identifier[transport] . identifier[write] ( identifier[encoded] )
|
def _writeResponse(self, response):
"""
Serializes the response to JSON, and writes it to the transport.
"""
encoded = dumps(response, default=_default)
self.transport.write(encoded)
|
def brier_score_components(self):
"""
Calculate the components of the Brier score decomposition: reliability, resolution, and uncertainty.
"""
rel_curve = self.reliability_curve()
total = self.frequencies["Total_Freq"].sum()
climo_freq = float(self.frequencies["Positive_Freq"].sum()) / self.frequencies["Total_Freq"].sum()
reliability = np.sum(self.frequencies["Total_Freq"] * (rel_curve["Bin_Start"] -
rel_curve["Positive_Relative_Freq"]) ** 2) / total
resolution = np.sum(self.frequencies["Total_Freq"] * (rel_curve["Positive_Relative_Freq"] - climo_freq) ** 2) \
/ total
uncertainty = climo_freq * (1 - climo_freq)
return reliability, resolution, uncertainty
|
def function[brier_score_components, parameter[self]]:
constant[
Calculate the components of the Brier score decomposition: reliability, resolution, and uncertainty.
]
variable[rel_curve] assign[=] call[name[self].reliability_curve, parameter[]]
variable[total] assign[=] call[call[name[self].frequencies][constant[Total_Freq]].sum, parameter[]]
variable[climo_freq] assign[=] binary_operation[call[name[float], parameter[call[call[name[self].frequencies][constant[Positive_Freq]].sum, parameter[]]]] / call[call[name[self].frequencies][constant[Total_Freq]].sum, parameter[]]]
variable[reliability] assign[=] binary_operation[call[name[np].sum, parameter[binary_operation[call[name[self].frequencies][constant[Total_Freq]] * binary_operation[binary_operation[call[name[rel_curve]][constant[Bin_Start]] - call[name[rel_curve]][constant[Positive_Relative_Freq]]] ** constant[2]]]]] / name[total]]
variable[resolution] assign[=] binary_operation[call[name[np].sum, parameter[binary_operation[call[name[self].frequencies][constant[Total_Freq]] * binary_operation[binary_operation[call[name[rel_curve]][constant[Positive_Relative_Freq]] - name[climo_freq]] ** constant[2]]]]] / name[total]]
variable[uncertainty] assign[=] binary_operation[name[climo_freq] * binary_operation[constant[1] - name[climo_freq]]]
return[tuple[[<ast.Name object at 0x7da18f8131f0>, <ast.Name object at 0x7da18f812aa0>, <ast.Name object at 0x7da18f811630>]]]
|
keyword[def] identifier[brier_score_components] ( identifier[self] ):
literal[string]
identifier[rel_curve] = identifier[self] . identifier[reliability_curve] ()
identifier[total] = identifier[self] . identifier[frequencies] [ literal[string] ]. identifier[sum] ()
identifier[climo_freq] = identifier[float] ( identifier[self] . identifier[frequencies] [ literal[string] ]. identifier[sum] ())/ identifier[self] . identifier[frequencies] [ literal[string] ]. identifier[sum] ()
identifier[reliability] = identifier[np] . identifier[sum] ( identifier[self] . identifier[frequencies] [ literal[string] ]*( identifier[rel_curve] [ literal[string] ]-
identifier[rel_curve] [ literal[string] ])** literal[int] )/ identifier[total]
identifier[resolution] = identifier[np] . identifier[sum] ( identifier[self] . identifier[frequencies] [ literal[string] ]*( identifier[rel_curve] [ literal[string] ]- identifier[climo_freq] )** literal[int] )/ identifier[total]
identifier[uncertainty] = identifier[climo_freq] *( literal[int] - identifier[climo_freq] )
keyword[return] identifier[reliability] , identifier[resolution] , identifier[uncertainty]
|
def brier_score_components(self):
"""
Calculate the components of the Brier score decomposition: reliability, resolution, and uncertainty.
"""
rel_curve = self.reliability_curve()
total = self.frequencies['Total_Freq'].sum()
climo_freq = float(self.frequencies['Positive_Freq'].sum()) / self.frequencies['Total_Freq'].sum()
reliability = np.sum(self.frequencies['Total_Freq'] * (rel_curve['Bin_Start'] - rel_curve['Positive_Relative_Freq']) ** 2) / total
resolution = np.sum(self.frequencies['Total_Freq'] * (rel_curve['Positive_Relative_Freq'] - climo_freq) ** 2) / total
uncertainty = climo_freq * (1 - climo_freq)
return (reliability, resolution, uncertainty)
|
def context_exclude(zap_helper, name, pattern):
"""Exclude a pattern from a given context."""
console.info('Excluding regex {0} from context with name: {1}'.format(pattern, name))
with zap_error_handler():
result = zap_helper.zap.context.exclude_from_context(contextname=name, regex=pattern)
if result != 'OK':
raise ZAPError('Excluding regex from context failed: {}'.format(result))
|
def function[context_exclude, parameter[zap_helper, name, pattern]]:
constant[Exclude a pattern from a given context.]
call[name[console].info, parameter[call[constant[Excluding regex {0} from context with name: {1}].format, parameter[name[pattern], name[name]]]]]
with call[name[zap_error_handler], parameter[]] begin[:]
variable[result] assign[=] call[name[zap_helper].zap.context.exclude_from_context, parameter[]]
if compare[name[result] not_equal[!=] constant[OK]] begin[:]
<ast.Raise object at 0x7da20e9b3340>
|
keyword[def] identifier[context_exclude] ( identifier[zap_helper] , identifier[name] , identifier[pattern] ):
literal[string]
identifier[console] . identifier[info] ( literal[string] . identifier[format] ( identifier[pattern] , identifier[name] ))
keyword[with] identifier[zap_error_handler] ():
identifier[result] = identifier[zap_helper] . identifier[zap] . identifier[context] . identifier[exclude_from_context] ( identifier[contextname] = identifier[name] , identifier[regex] = identifier[pattern] )
keyword[if] identifier[result] != literal[string] :
keyword[raise] identifier[ZAPError] ( literal[string] . identifier[format] ( identifier[result] ))
|
def context_exclude(zap_helper, name, pattern):
"""Exclude a pattern from a given context."""
console.info('Excluding regex {0} from context with name: {1}'.format(pattern, name))
with zap_error_handler():
result = zap_helper.zap.context.exclude_from_context(contextname=name, regex=pattern)
if result != 'OK':
raise ZAPError('Excluding regex from context failed: {}'.format(result)) # depends on [control=['if'], data=['result']] # depends on [control=['with'], data=[]]
|
def relpath(dataset_uri, item_identifier):
"""Return relpath associated with the item.
"""
dataset = dtoolcore.DataSet.from_uri(dataset_uri)
try:
props = dataset.item_properties(item_identifier)
except KeyError:
click.secho(
"No such item in dataset: {}".format(item_identifier),
fg="red",
err=True
)
sys.exit(21)
click.secho(props["relpath"])
|
def function[relpath, parameter[dataset_uri, item_identifier]]:
constant[Return relpath associated with the item.
]
variable[dataset] assign[=] call[name[dtoolcore].DataSet.from_uri, parameter[name[dataset_uri]]]
<ast.Try object at 0x7da1b13348e0>
call[name[click].secho, parameter[call[name[props]][constant[relpath]]]]
|
keyword[def] identifier[relpath] ( identifier[dataset_uri] , identifier[item_identifier] ):
literal[string]
identifier[dataset] = identifier[dtoolcore] . identifier[DataSet] . identifier[from_uri] ( identifier[dataset_uri] )
keyword[try] :
identifier[props] = identifier[dataset] . identifier[item_properties] ( identifier[item_identifier] )
keyword[except] identifier[KeyError] :
identifier[click] . identifier[secho] (
literal[string] . identifier[format] ( identifier[item_identifier] ),
identifier[fg] = literal[string] ,
identifier[err] = keyword[True]
)
identifier[sys] . identifier[exit] ( literal[int] )
identifier[click] . identifier[secho] ( identifier[props] [ literal[string] ])
|
def relpath(dataset_uri, item_identifier):
"""Return relpath associated with the item.
"""
dataset = dtoolcore.DataSet.from_uri(dataset_uri)
try:
props = dataset.item_properties(item_identifier) # depends on [control=['try'], data=[]]
except KeyError:
click.secho('No such item in dataset: {}'.format(item_identifier), fg='red', err=True)
sys.exit(21) # depends on [control=['except'], data=[]]
click.secho(props['relpath'])
|
def count_invalid_entries(self):
"""Sum of all invalid entry counts from cache levels."""
return sum([c.count_invalid_entries() for c in self.levels(with_mem=False)])
|
def function[count_invalid_entries, parameter[self]]:
constant[Sum of all invalid entry counts from cache levels.]
return[call[name[sum], parameter[<ast.ListComp object at 0x7da20c993010>]]]
|
keyword[def] identifier[count_invalid_entries] ( identifier[self] ):
literal[string]
keyword[return] identifier[sum] ([ identifier[c] . identifier[count_invalid_entries] () keyword[for] identifier[c] keyword[in] identifier[self] . identifier[levels] ( identifier[with_mem] = keyword[False] )])
|
def count_invalid_entries(self):
"""Sum of all invalid entry counts from cache levels."""
return sum([c.count_invalid_entries() for c in self.levels(with_mem=False)])
|
def socket_close(self):
"""Close our socket."""
if self.sock != NC.INVALID_SOCKET:
self.sock.close()
self.sock = NC.INVALID_SOCKET
|
def function[socket_close, parameter[self]]:
constant[Close our socket.]
if compare[name[self].sock not_equal[!=] name[NC].INVALID_SOCKET] begin[:]
call[name[self].sock.close, parameter[]]
name[self].sock assign[=] name[NC].INVALID_SOCKET
|
keyword[def] identifier[socket_close] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[sock] != identifier[NC] . identifier[INVALID_SOCKET] :
identifier[self] . identifier[sock] . identifier[close] ()
identifier[self] . identifier[sock] = identifier[NC] . identifier[INVALID_SOCKET]
|
def socket_close(self):
"""Close our socket."""
if self.sock != NC.INVALID_SOCKET:
self.sock.close() # depends on [control=['if'], data=[]]
self.sock = NC.INVALID_SOCKET
|
def filter_by(self, text=(), types=(), units=(), include_unset=False):
"""Return subset of values which match the given text, types and/or units. For a value to be matched, at least
one item from each specified filter category has to apply to a value. Each of the categories must be specified
as a sequence of strings. If `include_unset` is set, unset values will also be considered."""
if not (isinstance(text, Sequence) and all(isinstance(phrase, string_types) for phrase in text)):
raise TypeError('text should be sequence of strings')
values = ([self.__values[name] for name in self.__filter.filter_by(types=types, units=units)
if include_unset or not self.__values[name].unset]
if types or units else self.__values)
if text:
# avoid unexpected search by individual characters if a single string was specified
if isinstance(text, string_types):
text = (ensure_unicode(text),)
text = [phrase.lower() for phrase in text]
new_values = []
for value in values:
label = value.label.lower()
description = value.description.lower() if value.description else ''
if any(phrase in label or (description and phrase in description) for phrase in text):
new_values.append(value)
values = new_values
return values
|
def function[filter_by, parameter[self, text, types, units, include_unset]]:
constant[Return subset of values which match the given text, types and/or units. For a value to be matched, at least
one item from each specified filter category has to apply to a value. Each of the categories must be specified
as a sequence of strings. If `include_unset` is set, unset values will also be considered.]
if <ast.UnaryOp object at 0x7da1b1be63e0> begin[:]
<ast.Raise object at 0x7da1b1b7c7f0>
variable[values] assign[=] <ast.IfExp object at 0x7da1b1b7c790>
if name[text] begin[:]
if call[name[isinstance], parameter[name[text], name[string_types]]] begin[:]
variable[text] assign[=] tuple[[<ast.Call object at 0x7da1b1b37d00>]]
variable[text] assign[=] <ast.ListComp object at 0x7da1b1b36890>
variable[new_values] assign[=] list[[]]
for taget[name[value]] in starred[name[values]] begin[:]
variable[label] assign[=] call[name[value].label.lower, parameter[]]
variable[description] assign[=] <ast.IfExp object at 0x7da1b1b36560>
if call[name[any], parameter[<ast.GeneratorExp object at 0x7da1b1b373a0>]] begin[:]
call[name[new_values].append, parameter[name[value]]]
variable[values] assign[=] name[new_values]
return[name[values]]
|
keyword[def] identifier[filter_by] ( identifier[self] , identifier[text] =(), identifier[types] =(), identifier[units] =(), identifier[include_unset] = keyword[False] ):
literal[string]
keyword[if] keyword[not] ( identifier[isinstance] ( identifier[text] , identifier[Sequence] ) keyword[and] identifier[all] ( identifier[isinstance] ( identifier[phrase] , identifier[string_types] ) keyword[for] identifier[phrase] keyword[in] identifier[text] )):
keyword[raise] identifier[TypeError] ( literal[string] )
identifier[values] =([ identifier[self] . identifier[__values] [ identifier[name] ] keyword[for] identifier[name] keyword[in] identifier[self] . identifier[__filter] . identifier[filter_by] ( identifier[types] = identifier[types] , identifier[units] = identifier[units] )
keyword[if] identifier[include_unset] keyword[or] keyword[not] identifier[self] . identifier[__values] [ identifier[name] ]. identifier[unset] ]
keyword[if] identifier[types] keyword[or] identifier[units] keyword[else] identifier[self] . identifier[__values] )
keyword[if] identifier[text] :
keyword[if] identifier[isinstance] ( identifier[text] , identifier[string_types] ):
identifier[text] =( identifier[ensure_unicode] ( identifier[text] ),)
identifier[text] =[ identifier[phrase] . identifier[lower] () keyword[for] identifier[phrase] keyword[in] identifier[text] ]
identifier[new_values] =[]
keyword[for] identifier[value] keyword[in] identifier[values] :
identifier[label] = identifier[value] . identifier[label] . identifier[lower] ()
identifier[description] = identifier[value] . identifier[description] . identifier[lower] () keyword[if] identifier[value] . identifier[description] keyword[else] literal[string]
keyword[if] identifier[any] ( identifier[phrase] keyword[in] identifier[label] keyword[or] ( identifier[description] keyword[and] identifier[phrase] keyword[in] identifier[description] ) keyword[for] identifier[phrase] keyword[in] identifier[text] ):
identifier[new_values] . identifier[append] ( identifier[value] )
identifier[values] = identifier[new_values]
keyword[return] identifier[values]
|
def filter_by(self, text=(), types=(), units=(), include_unset=False):
"""Return subset of values which match the given text, types and/or units. For a value to be matched, at least
one item from each specified filter category has to apply to a value. Each of the categories must be specified
as a sequence of strings. If `include_unset` is set, unset values will also be considered."""
if not (isinstance(text, Sequence) and all((isinstance(phrase, string_types) for phrase in text))):
raise TypeError('text should be sequence of strings') # depends on [control=['if'], data=[]]
values = [self.__values[name] for name in self.__filter.filter_by(types=types, units=units) if include_unset or not self.__values[name].unset] if types or units else self.__values
if text:
# avoid unexpected search by individual characters if a single string was specified
if isinstance(text, string_types):
text = (ensure_unicode(text),) # depends on [control=['if'], data=[]]
text = [phrase.lower() for phrase in text]
new_values = []
for value in values:
label = value.label.lower()
description = value.description.lower() if value.description else ''
if any((phrase in label or (description and phrase in description) for phrase in text)):
new_values.append(value) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['value']]
values = new_values # depends on [control=['if'], data=[]]
return values
|
def customProperties(self):
"""Document custom properties added by the document author.
We canot convert the properties as indicated
with the http://schemas.openxmlformats.org/officeDocument/2006/docPropsVTypes
namespace
:return: mapping of metadata
"""
rval = {}
if len(self.content_types.getPathsForContentType(contenttypes.CT_CUSTOM_PROPS)) == 0:
# We may have no custom properties at all.
return rval
XPath = lxml.etree.XPath # Class shortcut
properties_xpath = XPath('custom-properties:property', namespaces=ns_map)
propname_xpath = XPath('@name')
propvalue_xpath = XPath('*/text()')
for tree in self.content_types.getTreesFor(self, contenttypes.CT_CUSTOM_PROPS):
for elt in properties_xpath(tree.getroot()):
rval[toUnicode(propname_xpath(elt)[0])] = u" ".join(propvalue_xpath(elt))
return rval
|
def function[customProperties, parameter[self]]:
constant[Document custom properties added by the document author.
We canot convert the properties as indicated
with the http://schemas.openxmlformats.org/officeDocument/2006/docPropsVTypes
namespace
:return: mapping of metadata
]
variable[rval] assign[=] dictionary[[], []]
if compare[call[name[len], parameter[call[name[self].content_types.getPathsForContentType, parameter[name[contenttypes].CT_CUSTOM_PROPS]]]] equal[==] constant[0]] begin[:]
return[name[rval]]
variable[XPath] assign[=] name[lxml].etree.XPath
variable[properties_xpath] assign[=] call[name[XPath], parameter[constant[custom-properties:property]]]
variable[propname_xpath] assign[=] call[name[XPath], parameter[constant[@name]]]
variable[propvalue_xpath] assign[=] call[name[XPath], parameter[constant[*/text()]]]
for taget[name[tree]] in starred[call[name[self].content_types.getTreesFor, parameter[name[self], name[contenttypes].CT_CUSTOM_PROPS]]] begin[:]
for taget[name[elt]] in starred[call[name[properties_xpath], parameter[call[name[tree].getroot, parameter[]]]]] begin[:]
call[name[rval]][call[name[toUnicode], parameter[call[call[name[propname_xpath], parameter[name[elt]]]][constant[0]]]]] assign[=] call[constant[ ].join, parameter[call[name[propvalue_xpath], parameter[name[elt]]]]]
return[name[rval]]
|
keyword[def] identifier[customProperties] ( identifier[self] ):
literal[string]
identifier[rval] ={}
keyword[if] identifier[len] ( identifier[self] . identifier[content_types] . identifier[getPathsForContentType] ( identifier[contenttypes] . identifier[CT_CUSTOM_PROPS] ))== literal[int] :
keyword[return] identifier[rval]
identifier[XPath] = identifier[lxml] . identifier[etree] . identifier[XPath]
identifier[properties_xpath] = identifier[XPath] ( literal[string] , identifier[namespaces] = identifier[ns_map] )
identifier[propname_xpath] = identifier[XPath] ( literal[string] )
identifier[propvalue_xpath] = identifier[XPath] ( literal[string] )
keyword[for] identifier[tree] keyword[in] identifier[self] . identifier[content_types] . identifier[getTreesFor] ( identifier[self] , identifier[contenttypes] . identifier[CT_CUSTOM_PROPS] ):
keyword[for] identifier[elt] keyword[in] identifier[properties_xpath] ( identifier[tree] . identifier[getroot] ()):
identifier[rval] [ identifier[toUnicode] ( identifier[propname_xpath] ( identifier[elt] )[ literal[int] ])]= literal[string] . identifier[join] ( identifier[propvalue_xpath] ( identifier[elt] ))
keyword[return] identifier[rval]
|
def customProperties(self):
"""Document custom properties added by the document author.
We canot convert the properties as indicated
with the http://schemas.openxmlformats.org/officeDocument/2006/docPropsVTypes
namespace
:return: mapping of metadata
"""
rval = {}
if len(self.content_types.getPathsForContentType(contenttypes.CT_CUSTOM_PROPS)) == 0:
# We may have no custom properties at all.
return rval # depends on [control=['if'], data=[]]
XPath = lxml.etree.XPath # Class shortcut
properties_xpath = XPath('custom-properties:property', namespaces=ns_map)
propname_xpath = XPath('@name')
propvalue_xpath = XPath('*/text()')
for tree in self.content_types.getTreesFor(self, contenttypes.CT_CUSTOM_PROPS):
for elt in properties_xpath(tree.getroot()):
rval[toUnicode(propname_xpath(elt)[0])] = u' '.join(propvalue_xpath(elt)) # depends on [control=['for'], data=['elt']] # depends on [control=['for'], data=['tree']]
return rval
|
def rollback(self):
"""Roll back a transaction on the database."""
self._check_state()
database = self._session._database
api = database.spanner_api
metadata = _metadata_with_prefix(database.name)
api.rollback(self._session.name, self._transaction_id, metadata=metadata)
self._rolled_back = True
del self._session._transaction
|
def function[rollback, parameter[self]]:
constant[Roll back a transaction on the database.]
call[name[self]._check_state, parameter[]]
variable[database] assign[=] name[self]._session._database
variable[api] assign[=] name[database].spanner_api
variable[metadata] assign[=] call[name[_metadata_with_prefix], parameter[name[database].name]]
call[name[api].rollback, parameter[name[self]._session.name, name[self]._transaction_id]]
name[self]._rolled_back assign[=] constant[True]
<ast.Delete object at 0x7da20e9571c0>
|
keyword[def] identifier[rollback] ( identifier[self] ):
literal[string]
identifier[self] . identifier[_check_state] ()
identifier[database] = identifier[self] . identifier[_session] . identifier[_database]
identifier[api] = identifier[database] . identifier[spanner_api]
identifier[metadata] = identifier[_metadata_with_prefix] ( identifier[database] . identifier[name] )
identifier[api] . identifier[rollback] ( identifier[self] . identifier[_session] . identifier[name] , identifier[self] . identifier[_transaction_id] , identifier[metadata] = identifier[metadata] )
identifier[self] . identifier[_rolled_back] = keyword[True]
keyword[del] identifier[self] . identifier[_session] . identifier[_transaction]
|
def rollback(self):
"""Roll back a transaction on the database."""
self._check_state()
database = self._session._database
api = database.spanner_api
metadata = _metadata_with_prefix(database.name)
api.rollback(self._session.name, self._transaction_id, metadata=metadata)
self._rolled_back = True
del self._session._transaction
|
def tangent_only_intersections(all_types):
"""Determine intersection in the case of only-tangent intersections.
If the only intersections are tangencies, then either the surfaces
are tangent but don't meet ("kissing" edges) or one surface is
internally tangent to the other.
Thus we expect every intersection to be classified as
:attr:`~.IntersectionClassification.TANGENT_FIRST`,
:attr:`~.IntersectionClassification.TANGENT_SECOND`,
:attr:`~.IntersectionClassification.OPPOSED`,
:attr:`~.IntersectionClassification.IGNORED_CORNER` or
:attr:`~.IntersectionClassification.COINCIDENT_UNUSED`.
What's more, we expect all intersections to be classified the same for
a given pairing.
Args:
all_types (Set[.IntersectionClassification]): The set of all
intersection classifications encountered among the intersections
for the given surface-surface pair.
Returns:
Tuple[Optional[list], Optional[bool]]: Pair (2-tuple) of
* Edges info list; will be empty or :data:`None`
* "Contained" boolean. If not :data:`None`, indicates
that one of the surfaces is contained in the other.
Raises:
ValueError: If there are intersections of more than one type among
:attr:`~.IntersectionClassification.TANGENT_FIRST`,
:attr:`~.IntersectionClassification.TANGENT_SECOND`,
:attr:`~.IntersectionClassification.OPPOSED`,
:attr:`~.IntersectionClassification.IGNORED_CORNER` or
:attr:`~.IntersectionClassification.COINCIDENT_UNUSED`.
ValueError: If there is a unique classification, but it isn't one
of the tangent types.
"""
if len(all_types) != 1:
raise ValueError("Unexpected value, types should all match", all_types)
point_type = all_types.pop()
if point_type == CLASSIFICATION_T.OPPOSED:
return [], None
elif point_type == CLASSIFICATION_T.IGNORED_CORNER:
return [], None
elif point_type == CLASSIFICATION_T.TANGENT_FIRST:
return None, True
elif point_type == CLASSIFICATION_T.TANGENT_SECOND:
return None, False
elif point_type == CLASSIFICATION_T.COINCIDENT_UNUSED:
return [], None
else:
raise ValueError("Point type not for tangency", point_type)
|
def function[tangent_only_intersections, parameter[all_types]]:
constant[Determine intersection in the case of only-tangent intersections.
If the only intersections are tangencies, then either the surfaces
are tangent but don't meet ("kissing" edges) or one surface is
internally tangent to the other.
Thus we expect every intersection to be classified as
:attr:`~.IntersectionClassification.TANGENT_FIRST`,
:attr:`~.IntersectionClassification.TANGENT_SECOND`,
:attr:`~.IntersectionClassification.OPPOSED`,
:attr:`~.IntersectionClassification.IGNORED_CORNER` or
:attr:`~.IntersectionClassification.COINCIDENT_UNUSED`.
What's more, we expect all intersections to be classified the same for
a given pairing.
Args:
all_types (Set[.IntersectionClassification]): The set of all
intersection classifications encountered among the intersections
for the given surface-surface pair.
Returns:
Tuple[Optional[list], Optional[bool]]: Pair (2-tuple) of
* Edges info list; will be empty or :data:`None`
* "Contained" boolean. If not :data:`None`, indicates
that one of the surfaces is contained in the other.
Raises:
ValueError: If there are intersections of more than one type among
:attr:`~.IntersectionClassification.TANGENT_FIRST`,
:attr:`~.IntersectionClassification.TANGENT_SECOND`,
:attr:`~.IntersectionClassification.OPPOSED`,
:attr:`~.IntersectionClassification.IGNORED_CORNER` or
:attr:`~.IntersectionClassification.COINCIDENT_UNUSED`.
ValueError: If there is a unique classification, but it isn't one
of the tangent types.
]
if compare[call[name[len], parameter[name[all_types]]] not_equal[!=] constant[1]] begin[:]
<ast.Raise object at 0x7da18eb54f70>
variable[point_type] assign[=] call[name[all_types].pop, parameter[]]
if compare[name[point_type] equal[==] name[CLASSIFICATION_T].OPPOSED] begin[:]
return[tuple[[<ast.List object at 0x7da18eb55300>, <ast.Constant object at 0x7da18eb54400>]]]
|
keyword[def] identifier[tangent_only_intersections] ( identifier[all_types] ):
literal[string]
keyword[if] identifier[len] ( identifier[all_types] )!= literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] , identifier[all_types] )
identifier[point_type] = identifier[all_types] . identifier[pop] ()
keyword[if] identifier[point_type] == identifier[CLASSIFICATION_T] . identifier[OPPOSED] :
keyword[return] [], keyword[None]
keyword[elif] identifier[point_type] == identifier[CLASSIFICATION_T] . identifier[IGNORED_CORNER] :
keyword[return] [], keyword[None]
keyword[elif] identifier[point_type] == identifier[CLASSIFICATION_T] . identifier[TANGENT_FIRST] :
keyword[return] keyword[None] , keyword[True]
keyword[elif] identifier[point_type] == identifier[CLASSIFICATION_T] . identifier[TANGENT_SECOND] :
keyword[return] keyword[None] , keyword[False]
keyword[elif] identifier[point_type] == identifier[CLASSIFICATION_T] . identifier[COINCIDENT_UNUSED] :
keyword[return] [], keyword[None]
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] , identifier[point_type] )
|
def tangent_only_intersections(all_types):
"""Determine intersection in the case of only-tangent intersections.
If the only intersections are tangencies, then either the surfaces
are tangent but don't meet ("kissing" edges) or one surface is
internally tangent to the other.
Thus we expect every intersection to be classified as
:attr:`~.IntersectionClassification.TANGENT_FIRST`,
:attr:`~.IntersectionClassification.TANGENT_SECOND`,
:attr:`~.IntersectionClassification.OPPOSED`,
:attr:`~.IntersectionClassification.IGNORED_CORNER` or
:attr:`~.IntersectionClassification.COINCIDENT_UNUSED`.
What's more, we expect all intersections to be classified the same for
a given pairing.
Args:
all_types (Set[.IntersectionClassification]): The set of all
intersection classifications encountered among the intersections
for the given surface-surface pair.
Returns:
Tuple[Optional[list], Optional[bool]]: Pair (2-tuple) of
* Edges info list; will be empty or :data:`None`
* "Contained" boolean. If not :data:`None`, indicates
that one of the surfaces is contained in the other.
Raises:
ValueError: If there are intersections of more than one type among
:attr:`~.IntersectionClassification.TANGENT_FIRST`,
:attr:`~.IntersectionClassification.TANGENT_SECOND`,
:attr:`~.IntersectionClassification.OPPOSED`,
:attr:`~.IntersectionClassification.IGNORED_CORNER` or
:attr:`~.IntersectionClassification.COINCIDENT_UNUSED`.
ValueError: If there is a unique classification, but it isn't one
of the tangent types.
"""
if len(all_types) != 1:
raise ValueError('Unexpected value, types should all match', all_types) # depends on [control=['if'], data=[]]
point_type = all_types.pop()
if point_type == CLASSIFICATION_T.OPPOSED:
return ([], None) # depends on [control=['if'], data=[]]
elif point_type == CLASSIFICATION_T.IGNORED_CORNER:
return ([], None) # depends on [control=['if'], data=[]]
elif point_type == CLASSIFICATION_T.TANGENT_FIRST:
return (None, True) # depends on [control=['if'], data=[]]
elif point_type == CLASSIFICATION_T.TANGENT_SECOND:
return (None, False) # depends on [control=['if'], data=[]]
elif point_type == CLASSIFICATION_T.COINCIDENT_UNUSED:
return ([], None) # depends on [control=['if'], data=[]]
else:
raise ValueError('Point type not for tangency', point_type)
|
def aliased_as(self, name):
"""
Create an alias of this stream.
Returns an alias of this stream with name `name`.
When invocation of an SPL operator requires an
:py:class:`~streamsx.spl.op.Expression` against
an input port this can be used to ensure expression
matches the input port alias regardless of the name
of the actual stream.
Example use where the filter expression for a ``Filter`` SPL operator
uses ``IN`` to access input tuple attribute ``seq``::
s = ...
s = s.aliased_as('IN')
params = {'filter': op.Expression.expression('IN.seq % 4ul == 0ul')}
f = op.Map('spl.relational::Filter', stream, params = params)
Args:
name(str): Name for returned stream.
Returns:
Stream: Alias of this stream with ``name`` equal to `name`.
.. versionadded:: 1.9
"""
stream = copy.copy(self)
stream._alias = name
return stream
|
def function[aliased_as, parameter[self, name]]:
constant[
Create an alias of this stream.
Returns an alias of this stream with name `name`.
When invocation of an SPL operator requires an
:py:class:`~streamsx.spl.op.Expression` against
an input port this can be used to ensure expression
matches the input port alias regardless of the name
of the actual stream.
Example use where the filter expression for a ``Filter`` SPL operator
uses ``IN`` to access input tuple attribute ``seq``::
s = ...
s = s.aliased_as('IN')
params = {'filter': op.Expression.expression('IN.seq % 4ul == 0ul')}
f = op.Map('spl.relational::Filter', stream, params = params)
Args:
name(str): Name for returned stream.
Returns:
Stream: Alias of this stream with ``name`` equal to `name`.
.. versionadded:: 1.9
]
variable[stream] assign[=] call[name[copy].copy, parameter[name[self]]]
name[stream]._alias assign[=] name[name]
return[name[stream]]
|
keyword[def] identifier[aliased_as] ( identifier[self] , identifier[name] ):
literal[string]
identifier[stream] = identifier[copy] . identifier[copy] ( identifier[self] )
identifier[stream] . identifier[_alias] = identifier[name]
keyword[return] identifier[stream]
|
def aliased_as(self, name):
"""
Create an alias of this stream.
Returns an alias of this stream with name `name`.
When invocation of an SPL operator requires an
:py:class:`~streamsx.spl.op.Expression` against
an input port this can be used to ensure expression
matches the input port alias regardless of the name
of the actual stream.
Example use where the filter expression for a ``Filter`` SPL operator
uses ``IN`` to access input tuple attribute ``seq``::
s = ...
s = s.aliased_as('IN')
params = {'filter': op.Expression.expression('IN.seq % 4ul == 0ul')}
f = op.Map('spl.relational::Filter', stream, params = params)
Args:
name(str): Name for returned stream.
Returns:
Stream: Alias of this stream with ``name`` equal to `name`.
.. versionadded:: 1.9
"""
stream = copy.copy(self)
stream._alias = name
return stream
|
def ExtractEvents(self, parser_mediator, registry_key, **kwargs):
"""Extracts events from a Windows Registry key.
Args:
parser_mediator (ParserMediator): mediates interactions between parsers
and other components, such as storage and dfvfs.
registry_key (dfwinreg.WinRegistryKey): Windows Registry key.
"""
network_info = {}
signatures = registry_key.GetSubkeyByName('Signatures')
if signatures:
network_info = self._GetNetworkInfo(signatures)
profiles = registry_key.GetSubkeyByName('Profiles')
if not profiles:
return
for subkey in profiles.GetSubkeys():
default_gateway_mac, dns_suffix = network_info.get(
subkey.name, (None, None))
event_data = WindowsRegistryNetworkEventData()
event_data.default_gateway_mac = default_gateway_mac
event_data.dns_suffix = dns_suffix
ssid_value = subkey.GetValueByName('ProfileName')
if ssid_value:
event_data.ssid = ssid_value.GetDataAsObject()
description_value = subkey.GetValueByName('Description')
if description_value:
event_data.description = description_value.GetDataAsObject()
connection_type_value = subkey.GetValueByName('NameType')
if connection_type_value:
connection_type = connection_type_value.GetDataAsObject()
# TODO: move to formatter.
connection_type = self._CONNECTION_TYPE.get(
connection_type, 'unknown')
event_data.connection_type = connection_type
date_created_value = subkey.GetValueByName('DateCreated')
if date_created_value:
try:
date_time = self._ParseSystemTime(date_created_value.data)
except errors.ParseError as exception:
date_time = None
parser_mediator.ProduceExtractionWarning(
'unable to parse date created with error: {0!s}'.format(
exception))
if date_time:
event = time_events.DateTimeValuesEvent(
date_time, definitions.TIME_DESCRIPTION_CREATION)
parser_mediator.ProduceEventWithEventData(event, event_data)
date_last_connected_value = subkey.GetValueByName('DateLastConnected')
if date_last_connected_value:
try:
date_time = self._ParseSystemTime(date_last_connected_value.data)
except errors.ParseError as exception:
date_time = None
parser_mediator.ProduceExtractionWarning(
'unable to parse date last connected with error: {0!s}'.format(
exception))
if date_time:
event = time_events.DateTimeValuesEvent(
date_time, definitions.TIME_DESCRIPTION_LAST_CONNECTED)
parser_mediator.ProduceEventWithEventData(event, event_data)
|
def function[ExtractEvents, parameter[self, parser_mediator, registry_key]]:
constant[Extracts events from a Windows Registry key.
Args:
parser_mediator (ParserMediator): mediates interactions between parsers
and other components, such as storage and dfvfs.
registry_key (dfwinreg.WinRegistryKey): Windows Registry key.
]
variable[network_info] assign[=] dictionary[[], []]
variable[signatures] assign[=] call[name[registry_key].GetSubkeyByName, parameter[constant[Signatures]]]
if name[signatures] begin[:]
variable[network_info] assign[=] call[name[self]._GetNetworkInfo, parameter[name[signatures]]]
variable[profiles] assign[=] call[name[registry_key].GetSubkeyByName, parameter[constant[Profiles]]]
if <ast.UnaryOp object at 0x7da18bc70b80> begin[:]
return[None]
for taget[name[subkey]] in starred[call[name[profiles].GetSubkeys, parameter[]]] begin[:]
<ast.Tuple object at 0x7da18bc70c10> assign[=] call[name[network_info].get, parameter[name[subkey].name, tuple[[<ast.Constant object at 0x7da18bc72bc0>, <ast.Constant object at 0x7da18bc72830>]]]]
variable[event_data] assign[=] call[name[WindowsRegistryNetworkEventData], parameter[]]
name[event_data].default_gateway_mac assign[=] name[default_gateway_mac]
name[event_data].dns_suffix assign[=] name[dns_suffix]
variable[ssid_value] assign[=] call[name[subkey].GetValueByName, parameter[constant[ProfileName]]]
if name[ssid_value] begin[:]
name[event_data].ssid assign[=] call[name[ssid_value].GetDataAsObject, parameter[]]
variable[description_value] assign[=] call[name[subkey].GetValueByName, parameter[constant[Description]]]
if name[description_value] begin[:]
name[event_data].description assign[=] call[name[description_value].GetDataAsObject, parameter[]]
variable[connection_type_value] assign[=] call[name[subkey].GetValueByName, parameter[constant[NameType]]]
if name[connection_type_value] begin[:]
variable[connection_type] assign[=] call[name[connection_type_value].GetDataAsObject, parameter[]]
variable[connection_type] assign[=] call[name[self]._CONNECTION_TYPE.get, parameter[name[connection_type], constant[unknown]]]
name[event_data].connection_type assign[=] name[connection_type]
variable[date_created_value] assign[=] call[name[subkey].GetValueByName, parameter[constant[DateCreated]]]
if name[date_created_value] begin[:]
<ast.Try object at 0x7da18c4cc310>
if name[date_time] begin[:]
variable[event] assign[=] call[name[time_events].DateTimeValuesEvent, parameter[name[date_time], name[definitions].TIME_DESCRIPTION_CREATION]]
call[name[parser_mediator].ProduceEventWithEventData, parameter[name[event], name[event_data]]]
variable[date_last_connected_value] assign[=] call[name[subkey].GetValueByName, parameter[constant[DateLastConnected]]]
if name[date_last_connected_value] begin[:]
<ast.Try object at 0x7da18c4cd060>
if name[date_time] begin[:]
variable[event] assign[=] call[name[time_events].DateTimeValuesEvent, parameter[name[date_time], name[definitions].TIME_DESCRIPTION_LAST_CONNECTED]]
call[name[parser_mediator].ProduceEventWithEventData, parameter[name[event], name[event_data]]]
|
keyword[def] identifier[ExtractEvents] ( identifier[self] , identifier[parser_mediator] , identifier[registry_key] ,** identifier[kwargs] ):
literal[string]
identifier[network_info] ={}
identifier[signatures] = identifier[registry_key] . identifier[GetSubkeyByName] ( literal[string] )
keyword[if] identifier[signatures] :
identifier[network_info] = identifier[self] . identifier[_GetNetworkInfo] ( identifier[signatures] )
identifier[profiles] = identifier[registry_key] . identifier[GetSubkeyByName] ( literal[string] )
keyword[if] keyword[not] identifier[profiles] :
keyword[return]
keyword[for] identifier[subkey] keyword[in] identifier[profiles] . identifier[GetSubkeys] ():
identifier[default_gateway_mac] , identifier[dns_suffix] = identifier[network_info] . identifier[get] (
identifier[subkey] . identifier[name] ,( keyword[None] , keyword[None] ))
identifier[event_data] = identifier[WindowsRegistryNetworkEventData] ()
identifier[event_data] . identifier[default_gateway_mac] = identifier[default_gateway_mac]
identifier[event_data] . identifier[dns_suffix] = identifier[dns_suffix]
identifier[ssid_value] = identifier[subkey] . identifier[GetValueByName] ( literal[string] )
keyword[if] identifier[ssid_value] :
identifier[event_data] . identifier[ssid] = identifier[ssid_value] . identifier[GetDataAsObject] ()
identifier[description_value] = identifier[subkey] . identifier[GetValueByName] ( literal[string] )
keyword[if] identifier[description_value] :
identifier[event_data] . identifier[description] = identifier[description_value] . identifier[GetDataAsObject] ()
identifier[connection_type_value] = identifier[subkey] . identifier[GetValueByName] ( literal[string] )
keyword[if] identifier[connection_type_value] :
identifier[connection_type] = identifier[connection_type_value] . identifier[GetDataAsObject] ()
identifier[connection_type] = identifier[self] . identifier[_CONNECTION_TYPE] . identifier[get] (
identifier[connection_type] , literal[string] )
identifier[event_data] . identifier[connection_type] = identifier[connection_type]
identifier[date_created_value] = identifier[subkey] . identifier[GetValueByName] ( literal[string] )
keyword[if] identifier[date_created_value] :
keyword[try] :
identifier[date_time] = identifier[self] . identifier[_ParseSystemTime] ( identifier[date_created_value] . identifier[data] )
keyword[except] identifier[errors] . identifier[ParseError] keyword[as] identifier[exception] :
identifier[date_time] = keyword[None]
identifier[parser_mediator] . identifier[ProduceExtractionWarning] (
literal[string] . identifier[format] (
identifier[exception] ))
keyword[if] identifier[date_time] :
identifier[event] = identifier[time_events] . identifier[DateTimeValuesEvent] (
identifier[date_time] , identifier[definitions] . identifier[TIME_DESCRIPTION_CREATION] )
identifier[parser_mediator] . identifier[ProduceEventWithEventData] ( identifier[event] , identifier[event_data] )
identifier[date_last_connected_value] = identifier[subkey] . identifier[GetValueByName] ( literal[string] )
keyword[if] identifier[date_last_connected_value] :
keyword[try] :
identifier[date_time] = identifier[self] . identifier[_ParseSystemTime] ( identifier[date_last_connected_value] . identifier[data] )
keyword[except] identifier[errors] . identifier[ParseError] keyword[as] identifier[exception] :
identifier[date_time] = keyword[None]
identifier[parser_mediator] . identifier[ProduceExtractionWarning] (
literal[string] . identifier[format] (
identifier[exception] ))
keyword[if] identifier[date_time] :
identifier[event] = identifier[time_events] . identifier[DateTimeValuesEvent] (
identifier[date_time] , identifier[definitions] . identifier[TIME_DESCRIPTION_LAST_CONNECTED] )
identifier[parser_mediator] . identifier[ProduceEventWithEventData] ( identifier[event] , identifier[event_data] )
|
def ExtractEvents(self, parser_mediator, registry_key, **kwargs):
"""Extracts events from a Windows Registry key.
Args:
parser_mediator (ParserMediator): mediates interactions between parsers
and other components, such as storage and dfvfs.
registry_key (dfwinreg.WinRegistryKey): Windows Registry key.
"""
network_info = {}
signatures = registry_key.GetSubkeyByName('Signatures')
if signatures:
network_info = self._GetNetworkInfo(signatures) # depends on [control=['if'], data=[]]
profiles = registry_key.GetSubkeyByName('Profiles')
if not profiles:
return # depends on [control=['if'], data=[]]
for subkey in profiles.GetSubkeys():
(default_gateway_mac, dns_suffix) = network_info.get(subkey.name, (None, None))
event_data = WindowsRegistryNetworkEventData()
event_data.default_gateway_mac = default_gateway_mac
event_data.dns_suffix = dns_suffix
ssid_value = subkey.GetValueByName('ProfileName')
if ssid_value:
event_data.ssid = ssid_value.GetDataAsObject() # depends on [control=['if'], data=[]]
description_value = subkey.GetValueByName('Description')
if description_value:
event_data.description = description_value.GetDataAsObject() # depends on [control=['if'], data=[]]
connection_type_value = subkey.GetValueByName('NameType')
if connection_type_value:
connection_type = connection_type_value.GetDataAsObject()
# TODO: move to formatter.
connection_type = self._CONNECTION_TYPE.get(connection_type, 'unknown')
event_data.connection_type = connection_type # depends on [control=['if'], data=[]]
date_created_value = subkey.GetValueByName('DateCreated')
if date_created_value:
try:
date_time = self._ParseSystemTime(date_created_value.data) # depends on [control=['try'], data=[]]
except errors.ParseError as exception:
date_time = None
parser_mediator.ProduceExtractionWarning('unable to parse date created with error: {0!s}'.format(exception)) # depends on [control=['except'], data=['exception']]
if date_time:
event = time_events.DateTimeValuesEvent(date_time, definitions.TIME_DESCRIPTION_CREATION)
parser_mediator.ProduceEventWithEventData(event, event_data) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
date_last_connected_value = subkey.GetValueByName('DateLastConnected')
if date_last_connected_value:
try:
date_time = self._ParseSystemTime(date_last_connected_value.data) # depends on [control=['try'], data=[]]
except errors.ParseError as exception:
date_time = None
parser_mediator.ProduceExtractionWarning('unable to parse date last connected with error: {0!s}'.format(exception)) # depends on [control=['except'], data=['exception']]
if date_time:
event = time_events.DateTimeValuesEvent(date_time, definitions.TIME_DESCRIPTION_LAST_CONNECTED)
parser_mediator.ProduceEventWithEventData(event, event_data) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['subkey']]
|
def guess_input_handler(seqs, add_seq_names=False):
"""Returns the name of the input handler for seqs."""
if isinstance(seqs, str):
if '\n' in seqs: # can't be a filename...
return '_input_as_multiline_string'
else: # assume it was a filename
return '_input_as_string'
if isinstance(seqs, list) and len(seqs) and isinstance(seqs[0], tuple):
return '_input_as_seq_id_seq_pairs'
if add_seq_names:
return '_input_as_seqs'
return '_input_as_lines'
|
def function[guess_input_handler, parameter[seqs, add_seq_names]]:
constant[Returns the name of the input handler for seqs.]
if call[name[isinstance], parameter[name[seqs], name[str]]] begin[:]
if compare[constant[
] in name[seqs]] begin[:]
return[constant[_input_as_multiline_string]]
if <ast.BoolOp object at 0x7da1b0bd9660> begin[:]
return[constant[_input_as_seq_id_seq_pairs]]
if name[add_seq_names] begin[:]
return[constant[_input_as_seqs]]
return[constant[_input_as_lines]]
|
keyword[def] identifier[guess_input_handler] ( identifier[seqs] , identifier[add_seq_names] = keyword[False] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[seqs] , identifier[str] ):
keyword[if] literal[string] keyword[in] identifier[seqs] :
keyword[return] literal[string]
keyword[else] :
keyword[return] literal[string]
keyword[if] identifier[isinstance] ( identifier[seqs] , identifier[list] ) keyword[and] identifier[len] ( identifier[seqs] ) keyword[and] identifier[isinstance] ( identifier[seqs] [ literal[int] ], identifier[tuple] ):
keyword[return] literal[string]
keyword[if] identifier[add_seq_names] :
keyword[return] literal[string]
keyword[return] literal[string]
|
def guess_input_handler(seqs, add_seq_names=False):
"""Returns the name of the input handler for seqs."""
if isinstance(seqs, str):
if '\n' in seqs: # can't be a filename...
return '_input_as_multiline_string' # depends on [control=['if'], data=[]]
else: # assume it was a filename
return '_input_as_string' # depends on [control=['if'], data=[]]
if isinstance(seqs, list) and len(seqs) and isinstance(seqs[0], tuple):
return '_input_as_seq_id_seq_pairs' # depends on [control=['if'], data=[]]
if add_seq_names:
return '_input_as_seqs' # depends on [control=['if'], data=[]]
return '_input_as_lines'
|
def from_response(response):
'''
Create an error of the right class from an API response.
:param response dict Response JSON
'''
cls = PLAID_ERROR_TYPE_MAP.get(response['error_type'], PlaidError)
return cls(response['error_message'],
response['error_type'],
response['error_code'],
response['display_message'],
response['request_id'],
response.get('causes'))
|
def function[from_response, parameter[response]]:
constant[
Create an error of the right class from an API response.
:param response dict Response JSON
]
variable[cls] assign[=] call[name[PLAID_ERROR_TYPE_MAP].get, parameter[call[name[response]][constant[error_type]], name[PlaidError]]]
return[call[name[cls], parameter[call[name[response]][constant[error_message]], call[name[response]][constant[error_type]], call[name[response]][constant[error_code]], call[name[response]][constant[display_message]], call[name[response]][constant[request_id]], call[name[response].get, parameter[constant[causes]]]]]]
|
keyword[def] identifier[from_response] ( identifier[response] ):
literal[string]
identifier[cls] = identifier[PLAID_ERROR_TYPE_MAP] . identifier[get] ( identifier[response] [ literal[string] ], identifier[PlaidError] )
keyword[return] identifier[cls] ( identifier[response] [ literal[string] ],
identifier[response] [ literal[string] ],
identifier[response] [ literal[string] ],
identifier[response] [ literal[string] ],
identifier[response] [ literal[string] ],
identifier[response] . identifier[get] ( literal[string] ))
|
def from_response(response):
"""
Create an error of the right class from an API response.
:param response dict Response JSON
"""
cls = PLAID_ERROR_TYPE_MAP.get(response['error_type'], PlaidError)
return cls(response['error_message'], response['error_type'], response['error_code'], response['display_message'], response['request_id'], response.get('causes'))
|
def buildcontainer(self):
"""generate HTML div"""
if self.container:
return
# Create SVG div with style
if self.width:
if self.width[-1] != '%':
self.style += 'width:%spx;' % self.width
else:
self.style += 'width:%s;' % self.width
if self.height:
if self.height[-1] != '%':
self.style += 'height:%spx;' % self.height
else:
self.style += 'height:%s;' % self.height
if self.style:
self.style = 'style="%s"' % self.style
self.container = self.containerheader + \
'<div id="%s"><svg %s></svg></div>\n' % (self.name, self.style)
|
def function[buildcontainer, parameter[self]]:
constant[generate HTML div]
if name[self].container begin[:]
return[None]
if name[self].width begin[:]
if compare[call[name[self].width][<ast.UnaryOp object at 0x7da1b0323790>] not_equal[!=] constant[%]] begin[:]
<ast.AugAssign object at 0x7da1b0323940>
if name[self].height begin[:]
if compare[call[name[self].height][<ast.UnaryOp object at 0x7da1b03a0f10>] not_equal[!=] constant[%]] begin[:]
<ast.AugAssign object at 0x7da1b03a30a0>
if name[self].style begin[:]
name[self].style assign[=] binary_operation[constant[style="%s"] <ast.Mod object at 0x7da2590d6920> name[self].style]
name[self].container assign[=] binary_operation[name[self].containerheader + binary_operation[constant[<div id="%s"><svg %s></svg></div>
] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b03f9660>, <ast.Attribute object at 0x7da1b03f9570>]]]]
|
keyword[def] identifier[buildcontainer] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[container] :
keyword[return]
keyword[if] identifier[self] . identifier[width] :
keyword[if] identifier[self] . identifier[width] [- literal[int] ]!= literal[string] :
identifier[self] . identifier[style] += literal[string] % identifier[self] . identifier[width]
keyword[else] :
identifier[self] . identifier[style] += literal[string] % identifier[self] . identifier[width]
keyword[if] identifier[self] . identifier[height] :
keyword[if] identifier[self] . identifier[height] [- literal[int] ]!= literal[string] :
identifier[self] . identifier[style] += literal[string] % identifier[self] . identifier[height]
keyword[else] :
identifier[self] . identifier[style] += literal[string] % identifier[self] . identifier[height]
keyword[if] identifier[self] . identifier[style] :
identifier[self] . identifier[style] = literal[string] % identifier[self] . identifier[style]
identifier[self] . identifier[container] = identifier[self] . identifier[containerheader] + literal[string] %( identifier[self] . identifier[name] , identifier[self] . identifier[style] )
|
def buildcontainer(self):
"""generate HTML div"""
if self.container:
return # depends on [control=['if'], data=[]]
# Create SVG div with style
if self.width:
if self.width[-1] != '%':
self.style += 'width:%spx;' % self.width # depends on [control=['if'], data=[]]
else:
self.style += 'width:%s;' % self.width # depends on [control=['if'], data=[]]
if self.height:
if self.height[-1] != '%':
self.style += 'height:%spx;' % self.height # depends on [control=['if'], data=[]]
else:
self.style += 'height:%s;' % self.height # depends on [control=['if'], data=[]]
if self.style:
self.style = 'style="%s"' % self.style # depends on [control=['if'], data=[]]
self.container = self.containerheader + '<div id="%s"><svg %s></svg></div>\n' % (self.name, self.style)
|
def read_file(file):
"""Read ``file`` into ``bytes``.
:arg file type: path-like or file-like
:rtype: bytes
"""
if hasattr(file, "read"):
return file.read()
if hasattr(file, "read_bytes"):
return file.read_bytes()
with open(file, "rb") as f:
return f.read()
|
def function[read_file, parameter[file]]:
constant[Read ``file`` into ``bytes``.
:arg file type: path-like or file-like
:rtype: bytes
]
if call[name[hasattr], parameter[name[file], constant[read]]] begin[:]
return[call[name[file].read, parameter[]]]
if call[name[hasattr], parameter[name[file], constant[read_bytes]]] begin[:]
return[call[name[file].read_bytes, parameter[]]]
with call[name[open], parameter[name[file], constant[rb]]] begin[:]
return[call[name[f].read, parameter[]]]
|
keyword[def] identifier[read_file] ( identifier[file] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[file] , literal[string] ):
keyword[return] identifier[file] . identifier[read] ()
keyword[if] identifier[hasattr] ( identifier[file] , literal[string] ):
keyword[return] identifier[file] . identifier[read_bytes] ()
keyword[with] identifier[open] ( identifier[file] , literal[string] ) keyword[as] identifier[f] :
keyword[return] identifier[f] . identifier[read] ()
|
def read_file(file):
"""Read ``file`` into ``bytes``.
:arg file type: path-like or file-like
:rtype: bytes
"""
if hasattr(file, 'read'):
return file.read() # depends on [control=['if'], data=[]]
if hasattr(file, 'read_bytes'):
return file.read_bytes() # depends on [control=['if'], data=[]]
with open(file, 'rb') as f:
return f.read() # depends on [control=['with'], data=['f']]
|
def _create_transfer_spec(self, call_args):
''' pass the transfer details to aspera and receive back a
populated transfer spec complete with access token '''
_paths = []
for _file_pair in call_args.file_pair_list:
_path = OrderedDict()
if call_args.direction == enumAsperaDirection.SEND:
_action = "upload_setup"
_path['source'] = _file_pair.fileobj
_path['destination'] = _file_pair.key
else:
_action = "download_setup"
_path['source'] = _file_pair.key
_path['destination'] = _file_pair.fileobj
_paths.append(_path)
# Add credentials before the transfer spec is requested.
delegated_token = self._delegated_token_manager.get_token()
_response = self._fetch_transfer_spec(_action, delegated_token, call_args.bucket, _paths)
tspec_dict = json.loads(_response.content)['transfer_specs'][0]['transfer_spec']
tspec_dict["destination_root"] = "/"
if (call_args.transfer_config):
tspec_dict.update(call_args.transfer_config.dict)
if call_args.transfer_config.is_multi_session_all:
tspec_dict['multi_session'] = 0
_remote_host = tspec_dict['remote_host'].split('.')
# now we append '-all' to the remote host
_remote_host[0] += "-all"
tspec_dict['remote_host'] = ".".join(_remote_host)
logger.info("New remote_host(%s)" % tspec_dict['remote_host'])
call_args.transfer_spec = json.dumps(tspec_dict)
return True
|
def function[_create_transfer_spec, parameter[self, call_args]]:
constant[ pass the transfer details to aspera and receive back a
populated transfer spec complete with access token ]
variable[_paths] assign[=] list[[]]
for taget[name[_file_pair]] in starred[name[call_args].file_pair_list] begin[:]
variable[_path] assign[=] call[name[OrderedDict], parameter[]]
if compare[name[call_args].direction equal[==] name[enumAsperaDirection].SEND] begin[:]
variable[_action] assign[=] constant[upload_setup]
call[name[_path]][constant[source]] assign[=] name[_file_pair].fileobj
call[name[_path]][constant[destination]] assign[=] name[_file_pair].key
call[name[_paths].append, parameter[name[_path]]]
variable[delegated_token] assign[=] call[name[self]._delegated_token_manager.get_token, parameter[]]
variable[_response] assign[=] call[name[self]._fetch_transfer_spec, parameter[name[_action], name[delegated_token], name[call_args].bucket, name[_paths]]]
variable[tspec_dict] assign[=] call[call[call[call[name[json].loads, parameter[name[_response].content]]][constant[transfer_specs]]][constant[0]]][constant[transfer_spec]]
call[name[tspec_dict]][constant[destination_root]] assign[=] constant[/]
if name[call_args].transfer_config begin[:]
call[name[tspec_dict].update, parameter[name[call_args].transfer_config.dict]]
if name[call_args].transfer_config.is_multi_session_all begin[:]
call[name[tspec_dict]][constant[multi_session]] assign[=] constant[0]
variable[_remote_host] assign[=] call[call[name[tspec_dict]][constant[remote_host]].split, parameter[constant[.]]]
<ast.AugAssign object at 0x7da20c76da80>
call[name[tspec_dict]][constant[remote_host]] assign[=] call[constant[.].join, parameter[name[_remote_host]]]
call[name[logger].info, parameter[binary_operation[constant[New remote_host(%s)] <ast.Mod object at 0x7da2590d6920> call[name[tspec_dict]][constant[remote_host]]]]]
name[call_args].transfer_spec assign[=] call[name[json].dumps, parameter[name[tspec_dict]]]
return[constant[True]]
|
keyword[def] identifier[_create_transfer_spec] ( identifier[self] , identifier[call_args] ):
literal[string]
identifier[_paths] =[]
keyword[for] identifier[_file_pair] keyword[in] identifier[call_args] . identifier[file_pair_list] :
identifier[_path] = identifier[OrderedDict] ()
keyword[if] identifier[call_args] . identifier[direction] == identifier[enumAsperaDirection] . identifier[SEND] :
identifier[_action] = literal[string]
identifier[_path] [ literal[string] ]= identifier[_file_pair] . identifier[fileobj]
identifier[_path] [ literal[string] ]= identifier[_file_pair] . identifier[key]
keyword[else] :
identifier[_action] = literal[string]
identifier[_path] [ literal[string] ]= identifier[_file_pair] . identifier[key]
identifier[_path] [ literal[string] ]= identifier[_file_pair] . identifier[fileobj]
identifier[_paths] . identifier[append] ( identifier[_path] )
identifier[delegated_token] = identifier[self] . identifier[_delegated_token_manager] . identifier[get_token] ()
identifier[_response] = identifier[self] . identifier[_fetch_transfer_spec] ( identifier[_action] , identifier[delegated_token] , identifier[call_args] . identifier[bucket] , identifier[_paths] )
identifier[tspec_dict] = identifier[json] . identifier[loads] ( identifier[_response] . identifier[content] )[ literal[string] ][ literal[int] ][ literal[string] ]
identifier[tspec_dict] [ literal[string] ]= literal[string]
keyword[if] ( identifier[call_args] . identifier[transfer_config] ):
identifier[tspec_dict] . identifier[update] ( identifier[call_args] . identifier[transfer_config] . identifier[dict] )
keyword[if] identifier[call_args] . identifier[transfer_config] . identifier[is_multi_session_all] :
identifier[tspec_dict] [ literal[string] ]= literal[int]
identifier[_remote_host] = identifier[tspec_dict] [ literal[string] ]. identifier[split] ( literal[string] )
identifier[_remote_host] [ literal[int] ]+= literal[string]
identifier[tspec_dict] [ literal[string] ]= literal[string] . identifier[join] ( identifier[_remote_host] )
identifier[logger] . identifier[info] ( literal[string] % identifier[tspec_dict] [ literal[string] ])
identifier[call_args] . identifier[transfer_spec] = identifier[json] . identifier[dumps] ( identifier[tspec_dict] )
keyword[return] keyword[True]
|
def _create_transfer_spec(self, call_args):
""" pass the transfer details to aspera and receive back a
populated transfer spec complete with access token """
_paths = []
for _file_pair in call_args.file_pair_list:
_path = OrderedDict()
if call_args.direction == enumAsperaDirection.SEND:
_action = 'upload_setup'
_path['source'] = _file_pair.fileobj
_path['destination'] = _file_pair.key # depends on [control=['if'], data=[]]
else:
_action = 'download_setup'
_path['source'] = _file_pair.key
_path['destination'] = _file_pair.fileobj
_paths.append(_path) # depends on [control=['for'], data=['_file_pair']]
# Add credentials before the transfer spec is requested.
delegated_token = self._delegated_token_manager.get_token()
_response = self._fetch_transfer_spec(_action, delegated_token, call_args.bucket, _paths)
tspec_dict = json.loads(_response.content)['transfer_specs'][0]['transfer_spec']
tspec_dict['destination_root'] = '/'
if call_args.transfer_config:
tspec_dict.update(call_args.transfer_config.dict)
if call_args.transfer_config.is_multi_session_all:
tspec_dict['multi_session'] = 0
_remote_host = tspec_dict['remote_host'].split('.')
# now we append '-all' to the remote host
_remote_host[0] += '-all'
tspec_dict['remote_host'] = '.'.join(_remote_host)
logger.info('New remote_host(%s)' % tspec_dict['remote_host']) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
call_args.transfer_spec = json.dumps(tspec_dict)
return True
|
def _json_safe(cls, value):
"""Return a JSON safe value"""
# Date
if type(value) == date:
return str(value)
# Datetime
elif type(value) == datetime:
return value.strftime('%Y-%m-%d %H:%M:%S')
# Object Id
elif isinstance(value, ObjectId):
return str(value)
# Frame
elif isinstance(value, _BaseFrame):
return value.to_json_type()
# Lists
elif isinstance(value, (list, tuple)):
return [cls._json_safe(v) for v in value]
# Dictionaries
elif isinstance(value, dict):
return {k:cls._json_safe(v) for k, v in value.items()}
return value
|
def function[_json_safe, parameter[cls, value]]:
constant[Return a JSON safe value]
if compare[call[name[type], parameter[name[value]]] equal[==] name[date]] begin[:]
return[call[name[str], parameter[name[value]]]]
return[name[value]]
|
keyword[def] identifier[_json_safe] ( identifier[cls] , identifier[value] ):
literal[string]
keyword[if] identifier[type] ( identifier[value] )== identifier[date] :
keyword[return] identifier[str] ( identifier[value] )
keyword[elif] identifier[type] ( identifier[value] )== identifier[datetime] :
keyword[return] identifier[value] . identifier[strftime] ( literal[string] )
keyword[elif] identifier[isinstance] ( identifier[value] , identifier[ObjectId] ):
keyword[return] identifier[str] ( identifier[value] )
keyword[elif] identifier[isinstance] ( identifier[value] , identifier[_BaseFrame] ):
keyword[return] identifier[value] . identifier[to_json_type] ()
keyword[elif] identifier[isinstance] ( identifier[value] ,( identifier[list] , identifier[tuple] )):
keyword[return] [ identifier[cls] . identifier[_json_safe] ( identifier[v] ) keyword[for] identifier[v] keyword[in] identifier[value] ]
keyword[elif] identifier[isinstance] ( identifier[value] , identifier[dict] ):
keyword[return] { identifier[k] : identifier[cls] . identifier[_json_safe] ( identifier[v] ) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[value] . identifier[items] ()}
keyword[return] identifier[value]
|
def _json_safe(cls, value):
"""Return a JSON safe value"""
# Date
if type(value) == date:
return str(value) # depends on [control=['if'], data=[]]
# Datetime
elif type(value) == datetime:
return value.strftime('%Y-%m-%d %H:%M:%S') # depends on [control=['if'], data=[]]
# Object Id
elif isinstance(value, ObjectId):
return str(value) # depends on [control=['if'], data=[]]
# Frame
elif isinstance(value, _BaseFrame):
return value.to_json_type() # depends on [control=['if'], data=[]]
# Lists
elif isinstance(value, (list, tuple)):
return [cls._json_safe(v) for v in value] # depends on [control=['if'], data=[]]
# Dictionaries
elif isinstance(value, dict):
return {k: cls._json_safe(v) for (k, v) in value.items()} # depends on [control=['if'], data=[]]
return value
|
def start_mon_service(distro, cluster, hostname):
"""
start mon service depending on distro init
"""
if distro.init == 'sysvinit':
service = distro.conn.remote_module.which_service()
remoto.process.run(
distro.conn,
[
service,
'ceph',
'-c',
'/etc/ceph/{cluster}.conf'.format(cluster=cluster),
'start',
'mon.{hostname}'.format(hostname=hostname)
],
timeout=7,
)
system.enable_service(distro.conn)
elif distro.init == 'upstart':
remoto.process.run(
distro.conn,
[
'initctl',
'emit',
'ceph-mon',
'cluster={cluster}'.format(cluster=cluster),
'id={hostname}'.format(hostname=hostname),
],
timeout=7,
)
elif distro.init == 'systemd':
# enable ceph target for this host (in case it isn't already enabled)
remoto.process.run(
distro.conn,
[
'systemctl',
'enable',
'ceph.target'
],
timeout=7,
)
# enable and start this mon instance
remoto.process.run(
distro.conn,
[
'systemctl',
'enable',
'ceph-mon@{hostname}'.format(hostname=hostname),
],
timeout=7,
)
remoto.process.run(
distro.conn,
[
'systemctl',
'start',
'ceph-mon@{hostname}'.format(hostname=hostname),
],
timeout=7,
)
|
def function[start_mon_service, parameter[distro, cluster, hostname]]:
constant[
start mon service depending on distro init
]
if compare[name[distro].init equal[==] constant[sysvinit]] begin[:]
variable[service] assign[=] call[name[distro].conn.remote_module.which_service, parameter[]]
call[name[remoto].process.run, parameter[name[distro].conn, list[[<ast.Name object at 0x7da1b1644880>, <ast.Constant object at 0x7da1b1647760>, <ast.Constant object at 0x7da1b1645930>, <ast.Call object at 0x7da1b1645ba0>, <ast.Constant object at 0x7da1b16458d0>, <ast.Call object at 0x7da1b16463e0>]]]]
call[name[system].enable_service, parameter[name[distro].conn]]
|
keyword[def] identifier[start_mon_service] ( identifier[distro] , identifier[cluster] , identifier[hostname] ):
literal[string]
keyword[if] identifier[distro] . identifier[init] == literal[string] :
identifier[service] = identifier[distro] . identifier[conn] . identifier[remote_module] . identifier[which_service] ()
identifier[remoto] . identifier[process] . identifier[run] (
identifier[distro] . identifier[conn] ,
[
identifier[service] ,
literal[string] ,
literal[string] ,
literal[string] . identifier[format] ( identifier[cluster] = identifier[cluster] ),
literal[string] ,
literal[string] . identifier[format] ( identifier[hostname] = identifier[hostname] )
],
identifier[timeout] = literal[int] ,
)
identifier[system] . identifier[enable_service] ( identifier[distro] . identifier[conn] )
keyword[elif] identifier[distro] . identifier[init] == literal[string] :
identifier[remoto] . identifier[process] . identifier[run] (
identifier[distro] . identifier[conn] ,
[
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] . identifier[format] ( identifier[cluster] = identifier[cluster] ),
literal[string] . identifier[format] ( identifier[hostname] = identifier[hostname] ),
],
identifier[timeout] = literal[int] ,
)
keyword[elif] identifier[distro] . identifier[init] == literal[string] :
identifier[remoto] . identifier[process] . identifier[run] (
identifier[distro] . identifier[conn] ,
[
literal[string] ,
literal[string] ,
literal[string]
],
identifier[timeout] = literal[int] ,
)
identifier[remoto] . identifier[process] . identifier[run] (
identifier[distro] . identifier[conn] ,
[
literal[string] ,
literal[string] ,
literal[string] . identifier[format] ( identifier[hostname] = identifier[hostname] ),
],
identifier[timeout] = literal[int] ,
)
identifier[remoto] . identifier[process] . identifier[run] (
identifier[distro] . identifier[conn] ,
[
literal[string] ,
literal[string] ,
literal[string] . identifier[format] ( identifier[hostname] = identifier[hostname] ),
],
identifier[timeout] = literal[int] ,
)
|
def start_mon_service(distro, cluster, hostname):
"""
start mon service depending on distro init
"""
if distro.init == 'sysvinit':
service = distro.conn.remote_module.which_service()
remoto.process.run(distro.conn, [service, 'ceph', '-c', '/etc/ceph/{cluster}.conf'.format(cluster=cluster), 'start', 'mon.{hostname}'.format(hostname=hostname)], timeout=7)
system.enable_service(distro.conn) # depends on [control=['if'], data=[]]
elif distro.init == 'upstart':
remoto.process.run(distro.conn, ['initctl', 'emit', 'ceph-mon', 'cluster={cluster}'.format(cluster=cluster), 'id={hostname}'.format(hostname=hostname)], timeout=7) # depends on [control=['if'], data=[]]
elif distro.init == 'systemd':
# enable ceph target for this host (in case it isn't already enabled)
remoto.process.run(distro.conn, ['systemctl', 'enable', 'ceph.target'], timeout=7)
# enable and start this mon instance
remoto.process.run(distro.conn, ['systemctl', 'enable', 'ceph-mon@{hostname}'.format(hostname=hostname)], timeout=7)
remoto.process.run(distro.conn, ['systemctl', 'start', 'ceph-mon@{hostname}'.format(hostname=hostname)], timeout=7) # depends on [control=['if'], data=[]]
|
def sample_stats_to_xarray(self):
"""Extract sample_stats from posterior."""
posterior = self.posterior
posterior_model = self.posterior_model
# copy dims and coords
dims = deepcopy(self.dims) if self.dims is not None else {}
coords = deepcopy(self.coords) if self.coords is not None else {}
# log_likelihood
log_likelihood = self.log_likelihood
if log_likelihood is not None:
if isinstance(log_likelihood, str) and log_likelihood in dims:
dims["log_likelihood"] = dims.pop(log_likelihood)
data = get_sample_stats_stan3(
posterior, model=posterior_model, log_likelihood=log_likelihood
)
return dict_to_dataset(data, library=self.stan, coords=coords, dims=dims)
|
def function[sample_stats_to_xarray, parameter[self]]:
constant[Extract sample_stats from posterior.]
variable[posterior] assign[=] name[self].posterior
variable[posterior_model] assign[=] name[self].posterior_model
variable[dims] assign[=] <ast.IfExp object at 0x7da1b1c60430>
variable[coords] assign[=] <ast.IfExp object at 0x7da1b1c60370>
variable[log_likelihood] assign[=] name[self].log_likelihood
if compare[name[log_likelihood] is_not constant[None]] begin[:]
if <ast.BoolOp object at 0x7da1b1b2a9e0> begin[:]
call[name[dims]][constant[log_likelihood]] assign[=] call[name[dims].pop, parameter[name[log_likelihood]]]
variable[data] assign[=] call[name[get_sample_stats_stan3], parameter[name[posterior]]]
return[call[name[dict_to_dataset], parameter[name[data]]]]
|
keyword[def] identifier[sample_stats_to_xarray] ( identifier[self] ):
literal[string]
identifier[posterior] = identifier[self] . identifier[posterior]
identifier[posterior_model] = identifier[self] . identifier[posterior_model]
identifier[dims] = identifier[deepcopy] ( identifier[self] . identifier[dims] ) keyword[if] identifier[self] . identifier[dims] keyword[is] keyword[not] keyword[None] keyword[else] {}
identifier[coords] = identifier[deepcopy] ( identifier[self] . identifier[coords] ) keyword[if] identifier[self] . identifier[coords] keyword[is] keyword[not] keyword[None] keyword[else] {}
identifier[log_likelihood] = identifier[self] . identifier[log_likelihood]
keyword[if] identifier[log_likelihood] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[isinstance] ( identifier[log_likelihood] , identifier[str] ) keyword[and] identifier[log_likelihood] keyword[in] identifier[dims] :
identifier[dims] [ literal[string] ]= identifier[dims] . identifier[pop] ( identifier[log_likelihood] )
identifier[data] = identifier[get_sample_stats_stan3] (
identifier[posterior] , identifier[model] = identifier[posterior_model] , identifier[log_likelihood] = identifier[log_likelihood]
)
keyword[return] identifier[dict_to_dataset] ( identifier[data] , identifier[library] = identifier[self] . identifier[stan] , identifier[coords] = identifier[coords] , identifier[dims] = identifier[dims] )
|
def sample_stats_to_xarray(self):
"""Extract sample_stats from posterior."""
posterior = self.posterior
posterior_model = self.posterior_model # copy dims and coords
dims = deepcopy(self.dims) if self.dims is not None else {}
coords = deepcopy(self.coords) if self.coords is not None else {} # log_likelihood
log_likelihood = self.log_likelihood
if log_likelihood is not None:
if isinstance(log_likelihood, str) and log_likelihood in dims:
dims['log_likelihood'] = dims.pop(log_likelihood) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['log_likelihood']]
data = get_sample_stats_stan3(posterior, model=posterior_model, log_likelihood=log_likelihood)
return dict_to_dataset(data, library=self.stan, coords=coords, dims=dims)
|
async def reset_user_password(self, username):
"""Reset user password.
:param str username: Username
:returns: A :class:`~juju.user.User` instance
"""
user_facade = client.UserManagerFacade.from_connection(
self.connection())
entity = client.Entity(tag.user(username))
results = await user_facade.ResetPassword([entity])
secret_key = results.results[0].secret_key
return await self.get_user(username, secret_key=secret_key)
|
<ast.AsyncFunctionDef object at 0x7da1b0d0eb90>
|
keyword[async] keyword[def] identifier[reset_user_password] ( identifier[self] , identifier[username] ):
literal[string]
identifier[user_facade] = identifier[client] . identifier[UserManagerFacade] . identifier[from_connection] (
identifier[self] . identifier[connection] ())
identifier[entity] = identifier[client] . identifier[Entity] ( identifier[tag] . identifier[user] ( identifier[username] ))
identifier[results] = keyword[await] identifier[user_facade] . identifier[ResetPassword] ([ identifier[entity] ])
identifier[secret_key] = identifier[results] . identifier[results] [ literal[int] ]. identifier[secret_key]
keyword[return] keyword[await] identifier[self] . identifier[get_user] ( identifier[username] , identifier[secret_key] = identifier[secret_key] )
|
async def reset_user_password(self, username):
"""Reset user password.
:param str username: Username
:returns: A :class:`~juju.user.User` instance
"""
user_facade = client.UserManagerFacade.from_connection(self.connection())
entity = client.Entity(tag.user(username))
results = await user_facade.ResetPassword([entity])
secret_key = results.results[0].secret_key
return await self.get_user(username, secret_key=secret_key)
|
def _mse_converged(self):
"""Check convergence based on mean squared difference between
prior and posterior
Returns
-------
converged : boolean
Whether the parameter estimation converged.
mse : float
Mean squared error between prior and posterior.
"""
prior = self.global_prior_[0:self.prior_size]
posterior = self.global_posterior_[0:self.prior_size]
mse = mean_squared_error(prior, posterior,
multioutput='uniform_average')
if mse > self.threshold:
return False, mse
else:
return True, mse
|
def function[_mse_converged, parameter[self]]:
constant[Check convergence based on mean squared difference between
prior and posterior
Returns
-------
converged : boolean
Whether the parameter estimation converged.
mse : float
Mean squared error between prior and posterior.
]
variable[prior] assign[=] call[name[self].global_prior_][<ast.Slice object at 0x7da1b0790670>]
variable[posterior] assign[=] call[name[self].global_posterior_][<ast.Slice object at 0x7da1b0791ed0>]
variable[mse] assign[=] call[name[mean_squared_error], parameter[name[prior], name[posterior]]]
if compare[name[mse] greater[>] name[self].threshold] begin[:]
return[tuple[[<ast.Constant object at 0x7da1b07923b0>, <ast.Name object at 0x7da1b07920b0>]]]
|
keyword[def] identifier[_mse_converged] ( identifier[self] ):
literal[string]
identifier[prior] = identifier[self] . identifier[global_prior_] [ literal[int] : identifier[self] . identifier[prior_size] ]
identifier[posterior] = identifier[self] . identifier[global_posterior_] [ literal[int] : identifier[self] . identifier[prior_size] ]
identifier[mse] = identifier[mean_squared_error] ( identifier[prior] , identifier[posterior] ,
identifier[multioutput] = literal[string] )
keyword[if] identifier[mse] > identifier[self] . identifier[threshold] :
keyword[return] keyword[False] , identifier[mse]
keyword[else] :
keyword[return] keyword[True] , identifier[mse]
|
def _mse_converged(self):
"""Check convergence based on mean squared difference between
prior and posterior
Returns
-------
converged : boolean
Whether the parameter estimation converged.
mse : float
Mean squared error between prior and posterior.
"""
prior = self.global_prior_[0:self.prior_size]
posterior = self.global_posterior_[0:self.prior_size]
mse = mean_squared_error(prior, posterior, multioutput='uniform_average')
if mse > self.threshold:
return (False, mse) # depends on [control=['if'], data=['mse']]
else:
return (True, mse)
|
def _run(cmd,
cwd=None,
stdin=None,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
output_encoding=None,
output_loglevel='debug',
log_callback=None,
runas=None,
group=None,
shell=DEFAULT_SHELL,
python_shell=False,
env=None,
clean_env=False,
prepend_path=None,
rstrip=True,
template=None,
umask=None,
timeout=None,
with_communicate=True,
reset_system_locale=True,
ignore_retcode=False,
saltenv='base',
pillarenv=None,
pillar_override=None,
use_vt=False,
password=None,
bg=False,
encoded_cmd=False,
success_retcodes=None,
success_stdout=None,
success_stderr=None,
**kwargs):
'''
Do the DRY thing and only call subprocess.Popen() once
'''
if 'pillar' in kwargs and not pillar_override:
pillar_override = kwargs['pillar']
if output_loglevel != 'quiet' and _is_valid_shell(shell) is False:
log.warning(
'Attempt to run a shell command with what may be an invalid shell! '
'Check to ensure that the shell <%s> is valid for this user.',
shell
)
output_loglevel = _check_loglevel(output_loglevel)
log_callback = _check_cb(log_callback)
use_sudo = False
if runas is None and '__context__' in globals():
runas = __context__.get('runas')
if password is None and '__context__' in globals():
password = __context__.get('runas_password')
# Set the default working directory to the home directory of the user
# salt-minion is running as. Defaults to home directory of user under which
# the minion is running.
if not cwd:
cwd = os.path.expanduser('~{0}'.format('' if not runas else runas))
# make sure we can access the cwd
# when run from sudo or another environment where the euid is
# changed ~ will expand to the home of the original uid and
# the euid might not have access to it. See issue #1844
if not os.access(cwd, os.R_OK):
cwd = '/'
if salt.utils.platform.is_windows():
cwd = os.path.abspath(os.sep)
else:
# Handle edge cases where numeric/other input is entered, and would be
# yaml-ified into non-string types
cwd = six.text_type(cwd)
if bg:
ignore_retcode = True
use_vt = False
if not salt.utils.platform.is_windows():
if not os.path.isfile(shell) or not os.access(shell, os.X_OK):
msg = 'The shell {0} is not available'.format(shell)
raise CommandExecutionError(msg)
if salt.utils.platform.is_windows() and use_vt: # Memozation so not much overhead
raise CommandExecutionError('VT not available on windows')
if shell.lower().strip() == 'powershell':
# Strip whitespace
if isinstance(cmd, six.string_types):
cmd = cmd.strip()
# If we were called by script(), then fakeout the Windows
# shell to run a Powershell script.
# Else just run a Powershell command.
stack = traceback.extract_stack(limit=2)
# extract_stack() returns a list of tuples.
# The last item in the list [-1] is the current method.
# The third item[2] in each tuple is the name of that method.
if stack[-2][2] == 'script':
cmd = 'Powershell -NonInteractive -NoProfile -ExecutionPolicy Bypass -File ' + cmd
elif encoded_cmd:
cmd = 'Powershell -NonInteractive -EncodedCommand {0}'.format(cmd)
else:
cmd = 'Powershell -NonInteractive -NoProfile "{0}"'.format(cmd.replace('"', '\\"'))
# munge the cmd and cwd through the template
(cmd, cwd) = _render_cmd(cmd, cwd, template, saltenv, pillarenv, pillar_override)
ret = {}
# If the pub jid is here then this is a remote ex or salt call command and needs to be
# checked if blacklisted
if '__pub_jid' in kwargs:
if not _check_avail(cmd):
raise CommandExecutionError(
'The shell command "{0}" is not permitted'.format(cmd)
)
env = _parse_env(env)
for bad_env_key in (x for x, y in six.iteritems(env) if y is None):
log.error('Environment variable \'%s\' passed without a value. '
'Setting value to an empty string', bad_env_key)
env[bad_env_key] = ''
def _get_stripped(cmd):
# Return stripped command string copies to improve logging.
if isinstance(cmd, list):
return [x.strip() if isinstance(x, six.string_types) else x for x in cmd]
elif isinstance(cmd, six.string_types):
return cmd.strip()
else:
return cmd
if output_loglevel is not None:
# Always log the shell commands at INFO unless quiet logging is
# requested. The command output is what will be controlled by the
# 'loglevel' parameter.
msg = (
'Executing command {0}{1}{0} {2}{3}in directory \'{4}\'{5}'.format(
'\'' if not isinstance(cmd, list) else '',
_get_stripped(cmd),
'as user \'{0}\' '.format(runas) if runas else '',
'in group \'{0}\' '.format(group) if group else '',
cwd,
'. Executing command in the background, no output will be '
'logged.' if bg else ''
)
)
log.info(log_callback(msg))
if runas and salt.utils.platform.is_windows():
if not HAS_WIN_RUNAS:
msg = 'missing salt/utils/win_runas.py'
raise CommandExecutionError(msg)
if isinstance(cmd, (list, tuple)):
cmd = ' '.join(cmd)
return win_runas(cmd, runas, password, cwd)
if runas and salt.utils.platform.is_darwin():
# we need to insert the user simulation into the command itself and not
# just run it from the environment on macOS as that
# method doesn't work properly when run as root for certain commands.
if isinstance(cmd, (list, tuple)):
cmd = ' '.join(map(_cmd_quote, cmd))
cmd = 'su -l {0} -c "cd {1}; {2}"'.format(runas, cwd, cmd)
# set runas to None, because if you try to run `su -l` as well as
# simulate the environment macOS will prompt for the password of the
# user and will cause salt to hang.
runas = None
if runas:
# Save the original command before munging it
try:
pwd.getpwnam(runas)
except KeyError:
raise CommandExecutionError(
'User \'{0}\' is not available'.format(runas)
)
if group:
if salt.utils.platform.is_windows():
msg = 'group is not currently available on Windows'
raise SaltInvocationError(msg)
if not which_bin(['sudo']):
msg = 'group argument requires sudo but not found'
raise CommandExecutionError(msg)
try:
grp.getgrnam(group)
except KeyError:
raise CommandExecutionError(
'Group \'{0}\' is not available'.format(runas)
)
else:
use_sudo = True
if runas or group:
try:
# Getting the environment for the runas user
# Use markers to thwart any stdout noise
# There must be a better way to do this.
import uuid
marker = '<<<' + str(uuid.uuid4()) + '>>>'
marker_b = marker.encode(__salt_system_encoding__)
py_code = (
'import sys, os, itertools; '
'sys.stdout.write(\"' + marker + '\"); '
'sys.stdout.write(\"\\0\".join(itertools.chain(*os.environ.items()))); '
'sys.stdout.write(\"' + marker + '\");'
)
if use_sudo or __grains__['os'] in ['MacOS', 'Darwin']:
env_cmd = ['sudo']
# runas is optional if use_sudo is set.
if runas:
env_cmd.extend(['-u', runas])
if group:
env_cmd.extend(['-g', group])
if shell != DEFAULT_SHELL:
env_cmd.extend(['-s', '--', shell, '-c'])
else:
env_cmd.extend(['-i', '--'])
env_cmd.extend([sys.executable])
elif __grains__['os'] in ['FreeBSD']:
env_cmd = ('su', '-', runas, '-c',
"{0} -c {1}".format(shell, sys.executable))
elif __grains__['os_family'] in ['Solaris']:
env_cmd = ('su', '-', runas, '-c', sys.executable)
elif __grains__['os_family'] in ['AIX']:
env_cmd = ('su', '-', runas, '-c', sys.executable)
else:
env_cmd = ('su', '-s', shell, '-', runas, '-c', sys.executable)
msg = 'env command: {0}'.format(env_cmd)
log.debug(log_callback(msg))
env_bytes, env_encoded_err = subprocess.Popen(
env_cmd,
stderr=subprocess.PIPE,
stdout=subprocess.PIPE,
stdin=subprocess.PIPE
).communicate(salt.utils.stringutils.to_bytes(py_code))
marker_count = env_bytes.count(marker_b)
if marker_count == 0:
# Possibly PAM prevented the login
log.error(
'Environment could not be retrieved for user \'%s\': '
'stderr=%r stdout=%r',
runas, env_encoded_err, env_bytes
)
# Ensure that we get an empty env_runas dict below since we
# were not able to get the environment.
env_bytes = b''
elif marker_count != 2:
raise CommandExecutionError(
'Environment could not be retrieved for user \'{0}\'',
info={'stderr': repr(env_encoded_err),
'stdout': repr(env_bytes)}
)
else:
# Strip the marker
env_bytes = env_bytes.split(marker_b)[1]
if six.PY2:
import itertools
env_runas = dict(itertools.izip(*[iter(env_bytes.split(b'\0'))]*2))
elif six.PY3:
env_runas = dict(list(zip(*[iter(env_bytes.split(b'\0'))]*2)))
env_runas = dict(
(salt.utils.stringutils.to_str(k),
salt.utils.stringutils.to_str(v))
for k, v in six.iteritems(env_runas)
)
env_runas.update(env)
# Fix platforms like Solaris that don't set a USER env var in the
# user's default environment as obtained above.
if env_runas.get('USER') != runas:
env_runas['USER'] = runas
# Fix some corner cases where shelling out to get the user's
# environment returns the wrong home directory.
runas_home = os.path.expanduser('~{0}'.format(runas))
if env_runas.get('HOME') != runas_home:
env_runas['HOME'] = runas_home
env = env_runas
except ValueError as exc:
log.exception('Error raised retrieving environment for user %s', runas)
raise CommandExecutionError(
'Environment could not be retrieved for user \'{0}\': {1}'.format(
runas, exc
)
)
if reset_system_locale is True:
if not salt.utils.platform.is_windows():
# Default to C!
# Salt only knows how to parse English words
# Don't override if the user has passed LC_ALL
env.setdefault('LC_CTYPE', 'C')
env.setdefault('LC_NUMERIC', 'C')
env.setdefault('LC_TIME', 'C')
env.setdefault('LC_COLLATE', 'C')
env.setdefault('LC_MONETARY', 'C')
env.setdefault('LC_MESSAGES', 'C')
env.setdefault('LC_PAPER', 'C')
env.setdefault('LC_NAME', 'C')
env.setdefault('LC_ADDRESS', 'C')
env.setdefault('LC_TELEPHONE', 'C')
env.setdefault('LC_MEASUREMENT', 'C')
env.setdefault('LC_IDENTIFICATION', 'C')
env.setdefault('LANGUAGE', 'C')
else:
# On Windows set the codepage to US English.
if python_shell:
cmd = 'chcp 437 > nul & ' + cmd
if clean_env:
run_env = env
else:
run_env = os.environ.copy()
run_env.update(env)
if prepend_path:
run_env['PATH'] = ':'.join((prepend_path, run_env['PATH']))
if python_shell is None:
python_shell = False
new_kwargs = {'cwd': cwd,
'shell': python_shell,
'env': run_env if six.PY3 else salt.utils.data.encode(run_env),
'stdin': six.text_type(stdin) if stdin is not None else stdin,
'stdout': stdout,
'stderr': stderr,
'with_communicate': with_communicate,
'timeout': timeout,
'bg': bg,
}
if 'stdin_raw_newlines' in kwargs:
new_kwargs['stdin_raw_newlines'] = kwargs['stdin_raw_newlines']
if umask is not None:
_umask = six.text_type(umask).lstrip('0')
if _umask == '':
msg = 'Zero umask is not allowed.'
raise CommandExecutionError(msg)
try:
_umask = int(_umask, 8)
except ValueError:
raise CommandExecutionError("Invalid umask: '{0}'".format(umask))
else:
_umask = None
if runas or group or umask:
new_kwargs['preexec_fn'] = functools.partial(
salt.utils.user.chugid_and_umask,
runas,
_umask,
group)
if not salt.utils.platform.is_windows():
# close_fds is not supported on Windows platforms if you redirect
# stdin/stdout/stderr
if new_kwargs['shell'] is True:
new_kwargs['executable'] = shell
new_kwargs['close_fds'] = True
if not os.path.isabs(cwd) or not os.path.isdir(cwd):
raise CommandExecutionError(
'Specified cwd \'{0}\' either not absolute or does not exist'
.format(cwd)
)
if python_shell is not True \
and not salt.utils.platform.is_windows() \
and not isinstance(cmd, list):
cmd = salt.utils.args.shlex_split(cmd)
if success_retcodes is None:
success_retcodes = [0]
else:
try:
success_retcodes = [int(i) for i in
salt.utils.args.split_input(
success_retcodes
)]
except ValueError:
raise SaltInvocationError(
'success_retcodes must be a list of integers'
)
if success_stdout is None:
success_stdout = []
else:
try:
success_stdout = [i for i in
salt.utils.args.split_input(
success_stdout
)]
except ValueError:
raise SaltInvocationError(
'success_stdout must be a list of integers'
)
if success_stderr is None:
success_stderr = []
else:
try:
success_stderr = [i for i in
salt.utils.args.split_input(
success_stderr
)]
except ValueError:
raise SaltInvocationError(
'success_stderr must be a list of integers'
)
if not use_vt:
# This is where the magic happens
try:
proc = salt.utils.timed_subprocess.TimedProc(cmd, **new_kwargs)
except (OSError, IOError) as exc:
msg = (
'Unable to run command \'{0}\' with the context \'{1}\', '
'reason: {2}'.format(
cmd if output_loglevel is not None else 'REDACTED',
new_kwargs,
exc
)
)
raise CommandExecutionError(msg)
try:
proc.run()
except TimedProcTimeoutError as exc:
ret['stdout'] = six.text_type(exc)
ret['stderr'] = ''
ret['retcode'] = None
ret['pid'] = proc.process.pid
# ok return code for timeouts?
ret['retcode'] = 1
return ret
if output_loglevel != 'quiet' and output_encoding is not None:
log.debug('Decoding output from command %s using %s encoding',
cmd, output_encoding)
try:
out = salt.utils.stringutils.to_unicode(
proc.stdout,
encoding=output_encoding)
except TypeError:
# stdout is None
out = ''
except UnicodeDecodeError:
out = salt.utils.stringutils.to_unicode(
proc.stdout,
encoding=output_encoding,
errors='replace')
if output_loglevel != 'quiet':
log.error(
'Failed to decode stdout from command %s, non-decodable '
'characters have been replaced', cmd
)
try:
err = salt.utils.stringutils.to_unicode(
proc.stderr,
encoding=output_encoding)
except TypeError:
# stderr is None
err = ''
except UnicodeDecodeError:
err = salt.utils.stringutils.to_unicode(
proc.stderr,
encoding=output_encoding,
errors='replace')
if output_loglevel != 'quiet':
log.error(
'Failed to decode stderr from command %s, non-decodable '
'characters have been replaced', cmd
)
if rstrip:
if out is not None:
out = out.rstrip()
if err is not None:
err = err.rstrip()
ret['pid'] = proc.process.pid
ret['retcode'] = proc.process.returncode
if ret['retcode'] in success_retcodes:
ret['retcode'] = 0
ret['stdout'] = out
ret['stderr'] = err
if ret['stdout'] in success_stdout or ret['stderr'] in success_stderr:
ret['retcode'] = 0
else:
formatted_timeout = ''
if timeout:
formatted_timeout = ' (timeout: {0}s)'.format(timeout)
if output_loglevel is not None:
msg = 'Running {0} in VT{1}'.format(cmd, formatted_timeout)
log.debug(log_callback(msg))
stdout, stderr = '', ''
now = time.time()
if timeout:
will_timeout = now + timeout
else:
will_timeout = -1
try:
proc = salt.utils.vt.Terminal(
cmd,
shell=True,
log_stdout=True,
log_stderr=True,
cwd=cwd,
preexec_fn=new_kwargs.get('preexec_fn', None),
env=run_env,
log_stdin_level=output_loglevel,
log_stdout_level=output_loglevel,
log_stderr_level=output_loglevel,
stream_stdout=True,
stream_stderr=True
)
ret['pid'] = proc.pid
while proc.has_unread_data:
try:
try:
time.sleep(0.5)
try:
cstdout, cstderr = proc.recv()
except IOError:
cstdout, cstderr = '', ''
if cstdout:
stdout += cstdout
else:
cstdout = ''
if cstderr:
stderr += cstderr
else:
cstderr = ''
if timeout and (time.time() > will_timeout):
ret['stderr'] = (
'SALT: Timeout after {0}s\n{1}').format(
timeout, stderr)
ret['retcode'] = None
break
except KeyboardInterrupt:
ret['stderr'] = 'SALT: User break\n{0}'.format(stderr)
ret['retcode'] = 1
break
except salt.utils.vt.TerminalException as exc:
log.error('VT: %s', exc,
exc_info_on_loglevel=logging.DEBUG)
ret = {'retcode': 1, 'pid': '2'}
break
# only set stdout on success as we already mangled in other
# cases
ret['stdout'] = stdout
if not proc.isalive():
# Process terminated, i.e., not canceled by the user or by
# the timeout
ret['stderr'] = stderr
ret['retcode'] = proc.exitstatus
if ret['retcode'] in success_retcodes:
ret['retcode'] = 0
if ret['stdout'] in success_stdout or ret['stderr'] in success_stderr:
ret['retcode'] = 0
ret['pid'] = proc.pid
finally:
proc.close(terminate=True, kill=True)
try:
if ignore_retcode:
__context__['retcode'] = 0
else:
__context__['retcode'] = ret['retcode']
except NameError:
# Ignore the context error during grain generation
pass
# Log the output
if output_loglevel is not None:
if not ignore_retcode and ret['retcode'] != 0:
if output_loglevel < LOG_LEVELS['error']:
output_loglevel = LOG_LEVELS['error']
msg = (
'Command \'{0}\' failed with return code: {1}'.format(
cmd,
ret['retcode']
)
)
log.error(log_callback(msg))
if ret['stdout']:
log.log(output_loglevel, 'stdout: %s', log_callback(ret['stdout']))
if ret['stderr']:
log.log(output_loglevel, 'stderr: %s', log_callback(ret['stderr']))
if ret['retcode']:
log.log(output_loglevel, 'retcode: %s', ret['retcode'])
return ret
|
def function[_run, parameter[cmd, cwd, stdin, stdout, stderr, output_encoding, output_loglevel, log_callback, runas, group, shell, python_shell, env, clean_env, prepend_path, rstrip, template, umask, timeout, with_communicate, reset_system_locale, ignore_retcode, saltenv, pillarenv, pillar_override, use_vt, password, bg, encoded_cmd, success_retcodes, success_stdout, success_stderr]]:
constant[
Do the DRY thing and only call subprocess.Popen() once
]
if <ast.BoolOp object at 0x7da2041d9420> begin[:]
variable[pillar_override] assign[=] call[name[kwargs]][constant[pillar]]
if <ast.BoolOp object at 0x7da2041d8a60> begin[:]
call[name[log].warning, parameter[constant[Attempt to run a shell command with what may be an invalid shell! Check to ensure that the shell <%s> is valid for this user.], name[shell]]]
variable[output_loglevel] assign[=] call[name[_check_loglevel], parameter[name[output_loglevel]]]
variable[log_callback] assign[=] call[name[_check_cb], parameter[name[log_callback]]]
variable[use_sudo] assign[=] constant[False]
if <ast.BoolOp object at 0x7da2041da050> begin[:]
variable[runas] assign[=] call[name[__context__].get, parameter[constant[runas]]]
if <ast.BoolOp object at 0x7da2041d98a0> begin[:]
variable[password] assign[=] call[name[__context__].get, parameter[constant[runas_password]]]
if <ast.UnaryOp object at 0x7da2041d9840> begin[:]
variable[cwd] assign[=] call[name[os].path.expanduser, parameter[call[constant[~{0}].format, parameter[<ast.IfExp object at 0x7da2041dbdc0>]]]]
if <ast.UnaryOp object at 0x7da2041d9060> begin[:]
variable[cwd] assign[=] constant[/]
if call[name[salt].utils.platform.is_windows, parameter[]] begin[:]
variable[cwd] assign[=] call[name[os].path.abspath, parameter[name[os].sep]]
if name[bg] begin[:]
variable[ignore_retcode] assign[=] constant[True]
variable[use_vt] assign[=] constant[False]
if <ast.UnaryOp object at 0x7da2041da890> begin[:]
if <ast.BoolOp object at 0x7da2041d9300> begin[:]
variable[msg] assign[=] call[constant[The shell {0} is not available].format, parameter[name[shell]]]
<ast.Raise object at 0x7da2041dbcd0>
if <ast.BoolOp object at 0x7da2041dae00> begin[:]
<ast.Raise object at 0x7da2041da650>
if compare[call[call[name[shell].lower, parameter[]].strip, parameter[]] equal[==] constant[powershell]] begin[:]
if call[name[isinstance], parameter[name[cmd], name[six].string_types]] begin[:]
variable[cmd] assign[=] call[name[cmd].strip, parameter[]]
variable[stack] assign[=] call[name[traceback].extract_stack, parameter[]]
if compare[call[call[name[stack]][<ast.UnaryOp object at 0x7da2041da620>]][constant[2]] equal[==] constant[script]] begin[:]
variable[cmd] assign[=] binary_operation[constant[Powershell -NonInteractive -NoProfile -ExecutionPolicy Bypass -File ] + name[cmd]]
<ast.Tuple object at 0x7da2041d9330> assign[=] call[name[_render_cmd], parameter[name[cmd], name[cwd], name[template], name[saltenv], name[pillarenv], name[pillar_override]]]
variable[ret] assign[=] dictionary[[], []]
if compare[constant[__pub_jid] in name[kwargs]] begin[:]
if <ast.UnaryOp object at 0x7da2041daa70> begin[:]
<ast.Raise object at 0x7da2041d90f0>
variable[env] assign[=] call[name[_parse_env], parameter[name[env]]]
for taget[name[bad_env_key]] in starred[<ast.GeneratorExp object at 0x7da2041daef0>] begin[:]
call[name[log].error, parameter[constant[Environment variable '%s' passed without a value. Setting value to an empty string], name[bad_env_key]]]
call[name[env]][name[bad_env_key]] assign[=] constant[]
def function[_get_stripped, parameter[cmd]]:
if call[name[isinstance], parameter[name[cmd], name[list]]] begin[:]
return[<ast.ListComp object at 0x7da20e9b04c0>]
if compare[name[output_loglevel] is_not constant[None]] begin[:]
variable[msg] assign[=] call[constant[Executing command {0}{1}{0} {2}{3}in directory '{4}'{5}].format, parameter[<ast.IfExp object at 0x7da20e9b2110>, call[name[_get_stripped], parameter[name[cmd]]], <ast.IfExp object at 0x7da20e9b3160>, <ast.IfExp object at 0x7da20e9b05e0>, name[cwd], <ast.IfExp object at 0x7da20e9b08b0>]]
call[name[log].info, parameter[call[name[log_callback], parameter[name[msg]]]]]
if <ast.BoolOp object at 0x7da20e9b3fd0> begin[:]
if <ast.UnaryOp object at 0x7da20e9b1b10> begin[:]
variable[msg] assign[=] constant[missing salt/utils/win_runas.py]
<ast.Raise object at 0x7da20e9b31c0>
if call[name[isinstance], parameter[name[cmd], tuple[[<ast.Name object at 0x7da20e9b2fe0>, <ast.Name object at 0x7da20e9b3130>]]]] begin[:]
variable[cmd] assign[=] call[constant[ ].join, parameter[name[cmd]]]
return[call[name[win_runas], parameter[name[cmd], name[runas], name[password], name[cwd]]]]
if <ast.BoolOp object at 0x7da20e9b3550> begin[:]
if call[name[isinstance], parameter[name[cmd], tuple[[<ast.Name object at 0x7da20e9b23e0>, <ast.Name object at 0x7da20e9b29e0>]]]] begin[:]
variable[cmd] assign[=] call[constant[ ].join, parameter[call[name[map], parameter[name[_cmd_quote], name[cmd]]]]]
variable[cmd] assign[=] call[constant[su -l {0} -c "cd {1}; {2}"].format, parameter[name[runas], name[cwd], name[cmd]]]
variable[runas] assign[=] constant[None]
if name[runas] begin[:]
<ast.Try object at 0x7da20e9b3e80>
if name[group] begin[:]
if call[name[salt].utils.platform.is_windows, parameter[]] begin[:]
variable[msg] assign[=] constant[group is not currently available on Windows]
<ast.Raise object at 0x7da20e9b37c0>
if <ast.UnaryOp object at 0x7da20e9b2c50> begin[:]
variable[msg] assign[=] constant[group argument requires sudo but not found]
<ast.Raise object at 0x7da20e9b1270>
<ast.Try object at 0x7da20e9b2fb0>
if <ast.BoolOp object at 0x7da20e9b0a00> begin[:]
<ast.Try object at 0x7da20e9b1e70>
if compare[name[reset_system_locale] is constant[True]] begin[:]
if <ast.UnaryOp object at 0x7da20c7c8e20> begin[:]
call[name[env].setdefault, parameter[constant[LC_CTYPE], constant[C]]]
call[name[env].setdefault, parameter[constant[LC_NUMERIC], constant[C]]]
call[name[env].setdefault, parameter[constant[LC_TIME], constant[C]]]
call[name[env].setdefault, parameter[constant[LC_COLLATE], constant[C]]]
call[name[env].setdefault, parameter[constant[LC_MONETARY], constant[C]]]
call[name[env].setdefault, parameter[constant[LC_MESSAGES], constant[C]]]
call[name[env].setdefault, parameter[constant[LC_PAPER], constant[C]]]
call[name[env].setdefault, parameter[constant[LC_NAME], constant[C]]]
call[name[env].setdefault, parameter[constant[LC_ADDRESS], constant[C]]]
call[name[env].setdefault, parameter[constant[LC_TELEPHONE], constant[C]]]
call[name[env].setdefault, parameter[constant[LC_MEASUREMENT], constant[C]]]
call[name[env].setdefault, parameter[constant[LC_IDENTIFICATION], constant[C]]]
call[name[env].setdefault, parameter[constant[LANGUAGE], constant[C]]]
if name[clean_env] begin[:]
variable[run_env] assign[=] name[env]
if name[prepend_path] begin[:]
call[name[run_env]][constant[PATH]] assign[=] call[constant[:].join, parameter[tuple[[<ast.Name object at 0x7da20c7c87f0>, <ast.Subscript object at 0x7da20c7c8c10>]]]]
if compare[name[python_shell] is constant[None]] begin[:]
variable[python_shell] assign[=] constant[False]
variable[new_kwargs] assign[=] dictionary[[<ast.Constant object at 0x7da20c7cbfa0>, <ast.Constant object at 0x7da20c7c8790>, <ast.Constant object at 0x7da20c7cb130>, <ast.Constant object at 0x7da20c7cb400>, <ast.Constant object at 0x7da20c7c9f00>, <ast.Constant object at 0x7da20c7c8d60>, <ast.Constant object at 0x7da20c7c8a00>, <ast.Constant object at 0x7da20c7cb310>, <ast.Constant object at 0x7da20c7c9210>], [<ast.Name object at 0x7da20c7c8c70>, <ast.Name object at 0x7da20c7cb070>, <ast.IfExp object at 0x7da20c7ca6e0>, <ast.IfExp object at 0x7da20c7ca470>, <ast.Name object at 0x7da20c7cbd30>, <ast.Name object at 0x7da20c7c9d50>, <ast.Name object at 0x7da20c7c89d0>, <ast.Name object at 0x7da20c7ca2f0>, <ast.Name object at 0x7da20c7c9660>]]
if compare[constant[stdin_raw_newlines] in name[kwargs]] begin[:]
call[name[new_kwargs]][constant[stdin_raw_newlines]] assign[=] call[name[kwargs]][constant[stdin_raw_newlines]]
if compare[name[umask] is_not constant[None]] begin[:]
variable[_umask] assign[=] call[call[name[six].text_type, parameter[name[umask]]].lstrip, parameter[constant[0]]]
if compare[name[_umask] equal[==] constant[]] begin[:]
variable[msg] assign[=] constant[Zero umask is not allowed.]
<ast.Raise object at 0x7da20c7c80d0>
<ast.Try object at 0x7da20c7cbb80>
if <ast.BoolOp object at 0x7da20c7c8400> begin[:]
call[name[new_kwargs]][constant[preexec_fn]] assign[=] call[name[functools].partial, parameter[name[salt].utils.user.chugid_and_umask, name[runas], name[_umask], name[group]]]
if <ast.UnaryOp object at 0x7da20c7ca350> begin[:]
if compare[call[name[new_kwargs]][constant[shell]] is constant[True]] begin[:]
call[name[new_kwargs]][constant[executable]] assign[=] name[shell]
call[name[new_kwargs]][constant[close_fds]] assign[=] constant[True]
if <ast.BoolOp object at 0x7da20c7ca500> begin[:]
<ast.Raise object at 0x7da20c7cbb50>
if <ast.BoolOp object at 0x7da20c7cae60> begin[:]
variable[cmd] assign[=] call[name[salt].utils.args.shlex_split, parameter[name[cmd]]]
if compare[name[success_retcodes] is constant[None]] begin[:]
variable[success_retcodes] assign[=] list[[<ast.Constant object at 0x7da204621ba0>]]
if compare[name[success_stdout] is constant[None]] begin[:]
variable[success_stdout] assign[=] list[[]]
if compare[name[success_stderr] is constant[None]] begin[:]
variable[success_stderr] assign[=] list[[]]
if <ast.UnaryOp object at 0x7da204620160> begin[:]
<ast.Try object at 0x7da204622b90>
<ast.Try object at 0x7da2046223b0>
if <ast.BoolOp object at 0x7da2046239a0> begin[:]
call[name[log].debug, parameter[constant[Decoding output from command %s using %s encoding], name[cmd], name[output_encoding]]]
<ast.Try object at 0x7da204623c70>
<ast.Try object at 0x7da2046226e0>
if name[rstrip] begin[:]
if compare[name[out] is_not constant[None]] begin[:]
variable[out] assign[=] call[name[out].rstrip, parameter[]]
if compare[name[err] is_not constant[None]] begin[:]
variable[err] assign[=] call[name[err].rstrip, parameter[]]
call[name[ret]][constant[pid]] assign[=] name[proc].process.pid
call[name[ret]][constant[retcode]] assign[=] name[proc].process.returncode
if compare[call[name[ret]][constant[retcode]] in name[success_retcodes]] begin[:]
call[name[ret]][constant[retcode]] assign[=] constant[0]
call[name[ret]][constant[stdout]] assign[=] name[out]
call[name[ret]][constant[stderr]] assign[=] name[err]
if <ast.BoolOp object at 0x7da1b23479d0> begin[:]
call[name[ret]][constant[retcode]] assign[=] constant[0]
<ast.Try object at 0x7da1b2346080>
if compare[name[output_loglevel] is_not constant[None]] begin[:]
if <ast.BoolOp object at 0x7da1b2345660> begin[:]
if compare[name[output_loglevel] less[<] call[name[LOG_LEVELS]][constant[error]]] begin[:]
variable[output_loglevel] assign[=] call[name[LOG_LEVELS]][constant[error]]
variable[msg] assign[=] call[constant[Command '{0}' failed with return code: {1}].format, parameter[name[cmd], call[name[ret]][constant[retcode]]]]
call[name[log].error, parameter[call[name[log_callback], parameter[name[msg]]]]]
if call[name[ret]][constant[stdout]] begin[:]
call[name[log].log, parameter[name[output_loglevel], constant[stdout: %s], call[name[log_callback], parameter[call[name[ret]][constant[stdout]]]]]]
if call[name[ret]][constant[stderr]] begin[:]
call[name[log].log, parameter[name[output_loglevel], constant[stderr: %s], call[name[log_callback], parameter[call[name[ret]][constant[stderr]]]]]]
if call[name[ret]][constant[retcode]] begin[:]
call[name[log].log, parameter[name[output_loglevel], constant[retcode: %s], call[name[ret]][constant[retcode]]]]
return[name[ret]]
|
keyword[def] identifier[_run] ( identifier[cmd] ,
identifier[cwd] = keyword[None] ,
identifier[stdin] = keyword[None] ,
identifier[stdout] = identifier[subprocess] . identifier[PIPE] ,
identifier[stderr] = identifier[subprocess] . identifier[PIPE] ,
identifier[output_encoding] = keyword[None] ,
identifier[output_loglevel] = literal[string] ,
identifier[log_callback] = keyword[None] ,
identifier[runas] = keyword[None] ,
identifier[group] = keyword[None] ,
identifier[shell] = identifier[DEFAULT_SHELL] ,
identifier[python_shell] = keyword[False] ,
identifier[env] = keyword[None] ,
identifier[clean_env] = keyword[False] ,
identifier[prepend_path] = keyword[None] ,
identifier[rstrip] = keyword[True] ,
identifier[template] = keyword[None] ,
identifier[umask] = keyword[None] ,
identifier[timeout] = keyword[None] ,
identifier[with_communicate] = keyword[True] ,
identifier[reset_system_locale] = keyword[True] ,
identifier[ignore_retcode] = keyword[False] ,
identifier[saltenv] = literal[string] ,
identifier[pillarenv] = keyword[None] ,
identifier[pillar_override] = keyword[None] ,
identifier[use_vt] = keyword[False] ,
identifier[password] = keyword[None] ,
identifier[bg] = keyword[False] ,
identifier[encoded_cmd] = keyword[False] ,
identifier[success_retcodes] = keyword[None] ,
identifier[success_stdout] = keyword[None] ,
identifier[success_stderr] = keyword[None] ,
** identifier[kwargs] ):
literal[string]
keyword[if] literal[string] keyword[in] identifier[kwargs] keyword[and] keyword[not] identifier[pillar_override] :
identifier[pillar_override] = identifier[kwargs] [ literal[string] ]
keyword[if] identifier[output_loglevel] != literal[string] keyword[and] identifier[_is_valid_shell] ( identifier[shell] ) keyword[is] keyword[False] :
identifier[log] . identifier[warning] (
literal[string]
literal[string] ,
identifier[shell]
)
identifier[output_loglevel] = identifier[_check_loglevel] ( identifier[output_loglevel] )
identifier[log_callback] = identifier[_check_cb] ( identifier[log_callback] )
identifier[use_sudo] = keyword[False]
keyword[if] identifier[runas] keyword[is] keyword[None] keyword[and] literal[string] keyword[in] identifier[globals] ():
identifier[runas] = identifier[__context__] . identifier[get] ( literal[string] )
keyword[if] identifier[password] keyword[is] keyword[None] keyword[and] literal[string] keyword[in] identifier[globals] ():
identifier[password] = identifier[__context__] . identifier[get] ( literal[string] )
keyword[if] keyword[not] identifier[cwd] :
identifier[cwd] = identifier[os] . identifier[path] . identifier[expanduser] ( literal[string] . identifier[format] ( literal[string] keyword[if] keyword[not] identifier[runas] keyword[else] identifier[runas] ))
keyword[if] keyword[not] identifier[os] . identifier[access] ( identifier[cwd] , identifier[os] . identifier[R_OK] ):
identifier[cwd] = literal[string]
keyword[if] identifier[salt] . identifier[utils] . identifier[platform] . identifier[is_windows] ():
identifier[cwd] = identifier[os] . identifier[path] . identifier[abspath] ( identifier[os] . identifier[sep] )
keyword[else] :
identifier[cwd] = identifier[six] . identifier[text_type] ( identifier[cwd] )
keyword[if] identifier[bg] :
identifier[ignore_retcode] = keyword[True]
identifier[use_vt] = keyword[False]
keyword[if] keyword[not] identifier[salt] . identifier[utils] . identifier[platform] . identifier[is_windows] ():
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isfile] ( identifier[shell] ) keyword[or] keyword[not] identifier[os] . identifier[access] ( identifier[shell] , identifier[os] . identifier[X_OK] ):
identifier[msg] = literal[string] . identifier[format] ( identifier[shell] )
keyword[raise] identifier[CommandExecutionError] ( identifier[msg] )
keyword[if] identifier[salt] . identifier[utils] . identifier[platform] . identifier[is_windows] () keyword[and] identifier[use_vt] :
keyword[raise] identifier[CommandExecutionError] ( literal[string] )
keyword[if] identifier[shell] . identifier[lower] (). identifier[strip] ()== literal[string] :
keyword[if] identifier[isinstance] ( identifier[cmd] , identifier[six] . identifier[string_types] ):
identifier[cmd] = identifier[cmd] . identifier[strip] ()
identifier[stack] = identifier[traceback] . identifier[extract_stack] ( identifier[limit] = literal[int] )
keyword[if] identifier[stack] [- literal[int] ][ literal[int] ]== literal[string] :
identifier[cmd] = literal[string] + identifier[cmd]
keyword[elif] identifier[encoded_cmd] :
identifier[cmd] = literal[string] . identifier[format] ( identifier[cmd] )
keyword[else] :
identifier[cmd] = literal[string] . identifier[format] ( identifier[cmd] . identifier[replace] ( literal[string] , literal[string] ))
( identifier[cmd] , identifier[cwd] )= identifier[_render_cmd] ( identifier[cmd] , identifier[cwd] , identifier[template] , identifier[saltenv] , identifier[pillarenv] , identifier[pillar_override] )
identifier[ret] ={}
keyword[if] literal[string] keyword[in] identifier[kwargs] :
keyword[if] keyword[not] identifier[_check_avail] ( identifier[cmd] ):
keyword[raise] identifier[CommandExecutionError] (
literal[string] . identifier[format] ( identifier[cmd] )
)
identifier[env] = identifier[_parse_env] ( identifier[env] )
keyword[for] identifier[bad_env_key] keyword[in] ( identifier[x] keyword[for] identifier[x] , identifier[y] keyword[in] identifier[six] . identifier[iteritems] ( identifier[env] ) keyword[if] identifier[y] keyword[is] keyword[None] ):
identifier[log] . identifier[error] ( literal[string]
literal[string] , identifier[bad_env_key] )
identifier[env] [ identifier[bad_env_key] ]= literal[string]
keyword[def] identifier[_get_stripped] ( identifier[cmd] ):
keyword[if] identifier[isinstance] ( identifier[cmd] , identifier[list] ):
keyword[return] [ identifier[x] . identifier[strip] () keyword[if] identifier[isinstance] ( identifier[x] , identifier[six] . identifier[string_types] ) keyword[else] identifier[x] keyword[for] identifier[x] keyword[in] identifier[cmd] ]
keyword[elif] identifier[isinstance] ( identifier[cmd] , identifier[six] . identifier[string_types] ):
keyword[return] identifier[cmd] . identifier[strip] ()
keyword[else] :
keyword[return] identifier[cmd]
keyword[if] identifier[output_loglevel] keyword[is] keyword[not] keyword[None] :
identifier[msg] =(
literal[string] . identifier[format] (
literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[cmd] , identifier[list] ) keyword[else] literal[string] ,
identifier[_get_stripped] ( identifier[cmd] ),
literal[string] . identifier[format] ( identifier[runas] ) keyword[if] identifier[runas] keyword[else] literal[string] ,
literal[string] . identifier[format] ( identifier[group] ) keyword[if] identifier[group] keyword[else] literal[string] ,
identifier[cwd] ,
literal[string]
literal[string] keyword[if] identifier[bg] keyword[else] literal[string]
)
)
identifier[log] . identifier[info] ( identifier[log_callback] ( identifier[msg] ))
keyword[if] identifier[runas] keyword[and] identifier[salt] . identifier[utils] . identifier[platform] . identifier[is_windows] ():
keyword[if] keyword[not] identifier[HAS_WIN_RUNAS] :
identifier[msg] = literal[string]
keyword[raise] identifier[CommandExecutionError] ( identifier[msg] )
keyword[if] identifier[isinstance] ( identifier[cmd] ,( identifier[list] , identifier[tuple] )):
identifier[cmd] = literal[string] . identifier[join] ( identifier[cmd] )
keyword[return] identifier[win_runas] ( identifier[cmd] , identifier[runas] , identifier[password] , identifier[cwd] )
keyword[if] identifier[runas] keyword[and] identifier[salt] . identifier[utils] . identifier[platform] . identifier[is_darwin] ():
keyword[if] identifier[isinstance] ( identifier[cmd] ,( identifier[list] , identifier[tuple] )):
identifier[cmd] = literal[string] . identifier[join] ( identifier[map] ( identifier[_cmd_quote] , identifier[cmd] ))
identifier[cmd] = literal[string] . identifier[format] ( identifier[runas] , identifier[cwd] , identifier[cmd] )
identifier[runas] = keyword[None]
keyword[if] identifier[runas] :
keyword[try] :
identifier[pwd] . identifier[getpwnam] ( identifier[runas] )
keyword[except] identifier[KeyError] :
keyword[raise] identifier[CommandExecutionError] (
literal[string] . identifier[format] ( identifier[runas] )
)
keyword[if] identifier[group] :
keyword[if] identifier[salt] . identifier[utils] . identifier[platform] . identifier[is_windows] ():
identifier[msg] = literal[string]
keyword[raise] identifier[SaltInvocationError] ( identifier[msg] )
keyword[if] keyword[not] identifier[which_bin] ([ literal[string] ]):
identifier[msg] = literal[string]
keyword[raise] identifier[CommandExecutionError] ( identifier[msg] )
keyword[try] :
identifier[grp] . identifier[getgrnam] ( identifier[group] )
keyword[except] identifier[KeyError] :
keyword[raise] identifier[CommandExecutionError] (
literal[string] . identifier[format] ( identifier[runas] )
)
keyword[else] :
identifier[use_sudo] = keyword[True]
keyword[if] identifier[runas] keyword[or] identifier[group] :
keyword[try] :
keyword[import] identifier[uuid]
identifier[marker] = literal[string] + identifier[str] ( identifier[uuid] . identifier[uuid4] ())+ literal[string]
identifier[marker_b] = identifier[marker] . identifier[encode] ( identifier[__salt_system_encoding__] )
identifier[py_code] =(
literal[string]
literal[string] + identifier[marker] + literal[string]
literal[string]
literal[string] + identifier[marker] + literal[string]
)
keyword[if] identifier[use_sudo] keyword[or] identifier[__grains__] [ literal[string] ] keyword[in] [ literal[string] , literal[string] ]:
identifier[env_cmd] =[ literal[string] ]
keyword[if] identifier[runas] :
identifier[env_cmd] . identifier[extend] ([ literal[string] , identifier[runas] ])
keyword[if] identifier[group] :
identifier[env_cmd] . identifier[extend] ([ literal[string] , identifier[group] ])
keyword[if] identifier[shell] != identifier[DEFAULT_SHELL] :
identifier[env_cmd] . identifier[extend] ([ literal[string] , literal[string] , identifier[shell] , literal[string] ])
keyword[else] :
identifier[env_cmd] . identifier[extend] ([ literal[string] , literal[string] ])
identifier[env_cmd] . identifier[extend] ([ identifier[sys] . identifier[executable] ])
keyword[elif] identifier[__grains__] [ literal[string] ] keyword[in] [ literal[string] ]:
identifier[env_cmd] =( literal[string] , literal[string] , identifier[runas] , literal[string] ,
literal[string] . identifier[format] ( identifier[shell] , identifier[sys] . identifier[executable] ))
keyword[elif] identifier[__grains__] [ literal[string] ] keyword[in] [ literal[string] ]:
identifier[env_cmd] =( literal[string] , literal[string] , identifier[runas] , literal[string] , identifier[sys] . identifier[executable] )
keyword[elif] identifier[__grains__] [ literal[string] ] keyword[in] [ literal[string] ]:
identifier[env_cmd] =( literal[string] , literal[string] , identifier[runas] , literal[string] , identifier[sys] . identifier[executable] )
keyword[else] :
identifier[env_cmd] =( literal[string] , literal[string] , identifier[shell] , literal[string] , identifier[runas] , literal[string] , identifier[sys] . identifier[executable] )
identifier[msg] = literal[string] . identifier[format] ( identifier[env_cmd] )
identifier[log] . identifier[debug] ( identifier[log_callback] ( identifier[msg] ))
identifier[env_bytes] , identifier[env_encoded_err] = identifier[subprocess] . identifier[Popen] (
identifier[env_cmd] ,
identifier[stderr] = identifier[subprocess] . identifier[PIPE] ,
identifier[stdout] = identifier[subprocess] . identifier[PIPE] ,
identifier[stdin] = identifier[subprocess] . identifier[PIPE]
). identifier[communicate] ( identifier[salt] . identifier[utils] . identifier[stringutils] . identifier[to_bytes] ( identifier[py_code] ))
identifier[marker_count] = identifier[env_bytes] . identifier[count] ( identifier[marker_b] )
keyword[if] identifier[marker_count] == literal[int] :
identifier[log] . identifier[error] (
literal[string]
literal[string] ,
identifier[runas] , identifier[env_encoded_err] , identifier[env_bytes]
)
identifier[env_bytes] = literal[string]
keyword[elif] identifier[marker_count] != literal[int] :
keyword[raise] identifier[CommandExecutionError] (
literal[string] ,
identifier[info] ={ literal[string] : identifier[repr] ( identifier[env_encoded_err] ),
literal[string] : identifier[repr] ( identifier[env_bytes] )}
)
keyword[else] :
identifier[env_bytes] = identifier[env_bytes] . identifier[split] ( identifier[marker_b] )[ literal[int] ]
keyword[if] identifier[six] . identifier[PY2] :
keyword[import] identifier[itertools]
identifier[env_runas] = identifier[dict] ( identifier[itertools] . identifier[izip] (*[ identifier[iter] ( identifier[env_bytes] . identifier[split] ( literal[string] ))]* literal[int] ))
keyword[elif] identifier[six] . identifier[PY3] :
identifier[env_runas] = identifier[dict] ( identifier[list] ( identifier[zip] (*[ identifier[iter] ( identifier[env_bytes] . identifier[split] ( literal[string] ))]* literal[int] )))
identifier[env_runas] = identifier[dict] (
( identifier[salt] . identifier[utils] . identifier[stringutils] . identifier[to_str] ( identifier[k] ),
identifier[salt] . identifier[utils] . identifier[stringutils] . identifier[to_str] ( identifier[v] ))
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[six] . identifier[iteritems] ( identifier[env_runas] )
)
identifier[env_runas] . identifier[update] ( identifier[env] )
keyword[if] identifier[env_runas] . identifier[get] ( literal[string] )!= identifier[runas] :
identifier[env_runas] [ literal[string] ]= identifier[runas]
identifier[runas_home] = identifier[os] . identifier[path] . identifier[expanduser] ( literal[string] . identifier[format] ( identifier[runas] ))
keyword[if] identifier[env_runas] . identifier[get] ( literal[string] )!= identifier[runas_home] :
identifier[env_runas] [ literal[string] ]= identifier[runas_home]
identifier[env] = identifier[env_runas]
keyword[except] identifier[ValueError] keyword[as] identifier[exc] :
identifier[log] . identifier[exception] ( literal[string] , identifier[runas] )
keyword[raise] identifier[CommandExecutionError] (
literal[string] . identifier[format] (
identifier[runas] , identifier[exc]
)
)
keyword[if] identifier[reset_system_locale] keyword[is] keyword[True] :
keyword[if] keyword[not] identifier[salt] . identifier[utils] . identifier[platform] . identifier[is_windows] ():
identifier[env] . identifier[setdefault] ( literal[string] , literal[string] )
identifier[env] . identifier[setdefault] ( literal[string] , literal[string] )
identifier[env] . identifier[setdefault] ( literal[string] , literal[string] )
identifier[env] . identifier[setdefault] ( literal[string] , literal[string] )
identifier[env] . identifier[setdefault] ( literal[string] , literal[string] )
identifier[env] . identifier[setdefault] ( literal[string] , literal[string] )
identifier[env] . identifier[setdefault] ( literal[string] , literal[string] )
identifier[env] . identifier[setdefault] ( literal[string] , literal[string] )
identifier[env] . identifier[setdefault] ( literal[string] , literal[string] )
identifier[env] . identifier[setdefault] ( literal[string] , literal[string] )
identifier[env] . identifier[setdefault] ( literal[string] , literal[string] )
identifier[env] . identifier[setdefault] ( literal[string] , literal[string] )
identifier[env] . identifier[setdefault] ( literal[string] , literal[string] )
keyword[else] :
keyword[if] identifier[python_shell] :
identifier[cmd] = literal[string] + identifier[cmd]
keyword[if] identifier[clean_env] :
identifier[run_env] = identifier[env]
keyword[else] :
identifier[run_env] = identifier[os] . identifier[environ] . identifier[copy] ()
identifier[run_env] . identifier[update] ( identifier[env] )
keyword[if] identifier[prepend_path] :
identifier[run_env] [ literal[string] ]= literal[string] . identifier[join] (( identifier[prepend_path] , identifier[run_env] [ literal[string] ]))
keyword[if] identifier[python_shell] keyword[is] keyword[None] :
identifier[python_shell] = keyword[False]
identifier[new_kwargs] ={ literal[string] : identifier[cwd] ,
literal[string] : identifier[python_shell] ,
literal[string] : identifier[run_env] keyword[if] identifier[six] . identifier[PY3] keyword[else] identifier[salt] . identifier[utils] . identifier[data] . identifier[encode] ( identifier[run_env] ),
literal[string] : identifier[six] . identifier[text_type] ( identifier[stdin] ) keyword[if] identifier[stdin] keyword[is] keyword[not] keyword[None] keyword[else] identifier[stdin] ,
literal[string] : identifier[stdout] ,
literal[string] : identifier[stderr] ,
literal[string] : identifier[with_communicate] ,
literal[string] : identifier[timeout] ,
literal[string] : identifier[bg] ,
}
keyword[if] literal[string] keyword[in] identifier[kwargs] :
identifier[new_kwargs] [ literal[string] ]= identifier[kwargs] [ literal[string] ]
keyword[if] identifier[umask] keyword[is] keyword[not] keyword[None] :
identifier[_umask] = identifier[six] . identifier[text_type] ( identifier[umask] ). identifier[lstrip] ( literal[string] )
keyword[if] identifier[_umask] == literal[string] :
identifier[msg] = literal[string]
keyword[raise] identifier[CommandExecutionError] ( identifier[msg] )
keyword[try] :
identifier[_umask] = identifier[int] ( identifier[_umask] , literal[int] )
keyword[except] identifier[ValueError] :
keyword[raise] identifier[CommandExecutionError] ( literal[string] . identifier[format] ( identifier[umask] ))
keyword[else] :
identifier[_umask] = keyword[None]
keyword[if] identifier[runas] keyword[or] identifier[group] keyword[or] identifier[umask] :
identifier[new_kwargs] [ literal[string] ]= identifier[functools] . identifier[partial] (
identifier[salt] . identifier[utils] . identifier[user] . identifier[chugid_and_umask] ,
identifier[runas] ,
identifier[_umask] ,
identifier[group] )
keyword[if] keyword[not] identifier[salt] . identifier[utils] . identifier[platform] . identifier[is_windows] ():
keyword[if] identifier[new_kwargs] [ literal[string] ] keyword[is] keyword[True] :
identifier[new_kwargs] [ literal[string] ]= identifier[shell]
identifier[new_kwargs] [ literal[string] ]= keyword[True]
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isabs] ( identifier[cwd] ) keyword[or] keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( identifier[cwd] ):
keyword[raise] identifier[CommandExecutionError] (
literal[string]
. identifier[format] ( identifier[cwd] )
)
keyword[if] identifier[python_shell] keyword[is] keyword[not] keyword[True] keyword[and] keyword[not] identifier[salt] . identifier[utils] . identifier[platform] . identifier[is_windows] () keyword[and] keyword[not] identifier[isinstance] ( identifier[cmd] , identifier[list] ):
identifier[cmd] = identifier[salt] . identifier[utils] . identifier[args] . identifier[shlex_split] ( identifier[cmd] )
keyword[if] identifier[success_retcodes] keyword[is] keyword[None] :
identifier[success_retcodes] =[ literal[int] ]
keyword[else] :
keyword[try] :
identifier[success_retcodes] =[ identifier[int] ( identifier[i] ) keyword[for] identifier[i] keyword[in]
identifier[salt] . identifier[utils] . identifier[args] . identifier[split_input] (
identifier[success_retcodes]
)]
keyword[except] identifier[ValueError] :
keyword[raise] identifier[SaltInvocationError] (
literal[string]
)
keyword[if] identifier[success_stdout] keyword[is] keyword[None] :
identifier[success_stdout] =[]
keyword[else] :
keyword[try] :
identifier[success_stdout] =[ identifier[i] keyword[for] identifier[i] keyword[in]
identifier[salt] . identifier[utils] . identifier[args] . identifier[split_input] (
identifier[success_stdout]
)]
keyword[except] identifier[ValueError] :
keyword[raise] identifier[SaltInvocationError] (
literal[string]
)
keyword[if] identifier[success_stderr] keyword[is] keyword[None] :
identifier[success_stderr] =[]
keyword[else] :
keyword[try] :
identifier[success_stderr] =[ identifier[i] keyword[for] identifier[i] keyword[in]
identifier[salt] . identifier[utils] . identifier[args] . identifier[split_input] (
identifier[success_stderr]
)]
keyword[except] identifier[ValueError] :
keyword[raise] identifier[SaltInvocationError] (
literal[string]
)
keyword[if] keyword[not] identifier[use_vt] :
keyword[try] :
identifier[proc] = identifier[salt] . identifier[utils] . identifier[timed_subprocess] . identifier[TimedProc] ( identifier[cmd] ,** identifier[new_kwargs] )
keyword[except] ( identifier[OSError] , identifier[IOError] ) keyword[as] identifier[exc] :
identifier[msg] =(
literal[string]
literal[string] . identifier[format] (
identifier[cmd] keyword[if] identifier[output_loglevel] keyword[is] keyword[not] keyword[None] keyword[else] literal[string] ,
identifier[new_kwargs] ,
identifier[exc]
)
)
keyword[raise] identifier[CommandExecutionError] ( identifier[msg] )
keyword[try] :
identifier[proc] . identifier[run] ()
keyword[except] identifier[TimedProcTimeoutError] keyword[as] identifier[exc] :
identifier[ret] [ literal[string] ]= identifier[six] . identifier[text_type] ( identifier[exc] )
identifier[ret] [ literal[string] ]= literal[string]
identifier[ret] [ literal[string] ]= keyword[None]
identifier[ret] [ literal[string] ]= identifier[proc] . identifier[process] . identifier[pid]
identifier[ret] [ literal[string] ]= literal[int]
keyword[return] identifier[ret]
keyword[if] identifier[output_loglevel] != literal[string] keyword[and] identifier[output_encoding] keyword[is] keyword[not] keyword[None] :
identifier[log] . identifier[debug] ( literal[string] ,
identifier[cmd] , identifier[output_encoding] )
keyword[try] :
identifier[out] = identifier[salt] . identifier[utils] . identifier[stringutils] . identifier[to_unicode] (
identifier[proc] . identifier[stdout] ,
identifier[encoding] = identifier[output_encoding] )
keyword[except] identifier[TypeError] :
identifier[out] = literal[string]
keyword[except] identifier[UnicodeDecodeError] :
identifier[out] = identifier[salt] . identifier[utils] . identifier[stringutils] . identifier[to_unicode] (
identifier[proc] . identifier[stdout] ,
identifier[encoding] = identifier[output_encoding] ,
identifier[errors] = literal[string] )
keyword[if] identifier[output_loglevel] != literal[string] :
identifier[log] . identifier[error] (
literal[string]
literal[string] , identifier[cmd]
)
keyword[try] :
identifier[err] = identifier[salt] . identifier[utils] . identifier[stringutils] . identifier[to_unicode] (
identifier[proc] . identifier[stderr] ,
identifier[encoding] = identifier[output_encoding] )
keyword[except] identifier[TypeError] :
identifier[err] = literal[string]
keyword[except] identifier[UnicodeDecodeError] :
identifier[err] = identifier[salt] . identifier[utils] . identifier[stringutils] . identifier[to_unicode] (
identifier[proc] . identifier[stderr] ,
identifier[encoding] = identifier[output_encoding] ,
identifier[errors] = literal[string] )
keyword[if] identifier[output_loglevel] != literal[string] :
identifier[log] . identifier[error] (
literal[string]
literal[string] , identifier[cmd]
)
keyword[if] identifier[rstrip] :
keyword[if] identifier[out] keyword[is] keyword[not] keyword[None] :
identifier[out] = identifier[out] . identifier[rstrip] ()
keyword[if] identifier[err] keyword[is] keyword[not] keyword[None] :
identifier[err] = identifier[err] . identifier[rstrip] ()
identifier[ret] [ literal[string] ]= identifier[proc] . identifier[process] . identifier[pid]
identifier[ret] [ literal[string] ]= identifier[proc] . identifier[process] . identifier[returncode]
keyword[if] identifier[ret] [ literal[string] ] keyword[in] identifier[success_retcodes] :
identifier[ret] [ literal[string] ]= literal[int]
identifier[ret] [ literal[string] ]= identifier[out]
identifier[ret] [ literal[string] ]= identifier[err]
keyword[if] identifier[ret] [ literal[string] ] keyword[in] identifier[success_stdout] keyword[or] identifier[ret] [ literal[string] ] keyword[in] identifier[success_stderr] :
identifier[ret] [ literal[string] ]= literal[int]
keyword[else] :
identifier[formatted_timeout] = literal[string]
keyword[if] identifier[timeout] :
identifier[formatted_timeout] = literal[string] . identifier[format] ( identifier[timeout] )
keyword[if] identifier[output_loglevel] keyword[is] keyword[not] keyword[None] :
identifier[msg] = literal[string] . identifier[format] ( identifier[cmd] , identifier[formatted_timeout] )
identifier[log] . identifier[debug] ( identifier[log_callback] ( identifier[msg] ))
identifier[stdout] , identifier[stderr] = literal[string] , literal[string]
identifier[now] = identifier[time] . identifier[time] ()
keyword[if] identifier[timeout] :
identifier[will_timeout] = identifier[now] + identifier[timeout]
keyword[else] :
identifier[will_timeout] =- literal[int]
keyword[try] :
identifier[proc] = identifier[salt] . identifier[utils] . identifier[vt] . identifier[Terminal] (
identifier[cmd] ,
identifier[shell] = keyword[True] ,
identifier[log_stdout] = keyword[True] ,
identifier[log_stderr] = keyword[True] ,
identifier[cwd] = identifier[cwd] ,
identifier[preexec_fn] = identifier[new_kwargs] . identifier[get] ( literal[string] , keyword[None] ),
identifier[env] = identifier[run_env] ,
identifier[log_stdin_level] = identifier[output_loglevel] ,
identifier[log_stdout_level] = identifier[output_loglevel] ,
identifier[log_stderr_level] = identifier[output_loglevel] ,
identifier[stream_stdout] = keyword[True] ,
identifier[stream_stderr] = keyword[True]
)
identifier[ret] [ literal[string] ]= identifier[proc] . identifier[pid]
keyword[while] identifier[proc] . identifier[has_unread_data] :
keyword[try] :
keyword[try] :
identifier[time] . identifier[sleep] ( literal[int] )
keyword[try] :
identifier[cstdout] , identifier[cstderr] = identifier[proc] . identifier[recv] ()
keyword[except] identifier[IOError] :
identifier[cstdout] , identifier[cstderr] = literal[string] , literal[string]
keyword[if] identifier[cstdout] :
identifier[stdout] += identifier[cstdout]
keyword[else] :
identifier[cstdout] = literal[string]
keyword[if] identifier[cstderr] :
identifier[stderr] += identifier[cstderr]
keyword[else] :
identifier[cstderr] = literal[string]
keyword[if] identifier[timeout] keyword[and] ( identifier[time] . identifier[time] ()> identifier[will_timeout] ):
identifier[ret] [ literal[string] ]=(
literal[string] ). identifier[format] (
identifier[timeout] , identifier[stderr] )
identifier[ret] [ literal[string] ]= keyword[None]
keyword[break]
keyword[except] identifier[KeyboardInterrupt] :
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[stderr] )
identifier[ret] [ literal[string] ]= literal[int]
keyword[break]
keyword[except] identifier[salt] . identifier[utils] . identifier[vt] . identifier[TerminalException] keyword[as] identifier[exc] :
identifier[log] . identifier[error] ( literal[string] , identifier[exc] ,
identifier[exc_info_on_loglevel] = identifier[logging] . identifier[DEBUG] )
identifier[ret] ={ literal[string] : literal[int] , literal[string] : literal[string] }
keyword[break]
identifier[ret] [ literal[string] ]= identifier[stdout]
keyword[if] keyword[not] identifier[proc] . identifier[isalive] ():
identifier[ret] [ literal[string] ]= identifier[stderr]
identifier[ret] [ literal[string] ]= identifier[proc] . identifier[exitstatus]
keyword[if] identifier[ret] [ literal[string] ] keyword[in] identifier[success_retcodes] :
identifier[ret] [ literal[string] ]= literal[int]
keyword[if] identifier[ret] [ literal[string] ] keyword[in] identifier[success_stdout] keyword[or] identifier[ret] [ literal[string] ] keyword[in] identifier[success_stderr] :
identifier[ret] [ literal[string] ]= literal[int]
identifier[ret] [ literal[string] ]= identifier[proc] . identifier[pid]
keyword[finally] :
identifier[proc] . identifier[close] ( identifier[terminate] = keyword[True] , identifier[kill] = keyword[True] )
keyword[try] :
keyword[if] identifier[ignore_retcode] :
identifier[__context__] [ literal[string] ]= literal[int]
keyword[else] :
identifier[__context__] [ literal[string] ]= identifier[ret] [ literal[string] ]
keyword[except] identifier[NameError] :
keyword[pass]
keyword[if] identifier[output_loglevel] keyword[is] keyword[not] keyword[None] :
keyword[if] keyword[not] identifier[ignore_retcode] keyword[and] identifier[ret] [ literal[string] ]!= literal[int] :
keyword[if] identifier[output_loglevel] < identifier[LOG_LEVELS] [ literal[string] ]:
identifier[output_loglevel] = identifier[LOG_LEVELS] [ literal[string] ]
identifier[msg] =(
literal[string] . identifier[format] (
identifier[cmd] ,
identifier[ret] [ literal[string] ]
)
)
identifier[log] . identifier[error] ( identifier[log_callback] ( identifier[msg] ))
keyword[if] identifier[ret] [ literal[string] ]:
identifier[log] . identifier[log] ( identifier[output_loglevel] , literal[string] , identifier[log_callback] ( identifier[ret] [ literal[string] ]))
keyword[if] identifier[ret] [ literal[string] ]:
identifier[log] . identifier[log] ( identifier[output_loglevel] , literal[string] , identifier[log_callback] ( identifier[ret] [ literal[string] ]))
keyword[if] identifier[ret] [ literal[string] ]:
identifier[log] . identifier[log] ( identifier[output_loglevel] , literal[string] , identifier[ret] [ literal[string] ])
keyword[return] identifier[ret]
|
def _run(cmd, cwd=None, stdin=None, stdout=subprocess.PIPE, stderr=subprocess.PIPE, output_encoding=None, output_loglevel='debug', log_callback=None, runas=None, group=None, shell=DEFAULT_SHELL, python_shell=False, env=None, clean_env=False, prepend_path=None, rstrip=True, template=None, umask=None, timeout=None, with_communicate=True, reset_system_locale=True, ignore_retcode=False, saltenv='base', pillarenv=None, pillar_override=None, use_vt=False, password=None, bg=False, encoded_cmd=False, success_retcodes=None, success_stdout=None, success_stderr=None, **kwargs):
"""
Do the DRY thing and only call subprocess.Popen() once
"""
if 'pillar' in kwargs and (not pillar_override):
pillar_override = kwargs['pillar'] # depends on [control=['if'], data=[]]
if output_loglevel != 'quiet' and _is_valid_shell(shell) is False:
log.warning('Attempt to run a shell command with what may be an invalid shell! Check to ensure that the shell <%s> is valid for this user.', shell) # depends on [control=['if'], data=[]]
output_loglevel = _check_loglevel(output_loglevel)
log_callback = _check_cb(log_callback)
use_sudo = False
if runas is None and '__context__' in globals():
runas = __context__.get('runas') # depends on [control=['if'], data=[]]
if password is None and '__context__' in globals():
password = __context__.get('runas_password') # depends on [control=['if'], data=[]]
# Set the default working directory to the home directory of the user
# salt-minion is running as. Defaults to home directory of user under which
# the minion is running.
if not cwd:
cwd = os.path.expanduser('~{0}'.format('' if not runas else runas))
# make sure we can access the cwd
# when run from sudo or another environment where the euid is
# changed ~ will expand to the home of the original uid and
# the euid might not have access to it. See issue #1844
if not os.access(cwd, os.R_OK):
cwd = '/'
if salt.utils.platform.is_windows():
cwd = os.path.abspath(os.sep) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
# Handle edge cases where numeric/other input is entered, and would be
# yaml-ified into non-string types
cwd = six.text_type(cwd)
if bg:
ignore_retcode = True
use_vt = False # depends on [control=['if'], data=[]]
if not salt.utils.platform.is_windows():
if not os.path.isfile(shell) or not os.access(shell, os.X_OK):
msg = 'The shell {0} is not available'.format(shell)
raise CommandExecutionError(msg) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if salt.utils.platform.is_windows() and use_vt: # Memozation so not much overhead
raise CommandExecutionError('VT not available on windows') # depends on [control=['if'], data=[]]
if shell.lower().strip() == 'powershell':
# Strip whitespace
if isinstance(cmd, six.string_types):
cmd = cmd.strip() # depends on [control=['if'], data=[]]
# If we were called by script(), then fakeout the Windows
# shell to run a Powershell script.
# Else just run a Powershell command.
stack = traceback.extract_stack(limit=2)
# extract_stack() returns a list of tuples.
# The last item in the list [-1] is the current method.
# The third item[2] in each tuple is the name of that method.
if stack[-2][2] == 'script':
cmd = 'Powershell -NonInteractive -NoProfile -ExecutionPolicy Bypass -File ' + cmd # depends on [control=['if'], data=[]]
elif encoded_cmd:
cmd = 'Powershell -NonInteractive -EncodedCommand {0}'.format(cmd) # depends on [control=['if'], data=[]]
else:
cmd = 'Powershell -NonInteractive -NoProfile "{0}"'.format(cmd.replace('"', '\\"')) # depends on [control=['if'], data=[]]
# munge the cmd and cwd through the template
(cmd, cwd) = _render_cmd(cmd, cwd, template, saltenv, pillarenv, pillar_override)
ret = {}
# If the pub jid is here then this is a remote ex or salt call command and needs to be
# checked if blacklisted
if '__pub_jid' in kwargs:
if not _check_avail(cmd):
raise CommandExecutionError('The shell command "{0}" is not permitted'.format(cmd)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
env = _parse_env(env)
for bad_env_key in (x for (x, y) in six.iteritems(env) if y is None):
log.error("Environment variable '%s' passed without a value. Setting value to an empty string", bad_env_key)
env[bad_env_key] = '' # depends on [control=['for'], data=['bad_env_key']]
def _get_stripped(cmd):
# Return stripped command string copies to improve logging.
if isinstance(cmd, list):
return [x.strip() if isinstance(x, six.string_types) else x for x in cmd] # depends on [control=['if'], data=[]]
elif isinstance(cmd, six.string_types):
return cmd.strip() # depends on [control=['if'], data=[]]
else:
return cmd
if output_loglevel is not None:
# Always log the shell commands at INFO unless quiet logging is
# requested. The command output is what will be controlled by the
# 'loglevel' parameter.
msg = "Executing command {0}{1}{0} {2}{3}in directory '{4}'{5}".format("'" if not isinstance(cmd, list) else '', _get_stripped(cmd), "as user '{0}' ".format(runas) if runas else '', "in group '{0}' ".format(group) if group else '', cwd, '. Executing command in the background, no output will be logged.' if bg else '')
log.info(log_callback(msg)) # depends on [control=['if'], data=[]]
if runas and salt.utils.platform.is_windows():
if not HAS_WIN_RUNAS:
msg = 'missing salt/utils/win_runas.py'
raise CommandExecutionError(msg) # depends on [control=['if'], data=[]]
if isinstance(cmd, (list, tuple)):
cmd = ' '.join(cmd) # depends on [control=['if'], data=[]]
return win_runas(cmd, runas, password, cwd) # depends on [control=['if'], data=[]]
if runas and salt.utils.platform.is_darwin():
# we need to insert the user simulation into the command itself and not
# just run it from the environment on macOS as that
# method doesn't work properly when run as root for certain commands.
if isinstance(cmd, (list, tuple)):
cmd = ' '.join(map(_cmd_quote, cmd)) # depends on [control=['if'], data=[]]
cmd = 'su -l {0} -c "cd {1}; {2}"'.format(runas, cwd, cmd)
# set runas to None, because if you try to run `su -l` as well as
# simulate the environment macOS will prompt for the password of the
# user and will cause salt to hang.
runas = None # depends on [control=['if'], data=[]]
if runas:
# Save the original command before munging it
try:
pwd.getpwnam(runas) # depends on [control=['try'], data=[]]
except KeyError:
raise CommandExecutionError("User '{0}' is not available".format(runas)) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
if group:
if salt.utils.platform.is_windows():
msg = 'group is not currently available on Windows'
raise SaltInvocationError(msg) # depends on [control=['if'], data=[]]
if not which_bin(['sudo']):
msg = 'group argument requires sudo but not found'
raise CommandExecutionError(msg) # depends on [control=['if'], data=[]]
try:
grp.getgrnam(group) # depends on [control=['try'], data=[]]
except KeyError:
raise CommandExecutionError("Group '{0}' is not available".format(runas)) # depends on [control=['except'], data=[]]
else:
use_sudo = True # depends on [control=['if'], data=[]]
if runas or group:
try:
# Getting the environment for the runas user
# Use markers to thwart any stdout noise
# There must be a better way to do this.
import uuid
marker = '<<<' + str(uuid.uuid4()) + '>>>'
marker_b = marker.encode(__salt_system_encoding__)
py_code = 'import sys, os, itertools; sys.stdout.write("' + marker + '"); sys.stdout.write("\\0".join(itertools.chain(*os.environ.items()))); sys.stdout.write("' + marker + '");'
if use_sudo or __grains__['os'] in ['MacOS', 'Darwin']:
env_cmd = ['sudo']
# runas is optional if use_sudo is set.
if runas:
env_cmd.extend(['-u', runas]) # depends on [control=['if'], data=[]]
if group:
env_cmd.extend(['-g', group]) # depends on [control=['if'], data=[]]
if shell != DEFAULT_SHELL:
env_cmd.extend(['-s', '--', shell, '-c']) # depends on [control=['if'], data=['shell']]
else:
env_cmd.extend(['-i', '--'])
env_cmd.extend([sys.executable]) # depends on [control=['if'], data=[]]
elif __grains__['os'] in ['FreeBSD']:
env_cmd = ('su', '-', runas, '-c', '{0} -c {1}'.format(shell, sys.executable)) # depends on [control=['if'], data=[]]
elif __grains__['os_family'] in ['Solaris']:
env_cmd = ('su', '-', runas, '-c', sys.executable) # depends on [control=['if'], data=[]]
elif __grains__['os_family'] in ['AIX']:
env_cmd = ('su', '-', runas, '-c', sys.executable) # depends on [control=['if'], data=[]]
else:
env_cmd = ('su', '-s', shell, '-', runas, '-c', sys.executable)
msg = 'env command: {0}'.format(env_cmd)
log.debug(log_callback(msg))
(env_bytes, env_encoded_err) = subprocess.Popen(env_cmd, stderr=subprocess.PIPE, stdout=subprocess.PIPE, stdin=subprocess.PIPE).communicate(salt.utils.stringutils.to_bytes(py_code))
marker_count = env_bytes.count(marker_b)
if marker_count == 0:
# Possibly PAM prevented the login
log.error("Environment could not be retrieved for user '%s': stderr=%r stdout=%r", runas, env_encoded_err, env_bytes)
# Ensure that we get an empty env_runas dict below since we
# were not able to get the environment.
env_bytes = b'' # depends on [control=['if'], data=[]]
elif marker_count != 2:
raise CommandExecutionError("Environment could not be retrieved for user '{0}'", info={'stderr': repr(env_encoded_err), 'stdout': repr(env_bytes)}) # depends on [control=['if'], data=[]]
else:
# Strip the marker
env_bytes = env_bytes.split(marker_b)[1]
if six.PY2:
import itertools
env_runas = dict(itertools.izip(*[iter(env_bytes.split(b'\x00'))] * 2)) # depends on [control=['if'], data=[]]
elif six.PY3:
env_runas = dict(list(zip(*[iter(env_bytes.split(b'\x00'))] * 2))) # depends on [control=['if'], data=[]]
env_runas = dict(((salt.utils.stringutils.to_str(k), salt.utils.stringutils.to_str(v)) for (k, v) in six.iteritems(env_runas)))
env_runas.update(env)
# Fix platforms like Solaris that don't set a USER env var in the
# user's default environment as obtained above.
if env_runas.get('USER') != runas:
env_runas['USER'] = runas # depends on [control=['if'], data=['runas']]
# Fix some corner cases where shelling out to get the user's
# environment returns the wrong home directory.
runas_home = os.path.expanduser('~{0}'.format(runas))
if env_runas.get('HOME') != runas_home:
env_runas['HOME'] = runas_home # depends on [control=['if'], data=['runas_home']]
env = env_runas # depends on [control=['try'], data=[]]
except ValueError as exc:
log.exception('Error raised retrieving environment for user %s', runas)
raise CommandExecutionError("Environment could not be retrieved for user '{0}': {1}".format(runas, exc)) # depends on [control=['except'], data=['exc']] # depends on [control=['if'], data=[]]
if reset_system_locale is True:
if not salt.utils.platform.is_windows():
# Default to C!
# Salt only knows how to parse English words
# Don't override if the user has passed LC_ALL
env.setdefault('LC_CTYPE', 'C')
env.setdefault('LC_NUMERIC', 'C')
env.setdefault('LC_TIME', 'C')
env.setdefault('LC_COLLATE', 'C')
env.setdefault('LC_MONETARY', 'C')
env.setdefault('LC_MESSAGES', 'C')
env.setdefault('LC_PAPER', 'C')
env.setdefault('LC_NAME', 'C')
env.setdefault('LC_ADDRESS', 'C')
env.setdefault('LC_TELEPHONE', 'C')
env.setdefault('LC_MEASUREMENT', 'C')
env.setdefault('LC_IDENTIFICATION', 'C')
env.setdefault('LANGUAGE', 'C') # depends on [control=['if'], data=[]]
# On Windows set the codepage to US English.
elif python_shell:
cmd = 'chcp 437 > nul & ' + cmd # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if clean_env:
run_env = env # depends on [control=['if'], data=[]]
else:
run_env = os.environ.copy()
run_env.update(env)
if prepend_path:
run_env['PATH'] = ':'.join((prepend_path, run_env['PATH'])) # depends on [control=['if'], data=[]]
if python_shell is None:
python_shell = False # depends on [control=['if'], data=['python_shell']]
new_kwargs = {'cwd': cwd, 'shell': python_shell, 'env': run_env if six.PY3 else salt.utils.data.encode(run_env), 'stdin': six.text_type(stdin) if stdin is not None else stdin, 'stdout': stdout, 'stderr': stderr, 'with_communicate': with_communicate, 'timeout': timeout, 'bg': bg}
if 'stdin_raw_newlines' in kwargs:
new_kwargs['stdin_raw_newlines'] = kwargs['stdin_raw_newlines'] # depends on [control=['if'], data=['kwargs']]
if umask is not None:
_umask = six.text_type(umask).lstrip('0')
if _umask == '':
msg = 'Zero umask is not allowed.'
raise CommandExecutionError(msg) # depends on [control=['if'], data=[]]
try:
_umask = int(_umask, 8) # depends on [control=['try'], data=[]]
except ValueError:
raise CommandExecutionError("Invalid umask: '{0}'".format(umask)) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['umask']]
else:
_umask = None
if runas or group or umask:
new_kwargs['preexec_fn'] = functools.partial(salt.utils.user.chugid_and_umask, runas, _umask, group) # depends on [control=['if'], data=[]]
if not salt.utils.platform.is_windows():
# close_fds is not supported on Windows platforms if you redirect
# stdin/stdout/stderr
if new_kwargs['shell'] is True:
new_kwargs['executable'] = shell # depends on [control=['if'], data=[]]
new_kwargs['close_fds'] = True # depends on [control=['if'], data=[]]
if not os.path.isabs(cwd) or not os.path.isdir(cwd):
raise CommandExecutionError("Specified cwd '{0}' either not absolute or does not exist".format(cwd)) # depends on [control=['if'], data=[]]
if python_shell is not True and (not salt.utils.platform.is_windows()) and (not isinstance(cmd, list)):
cmd = salt.utils.args.shlex_split(cmd) # depends on [control=['if'], data=[]]
if success_retcodes is None:
success_retcodes = [0] # depends on [control=['if'], data=['success_retcodes']]
else:
try:
success_retcodes = [int(i) for i in salt.utils.args.split_input(success_retcodes)] # depends on [control=['try'], data=[]]
except ValueError:
raise SaltInvocationError('success_retcodes must be a list of integers') # depends on [control=['except'], data=[]]
if success_stdout is None:
success_stdout = [] # depends on [control=['if'], data=['success_stdout']]
else:
try:
success_stdout = [i for i in salt.utils.args.split_input(success_stdout)] # depends on [control=['try'], data=[]]
except ValueError:
raise SaltInvocationError('success_stdout must be a list of integers') # depends on [control=['except'], data=[]]
if success_stderr is None:
success_stderr = [] # depends on [control=['if'], data=['success_stderr']]
else:
try:
success_stderr = [i for i in salt.utils.args.split_input(success_stderr)] # depends on [control=['try'], data=[]]
except ValueError:
raise SaltInvocationError('success_stderr must be a list of integers') # depends on [control=['except'], data=[]]
if not use_vt:
# This is where the magic happens
try:
proc = salt.utils.timed_subprocess.TimedProc(cmd, **new_kwargs) # depends on [control=['try'], data=[]]
except (OSError, IOError) as exc:
msg = "Unable to run command '{0}' with the context '{1}', reason: {2}".format(cmd if output_loglevel is not None else 'REDACTED', new_kwargs, exc)
raise CommandExecutionError(msg) # depends on [control=['except'], data=['exc']]
try:
proc.run() # depends on [control=['try'], data=[]]
except TimedProcTimeoutError as exc:
ret['stdout'] = six.text_type(exc)
ret['stderr'] = ''
ret['retcode'] = None
ret['pid'] = proc.process.pid
# ok return code for timeouts?
ret['retcode'] = 1
return ret # depends on [control=['except'], data=['exc']]
if output_loglevel != 'quiet' and output_encoding is not None:
log.debug('Decoding output from command %s using %s encoding', cmd, output_encoding) # depends on [control=['if'], data=[]]
try:
out = salt.utils.stringutils.to_unicode(proc.stdout, encoding=output_encoding) # depends on [control=['try'], data=[]]
except TypeError:
# stdout is None
out = '' # depends on [control=['except'], data=[]]
except UnicodeDecodeError:
out = salt.utils.stringutils.to_unicode(proc.stdout, encoding=output_encoding, errors='replace')
if output_loglevel != 'quiet':
log.error('Failed to decode stdout from command %s, non-decodable characters have been replaced', cmd) # depends on [control=['if'], data=[]] # depends on [control=['except'], data=[]]
try:
err = salt.utils.stringutils.to_unicode(proc.stderr, encoding=output_encoding) # depends on [control=['try'], data=[]]
except TypeError:
# stderr is None
err = '' # depends on [control=['except'], data=[]]
except UnicodeDecodeError:
err = salt.utils.stringutils.to_unicode(proc.stderr, encoding=output_encoding, errors='replace')
if output_loglevel != 'quiet':
log.error('Failed to decode stderr from command %s, non-decodable characters have been replaced', cmd) # depends on [control=['if'], data=[]] # depends on [control=['except'], data=[]]
if rstrip:
if out is not None:
out = out.rstrip() # depends on [control=['if'], data=['out']]
if err is not None:
err = err.rstrip() # depends on [control=['if'], data=['err']] # depends on [control=['if'], data=[]]
ret['pid'] = proc.process.pid
ret['retcode'] = proc.process.returncode
if ret['retcode'] in success_retcodes:
ret['retcode'] = 0 # depends on [control=['if'], data=[]]
ret['stdout'] = out
ret['stderr'] = err
if ret['stdout'] in success_stdout or ret['stderr'] in success_stderr:
ret['retcode'] = 0 # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
formatted_timeout = ''
if timeout:
formatted_timeout = ' (timeout: {0}s)'.format(timeout) # depends on [control=['if'], data=[]]
if output_loglevel is not None:
msg = 'Running {0} in VT{1}'.format(cmd, formatted_timeout)
log.debug(log_callback(msg)) # depends on [control=['if'], data=[]]
(stdout, stderr) = ('', '')
now = time.time()
if timeout:
will_timeout = now + timeout # depends on [control=['if'], data=[]]
else:
will_timeout = -1
try:
proc = salt.utils.vt.Terminal(cmd, shell=True, log_stdout=True, log_stderr=True, cwd=cwd, preexec_fn=new_kwargs.get('preexec_fn', None), env=run_env, log_stdin_level=output_loglevel, log_stdout_level=output_loglevel, log_stderr_level=output_loglevel, stream_stdout=True, stream_stderr=True)
ret['pid'] = proc.pid
while proc.has_unread_data:
try:
try:
time.sleep(0.5)
try:
(cstdout, cstderr) = proc.recv() # depends on [control=['try'], data=[]]
except IOError:
(cstdout, cstderr) = ('', '') # depends on [control=['except'], data=[]]
if cstdout:
stdout += cstdout # depends on [control=['if'], data=[]]
else:
cstdout = ''
if cstderr:
stderr += cstderr # depends on [control=['if'], data=[]]
else:
cstderr = ''
if timeout and time.time() > will_timeout:
ret['stderr'] = 'SALT: Timeout after {0}s\n{1}'.format(timeout, stderr)
ret['retcode'] = None
break # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except KeyboardInterrupt:
ret['stderr'] = 'SALT: User break\n{0}'.format(stderr)
ret['retcode'] = 1
break # depends on [control=['except'], data=[]] # depends on [control=['try'], data=[]]
except salt.utils.vt.TerminalException as exc:
log.error('VT: %s', exc, exc_info_on_loglevel=logging.DEBUG)
ret = {'retcode': 1, 'pid': '2'}
break # depends on [control=['except'], data=['exc']]
# only set stdout on success as we already mangled in other
# cases
ret['stdout'] = stdout
if not proc.isalive():
# Process terminated, i.e., not canceled by the user or by
# the timeout
ret['stderr'] = stderr
ret['retcode'] = proc.exitstatus
if ret['retcode'] in success_retcodes:
ret['retcode'] = 0 # depends on [control=['if'], data=[]]
if ret['stdout'] in success_stdout or ret['stderr'] in success_stderr:
ret['retcode'] = 0 # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
ret['pid'] = proc.pid # depends on [control=['while'], data=[]] # depends on [control=['try'], data=[]]
finally:
proc.close(terminate=True, kill=True)
try:
if ignore_retcode:
__context__['retcode'] = 0 # depends on [control=['if'], data=[]]
else:
__context__['retcode'] = ret['retcode'] # depends on [control=['try'], data=[]]
except NameError:
# Ignore the context error during grain generation
pass # depends on [control=['except'], data=[]]
# Log the output
if output_loglevel is not None:
if not ignore_retcode and ret['retcode'] != 0:
if output_loglevel < LOG_LEVELS['error']:
output_loglevel = LOG_LEVELS['error'] # depends on [control=['if'], data=['output_loglevel']]
msg = "Command '{0}' failed with return code: {1}".format(cmd, ret['retcode'])
log.error(log_callback(msg)) # depends on [control=['if'], data=[]]
if ret['stdout']:
log.log(output_loglevel, 'stdout: %s', log_callback(ret['stdout'])) # depends on [control=['if'], data=[]]
if ret['stderr']:
log.log(output_loglevel, 'stderr: %s', log_callback(ret['stderr'])) # depends on [control=['if'], data=[]]
if ret['retcode']:
log.log(output_loglevel, 'retcode: %s', ret['retcode']) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['output_loglevel']]
return ret
|
def notification_message(self, title, content, icon=""):
"""This function sends "javascript" message to the client, that executes its content.
In this particular code, a notification message is shown
"""
code = """
var options = {
body: "%(content)s",
icon: "%(icon)s"
}
if (!("Notification" in window)) {
alert("%(content)s");
}else if (Notification.permission === "granted") {
var notification = new Notification("%(title)s", options);
}else if (Notification.permission !== 'denied') {
Notification.requestPermission(function (permission) {
if (permission === "granted") {
var notification = new Notification("%(title)s", options);
}
});
}
""" % {'title': title, 'content': content, 'icon': icon}
self.execute_javascript(code)
|
def function[notification_message, parameter[self, title, content, icon]]:
constant[This function sends "javascript" message to the client, that executes its content.
In this particular code, a notification message is shown
]
variable[code] assign[=] binary_operation[constant[
var options = {
body: "%(content)s",
icon: "%(icon)s"
}
if (!("Notification" in window)) {
alert("%(content)s");
}else if (Notification.permission === "granted") {
var notification = new Notification("%(title)s", options);
}else if (Notification.permission !== 'denied') {
Notification.requestPermission(function (permission) {
if (permission === "granted") {
var notification = new Notification("%(title)s", options);
}
});
}
] <ast.Mod object at 0x7da2590d6920> dictionary[[<ast.Constant object at 0x7da207f038e0>, <ast.Constant object at 0x7da207f01060>, <ast.Constant object at 0x7da207f02920>], [<ast.Name object at 0x7da207f00520>, <ast.Name object at 0x7da207f00400>, <ast.Name object at 0x7da207f02c50>]]]
call[name[self].execute_javascript, parameter[name[code]]]
|
keyword[def] identifier[notification_message] ( identifier[self] , identifier[title] , identifier[content] , identifier[icon] = literal[string] ):
literal[string]
identifier[code] = literal[string] %{ literal[string] : identifier[title] , literal[string] : identifier[content] , literal[string] : identifier[icon] }
identifier[self] . identifier[execute_javascript] ( identifier[code] )
|
def notification_message(self, title, content, icon=''):
"""This function sends "javascript" message to the client, that executes its content.
In this particular code, a notification message is shown
"""
code = '\n var options = {\n body: "%(content)s",\n icon: "%(icon)s"\n }\n if (!("Notification" in window)) {\n alert("%(content)s");\n }else if (Notification.permission === "granted") {\n var notification = new Notification("%(title)s", options);\n }else if (Notification.permission !== \'denied\') {\n Notification.requestPermission(function (permission) {\n if (permission === "granted") {\n var notification = new Notification("%(title)s", options);\n }\n });\n }\n ' % {'title': title, 'content': content, 'icon': icon}
self.execute_javascript(code)
|
def create_document(self, data, throw_on_exists=False):
"""
Creates a new document in the remote and locally cached database, using
the data provided. If an _id is included in the data then depending on
that _id either a :class:`~cloudant.document.Document` or a
:class:`~cloudant.design_document.DesignDocument`
object will be added to the locally cached database and returned by this
method.
:param dict data: Dictionary of document JSON data, containing _id.
:param bool throw_on_exists: Optional flag dictating whether to raise
an exception if the document already exists in the database.
:returns: A :class:`~cloudant.document.Document` or
:class:`~cloudant.design_document.DesignDocument` instance
corresponding to the new document in the database.
"""
docid = data.get('_id', None)
doc = None
if docid and docid.startswith('_design/'):
doc = DesignDocument(self, docid)
else:
doc = Document(self, docid)
doc.update(data)
try:
doc.create()
except HTTPError as error:
if error.response.status_code == 409:
if throw_on_exists:
raise CloudantDatabaseException(409, docid)
else:
raise
super(CouchDatabase, self).__setitem__(doc['_id'], doc)
return doc
|
def function[create_document, parameter[self, data, throw_on_exists]]:
constant[
Creates a new document in the remote and locally cached database, using
the data provided. If an _id is included in the data then depending on
that _id either a :class:`~cloudant.document.Document` or a
:class:`~cloudant.design_document.DesignDocument`
object will be added to the locally cached database and returned by this
method.
:param dict data: Dictionary of document JSON data, containing _id.
:param bool throw_on_exists: Optional flag dictating whether to raise
an exception if the document already exists in the database.
:returns: A :class:`~cloudant.document.Document` or
:class:`~cloudant.design_document.DesignDocument` instance
corresponding to the new document in the database.
]
variable[docid] assign[=] call[name[data].get, parameter[constant[_id], constant[None]]]
variable[doc] assign[=] constant[None]
if <ast.BoolOp object at 0x7da204623a60> begin[:]
variable[doc] assign[=] call[name[DesignDocument], parameter[name[self], name[docid]]]
call[name[doc].update, parameter[name[data]]]
<ast.Try object at 0x7da1b2345f00>
call[call[name[super], parameter[name[CouchDatabase], name[self]]].__setitem__, parameter[call[name[doc]][constant[_id]], name[doc]]]
return[name[doc]]
|
keyword[def] identifier[create_document] ( identifier[self] , identifier[data] , identifier[throw_on_exists] = keyword[False] ):
literal[string]
identifier[docid] = identifier[data] . identifier[get] ( literal[string] , keyword[None] )
identifier[doc] = keyword[None]
keyword[if] identifier[docid] keyword[and] identifier[docid] . identifier[startswith] ( literal[string] ):
identifier[doc] = identifier[DesignDocument] ( identifier[self] , identifier[docid] )
keyword[else] :
identifier[doc] = identifier[Document] ( identifier[self] , identifier[docid] )
identifier[doc] . identifier[update] ( identifier[data] )
keyword[try] :
identifier[doc] . identifier[create] ()
keyword[except] identifier[HTTPError] keyword[as] identifier[error] :
keyword[if] identifier[error] . identifier[response] . identifier[status_code] == literal[int] :
keyword[if] identifier[throw_on_exists] :
keyword[raise] identifier[CloudantDatabaseException] ( literal[int] , identifier[docid] )
keyword[else] :
keyword[raise]
identifier[super] ( identifier[CouchDatabase] , identifier[self] ). identifier[__setitem__] ( identifier[doc] [ literal[string] ], identifier[doc] )
keyword[return] identifier[doc]
|
def create_document(self, data, throw_on_exists=False):
"""
Creates a new document in the remote and locally cached database, using
the data provided. If an _id is included in the data then depending on
that _id either a :class:`~cloudant.document.Document` or a
:class:`~cloudant.design_document.DesignDocument`
object will be added to the locally cached database and returned by this
method.
:param dict data: Dictionary of document JSON data, containing _id.
:param bool throw_on_exists: Optional flag dictating whether to raise
an exception if the document already exists in the database.
:returns: A :class:`~cloudant.document.Document` or
:class:`~cloudant.design_document.DesignDocument` instance
corresponding to the new document in the database.
"""
docid = data.get('_id', None)
doc = None
if docid and docid.startswith('_design/'):
doc = DesignDocument(self, docid) # depends on [control=['if'], data=[]]
else:
doc = Document(self, docid)
doc.update(data)
try:
doc.create() # depends on [control=['try'], data=[]]
except HTTPError as error:
if error.response.status_code == 409:
if throw_on_exists:
raise CloudantDatabaseException(409, docid) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
raise # depends on [control=['except'], data=['error']]
super(CouchDatabase, self).__setitem__(doc['_id'], doc)
return doc
|
def to_tag(self) -> str:
"""
Convert a Language back to a standard language tag, as a string.
This is also the str() representation of a Language object.
>>> Language.make(language='en', region='GB').to_tag()
'en-GB'
>>> Language.make(language='yue', script='Hant', region='HK').to_tag()
'yue-Hant-HK'
>>> Language.make(script='Arab').to_tag()
'und-Arab'
>>> str(Language.make(region='IN'))
'und-IN'
"""
if self._str_tag is not None:
return self._str_tag
subtags = ['und']
if self.language:
subtags[0] = self.language
if self.extlangs:
for extlang in sorted(self.extlangs):
subtags.append(extlang)
if self.script:
subtags.append(self.script)
if self.region:
subtags.append(self.region)
if self.variants:
for variant in sorted(self.variants):
subtags.append(variant)
if self.extensions:
for ext in self.extensions:
subtags.append(ext)
if self.private:
subtags.append(self.private)
self._str_tag = '-'.join(subtags)
return self._str_tag
|
def function[to_tag, parameter[self]]:
constant[
Convert a Language back to a standard language tag, as a string.
This is also the str() representation of a Language object.
>>> Language.make(language='en', region='GB').to_tag()
'en-GB'
>>> Language.make(language='yue', script='Hant', region='HK').to_tag()
'yue-Hant-HK'
>>> Language.make(script='Arab').to_tag()
'und-Arab'
>>> str(Language.make(region='IN'))
'und-IN'
]
if compare[name[self]._str_tag is_not constant[None]] begin[:]
return[name[self]._str_tag]
variable[subtags] assign[=] list[[<ast.Constant object at 0x7da20c6e4190>]]
if name[self].language begin[:]
call[name[subtags]][constant[0]] assign[=] name[self].language
if name[self].extlangs begin[:]
for taget[name[extlang]] in starred[call[name[sorted], parameter[name[self].extlangs]]] begin[:]
call[name[subtags].append, parameter[name[extlang]]]
if name[self].script begin[:]
call[name[subtags].append, parameter[name[self].script]]
if name[self].region begin[:]
call[name[subtags].append, parameter[name[self].region]]
if name[self].variants begin[:]
for taget[name[variant]] in starred[call[name[sorted], parameter[name[self].variants]]] begin[:]
call[name[subtags].append, parameter[name[variant]]]
if name[self].extensions begin[:]
for taget[name[ext]] in starred[name[self].extensions] begin[:]
call[name[subtags].append, parameter[name[ext]]]
if name[self].private begin[:]
call[name[subtags].append, parameter[name[self].private]]
name[self]._str_tag assign[=] call[constant[-].join, parameter[name[subtags]]]
return[name[self]._str_tag]
|
keyword[def] identifier[to_tag] ( identifier[self] )-> identifier[str] :
literal[string]
keyword[if] identifier[self] . identifier[_str_tag] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[self] . identifier[_str_tag]
identifier[subtags] =[ literal[string] ]
keyword[if] identifier[self] . identifier[language] :
identifier[subtags] [ literal[int] ]= identifier[self] . identifier[language]
keyword[if] identifier[self] . identifier[extlangs] :
keyword[for] identifier[extlang] keyword[in] identifier[sorted] ( identifier[self] . identifier[extlangs] ):
identifier[subtags] . identifier[append] ( identifier[extlang] )
keyword[if] identifier[self] . identifier[script] :
identifier[subtags] . identifier[append] ( identifier[self] . identifier[script] )
keyword[if] identifier[self] . identifier[region] :
identifier[subtags] . identifier[append] ( identifier[self] . identifier[region] )
keyword[if] identifier[self] . identifier[variants] :
keyword[for] identifier[variant] keyword[in] identifier[sorted] ( identifier[self] . identifier[variants] ):
identifier[subtags] . identifier[append] ( identifier[variant] )
keyword[if] identifier[self] . identifier[extensions] :
keyword[for] identifier[ext] keyword[in] identifier[self] . identifier[extensions] :
identifier[subtags] . identifier[append] ( identifier[ext] )
keyword[if] identifier[self] . identifier[private] :
identifier[subtags] . identifier[append] ( identifier[self] . identifier[private] )
identifier[self] . identifier[_str_tag] = literal[string] . identifier[join] ( identifier[subtags] )
keyword[return] identifier[self] . identifier[_str_tag]
|
def to_tag(self) -> str:
"""
Convert a Language back to a standard language tag, as a string.
This is also the str() representation of a Language object.
>>> Language.make(language='en', region='GB').to_tag()
'en-GB'
>>> Language.make(language='yue', script='Hant', region='HK').to_tag()
'yue-Hant-HK'
>>> Language.make(script='Arab').to_tag()
'und-Arab'
>>> str(Language.make(region='IN'))
'und-IN'
"""
if self._str_tag is not None:
return self._str_tag # depends on [control=['if'], data=[]]
subtags = ['und']
if self.language:
subtags[0] = self.language # depends on [control=['if'], data=[]]
if self.extlangs:
for extlang in sorted(self.extlangs):
subtags.append(extlang) # depends on [control=['for'], data=['extlang']] # depends on [control=['if'], data=[]]
if self.script:
subtags.append(self.script) # depends on [control=['if'], data=[]]
if self.region:
subtags.append(self.region) # depends on [control=['if'], data=[]]
if self.variants:
for variant in sorted(self.variants):
subtags.append(variant) # depends on [control=['for'], data=['variant']] # depends on [control=['if'], data=[]]
if self.extensions:
for ext in self.extensions:
subtags.append(ext) # depends on [control=['for'], data=['ext']] # depends on [control=['if'], data=[]]
if self.private:
subtags.append(self.private) # depends on [control=['if'], data=[]]
self._str_tag = '-'.join(subtags)
return self._str_tag
|
def keep_episodes(show, keep):
""" Delete all but last count episodes in show. """
deleted = 0
print('%s Cleaning %s to %s episodes.' % (datestr(), show.title, keep))
sort = lambda x:x.originallyAvailableAt or x.addedAt
items = sorted(show.episodes(), key=sort, reverse=True)
for episode in items[keep:]:
delete_episode(episode)
deleted += 1
return deleted
|
def function[keep_episodes, parameter[show, keep]]:
constant[ Delete all but last count episodes in show. ]
variable[deleted] assign[=] constant[0]
call[name[print], parameter[binary_operation[constant[%s Cleaning %s to %s episodes.] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da20e956ef0>, <ast.Attribute object at 0x7da20e955e10>, <ast.Name object at 0x7da20e956a40>]]]]]
variable[sort] assign[=] <ast.Lambda object at 0x7da20e956a70>
variable[items] assign[=] call[name[sorted], parameter[call[name[show].episodes, parameter[]]]]
for taget[name[episode]] in starred[call[name[items]][<ast.Slice object at 0x7da20e957100>]] begin[:]
call[name[delete_episode], parameter[name[episode]]]
<ast.AugAssign object at 0x7da20e9557e0>
return[name[deleted]]
|
keyword[def] identifier[keep_episodes] ( identifier[show] , identifier[keep] ):
literal[string]
identifier[deleted] = literal[int]
identifier[print] ( literal[string] %( identifier[datestr] (), identifier[show] . identifier[title] , identifier[keep] ))
identifier[sort] = keyword[lambda] identifier[x] : identifier[x] . identifier[originallyAvailableAt] keyword[or] identifier[x] . identifier[addedAt]
identifier[items] = identifier[sorted] ( identifier[show] . identifier[episodes] (), identifier[key] = identifier[sort] , identifier[reverse] = keyword[True] )
keyword[for] identifier[episode] keyword[in] identifier[items] [ identifier[keep] :]:
identifier[delete_episode] ( identifier[episode] )
identifier[deleted] += literal[int]
keyword[return] identifier[deleted]
|
def keep_episodes(show, keep):
""" Delete all but last count episodes in show. """
deleted = 0
print('%s Cleaning %s to %s episodes.' % (datestr(), show.title, keep))
sort = lambda x: x.originallyAvailableAt or x.addedAt
items = sorted(show.episodes(), key=sort, reverse=True)
for episode in items[keep:]:
delete_episode(episode)
deleted += 1 # depends on [control=['for'], data=['episode']]
return deleted
|
def setColor( self, color ):
"""
Sets the color for this widget to the inputed color.
:param color | <QColor>
"""
self._color = QColor(color)
self.setAlternateColor(self._color.lighter(140))
|
def function[setColor, parameter[self, color]]:
constant[
Sets the color for this widget to the inputed color.
:param color | <QColor>
]
name[self]._color assign[=] call[name[QColor], parameter[name[color]]]
call[name[self].setAlternateColor, parameter[call[name[self]._color.lighter, parameter[constant[140]]]]]
|
keyword[def] identifier[setColor] ( identifier[self] , identifier[color] ):
literal[string]
identifier[self] . identifier[_color] = identifier[QColor] ( identifier[color] )
identifier[self] . identifier[setAlternateColor] ( identifier[self] . identifier[_color] . identifier[lighter] ( literal[int] ))
|
def setColor(self, color):
"""
Sets the color for this widget to the inputed color.
:param color | <QColor>
"""
self._color = QColor(color)
self.setAlternateColor(self._color.lighter(140))
|
def disconnect(self, func=None):
"""Disconnect a function call to the signal. If None, all connections
are disconnected"""
if func is None:
self._connections = []
else:
self._connections.remove(func)
|
def function[disconnect, parameter[self, func]]:
constant[Disconnect a function call to the signal. If None, all connections
are disconnected]
if compare[name[func] is constant[None]] begin[:]
name[self]._connections assign[=] list[[]]
|
keyword[def] identifier[disconnect] ( identifier[self] , identifier[func] = keyword[None] ):
literal[string]
keyword[if] identifier[func] keyword[is] keyword[None] :
identifier[self] . identifier[_connections] =[]
keyword[else] :
identifier[self] . identifier[_connections] . identifier[remove] ( identifier[func] )
|
def disconnect(self, func=None):
"""Disconnect a function call to the signal. If None, all connections
are disconnected"""
if func is None:
self._connections = [] # depends on [control=['if'], data=[]]
else:
self._connections.remove(func)
|
def _input_filter(self, keys, raw):
"""
handles keypresses.
This function gets triggered directly by class:`urwid.MainLoop`
upon user input and is supposed to pass on its `keys` parameter
to let the root widget handle keys. We intercept the input here
to trigger custom commands as defined in our keybindings.
"""
logging.debug("Got key (%s, %s)", keys, raw)
# work around: escape triggers this twice, with keys = raw = []
# the first time..
if not keys:
return
# let widgets handle input if key is virtual window resize keypress
# or we are in "passall" mode
elif 'window resize' in keys or self._passall:
return keys
# end "lockdown" mode if the right key was pressed
elif self._locked and keys[0] == self._unlock_key:
self._locked = False
self.mainloop.widget = self.root_widget
if callable(self._unlock_callback):
self._unlock_callback()
# otherwise interpret keybinding
else:
def clear(*_):
"""Callback that resets the input queue."""
if self._alarm is not None:
self.mainloop.remove_alarm(self._alarm)
self.input_queue = []
async def _apply_fire(cmdline):
try:
await self.apply_commandline(cmdline)
except CommandParseError as e:
self.notify(str(e), priority='error')
def fire(_, cmdline):
clear()
logging.debug("cmdline: '%s'", cmdline)
if not self._locked:
loop = asyncio.get_event_loop()
loop.create_task(_apply_fire(cmdline))
# move keys are always passed
elif cmdline in ['move up', 'move down', 'move page up',
'move page down']:
return [cmdline[5:]]
key = keys[0]
if key and 'mouse' in key[0]:
key = key[0] + ' %i' % key[1]
self.input_queue.append(key)
keyseq = ' '.join(self.input_queue)
candidates = settings.get_mapped_input_keysequences(self.mode,
prefix=keyseq)
if keyseq in candidates:
# case: current input queue is a mapped keysequence
# get binding and interpret it if non-null
cmdline = settings.get_keybinding(self.mode, keyseq)
if cmdline:
if len(candidates) > 1:
timeout = float(settings.get('input_timeout'))
if self._alarm is not None:
self.mainloop.remove_alarm(self._alarm)
self._alarm = self.mainloop.set_alarm_in(
timeout, fire, cmdline)
else:
return fire(self.mainloop, cmdline)
elif not candidates:
# case: no sequence with prefix keyseq is mapped
# just clear the input queue
clear()
else:
# case: some sequences with proper prefix keyseq is mapped
timeout = float(settings.get('input_timeout'))
if self._alarm is not None:
self.mainloop.remove_alarm(self._alarm)
self._alarm = self.mainloop.set_alarm_in(timeout, clear)
# update statusbar
self.update()
|
def function[_input_filter, parameter[self, keys, raw]]:
constant[
handles keypresses.
This function gets triggered directly by class:`urwid.MainLoop`
upon user input and is supposed to pass on its `keys` parameter
to let the root widget handle keys. We intercept the input here
to trigger custom commands as defined in our keybindings.
]
call[name[logging].debug, parameter[constant[Got key (%s, %s)], name[keys], name[raw]]]
if <ast.UnaryOp object at 0x7da1b0719f30> begin[:]
return[None]
|
keyword[def] identifier[_input_filter] ( identifier[self] , identifier[keys] , identifier[raw] ):
literal[string]
identifier[logging] . identifier[debug] ( literal[string] , identifier[keys] , identifier[raw] )
keyword[if] keyword[not] identifier[keys] :
keyword[return]
keyword[elif] literal[string] keyword[in] identifier[keys] keyword[or] identifier[self] . identifier[_passall] :
keyword[return] identifier[keys]
keyword[elif] identifier[self] . identifier[_locked] keyword[and] identifier[keys] [ literal[int] ]== identifier[self] . identifier[_unlock_key] :
identifier[self] . identifier[_locked] = keyword[False]
identifier[self] . identifier[mainloop] . identifier[widget] = identifier[self] . identifier[root_widget]
keyword[if] identifier[callable] ( identifier[self] . identifier[_unlock_callback] ):
identifier[self] . identifier[_unlock_callback] ()
keyword[else] :
keyword[def] identifier[clear] (* identifier[_] ):
literal[string]
keyword[if] identifier[self] . identifier[_alarm] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[mainloop] . identifier[remove_alarm] ( identifier[self] . identifier[_alarm] )
identifier[self] . identifier[input_queue] =[]
keyword[async] keyword[def] identifier[_apply_fire] ( identifier[cmdline] ):
keyword[try] :
keyword[await] identifier[self] . identifier[apply_commandline] ( identifier[cmdline] )
keyword[except] identifier[CommandParseError] keyword[as] identifier[e] :
identifier[self] . identifier[notify] ( identifier[str] ( identifier[e] ), identifier[priority] = literal[string] )
keyword[def] identifier[fire] ( identifier[_] , identifier[cmdline] ):
identifier[clear] ()
identifier[logging] . identifier[debug] ( literal[string] , identifier[cmdline] )
keyword[if] keyword[not] identifier[self] . identifier[_locked] :
identifier[loop] = identifier[asyncio] . identifier[get_event_loop] ()
identifier[loop] . identifier[create_task] ( identifier[_apply_fire] ( identifier[cmdline] ))
keyword[elif] identifier[cmdline] keyword[in] [ literal[string] , literal[string] , literal[string] ,
literal[string] ]:
keyword[return] [ identifier[cmdline] [ literal[int] :]]
identifier[key] = identifier[keys] [ literal[int] ]
keyword[if] identifier[key] keyword[and] literal[string] keyword[in] identifier[key] [ literal[int] ]:
identifier[key] = identifier[key] [ literal[int] ]+ literal[string] % identifier[key] [ literal[int] ]
identifier[self] . identifier[input_queue] . identifier[append] ( identifier[key] )
identifier[keyseq] = literal[string] . identifier[join] ( identifier[self] . identifier[input_queue] )
identifier[candidates] = identifier[settings] . identifier[get_mapped_input_keysequences] ( identifier[self] . identifier[mode] ,
identifier[prefix] = identifier[keyseq] )
keyword[if] identifier[keyseq] keyword[in] identifier[candidates] :
identifier[cmdline] = identifier[settings] . identifier[get_keybinding] ( identifier[self] . identifier[mode] , identifier[keyseq] )
keyword[if] identifier[cmdline] :
keyword[if] identifier[len] ( identifier[candidates] )> literal[int] :
identifier[timeout] = identifier[float] ( identifier[settings] . identifier[get] ( literal[string] ))
keyword[if] identifier[self] . identifier[_alarm] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[mainloop] . identifier[remove_alarm] ( identifier[self] . identifier[_alarm] )
identifier[self] . identifier[_alarm] = identifier[self] . identifier[mainloop] . identifier[set_alarm_in] (
identifier[timeout] , identifier[fire] , identifier[cmdline] )
keyword[else] :
keyword[return] identifier[fire] ( identifier[self] . identifier[mainloop] , identifier[cmdline] )
keyword[elif] keyword[not] identifier[candidates] :
identifier[clear] ()
keyword[else] :
identifier[timeout] = identifier[float] ( identifier[settings] . identifier[get] ( literal[string] ))
keyword[if] identifier[self] . identifier[_alarm] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[mainloop] . identifier[remove_alarm] ( identifier[self] . identifier[_alarm] )
identifier[self] . identifier[_alarm] = identifier[self] . identifier[mainloop] . identifier[set_alarm_in] ( identifier[timeout] , identifier[clear] )
identifier[self] . identifier[update] ()
|
def _input_filter(self, keys, raw):
"""
handles keypresses.
This function gets triggered directly by class:`urwid.MainLoop`
upon user input and is supposed to pass on its `keys` parameter
to let the root widget handle keys. We intercept the input here
to trigger custom commands as defined in our keybindings.
"""
logging.debug('Got key (%s, %s)', keys, raw)
# work around: escape triggers this twice, with keys = raw = []
# the first time..
if not keys:
return # depends on [control=['if'], data=[]]
# let widgets handle input if key is virtual window resize keypress
# or we are in "passall" mode
elif 'window resize' in keys or self._passall:
return keys # depends on [control=['if'], data=[]]
# end "lockdown" mode if the right key was pressed
elif self._locked and keys[0] == self._unlock_key:
self._locked = False
self.mainloop.widget = self.root_widget
if callable(self._unlock_callback):
self._unlock_callback() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
# otherwise interpret keybinding
def clear(*_):
"""Callback that resets the input queue."""
if self._alarm is not None:
self.mainloop.remove_alarm(self._alarm) # depends on [control=['if'], data=[]]
self.input_queue = []
async def _apply_fire(cmdline):
try:
await self.apply_commandline(cmdline) # depends on [control=['try'], data=[]]
except CommandParseError as e:
self.notify(str(e), priority='error') # depends on [control=['except'], data=['e']]
def fire(_, cmdline):
clear()
logging.debug("cmdline: '%s'", cmdline)
if not self._locked:
loop = asyncio.get_event_loop()
loop.create_task(_apply_fire(cmdline)) # depends on [control=['if'], data=[]]
# move keys are always passed
elif cmdline in ['move up', 'move down', 'move page up', 'move page down']:
return [cmdline[5:]] # depends on [control=['if'], data=['cmdline']]
key = keys[0]
if key and 'mouse' in key[0]:
key = key[0] + ' %i' % key[1] # depends on [control=['if'], data=[]]
self.input_queue.append(key)
keyseq = ' '.join(self.input_queue)
candidates = settings.get_mapped_input_keysequences(self.mode, prefix=keyseq)
if keyseq in candidates:
# case: current input queue is a mapped keysequence
# get binding and interpret it if non-null
cmdline = settings.get_keybinding(self.mode, keyseq)
if cmdline:
if len(candidates) > 1:
timeout = float(settings.get('input_timeout'))
if self._alarm is not None:
self.mainloop.remove_alarm(self._alarm) # depends on [control=['if'], data=[]]
self._alarm = self.mainloop.set_alarm_in(timeout, fire, cmdline) # depends on [control=['if'], data=[]]
else:
return fire(self.mainloop, cmdline) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['keyseq', 'candidates']]
elif not candidates:
# case: no sequence with prefix keyseq is mapped
# just clear the input queue
clear() # depends on [control=['if'], data=[]]
else:
# case: some sequences with proper prefix keyseq is mapped
timeout = float(settings.get('input_timeout'))
if self._alarm is not None:
self.mainloop.remove_alarm(self._alarm) # depends on [control=['if'], data=[]]
self._alarm = self.mainloop.set_alarm_in(timeout, clear)
# update statusbar
self.update()
|
def is_promisc(ip, fake_bcast="ff:ff:00:00:00:00", **kargs):
"""Try to guess if target is in Promisc mode. The target is provided by its ip.""" # noqa: E501
responses = srp1(Ether(dst=fake_bcast) / ARP(op="who-has", pdst=ip), type=ETH_P_ARP, iface_hint=ip, timeout=1, verbose=0, **kargs) # noqa: E501
return responses is not None
|
def function[is_promisc, parameter[ip, fake_bcast]]:
constant[Try to guess if target is in Promisc mode. The target is provided by its ip.]
variable[responses] assign[=] call[name[srp1], parameter[binary_operation[call[name[Ether], parameter[]] / call[name[ARP], parameter[]]]]]
return[compare[name[responses] is_not constant[None]]]
|
keyword[def] identifier[is_promisc] ( identifier[ip] , identifier[fake_bcast] = literal[string] ,** identifier[kargs] ):
literal[string]
identifier[responses] = identifier[srp1] ( identifier[Ether] ( identifier[dst] = identifier[fake_bcast] )/ identifier[ARP] ( identifier[op] = literal[string] , identifier[pdst] = identifier[ip] ), identifier[type] = identifier[ETH_P_ARP] , identifier[iface_hint] = identifier[ip] , identifier[timeout] = literal[int] , identifier[verbose] = literal[int] ,** identifier[kargs] )
keyword[return] identifier[responses] keyword[is] keyword[not] keyword[None]
|
def is_promisc(ip, fake_bcast='ff:ff:00:00:00:00', **kargs):
"""Try to guess if target is in Promisc mode. The target is provided by its ip.""" # noqa: E501
responses = srp1(Ether(dst=fake_bcast) / ARP(op='who-has', pdst=ip), type=ETH_P_ARP, iface_hint=ip, timeout=1, verbose=0, **kargs) # noqa: E501
return responses is not None
|
def _connect(self, host, port, proc, timeout_seconds):
"""Connect to the websocket, retrying as needed. Returns the socket."""
if ":" in host and not host.startswith("["): # Support ipv6 addresses.
host = "[%s]" % host
url = "ws://%s:%s/sc2api" % (host, port)
was_running = False
for i in range(timeout_seconds):
is_running = proc and proc.running
was_running = was_running or is_running
if (i >= timeout_seconds // 4 or was_running) and not is_running:
logging.warning(
"SC2 isn't running, so bailing early on the websocket connection.")
break
logging.info("Connecting to: %s, attempt: %s, running: %s", url, i,
is_running)
try:
return websocket.create_connection(url, timeout=timeout_seconds)
except socket.error:
pass # SC2 hasn't started listening yet.
except websocket.WebSocketBadStatusException as err:
if err.status_code == 404:
pass # SC2 is listening, but hasn't set up the /sc2api endpoint yet.
else:
raise
time.sleep(1)
raise ConnectError("Failed to connect to the SC2 websocket. Is it up?")
|
def function[_connect, parameter[self, host, port, proc, timeout_seconds]]:
constant[Connect to the websocket, retrying as needed. Returns the socket.]
if <ast.BoolOp object at 0x7da20cabf160> begin[:]
variable[host] assign[=] binary_operation[constant[[%s]] <ast.Mod object at 0x7da2590d6920> name[host]]
variable[url] assign[=] binary_operation[constant[ws://%s:%s/sc2api] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18f58e290>, <ast.Name object at 0x7da18f58ff70>]]]
variable[was_running] assign[=] constant[False]
for taget[name[i]] in starred[call[name[range], parameter[name[timeout_seconds]]]] begin[:]
variable[is_running] assign[=] <ast.BoolOp object at 0x7da18f58c610>
variable[was_running] assign[=] <ast.BoolOp object at 0x7da18f58c1f0>
if <ast.BoolOp object at 0x7da18bcc8ca0> begin[:]
call[name[logging].warning, parameter[constant[SC2 isn't running, so bailing early on the websocket connection.]]]
break
call[name[logging].info, parameter[constant[Connecting to: %s, attempt: %s, running: %s], name[url], name[i], name[is_running]]]
<ast.Try object at 0x7da18bcc9a80>
call[name[time].sleep, parameter[constant[1]]]
<ast.Raise object at 0x7da20cabd090>
|
keyword[def] identifier[_connect] ( identifier[self] , identifier[host] , identifier[port] , identifier[proc] , identifier[timeout_seconds] ):
literal[string]
keyword[if] literal[string] keyword[in] identifier[host] keyword[and] keyword[not] identifier[host] . identifier[startswith] ( literal[string] ):
identifier[host] = literal[string] % identifier[host]
identifier[url] = literal[string] %( identifier[host] , identifier[port] )
identifier[was_running] = keyword[False]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[timeout_seconds] ):
identifier[is_running] = identifier[proc] keyword[and] identifier[proc] . identifier[running]
identifier[was_running] = identifier[was_running] keyword[or] identifier[is_running]
keyword[if] ( identifier[i] >= identifier[timeout_seconds] // literal[int] keyword[or] identifier[was_running] ) keyword[and] keyword[not] identifier[is_running] :
identifier[logging] . identifier[warning] (
literal[string] )
keyword[break]
identifier[logging] . identifier[info] ( literal[string] , identifier[url] , identifier[i] ,
identifier[is_running] )
keyword[try] :
keyword[return] identifier[websocket] . identifier[create_connection] ( identifier[url] , identifier[timeout] = identifier[timeout_seconds] )
keyword[except] identifier[socket] . identifier[error] :
keyword[pass]
keyword[except] identifier[websocket] . identifier[WebSocketBadStatusException] keyword[as] identifier[err] :
keyword[if] identifier[err] . identifier[status_code] == literal[int] :
keyword[pass]
keyword[else] :
keyword[raise]
identifier[time] . identifier[sleep] ( literal[int] )
keyword[raise] identifier[ConnectError] ( literal[string] )
|
def _connect(self, host, port, proc, timeout_seconds):
"""Connect to the websocket, retrying as needed. Returns the socket."""
if ':' in host and (not host.startswith('[')): # Support ipv6 addresses.
host = '[%s]' % host # depends on [control=['if'], data=[]]
url = 'ws://%s:%s/sc2api' % (host, port)
was_running = False
for i in range(timeout_seconds):
is_running = proc and proc.running
was_running = was_running or is_running
if (i >= timeout_seconds // 4 or was_running) and (not is_running):
logging.warning("SC2 isn't running, so bailing early on the websocket connection.")
break # depends on [control=['if'], data=[]]
logging.info('Connecting to: %s, attempt: %s, running: %s', url, i, is_running)
try:
return websocket.create_connection(url, timeout=timeout_seconds) # depends on [control=['try'], data=[]]
except socket.error:
pass # SC2 hasn't started listening yet. # depends on [control=['except'], data=[]]
except websocket.WebSocketBadStatusException as err:
if err.status_code == 404:
pass # SC2 is listening, but hasn't set up the /sc2api endpoint yet. # depends on [control=['if'], data=[]]
else:
raise # depends on [control=['except'], data=['err']]
time.sleep(1) # depends on [control=['for'], data=['i']]
raise ConnectError('Failed to connect to the SC2 websocket. Is it up?')
|
def doFeedback(self, item_id, use_comment_template, buyer_id, comment, comment_type, op):
"""http://allegro.pl/webapi/documentation.php/show/id,42"""
return self.__ask__('doFeedback',
feItemId=item_id,
feUseCommentTemplate=use_comment_template,
feToUserId=buyer_id,
feComment=comment,
feCommentType=comment_type,
feOp=op)['feedbackId']
|
def function[doFeedback, parameter[self, item_id, use_comment_template, buyer_id, comment, comment_type, op]]:
constant[http://allegro.pl/webapi/documentation.php/show/id,42]
return[call[call[name[self].__ask__, parameter[constant[doFeedback]]]][constant[feedbackId]]]
|
keyword[def] identifier[doFeedback] ( identifier[self] , identifier[item_id] , identifier[use_comment_template] , identifier[buyer_id] , identifier[comment] , identifier[comment_type] , identifier[op] ):
literal[string]
keyword[return] identifier[self] . identifier[__ask__] ( literal[string] ,
identifier[feItemId] = identifier[item_id] ,
identifier[feUseCommentTemplate] = identifier[use_comment_template] ,
identifier[feToUserId] = identifier[buyer_id] ,
identifier[feComment] = identifier[comment] ,
identifier[feCommentType] = identifier[comment_type] ,
identifier[feOp] = identifier[op] )[ literal[string] ]
|
def doFeedback(self, item_id, use_comment_template, buyer_id, comment, comment_type, op):
"""http://allegro.pl/webapi/documentation.php/show/id,42"""
return self.__ask__('doFeedback', feItemId=item_id, feUseCommentTemplate=use_comment_template, feToUserId=buyer_id, feComment=comment, feCommentType=comment_type, feOp=op)['feedbackId']
|
def estimate_tt_to_rectify(self, order, slitlet2d=None):
"""Estimate the polynomial transformation to rectify the image.
Parameters
----------
order : int
Order of the polynomial transformation.
slitlet2d : numpy array
Slitlet image to be displayed with the computed boundaries
and intersecting points overplotted. This argument is
optional.
"""
# protections
if self.x_inter_orig is None \
or self.y_inter_orig is None \
or self.x_inter_rect is None \
or self.y_inter_rect is None:
raise ValueError('Intersection points not computed')
npoints = len(self.x_inter_orig)
if len(self.y_inter_orig) != npoints \
or len(self.x_inter_rect) != npoints \
or len(self.y_inter_rect) != npoints:
raise ValueError('Unexpected different number of points')
# IMPORTANT: correct coordinates from origin in order to manipulate
# coordinates corresponding to image indices
x_inter_orig_shifted = self.x_inter_orig - self.bb_nc1_orig
y_inter_orig_shifted = self.y_inter_orig - self.bb_ns1_orig
x_inter_rect_shifted = self.x_inter_rect - self.bb_nc1_orig
y_inter_rect_shifted = self.y_inter_rect - self.bb_ns1_orig
# compute 2D transformation
self.ttd_order = order
self.ttd_aij, self.ttd_bij = compute_distortion(
x_inter_orig_shifted, y_inter_orig_shifted,
x_inter_rect_shifted, y_inter_rect_shifted,
order,
self.debugplot
)
self.tti_aij, self.tti_bij = compute_distortion(
x_inter_rect_shifted, y_inter_rect_shifted,
x_inter_orig_shifted, y_inter_orig_shifted,
order,
self.debugplot
)
# display slitlet with intersection points and grid indicating
# the fitted transformation
if abs(self.debugplot % 10) != 0 and slitlet2d is not None:
# display image with zscale cuts
title = "Slitlet#" + str(self.islitlet) + \
" (estimate_tt_to_rectify)"
ax = ximshow(slitlet2d, title=title,
first_pixel=(self.bb_nc1_orig, self.bb_ns1_orig),
show=False)
# intersection points
ax.plot(self.x_inter_orig, self.y_inter_orig, 'co')
ax.plot(self.x_inter_rect, self.y_inter_rect, 'bo')
# grid with fitted transformation: spectrum trails
xx = np.arange(0, self.bb_nc2_orig - self.bb_nc1_orig + 1,
dtype=np.float)
for spectrail in self.list_spectrails:
yy0 = self.corr_yrect_a + \
self.corr_yrect_b * spectrail.y_rectified
yy = np.tile([yy0 - self.bb_ns1_orig], xx.size)
ax.plot(xx + self.bb_nc1_orig, yy + self.bb_ns1_orig, "b")
xxx, yyy = fmap(self.ttd_order, self.ttd_aij, self.ttd_bij,
xx, yy)
ax.plot(xxx + self.bb_nc1_orig, yyy + self.bb_ns1_orig, "g")
# grid with fitted transformation: arc lines
ylower_line = \
self.list_spectrails[self.i_lower_spectrail].y_rectified
ylower_line = self.corr_yrect_a + self.corr_yrect_b * ylower_line
yupper_line = \
self.list_spectrails[self.i_upper_spectrail].y_rectified
yupper_line = self.corr_yrect_a + self.corr_yrect_b * yupper_line
n_points = int(yupper_line - ylower_line + 0.5) + 1
yy = np.linspace(ylower_line - self.bb_ns1_orig,
yupper_line - self.bb_ns1_orig,
num=n_points,
dtype=np.float)
for arc_line in self.list_arc_lines:
xline = arc_line.x_rectified - self.bb_nc1_orig
xx = np.array([xline] * n_points)
ax.plot(xx + self.bb_nc1_orig,yy + self.bb_ns1_orig, "b" )
xxx, yyy = fmap(self.ttd_order, self.ttd_aij, self.ttd_bij,
xx, yy)
ax.plot(xxx + self.bb_nc1_orig, yyy + self.bb_ns1_orig, "c")
# show plot
pause_debugplot(self.debugplot, pltshow=True)
|
def function[estimate_tt_to_rectify, parameter[self, order, slitlet2d]]:
constant[Estimate the polynomial transformation to rectify the image.
Parameters
----------
order : int
Order of the polynomial transformation.
slitlet2d : numpy array
Slitlet image to be displayed with the computed boundaries
and intersecting points overplotted. This argument is
optional.
]
if <ast.BoolOp object at 0x7da20e962110> begin[:]
<ast.Raise object at 0x7da20e961390>
variable[npoints] assign[=] call[name[len], parameter[name[self].x_inter_orig]]
if <ast.BoolOp object at 0x7da20e961420> begin[:]
<ast.Raise object at 0x7da20e9605e0>
variable[x_inter_orig_shifted] assign[=] binary_operation[name[self].x_inter_orig - name[self].bb_nc1_orig]
variable[y_inter_orig_shifted] assign[=] binary_operation[name[self].y_inter_orig - name[self].bb_ns1_orig]
variable[x_inter_rect_shifted] assign[=] binary_operation[name[self].x_inter_rect - name[self].bb_nc1_orig]
variable[y_inter_rect_shifted] assign[=] binary_operation[name[self].y_inter_rect - name[self].bb_ns1_orig]
name[self].ttd_order assign[=] name[order]
<ast.Tuple object at 0x7da20e961c00> assign[=] call[name[compute_distortion], parameter[name[x_inter_orig_shifted], name[y_inter_orig_shifted], name[x_inter_rect_shifted], name[y_inter_rect_shifted], name[order], name[self].debugplot]]
<ast.Tuple object at 0x7da2054a7220> assign[=] call[name[compute_distortion], parameter[name[x_inter_rect_shifted], name[y_inter_rect_shifted], name[x_inter_orig_shifted], name[y_inter_orig_shifted], name[order], name[self].debugplot]]
if <ast.BoolOp object at 0x7da2054a52a0> begin[:]
variable[title] assign[=] binary_operation[binary_operation[constant[Slitlet#] + call[name[str], parameter[name[self].islitlet]]] + constant[ (estimate_tt_to_rectify)]]
variable[ax] assign[=] call[name[ximshow], parameter[name[slitlet2d]]]
call[name[ax].plot, parameter[name[self].x_inter_orig, name[self].y_inter_orig, constant[co]]]
call[name[ax].plot, parameter[name[self].x_inter_rect, name[self].y_inter_rect, constant[bo]]]
variable[xx] assign[=] call[name[np].arange, parameter[constant[0], binary_operation[binary_operation[name[self].bb_nc2_orig - name[self].bb_nc1_orig] + constant[1]]]]
for taget[name[spectrail]] in starred[name[self].list_spectrails] begin[:]
variable[yy0] assign[=] binary_operation[name[self].corr_yrect_a + binary_operation[name[self].corr_yrect_b * name[spectrail].y_rectified]]
variable[yy] assign[=] call[name[np].tile, parameter[list[[<ast.BinOp object at 0x7da18c4cf040>]], name[xx].size]]
call[name[ax].plot, parameter[binary_operation[name[xx] + name[self].bb_nc1_orig], binary_operation[name[yy] + name[self].bb_ns1_orig], constant[b]]]
<ast.Tuple object at 0x7da18c4cf0d0> assign[=] call[name[fmap], parameter[name[self].ttd_order, name[self].ttd_aij, name[self].ttd_bij, name[xx], name[yy]]]
call[name[ax].plot, parameter[binary_operation[name[xxx] + name[self].bb_nc1_orig], binary_operation[name[yyy] + name[self].bb_ns1_orig], constant[g]]]
variable[ylower_line] assign[=] call[name[self].list_spectrails][name[self].i_lower_spectrail].y_rectified
variable[ylower_line] assign[=] binary_operation[name[self].corr_yrect_a + binary_operation[name[self].corr_yrect_b * name[ylower_line]]]
variable[yupper_line] assign[=] call[name[self].list_spectrails][name[self].i_upper_spectrail].y_rectified
variable[yupper_line] assign[=] binary_operation[name[self].corr_yrect_a + binary_operation[name[self].corr_yrect_b * name[yupper_line]]]
variable[n_points] assign[=] binary_operation[call[name[int], parameter[binary_operation[binary_operation[name[yupper_line] - name[ylower_line]] + constant[0.5]]]] + constant[1]]
variable[yy] assign[=] call[name[np].linspace, parameter[binary_operation[name[ylower_line] - name[self].bb_ns1_orig], binary_operation[name[yupper_line] - name[self].bb_ns1_orig]]]
for taget[name[arc_line]] in starred[name[self].list_arc_lines] begin[:]
variable[xline] assign[=] binary_operation[name[arc_line].x_rectified - name[self].bb_nc1_orig]
variable[xx] assign[=] call[name[np].array, parameter[binary_operation[list[[<ast.Name object at 0x7da18c4cceb0>]] * name[n_points]]]]
call[name[ax].plot, parameter[binary_operation[name[xx] + name[self].bb_nc1_orig], binary_operation[name[yy] + name[self].bb_ns1_orig], constant[b]]]
<ast.Tuple object at 0x7da18c4cdd80> assign[=] call[name[fmap], parameter[name[self].ttd_order, name[self].ttd_aij, name[self].ttd_bij, name[xx], name[yy]]]
call[name[ax].plot, parameter[binary_operation[name[xxx] + name[self].bb_nc1_orig], binary_operation[name[yyy] + name[self].bb_ns1_orig], constant[c]]]
call[name[pause_debugplot], parameter[name[self].debugplot]]
|
keyword[def] identifier[estimate_tt_to_rectify] ( identifier[self] , identifier[order] , identifier[slitlet2d] = keyword[None] ):
literal[string]
keyword[if] identifier[self] . identifier[x_inter_orig] keyword[is] keyword[None] keyword[or] identifier[self] . identifier[y_inter_orig] keyword[is] keyword[None] keyword[or] identifier[self] . identifier[x_inter_rect] keyword[is] keyword[None] keyword[or] identifier[self] . identifier[y_inter_rect] keyword[is] keyword[None] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[npoints] = identifier[len] ( identifier[self] . identifier[x_inter_orig] )
keyword[if] identifier[len] ( identifier[self] . identifier[y_inter_orig] )!= identifier[npoints] keyword[or] identifier[len] ( identifier[self] . identifier[x_inter_rect] )!= identifier[npoints] keyword[or] identifier[len] ( identifier[self] . identifier[y_inter_rect] )!= identifier[npoints] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[x_inter_orig_shifted] = identifier[self] . identifier[x_inter_orig] - identifier[self] . identifier[bb_nc1_orig]
identifier[y_inter_orig_shifted] = identifier[self] . identifier[y_inter_orig] - identifier[self] . identifier[bb_ns1_orig]
identifier[x_inter_rect_shifted] = identifier[self] . identifier[x_inter_rect] - identifier[self] . identifier[bb_nc1_orig]
identifier[y_inter_rect_shifted] = identifier[self] . identifier[y_inter_rect] - identifier[self] . identifier[bb_ns1_orig]
identifier[self] . identifier[ttd_order] = identifier[order]
identifier[self] . identifier[ttd_aij] , identifier[self] . identifier[ttd_bij] = identifier[compute_distortion] (
identifier[x_inter_orig_shifted] , identifier[y_inter_orig_shifted] ,
identifier[x_inter_rect_shifted] , identifier[y_inter_rect_shifted] ,
identifier[order] ,
identifier[self] . identifier[debugplot]
)
identifier[self] . identifier[tti_aij] , identifier[self] . identifier[tti_bij] = identifier[compute_distortion] (
identifier[x_inter_rect_shifted] , identifier[y_inter_rect_shifted] ,
identifier[x_inter_orig_shifted] , identifier[y_inter_orig_shifted] ,
identifier[order] ,
identifier[self] . identifier[debugplot]
)
keyword[if] identifier[abs] ( identifier[self] . identifier[debugplot] % literal[int] )!= literal[int] keyword[and] identifier[slitlet2d] keyword[is] keyword[not] keyword[None] :
identifier[title] = literal[string] + identifier[str] ( identifier[self] . identifier[islitlet] )+ literal[string]
identifier[ax] = identifier[ximshow] ( identifier[slitlet2d] , identifier[title] = identifier[title] ,
identifier[first_pixel] =( identifier[self] . identifier[bb_nc1_orig] , identifier[self] . identifier[bb_ns1_orig] ),
identifier[show] = keyword[False] )
identifier[ax] . identifier[plot] ( identifier[self] . identifier[x_inter_orig] , identifier[self] . identifier[y_inter_orig] , literal[string] )
identifier[ax] . identifier[plot] ( identifier[self] . identifier[x_inter_rect] , identifier[self] . identifier[y_inter_rect] , literal[string] )
identifier[xx] = identifier[np] . identifier[arange] ( literal[int] , identifier[self] . identifier[bb_nc2_orig] - identifier[self] . identifier[bb_nc1_orig] + literal[int] ,
identifier[dtype] = identifier[np] . identifier[float] )
keyword[for] identifier[spectrail] keyword[in] identifier[self] . identifier[list_spectrails] :
identifier[yy0] = identifier[self] . identifier[corr_yrect_a] + identifier[self] . identifier[corr_yrect_b] * identifier[spectrail] . identifier[y_rectified]
identifier[yy] = identifier[np] . identifier[tile] ([ identifier[yy0] - identifier[self] . identifier[bb_ns1_orig] ], identifier[xx] . identifier[size] )
identifier[ax] . identifier[plot] ( identifier[xx] + identifier[self] . identifier[bb_nc1_orig] , identifier[yy] + identifier[self] . identifier[bb_ns1_orig] , literal[string] )
identifier[xxx] , identifier[yyy] = identifier[fmap] ( identifier[self] . identifier[ttd_order] , identifier[self] . identifier[ttd_aij] , identifier[self] . identifier[ttd_bij] ,
identifier[xx] , identifier[yy] )
identifier[ax] . identifier[plot] ( identifier[xxx] + identifier[self] . identifier[bb_nc1_orig] , identifier[yyy] + identifier[self] . identifier[bb_ns1_orig] , literal[string] )
identifier[ylower_line] = identifier[self] . identifier[list_spectrails] [ identifier[self] . identifier[i_lower_spectrail] ]. identifier[y_rectified]
identifier[ylower_line] = identifier[self] . identifier[corr_yrect_a] + identifier[self] . identifier[corr_yrect_b] * identifier[ylower_line]
identifier[yupper_line] = identifier[self] . identifier[list_spectrails] [ identifier[self] . identifier[i_upper_spectrail] ]. identifier[y_rectified]
identifier[yupper_line] = identifier[self] . identifier[corr_yrect_a] + identifier[self] . identifier[corr_yrect_b] * identifier[yupper_line]
identifier[n_points] = identifier[int] ( identifier[yupper_line] - identifier[ylower_line] + literal[int] )+ literal[int]
identifier[yy] = identifier[np] . identifier[linspace] ( identifier[ylower_line] - identifier[self] . identifier[bb_ns1_orig] ,
identifier[yupper_line] - identifier[self] . identifier[bb_ns1_orig] ,
identifier[num] = identifier[n_points] ,
identifier[dtype] = identifier[np] . identifier[float] )
keyword[for] identifier[arc_line] keyword[in] identifier[self] . identifier[list_arc_lines] :
identifier[xline] = identifier[arc_line] . identifier[x_rectified] - identifier[self] . identifier[bb_nc1_orig]
identifier[xx] = identifier[np] . identifier[array] ([ identifier[xline] ]* identifier[n_points] )
identifier[ax] . identifier[plot] ( identifier[xx] + identifier[self] . identifier[bb_nc1_orig] , identifier[yy] + identifier[self] . identifier[bb_ns1_orig] , literal[string] )
identifier[xxx] , identifier[yyy] = identifier[fmap] ( identifier[self] . identifier[ttd_order] , identifier[self] . identifier[ttd_aij] , identifier[self] . identifier[ttd_bij] ,
identifier[xx] , identifier[yy] )
identifier[ax] . identifier[plot] ( identifier[xxx] + identifier[self] . identifier[bb_nc1_orig] , identifier[yyy] + identifier[self] . identifier[bb_ns1_orig] , literal[string] )
identifier[pause_debugplot] ( identifier[self] . identifier[debugplot] , identifier[pltshow] = keyword[True] )
|
def estimate_tt_to_rectify(self, order, slitlet2d=None):
"""Estimate the polynomial transformation to rectify the image.
Parameters
----------
order : int
Order of the polynomial transformation.
slitlet2d : numpy array
Slitlet image to be displayed with the computed boundaries
and intersecting points overplotted. This argument is
optional.
"""
# protections
if self.x_inter_orig is None or self.y_inter_orig is None or self.x_inter_rect is None or (self.y_inter_rect is None):
raise ValueError('Intersection points not computed') # depends on [control=['if'], data=[]]
npoints = len(self.x_inter_orig)
if len(self.y_inter_orig) != npoints or len(self.x_inter_rect) != npoints or len(self.y_inter_rect) != npoints:
raise ValueError('Unexpected different number of points') # depends on [control=['if'], data=[]]
# IMPORTANT: correct coordinates from origin in order to manipulate
# coordinates corresponding to image indices
x_inter_orig_shifted = self.x_inter_orig - self.bb_nc1_orig
y_inter_orig_shifted = self.y_inter_orig - self.bb_ns1_orig
x_inter_rect_shifted = self.x_inter_rect - self.bb_nc1_orig
y_inter_rect_shifted = self.y_inter_rect - self.bb_ns1_orig
# compute 2D transformation
self.ttd_order = order
(self.ttd_aij, self.ttd_bij) = compute_distortion(x_inter_orig_shifted, y_inter_orig_shifted, x_inter_rect_shifted, y_inter_rect_shifted, order, self.debugplot)
(self.tti_aij, self.tti_bij) = compute_distortion(x_inter_rect_shifted, y_inter_rect_shifted, x_inter_orig_shifted, y_inter_orig_shifted, order, self.debugplot)
# display slitlet with intersection points and grid indicating
# the fitted transformation
if abs(self.debugplot % 10) != 0 and slitlet2d is not None:
# display image with zscale cuts
title = 'Slitlet#' + str(self.islitlet) + ' (estimate_tt_to_rectify)'
ax = ximshow(slitlet2d, title=title, first_pixel=(self.bb_nc1_orig, self.bb_ns1_orig), show=False)
# intersection points
ax.plot(self.x_inter_orig, self.y_inter_orig, 'co')
ax.plot(self.x_inter_rect, self.y_inter_rect, 'bo')
# grid with fitted transformation: spectrum trails
xx = np.arange(0, self.bb_nc2_orig - self.bb_nc1_orig + 1, dtype=np.float)
for spectrail in self.list_spectrails:
yy0 = self.corr_yrect_a + self.corr_yrect_b * spectrail.y_rectified
yy = np.tile([yy0 - self.bb_ns1_orig], xx.size)
ax.plot(xx + self.bb_nc1_orig, yy + self.bb_ns1_orig, 'b')
(xxx, yyy) = fmap(self.ttd_order, self.ttd_aij, self.ttd_bij, xx, yy)
ax.plot(xxx + self.bb_nc1_orig, yyy + self.bb_ns1_orig, 'g') # depends on [control=['for'], data=['spectrail']]
# grid with fitted transformation: arc lines
ylower_line = self.list_spectrails[self.i_lower_spectrail].y_rectified
ylower_line = self.corr_yrect_a + self.corr_yrect_b * ylower_line
yupper_line = self.list_spectrails[self.i_upper_spectrail].y_rectified
yupper_line = self.corr_yrect_a + self.corr_yrect_b * yupper_line
n_points = int(yupper_line - ylower_line + 0.5) + 1
yy = np.linspace(ylower_line - self.bb_ns1_orig, yupper_line - self.bb_ns1_orig, num=n_points, dtype=np.float)
for arc_line in self.list_arc_lines:
xline = arc_line.x_rectified - self.bb_nc1_orig
xx = np.array([xline] * n_points)
ax.plot(xx + self.bb_nc1_orig, yy + self.bb_ns1_orig, 'b')
(xxx, yyy) = fmap(self.ttd_order, self.ttd_aij, self.ttd_bij, xx, yy)
ax.plot(xxx + self.bb_nc1_orig, yyy + self.bb_ns1_orig, 'c') # depends on [control=['for'], data=['arc_line']]
# show plot
pause_debugplot(self.debugplot, pltshow=True) # depends on [control=['if'], data=[]]
|
def load_commodities(self):
"""
Load the commodities for Amounts in this object.
"""
base, quote = self.market.split("_")
if isinstance(self.price, Amount):
self.price = Amount("{0:.8f} {1}".format(self.price.to_double(), quote))
else:
self.price = Amount("{0:.8f} {1}".format(float(self.price), quote))
if isinstance(self.amount, Amount):
self.amount = Amount("{0:.8f} {1}".format(self.amount.to_double(), base))
else:
self.amount = Amount("{0:.8f} {1}".format(float(self.amount), base))
fee_currency = base if self.fee_side == 'base' else quote
if isinstance(self.fee, Amount):
self.fee = Amount("{0:.8f} {1}".format(float(self.fee.to_double()), fee_currency))
else:
self.fee = Amount("{0:.8f} {1}".format(float(self.fee), fee_currency))
|
def function[load_commodities, parameter[self]]:
constant[
Load the commodities for Amounts in this object.
]
<ast.Tuple object at 0x7da18dc99f30> assign[=] call[name[self].market.split, parameter[constant[_]]]
if call[name[isinstance], parameter[name[self].price, name[Amount]]] begin[:]
name[self].price assign[=] call[name[Amount], parameter[call[constant[{0:.8f} {1}].format, parameter[call[name[self].price.to_double, parameter[]], name[quote]]]]]
if call[name[isinstance], parameter[name[self].amount, name[Amount]]] begin[:]
name[self].amount assign[=] call[name[Amount], parameter[call[constant[{0:.8f} {1}].format, parameter[call[name[self].amount.to_double, parameter[]], name[base]]]]]
variable[fee_currency] assign[=] <ast.IfExp object at 0x7da207f99120>
if call[name[isinstance], parameter[name[self].fee, name[Amount]]] begin[:]
name[self].fee assign[=] call[name[Amount], parameter[call[constant[{0:.8f} {1}].format, parameter[call[name[float], parameter[call[name[self].fee.to_double, parameter[]]]], name[fee_currency]]]]]
|
keyword[def] identifier[load_commodities] ( identifier[self] ):
literal[string]
identifier[base] , identifier[quote] = identifier[self] . identifier[market] . identifier[split] ( literal[string] )
keyword[if] identifier[isinstance] ( identifier[self] . identifier[price] , identifier[Amount] ):
identifier[self] . identifier[price] = identifier[Amount] ( literal[string] . identifier[format] ( identifier[self] . identifier[price] . identifier[to_double] (), identifier[quote] ))
keyword[else] :
identifier[self] . identifier[price] = identifier[Amount] ( literal[string] . identifier[format] ( identifier[float] ( identifier[self] . identifier[price] ), identifier[quote] ))
keyword[if] identifier[isinstance] ( identifier[self] . identifier[amount] , identifier[Amount] ):
identifier[self] . identifier[amount] = identifier[Amount] ( literal[string] . identifier[format] ( identifier[self] . identifier[amount] . identifier[to_double] (), identifier[base] ))
keyword[else] :
identifier[self] . identifier[amount] = identifier[Amount] ( literal[string] . identifier[format] ( identifier[float] ( identifier[self] . identifier[amount] ), identifier[base] ))
identifier[fee_currency] = identifier[base] keyword[if] identifier[self] . identifier[fee_side] == literal[string] keyword[else] identifier[quote]
keyword[if] identifier[isinstance] ( identifier[self] . identifier[fee] , identifier[Amount] ):
identifier[self] . identifier[fee] = identifier[Amount] ( literal[string] . identifier[format] ( identifier[float] ( identifier[self] . identifier[fee] . identifier[to_double] ()), identifier[fee_currency] ))
keyword[else] :
identifier[self] . identifier[fee] = identifier[Amount] ( literal[string] . identifier[format] ( identifier[float] ( identifier[self] . identifier[fee] ), identifier[fee_currency] ))
|
def load_commodities(self):
"""
Load the commodities for Amounts in this object.
"""
(base, quote) = self.market.split('_')
if isinstance(self.price, Amount):
self.price = Amount('{0:.8f} {1}'.format(self.price.to_double(), quote)) # depends on [control=['if'], data=[]]
else:
self.price = Amount('{0:.8f} {1}'.format(float(self.price), quote))
if isinstance(self.amount, Amount):
self.amount = Amount('{0:.8f} {1}'.format(self.amount.to_double(), base)) # depends on [control=['if'], data=[]]
else:
self.amount = Amount('{0:.8f} {1}'.format(float(self.amount), base))
fee_currency = base if self.fee_side == 'base' else quote
if isinstance(self.fee, Amount):
self.fee = Amount('{0:.8f} {1}'.format(float(self.fee.to_double()), fee_currency)) # depends on [control=['if'], data=[]]
else:
self.fee = Amount('{0:.8f} {1}'.format(float(self.fee), fee_currency))
|
def get_command(self):
"""
Get a line of text that was received from the DE. The class's
cmd_ready attribute will be true if lines are available.
"""
cmd = None
count = len(self.command_list)
if count > 0:
cmd = self.command_list.pop(0)
## If that was the last line, turn off lines_pending
if count == 1:
self.cmd_ready = False
return cmd
|
def function[get_command, parameter[self]]:
constant[
Get a line of text that was received from the DE. The class's
cmd_ready attribute will be true if lines are available.
]
variable[cmd] assign[=] constant[None]
variable[count] assign[=] call[name[len], parameter[name[self].command_list]]
if compare[name[count] greater[>] constant[0]] begin[:]
variable[cmd] assign[=] call[name[self].command_list.pop, parameter[constant[0]]]
if compare[name[count] equal[==] constant[1]] begin[:]
name[self].cmd_ready assign[=] constant[False]
return[name[cmd]]
|
keyword[def] identifier[get_command] ( identifier[self] ):
literal[string]
identifier[cmd] = keyword[None]
identifier[count] = identifier[len] ( identifier[self] . identifier[command_list] )
keyword[if] identifier[count] > literal[int] :
identifier[cmd] = identifier[self] . identifier[command_list] . identifier[pop] ( literal[int] )
keyword[if] identifier[count] == literal[int] :
identifier[self] . identifier[cmd_ready] = keyword[False]
keyword[return] identifier[cmd]
|
def get_command(self):
"""
Get a line of text that was received from the DE. The class's
cmd_ready attribute will be true if lines are available.
"""
cmd = None
count = len(self.command_list)
if count > 0:
cmd = self.command_list.pop(0) # depends on [control=['if'], data=[]]
## If that was the last line, turn off lines_pending
if count == 1:
self.cmd_ready = False # depends on [control=['if'], data=[]]
return cmd
|
def _fetch(self, entry_point, params):
"""Fetch a resource.
:param entrypoint: entrypoint to access
:param params: dict with the HTTP parameters needed to access the
given entry point
"""
url = self.API_URL % {'base_url': self.base_url, 'entrypoint': entry_point}
logger.debug("Mattermost client requests: %s params: %s",
entry_point, str(params))
r = self.fetch(url, payload=params)
return r.text
|
def function[_fetch, parameter[self, entry_point, params]]:
constant[Fetch a resource.
:param entrypoint: entrypoint to access
:param params: dict with the HTTP parameters needed to access the
given entry point
]
variable[url] assign[=] binary_operation[name[self].API_URL <ast.Mod object at 0x7da2590d6920> dictionary[[<ast.Constant object at 0x7da1b020fd30>, <ast.Constant object at 0x7da1b020da80>], [<ast.Attribute object at 0x7da1b020cf70>, <ast.Name object at 0x7da1b020f760>]]]
call[name[logger].debug, parameter[constant[Mattermost client requests: %s params: %s], name[entry_point], call[name[str], parameter[name[params]]]]]
variable[r] assign[=] call[name[self].fetch, parameter[name[url]]]
return[name[r].text]
|
keyword[def] identifier[_fetch] ( identifier[self] , identifier[entry_point] , identifier[params] ):
literal[string]
identifier[url] = identifier[self] . identifier[API_URL] %{ literal[string] : identifier[self] . identifier[base_url] , literal[string] : identifier[entry_point] }
identifier[logger] . identifier[debug] ( literal[string] ,
identifier[entry_point] , identifier[str] ( identifier[params] ))
identifier[r] = identifier[self] . identifier[fetch] ( identifier[url] , identifier[payload] = identifier[params] )
keyword[return] identifier[r] . identifier[text]
|
def _fetch(self, entry_point, params):
"""Fetch a resource.
:param entrypoint: entrypoint to access
:param params: dict with the HTTP parameters needed to access the
given entry point
"""
url = self.API_URL % {'base_url': self.base_url, 'entrypoint': entry_point}
logger.debug('Mattermost client requests: %s params: %s', entry_point, str(params))
r = self.fetch(url, payload=params)
return r.text
|
def get_socket(host, port, timeout=None):
"""
Return a socket.
:param str host: the hostname to connect to
:param int port: the port number to connect to
:param timeout: if specified, set the socket timeout
"""
for res in getaddrinfo(host, port, 0, SOCK_STREAM):
af, socktype, proto, canonname, sa = res
sock = None
try:
sock = socket(af, socktype, proto)
if timeout is not None:
sock.settimeout(timeout)
sock.connect(sa)
return sock
except error:
if sock is not None:
sock.close()
raise error
|
def function[get_socket, parameter[host, port, timeout]]:
constant[
Return a socket.
:param str host: the hostname to connect to
:param int port: the port number to connect to
:param timeout: if specified, set the socket timeout
]
for taget[name[res]] in starred[call[name[getaddrinfo], parameter[name[host], name[port], constant[0], name[SOCK_STREAM]]]] begin[:]
<ast.Tuple object at 0x7da18f720880> assign[=] name[res]
variable[sock] assign[=] constant[None]
<ast.Try object at 0x7da18f7217e0>
<ast.Raise object at 0x7da2041d93f0>
|
keyword[def] identifier[get_socket] ( identifier[host] , identifier[port] , identifier[timeout] = keyword[None] ):
literal[string]
keyword[for] identifier[res] keyword[in] identifier[getaddrinfo] ( identifier[host] , identifier[port] , literal[int] , identifier[SOCK_STREAM] ):
identifier[af] , identifier[socktype] , identifier[proto] , identifier[canonname] , identifier[sa] = identifier[res]
identifier[sock] = keyword[None]
keyword[try] :
identifier[sock] = identifier[socket] ( identifier[af] , identifier[socktype] , identifier[proto] )
keyword[if] identifier[timeout] keyword[is] keyword[not] keyword[None] :
identifier[sock] . identifier[settimeout] ( identifier[timeout] )
identifier[sock] . identifier[connect] ( identifier[sa] )
keyword[return] identifier[sock]
keyword[except] identifier[error] :
keyword[if] identifier[sock] keyword[is] keyword[not] keyword[None] :
identifier[sock] . identifier[close] ()
keyword[raise] identifier[error]
|
def get_socket(host, port, timeout=None):
"""
Return a socket.
:param str host: the hostname to connect to
:param int port: the port number to connect to
:param timeout: if specified, set the socket timeout
"""
for res in getaddrinfo(host, port, 0, SOCK_STREAM):
(af, socktype, proto, canonname, sa) = res
sock = None
try:
sock = socket(af, socktype, proto)
if timeout is not None:
sock.settimeout(timeout) # depends on [control=['if'], data=['timeout']]
sock.connect(sa)
return sock # depends on [control=['try'], data=[]]
except error:
if sock is not None:
sock.close() # depends on [control=['if'], data=['sock']] # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['res']]
raise error
|
def add_extension(self, extension):
"""
Adds the path to the extension to a list that will be used to extract it
to the ChromeDriver
:Args:
- extension: path to the \\*.crx file
"""
if extension:
extension_to_add = os.path.abspath(os.path.expanduser(extension))
if os.path.exists(extension_to_add):
self._extension_files.append(extension_to_add)
else:
raise IOError("Path to the extension doesn't exist")
else:
raise ValueError("argument can not be null")
|
def function[add_extension, parameter[self, extension]]:
constant[
Adds the path to the extension to a list that will be used to extract it
to the ChromeDriver
:Args:
- extension: path to the \*.crx file
]
if name[extension] begin[:]
variable[extension_to_add] assign[=] call[name[os].path.abspath, parameter[call[name[os].path.expanduser, parameter[name[extension]]]]]
if call[name[os].path.exists, parameter[name[extension_to_add]]] begin[:]
call[name[self]._extension_files.append, parameter[name[extension_to_add]]]
|
keyword[def] identifier[add_extension] ( identifier[self] , identifier[extension] ):
literal[string]
keyword[if] identifier[extension] :
identifier[extension_to_add] = identifier[os] . identifier[path] . identifier[abspath] ( identifier[os] . identifier[path] . identifier[expanduser] ( identifier[extension] ))
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[extension_to_add] ):
identifier[self] . identifier[_extension_files] . identifier[append] ( identifier[extension_to_add] )
keyword[else] :
keyword[raise] identifier[IOError] ( literal[string] )
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] )
|
def add_extension(self, extension):
"""
Adds the path to the extension to a list that will be used to extract it
to the ChromeDriver
:Args:
- extension: path to the \\*.crx file
"""
if extension:
extension_to_add = os.path.abspath(os.path.expanduser(extension))
if os.path.exists(extension_to_add):
self._extension_files.append(extension_to_add) # depends on [control=['if'], data=[]]
else:
raise IOError("Path to the extension doesn't exist") # depends on [control=['if'], data=[]]
else:
raise ValueError('argument can not be null')
|
def com_google_fonts_check_name_ascii_only_entries(ttFont):
"""Are there non-ASCII characters in ASCII-only NAME table entries?"""
bad_entries = []
for name in ttFont["name"].names:
if name.nameID == NameID.COPYRIGHT_NOTICE or \
name.nameID == NameID.POSTSCRIPT_NAME:
string = name.string.decode(name.getEncoding())
try:
string.encode('ascii')
except:
bad_entries.append(name)
yield INFO, ("Bad string at"
" [nameID {}, '{}']:"
" '{}'"
"").format(name.nameID,
name.getEncoding(),
string.encode("ascii",
errors='xmlcharrefreplace'))
if len(bad_entries) > 0:
yield FAIL, ("There are {} strings containing"
" non-ASCII characters in the ASCII-only"
" NAME table entries.").format(len(bad_entries))
else:
yield PASS, ("None of the ASCII-only NAME table entries"
" contain non-ASCII characteres.")
|
def function[com_google_fonts_check_name_ascii_only_entries, parameter[ttFont]]:
constant[Are there non-ASCII characters in ASCII-only NAME table entries?]
variable[bad_entries] assign[=] list[[]]
for taget[name[name]] in starred[call[name[ttFont]][constant[name]].names] begin[:]
if <ast.BoolOp object at 0x7da1b12f39a0> begin[:]
variable[string] assign[=] call[name[name].string.decode, parameter[call[name[name].getEncoding, parameter[]]]]
<ast.Try object at 0x7da1b12f3e50>
if compare[call[name[len], parameter[name[bad_entries]]] greater[>] constant[0]] begin[:]
<ast.Yield object at 0x7da1b12f0d90>
|
keyword[def] identifier[com_google_fonts_check_name_ascii_only_entries] ( identifier[ttFont] ):
literal[string]
identifier[bad_entries] =[]
keyword[for] identifier[name] keyword[in] identifier[ttFont] [ literal[string] ]. identifier[names] :
keyword[if] identifier[name] . identifier[nameID] == identifier[NameID] . identifier[COPYRIGHT_NOTICE] keyword[or] identifier[name] . identifier[nameID] == identifier[NameID] . identifier[POSTSCRIPT_NAME] :
identifier[string] = identifier[name] . identifier[string] . identifier[decode] ( identifier[name] . identifier[getEncoding] ())
keyword[try] :
identifier[string] . identifier[encode] ( literal[string] )
keyword[except] :
identifier[bad_entries] . identifier[append] ( identifier[name] )
keyword[yield] identifier[INFO] ,( literal[string]
literal[string]
literal[string]
literal[string] ). identifier[format] ( identifier[name] . identifier[nameID] ,
identifier[name] . identifier[getEncoding] (),
identifier[string] . identifier[encode] ( literal[string] ,
identifier[errors] = literal[string] ))
keyword[if] identifier[len] ( identifier[bad_entries] )> literal[int] :
keyword[yield] identifier[FAIL] ,( literal[string]
literal[string]
literal[string] ). identifier[format] ( identifier[len] ( identifier[bad_entries] ))
keyword[else] :
keyword[yield] identifier[PASS] ,( literal[string]
literal[string] )
|
def com_google_fonts_check_name_ascii_only_entries(ttFont):
"""Are there non-ASCII characters in ASCII-only NAME table entries?"""
bad_entries = []
for name in ttFont['name'].names:
if name.nameID == NameID.COPYRIGHT_NOTICE or name.nameID == NameID.POSTSCRIPT_NAME:
string = name.string.decode(name.getEncoding())
try:
string.encode('ascii') # depends on [control=['try'], data=[]]
except:
bad_entries.append(name)
yield (INFO, "Bad string at [nameID {}, '{}']: '{}'".format(name.nameID, name.getEncoding(), string.encode('ascii', errors='xmlcharrefreplace'))) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['name']]
if len(bad_entries) > 0:
yield (FAIL, 'There are {} strings containing non-ASCII characters in the ASCII-only NAME table entries.'.format(len(bad_entries))) # depends on [control=['if'], data=[]]
else:
yield (PASS, 'None of the ASCII-only NAME table entries contain non-ASCII characteres.')
|
def add_agent_pool(self, pool):
"""AddAgentPool.
[Preview API] Create an agent pool.
:param :class:`<TaskAgentPool> <azure.devops.v5_1.task_agent.models.TaskAgentPool>` pool: Details about the new agent pool
:rtype: :class:`<TaskAgentPool> <azure.devops.v5_1.task-agent.models.TaskAgentPool>`
"""
content = self._serialize.body(pool, 'TaskAgentPool')
response = self._send(http_method='POST',
location_id='a8c47e17-4d56-4a56-92bb-de7ea7dc65be',
version='5.1-preview.1',
content=content)
return self._deserialize('TaskAgentPool', response)
|
def function[add_agent_pool, parameter[self, pool]]:
constant[AddAgentPool.
[Preview API] Create an agent pool.
:param :class:`<TaskAgentPool> <azure.devops.v5_1.task_agent.models.TaskAgentPool>` pool: Details about the new agent pool
:rtype: :class:`<TaskAgentPool> <azure.devops.v5_1.task-agent.models.TaskAgentPool>`
]
variable[content] assign[=] call[name[self]._serialize.body, parameter[name[pool], constant[TaskAgentPool]]]
variable[response] assign[=] call[name[self]._send, parameter[]]
return[call[name[self]._deserialize, parameter[constant[TaskAgentPool], name[response]]]]
|
keyword[def] identifier[add_agent_pool] ( identifier[self] , identifier[pool] ):
literal[string]
identifier[content] = identifier[self] . identifier[_serialize] . identifier[body] ( identifier[pool] , literal[string] )
identifier[response] = identifier[self] . identifier[_send] ( identifier[http_method] = literal[string] ,
identifier[location_id] = literal[string] ,
identifier[version] = literal[string] ,
identifier[content] = identifier[content] )
keyword[return] identifier[self] . identifier[_deserialize] ( literal[string] , identifier[response] )
|
def add_agent_pool(self, pool):
"""AddAgentPool.
[Preview API] Create an agent pool.
:param :class:`<TaskAgentPool> <azure.devops.v5_1.task_agent.models.TaskAgentPool>` pool: Details about the new agent pool
:rtype: :class:`<TaskAgentPool> <azure.devops.v5_1.task-agent.models.TaskAgentPool>`
"""
content = self._serialize.body(pool, 'TaskAgentPool')
response = self._send(http_method='POST', location_id='a8c47e17-4d56-4a56-92bb-de7ea7dc65be', version='5.1-preview.1', content=content)
return self._deserialize('TaskAgentPool', response)
|
def press(*keys):
"""
Simulates a key-press for all the keys passed to the function
:param keys: list of keys to be pressed
:return: None
"""
for key in keys:
win32api.keybd_event(codes[key], 0, 0, 0)
release(key)
|
def function[press, parameter[]]:
constant[
Simulates a key-press for all the keys passed to the function
:param keys: list of keys to be pressed
:return: None
]
for taget[name[key]] in starred[name[keys]] begin[:]
call[name[win32api].keybd_event, parameter[call[name[codes]][name[key]], constant[0], constant[0], constant[0]]]
call[name[release], parameter[name[key]]]
|
keyword[def] identifier[press] (* identifier[keys] ):
literal[string]
keyword[for] identifier[key] keyword[in] identifier[keys] :
identifier[win32api] . identifier[keybd_event] ( identifier[codes] [ identifier[key] ], literal[int] , literal[int] , literal[int] )
identifier[release] ( identifier[key] )
|
def press(*keys):
"""
Simulates a key-press for all the keys passed to the function
:param keys: list of keys to be pressed
:return: None
"""
for key in keys:
win32api.keybd_event(codes[key], 0, 0, 0)
release(key) # depends on [control=['for'], data=['key']]
|
def argsort(self, *args, **kwargs):
"""
Return the integer indices that would sort the index.
Parameters
----------
*args
Passed to `numpy.ndarray.argsort`.
**kwargs
Passed to `numpy.ndarray.argsort`.
Returns
-------
numpy.ndarray
Integer indices that would sort the index if used as
an indexer.
See Also
--------
numpy.argsort : Similar method for NumPy arrays.
Index.sort_values : Return sorted copy of Index.
Examples
--------
>>> idx = pd.Index(['b', 'a', 'd', 'c'])
>>> idx
Index(['b', 'a', 'd', 'c'], dtype='object')
>>> order = idx.argsort()
>>> order
array([1, 0, 3, 2])
>>> idx[order]
Index(['a', 'b', 'c', 'd'], dtype='object')
"""
result = self.asi8
if result is None:
result = np.array(self)
return result.argsort(*args, **kwargs)
|
def function[argsort, parameter[self]]:
constant[
Return the integer indices that would sort the index.
Parameters
----------
*args
Passed to `numpy.ndarray.argsort`.
**kwargs
Passed to `numpy.ndarray.argsort`.
Returns
-------
numpy.ndarray
Integer indices that would sort the index if used as
an indexer.
See Also
--------
numpy.argsort : Similar method for NumPy arrays.
Index.sort_values : Return sorted copy of Index.
Examples
--------
>>> idx = pd.Index(['b', 'a', 'd', 'c'])
>>> idx
Index(['b', 'a', 'd', 'c'], dtype='object')
>>> order = idx.argsort()
>>> order
array([1, 0, 3, 2])
>>> idx[order]
Index(['a', 'b', 'c', 'd'], dtype='object')
]
variable[result] assign[=] name[self].asi8
if compare[name[result] is constant[None]] begin[:]
variable[result] assign[=] call[name[np].array, parameter[name[self]]]
return[call[name[result].argsort, parameter[<ast.Starred object at 0x7da18dc99cf0>]]]
|
keyword[def] identifier[argsort] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[result] = identifier[self] . identifier[asi8]
keyword[if] identifier[result] keyword[is] keyword[None] :
identifier[result] = identifier[np] . identifier[array] ( identifier[self] )
keyword[return] identifier[result] . identifier[argsort] (* identifier[args] ,** identifier[kwargs] )
|
def argsort(self, *args, **kwargs):
"""
Return the integer indices that would sort the index.
Parameters
----------
*args
Passed to `numpy.ndarray.argsort`.
**kwargs
Passed to `numpy.ndarray.argsort`.
Returns
-------
numpy.ndarray
Integer indices that would sort the index if used as
an indexer.
See Also
--------
numpy.argsort : Similar method for NumPy arrays.
Index.sort_values : Return sorted copy of Index.
Examples
--------
>>> idx = pd.Index(['b', 'a', 'd', 'c'])
>>> idx
Index(['b', 'a', 'd', 'c'], dtype='object')
>>> order = idx.argsort()
>>> order
array([1, 0, 3, 2])
>>> idx[order]
Index(['a', 'b', 'c', 'd'], dtype='object')
"""
result = self.asi8
if result is None:
result = np.array(self) # depends on [control=['if'], data=['result']]
return result.argsort(*args, **kwargs)
|
def cli(args):
"""Qt.py command-line interface"""
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("--convert",
help="Path to compiled Python module, e.g. my_ui.py")
parser.add_argument("--compile",
help="Accept raw .ui file and compile with native "
"PySide2 compiler.")
parser.add_argument("--stdout",
help="Write to stdout instead of file",
action="store_true")
parser.add_argument("--stdin",
help="Read from stdin instead of file",
action="store_true")
args = parser.parse_args(args)
if args.stdout:
raise NotImplementedError("--stdout")
if args.stdin:
raise NotImplementedError("--stdin")
if args.compile:
raise NotImplementedError("--compile")
if args.convert:
sys.stdout.write("#\n"
"# WARNING: --convert is an ALPHA feature.\n#\n"
"# See https://github.com/mottosso/Qt.py/pull/132\n"
"# for details.\n"
"#\n")
#
# ------> Read
#
with open(args.convert) as f:
lines = convert(f.readlines())
backup = "%s_backup%s" % os.path.splitext(args.convert)
sys.stdout.write("Creating \"%s\"..\n" % backup)
shutil.copy(args.convert, backup)
#
# <------ Write
#
with open(args.convert, "w") as f:
f.write("".join(lines))
sys.stdout.write("Successfully converted \"%s\"\n" % args.convert)
|
def function[cli, parameter[args]]:
constant[Qt.py command-line interface]
import module[argparse]
variable[parser] assign[=] call[name[argparse].ArgumentParser, parameter[]]
call[name[parser].add_argument, parameter[constant[--convert]]]
call[name[parser].add_argument, parameter[constant[--compile]]]
call[name[parser].add_argument, parameter[constant[--stdout]]]
call[name[parser].add_argument, parameter[constant[--stdin]]]
variable[args] assign[=] call[name[parser].parse_args, parameter[name[args]]]
if name[args].stdout begin[:]
<ast.Raise object at 0x7da1b0217580>
if name[args].stdin begin[:]
<ast.Raise object at 0x7da1b0215630>
if name[args].compile begin[:]
<ast.Raise object at 0x7da1b02146a0>
if name[args].convert begin[:]
call[name[sys].stdout.write, parameter[constant[#
# WARNING: --convert is an ALPHA feature.
#
# See https://github.com/mottosso/Qt.py/pull/132
# for details.
#
]]]
with call[name[open], parameter[name[args].convert]] begin[:]
variable[lines] assign[=] call[name[convert], parameter[call[name[f].readlines, parameter[]]]]
variable[backup] assign[=] binary_operation[constant[%s_backup%s] <ast.Mod object at 0x7da2590d6920> call[name[os].path.splitext, parameter[name[args].convert]]]
call[name[sys].stdout.write, parameter[binary_operation[constant[Creating "%s"..
] <ast.Mod object at 0x7da2590d6920> name[backup]]]]
call[name[shutil].copy, parameter[name[args].convert, name[backup]]]
with call[name[open], parameter[name[args].convert, constant[w]]] begin[:]
call[name[f].write, parameter[call[constant[].join, parameter[name[lines]]]]]
call[name[sys].stdout.write, parameter[binary_operation[constant[Successfully converted "%s"
] <ast.Mod object at 0x7da2590d6920> name[args].convert]]]
|
keyword[def] identifier[cli] ( identifier[args] ):
literal[string]
keyword[import] identifier[argparse]
identifier[parser] = identifier[argparse] . identifier[ArgumentParser] ()
identifier[parser] . identifier[add_argument] ( literal[string] ,
identifier[help] = literal[string] )
identifier[parser] . identifier[add_argument] ( literal[string] ,
identifier[help] = literal[string]
literal[string] )
identifier[parser] . identifier[add_argument] ( literal[string] ,
identifier[help] = literal[string] ,
identifier[action] = literal[string] )
identifier[parser] . identifier[add_argument] ( literal[string] ,
identifier[help] = literal[string] ,
identifier[action] = literal[string] )
identifier[args] = identifier[parser] . identifier[parse_args] ( identifier[args] )
keyword[if] identifier[args] . identifier[stdout] :
keyword[raise] identifier[NotImplementedError] ( literal[string] )
keyword[if] identifier[args] . identifier[stdin] :
keyword[raise] identifier[NotImplementedError] ( literal[string] )
keyword[if] identifier[args] . identifier[compile] :
keyword[raise] identifier[NotImplementedError] ( literal[string] )
keyword[if] identifier[args] . identifier[convert] :
identifier[sys] . identifier[stdout] . identifier[write] ( literal[string]
literal[string]
literal[string]
literal[string]
literal[string] )
keyword[with] identifier[open] ( identifier[args] . identifier[convert] ) keyword[as] identifier[f] :
identifier[lines] = identifier[convert] ( identifier[f] . identifier[readlines] ())
identifier[backup] = literal[string] % identifier[os] . identifier[path] . identifier[splitext] ( identifier[args] . identifier[convert] )
identifier[sys] . identifier[stdout] . identifier[write] ( literal[string] % identifier[backup] )
identifier[shutil] . identifier[copy] ( identifier[args] . identifier[convert] , identifier[backup] )
keyword[with] identifier[open] ( identifier[args] . identifier[convert] , literal[string] ) keyword[as] identifier[f] :
identifier[f] . identifier[write] ( literal[string] . identifier[join] ( identifier[lines] ))
identifier[sys] . identifier[stdout] . identifier[write] ( literal[string] % identifier[args] . identifier[convert] )
|
def cli(args):
"""Qt.py command-line interface"""
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('--convert', help='Path to compiled Python module, e.g. my_ui.py')
parser.add_argument('--compile', help='Accept raw .ui file and compile with native PySide2 compiler.')
parser.add_argument('--stdout', help='Write to stdout instead of file', action='store_true')
parser.add_argument('--stdin', help='Read from stdin instead of file', action='store_true')
args = parser.parse_args(args)
if args.stdout:
raise NotImplementedError('--stdout') # depends on [control=['if'], data=[]]
if args.stdin:
raise NotImplementedError('--stdin') # depends on [control=['if'], data=[]]
if args.compile:
raise NotImplementedError('--compile') # depends on [control=['if'], data=[]]
if args.convert:
sys.stdout.write('#\n# WARNING: --convert is an ALPHA feature.\n#\n# See https://github.com/mottosso/Qt.py/pull/132\n# for details.\n#\n')
#
# ------> Read
#
with open(args.convert) as f:
lines = convert(f.readlines()) # depends on [control=['with'], data=['f']]
backup = '%s_backup%s' % os.path.splitext(args.convert)
sys.stdout.write('Creating "%s"..\n' % backup)
shutil.copy(args.convert, backup)
#
# <------ Write
#
with open(args.convert, 'w') as f:
f.write(''.join(lines)) # depends on [control=['with'], data=['f']]
sys.stdout.write('Successfully converted "%s"\n' % args.convert) # depends on [control=['if'], data=[]]
|
def node_insert_before(self, node, new_node):
"""Insert the new node before node."""
assert(not self.node_is_on_list(new_node))
assert(node is not new_node)
prev = self.node_prev(node)
assert(prev is not None)
self.node_set_prev(node, new_node)
self.node_set_next(new_node, node)
self.node_set_prev(new_node, prev)
self.node_set_next(prev, new_node)
|
def function[node_insert_before, parameter[self, node, new_node]]:
constant[Insert the new node before node.]
assert[<ast.UnaryOp object at 0x7da1b1a45f90>]
assert[compare[name[node] is_not name[new_node]]]
variable[prev] assign[=] call[name[self].node_prev, parameter[name[node]]]
assert[compare[name[prev] is_not constant[None]]]
call[name[self].node_set_prev, parameter[name[node], name[new_node]]]
call[name[self].node_set_next, parameter[name[new_node], name[node]]]
call[name[self].node_set_prev, parameter[name[new_node], name[prev]]]
call[name[self].node_set_next, parameter[name[prev], name[new_node]]]
|
keyword[def] identifier[node_insert_before] ( identifier[self] , identifier[node] , identifier[new_node] ):
literal[string]
keyword[assert] ( keyword[not] identifier[self] . identifier[node_is_on_list] ( identifier[new_node] ))
keyword[assert] ( identifier[node] keyword[is] keyword[not] identifier[new_node] )
identifier[prev] = identifier[self] . identifier[node_prev] ( identifier[node] )
keyword[assert] ( identifier[prev] keyword[is] keyword[not] keyword[None] )
identifier[self] . identifier[node_set_prev] ( identifier[node] , identifier[new_node] )
identifier[self] . identifier[node_set_next] ( identifier[new_node] , identifier[node] )
identifier[self] . identifier[node_set_prev] ( identifier[new_node] , identifier[prev] )
identifier[self] . identifier[node_set_next] ( identifier[prev] , identifier[new_node] )
|
def node_insert_before(self, node, new_node):
"""Insert the new node before node."""
assert not self.node_is_on_list(new_node)
assert node is not new_node
prev = self.node_prev(node)
assert prev is not None
self.node_set_prev(node, new_node)
self.node_set_next(new_node, node)
self.node_set_prev(new_node, prev)
self.node_set_next(prev, new_node)
|
def query(query, ts, **kwargs):
"""
Perform *query* on the testsuite *ts*.
Note: currently only 'select' queries are supported.
Args:
query (str): TSQL query string
ts (:class:`delphin.itsdb.TestSuite`): testsuite to query over
kwargs: keyword arguments passed to the more specific query
function (e.g., :func:`select`)
Example:
>>> list(tsql.query('select i-id where i-length < 4', ts))
[[142], [1061]]
"""
queryobj = _parse_query(query)
if queryobj['querytype'] in ('select', 'retrieve'):
return _select(
queryobj['projection'],
queryobj['tables'],
queryobj['where'],
ts,
mode=kwargs.get('mode', 'list'),
cast=kwargs.get('cast', True))
else:
# not really a syntax error; replace with TSQLError or something
# when the proper exception class exists
raise TSQLSyntaxError(queryobj['querytype'] +
' queries are not supported')
|
def function[query, parameter[query, ts]]:
constant[
Perform *query* on the testsuite *ts*.
Note: currently only 'select' queries are supported.
Args:
query (str): TSQL query string
ts (:class:`delphin.itsdb.TestSuite`): testsuite to query over
kwargs: keyword arguments passed to the more specific query
function (e.g., :func:`select`)
Example:
>>> list(tsql.query('select i-id where i-length < 4', ts))
[[142], [1061]]
]
variable[queryobj] assign[=] call[name[_parse_query], parameter[name[query]]]
if compare[call[name[queryobj]][constant[querytype]] in tuple[[<ast.Constant object at 0x7da1b06ca3e0>, <ast.Constant object at 0x7da1b06c8fa0>]]] begin[:]
return[call[name[_select], parameter[call[name[queryobj]][constant[projection]], call[name[queryobj]][constant[tables]], call[name[queryobj]][constant[where]], name[ts]]]]
|
keyword[def] identifier[query] ( identifier[query] , identifier[ts] ,** identifier[kwargs] ):
literal[string]
identifier[queryobj] = identifier[_parse_query] ( identifier[query] )
keyword[if] identifier[queryobj] [ literal[string] ] keyword[in] ( literal[string] , literal[string] ):
keyword[return] identifier[_select] (
identifier[queryobj] [ literal[string] ],
identifier[queryobj] [ literal[string] ],
identifier[queryobj] [ literal[string] ],
identifier[ts] ,
identifier[mode] = identifier[kwargs] . identifier[get] ( literal[string] , literal[string] ),
identifier[cast] = identifier[kwargs] . identifier[get] ( literal[string] , keyword[True] ))
keyword[else] :
keyword[raise] identifier[TSQLSyntaxError] ( identifier[queryobj] [ literal[string] ]+
literal[string] )
|
def query(query, ts, **kwargs):
"""
Perform *query* on the testsuite *ts*.
Note: currently only 'select' queries are supported.
Args:
query (str): TSQL query string
ts (:class:`delphin.itsdb.TestSuite`): testsuite to query over
kwargs: keyword arguments passed to the more specific query
function (e.g., :func:`select`)
Example:
>>> list(tsql.query('select i-id where i-length < 4', ts))
[[142], [1061]]
"""
queryobj = _parse_query(query)
if queryobj['querytype'] in ('select', 'retrieve'):
return _select(queryobj['projection'], queryobj['tables'], queryobj['where'], ts, mode=kwargs.get('mode', 'list'), cast=kwargs.get('cast', True)) # depends on [control=['if'], data=[]]
else:
# not really a syntax error; replace with TSQLError or something
# when the proper exception class exists
raise TSQLSyntaxError(queryobj['querytype'] + ' queries are not supported')
|
def apply_func_to_select_indices(self, axis, func, indices, keep_remaining=False):
"""Applies a function to select indices.
Note: Your internal function must take a kwarg `internal_indices` for
this to work correctly. This prevents information leakage of the
internal index to the external representation.
Args:
axis: The axis to apply the func over.
func: The function to apply to these indices.
indices: The indices to apply the function to.
keep_remaining: Whether or not to keep the other partitions.
Some operations may want to drop the remaining partitions and
keep only the results.
Returns:
A new BaseFrameManager object, the type of object that called this.
"""
if self.partitions.size == 0:
return np.array([[]])
# Handling dictionaries has to be done differently, but we still want
# to figure out the partitions that need to be applied to, so we will
# store the dictionary in a separate variable and assign `indices` to
# the keys to handle it the same as we normally would.
if isinstance(indices, dict):
dict_indices = indices
indices = list(indices.keys())
else:
dict_indices = None
if not isinstance(indices, list):
indices = [indices]
partitions_dict = self._get_dict_of_block_index(
axis, indices, ordered=not keep_remaining
)
if not axis:
partitions_for_apply = self.partitions.T
else:
partitions_for_apply = self.partitions
# We may have a command to perform different functions on different
# columns at the same time. We attempt to handle this as efficiently as
# possible here. Functions that use this in the dictionary format must
# accept a keyword argument `func_dict`.
if dict_indices is not None:
def local_to_global_idx(partition_id, local_idx):
if partition_id == 0:
return local_idx
if axis == 0:
cumulative_axis = np.cumsum(self.block_widths)
else:
cumulative_axis = np.cumsum(self.block_lengths)
return cumulative_axis[partition_id - 1] + local_idx
if not keep_remaining:
result = np.array(
[
self._apply_func_to_list_of_partitions(
func,
partitions_for_apply[o_idx],
func_dict={
i_idx: dict_indices[local_to_global_idx(o_idx, i_idx)]
for i_idx in list_to_apply
if i_idx >= 0
},
)
for o_idx, list_to_apply in partitions_dict
]
)
else:
result = np.array(
[
partitions_for_apply[i]
if i not in partitions_dict
else self._apply_func_to_list_of_partitions(
func,
partitions_for_apply[i],
func_dict={
idx: dict_indices[local_to_global_idx(i, idx)]
for idx in partitions_dict[i]
if idx >= 0
},
)
for i in range(len(partitions_for_apply))
]
)
else:
if not keep_remaining:
# We are passing internal indices in here. In order for func to
# actually be able to use this information, it must be able to take in
# the internal indices. This might mean an iloc in the case of Pandas
# or some other way to index into the internal representation.
result = np.array(
[
self._apply_func_to_list_of_partitions(
func,
partitions_for_apply[idx],
internal_indices=list_to_apply,
)
for idx, list_to_apply in partitions_dict
]
)
else:
# The difference here is that we modify a subset and return the
# remaining (non-updated) blocks in their original position.
result = np.array(
[
partitions_for_apply[i]
if i not in partitions_dict
else self._apply_func_to_list_of_partitions(
func,
partitions_for_apply[i],
internal_indices=partitions_dict[i],
)
for i in range(len(partitions_for_apply))
]
)
return (
self.__constructor__(result.T) if not axis else self.__constructor__(result)
)
|
def function[apply_func_to_select_indices, parameter[self, axis, func, indices, keep_remaining]]:
constant[Applies a function to select indices.
Note: Your internal function must take a kwarg `internal_indices` for
this to work correctly. This prevents information leakage of the
internal index to the external representation.
Args:
axis: The axis to apply the func over.
func: The function to apply to these indices.
indices: The indices to apply the function to.
keep_remaining: Whether or not to keep the other partitions.
Some operations may want to drop the remaining partitions and
keep only the results.
Returns:
A new BaseFrameManager object, the type of object that called this.
]
if compare[name[self].partitions.size equal[==] constant[0]] begin[:]
return[call[name[np].array, parameter[list[[<ast.List object at 0x7da20e957970>]]]]]
if call[name[isinstance], parameter[name[indices], name[dict]]] begin[:]
variable[dict_indices] assign[=] name[indices]
variable[indices] assign[=] call[name[list], parameter[call[name[indices].keys, parameter[]]]]
if <ast.UnaryOp object at 0x7da20e957490> begin[:]
variable[indices] assign[=] list[[<ast.Name object at 0x7da20e9549d0>]]
variable[partitions_dict] assign[=] call[name[self]._get_dict_of_block_index, parameter[name[axis], name[indices]]]
if <ast.UnaryOp object at 0x7da20e957b80> begin[:]
variable[partitions_for_apply] assign[=] name[self].partitions.T
if compare[name[dict_indices] is_not constant[None]] begin[:]
def function[local_to_global_idx, parameter[partition_id, local_idx]]:
if compare[name[partition_id] equal[==] constant[0]] begin[:]
return[name[local_idx]]
if compare[name[axis] equal[==] constant[0]] begin[:]
variable[cumulative_axis] assign[=] call[name[np].cumsum, parameter[name[self].block_widths]]
return[binary_operation[call[name[cumulative_axis]][binary_operation[name[partition_id] - constant[1]]] + name[local_idx]]]
if <ast.UnaryOp object at 0x7da20e9564d0> begin[:]
variable[result] assign[=] call[name[np].array, parameter[<ast.ListComp object at 0x7da20e956a40>]]
return[<ast.IfExp object at 0x7da20e956290>]
|
keyword[def] identifier[apply_func_to_select_indices] ( identifier[self] , identifier[axis] , identifier[func] , identifier[indices] , identifier[keep_remaining] = keyword[False] ):
literal[string]
keyword[if] identifier[self] . identifier[partitions] . identifier[size] == literal[int] :
keyword[return] identifier[np] . identifier[array] ([[]])
keyword[if] identifier[isinstance] ( identifier[indices] , identifier[dict] ):
identifier[dict_indices] = identifier[indices]
identifier[indices] = identifier[list] ( identifier[indices] . identifier[keys] ())
keyword[else] :
identifier[dict_indices] = keyword[None]
keyword[if] keyword[not] identifier[isinstance] ( identifier[indices] , identifier[list] ):
identifier[indices] =[ identifier[indices] ]
identifier[partitions_dict] = identifier[self] . identifier[_get_dict_of_block_index] (
identifier[axis] , identifier[indices] , identifier[ordered] = keyword[not] identifier[keep_remaining]
)
keyword[if] keyword[not] identifier[axis] :
identifier[partitions_for_apply] = identifier[self] . identifier[partitions] . identifier[T]
keyword[else] :
identifier[partitions_for_apply] = identifier[self] . identifier[partitions]
keyword[if] identifier[dict_indices] keyword[is] keyword[not] keyword[None] :
keyword[def] identifier[local_to_global_idx] ( identifier[partition_id] , identifier[local_idx] ):
keyword[if] identifier[partition_id] == literal[int] :
keyword[return] identifier[local_idx]
keyword[if] identifier[axis] == literal[int] :
identifier[cumulative_axis] = identifier[np] . identifier[cumsum] ( identifier[self] . identifier[block_widths] )
keyword[else] :
identifier[cumulative_axis] = identifier[np] . identifier[cumsum] ( identifier[self] . identifier[block_lengths] )
keyword[return] identifier[cumulative_axis] [ identifier[partition_id] - literal[int] ]+ identifier[local_idx]
keyword[if] keyword[not] identifier[keep_remaining] :
identifier[result] = identifier[np] . identifier[array] (
[
identifier[self] . identifier[_apply_func_to_list_of_partitions] (
identifier[func] ,
identifier[partitions_for_apply] [ identifier[o_idx] ],
identifier[func_dict] ={
identifier[i_idx] : identifier[dict_indices] [ identifier[local_to_global_idx] ( identifier[o_idx] , identifier[i_idx] )]
keyword[for] identifier[i_idx] keyword[in] identifier[list_to_apply]
keyword[if] identifier[i_idx] >= literal[int]
},
)
keyword[for] identifier[o_idx] , identifier[list_to_apply] keyword[in] identifier[partitions_dict]
]
)
keyword[else] :
identifier[result] = identifier[np] . identifier[array] (
[
identifier[partitions_for_apply] [ identifier[i] ]
keyword[if] identifier[i] keyword[not] keyword[in] identifier[partitions_dict]
keyword[else] identifier[self] . identifier[_apply_func_to_list_of_partitions] (
identifier[func] ,
identifier[partitions_for_apply] [ identifier[i] ],
identifier[func_dict] ={
identifier[idx] : identifier[dict_indices] [ identifier[local_to_global_idx] ( identifier[i] , identifier[idx] )]
keyword[for] identifier[idx] keyword[in] identifier[partitions_dict] [ identifier[i] ]
keyword[if] identifier[idx] >= literal[int]
},
)
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[partitions_for_apply] ))
]
)
keyword[else] :
keyword[if] keyword[not] identifier[keep_remaining] :
identifier[result] = identifier[np] . identifier[array] (
[
identifier[self] . identifier[_apply_func_to_list_of_partitions] (
identifier[func] ,
identifier[partitions_for_apply] [ identifier[idx] ],
identifier[internal_indices] = identifier[list_to_apply] ,
)
keyword[for] identifier[idx] , identifier[list_to_apply] keyword[in] identifier[partitions_dict]
]
)
keyword[else] :
identifier[result] = identifier[np] . identifier[array] (
[
identifier[partitions_for_apply] [ identifier[i] ]
keyword[if] identifier[i] keyword[not] keyword[in] identifier[partitions_dict]
keyword[else] identifier[self] . identifier[_apply_func_to_list_of_partitions] (
identifier[func] ,
identifier[partitions_for_apply] [ identifier[i] ],
identifier[internal_indices] = identifier[partitions_dict] [ identifier[i] ],
)
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[partitions_for_apply] ))
]
)
keyword[return] (
identifier[self] . identifier[__constructor__] ( identifier[result] . identifier[T] ) keyword[if] keyword[not] identifier[axis] keyword[else] identifier[self] . identifier[__constructor__] ( identifier[result] )
)
|
def apply_func_to_select_indices(self, axis, func, indices, keep_remaining=False):
"""Applies a function to select indices.
Note: Your internal function must take a kwarg `internal_indices` for
this to work correctly. This prevents information leakage of the
internal index to the external representation.
Args:
axis: The axis to apply the func over.
func: The function to apply to these indices.
indices: The indices to apply the function to.
keep_remaining: Whether or not to keep the other partitions.
Some operations may want to drop the remaining partitions and
keep only the results.
Returns:
A new BaseFrameManager object, the type of object that called this.
"""
if self.partitions.size == 0:
return np.array([[]]) # depends on [control=['if'], data=[]]
# Handling dictionaries has to be done differently, but we still want
# to figure out the partitions that need to be applied to, so we will
# store the dictionary in a separate variable and assign `indices` to
# the keys to handle it the same as we normally would.
if isinstance(indices, dict):
dict_indices = indices
indices = list(indices.keys()) # depends on [control=['if'], data=[]]
else:
dict_indices = None
if not isinstance(indices, list):
indices = [indices] # depends on [control=['if'], data=[]]
partitions_dict = self._get_dict_of_block_index(axis, indices, ordered=not keep_remaining)
if not axis:
partitions_for_apply = self.partitions.T # depends on [control=['if'], data=[]]
else:
partitions_for_apply = self.partitions
# We may have a command to perform different functions on different
# columns at the same time. We attempt to handle this as efficiently as
# possible here. Functions that use this in the dictionary format must
# accept a keyword argument `func_dict`.
if dict_indices is not None:
def local_to_global_idx(partition_id, local_idx):
if partition_id == 0:
return local_idx # depends on [control=['if'], data=[]]
if axis == 0:
cumulative_axis = np.cumsum(self.block_widths) # depends on [control=['if'], data=[]]
else:
cumulative_axis = np.cumsum(self.block_lengths)
return cumulative_axis[partition_id - 1] + local_idx
if not keep_remaining:
result = np.array([self._apply_func_to_list_of_partitions(func, partitions_for_apply[o_idx], func_dict={i_idx: dict_indices[local_to_global_idx(o_idx, i_idx)] for i_idx in list_to_apply if i_idx >= 0}) for (o_idx, list_to_apply) in partitions_dict]) # depends on [control=['if'], data=[]]
else:
result = np.array([partitions_for_apply[i] if i not in partitions_dict else self._apply_func_to_list_of_partitions(func, partitions_for_apply[i], func_dict={idx: dict_indices[local_to_global_idx(i, idx)] for idx in partitions_dict[i] if idx >= 0}) for i in range(len(partitions_for_apply))]) # depends on [control=['if'], data=['dict_indices']]
elif not keep_remaining:
# We are passing internal indices in here. In order for func to
# actually be able to use this information, it must be able to take in
# the internal indices. This might mean an iloc in the case of Pandas
# or some other way to index into the internal representation.
result = np.array([self._apply_func_to_list_of_partitions(func, partitions_for_apply[idx], internal_indices=list_to_apply) for (idx, list_to_apply) in partitions_dict]) # depends on [control=['if'], data=[]]
else:
# The difference here is that we modify a subset and return the
# remaining (non-updated) blocks in their original position.
result = np.array([partitions_for_apply[i] if i not in partitions_dict else self._apply_func_to_list_of_partitions(func, partitions_for_apply[i], internal_indices=partitions_dict[i]) for i in range(len(partitions_for_apply))])
return self.__constructor__(result.T) if not axis else self.__constructor__(result)
|
def set_current_client_working_directory(self, directory):
"""Set current client working directory."""
shellwidget = self.get_current_shellwidget()
if shellwidget is not None:
shellwidget.set_cwd(directory)
|
def function[set_current_client_working_directory, parameter[self, directory]]:
constant[Set current client working directory.]
variable[shellwidget] assign[=] call[name[self].get_current_shellwidget, parameter[]]
if compare[name[shellwidget] is_not constant[None]] begin[:]
call[name[shellwidget].set_cwd, parameter[name[directory]]]
|
keyword[def] identifier[set_current_client_working_directory] ( identifier[self] , identifier[directory] ):
literal[string]
identifier[shellwidget] = identifier[self] . identifier[get_current_shellwidget] ()
keyword[if] identifier[shellwidget] keyword[is] keyword[not] keyword[None] :
identifier[shellwidget] . identifier[set_cwd] ( identifier[directory] )
|
def set_current_client_working_directory(self, directory):
"""Set current client working directory."""
shellwidget = self.get_current_shellwidget()
if shellwidget is not None:
shellwidget.set_cwd(directory) # depends on [control=['if'], data=['shellwidget']]
|
def deploy_to(self, displays=None, exclude=[], lock=[]):
"""
Deploys page to listed display (specify with display). If display is None,
deploy to all display. Can specify exclude for which display to exclude.
This overwrites the first argument.
"""
if displays is None:
signs = Sign.objects.all()
else:
signs = Sign.objects.filter(display__in=displays)
for sign in signs.exclude(display__in=exclude):
sign.pages.add(self)
sign.save()
|
def function[deploy_to, parameter[self, displays, exclude, lock]]:
constant[
Deploys page to listed display (specify with display). If display is None,
deploy to all display. Can specify exclude for which display to exclude.
This overwrites the first argument.
]
if compare[name[displays] is constant[None]] begin[:]
variable[signs] assign[=] call[name[Sign].objects.all, parameter[]]
for taget[name[sign]] in starred[call[name[signs].exclude, parameter[]]] begin[:]
call[name[sign].pages.add, parameter[name[self]]]
call[name[sign].save, parameter[]]
|
keyword[def] identifier[deploy_to] ( identifier[self] , identifier[displays] = keyword[None] , identifier[exclude] =[], identifier[lock] =[]):
literal[string]
keyword[if] identifier[displays] keyword[is] keyword[None] :
identifier[signs] = identifier[Sign] . identifier[objects] . identifier[all] ()
keyword[else] :
identifier[signs] = identifier[Sign] . identifier[objects] . identifier[filter] ( identifier[display__in] = identifier[displays] )
keyword[for] identifier[sign] keyword[in] identifier[signs] . identifier[exclude] ( identifier[display__in] = identifier[exclude] ):
identifier[sign] . identifier[pages] . identifier[add] ( identifier[self] )
identifier[sign] . identifier[save] ()
|
def deploy_to(self, displays=None, exclude=[], lock=[]):
"""
Deploys page to listed display (specify with display). If display is None,
deploy to all display. Can specify exclude for which display to exclude.
This overwrites the first argument.
"""
if displays is None:
signs = Sign.objects.all() # depends on [control=['if'], data=[]]
else:
signs = Sign.objects.filter(display__in=displays)
for sign in signs.exclude(display__in=exclude):
sign.pages.add(self)
sign.save() # depends on [control=['for'], data=['sign']]
|
def add_router_to_hosting_device(self, client, hosting_device_id, body):
"""Adds a router to hosting device."""
res_path = hostingdevice.HostingDevice.resource_path
return client.post((res_path + DEVICE_L3_ROUTERS) %
hosting_device_id, body=body)
|
def function[add_router_to_hosting_device, parameter[self, client, hosting_device_id, body]]:
constant[Adds a router to hosting device.]
variable[res_path] assign[=] name[hostingdevice].HostingDevice.resource_path
return[call[name[client].post, parameter[binary_operation[binary_operation[name[res_path] + name[DEVICE_L3_ROUTERS]] <ast.Mod object at 0x7da2590d6920> name[hosting_device_id]]]]]
|
keyword[def] identifier[add_router_to_hosting_device] ( identifier[self] , identifier[client] , identifier[hosting_device_id] , identifier[body] ):
literal[string]
identifier[res_path] = identifier[hostingdevice] . identifier[HostingDevice] . identifier[resource_path]
keyword[return] identifier[client] . identifier[post] (( identifier[res_path] + identifier[DEVICE_L3_ROUTERS] )%
identifier[hosting_device_id] , identifier[body] = identifier[body] )
|
def add_router_to_hosting_device(self, client, hosting_device_id, body):
"""Adds a router to hosting device."""
res_path = hostingdevice.HostingDevice.resource_path
return client.post((res_path + DEVICE_L3_ROUTERS) % hosting_device_id, body=body)
|
def config(self, key, value=None, **kwargs):
"""Controls configuration variables.
.. code-block:: python
>>> c.config("Addresses.Gateway")
{'Key': 'Addresses.Gateway', 'Value': '/ip4/127.0.0.1/tcp/8080'}
>>> c.config("Addresses.Gateway", "/ip4/127.0.0.1/tcp/8081")
{'Key': 'Addresses.Gateway', 'Value': '/ip4/127.0.0.1/tcp/8081'}
Parameters
----------
key : str
The key of the configuration entry (e.g. "Addresses.API")
value : dict
The value to set the configuration entry to
Returns
-------
dict : Requested/updated key and its (new) value
"""
args = (key, value)
return self._client.request('/config', args, decoder='json', **kwargs)
|
def function[config, parameter[self, key, value]]:
constant[Controls configuration variables.
.. code-block:: python
>>> c.config("Addresses.Gateway")
{'Key': 'Addresses.Gateway', 'Value': '/ip4/127.0.0.1/tcp/8080'}
>>> c.config("Addresses.Gateway", "/ip4/127.0.0.1/tcp/8081")
{'Key': 'Addresses.Gateway', 'Value': '/ip4/127.0.0.1/tcp/8081'}
Parameters
----------
key : str
The key of the configuration entry (e.g. "Addresses.API")
value : dict
The value to set the configuration entry to
Returns
-------
dict : Requested/updated key and its (new) value
]
variable[args] assign[=] tuple[[<ast.Name object at 0x7da20c6c5c30>, <ast.Name object at 0x7da20c6c4d60>]]
return[call[name[self]._client.request, parameter[constant[/config], name[args]]]]
|
keyword[def] identifier[config] ( identifier[self] , identifier[key] , identifier[value] = keyword[None] ,** identifier[kwargs] ):
literal[string]
identifier[args] =( identifier[key] , identifier[value] )
keyword[return] identifier[self] . identifier[_client] . identifier[request] ( literal[string] , identifier[args] , identifier[decoder] = literal[string] ,** identifier[kwargs] )
|
def config(self, key, value=None, **kwargs):
"""Controls configuration variables.
.. code-block:: python
>>> c.config("Addresses.Gateway")
{'Key': 'Addresses.Gateway', 'Value': '/ip4/127.0.0.1/tcp/8080'}
>>> c.config("Addresses.Gateway", "/ip4/127.0.0.1/tcp/8081")
{'Key': 'Addresses.Gateway', 'Value': '/ip4/127.0.0.1/tcp/8081'}
Parameters
----------
key : str
The key of the configuration entry (e.g. "Addresses.API")
value : dict
The value to set the configuration entry to
Returns
-------
dict : Requested/updated key and its (new) value
"""
args = (key, value)
return self._client.request('/config', args, decoder='json', **kwargs)
|
def convert_ruptureCollection(self, node):
"""
:param node: a ruptureCollection node
:returns: a dictionary grp_id -> EBRuptures
"""
coll = {}
for grpnode in node:
grp_id = int(grpnode['id'])
coll[grp_id] = ebrs = []
for node in grpnode:
rup = self.convert_node(node)
rup.serial = int(node['id'])
sesnodes = node.stochasticEventSets
n = 0 # number of events
for sesnode in sesnodes:
with context(self.fname, sesnode):
n += len(sesnode.text.split())
ebr = source.rupture.EBRupture(rup, 0, 0, numpy.array([n]))
ebrs.append(ebr)
return coll
|
def function[convert_ruptureCollection, parameter[self, node]]:
constant[
:param node: a ruptureCollection node
:returns: a dictionary grp_id -> EBRuptures
]
variable[coll] assign[=] dictionary[[], []]
for taget[name[grpnode]] in starred[name[node]] begin[:]
variable[grp_id] assign[=] call[name[int], parameter[call[name[grpnode]][constant[id]]]]
call[name[coll]][name[grp_id]] assign[=] list[[]]
for taget[name[node]] in starred[name[grpnode]] begin[:]
variable[rup] assign[=] call[name[self].convert_node, parameter[name[node]]]
name[rup].serial assign[=] call[name[int], parameter[call[name[node]][constant[id]]]]
variable[sesnodes] assign[=] name[node].stochasticEventSets
variable[n] assign[=] constant[0]
for taget[name[sesnode]] in starred[name[sesnodes]] begin[:]
with call[name[context], parameter[name[self].fname, name[sesnode]]] begin[:]
<ast.AugAssign object at 0x7da1b138efb0>
variable[ebr] assign[=] call[name[source].rupture.EBRupture, parameter[name[rup], constant[0], constant[0], call[name[numpy].array, parameter[list[[<ast.Name object at 0x7da1b138e680>]]]]]]
call[name[ebrs].append, parameter[name[ebr]]]
return[name[coll]]
|
keyword[def] identifier[convert_ruptureCollection] ( identifier[self] , identifier[node] ):
literal[string]
identifier[coll] ={}
keyword[for] identifier[grpnode] keyword[in] identifier[node] :
identifier[grp_id] = identifier[int] ( identifier[grpnode] [ literal[string] ])
identifier[coll] [ identifier[grp_id] ]= identifier[ebrs] =[]
keyword[for] identifier[node] keyword[in] identifier[grpnode] :
identifier[rup] = identifier[self] . identifier[convert_node] ( identifier[node] )
identifier[rup] . identifier[serial] = identifier[int] ( identifier[node] [ literal[string] ])
identifier[sesnodes] = identifier[node] . identifier[stochasticEventSets]
identifier[n] = literal[int]
keyword[for] identifier[sesnode] keyword[in] identifier[sesnodes] :
keyword[with] identifier[context] ( identifier[self] . identifier[fname] , identifier[sesnode] ):
identifier[n] += identifier[len] ( identifier[sesnode] . identifier[text] . identifier[split] ())
identifier[ebr] = identifier[source] . identifier[rupture] . identifier[EBRupture] ( identifier[rup] , literal[int] , literal[int] , identifier[numpy] . identifier[array] ([ identifier[n] ]))
identifier[ebrs] . identifier[append] ( identifier[ebr] )
keyword[return] identifier[coll]
|
def convert_ruptureCollection(self, node):
"""
:param node: a ruptureCollection node
:returns: a dictionary grp_id -> EBRuptures
"""
coll = {}
for grpnode in node:
grp_id = int(grpnode['id'])
coll[grp_id] = ebrs = []
for node in grpnode:
rup = self.convert_node(node)
rup.serial = int(node['id'])
sesnodes = node.stochasticEventSets
n = 0 # number of events
for sesnode in sesnodes:
with context(self.fname, sesnode):
n += len(sesnode.text.split()) # depends on [control=['with'], data=[]] # depends on [control=['for'], data=['sesnode']]
ebr = source.rupture.EBRupture(rup, 0, 0, numpy.array([n]))
ebrs.append(ebr) # depends on [control=['for'], data=['node']] # depends on [control=['for'], data=['grpnode']]
return coll
|
def get_changelog(project_dir=os.curdir, bugtracker_url='', rpm_format=False):
"""
Retrieves the changelog, from the CHANGELOG file (if in a package) or
generates it from the git history. Optionally in rpm-compatible format.
:param project_dir: Path to the git repo of the project.
:type project_dir: str
:param bugtracker_url: Url to the bug tracker for the issues.
:type bugtracker_url: str
:param rpm_format: if set to True, will make the changelog rpm-compatible
:returns: changelog
:rtype: str
:rises RuntimeError: If the changelog could not be retrieved
"""
changelog = ''
pkg_info_file = os.path.join(project_dir, 'PKG-INFO')
changelog_file = os.path.join(project_dir, 'CHANGELOG')
if os.path.exists(pkg_info_file) and os.path.exists(changelog_file):
with open(changelog_file) as changelog_fd:
changelog = changelog_fd.read()
else:
changelog = api.get_changelog(
repo_path=project_dir,
bugtracker_url=bugtracker_url,
rpm_format=rpm_format,
)
return changelog
|
def function[get_changelog, parameter[project_dir, bugtracker_url, rpm_format]]:
constant[
Retrieves the changelog, from the CHANGELOG file (if in a package) or
generates it from the git history. Optionally in rpm-compatible format.
:param project_dir: Path to the git repo of the project.
:type project_dir: str
:param bugtracker_url: Url to the bug tracker for the issues.
:type bugtracker_url: str
:param rpm_format: if set to True, will make the changelog rpm-compatible
:returns: changelog
:rtype: str
:rises RuntimeError: If the changelog could not be retrieved
]
variable[changelog] assign[=] constant[]
variable[pkg_info_file] assign[=] call[name[os].path.join, parameter[name[project_dir], constant[PKG-INFO]]]
variable[changelog_file] assign[=] call[name[os].path.join, parameter[name[project_dir], constant[CHANGELOG]]]
if <ast.BoolOp object at 0x7da1b098a1a0> begin[:]
with call[name[open], parameter[name[changelog_file]]] begin[:]
variable[changelog] assign[=] call[name[changelog_fd].read, parameter[]]
return[name[changelog]]
|
keyword[def] identifier[get_changelog] ( identifier[project_dir] = identifier[os] . identifier[curdir] , identifier[bugtracker_url] = literal[string] , identifier[rpm_format] = keyword[False] ):
literal[string]
identifier[changelog] = literal[string]
identifier[pkg_info_file] = identifier[os] . identifier[path] . identifier[join] ( identifier[project_dir] , literal[string] )
identifier[changelog_file] = identifier[os] . identifier[path] . identifier[join] ( identifier[project_dir] , literal[string] )
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[pkg_info_file] ) keyword[and] identifier[os] . identifier[path] . identifier[exists] ( identifier[changelog_file] ):
keyword[with] identifier[open] ( identifier[changelog_file] ) keyword[as] identifier[changelog_fd] :
identifier[changelog] = identifier[changelog_fd] . identifier[read] ()
keyword[else] :
identifier[changelog] = identifier[api] . identifier[get_changelog] (
identifier[repo_path] = identifier[project_dir] ,
identifier[bugtracker_url] = identifier[bugtracker_url] ,
identifier[rpm_format] = identifier[rpm_format] ,
)
keyword[return] identifier[changelog]
|
def get_changelog(project_dir=os.curdir, bugtracker_url='', rpm_format=False):
"""
Retrieves the changelog, from the CHANGELOG file (if in a package) or
generates it from the git history. Optionally in rpm-compatible format.
:param project_dir: Path to the git repo of the project.
:type project_dir: str
:param bugtracker_url: Url to the bug tracker for the issues.
:type bugtracker_url: str
:param rpm_format: if set to True, will make the changelog rpm-compatible
:returns: changelog
:rtype: str
:rises RuntimeError: If the changelog could not be retrieved
"""
changelog = ''
pkg_info_file = os.path.join(project_dir, 'PKG-INFO')
changelog_file = os.path.join(project_dir, 'CHANGELOG')
if os.path.exists(pkg_info_file) and os.path.exists(changelog_file):
with open(changelog_file) as changelog_fd:
changelog = changelog_fd.read() # depends on [control=['with'], data=['changelog_fd']] # depends on [control=['if'], data=[]]
else:
changelog = api.get_changelog(repo_path=project_dir, bugtracker_url=bugtracker_url, rpm_format=rpm_format)
return changelog
|
def main(unused_argv):
"""Freeze a model to a GraphDef proto."""
if FLAGS.use_tpu:
dual_net.freeze_graph_tpu(FLAGS.model_path)
else:
dual_net.freeze_graph(FLAGS.model_path)
|
def function[main, parameter[unused_argv]]:
constant[Freeze a model to a GraphDef proto.]
if name[FLAGS].use_tpu begin[:]
call[name[dual_net].freeze_graph_tpu, parameter[name[FLAGS].model_path]]
|
keyword[def] identifier[main] ( identifier[unused_argv] ):
literal[string]
keyword[if] identifier[FLAGS] . identifier[use_tpu] :
identifier[dual_net] . identifier[freeze_graph_tpu] ( identifier[FLAGS] . identifier[model_path] )
keyword[else] :
identifier[dual_net] . identifier[freeze_graph] ( identifier[FLAGS] . identifier[model_path] )
|
def main(unused_argv):
"""Freeze a model to a GraphDef proto."""
if FLAGS.use_tpu:
dual_net.freeze_graph_tpu(FLAGS.model_path) # depends on [control=['if'], data=[]]
else:
dual_net.freeze_graph(FLAGS.model_path)
|
def info(self):
"""Returns the name and version of the current shell"""
proc = Popen(['fish', '--version'],
stdout=PIPE, stderr=DEVNULL)
version = proc.stdout.read().decode('utf-8').split()[-1]
return u'Fish Shell {}'.format(version)
|
def function[info, parameter[self]]:
constant[Returns the name and version of the current shell]
variable[proc] assign[=] call[name[Popen], parameter[list[[<ast.Constant object at 0x7da1b1efbd00>, <ast.Constant object at 0x7da1b1ef84c0>]]]]
variable[version] assign[=] call[call[call[call[name[proc].stdout.read, parameter[]].decode, parameter[constant[utf-8]]].split, parameter[]]][<ast.UnaryOp object at 0x7da1b1efad10>]
return[call[constant[Fish Shell {}].format, parameter[name[version]]]]
|
keyword[def] identifier[info] ( identifier[self] ):
literal[string]
identifier[proc] = identifier[Popen] ([ literal[string] , literal[string] ],
identifier[stdout] = identifier[PIPE] , identifier[stderr] = identifier[DEVNULL] )
identifier[version] = identifier[proc] . identifier[stdout] . identifier[read] (). identifier[decode] ( literal[string] ). identifier[split] ()[- literal[int] ]
keyword[return] literal[string] . identifier[format] ( identifier[version] )
|
def info(self):
"""Returns the name and version of the current shell"""
proc = Popen(['fish', '--version'], stdout=PIPE, stderr=DEVNULL)
version = proc.stdout.read().decode('utf-8').split()[-1]
return u'Fish Shell {}'.format(version)
|
def other_causatives(self, case_obj, variant_obj):
"""Find the same variant in other cases marked causative.
Args:
case_obj(dict)
variant_obj(dict)
Yields:
other_variant(dict)
"""
# variant id without "*_[variant_type]"
variant_id = variant_obj['display_name'].rsplit('_', 1)[0]
institute_causatives = self.get_causatives(variant_obj['institute'])
for causative_id in institute_causatives:
other_variant = self.variant(causative_id)
if not other_variant:
continue
not_same_case = other_variant['case_id'] != case_obj['_id']
same_variant = other_variant['display_name'].startswith(variant_id)
if not_same_case and same_variant:
yield other_variant
|
def function[other_causatives, parameter[self, case_obj, variant_obj]]:
constant[Find the same variant in other cases marked causative.
Args:
case_obj(dict)
variant_obj(dict)
Yields:
other_variant(dict)
]
variable[variant_id] assign[=] call[call[call[name[variant_obj]][constant[display_name]].rsplit, parameter[constant[_], constant[1]]]][constant[0]]
variable[institute_causatives] assign[=] call[name[self].get_causatives, parameter[call[name[variant_obj]][constant[institute]]]]
for taget[name[causative_id]] in starred[name[institute_causatives]] begin[:]
variable[other_variant] assign[=] call[name[self].variant, parameter[name[causative_id]]]
if <ast.UnaryOp object at 0x7da20c6e5210> begin[:]
continue
variable[not_same_case] assign[=] compare[call[name[other_variant]][constant[case_id]] not_equal[!=] call[name[case_obj]][constant[_id]]]
variable[same_variant] assign[=] call[call[name[other_variant]][constant[display_name]].startswith, parameter[name[variant_id]]]
if <ast.BoolOp object at 0x7da18f00f5b0> begin[:]
<ast.Yield object at 0x7da18f00f1c0>
|
keyword[def] identifier[other_causatives] ( identifier[self] , identifier[case_obj] , identifier[variant_obj] ):
literal[string]
identifier[variant_id] = identifier[variant_obj] [ literal[string] ]. identifier[rsplit] ( literal[string] , literal[int] )[ literal[int] ]
identifier[institute_causatives] = identifier[self] . identifier[get_causatives] ( identifier[variant_obj] [ literal[string] ])
keyword[for] identifier[causative_id] keyword[in] identifier[institute_causatives] :
identifier[other_variant] = identifier[self] . identifier[variant] ( identifier[causative_id] )
keyword[if] keyword[not] identifier[other_variant] :
keyword[continue]
identifier[not_same_case] = identifier[other_variant] [ literal[string] ]!= identifier[case_obj] [ literal[string] ]
identifier[same_variant] = identifier[other_variant] [ literal[string] ]. identifier[startswith] ( identifier[variant_id] )
keyword[if] identifier[not_same_case] keyword[and] identifier[same_variant] :
keyword[yield] identifier[other_variant]
|
def other_causatives(self, case_obj, variant_obj):
"""Find the same variant in other cases marked causative.
Args:
case_obj(dict)
variant_obj(dict)
Yields:
other_variant(dict)
"""
# variant id without "*_[variant_type]"
variant_id = variant_obj['display_name'].rsplit('_', 1)[0]
institute_causatives = self.get_causatives(variant_obj['institute'])
for causative_id in institute_causatives:
other_variant = self.variant(causative_id)
if not other_variant:
continue # depends on [control=['if'], data=[]]
not_same_case = other_variant['case_id'] != case_obj['_id']
same_variant = other_variant['display_name'].startswith(variant_id)
if not_same_case and same_variant:
yield other_variant # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['causative_id']]
|
def action_findip(reader, *args):
"""Find Flow Log records involving a specific IP or IPs."""
target_ips = set(args)
for record in reader:
if (record.srcaddr in target_ips) or (record.dstaddr in target_ips):
print(record.to_message())
|
def function[action_findip, parameter[reader]]:
constant[Find Flow Log records involving a specific IP or IPs.]
variable[target_ips] assign[=] call[name[set], parameter[name[args]]]
for taget[name[record]] in starred[name[reader]] begin[:]
if <ast.BoolOp object at 0x7da18fe92f20> begin[:]
call[name[print], parameter[call[name[record].to_message, parameter[]]]]
|
keyword[def] identifier[action_findip] ( identifier[reader] ,* identifier[args] ):
literal[string]
identifier[target_ips] = identifier[set] ( identifier[args] )
keyword[for] identifier[record] keyword[in] identifier[reader] :
keyword[if] ( identifier[record] . identifier[srcaddr] keyword[in] identifier[target_ips] ) keyword[or] ( identifier[record] . identifier[dstaddr] keyword[in] identifier[target_ips] ):
identifier[print] ( identifier[record] . identifier[to_message] ())
|
def action_findip(reader, *args):
"""Find Flow Log records involving a specific IP or IPs."""
target_ips = set(args)
for record in reader:
if record.srcaddr in target_ips or record.dstaddr in target_ips:
print(record.to_message()) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['record']]
|
def freqz_cas(sos,w):
"""
Cascade frequency response
Mark Wickert October 2016
"""
Ns,Mcol = sos.shape
w,Hcas = signal.freqz(sos[0,:3],sos[0,3:],w)
for k in range(1,Ns):
w,Htemp = signal.freqz(sos[k,:3],sos[k,3:],w)
Hcas *= Htemp
return w, Hcas
|
def function[freqz_cas, parameter[sos, w]]:
constant[
Cascade frequency response
Mark Wickert October 2016
]
<ast.Tuple object at 0x7da20c76d450> assign[=] name[sos].shape
<ast.Tuple object at 0x7da20c76e320> assign[=] call[name[signal].freqz, parameter[call[name[sos]][tuple[[<ast.Constant object at 0x7da20c76f700>, <ast.Slice object at 0x7da20c76d8d0>]]], call[name[sos]][tuple[[<ast.Constant object at 0x7da20c76d6f0>, <ast.Slice object at 0x7da20c76c340>]]], name[w]]]
for taget[name[k]] in starred[call[name[range], parameter[constant[1], name[Ns]]]] begin[:]
<ast.Tuple object at 0x7da20c76d600> assign[=] call[name[signal].freqz, parameter[call[name[sos]][tuple[[<ast.Name object at 0x7da20c76df30>, <ast.Slice object at 0x7da20c76ed40>]]], call[name[sos]][tuple[[<ast.Name object at 0x7da20c76c550>, <ast.Slice object at 0x7da20c76c910>]]], name[w]]]
<ast.AugAssign object at 0x7da20c76fa60>
return[tuple[[<ast.Name object at 0x7da20c76e0b0>, <ast.Name object at 0x7da20c76f310>]]]
|
keyword[def] identifier[freqz_cas] ( identifier[sos] , identifier[w] ):
literal[string]
identifier[Ns] , identifier[Mcol] = identifier[sos] . identifier[shape]
identifier[w] , identifier[Hcas] = identifier[signal] . identifier[freqz] ( identifier[sos] [ literal[int] ,: literal[int] ], identifier[sos] [ literal[int] , literal[int] :], identifier[w] )
keyword[for] identifier[k] keyword[in] identifier[range] ( literal[int] , identifier[Ns] ):
identifier[w] , identifier[Htemp] = identifier[signal] . identifier[freqz] ( identifier[sos] [ identifier[k] ,: literal[int] ], identifier[sos] [ identifier[k] , literal[int] :], identifier[w] )
identifier[Hcas] *= identifier[Htemp]
keyword[return] identifier[w] , identifier[Hcas]
|
def freqz_cas(sos, w):
"""
Cascade frequency response
Mark Wickert October 2016
"""
(Ns, Mcol) = sos.shape
(w, Hcas) = signal.freqz(sos[0, :3], sos[0, 3:], w)
for k in range(1, Ns):
(w, Htemp) = signal.freqz(sos[k, :3], sos[k, 3:], w)
Hcas *= Htemp # depends on [control=['for'], data=['k']]
return (w, Hcas)
|
def index_corpus(self):
"""Make a Whoosh index out of a pre-processed corpus, ie TLG, PHI5,
or PHI7.
TLG takes almost 13 min; PHI5 1.5 min.
To setup index parameters
>>> # cltk_index = CLTKIndex('latin', 'phi5') # 1.5 min, 363 docs
>>> # cltk_index = CLTKIndex('latin', 'phi5', chunk='work') # 2 min, 837 docs
>>> # cltk_index = CLTKIndex('greek', 'tlg') # 13 min, 1823 docs
>>> # cltk_index = CLTKIndex('greek', 'tlg', chunk='work') #15.5 min, 6625 docs
# And to start indexing:
>>> # cltk_index.index_corpus()
TODO: Prevent overwriting. Ask user to rm old dir before re-indexing.
TODO: Add option for lemmatizing.
TODO: Add for figure out lower() options.
TODO: Process TLG through forthcoming normalize().
TODO: Add name to each index.
TODO: Turn off any language-specific mods (eg, stemming, case) that
Whoosh might be doing by default.
"""
# Setup index dir
schema = Schema(path=ID(stored=True),
author=TEXT(stored=True),
content=TEXT)
try:
_index = create_in(self.index_path, schema)
except FileNotFoundError:
os.makedirs(self.index_path)
_index = create_in(self.index_path, schema)
writer = _index.writer()
# Setup corpus to be indexed
if self.lang == 'greek' and self.corpus == 'tlg':
corpus_path = os.path.expanduser('~/cltk_data/greek/text/tlg/plaintext/')
if self.chunk == 'work':
corpus_path = os.path.expanduser('~/cltk_data/greek/text/tlg/individual_works/')
elif self.lang == 'latin' and self.corpus == 'phi5':
corpus_path = os.path.expanduser('~/cltk_data/latin/text/phi5/plaintext/')
if self.chunk == 'work':
corpus_path = os.path.expanduser('~/cltk_data/latin/text/phi5/individual_works/')
assert os.path.isdir(corpus_path), 'Corpus does not exist in the following location: "%s". Use CLTK Corpus Importer and TLGU to create transformed corpus.' % corpus_path # pylint: disable=line-too-long
files = os.listdir(corpus_path)
if self.lang == 'greek' and self.corpus == 'tlg':
files = [f[:-4] for f in files if f.startswith('TLG')]
corpus_index = TLG_AUTHOR_MAP
elif self.lang == 'latin' and self.corpus == 'phi5':
files = [f[:-4] for f in files if f.startswith('LAT')]
corpus_index = PHI5_AUTHOR_MAP
time_0 = time.time()
logger.info("Commencing indexing of %s documents of '%s' corpus." % (len(files), self.corpus)) # pylint: disable=line-too-long
logger.info('Index will be written to: "%s".' % self.index_path)
if self.chunk == 'author':
for count, file in enumerate(files, 1):
try:
if self.lang == 'greek' and self.corpus == 'tlg':
file = file[3:]
author = corpus_index[file]
path = os.path.join(corpus_path, 'TLG' + file + '.TXT')
if self.lang == 'latin' and self.corpus == 'phi5':
author = corpus_index[file]
path = os.path.join(corpus_path, file + '.TXT')
except KeyError as key_error:
if file.startswith('LAT9999'):
continue
logger.error(key_error)
raise
with open(path) as file_open:
content = file_open.read()
writer.add_document(path=path,
author=author,
content=content)
if count % 100 == 0:
logger.info('Indexed doc %s.' % count)
if self.chunk == 'work':
for count, file in enumerate(files, 1):
try:
if self.lang == 'greek' and self.corpus == 'tlg':
path = os.path.join(corpus_path, file + '.TXT')
author = corpus_index[file[3:-8]]
if self.lang == 'latin' and self.corpus == 'phi5':
path = os.path.join(corpus_path, file + '.TXT')
author = corpus_index[file[:-8]]
except KeyError as key_error:
if file.startswith('LAT9999'):
continue
logger.error(key_error)
raise
with open(path) as file_open:
content = file_open.read()
writer.add_document(path=path,
author=author,
content=content)
if count % 100 == 0:
logger.info('Indexed doc %s.' % count)
logger.info('Commencing to commit changes.')
writer.commit()
time_1 = time.time()
elapsed = time_1 - time_0
logger.info('Finished indexing all documents in %s seconds (averaging %s docs per sec.)' % (elapsed, (len(files) / elapsed)))
|
def function[index_corpus, parameter[self]]:
constant[Make a Whoosh index out of a pre-processed corpus, ie TLG, PHI5,
or PHI7.
TLG takes almost 13 min; PHI5 1.5 min.
To setup index parameters
>>> # cltk_index = CLTKIndex('latin', 'phi5') # 1.5 min, 363 docs
>>> # cltk_index = CLTKIndex('latin', 'phi5', chunk='work') # 2 min, 837 docs
>>> # cltk_index = CLTKIndex('greek', 'tlg') # 13 min, 1823 docs
>>> # cltk_index = CLTKIndex('greek', 'tlg', chunk='work') #15.5 min, 6625 docs
# And to start indexing:
>>> # cltk_index.index_corpus()
TODO: Prevent overwriting. Ask user to rm old dir before re-indexing.
TODO: Add option for lemmatizing.
TODO: Add for figure out lower() options.
TODO: Process TLG through forthcoming normalize().
TODO: Add name to each index.
TODO: Turn off any language-specific mods (eg, stemming, case) that
Whoosh might be doing by default.
]
variable[schema] assign[=] call[name[Schema], parameter[]]
<ast.Try object at 0x7da1b26add80>
variable[writer] assign[=] call[name[_index].writer, parameter[]]
if <ast.BoolOp object at 0x7da1b26ada80> begin[:]
variable[corpus_path] assign[=] call[name[os].path.expanduser, parameter[constant[~/cltk_data/greek/text/tlg/plaintext/]]]
if compare[name[self].chunk equal[==] constant[work]] begin[:]
variable[corpus_path] assign[=] call[name[os].path.expanduser, parameter[constant[~/cltk_data/greek/text/tlg/individual_works/]]]
assert[call[name[os].path.isdir, parameter[name[corpus_path]]]]
variable[files] assign[=] call[name[os].listdir, parameter[name[corpus_path]]]
if <ast.BoolOp object at 0x7da237eefa90> begin[:]
variable[files] assign[=] <ast.ListComp object at 0x7da18eb54130>
variable[corpus_index] assign[=] name[TLG_AUTHOR_MAP]
variable[time_0] assign[=] call[name[time].time, parameter[]]
call[name[logger].info, parameter[binary_operation[constant[Commencing indexing of %s documents of '%s' corpus.] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da18eb54a30>, <ast.Attribute object at 0x7da18eb55150>]]]]]
call[name[logger].info, parameter[binary_operation[constant[Index will be written to: "%s".] <ast.Mod object at 0x7da2590d6920> name[self].index_path]]]
if compare[name[self].chunk equal[==] constant[author]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da18eb54df0>, <ast.Name object at 0x7da18eb55420>]]] in starred[call[name[enumerate], parameter[name[files], constant[1]]]] begin[:]
<ast.Try object at 0x7da18eb56800>
with call[name[open], parameter[name[path]]] begin[:]
variable[content] assign[=] call[name[file_open].read, parameter[]]
call[name[writer].add_document, parameter[]]
if compare[binary_operation[name[count] <ast.Mod object at 0x7da2590d6920> constant[100]] equal[==] constant[0]] begin[:]
call[name[logger].info, parameter[binary_operation[constant[Indexed doc %s.] <ast.Mod object at 0x7da2590d6920> name[count]]]]
if compare[name[self].chunk equal[==] constant[work]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b26ad960>, <ast.Name object at 0x7da1b26afc40>]]] in starred[call[name[enumerate], parameter[name[files], constant[1]]]] begin[:]
<ast.Try object at 0x7da1b26ae680>
with call[name[open], parameter[name[path]]] begin[:]
variable[content] assign[=] call[name[file_open].read, parameter[]]
call[name[writer].add_document, parameter[]]
if compare[binary_operation[name[count] <ast.Mod object at 0x7da2590d6920> constant[100]] equal[==] constant[0]] begin[:]
call[name[logger].info, parameter[binary_operation[constant[Indexed doc %s.] <ast.Mod object at 0x7da2590d6920> name[count]]]]
call[name[logger].info, parameter[constant[Commencing to commit changes.]]]
call[name[writer].commit, parameter[]]
variable[time_1] assign[=] call[name[time].time, parameter[]]
variable[elapsed] assign[=] binary_operation[name[time_1] - name[time_0]]
call[name[logger].info, parameter[binary_operation[constant[Finished indexing all documents in %s seconds (averaging %s docs per sec.)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da20e955240>, <ast.BinOp object at 0x7da20e9546a0>]]]]]
|
keyword[def] identifier[index_corpus] ( identifier[self] ):
literal[string]
identifier[schema] = identifier[Schema] ( identifier[path] = identifier[ID] ( identifier[stored] = keyword[True] ),
identifier[author] = identifier[TEXT] ( identifier[stored] = keyword[True] ),
identifier[content] = identifier[TEXT] )
keyword[try] :
identifier[_index] = identifier[create_in] ( identifier[self] . identifier[index_path] , identifier[schema] )
keyword[except] identifier[FileNotFoundError] :
identifier[os] . identifier[makedirs] ( identifier[self] . identifier[index_path] )
identifier[_index] = identifier[create_in] ( identifier[self] . identifier[index_path] , identifier[schema] )
identifier[writer] = identifier[_index] . identifier[writer] ()
keyword[if] identifier[self] . identifier[lang] == literal[string] keyword[and] identifier[self] . identifier[corpus] == literal[string] :
identifier[corpus_path] = identifier[os] . identifier[path] . identifier[expanduser] ( literal[string] )
keyword[if] identifier[self] . identifier[chunk] == literal[string] :
identifier[corpus_path] = identifier[os] . identifier[path] . identifier[expanduser] ( literal[string] )
keyword[elif] identifier[self] . identifier[lang] == literal[string] keyword[and] identifier[self] . identifier[corpus] == literal[string] :
identifier[corpus_path] = identifier[os] . identifier[path] . identifier[expanduser] ( literal[string] )
keyword[if] identifier[self] . identifier[chunk] == literal[string] :
identifier[corpus_path] = identifier[os] . identifier[path] . identifier[expanduser] ( literal[string] )
keyword[assert] identifier[os] . identifier[path] . identifier[isdir] ( identifier[corpus_path] ), literal[string] % identifier[corpus_path]
identifier[files] = identifier[os] . identifier[listdir] ( identifier[corpus_path] )
keyword[if] identifier[self] . identifier[lang] == literal[string] keyword[and] identifier[self] . identifier[corpus] == literal[string] :
identifier[files] =[ identifier[f] [:- literal[int] ] keyword[for] identifier[f] keyword[in] identifier[files] keyword[if] identifier[f] . identifier[startswith] ( literal[string] )]
identifier[corpus_index] = identifier[TLG_AUTHOR_MAP]
keyword[elif] identifier[self] . identifier[lang] == literal[string] keyword[and] identifier[self] . identifier[corpus] == literal[string] :
identifier[files] =[ identifier[f] [:- literal[int] ] keyword[for] identifier[f] keyword[in] identifier[files] keyword[if] identifier[f] . identifier[startswith] ( literal[string] )]
identifier[corpus_index] = identifier[PHI5_AUTHOR_MAP]
identifier[time_0] = identifier[time] . identifier[time] ()
identifier[logger] . identifier[info] ( literal[string] %( identifier[len] ( identifier[files] ), identifier[self] . identifier[corpus] ))
identifier[logger] . identifier[info] ( literal[string] % identifier[self] . identifier[index_path] )
keyword[if] identifier[self] . identifier[chunk] == literal[string] :
keyword[for] identifier[count] , identifier[file] keyword[in] identifier[enumerate] ( identifier[files] , literal[int] ):
keyword[try] :
keyword[if] identifier[self] . identifier[lang] == literal[string] keyword[and] identifier[self] . identifier[corpus] == literal[string] :
identifier[file] = identifier[file] [ literal[int] :]
identifier[author] = identifier[corpus_index] [ identifier[file] ]
identifier[path] = identifier[os] . identifier[path] . identifier[join] ( identifier[corpus_path] , literal[string] + identifier[file] + literal[string] )
keyword[if] identifier[self] . identifier[lang] == literal[string] keyword[and] identifier[self] . identifier[corpus] == literal[string] :
identifier[author] = identifier[corpus_index] [ identifier[file] ]
identifier[path] = identifier[os] . identifier[path] . identifier[join] ( identifier[corpus_path] , identifier[file] + literal[string] )
keyword[except] identifier[KeyError] keyword[as] identifier[key_error] :
keyword[if] identifier[file] . identifier[startswith] ( literal[string] ):
keyword[continue]
identifier[logger] . identifier[error] ( identifier[key_error] )
keyword[raise]
keyword[with] identifier[open] ( identifier[path] ) keyword[as] identifier[file_open] :
identifier[content] = identifier[file_open] . identifier[read] ()
identifier[writer] . identifier[add_document] ( identifier[path] = identifier[path] ,
identifier[author] = identifier[author] ,
identifier[content] = identifier[content] )
keyword[if] identifier[count] % literal[int] == literal[int] :
identifier[logger] . identifier[info] ( literal[string] % identifier[count] )
keyword[if] identifier[self] . identifier[chunk] == literal[string] :
keyword[for] identifier[count] , identifier[file] keyword[in] identifier[enumerate] ( identifier[files] , literal[int] ):
keyword[try] :
keyword[if] identifier[self] . identifier[lang] == literal[string] keyword[and] identifier[self] . identifier[corpus] == literal[string] :
identifier[path] = identifier[os] . identifier[path] . identifier[join] ( identifier[corpus_path] , identifier[file] + literal[string] )
identifier[author] = identifier[corpus_index] [ identifier[file] [ literal[int] :- literal[int] ]]
keyword[if] identifier[self] . identifier[lang] == literal[string] keyword[and] identifier[self] . identifier[corpus] == literal[string] :
identifier[path] = identifier[os] . identifier[path] . identifier[join] ( identifier[corpus_path] , identifier[file] + literal[string] )
identifier[author] = identifier[corpus_index] [ identifier[file] [:- literal[int] ]]
keyword[except] identifier[KeyError] keyword[as] identifier[key_error] :
keyword[if] identifier[file] . identifier[startswith] ( literal[string] ):
keyword[continue]
identifier[logger] . identifier[error] ( identifier[key_error] )
keyword[raise]
keyword[with] identifier[open] ( identifier[path] ) keyword[as] identifier[file_open] :
identifier[content] = identifier[file_open] . identifier[read] ()
identifier[writer] . identifier[add_document] ( identifier[path] = identifier[path] ,
identifier[author] = identifier[author] ,
identifier[content] = identifier[content] )
keyword[if] identifier[count] % literal[int] == literal[int] :
identifier[logger] . identifier[info] ( literal[string] % identifier[count] )
identifier[logger] . identifier[info] ( literal[string] )
identifier[writer] . identifier[commit] ()
identifier[time_1] = identifier[time] . identifier[time] ()
identifier[elapsed] = identifier[time_1] - identifier[time_0]
identifier[logger] . identifier[info] ( literal[string] %( identifier[elapsed] ,( identifier[len] ( identifier[files] )/ identifier[elapsed] )))
|
def index_corpus(self):
"""Make a Whoosh index out of a pre-processed corpus, ie TLG, PHI5,
or PHI7.
TLG takes almost 13 min; PHI5 1.5 min.
To setup index parameters
>>> # cltk_index = CLTKIndex('latin', 'phi5') # 1.5 min, 363 docs
>>> # cltk_index = CLTKIndex('latin', 'phi5', chunk='work') # 2 min, 837 docs
>>> # cltk_index = CLTKIndex('greek', 'tlg') # 13 min, 1823 docs
>>> # cltk_index = CLTKIndex('greek', 'tlg', chunk='work') #15.5 min, 6625 docs
# And to start indexing:
>>> # cltk_index.index_corpus()
TODO: Prevent overwriting. Ask user to rm old dir before re-indexing.
TODO: Add option for lemmatizing.
TODO: Add for figure out lower() options.
TODO: Process TLG through forthcoming normalize().
TODO: Add name to each index.
TODO: Turn off any language-specific mods (eg, stemming, case) that
Whoosh might be doing by default.
"""
# Setup index dir
schema = Schema(path=ID(stored=True), author=TEXT(stored=True), content=TEXT)
try:
_index = create_in(self.index_path, schema) # depends on [control=['try'], data=[]]
except FileNotFoundError:
os.makedirs(self.index_path)
_index = create_in(self.index_path, schema) # depends on [control=['except'], data=[]]
writer = _index.writer()
# Setup corpus to be indexed
if self.lang == 'greek' and self.corpus == 'tlg':
corpus_path = os.path.expanduser('~/cltk_data/greek/text/tlg/plaintext/')
if self.chunk == 'work':
corpus_path = os.path.expanduser('~/cltk_data/greek/text/tlg/individual_works/') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif self.lang == 'latin' and self.corpus == 'phi5':
corpus_path = os.path.expanduser('~/cltk_data/latin/text/phi5/plaintext/')
if self.chunk == 'work':
corpus_path = os.path.expanduser('~/cltk_data/latin/text/phi5/individual_works/') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
assert os.path.isdir(corpus_path), 'Corpus does not exist in the following location: "%s". Use CLTK Corpus Importer and TLGU to create transformed corpus.' % corpus_path # pylint: disable=line-too-long
files = os.listdir(corpus_path)
if self.lang == 'greek' and self.corpus == 'tlg':
files = [f[:-4] for f in files if f.startswith('TLG')]
corpus_index = TLG_AUTHOR_MAP # depends on [control=['if'], data=[]]
elif self.lang == 'latin' and self.corpus == 'phi5':
files = [f[:-4] for f in files if f.startswith('LAT')]
corpus_index = PHI5_AUTHOR_MAP # depends on [control=['if'], data=[]]
time_0 = time.time()
logger.info("Commencing indexing of %s documents of '%s' corpus." % (len(files), self.corpus)) # pylint: disable=line-too-long
logger.info('Index will be written to: "%s".' % self.index_path)
if self.chunk == 'author':
for (count, file) in enumerate(files, 1):
try:
if self.lang == 'greek' and self.corpus == 'tlg':
file = file[3:]
author = corpus_index[file]
path = os.path.join(corpus_path, 'TLG' + file + '.TXT') # depends on [control=['if'], data=[]]
if self.lang == 'latin' and self.corpus == 'phi5':
author = corpus_index[file]
path = os.path.join(corpus_path, file + '.TXT') # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except KeyError as key_error:
if file.startswith('LAT9999'):
continue # depends on [control=['if'], data=[]]
logger.error(key_error)
raise # depends on [control=['except'], data=['key_error']]
with open(path) as file_open:
content = file_open.read() # depends on [control=['with'], data=['file_open']]
writer.add_document(path=path, author=author, content=content)
if count % 100 == 0:
logger.info('Indexed doc %s.' % count) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
if self.chunk == 'work':
for (count, file) in enumerate(files, 1):
try:
if self.lang == 'greek' and self.corpus == 'tlg':
path = os.path.join(corpus_path, file + '.TXT')
author = corpus_index[file[3:-8]] # depends on [control=['if'], data=[]]
if self.lang == 'latin' and self.corpus == 'phi5':
path = os.path.join(corpus_path, file + '.TXT')
author = corpus_index[file[:-8]] # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except KeyError as key_error:
if file.startswith('LAT9999'):
continue # depends on [control=['if'], data=[]]
logger.error(key_error)
raise # depends on [control=['except'], data=['key_error']]
with open(path) as file_open:
content = file_open.read() # depends on [control=['with'], data=['file_open']]
writer.add_document(path=path, author=author, content=content)
if count % 100 == 0:
logger.info('Indexed doc %s.' % count) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
logger.info('Commencing to commit changes.')
writer.commit()
time_1 = time.time()
elapsed = time_1 - time_0
logger.info('Finished indexing all documents in %s seconds (averaging %s docs per sec.)' % (elapsed, len(files) / elapsed))
|
def close(self):
"""Close the _Stream object. No operation should be
done on it afterwards.
"""
if self.closed:
return
if self.mode == "w" and self.comptype != "tar":
self.buf += self.cmp.flush()
if self.mode == "w" and self.buf:
self.fileobj.write(self.buf)
self.buf = b""
if self.comptype == "gz":
# The native zlib crc is an unsigned 32-bit integer, but
# the Python wrapper implicitly casts that to a signed C
# long. So, on a 32-bit box self.crc may "look negative",
# while the same crc on a 64-bit box may "look positive".
# To avoid irksome warnings from the `struct` module, force
# it to look positive on all boxes.
self.fileobj.write(struct.pack("<L", self.crc & 0xffffffff))
self.fileobj.write(struct.pack("<L", self.pos & 0xffffFFFF))
if not self._extfileobj:
self.fileobj.close()
self.closed = True
|
def function[close, parameter[self]]:
constant[Close the _Stream object. No operation should be
done on it afterwards.
]
if name[self].closed begin[:]
return[None]
if <ast.BoolOp object at 0x7da1b1e94c40> begin[:]
<ast.AugAssign object at 0x7da1b1e955a0>
if <ast.BoolOp object at 0x7da1b1e95390> begin[:]
call[name[self].fileobj.write, parameter[name[self].buf]]
name[self].buf assign[=] constant[b'']
if compare[name[self].comptype equal[==] constant[gz]] begin[:]
call[name[self].fileobj.write, parameter[call[name[struct].pack, parameter[constant[<L], binary_operation[name[self].crc <ast.BitAnd object at 0x7da2590d6b60> constant[4294967295]]]]]]
call[name[self].fileobj.write, parameter[call[name[struct].pack, parameter[constant[<L], binary_operation[name[self].pos <ast.BitAnd object at 0x7da2590d6b60> constant[4294967295]]]]]]
if <ast.UnaryOp object at 0x7da1b1e955d0> begin[:]
call[name[self].fileobj.close, parameter[]]
name[self].closed assign[=] constant[True]
|
keyword[def] identifier[close] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[closed] :
keyword[return]
keyword[if] identifier[self] . identifier[mode] == literal[string] keyword[and] identifier[self] . identifier[comptype] != literal[string] :
identifier[self] . identifier[buf] += identifier[self] . identifier[cmp] . identifier[flush] ()
keyword[if] identifier[self] . identifier[mode] == literal[string] keyword[and] identifier[self] . identifier[buf] :
identifier[self] . identifier[fileobj] . identifier[write] ( identifier[self] . identifier[buf] )
identifier[self] . identifier[buf] = literal[string]
keyword[if] identifier[self] . identifier[comptype] == literal[string] :
identifier[self] . identifier[fileobj] . identifier[write] ( identifier[struct] . identifier[pack] ( literal[string] , identifier[self] . identifier[crc] & literal[int] ))
identifier[self] . identifier[fileobj] . identifier[write] ( identifier[struct] . identifier[pack] ( literal[string] , identifier[self] . identifier[pos] & literal[int] ))
keyword[if] keyword[not] identifier[self] . identifier[_extfileobj] :
identifier[self] . identifier[fileobj] . identifier[close] ()
identifier[self] . identifier[closed] = keyword[True]
|
def close(self):
"""Close the _Stream object. No operation should be
done on it afterwards.
"""
if self.closed:
return # depends on [control=['if'], data=[]]
if self.mode == 'w' and self.comptype != 'tar':
self.buf += self.cmp.flush() # depends on [control=['if'], data=[]]
if self.mode == 'w' and self.buf:
self.fileobj.write(self.buf)
self.buf = b''
if self.comptype == 'gz':
# The native zlib crc is an unsigned 32-bit integer, but
# the Python wrapper implicitly casts that to a signed C
# long. So, on a 32-bit box self.crc may "look negative",
# while the same crc on a 64-bit box may "look positive".
# To avoid irksome warnings from the `struct` module, force
# it to look positive on all boxes.
self.fileobj.write(struct.pack('<L', self.crc & 4294967295))
self.fileobj.write(struct.pack('<L', self.pos & 4294967295)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if not self._extfileobj:
self.fileobj.close() # depends on [control=['if'], data=[]]
self.closed = True
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.