| code (stringlengths 75-104k) | code_sememe (stringlengths 47-309k) | token_type (stringlengths 215-214k) | code_dependency (stringlengths 75-155k) |
|---|---|---|---|
def sample(self, batch_size, batch_idxs=None):
"""Return a randomized batch of experiences
# Argument
batch_size (int): Size of the batch
batch_idxs (int): Indexes to extract
# Returns
A list of experiences randomly selected
"""
# It is not possible to tell whether the first state in the memory is terminal, because it
# would require access to the "terminal" flag associated to the previous state. As a result
# we will never return this first state (only using `self.terminals[0]` to know whether the
# second state is terminal).
# In addition we need enough entries to fill the desired window length.
assert self.nb_entries >= self.window_length + 2, 'not enough entries in the memory'
if batch_idxs is None:
# Draw random indexes such that we have enough entries before each index to fill the
# desired window length.
batch_idxs = sample_batch_indexes(
self.window_length, self.nb_entries - 1, size=batch_size)
batch_idxs = np.array(batch_idxs) + 1
assert np.min(batch_idxs) >= self.window_length + 1
assert np.max(batch_idxs) < self.nb_entries
assert len(batch_idxs) == batch_size
# Create experiences
experiences = []
for idx in batch_idxs:
terminal0 = self.terminals[idx - 2]
while terminal0:
# Skip this transition because the environment was reset here. Select a new, random
# transition and use this instead. This may cause the batch to contain the same
# transition twice.
idx = sample_batch_indexes(self.window_length + 1, self.nb_entries, size=1)[0]
terminal0 = self.terminals[idx - 2]
assert self.window_length + 1 <= idx < self.nb_entries
# This code is slightly complicated by the fact that subsequent observations might be
# from different episodes. We ensure that an experience never spans multiple episodes.
# This is probably not that important in practice but it seems cleaner.
state0 = [self.observations[idx - 1]]
for offset in range(0, self.window_length - 1):
current_idx = idx - 2 - offset
assert current_idx >= 1
current_terminal = self.terminals[current_idx - 1]
if current_terminal and not self.ignore_episode_boundaries:
# The previously handled observation was terminal, don't add the current one.
# Otherwise we would leak into a different episode.
break
state0.insert(0, self.observations[current_idx])
while len(state0) < self.window_length:
state0.insert(0, zeroed_observation(state0[0]))
action = self.actions[idx - 1]
reward = self.rewards[idx - 1]
terminal1 = self.terminals[idx - 1]
# Okay, now we need to create the follow-up state. This is state0 shifted one timestep
# to the right. Again, we need to be careful to not include an observation from the next
# episode if the last state is terminal.
state1 = [np.copy(x) for x in state0[1:]]
state1.append(self.observations[idx])
assert len(state0) == self.window_length
assert len(state1) == len(state0)
experiences.append(Experience(state0=state0, action=action, reward=reward,
state1=state1, terminal1=terminal1))
assert len(experiences) == batch_size
return experiences
|
def function[sample, parameter[self, batch_size, batch_idxs]]:
constant[Return a randomized batch of experiences
# Argument
batch_size (int): Size of the batch
batch_idxs (int): Indexes to extract
# Returns
A list of experiences randomly selected
]
assert[compare[name[self].nb_entries greater_or_equal[>=] binary_operation[name[self].window_length + constant[2]]]]
if compare[name[batch_idxs] is constant[None]] begin[:]
variable[batch_idxs] assign[=] call[name[sample_batch_indexes], parameter[name[self].window_length, binary_operation[name[self].nb_entries - constant[1]]]]
variable[batch_idxs] assign[=] binary_operation[call[name[np].array, parameter[name[batch_idxs]]] + constant[1]]
assert[compare[call[name[np].min, parameter[name[batch_idxs]]] greater_or_equal[>=] binary_operation[name[self].window_length + constant[1]]]]
assert[compare[call[name[np].max, parameter[name[batch_idxs]]] less[<] name[self].nb_entries]]
assert[compare[call[name[len], parameter[name[batch_idxs]]] equal[==] name[batch_size]]]
variable[experiences] assign[=] list[[]]
for taget[name[idx]] in starred[name[batch_idxs]] begin[:]
variable[terminal0] assign[=] call[name[self].terminals][binary_operation[name[idx] - constant[2]]]
while name[terminal0] begin[:]
variable[idx] assign[=] call[call[name[sample_batch_indexes], parameter[binary_operation[name[self].window_length + constant[1]], name[self].nb_entries]]][constant[0]]
variable[terminal0] assign[=] call[name[self].terminals][binary_operation[name[idx] - constant[2]]]
assert[compare[binary_operation[name[self].window_length + constant[1]] less_or_equal[<=] name[idx]]]
variable[state0] assign[=] list[[<ast.Subscript object at 0x7da1b20ba4d0>]]
for taget[name[offset]] in starred[call[name[range], parameter[constant[0], binary_operation[name[self].window_length - constant[1]]]]] begin[:]
variable[current_idx] assign[=] binary_operation[binary_operation[name[idx] - constant[2]] - name[offset]]
assert[compare[name[current_idx] greater_or_equal[>=] constant[1]]]
variable[current_terminal] assign[=] call[name[self].terminals][binary_operation[name[current_idx] - constant[1]]]
if <ast.BoolOp object at 0x7da1b20ba740> begin[:]
break
call[name[state0].insert, parameter[constant[0], call[name[self].observations][name[current_idx]]]]
while compare[call[name[len], parameter[name[state0]]] less[<] name[self].window_length] begin[:]
call[name[state0].insert, parameter[constant[0], call[name[zeroed_observation], parameter[call[name[state0]][constant[0]]]]]]
variable[action] assign[=] call[name[self].actions][binary_operation[name[idx] - constant[1]]]
variable[reward] assign[=] call[name[self].rewards][binary_operation[name[idx] - constant[1]]]
variable[terminal1] assign[=] call[name[self].terminals][binary_operation[name[idx] - constant[1]]]
variable[state1] assign[=] <ast.ListComp object at 0x7da1b20b8cd0>
call[name[state1].append, parameter[call[name[self].observations][name[idx]]]]
assert[compare[call[name[len], parameter[name[state0]]] equal[==] name[self].window_length]]
assert[compare[call[name[len], parameter[name[state1]]] equal[==] call[name[len], parameter[name[state0]]]]]
call[name[experiences].append, parameter[call[name[Experience], parameter[]]]]
assert[compare[call[name[len], parameter[name[experiences]]] equal[==] name[batch_size]]]
return[name[experiences]]
|
keyword[def] identifier[sample] ( identifier[self] , identifier[batch_size] , identifier[batch_idxs] = keyword[None] ):
literal[string]
keyword[assert] identifier[self] . identifier[nb_entries] >= identifier[self] . identifier[window_length] + literal[int] , literal[string]
keyword[if] identifier[batch_idxs] keyword[is] keyword[None] :
identifier[batch_idxs] = identifier[sample_batch_indexes] (
identifier[self] . identifier[window_length] , identifier[self] . identifier[nb_entries] - literal[int] , identifier[size] = identifier[batch_size] )
identifier[batch_idxs] = identifier[np] . identifier[array] ( identifier[batch_idxs] )+ literal[int]
keyword[assert] identifier[np] . identifier[min] ( identifier[batch_idxs] )>= identifier[self] . identifier[window_length] + literal[int]
keyword[assert] identifier[np] . identifier[max] ( identifier[batch_idxs] )< identifier[self] . identifier[nb_entries]
keyword[assert] identifier[len] ( identifier[batch_idxs] )== identifier[batch_size]
identifier[experiences] =[]
keyword[for] identifier[idx] keyword[in] identifier[batch_idxs] :
identifier[terminal0] = identifier[self] . identifier[terminals] [ identifier[idx] - literal[int] ]
keyword[while] identifier[terminal0] :
identifier[idx] = identifier[sample_batch_indexes] ( identifier[self] . identifier[window_length] + literal[int] , identifier[self] . identifier[nb_entries] , identifier[size] = literal[int] )[ literal[int] ]
identifier[terminal0] = identifier[self] . identifier[terminals] [ identifier[idx] - literal[int] ]
keyword[assert] identifier[self] . identifier[window_length] + literal[int] <= identifier[idx] < identifier[self] . identifier[nb_entries]
identifier[state0] =[ identifier[self] . identifier[observations] [ identifier[idx] - literal[int] ]]
keyword[for] identifier[offset] keyword[in] identifier[range] ( literal[int] , identifier[self] . identifier[window_length] - literal[int] ):
identifier[current_idx] = identifier[idx] - literal[int] - identifier[offset]
keyword[assert] identifier[current_idx] >= literal[int]
identifier[current_terminal] = identifier[self] . identifier[terminals] [ identifier[current_idx] - literal[int] ]
keyword[if] identifier[current_terminal] keyword[and] keyword[not] identifier[self] . identifier[ignore_episode_boundaries] :
keyword[break]
identifier[state0] . identifier[insert] ( literal[int] , identifier[self] . identifier[observations] [ identifier[current_idx] ])
keyword[while] identifier[len] ( identifier[state0] )< identifier[self] . identifier[window_length] :
identifier[state0] . identifier[insert] ( literal[int] , identifier[zeroed_observation] ( identifier[state0] [ literal[int] ]))
identifier[action] = identifier[self] . identifier[actions] [ identifier[idx] - literal[int] ]
identifier[reward] = identifier[self] . identifier[rewards] [ identifier[idx] - literal[int] ]
identifier[terminal1] = identifier[self] . identifier[terminals] [ identifier[idx] - literal[int] ]
identifier[state1] =[ identifier[np] . identifier[copy] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[state0] [ literal[int] :]]
identifier[state1] . identifier[append] ( identifier[self] . identifier[observations] [ identifier[idx] ])
keyword[assert] identifier[len] ( identifier[state0] )== identifier[self] . identifier[window_length]
keyword[assert] identifier[len] ( identifier[state1] )== identifier[len] ( identifier[state0] )
identifier[experiences] . identifier[append] ( identifier[Experience] ( identifier[state0] = identifier[state0] , identifier[action] = identifier[action] , identifier[reward] = identifier[reward] ,
identifier[state1] = identifier[state1] , identifier[terminal1] = identifier[terminal1] ))
keyword[assert] identifier[len] ( identifier[experiences] )== identifier[batch_size]
keyword[return] identifier[experiences]
|
def sample(self, batch_size, batch_idxs=None):
"""Return a randomized batch of experiences
# Argument
batch_size (int): Size of the batch
batch_idxs (int): Indexes to extract
# Returns
A list of experiences randomly selected
"""
# It is not possible to tell whether the first state in the memory is terminal, because it
# would require access to the "terminal" flag associated to the previous state. As a result
# we will never return this first state (only using `self.terminals[0]` to know whether the
# second state is terminal).
# In addition we need enough entries to fill the desired window length.
assert self.nb_entries >= self.window_length + 2, 'not enough entries in the memory'
if batch_idxs is None:
# Draw random indexes such that we have enough entries before each index to fill the
# desired window length.
batch_idxs = sample_batch_indexes(self.window_length, self.nb_entries - 1, size=batch_size) # depends on [control=['if'], data=['batch_idxs']]
batch_idxs = np.array(batch_idxs) + 1
assert np.min(batch_idxs) >= self.window_length + 1
assert np.max(batch_idxs) < self.nb_entries
assert len(batch_idxs) == batch_size
# Create experiences
experiences = []
for idx in batch_idxs:
terminal0 = self.terminals[idx - 2]
while terminal0:
# Skip this transition because the environment was reset here. Select a new, random
# transition and use this instead. This may cause the batch to contain the same
# transition twice.
idx = sample_batch_indexes(self.window_length + 1, self.nb_entries, size=1)[0]
terminal0 = self.terminals[idx - 2] # depends on [control=['while'], data=[]]
assert self.window_length + 1 <= idx < self.nb_entries
# This code is slightly complicated by the fact that subsequent observations might be
# from different episodes. We ensure that an experience never spans multiple episodes.
# This is probably not that important in practice but it seems cleaner.
state0 = [self.observations[idx - 1]]
for offset in range(0, self.window_length - 1):
current_idx = idx - 2 - offset
assert current_idx >= 1
current_terminal = self.terminals[current_idx - 1]
if current_terminal and (not self.ignore_episode_boundaries):
# The previously handled observation was terminal, don't add the current one.
# Otherwise we would leak into a different episode.
break # depends on [control=['if'], data=[]]
state0.insert(0, self.observations[current_idx]) # depends on [control=['for'], data=['offset']]
while len(state0) < self.window_length:
state0.insert(0, zeroed_observation(state0[0])) # depends on [control=['while'], data=[]]
action = self.actions[idx - 1]
reward = self.rewards[idx - 1]
terminal1 = self.terminals[idx - 1]
# Okay, now we need to create the follow-up state. This is state0 shifted one timestep
# to the right. Again, we need to be careful to not include an observation from the next
# episode if the last state is terminal.
state1 = [np.copy(x) for x in state0[1:]]
state1.append(self.observations[idx])
assert len(state0) == self.window_length
assert len(state1) == len(state0)
experiences.append(Experience(state0=state0, action=action, reward=reward, state1=state1, terminal1=terminal1)) # depends on [control=['for'], data=['idx']]
assert len(experiences) == batch_size
return experiences
|
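The row above reads like the `sample()` method of keras-rl's `SequentialMemory` ring buffer. A minimal usage sketch under that assumption; the `rl.memory` import, the constructor arguments, and the dummy observations are assumptions, not part of this dataset row.

```python
# Hedged sketch: assumes the code above is keras-rl's SequentialMemory.sample().
import numpy as np
from rl.memory import SequentialMemory  # assumption: keras-rl is installed

memory = SequentialMemory(limit=100000, window_length=4)

# Fill the buffer so sample() passes its "enough entries" assertion
# (nb_entries must be >= window_length + 2).
for step in range(1000):
    observation = np.random.rand(84, 84)
    memory.append(observation, action=0, reward=0.0, terminal=(step % 200 == 199))

# Each Experience carries state0/state1 windows of `window_length` observations
# plus the action, reward and terminal flag at that step.
batch = memory.sample(batch_size=32)
print(len(batch), len(batch[0].state0))
```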
def substitute_xml(cls, value, make_quoted_attribute=False):
"""Substitute XML entities for special XML characters.
:param value: A string to be substituted. The less-than sign
will become &lt;, the greater-than sign will become &gt;,
and any ampersands will become &amp;. If you want ampersands
that appear to be part of an entity definition to be left
alone, use substitute_xml_containing_entities() instead.
:param make_quoted_attribute: If True, then the string will be
quoted, as befits an attribute value.
"""
# Escape angle brackets and ampersands.
value = cls.AMPERSAND_OR_BRACKET.sub(
cls._substitute_xml_entity, value)
if make_quoted_attribute:
value = cls.quoted_attribute_value(value)
return value
|
def function[substitute_xml, parameter[cls, value, make_quoted_attribute]]:
constant[Substitute XML entities for special XML characters.
:param value: A string to be substituted. The less-than sign
will become &lt;, the greater-than sign will become &gt;,
and any ampersands will become &amp;. If you want ampersands
that appear to be part of an entity definition to be left
alone, use substitute_xml_containing_entities() instead.
:param make_quoted_attribute: If True, then the string will be
quoted, as befits an attribute value.
]
variable[value] assign[=] call[name[cls].AMPERSAND_OR_BRACKET.sub, parameter[name[cls]._substitute_xml_entity, name[value]]]
if name[make_quoted_attribute] begin[:]
variable[value] assign[=] call[name[cls].quoted_attribute_value, parameter[name[value]]]
return[name[value]]
|
keyword[def] identifier[substitute_xml] ( identifier[cls] , identifier[value] , identifier[make_quoted_attribute] = keyword[False] ):
literal[string]
identifier[value] = identifier[cls] . identifier[AMPERSAND_OR_BRACKET] . identifier[sub] (
identifier[cls] . identifier[_substitute_xml_entity] , identifier[value] )
keyword[if] identifier[make_quoted_attribute] :
identifier[value] = identifier[cls] . identifier[quoted_attribute_value] ( identifier[value] )
keyword[return] identifier[value]
|
def substitute_xml(cls, value, make_quoted_attribute=False):
"""Substitute XML entities for special XML characters.
:param value: A string to be substituted. The less-than sign
will become &lt;, the greater-than sign will become &gt;,
and any ampersands will become &amp;. If you want ampersands
that appear to be part of an entity definition to be left
alone, use substitute_xml_containing_entities() instead.
:param make_quoted_attribute: If True, then the string will be
quoted, as befits an attribute value.
"""
# Escape angle brackets and ampersands.
value = cls.AMPERSAND_OR_BRACKET.sub(cls._substitute_xml_entity, value)
if make_quoted_attribute:
value = cls.quoted_attribute_value(value) # depends on [control=['if'], data=[]]
return value
|
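The `substitute_xml` row matches the classmethod on BeautifulSoup's `EntitySubstitution` helper; a short usage sketch under that assumption (the `bs4.dammit` import path is the assumption here).

```python
# Hedged sketch: assumes this classmethod is bs4.dammit.EntitySubstitution.substitute_xml.
from bs4.dammit import EntitySubstitution

raw = 'Brackets <here> & an ampersand'
print(EntitySubstitution.substitute_xml(raw))
# Brackets &lt;here&gt; &amp; an ampersand

# With make_quoted_attribute=True the escaped string is also wrapped in quotes
# so it can be dropped straight into an attribute value.
print(EntitySubstitution.substitute_xml('say "hi"', make_quoted_attribute=True))
```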
def draw(self, surface):
""" Draw all sprites and map onto the surface
:param surface: pygame surface to draw to
:type surface: pygame.surface.Surface
"""
ox, oy = self._map_layer.get_center_offset()
new_surfaces = list()
spritedict = self.spritedict
gl = self.get_layer_of_sprite
new_surfaces_append = new_surfaces.append
for spr in self.sprites():
new_rect = spr.rect.move(ox, oy)
try:
new_surfaces_append((spr.image, new_rect, gl(spr), spr.blendmode))
except AttributeError: # generally should only fail when no blendmode available
new_surfaces_append((spr.image, new_rect, gl(spr)))
spritedict[spr] = new_rect
self.lostsprites = []
return self._map_layer.draw(surface, surface.get_rect(), new_surfaces)
|
def function[draw, parameter[self, surface]]:
constant[ Draw all sprites and map onto the surface
:param surface: pygame surface to draw to
:type surface: pygame.surface.Surface
]
<ast.Tuple object at 0x7da207f02650> assign[=] call[name[self]._map_layer.get_center_offset, parameter[]]
variable[new_surfaces] assign[=] call[name[list], parameter[]]
variable[spritedict] assign[=] name[self].spritedict
variable[gl] assign[=] name[self].get_layer_of_sprite
variable[new_surfaces_append] assign[=] name[new_surfaces].append
for taget[name[spr]] in starred[call[name[self].sprites, parameter[]]] begin[:]
variable[new_rect] assign[=] call[name[spr].rect.move, parameter[name[ox], name[oy]]]
<ast.Try object at 0x7da207f03df0>
call[name[spritedict]][name[spr]] assign[=] name[new_rect]
name[self].lostsprites assign[=] list[[]]
return[call[name[self]._map_layer.draw, parameter[name[surface], call[name[surface].get_rect, parameter[]], name[new_surfaces]]]]
|
keyword[def] identifier[draw] ( identifier[self] , identifier[surface] ):
literal[string]
identifier[ox] , identifier[oy] = identifier[self] . identifier[_map_layer] . identifier[get_center_offset] ()
identifier[new_surfaces] = identifier[list] ()
identifier[spritedict] = identifier[self] . identifier[spritedict]
identifier[gl] = identifier[self] . identifier[get_layer_of_sprite]
identifier[new_surfaces_append] = identifier[new_surfaces] . identifier[append]
keyword[for] identifier[spr] keyword[in] identifier[self] . identifier[sprites] ():
identifier[new_rect] = identifier[spr] . identifier[rect] . identifier[move] ( identifier[ox] , identifier[oy] )
keyword[try] :
identifier[new_surfaces_append] (( identifier[spr] . identifier[image] , identifier[new_rect] , identifier[gl] ( identifier[spr] ), identifier[spr] . identifier[blendmode] ))
keyword[except] identifier[AttributeError] :
identifier[new_surfaces_append] (( identifier[spr] . identifier[image] , identifier[new_rect] , identifier[gl] ( identifier[spr] )))
identifier[spritedict] [ identifier[spr] ]= identifier[new_rect]
identifier[self] . identifier[lostsprites] =[]
keyword[return] identifier[self] . identifier[_map_layer] . identifier[draw] ( identifier[surface] , identifier[surface] . identifier[get_rect] (), identifier[new_surfaces] )
|
def draw(self, surface):
""" Draw all sprites and map onto the surface
:param surface: pygame surface to draw to
:type surface: pygame.surface.Surface
"""
(ox, oy) = self._map_layer.get_center_offset()
new_surfaces = list()
spritedict = self.spritedict
gl = self.get_layer_of_sprite
new_surfaces_append = new_surfaces.append
for spr in self.sprites():
new_rect = spr.rect.move(ox, oy)
try:
new_surfaces_append((spr.image, new_rect, gl(spr), spr.blendmode)) # depends on [control=['try'], data=[]]
except AttributeError: # generally should only fail when no blendmode available
new_surfaces_append((spr.image, new_rect, gl(spr))) # depends on [control=['except'], data=[]]
spritedict[spr] = new_rect # depends on [control=['for'], data=['spr']]
self.lostsprites = []
return self._map_layer.draw(surface, surface.get_rect(), new_surfaces)
|
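This `draw` override looks like pyscroll's `PyscrollGroup.draw`, which layers sprites over a scrolling map renderer. A heavily hedged setup sketch; the `pyscroll`/`pytmx` imports, the `BufferedRenderer` wiring, and the `map.tmx` file are all assumptions.

```python
# Hedged sketch: assumes pyscroll's PyscrollGroup/BufferedRenderer API, pytmx,
# and a Tiled map file named "map.tmx" (hypothetical) in the working directory.
import pygame
import pyscroll
from pytmx.util_pygame import load_pygame

pygame.init()
screen = pygame.display.set_mode((800, 600))

tmx_data = load_pygame("map.tmx")
map_data = pyscroll.TiledMapData(tmx_data)
map_layer = pyscroll.BufferedRenderer(map_data, screen.get_size())

group = pyscroll.PyscrollGroup(map_layer=map_layer)
# group.add(player_sprite) ...

# draw() blits the map plus every sprite (shifted by the camera offset) onto
# the surface, falling back gracefully for sprites without a blendmode.
group.draw(screen)
pygame.display.flip()
```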
def handle(self, key, value):
'''
Processes a valid stats request
@param key: The key that matched the request
@param value: The value associated with the key
'''
# break down key
elements = key.split(":")
stats = elements[1]
appid = elements[2]
uuid = value
# log we received the stats request
extras = self.get_log_dict('stats', appid, uuid=uuid)
self.logger.info('Received {s} stats request'.format(s=stats),
extra=extras)
extras = {}
if stats == 'all':
extras = self.get_all_stats()
elif stats == 'kafka-monitor':
extras = self.get_kafka_monitor_stats()
elif stats == 'redis-monitor':
extras = self.get_redis_monitor_stats()
elif stats == 'crawler':
extras = self.get_crawler_stats()
elif stats == 'spider':
extras = self.get_spider_stats()
elif stats == 'machine':
extras = self.get_machine_stats()
elif stats == 'queue':
extras = self.get_queue_stats()
elif stats == 'rest':
extras = self.get_rest_stats()
else:
self.logger.warn('Received invalid stats request: {s}'\
.format(s=stats),
extra=extras)
return
extras['stats'] = stats
extras['appid'] = appid
extras['uuid'] = uuid
extras['server_time'] = int(self.get_current_time())
if self._send_to_kafka(extras):
extras['success'] = True
self.logger.info('Sent stats to kafka', extra=extras)
else:
extras['success'] = False
self.logger.error('Failed to send stats to kafka', extra=extras)
|
def function[handle, parameter[self, key, value]]:
constant[
Processes a valid stats request
@param key: The key that matched the request
@param value: The value associated with the key
]
variable[elements] assign[=] call[name[key].split, parameter[constant[:]]]
variable[stats] assign[=] call[name[elements]][constant[1]]
variable[appid] assign[=] call[name[elements]][constant[2]]
variable[uuid] assign[=] name[value]
variable[extras] assign[=] call[name[self].get_log_dict, parameter[constant[stats], name[appid]]]
call[name[self].logger.info, parameter[call[constant[Received {s} stats request].format, parameter[]]]]
variable[extras] assign[=] dictionary[[], []]
if compare[name[stats] equal[==] constant[all]] begin[:]
variable[extras] assign[=] call[name[self].get_all_stats, parameter[]]
call[name[extras]][constant[stats]] assign[=] name[stats]
call[name[extras]][constant[appid]] assign[=] name[appid]
call[name[extras]][constant[uuid]] assign[=] name[uuid]
call[name[extras]][constant[server_time]] assign[=] call[name[int], parameter[call[name[self].get_current_time, parameter[]]]]
if call[name[self]._send_to_kafka, parameter[name[extras]]] begin[:]
call[name[extras]][constant[success]] assign[=] constant[True]
call[name[self].logger.info, parameter[constant[Sent stats to kafka]]]
|
keyword[def] identifier[handle] ( identifier[self] , identifier[key] , identifier[value] ):
literal[string]
identifier[elements] = identifier[key] . identifier[split] ( literal[string] )
identifier[stats] = identifier[elements] [ literal[int] ]
identifier[appid] = identifier[elements] [ literal[int] ]
identifier[uuid] = identifier[value]
identifier[extras] = identifier[self] . identifier[get_log_dict] ( literal[string] , identifier[appid] , identifier[uuid] = identifier[uuid] )
identifier[self] . identifier[logger] . identifier[info] ( literal[string] . identifier[format] ( identifier[s] = identifier[stats] ),
identifier[extra] = identifier[extras] )
identifier[extras] ={}
keyword[if] identifier[stats] == literal[string] :
identifier[extras] = identifier[self] . identifier[get_all_stats] ()
keyword[elif] identifier[stats] == literal[string] :
identifier[extras] = identifier[self] . identifier[get_kafka_monitor_stats] ()
keyword[elif] identifier[stats] == literal[string] :
identifier[extras] = identifier[self] . identifier[get_redis_monitor_stats] ()
keyword[elif] identifier[stats] == literal[string] :
identifier[extras] = identifier[self] . identifier[get_crawler_stats] ()
keyword[elif] identifier[stats] == literal[string] :
identifier[extras] = identifier[self] . identifier[get_spider_stats] ()
keyword[elif] identifier[stats] == literal[string] :
identifier[extras] = identifier[self] . identifier[get_machine_stats] ()
keyword[elif] identifier[stats] == literal[string] :
identifier[extras] = identifier[self] . identifier[get_queue_stats] ()
keyword[elif] identifier[stats] == literal[string] :
identifier[extras] = identifier[self] . identifier[get_rest_stats] ()
keyword[else] :
identifier[self] . identifier[logger] . identifier[warn] ( literal[string] . identifier[format] ( identifier[s] = identifier[stats] ),
identifier[extra] = identifier[extras] )
keyword[return]
identifier[extras] [ literal[string] ]= identifier[stats]
identifier[extras] [ literal[string] ]= identifier[appid]
identifier[extras] [ literal[string] ]= identifier[uuid]
identifier[extras] [ literal[string] ]= identifier[int] ( identifier[self] . identifier[get_current_time] ())
keyword[if] identifier[self] . identifier[_send_to_kafka] ( identifier[extras] ):
identifier[extras] [ literal[string] ]= keyword[True]
identifier[self] . identifier[logger] . identifier[info] ( literal[string] , identifier[extra] = identifier[extras] )
keyword[else] :
identifier[extras] [ literal[string] ]= keyword[False]
identifier[self] . identifier[logger] . identifier[error] ( literal[string] , identifier[extra] = identifier[extras] )
|
def handle(self, key, value):
"""
Processes a valid stats request
@param key: The key that matched the request
@param value: The value associated with the key
"""
# break down key
elements = key.split(':')
stats = elements[1]
appid = elements[2]
uuid = value
# log we received the stats request
extras = self.get_log_dict('stats', appid, uuid=uuid)
self.logger.info('Received {s} stats request'.format(s=stats), extra=extras)
extras = {}
if stats == 'all':
extras = self.get_all_stats() # depends on [control=['if'], data=[]]
elif stats == 'kafka-monitor':
extras = self.get_kafka_monitor_stats() # depends on [control=['if'], data=[]]
elif stats == 'redis-monitor':
extras = self.get_redis_monitor_stats() # depends on [control=['if'], data=[]]
elif stats == 'crawler':
extras = self.get_crawler_stats() # depends on [control=['if'], data=[]]
elif stats == 'spider':
extras = self.get_spider_stats() # depends on [control=['if'], data=[]]
elif stats == 'machine':
extras = self.get_machine_stats() # depends on [control=['if'], data=[]]
elif stats == 'queue':
extras = self.get_queue_stats() # depends on [control=['if'], data=[]]
elif stats == 'rest':
extras = self.get_rest_stats() # depends on [control=['if'], data=[]]
else:
self.logger.warn('Received invalid stats request: {s}'.format(s=stats), extra=extras)
return
extras['stats'] = stats
extras['appid'] = appid
extras['uuid'] = uuid
extras['server_time'] = int(self.get_current_time())
if self._send_to_kafka(extras):
extras['success'] = True
self.logger.info('Sent stats to kafka', extra=extras) # depends on [control=['if'], data=[]]
else:
extras['success'] = False
self.logger.error('Failed to send stats to kafka', extra=extras)
|
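The docstring of `handle` above leaves the key format implicit; from the `key.split(':')` indexing, the expected layout is `<prefix>:<stats-type>:<appid>`, with the request uuid arriving as `value`. A standalone illustration of that parsing (the concrete strings are made up).

```python
# Standalone illustration of the key layout handle() expects; values are hypothetical.
key = "statsrequest:kafka-monitor:testapp"
value = "57a9c2ee-0000-4ae9-a1fc-example-uuid"

elements = key.split(":")
stats = elements[1]   # "kafka-monitor" -> routed to get_kafka_monitor_stats()
appid = elements[2]   # "testapp"
uuid = value

print(stats, appid, uuid)
```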
def reload_module(module):
"""
Reload the Python module
"""
try:
# For Python 2.x
reload(module)
except (ImportError, NameError):
# For <= Python3.3:
import imp
imp.reload(module)
except (ImportError, NameError):
# For >= Python3.4
import importlib
importlib.reload(module)
|
def function[reload_module, parameter[module]]:
constant[
Reload the Python module
]
<ast.Try object at 0x7da1b1fbb070>
|
keyword[def] identifier[reload_module] ( identifier[module] ):
literal[string]
keyword[try] :
identifier[reload] ( identifier[module] )
keyword[except] ( identifier[ImportError] , identifier[NameError] ):
keyword[import] identifier[imp]
identifier[imp] . identifier[reload] ( identifier[module] )
keyword[except] ( identifier[ImportError] , identifier[NameError] ):
keyword[import] identifier[importlib]
identifier[importlib] . identifier[reload] ( identifier[module] )
|
def reload_module(module):
"""
Reload the Python module
"""
try:
# For Python 2.x
reload(module) # depends on [control=['try'], data=[]]
except (ImportError, NameError):
# For <= Python3.3:
import imp
imp.reload(module) # depends on [control=['except'], data=[]]
except (ImportError, NameError):
# For >= Python3.4
import importlib
importlib.reload(module) # depends on [control=['except'], data=[]]
|
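A quick usage note for `reload_module` above: on Python 3 the bare `reload(...)` call raises `NameError`, so control falls into the first `except` branch and uses `imp.reload`; the second, identical `except` clause can never run, because it only guards the `try` body. A minimal sketch, assuming `reload_module` itself is importable from wherever this row's module lives.

```python
# Hedged sketch: assumes reload_module() from the row above is in scope.
import json

# ... imagine json (or any already-imported module) was edited on disk ...
reload_module(json)   # Python 2: builtin reload(); Python 3: imp.reload() under the hood
print(json.dumps({"reloaded": True}))
```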
def _generate_injection_cmd(self, meta_data):
""" example injector response:
[
{
"config_path":"/configs/cli_config.py",
"path":"/opt/cia-cli/cia_sdk/config",
"user":"injector",
"status_code":201,
"chmod":755,
"checksum":"a4bcf3939dd3a6aa4e04ee9f92131df4",
"created":"2015-08-04T22:48:25Z",
"updated":"2015-08-04T22:48:25Z",
"group":"injector"
}
]
"""
cmd = []
self._validate_templates(meta_data)
for i, config_data in enumerate(meta_data):
container_path = os.path.join(config_data['path'], config_data['name'])
cmd.append("mkdir")
cmd.append("-p")
cmd.append(config_data['path'])
cmd.append("&&")
cmd.append("cp")
cmd.append("-f")
cmd.append(config_data['config_path'])
cmd.append(container_path)
cmd.append("&&")
cmd.append("chown")
cmd.append("{0}:{1}".format(config_data['user'], config_data['group']))
cmd.append(container_path)
cmd.append("&&")
cmd.append("chmod")
cmd.append(six.text_type(config_data['chmod']))
cmd.append(container_path)
if i + 1 < len(meta_data):
cmd.append("&&")
cmd = ["-c", " ".join(cmd)]
return cmd
|
def function[_generate_injection_cmd, parameter[self, meta_data]]:
constant[ example injector response:
[
{
"config_path":"/configs/cli_config.py",
"path":"/opt/cia-cli/cia_sdk/config",
"user":"injector",
"status_code":201,
"chmod":755,
"checksum":"a4bcf3939dd3a6aa4e04ee9f92131df4",
"created":"2015-08-04T22:48:25Z",
"updated":"2015-08-04T22:48:25Z",
"group":"injector"
}
]
]
variable[cmd] assign[=] list[[]]
call[name[self]._validate_templates, parameter[name[meta_data]]]
for taget[tuple[[<ast.Name object at 0x7da1b22983d0>, <ast.Name object at 0x7da1b2298d00>]]] in starred[call[name[enumerate], parameter[name[meta_data]]]] begin[:]
variable[container_path] assign[=] call[name[os].path.join, parameter[call[name[config_data]][constant[path]], call[name[config_data]][constant[name]]]]
call[name[cmd].append, parameter[constant[mkdir]]]
call[name[cmd].append, parameter[constant[-p]]]
call[name[cmd].append, parameter[call[name[config_data]][constant[path]]]]
call[name[cmd].append, parameter[constant[&&]]]
call[name[cmd].append, parameter[constant[cp]]]
call[name[cmd].append, parameter[constant[-f]]]
call[name[cmd].append, parameter[call[name[config_data]][constant[config_path]]]]
call[name[cmd].append, parameter[name[container_path]]]
call[name[cmd].append, parameter[constant[&&]]]
call[name[cmd].append, parameter[constant[chown]]]
call[name[cmd].append, parameter[call[constant[{0}:{1}].format, parameter[call[name[config_data]][constant[user]], call[name[config_data]][constant[group]]]]]]
call[name[cmd].append, parameter[name[container_path]]]
call[name[cmd].append, parameter[constant[&&]]]
call[name[cmd].append, parameter[constant[chmod]]]
call[name[cmd].append, parameter[call[name[six].text_type, parameter[call[name[config_data]][constant[chmod]]]]]]
call[name[cmd].append, parameter[name[container_path]]]
if compare[binary_operation[name[i] + constant[1]] less[<] call[name[len], parameter[name[meta_data]]]] begin[:]
call[name[cmd].append, parameter[constant[&&]]]
variable[cmd] assign[=] list[[<ast.Constant object at 0x7da1b228c550>, <ast.Call object at 0x7da1b228c190>]]
return[name[cmd]]
|
keyword[def] identifier[_generate_injection_cmd] ( identifier[self] , identifier[meta_data] ):
literal[string]
identifier[cmd] =[]
identifier[self] . identifier[_validate_templates] ( identifier[meta_data] )
keyword[for] identifier[i] , identifier[config_data] keyword[in] identifier[enumerate] ( identifier[meta_data] ):
identifier[container_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[config_data] [ literal[string] ], identifier[config_data] [ literal[string] ])
identifier[cmd] . identifier[append] ( literal[string] )
identifier[cmd] . identifier[append] ( literal[string] )
identifier[cmd] . identifier[append] ( identifier[config_data] [ literal[string] ])
identifier[cmd] . identifier[append] ( literal[string] )
identifier[cmd] . identifier[append] ( literal[string] )
identifier[cmd] . identifier[append] ( literal[string] )
identifier[cmd] . identifier[append] ( identifier[config_data] [ literal[string] ])
identifier[cmd] . identifier[append] ( identifier[container_path] )
identifier[cmd] . identifier[append] ( literal[string] )
identifier[cmd] . identifier[append] ( literal[string] )
identifier[cmd] . identifier[append] ( literal[string] . identifier[format] ( identifier[config_data] [ literal[string] ], identifier[config_data] [ literal[string] ]))
identifier[cmd] . identifier[append] ( identifier[container_path] )
identifier[cmd] . identifier[append] ( literal[string] )
identifier[cmd] . identifier[append] ( literal[string] )
identifier[cmd] . identifier[append] ( identifier[six] . identifier[text_type] ( identifier[config_data] [ literal[string] ]))
identifier[cmd] . identifier[append] ( identifier[container_path] )
keyword[if] identifier[i] + literal[int] < identifier[len] ( identifier[meta_data] ):
identifier[cmd] . identifier[append] ( literal[string] )
identifier[cmd] =[ literal[string] , literal[string] . identifier[join] ( identifier[cmd] )]
keyword[return] identifier[cmd]
|
def _generate_injection_cmd(self, meta_data):
""" example injector response:
[
{
"config_path":"/configs/cli_config.py",
"path":"/opt/cia-cli/cia_sdk/config",
"user":"injector",
"status_code":201,
"chmod":755,
"checksum":"a4bcf3939dd3a6aa4e04ee9f92131df4",
"created":"2015-08-04T22:48:25Z",
"updated":"2015-08-04T22:48:25Z",
"group":"injector"
}
]
"""
cmd = []
self._validate_templates(meta_data)
for (i, config_data) in enumerate(meta_data):
container_path = os.path.join(config_data['path'], config_data['name'])
cmd.append('mkdir')
cmd.append('-p')
cmd.append(config_data['path'])
cmd.append('&&')
cmd.append('cp')
cmd.append('-f')
cmd.append(config_data['config_path'])
cmd.append(container_path)
cmd.append('&&')
cmd.append('chown')
cmd.append('{0}:{1}'.format(config_data['user'], config_data['group']))
cmd.append(container_path)
cmd.append('&&')
cmd.append('chmod')
cmd.append(six.text_type(config_data['chmod']))
cmd.append(container_path)
if i + 1 < len(meta_data):
cmd.append('&&') # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
cmd = ['-c', ' '.join(cmd)]
return cmd
|
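Given the sample injector response in the docstring above (plus a hypothetical `name` field, which the method reads but the sample omits), the return value is a two-element list suitable for `sh -c`; a sketch of the expected shape, with every concrete value illustrative only.

```python
# Illustrative only: one meta_data entry mirroring the docstring's sample response,
# with a hypothetical "name" key added because the method joins path + name.
meta_data = [{
    "config_path": "/configs/cli_config.py",
    "path": "/opt/cia-cli/cia_sdk/config",
    "name": "cli_config.py",   # hypothetical
    "user": "injector",
    "group": "injector",
    "chmod": 755,
}]

# Expected return value: ["-c", "<one '&&'-joined shell command>"]
expected = [
    "-c",
    "mkdir -p /opt/cia-cli/cia_sdk/config && "
    "cp -f /configs/cli_config.py /opt/cia-cli/cia_sdk/config/cli_config.py && "
    "chown injector:injector /opt/cia-cli/cia_sdk/config/cli_config.py && "
    "chmod 755 /opt/cia-cli/cia_sdk/config/cli_config.py",
]
print(expected[1])
```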
def link(self, definition, doc1, doc2, edgeAttributes, waitForSync = False) :
"A shorthand for createEdge that takes two documents as input"
if type(doc1) is DOC.Document :
if not doc1._id :
doc1.save()
doc1_id = doc1._id
else :
doc1_id = doc1
if type(doc2) is DOC.Document :
if not doc2._id :
doc2.save()
doc2_id = doc2._id
else :
doc2_id = doc2
return self.createEdge(definition, doc1_id, doc2_id, edgeAttributes, waitForSync)
|
def function[link, parameter[self, definition, doc1, doc2, edgeAttributes, waitForSync]]:
constant[A shorthand for createEdge that takes two documents as input]
if compare[call[name[type], parameter[name[doc1]]] is name[DOC].Document] begin[:]
if <ast.UnaryOp object at 0x7da1b0f5bfa0> begin[:]
call[name[doc1].save, parameter[]]
variable[doc1_id] assign[=] name[doc1]._id
if compare[call[name[type], parameter[name[doc2]]] is name[DOC].Document] begin[:]
if <ast.UnaryOp object at 0x7da1b0dc1d20> begin[:]
call[name[doc2].save, parameter[]]
variable[doc2_id] assign[=] name[doc2]._id
return[call[name[self].createEdge, parameter[name[definition], name[doc1_id], name[doc2_id], name[edgeAttributes], name[waitForSync]]]]
|
keyword[def] identifier[link] ( identifier[self] , identifier[definition] , identifier[doc1] , identifier[doc2] , identifier[edgeAttributes] , identifier[waitForSync] = keyword[False] ):
literal[string]
keyword[if] identifier[type] ( identifier[doc1] ) keyword[is] identifier[DOC] . identifier[Document] :
keyword[if] keyword[not] identifier[doc1] . identifier[_id] :
identifier[doc1] . identifier[save] ()
identifier[doc1_id] = identifier[doc1] . identifier[_id]
keyword[else] :
identifier[doc1_id] = identifier[doc1]
keyword[if] identifier[type] ( identifier[doc2] ) keyword[is] identifier[DOC] . identifier[Document] :
keyword[if] keyword[not] identifier[doc2] . identifier[_id] :
identifier[doc2] . identifier[save] ()
identifier[doc2_id] = identifier[doc2] . identifier[_id]
keyword[else] :
identifier[doc2_id] = identifier[doc2]
keyword[return] identifier[self] . identifier[createEdge] ( identifier[definition] , identifier[doc1_id] , identifier[doc2_id] , identifier[edgeAttributes] , identifier[waitForSync] )
|
def link(self, definition, doc1, doc2, edgeAttributes, waitForSync=False):
"""A shorthand for createEdge that takes two documents as input"""
if type(doc1) is DOC.Document:
if not doc1._id:
doc1.save() # depends on [control=['if'], data=[]]
doc1_id = doc1._id # depends on [control=['if'], data=[]]
else:
doc1_id = doc1
if type(doc2) is DOC.Document:
if not doc2._id:
doc2.save() # depends on [control=['if'], data=[]]
doc2_id = doc2._id # depends on [control=['if'], data=[]]
else:
doc2_id = doc2
return self.createEdge(definition, doc1_id, doc2_id, edgeAttributes, waitForSync)
|
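`link` appears to be pyArango's `Graph.link`; a heavily hedged sketch of calling it against a running ArangoDB server. The connection URL, credentials, database, graph, collection and edge-definition names below are all assumptions.

```python
# Hedged sketch: assumes pyArango and a reachable ArangoDB instance; every name
# here (URL, credentials, "school", "SocialGraph", "Persons", "knows") is hypothetical.
from pyArango.connection import Connection

conn = Connection(arangoURL="http://127.0.0.1:8529", username="root", password="")
db = conn["school"]
graph = db.graphs["SocialGraph"]

alice = db["Persons"].createDocument({"name": "Alice"})
bob = db["Persons"].createDocument({"name": "Bob"})

# link() saves unsaved documents first, then calls createEdge() with their _ids.
graph.link("knows", alice, bob, {"since": 2019})
```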
def _from_dict(cls, _dict):
"""Initialize a LeadingSentence object from a json dictionary."""
args = {}
if 'text' in _dict:
args['text'] = _dict.get('text')
if 'location' in _dict:
args['location'] = Location._from_dict(_dict.get('location'))
if 'element_locations' in _dict:
args['element_locations'] = [
ElementLocations._from_dict(x)
for x in (_dict.get('element_locations'))
]
return cls(**args)
|
def function[_from_dict, parameter[cls, _dict]]:
constant[Initialize a LeadingSentence object from a json dictionary.]
variable[args] assign[=] dictionary[[], []]
if compare[constant[text] in name[_dict]] begin[:]
call[name[args]][constant[text]] assign[=] call[name[_dict].get, parameter[constant[text]]]
if compare[constant[location] in name[_dict]] begin[:]
call[name[args]][constant[location]] assign[=] call[name[Location]._from_dict, parameter[call[name[_dict].get, parameter[constant[location]]]]]
if compare[constant[element_locations] in name[_dict]] begin[:]
call[name[args]][constant[element_locations]] assign[=] <ast.ListComp object at 0x7da1b23469b0>
return[call[name[cls], parameter[]]]
|
keyword[def] identifier[_from_dict] ( identifier[cls] , identifier[_dict] ):
literal[string]
identifier[args] ={}
keyword[if] literal[string] keyword[in] identifier[_dict] :
identifier[args] [ literal[string] ]= identifier[_dict] . identifier[get] ( literal[string] )
keyword[if] literal[string] keyword[in] identifier[_dict] :
identifier[args] [ literal[string] ]= identifier[Location] . identifier[_from_dict] ( identifier[_dict] . identifier[get] ( literal[string] ))
keyword[if] literal[string] keyword[in] identifier[_dict] :
identifier[args] [ literal[string] ]=[
identifier[ElementLocations] . identifier[_from_dict] ( identifier[x] )
keyword[for] identifier[x] keyword[in] ( identifier[_dict] . identifier[get] ( literal[string] ))
]
keyword[return] identifier[cls] (** identifier[args] )
|
def _from_dict(cls, _dict):
"""Initialize a LeadingSentence object from a json dictionary."""
args = {}
if 'text' in _dict:
args['text'] = _dict.get('text') # depends on [control=['if'], data=['_dict']]
if 'location' in _dict:
args['location'] = Location._from_dict(_dict.get('location')) # depends on [control=['if'], data=['_dict']]
if 'element_locations' in _dict:
args['element_locations'] = [ElementLocations._from_dict(x) for x in _dict.get('element_locations')] # depends on [control=['if'], data=['_dict']]
return cls(**args)
|
def get_class_that_defined_method(meth):
""" Gets the class object which defined a given method
@meth: a class method
-> owner class object
"""
if inspect.ismethod(meth):
for cls in inspect.getmro(meth.__self__.__class__):
if cls.__dict__.get(meth.__name__) is meth:
return cls
meth = meth.__func__ # fallback to __qualname__ parsing
if inspect.isfunction(meth):
cls = getattr(
inspect.getmodule(meth),
meth.__qualname__.split('.<locals>', 1)[0].rsplit('.', 1)[0])
if isinstance(cls, type):
return cls
return None
|
def function[get_class_that_defined_method, parameter[meth]]:
constant[ Gets the class object which defined a given method
@meth: a class method
-> owner class object
]
if call[name[inspect].ismethod, parameter[name[meth]]] begin[:]
for taget[name[cls]] in starred[call[name[inspect].getmro, parameter[name[meth].__self__.__class__]]] begin[:]
if compare[call[name[cls].__dict__.get, parameter[name[meth].__name__]] is name[meth]] begin[:]
return[name[cls]]
variable[meth] assign[=] name[meth].__func__
if call[name[inspect].isfunction, parameter[name[meth]]] begin[:]
variable[cls] assign[=] call[name[getattr], parameter[call[name[inspect].getmodule, parameter[name[meth]]], call[call[call[call[name[meth].__qualname__.split, parameter[constant[.<locals>], constant[1]]]][constant[0]].rsplit, parameter[constant[.], constant[1]]]][constant[0]]]]
if call[name[isinstance], parameter[name[cls], name[type]]] begin[:]
return[name[cls]]
return[constant[None]]
|
keyword[def] identifier[get_class_that_defined_method] ( identifier[meth] ):
literal[string]
keyword[if] identifier[inspect] . identifier[ismethod] ( identifier[meth] ):
keyword[for] identifier[cls] keyword[in] identifier[inspect] . identifier[getmro] ( identifier[meth] . identifier[__self__] . identifier[__class__] ):
keyword[if] identifier[cls] . identifier[__dict__] . identifier[get] ( identifier[meth] . identifier[__name__] ) keyword[is] identifier[meth] :
keyword[return] identifier[cls]
identifier[meth] = identifier[meth] . identifier[__func__]
keyword[if] identifier[inspect] . identifier[isfunction] ( identifier[meth] ):
identifier[cls] = identifier[getattr] (
identifier[inspect] . identifier[getmodule] ( identifier[meth] ),
identifier[meth] . identifier[__qualname__] . identifier[split] ( literal[string] , literal[int] )[ literal[int] ]. identifier[rsplit] ( literal[string] , literal[int] )[ literal[int] ])
keyword[if] identifier[isinstance] ( identifier[cls] , identifier[type] ):
keyword[return] identifier[cls]
keyword[return] keyword[None]
|
def get_class_that_defined_method(meth):
""" Gets the class object which defined a given method
@meth: a class method
-> owner class object
"""
if inspect.ismethod(meth):
for cls in inspect.getmro(meth.__self__.__class__):
if cls.__dict__.get(meth.__name__) is meth:
return cls # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['cls']]
meth = meth.__func__ # fallback to __qualname__ parsing # depends on [control=['if'], data=[]]
if inspect.isfunction(meth):
cls = getattr(inspect.getmodule(meth), meth.__qualname__.split('.<locals>', 1)[0].rsplit('.', 1)[0])
if isinstance(cls, type):
return cls # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return None
|
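A self-contained check for `get_class_that_defined_method` above (it assumes the function and its `import inspect` are already in scope, e.g. defined earlier in the same module).

```python
# Assumes get_class_that_defined_method() and `import inspect` from the row above.
class Base:
    def greet(self):
        return "hi"

class Child(Base):
    pass

# Both the bound method and the plain function resolve to the defining class;
# on Python 3 this goes through the __qualname__ fallback ("Base.greet" -> Base).
print(get_class_that_defined_method(Child().greet))   # <class '__main__.Base'>
print(get_class_that_defined_method(Child.greet))     # <class '__main__.Base'>
```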
def get_configs(args, command_args, ansible_args=()):
"""
Glob the current directory for Molecule config files, instantiate config
objects, and returns a list.
:param args: A dict of options, arguments and commands from the CLI.
:param command_args: A dict of options passed to the subcommand from
the CLI.
:param ansible_args: An optional tuple of arguments provided to the
`ansible-playbook` command.
:return: list
"""
configs = [
config.Config(
molecule_file=util.abs_path(c),
args=args,
command_args=command_args,
ansible_args=ansible_args,
) for c in glob.glob(MOLECULE_GLOB)
]
_verify_configs(configs)
return configs
|
def function[get_configs, parameter[args, command_args, ansible_args]]:
constant[
Glob the current directory for Molecule config files, instantiate config
objects, and returns a list.
:param args: A dict of options, arguments and commands from the CLI.
:param command_args: A dict of options passed to the subcommand from
the CLI.
:param ansible_args: An optional tuple of arguments provided to the
`ansible-playbook` command.
:return: list
]
variable[configs] assign[=] <ast.ListComp object at 0x7da20cabe560>
call[name[_verify_configs], parameter[name[configs]]]
return[name[configs]]
|
keyword[def] identifier[get_configs] ( identifier[args] , identifier[command_args] , identifier[ansible_args] =()):
literal[string]
identifier[configs] =[
identifier[config] . identifier[Config] (
identifier[molecule_file] = identifier[util] . identifier[abs_path] ( identifier[c] ),
identifier[args] = identifier[args] ,
identifier[command_args] = identifier[command_args] ,
identifier[ansible_args] = identifier[ansible_args] ,
) keyword[for] identifier[c] keyword[in] identifier[glob] . identifier[glob] ( identifier[MOLECULE_GLOB] )
]
identifier[_verify_configs] ( identifier[configs] )
keyword[return] identifier[configs]
|
def get_configs(args, command_args, ansible_args=()):
"""
Glob the current directory for Molecule config files, instantiate config
objects, and returns a list.
:param args: A dict of options, arguments and commands from the CLI.
:param command_args: A dict of options passed to the subcommand from
the CLI.
:param ansible_args: An optional tuple of arguments provided to the
`ansible-playbook` command.
:return: list
"""
configs = [config.Config(molecule_file=util.abs_path(c), args=args, command_args=command_args, ansible_args=ansible_args) for c in glob.glob(MOLECULE_GLOB)]
_verify_configs(configs)
return configs
|
def get_jobs(self, prefix=None):
""" Lists all the jobs registered with Nomad.
https://www.nomadproject.io/docs/http/jobs.html
arguments:
- prefix :(str) optional, specifies a string to filter jobs on based on a prefix.
This is specified as a querystring parameter.
returns: list
raises:
- nomad.api.exceptions.BaseNomadException
- nomad.api.exceptions.URLNotFoundNomadException
"""
params = {"prefix": prefix}
return self.request(method="get", params=params).json()
|
def function[get_jobs, parameter[self, prefix]]:
constant[ Lists all the jobs registered with Nomad.
https://www.nomadproject.io/docs/http/jobs.html
arguments:
- prefix :(str) optional, specifies a string to filter jobs on based on a prefix.
This is specified as a querystring parameter.
returns: list
raises:
- nomad.api.exceptions.BaseNomadException
- nomad.api.exceptions.URLNotFoundNomadException
]
variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da20c6e7e20>], [<ast.Name object at 0x7da20c6e68c0>]]
return[call[call[name[self].request, parameter[]].json, parameter[]]]
|
keyword[def] identifier[get_jobs] ( identifier[self] , identifier[prefix] = keyword[None] ):
literal[string]
identifier[params] ={ literal[string] : identifier[prefix] }
keyword[return] identifier[self] . identifier[request] ( identifier[method] = literal[string] , identifier[params] = identifier[params] ). identifier[json] ()
|
def get_jobs(self, prefix=None):
""" Lists all the jobs registered with Nomad.
https://www.nomadproject.io/docs/http/jobs.html
arguments:
- prefix :(str) optional, specifies a string to filter jobs on based on a prefix.
This is specified as a querystring parameter.
returns: list
raises:
- nomad.api.exceptions.BaseNomadException
- nomad.api.exceptions.URLNotFoundNomadException
"""
params = {'prefix': prefix}
return self.request(method='get', params=params).json()
|
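`get_jobs` is the jobs-listing call from the python-nomad client; a hedged sketch of using it through the top-level client object (the agent address and the running Nomad agent are assumptions).

```python
# Hedged sketch: assumes the python-nomad package and a reachable Nomad agent.
import nomad

n = nomad.Nomad(host="127.0.0.1", timeout=5)     # hypothetical agent address

all_jobs = n.jobs.get_jobs()                     # GET /v1/jobs
demo_jobs = n.jobs.get_jobs(prefix="example")    # adds ?prefix=example

for job in demo_jobs:
    print(job["ID"], job["Status"])
```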
def isoformat(self, sep='T'):
"""
Formats the date as "%Y-%m-%d %H:%M:%S" with the sep param between the
date and time portions
:param sep:
A single character of the separator to place between the date and
time
:return:
The formatted datetime as a unicode string in Python 3 and a byte
string in Python 2
"""
if self.microsecond == 0:
return self.strftime('0000-%%m-%%d%s%%H:%%M:%%S' % sep)
return self.strftime('0000-%%m-%%d%s%%H:%%M:%%S.%%f' % sep)
|
def function[isoformat, parameter[self, sep]]:
constant[
Formats the date as "%Y-%m-%d %H:%M:%S" with the sep param between the
date and time portions
:param sep:
A single character of the separator to place between the date and
time
:return:
The formatted datetime as a unicode string in Python 3 and a byte
string in Python 2
]
if compare[name[self].microsecond equal[==] constant[0]] begin[:]
return[call[name[self].strftime, parameter[binary_operation[constant[0000-%%m-%%d%s%%H:%%M:%%S] <ast.Mod object at 0x7da2590d6920> name[sep]]]]]
return[call[name[self].strftime, parameter[binary_operation[constant[0000-%%m-%%d%s%%H:%%M:%%S.%%f] <ast.Mod object at 0x7da2590d6920> name[sep]]]]]
|
keyword[def] identifier[isoformat] ( identifier[self] , identifier[sep] = literal[string] ):
literal[string]
keyword[if] identifier[self] . identifier[microsecond] == literal[int] :
keyword[return] identifier[self] . identifier[strftime] ( literal[string] % identifier[sep] )
keyword[return] identifier[self] . identifier[strftime] ( literal[string] % identifier[sep] )
|
def isoformat(self, sep='T'):
"""
Formats the date as "%Y-%m-%d %H:%M:%S" with the sep param between the
date and time portions
:param sep:
A single character of the separator to place between the date and
time
:return:
The formatted datetime as a unicode string in Python 3 and a byte
string in Python 2
"""
if self.microsecond == 0:
return self.strftime('0000-%%m-%%d%s%%H:%%M:%%S' % sep) # depends on [control=['if'], data=[]]
return self.strftime('0000-%%m-%%d%s%%H:%%M:%%S.%%f' % sep)
|
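The hard-coded `0000-` prefix suggests this `isoformat` belongs to asn1crypto's year-zero `extended_datetime` helper; a hedged sketch under that assumption (the `asn1crypto.util` import path and constructor signature are assumptions).

```python
# Hedged sketch: assumes asn1crypto.util.extended_datetime, which models the
# year 0000 that datetime.datetime cannot represent.
from asn1crypto.util import extended_datetime

dt = extended_datetime(0, 12, 31, 23, 59, 59)
print(dt.isoformat())          # 0000-12-31T23:59:59
print(dt.isoformat(sep=" "))   # 0000-12-31 23:59:59
```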
def key_set(self, predicate=None):
"""
Returns a List clone of the keys contained in this map or the keys of the entries filtered with the predicate if
provided.
**Warning:
The list is NOT backed by the map, so changes to the map are NOT reflected in the list, and vice-versa.**
:param predicate: (Predicate), predicate to filter the entries (optional).
:return: (Sequence), a list of the clone of the keys.
.. seealso:: :class:`~hazelcast.serialization.predicate.Predicate` for more info about predicates.
"""
if predicate:
predicate_data = self._to_data(predicate)
return self._encode_invoke(map_key_set_with_predicate_codec, predicate=predicate_data)
else:
return self._encode_invoke(map_key_set_codec)
|
def function[key_set, parameter[self, predicate]]:
constant[
Returns a List clone of the keys contained in this map or the keys of the entries filtered with the predicate if
provided.
**Warning:
The list is NOT backed by the map, so changes to the map are NOT reflected in the list, and vice-versa.**
:param predicate: (Predicate), predicate to filter the entries (optional).
:return: (Sequence), a list of the clone of the keys.
.. seealso:: :class:`~hazelcast.serialization.predicate.Predicate` for more info about predicates.
]
if name[predicate] begin[:]
variable[predicate_data] assign[=] call[name[self]._to_data, parameter[name[predicate]]]
return[call[name[self]._encode_invoke, parameter[name[map_key_set_with_predicate_codec]]]]
|
keyword[def] identifier[key_set] ( identifier[self] , identifier[predicate] = keyword[None] ):
literal[string]
keyword[if] identifier[predicate] :
identifier[predicate_data] = identifier[self] . identifier[_to_data] ( identifier[predicate] )
keyword[return] identifier[self] . identifier[_encode_invoke] ( identifier[map_key_set_with_predicate_codec] , identifier[predicate] = identifier[predicate_data] )
keyword[else] :
keyword[return] identifier[self] . identifier[_encode_invoke] ( identifier[map_key_set_codec] )
|
def key_set(self, predicate=None):
"""
Returns a List clone of the keys contained in this map or the keys of the entries filtered with the predicate if
provided.
**Warning:
The list is NOT backed by the map, so changes to the map are NOT reflected in the list, and vice-versa.**
:param predicate: (Predicate), predicate to filter the entries (optional).
:return: (Sequence), a list of the clone of the keys.
.. seealso:: :class:`~hazelcast.serialization.predicate.Predicate` for more info about predicates.
"""
if predicate:
predicate_data = self._to_data(predicate)
return self._encode_invoke(map_key_set_with_predicate_codec, predicate=predicate_data) # depends on [control=['if'], data=[]]
else:
return self._encode_invoke(map_key_set_codec)
|
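`key_set` is from the Hazelcast Python client's distributed Map proxy; a hedged sketch against a locally running cluster (client bootstrap, map name and entries are assumptions).

```python
# Hedged sketch: assumes the hazelcast-python-client package and a cluster
# reachable with default settings.
import hazelcast

client = hazelcast.HazelcastClient()
customers = client.get_map("customers").blocking()

customers.put("c1", "Berlin")
customers.put("c2", "Oslo")

# Returns a detached snapshot of the keys; a Predicate from
# hazelcast.serialization.predicate could be passed to filter them.
print(sorted(customers.key_set()))
client.shutdown()
```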
def notConnectedNodes(self) -> Set[str]:
"""
Returns the names of nodes in the registry this node is NOT connected
to.
"""
return set(self.registry.keys()) - self.conns
|
def function[notConnectedNodes, parameter[self]]:
constant[
Returns the names of nodes in the registry this node is NOT connected
to.
]
return[binary_operation[call[name[set], parameter[call[name[self].registry.keys, parameter[]]]] - name[self].conns]]
|
keyword[def] identifier[notConnectedNodes] ( identifier[self] )-> identifier[Set] [ identifier[str] ]:
literal[string]
keyword[return] identifier[set] ( identifier[self] . identifier[registry] . identifier[keys] ())- identifier[self] . identifier[conns]
|
def notConnectedNodes(self) -> Set[str]:
"""
Returns the names of nodes in the registry this node is NOT connected
to.
"""
return set(self.registry.keys()) - self.conns
|
def phot(fits_filename, x_in, y_in, aperture=15, sky=20, swidth=10, apcor=0.3,
maxcount=30000.0, exptime=1.0, zmag=None, extno=0, centroid=True):
"""
Compute the centroids and magnitudes of a bunch of sources on a fits image.
:rtype : astropy.table.Table
:param fits_filename: Name of fits image to measure source photometry on.
:type fits_filename: str
:param x_in: x location of source to measure
:type x_in: float, numpy.array
:param y_in: y location of source to measure
:type y_in: float, numpy.array
:param aperture: radius of circular aperture to use.
:type aperture: float
:param sky: radius of inner sky annulus
:type sky: float
:param swidth: width of the sky annulus
:type swidth: float
:param apcor: Aperture correction to take aperture flux to full flux.
:type apcor: float
:param maxcount: maximum linearity in the image.
:type maxcount: float
:param exptime: exposure time, relative to zmag supplied
:type exptime: float
:param zmag: zeropoint magnitude
:param extno: extension of fits_filename the x/y location refers to.
"""
if not hasattr(x_in, '__iter__'):
x_in = [x_in, ]
if not hasattr(y_in, '__iter__'):
y_in = [y_in, ]
if (not os.path.exists(fits_filename) and
not fits_filename.endswith(".fits")):
# For convenience, see if we just forgot to provide the extension
fits_filename += ".fits"
try:
input_hdulist = fits.open(fits_filename)
except Exception as err:
logger.debug(str(err))
raise TaskError("Failed to open input image: %s" % err.message)
# get the filter for this image
filter_name = input_hdulist[extno].header.get('FILTER', 'DEFAULT')
# Some nominal CFHT zeropoints that might be useful
zeropoints = {"I": 25.77,
"R": 26.07,
"V": 26.07,
"B": 25.92,
"DEFAULT": 26.0,
"g.MP9401": 32.0,
'r.MP9601': 31.9,
'gri.MP9603': 33.520}
if zmag is None:
logger.warning("No zmag supplied to daophot, looking for header or default values.")
zmag = input_hdulist[extno].header.get('PHOTZP', zeropoints[filter_name])
logger.warning("Setting zmag to: {}".format(zmag))
# check for magic 'zeropoint.used' files
for zpu_file in ["{}.zeropoint.used".format(os.path.splitext(fits_filename)[0]), "zeropoint.used"]:
if os.access(zpu_file, os.R_OK):
with open(zpu_file) as zpu_fh:
zmag = float(zpu_fh.read())
logger.warning("Using file {} to set zmag to: {}".format(zpu_file, zmag))
break
photzp = input_hdulist[extno].header.get('PHOTZP', zeropoints.get(filter_name, zeropoints["DEFAULT"]))
if zmag != photzp:
logger.warning(("zmag sent to daophot: ({}) "
"doesn't match PHOTZP value in image header: ({})".format(zmag, photzp)))
# setup IRAF to do the magnitude/centroid measurements
iraf.set(uparm="./")
iraf.digiphot()
iraf.apphot()
iraf.daophot(_doprint=0)
iraf.photpars.apertures = aperture
iraf.photpars.zmag = zmag
iraf.datapars.datamin = 0
iraf.datapars.datamax = maxcount
iraf.datapars.exposur = ""
iraf.datapars.itime = exptime
iraf.fitskypars.annulus = sky
iraf.fitskypars.dannulus = swidth
iraf.fitskypars.salgorithm = "mode"
iraf.fitskypars.sloclip = 5.0
iraf.fitskypars.shiclip = 5.0
if centroid:
iraf.centerpars.calgori = "centroid"
iraf.centerpars.cbox = 5.
iraf.centerpars.cthreshold = 0.
iraf.centerpars.maxshift = 2.
iraf.centerpars.clean = 'no'
else:
iraf.centerpars.calgori = "none"
iraf.phot.update = 'no'
iraf.phot.verbose = 'no'
iraf.phot.verify = 'no'
iraf.phot.interactive = 'no'
# Used for passing the input coordinates
coofile = tempfile.NamedTemporaryFile(suffix=".coo", delete=False)
for i in range(len(x_in)):
coofile.write("%f %f \n" % (x_in[i], y_in[i]))
coofile.flush()
# Used for receiving the results of the task
# mag_fd, mag_path = tempfile.mkstemp(suffix=".mag")
magfile = tempfile.NamedTemporaryFile(suffix=".mag", delete=False)
# Close the temp files before sending to IRAF due to docstring:
# "Whether the name can be used to open the file a second time, while
# the named temporary file is still open, varies across platforms"
coofile.close()
magfile.close()
os.remove(magfile.name)
iraf.phot(fits_filename+"[{}]".format(extno), coofile.name, magfile.name)
pdump_out = ascii.read(magfile.name, format='daophot')
logging.debug("PHOT FILE:\n"+str(pdump_out))
if not len(pdump_out) > 0:
mag_content = open(magfile.name).read()
raise TaskError("photometry failed. {}".format(mag_content))
# apply the aperture correction
pdump_out['MAG'] -= apcor
# if pdump_out['PIER'][0] != 0 or pdump_out['SIER'][0] != 0 or pdump_out['CIER'][0] != 0:
# raise ValueError("Photometry failed:\n {}".format(pdump_out))
# Clean up temporary files generated by IRAF
os.remove(coofile.name)
os.remove(magfile.name)
logger.debug("Computed aperture photometry on {} objects in {}".format(len(pdump_out), fits_filename))
del input_hdulist
return pdump_out
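
A minimal usage sketch for the function above; the FITS file name, pixel coordinates and the daophot output column names are illustrative assumptions, not values taken from the source.

mags = phot('chip_00.fits', x_in=[512.3, 1024.7], y_in=[480.1, 990.5],
            aperture=10.0, sky=15.0, swidth=10.0, apcor=0.3,
            maxcount=30000.0, exptime=300.0, zmag=None, extno=1, centroid=True)
# typical daophot output columns (assumed): XCENTER, YCENTER, MAG, MERR
print(mags['XCENTER', 'YCENTER', 'MAG', 'MERR'])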
|
def function[phot, parameter[fits_filename, x_in, y_in, aperture, sky, swidth, apcor, maxcount, exptime, zmag, extno, centroid]]:
constant[
Compute the centroids and magnitudes of a bunch sources on fits image.
:rtype : astropy.table.Table
:param fits_filename: Name of fits image to measure source photometry on.
:type fits_filename: str
:param x_in: x location of source to measure
:type x_in: float, numpy.array
:param y_in: y location of source to measure
:type y_in: float, numpy.array
:param aperture: radius of circular aperture to use.
:type aperture: float
:param sky: radius of inner sky annulus
:type sky: float
:param swidth: width of the sky annulus
:type swidth: float
:param apcor: Aperture correction to take aperture flux to full flux.
:type apcor: float
:param maxcount: maximum linearity in the image.
:type maxcount: float
:param exptime: exposure time, relative to zmag supplied
:type exptime: float
:param zmag: zeropoint magnitude
:param extno: extension of fits_filename the x/y location refers to.
]
if <ast.UnaryOp object at 0x7da1b1a1cdc0> begin[:]
variable[x_in] assign[=] list[[<ast.Name object at 0x7da1b1a1f790>]]
if <ast.UnaryOp object at 0x7da1b1a1cbb0> begin[:]
variable[y_in] assign[=] list[[<ast.Name object at 0x7da1b1a1fd30>]]
if <ast.BoolOp object at 0x7da1b1a1f7f0> begin[:]
<ast.AugAssign object at 0x7da1b1a1fa60>
<ast.Try object at 0x7da1b1a1fac0>
variable[filter_name] assign[=] call[call[name[input_hdulist]][name[extno]].header.get, parameter[constant[FILTER], constant[DEFAULT]]]
variable[zeropoints] assign[=] dictionary[[<ast.Constant object at 0x7da1b1b0d5a0>, <ast.Constant object at 0x7da1b1b0d9f0>, <ast.Constant object at 0x7da1b1b0fa00>, <ast.Constant object at 0x7da1b1b0f370>, <ast.Constant object at 0x7da1b1b0d2d0>, <ast.Constant object at 0x7da1b1b0dc60>, <ast.Constant object at 0x7da1b1b0edd0>, <ast.Constant object at 0x7da1b1b0f730>], [<ast.Constant object at 0x7da1b1b0dfc0>, <ast.Constant object at 0x7da1b1b0d840>, <ast.Constant object at 0x7da1b1b0f490>, <ast.Constant object at 0x7da1b1b0f280>, <ast.Constant object at 0x7da1b1b0c4f0>, <ast.Constant object at 0x7da1b1b0e170>, <ast.Constant object at 0x7da1b1b0e920>, <ast.Constant object at 0x7da1b1b0dd80>]]
if compare[name[zmag] is constant[None]] begin[:]
call[name[logger].warning, parameter[constant[No zmag supplied to daophot, looking for header or default values.]]]
variable[zmag] assign[=] call[call[name[input_hdulist]][name[extno]].header.get, parameter[constant[PHOTZP], call[name[zeropoints]][name[filter_name]]]]
call[name[logger].warning, parameter[call[constant[Setting zmag to: {}].format, parameter[name[zmag]]]]]
for taget[name[zpu_file]] in starred[list[[<ast.Call object at 0x7da1b1969360>, <ast.Constant object at 0x7da1b1969390>]]] begin[:]
if call[name[os].access, parameter[name[zpu_file], name[os].R_OK]] begin[:]
with call[name[open], parameter[name[zpu_file]]] begin[:]
variable[zmag] assign[=] call[name[float], parameter[call[name[zpu_fh].read, parameter[]]]]
call[name[logger].warning, parameter[call[constant[Using file {} to set zmag to: {}].format, parameter[name[zpu_file], name[zmag]]]]]
break
variable[photzp] assign[=] call[call[name[input_hdulist]][name[extno]].header.get, parameter[constant[PHOTZP], call[name[zeropoints].get, parameter[name[filter_name], call[name[zeropoints]][constant[DEFAULT]]]]]]
if compare[name[zmag] not_equal[!=] name[photzp]] begin[:]
call[name[logger].warning, parameter[call[constant[zmag sent to daophot: ({}) doesn't match PHOTZP value in image header: ({})].format, parameter[name[zmag], name[photzp]]]]]
call[name[iraf].set, parameter[]]
call[name[iraf].digiphot, parameter[]]
call[name[iraf].apphot, parameter[]]
call[name[iraf].daophot, parameter[]]
name[iraf].photpars.apertures assign[=] name[aperture]
name[iraf].photpars.zmag assign[=] name[zmag]
name[iraf].datapars.datamin assign[=] constant[0]
name[iraf].datapars.datamax assign[=] name[maxcount]
name[iraf].datapars.exposur assign[=] constant[]
name[iraf].datapars.itime assign[=] name[exptime]
name[iraf].fitskypars.annulus assign[=] name[sky]
name[iraf].fitskypars.dannulus assign[=] name[swidth]
name[iraf].fitskypars.salgorithm assign[=] constant[mode]
name[iraf].fitskypars.sloclip assign[=] constant[5.0]
name[iraf].fitskypars.shiclip assign[=] constant[5.0]
if name[centroid] begin[:]
name[iraf].centerpars.calgori assign[=] constant[centroid]
name[iraf].centerpars.cbox assign[=] constant[5.0]
name[iraf].centerpars.cthreshold assign[=] constant[0.0]
name[iraf].centerpars.maxshift assign[=] constant[2.0]
name[iraf].centerpars.clean assign[=] constant[no]
name[iraf].phot.update assign[=] constant[no]
name[iraf].phot.verbose assign[=] constant[no]
name[iraf].phot.verify assign[=] constant[no]
name[iraf].phot.interactive assign[=] constant[no]
variable[coofile] assign[=] call[name[tempfile].NamedTemporaryFile, parameter[]]
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[x_in]]]]]] begin[:]
call[name[coofile].write, parameter[binary_operation[constant[%f %f
] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da1b191f280>, <ast.Subscript object at 0x7da1b191ee60>]]]]]
call[name[coofile].flush, parameter[]]
variable[magfile] assign[=] call[name[tempfile].NamedTemporaryFile, parameter[]]
call[name[coofile].close, parameter[]]
call[name[magfile].close, parameter[]]
call[name[os].remove, parameter[name[magfile].name]]
call[name[iraf].phot, parameter[binary_operation[name[fits_filename] + call[constant[[{}]].format, parameter[name[extno]]]], name[coofile].name, name[magfile].name]]
variable[pdump_out] assign[=] call[name[ascii].read, parameter[name[magfile].name]]
call[name[logging].debug, parameter[binary_operation[constant[PHOT FILE:
] + call[name[str], parameter[name[pdump_out]]]]]]
if <ast.UnaryOp object at 0x7da1b191d090> begin[:]
variable[mag_content] assign[=] call[call[name[open], parameter[name[magfile].name]].read, parameter[]]
<ast.Raise object at 0x7da1b191d630>
<ast.AugAssign object at 0x7da1b191ca60>
call[name[os].remove, parameter[name[coofile].name]]
call[name[os].remove, parameter[name[magfile].name]]
call[name[logger].debug, parameter[call[constant[Computed aperture photometry on {} objects in {}].format, parameter[call[name[len], parameter[name[pdump_out]]], name[fits_filename]]]]]
<ast.Delete object at 0x7da1b191cd90>
return[name[pdump_out]]
|
keyword[def] identifier[phot] ( identifier[fits_filename] , identifier[x_in] , identifier[y_in] , identifier[aperture] = literal[int] , identifier[sky] = literal[int] , identifier[swidth] = literal[int] , identifier[apcor] = literal[int] ,
identifier[maxcount] = literal[int] , identifier[exptime] = literal[int] , identifier[zmag] = keyword[None] , identifier[extno] = literal[int] , identifier[centroid] = keyword[True] ):
literal[string]
keyword[if] keyword[not] identifier[hasattr] ( identifier[x_in] , literal[string] ):
identifier[x_in] =[ identifier[x_in] ,]
keyword[if] keyword[not] identifier[hasattr] ( identifier[y_in] , literal[string] ):
identifier[y_in] =[ identifier[y_in] ,]
keyword[if] ( keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[fits_filename] ) keyword[and]
keyword[not] identifier[fits_filename] . identifier[endswith] ( literal[string] )):
identifier[fits_filename] += literal[string]
keyword[try] :
identifier[input_hdulist] = identifier[fits] . identifier[open] ( identifier[fits_filename] )
keyword[except] identifier[Exception] keyword[as] identifier[err] :
identifier[logger] . identifier[debug] ( identifier[str] ( identifier[err] ))
keyword[raise] identifier[TaskError] ( literal[string] % identifier[err] . identifier[message] )
identifier[filter_name] = identifier[input_hdulist] [ identifier[extno] ]. identifier[header] . identifier[get] ( literal[string] , literal[string] )
identifier[zeropoints] ={ literal[string] : literal[int] ,
literal[string] : literal[int] ,
literal[string] : literal[int] ,
literal[string] : literal[int] ,
literal[string] : literal[int] ,
literal[string] : literal[int] ,
literal[string] : literal[int] ,
literal[string] : literal[int] }
keyword[if] identifier[zmag] keyword[is] keyword[None] :
identifier[logger] . identifier[warning] ( literal[string] )
identifier[zmag] = identifier[input_hdulist] [ identifier[extno] ]. identifier[header] . identifier[get] ( literal[string] , identifier[zeropoints] [ identifier[filter_name] ])
identifier[logger] . identifier[warning] ( literal[string] . identifier[format] ( identifier[zmag] ))
keyword[for] identifier[zpu_file] keyword[in] [ literal[string] . identifier[format] ( identifier[os] . identifier[path] . identifier[splitext] ( identifier[fits_filename] )[ literal[int] ]), literal[string] ]:
keyword[if] identifier[os] . identifier[access] ( identifier[zpu_file] , identifier[os] . identifier[R_OK] ):
keyword[with] identifier[open] ( identifier[zpu_file] ) keyword[as] identifier[zpu_fh] :
identifier[zmag] = identifier[float] ( identifier[zpu_fh] . identifier[read] ())
identifier[logger] . identifier[warning] ( literal[string] . identifier[format] ( identifier[zpu_file] , identifier[zmag] ))
keyword[break]
identifier[photzp] = identifier[input_hdulist] [ identifier[extno] ]. identifier[header] . identifier[get] ( literal[string] , identifier[zeropoints] . identifier[get] ( identifier[filter_name] , identifier[zeropoints] [ literal[string] ]))
keyword[if] identifier[zmag] != identifier[photzp] :
identifier[logger] . identifier[warning] (( literal[string]
literal[string] . identifier[format] ( identifier[zmag] , identifier[photzp] )))
identifier[iraf] . identifier[set] ( identifier[uparm] = literal[string] )
identifier[iraf] . identifier[digiphot] ()
identifier[iraf] . identifier[apphot] ()
identifier[iraf] . identifier[daophot] ( identifier[_doprint] = literal[int] )
identifier[iraf] . identifier[photpars] . identifier[apertures] = identifier[aperture]
identifier[iraf] . identifier[photpars] . identifier[zmag] = identifier[zmag]
identifier[iraf] . identifier[datapars] . identifier[datamin] = literal[int]
identifier[iraf] . identifier[datapars] . identifier[datamax] = identifier[maxcount]
identifier[iraf] . identifier[datapars] . identifier[exposur] = literal[string]
identifier[iraf] . identifier[datapars] . identifier[itime] = identifier[exptime]
identifier[iraf] . identifier[fitskypars] . identifier[annulus] = identifier[sky]
identifier[iraf] . identifier[fitskypars] . identifier[dannulus] = identifier[swidth]
identifier[iraf] . identifier[fitskypars] . identifier[salgorithm] = literal[string]
identifier[iraf] . identifier[fitskypars] . identifier[sloclip] = literal[int]
identifier[iraf] . identifier[fitskypars] . identifier[shiclip] = literal[int]
keyword[if] identifier[centroid] :
identifier[iraf] . identifier[centerpars] . identifier[calgori] = literal[string]
identifier[iraf] . identifier[centerpars] . identifier[cbox] = literal[int]
identifier[iraf] . identifier[centerpars] . identifier[cthreshold] = literal[int]
identifier[iraf] . identifier[centerpars] . identifier[maxshift] = literal[int]
identifier[iraf] . identifier[centerpars] . identifier[clean] = literal[string]
keyword[else] :
identifier[iraf] . identifier[centerpars] . identifier[calgori] = literal[string]
identifier[iraf] . identifier[phot] . identifier[update] = literal[string]
identifier[iraf] . identifier[phot] . identifier[verbose] = literal[string]
identifier[iraf] . identifier[phot] . identifier[verify] = literal[string]
identifier[iraf] . identifier[phot] . identifier[interactive] = literal[string]
identifier[coofile] = identifier[tempfile] . identifier[NamedTemporaryFile] ( identifier[suffix] = literal[string] , identifier[delete] = keyword[False] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[x_in] )):
identifier[coofile] . identifier[write] ( literal[string] %( identifier[x_in] [ identifier[i] ], identifier[y_in] [ identifier[i] ]))
identifier[coofile] . identifier[flush] ()
identifier[magfile] = identifier[tempfile] . identifier[NamedTemporaryFile] ( identifier[suffix] = literal[string] , identifier[delete] = keyword[False] )
identifier[coofile] . identifier[close] ()
identifier[magfile] . identifier[close] ()
identifier[os] . identifier[remove] ( identifier[magfile] . identifier[name] )
identifier[iraf] . identifier[phot] ( identifier[fits_filename] + literal[string] . identifier[format] ( identifier[extno] ), identifier[coofile] . identifier[name] , identifier[magfile] . identifier[name] )
identifier[pdump_out] = identifier[ascii] . identifier[read] ( identifier[magfile] . identifier[name] , identifier[format] = literal[string] )
identifier[logging] . identifier[debug] ( literal[string] + identifier[str] ( identifier[pdump_out] ))
keyword[if] keyword[not] identifier[len] ( identifier[pdump_out] )> literal[int] :
identifier[mag_content] = identifier[open] ( identifier[magfile] . identifier[name] ). identifier[read] ()
keyword[raise] identifier[TaskError] ( literal[string] . identifier[format] ( identifier[mag_content] ))
identifier[pdump_out] [ literal[string] ]-= identifier[apcor]
identifier[os] . identifier[remove] ( identifier[coofile] . identifier[name] )
identifier[os] . identifier[remove] ( identifier[magfile] . identifier[name] )
identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[len] ( identifier[pdump_out] ), identifier[fits_filename] ))
keyword[del] identifier[input_hdulist]
keyword[return] identifier[pdump_out]
|
def phot(fits_filename, x_in, y_in, aperture=15, sky=20, swidth=10, apcor=0.3, maxcount=30000.0, exptime=1.0, zmag=None, extno=0, centroid=True):
"""
Compute the centroids and magnitudes of a bunch sources on fits image.
:rtype : astropy.table.Table
:param fits_filename: Name of fits image to measure source photometry on.
:type fits_filename: str
:param x_in: x location of source to measure
:type x_in: float, numpy.array
:param y_in: y location of source to measure
:type y_in: float, numpy.array
:param aperture: radius of circular aperture to use.
:type aperture: float
:param sky: radius of inner sky annulus
:type sky: float
:param swidth: width of the sky annulus
:type swidth: float
:param apcor: Aperture correction to take aperture flux to full flux.
:type apcor: float
:param maxcount: maximum linearity in the image.
:type maxcount: float
:param exptime: exposure time, relative to zmag supplied
:type exptime: float
:param zmag: zeropoint magnitude
:param extno: extension of fits_filename the x/y location refers to.
"""
if not hasattr(x_in, '__iter__'):
x_in = [x_in] # depends on [control=['if'], data=[]]
if not hasattr(y_in, '__iter__'):
y_in = [y_in] # depends on [control=['if'], data=[]]
if not os.path.exists(fits_filename) and (not fits_filename.endswith('.fits')):
# For convenience, see if we just forgot to provide the extension
fits_filename += '.fits' # depends on [control=['if'], data=[]]
try:
input_hdulist = fits.open(fits_filename) # depends on [control=['try'], data=[]]
except Exception as err:
logger.debug(str(err))
raise TaskError('Failed to open input image: %s' % err.message) # depends on [control=['except'], data=['err']]
# get the filter for this image
filter_name = input_hdulist[extno].header.get('FILTER', 'DEFAULT')
# Some nominal CFHT zeropoints that might be useful
zeropoints = {'I': 25.77, 'R': 26.07, 'V': 26.07, 'B': 25.92, 'DEFAULT': 26.0, 'g.MP9401': 32.0, 'r.MP9601': 31.9, 'gri.MP9603': 33.52}
if zmag is None:
logger.warning('No zmag supplied to daophot, looking for header or default values.')
zmag = input_hdulist[extno].header.get('PHOTZP', zeropoints[filter_name])
logger.warning('Setting zmag to: {}'.format(zmag))
# check for magic 'zeropoint.used' files
for zpu_file in ['{}.zeropoint.used'.format(os.path.splitext(fits_filename)[0]), 'zeropoint.used']:
if os.access(zpu_file, os.R_OK):
with open(zpu_file) as zpu_fh:
zmag = float(zpu_fh.read())
logger.warning('Using file {} to set zmag to: {}'.format(zpu_file, zmag))
break # depends on [control=['with'], data=['zpu_fh']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['zpu_file']] # depends on [control=['if'], data=['zmag']]
photzp = input_hdulist[extno].header.get('PHOTZP', zeropoints.get(filter_name, zeropoints['DEFAULT']))
if zmag != photzp:
logger.warning("zmag sent to daophot: ({}) doesn't match PHOTZP value in image header: ({})".format(zmag, photzp)) # depends on [control=['if'], data=['zmag', 'photzp']]
# setup IRAF to do the magnitude/centroid measurements
iraf.set(uparm='./')
iraf.digiphot()
iraf.apphot()
iraf.daophot(_doprint=0)
iraf.photpars.apertures = aperture
iraf.photpars.zmag = zmag
iraf.datapars.datamin = 0
iraf.datapars.datamax = maxcount
iraf.datapars.exposur = ''
iraf.datapars.itime = exptime
iraf.fitskypars.annulus = sky
iraf.fitskypars.dannulus = swidth
iraf.fitskypars.salgorithm = 'mode'
iraf.fitskypars.sloclip = 5.0
iraf.fitskypars.shiclip = 5.0
if centroid:
iraf.centerpars.calgori = 'centroid'
iraf.centerpars.cbox = 5.0
iraf.centerpars.cthreshold = 0.0
iraf.centerpars.maxshift = 2.0
iraf.centerpars.clean = 'no' # depends on [control=['if'], data=[]]
else:
iraf.centerpars.calgori = 'none'
iraf.phot.update = 'no'
iraf.phot.verbose = 'no'
iraf.phot.verify = 'no'
iraf.phot.interactive = 'no'
# Used for passing the input coordinates
coofile = tempfile.NamedTemporaryFile(suffix='.coo', delete=False)
for i in range(len(x_in)):
coofile.write('%f %f \n' % (x_in[i], y_in[i])) # depends on [control=['for'], data=['i']]
coofile.flush()
# Used for receiving the results of the task
# mag_fd, mag_path = tempfile.mkstemp(suffix=".mag")
magfile = tempfile.NamedTemporaryFile(suffix='.mag', delete=False)
# Close the temp files before sending to IRAF due to docstring:
# "Whether the name can be used to open the file a second time, while
# the named temporary file is still open, varies across platforms"
coofile.close()
magfile.close()
os.remove(magfile.name)
iraf.phot(fits_filename + '[{}]'.format(extno), coofile.name, magfile.name)
pdump_out = ascii.read(magfile.name, format='daophot')
logging.debug('PHOT FILE:\n' + str(pdump_out))
if not len(pdump_out) > 0:
mag_content = open(magfile.name).read()
raise TaskError('photometry failed. {}'.format(mag_content)) # depends on [control=['if'], data=[]]
# apply the aperture correction
pdump_out['MAG'] -= apcor
# if pdump_out['PIER'][0] != 0 or pdump_out['SIER'][0] != 0 or pdump_out['CIER'][0] != 0:
# raise ValueError("Photometry failed:\n {}".format(pdump_out))
# Clean up temporary files generated by IRAF
os.remove(coofile.name)
os.remove(magfile.name)
logger.debug('Computed aperture photometry on {} objects in {}'.format(len(pdump_out), fits_filename))
del input_hdulist
return pdump_out
|
def ground_height(self):
'''return height above ground in feet'''
lat = self.pkt['I105']['Lat']['val']
lon = self.pkt['I105']['Lon']['val']
global ElevationMap
ret = ElevationMap.GetElevation(lat, lon)
ret -= gen_settings.wgs84_to_AMSL
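        # GetElevation is assumed to return metres; 3.2807 is the metres-to-feet factor used here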
return ret * 3.2807
|
def function[ground_height, parameter[self]]:
constant[return height above ground in feet]
variable[lat] assign[=] call[call[call[name[self].pkt][constant[I105]]][constant[Lat]]][constant[val]]
variable[lon] assign[=] call[call[call[name[self].pkt][constant[I105]]][constant[Lon]]][constant[val]]
<ast.Global object at 0x7da1b17bb430>
variable[ret] assign[=] call[name[ElevationMap].GetElevation, parameter[name[lat], name[lon]]]
<ast.AugAssign object at 0x7da1b17b8e80>
return[binary_operation[name[ret] * constant[3.2807]]]
|
keyword[def] identifier[ground_height] ( identifier[self] ):
literal[string]
identifier[lat] = identifier[self] . identifier[pkt] [ literal[string] ][ literal[string] ][ literal[string] ]
identifier[lon] = identifier[self] . identifier[pkt] [ literal[string] ][ literal[string] ][ literal[string] ]
keyword[global] identifier[ElevationMap]
identifier[ret] = identifier[ElevationMap] . identifier[GetElevation] ( identifier[lat] , identifier[lon] )
identifier[ret] -= identifier[gen_settings] . identifier[wgs84_to_AMSL]
keyword[return] identifier[ret] * literal[int]
|
def ground_height(self):
"""return height above ground in feet"""
lat = self.pkt['I105']['Lat']['val']
lon = self.pkt['I105']['Lon']['val']
global ElevationMap
ret = ElevationMap.GetElevation(lat, lon)
ret -= gen_settings.wgs84_to_AMSL
return ret * 3.2807
|
def calc_qib2_v1(self):
"""Calculate the first inflow component released from the soil.
Required control parameters:
|NHRU|
|Lnk|
|NFk|
|DMin|
|DMax|
Required derived parameter:
|WZ|
Required state sequence:
|BoWa|
Calculated flux sequence:
|QIB2|
Basic equation:
:math:`QIB2 = (DMax-DMin) \\cdot
(\\frac{BoWa-WZ}{NFk-WZ})^\\frac{3}{2}`
Examples:
For water and sealed areas, no interflow is calculated (the first
three HRUs are of type |FLUSS|, |SEE|, and |VERS|, respectively).
No principal distinction is made between the remaining land use
classes (arable land |ACKER| has been selected for the last
five HRUs arbitrarily):
>>> from hydpy.models.lland import *
>>> parameterstep('1d')
>>> simulationstep('12h')
>>> nhru(8)
>>> lnk(FLUSS, SEE, VERS, ACKER, ACKER, ACKER, ACKER, ACKER)
>>> dmax(10.0)
>>> dmin(4.0)
>>> nfk(100.0, 100.0, 100.0, 50.0, 100.0, 100.0, 100.0, 200.0)
>>> derived.wz(50.0)
>>> states.bowa = 100.0, 100.0, 100.0, 50.1, 50.0, 75.0, 100.0, 100.0
Note the time dependence of parameters |DMin| (see the example above)
and |DMax|:
>>> dmax
dmax(10.0)
>>> dmax.values
array([ 5., 5., 5., 5., 5., 5., 5., 5.])
        The following results show that the calculation of |QIB2| both
resembles those of |QBB| and |QIB1| in some regards:
>>> model.calc_qib2_v1()
>>> fluxes.qib2
qib2(0.0, 0.0, 0.0, 0.0, 0.0, 1.06066, 3.0, 0.57735)
In the given example, the maximum rate of total interflow
generation is 5 mm/12h (parameter |DMax|). For the seventh zone,
which contains a saturated soil, the value calculated for the
        second interflow component (|QIB2|) is 3 mm/12h. The "missing"
        value of 2 mm/12h is calculated by method |calc_qib1_v1|.
(The fourth zone, which is slightly oversaturated, is only intended
to demonstrate that zero division due to |NFk| = |WZ| is circumvented.)
"""
con = self.parameters.control.fastaccess
der = self.parameters.derived.fastaccess
flu = self.sequences.fluxes.fastaccess
sta = self.sequences.states.fastaccess
for k in range(con.nhru):
if ((con.lnk[k] in (VERS, WASSER, FLUSS, SEE)) or
(sta.bowa[k] <= der.wz[k]) or (con.nfk[k] <= der.wz[k])):
flu.qib2[k] = 0.
else:
flu.qib2[k] = ((con.dmax[k]-con.dmin[k]) *
((sta.bowa[k]-der.wz[k]) /
(con.nfk[k]-der.wz[k]))**1.5)
|
def function[calc_qib2_v1, parameter[self]]:
constant[Calculate the first inflow component released from the soil.
Required control parameters:
|NHRU|
|Lnk|
|NFk|
|DMin|
|DMax|
Required derived parameter:
|WZ|
Required state sequence:
|BoWa|
Calculated flux sequence:
|QIB2|
Basic equation:
:math:`QIB2 = (DMax-DMin) \cdot
(\frac{BoWa-WZ}{NFk-WZ})^\frac{3}{2}`
Examples:
For water and sealed areas, no interflow is calculated (the first
three HRUs are of type |FLUSS|, |SEE|, and |VERS|, respectively).
No principal distinction is made between the remaining land use
classes (arable land |ACKER| has been selected for the last
five HRUs arbitrarily):
>>> from hydpy.models.lland import *
>>> parameterstep('1d')
>>> simulationstep('12h')
>>> nhru(8)
>>> lnk(FLUSS, SEE, VERS, ACKER, ACKER, ACKER, ACKER, ACKER)
>>> dmax(10.0)
>>> dmin(4.0)
>>> nfk(100.0, 100.0, 100.0, 50.0, 100.0, 100.0, 100.0, 200.0)
>>> derived.wz(50.0)
>>> states.bowa = 100.0, 100.0, 100.0, 50.1, 50.0, 75.0, 100.0, 100.0
Note the time dependence of parameters |DMin| (see the example above)
and |DMax|:
>>> dmax
dmax(10.0)
>>> dmax.values
array([ 5., 5., 5., 5., 5., 5., 5., 5.])
The following results show that he calculation of |QIB2| both
resembles those of |QBB| and |QIB1| in some regards:
>>> model.calc_qib2_v1()
>>> fluxes.qib2
qib2(0.0, 0.0, 0.0, 0.0, 0.0, 1.06066, 3.0, 0.57735)
In the given example, the maximum rate of total interflow
generation is 5 mm/12h (parameter |DMax|). For the seventh zone,
which contains a saturated soil, the value calculated for the
second interflow component (|QIB2|) is 3 mm/h. The "missing"
value of 2 mm/12h is be calculated by method |calc_qib1_v1|.
(The fourth zone, which is slightly oversaturated, is only intended
to demonstrate that zero division due to |NFk| = |WZ| is circumvented.)
]
variable[con] assign[=] name[self].parameters.control.fastaccess
variable[der] assign[=] name[self].parameters.derived.fastaccess
variable[flu] assign[=] name[self].sequences.fluxes.fastaccess
variable[sta] assign[=] name[self].sequences.states.fastaccess
for taget[name[k]] in starred[call[name[range], parameter[name[con].nhru]]] begin[:]
if <ast.BoolOp object at 0x7da18dc99a20> begin[:]
call[name[flu].qib2][name[k]] assign[=] constant[0.0]
|
keyword[def] identifier[calc_qib2_v1] ( identifier[self] ):
literal[string]
identifier[con] = identifier[self] . identifier[parameters] . identifier[control] . identifier[fastaccess]
identifier[der] = identifier[self] . identifier[parameters] . identifier[derived] . identifier[fastaccess]
identifier[flu] = identifier[self] . identifier[sequences] . identifier[fluxes] . identifier[fastaccess]
identifier[sta] = identifier[self] . identifier[sequences] . identifier[states] . identifier[fastaccess]
keyword[for] identifier[k] keyword[in] identifier[range] ( identifier[con] . identifier[nhru] ):
keyword[if] (( identifier[con] . identifier[lnk] [ identifier[k] ] keyword[in] ( identifier[VERS] , identifier[WASSER] , identifier[FLUSS] , identifier[SEE] )) keyword[or]
( identifier[sta] . identifier[bowa] [ identifier[k] ]<= identifier[der] . identifier[wz] [ identifier[k] ]) keyword[or] ( identifier[con] . identifier[nfk] [ identifier[k] ]<= identifier[der] . identifier[wz] [ identifier[k] ])):
identifier[flu] . identifier[qib2] [ identifier[k] ]= literal[int]
keyword[else] :
identifier[flu] . identifier[qib2] [ identifier[k] ]=(( identifier[con] . identifier[dmax] [ identifier[k] ]- identifier[con] . identifier[dmin] [ identifier[k] ])*
(( identifier[sta] . identifier[bowa] [ identifier[k] ]- identifier[der] . identifier[wz] [ identifier[k] ])/
( identifier[con] . identifier[nfk] [ identifier[k] ]- identifier[der] . identifier[wz] [ identifier[k] ]))** literal[int] )
|
def calc_qib2_v1(self):
"""Calculate the first inflow component released from the soil.
Required control parameters:
|NHRU|
|Lnk|
|NFk|
|DMin|
|DMax|
Required derived parameter:
|WZ|
Required state sequence:
|BoWa|
Calculated flux sequence:
|QIB2|
Basic equation:
:math:`QIB2 = (DMax-DMin) \\cdot
(\\frac{BoWa-WZ}{NFk-WZ})^\\frac{3}{2}`
Examples:
For water and sealed areas, no interflow is calculated (the first
three HRUs are of type |FLUSS|, |SEE|, and |VERS|, respectively).
No principal distinction is made between the remaining land use
classes (arable land |ACKER| has been selected for the last
five HRUs arbitrarily):
>>> from hydpy.models.lland import *
>>> parameterstep('1d')
>>> simulationstep('12h')
>>> nhru(8)
>>> lnk(FLUSS, SEE, VERS, ACKER, ACKER, ACKER, ACKER, ACKER)
>>> dmax(10.0)
>>> dmin(4.0)
>>> nfk(100.0, 100.0, 100.0, 50.0, 100.0, 100.0, 100.0, 200.0)
>>> derived.wz(50.0)
>>> states.bowa = 100.0, 100.0, 100.0, 50.1, 50.0, 75.0, 100.0, 100.0
Note the time dependence of parameters |DMin| (see the example above)
and |DMax|:
>>> dmax
dmax(10.0)
>>> dmax.values
array([ 5., 5., 5., 5., 5., 5., 5., 5.])
The following results show that he calculation of |QIB2| both
resembles those of |QBB| and |QIB1| in some regards:
>>> model.calc_qib2_v1()
>>> fluxes.qib2
qib2(0.0, 0.0, 0.0, 0.0, 0.0, 1.06066, 3.0, 0.57735)
In the given example, the maximum rate of total interflow
generation is 5 mm/12h (parameter |DMax|). For the seventh zone,
which contains a saturated soil, the value calculated for the
second interflow component (|QIB2|) is 3 mm/h. The "missing"
value of 2 mm/12h is be calculated by method |calc_qib1_v1|.
(The fourth zone, which is slightly oversaturated, is only intended
to demonstrate that zero division due to |NFk| = |WZ| is circumvented.)
"""
con = self.parameters.control.fastaccess
der = self.parameters.derived.fastaccess
flu = self.sequences.fluxes.fastaccess
sta = self.sequences.states.fastaccess
for k in range(con.nhru):
if con.lnk[k] in (VERS, WASSER, FLUSS, SEE) or sta.bowa[k] <= der.wz[k] or con.nfk[k] <= der.wz[k]:
flu.qib2[k] = 0.0 # depends on [control=['if'], data=[]]
else:
flu.qib2[k] = (con.dmax[k] - con.dmin[k]) * ((sta.bowa[k] - der.wz[k]) / (con.nfk[k] - der.wz[k])) ** 1.5 # depends on [control=['for'], data=['k']]
|
def namedtuple_storable(namedtuple, *args, **kwargs):
"""
Storable factory for named tuples.
"""
return default_storable(namedtuple, namedtuple._fields, *args, **kwargs)
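
A minimal sketch of how this factory is meant to be used; the `Point` tuple is made up and `default_storable` is assumed to be importable from the same module.

from collections import namedtuple

Point = namedtuple('Point', ['x', 'y'])
# Point._fields == ('x', 'y'); these become the exposed/stored attributes
point_storable = namedtuple_storable(Point)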
|
def function[namedtuple_storable, parameter[namedtuple]]:
constant[
Storable factory for named tuples.
]
return[call[name[default_storable], parameter[name[namedtuple], name[namedtuple]._fields, <ast.Starred object at 0x7da1b16165c0>]]]
|
keyword[def] identifier[namedtuple_storable] ( identifier[namedtuple] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[return] identifier[default_storable] ( identifier[namedtuple] , identifier[namedtuple] . identifier[_fields] ,* identifier[args] ,** identifier[kwargs] )
|
def namedtuple_storable(namedtuple, *args, **kwargs):
"""
Storable factory for named tuples.
"""
return default_storable(namedtuple, namedtuple._fields, *args, **kwargs)
|
def hstrlen(self, name, key):
"""
Return the number of bytes stored in the value of ``key``
within hash ``name``
"""
with self.pipe as pipe:
return pipe.hstrlen(self.redis_key(name), key)
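
A hedged usage sketch; ``h`` stands for an instance of the wrapper class this method belongs to, and the hash name and field are made up.

length = h.hstrlen('user:1', 'name')  # length of the value stored under field 'name' of hash 'user:1'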
|
def function[hstrlen, parameter[self, name, key]]:
constant[
Return the number of bytes stored in the value of ``key``
within hash ``name``
]
with name[self].pipe begin[:]
return[call[name[pipe].hstrlen, parameter[call[name[self].redis_key, parameter[name[name]]], name[key]]]]
|
keyword[def] identifier[hstrlen] ( identifier[self] , identifier[name] , identifier[key] ):
literal[string]
keyword[with] identifier[self] . identifier[pipe] keyword[as] identifier[pipe] :
keyword[return] identifier[pipe] . identifier[hstrlen] ( identifier[self] . identifier[redis_key] ( identifier[name] ), identifier[key] )
|
def hstrlen(self, name, key):
"""
Return the number of bytes stored in the value of ``key``
within hash ``name``
"""
with self.pipe as pipe:
return pipe.hstrlen(self.redis_key(name), key) # depends on [control=['with'], data=['pipe']]
|
def bench_report(results):
"""Print a report for given benchmark results to the console."""
table = Table(names=['function', 'nest', 'nside', 'size',
'time_healpy', 'time_self', 'ratio'],
dtype=['S20', bool, int, int, float, float, float], masked=True)
for row in results:
table.add_row(row)
table['time_self'].format = '10.7f'
if HEALPY_INSTALLED:
table['ratio'] = table['time_self'] / table['time_healpy']
table['time_healpy'].format = '10.7f'
table['ratio'].format = '7.2f'
table.pprint(max_lines=-1)
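
A sketch of the row layout bench_report expects; every row carries seven entries matching the column names above, and the numbers here are illustrative only (the ratio column is recomputed when healpy is installed).

results = [
    ('nside_to_order', True, 128, 10000, 0.0012, 0.0009, 0.0),
    ('ring_to_nested', False, 256, 10000, 0.0030, 0.0025, 0.0),
]
bench_report(results)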
|
def function[bench_report, parameter[results]]:
constant[Print a report for given benchmark results to the console.]
variable[table] assign[=] call[name[Table], parameter[]]
for taget[name[row]] in starred[name[results]] begin[:]
call[name[table].add_row, parameter[name[row]]]
call[name[table]][constant[time_self]].format assign[=] constant[10.7f]
if name[HEALPY_INSTALLED] begin[:]
call[name[table]][constant[ratio]] assign[=] binary_operation[call[name[table]][constant[time_self]] / call[name[table]][constant[time_healpy]]]
call[name[table]][constant[time_healpy]].format assign[=] constant[10.7f]
call[name[table]][constant[ratio]].format assign[=] constant[7.2f]
call[name[table].pprint, parameter[]]
|
keyword[def] identifier[bench_report] ( identifier[results] ):
literal[string]
identifier[table] = identifier[Table] ( identifier[names] =[ literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] ],
identifier[dtype] =[ literal[string] , identifier[bool] , identifier[int] , identifier[int] , identifier[float] , identifier[float] , identifier[float] ], identifier[masked] = keyword[True] )
keyword[for] identifier[row] keyword[in] identifier[results] :
identifier[table] . identifier[add_row] ( identifier[row] )
identifier[table] [ literal[string] ]. identifier[format] = literal[string]
keyword[if] identifier[HEALPY_INSTALLED] :
identifier[table] [ literal[string] ]= identifier[table] [ literal[string] ]/ identifier[table] [ literal[string] ]
identifier[table] [ literal[string] ]. identifier[format] = literal[string]
identifier[table] [ literal[string] ]. identifier[format] = literal[string]
identifier[table] . identifier[pprint] ( identifier[max_lines] =- literal[int] )
|
def bench_report(results):
"""Print a report for given benchmark results to the console."""
table = Table(names=['function', 'nest', 'nside', 'size', 'time_healpy', 'time_self', 'ratio'], dtype=['S20', bool, int, int, float, float, float], masked=True)
for row in results:
table.add_row(row) # depends on [control=['for'], data=['row']]
table['time_self'].format = '10.7f'
if HEALPY_INSTALLED:
table['ratio'] = table['time_self'] / table['time_healpy']
table['time_healpy'].format = '10.7f'
table['ratio'].format = '7.2f' # depends on [control=['if'], data=[]]
table.pprint(max_lines=-1)
|
def use_winlegacy():
"""
Forces use of the legacy Windows CryptoAPI. This should only be used on
Windows XP or for testing. It is less full-featured than the Cryptography
Next Generation (CNG) API, and as a result the elliptic curve and PSS
    padding features are implemented in pure Python. This isn't ideal, but it is
    a shim for end-user client code. No one is going to run a server on Windows
XP anyway, right?!
:raises:
EnvironmentError - when this function is called on an operating system other than Windows
RuntimeError - when this function is called after another part of oscrypto has been imported
"""
if sys.platform != 'win32':
plat = platform.system() or sys.platform
if plat == 'Darwin':
plat = 'OS X'
raise EnvironmentError('The winlegacy backend can only be used on Windows, not %s' % plat)
with _backend_lock:
if _module_values['backend'] is not None:
raise RuntimeError(
'Another part of oscrypto has already been imported, unable to force use of Windows legacy CryptoAPI'
)
_module_values['backend'] = 'winlegacy'
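
A minimal usage sketch; the key point, per the RuntimeError above, is that this call has to happen before any other part of oscrypto is imported.

import oscrypto
oscrypto.use_winlegacy()
from oscrypto import asymmetric  # later imports now go through the legacy CryptoAPI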
|
def function[use_winlegacy, parameter[]]:
constant[
Forces use of the legacy Windows CryptoAPI. This should only be used on
Windows XP or for testing. It is less full-featured than the Cryptography
Next Generation (CNG) API, and as a result the elliptic curve and PSS
padding features are implemented in pure Python. This isn't ideal, but it
a shim for end-user client code. No one is going to run a server on Windows
XP anyway, right?!
:raises:
EnvironmentError - when this function is called on an operating system other than Windows
RuntimeError - when this function is called after another part of oscrypto has been imported
]
if compare[name[sys].platform not_equal[!=] constant[win32]] begin[:]
variable[plat] assign[=] <ast.BoolOp object at 0x7da18bcc83a0>
if compare[name[plat] equal[==] constant[Darwin]] begin[:]
variable[plat] assign[=] constant[OS X]
<ast.Raise object at 0x7da18bccb040>
with name[_backend_lock] begin[:]
if compare[call[name[_module_values]][constant[backend]] is_not constant[None]] begin[:]
<ast.Raise object at 0x7da18bcc9420>
call[name[_module_values]][constant[backend]] assign[=] constant[winlegacy]
|
keyword[def] identifier[use_winlegacy] ():
literal[string]
keyword[if] identifier[sys] . identifier[platform] != literal[string] :
identifier[plat] = identifier[platform] . identifier[system] () keyword[or] identifier[sys] . identifier[platform]
keyword[if] identifier[plat] == literal[string] :
identifier[plat] = literal[string]
keyword[raise] identifier[EnvironmentError] ( literal[string] % identifier[plat] )
keyword[with] identifier[_backend_lock] :
keyword[if] identifier[_module_values] [ literal[string] ] keyword[is] keyword[not] keyword[None] :
keyword[raise] identifier[RuntimeError] (
literal[string]
)
identifier[_module_values] [ literal[string] ]= literal[string]
|
def use_winlegacy():
"""
Forces use of the legacy Windows CryptoAPI. This should only be used on
Windows XP or for testing. It is less full-featured than the Cryptography
Next Generation (CNG) API, and as a result the elliptic curve and PSS
padding features are implemented in pure Python. This isn't ideal, but it
a shim for end-user client code. No one is going to run a server on Windows
XP anyway, right?!
:raises:
EnvironmentError - when this function is called on an operating system other than Windows
RuntimeError - when this function is called after another part of oscrypto has been imported
"""
if sys.platform != 'win32':
plat = platform.system() or sys.platform
if plat == 'Darwin':
plat = 'OS X' # depends on [control=['if'], data=['plat']]
raise EnvironmentError('The winlegacy backend can only be used on Windows, not %s' % plat) # depends on [control=['if'], data=[]]
with _backend_lock:
if _module_values['backend'] is not None:
raise RuntimeError('Another part of oscrypto has already been imported, unable to force use of Windows legacy CryptoAPI') # depends on [control=['if'], data=[]]
_module_values['backend'] = 'winlegacy' # depends on [control=['with'], data=[]]
|
def refresh_from_pdb(self, pdb_state):
"""
Refresh Variable Explorer and Editor from a Pdb session,
after running any pdb command.
See publish_pdb_state and notify_spyder in spyder_kernels
"""
if 'step' in pdb_state and 'fname' in pdb_state['step']:
fname = pdb_state['step']['fname']
lineno = pdb_state['step']['lineno']
self.sig_pdb_step.emit(fname, lineno)
if 'namespace_view' in pdb_state:
self.sig_namespace_view.emit(ast.literal_eval(
pdb_state['namespace_view']))
if 'var_properties' in pdb_state:
self.sig_var_properties.emit(ast.literal_eval(
pdb_state['var_properties']))
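
A sketch of the pdb_state payload this method consumes; the keys mirror the checks above, while the concrete file name and values are made up.

pdb_state = {
    'step': {'fname': '/tmp/script.py', 'lineno': 42},
    'namespace_view': "{'x': {'type': 'int', 'size': 1, 'view': '3'}}",
    'var_properties': "{'x': {'is_supported': True}}",
}
shell.refresh_from_pdb(pdb_state)  # 'shell' stands for the client instance owning this method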
|
def function[refresh_from_pdb, parameter[self, pdb_state]]:
constant[
Refresh Variable Explorer and Editor from a Pdb session,
after running any pdb command.
See publish_pdb_state and notify_spyder in spyder_kernels
]
if <ast.BoolOp object at 0x7da20eb286d0> begin[:]
variable[fname] assign[=] call[call[name[pdb_state]][constant[step]]][constant[fname]]
variable[lineno] assign[=] call[call[name[pdb_state]][constant[step]]][constant[lineno]]
call[name[self].sig_pdb_step.emit, parameter[name[fname], name[lineno]]]
if compare[constant[namespace_view] in name[pdb_state]] begin[:]
call[name[self].sig_namespace_view.emit, parameter[call[name[ast].literal_eval, parameter[call[name[pdb_state]][constant[namespace_view]]]]]]
if compare[constant[var_properties] in name[pdb_state]] begin[:]
call[name[self].sig_var_properties.emit, parameter[call[name[ast].literal_eval, parameter[call[name[pdb_state]][constant[var_properties]]]]]]
|
keyword[def] identifier[refresh_from_pdb] ( identifier[self] , identifier[pdb_state] ):
literal[string]
keyword[if] literal[string] keyword[in] identifier[pdb_state] keyword[and] literal[string] keyword[in] identifier[pdb_state] [ literal[string] ]:
identifier[fname] = identifier[pdb_state] [ literal[string] ][ literal[string] ]
identifier[lineno] = identifier[pdb_state] [ literal[string] ][ literal[string] ]
identifier[self] . identifier[sig_pdb_step] . identifier[emit] ( identifier[fname] , identifier[lineno] )
keyword[if] literal[string] keyword[in] identifier[pdb_state] :
identifier[self] . identifier[sig_namespace_view] . identifier[emit] ( identifier[ast] . identifier[literal_eval] (
identifier[pdb_state] [ literal[string] ]))
keyword[if] literal[string] keyword[in] identifier[pdb_state] :
identifier[self] . identifier[sig_var_properties] . identifier[emit] ( identifier[ast] . identifier[literal_eval] (
identifier[pdb_state] [ literal[string] ]))
|
def refresh_from_pdb(self, pdb_state):
"""
Refresh Variable Explorer and Editor from a Pdb session,
after running any pdb command.
See publish_pdb_state and notify_spyder in spyder_kernels
"""
if 'step' in pdb_state and 'fname' in pdb_state['step']:
fname = pdb_state['step']['fname']
lineno = pdb_state['step']['lineno']
self.sig_pdb_step.emit(fname, lineno) # depends on [control=['if'], data=[]]
if 'namespace_view' in pdb_state:
self.sig_namespace_view.emit(ast.literal_eval(pdb_state['namespace_view'])) # depends on [control=['if'], data=['pdb_state']]
if 'var_properties' in pdb_state:
self.sig_var_properties.emit(ast.literal_eval(pdb_state['var_properties'])) # depends on [control=['if'], data=['pdb_state']]
|
def setuptools_setup(setup_fpath=None, module=None, **kwargs):
# TODO: Learn this better
# https://docs.python.org/3.1/distutils/apiref.html
# https://pythonhosted.org/an_example_pypi_project/setuptools.html
# https://docs.python.org/2/distutils/setupscript.html https://docs.python.org/2/distutils/setupscript.html
# Useful documentation: http://bashelton.com/2009/04/setuptools-tutorial/#setup.py-package_dir
"""
Arguments which can be passed to setuptools::
============ ===== ===========
Install-Data Value Description
------------ ----- -----------
*packages strlist a list of packages modules to be distributed
py_modules strlist a list of singlefile modules to be distributed
scripts strlist a list of standalone scripts to build and install
*install_requires list e.g: ['distribute == 0.7.3', 'numpy', 'matplotlib']
data_files strlist a list of data files to install
zip_safe bool install efficiently installed as a zipped module?
namespace_packages list packages without meaningful __init__.py's
package_dir dict keys are packagenames ('' is the root)
package_data dict keys are foldernames, values are a list of globstrs
        *entry_points dict installs a script {'console_scripts': ['script_name_to_install = entry_module:entry_function']}
============ ===== ===========
Meta-Data Value Description
------------ ----- -----------
name short string ('name of the package')
version short string ('version of this release')
author short string ('package authors name')
author_email email address ('email address of the package author')
maintainer short string ('package maintainers name')
maintainer_email email address ('email address of the package maintainer')
url URL ('home page for the package')
description short string ('short, summary description of the package')
long_description long string ('longer description of the package')
download_url URL ('location where the package may be downloaded')
classifiers list of strings ('a list of classifiers')
platforms list of strings ('a list of platforms')
license short string ('license for the package')
"""
from utool.util_inject import inject_colored_exceptions
inject_colored_exceptions() # Fluffly, but nice
if VERBOSE:
print(util_str.repr4(kwargs))
__infer_setup_kwargs(module, kwargs)
presetup_commands(setup_fpath, kwargs)
if VERBOSE:
print(util_str.repr4(kwargs))
return kwargs
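
A hedged setup.py sketch; the package metadata below is made up, setuptools_setup is assumed to be importable (e.g. from utool), and the point is only to show how the returned kwargs feed into setuptools.setup.

import sys
from setuptools import setup

kwargs = setuptools_setup(
    setup_fpath=__file__,
    module=sys.modules[__name__],
    name='mypkg',
    version='0.1.0',
    packages=['mypkg'],
    install_requires=['numpy'],
)
setup(**kwargs)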
|
def function[setuptools_setup, parameter[setup_fpath, module]]:
constant[
Arguments which can be passed to setuptools::
============ ===== ===========
Install-Data Value Description
------------ ----- -----------
*packages strlist a list of packages modules to be distributed
py_modules strlist a list of singlefile modules to be distributed
scripts strlist a list of standalone scripts to build and install
*install_requires list e.g: ['distribute == 0.7.3', 'numpy', 'matplotlib']
data_files strlist a list of data files to install
zip_safe bool install efficiently installed as a zipped module?
namespace_packages list packages without meaningful __init__.py's
package_dir dict keys are packagenames ('' is the root)
package_data dict keys are foldernames, values are a list of globstrs
*entry_pionts dict installs a script {'console_scripts': ['script_name_to_install = entry_module:entry_function']}
============ ===== ===========
Meta-Data Value Description
------------ ----- -----------
name short string ('name of the package')
version short string ('version of this release')
author short string ('package authors name')
author_email email address ('email address of the package author')
maintainer short string ('package maintainers name')
maintainer_email email address ('email address of the package maintainer')
url URL ('home page for the package')
description short string ('short, summary description of the package')
long_description long string ('longer description of the package')
download_url URL ('location where the package may be downloaded')
classifiers list of strings ('a list of classifiers')
platforms list of strings ('a list of platforms')
license short string ('license for the package')
]
from relative_module[utool.util_inject] import module[inject_colored_exceptions]
call[name[inject_colored_exceptions], parameter[]]
if name[VERBOSE] begin[:]
call[name[print], parameter[call[name[util_str].repr4, parameter[name[kwargs]]]]]
call[name[__infer_setup_kwargs], parameter[name[module], name[kwargs]]]
call[name[presetup_commands], parameter[name[setup_fpath], name[kwargs]]]
if name[VERBOSE] begin[:]
call[name[print], parameter[call[name[util_str].repr4, parameter[name[kwargs]]]]]
return[name[kwargs]]
|
keyword[def] identifier[setuptools_setup] ( identifier[setup_fpath] = keyword[None] , identifier[module] = keyword[None] ,** identifier[kwargs] ):
literal[string]
keyword[from] identifier[utool] . identifier[util_inject] keyword[import] identifier[inject_colored_exceptions]
identifier[inject_colored_exceptions] ()
keyword[if] identifier[VERBOSE] :
identifier[print] ( identifier[util_str] . identifier[repr4] ( identifier[kwargs] ))
identifier[__infer_setup_kwargs] ( identifier[module] , identifier[kwargs] )
identifier[presetup_commands] ( identifier[setup_fpath] , identifier[kwargs] )
keyword[if] identifier[VERBOSE] :
identifier[print] ( identifier[util_str] . identifier[repr4] ( identifier[kwargs] ))
keyword[return] identifier[kwargs]
|
def setuptools_setup(setup_fpath=None, module=None, **kwargs):
# TODO: Learn this better
# https://docs.python.org/3.1/distutils/apiref.html
# https://pythonhosted.org/an_example_pypi_project/setuptools.html
# https://docs.python.org/2/distutils/setupscript.html https://docs.python.org/2/distutils/setupscript.html
# Useful documentation: http://bashelton.com/2009/04/setuptools-tutorial/#setup.py-package_dir
"\n Arguments which can be passed to setuptools::\n\n ============ ===== ===========\n Install-Data Value Description\n ------------ ----- -----------\n *packages strlist a list of packages modules to be distributed\n py_modules strlist a list of singlefile modules to be distributed\n scripts strlist a list of standalone scripts to build and install\n *install_requires list e.g: ['distribute == 0.7.3', 'numpy', 'matplotlib']\n data_files strlist a list of data files to install\n zip_safe bool install efficiently installed as a zipped module?\n namespace_packages list packages without meaningful __init__.py's\n package_dir dict keys are packagenames ('' is the root)\n package_data dict keys are foldernames, values are a list of globstrs\n *entry_pionts dict installs a script {'console_scripts': ['script_name_to_install = entry_module:entry_function']}\n\n ============ ===== ===========\n Meta-Data Value Description\n ------------ ----- -----------\n name short string ('name of the package')\n version short string ('version of this release')\n author short string ('package authors name')\n author_email email address ('email address of the package author')\n maintainer short string ('package maintainers name')\n maintainer_email email address ('email address of the package maintainer')\n url URL ('home page for the package')\n description short string ('short, summary description of the package')\n long_description long string ('longer description of the package')\n download_url URL ('location where the package may be downloaded')\n classifiers list of strings ('a list of classifiers')\n platforms list of strings ('a list of platforms')\n license short string ('license for the package')\n "
from utool.util_inject import inject_colored_exceptions
inject_colored_exceptions() # Fluffly, but nice
if VERBOSE:
print(util_str.repr4(kwargs)) # depends on [control=['if'], data=[]]
__infer_setup_kwargs(module, kwargs)
presetup_commands(setup_fpath, kwargs)
if VERBOSE:
print(util_str.repr4(kwargs)) # depends on [control=['if'], data=[]]
return kwargs
|
def filter_instance(inst, plist):
"""Remove properties from an instance that aren't in the PropertyList
inst -- The CIMInstance
plist -- The property List, or None. The list items must be all
lowercase.
"""
if plist is not None:
        for pname in list(inst.properties.keys()):
if pname.lower() not in plist:
del inst.properties[pname]
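
A hedged usage sketch; ``inst`` stands for a pywbem CIMInstance, and the property names are made up.

filter_instance(inst, ['name', 'status'])  # keep only Name/Status (list entries must be lowercase)
filter_instance(inst, None)                # a PropertyList of None keeps every property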
|
def function[filter_instance, parameter[inst, plist]]:
constant[Remove properties from an instance that aren't in the PropertyList
inst -- The CIMInstance
plist -- The property List, or None. The list items must be all
lowercase.
]
if compare[name[plist] is_not constant[None]] begin[:]
for taget[name[pname]] in starred[call[name[inst].properties.keys, parameter[]]] begin[:]
if compare[call[name[pname].lower, parameter[]] <ast.NotIn object at 0x7da2590d7190> name[plist]] begin[:]
<ast.Delete object at 0x7da204345660>
|
keyword[def] identifier[filter_instance] ( identifier[inst] , identifier[plist] ):
literal[string]
keyword[if] identifier[plist] keyword[is] keyword[not] keyword[None] :
keyword[for] identifier[pname] keyword[in] identifier[inst] . identifier[properties] . identifier[keys] ():
keyword[if] identifier[pname] . identifier[lower] () keyword[not] keyword[in] identifier[plist] :
keyword[del] identifier[inst] . identifier[properties] [ identifier[pname] ]
|
def filter_instance(inst, plist):
"""Remove properties from an instance that aren't in the PropertyList
inst -- The CIMInstance
plist -- The property List, or None. The list items must be all
lowercase.
"""
if plist is not None:
for pname in inst.properties.keys():
if pname.lower() not in plist:
del inst.properties[pname] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['pname']] # depends on [control=['if'], data=['plist']]
|
def readAccelRange( self ):
"""!
        Reads the configured accelerometer range.
@return an int value.
It should be one of the following values:
@see ACCEL_RANGE_2G
@see ACCEL_RANGE_4G
@see ACCEL_RANGE_8G
@see ACCEL_RANGE_16G
"""
raw_data = self._readByte(self.REG_ACCEL_CONFIG)
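        # (raw_data | 0xE7) ^ 0xE7 clears every bit except ACCEL_FS_SEL (bits 3-4) of ACCEL_CONFIG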
raw_data = (raw_data | 0xE7) ^ 0xE7
return raw_data
|
def function[readAccelRange, parameter[self]]:
constant[!
Reads the range of accelerometer setup.
@return an int value.
It should be one of the following values:
@see ACCEL_RANGE_2G
@see ACCEL_RANGE_4G
@see ACCEL_RANGE_8G
@see ACCEL_RANGE_16G
]
variable[raw_data] assign[=] call[name[self]._readByte, parameter[name[self].REG_ACCEL_CONFIG]]
variable[raw_data] assign[=] binary_operation[binary_operation[name[raw_data] <ast.BitOr object at 0x7da2590d6aa0> constant[231]] <ast.BitXor object at 0x7da2590d6b00> constant[231]]
return[name[raw_data]]
|
keyword[def] identifier[readAccelRange] ( identifier[self] ):
literal[string]
identifier[raw_data] = identifier[self] . identifier[_readByte] ( identifier[self] . identifier[REG_ACCEL_CONFIG] )
identifier[raw_data] =( identifier[raw_data] | literal[int] )^ literal[int]
keyword[return] identifier[raw_data]
|
def readAccelRange(self):
"""!
Reads the range of accelerometer setup.
@return an int value.
It should be one of the following values:
@see ACCEL_RANGE_2G
@see ACCEL_RANGE_4G
@see ACCEL_RANGE_8G
@see ACCEL_RANGE_16G
"""
raw_data = self._readByte(self.REG_ACCEL_CONFIG)
raw_data = (raw_data | 231) ^ 231
return raw_data
|
def source_pipe(self, source, ps=None):
"""Create a source pipe for a source, giving it access to download files to the local cache"""
if isinstance(source, string_types):
source = self.source(source)
source.dataset = self.dataset
source._bundle = self
iter_source, source_pipe = self._iterable_source(source, ps)
if self.limited_run:
source_pipe.limit = 500
return iter_source, source_pipe
|
def function[source_pipe, parameter[self, source, ps]]:
constant[Create a source pipe for a source, giving it access to download files to the local cache]
if call[name[isinstance], parameter[name[source], name[string_types]]] begin[:]
variable[source] assign[=] call[name[self].source, parameter[name[source]]]
name[source].dataset assign[=] name[self].dataset
name[source]._bundle assign[=] name[self]
<ast.Tuple object at 0x7da20e956770> assign[=] call[name[self]._iterable_source, parameter[name[source], name[ps]]]
if name[self].limited_run begin[:]
name[source_pipe].limit assign[=] constant[500]
return[tuple[[<ast.Name object at 0x7da20e957eb0>, <ast.Name object at 0x7da20e9577f0>]]]
|
keyword[def] identifier[source_pipe] ( identifier[self] , identifier[source] , identifier[ps] = keyword[None] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[source] , identifier[string_types] ):
identifier[source] = identifier[self] . identifier[source] ( identifier[source] )
identifier[source] . identifier[dataset] = identifier[self] . identifier[dataset]
identifier[source] . identifier[_bundle] = identifier[self]
identifier[iter_source] , identifier[source_pipe] = identifier[self] . identifier[_iterable_source] ( identifier[source] , identifier[ps] )
keyword[if] identifier[self] . identifier[limited_run] :
identifier[source_pipe] . identifier[limit] = literal[int]
keyword[return] identifier[iter_source] , identifier[source_pipe]
|
def source_pipe(self, source, ps=None):
"""Create a source pipe for a source, giving it access to download files to the local cache"""
if isinstance(source, string_types):
source = self.source(source) # depends on [control=['if'], data=[]]
source.dataset = self.dataset
source._bundle = self
(iter_source, source_pipe) = self._iterable_source(source, ps)
if self.limited_run:
source_pipe.limit = 500 # depends on [control=['if'], data=[]]
return (iter_source, source_pipe)
|
def max_flow_preflowpush(self, source, sink, algo = 'FIFO', display = None):
'''
API: max_flow_preflowpush(self, source, sink, algo = 'FIFO',
display = None)
Description:
        Finds maximum flow from source to sink using the preflow-push
        (push-relabel) algorithm.
Pre:
Assumes a directed graph in which each arc has a 'capacity'
        attribute and for which there does not exist both arcs (i,j)
and (j,i) for any pair of nodes i and j.
Input:
source: Source node name.
sink: Sink node name.
algo: Algorithm choice, 'FIFO', 'SAP' or 'HighestLabel'.
display: display method.
Post:
The 'flow' attribute of each arc gives a maximum flow.
'''
if display == None:
display = self.attr['display']
else:
self.set_display_mode(display)
nl = self.get_node_list()
# set excess of all nodes to 0
for n in nl:
self.set_node_attr(n, 'excess', 0)
# set flow of all edges to 0
for e in self.edge_attr:
self.edge_attr[e]['flow'] = 0
if 'capacity' in self.edge_attr[e]:
capacity = self.edge_attr[e]['capacity']
self.edge_attr[e]['label'] = str(capacity)+'/0'
else:
self.edge_attr[e]['capacity'] = INF
self.edge_attr[e]['label'] = 'INF/0'
self.display()
self.set_display_mode('off')
self.search(sink, algo = 'UnweightedSPT', reverse = True)
self.set_display_mode(display)
disconnect = False
for n in nl:
if self.get_node_attr(n, 'distance') is None:
disconnect = True
self.set_node_attr(n, 'distance',
2*len(nl) + 1)
if disconnect:
print('Warning: graph contains nodes not connected to the sink...')
if algo == 'FIFO':
q = Queue()
elif algo == 'SAP':
q = Stack()
elif algo == 'HighestLabel':
q = PriorityQueue()
for n in self.get_neighbors(source):
capacity = self.get_edge_attr(source, n, 'capacity')
self.set_edge_attr(source, n, 'flow', capacity)
self.set_node_attr(n, 'excess', capacity)
excess = self.get_node_attr(source, 'excess')
self.set_node_attr(source, 'excess', excess - capacity)
if algo == 'FIFO' or algo == 'SAP':
q.push(n)
elif algo == 'HighestLabel':
q.push(n, -1)
self.set_node_attr(source, 'distance', len(nl))
self.show_flow()
while not q.isEmpty():
relabel = True
current = q.peek()
neighbors = (self.get_neighbors(current) +
self.get_in_neighbors(current))
for n in neighbors:
pushed = self.process_edge_flow(source, sink, current, n, algo,
q)
if pushed:
self.show_flow()
if algo == 'FIFO':
'''With FIFO, we need to add the neighbors to the queue
before the current is added back in or the nodes will
be out of order
'''
if q.peek(n) is None and n != source and n != sink:
q.push(n)
'''Keep pushing while there is excess'''
if self.get_node_attr(current, 'excess') > 0:
continue
'''If we were able to push, then there we should not
relabel
'''
relabel = False
break
q.remove(current)
if current != sink:
if relabel:
self.relabel(current)
self.show_flow()
if self.get_node_attr(current, 'excess') > 0:
if algo == 'FIFO' or algo == 'SAP':
q.push(current)
elif algo == 'HighestLabel':
q.push(current, -self.get_node_attr(current,
'distance'))
if pushed and q.peek(n) is None and n != source:
if algo == 'SAP':
q.push(n)
elif algo == 'HighestLabel':
q.push(n, -self.get_node_attr(n, 'distance'))
|
def function[max_flow_preflowpush, parameter[self, source, sink, algo, display]]:
constant[
API: max_flow_preflowpush(self, source, sink, algo = 'FIFO',
display = None)
Description:
        Finds maximum flow from source to sink by the preflow-push
        (push-relabel) algorithm.
Pre:
Assumes a directed graph in which each arc has a 'capacity'
        attribute and for which there does not exist both arcs (i,j)
and (j,i) for any pair of nodes i and j.
Input:
source: Source node name.
sink: Sink node name.
algo: Algorithm choice, 'FIFO', 'SAP' or 'HighestLabel'.
display: display method.
Post:
The 'flow' attribute of each arc gives a maximum flow.
]
if compare[name[display] equal[==] constant[None]] begin[:]
variable[display] assign[=] call[name[self].attr][constant[display]]
variable[nl] assign[=] call[name[self].get_node_list, parameter[]]
for taget[name[n]] in starred[name[nl]] begin[:]
call[name[self].set_node_attr, parameter[name[n], constant[excess], constant[0]]]
for taget[name[e]] in starred[name[self].edge_attr] begin[:]
call[call[name[self].edge_attr][name[e]]][constant[flow]] assign[=] constant[0]
if compare[constant[capacity] in call[name[self].edge_attr][name[e]]] begin[:]
variable[capacity] assign[=] call[call[name[self].edge_attr][name[e]]][constant[capacity]]
call[call[name[self].edge_attr][name[e]]][constant[label]] assign[=] binary_operation[call[name[str], parameter[name[capacity]]] + constant[/0]]
call[name[self].display, parameter[]]
call[name[self].set_display_mode, parameter[constant[off]]]
call[name[self].search, parameter[name[sink]]]
call[name[self].set_display_mode, parameter[name[display]]]
variable[disconnect] assign[=] constant[False]
for taget[name[n]] in starred[name[nl]] begin[:]
if compare[call[name[self].get_node_attr, parameter[name[n], constant[distance]]] is constant[None]] begin[:]
variable[disconnect] assign[=] constant[True]
call[name[self].set_node_attr, parameter[name[n], constant[distance], binary_operation[binary_operation[constant[2] * call[name[len], parameter[name[nl]]]] + constant[1]]]]
if name[disconnect] begin[:]
call[name[print], parameter[constant[Warning: graph contains nodes not connected to the sink...]]]
if compare[name[algo] equal[==] constant[FIFO]] begin[:]
variable[q] assign[=] call[name[Queue], parameter[]]
for taget[name[n]] in starred[call[name[self].get_neighbors, parameter[name[source]]]] begin[:]
variable[capacity] assign[=] call[name[self].get_edge_attr, parameter[name[source], name[n], constant[capacity]]]
call[name[self].set_edge_attr, parameter[name[source], name[n], constant[flow], name[capacity]]]
call[name[self].set_node_attr, parameter[name[n], constant[excess], name[capacity]]]
variable[excess] assign[=] call[name[self].get_node_attr, parameter[name[source], constant[excess]]]
call[name[self].set_node_attr, parameter[name[source], constant[excess], binary_operation[name[excess] - name[capacity]]]]
if <ast.BoolOp object at 0x7da1b0595330> begin[:]
call[name[q].push, parameter[name[n]]]
call[name[self].set_node_attr, parameter[name[source], constant[distance], call[name[len], parameter[name[nl]]]]]
call[name[self].show_flow, parameter[]]
while <ast.UnaryOp object at 0x7da1b0594b80> begin[:]
variable[relabel] assign[=] constant[True]
variable[current] assign[=] call[name[q].peek, parameter[]]
variable[neighbors] assign[=] binary_operation[call[name[self].get_neighbors, parameter[name[current]]] + call[name[self].get_in_neighbors, parameter[name[current]]]]
for taget[name[n]] in starred[name[neighbors]] begin[:]
variable[pushed] assign[=] call[name[self].process_edge_flow, parameter[name[source], name[sink], name[current], name[n], name[algo], name[q]]]
if name[pushed] begin[:]
call[name[self].show_flow, parameter[]]
if compare[name[algo] equal[==] constant[FIFO]] begin[:]
constant[With FIFO, we need to add the neighbors to the queue
before the current is added back in or the nodes will
be out of order
]
if <ast.BoolOp object at 0x7da1b0535030> begin[:]
call[name[q].push, parameter[name[n]]]
constant[Keep pushing while there is excess]
if compare[call[name[self].get_node_attr, parameter[name[current], constant[excess]]] greater[>] constant[0]] begin[:]
continue
constant[If we were able to push, then there we should not
relabel
]
variable[relabel] assign[=] constant[False]
break
call[name[q].remove, parameter[name[current]]]
if compare[name[current] not_equal[!=] name[sink]] begin[:]
if name[relabel] begin[:]
call[name[self].relabel, parameter[name[current]]]
call[name[self].show_flow, parameter[]]
if compare[call[name[self].get_node_attr, parameter[name[current], constant[excess]]] greater[>] constant[0]] begin[:]
if <ast.BoolOp object at 0x7da1b05368c0> begin[:]
call[name[q].push, parameter[name[current]]]
if <ast.BoolOp object at 0x7da1b0535c60> begin[:]
if compare[name[algo] equal[==] constant[SAP]] begin[:]
call[name[q].push, parameter[name[n]]]
|
keyword[def] identifier[max_flow_preflowpush] ( identifier[self] , identifier[source] , identifier[sink] , identifier[algo] = literal[string] , identifier[display] = keyword[None] ):
literal[string]
keyword[if] identifier[display] == keyword[None] :
identifier[display] = identifier[self] . identifier[attr] [ literal[string] ]
keyword[else] :
identifier[self] . identifier[set_display_mode] ( identifier[display] )
identifier[nl] = identifier[self] . identifier[get_node_list] ()
keyword[for] identifier[n] keyword[in] identifier[nl] :
identifier[self] . identifier[set_node_attr] ( identifier[n] , literal[string] , literal[int] )
keyword[for] identifier[e] keyword[in] identifier[self] . identifier[edge_attr] :
identifier[self] . identifier[edge_attr] [ identifier[e] ][ literal[string] ]= literal[int]
keyword[if] literal[string] keyword[in] identifier[self] . identifier[edge_attr] [ identifier[e] ]:
identifier[capacity] = identifier[self] . identifier[edge_attr] [ identifier[e] ][ literal[string] ]
identifier[self] . identifier[edge_attr] [ identifier[e] ][ literal[string] ]= identifier[str] ( identifier[capacity] )+ literal[string]
keyword[else] :
identifier[self] . identifier[edge_attr] [ identifier[e] ][ literal[string] ]= identifier[INF]
identifier[self] . identifier[edge_attr] [ identifier[e] ][ literal[string] ]= literal[string]
identifier[self] . identifier[display] ()
identifier[self] . identifier[set_display_mode] ( literal[string] )
identifier[self] . identifier[search] ( identifier[sink] , identifier[algo] = literal[string] , identifier[reverse] = keyword[True] )
identifier[self] . identifier[set_display_mode] ( identifier[display] )
identifier[disconnect] = keyword[False]
keyword[for] identifier[n] keyword[in] identifier[nl] :
keyword[if] identifier[self] . identifier[get_node_attr] ( identifier[n] , literal[string] ) keyword[is] keyword[None] :
identifier[disconnect] = keyword[True]
identifier[self] . identifier[set_node_attr] ( identifier[n] , literal[string] ,
literal[int] * identifier[len] ( identifier[nl] )+ literal[int] )
keyword[if] identifier[disconnect] :
identifier[print] ( literal[string] )
keyword[if] identifier[algo] == literal[string] :
identifier[q] = identifier[Queue] ()
keyword[elif] identifier[algo] == literal[string] :
identifier[q] = identifier[Stack] ()
keyword[elif] identifier[algo] == literal[string] :
identifier[q] = identifier[PriorityQueue] ()
keyword[for] identifier[n] keyword[in] identifier[self] . identifier[get_neighbors] ( identifier[source] ):
identifier[capacity] = identifier[self] . identifier[get_edge_attr] ( identifier[source] , identifier[n] , literal[string] )
identifier[self] . identifier[set_edge_attr] ( identifier[source] , identifier[n] , literal[string] , identifier[capacity] )
identifier[self] . identifier[set_node_attr] ( identifier[n] , literal[string] , identifier[capacity] )
identifier[excess] = identifier[self] . identifier[get_node_attr] ( identifier[source] , literal[string] )
identifier[self] . identifier[set_node_attr] ( identifier[source] , literal[string] , identifier[excess] - identifier[capacity] )
keyword[if] identifier[algo] == literal[string] keyword[or] identifier[algo] == literal[string] :
identifier[q] . identifier[push] ( identifier[n] )
keyword[elif] identifier[algo] == literal[string] :
identifier[q] . identifier[push] ( identifier[n] ,- literal[int] )
identifier[self] . identifier[set_node_attr] ( identifier[source] , literal[string] , identifier[len] ( identifier[nl] ))
identifier[self] . identifier[show_flow] ()
keyword[while] keyword[not] identifier[q] . identifier[isEmpty] ():
identifier[relabel] = keyword[True]
identifier[current] = identifier[q] . identifier[peek] ()
identifier[neighbors] =( identifier[self] . identifier[get_neighbors] ( identifier[current] )+
identifier[self] . identifier[get_in_neighbors] ( identifier[current] ))
keyword[for] identifier[n] keyword[in] identifier[neighbors] :
identifier[pushed] = identifier[self] . identifier[process_edge_flow] ( identifier[source] , identifier[sink] , identifier[current] , identifier[n] , identifier[algo] ,
identifier[q] )
keyword[if] identifier[pushed] :
identifier[self] . identifier[show_flow] ()
keyword[if] identifier[algo] == literal[string] :
literal[string]
keyword[if] identifier[q] . identifier[peek] ( identifier[n] ) keyword[is] keyword[None] keyword[and] identifier[n] != identifier[source] keyword[and] identifier[n] != identifier[sink] :
identifier[q] . identifier[push] ( identifier[n] )
literal[string]
keyword[if] identifier[self] . identifier[get_node_attr] ( identifier[current] , literal[string] )> literal[int] :
keyword[continue]
literal[string]
identifier[relabel] = keyword[False]
keyword[break]
identifier[q] . identifier[remove] ( identifier[current] )
keyword[if] identifier[current] != identifier[sink] :
keyword[if] identifier[relabel] :
identifier[self] . identifier[relabel] ( identifier[current] )
identifier[self] . identifier[show_flow] ()
keyword[if] identifier[self] . identifier[get_node_attr] ( identifier[current] , literal[string] )> literal[int] :
keyword[if] identifier[algo] == literal[string] keyword[or] identifier[algo] == literal[string] :
identifier[q] . identifier[push] ( identifier[current] )
keyword[elif] identifier[algo] == literal[string] :
identifier[q] . identifier[push] ( identifier[current] ,- identifier[self] . identifier[get_node_attr] ( identifier[current] ,
literal[string] ))
keyword[if] identifier[pushed] keyword[and] identifier[q] . identifier[peek] ( identifier[n] ) keyword[is] keyword[None] keyword[and] identifier[n] != identifier[source] :
keyword[if] identifier[algo] == literal[string] :
identifier[q] . identifier[push] ( identifier[n] )
keyword[elif] identifier[algo] == literal[string] :
identifier[q] . identifier[push] ( identifier[n] ,- identifier[self] . identifier[get_node_attr] ( identifier[n] , literal[string] ))
|
def max_flow_preflowpush(self, source, sink, algo='FIFO', display=None):
"""
API: max_flow_preflowpush(self, source, sink, algo = 'FIFO',
display = None)
Description:
        Finds maximum flow from source to sink by the preflow-push
        (push-relabel) algorithm.
Pre:
Assumes a directed graph in which each arc has a 'capacity'
        attribute and for which there does not exist both arcs (i,j)
and (j,i) for any pair of nodes i and j.
Input:
source: Source node name.
sink: Sink node name.
algo: Algorithm choice, 'FIFO', 'SAP' or 'HighestLabel'.
display: display method.
Post:
The 'flow' attribute of each arc gives a maximum flow.
"""
if display == None:
display = self.attr['display'] # depends on [control=['if'], data=['display']]
else:
self.set_display_mode(display)
nl = self.get_node_list()
# set excess of all nodes to 0
for n in nl:
self.set_node_attr(n, 'excess', 0) # depends on [control=['for'], data=['n']]
# set flow of all edges to 0
for e in self.edge_attr:
self.edge_attr[e]['flow'] = 0
if 'capacity' in self.edge_attr[e]:
capacity = self.edge_attr[e]['capacity']
self.edge_attr[e]['label'] = str(capacity) + '/0' # depends on [control=['if'], data=[]]
else:
self.edge_attr[e]['capacity'] = INF
self.edge_attr[e]['label'] = 'INF/0' # depends on [control=['for'], data=['e']]
self.display()
self.set_display_mode('off')
self.search(sink, algo='UnweightedSPT', reverse=True)
self.set_display_mode(display)
disconnect = False
for n in nl:
if self.get_node_attr(n, 'distance') is None:
disconnect = True
self.set_node_attr(n, 'distance', 2 * len(nl) + 1) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['n']]
if disconnect:
print('Warning: graph contains nodes not connected to the sink...') # depends on [control=['if'], data=[]]
if algo == 'FIFO':
q = Queue() # depends on [control=['if'], data=[]]
elif algo == 'SAP':
q = Stack() # depends on [control=['if'], data=[]]
elif algo == 'HighestLabel':
q = PriorityQueue() # depends on [control=['if'], data=[]]
for n in self.get_neighbors(source):
capacity = self.get_edge_attr(source, n, 'capacity')
self.set_edge_attr(source, n, 'flow', capacity)
self.set_node_attr(n, 'excess', capacity)
excess = self.get_node_attr(source, 'excess')
self.set_node_attr(source, 'excess', excess - capacity)
if algo == 'FIFO' or algo == 'SAP':
q.push(n) # depends on [control=['if'], data=[]]
elif algo == 'HighestLabel':
q.push(n, -1) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['n']]
self.set_node_attr(source, 'distance', len(nl))
self.show_flow()
while not q.isEmpty():
relabel = True
current = q.peek()
neighbors = self.get_neighbors(current) + self.get_in_neighbors(current)
for n in neighbors:
pushed = self.process_edge_flow(source, sink, current, n, algo, q)
if pushed:
self.show_flow()
if algo == 'FIFO':
'With FIFO, we need to add the neighbors to the queue\n before the current is added back in or the nodes will\n be out of order\n '
if q.peek(n) is None and n != source and (n != sink):
q.push(n) # depends on [control=['if'], data=[]]
'Keep pushing while there is excess'
if self.get_node_attr(current, 'excess') > 0:
continue # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
'If we were able to push, then there we should not\n relabel\n '
relabel = False
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['n']]
q.remove(current)
if current != sink:
if relabel:
self.relabel(current)
self.show_flow() # depends on [control=['if'], data=[]]
if self.get_node_attr(current, 'excess') > 0:
if algo == 'FIFO' or algo == 'SAP':
q.push(current) # depends on [control=['if'], data=[]]
elif algo == 'HighestLabel':
q.push(current, -self.get_node_attr(current, 'distance')) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['current']]
if pushed and q.peek(n) is None and (n != source):
if algo == 'SAP':
q.push(n) # depends on [control=['if'], data=[]]
elif algo == 'HighestLabel':
q.push(n, -self.get_node_attr(n, 'distance')) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]]
|
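The record above gives no runnable context for the preflow-push method (the Graph class, its 'capacity'/'flow' attributes, display hooks and Queue/Stack/PriorityQueue helpers are all external). Below is a minimal standalone sketch of the same FIFO push-relabel idea on a plain dict-of-dicts capacity map; all names are illustrative and this is not the class method itself.

from collections import deque

def preflow_push_max_flow(capacity, source, sink):
    """FIFO push-relabel max flow on a dict-of-dicts capacity map (sketch)."""
    nodes = set(capacity) | {v for u in capacity for v in capacity[u]}
    # residual capacities, including zero-capacity reverse arcs
    res = {u: {} for u in nodes}
    for u in capacity:
        for v, c in capacity[u].items():
            res[u][v] = res[u].get(v, 0) + c
            res[v].setdefault(u, 0)
    height = {u: 0 for u in nodes}
    excess = {u: 0 for u in nodes}
    height[source] = len(nodes)
    q = deque()
    # saturate every arc leaving the source
    for v in list(res[source]):
        delta = res[source][v]
        if delta > 0:
            res[source][v] -= delta
            res[v][source] += delta
            excess[v] += delta
            excess[source] -= delta
            if v not in (source, sink):
                q.append(v)
    while q:
        u = q.popleft()
        while excess[u] > 0:
            pushed = False
            for v in list(res[u]):
                r = res[u][v]
                # admissible arc: positive residual and height drops by one
                if r > 0 and height[u] == height[v] + 1:
                    delta = min(excess[u], r)
                    res[u][v] -= delta
                    res[v][u] += delta
                    excess[u] -= delta
                    excess[v] += delta
                    if v not in (source, sink) and excess[v] == delta:
                        q.append(v)  # v just became active
                    pushed = True
                    if excess[u] == 0:
                        break
            if not pushed:
                # relabel: one above the lowest neighbour reachable in the residual graph
                height[u] = 1 + min(height[v] for v, r in res[u].items() if r > 0)
    return excess[sink]

print(preflow_push_max_flow({'s': {'a': 4, 'b': 2}, 'a': {'b': 3, 't': 1}, 'b': {'t': 6}}, 's', 't'))  # 6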
def nreturned(self):
"""
Extract counters if available (lazy).
Looks for nreturned, nReturned, or nMatched counter.
"""
if not self._counters_calculated:
self._counters_calculated = True
self._extract_counters()
return self._nreturned
|
def function[nreturned, parameter[self]]:
constant[
Extract counters if available (lazy).
Looks for nreturned, nReturned, or nMatched counter.
]
if <ast.UnaryOp object at 0x7da1b1780be0> begin[:]
name[self]._counters_calculated assign[=] constant[True]
call[name[self]._extract_counters, parameter[]]
return[name[self]._nreturned]
|
keyword[def] identifier[nreturned] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[_counters_calculated] :
identifier[self] . identifier[_counters_calculated] = keyword[True]
identifier[self] . identifier[_extract_counters] ()
keyword[return] identifier[self] . identifier[_nreturned]
|
def nreturned(self):
"""
Extract counters if available (lazy).
Looks for nreturned, nReturned, or nMatched counter.
"""
if not self._counters_calculated:
self._counters_calculated = True
self._extract_counters() # depends on [control=['if'], data=[]]
return self._nreturned
|
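The property above lazily extracts log counters on first access and caches the result behind a flag. As a hedged illustration of that lazy-extraction pattern, here is a hypothetical minimal stand-in that parses a single counter out of a log line; the real class extracts several counters and handles more formats than this.

class LogLine:
    """Minimal sketch of the lazy counter-extraction pattern (illustrative only)."""
    def __init__(self, line):
        self._line = line
        self._counters_calculated = False
        self._nreturned = None

    def _extract_counters(self):
        # accept any of the counter spellings and keep the first match
        for token in ('nreturned:', 'nReturned:', 'nMatched:'):
            if token in self._line:
                self._nreturned = int(self._line.split(token)[1].split()[0])
                break

    @property
    def nreturned(self):
        if not self._counters_calculated:
            self._counters_calculated = True
            self._extract_counters()
        return self._nreturned

print(LogLine("query ... nreturned:42 reslen:1020").nreturned)  # 42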
def get_datasets(dataset_ids,**kwargs):
"""
    Get multiple datasets, by their IDs
"""
user_id = int(kwargs.get('user_id'))
datasets = []
if len(dataset_ids) == 0:
return []
try:
dataset_rs = db.DBSession.query(Dataset.id,
Dataset.type,
Dataset.unit_id,
Dataset.name,
Dataset.hidden,
Dataset.cr_date,
Dataset.created_by,
DatasetOwner.user_id,
null().label('metadata'),
case([(and_(Dataset.hidden=='Y', DatasetOwner.user_id is not None), None)],
else_=Dataset.value).label('value')).filter(
Dataset.id.in_(dataset_ids)).outerjoin(DatasetOwner,
and_(DatasetOwner.dataset_id==Dataset.id,
DatasetOwner.user_id==user_id)).all()
#convert the value row into a string as it is returned as a binary
for dataset_row in dataset_rs:
dataset_dict = dataset_row._asdict()
if dataset_row.value is not None:
dataset_dict['value'] = str(dataset_row.value)
if dataset_row.hidden == 'N' or (dataset_row.hidden == 'Y' and dataset_row.user_id is not None):
metadata = db.DBSession.query(Metadata).filter(Metadata.dataset_id == dataset_row.id).all()
dataset_dict['metadata'] = metadata
else:
dataset_dict['metadata'] = []
datasets.append(namedtuple('Dataset', dataset_dict.keys())(**dataset_dict))
except NoResultFound:
raise ResourceNotFoundError("Datasets not found.")
return datasets
|
def function[get_datasets, parameter[dataset_ids]]:
constant[
    Get multiple datasets, by their IDs
]
variable[user_id] assign[=] call[name[int], parameter[call[name[kwargs].get, parameter[constant[user_id]]]]]
variable[datasets] assign[=] list[[]]
if compare[call[name[len], parameter[name[dataset_ids]]] equal[==] constant[0]] begin[:]
return[list[[]]]
<ast.Try object at 0x7da204564b20>
return[name[datasets]]
|
keyword[def] identifier[get_datasets] ( identifier[dataset_ids] ,** identifier[kwargs] ):
literal[string]
identifier[user_id] = identifier[int] ( identifier[kwargs] . identifier[get] ( literal[string] ))
identifier[datasets] =[]
keyword[if] identifier[len] ( identifier[dataset_ids] )== literal[int] :
keyword[return] []
keyword[try] :
identifier[dataset_rs] = identifier[db] . identifier[DBSession] . identifier[query] ( identifier[Dataset] . identifier[id] ,
identifier[Dataset] . identifier[type] ,
identifier[Dataset] . identifier[unit_id] ,
identifier[Dataset] . identifier[name] ,
identifier[Dataset] . identifier[hidden] ,
identifier[Dataset] . identifier[cr_date] ,
identifier[Dataset] . identifier[created_by] ,
identifier[DatasetOwner] . identifier[user_id] ,
identifier[null] (). identifier[label] ( literal[string] ),
identifier[case] ([( identifier[and_] ( identifier[Dataset] . identifier[hidden] == literal[string] , identifier[DatasetOwner] . identifier[user_id] keyword[is] keyword[not] keyword[None] ), keyword[None] )],
identifier[else_] = identifier[Dataset] . identifier[value] ). identifier[label] ( literal[string] )). identifier[filter] (
identifier[Dataset] . identifier[id] . identifier[in_] ( identifier[dataset_ids] )). identifier[outerjoin] ( identifier[DatasetOwner] ,
identifier[and_] ( identifier[DatasetOwner] . identifier[dataset_id] == identifier[Dataset] . identifier[id] ,
identifier[DatasetOwner] . identifier[user_id] == identifier[user_id] )). identifier[all] ()
keyword[for] identifier[dataset_row] keyword[in] identifier[dataset_rs] :
identifier[dataset_dict] = identifier[dataset_row] . identifier[_asdict] ()
keyword[if] identifier[dataset_row] . identifier[value] keyword[is] keyword[not] keyword[None] :
identifier[dataset_dict] [ literal[string] ]= identifier[str] ( identifier[dataset_row] . identifier[value] )
keyword[if] identifier[dataset_row] . identifier[hidden] == literal[string] keyword[or] ( identifier[dataset_row] . identifier[hidden] == literal[string] keyword[and] identifier[dataset_row] . identifier[user_id] keyword[is] keyword[not] keyword[None] ):
identifier[metadata] = identifier[db] . identifier[DBSession] . identifier[query] ( identifier[Metadata] ). identifier[filter] ( identifier[Metadata] . identifier[dataset_id] == identifier[dataset_row] . identifier[id] ). identifier[all] ()
identifier[dataset_dict] [ literal[string] ]= identifier[metadata]
keyword[else] :
identifier[dataset_dict] [ literal[string] ]=[]
identifier[datasets] . identifier[append] ( identifier[namedtuple] ( literal[string] , identifier[dataset_dict] . identifier[keys] ())(** identifier[dataset_dict] ))
keyword[except] identifier[NoResultFound] :
keyword[raise] identifier[ResourceNotFoundError] ( literal[string] )
keyword[return] identifier[datasets]
|
def get_datasets(dataset_ids, **kwargs):
"""
    Get multiple datasets, by their IDs
"""
user_id = int(kwargs.get('user_id'))
datasets = []
if len(dataset_ids) == 0:
return [] # depends on [control=['if'], data=[]]
try:
dataset_rs = db.DBSession.query(Dataset.id, Dataset.type, Dataset.unit_id, Dataset.name, Dataset.hidden, Dataset.cr_date, Dataset.created_by, DatasetOwner.user_id, null().label('metadata'), case([(and_(Dataset.hidden == 'Y', DatasetOwner.user_id is not None), None)], else_=Dataset.value).label('value')).filter(Dataset.id.in_(dataset_ids)).outerjoin(DatasetOwner, and_(DatasetOwner.dataset_id == Dataset.id, DatasetOwner.user_id == user_id)).all()
#convert the value row into a string as it is returned as a binary
for dataset_row in dataset_rs:
dataset_dict = dataset_row._asdict()
if dataset_row.value is not None:
dataset_dict['value'] = str(dataset_row.value) # depends on [control=['if'], data=[]]
if dataset_row.hidden == 'N' or (dataset_row.hidden == 'Y' and dataset_row.user_id is not None):
metadata = db.DBSession.query(Metadata).filter(Metadata.dataset_id == dataset_row.id).all()
dataset_dict['metadata'] = metadata # depends on [control=['if'], data=[]]
else:
dataset_dict['metadata'] = []
datasets.append(namedtuple('Dataset', dataset_dict.keys())(**dataset_dict)) # depends on [control=['for'], data=['dataset_row']] # depends on [control=['try'], data=[]]
except NoResultFound:
raise ResourceNotFoundError('Datasets not found.') # depends on [control=['except'], data=[]]
return datasets
|
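The query above masks the value of hidden datasets unless the requesting user owns them, and only loads metadata for rows the user may see. One caveat worth noting: `DatasetOwner.user_id is not None` inside `and_()` is evaluated by Python (a Column object is never None, so it is always True), which means the CASE condition effectively reduces to `Dataset.hidden == 'Y'`; an SQL-side ownership check would use `DatasetOwner.user_id.isnot(None)`. The sketch below restates the intended per-row visibility rule in plain Python with made-up row data.

def visible_dataset(row, requesting_user_is_owner):
    """Restate the visibility rule applied per row above (illustrative only)."""
    out = dict(row)
    if row['hidden'] == 'Y' and not requesting_user_is_owner:
        out['value'] = None       # value is masked by the SQL CASE expression
        out['metadata'] = []      # metadata is only loaded for visible rows
    return out

row = {'id': 1, 'hidden': 'Y', 'value': '42', 'metadata': ['unit: m^3']}
print(visible_dataset(row, requesting_user_is_owner=False))  # value and metadata hidden
print(visible_dataset(row, requesting_user_is_owner=True))   # returned unchanged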
def from_words(cls, words, prefix=0, flatten=False):
"""
Given an iterable of words, return the corresponding table.
The table is built by accumulating, for each word, for each sub-word,
the number of occurrences of the corresponding next character.
:param words: an iterable of strings made of alphabetic characters;
:param prefix: if greater than 0, the maximum length of the prefix to
store in the table;
:param flatten: whether to flatten the table or not;
:return: the corresponding table.
Example:
>>> Table.from_words(['abaq'])
{'a':{'b': 1, 'q': 1}, 'ab': {'a': 1}, 'aba': {'q': 1}, 'b': {'a': 1},
'ba': {'q': 1}}
"""
table = defaultdict(lambda: defaultdict(int))
for word in words:
word = ">" + word + "<"
for start in range(len(word) - 1):
max_end = ((len(word) - 1) if prefix <= 0
else (min(start + prefix + 1, len(word) - 1)))
for end in range(start + 1, max_end + 1):
sub_word = word[start:end]
weight = 1 if flatten else (table[sub_word][word[end]] + 1)
table[sub_word][word[end]] = weight
for k, v in table.items():
table[k] = dict(v)
return cls(dict(table))
|
def function[from_words, parameter[cls, words, prefix, flatten]]:
constant[
Given an iterable of words, return the corresponding table.
The table is built by accumulating, for each word, for each sub-word,
the number of occurrences of the corresponding next character.
:param words: an iterable of strings made of alphabetic characters;
:param prefix: if greater than 0, the maximum length of the prefix to
store in the table;
:param flatten: whether to flatten the table or not;
:return: the corresponding table.
Example:
>>> Table.from_words(['abaq'])
{'a':{'b': 1, 'q': 1}, 'ab': {'a': 1}, 'aba': {'q': 1}, 'b': {'a': 1},
'ba': {'q': 1}}
]
variable[table] assign[=] call[name[defaultdict], parameter[<ast.Lambda object at 0x7da1b133c7f0>]]
for taget[name[word]] in starred[name[words]] begin[:]
variable[word] assign[=] binary_operation[binary_operation[constant[>] + name[word]] + constant[<]]
for taget[name[start]] in starred[call[name[range], parameter[binary_operation[call[name[len], parameter[name[word]]] - constant[1]]]]] begin[:]
variable[max_end] assign[=] <ast.IfExp object at 0x7da1b133c550>
for taget[name[end]] in starred[call[name[range], parameter[binary_operation[name[start] + constant[1]], binary_operation[name[max_end] + constant[1]]]]] begin[:]
variable[sub_word] assign[=] call[name[word]][<ast.Slice object at 0x7da1b133c610>]
variable[weight] assign[=] <ast.IfExp object at 0x7da1b13af1f0>
call[call[name[table]][name[sub_word]]][call[name[word]][name[end]]] assign[=] name[weight]
for taget[tuple[[<ast.Name object at 0x7da1b1304eb0>, <ast.Name object at 0x7da1b13044f0>]]] in starred[call[name[table].items, parameter[]]] begin[:]
call[name[table]][name[k]] assign[=] call[name[dict], parameter[name[v]]]
return[call[name[cls], parameter[call[name[dict], parameter[name[table]]]]]]
|
keyword[def] identifier[from_words] ( identifier[cls] , identifier[words] , identifier[prefix] = literal[int] , identifier[flatten] = keyword[False] ):
literal[string]
identifier[table] = identifier[defaultdict] ( keyword[lambda] : identifier[defaultdict] ( identifier[int] ))
keyword[for] identifier[word] keyword[in] identifier[words] :
identifier[word] = literal[string] + identifier[word] + literal[string]
keyword[for] identifier[start] keyword[in] identifier[range] ( identifier[len] ( identifier[word] )- literal[int] ):
identifier[max_end] =(( identifier[len] ( identifier[word] )- literal[int] ) keyword[if] identifier[prefix] <= literal[int]
keyword[else] ( identifier[min] ( identifier[start] + identifier[prefix] + literal[int] , identifier[len] ( identifier[word] )- literal[int] )))
keyword[for] identifier[end] keyword[in] identifier[range] ( identifier[start] + literal[int] , identifier[max_end] + literal[int] ):
identifier[sub_word] = identifier[word] [ identifier[start] : identifier[end] ]
identifier[weight] = literal[int] keyword[if] identifier[flatten] keyword[else] ( identifier[table] [ identifier[sub_word] ][ identifier[word] [ identifier[end] ]]+ literal[int] )
identifier[table] [ identifier[sub_word] ][ identifier[word] [ identifier[end] ]]= identifier[weight]
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[table] . identifier[items] ():
identifier[table] [ identifier[k] ]= identifier[dict] ( identifier[v] )
keyword[return] identifier[cls] ( identifier[dict] ( identifier[table] ))
|
def from_words(cls, words, prefix=0, flatten=False):
"""
Given an iterable of words, return the corresponding table.
The table is built by accumulating, for each word, for each sub-word,
the number of occurrences of the corresponding next character.
:param words: an iterable of strings made of alphabetic characters;
:param prefix: if greater than 0, the maximum length of the prefix to
store in the table;
:param flatten: whether to flatten the table or not;
:return: the corresponding table.
Example:
>>> Table.from_words(['abaq'])
{'a':{'b': 1, 'q': 1}, 'ab': {'a': 1}, 'aba': {'q': 1}, 'b': {'a': 1},
'ba': {'q': 1}}
"""
table = defaultdict(lambda : defaultdict(int))
for word in words:
word = '>' + word + '<'
for start in range(len(word) - 1):
max_end = len(word) - 1 if prefix <= 0 else min(start + prefix + 1, len(word) - 1)
for end in range(start + 1, max_end + 1):
sub_word = word[start:end]
weight = 1 if flatten else table[sub_word][word[end]] + 1
table[sub_word][word[end]] = weight # depends on [control=['for'], data=['end']] # depends on [control=['for'], data=['start']] # depends on [control=['for'], data=['word']]
for (k, v) in table.items():
table[k] = dict(v) # depends on [control=['for'], data=[]]
return cls(dict(table))
|
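Note that the doctest above shows only a subset of the resulting table; the full table also contains entries for the '>' start marker and '<' end marker that the loop wraps around each word. The sketch below restates the accumulation loop without the class wrapper and shows one way the counts could drive weighted next-character sampling; `build_table` and `next_char` are illustrative names, not part of the original API.

import random
from collections import defaultdict

def build_table(words, prefix=0, flatten=False):
    """Plain-dict restatement of the accumulation loop above (no class wrapper)."""
    table = defaultdict(lambda: defaultdict(int))
    for word in words:
        word = '>' + word + '<'
        for start in range(len(word) - 1):
            max_end = len(word) - 1 if prefix <= 0 else min(start + prefix + 1, len(word) - 1)
            for end in range(start + 1, max_end + 1):
                sub = word[start:end]
                table[sub][word[end]] = 1 if flatten else table[sub][word[end]] + 1
    return {k: dict(v) for k, v in table.items()}

table = build_table(['abaq'])
print(table['a'])    # {'b': 1, 'q': 1}
print(table['>'])    # {'a': 1} -- boundary-marker entry omitted from the doctest

def next_char(table, prefix):
    """Sample a successor character, weighted by the accumulated counts."""
    choices = table[prefix]
    return random.choices(list(choices), weights=list(choices.values()))[0]

print(next_char(table, 'ab'))  # always 'a' for this tiny corpus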
def read_vest_pickle(gname, score_dir):
"""Read in VEST scores for given gene.
Parameters
----------
gname : str
name of gene
score_dir : str
directory containing vest scores
Returns
-------
gene_vest : dict or None
dict containing vest scores for gene. Returns None if not found.
"""
vest_path = os.path.join(score_dir, gname+".vest.pickle")
if os.path.exists(vest_path):
if sys.version_info < (3,):
with open(vest_path) as handle:
gene_vest = pickle.load(handle)
else:
with open(vest_path, 'rb') as handle:
gene_vest = pickle.load(handle, encoding='latin-1')
return gene_vest
else:
return None
|
def function[read_vest_pickle, parameter[gname, score_dir]]:
constant[Read in VEST scores for given gene.
Parameters
----------
gname : str
name of gene
score_dir : str
directory containing vest scores
Returns
-------
gene_vest : dict or None
dict containing vest scores for gene. Returns None if not found.
]
variable[vest_path] assign[=] call[name[os].path.join, parameter[name[score_dir], binary_operation[name[gname] + constant[.vest.pickle]]]]
if call[name[os].path.exists, parameter[name[vest_path]]] begin[:]
if compare[name[sys].version_info less[<] tuple[[<ast.Constant object at 0x7da18eb56c80>]]] begin[:]
with call[name[open], parameter[name[vest_path]]] begin[:]
variable[gene_vest] assign[=] call[name[pickle].load, parameter[name[handle]]]
return[name[gene_vest]]
|
keyword[def] identifier[read_vest_pickle] ( identifier[gname] , identifier[score_dir] ):
literal[string]
identifier[vest_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[score_dir] , identifier[gname] + literal[string] )
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[vest_path] ):
keyword[if] identifier[sys] . identifier[version_info] <( literal[int] ,):
keyword[with] identifier[open] ( identifier[vest_path] ) keyword[as] identifier[handle] :
identifier[gene_vest] = identifier[pickle] . identifier[load] ( identifier[handle] )
keyword[else] :
keyword[with] identifier[open] ( identifier[vest_path] , literal[string] ) keyword[as] identifier[handle] :
identifier[gene_vest] = identifier[pickle] . identifier[load] ( identifier[handle] , identifier[encoding] = literal[string] )
keyword[return] identifier[gene_vest]
keyword[else] :
keyword[return] keyword[None]
|
def read_vest_pickle(gname, score_dir):
"""Read in VEST scores for given gene.
Parameters
----------
gname : str
name of gene
score_dir : str
directory containing vest scores
Returns
-------
gene_vest : dict or None
dict containing vest scores for gene. Returns None if not found.
"""
vest_path = os.path.join(score_dir, gname + '.vest.pickle')
if os.path.exists(vest_path):
if sys.version_info < (3,):
with open(vest_path) as handle:
gene_vest = pickle.load(handle) # depends on [control=['with'], data=['handle']] # depends on [control=['if'], data=[]]
else:
with open(vest_path, 'rb') as handle:
gene_vest = pickle.load(handle, encoding='latin-1') # depends on [control=['with'], data=['handle']]
return gene_vest # depends on [control=['if'], data=[]]
else:
return None
|
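A small round-trip sketch of the loading convention above, assuming the function is in scope and nothing beyond the standard library: a score dict is pickled under the expected `<gene>.vest.pickle` name and read back. Gene names and scores are made up; protocol 2 is chosen only so the file would also be readable from Python 2, and `encoding='latin-1'` in the loader handles pickles written by Python 2.

import os
import pickle
import sys
import tempfile

score_dir = tempfile.mkdtemp()
with open(os.path.join(score_dir, 'TP53.vest.pickle'), 'wb') as handle:
    pickle.dump({'c.215C>G': 0.91}, handle, protocol=2)  # hypothetical score

print(read_vest_pickle('TP53', score_dir))   # {'c.215C>G': 0.91}
print(read_vest_pickle('BRCA2', score_dir))  # None (no such file)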
def register_repeating_metric(self, metric_name, frequency, getter):
'''Record hits to a metric at a specified interval.
Args:
metric_name: The name of the metric to record with Carbon.
frequency: The frequency with which to poll the getter and record the value with Carbon.
getter: A function which takes no arguments and returns the value to record with Carbon.
Returns:
RepeatingMetricHandle instance. Call .stop() on it to stop recording the metric.
'''
l = task.LoopingCall(self._publish_repeating_metric, metric_name, getter)
repeating_metric_handle = RepeatingMetricHandle(l, frequency)
self._repeating_metric_handles.append(repeating_metric_handle)
if self.running:
repeating_metric_handle.start()
return repeating_metric_handle
|
def function[register_repeating_metric, parameter[self, metric_name, frequency, getter]]:
constant[Record hits to a metric at a specified interval.
Args:
metric_name: The name of the metric to record with Carbon.
frequency: The frequency with which to poll the getter and record the value with Carbon.
getter: A function which takes no arguments and returns the value to record with Carbon.
Returns:
RepeatingMetricHandle instance. Call .stop() on it to stop recording the metric.
]
variable[l] assign[=] call[name[task].LoopingCall, parameter[name[self]._publish_repeating_metric, name[metric_name], name[getter]]]
variable[repeating_metric_handle] assign[=] call[name[RepeatingMetricHandle], parameter[name[l], name[frequency]]]
call[name[self]._repeating_metric_handles.append, parameter[name[repeating_metric_handle]]]
if name[self].running begin[:]
call[name[repeating_metric_handle].start, parameter[]]
return[name[repeating_metric_handle]]
|
keyword[def] identifier[register_repeating_metric] ( identifier[self] , identifier[metric_name] , identifier[frequency] , identifier[getter] ):
literal[string]
identifier[l] = identifier[task] . identifier[LoopingCall] ( identifier[self] . identifier[_publish_repeating_metric] , identifier[metric_name] , identifier[getter] )
identifier[repeating_metric_handle] = identifier[RepeatingMetricHandle] ( identifier[l] , identifier[frequency] )
identifier[self] . identifier[_repeating_metric_handles] . identifier[append] ( identifier[repeating_metric_handle] )
keyword[if] identifier[self] . identifier[running] :
identifier[repeating_metric_handle] . identifier[start] ()
keyword[return] identifier[repeating_metric_handle]
|
def register_repeating_metric(self, metric_name, frequency, getter):
"""Record hits to a metric at a specified interval.
Args:
metric_name: The name of the metric to record with Carbon.
frequency: The frequency with which to poll the getter and record the value with Carbon.
getter: A function which takes no arguments and returns the value to record with Carbon.
Returns:
RepeatingMetricHandle instance. Call .stop() on it to stop recording the metric.
"""
l = task.LoopingCall(self._publish_repeating_metric, metric_name, getter)
repeating_metric_handle = RepeatingMetricHandle(l, frequency)
self._repeating_metric_handles.append(repeating_metric_handle)
if self.running:
repeating_metric_handle.start() # depends on [control=['if'], data=[]]
return repeating_metric_handle
|
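The method above wraps a `twisted.internet.task.LoopingCall` in a handle so the metric can be started and stopped along with the service. A minimal sketch of that underlying pattern, assuming Twisted is installed; the metric name, getter and `publish` stand-in are hypothetical, and the real code records the value with Carbon instead of printing it.

from twisted.internet import reactor, task

def queue_depth():
    # stand-in getter; in real use this returns the value to report
    return 17

def publish(metric_name, getter):
    print(metric_name, getter())

# A LoopingCall that polls the getter at a fixed frequency, like the method above.
loop = task.LoopingCall(publish, 'app.queue_depth', queue_depth)
loop.start(5.0)                      # poll every 5 seconds (fires immediately too)
reactor.callLater(12, loop.stop)     # equivalent of the handle's .stop()
reactor.callLater(13, reactor.stop)
reactor.run()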
def getLocationRepresentation(self):
"""
Get the full population representation of the location layer.
"""
activeCells = np.array([], dtype="uint32")
totalPrevCells = 0
for module in self.L6aModules:
activeCells = np.append(activeCells,
module.getActiveCells() + totalPrevCells)
totalPrevCells += module.numberOfCells()
return activeCells
|
def function[getLocationRepresentation, parameter[self]]:
constant[
Get the full population representation of the location layer.
]
variable[activeCells] assign[=] call[name[np].array, parameter[list[[]]]]
variable[totalPrevCells] assign[=] constant[0]
for taget[name[module]] in starred[name[self].L6aModules] begin[:]
variable[activeCells] assign[=] call[name[np].append, parameter[name[activeCells], binary_operation[call[name[module].getActiveCells, parameter[]] + name[totalPrevCells]]]]
<ast.AugAssign object at 0x7da1b08b0100>
return[name[activeCells]]
|
keyword[def] identifier[getLocationRepresentation] ( identifier[self] ):
literal[string]
identifier[activeCells] = identifier[np] . identifier[array] ([], identifier[dtype] = literal[string] )
identifier[totalPrevCells] = literal[int]
keyword[for] identifier[module] keyword[in] identifier[self] . identifier[L6aModules] :
identifier[activeCells] = identifier[np] . identifier[append] ( identifier[activeCells] ,
identifier[module] . identifier[getActiveCells] ()+ identifier[totalPrevCells] )
identifier[totalPrevCells] += identifier[module] . identifier[numberOfCells] ()
keyword[return] identifier[activeCells]
|
def getLocationRepresentation(self):
"""
Get the full population representation of the location layer.
"""
activeCells = np.array([], dtype='uint32')
totalPrevCells = 0
for module in self.L6aModules:
activeCells = np.append(activeCells, module.getActiveCells() + totalPrevCells)
totalPrevCells += module.numberOfCells() # depends on [control=['for'], data=['module']]
return activeCells
|
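Each module reports cell indices local to itself, so the loop above offsets them by the total number of cells in all preceding modules to form a single population-wide index space. A self-contained sketch with a stand-in module class (the class and its cell counts are made up):

import numpy as np

class FakeModule:
    """Stand-in for an L6a module: a fixed cell count and some active cells."""
    def __init__(self, n, active):
        self._n = n
        self._active = np.array(active, dtype='uint32')
    def numberOfCells(self):
        return self._n
    def getActiveCells(self):
        return self._active

modules = [FakeModule(100, [3, 7]), FakeModule(100, [0, 99])]
active, offset = np.array([], dtype='uint32'), 0
for m in modules:
    # shift the module-local indices past all cells of earlier modules
    active = np.append(active, m.getActiveCells() + offset)
    offset += m.numberOfCells()
print(active)  # [  3   7 100 199]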
def step(sampler, x, delta, fraction=None, tries=0):
"""Sample a new feasible point from the point `x` in direction `delta`."""
prob = sampler.problem
valid = ((np.abs(delta) > sampler.feasibility_tol) &
np.logical_not(prob.variable_fixed))
# permissible alphas for staying in variable bounds
valphas = ((1.0 - sampler.bounds_tol) * prob.variable_bounds -
x)[:, valid]
valphas = (valphas / delta[valid]).flatten()
if prob.bounds.shape[0] > 0:
# permissible alphas for staying in constraint bounds
ineqs = prob.inequalities.dot(delta)
valid = np.abs(ineqs) > sampler.feasibility_tol
balphas = ((1.0 - sampler.bounds_tol) * prob.bounds -
prob.inequalities.dot(x))[:, valid]
balphas = (balphas / ineqs[valid]).flatten()
# combined alphas
alphas = np.hstack([valphas, balphas])
else:
alphas = valphas
pos_alphas = alphas[alphas > 0.0]
neg_alphas = alphas[alphas <= 0.0]
alpha_range = np.array([neg_alphas.max() if len(neg_alphas) > 0 else 0,
pos_alphas.min() if len(pos_alphas) > 0 else 0])
if fraction:
alpha = alpha_range[0] + fraction * (alpha_range[1] - alpha_range[0])
else:
alpha = np.random.uniform(alpha_range[0], alpha_range[1])
p = x + alpha * delta
# Numerical instabilities may cause bounds invalidation
# reset sampler and sample from one of the original warmup directions
# if that occurs. Also reset if we got stuck.
if (np.any(sampler._bounds_dist(p) < -sampler.bounds_tol) or
np.abs(np.abs(alpha_range).max() * delta).max() <
sampler.bounds_tol):
if tries > MAX_TRIES:
raise RuntimeError("Can not escape sampling region, model seems"
" numerically unstable :( Reporting the "
"model to "
"https://github.com/opencobra/cobrapy/issues "
"will help us to fix this :)")
LOGGER.info("found bounds infeasibility in sample, "
"resetting to center")
newdir = sampler.warmup[np.random.randint(sampler.n_warmup)]
sampler.retries += 1
return step(sampler, sampler.center, newdir - sampler.center, None,
tries + 1)
return p
|
def function[step, parameter[sampler, x, delta, fraction, tries]]:
constant[Sample a new feasible point from the point `x` in direction `delta`.]
variable[prob] assign[=] name[sampler].problem
variable[valid] assign[=] binary_operation[compare[call[name[np].abs, parameter[name[delta]]] greater[>] name[sampler].feasibility_tol] <ast.BitAnd object at 0x7da2590d6b60> call[name[np].logical_not, parameter[name[prob].variable_fixed]]]
variable[valphas] assign[=] call[binary_operation[binary_operation[binary_operation[constant[1.0] - name[sampler].bounds_tol] * name[prob].variable_bounds] - name[x]]][tuple[[<ast.Slice object at 0x7da1b01e7160>, <ast.Name object at 0x7da1b01e6950>]]]
variable[valphas] assign[=] call[binary_operation[name[valphas] / call[name[delta]][name[valid]]].flatten, parameter[]]
if compare[call[name[prob].bounds.shape][constant[0]] greater[>] constant[0]] begin[:]
variable[ineqs] assign[=] call[name[prob].inequalities.dot, parameter[name[delta]]]
variable[valid] assign[=] compare[call[name[np].abs, parameter[name[ineqs]]] greater[>] name[sampler].feasibility_tol]
variable[balphas] assign[=] call[binary_operation[binary_operation[binary_operation[constant[1.0] - name[sampler].bounds_tol] * name[prob].bounds] - call[name[prob].inequalities.dot, parameter[name[x]]]]][tuple[[<ast.Slice object at 0x7da1b0001300>, <ast.Name object at 0x7da1b0001ff0>]]]
variable[balphas] assign[=] call[binary_operation[name[balphas] / call[name[ineqs]][name[valid]]].flatten, parameter[]]
variable[alphas] assign[=] call[name[np].hstack, parameter[list[[<ast.Name object at 0x7da1b0002ec0>, <ast.Name object at 0x7da1b0003dc0>]]]]
variable[pos_alphas] assign[=] call[name[alphas]][compare[name[alphas] greater[>] constant[0.0]]]
variable[neg_alphas] assign[=] call[name[alphas]][compare[name[alphas] less_or_equal[<=] constant[0.0]]]
variable[alpha_range] assign[=] call[name[np].array, parameter[list[[<ast.IfExp object at 0x7da1b0001660>, <ast.IfExp object at 0x7da1b0001750>]]]]
if name[fraction] begin[:]
variable[alpha] assign[=] binary_operation[call[name[alpha_range]][constant[0]] + binary_operation[name[fraction] * binary_operation[call[name[alpha_range]][constant[1]] - call[name[alpha_range]][constant[0]]]]]
variable[p] assign[=] binary_operation[name[x] + binary_operation[name[alpha] * name[delta]]]
if <ast.BoolOp object at 0x7da1b0001ba0> begin[:]
if compare[name[tries] greater[>] name[MAX_TRIES]] begin[:]
<ast.Raise object at 0x7da1b0002200>
call[name[LOGGER].info, parameter[constant[found bounds infeasibility in sample, resetting to center]]]
variable[newdir] assign[=] call[name[sampler].warmup][call[name[np].random.randint, parameter[name[sampler].n_warmup]]]
<ast.AugAssign object at 0x7da1b0001060>
return[call[name[step], parameter[name[sampler], name[sampler].center, binary_operation[name[newdir] - name[sampler].center], constant[None], binary_operation[name[tries] + constant[1]]]]]
return[name[p]]
|
keyword[def] identifier[step] ( identifier[sampler] , identifier[x] , identifier[delta] , identifier[fraction] = keyword[None] , identifier[tries] = literal[int] ):
literal[string]
identifier[prob] = identifier[sampler] . identifier[problem]
identifier[valid] =(( identifier[np] . identifier[abs] ( identifier[delta] )> identifier[sampler] . identifier[feasibility_tol] )&
identifier[np] . identifier[logical_not] ( identifier[prob] . identifier[variable_fixed] ))
identifier[valphas] =(( literal[int] - identifier[sampler] . identifier[bounds_tol] )* identifier[prob] . identifier[variable_bounds] -
identifier[x] )[:, identifier[valid] ]
identifier[valphas] =( identifier[valphas] / identifier[delta] [ identifier[valid] ]). identifier[flatten] ()
keyword[if] identifier[prob] . identifier[bounds] . identifier[shape] [ literal[int] ]> literal[int] :
identifier[ineqs] = identifier[prob] . identifier[inequalities] . identifier[dot] ( identifier[delta] )
identifier[valid] = identifier[np] . identifier[abs] ( identifier[ineqs] )> identifier[sampler] . identifier[feasibility_tol]
identifier[balphas] =(( literal[int] - identifier[sampler] . identifier[bounds_tol] )* identifier[prob] . identifier[bounds] -
identifier[prob] . identifier[inequalities] . identifier[dot] ( identifier[x] ))[:, identifier[valid] ]
identifier[balphas] =( identifier[balphas] / identifier[ineqs] [ identifier[valid] ]). identifier[flatten] ()
identifier[alphas] = identifier[np] . identifier[hstack] ([ identifier[valphas] , identifier[balphas] ])
keyword[else] :
identifier[alphas] = identifier[valphas]
identifier[pos_alphas] = identifier[alphas] [ identifier[alphas] > literal[int] ]
identifier[neg_alphas] = identifier[alphas] [ identifier[alphas] <= literal[int] ]
identifier[alpha_range] = identifier[np] . identifier[array] ([ identifier[neg_alphas] . identifier[max] () keyword[if] identifier[len] ( identifier[neg_alphas] )> literal[int] keyword[else] literal[int] ,
identifier[pos_alphas] . identifier[min] () keyword[if] identifier[len] ( identifier[pos_alphas] )> literal[int] keyword[else] literal[int] ])
keyword[if] identifier[fraction] :
identifier[alpha] = identifier[alpha_range] [ literal[int] ]+ identifier[fraction] *( identifier[alpha_range] [ literal[int] ]- identifier[alpha_range] [ literal[int] ])
keyword[else] :
identifier[alpha] = identifier[np] . identifier[random] . identifier[uniform] ( identifier[alpha_range] [ literal[int] ], identifier[alpha_range] [ literal[int] ])
identifier[p] = identifier[x] + identifier[alpha] * identifier[delta]
keyword[if] ( identifier[np] . identifier[any] ( identifier[sampler] . identifier[_bounds_dist] ( identifier[p] )<- identifier[sampler] . identifier[bounds_tol] ) keyword[or]
identifier[np] . identifier[abs] ( identifier[np] . identifier[abs] ( identifier[alpha_range] ). identifier[max] ()* identifier[delta] ). identifier[max] ()<
identifier[sampler] . identifier[bounds_tol] ):
keyword[if] identifier[tries] > identifier[MAX_TRIES] :
keyword[raise] identifier[RuntimeError] ( literal[string]
literal[string]
literal[string]
literal[string]
literal[string] )
identifier[LOGGER] . identifier[info] ( literal[string]
literal[string] )
identifier[newdir] = identifier[sampler] . identifier[warmup] [ identifier[np] . identifier[random] . identifier[randint] ( identifier[sampler] . identifier[n_warmup] )]
identifier[sampler] . identifier[retries] += literal[int]
keyword[return] identifier[step] ( identifier[sampler] , identifier[sampler] . identifier[center] , identifier[newdir] - identifier[sampler] . identifier[center] , keyword[None] ,
identifier[tries] + literal[int] )
keyword[return] identifier[p]
|
def step(sampler, x, delta, fraction=None, tries=0):
"""Sample a new feasible point from the point `x` in direction `delta`."""
prob = sampler.problem
valid = (np.abs(delta) > sampler.feasibility_tol) & np.logical_not(prob.variable_fixed)
# permissible alphas for staying in variable bounds
valphas = ((1.0 - sampler.bounds_tol) * prob.variable_bounds - x)[:, valid]
valphas = (valphas / delta[valid]).flatten()
if prob.bounds.shape[0] > 0:
# permissible alphas for staying in constraint bounds
ineqs = prob.inequalities.dot(delta)
valid = np.abs(ineqs) > sampler.feasibility_tol
balphas = ((1.0 - sampler.bounds_tol) * prob.bounds - prob.inequalities.dot(x))[:, valid]
balphas = (balphas / ineqs[valid]).flatten()
# combined alphas
alphas = np.hstack([valphas, balphas]) # depends on [control=['if'], data=[]]
else:
alphas = valphas
pos_alphas = alphas[alphas > 0.0]
neg_alphas = alphas[alphas <= 0.0]
alpha_range = np.array([neg_alphas.max() if len(neg_alphas) > 0 else 0, pos_alphas.min() if len(pos_alphas) > 0 else 0])
if fraction:
alpha = alpha_range[0] + fraction * (alpha_range[1] - alpha_range[0]) # depends on [control=['if'], data=[]]
else:
alpha = np.random.uniform(alpha_range[0], alpha_range[1])
p = x + alpha * delta
# Numerical instabilities may cause bounds invalidation
# reset sampler and sample from one of the original warmup directions
# if that occurs. Also reset if we got stuck.
if np.any(sampler._bounds_dist(p) < -sampler.bounds_tol) or np.abs(np.abs(alpha_range).max() * delta).max() < sampler.bounds_tol:
if tries > MAX_TRIES:
raise RuntimeError('Can not escape sampling region, model seems numerically unstable :( Reporting the model to https://github.com/opencobra/cobrapy/issues will help us to fix this :)') # depends on [control=['if'], data=[]]
LOGGER.info('found bounds infeasibility in sample, resetting to center')
newdir = sampler.warmup[np.random.randint(sampler.n_warmup)]
sampler.retries += 1
return step(sampler, sampler.center, newdir - sampler.center, None, tries + 1) # depends on [control=['if'], data=[]]
return p
|
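The core of the step is the admissible range of step lengths `alpha`: for each bound, the distance to that bound divided by the component of `delta` pointing at it, with the most restrictive non-positive and positive candidates forming the interval. A toy illustration for box bounds only, ignoring the feasibility/bounds tolerances, fixed variables and inequality constraints handled above:

import numpy as np

x = np.array([0.5, 0.2])
delta = np.array([1.0, -1.0])
lower = np.array([0.0, 0.0])
upper = np.array([1.0, 1.0])

# candidate step lengths at which each bound would be hit
alphas = np.concatenate([(lower - x) / delta, (upper - x) / delta])
alpha_range = (alphas[alphas <= 0].max(), alphas[alphas > 0].min())
print(alpha_range)                                   # (-0.5, 0.2)
print(x + np.random.uniform(*alpha_range) * delta)   # a new point inside the box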
def json_encoder_default(obj):
"""JSON encoder function that handles some numpy types."""
if isinstance(obj, numbers.Integral) and (obj < min_safe_integer or obj > max_safe_integer):
return str(obj)
if isinstance(obj, np.integer):
return str(obj)
elif isinstance(obj, np.floating):
return float(obj)
elif isinstance(obj, np.ndarray):
return list(obj)
elif isinstance(obj, (set, frozenset)):
return list(obj)
raise TypeError
|
def function[json_encoder_default, parameter[obj]]:
constant[JSON encoder function that handles some numpy types.]
if <ast.BoolOp object at 0x7da18ede41c0> begin[:]
return[call[name[str], parameter[name[obj]]]]
if call[name[isinstance], parameter[name[obj], name[np].integer]] begin[:]
return[call[name[str], parameter[name[obj]]]]
<ast.Raise object at 0x7da18ede70d0>
|
keyword[def] identifier[json_encoder_default] ( identifier[obj] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[obj] , identifier[numbers] . identifier[Integral] ) keyword[and] ( identifier[obj] < identifier[min_safe_integer] keyword[or] identifier[obj] > identifier[max_safe_integer] ):
keyword[return] identifier[str] ( identifier[obj] )
keyword[if] identifier[isinstance] ( identifier[obj] , identifier[np] . identifier[integer] ):
keyword[return] identifier[str] ( identifier[obj] )
keyword[elif] identifier[isinstance] ( identifier[obj] , identifier[np] . identifier[floating] ):
keyword[return] identifier[float] ( identifier[obj] )
keyword[elif] identifier[isinstance] ( identifier[obj] , identifier[np] . identifier[ndarray] ):
keyword[return] identifier[list] ( identifier[obj] )
keyword[elif] identifier[isinstance] ( identifier[obj] ,( identifier[set] , identifier[frozenset] )):
keyword[return] identifier[list] ( identifier[obj] )
keyword[raise] identifier[TypeError]
|
def json_encoder_default(obj):
"""JSON encoder function that handles some numpy types."""
if isinstance(obj, numbers.Integral) and (obj < min_safe_integer or obj > max_safe_integer):
return str(obj) # depends on [control=['if'], data=[]]
if isinstance(obj, np.integer):
return str(obj) # depends on [control=['if'], data=[]]
elif isinstance(obj, np.floating):
return float(obj) # depends on [control=['if'], data=[]]
elif isinstance(obj, np.ndarray):
return list(obj) # depends on [control=['if'], data=[]]
elif isinstance(obj, (set, frozenset)):
return list(obj) # depends on [control=['if'], data=[]]
raise TypeError
|
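Usage sketch for the encoder hook above with `json.dumps(..., default=...)`. The safe-integer limits are assumed to be module-level constants mirroring JavaScript's exactly-representable integer range and are redefined here only so the snippet runs on its own; note that numpy integers, including those inside a converted array, are rendered as strings by this hook.

import json
import numbers
import numpy as np

min_safe_integer = -(2**53 - 1)   # assumed module constants
max_safe_integer = 2**53 - 1

payload = {
    'count': np.int64(3),         # numpy integer -> "3"
    'mean': np.float32(0.25),     # numpy float   -> 0.25
    'ids': np.arange(3),          # ndarray -> list (elements again via the hook)
    'tags': {'a', 'b'},           # set -> list (order is not guaranteed)
    'big': np.int64(2**60),       # beyond the safe range -> string
}
print(json.dumps(payload, default=json_encoder_default, sort_keys=True))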
def update_module_types():
"""
Download the repositories for all of the firmware_module_type records and
update them using the `module.json` files from the repositories themselves.
Currently only works for git repositories.
"""
local_url = config["local_server"]["url"]
server = Server(local_url)
db = server[FIRMWARE_MODULE_TYPE]
temp_folder = mkdtemp()
for _id in db:
if _id.startswith("_"):
continue
obj = db[_id]
new_obj = update_record(FirmwareModuleType(obj), temp_folder)
new_obj["_rev"] = obj["_rev"]
if new_obj != obj:
db[_id] = new_obj
rmtree(temp_folder)
|
def function[update_module_types, parameter[]]:
constant[
Download the repositories for all of the firmware_module_type records and
update them using the `module.json` files from the repositories themselves.
Currently only works for git repositories.
]
variable[local_url] assign[=] call[call[name[config]][constant[local_server]]][constant[url]]
variable[server] assign[=] call[name[Server], parameter[name[local_url]]]
variable[db] assign[=] call[name[server]][name[FIRMWARE_MODULE_TYPE]]
variable[temp_folder] assign[=] call[name[mkdtemp], parameter[]]
for taget[name[_id]] in starred[name[db]] begin[:]
if call[name[_id].startswith, parameter[constant[_]]] begin[:]
continue
variable[obj] assign[=] call[name[db]][name[_id]]
variable[new_obj] assign[=] call[name[update_record], parameter[call[name[FirmwareModuleType], parameter[name[obj]]], name[temp_folder]]]
call[name[new_obj]][constant[_rev]] assign[=] call[name[obj]][constant[_rev]]
if compare[name[new_obj] not_equal[!=] name[obj]] begin[:]
call[name[db]][name[_id]] assign[=] name[new_obj]
call[name[rmtree], parameter[name[temp_folder]]]
|
keyword[def] identifier[update_module_types] ():
literal[string]
identifier[local_url] = identifier[config] [ literal[string] ][ literal[string] ]
identifier[server] = identifier[Server] ( identifier[local_url] )
identifier[db] = identifier[server] [ identifier[FIRMWARE_MODULE_TYPE] ]
identifier[temp_folder] = identifier[mkdtemp] ()
keyword[for] identifier[_id] keyword[in] identifier[db] :
keyword[if] identifier[_id] . identifier[startswith] ( literal[string] ):
keyword[continue]
identifier[obj] = identifier[db] [ identifier[_id] ]
identifier[new_obj] = identifier[update_record] ( identifier[FirmwareModuleType] ( identifier[obj] ), identifier[temp_folder] )
identifier[new_obj] [ literal[string] ]= identifier[obj] [ literal[string] ]
keyword[if] identifier[new_obj] != identifier[obj] :
identifier[db] [ identifier[_id] ]= identifier[new_obj]
identifier[rmtree] ( identifier[temp_folder] )
|
def update_module_types():
"""
Download the repositories for all of the firmware_module_type records and
update them using the `module.json` files from the repositories themselves.
Currently only works for git repositories.
"""
local_url = config['local_server']['url']
server = Server(local_url)
db = server[FIRMWARE_MODULE_TYPE]
temp_folder = mkdtemp()
for _id in db:
if _id.startswith('_'):
continue # depends on [control=['if'], data=[]]
obj = db[_id]
new_obj = update_record(FirmwareModuleType(obj), temp_folder)
new_obj['_rev'] = obj['_rev']
if new_obj != obj:
db[_id] = new_obj # depends on [control=['if'], data=['new_obj']] # depends on [control=['for'], data=['_id']]
rmtree(temp_folder)
|
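The loop above skips CouchDB design documents (ids starting with '_'), carries the stored `_rev` onto the refreshed record, and writes back only when something actually changed. A dict-based stand-in for the database, with a hypothetical `refresh` in place of `update_record`, illustrating just that control flow:

db = {
    '_design/types': {'views': {}},
    'gc-module': {'_rev': '3-abc', 'repository': 'https://example.org/gc.git', 'pin': 9},
}

def refresh(record):
    # stand-in for update_record(): pretend module.json now reports pin 10
    new = dict(record)
    new['pin'] = 10
    return new

for _id, obj in list(db.items()):
    if _id.startswith('_'):
        continue                      # leave design documents alone
    new_obj = refresh(obj)
    new_obj['_rev'] = obj['_rev']     # keep the revision so the write is accepted
    if new_obj != obj:
        db[_id] = new_obj             # write back only on a real change

print(db['gc-module'])  # pin updated, _rev preserved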
def add(self, *nonterminals):
# type: (Iterable[Type[Nonterminal]]) -> None
"""
Add nonterminals into the set.
:param nonterminals: Nonterminals to insert.
:raise NotNonterminalException: If the object doesn't inherit from Nonterminal class.
"""
for nonterm in nonterminals:
if nonterm in self:
continue
_NonterminalSet._control_nonterminal(nonterm)
super().add(nonterm)
self._assign_map[nonterm] = set()
|
def function[add, parameter[self]]:
constant[
Add nonterminals into the set.
:param nonterminals: Nonterminals to insert.
:raise NotNonterminalException: If the object doesn't inherit from Nonterminal class.
]
for taget[name[nonterm]] in starred[name[nonterminals]] begin[:]
if compare[name[nonterm] in name[self]] begin[:]
continue
call[name[_NonterminalSet]._control_nonterminal, parameter[name[nonterm]]]
call[call[name[super], parameter[]].add, parameter[name[nonterm]]]
call[name[self]._assign_map][name[nonterm]] assign[=] call[name[set], parameter[]]
|
keyword[def] identifier[add] ( identifier[self] ,* identifier[nonterminals] ):
literal[string]
keyword[for] identifier[nonterm] keyword[in] identifier[nonterminals] :
keyword[if] identifier[nonterm] keyword[in] identifier[self] :
keyword[continue]
identifier[_NonterminalSet] . identifier[_control_nonterminal] ( identifier[nonterm] )
identifier[super] (). identifier[add] ( identifier[nonterm] )
identifier[self] . identifier[_assign_map] [ identifier[nonterm] ]= identifier[set] ()
|
def add(self, *nonterminals):
# type: (Iterable[Type[Nonterminal]]) -> None
"\n Add nonterminals into the set.\n :param nonterminals: Nonterminals to insert.\n :raise NotNonterminalException: If the object doesn't inherit from Nonterminal class.\n "
for nonterm in nonterminals:
if nonterm in self:
continue # depends on [control=['if'], data=[]]
_NonterminalSet._control_nonterminal(nonterm)
super().add(nonterm)
self._assign_map[nonterm] = set() # depends on [control=['for'], data=['nonterm']]
|
def _make_it_so(self, command, calls, *args, **kwargs):
""" Perform some error-checked XMLRPC calls.
"""
observer = kwargs.pop('observer', False)
args = (self._fields["hash"],) + args
try:
for call in calls:
self._engine.LOG.debug("%s%s torrent #%s (%s)" % (
command[0].upper(), command[1:], self._fields["hash"], call))
if call.startswith(':') or call[:2].endswith('.'):
namespace = self._engine._rpc
else:
namespace = self._engine._rpc.d
result = getattr(namespace, call.lstrip(':'))(*args)
if observer:
observer(result)
except xmlrpc.ERRORS as exc:
raise error.EngineError("While %s torrent #%s: %s" % (command, self._fields["hash"], exc))
|
def function[_make_it_so, parameter[self, command, calls]]:
constant[ Perform some error-checked XMLRPC calls.
]
variable[observer] assign[=] call[name[kwargs].pop, parameter[constant[observer], constant[False]]]
variable[args] assign[=] binary_operation[tuple[[<ast.Subscript object at 0x7da2041d8880>]] + name[args]]
<ast.Try object at 0x7da2041d9810>
|
keyword[def] identifier[_make_it_so] ( identifier[self] , identifier[command] , identifier[calls] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[observer] = identifier[kwargs] . identifier[pop] ( literal[string] , keyword[False] )
identifier[args] =( identifier[self] . identifier[_fields] [ literal[string] ],)+ identifier[args]
keyword[try] :
keyword[for] identifier[call] keyword[in] identifier[calls] :
identifier[self] . identifier[_engine] . identifier[LOG] . identifier[debug] ( literal[string] %(
identifier[command] [ literal[int] ]. identifier[upper] (), identifier[command] [ literal[int] :], identifier[self] . identifier[_fields] [ literal[string] ], identifier[call] ))
keyword[if] identifier[call] . identifier[startswith] ( literal[string] ) keyword[or] identifier[call] [: literal[int] ]. identifier[endswith] ( literal[string] ):
identifier[namespace] = identifier[self] . identifier[_engine] . identifier[_rpc]
keyword[else] :
identifier[namespace] = identifier[self] . identifier[_engine] . identifier[_rpc] . identifier[d]
identifier[result] = identifier[getattr] ( identifier[namespace] , identifier[call] . identifier[lstrip] ( literal[string] ))(* identifier[args] )
keyword[if] identifier[observer] :
identifier[observer] ( identifier[result] )
keyword[except] identifier[xmlrpc] . identifier[ERRORS] keyword[as] identifier[exc] :
keyword[raise] identifier[error] . identifier[EngineError] ( literal[string] %( identifier[command] , identifier[self] . identifier[_fields] [ literal[string] ], identifier[exc] ))
|
def _make_it_so(self, command, calls, *args, **kwargs):
""" Perform some error-checked XMLRPC calls.
"""
observer = kwargs.pop('observer', False)
args = (self._fields['hash'],) + args
try:
for call in calls:
self._engine.LOG.debug('%s%s torrent #%s (%s)' % (command[0].upper(), command[1:], self._fields['hash'], call))
if call.startswith(':') or call[:2].endswith('.'):
namespace = self._engine._rpc # depends on [control=['if'], data=[]]
else:
namespace = self._engine._rpc.d
result = getattr(namespace, call.lstrip(':'))(*args)
if observer:
observer(result) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['call']] # depends on [control=['try'], data=[]]
except xmlrpc.ERRORS as exc:
raise error.EngineError('While %s torrent #%s: %s' % (command, self._fields['hash'], exc)) # depends on [control=['except'], data=['exc']]
|
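A minimal sketch of how `_make_it_so` above is typically driven from sibling methods of the same torrent proxy class; the command labels and XMLRPC call names below are illustrative assumptions, not a statement of the real rTorrent API surface.

# Hypothetical wrapper methods on the same class as _make_it_so above.
def stop(self):
    """Stop and close the torrent (illustrative call names)."""
    self._make_it_so("stopping", ["stop", "close"])

def set_custom(self, key, value):
    """Extra positional args are appended after the infohash by _make_it_so."""
    self._make_it_so("setting custom field on", ["custom.set"], key, value)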
def beta(self):
"""
Courant-Snyder parameter :math:`\\beta`.
"""
beta = _np.sqrt(self.sx)/self.emit
return beta
|
def function[beta, parameter[self]]:
constant[
Courant-Snyder parameter :math:`\beta`.
]
variable[beta] assign[=] binary_operation[call[name[_np].sqrt, parameter[name[self].sx]] / name[self].emit]
return[name[beta]]
|
keyword[def] identifier[beta] ( identifier[self] ):
literal[string]
identifier[beta] = identifier[_np] . identifier[sqrt] ( identifier[self] . identifier[sx] )/ identifier[self] . identifier[emit]
keyword[return] identifier[beta]
|
def beta(self):
"""
Courant-Snyder parameter :math:`\\beta`.
"""
beta = _np.sqrt(self.sx) / self.emit
return beta
|
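For orientation, a standalone check of the same arithmetic used above, beta = sqrt(sx) / emit. The numbers and units are made up; what `sx` and `emit` really hold (presumably the beam's second moment and emittance) is defined elsewhere in the class, which this row does not show.

import numpy as _np

sx = 1.0e-8     # assumed <x^2> second moment, m^2
emit = 1.0e-6   # assumed geometric emittance, m*rad
beta = _np.sqrt(sx) / emit
print(beta)     # 100.0, i.e. sqrt(1e-8) / 1e-6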
def pv_count(self):
"""
Returns the physical volume count.
"""
self.open()
count = lvm_vg_get_pv_count(self.handle)
self.close()
return count
|
def function[pv_count, parameter[self]]:
constant[
Returns the physical volume count.
]
call[name[self].open, parameter[]]
variable[count] assign[=] call[name[lvm_vg_get_pv_count], parameter[name[self].handle]]
call[name[self].close, parameter[]]
return[name[count]]
|
keyword[def] identifier[pv_count] ( identifier[self] ):
literal[string]
identifier[self] . identifier[open] ()
identifier[count] = identifier[lvm_vg_get_pv_count] ( identifier[self] . identifier[handle] )
identifier[self] . identifier[close] ()
keyword[return] identifier[count]
|
def pv_count(self):
"""
Returns the physical volume count.
"""
self.open()
count = lvm_vg_get_pv_count(self.handle)
self.close()
return count
|
def prepare_display(self):
"""Prepare the display.
This method gets called by the canvas layout/draw engine after being triggered by a call to `update`.
When data or display parameters change, the internal state of the line plot gets updated. This method takes
that internal state and updates the child canvas items.
This method is always run on a thread and should be fast but doesn't need to be instant.
"""
displayed_dimensional_calibration = self.__displayed_dimensional_calibration
intensity_calibration = self.__intensity_calibration
calibration_style = self.__calibration_style
y_min = self.__y_min
y_max = self.__y_max
y_style = self.__y_style
left_channel = self.__left_channel
right_channel = self.__right_channel
scalar_xdata_list = None
def calculate_scalar_xdata(xdata_list):
scalar_xdata_list = list()
for xdata in xdata_list:
if xdata:
scalar_data = Image.scalar_from_array(xdata.data)
scalar_data = Image.convert_to_grayscale(scalar_data)
scalar_intensity_calibration = calibration_style.get_intensity_calibration(xdata)
scalar_dimensional_calibrations = calibration_style.get_dimensional_calibrations(xdata.dimensional_shape, xdata.dimensional_calibrations)
if displayed_dimensional_calibration.units == scalar_dimensional_calibrations[-1].units and intensity_calibration.units == scalar_intensity_calibration.units:
# the data needs to have an intensity scale matching intensity_calibration. convert the data to use the common scale.
scale = scalar_intensity_calibration.scale / intensity_calibration.scale
offset = (scalar_intensity_calibration.offset - intensity_calibration.offset) / intensity_calibration.scale
scalar_data = scalar_data * scale + offset
scalar_xdata_list.append(DataAndMetadata.new_data_and_metadata(scalar_data, scalar_intensity_calibration, scalar_dimensional_calibrations))
else:
scalar_xdata_list.append(None)
return scalar_xdata_list
data_scale = self.__data_scale
xdata_list = self.__xdata_list
if data_scale is not None:
# update the line graph data
left_channel = left_channel if left_channel is not None else 0
right_channel = right_channel if right_channel is not None else data_scale
left_channel, right_channel = min(left_channel, right_channel), max(left_channel, right_channel)
scalar_data_list = None
if y_min is None or y_max is None and len(xdata_list) > 0:
scalar_xdata_list = calculate_scalar_xdata(xdata_list)
scalar_data_list = [xdata.data if xdata else None for xdata in scalar_xdata_list]
calibrated_data_min, calibrated_data_max, y_ticker = LineGraphCanvasItem.calculate_y_axis(scalar_data_list, y_min, y_max, intensity_calibration, y_style)
axes = LineGraphCanvasItem.LineGraphAxes(data_scale, calibrated_data_min, calibrated_data_max, left_channel, right_channel, displayed_dimensional_calibration, intensity_calibration, y_style, y_ticker)
if scalar_xdata_list is None:
if len(xdata_list) > 0:
scalar_xdata_list = calculate_scalar_xdata(xdata_list)
else:
scalar_xdata_list = list()
if self.__display_frame_rate_id:
Utility.fps_tick("prepare_"+self.__display_frame_rate_id)
colors = ('#1E90FF', "#F00", "#0F0", "#00F", "#FF0", "#0FF", "#F0F", "#888", "#800", "#080", "#008", "#CCC", "#880", "#088", "#808", "#964B00")
display_layers = self.__display_layers
if len(display_layers) == 0:
index = 0
for scalar_index, scalar_xdata in enumerate(scalar_xdata_list):
if scalar_xdata and scalar_xdata.is_data_1d:
if index < 16:
display_layers.append({"fill_color": colors[index] if index == 0 else None, "stroke_color": colors[index] if index > 0 else None, "data_index": scalar_index})
index += 1
if scalar_xdata and scalar_xdata.is_data_2d:
for row in range(min(scalar_xdata.data_shape[-1], 16)):
if index < 16:
display_layers.append({"fill_color": colors[index] if index == 0 else None, "stroke_color": colors[index] if index > 0 else None, "data_index": scalar_index, "data_row": row})
index += 1
display_layer_count = len(display_layers)
self.___has_valid_drawn_graph_data = False
for index, display_layer in enumerate(display_layers):
if index < 16:
fill_color = display_layer.get("fill_color")
stroke_color = display_layer.get("stroke_color")
data_index = display_layer.get("data_index", 0)
data_row = display_layer.get("data_row", 0)
if 0 <= data_index < len(scalar_xdata_list):
scalar_xdata = scalar_xdata_list[data_index]
if scalar_xdata:
data_row = max(0, min(scalar_xdata.dimensional_shape[0] - 1, data_row))
intensity_calibration = scalar_xdata.intensity_calibration
displayed_dimensional_calibration = scalar_xdata.dimensional_calibrations[-1]
if scalar_xdata.is_data_2d:
scalar_data = scalar_xdata.data[data_row:data_row + 1, :].reshape((scalar_xdata.dimensional_shape[-1],))
scalar_xdata = DataAndMetadata.new_data_and_metadata(scalar_data, intensity_calibration, [displayed_dimensional_calibration])
line_graph_canvas_item = self.__line_graph_stack.canvas_items[display_layer_count - (index + 1)]
line_graph_canvas_item.set_fill_color(fill_color)
line_graph_canvas_item.set_stroke_color(stroke_color)
line_graph_canvas_item.set_axes(axes)
line_graph_canvas_item.set_uncalibrated_xdata(scalar_xdata)
self.___has_valid_drawn_graph_data = scalar_xdata is not None
for index in range(len(display_layers), 16):
line_graph_canvas_item = self.__line_graph_stack.canvas_items[index]
line_graph_canvas_item.set_axes(None)
line_graph_canvas_item.set_uncalibrated_xdata(None)
legend_position = self.__legend_position
LegendEntry = collections.namedtuple("LegendEntry", ["label", "fill_color", "stroke_color"])
legend_entries = list()
for index, display_layer in enumerate(self.__display_layers):
data_index = display_layer.get("data_index", None)
data_row = display_layer.get("data_row", None)
label = display_layer.get("label", str())
if not label:
if data_index is not None and data_row is not None:
label = "Data {}:{}".format(data_index, data_row)
elif data_index is not None:
label = "Data {}".format(data_index)
else:
label = "Unknown"
fill_color = display_layer.get("fill_color")
stroke_color = display_layer.get("stroke_color")
legend_entries.append(LegendEntry(label, fill_color, stroke_color))
self.__update_canvas_items(axes, legend_position, legend_entries)
else:
for line_graph_canvas_item in self.__line_graph_stack.canvas_items:
line_graph_canvas_item.set_axes(None)
line_graph_canvas_item.set_uncalibrated_xdata(None)
self.__line_graph_xdata_list = list()
self.__update_canvas_items(LineGraphCanvasItem.LineGraphAxes(), None, None)
|
def function[prepare_display, parameter[self]]:
constant[Prepare the display.
This method gets called by the canvas layout/draw engine after being triggered by a call to `update`.
When data or display parameters change, the internal state of the line plot gets updated. This method takes
that internal state and updates the child canvas items.
This method is always run on a thread and should be fast but doesn't need to be instant.
]
variable[displayed_dimensional_calibration] assign[=] name[self].__displayed_dimensional_calibration
variable[intensity_calibration] assign[=] name[self].__intensity_calibration
variable[calibration_style] assign[=] name[self].__calibration_style
variable[y_min] assign[=] name[self].__y_min
variable[y_max] assign[=] name[self].__y_max
variable[y_style] assign[=] name[self].__y_style
variable[left_channel] assign[=] name[self].__left_channel
variable[right_channel] assign[=] name[self].__right_channel
variable[scalar_xdata_list] assign[=] constant[None]
def function[calculate_scalar_xdata, parameter[xdata_list]]:
variable[scalar_xdata_list] assign[=] call[name[list], parameter[]]
for taget[name[xdata]] in starred[name[xdata_list]] begin[:]
if name[xdata] begin[:]
variable[scalar_data] assign[=] call[name[Image].scalar_from_array, parameter[name[xdata].data]]
variable[scalar_data] assign[=] call[name[Image].convert_to_grayscale, parameter[name[scalar_data]]]
variable[scalar_intensity_calibration] assign[=] call[name[calibration_style].get_intensity_calibration, parameter[name[xdata]]]
variable[scalar_dimensional_calibrations] assign[=] call[name[calibration_style].get_dimensional_calibrations, parameter[name[xdata].dimensional_shape, name[xdata].dimensional_calibrations]]
if <ast.BoolOp object at 0x7da1b0e9dff0> begin[:]
variable[scale] assign[=] binary_operation[name[scalar_intensity_calibration].scale / name[intensity_calibration].scale]
variable[offset] assign[=] binary_operation[binary_operation[name[scalar_intensity_calibration].offset - name[intensity_calibration].offset] / name[intensity_calibration].scale]
variable[scalar_data] assign[=] binary_operation[binary_operation[name[scalar_data] * name[scale]] + name[offset]]
call[name[scalar_xdata_list].append, parameter[call[name[DataAndMetadata].new_data_and_metadata, parameter[name[scalar_data], name[scalar_intensity_calibration], name[scalar_dimensional_calibrations]]]]]
return[name[scalar_xdata_list]]
variable[data_scale] assign[=] name[self].__data_scale
variable[xdata_list] assign[=] name[self].__xdata_list
if compare[name[data_scale] is_not constant[None]] begin[:]
variable[left_channel] assign[=] <ast.IfExp object at 0x7da1b0e9d570>
variable[right_channel] assign[=] <ast.IfExp object at 0x7da1b0e9d3f0>
<ast.Tuple object at 0x7da1b0e9d360> assign[=] tuple[[<ast.Call object at 0x7da1b0e9d210>, <ast.Call object at 0x7da1b0e9d000>]]
variable[scalar_data_list] assign[=] constant[None]
if <ast.BoolOp object at 0x7da1b0e9cf70> begin[:]
variable[scalar_xdata_list] assign[=] call[name[calculate_scalar_xdata], parameter[name[xdata_list]]]
variable[scalar_data_list] assign[=] <ast.ListComp object at 0x7da1b0e9cd00>
<ast.Tuple object at 0x7da1b0e9c9a0> assign[=] call[name[LineGraphCanvasItem].calculate_y_axis, parameter[name[scalar_data_list], name[y_min], name[y_max], name[intensity_calibration], name[y_style]]]
variable[axes] assign[=] call[name[LineGraphCanvasItem].LineGraphAxes, parameter[name[data_scale], name[calibrated_data_min], name[calibrated_data_max], name[left_channel], name[right_channel], name[displayed_dimensional_calibration], name[intensity_calibration], name[y_style], name[y_ticker]]]
if compare[name[scalar_xdata_list] is constant[None]] begin[:]
if compare[call[name[len], parameter[name[xdata_list]]] greater[>] constant[0]] begin[:]
variable[scalar_xdata_list] assign[=] call[name[calculate_scalar_xdata], parameter[name[xdata_list]]]
if name[self].__display_frame_rate_id begin[:]
call[name[Utility].fps_tick, parameter[binary_operation[constant[prepare_] + name[self].__display_frame_rate_id]]]
variable[colors] assign[=] tuple[[<ast.Constant object at 0x7da1b0e9ed40>, <ast.Constant object at 0x7da1b0e9eda0>, <ast.Constant object at 0x7da1b0e9f130>, <ast.Constant object at 0x7da1b0e9ee30>, <ast.Constant object at 0x7da1b0e9f0d0>, <ast.Constant object at 0x7da1b0e9ee00>, <ast.Constant object at 0x7da1b0e9f100>, <ast.Constant object at 0x7da1b0e9f160>, <ast.Constant object at 0x7da1b0e9edd0>, <ast.Constant object at 0x7da1b0e9f190>, <ast.Constant object at 0x7da1b0e9ee60>, <ast.Constant object at 0x7da1b0e9f220>, <ast.Constant object at 0x7da1b0e9f0a0>, <ast.Constant object at 0x7da1b0e9f1f0>, <ast.Constant object at 0x7da1b0e9f070>, <ast.Constant object at 0x7da1b0e9f1c0>]]
variable[display_layers] assign[=] name[self].__display_layers
if compare[call[name[len], parameter[name[display_layers]]] equal[==] constant[0]] begin[:]
variable[index] assign[=] constant[0]
for taget[tuple[[<ast.Name object at 0x7da1b0e9eec0>, <ast.Name object at 0x7da1b0e9c100>]]] in starred[call[name[enumerate], parameter[name[scalar_xdata_list]]]] begin[:]
if <ast.BoolOp object at 0x7da1b0e9e5f0> begin[:]
if compare[name[index] less[<] constant[16]] begin[:]
call[name[display_layers].append, parameter[dictionary[[<ast.Constant object at 0x7da1b0e9ef80>, <ast.Constant object at 0x7da1b0e9c130>, <ast.Constant object at 0x7da1b0e9c160>], [<ast.IfExp object at 0x7da1b0e9c280>, <ast.IfExp object at 0x7da1b0ef50c0>, <ast.Name object at 0x7da1b0ef7790>]]]]
<ast.AugAssign object at 0x7da1b0ef4220>
if <ast.BoolOp object at 0x7da1b0ef6050> begin[:]
for taget[name[row]] in starred[call[name[range], parameter[call[name[min], parameter[call[name[scalar_xdata].data_shape][<ast.UnaryOp object at 0x7da1b0ef7c40>], constant[16]]]]]] begin[:]
if compare[name[index] less[<] constant[16]] begin[:]
call[name[display_layers].append, parameter[dictionary[[<ast.Constant object at 0x7da1b0ef70d0>, <ast.Constant object at 0x7da1b0ef7310>, <ast.Constant object at 0x7da1b0ef6170>, <ast.Constant object at 0x7da1b0ef7190>], [<ast.IfExp object at 0x7da1b0ef7a90>, <ast.IfExp object at 0x7da1b0ef5b40>, <ast.Name object at 0x7da1b0ef73d0>, <ast.Name object at 0x7da1b0ef7820>]]]]
<ast.AugAssign object at 0x7da1b0ef4640>
variable[display_layer_count] assign[=] call[name[len], parameter[name[display_layers]]]
name[self].___has_valid_drawn_graph_data assign[=] constant[False]
for taget[tuple[[<ast.Name object at 0x7da1b0ef7af0>, <ast.Name object at 0x7da1b0ef4070>]]] in starred[call[name[enumerate], parameter[name[display_layers]]]] begin[:]
if compare[name[index] less[<] constant[16]] begin[:]
variable[fill_color] assign[=] call[name[display_layer].get, parameter[constant[fill_color]]]
variable[stroke_color] assign[=] call[name[display_layer].get, parameter[constant[stroke_color]]]
variable[data_index] assign[=] call[name[display_layer].get, parameter[constant[data_index], constant[0]]]
variable[data_row] assign[=] call[name[display_layer].get, parameter[constant[data_row], constant[0]]]
if compare[constant[0] less_or_equal[<=] name[data_index]] begin[:]
variable[scalar_xdata] assign[=] call[name[scalar_xdata_list]][name[data_index]]
if name[scalar_xdata] begin[:]
variable[data_row] assign[=] call[name[max], parameter[constant[0], call[name[min], parameter[binary_operation[call[name[scalar_xdata].dimensional_shape][constant[0]] - constant[1]], name[data_row]]]]]
variable[intensity_calibration] assign[=] name[scalar_xdata].intensity_calibration
variable[displayed_dimensional_calibration] assign[=] call[name[scalar_xdata].dimensional_calibrations][<ast.UnaryOp object at 0x7da18f811840>]
if name[scalar_xdata].is_data_2d begin[:]
variable[scalar_data] assign[=] call[call[name[scalar_xdata].data][tuple[[<ast.Slice object at 0x7da18f810760>, <ast.Slice object at 0x7da18f813f10>]]].reshape, parameter[tuple[[<ast.Subscript object at 0x7da18f810910>]]]]
variable[scalar_xdata] assign[=] call[name[DataAndMetadata].new_data_and_metadata, parameter[name[scalar_data], name[intensity_calibration], list[[<ast.Name object at 0x7da18f812b90>]]]]
variable[line_graph_canvas_item] assign[=] call[name[self].__line_graph_stack.canvas_items][binary_operation[name[display_layer_count] - binary_operation[name[index] + constant[1]]]]
call[name[line_graph_canvas_item].set_fill_color, parameter[name[fill_color]]]
call[name[line_graph_canvas_item].set_stroke_color, parameter[name[stroke_color]]]
call[name[line_graph_canvas_item].set_axes, parameter[name[axes]]]
call[name[line_graph_canvas_item].set_uncalibrated_xdata, parameter[name[scalar_xdata]]]
name[self].___has_valid_drawn_graph_data assign[=] compare[name[scalar_xdata] is_not constant[None]]
for taget[name[index]] in starred[call[name[range], parameter[call[name[len], parameter[name[display_layers]]], constant[16]]]] begin[:]
variable[line_graph_canvas_item] assign[=] call[name[self].__line_graph_stack.canvas_items][name[index]]
call[name[line_graph_canvas_item].set_axes, parameter[constant[None]]]
call[name[line_graph_canvas_item].set_uncalibrated_xdata, parameter[constant[None]]]
variable[legend_position] assign[=] name[self].__legend_position
variable[LegendEntry] assign[=] call[name[collections].namedtuple, parameter[constant[LegendEntry], list[[<ast.Constant object at 0x7da18f812d10>, <ast.Constant object at 0x7da18f812260>, <ast.Constant object at 0x7da18f812110>]]]]
variable[legend_entries] assign[=] call[name[list], parameter[]]
for taget[tuple[[<ast.Name object at 0x7da18f812290>, <ast.Name object at 0x7da18f811990>]]] in starred[call[name[enumerate], parameter[name[self].__display_layers]]] begin[:]
variable[data_index] assign[=] call[name[display_layer].get, parameter[constant[data_index], constant[None]]]
variable[data_row] assign[=] call[name[display_layer].get, parameter[constant[data_row], constant[None]]]
variable[label] assign[=] call[name[display_layer].get, parameter[constant[label], call[name[str], parameter[]]]]
if <ast.UnaryOp object at 0x7da18f812380> begin[:]
if <ast.BoolOp object at 0x7da18f810f10> begin[:]
variable[label] assign[=] call[constant[Data {}:{}].format, parameter[name[data_index], name[data_row]]]
variable[fill_color] assign[=] call[name[display_layer].get, parameter[constant[fill_color]]]
variable[stroke_color] assign[=] call[name[display_layer].get, parameter[constant[stroke_color]]]
call[name[legend_entries].append, parameter[call[name[LegendEntry], parameter[name[label], name[fill_color], name[stroke_color]]]]]
call[name[self].__update_canvas_items, parameter[name[axes], name[legend_position], name[legend_entries]]]
|
keyword[def] identifier[prepare_display] ( identifier[self] ):
literal[string]
identifier[displayed_dimensional_calibration] = identifier[self] . identifier[__displayed_dimensional_calibration]
identifier[intensity_calibration] = identifier[self] . identifier[__intensity_calibration]
identifier[calibration_style] = identifier[self] . identifier[__calibration_style]
identifier[y_min] = identifier[self] . identifier[__y_min]
identifier[y_max] = identifier[self] . identifier[__y_max]
identifier[y_style] = identifier[self] . identifier[__y_style]
identifier[left_channel] = identifier[self] . identifier[__left_channel]
identifier[right_channel] = identifier[self] . identifier[__right_channel]
identifier[scalar_xdata_list] = keyword[None]
keyword[def] identifier[calculate_scalar_xdata] ( identifier[xdata_list] ):
identifier[scalar_xdata_list] = identifier[list] ()
keyword[for] identifier[xdata] keyword[in] identifier[xdata_list] :
keyword[if] identifier[xdata] :
identifier[scalar_data] = identifier[Image] . identifier[scalar_from_array] ( identifier[xdata] . identifier[data] )
identifier[scalar_data] = identifier[Image] . identifier[convert_to_grayscale] ( identifier[scalar_data] )
identifier[scalar_intensity_calibration] = identifier[calibration_style] . identifier[get_intensity_calibration] ( identifier[xdata] )
identifier[scalar_dimensional_calibrations] = identifier[calibration_style] . identifier[get_dimensional_calibrations] ( identifier[xdata] . identifier[dimensional_shape] , identifier[xdata] . identifier[dimensional_calibrations] )
keyword[if] identifier[displayed_dimensional_calibration] . identifier[units] == identifier[scalar_dimensional_calibrations] [- literal[int] ]. identifier[units] keyword[and] identifier[intensity_calibration] . identifier[units] == identifier[scalar_intensity_calibration] . identifier[units] :
identifier[scale] = identifier[scalar_intensity_calibration] . identifier[scale] / identifier[intensity_calibration] . identifier[scale]
identifier[offset] =( identifier[scalar_intensity_calibration] . identifier[offset] - identifier[intensity_calibration] . identifier[offset] )/ identifier[intensity_calibration] . identifier[scale]
identifier[scalar_data] = identifier[scalar_data] * identifier[scale] + identifier[offset]
identifier[scalar_xdata_list] . identifier[append] ( identifier[DataAndMetadata] . identifier[new_data_and_metadata] ( identifier[scalar_data] , identifier[scalar_intensity_calibration] , identifier[scalar_dimensional_calibrations] ))
keyword[else] :
identifier[scalar_xdata_list] . identifier[append] ( keyword[None] )
keyword[return] identifier[scalar_xdata_list]
identifier[data_scale] = identifier[self] . identifier[__data_scale]
identifier[xdata_list] = identifier[self] . identifier[__xdata_list]
keyword[if] identifier[data_scale] keyword[is] keyword[not] keyword[None] :
identifier[left_channel] = identifier[left_channel] keyword[if] identifier[left_channel] keyword[is] keyword[not] keyword[None] keyword[else] literal[int]
identifier[right_channel] = identifier[right_channel] keyword[if] identifier[right_channel] keyword[is] keyword[not] keyword[None] keyword[else] identifier[data_scale]
identifier[left_channel] , identifier[right_channel] = identifier[min] ( identifier[left_channel] , identifier[right_channel] ), identifier[max] ( identifier[left_channel] , identifier[right_channel] )
identifier[scalar_data_list] = keyword[None]
keyword[if] identifier[y_min] keyword[is] keyword[None] keyword[or] identifier[y_max] keyword[is] keyword[None] keyword[and] identifier[len] ( identifier[xdata_list] )> literal[int] :
identifier[scalar_xdata_list] = identifier[calculate_scalar_xdata] ( identifier[xdata_list] )
identifier[scalar_data_list] =[ identifier[xdata] . identifier[data] keyword[if] identifier[xdata] keyword[else] keyword[None] keyword[for] identifier[xdata] keyword[in] identifier[scalar_xdata_list] ]
identifier[calibrated_data_min] , identifier[calibrated_data_max] , identifier[y_ticker] = identifier[LineGraphCanvasItem] . identifier[calculate_y_axis] ( identifier[scalar_data_list] , identifier[y_min] , identifier[y_max] , identifier[intensity_calibration] , identifier[y_style] )
identifier[axes] = identifier[LineGraphCanvasItem] . identifier[LineGraphAxes] ( identifier[data_scale] , identifier[calibrated_data_min] , identifier[calibrated_data_max] , identifier[left_channel] , identifier[right_channel] , identifier[displayed_dimensional_calibration] , identifier[intensity_calibration] , identifier[y_style] , identifier[y_ticker] )
keyword[if] identifier[scalar_xdata_list] keyword[is] keyword[None] :
keyword[if] identifier[len] ( identifier[xdata_list] )> literal[int] :
identifier[scalar_xdata_list] = identifier[calculate_scalar_xdata] ( identifier[xdata_list] )
keyword[else] :
identifier[scalar_xdata_list] = identifier[list] ()
keyword[if] identifier[self] . identifier[__display_frame_rate_id] :
identifier[Utility] . identifier[fps_tick] ( literal[string] + identifier[self] . identifier[__display_frame_rate_id] )
identifier[colors] =( literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] )
identifier[display_layers] = identifier[self] . identifier[__display_layers]
keyword[if] identifier[len] ( identifier[display_layers] )== literal[int] :
identifier[index] = literal[int]
keyword[for] identifier[scalar_index] , identifier[scalar_xdata] keyword[in] identifier[enumerate] ( identifier[scalar_xdata_list] ):
keyword[if] identifier[scalar_xdata] keyword[and] identifier[scalar_xdata] . identifier[is_data_1d] :
keyword[if] identifier[index] < literal[int] :
identifier[display_layers] . identifier[append] ({ literal[string] : identifier[colors] [ identifier[index] ] keyword[if] identifier[index] == literal[int] keyword[else] keyword[None] , literal[string] : identifier[colors] [ identifier[index] ] keyword[if] identifier[index] > literal[int] keyword[else] keyword[None] , literal[string] : identifier[scalar_index] })
identifier[index] += literal[int]
keyword[if] identifier[scalar_xdata] keyword[and] identifier[scalar_xdata] . identifier[is_data_2d] :
keyword[for] identifier[row] keyword[in] identifier[range] ( identifier[min] ( identifier[scalar_xdata] . identifier[data_shape] [- literal[int] ], literal[int] )):
keyword[if] identifier[index] < literal[int] :
identifier[display_layers] . identifier[append] ({ literal[string] : identifier[colors] [ identifier[index] ] keyword[if] identifier[index] == literal[int] keyword[else] keyword[None] , literal[string] : identifier[colors] [ identifier[index] ] keyword[if] identifier[index] > literal[int] keyword[else] keyword[None] , literal[string] : identifier[scalar_index] , literal[string] : identifier[row] })
identifier[index] += literal[int]
identifier[display_layer_count] = identifier[len] ( identifier[display_layers] )
identifier[self] . identifier[___has_valid_drawn_graph_data] = keyword[False]
keyword[for] identifier[index] , identifier[display_layer] keyword[in] identifier[enumerate] ( identifier[display_layers] ):
keyword[if] identifier[index] < literal[int] :
identifier[fill_color] = identifier[display_layer] . identifier[get] ( literal[string] )
identifier[stroke_color] = identifier[display_layer] . identifier[get] ( literal[string] )
identifier[data_index] = identifier[display_layer] . identifier[get] ( literal[string] , literal[int] )
identifier[data_row] = identifier[display_layer] . identifier[get] ( literal[string] , literal[int] )
keyword[if] literal[int] <= identifier[data_index] < identifier[len] ( identifier[scalar_xdata_list] ):
identifier[scalar_xdata] = identifier[scalar_xdata_list] [ identifier[data_index] ]
keyword[if] identifier[scalar_xdata] :
identifier[data_row] = identifier[max] ( literal[int] , identifier[min] ( identifier[scalar_xdata] . identifier[dimensional_shape] [ literal[int] ]- literal[int] , identifier[data_row] ))
identifier[intensity_calibration] = identifier[scalar_xdata] . identifier[intensity_calibration]
identifier[displayed_dimensional_calibration] = identifier[scalar_xdata] . identifier[dimensional_calibrations] [- literal[int] ]
keyword[if] identifier[scalar_xdata] . identifier[is_data_2d] :
identifier[scalar_data] = identifier[scalar_xdata] . identifier[data] [ identifier[data_row] : identifier[data_row] + literal[int] ,:]. identifier[reshape] (( identifier[scalar_xdata] . identifier[dimensional_shape] [- literal[int] ],))
identifier[scalar_xdata] = identifier[DataAndMetadata] . identifier[new_data_and_metadata] ( identifier[scalar_data] , identifier[intensity_calibration] ,[ identifier[displayed_dimensional_calibration] ])
identifier[line_graph_canvas_item] = identifier[self] . identifier[__line_graph_stack] . identifier[canvas_items] [ identifier[display_layer_count] -( identifier[index] + literal[int] )]
identifier[line_graph_canvas_item] . identifier[set_fill_color] ( identifier[fill_color] )
identifier[line_graph_canvas_item] . identifier[set_stroke_color] ( identifier[stroke_color] )
identifier[line_graph_canvas_item] . identifier[set_axes] ( identifier[axes] )
identifier[line_graph_canvas_item] . identifier[set_uncalibrated_xdata] ( identifier[scalar_xdata] )
identifier[self] . identifier[___has_valid_drawn_graph_data] = identifier[scalar_xdata] keyword[is] keyword[not] keyword[None]
keyword[for] identifier[index] keyword[in] identifier[range] ( identifier[len] ( identifier[display_layers] ), literal[int] ):
identifier[line_graph_canvas_item] = identifier[self] . identifier[__line_graph_stack] . identifier[canvas_items] [ identifier[index] ]
identifier[line_graph_canvas_item] . identifier[set_axes] ( keyword[None] )
identifier[line_graph_canvas_item] . identifier[set_uncalibrated_xdata] ( keyword[None] )
identifier[legend_position] = identifier[self] . identifier[__legend_position]
identifier[LegendEntry] = identifier[collections] . identifier[namedtuple] ( literal[string] ,[ literal[string] , literal[string] , literal[string] ])
identifier[legend_entries] = identifier[list] ()
keyword[for] identifier[index] , identifier[display_layer] keyword[in] identifier[enumerate] ( identifier[self] . identifier[__display_layers] ):
identifier[data_index] = identifier[display_layer] . identifier[get] ( literal[string] , keyword[None] )
identifier[data_row] = identifier[display_layer] . identifier[get] ( literal[string] , keyword[None] )
identifier[label] = identifier[display_layer] . identifier[get] ( literal[string] , identifier[str] ())
keyword[if] keyword[not] identifier[label] :
keyword[if] identifier[data_index] keyword[is] keyword[not] keyword[None] keyword[and] identifier[data_row] keyword[is] keyword[not] keyword[None] :
identifier[label] = literal[string] . identifier[format] ( identifier[data_index] , identifier[data_row] )
keyword[elif] identifier[data_index] keyword[is] keyword[not] keyword[None] :
identifier[label] = literal[string] . identifier[format] ( identifier[data_index] )
keyword[else] :
identifier[label] = literal[string]
identifier[fill_color] = identifier[display_layer] . identifier[get] ( literal[string] )
identifier[stroke_color] = identifier[display_layer] . identifier[get] ( literal[string] )
identifier[legend_entries] . identifier[append] ( identifier[LegendEntry] ( identifier[label] , identifier[fill_color] , identifier[stroke_color] ))
identifier[self] . identifier[__update_canvas_items] ( identifier[axes] , identifier[legend_position] , identifier[legend_entries] )
keyword[else] :
keyword[for] identifier[line_graph_canvas_item] keyword[in] identifier[self] . identifier[__line_graph_stack] . identifier[canvas_items] :
identifier[line_graph_canvas_item] . identifier[set_axes] ( keyword[None] )
identifier[line_graph_canvas_item] . identifier[set_uncalibrated_xdata] ( keyword[None] )
identifier[self] . identifier[__line_graph_xdata_list] = identifier[list] ()
identifier[self] . identifier[__update_canvas_items] ( identifier[LineGraphCanvasItem] . identifier[LineGraphAxes] (), keyword[None] , keyword[None] )
|
def prepare_display(self):
"""Prepare the display.
This method gets called by the canvas layout/draw engine after being triggered by a call to `update`.
When data or display parameters change, the internal state of the line plot gets updated. This method takes
that internal state and updates the child canvas items.
This method is always run on a thread and should be fast but doesn't need to be instant.
"""
displayed_dimensional_calibration = self.__displayed_dimensional_calibration
intensity_calibration = self.__intensity_calibration
calibration_style = self.__calibration_style
y_min = self.__y_min
y_max = self.__y_max
y_style = self.__y_style
left_channel = self.__left_channel
right_channel = self.__right_channel
scalar_xdata_list = None
def calculate_scalar_xdata(xdata_list):
scalar_xdata_list = list()
for xdata in xdata_list:
if xdata:
scalar_data = Image.scalar_from_array(xdata.data)
scalar_data = Image.convert_to_grayscale(scalar_data)
scalar_intensity_calibration = calibration_style.get_intensity_calibration(xdata)
scalar_dimensional_calibrations = calibration_style.get_dimensional_calibrations(xdata.dimensional_shape, xdata.dimensional_calibrations)
if displayed_dimensional_calibration.units == scalar_dimensional_calibrations[-1].units and intensity_calibration.units == scalar_intensity_calibration.units:
# the data needs to have an intensity scale matching intensity_calibration. convert the data to use the common scale.
scale = scalar_intensity_calibration.scale / intensity_calibration.scale
offset = (scalar_intensity_calibration.offset - intensity_calibration.offset) / intensity_calibration.scale
scalar_data = scalar_data * scale + offset
scalar_xdata_list.append(DataAndMetadata.new_data_and_metadata(scalar_data, scalar_intensity_calibration, scalar_dimensional_calibrations)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
scalar_xdata_list.append(None) # depends on [control=['for'], data=['xdata']]
return scalar_xdata_list
data_scale = self.__data_scale
xdata_list = self.__xdata_list
if data_scale is not None:
# update the line graph data
left_channel = left_channel if left_channel is not None else 0
right_channel = right_channel if right_channel is not None else data_scale
(left_channel, right_channel) = (min(left_channel, right_channel), max(left_channel, right_channel))
scalar_data_list = None
if y_min is None or (y_max is None and len(xdata_list) > 0):
scalar_xdata_list = calculate_scalar_xdata(xdata_list)
scalar_data_list = [xdata.data if xdata else None for xdata in scalar_xdata_list] # depends on [control=['if'], data=[]]
(calibrated_data_min, calibrated_data_max, y_ticker) = LineGraphCanvasItem.calculate_y_axis(scalar_data_list, y_min, y_max, intensity_calibration, y_style)
axes = LineGraphCanvasItem.LineGraphAxes(data_scale, calibrated_data_min, calibrated_data_max, left_channel, right_channel, displayed_dimensional_calibration, intensity_calibration, y_style, y_ticker)
if scalar_xdata_list is None:
if len(xdata_list) > 0:
scalar_xdata_list = calculate_scalar_xdata(xdata_list) # depends on [control=['if'], data=[]]
else:
scalar_xdata_list = list() # depends on [control=['if'], data=['scalar_xdata_list']]
if self.__display_frame_rate_id:
Utility.fps_tick('prepare_' + self.__display_frame_rate_id) # depends on [control=['if'], data=[]]
colors = ('#1E90FF', '#F00', '#0F0', '#00F', '#FF0', '#0FF', '#F0F', '#888', '#800', '#080', '#008', '#CCC', '#880', '#088', '#808', '#964B00')
display_layers = self.__display_layers
if len(display_layers) == 0:
index = 0
for (scalar_index, scalar_xdata) in enumerate(scalar_xdata_list):
if scalar_xdata and scalar_xdata.is_data_1d:
if index < 16:
display_layers.append({'fill_color': colors[index] if index == 0 else None, 'stroke_color': colors[index] if index > 0 else None, 'data_index': scalar_index})
index += 1 # depends on [control=['if'], data=['index']] # depends on [control=['if'], data=[]]
if scalar_xdata and scalar_xdata.is_data_2d:
for row in range(min(scalar_xdata.data_shape[-1], 16)):
if index < 16:
display_layers.append({'fill_color': colors[index] if index == 0 else None, 'stroke_color': colors[index] if index > 0 else None, 'data_index': scalar_index, 'data_row': row})
index += 1 # depends on [control=['if'], data=['index']] # depends on [control=['for'], data=['row']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
display_layer_count = len(display_layers)
self.___has_valid_drawn_graph_data = False
for (index, display_layer) in enumerate(display_layers):
if index < 16:
fill_color = display_layer.get('fill_color')
stroke_color = display_layer.get('stroke_color')
data_index = display_layer.get('data_index', 0)
data_row = display_layer.get('data_row', 0)
if 0 <= data_index < len(scalar_xdata_list):
scalar_xdata = scalar_xdata_list[data_index]
if scalar_xdata:
data_row = max(0, min(scalar_xdata.dimensional_shape[0] - 1, data_row))
intensity_calibration = scalar_xdata.intensity_calibration
displayed_dimensional_calibration = scalar_xdata.dimensional_calibrations[-1]
if scalar_xdata.is_data_2d:
scalar_data = scalar_xdata.data[data_row:data_row + 1, :].reshape((scalar_xdata.dimensional_shape[-1],))
scalar_xdata = DataAndMetadata.new_data_and_metadata(scalar_data, intensity_calibration, [displayed_dimensional_calibration]) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
line_graph_canvas_item = self.__line_graph_stack.canvas_items[display_layer_count - (index + 1)]
line_graph_canvas_item.set_fill_color(fill_color)
line_graph_canvas_item.set_stroke_color(stroke_color)
line_graph_canvas_item.set_axes(axes)
line_graph_canvas_item.set_uncalibrated_xdata(scalar_xdata)
self.___has_valid_drawn_graph_data = scalar_xdata is not None # depends on [control=['if'], data=['data_index']] # depends on [control=['if'], data=['index']] # depends on [control=['for'], data=[]]
for index in range(len(display_layers), 16):
line_graph_canvas_item = self.__line_graph_stack.canvas_items[index]
line_graph_canvas_item.set_axes(None)
line_graph_canvas_item.set_uncalibrated_xdata(None) # depends on [control=['for'], data=['index']]
legend_position = self.__legend_position
LegendEntry = collections.namedtuple('LegendEntry', ['label', 'fill_color', 'stroke_color'])
legend_entries = list()
for (index, display_layer) in enumerate(self.__display_layers):
data_index = display_layer.get('data_index', None)
data_row = display_layer.get('data_row', None)
label = display_layer.get('label', str())
if not label:
if data_index is not None and data_row is not None:
label = 'Data {}:{}'.format(data_index, data_row) # depends on [control=['if'], data=[]]
elif data_index is not None:
label = 'Data {}'.format(data_index) # depends on [control=['if'], data=['data_index']]
else:
label = 'Unknown' # depends on [control=['if'], data=[]]
fill_color = display_layer.get('fill_color')
stroke_color = display_layer.get('stroke_color')
legend_entries.append(LegendEntry(label, fill_color, stroke_color)) # depends on [control=['for'], data=[]]
self.__update_canvas_items(axes, legend_position, legend_entries) # depends on [control=['if'], data=['data_scale']]
else:
for line_graph_canvas_item in self.__line_graph_stack.canvas_items:
line_graph_canvas_item.set_axes(None)
line_graph_canvas_item.set_uncalibrated_xdata(None) # depends on [control=['for'], data=['line_graph_canvas_item']]
self.__line_graph_xdata_list = list()
self.__update_canvas_items(LineGraphCanvasItem.LineGraphAxes(), None, None)
|
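A hedged sketch of the `display_layers` entries that `prepare_display` above consumes. The keys ("fill_color", "stroke_color", "data_index", "data_row", "label") are exactly the ones the method reads; the concrete values are illustrative.

# Illustrative layer descriptions; at most 16 layers are honoured by the method above.
display_layers = [
    {"fill_color": "#1E90FF", "stroke_color": None, "data_index": 0, "label": "Signal"},
    {"fill_color": None, "stroke_color": "#F00", "data_index": 1, "data_row": 2},
]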
def signalcommand(func):
"""Python decorator for management command handle defs that sends out a pre/post signal."""
def inner(self, *args, **kwargs):
pre_command.send(self.__class__, args=args, kwargs=kwargs)
ret = func(self, *args, **kwargs)
post_command.send(self.__class__, args=args, kwargs=kwargs, outcome=ret)
return ret
return inner
|
def function[signalcommand, parameter[func]]:
constant[Python decorator for management command handle defs that sends out a pre/post signal.]
def function[inner, parameter[self]]:
call[name[pre_command].send, parameter[name[self].__class__]]
variable[ret] assign[=] call[name[func], parameter[name[self], <ast.Starred object at 0x7da1b17d7ca0>]]
call[name[post_command].send, parameter[name[self].__class__]]
return[name[ret]]
return[name[inner]]
|
keyword[def] identifier[signalcommand] ( identifier[func] ):
literal[string]
keyword[def] identifier[inner] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
identifier[pre_command] . identifier[send] ( identifier[self] . identifier[__class__] , identifier[args] = identifier[args] , identifier[kwargs] = identifier[kwargs] )
identifier[ret] = identifier[func] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] )
identifier[post_command] . identifier[send] ( identifier[self] . identifier[__class__] , identifier[args] = identifier[args] , identifier[kwargs] = identifier[kwargs] , identifier[outcome] = identifier[ret] )
keyword[return] identifier[ret]
keyword[return] identifier[inner]
|
def signalcommand(func):
"""Python decorator for management command handle defs that sends out a pre/post signal."""
def inner(self, *args, **kwargs):
pre_command.send(self.__class__, args=args, kwargs=kwargs)
ret = func(self, *args, **kwargs)
post_command.send(self.__class__, args=args, kwargs=kwargs, outcome=ret)
return ret
return inner
|
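A minimal usage sketch for the decorator above, assuming a Django management command; the command class and its body are hypothetical, only the decorator semantics come from the code.

from django.core.management.base import BaseCommand

class Command(BaseCommand):
    """Hypothetical management command wired to the pre/post signals."""

    @signalcommand              # `signalcommand` as defined in the row above
    def handle(self, *args, **options):
        # pre_command fires before this body, post_command after it,
        # with the return value forwarded as the `outcome` keyword.
        return "done"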
def prepare(args):
"""
%prog prepare --rearray_lib=<rearraylibrary> --orig_lib_file=<origlibfile>
Inferred file names
---------------------------------------------
`lookuptblfile` : rearraylibrary.lookup
`rearraylibfile`: rearraylibrary.fasta
Pick sequences from the original library file and the rearrayed library file
based on the mapping information provided in the `lookuptblfile`.
# lookuptblfile format: column number (index)
# 1 (0) 2 (1) 3 (2) 4 (3) 5 (4) 6 (5)
# source_clone source_plate source_well dest_clone dest_plate dest_well
The 1st and 4th column in the `lookuptblfile` form the pair of clones which
constitute the elements used for the per-clone assembly.
"""
from operator import itemgetter
from jcvi.formats.fasta import Fasta, SeqIO
p = OptionParser(prepare.__doc__)
p.add_option("--rearray_lib", default=None,
help="name of the rearrayed library [default: %default]")
p.add_option("--orig_lib_file",
help="fasta file containing reads from the original libraries [default: %default]")
g = OptionGroup(p, "Optional parameters")
g.add_option("--output_folder", default="to_assemble",
help="output folder to write the FASTA files to [default: %default]")
p.add_option_group(g)
opts, args = p.parse_args(args)
if not opts.rearray_lib or not opts.orig_lib_file:
logging.error("Please specify the required parameters")
sys.exit(not p.print_help())
rearraylib, origlibfile = opts.rearray_lib, opts.orig_lib_file
if not op.isfile(origlibfile):
logging.error("Original library reads file `{0}` does not exist!".format(origlibfile))
sys.exit()
lookuptblfile = rearraylib + '.lookup'
logging.debug(lookuptblfile)
if not op.isfile(lookuptblfile):
logging.error("Lookup table file `{0}` does not exist!".format(lookuptblfile))
sys.exit()
rearraylibfile = rearraylib + '.fasta'
logging.debug(rearraylibfile)
if not op.isfile(rearraylibfile):
logging.error("Rearrayed library reads file `{0}` does not exist!".format(rearraylibfile))
sys.exit()
origlibFasta = Fasta(origlibfile)
rearraylibFasta = Fasta(rearraylibfile)
origlibids = [o for o in origlibFasta.iterkeys_ordered()]
rearraylibids = [r for r in rearraylibFasta.iterkeys_ordered()]
if not op.isdir(opts.output_folder):
logging.warning("Output directory `{0}` missing. Creating it now...".format(opts.output_folder))
os.makedirs(opts.output_folder)
logfile = rearraylib + '.log'
log = open(logfile, 'w')
fp = open(lookuptblfile, 'r')
for row in fp:
origprefix, rearrayprefix = itemgetter(0,3)(row.split('\t'))
libpair = origprefix + '_' + rearrayprefix
outfile = opts.output_folder + '/' + libpair + '.fasta'
ofp = open(outfile, 'w')
for o in origlibids:
if re.match(origprefix, o):
SeqIO.write(origlibFasta[o], ofp, 'fasta')
for r in rearraylibids:
if re.match(rearrayprefix, r):
SeqIO.write(rearraylibFasta[r], ofp, 'fasta')
ofp.close()
print(outfile, file=log)
log.close()
logging.debug('Wrote log file `{0}`'.format(logfile))
|
def function[prepare, parameter[args]]:
constant[
%prog prepare --rearray_lib=<rearraylibrary> --orig_lib_file=<origlibfile>
Inferred file names
---------------------------------------------
`lookuptblfile` : rearraylibrary.lookup
`rearraylibfile`: rearraylibrary.fasta
Pick sequences from the original library file and the rearrayed library file
based on the mapping information provided in the `lookuptblfile`.
# lookuptblfile format: column number (index)
# 1 (0) 2 (1) 3 (2) 4 (3) 5 (4) 6 (5)
# source_clone source_plate source_well dest_clone dest_plate dest_well
The 1st and 4th column in the `lookuptblfile` form the pair of clones which
constitute the elements used for the per-clone assembly.
]
from relative_module[operator] import module[itemgetter]
from relative_module[jcvi.formats.fasta] import module[Fasta], module[SeqIO]
variable[p] assign[=] call[name[OptionParser], parameter[name[prepare].__doc__]]
call[name[p].add_option, parameter[constant[--rearray_lib]]]
call[name[p].add_option, parameter[constant[--orig_lib_file]]]
variable[g] assign[=] call[name[OptionGroup], parameter[name[p], constant[Optional parameters]]]
call[name[g].add_option, parameter[constant[--output_folder]]]
call[name[p].add_option_group, parameter[name[g]]]
<ast.Tuple object at 0x7da207f9b040> assign[=] call[name[p].parse_args, parameter[name[args]]]
if <ast.BoolOp object at 0x7da207f9b6d0> begin[:]
call[name[logging].error, parameter[constant[Please specify the required parameters]]]
call[name[sys].exit, parameter[<ast.UnaryOp object at 0x7da20c992740>]]
<ast.Tuple object at 0x7da20c991a50> assign[=] tuple[[<ast.Attribute object at 0x7da20c991180>, <ast.Attribute object at 0x7da20c990730>]]
if <ast.UnaryOp object at 0x7da20c9926e0> begin[:]
call[name[logging].error, parameter[call[constant[Original library reads file `{0}` does not exist!].format, parameter[name[origlibfile]]]]]
call[name[sys].exit, parameter[]]
variable[lookuptblfile] assign[=] binary_operation[name[rearraylib] + constant[.lookup]]
call[name[logging].debug, parameter[name[lookuptblfile]]]
if <ast.UnaryOp object at 0x7da20c9909a0> begin[:]
call[name[logging].error, parameter[call[constant[Lookup table file `{0}` does not exist!].format, parameter[name[lookuptblfile]]]]]
call[name[sys].exit, parameter[]]
variable[rearraylibfile] assign[=] binary_operation[name[rearraylib] + constant[.fasta]]
call[name[logging].debug, parameter[name[rearraylibfile]]]
if <ast.UnaryOp object at 0x7da20c990d90> begin[:]
call[name[logging].error, parameter[call[constant[Rearrayed library reads file `{0}` does not exist!].format, parameter[name[rearraylibfile]]]]]
call[name[sys].exit, parameter[]]
variable[origlibFasta] assign[=] call[name[Fasta], parameter[name[origlibfile]]]
variable[rearraylibFasta] assign[=] call[name[Fasta], parameter[name[rearraylibfile]]]
variable[origlibids] assign[=] <ast.ListComp object at 0x7da20c990910>
variable[rearraylibids] assign[=] <ast.ListComp object at 0x7da20c991960>
if <ast.UnaryOp object at 0x7da20c993c10> begin[:]
call[name[logging].warning, parameter[call[constant[Output directory `{0}` missing. Creating it now...].format, parameter[name[opts].output_folder]]]]
call[name[os].makedirs, parameter[name[opts].output_folder]]
variable[logfile] assign[=] binary_operation[name[rearraylib] + constant[.log]]
variable[log] assign[=] call[name[open], parameter[name[logfile], constant[w]]]
variable[fp] assign[=] call[name[open], parameter[name[lookuptblfile], constant[r]]]
for taget[name[row]] in starred[name[fp]] begin[:]
<ast.Tuple object at 0x7da20c9917e0> assign[=] call[call[name[itemgetter], parameter[constant[0], constant[3]]], parameter[call[name[row].split, parameter[constant[ ]]]]]
variable[libpair] assign[=] binary_operation[binary_operation[name[origprefix] + constant[_]] + name[rearrayprefix]]
variable[outfile] assign[=] binary_operation[binary_operation[binary_operation[name[opts].output_folder + constant[/]] + name[libpair]] + constant[.fasta]]
variable[ofp] assign[=] call[name[open], parameter[name[outfile], constant[w]]]
for taget[name[o]] in starred[name[origlibids]] begin[:]
if call[name[re].match, parameter[name[origprefix], name[o]]] begin[:]
call[name[SeqIO].write, parameter[call[name[origlibFasta]][name[o]], name[ofp], constant[fasta]]]
for taget[name[r]] in starred[name[rearraylibids]] begin[:]
if call[name[re].match, parameter[name[rearrayprefix], name[r]]] begin[:]
call[name[SeqIO].write, parameter[call[name[rearraylibFasta]][name[r]], name[ofp], constant[fasta]]]
call[name[ofp].close, parameter[]]
call[name[print], parameter[name[outfile]]]
call[name[log].close, parameter[]]
call[name[logging].debug, parameter[call[constant[Wrote log file `{0}`].format, parameter[name[logfile]]]]]
|
keyword[def] identifier[prepare] ( identifier[args] ):
literal[string]
keyword[from] identifier[operator] keyword[import] identifier[itemgetter]
keyword[from] identifier[jcvi] . identifier[formats] . identifier[fasta] keyword[import] identifier[Fasta] , identifier[SeqIO]
identifier[p] = identifier[OptionParser] ( identifier[prepare] . identifier[__doc__] )
identifier[p] . identifier[add_option] ( literal[string] , identifier[default] = keyword[None] ,
identifier[help] = literal[string] )
identifier[p] . identifier[add_option] ( literal[string] ,
identifier[help] = literal[string] )
identifier[g] = identifier[OptionGroup] ( identifier[p] , literal[string] )
identifier[g] . identifier[add_option] ( literal[string] , identifier[default] = literal[string] ,
identifier[help] = literal[string] )
identifier[p] . identifier[add_option_group] ( identifier[g] )
identifier[opts] , identifier[args] = identifier[p] . identifier[parse_args] ( identifier[args] )
keyword[if] keyword[not] identifier[opts] . identifier[rearray_lib] keyword[or] keyword[not] identifier[opts] . identifier[orig_lib_file] :
identifier[logging] . identifier[error] ( literal[string] )
identifier[sys] . identifier[exit] ( keyword[not] identifier[p] . identifier[print_help] ())
identifier[rearraylib] , identifier[origlibfile] = identifier[opts] . identifier[rearray_lib] , identifier[opts] . identifier[orig_lib_file]
keyword[if] keyword[not] identifier[op] . identifier[isfile] ( identifier[origlibfile] ):
identifier[logging] . identifier[error] ( literal[string] . identifier[format] ( identifier[origlibfile] ))
identifier[sys] . identifier[exit] ()
identifier[lookuptblfile] = identifier[rearraylib] + literal[string]
identifier[logging] . identifier[debug] ( identifier[lookuptblfile] )
keyword[if] keyword[not] identifier[op] . identifier[isfile] ( identifier[lookuptblfile] ):
identifier[logging] . identifier[error] ( literal[string] . identifier[format] ( identifier[lookuptblfile] ))
identifier[sys] . identifier[exit] ()
identifier[rearraylibfile] = identifier[rearraylib] + literal[string]
identifier[logging] . identifier[debug] ( identifier[rearraylibfile] )
keyword[if] keyword[not] identifier[op] . identifier[isfile] ( identifier[rearraylibfile] ):
identifier[logging] . identifier[error] ( literal[string] . identifier[format] ( identifier[rearraylibfile] ))
identifier[sys] . identifier[exit] ()
identifier[origlibFasta] = identifier[Fasta] ( identifier[origlibfile] )
identifier[rearraylibFasta] = identifier[Fasta] ( identifier[rearraylibfile] )
identifier[origlibids] =[ identifier[o] keyword[for] identifier[o] keyword[in] identifier[origlibFasta] . identifier[iterkeys_ordered] ()]
identifier[rearraylibids] =[ identifier[r] keyword[for] identifier[r] keyword[in] identifier[rearraylibFasta] . identifier[iterkeys_ordered] ()]
keyword[if] keyword[not] identifier[op] . identifier[isdir] ( identifier[opts] . identifier[output_folder] ):
identifier[logging] . identifier[warning] ( literal[string] . identifier[format] ( identifier[opts] . identifier[output_folder] ))
identifier[os] . identifier[makedirs] ( identifier[opts] . identifier[output_folder] )
identifier[logfile] = identifier[rearraylib] + literal[string]
identifier[log] = identifier[open] ( identifier[logfile] , literal[string] )
identifier[fp] = identifier[open] ( identifier[lookuptblfile] , literal[string] )
keyword[for] identifier[row] keyword[in] identifier[fp] :
identifier[origprefix] , identifier[rearrayprefix] = identifier[itemgetter] ( literal[int] , literal[int] )( identifier[row] . identifier[split] ( literal[string] ))
identifier[libpair] = identifier[origprefix] + literal[string] + identifier[rearrayprefix]
identifier[outfile] = identifier[opts] . identifier[output_folder] + literal[string] + identifier[libpair] + literal[string]
identifier[ofp] = identifier[open] ( identifier[outfile] , literal[string] )
keyword[for] identifier[o] keyword[in] identifier[origlibids] :
keyword[if] identifier[re] . identifier[match] ( identifier[origprefix] , identifier[o] ):
identifier[SeqIO] . identifier[write] ( identifier[origlibFasta] [ identifier[o] ], identifier[ofp] , literal[string] )
keyword[for] identifier[r] keyword[in] identifier[rearraylibids] :
keyword[if] identifier[re] . identifier[match] ( identifier[rearrayprefix] , identifier[r] ):
identifier[SeqIO] . identifier[write] ( identifier[rearraylibFasta] [ identifier[r] ], identifier[ofp] , literal[string] )
identifier[ofp] . identifier[close] ()
identifier[print] ( identifier[outfile] , identifier[file] = identifier[log] )
identifier[log] . identifier[close] ()
identifier[logging] . identifier[debug] ( literal[string] . identifier[format] ( identifier[logfile] ))
|
def prepare(args):
"""
%prog prepare --rearray_lib=<rearraylibrary> --orig_lib_file=<origlibfile>
Inferred file names
---------------------------------------------
`lookuptblfile` : rearraylibrary.lookup
`rearraylibfile`: rearraylibrary.fasta
Pick sequences from the original library file and the rearrayed library file
based on the mapping information provided in the `lookuptblfile`.
# lookuptblfile format: column number (index)
# 1 (0) 2 (1) 3 (2) 4 (3) 5 (4) 6 (5)
# source_clone source_plate source_well dest_clone dest_plate dest_well
The 1st and 4th column in the `lookuptblfile` form the pair of clones which
constitute the elements used for the per-clone assembly.
"""
from operator import itemgetter
from jcvi.formats.fasta import Fasta, SeqIO
p = OptionParser(prepare.__doc__)
p.add_option('--rearray_lib', default=None, help='name of the rearrayed library [default: %default]')
p.add_option('--orig_lib_file', help='fasta file containing reads from the original libraries [default: %default]')
g = OptionGroup(p, 'Optional parameters')
g.add_option('--output_folder', default='to_assemble', help='output folder to write the FASTA files to [default: %default]')
p.add_option_group(g)
(opts, args) = p.parse_args(args)
if not opts.rearray_lib or not opts.orig_lib_file:
logging.error('Please specify the required parameters')
sys.exit(not p.print_help()) # depends on [control=['if'], data=[]]
(rearraylib, origlibfile) = (opts.rearray_lib, opts.orig_lib_file)
if not op.isfile(origlibfile):
logging.error('Original library reads file `{0}` does not exist!'.format(origlibfile))
sys.exit() # depends on [control=['if'], data=[]]
lookuptblfile = rearraylib + '.lookup'
logging.debug(lookuptblfile)
if not op.isfile(lookuptblfile):
logging.error('Lookup table file `{0}` does not exist!'.format(lookuptblfile))
sys.exit() # depends on [control=['if'], data=[]]
rearraylibfile = rearraylib + '.fasta'
logging.debug(rearraylibfile)
if not op.isfile(rearraylibfile):
logging.error('Rearrayed library reads file `{0}` does not exist!'.format(rearraylibfile))
sys.exit() # depends on [control=['if'], data=[]]
origlibFasta = Fasta(origlibfile)
rearraylibFasta = Fasta(rearraylibfile)
origlibids = [o for o in origlibFasta.iterkeys_ordered()]
rearraylibids = [r for r in rearraylibFasta.iterkeys_ordered()]
if not op.isdir(opts.output_folder):
logging.warning('Output directory `{0}` missing. Creating it now...'.format(opts.output_folder))
os.makedirs(opts.output_folder) # depends on [control=['if'], data=[]]
logfile = rearraylib + '.log'
log = open(logfile, 'w')
fp = open(lookuptblfile, 'r')
for row in fp:
(origprefix, rearrayprefix) = itemgetter(0, 3)(row.split('\t'))
libpair = origprefix + '_' + rearrayprefix
outfile = opts.output_folder + '/' + libpair + '.fasta'
ofp = open(outfile, 'w')
for o in origlibids:
if re.match(origprefix, o):
SeqIO.write(origlibFasta[o], ofp, 'fasta') # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['o']]
for r in rearraylibids:
if re.match(rearrayprefix, r):
SeqIO.write(rearraylibFasta[r], ofp, 'fasta') # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['r']]
ofp.close()
print(outfile, file=log) # depends on [control=['for'], data=['row']]
log.close()
logging.debug('Wrote log file `{0}`'.format(logfile))
|
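A hedged usage sketch for the `prepare` entry point above: the library name and FASTA file are invented, and the call assumes the jcvi OptionParser accepts a pre-split argument list as in the source.

# Direct call, equivalent to: prepare --rearray_lib=rearray01 --orig_lib_file=orig_reads.fasta
# It expects rearray01.lookup and rearray01.fasta to exist alongside the reads file,
# and writes one per-clone FASTA into the to_assemble/ output folder per lookup row.
prepare(['--rearray_lib=rearray01', '--orig_lib_file=orig_reads.fasta'])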
def get_queryset(self):
""" Returns all the approved topics or posts. """
qs = super().get_queryset()
qs = qs.filter(approved=True)
return qs
|
def function[get_queryset, parameter[self]]:
constant[ Returns all the approved topics or posts. ]
variable[qs] assign[=] call[call[name[super], parameter[]].get_queryset, parameter[]]
variable[qs] assign[=] call[name[qs].filter, parameter[]]
return[name[qs]]
|
keyword[def] identifier[get_queryset] ( identifier[self] ):
literal[string]
identifier[qs] = identifier[super] (). identifier[get_queryset] ()
identifier[qs] = identifier[qs] . identifier[filter] ( identifier[approved] = keyword[True] )
keyword[return] identifier[qs]
|
def get_queryset(self):
""" Returns all the approved topics or posts. """
qs = super().get_queryset()
qs = qs.filter(approved=True)
return qs
|
def parseBtop(btopString):
"""
Parse a BTOP string.
The format is described at https://www.ncbi.nlm.nih.gov/books/NBK279682/
@param btopString: A C{str} BTOP sequence.
@raise ValueError: If C{btopString} is not valid BTOP.
@return: A generator that yields a series of integers and 2-tuples of
letters, as found in the BTOP string C{btopString}.
"""
isdigit = str.isdigit
value = None
queryLetter = None
for offset, char in enumerate(btopString):
if isdigit(char):
if queryLetter is not None:
raise ValueError(
'BTOP string %r has a query letter %r at offset %d with '
'no corresponding subject letter' %
(btopString, queryLetter, offset - 1))
value = int(char) if value is None else value * 10 + int(char)
else:
if value is not None:
yield value
value = None
queryLetter = char
else:
if queryLetter is None:
queryLetter = char
else:
if queryLetter == '-' and char == '-':
raise ValueError(
'BTOP string %r has two consecutive gaps at '
'offset %d' % (btopString, offset - 1))
elif queryLetter == char:
raise ValueError(
'BTOP string %r has two consecutive identical %r '
'letters at offset %d' %
(btopString, char, offset - 1))
yield (queryLetter, char)
queryLetter = None
if value is not None:
yield value
elif queryLetter is not None:
raise ValueError(
'BTOP string %r has a trailing query letter %r with '
'no corresponding subject letter' % (btopString, queryLetter))
|
def function[parseBtop, parameter[btopString]]:
constant[
Parse a BTOP string.
The format is described at https://www.ncbi.nlm.nih.gov/books/NBK279682/
@param btopString: A C{str} BTOP sequence.
@raise ValueError: If C{btopString} is not valid BTOP.
@return: A generator that yields a series of integers and 2-tuples of
letters, as found in the BTOP string C{btopString}.
]
variable[isdigit] assign[=] name[str].isdigit
variable[value] assign[=] constant[None]
variable[queryLetter] assign[=] constant[None]
for taget[tuple[[<ast.Name object at 0x7da20c794a00>, <ast.Name object at 0x7da20c7950f0>]]] in starred[call[name[enumerate], parameter[name[btopString]]]] begin[:]
if call[name[isdigit], parameter[name[char]]] begin[:]
if compare[name[queryLetter] is_not constant[None]] begin[:]
<ast.Raise object at 0x7da20c794d30>
variable[value] assign[=] <ast.IfExp object at 0x7da20c796560>
if compare[name[value] is_not constant[None]] begin[:]
<ast.Yield object at 0x7da1b0e396c0>
|
keyword[def] identifier[parseBtop] ( identifier[btopString] ):
literal[string]
identifier[isdigit] = identifier[str] . identifier[isdigit]
identifier[value] = keyword[None]
identifier[queryLetter] = keyword[None]
keyword[for] identifier[offset] , identifier[char] keyword[in] identifier[enumerate] ( identifier[btopString] ):
keyword[if] identifier[isdigit] ( identifier[char] ):
keyword[if] identifier[queryLetter] keyword[is] keyword[not] keyword[None] :
keyword[raise] identifier[ValueError] (
literal[string]
literal[string] %
( identifier[btopString] , identifier[queryLetter] , identifier[offset] - literal[int] ))
identifier[value] = identifier[int] ( identifier[char] ) keyword[if] identifier[value] keyword[is] keyword[None] keyword[else] identifier[value] * literal[int] + identifier[int] ( identifier[char] )
keyword[else] :
keyword[if] identifier[value] keyword[is] keyword[not] keyword[None] :
keyword[yield] identifier[value]
identifier[value] = keyword[None]
identifier[queryLetter] = identifier[char]
keyword[else] :
keyword[if] identifier[queryLetter] keyword[is] keyword[None] :
identifier[queryLetter] = identifier[char]
keyword[else] :
keyword[if] identifier[queryLetter] == literal[string] keyword[and] identifier[char] == literal[string] :
keyword[raise] identifier[ValueError] (
literal[string]
literal[string] %( identifier[btopString] , identifier[offset] - literal[int] ))
keyword[elif] identifier[queryLetter] == identifier[char] :
keyword[raise] identifier[ValueError] (
literal[string]
literal[string] %
( identifier[btopString] , identifier[char] , identifier[offset] - literal[int] ))
keyword[yield] ( identifier[queryLetter] , identifier[char] )
identifier[queryLetter] = keyword[None]
keyword[if] identifier[value] keyword[is] keyword[not] keyword[None] :
keyword[yield] identifier[value]
keyword[elif] identifier[queryLetter] keyword[is] keyword[not] keyword[None] :
keyword[raise] identifier[ValueError] (
literal[string]
literal[string] %( identifier[btopString] , identifier[queryLetter] ))
|
def parseBtop(btopString):
"""
Parse a BTOP string.
The format is described at https://www.ncbi.nlm.nih.gov/books/NBK279682/
@param btopString: A C{str} BTOP sequence.
@raise ValueError: If C{btopString} is not valid BTOP.
@return: A generator that yields a series of integers and 2-tuples of
letters, as found in the BTOP string C{btopString}.
"""
isdigit = str.isdigit
value = None
queryLetter = None
for (offset, char) in enumerate(btopString):
if isdigit(char):
if queryLetter is not None:
raise ValueError('BTOP string %r has a query letter %r at offset %d with no corresponding subject letter' % (btopString, queryLetter, offset - 1)) # depends on [control=['if'], data=['queryLetter']]
value = int(char) if value is None else value * 10 + int(char) # depends on [control=['if'], data=[]]
elif value is not None:
yield value
value = None
queryLetter = char # depends on [control=['if'], data=['value']]
elif queryLetter is None:
queryLetter = char # depends on [control=['if'], data=['queryLetter']]
else:
if queryLetter == '-' and char == '-':
raise ValueError('BTOP string %r has two consecutive gaps at offset %d' % (btopString, offset - 1)) # depends on [control=['if'], data=[]]
elif queryLetter == char:
raise ValueError('BTOP string %r has two consecutive identical %r letters at offset %d' % (btopString, char, offset - 1)) # depends on [control=['if'], data=['char']]
yield (queryLetter, char)
queryLetter = None # depends on [control=['for'], data=[]]
if value is not None:
yield value # depends on [control=['if'], data=['value']]
elif queryLetter is not None:
raise ValueError('BTOP string %r has a trailing query letter %r with no corresponding subject letter' % (btopString, queryLetter)) # depends on [control=['if'], data=['queryLetter']]
|
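A short usage sketch for `parseBtop`; the BTOP string below is invented but follows the format the docstring references (integers for runs of matches, letter pairs for substitutions and gaps).

# parseBtop is a generator, so materialize it with list() to inspect the events.
# '7AG39C-16' reads as: 7 matches, an A->G substitution, 39 matches, a query C
# aligned to a gap in the subject, then 16 more matches.
events = list(parseBtop('7AG39C-16'))
print(events)  # [7, ('A', 'G'), 39, ('C', '-'), 16]

# Malformed input raises ValueError, e.g. two consecutive gap characters:
# list(parseBtop('4--3'))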
def _get_chromecast_from_host(host, tries=None, retry_wait=None, timeout=None,
blocking=True):
"""Creates a Chromecast object from a zeroconf host."""
# Build device status from the mDNS info, this information is
# the primary source and the remaining will be fetched
# later on.
ip_address, port, uuid, model_name, friendly_name = host
_LOGGER.debug("_get_chromecast_from_host %s", host)
cast_type = CAST_TYPES.get(model_name.lower(),
CAST_TYPE_CHROMECAST)
device = DeviceStatus(
friendly_name=friendly_name, model_name=model_name,
manufacturer=None, uuid=uuid, cast_type=cast_type,
)
return Chromecast(host=ip_address, port=port, device=device, tries=tries,
timeout=timeout, retry_wait=retry_wait,
blocking=blocking)
|
def function[_get_chromecast_from_host, parameter[host, tries, retry_wait, timeout, blocking]]:
constant[Creates a Chromecast object from a zeroconf host.]
<ast.Tuple object at 0x7da18dc9a7d0> assign[=] name[host]
call[name[_LOGGER].debug, parameter[constant[_get_chromecast_from_host %s], name[host]]]
variable[cast_type] assign[=] call[name[CAST_TYPES].get, parameter[call[name[model_name].lower, parameter[]], name[CAST_TYPE_CHROMECAST]]]
variable[device] assign[=] call[name[DeviceStatus], parameter[]]
return[call[name[Chromecast], parameter[]]]
|
keyword[def] identifier[_get_chromecast_from_host] ( identifier[host] , identifier[tries] = keyword[None] , identifier[retry_wait] = keyword[None] , identifier[timeout] = keyword[None] ,
identifier[blocking] = keyword[True] ):
literal[string]
identifier[ip_address] , identifier[port] , identifier[uuid] , identifier[model_name] , identifier[friendly_name] = identifier[host]
identifier[_LOGGER] . identifier[debug] ( literal[string] , identifier[host] )
identifier[cast_type] = identifier[CAST_TYPES] . identifier[get] ( identifier[model_name] . identifier[lower] (),
identifier[CAST_TYPE_CHROMECAST] )
identifier[device] = identifier[DeviceStatus] (
identifier[friendly_name] = identifier[friendly_name] , identifier[model_name] = identifier[model_name] ,
identifier[manufacturer] = keyword[None] , identifier[uuid] = identifier[uuid] , identifier[cast_type] = identifier[cast_type] ,
)
keyword[return] identifier[Chromecast] ( identifier[host] = identifier[ip_address] , identifier[port] = identifier[port] , identifier[device] = identifier[device] , identifier[tries] = identifier[tries] ,
identifier[timeout] = identifier[timeout] , identifier[retry_wait] = identifier[retry_wait] ,
identifier[blocking] = identifier[blocking] )
|
def _get_chromecast_from_host(host, tries=None, retry_wait=None, timeout=None, blocking=True):
"""Creates a Chromecast object from a zeroconf host."""
# Build device status from the mDNS info, this information is
# the primary source and the remaining will be fetched
# later on.
(ip_address, port, uuid, model_name, friendly_name) = host
_LOGGER.debug('_get_chromecast_from_host %s', host)
cast_type = CAST_TYPES.get(model_name.lower(), CAST_TYPE_CHROMECAST)
device = DeviceStatus(friendly_name=friendly_name, model_name=model_name, manufacturer=None, uuid=uuid, cast_type=cast_type)
return Chromecast(host=ip_address, port=port, device=device, tries=tries, timeout=timeout, retry_wait=retry_wait, blocking=blocking)
|
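An illustrative call for `_get_chromecast_from_host`; the concrete values are placeholders, not taken from the source, and simply mirror the tuple layout unpacked above.

# host mirrors what zeroconf discovery yields:
# (ip_address, port, uuid, model_name, friendly_name)
host = ('192.168.1.42', 8009, 'aa11bb22-cc33-dd44-ee55-ff6677889900',
        'Chromecast Audio', 'Kitchen speaker')
cast = _get_chromecast_from_host(host, tries=2, retry_wait=5.0, timeout=10)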
def endpoint_get(auth=None, **kwargs):
'''
Get a single endpoint
CLI Example:
.. code-block:: bash
salt '*' keystoneng.endpoint_get id=02cffaa173b2460f98e40eda3748dae5
'''
cloud = get_operator_cloud(auth)
kwargs = _clean_kwargs(**kwargs)
return cloud.get_endpoint(**kwargs)
|
def function[endpoint_get, parameter[auth]]:
constant[
Get a single endpoint
CLI Example:
.. code-block:: bash
salt '*' keystoneng.endpoint_get id=02cffaa173b2460f98e40eda3748dae5
]
variable[cloud] assign[=] call[name[get_operator_cloud], parameter[name[auth]]]
variable[kwargs] assign[=] call[name[_clean_kwargs], parameter[]]
return[call[name[cloud].get_endpoint, parameter[]]]
|
keyword[def] identifier[endpoint_get] ( identifier[auth] = keyword[None] ,** identifier[kwargs] ):
literal[string]
identifier[cloud] = identifier[get_operator_cloud] ( identifier[auth] )
identifier[kwargs] = identifier[_clean_kwargs] (** identifier[kwargs] )
keyword[return] identifier[cloud] . identifier[get_endpoint] (** identifier[kwargs] )
|
def endpoint_get(auth=None, **kwargs):
"""
Get a single endpoint
CLI Example:
.. code-block:: bash
salt '*' keystoneng.endpoint_get id=02cffaa173b2460f98e40eda3748dae5
"""
cloud = get_operator_cloud(auth)
kwargs = _clean_kwargs(**kwargs)
return cloud.get_endpoint(**kwargs)
|
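Besides the salt CLI shown in the docstring, the function can be exercised directly from Python; this sketch assumes auth handling is left to `get_operator_cloud`'s defaults, and any keyword arguments are passed through to `cloud.get_endpoint` after cleaning.

endpoint = endpoint_get(id='02cffaa173b2460f98e40eda3748dae5')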
def ip_access_control_list_mappings(self):
"""
Access the ip_access_control_list_mappings
:returns: twilio.rest.api.v2010.account.sip.domain.ip_access_control_list_mapping.IpAccessControlListMappingList
:rtype: twilio.rest.api.v2010.account.sip.domain.ip_access_control_list_mapping.IpAccessControlListMappingList
"""
if self._ip_access_control_list_mappings is None:
self._ip_access_control_list_mappings = IpAccessControlListMappingList(
self._version,
account_sid=self._solution['account_sid'],
domain_sid=self._solution['sid'],
)
return self._ip_access_control_list_mappings
|
def function[ip_access_control_list_mappings, parameter[self]]:
constant[
Access the ip_access_control_list_mappings
:returns: twilio.rest.api.v2010.account.sip.domain.ip_access_control_list_mapping.IpAccessControlListMappingList
:rtype: twilio.rest.api.v2010.account.sip.domain.ip_access_control_list_mapping.IpAccessControlListMappingList
]
if compare[name[self]._ip_access_control_list_mappings is constant[None]] begin[:]
name[self]._ip_access_control_list_mappings assign[=] call[name[IpAccessControlListMappingList], parameter[name[self]._version]]
return[name[self]._ip_access_control_list_mappings]
|
keyword[def] identifier[ip_access_control_list_mappings] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_ip_access_control_list_mappings] keyword[is] keyword[None] :
identifier[self] . identifier[_ip_access_control_list_mappings] = identifier[IpAccessControlListMappingList] (
identifier[self] . identifier[_version] ,
identifier[account_sid] = identifier[self] . identifier[_solution] [ literal[string] ],
identifier[domain_sid] = identifier[self] . identifier[_solution] [ literal[string] ],
)
keyword[return] identifier[self] . identifier[_ip_access_control_list_mappings]
|
def ip_access_control_list_mappings(self):
"""
Access the ip_access_control_list_mappings
:returns: twilio.rest.api.v2010.account.sip.domain.ip_access_control_list_mapping.IpAccessControlListMappingList
:rtype: twilio.rest.api.v2010.account.sip.domain.ip_access_control_list_mapping.IpAccessControlListMappingList
"""
if self._ip_access_control_list_mappings is None:
self._ip_access_control_list_mappings = IpAccessControlListMappingList(self._version, account_sid=self._solution['account_sid'], domain_sid=self._solution['sid']) # depends on [control=['if'], data=[]]
return self._ip_access_control_list_mappings
|
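The accessor above is a lazy, memoized factory. Assuming it is exposed as a property on the enclosing domain context (`domain` below is a hypothetical instance), repeated access returns the same cached list object.

mappings = domain.ip_access_control_list_mappings
assert mappings is domain.ip_access_control_list_mappings  # built once, then reused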
def process_rename(self, client, tag_value, resource_set):
"""
Move source tag value to destination tag value
- Collect value from old tag
- Delete old tag
- Create new tag & assign stored value
"""
self.log.info("Renaming tag on %s instances" % (len(resource_set)))
old_key = self.data.get('old_key')
new_key = self.data.get('new_key')
        # We prefer to create the new tag first when possible
resource_ids = [r[self.id_key] for r in resource_set if len(
r.get('Tags', [])) < self.tag_count_max]
if resource_ids:
self.create_tag(client, resource_ids, new_key, tag_value)
self.delete_tag(
client, [r[self.id_key] for r in resource_set], old_key, tag_value)
# For resources with 50 tags, we need to delete first and then create.
resource_ids = [r[self.id_key] for r in resource_set if len(
r.get('Tags', [])) > self.tag_count_max - 1]
if resource_ids:
self.create_tag(client, resource_ids, new_key, tag_value)
|
def function[process_rename, parameter[self, client, tag_value, resource_set]]:
constant[
Move source tag value to destination tag value
- Collect value from old tag
- Delete old tag
- Create new tag & assign stored value
]
call[name[self].log.info, parameter[binary_operation[constant[Renaming tag on %s instances] <ast.Mod object at 0x7da2590d6920> call[name[len], parameter[name[resource_set]]]]]]
variable[old_key] assign[=] call[name[self].data.get, parameter[constant[old_key]]]
variable[new_key] assign[=] call[name[self].data.get, parameter[constant[new_key]]]
variable[resource_ids] assign[=] <ast.ListComp object at 0x7da1b2098e20>
if name[resource_ids] begin[:]
call[name[self].create_tag, parameter[name[client], name[resource_ids], name[new_key], name[tag_value]]]
call[name[self].delete_tag, parameter[name[client], <ast.ListComp object at 0x7da1b2098460>, name[old_key], name[tag_value]]]
variable[resource_ids] assign[=] <ast.ListComp object at 0x7da1b1f0a890>
if name[resource_ids] begin[:]
call[name[self].create_tag, parameter[name[client], name[resource_ids], name[new_key], name[tag_value]]]
|
keyword[def] identifier[process_rename] ( identifier[self] , identifier[client] , identifier[tag_value] , identifier[resource_set] ):
literal[string]
identifier[self] . identifier[log] . identifier[info] ( literal[string] %( identifier[len] ( identifier[resource_set] )))
identifier[old_key] = identifier[self] . identifier[data] . identifier[get] ( literal[string] )
identifier[new_key] = identifier[self] . identifier[data] . identifier[get] ( literal[string] )
identifier[resource_ids] =[ identifier[r] [ identifier[self] . identifier[id_key] ] keyword[for] identifier[r] keyword[in] identifier[resource_set] keyword[if] identifier[len] (
identifier[r] . identifier[get] ( literal[string] ,[]))< identifier[self] . identifier[tag_count_max] ]
keyword[if] identifier[resource_ids] :
identifier[self] . identifier[create_tag] ( identifier[client] , identifier[resource_ids] , identifier[new_key] , identifier[tag_value] )
identifier[self] . identifier[delete_tag] (
identifier[client] ,[ identifier[r] [ identifier[self] . identifier[id_key] ] keyword[for] identifier[r] keyword[in] identifier[resource_set] ], identifier[old_key] , identifier[tag_value] )
identifier[resource_ids] =[ identifier[r] [ identifier[self] . identifier[id_key] ] keyword[for] identifier[r] keyword[in] identifier[resource_set] keyword[if] identifier[len] (
identifier[r] . identifier[get] ( literal[string] ,[]))> identifier[self] . identifier[tag_count_max] - literal[int] ]
keyword[if] identifier[resource_ids] :
identifier[self] . identifier[create_tag] ( identifier[client] , identifier[resource_ids] , identifier[new_key] , identifier[tag_value] )
|
def process_rename(self, client, tag_value, resource_set):
"""
Move source tag value to destination tag value
- Collect value from old tag
- Delete old tag
- Create new tag & assign stored value
"""
self.log.info('Renaming tag on %s instances' % len(resource_set))
old_key = self.data.get('old_key')
new_key = self.data.get('new_key')
        # We prefer to create the new tag first when possible
resource_ids = [r[self.id_key] for r in resource_set if len(r.get('Tags', [])) < self.tag_count_max]
if resource_ids:
self.create_tag(client, resource_ids, new_key, tag_value) # depends on [control=['if'], data=[]]
self.delete_tag(client, [r[self.id_key] for r in resource_set], old_key, tag_value)
# For resources with 50 tags, we need to delete first and then create.
resource_ids = [r[self.id_key] for r in resource_set if len(r.get('Tags', [])) > self.tag_count_max - 1]
if resource_ids:
self.create_tag(client, resource_ids, new_key, tag_value) # depends on [control=['if'], data=[]]
|
def separate(polylines, f_mx_dist=2, mn_group_len=4):
"""
split polylines wherever crinkles are found
"""
s = []
for n in range(len(polylines) - 1, -1, -1):
c = polylines[n]
separated = False
start = 0
for m in range(mn_group_len, len(c) - 1):
if m - start < mn_group_len:
continue
m += 1
group = c[m - mn_group_len:m]
x, y = group[:, 0], group[:, 1]
asc, offs, _, _, _ = linregress(x, y)
yfit = asc * x + offs
# check whether next point would fit in:
p1 = c[m]
l = (x[0], yfit[0], p1[-1], asc * p1[-1] + offs)
std = np.mean([line.distance(l, g) for g in group])
dist = line.distance(l, p1)
if dist > 2 and dist > f_mx_dist * std:
separated = True
s.append(c[start:m - 1])
start = m - 1
if separated:
if len(c) - start >= 2:
s.append(c[start:])
polylines.pop(n)
polylines.extend(s)
return polylines
|
def function[separate, parameter[polylines, f_mx_dist, mn_group_len]]:
constant[
split polylines wherever crinkles are found
]
variable[s] assign[=] list[[]]
for taget[name[n]] in starred[call[name[range], parameter[binary_operation[call[name[len], parameter[name[polylines]]] - constant[1]], <ast.UnaryOp object at 0x7da18dc985b0>, <ast.UnaryOp object at 0x7da18dc9a0e0>]]] begin[:]
variable[c] assign[=] call[name[polylines]][name[n]]
variable[separated] assign[=] constant[False]
variable[start] assign[=] constant[0]
for taget[name[m]] in starred[call[name[range], parameter[name[mn_group_len], binary_operation[call[name[len], parameter[name[c]]] - constant[1]]]]] begin[:]
if compare[binary_operation[name[m] - name[start]] less[<] name[mn_group_len]] begin[:]
continue
<ast.AugAssign object at 0x7da18dc994b0>
variable[group] assign[=] call[name[c]][<ast.Slice object at 0x7da18dc98220>]
<ast.Tuple object at 0x7da18dc991e0> assign[=] tuple[[<ast.Subscript object at 0x7da18dc9abf0>, <ast.Subscript object at 0x7da18dc9bc70>]]
<ast.Tuple object at 0x7da18dc9aa40> assign[=] call[name[linregress], parameter[name[x], name[y]]]
variable[yfit] assign[=] binary_operation[binary_operation[name[asc] * name[x]] + name[offs]]
variable[p1] assign[=] call[name[c]][name[m]]
variable[l] assign[=] tuple[[<ast.Subscript object at 0x7da18dc987c0>, <ast.Subscript object at 0x7da18dc9bf70>, <ast.Subscript object at 0x7da18dc9aa70>, <ast.BinOp object at 0x7da18dc98ac0>]]
variable[std] assign[=] call[name[np].mean, parameter[<ast.ListComp object at 0x7da18dc9ab90>]]
variable[dist] assign[=] call[name[line].distance, parameter[name[l], name[p1]]]
if <ast.BoolOp object at 0x7da18dc9a740> begin[:]
variable[separated] assign[=] constant[True]
call[name[s].append, parameter[call[name[c]][<ast.Slice object at 0x7da18dc99cc0>]]]
variable[start] assign[=] binary_operation[name[m] - constant[1]]
if name[separated] begin[:]
if compare[binary_operation[call[name[len], parameter[name[c]]] - name[start]] greater_or_equal[>=] constant[2]] begin[:]
call[name[s].append, parameter[call[name[c]][<ast.Slice object at 0x7da18dc98ee0>]]]
call[name[polylines].pop, parameter[name[n]]]
call[name[polylines].extend, parameter[name[s]]]
return[name[polylines]]
|
keyword[def] identifier[separate] ( identifier[polylines] , identifier[f_mx_dist] = literal[int] , identifier[mn_group_len] = literal[int] ):
literal[string]
identifier[s] =[]
keyword[for] identifier[n] keyword[in] identifier[range] ( identifier[len] ( identifier[polylines] )- literal[int] ,- literal[int] ,- literal[int] ):
identifier[c] = identifier[polylines] [ identifier[n] ]
identifier[separated] = keyword[False]
identifier[start] = literal[int]
keyword[for] identifier[m] keyword[in] identifier[range] ( identifier[mn_group_len] , identifier[len] ( identifier[c] )- literal[int] ):
keyword[if] identifier[m] - identifier[start] < identifier[mn_group_len] :
keyword[continue]
identifier[m] += literal[int]
identifier[group] = identifier[c] [ identifier[m] - identifier[mn_group_len] : identifier[m] ]
identifier[x] , identifier[y] = identifier[group] [:, literal[int] ], identifier[group] [:, literal[int] ]
identifier[asc] , identifier[offs] , identifier[_] , identifier[_] , identifier[_] = identifier[linregress] ( identifier[x] , identifier[y] )
identifier[yfit] = identifier[asc] * identifier[x] + identifier[offs]
identifier[p1] = identifier[c] [ identifier[m] ]
identifier[l] =( identifier[x] [ literal[int] ], identifier[yfit] [ literal[int] ], identifier[p1] [- literal[int] ], identifier[asc] * identifier[p1] [- literal[int] ]+ identifier[offs] )
identifier[std] = identifier[np] . identifier[mean] ([ identifier[line] . identifier[distance] ( identifier[l] , identifier[g] ) keyword[for] identifier[g] keyword[in] identifier[group] ])
identifier[dist] = identifier[line] . identifier[distance] ( identifier[l] , identifier[p1] )
keyword[if] identifier[dist] > literal[int] keyword[and] identifier[dist] > identifier[f_mx_dist] * identifier[std] :
identifier[separated] = keyword[True]
identifier[s] . identifier[append] ( identifier[c] [ identifier[start] : identifier[m] - literal[int] ])
identifier[start] = identifier[m] - literal[int]
keyword[if] identifier[separated] :
keyword[if] identifier[len] ( identifier[c] )- identifier[start] >= literal[int] :
identifier[s] . identifier[append] ( identifier[c] [ identifier[start] :])
identifier[polylines] . identifier[pop] ( identifier[n] )
identifier[polylines] . identifier[extend] ( identifier[s] )
keyword[return] identifier[polylines]
|
def separate(polylines, f_mx_dist=2, mn_group_len=4):
"""
split polylines wherever crinkles are found
"""
s = []
for n in range(len(polylines) - 1, -1, -1):
c = polylines[n]
separated = False
start = 0
for m in range(mn_group_len, len(c) - 1):
if m - start < mn_group_len:
continue # depends on [control=['if'], data=[]]
m += 1
group = c[m - mn_group_len:m]
(x, y) = (group[:, 0], group[:, 1])
(asc, offs, _, _, _) = linregress(x, y)
yfit = asc * x + offs
# check whether next point would fit in:
p1 = c[m]
l = (x[0], yfit[0], p1[-1], asc * p1[-1] + offs)
std = np.mean([line.distance(l, g) for g in group])
dist = line.distance(l, p1)
if dist > 2 and dist > f_mx_dist * std:
separated = True
s.append(c[start:m - 1])
start = m - 1 # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['m']]
if separated:
if len(c) - start >= 2:
s.append(c[start:]) # depends on [control=['if'], data=[]]
polylines.pop(n) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['n']]
polylines.extend(s)
return polylines
|
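A usage sketch for `separate`; the polyline is made up, and the expected split assumes the module-level `line.distance` helper measures point-to-line distance as the code above uses it.

import numpy as np

# A polyline that runs flat for five points and then jumps by 8 in y.
# With the default thresholds the jump is detected as a crinkle, so the
# single input polyline comes back as two shorter pieces.
poly = np.array([[0, 0], [1, 0], [2, 0], [3, 0], [4, 0],
                 [5, 8], [6, 8], [7, 8], [8, 8], [9, 8]], dtype=float)
pieces = separate([poly])
print(len(pieces), [len(p) for p in pieces])  # expected under these assumptions: 2 [4, 6]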
def kwargs_from_client(client, assert_hostname=False):
"""
More or less stolen from docker-py's kwargs_from_env
https://github.com/docker/docker-py/blob/c0ec5512ae7ab90f7fac690064e37181186b1928/docker/utils/utils.py
:type client : docker.Client
"""
from docker import tls
if client.base_url in ('http+docker://localunixsocket', 'http+docker://localhost'):
return {'base_url': 'unix://var/run/docker.sock'}
params = {'base_url': client.base_url}
if client.cert:
        # TODO: problem - client.cert holds file paths, and it would be insecure to send those files.
params['tls'] = tls.TLSConfig(
client_cert=client.cert,
ca_cert=client.verify,
verify=bool(client.verify),
assert_hostname=assert_hostname)
return params
|
def function[kwargs_from_client, parameter[client, assert_hostname]]:
constant[
More or less stolen from docker-py's kwargs_from_env
https://github.com/docker/docker-py/blob/c0ec5512ae7ab90f7fac690064e37181186b1928/docker/utils/utils.py
:type client : docker.Client
]
from relative_module[docker] import module[tls]
if compare[name[client].base_url in tuple[[<ast.Constant object at 0x7da18c4cdb10>, <ast.Constant object at 0x7da18c4cf6a0>]]] begin[:]
return[dictionary[[<ast.Constant object at 0x7da18c4ce380>], [<ast.Constant object at 0x7da18c4cf790>]]]
variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da18c4ccaf0>], [<ast.Attribute object at 0x7da18c4cc040>]]
if name[client].cert begin[:]
call[name[params]][constant[tls]] assign[=] call[name[tls].TLSConfig, parameter[]]
return[name[params]]
|
keyword[def] identifier[kwargs_from_client] ( identifier[client] , identifier[assert_hostname] = keyword[False] ):
literal[string]
keyword[from] identifier[docker] keyword[import] identifier[tls]
keyword[if] identifier[client] . identifier[base_url] keyword[in] ( literal[string] , literal[string] ):
keyword[return] { literal[string] : literal[string] }
identifier[params] ={ literal[string] : identifier[client] . identifier[base_url] }
keyword[if] identifier[client] . identifier[cert] :
identifier[params] [ literal[string] ]= identifier[tls] . identifier[TLSConfig] (
identifier[client_cert] = identifier[client] . identifier[cert] ,
identifier[ca_cert] = identifier[client] . identifier[verify] ,
identifier[verify] = identifier[bool] ( identifier[client] . identifier[verify] ),
identifier[assert_hostname] = identifier[assert_hostname] )
keyword[return] identifier[params]
|
def kwargs_from_client(client, assert_hostname=False):
"""
More or less stolen from docker-py's kwargs_from_env
https://github.com/docker/docker-py/blob/c0ec5512ae7ab90f7fac690064e37181186b1928/docker/utils/utils.py
:type client : docker.Client
"""
from docker import tls
if client.base_url in ('http+docker://localunixsocket', 'http+docker://localhost'):
return {'base_url': 'unix://var/run/docker.sock'} # depends on [control=['if'], data=[]]
params = {'base_url': client.base_url}
if client.cert:
        # TODO: problem - client.cert holds file paths, and it would be insecure to send those files.
params['tls'] = tls.TLSConfig(client_cert=client.cert, ca_cert=client.verify, verify=bool(client.verify), assert_hostname=assert_hostname) # depends on [control=['if'], data=[]]
return params
|
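A hedged round-trip example for `kwargs_from_client`; it assumes an older docker-py where the low-level client class is `docker.Client`, which is the API the adapted `kwargs_from_env` code targets.

import docker

# Build a client the usual way, then recover constructor kwargs from it.
# A unix-socket client is normalized back to the default socket URL.
client = docker.Client(base_url='unix://var/run/docker.sock')
print(kwargs_from_client(client))  # {'base_url': 'unix://var/run/docker.sock'}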
def describeSObjects(self, sObjectTypes):
'''
An array-based version of describeSObject; describes metadata (field list
and object properties) for the specified object or array of objects.
'''
self._setHeaders('describeSObjects')
return self._handleResultTyping(self._sforce.service.describeSObjects(sObjectTypes))
|
def function[describeSObjects, parameter[self, sObjectTypes]]:
constant[
An array-based version of describeSObject; describes metadata (field list
and object properties) for the specified object or array of objects.
]
call[name[self]._setHeaders, parameter[constant[describeSObjects]]]
return[call[name[self]._handleResultTyping, parameter[call[name[self]._sforce.service.describeSObjects, parameter[name[sObjectTypes]]]]]]
|
keyword[def] identifier[describeSObjects] ( identifier[self] , identifier[sObjectTypes] ):
literal[string]
identifier[self] . identifier[_setHeaders] ( literal[string] )
keyword[return] identifier[self] . identifier[_handleResultTyping] ( identifier[self] . identifier[_sforce] . identifier[service] . identifier[describeSObjects] ( identifier[sObjectTypes] ))
|
def describeSObjects(self, sObjectTypes):
"""
An array-based version of describeSObject; describes metadata (field list
and object properties) for the specified object or array of objects.
"""
self._setHeaders('describeSObjects')
return self._handleResultTyping(self._sforce.service.describeSObjects(sObjectTypes))
|
def mprocess(name, config_path, port=None, timeout=180, silence_stdout=True):
"""start 'name' process with params from config_path.
Args:
name - process name or path
config_path - path to file where should be stored configuration
port - process's port
timeout - specify how long, in seconds, a command can take before times out.
if timeout <=0 - doesn't wait for complete start process
silence_stdout - if True (default), redirect stdout to /dev/null
return tuple (Popen object, host) if process started, return (None, None) if not
"""
logger.debug(
"mprocess(name={name!r}, config_path={config_path!r}, port={port!r}, "
"timeout={timeout!r})".format(**locals()))
if not (config_path and isinstance(config_path, str) and os.path.exists(config_path)):
raise OSError("can't find config file {config_path}".format(**locals()))
cfg = read_config(config_path)
cmd = [name, "--config", config_path]
if cfg.get('port', None) is None or port:
port = port or PortPool().port(check=True)
cmd.extend(['--port', str(port)])
host = "{host}:{port}".format(host=_host(), port=port)
try:
logger.debug("execute process: %s", ' '.join(cmd))
proc = subprocess.Popen(
cmd,
stdout=DEVNULL if silence_stdout else None,
stderr=subprocess.STDOUT)
if proc.poll() is not None:
logger.debug("process is not alive")
raise OSError("Process started, but died immediately.")
except (OSError, TypeError) as err:
message = "exception while executing process: {err}".format(err=err)
logger.debug(message)
raise OSError(message)
if timeout > 0 and wait_for(port, timeout):
logger.debug("process '{name}' has started: pid={proc.pid}, host={host}".format(**locals()))
return (proc, host)
elif timeout > 0:
logger.debug("hasn't connected to pid={proc.pid} with host={host} during timeout {timeout} ".format(**locals()))
logger.debug("terminate process with pid={proc.pid}".format(**locals()))
kill_mprocess(proc)
        proc_alive(proc) and time.sleep(3)  # wait while the process stops
message = ("Could not connect to process during "
"{timeout} seconds".format(timeout=timeout))
raise TimeoutError(message, errno.ETIMEDOUT)
return (proc, host)
|
def function[mprocess, parameter[name, config_path, port, timeout, silence_stdout]]:
constant[start 'name' process with params from config_path.
Args:
name - process name or path
config_path - path to file where should be stored configuration
port - process's port
timeout - specify how long, in seconds, a command can take before times out.
if timeout <=0 - doesn't wait for complete start process
silence_stdout - if True (default), redirect stdout to /dev/null
return tuple (Popen object, host) if process started, return (None, None) if not
]
call[name[logger].debug, parameter[call[constant[mprocess(name={name!r}, config_path={config_path!r}, port={port!r}, timeout={timeout!r})].format, parameter[]]]]
if <ast.UnaryOp object at 0x7da18c4cf430> begin[:]
<ast.Raise object at 0x7da18c4cc4c0>
variable[cfg] assign[=] call[name[read_config], parameter[name[config_path]]]
variable[cmd] assign[=] list[[<ast.Name object at 0x7da18c4cf1c0>, <ast.Constant object at 0x7da18c4cdfc0>, <ast.Name object at 0x7da18c4ccc70>]]
if <ast.BoolOp object at 0x7da18c4cffa0> begin[:]
variable[port] assign[=] <ast.BoolOp object at 0x7da18c4cc9a0>
call[name[cmd].extend, parameter[list[[<ast.Constant object at 0x7da18c4cdb10>, <ast.Call object at 0x7da18c4cebc0>]]]]
variable[host] assign[=] call[constant[{host}:{port}].format, parameter[]]
<ast.Try object at 0x7da1b1b62a10>
if <ast.BoolOp object at 0x7da20c6e5870> begin[:]
call[name[logger].debug, parameter[call[constant[process '{name}' has started: pid={proc.pid}, host={host}].format, parameter[]]]]
return[tuple[[<ast.Name object at 0x7da20c6e52a0>, <ast.Name object at 0x7da20c6e59c0>]]]
return[tuple[[<ast.Name object at 0x7da2041dbf40>, <ast.Name object at 0x7da2041da140>]]]
|
keyword[def] identifier[mprocess] ( identifier[name] , identifier[config_path] , identifier[port] = keyword[None] , identifier[timeout] = literal[int] , identifier[silence_stdout] = keyword[True] ):
literal[string]
identifier[logger] . identifier[debug] (
literal[string]
literal[string] . identifier[format] (** identifier[locals] ()))
keyword[if] keyword[not] ( identifier[config_path] keyword[and] identifier[isinstance] ( identifier[config_path] , identifier[str] ) keyword[and] identifier[os] . identifier[path] . identifier[exists] ( identifier[config_path] )):
keyword[raise] identifier[OSError] ( literal[string] . identifier[format] (** identifier[locals] ()))
identifier[cfg] = identifier[read_config] ( identifier[config_path] )
identifier[cmd] =[ identifier[name] , literal[string] , identifier[config_path] ]
keyword[if] identifier[cfg] . identifier[get] ( literal[string] , keyword[None] ) keyword[is] keyword[None] keyword[or] identifier[port] :
identifier[port] = identifier[port] keyword[or] identifier[PortPool] (). identifier[port] ( identifier[check] = keyword[True] )
identifier[cmd] . identifier[extend] ([ literal[string] , identifier[str] ( identifier[port] )])
identifier[host] = literal[string] . identifier[format] ( identifier[host] = identifier[_host] (), identifier[port] = identifier[port] )
keyword[try] :
identifier[logger] . identifier[debug] ( literal[string] , literal[string] . identifier[join] ( identifier[cmd] ))
identifier[proc] = identifier[subprocess] . identifier[Popen] (
identifier[cmd] ,
identifier[stdout] = identifier[DEVNULL] keyword[if] identifier[silence_stdout] keyword[else] keyword[None] ,
identifier[stderr] = identifier[subprocess] . identifier[STDOUT] )
keyword[if] identifier[proc] . identifier[poll] () keyword[is] keyword[not] keyword[None] :
identifier[logger] . identifier[debug] ( literal[string] )
keyword[raise] identifier[OSError] ( literal[string] )
keyword[except] ( identifier[OSError] , identifier[TypeError] ) keyword[as] identifier[err] :
identifier[message] = literal[string] . identifier[format] ( identifier[err] = identifier[err] )
identifier[logger] . identifier[debug] ( identifier[message] )
keyword[raise] identifier[OSError] ( identifier[message] )
keyword[if] identifier[timeout] > literal[int] keyword[and] identifier[wait_for] ( identifier[port] , identifier[timeout] ):
identifier[logger] . identifier[debug] ( literal[string] . identifier[format] (** identifier[locals] ()))
keyword[return] ( identifier[proc] , identifier[host] )
keyword[elif] identifier[timeout] > literal[int] :
identifier[logger] . identifier[debug] ( literal[string] . identifier[format] (** identifier[locals] ()))
identifier[logger] . identifier[debug] ( literal[string] . identifier[format] (** identifier[locals] ()))
identifier[kill_mprocess] ( identifier[proc] )
identifier[proc_alive] ( identifier[proc] ) keyword[and] identifier[time] . identifier[sleep] ( literal[int] )
identifier[message] =( literal[string]
literal[string] . identifier[format] ( identifier[timeout] = identifier[timeout] ))
keyword[raise] identifier[TimeoutError] ( identifier[message] , identifier[errno] . identifier[ETIMEDOUT] )
keyword[return] ( identifier[proc] , identifier[host] )
|
def mprocess(name, config_path, port=None, timeout=180, silence_stdout=True):
"""start 'name' process with params from config_path.
Args:
name - process name or path
config_path - path to file where should be stored configuration
port - process's port
timeout - specify how long, in seconds, a command can take before times out.
if timeout <=0 - doesn't wait for complete start process
silence_stdout - if True (default), redirect stdout to /dev/null
return tuple (Popen object, host) if process started, return (None, None) if not
"""
logger.debug('mprocess(name={name!r}, config_path={config_path!r}, port={port!r}, timeout={timeout!r})'.format(**locals()))
if not (config_path and isinstance(config_path, str) and os.path.exists(config_path)):
raise OSError("can't find config file {config_path}".format(**locals())) # depends on [control=['if'], data=[]]
cfg = read_config(config_path)
cmd = [name, '--config', config_path]
if cfg.get('port', None) is None or port:
port = port or PortPool().port(check=True)
cmd.extend(['--port', str(port)]) # depends on [control=['if'], data=[]]
host = '{host}:{port}'.format(host=_host(), port=port)
try:
logger.debug('execute process: %s', ' '.join(cmd))
proc = subprocess.Popen(cmd, stdout=DEVNULL if silence_stdout else None, stderr=subprocess.STDOUT)
if proc.poll() is not None:
logger.debug('process is not alive')
raise OSError('Process started, but died immediately.') # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except (OSError, TypeError) as err:
message = 'exception while executing process: {err}'.format(err=err)
logger.debug(message)
raise OSError(message) # depends on [control=['except'], data=['err']]
if timeout > 0 and wait_for(port, timeout):
logger.debug("process '{name}' has started: pid={proc.pid}, host={host}".format(**locals()))
return (proc, host) # depends on [control=['if'], data=[]]
elif timeout > 0:
logger.debug("hasn't connected to pid={proc.pid} with host={host} during timeout {timeout} ".format(**locals()))
logger.debug('terminate process with pid={proc.pid}'.format(**locals()))
kill_mprocess(proc)
        proc_alive(proc) and time.sleep(3)  # wait while the process stops
message = 'Could not connect to process during {timeout} seconds'.format(timeout=timeout)
raise TimeoutError(message, errno.ETIMEDOUT) # depends on [control=['if'], data=['timeout']]
return (proc, host)
|
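An illustrative call for `mprocess`; the binary name, config path and port are placeholders, and cleanup relies on the same module's `kill_mprocess` helper used above.

# Start a process from an existing config file and wait up to 60 seconds
# for its port to accept connections.
proc, host = mprocess('mongod', '/tmp/mongod.cfg', port=27030, timeout=60)
print(host)          # e.g. '127.0.0.1:27030' (the host part comes from _host())
kill_mprocess(proc)  # terminate it again when done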
def add_environment(self, environment, sync=True):
"""
add an environment to this OS instance.
:param environment: the environment to add on this OS instance
:param sync: If sync=True(default) synchronize with Ariane server. If sync=False,
add the environment object on list to be added on next save().
:return:
"""
LOGGER.debug("OSInstance.add_environment")
if not sync:
self.environment_2_add.append(environment)
else:
if environment.id is None:
environment.save()
if self.id is not None and environment.id is not None:
params = {
'id': self.id,
'environmentID': environment.id
}
args = {'http_operation': 'GET', 'operation_path': 'update/environments/add', 'parameters': params}
response = OSInstanceService.requester.call(args)
if response.rc != 0:
LOGGER.warning(
'OSInstance.add_environment - Problem while updating OS instance ' + self.name +
'. Reason: ' + str(response.response_content) + '-' + str(response.error_message) +
" (" + str(response.rc) + ")"
)
else:
self.environment_ids.append(environment.id)
environment.osi_ids.append(self.id)
else:
LOGGER.warning(
'OSInstance.add_environment - Problem while updating OS instance ' +
self.name + '. Reason: application ' + environment.name + ' id is None'
)
|
def function[add_environment, parameter[self, environment, sync]]:
constant[
add an environment to this OS instance.
:param environment: the environment to add on this OS instance
:param sync: If sync=True(default) synchronize with Ariane server. If sync=False,
add the environment object on list to be added on next save().
:return:
]
call[name[LOGGER].debug, parameter[constant[OSInstance.add_environment]]]
if <ast.UnaryOp object at 0x7da18eb563b0> begin[:]
call[name[self].environment_2_add.append, parameter[name[environment]]]
|
keyword[def] identifier[add_environment] ( identifier[self] , identifier[environment] , identifier[sync] = keyword[True] ):
literal[string]
identifier[LOGGER] . identifier[debug] ( literal[string] )
keyword[if] keyword[not] identifier[sync] :
identifier[self] . identifier[environment_2_add] . identifier[append] ( identifier[environment] )
keyword[else] :
keyword[if] identifier[environment] . identifier[id] keyword[is] keyword[None] :
identifier[environment] . identifier[save] ()
keyword[if] identifier[self] . identifier[id] keyword[is] keyword[not] keyword[None] keyword[and] identifier[environment] . identifier[id] keyword[is] keyword[not] keyword[None] :
identifier[params] ={
literal[string] : identifier[self] . identifier[id] ,
literal[string] : identifier[environment] . identifier[id]
}
identifier[args] ={ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : identifier[params] }
identifier[response] = identifier[OSInstanceService] . identifier[requester] . identifier[call] ( identifier[args] )
keyword[if] identifier[response] . identifier[rc] != literal[int] :
identifier[LOGGER] . identifier[warning] (
literal[string] + identifier[self] . identifier[name] +
literal[string] + identifier[str] ( identifier[response] . identifier[response_content] )+ literal[string] + identifier[str] ( identifier[response] . identifier[error_message] )+
literal[string] + identifier[str] ( identifier[response] . identifier[rc] )+ literal[string]
)
keyword[else] :
identifier[self] . identifier[environment_ids] . identifier[append] ( identifier[environment] . identifier[id] )
identifier[environment] . identifier[osi_ids] . identifier[append] ( identifier[self] . identifier[id] )
keyword[else] :
identifier[LOGGER] . identifier[warning] (
literal[string] +
identifier[self] . identifier[name] + literal[string] + identifier[environment] . identifier[name] + literal[string]
)
|
def add_environment(self, environment, sync=True):
"""
add an environment to this OS instance.
:param environment: the environment to add on this OS instance
:param sync: If sync=True(default) synchronize with Ariane server. If sync=False,
add the environment object on list to be added on next save().
:return:
"""
LOGGER.debug('OSInstance.add_environment')
if not sync:
self.environment_2_add.append(environment) # depends on [control=['if'], data=[]]
else:
if environment.id is None:
environment.save() # depends on [control=['if'], data=[]]
if self.id is not None and environment.id is not None:
params = {'id': self.id, 'environmentID': environment.id}
args = {'http_operation': 'GET', 'operation_path': 'update/environments/add', 'parameters': params}
response = OSInstanceService.requester.call(args)
if response.rc != 0:
LOGGER.warning('OSInstance.add_environment - Problem while updating OS instance ' + self.name + '. Reason: ' + str(response.response_content) + '-' + str(response.error_message) + ' (' + str(response.rc) + ')') # depends on [control=['if'], data=[]]
else:
self.environment_ids.append(environment.id)
environment.osi_ids.append(self.id) # depends on [control=['if'], data=[]]
else:
LOGGER.warning('OSInstance.add_environment - Problem while updating OS instance ' + self.name + '. Reason: application ' + environment.name + ' id is None')
|
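A hedged usage sketch for `add_environment`; `osi` and `env` stand for already-constructed OSInstance and Environment objects, whose creation the snippet above does not show.

# Immediate sync (default): pushes the link to the Ariane server right away.
osi.add_environment(env)

# Deferred: queue the environment and let the next save() apply it.
osi.add_environment(env, sync=False)
osi.save()  # assumes OSInstance.save() consumes environment_2_add, per the docstring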
def not_empty(message=None) -> Filter_T:
"""
Validate any object to ensure it's not empty (is None or has no elements).
"""
def validate(value):
if value is None:
_raise_failure(message)
if hasattr(value, '__len__') and value.__len__() == 0:
_raise_failure(message)
return value
return validate
|
def function[not_empty, parameter[message]]:
constant[
Validate any object to ensure it's not empty (is None or has no elements).
]
def function[validate, parameter[value]]:
if compare[name[value] is constant[None]] begin[:]
call[name[_raise_failure], parameter[name[message]]]
if <ast.BoolOp object at 0x7da20c992a10> begin[:]
call[name[_raise_failure], parameter[name[message]]]
return[name[value]]
return[name[validate]]
|
keyword[def] identifier[not_empty] ( identifier[message] = keyword[None] )-> identifier[Filter_T] :
literal[string]
keyword[def] identifier[validate] ( identifier[value] ):
keyword[if] identifier[value] keyword[is] keyword[None] :
identifier[_raise_failure] ( identifier[message] )
keyword[if] identifier[hasattr] ( identifier[value] , literal[string] ) keyword[and] identifier[value] . identifier[__len__] ()== literal[int] :
identifier[_raise_failure] ( identifier[message] )
keyword[return] identifier[value]
keyword[return] identifier[validate]
|
def not_empty(message=None) -> Filter_T:
"""
Validate any object to ensure it's not empty (is None or has no elements).
"""
def validate(value):
if value is None:
_raise_failure(message) # depends on [control=['if'], data=[]]
if hasattr(value, '__len__') and value.__len__() == 0:
_raise_failure(message) # depends on [control=['if'], data=[]]
return value
return validate
|
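A short usage sketch for the `not_empty` validator factory; what `_raise_failure` does on failure is defined elsewhere in the module, so only the passing cases return a value here.

check = not_empty('value must not be empty')

check([1, 2, 3])  # returns [1, 2, 3] unchanged
check('hello')    # returns 'hello'
check('')         # calls _raise_failure('value must not be empty')
check(None)       # likewise fails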
def admin_cmd(argv=sys.argv[1:]): # pragma: no cover
"""\
Activate or delete models.
Models are usually made active right after fitting (see command
pld-fit). The 'activate' command allows you to explicitly set the
currently active model. Use 'pld-list' to get an overview of all
available models along with their version identifiers.
Deleting a model will simply remove it from the database.
Usage:
pld-admin activate <version> [options]
pld-admin delete <version> [options]
Options:
-h --help Show this screen.
"""
arguments = docopt(admin_cmd.__doc__, argv=argv)
initialize_config(__mode__='fit')
if arguments['activate']:
activate(model_version=int(arguments['<version>']))
elif arguments['delete']:
delete(model_version=int(arguments['<version>']))
|
def function[admin_cmd, parameter[argv]]:
constant[Activate or delete models.
Models are usually made active right after fitting (see command
pld-fit). The 'activate' command allows you to explicitly set the
currently active model. Use 'pld-list' to get an overview of all
available models along with their version identifiers.
Deleting a model will simply remove it from the database.
Usage:
pld-admin activate <version> [options]
pld-admin delete <version> [options]
Options:
-h --help Show this screen.
]
variable[arguments] assign[=] call[name[docopt], parameter[name[admin_cmd].__doc__]]
call[name[initialize_config], parameter[]]
if call[name[arguments]][constant[activate]] begin[:]
call[name[activate], parameter[]]
|
keyword[def] identifier[admin_cmd] ( identifier[argv] = identifier[sys] . identifier[argv] [ literal[int] :]):
literal[string]
identifier[arguments] = identifier[docopt] ( identifier[admin_cmd] . identifier[__doc__] , identifier[argv] = identifier[argv] )
identifier[initialize_config] ( identifier[__mode__] = literal[string] )
keyword[if] identifier[arguments] [ literal[string] ]:
identifier[activate] ( identifier[model_version] = identifier[int] ( identifier[arguments] [ literal[string] ]))
keyword[elif] identifier[arguments] [ literal[string] ]:
identifier[delete] ( identifier[model_version] = identifier[int] ( identifier[arguments] [ literal[string] ]))
|
def admin_cmd(argv=sys.argv[1:]): # pragma: no cover
"Activate or delete models.\n\nModels are usually made active right after fitting (see command\npld-fit). The 'activate' command allows you to explicitly set the\ncurrently active model. Use 'pld-list' to get an overview of all\navailable models along with their version identifiers.\n\nDeleting a model will simply remove it from the database.\n\nUsage:\n pld-admin activate <version> [options]\n pld-admin delete <version> [options]\n\nOptions:\n -h --help Show this screen.\n"
arguments = docopt(admin_cmd.__doc__, argv=argv)
initialize_config(__mode__='fit')
if arguments['activate']:
activate(model_version=int(arguments['<version>'])) # depends on [control=['if'], data=[]]
elif arguments['delete']:
delete(model_version=int(arguments['<version>'])) # depends on [control=['if'], data=[]]
|
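A command-line sketch for `admin_cmd`; the version number is made up, and calling the function with a pre-split argv is equivalent to invoking the console script.

# Shell:  pld-admin activate 3
# Python equivalent (docopt parses the same argument list):
admin_cmd(['activate', '3'])

# Shell:  pld-admin delete 3
admin_cmd(['delete', '3'])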
def get_paths(path_tokens):
""" Given a list of parser path tokens, return a list of path objects
for them.
"""
if len(path_tokens) == 0:
return []
token = path_tokens.pop()
path = PathToken(token.alias, token.path)
return [path] + get_paths(path_tokens)
|
def function[get_paths, parameter[path_tokens]]:
constant[ Given a list of parser path tokens, return a list of path objects
for them.
]
if compare[call[name[len], parameter[name[path_tokens]]] equal[==] constant[0]] begin[:]
return[list[[]]]
variable[token] assign[=] call[name[path_tokens].pop, parameter[]]
variable[path] assign[=] call[name[PathToken], parameter[name[token].alias, name[token].path]]
return[binary_operation[list[[<ast.Name object at 0x7da1b1ec8730>]] + call[name[get_paths], parameter[name[path_tokens]]]]]
|
keyword[def] identifier[get_paths] ( identifier[path_tokens] ):
literal[string]
keyword[if] identifier[len] ( identifier[path_tokens] )== literal[int] :
keyword[return] []
identifier[token] = identifier[path_tokens] . identifier[pop] ()
identifier[path] = identifier[PathToken] ( identifier[token] . identifier[alias] , identifier[token] . identifier[path] )
keyword[return] [ identifier[path] ]+ identifier[get_paths] ( identifier[path_tokens] )
|
def get_paths(path_tokens):
""" Given a list of parser path tokens, return a list of path objects
for them.
"""
if len(path_tokens) == 0:
return [] # depends on [control=['if'], data=[]]
token = path_tokens.pop()
path = PathToken(token.alias, token.path)
return [path] + get_paths(path_tokens)
|
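A minimal sketch for `get_paths`; `ParserToken` is invented for illustration (any object with `alias` and `path` attributes would do). Note that the input list is consumed by `pop()` and the result comes back in reverse order.

from collections import namedtuple

ParserToken = namedtuple('ParserToken', ['alias', 'path'])
tokens = [ParserToken('a', '/data/a'), ParserToken('b', '/data/b')]

paths = get_paths(tokens)
# -> [PathToken('b', '/data/b'), PathToken('a', '/data/a')]; tokens is now empty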
def create_api_pool_deploy(self):
"""Get an instance of Api Pool Deploy services facade."""
return ApiPoolDeploy(
self.networkapi_url,
self.user,
self.password,
self.user_ldap)
|
def function[create_api_pool_deploy, parameter[self]]:
constant[Get an instance of Api Pool Deploy services facade.]
return[call[name[ApiPoolDeploy], parameter[name[self].networkapi_url, name[self].user, name[self].password, name[self].user_ldap]]]
|
keyword[def] identifier[create_api_pool_deploy] ( identifier[self] ):
literal[string]
keyword[return] identifier[ApiPoolDeploy] (
identifier[self] . identifier[networkapi_url] ,
identifier[self] . identifier[user] ,
identifier[self] . identifier[password] ,
identifier[self] . identifier[user_ldap] )
|
def create_api_pool_deploy(self):
"""Get an instance of Api Pool Deploy services facade."""
return ApiPoolDeploy(self.networkapi_url, self.user, self.password, self.user_ldap)
|
def comments(self, *bug_ids):
"""Get the comments of the given bugs.
:param bug_ids: list of bug identifiers
"""
# Hack. The first value must be a valid bug id
resource = urijoin(self.RBUG, bug_ids[0], self.RCOMMENT)
params = {
self.PIDS: bug_ids
}
response = self.call(resource, params)
return response
|
def function[comments, parameter[self]]:
constant[Get the comments of the given bugs.
:param bug_ids: list of bug identifiers
]
variable[resource] assign[=] call[name[urijoin], parameter[name[self].RBUG, call[name[bug_ids]][constant[0]], name[self].RCOMMENT]]
variable[params] assign[=] dictionary[[<ast.Attribute object at 0x7da1b020f6a0>], [<ast.Name object at 0x7da1b020c220>]]
variable[response] assign[=] call[name[self].call, parameter[name[resource], name[params]]]
return[name[response]]
|
keyword[def] identifier[comments] ( identifier[self] ,* identifier[bug_ids] ):
literal[string]
identifier[resource] = identifier[urijoin] ( identifier[self] . identifier[RBUG] , identifier[bug_ids] [ literal[int] ], identifier[self] . identifier[RCOMMENT] )
identifier[params] ={
identifier[self] . identifier[PIDS] : identifier[bug_ids]
}
identifier[response] = identifier[self] . identifier[call] ( identifier[resource] , identifier[params] )
keyword[return] identifier[response]
|
def comments(self, *bug_ids):
"""Get the comments of the given bugs.
:param bug_ids: list of bug identifiers
"""
# Hack. The first value must be a valid bug id
resource = urijoin(self.RBUG, bug_ids[0], self.RCOMMENT)
params = {self.PIDS: bug_ids}
response = self.call(resource, params)
return response
|
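An illustrative call for `comments`; `bz` stands for an instance of the client class this method belongs to, and the bug ids are placeholders.

# Fetch comments for two bugs in one request; the first id anchors the REST
# path and all ids go into the query parameters.
response = bz.comments(1537460, 1537461)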
def _backward_slice_indirect(self, cfgnode, sim_successors, current_function_addr):
"""
Try to resolve an indirect jump by slicing backwards
"""
# TODO: make this a real indirect jump resolver under the new paradigm
irsb = sim_successors.artifacts['irsb'] # shorthand
l.debug("Resolving indirect jump at IRSB %s", irsb)
# Let's slice backwards from the end of this exit
next_tmp = irsb.next.tmp
stmt_id = [i for i, s in enumerate(irsb.statements)
if isinstance(s, pyvex.IRStmt.WrTmp) and s.tmp == next_tmp][0]
cdg = self.project.analyses.CDG(cfg=self, fail_fast=self._fail_fast)
ddg = self.project.analyses.DDG(cfg=self, start=current_function_addr, call_depth=0, fail_fast=self._fail_fast)
bc = self.project.analyses.BackwardSlice(self,
cdg,
ddg,
targets=[(cfgnode, stmt_id)],
same_function=True,
fail_fast=self._fail_fast)
taint_graph = bc.taint_graph
# Find the correct taint
next_nodes = [cl for cl in taint_graph.nodes() if cl.block_addr == sim_successors.addr]
if not next_nodes:
l.error('The target exit is not included in the slice. Something is wrong')
return []
next_node = next_nodes[0]
# Get the weakly-connected subgraph that contains `next_node`
all_subgraphs = networkx.weakly_connected_component_subgraphs(taint_graph)
starts = set()
for subgraph in all_subgraphs:
if next_node in subgraph:
# Make sure there is no symbolic read...
# FIXME: This is an over-approximation. We should try to limit the starts more
nodes = [n for n in subgraph.nodes() if subgraph.in_degree(n) == 0]
for n in nodes:
starts.add(n.block_addr)
# Execute the slice
successing_addresses = set()
annotated_cfg = bc.annotated_cfg()
for start in starts:
l.debug('Start symbolic execution at 0x%x on program slice.', start)
# Get the state from our CFG
node = self.get_any_node(start)
if node is None:
# Well, we have to live with an empty state
base_state = self.project.factory.blank_state(addr=start)
else:
base_state = node.input_state.copy()
base_state.set_mode('symbolic')
base_state.ip = start
# Clear all initial taints (register values, memory values, etc.)
initial_nodes = [n for n in bc.taint_graph.nodes() if bc.taint_graph.in_degree(n) == 0]
for cl in initial_nodes:
# Iterate in all actions of this node, and pick corresponding actions
cfg_nodes = self.get_all_nodes(cl.block_addr)
for n in cfg_nodes:
if not n.final_states:
continue
actions = [ac for ac in n.final_states[0].history.recent_actions
# Normally it's enough to only use the first final state
if ac.bbl_addr == cl.block_addr and
ac.stmt_idx == cl.stmt_idx
]
for ac in actions:
if not hasattr(ac, 'action'):
continue
if ac.action == 'read':
if ac.type == 'mem':
unconstrained_value = base_state.solver.Unconstrained('unconstrained',
ac.size.ast * 8)
base_state.memory.store(ac.addr,
unconstrained_value,
endness=self.project.arch.memory_endness)
elif ac.type == 'reg':
unconstrained_value = base_state.solver.Unconstrained('unconstrained',
ac.size.ast * 8)
base_state.registers.store(ac.offset,
unconstrained_value,
endness=self.project.arch.register_endness)
# Clear the constraints!
base_state.release_plugin('solver')
# For speed concerns, we are limiting the timeout for z3 solver to 5 seconds
base_state.solver._solver.timeout = 5000
sc = self.project.factory.simulation_manager(base_state)
sc.use_technique(Slicecutor(annotated_cfg))
sc.use_technique(LoopSeer(bound=1))
sc.run()
if sc.cut or sc.deadended:
all_deadended_states = sc.cut + sc.deadended
for s in all_deadended_states:
if s.addr == sim_successors.addr:
# We want to get its successors
succs = s.step()
for succ in succs.flat_successors:
successing_addresses.add(succ.addr)
else:
l.debug("Cannot determine the exit. You need some better ways to recover the exits :-(")
l.debug('Resolution is done, and we have %d new successors.', len(successing_addresses))
return list(successing_addresses)
|
def function[_backward_slice_indirect, parameter[self, cfgnode, sim_successors, current_function_addr]]:
constant[
Try to resolve an indirect jump by slicing backwards
]
variable[irsb] assign[=] call[name[sim_successors].artifacts][constant[irsb]]
call[name[l].debug, parameter[constant[Resolving indirect jump at IRSB %s], name[irsb]]]
variable[next_tmp] assign[=] name[irsb].next.tmp
variable[stmt_id] assign[=] call[<ast.ListComp object at 0x7da1b1c300d0>][constant[0]]
variable[cdg] assign[=] call[name[self].project.analyses.CDG, parameter[]]
variable[ddg] assign[=] call[name[self].project.analyses.DDG, parameter[]]
variable[bc] assign[=] call[name[self].project.analyses.BackwardSlice, parameter[name[self], name[cdg], name[ddg]]]
variable[taint_graph] assign[=] name[bc].taint_graph
variable[next_nodes] assign[=] <ast.ListComp object at 0x7da18eb57940>
if <ast.UnaryOp object at 0x7da18eb56740> begin[:]
call[name[l].error, parameter[constant[The target exit is not included in the slice. Something is wrong]]]
return[list[[]]]
variable[next_node] assign[=] call[name[next_nodes]][constant[0]]
variable[all_subgraphs] assign[=] call[name[networkx].weakly_connected_component_subgraphs, parameter[name[taint_graph]]]
variable[starts] assign[=] call[name[set], parameter[]]
for taget[name[subgraph]] in starred[name[all_subgraphs]] begin[:]
if compare[name[next_node] in name[subgraph]] begin[:]
variable[nodes] assign[=] <ast.ListComp object at 0x7da18eb56e60>
for taget[name[n]] in starred[name[nodes]] begin[:]
call[name[starts].add, parameter[name[n].block_addr]]
variable[successing_addresses] assign[=] call[name[set], parameter[]]
variable[annotated_cfg] assign[=] call[name[bc].annotated_cfg, parameter[]]
for taget[name[start]] in starred[name[starts]] begin[:]
call[name[l].debug, parameter[constant[Start symbolic execution at 0x%x on program slice.], name[start]]]
variable[node] assign[=] call[name[self].get_any_node, parameter[name[start]]]
if compare[name[node] is constant[None]] begin[:]
variable[base_state] assign[=] call[name[self].project.factory.blank_state, parameter[]]
name[base_state].solver._solver.timeout assign[=] constant[5000]
variable[sc] assign[=] call[name[self].project.factory.simulation_manager, parameter[name[base_state]]]
call[name[sc].use_technique, parameter[call[name[Slicecutor], parameter[name[annotated_cfg]]]]]
call[name[sc].use_technique, parameter[call[name[LoopSeer], parameter[]]]]
call[name[sc].run, parameter[]]
if <ast.BoolOp object at 0x7da18eb56770> begin[:]
variable[all_deadended_states] assign[=] binary_operation[name[sc].cut + name[sc].deadended]
for taget[name[s]] in starred[name[all_deadended_states]] begin[:]
if compare[name[s].addr equal[==] name[sim_successors].addr] begin[:]
variable[succs] assign[=] call[name[s].step, parameter[]]
for taget[name[succ]] in starred[name[succs].flat_successors] begin[:]
call[name[successing_addresses].add, parameter[name[succ].addr]]
call[name[l].debug, parameter[constant[Resolution is done, and we have %d new successors.], call[name[len], parameter[name[successing_addresses]]]]]
return[call[name[list], parameter[name[successing_addresses]]]]
|
keyword[def] identifier[_backward_slice_indirect] ( identifier[self] , identifier[cfgnode] , identifier[sim_successors] , identifier[current_function_addr] ):
literal[string]
identifier[irsb] = identifier[sim_successors] . identifier[artifacts] [ literal[string] ]
identifier[l] . identifier[debug] ( literal[string] , identifier[irsb] )
identifier[next_tmp] = identifier[irsb] . identifier[next] . identifier[tmp]
identifier[stmt_id] =[ identifier[i] keyword[for] identifier[i] , identifier[s] keyword[in] identifier[enumerate] ( identifier[irsb] . identifier[statements] )
keyword[if] identifier[isinstance] ( identifier[s] , identifier[pyvex] . identifier[IRStmt] . identifier[WrTmp] ) keyword[and] identifier[s] . identifier[tmp] == identifier[next_tmp] ][ literal[int] ]
identifier[cdg] = identifier[self] . identifier[project] . identifier[analyses] . identifier[CDG] ( identifier[cfg] = identifier[self] , identifier[fail_fast] = identifier[self] . identifier[_fail_fast] )
identifier[ddg] = identifier[self] . identifier[project] . identifier[analyses] . identifier[DDG] ( identifier[cfg] = identifier[self] , identifier[start] = identifier[current_function_addr] , identifier[call_depth] = literal[int] , identifier[fail_fast] = identifier[self] . identifier[_fail_fast] )
identifier[bc] = identifier[self] . identifier[project] . identifier[analyses] . identifier[BackwardSlice] ( identifier[self] ,
identifier[cdg] ,
identifier[ddg] ,
identifier[targets] =[( identifier[cfgnode] , identifier[stmt_id] )],
identifier[same_function] = keyword[True] ,
identifier[fail_fast] = identifier[self] . identifier[_fail_fast] )
identifier[taint_graph] = identifier[bc] . identifier[taint_graph]
identifier[next_nodes] =[ identifier[cl] keyword[for] identifier[cl] keyword[in] identifier[taint_graph] . identifier[nodes] () keyword[if] identifier[cl] . identifier[block_addr] == identifier[sim_successors] . identifier[addr] ]
keyword[if] keyword[not] identifier[next_nodes] :
identifier[l] . identifier[error] ( literal[string] )
keyword[return] []
identifier[next_node] = identifier[next_nodes] [ literal[int] ]
identifier[all_subgraphs] = identifier[networkx] . identifier[weakly_connected_component_subgraphs] ( identifier[taint_graph] )
identifier[starts] = identifier[set] ()
keyword[for] identifier[subgraph] keyword[in] identifier[all_subgraphs] :
keyword[if] identifier[next_node] keyword[in] identifier[subgraph] :
identifier[nodes] =[ identifier[n] keyword[for] identifier[n] keyword[in] identifier[subgraph] . identifier[nodes] () keyword[if] identifier[subgraph] . identifier[in_degree] ( identifier[n] )== literal[int] ]
keyword[for] identifier[n] keyword[in] identifier[nodes] :
identifier[starts] . identifier[add] ( identifier[n] . identifier[block_addr] )
identifier[successing_addresses] = identifier[set] ()
identifier[annotated_cfg] = identifier[bc] . identifier[annotated_cfg] ()
keyword[for] identifier[start] keyword[in] identifier[starts] :
identifier[l] . identifier[debug] ( literal[string] , identifier[start] )
identifier[node] = identifier[self] . identifier[get_any_node] ( identifier[start] )
keyword[if] identifier[node] keyword[is] keyword[None] :
identifier[base_state] = identifier[self] . identifier[project] . identifier[factory] . identifier[blank_state] ( identifier[addr] = identifier[start] )
keyword[else] :
identifier[base_state] = identifier[node] . identifier[input_state] . identifier[copy] ()
identifier[base_state] . identifier[set_mode] ( literal[string] )
identifier[base_state] . identifier[ip] = identifier[start]
identifier[initial_nodes] =[ identifier[n] keyword[for] identifier[n] keyword[in] identifier[bc] . identifier[taint_graph] . identifier[nodes] () keyword[if] identifier[bc] . identifier[taint_graph] . identifier[in_degree] ( identifier[n] )== literal[int] ]
keyword[for] identifier[cl] keyword[in] identifier[initial_nodes] :
identifier[cfg_nodes] = identifier[self] . identifier[get_all_nodes] ( identifier[cl] . identifier[block_addr] )
keyword[for] identifier[n] keyword[in] identifier[cfg_nodes] :
keyword[if] keyword[not] identifier[n] . identifier[final_states] :
keyword[continue]
identifier[actions] =[ identifier[ac] keyword[for] identifier[ac] keyword[in] identifier[n] . identifier[final_states] [ literal[int] ]. identifier[history] . identifier[recent_actions]
keyword[if] identifier[ac] . identifier[bbl_addr] == identifier[cl] . identifier[block_addr] keyword[and]
identifier[ac] . identifier[stmt_idx] == identifier[cl] . identifier[stmt_idx]
]
keyword[for] identifier[ac] keyword[in] identifier[actions] :
keyword[if] keyword[not] identifier[hasattr] ( identifier[ac] , literal[string] ):
keyword[continue]
keyword[if] identifier[ac] . identifier[action] == literal[string] :
keyword[if] identifier[ac] . identifier[type] == literal[string] :
identifier[unconstrained_value] = identifier[base_state] . identifier[solver] . identifier[Unconstrained] ( literal[string] ,
identifier[ac] . identifier[size] . identifier[ast] * literal[int] )
identifier[base_state] . identifier[memory] . identifier[store] ( identifier[ac] . identifier[addr] ,
identifier[unconstrained_value] ,
identifier[endness] = identifier[self] . identifier[project] . identifier[arch] . identifier[memory_endness] )
keyword[elif] identifier[ac] . identifier[type] == literal[string] :
identifier[unconstrained_value] = identifier[base_state] . identifier[solver] . identifier[Unconstrained] ( literal[string] ,
identifier[ac] . identifier[size] . identifier[ast] * literal[int] )
identifier[base_state] . identifier[registers] . identifier[store] ( identifier[ac] . identifier[offset] ,
identifier[unconstrained_value] ,
identifier[endness] = identifier[self] . identifier[project] . identifier[arch] . identifier[register_endness] )
identifier[base_state] . identifier[release_plugin] ( literal[string] )
identifier[base_state] . identifier[solver] . identifier[_solver] . identifier[timeout] = literal[int]
identifier[sc] = identifier[self] . identifier[project] . identifier[factory] . identifier[simulation_manager] ( identifier[base_state] )
identifier[sc] . identifier[use_technique] ( identifier[Slicecutor] ( identifier[annotated_cfg] ))
identifier[sc] . identifier[use_technique] ( identifier[LoopSeer] ( identifier[bound] = literal[int] ))
identifier[sc] . identifier[run] ()
keyword[if] identifier[sc] . identifier[cut] keyword[or] identifier[sc] . identifier[deadended] :
identifier[all_deadended_states] = identifier[sc] . identifier[cut] + identifier[sc] . identifier[deadended]
keyword[for] identifier[s] keyword[in] identifier[all_deadended_states] :
keyword[if] identifier[s] . identifier[addr] == identifier[sim_successors] . identifier[addr] :
identifier[succs] = identifier[s] . identifier[step] ()
keyword[for] identifier[succ] keyword[in] identifier[succs] . identifier[flat_successors] :
identifier[successing_addresses] . identifier[add] ( identifier[succ] . identifier[addr] )
keyword[else] :
identifier[l] . identifier[debug] ( literal[string] )
identifier[l] . identifier[debug] ( literal[string] , identifier[len] ( identifier[successing_addresses] ))
keyword[return] identifier[list] ( identifier[successing_addresses] )
|
def _backward_slice_indirect(self, cfgnode, sim_successors, current_function_addr):
"""
Try to resolve an indirect jump by slicing backwards
"""
# TODO: make this a real indirect jump resolver under the new paradigm
irsb = sim_successors.artifacts['irsb'] # shorthand
l.debug('Resolving indirect jump at IRSB %s', irsb)
# Let's slice backwards from the end of this exit
next_tmp = irsb.next.tmp
stmt_id = [i for (i, s) in enumerate(irsb.statements) if isinstance(s, pyvex.IRStmt.WrTmp) and s.tmp == next_tmp][0]
cdg = self.project.analyses.CDG(cfg=self, fail_fast=self._fail_fast)
ddg = self.project.analyses.DDG(cfg=self, start=current_function_addr, call_depth=0, fail_fast=self._fail_fast)
bc = self.project.analyses.BackwardSlice(self, cdg, ddg, targets=[(cfgnode, stmt_id)], same_function=True, fail_fast=self._fail_fast)
taint_graph = bc.taint_graph
# Find the correct taint
next_nodes = [cl for cl in taint_graph.nodes() if cl.block_addr == sim_successors.addr]
if not next_nodes:
l.error('The target exit is not included in the slice. Something is wrong')
return [] # depends on [control=['if'], data=[]]
next_node = next_nodes[0]
# Get the weakly-connected subgraph that contains `next_node`
all_subgraphs = networkx.weakly_connected_component_subgraphs(taint_graph)
starts = set()
for subgraph in all_subgraphs:
if next_node in subgraph:
# Make sure there is no symbolic read...
# FIXME: This is an over-approximation. We should try to limit the starts more
nodes = [n for n in subgraph.nodes() if subgraph.in_degree(n) == 0]
for n in nodes:
starts.add(n.block_addr) # depends on [control=['for'], data=['n']] # depends on [control=['if'], data=['subgraph']] # depends on [control=['for'], data=['subgraph']]
# Execute the slice
successing_addresses = set()
annotated_cfg = bc.annotated_cfg()
for start in starts:
l.debug('Start symbolic execution at 0x%x on program slice.', start)
# Get the state from our CFG
node = self.get_any_node(start)
if node is None:
# Well, we have to live with an empty state
base_state = self.project.factory.blank_state(addr=start) # depends on [control=['if'], data=[]]
else:
base_state = node.input_state.copy()
base_state.set_mode('symbolic')
base_state.ip = start
# Clear all initial taints (register values, memory values, etc.)
initial_nodes = [n for n in bc.taint_graph.nodes() if bc.taint_graph.in_degree(n) == 0]
for cl in initial_nodes:
# Iterate in all actions of this node, and pick corresponding actions
cfg_nodes = self.get_all_nodes(cl.block_addr)
for n in cfg_nodes:
if not n.final_states:
continue # depends on [control=['if'], data=[]]
# Normally it's enough to only use the first final state
actions = [ac for ac in n.final_states[0].history.recent_actions if ac.bbl_addr == cl.block_addr and ac.stmt_idx == cl.stmt_idx]
for ac in actions:
if not hasattr(ac, 'action'):
continue # depends on [control=['if'], data=[]]
if ac.action == 'read':
if ac.type == 'mem':
unconstrained_value = base_state.solver.Unconstrained('unconstrained', ac.size.ast * 8)
base_state.memory.store(ac.addr, unconstrained_value, endness=self.project.arch.memory_endness) # depends on [control=['if'], data=[]]
elif ac.type == 'reg':
unconstrained_value = base_state.solver.Unconstrained('unconstrained', ac.size.ast * 8)
base_state.registers.store(ac.offset, unconstrained_value, endness=self.project.arch.register_endness) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['ac']] # depends on [control=['for'], data=['n']] # depends on [control=['for'], data=['cl']]
# Clear the constraints!
base_state.release_plugin('solver')
# For speed concerns, we are limiting the timeout for z3 solver to 5 seconds
base_state.solver._solver.timeout = 5000
sc = self.project.factory.simulation_manager(base_state)
sc.use_technique(Slicecutor(annotated_cfg))
sc.use_technique(LoopSeer(bound=1))
sc.run()
if sc.cut or sc.deadended:
all_deadended_states = sc.cut + sc.deadended
for s in all_deadended_states:
if s.addr == sim_successors.addr:
# We want to get its successors
succs = s.step()
for succ in succs.flat_successors:
successing_addresses.add(succ.addr) # depends on [control=['for'], data=['succ']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['s']] # depends on [control=['if'], data=[]]
else:
l.debug('Cannot determine the exit. You need some better ways to recover the exits :-(') # depends on [control=['for'], data=['start']]
l.debug('Resolution is done, and we have %d new successors.', len(successing_addresses))
return list(successing_addresses)
|
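The first step above, locating the IR statement that defines the jump-target temporary, can be seen in isolation. A minimal sketch, assuming a pyvex IRSB whose `next` expression reads a temporary; the helper name is ours, not part of the original:

import pyvex  # assumed available

def find_jump_target_stmt(irsb):
    # Return the index of the WrTmp statement that writes the temp used by irsb.next,
    # or None if the block's successor is not defined by a temporary write.
    next_tmp = irsb.next.tmp
    for i, stmt in enumerate(irsb.statements):
        if isinstance(stmt, pyvex.IRStmt.WrTmp) and stmt.tmp == next_tmp:
            return i
    return None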
def break_to_bytes(value):
"""
Breaks a value into values of less than 255 that form value when multiplied.
(Or almost do so with primes)
Returns a tuple
"""
if value < 256:
return (value,)
c = 256
least = (0, 255)
for i in range(254):
c -= 1
rest = value % c
if rest == 0 and value / c < 256:
return (c, int(value / c))
elif rest == 0 and value / c > 255:
parts = list(break_to_bytes(value / c))
parts.insert(0, c)
return tuple(parts)
else:
if rest < least[1]:
least = (c, rest)
return (c, int(value / c))
|
def function[break_to_bytes, parameter[value]]:
constant[
Breaks a value into values of less than 255 that form value when multiplied.
(Or almost do so with primes)
Returns a tuple
]
if compare[name[value] less[<] constant[256]] begin[:]
return[tuple[[<ast.Name object at 0x7da1b1e65690>]]]
variable[c] assign[=] constant[256]
variable[least] assign[=] tuple[[<ast.Constant object at 0x7da1b1e647f0>, <ast.Constant object at 0x7da1b1e66170>]]
for taget[name[i]] in starred[call[name[range], parameter[constant[254]]]] begin[:]
<ast.AugAssign object at 0x7da20c7cb280>
variable[rest] assign[=] binary_operation[name[value] <ast.Mod object at 0x7da2590d6920> name[c]]
if <ast.BoolOp object at 0x7da1b1e97760> begin[:]
return[tuple[[<ast.Name object at 0x7da1b1eb4340>, <ast.Call object at 0x7da1b1eb6ef0>]]]
return[tuple[[<ast.Name object at 0x7da1b1eb4190>, <ast.Call object at 0x7da1b1eb5720>]]]
|
keyword[def] identifier[break_to_bytes] ( identifier[value] ):
literal[string]
keyword[if] identifier[value] < literal[int] :
keyword[return] ( identifier[value] ,)
identifier[c] = literal[int]
identifier[least] =( literal[int] , literal[int] )
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] ):
identifier[c] -= literal[int]
identifier[rest] = identifier[value] % identifier[c]
keyword[if] identifier[rest] == literal[int] keyword[and] identifier[value] / identifier[c] < literal[int] :
keyword[return] ( identifier[c] , identifier[int] ( identifier[value] / identifier[c] ))
keyword[elif] identifier[rest] == literal[int] keyword[and] identifier[value] / identifier[c] > literal[int] :
identifier[parts] = identifier[list] ( identifier[break_to_bytes] ( identifier[value] / identifier[c] ))
identifier[parts] . identifier[insert] ( literal[int] , identifier[c] )
keyword[return] identifier[tuple] ( identifier[parts] )
keyword[else] :
keyword[if] identifier[rest] < identifier[least] [ literal[int] ]:
identifier[least] =( identifier[c] , identifier[rest] )
keyword[return] ( identifier[c] , identifier[int] ( identifier[value] / identifier[c] ))
|
def break_to_bytes(value):
"""
Breaks a value into values of less than 255 that form value when multiplied.
(Or almost do so with primes)
Returns a tuple
"""
if value < 256:
return (value,) # depends on [control=['if'], data=['value']]
c = 256
least = (0, 255)
for i in range(254):
c -= 1
rest = value % c
if rest == 0 and value / c < 256:
return (c, int(value / c)) # depends on [control=['if'], data=[]]
elif rest == 0 and value / c > 255:
parts = list(break_to_bytes(value / c))
parts.insert(0, c)
return tuple(parts) # depends on [control=['if'], data=[]]
elif rest < least[1]:
least = (c, rest) # depends on [control=['if'], data=['rest']] # depends on [control=['for'], data=[]]
return (c, int(value / c))
|
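A couple of worked calls, traced by hand from the code above, make the loop easier to follow:

print(break_to_bytes(100))   # -> (100,)    already below 256, returned unchanged
print(break_to_bytes(1000))  # -> (250, 4)  250 is the first divisor found counting down from 255
# A prime such as 997 has no divisor in range, so the final (c, int(value / c))
# fallback only approximates the product, hence the "almost" in the docstring.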
def _pytypes_excepthook(exctype, value, tb):
""""An excepthook suitable for use as sys.excepthook, that strips away
the part of the traceback belonging to pytypes' internals.
Can be switched on and off via pytypes.clean_traceback
or pytypes.set_clean_traceback.
The latter automatically installs this hook in sys.excepthook.
"""
if pytypes.clean_traceback and issubclass(exctype, TypeError):
traceback.print_exception(exctype, value, tb, _calc_traceback_limit(tb))
else:
if _sys_excepthook is None:
sys.__excepthook__(exctype, value, tb)
else:
_sys_excepthook(exctype, value, tb)
|
def function[_pytypes_excepthook, parameter[exctype, value, tb]]:
constant["An excepthook suitable for use as sys.excepthook, that strips away
the part of the traceback belonging to pytypes' internals.
Can be switched on and off via pytypes.clean_traceback
or pytypes.set_clean_traceback.
The latter automatically installs this hook in sys.excepthook.
]
if <ast.BoolOp object at 0x7da1b0dcb730> begin[:]
call[name[traceback].print_exception, parameter[name[exctype], name[value], name[tb], call[name[_calc_traceback_limit], parameter[name[tb]]]]]
|
keyword[def] identifier[_pytypes_excepthook] ( identifier[exctype] , identifier[value] , identifier[tb] ):
literal[string]
keyword[if] identifier[pytypes] . identifier[clean_traceback] keyword[and] identifier[issubclass] ( identifier[exctype] , identifier[TypeError] ):
identifier[traceback] . identifier[print_exception] ( identifier[exctype] , identifier[value] , identifier[tb] , identifier[_calc_traceback_limit] ( identifier[tb] ))
keyword[else] :
keyword[if] identifier[_sys_excepthook] keyword[is] keyword[None] :
identifier[sys] . identifier[__excepthook__] ( identifier[exctype] , identifier[value] , identifier[tb] )
keyword[else] :
identifier[_sys_excepthook] ( identifier[exctype] , identifier[value] , identifier[tb] )
|
def _pytypes_excepthook(exctype, value, tb):
""""An excepthook suitable for use as sys.excepthook, that strips away
the part of the traceback belonging to pytypes' internals.
Can be switched on and off via pytypes.clean_traceback
or pytypes.set_clean_traceback.
The latter automatically installs this hook in sys.excepthook.
"""
if pytypes.clean_traceback and issubclass(exctype, TypeError):
traceback.print_exception(exctype, value, tb, _calc_traceback_limit(tb)) # depends on [control=['if'], data=[]]
elif _sys_excepthook is None:
sys.__excepthook__(exctype, value, tb) # depends on [control=['if'], data=[]]
else:
_sys_excepthook(exctype, value, tb)
|
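How the hook ends up installed is only hinted at in the docstring. A minimal sketch of wiring it up by hand (pytypes.set_clean_traceback normally takes care of this; `_sys_excepthook` is assumed to hold the previously installed hook):

import sys

_sys_excepthook = sys.excepthook      # keep the old hook so it can be delegated to
sys.excepthook = _pytypes_excepthook  # route uncaught exceptions through the filter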
def range(self, start, end=None, step=1, numPartitions=None):
"""
Create a :class:`DataFrame` with single :class:`pyspark.sql.types.LongType` column named
``id``, containing elements in a range from ``start`` to ``end`` (exclusive) with
step value ``step``.
:param start: the start value
:param end: the end value (exclusive)
:param step: the incremental step (default: 1)
:param numPartitions: the number of partitions of the DataFrame
:return: :class:`DataFrame`
>>> sqlContext.range(1, 7, 2).collect()
[Row(id=1), Row(id=3), Row(id=5)]
If only one argument is specified, it will be used as the end value.
>>> sqlContext.range(3).collect()
[Row(id=0), Row(id=1), Row(id=2)]
"""
return self.sparkSession.range(start, end, step, numPartitions)
|
def function[range, parameter[self, start, end, step, numPartitions]]:
constant[
Create a :class:`DataFrame` with single :class:`pyspark.sql.types.LongType` column named
``id``, containing elements in a range from ``start`` to ``end`` (exclusive) with
step value ``step``.
:param start: the start value
:param end: the end value (exclusive)
:param step: the incremental step (default: 1)
:param numPartitions: the number of partitions of the DataFrame
:return: :class:`DataFrame`
>>> sqlContext.range(1, 7, 2).collect()
[Row(id=1), Row(id=3), Row(id=5)]
If only one argument is specified, it will be used as the end value.
>>> sqlContext.range(3).collect()
[Row(id=0), Row(id=1), Row(id=2)]
]
return[call[name[self].sparkSession.range, parameter[name[start], name[end], name[step], name[numPartitions]]]]
|
keyword[def] identifier[range] ( identifier[self] , identifier[start] , identifier[end] = keyword[None] , identifier[step] = literal[int] , identifier[numPartitions] = keyword[None] ):
literal[string]
keyword[return] identifier[self] . identifier[sparkSession] . identifier[range] ( identifier[start] , identifier[end] , identifier[step] , identifier[numPartitions] )
|
def range(self, start, end=None, step=1, numPartitions=None):
"""
Create a :class:`DataFrame` with single :class:`pyspark.sql.types.LongType` column named
``id``, containing elements in a range from ``start`` to ``end`` (exclusive) with
step value ``step``.
:param start: the start value
:param end: the end value (exclusive)
:param step: the incremental step (default: 1)
:param numPartitions: the number of partitions of the DataFrame
:return: :class:`DataFrame`
>>> sqlContext.range(1, 7, 2).collect()
[Row(id=1), Row(id=3), Row(id=5)]
If only one argument is specified, it will be used as the end value.
>>> sqlContext.range(3).collect()
[Row(id=0), Row(id=1), Row(id=2)]
"""
return self.sparkSession.range(start, end, step, numPartitions)
|
def basic_dependencies(self):
"""
Accesses basic dependencies from the XML output
:getter: Returns the dependency graph for basic dependencies
:type: corenlp_xml.dependencies.DependencyGraph
"""
if self._basic_dependencies is None:
deps = self._element.xpath('dependencies[@type="basic-dependencies"]')
if len(deps) > 0:
self._basic_dependencies = DependencyGraph(deps[0])
return self._basic_dependencies
|
def function[basic_dependencies, parameter[self]]:
constant[
Accesses basic dependencies from the XML output
:getter: Returns the dependency graph for basic dependencies
:type: corenlp_xml.dependencies.DependencyGraph
]
if compare[name[self]._basic_dependencies is constant[None]] begin[:]
variable[deps] assign[=] call[name[self]._element.xpath, parameter[constant[dependencies[@type="basic-dependencies"]]]]
if compare[call[name[len], parameter[name[deps]]] greater[>] constant[0]] begin[:]
name[self]._basic_dependencies assign[=] call[name[DependencyGraph], parameter[call[name[deps]][constant[0]]]]
return[name[self]._basic_dependencies]
|
keyword[def] identifier[basic_dependencies] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_basic_dependencies] keyword[is] keyword[None] :
identifier[deps] = identifier[self] . identifier[_element] . identifier[xpath] ( literal[string] )
keyword[if] identifier[len] ( identifier[deps] )> literal[int] :
identifier[self] . identifier[_basic_dependencies] = identifier[DependencyGraph] ( identifier[deps] [ literal[int] ])
keyword[return] identifier[self] . identifier[_basic_dependencies]
|
def basic_dependencies(self):
"""
Accesses basic dependencies from the XML output
:getter: Returns the dependency graph for basic dependencies
:type: corenlp_xml.dependencies.DependencyGraph
"""
if self._basic_dependencies is None:
deps = self._element.xpath('dependencies[@type="basic-dependencies"]')
if len(deps) > 0:
self._basic_dependencies = DependencyGraph(deps[0]) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return self._basic_dependencies
|
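The property above is a cache-on-first-access accessor: the XPath query runs once and the resulting DependencyGraph is memoized on the instance. A generic sketch of the same pattern, illustrative only and not part of corenlp_xml:

class LazyGraphHolder:
    def __init__(self):
        self._graph = None

    @property
    def graph(self):
        if self._graph is None:
            # stand-in for the one-off XPath lookup and DependencyGraph construction
            self._graph = {"nodes": [], "edges": []}
        return self._graph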
def words_to_word_ids(data=None, word_to_id=None, unk_key='UNK'):
"""Convert a list of string (words) to IDs.
Parameters
----------
data : list of string or byte
The context in list format
word_to_id : a dictionary
that maps word to ID.
unk_key : str
Represent the unknown words.
Returns
--------
list of int
A list of IDs to represent the context.
Examples
--------
>>> words = tl.files.load_matt_mahoney_text8_dataset()
>>> vocabulary_size = 50000
>>> data, count, dictionary, reverse_dictionary = tl.nlp.build_words_dataset(words, vocabulary_size, True)
>>> context = [b'hello', b'how', b'are', b'you']
>>> ids = tl.nlp.words_to_word_ids(words, dictionary)
>>> context = tl.nlp.word_ids_to_words(ids, reverse_dictionary)
>>> print(ids)
[6434, 311, 26, 207]
>>> print(context)
[b'hello', b'how', b'are', b'you']
References
---------------
- `tensorflow.models.rnn.ptb.reader <https://github.com/tensorflow/tensorflow/tree/master/tensorflow/models/rnn/ptb>`__
"""
if data is None:
raise Exception("data : list of string or byte")
if word_to_id is None:
raise Exception("word_to_id : a dictionary")
# if isinstance(data[0], six.string_types):
# tl.logging.info(type(data[0]))
# # exit()
# tl.logging.info(data[0])
# tl.logging.info(word_to_id)
# return [word_to_id[str(word)] for word in data]
# else:
word_ids = []
for word in data:
if word_to_id.get(word) is not None:
word_ids.append(word_to_id[word])
else:
word_ids.append(word_to_id[unk_key])
return word_ids
|
def function[words_to_word_ids, parameter[data, word_to_id, unk_key]]:
constant[Convert a list of string (words) to IDs.
Parameters
----------
data : list of string or byte
The context in list format
word_to_id : a dictionary
that maps word to ID.
unk_key : str
Represent the unknown words.
Returns
--------
list of int
A list of IDs to represent the context.
Examples
--------
>>> words = tl.files.load_matt_mahoney_text8_dataset()
>>> vocabulary_size = 50000
>>> data, count, dictionary, reverse_dictionary = tl.nlp.build_words_dataset(words, vocabulary_size, True)
>>> context = [b'hello', b'how', b'are', b'you']
>>> ids = tl.nlp.words_to_word_ids(words, dictionary)
>>> context = tl.nlp.word_ids_to_words(ids, reverse_dictionary)
>>> print(ids)
[6434, 311, 26, 207]
>>> print(context)
[b'hello', b'how', b'are', b'you']
References
---------------
- `tensorflow.models.rnn.ptb.reader <https://github.com/tensorflow/tensorflow/tree/master/tensorflow/models/rnn/ptb>`__
]
if compare[name[data] is constant[None]] begin[:]
<ast.Raise object at 0x7da20c6c6c20>
if compare[name[word_to_id] is constant[None]] begin[:]
<ast.Raise object at 0x7da20c6c7100>
variable[word_ids] assign[=] list[[]]
for taget[name[word]] in starred[name[data]] begin[:]
if compare[call[name[word_to_id].get, parameter[name[word]]] is_not constant[None]] begin[:]
call[name[word_ids].append, parameter[call[name[word_to_id]][name[word]]]]
return[name[word_ids]]
|
keyword[def] identifier[words_to_word_ids] ( identifier[data] = keyword[None] , identifier[word_to_id] = keyword[None] , identifier[unk_key] = literal[string] ):
literal[string]
keyword[if] identifier[data] keyword[is] keyword[None] :
keyword[raise] identifier[Exception] ( literal[string] )
keyword[if] identifier[word_to_id] keyword[is] keyword[None] :
keyword[raise] identifier[Exception] ( literal[string] )
identifier[word_ids] =[]
keyword[for] identifier[word] keyword[in] identifier[data] :
keyword[if] identifier[word_to_id] . identifier[get] ( identifier[word] ) keyword[is] keyword[not] keyword[None] :
identifier[word_ids] . identifier[append] ( identifier[word_to_id] [ identifier[word] ])
keyword[else] :
identifier[word_ids] . identifier[append] ( identifier[word_to_id] [ identifier[unk_key] ])
keyword[return] identifier[word_ids]
|
def words_to_word_ids(data=None, word_to_id=None, unk_key='UNK'):
"""Convert a list of string (words) to IDs.
Parameters
----------
data : list of string or byte
The context in list format
word_to_id : a dictionary
that maps word to ID.
unk_key : str
Represent the unknown words.
Returns
--------
list of int
A list of IDs to represent the context.
Examples
--------
>>> words = tl.files.load_matt_mahoney_text8_dataset()
>>> vocabulary_size = 50000
>>> data, count, dictionary, reverse_dictionary = tl.nlp.build_words_dataset(words, vocabulary_size, True)
>>> context = [b'hello', b'how', b'are', b'you']
>>> ids = tl.nlp.words_to_word_ids(words, dictionary)
>>> context = tl.nlp.word_ids_to_words(ids, reverse_dictionary)
>>> print(ids)
[6434, 311, 26, 207]
>>> print(context)
[b'hello', b'how', b'are', b'you']
References
---------------
- `tensorflow.models.rnn.ptb.reader <https://github.com/tensorflow/tensorflow/tree/master/tensorflow/models/rnn/ptb>`__
"""
if data is None:
raise Exception('data : list of string or byte') # depends on [control=['if'], data=[]]
if word_to_id is None:
raise Exception('word_to_id : a dictionary') # depends on [control=['if'], data=[]]
# if isinstance(data[0], six.string_types):
# tl.logging.info(type(data[0]))
# # exit()
# tl.logging.info(data[0])
# tl.logging.info(word_to_id)
# return [word_to_id[str(word)] for word in data]
# else:
word_ids = []
for word in data:
if word_to_id.get(word) is not None:
word_ids.append(word_to_id[word]) # depends on [control=['if'], data=[]]
else:
word_ids.append(word_to_id[unk_key]) # depends on [control=['for'], data=['word']]
return word_ids
|
def download(self, directory, structure=True):
"""
Fetches the object from storage, and writes it to the specified
directory. The directory must exist before calling this method.
If the object name represents a nested folder structure, such as
"foo/bar/baz.txt", that folder structure will be created in the target
directory by default. If you do not want the nested folders to be
created, pass `structure=False` in the parameters.
"""
return self.manager.download(self, directory, structure=structure)
|
def function[download, parameter[self, directory, structure]]:
constant[
Fetches the object from storage, and writes it to the specified
directory. The directory must exist before calling this method.
If the object name represents a nested folder structure, such as
"foo/bar/baz.txt", that folder structure will be created in the target
directory by default. If you do not want the nested folders to be
created, pass `structure=False` in the parameters.
]
return[call[name[self].manager.download, parameter[name[self], name[directory]]]]
|
keyword[def] identifier[download] ( identifier[self] , identifier[directory] , identifier[structure] = keyword[True] ):
literal[string]
keyword[return] identifier[self] . identifier[manager] . identifier[download] ( identifier[self] , identifier[directory] , identifier[structure] = identifier[structure] )
|
def download(self, directory, structure=True):
"""
Fetches the object from storage, and writes it to the specified
directory. The directory must exist before calling this method.
If the object name represents a nested folder structure, such as
"foo/bar/baz.txt", that folder structure will be created in the target
directory by default. If you do not want the nested folders to be
created, pass `structure=False` in the parameters.
"""
return self.manager.download(self, directory, structure=structure)
|
def count(self, *_clauses, **kwargs):
"""Return the count of results for the given filter set."""
# NOTE: this does not have support for limit and offset since I can't
# see how this is useful. Still, there might be compatibility issues
# with people using these flags. Let's see how it goes.
if not self.exists:
return 0
args = self._args_to_clause(kwargs, clauses=_clauses)
query = select([func.count()], whereclause=args)
query = query.select_from(self.table)
rp = self.db.executable.execute(query)
return rp.fetchone()[0]
|
def function[count, parameter[self]]:
constant[Return the count of results for the given filter set.]
if <ast.UnaryOp object at 0x7da1b1e8de10> begin[:]
return[constant[0]]
variable[args] assign[=] call[name[self]._args_to_clause, parameter[name[kwargs]]]
variable[query] assign[=] call[name[select], parameter[list[[<ast.Call object at 0x7da1b1e8eb30>]]]]
variable[query] assign[=] call[name[query].select_from, parameter[name[self].table]]
variable[rp] assign[=] call[name[self].db.executable.execute, parameter[name[query]]]
return[call[call[name[rp].fetchone, parameter[]]][constant[0]]]
|
keyword[def] identifier[count] ( identifier[self] ,* identifier[_clauses] ,** identifier[kwargs] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[exists] :
keyword[return] literal[int]
identifier[args] = identifier[self] . identifier[_args_to_clause] ( identifier[kwargs] , identifier[clauses] = identifier[_clauses] )
identifier[query] = identifier[select] ([ identifier[func] . identifier[count] ()], identifier[whereclause] = identifier[args] )
identifier[query] = identifier[query] . identifier[select_from] ( identifier[self] . identifier[table] )
identifier[rp] = identifier[self] . identifier[db] . identifier[executable] . identifier[execute] ( identifier[query] )
keyword[return] identifier[rp] . identifier[fetchone] ()[ literal[int] ]
|
def count(self, *_clauses, **kwargs):
"""Return the count of results for the given filter set."""
# NOTE: this does not have support for limit and offset since I can't
# see how this is useful. Still, there might be compatibility issues
# with people using these flags. Let's see how it goes.
if not self.exists:
return 0 # depends on [control=['if'], data=[]]
args = self._args_to_clause(kwargs, clauses=_clauses)
query = select([func.count()], whereclause=args)
query = query.select_from(self.table)
rp = self.db.executable.execute(query)
return rp.fetchone()[0]
|
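A hedged usage sketch, assuming the method belongs to a `dataset`-style Table object as the surrounding attributes (self.db.executable, self._args_to_clause) suggest; the connection string and table name are illustrative:

import dataset

db = dataset.connect('sqlite:///:memory:')
people = db['people']
people.insert(dict(name='Ada', age=36))
people.insert(dict(name='Grace', age=45))
print(people.count(name='Ada'))  # -> 1, rows matching the keyword filter
print(people.count())            # -> 2, no filter counts every row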
def centroid(coo):
"""Calculates the centroid from a 3D point cloud and returns the coordinates
:param coo: Array of coordinate arrays
:returns : centroid coordinates as list
"""
    return list(map(np.mean, ([c[0] for c in coo], [c[1] for c in coo], [c[2] for c in coo])))
|
def function[centroid, parameter[coo]]:
constant[Calculates the centroid from a 3D point cloud and returns the coordinates
:param coo: Array of coordinate arrays
:returns : centroid coordinates as list
]
return[call[name[list], parameter[call[name[map], parameter[name[np].mean, tuple[[<ast.ListComp object at 0x7da207f9ba00>, <ast.ListComp object at 0x7da207f9a9b0>, <ast.ListComp object at 0x7da207f98100>]]]]]]]
|
keyword[def] identifier[centroid] ( identifier[coo] ):
literal[string]
keyword[return] identifier[list] ( identifier[map] ( identifier[np] . identifier[mean] ,(([ identifier[c] [ literal[int] ] keyword[for] identifier[c] keyword[in] identifier[coo] ]),([ identifier[c] [ literal[int] ] keyword[for] identifier[c] keyword[in] identifier[coo] ]),([ identifier[c] [ literal[int] ] keyword[for] identifier[c] keyword[in] identifier[coo] ]))))
|
def centroid(coo):
"""Calculates the centroid from a 3D point cloud and returns the coordinates
:param coo: Array of coordinate arrays
:returns : centroid coordinates as list
"""
return list(map(np.mean, ([c[0] for c in coo], [c[1] for c in coo], [c[2] for c in coo])))
|
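A worked call, with plain tuples standing in for coordinate arrays:

import numpy as np  # the function body assumes numpy imported as np

coo = [(0.0, 0.0, 0.0), (2.0, 4.0, 6.0)]
print(centroid(coo))  # -> [1.0, 2.0, 3.0], the per-axis means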
def _sortHTML(titlesAlignments, by, limit=None):
"""
Return an C{IPython.display.HTML} object with the alignments sorted by the
given attribute.
@param titlesAlignments: A L{dark.titles.TitlesAlignments} instance.
@param by: A C{str}, one of 'length', 'maxScore', 'medianScore',
'readCount', or 'title'.
@param limit: An C{int} limit on the number of results to show.
@return: An HTML instance with sorted titles and information about
hit read count, length, and e-values.
"""
out = []
for i, title in enumerate(titlesAlignments.sortTitles(by), start=1):
if limit is not None and i > limit:
break
titleAlignments = titlesAlignments[title]
link = NCBISequenceLink(title, title)
out.append(
'%3d: reads=%d, len=%d, max=%s median=%s<br/>'
' %s' %
(i, titleAlignments.readCount(), titleAlignments.subjectLength,
titleAlignments.bestHsp().score.score,
titleAlignments.medianScore(), link))
return HTML('<pre>' + '<br/>'.join(out) + '</pre>')
|
def function[_sortHTML, parameter[titlesAlignments, by, limit]]:
constant[
Return an C{IPython.display.HTML} object with the alignments sorted by the
given attribute.
@param titlesAlignments: A L{dark.titles.TitlesAlignments} instance.
@param by: A C{str}, one of 'length', 'maxScore', 'medianScore',
'readCount', or 'title'.
@param limit: An C{int} limit on the number of results to show.
@return: An HTML instance with sorted titles and information about
hit read count, length, and e-values.
]
variable[out] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da18c4cf370>, <ast.Name object at 0x7da18c4cdc30>]]] in starred[call[name[enumerate], parameter[call[name[titlesAlignments].sortTitles, parameter[name[by]]]]]] begin[:]
if <ast.BoolOp object at 0x7da18c4cdb70> begin[:]
break
variable[titleAlignments] assign[=] call[name[titlesAlignments]][name[title]]
variable[link] assign[=] call[name[NCBISequenceLink], parameter[name[title], name[title]]]
call[name[out].append, parameter[binary_operation[constant[%3d: reads=%d, len=%d, max=%s median=%s<br/> %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18c4cf700>, <ast.Call object at 0x7da18c4ce770>, <ast.Attribute object at 0x7da18c4cfeb0>, <ast.Attribute object at 0x7da18c4cd660>, <ast.Call object at 0x7da1b0e39360>, <ast.Name object at 0x7da1b0e3b8b0>]]]]]
return[call[name[HTML], parameter[binary_operation[binary_operation[constant[<pre>] + call[constant[<br/>].join, parameter[name[out]]]] + constant[</pre>]]]]]
|
keyword[def] identifier[_sortHTML] ( identifier[titlesAlignments] , identifier[by] , identifier[limit] = keyword[None] ):
literal[string]
identifier[out] =[]
keyword[for] identifier[i] , identifier[title] keyword[in] identifier[enumerate] ( identifier[titlesAlignments] . identifier[sortTitles] ( identifier[by] ), identifier[start] = literal[int] ):
keyword[if] identifier[limit] keyword[is] keyword[not] keyword[None] keyword[and] identifier[i] > identifier[limit] :
keyword[break]
identifier[titleAlignments] = identifier[titlesAlignments] [ identifier[title] ]
identifier[link] = identifier[NCBISequenceLink] ( identifier[title] , identifier[title] )
identifier[out] . identifier[append] (
literal[string]
literal[string] %
( identifier[i] , identifier[titleAlignments] . identifier[readCount] (), identifier[titleAlignments] . identifier[subjectLength] ,
identifier[titleAlignments] . identifier[bestHsp] (). identifier[score] . identifier[score] ,
identifier[titleAlignments] . identifier[medianScore] (), identifier[link] ))
keyword[return] identifier[HTML] ( literal[string] + literal[string] . identifier[join] ( identifier[out] )+ literal[string] )
|
def _sortHTML(titlesAlignments, by, limit=None):
"""
Return an C{IPython.display.HTML} object with the alignments sorted by the
given attribute.
@param titlesAlignments: A L{dark.titles.TitlesAlignments} instance.
@param by: A C{str}, one of 'length', 'maxScore', 'medianScore',
'readCount', or 'title'.
@param limit: An C{int} limit on the number of results to show.
@return: An HTML instance with sorted titles and information about
hit read count, length, and e-values.
"""
out = []
for (i, title) in enumerate(titlesAlignments.sortTitles(by), start=1):
if limit is not None and i > limit:
break # depends on [control=['if'], data=[]]
titleAlignments = titlesAlignments[title]
link = NCBISequenceLink(title, title)
out.append('%3d: reads=%d, len=%d, max=%s median=%s<br/> %s' % (i, titleAlignments.readCount(), titleAlignments.subjectLength, titleAlignments.bestHsp().score.score, titleAlignments.medianScore(), link)) # depends on [control=['for'], data=[]]
return HTML('<pre>' + '<br/>'.join(out) + '</pre>')
|
def job_is_enabled(self, job_id):
"""
Check if a job is enabled.
:param job_id: Job identifier to check the status of.
:type job_id: :py:class:`uuid.UUID`
:rtype: bool
"""
job_id = normalize_job_id(job_id)
job_desc = self._jobs[job_id]
return job_desc['enabled']
|
def function[job_is_enabled, parameter[self, job_id]]:
constant[
Check if a job is enabled.
:param job_id: Job identifier to check the status of.
:type job_id: :py:class:`uuid.UUID`
:rtype: bool
]
variable[job_id] assign[=] call[name[normalize_job_id], parameter[name[job_id]]]
variable[job_desc] assign[=] call[name[self]._jobs][name[job_id]]
return[call[name[job_desc]][constant[enabled]]]
|
keyword[def] identifier[job_is_enabled] ( identifier[self] , identifier[job_id] ):
literal[string]
identifier[job_id] = identifier[normalize_job_id] ( identifier[job_id] )
identifier[job_desc] = identifier[self] . identifier[_jobs] [ identifier[job_id] ]
keyword[return] identifier[job_desc] [ literal[string] ]
|
def job_is_enabled(self, job_id):
"""
Check if a job is enabled.
:param job_id: Job identifier to check the status of.
:type job_id: :py:class:`uuid.UUID`
:rtype: bool
"""
job_id = normalize_job_id(job_id)
job_desc = self._jobs[job_id]
return job_desc['enabled']
|
def extract_ipv4(roster_order, ipv4):
'''
Extract the preferred IP address from the ipv4 grain
'''
for ip_type in roster_order:
for ip_ in ipv4:
if ':' in ip_:
continue
if not salt.utils.validate.net.ipv4_addr(ip_):
continue
if ip_type == 'local' and ip_.startswith('127.'):
return ip_
elif ip_type == 'private' and not salt.utils.cloud.is_public_ip(ip_):
return ip_
elif ip_type == 'public' and salt.utils.cloud.is_public_ip(ip_):
return ip_
return None
|
def function[extract_ipv4, parameter[roster_order, ipv4]]:
constant[
Extract the preferred IP address from the ipv4 grain
]
for taget[name[ip_type]] in starred[name[roster_order]] begin[:]
for taget[name[ip_]] in starred[name[ipv4]] begin[:]
if compare[constant[:] in name[ip_]] begin[:]
continue
if <ast.UnaryOp object at 0x7da1b26ac0d0> begin[:]
continue
if <ast.BoolOp object at 0x7da1b26afeb0> begin[:]
return[name[ip_]]
return[constant[None]]
|
keyword[def] identifier[extract_ipv4] ( identifier[roster_order] , identifier[ipv4] ):
literal[string]
keyword[for] identifier[ip_type] keyword[in] identifier[roster_order] :
keyword[for] identifier[ip_] keyword[in] identifier[ipv4] :
keyword[if] literal[string] keyword[in] identifier[ip_] :
keyword[continue]
keyword[if] keyword[not] identifier[salt] . identifier[utils] . identifier[validate] . identifier[net] . identifier[ipv4_addr] ( identifier[ip_] ):
keyword[continue]
keyword[if] identifier[ip_type] == literal[string] keyword[and] identifier[ip_] . identifier[startswith] ( literal[string] ):
keyword[return] identifier[ip_]
keyword[elif] identifier[ip_type] == literal[string] keyword[and] keyword[not] identifier[salt] . identifier[utils] . identifier[cloud] . identifier[is_public_ip] ( identifier[ip_] ):
keyword[return] identifier[ip_]
keyword[elif] identifier[ip_type] == literal[string] keyword[and] identifier[salt] . identifier[utils] . identifier[cloud] . identifier[is_public_ip] ( identifier[ip_] ):
keyword[return] identifier[ip_]
keyword[return] keyword[None]
|
def extract_ipv4(roster_order, ipv4):
"""
Extract the preferred IP address from the ipv4 grain
"""
for ip_type in roster_order:
for ip_ in ipv4:
if ':' in ip_:
continue # depends on [control=['if'], data=[]]
if not salt.utils.validate.net.ipv4_addr(ip_):
continue # depends on [control=['if'], data=[]]
if ip_type == 'local' and ip_.startswith('127.'):
return ip_ # depends on [control=['if'], data=[]]
elif ip_type == 'private' and (not salt.utils.cloud.is_public_ip(ip_)):
return ip_ # depends on [control=['if'], data=[]]
elif ip_type == 'public' and salt.utils.cloud.is_public_ip(ip_):
return ip_ # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['ip_']] # depends on [control=['for'], data=['ip_type']]
return None
|
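An illustrative call. This is hedged: it assumes salt.utils.cloud.is_public_ip treats RFC 1918 addresses as non-public and everything else as public, which matches its name but is not shown here; the addresses are documentation-range examples.

# extract_ipv4(['public', 'private'], ['10.0.0.5', '203.0.113.7'])
# -> '203.0.113.7'   the first entry that passes the 'public' test
# extract_ipv4(['private'], ['10.0.0.5', '203.0.113.7'])
# -> '10.0.0.5'      only the private range is considered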
async def container(self, container=None, container_type=None, params=None):
"""
Loads/dumps container
:return:
"""
if hasattr(container_type, "serialize_archive"):
container = container_type() if container is None else container
return await container.serialize_archive(
self, elem=container, elem_type=container_type, params=params
)
if self.writing:
return await self._dump_container(
self.iobj, container, container_type, params
)
else:
return await self._load_container(
self.iobj, container_type, params=params, container=container
)
|
<ast.AsyncFunctionDef object at 0x7da2047e9d20>
|
keyword[async] keyword[def] identifier[container] ( identifier[self] , identifier[container] = keyword[None] , identifier[container_type] = keyword[None] , identifier[params] = keyword[None] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[container_type] , literal[string] ):
identifier[container] = identifier[container_type] () keyword[if] identifier[container] keyword[is] keyword[None] keyword[else] identifier[container]
keyword[return] keyword[await] identifier[container] . identifier[serialize_archive] (
identifier[self] , identifier[elem] = identifier[container] , identifier[elem_type] = identifier[container_type] , identifier[params] = identifier[params]
)
keyword[if] identifier[self] . identifier[writing] :
keyword[return] keyword[await] identifier[self] . identifier[_dump_container] (
identifier[self] . identifier[iobj] , identifier[container] , identifier[container_type] , identifier[params]
)
keyword[else] :
keyword[return] keyword[await] identifier[self] . identifier[_load_container] (
identifier[self] . identifier[iobj] , identifier[container_type] , identifier[params] = identifier[params] , identifier[container] = identifier[container]
)
|
async def container(self, container=None, container_type=None, params=None):
"""
Loads/dumps container
:return:
"""
if hasattr(container_type, 'serialize_archive'):
container = container_type() if container is None else container
return await container.serialize_archive(self, elem=container, elem_type=container_type, params=params) # depends on [control=['if'], data=[]]
if self.writing:
return await self._dump_container(self.iobj, container, container_type, params) # depends on [control=['if'], data=[]]
else:
return await self._load_container(self.iobj, container_type, params=params, container=container)
|
def get_average_length_of_string(strings):
"""Computes average length of words
:param strings: list of words
:return: Average length of word on list
"""
if not strings:
return 0
return sum(len(word) for word in strings) / len(strings)
|
def function[get_average_length_of_string, parameter[strings]]:
constant[Computes average length of words
:param strings: list of words
:return: Average length of word on list
]
if <ast.UnaryOp object at 0x7da18f09e440> begin[:]
return[constant[0]]
return[binary_operation[call[name[sum], parameter[<ast.GeneratorExp object at 0x7da18f09dcc0>]] / call[name[len], parameter[name[strings]]]]]
|
keyword[def] identifier[get_average_length_of_string] ( identifier[strings] ):
literal[string]
keyword[if] keyword[not] identifier[strings] :
keyword[return] literal[int]
keyword[return] identifier[sum] ( identifier[len] ( identifier[word] ) keyword[for] identifier[word] keyword[in] identifier[strings] )/ identifier[len] ( identifier[strings] )
|
def get_average_length_of_string(strings):
"""Computes average length of words
:param strings: list of words
:return: Average length of word on list
"""
if not strings:
return 0 # depends on [control=['if'], data=[]]
return sum((len(word) for word in strings)) / len(strings)
|
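Two quick checks of the arithmetic:

print(get_average_length_of_string(['cat', 'horse']))  # -> 4.0, i.e. (3 + 5) / 2 with true division
print(get_average_length_of_string([]))                # -> 0, the guarded empty case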
def destroy(name):
'''
removes a container [stops a container if it's running and]
raises ContainerNotExists exception if the specified name is not created
'''
if not exists(name):
raise ContainerNotExists("The container (%s) does not exist!" % name)
cmd = ['lxc-destroy', '-f', '-n', name]
subprocess.check_call(cmd)
|
def function[destroy, parameter[name]]:
constant[
removes a container [stops a container if it's running and]
raises ContainerNotExists exception if the specified name is not created
]
if <ast.UnaryOp object at 0x7da20c6ab010> begin[:]
<ast.Raise object at 0x7da20c6ab6d0>
variable[cmd] assign[=] list[[<ast.Constant object at 0x7da20c6ab850>, <ast.Constant object at 0x7da20c6aa170>, <ast.Constant object at 0x7da20c6a8130>, <ast.Name object at 0x7da20c6a90c0>]]
call[name[subprocess].check_call, parameter[name[cmd]]]
|
keyword[def] identifier[destroy] ( identifier[name] ):
literal[string]
keyword[if] keyword[not] identifier[exists] ( identifier[name] ):
keyword[raise] identifier[ContainerNotExists] ( literal[string] % identifier[name] )
identifier[cmd] =[ literal[string] , literal[string] , literal[string] , identifier[name] ]
identifier[subprocess] . identifier[check_call] ( identifier[cmd] )
|
def destroy(name):
"""
removes a container [stops a container if it's running and]
raises ContainerNotExists exception if the specified name is not created
"""
if not exists(name):
raise ContainerNotExists('The container (%s) does not exist!' % name) # depends on [control=['if'], data=[]]
cmd = ['lxc-destroy', '-f', '-n', name]
subprocess.check_call(cmd)
|
def fill_off_diagonal(x, radius, value=0):
"""Sets all cells of a matrix to a given ``value``
if they lie outside a constraint region.
In this case, the constraint region is the
Sakoe-Chiba band which runs with a fixed ``radius``
along the main diagonal.
When ``x.shape[0] != x.shape[1]``, the radius will be
expanded so that ``x[-1, -1] = 1`` always.
``x`` will be modified in place.
Parameters
----------
x : np.ndarray [shape=(N, M)]
Input matrix, will be modified in place.
radius : float
The band radius (1/2 of the width) will be
``int(radius*min(x.shape))``.
value : int
``x[n, m] = value`` when ``(n, m)`` lies outside the band.
Examples
--------
>>> x = np.ones((8, 8))
>>> librosa.util.fill_off_diagonal(x, 0.25)
>>> x
array([[1, 1, 0, 0, 0, 0, 0, 0],
[1, 1, 1, 0, 0, 0, 0, 0],
[0, 1, 1, 1, 0, 0, 0, 0],
[0, 0, 1, 1, 1, 0, 0, 0],
[0, 0, 0, 1, 1, 1, 0, 0],
[0, 0, 0, 0, 1, 1, 1, 0],
[0, 0, 0, 0, 0, 1, 1, 1],
[0, 0, 0, 0, 0, 0, 1, 1]])
>>> x = np.ones((8, 12))
>>> librosa.util.fill_off_diagonal(x, 0.25)
>>> x
array([[1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0],
[1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0],
[0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0],
[0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0],
[0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0],
[0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0],
[0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1],
[0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1]])
"""
nx, ny = x.shape
# Calculate the radius in indices, rather than proportion
radius = np.round(radius * np.min(x.shape))
nx, ny = x.shape
offset = np.abs((x.shape[0] - x.shape[1]))
if nx < ny:
idx_u = np.triu_indices_from(x, k=radius + offset)
idx_l = np.tril_indices_from(x, k=-radius)
else:
idx_u = np.triu_indices_from(x, k=radius)
idx_l = np.tril_indices_from(x, k=-radius - offset)
# modify input matrix
x[idx_u] = value
x[idx_l] = value
|
def function[fill_off_diagonal, parameter[x, radius, value]]:
constant[Sets all cells of a matrix to a given ``value``
if they lie outside a constraint region.
In this case, the constraint region is the
Sakoe-Chiba band which runs with a fixed ``radius``
along the main diagonal.
When ``x.shape[0] != x.shape[1]``, the radius will be
expanded so that ``x[-1, -1] = 1`` always.
``x`` will be modified in place.
Parameters
----------
x : np.ndarray [shape=(N, M)]
Input matrix, will be modified in place.
radius : float
The band radius (1/2 of the width) will be
``int(radius*min(x.shape))``.
value : int
``x[n, m] = value`` when ``(n, m)`` lies outside the band.
Examples
--------
>>> x = np.ones((8, 8))
>>> librosa.util.fill_off_diagonal(x, 0.25)
>>> x
array([[1, 1, 0, 0, 0, 0, 0, 0],
[1, 1, 1, 0, 0, 0, 0, 0],
[0, 1, 1, 1, 0, 0, 0, 0],
[0, 0, 1, 1, 1, 0, 0, 0],
[0, 0, 0, 1, 1, 1, 0, 0],
[0, 0, 0, 0, 1, 1, 1, 0],
[0, 0, 0, 0, 0, 1, 1, 1],
[0, 0, 0, 0, 0, 0, 1, 1]])
>>> x = np.ones((8, 12))
>>> librosa.util.fill_off_diagonal(x, 0.25)
>>> x
array([[1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0],
[1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0],
[0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0],
[0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0],
[0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0],
[0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0],
[0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1],
[0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1]])
]
<ast.Tuple object at 0x7da1b05f9720> assign[=] name[x].shape
variable[radius] assign[=] call[name[np].round, parameter[binary_operation[name[radius] * call[name[np].min, parameter[name[x].shape]]]]]
<ast.Tuple object at 0x7da1b05f9570> assign[=] name[x].shape
variable[offset] assign[=] call[name[np].abs, parameter[binary_operation[call[name[x].shape][constant[0]] - call[name[x].shape][constant[1]]]]]
if compare[name[nx] less[<] name[ny]] begin[:]
variable[idx_u] assign[=] call[name[np].triu_indices_from, parameter[name[x]]]
variable[idx_l] assign[=] call[name[np].tril_indices_from, parameter[name[x]]]
call[name[x]][name[idx_u]] assign[=] name[value]
call[name[x]][name[idx_l]] assign[=] name[value]
|
keyword[def] identifier[fill_off_diagonal] ( identifier[x] , identifier[radius] , identifier[value] = literal[int] ):
literal[string]
identifier[nx] , identifier[ny] = identifier[x] . identifier[shape]
identifier[radius] = identifier[np] . identifier[round] ( identifier[radius] * identifier[np] . identifier[min] ( identifier[x] . identifier[shape] ))
identifier[nx] , identifier[ny] = identifier[x] . identifier[shape]
identifier[offset] = identifier[np] . identifier[abs] (( identifier[x] . identifier[shape] [ literal[int] ]- identifier[x] . identifier[shape] [ literal[int] ]))
keyword[if] identifier[nx] < identifier[ny] :
identifier[idx_u] = identifier[np] . identifier[triu_indices_from] ( identifier[x] , identifier[k] = identifier[radius] + identifier[offset] )
identifier[idx_l] = identifier[np] . identifier[tril_indices_from] ( identifier[x] , identifier[k] =- identifier[radius] )
keyword[else] :
identifier[idx_u] = identifier[np] . identifier[triu_indices_from] ( identifier[x] , identifier[k] = identifier[radius] )
identifier[idx_l] = identifier[np] . identifier[tril_indices_from] ( identifier[x] , identifier[k] =- identifier[radius] - identifier[offset] )
identifier[x] [ identifier[idx_u] ]= identifier[value]
identifier[x] [ identifier[idx_l] ]= identifier[value]
|
def fill_off_diagonal(x, radius, value=0):
"""Sets all cells of a matrix to a given ``value``
if they lie outside a constraint region.
In this case, the constraint region is the
Sakoe-Chiba band which runs with a fixed ``radius``
along the main diagonal.
When ``x.shape[0] != x.shape[1]``, the radius will be
expanded so that ``x[-1, -1] = 1`` always.
``x`` will be modified in place.
Parameters
----------
x : np.ndarray [shape=(N, M)]
Input matrix, will be modified in place.
radius : float
The band radius (1/2 of the width) will be
``int(radius*min(x.shape))``.
value : int
``x[n, m] = value`` when ``(n, m)`` lies outside the band.
Examples
--------
>>> x = np.ones((8, 8))
>>> librosa.util.fill_off_diagonal(x, 0.25)
>>> x
array([[1, 1, 0, 0, 0, 0, 0, 0],
[1, 1, 1, 0, 0, 0, 0, 0],
[0, 1, 1, 1, 0, 0, 0, 0],
[0, 0, 1, 1, 1, 0, 0, 0],
[0, 0, 0, 1, 1, 1, 0, 0],
[0, 0, 0, 0, 1, 1, 1, 0],
[0, 0, 0, 0, 0, 1, 1, 1],
[0, 0, 0, 0, 0, 0, 1, 1]])
>>> x = np.ones((8, 12))
>>> librosa.util.fill_off_diagonal(x, 0.25)
>>> x
array([[1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0],
[1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0],
[0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0],
[0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0],
[0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0],
[0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0],
[0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1],
[0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1]])
"""
(nx, ny) = x.shape
# Calculate the radius in indices, rather than proportion
radius = np.round(radius * np.min(x.shape))
(nx, ny) = x.shape
offset = np.abs(x.shape[0] - x.shape[1])
if nx < ny:
idx_u = np.triu_indices_from(x, k=radius + offset)
idx_l = np.tril_indices_from(x, k=-radius) # depends on [control=['if'], data=[]]
else:
idx_u = np.triu_indices_from(x, k=radius)
idx_l = np.tril_indices_from(x, k=-radius - offset)
# modify input matrix
x[idx_u] = value
x[idx_l] = value
|
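
A minimal sketch of the band arithmetic behind the 8x12 example in the record above, assuming only NumPy; it recomputes the k offsets that end up being passed to np.triu_indices_from and np.tril_indices_from.

import numpy as np

# 8x12 case from the docstring example: radius=0.25, value=0
nx, ny = 8, 12
band = int(np.round(0.25 * min(nx, ny)))  # 2 -> half-width of the Sakoe-Chiba band
offset = abs(nx - ny)                     # 4 -> extra slack so x[-1, -1] stays inside the band
# nx < ny, so the upper triangle is cleared from k=band+offset and the lower from k=-band
print(band + offset, -band)               # 6 -2: zeros start at column 6 in row 0
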
def localized_fact(self):
"""Make sure fact has the correct start_time."""
fact = Fact(self.activity.get_text())
if fact.start_time:
fact.date = self.date
else:
fact.start_time = dt.datetime.now()
return fact
|
def function[localized_fact, parameter[self]]:
constant[Make sure fact has the correct start_time.]
variable[fact] assign[=] call[name[Fact], parameter[call[name[self].activity.get_text, parameter[]]]]
if name[fact].start_time begin[:]
name[fact].date assign[=] name[self].date
return[name[fact]]
|
keyword[def] identifier[localized_fact] ( identifier[self] ):
literal[string]
identifier[fact] = identifier[Fact] ( identifier[self] . identifier[activity] . identifier[get_text] ())
keyword[if] identifier[fact] . identifier[start_time] :
identifier[fact] . identifier[date] = identifier[self] . identifier[date]
keyword[else] :
identifier[fact] . identifier[start_time] = identifier[dt] . identifier[datetime] . identifier[now] ()
keyword[return] identifier[fact]
|
def localized_fact(self):
"""Make sure fact has the correct start_time."""
fact = Fact(self.activity.get_text())
if fact.start_time:
fact.date = self.date # depends on [control=['if'], data=[]]
else:
fact.start_time = dt.datetime.now()
return fact
|
def word_tokenize(self, text, include_punc=True):
"""The Treebank tokenizer uses regular expressions to tokenize text as
in Penn Treebank.
It assumes that the text has already been segmented into sentences,
e.g. using ``self.sent_tokenize()``.
This tokenizer performs the following steps:
- split standard contractions, e.g. ``don't`` -> ``do n't`` and ``they'll`` -> ``they 'll``
- treat most punctuation characters as separate tokens
- split off commas and single quotes, when followed by whitespace
- separate periods that appear at the end of line
Source: NLTK's docstring of ``TreebankWordTokenizer`` (accessed: 02/10/2014)
"""
#: Do not process empty strings (Issue #3)
if text.strip() == "":
return []
_tokens = self.word_tok.tokenize(text)
        #: Handle strings consisting of a single punctuation mark separately (Issue #4)
if len(_tokens) == 1:
if _tokens[0] in PUNCTUATION:
if include_punc:
return _tokens
else:
return []
if include_punc:
return _tokens
else:
# Return each word token
# Strips punctuation unless the word comes from a contraction
# e.g. "gibt's" => ["gibt", "'s"] in "Heute gibt's viel zu tun!"
# e.g. "hat's" => ["hat", "'s"]
# e.g. "home." => ['home']
words = [
word if word.startswith("'") else strip_punc(
word,
all=False) for word in _tokens if strip_punc(
word,
all=False)]
return list(words)
|
def function[word_tokenize, parameter[self, text, include_punc]]:
constant[The Treebank tokenizer uses regular expressions to tokenize text as
in Penn Treebank.
It assumes that the text has already been segmented into sentences,
e.g. using ``self.sent_tokenize()``.
This tokenizer performs the following steps:
- split standard contractions, e.g. ``don't`` -> ``do n't`` and ``they'll`` -> ``they 'll``
- treat most punctuation characters as separate tokens
- split off commas and single quotes, when followed by whitespace
- separate periods that appear at the end of line
Source: NLTK's docstring of ``TreebankWordTokenizer`` (accessed: 02/10/2014)
]
if compare[call[name[text].strip, parameter[]] equal[==] constant[]] begin[:]
return[list[[]]]
variable[_tokens] assign[=] call[name[self].word_tok.tokenize, parameter[name[text]]]
if compare[call[name[len], parameter[name[_tokens]]] equal[==] constant[1]] begin[:]
if compare[call[name[_tokens]][constant[0]] in name[PUNCTUATION]] begin[:]
if name[include_punc] begin[:]
return[name[_tokens]]
if name[include_punc] begin[:]
return[name[_tokens]]
|
keyword[def] identifier[word_tokenize] ( identifier[self] , identifier[text] , identifier[include_punc] = keyword[True] ):
literal[string]
keyword[if] identifier[text] . identifier[strip] ()== literal[string] :
keyword[return] []
identifier[_tokens] = identifier[self] . identifier[word_tok] . identifier[tokenize] ( identifier[text] )
keyword[if] identifier[len] ( identifier[_tokens] )== literal[int] :
keyword[if] identifier[_tokens] [ literal[int] ] keyword[in] identifier[PUNCTUATION] :
keyword[if] identifier[include_punc] :
keyword[return] identifier[_tokens]
keyword[else] :
keyword[return] []
keyword[if] identifier[include_punc] :
keyword[return] identifier[_tokens]
keyword[else] :
identifier[words] =[
identifier[word] keyword[if] identifier[word] . identifier[startswith] ( literal[string] ) keyword[else] identifier[strip_punc] (
identifier[word] ,
identifier[all] = keyword[False] ) keyword[for] identifier[word] keyword[in] identifier[_tokens] keyword[if] identifier[strip_punc] (
identifier[word] ,
identifier[all] = keyword[False] )]
keyword[return] identifier[list] ( identifier[words] )
|
def word_tokenize(self, text, include_punc=True):
"""The Treebank tokenizer uses regular expressions to tokenize text as
in Penn Treebank.
It assumes that the text has already been segmented into sentences,
e.g. using ``self.sent_tokenize()``.
This tokenizer performs the following steps:
- split standard contractions, e.g. ``don't`` -> ``do n't`` and ``they'll`` -> ``they 'll``
- treat most punctuation characters as separate tokens
- split off commas and single quotes, when followed by whitespace
- separate periods that appear at the end of line
Source: NLTK's docstring of ``TreebankWordTokenizer`` (accessed: 02/10/2014)
"""
#: Do not process empty strings (Issue #3)
if text.strip() == '':
return [] # depends on [control=['if'], data=[]]
_tokens = self.word_tok.tokenize(text)
        #: Handle strings consisting of a single punctuation mark separately (Issue #4)
if len(_tokens) == 1:
if _tokens[0] in PUNCTUATION:
if include_punc:
return _tokens # depends on [control=['if'], data=[]]
else:
return [] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if include_punc:
return _tokens # depends on [control=['if'], data=[]]
else:
# Return each word token
# Strips punctuation unless the word comes from a contraction
# e.g. "gibt's" => ["gibt", "'s"] in "Heute gibt's viel zu tun!"
# e.g. "hat's" => ["hat", "'s"]
# e.g. "home." => ['home']
words = [word if word.startswith("'") else strip_punc(word, all=False) for word in _tokens if strip_punc(word, all=False)]
return list(words)
|
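
A standalone sketch of the same pipeline, assuming `self.word_tok` is NLTK's `TreebankWordTokenizer` (as the docstring credits) and substituting a simple stand-in for the `strip_punc` helper, whose real implementation is not shown in this record.

import string
from nltk.tokenize import TreebankWordTokenizer

def strip_punc(word, all=False):
    # stand-in for the helper used in the record above: drop surrounding punctuation
    return word.strip(string.punctuation)

tokens = TreebankWordTokenizer().tokenize("Heute gibt's viel zu tun!")
words = [w if w.startswith("'") else strip_punc(w, all=False)
         for w in tokens if strip_punc(w, all=False)]
print(words)  # expected: ['Heute', 'gibt', "'s", 'viel', 'zu', 'tun']
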
def clean_cache_key(key):
""" Replace spaces with '-' and hash if length is greater than 250.
"""
cache_key = re.sub(r'\s+', '-', key)
cache_key = smart_str(cache_key)
if len(cache_key) > 200:
cache_key = cache_key[:150] + '-' + hashlib.md5(cache_key).hexdigest()
return cache_key
|
def function[clean_cache_key, parameter[key]]:
    constant[ Replace spaces with '-' and hash if length is greater than 200.
]
variable[cache_key] assign[=] call[name[re].sub, parameter[constant[\s+], constant[-], name[key]]]
variable[cache_key] assign[=] call[name[smart_str], parameter[name[cache_key]]]
if compare[call[name[len], parameter[name[cache_key]]] greater[>] constant[200]] begin[:]
variable[cache_key] assign[=] binary_operation[binary_operation[call[name[cache_key]][<ast.Slice object at 0x7da1b0aa6dd0>] + constant[-]] + call[call[name[hashlib].md5, parameter[name[cache_key]]].hexdigest, parameter[]]]
return[name[cache_key]]
|
keyword[def] identifier[clean_cache_key] ( identifier[key] ):
literal[string]
identifier[cache_key] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[key] )
identifier[cache_key] = identifier[smart_str] ( identifier[cache_key] )
keyword[if] identifier[len] ( identifier[cache_key] )> literal[int] :
identifier[cache_key] = identifier[cache_key] [: literal[int] ]+ literal[string] + identifier[hashlib] . identifier[md5] ( identifier[cache_key] ). identifier[hexdigest] ()
keyword[return] identifier[cache_key]
|
def clean_cache_key(key):
""" Replace spaces with '-' and hash if length is greater than 250.
"""
cache_key = re.sub('\\s+', '-', key)
cache_key = smart_str(cache_key)
if len(cache_key) > 200:
cache_key = cache_key[:150] + '-' + hashlib.md5(cache_key).hexdigest() # depends on [control=['if'], data=[]]
return cache_key
|
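
A small usage sketch of the truncate-and-hash behaviour; `smart_str` is assumed to be Django's encoder and is omitted here, and because `hashlib.md5` needs bytes on Python 3 (the record above looks Python 2 oriented) the sketch encodes the key explicitly.

import hashlib
import re

def clean_cache_key_sketch(key):
    # same logic as the record above, with explicit bytes handling for Python 3
    cache_key = re.sub(r'\s+', '-', key)
    if len(cache_key) > 200:
        digest = hashlib.md5(cache_key.encode('utf-8')).hexdigest()
        cache_key = cache_key[:150] + '-' + digest
    return cache_key

long_key = 'product list page ' * 20          # 360 characters before cleaning
print(len(clean_cache_key_sketch(long_key)))  # 183 = 150 + 1 + 32, safely under memcached's 250-byte limit
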
def comment_marker(self, value):
"""
Setter for **self.__comment_marker** attribute.
:param value: Attribute value.
:type value: unicode
"""
if value is not None:
assert type(value) is unicode, "'{0}' attribute: '{1}' type is not 'unicode'!".format(
"comment_marker", value)
self.__comment_marker = value
|
def function[comment_marker, parameter[self, value]]:
constant[
Setter for **self.__comment_marker** attribute.
:param value: Attribute value.
:type value: unicode
]
if compare[name[value] is_not constant[None]] begin[:]
assert[compare[call[name[type], parameter[name[value]]] is name[unicode]]]
name[self].__comment_marker assign[=] name[value]
|
keyword[def] identifier[comment_marker] ( identifier[self] , identifier[value] ):
literal[string]
keyword[if] identifier[value] keyword[is] keyword[not] keyword[None] :
keyword[assert] identifier[type] ( identifier[value] ) keyword[is] identifier[unicode] , literal[string] . identifier[format] (
literal[string] , identifier[value] )
identifier[self] . identifier[__comment_marker] = identifier[value]
|
def comment_marker(self, value):
"""
Setter for **self.__comment_marker** attribute.
:param value: Attribute value.
:type value: unicode
"""
if value is not None:
assert type(value) is unicode, "'{0}' attribute: '{1}' type is not 'unicode'!".format('comment_marker', value) # depends on [control=['if'], data=['value']]
self.__comment_marker = value
|
def disable_wx(self):
"""Disable event loop integration with wxPython.
This merely sets PyOS_InputHook to NULL.
"""
if self._apps.has_key(GUI_WX):
self._apps[GUI_WX]._in_event_loop = False
self.clear_inputhook()
|
def function[disable_wx, parameter[self]]:
constant[Disable event loop integration with wxPython.
This merely sets PyOS_InputHook to NULL.
]
if call[name[self]._apps.has_key, parameter[name[GUI_WX]]] begin[:]
call[name[self]._apps][name[GUI_WX]]._in_event_loop assign[=] constant[False]
call[name[self].clear_inputhook, parameter[]]
|
keyword[def] identifier[disable_wx] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_apps] . identifier[has_key] ( identifier[GUI_WX] ):
identifier[self] . identifier[_apps] [ identifier[GUI_WX] ]. identifier[_in_event_loop] = keyword[False]
identifier[self] . identifier[clear_inputhook] ()
|
def disable_wx(self):
"""Disable event loop integration with wxPython.
This merely sets PyOS_InputHook to NULL.
"""
if self._apps.has_key(GUI_WX):
self._apps[GUI_WX]._in_event_loop = False # depends on [control=['if'], data=[]]
self.clear_inputhook()
|
def all(self, store_id, product_id, get_all=False, **queryparams):
"""
Get information about a product’s images.
:param store_id: The store id.
:type store_id: :py:class:`str`
:param product_id: The id for the product of a store.
:type product_id: :py:class:`str`
:param get_all: Should the query get all results
:type get_all: :py:class:`bool`
:param queryparams: The query string parameters
queryparams['fields'] = []
queryparams['exclude_fields'] = []
queryparams['count'] = integer
queryparams['offset'] = integer
"""
self.store_id = store_id
self.product_id = product_id
self.image_id = None
if get_all:
return self._iterate(url=self._build_path(store_id, 'products', product_id, 'images'), **queryparams)
else:
return self._mc_client._post(url=self._build_path(store_id, 'products', product_id, 'images'), **queryparams)
|
def function[all, parameter[self, store_id, product_id, get_all]]:
constant[
Get information about a product’s images.
:param store_id: The store id.
:type store_id: :py:class:`str`
:param product_id: The id for the product of a store.
:type product_id: :py:class:`str`
:param get_all: Should the query get all results
:type get_all: :py:class:`bool`
:param queryparams: The query string parameters
queryparams['fields'] = []
queryparams['exclude_fields'] = []
queryparams['count'] = integer
queryparams['offset'] = integer
]
name[self].store_id assign[=] name[store_id]
name[self].product_id assign[=] name[product_id]
name[self].image_id assign[=] constant[None]
if name[get_all] begin[:]
return[call[name[self]._iterate, parameter[]]]
|
keyword[def] identifier[all] ( identifier[self] , identifier[store_id] , identifier[product_id] , identifier[get_all] = keyword[False] ,** identifier[queryparams] ):
literal[string]
identifier[self] . identifier[store_id] = identifier[store_id]
identifier[self] . identifier[product_id] = identifier[product_id]
identifier[self] . identifier[image_id] = keyword[None]
keyword[if] identifier[get_all] :
keyword[return] identifier[self] . identifier[_iterate] ( identifier[url] = identifier[self] . identifier[_build_path] ( identifier[store_id] , literal[string] , identifier[product_id] , literal[string] ),** identifier[queryparams] )
keyword[else] :
keyword[return] identifier[self] . identifier[_mc_client] . identifier[_post] ( identifier[url] = identifier[self] . identifier[_build_path] ( identifier[store_id] , literal[string] , identifier[product_id] , literal[string] ),** identifier[queryparams] )
|
def all(self, store_id, product_id, get_all=False, **queryparams):
"""
Get information about a product’s images.
:param store_id: The store id.
:type store_id: :py:class:`str`
:param product_id: The id for the product of a store.
:type product_id: :py:class:`str`
:param get_all: Should the query get all results
:type get_all: :py:class:`bool`
:param queryparams: The query string parameters
queryparams['fields'] = []
queryparams['exclude_fields'] = []
queryparams['count'] = integer
queryparams['offset'] = integer
"""
self.store_id = store_id
self.product_id = product_id
self.image_id = None
if get_all:
return self._iterate(url=self._build_path(store_id, 'products', product_id, 'images'), **queryparams) # depends on [control=['if'], data=[]]
else:
return self._mc_client._post(url=self._build_path(store_id, 'products', product_id, 'images'), **queryparams)
|
def get_hotkey_name(names=None):
"""
Returns a string representation of hotkey from the given key names, or
the currently pressed keys if not given. This function:
- normalizes names;
- removes "left" and "right" prefixes;
- replaces the "+" key name with "plus" to avoid ambiguity;
- puts modifier keys first, in a standardized order;
    - sorts remaining keys;
- finally, joins everything with "+".
Example:
get_hotkey_name(['+', 'left ctrl', 'shift'])
# "ctrl+shift+plus"
"""
if names is None:
_listener.start_if_necessary()
with _pressed_events_lock:
names = [e.name for e in _pressed_events.values()]
else:
names = [normalize_name(name) for name in names]
clean_names = set(e.replace('left ', '').replace('right ', '').replace('+', 'plus') for e in names)
# https://developer.apple.com/macos/human-interface-guidelines/input-and-output/keyboard/
# > List modifier keys in the correct order. If you use more than one modifier key in a
# > hotkey, always list them in this order: Control, Option, Shift, Command.
modifiers = ['ctrl', 'alt', 'shift', 'windows']
sorting_key = lambda k: (modifiers.index(k) if k in modifiers else 5, str(k))
return '+'.join(sorted(clean_names, key=sorting_key))
|
def function[get_hotkey_name, parameter[names]]:
constant[
Returns a string representation of hotkey from the given key names, or
the currently pressed keys if not given. This function:
- normalizes names;
- removes "left" and "right" prefixes;
- replaces the "+" key name with "plus" to avoid ambiguity;
- puts modifier keys first, in a standardized order;
 - sorts remaining keys;
- finally, joins everything with "+".
Example:
get_hotkey_name(['+', 'left ctrl', 'shift'])
# "ctrl+shift+plus"
]
if compare[name[names] is constant[None]] begin[:]
call[name[_listener].start_if_necessary, parameter[]]
with name[_pressed_events_lock] begin[:]
variable[names] assign[=] <ast.ListComp object at 0x7da1b1bcbfa0>
variable[clean_names] assign[=] call[name[set], parameter[<ast.GeneratorExp object at 0x7da1b1bcbb80>]]
variable[modifiers] assign[=] list[[<ast.Constant object at 0x7da1b1bcad70>, <ast.Constant object at 0x7da1b1bca4d0>, <ast.Constant object at 0x7da1b1bca290>, <ast.Constant object at 0x7da1b1bc9870>]]
variable[sorting_key] assign[=] <ast.Lambda object at 0x7da1b1bca980>
return[call[constant[+].join, parameter[call[name[sorted], parameter[name[clean_names]]]]]]
|
keyword[def] identifier[get_hotkey_name] ( identifier[names] = keyword[None] ):
literal[string]
keyword[if] identifier[names] keyword[is] keyword[None] :
identifier[_listener] . identifier[start_if_necessary] ()
keyword[with] identifier[_pressed_events_lock] :
identifier[names] =[ identifier[e] . identifier[name] keyword[for] identifier[e] keyword[in] identifier[_pressed_events] . identifier[values] ()]
keyword[else] :
identifier[names] =[ identifier[normalize_name] ( identifier[name] ) keyword[for] identifier[name] keyword[in] identifier[names] ]
identifier[clean_names] = identifier[set] ( identifier[e] . identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] ) keyword[for] identifier[e] keyword[in] identifier[names] )
identifier[modifiers] =[ literal[string] , literal[string] , literal[string] , literal[string] ]
identifier[sorting_key] = keyword[lambda] identifier[k] :( identifier[modifiers] . identifier[index] ( identifier[k] ) keyword[if] identifier[k] keyword[in] identifier[modifiers] keyword[else] literal[int] , identifier[str] ( identifier[k] ))
keyword[return] literal[string] . identifier[join] ( identifier[sorted] ( identifier[clean_names] , identifier[key] = identifier[sorting_key] ))
|
def get_hotkey_name(names=None):
"""
Returns a string representation of hotkey from the given key names, or
the currently pressed keys if not given. This function:
- normalizes names;
- removes "left" and "right" prefixes;
- replaces the "+" key name with "plus" to avoid ambiguity;
- puts modifier keys first, in a standardized order;
    - sorts remaining keys;
- finally, joins everything with "+".
Example:
get_hotkey_name(['+', 'left ctrl', 'shift'])
# "ctrl+shift+plus"
"""
if names is None:
_listener.start_if_necessary()
with _pressed_events_lock:
names = [e.name for e in _pressed_events.values()] # depends on [control=['with'], data=[]] # depends on [control=['if'], data=['names']]
else:
names = [normalize_name(name) for name in names]
clean_names = set((e.replace('left ', '').replace('right ', '').replace('+', 'plus') for e in names))
# https://developer.apple.com/macos/human-interface-guidelines/input-and-output/keyboard/
# > List modifier keys in the correct order. If you use more than one modifier key in a
# > hotkey, always list them in this order: Control, Option, Shift, Command.
modifiers = ['ctrl', 'alt', 'shift', 'windows']
sorting_key = lambda k: (modifiers.index(k) if k in modifiers else 5, str(k))
return '+'.join(sorted(clean_names, key=sorting_key))
|
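
A short sketch of the ordering trick used above, run on names that are already normalized; it reproduces the `ctrl+shift+plus` result from the docstring example.

modifiers = ['ctrl', 'alt', 'shift', 'windows']
sorting_key = lambda k: (modifiers.index(k) if k in modifiers else 5, str(k))

clean_names = {'plus', 'ctrl', 'shift'}                 # '+' -> 'plus', 'left ctrl' -> 'ctrl'
print('+'.join(sorted(clean_names, key=sorting_key)))   # ctrl+shift+plus
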
def info(self, msg, indent=0, **kwargs):
"""invoke ``self.info.debug``"""
return self.logger.info(self._indent(msg, indent), **kwargs)
|
def function[info, parameter[self, msg, indent]]:
    constant[invoke ``self.logger.info``]
return[call[name[self].logger.info, parameter[call[name[self]._indent, parameter[name[msg], name[indent]]]]]]
|
keyword[def] identifier[info] ( identifier[self] , identifier[msg] , identifier[indent] = literal[int] ,** identifier[kwargs] ):
literal[string]
keyword[return] identifier[self] . identifier[logger] . identifier[info] ( identifier[self] . identifier[_indent] ( identifier[msg] , identifier[indent] ),** identifier[kwargs] )
|
def info(self, msg, indent=0, **kwargs):
"""invoke ``self.info.debug``"""
return self.logger.info(self._indent(msg, indent), **kwargs)
|
def set(zpool, prop, value):
'''
Sets the given property on the specified pool
zpool : string
Name of storage pool
prop : string
Name of property to set
value : string
Value to set for the specified property
.. versionadded:: 2016.3.0
CLI Example:
.. code-block:: bash
salt '*' zpool.set myzpool readonly yes
'''
ret = OrderedDict()
# set property
res = __salt__['cmd.run_all'](
__utils__['zfs.zpool_command'](
command='set',
property_name=prop,
property_value=value,
target=zpool,
),
python_shell=False,
)
return __utils__['zfs.parse_command_result'](res, 'set')
|
def function[set, parameter[zpool, prop, value]]:
constant[
Sets the given property on the specified pool
zpool : string
Name of storage pool
prop : string
Name of property to set
value : string
Value to set for the specified property
.. versionadded:: 2016.3.0
CLI Example:
.. code-block:: bash
salt '*' zpool.set myzpool readonly yes
]
variable[ret] assign[=] call[name[OrderedDict], parameter[]]
variable[res] assign[=] call[call[name[__salt__]][constant[cmd.run_all]], parameter[call[call[name[__utils__]][constant[zfs.zpool_command]], parameter[]]]]
return[call[call[name[__utils__]][constant[zfs.parse_command_result]], parameter[name[res], constant[set]]]]
|
keyword[def] identifier[set] ( identifier[zpool] , identifier[prop] , identifier[value] ):
literal[string]
identifier[ret] = identifier[OrderedDict] ()
identifier[res] = identifier[__salt__] [ literal[string] ](
identifier[__utils__] [ literal[string] ](
identifier[command] = literal[string] ,
identifier[property_name] = identifier[prop] ,
identifier[property_value] = identifier[value] ,
identifier[target] = identifier[zpool] ,
),
identifier[python_shell] = keyword[False] ,
)
keyword[return] identifier[__utils__] [ literal[string] ]( identifier[res] , literal[string] )
|
def set(zpool, prop, value):
"""
Sets the given property on the specified pool
zpool : string
Name of storage pool
prop : string
Name of property to set
value : string
Value to set for the specified property
.. versionadded:: 2016.3.0
CLI Example:
.. code-block:: bash
salt '*' zpool.set myzpool readonly yes
"""
ret = OrderedDict()
# set property
res = __salt__['cmd.run_all'](__utils__['zfs.zpool_command'](command='set', property_name=prop, property_value=value, target=zpool), python_shell=False)
return __utils__['zfs.parse_command_result'](res, 'set')
|
def accept(self, logevent):
"""
Process line.
        Override BaseFilter.accept() and return True if the provided
logevent should be accepted (causing output), or False if not.
"""
ns = logevent.nscanned
nr = logevent.nreturned
if ns is not None and nr is not None:
if nr == 0:
# avoid division by 0 errors
nr = 1
return (ns > 10000 and ns / nr > 100)
return False
|
def function[accept, parameter[self, logevent]]:
constant[
Process line.
 Override BaseFilter.accept() and return True if the provided
logevent should be accepted (causing output), or False if not.
]
variable[ns] assign[=] name[logevent].nscanned
variable[nr] assign[=] name[logevent].nreturned
if <ast.BoolOp object at 0x7da1b16a8d00> begin[:]
if compare[name[nr] equal[==] constant[0]] begin[:]
variable[nr] assign[=] constant[1]
return[<ast.BoolOp object at 0x7da1b18e40a0>]
return[constant[False]]
|
keyword[def] identifier[accept] ( identifier[self] , identifier[logevent] ):
literal[string]
identifier[ns] = identifier[logevent] . identifier[nscanned]
identifier[nr] = identifier[logevent] . identifier[nreturned]
keyword[if] identifier[ns] keyword[is] keyword[not] keyword[None] keyword[and] identifier[nr] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[nr] == literal[int] :
identifier[nr] = literal[int]
keyword[return] ( identifier[ns] > literal[int] keyword[and] identifier[ns] / identifier[nr] > literal[int] )
keyword[return] keyword[False]
|
def accept(self, logevent):
"""
Process line.
        Override BaseFilter.accept() and return True if the provided
logevent should be accepted (causing output), or False if not.
"""
ns = logevent.nscanned
nr = logevent.nreturned
if ns is not None and nr is not None:
if nr == 0:
# avoid division by 0 errors
nr = 1 # depends on [control=['if'], data=['nr']]
return ns > 10000 and ns / nr > 100 # depends on [control=['if'], data=[]]
return False
|
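
A self-contained sketch of the same slow-query predicate, using a namedtuple stand-in for the real logevent object (only `nscanned` and `nreturned` matter here).

from collections import namedtuple

LogEvent = namedtuple('LogEvent', ['nscanned', 'nreturned'])

def looks_like_collection_scan(ev):
    # mirrors accept(): many documents scanned and a poor scanned/returned ratio
    if ev.nscanned is None or ev.nreturned is None:
        return False
    nr = ev.nreturned or 1  # avoid division by zero, as in the record above
    return ev.nscanned > 10000 and ev.nscanned / nr > 100

print(looks_like_collection_scan(LogEvent(50000, 20)))    # True: 50000 scanned, ratio 2500
print(looks_like_collection_scan(LogEvent(50000, 2000)))  # False: ratio is only 25
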
def duplicates(base, items):
"""Get an iterator of items similar but not equal to the base.
@param base: base item to perform comparison against
@param items: list of items to compare to the base
@return: generator of items sorted by similarity to the base
"""
for item in items:
if item.similarity(base) and not item.equality(base):
yield item
|
def function[duplicates, parameter[base, items]]:
constant[Get an iterator of items similar but not equal to the base.
@param base: base item to perform comparison against
@param items: list of items to compare to the base
@return: generator of items sorted by similarity to the base
]
for taget[name[item]] in starred[name[items]] begin[:]
if <ast.BoolOp object at 0x7da207f9ad40> begin[:]
<ast.Yield object at 0x7da207f9a4d0>
|
keyword[def] identifier[duplicates] ( identifier[base] , identifier[items] ):
literal[string]
keyword[for] identifier[item] keyword[in] identifier[items] :
keyword[if] identifier[item] . identifier[similarity] ( identifier[base] ) keyword[and] keyword[not] identifier[item] . identifier[equality] ( identifier[base] ):
keyword[yield] identifier[item]
|
def duplicates(base, items):
"""Get an iterator of items similar but not equal to the base.
@param base: base item to perform comparison against
@param items: list of items to compare to the base
@return: generator of items sorted by similarity to the base
"""
for item in items:
if item.similarity(base) and (not item.equality(base)):
yield item # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['item']]
|
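
A tiny worked example, assuming the `duplicates` generator above is in scope and using a hypothetical `Item` class that provides the `similarity()`/`equality()` protocol it expects.

class Item:
    def __init__(self, name, size):
        self.name, self.size = name, size
    def similarity(self, other):
        return self.name == other.name        # "looks like" the base item
    def equality(self, other):
        return self.name == other.name and self.size == other.size

base = Item('report.pdf', 100)
items = [Item('report.pdf', 100), Item('report.pdf', 250), Item('notes.txt', 10)]
print([i.size for i in duplicates(base, items)])  # [250]: same name but not an exact copy
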
def nvmlDeviceGetPowerManagementMode(handle):
r"""
/**
* This API has been deprecated.
*
* Retrieves the power management mode associated with this device.
*
* For products from the Fermi family.
* - Requires \a NVML_INFOROM_POWER version 3.0 or higher.
*
     * For products from the Kepler or newer families.
* - Does not require \a NVML_INFOROM_POWER object.
*
* This flag indicates whether any power management algorithm is currently active on the device. An
* enabled state does not necessarily mean the device is being actively throttled -- only that
     * the driver will do so if the appropriate conditions are met.
*
* See \ref nvmlEnableState_t for details on allowed modes.
*
* @param device The identifier of the target device
* @param mode Reference in which to return the current power management mode
*
* @return
* - \ref NVML_SUCCESS if \a mode has been set
* - \ref NVML_ERROR_UNINITIALIZED if the library has not been successfully initialized
* - \ref NVML_ERROR_INVALID_ARGUMENT if \a device is invalid or \a mode is NULL
* - \ref NVML_ERROR_NOT_SUPPORTED if the device does not support this feature
* - \ref NVML_ERROR_GPU_IS_LOST if the target GPU has fallen off the bus or is otherwise inaccessible
* - \ref NVML_ERROR_UNKNOWN on any unexpected error
*/
nvmlReturn_t DECLDIR nvmlDeviceGetPowerManagementMode
"""
c_pcapMode = _nvmlEnableState_t()
fn = _nvmlGetFunctionPointer("nvmlDeviceGetPowerManagementMode")
ret = fn(handle, byref(c_pcapMode))
_nvmlCheckReturn(ret)
return bytes_to_str(c_pcapMode.value)
|
def function[nvmlDeviceGetPowerManagementMode, parameter[handle]]:
constant[
/**
* This API has been deprecated.
*
* Retrieves the power management mode associated with this device.
*
* For products from the Fermi family.
* - Requires \a NVML_INFOROM_POWER version 3.0 or higher.
*
 * For products from the Kepler or newer families.
* - Does not require \a NVML_INFOROM_POWER object.
*
* This flag indicates whether any power management algorithm is currently active on the device. An
* enabled state does not necessarily mean the device is being actively throttled -- only that
 * the driver will do so if the appropriate conditions are met.
*
* See \ref nvmlEnableState_t for details on allowed modes.
*
* @param device The identifier of the target device
* @param mode Reference in which to return the current power management mode
*
* @return
* - \ref NVML_SUCCESS if \a mode has been set
* - \ref NVML_ERROR_UNINITIALIZED if the library has not been successfully initialized
* - \ref NVML_ERROR_INVALID_ARGUMENT if \a device is invalid or \a mode is NULL
* - \ref NVML_ERROR_NOT_SUPPORTED if the device does not support this feature
* - \ref NVML_ERROR_GPU_IS_LOST if the target GPU has fallen off the bus or is otherwise inaccessible
* - \ref NVML_ERROR_UNKNOWN on any unexpected error
*/
nvmlReturn_t DECLDIR nvmlDeviceGetPowerManagementMode
]
variable[c_pcapMode] assign[=] call[name[_nvmlEnableState_t], parameter[]]
variable[fn] assign[=] call[name[_nvmlGetFunctionPointer], parameter[constant[nvmlDeviceGetPowerManagementMode]]]
variable[ret] assign[=] call[name[fn], parameter[name[handle], call[name[byref], parameter[name[c_pcapMode]]]]]
call[name[_nvmlCheckReturn], parameter[name[ret]]]
return[call[name[bytes_to_str], parameter[name[c_pcapMode].value]]]
|
keyword[def] identifier[nvmlDeviceGetPowerManagementMode] ( identifier[handle] ):
literal[string]
identifier[c_pcapMode] = identifier[_nvmlEnableState_t] ()
identifier[fn] = identifier[_nvmlGetFunctionPointer] ( literal[string] )
identifier[ret] = identifier[fn] ( identifier[handle] , identifier[byref] ( identifier[c_pcapMode] ))
identifier[_nvmlCheckReturn] ( identifier[ret] )
keyword[return] identifier[bytes_to_str] ( identifier[c_pcapMode] . identifier[value] )
|
def nvmlDeviceGetPowerManagementMode(handle):
"""
/**
* This API has been deprecated.
*
* Retrieves the power management mode associated with this device.
*
* For products from the Fermi family.
* - Requires \\a NVML_INFOROM_POWER version 3.0 or higher.
*
     * For products from the Kepler or newer families.
* - Does not require \\a NVML_INFOROM_POWER object.
*
* This flag indicates whether any power management algorithm is currently active on the device. An
* enabled state does not necessarily mean the device is being actively throttled -- only that
     * the driver will do so if the appropriate conditions are met.
*
* See \\ref nvmlEnableState_t for details on allowed modes.
*
* @param device The identifier of the target device
* @param mode Reference in which to return the current power management mode
*
* @return
* - \\ref NVML_SUCCESS if \\a mode has been set
* - \\ref NVML_ERROR_UNINITIALIZED if the library has not been successfully initialized
* - \\ref NVML_ERROR_INVALID_ARGUMENT if \\a device is invalid or \\a mode is NULL
* - \\ref NVML_ERROR_NOT_SUPPORTED if the device does not support this feature
* - \\ref NVML_ERROR_GPU_IS_LOST if the target GPU has fallen off the bus or is otherwise inaccessible
* - \\ref NVML_ERROR_UNKNOWN on any unexpected error
*/
nvmlReturn_t DECLDIR nvmlDeviceGetPowerManagementMode
"""
c_pcapMode = _nvmlEnableState_t()
fn = _nvmlGetFunctionPointer('nvmlDeviceGetPowerManagementMode')
ret = fn(handle, byref(c_pcapMode))
_nvmlCheckReturn(ret)
return bytes_to_str(c_pcapMode.value)
|
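
A hedged usage sketch: the record above follows the pyNVML binding style (note `_nvmlGetFunctionPointer`), so this assumes the standard `pynvml` package is installed, an NVIDIA driver is present, and device 0 supports the query.

from pynvml import (nvmlInit, nvmlShutdown, nvmlDeviceGetHandleByIndex,
                    nvmlDeviceGetPowerManagementMode)

nvmlInit()
try:
    handle = nvmlDeviceGetHandleByIndex(0)
    # returns an nvmlEnableState_t value: 0 (disabled) or 1 (enabled)
    print(nvmlDeviceGetPowerManagementMode(handle))
finally:
    nvmlShutdown()
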
def gauss_jordan(A, x, b):
"""Linear equation system Ax=b by Gauss-Jordan
:param A: n by m matrix
:param x: table of size n
:param b: table of size m
:modifies: x will contain solution if any
:returns int:
0 if no solution,
1 if solution unique,
2 otherwise
:complexity: :math:`O(n^2m)`
"""
n = len(x)
m = len(b)
assert len(A) == m and len(A[0]) == n
S = [] # put linear system in a single matrix S
for i in range(m):
S.append(A[i][:] + [b[i]])
S.append(list(range(n))) # indices in x
k = diagonalize(S, n, m)
if k < m:
for i in range(k, m):
if not is_zero(S[i][n]):
return GJ_ZERO_SOLUTIONS
for j in range(k):
x[S[m][j]] = S[j][n]
if k < n:
for j in range(k, n):
x[S[m][j]] = 0
return GJ_SEVERAL_SOLUTIONS
return GJ_SINGLE_SOLUTION
|
def function[gauss_jordan, parameter[A, x, b]]:
constant[Linear equation system Ax=b by Gauss-Jordan
:param A: n by m matrix
:param x: table of size n
:param b: table of size m
:modifies: x will contain solution if any
:returns int:
0 if no solution,
1 if solution unique,
2 otherwise
:complexity: :math:`O(n^2m)`
]
variable[n] assign[=] call[name[len], parameter[name[x]]]
variable[m] assign[=] call[name[len], parameter[name[b]]]
assert[<ast.BoolOp object at 0x7da1b07f5420>]
variable[S] assign[=] list[[]]
for taget[name[i]] in starred[call[name[range], parameter[name[m]]]] begin[:]
call[name[S].append, parameter[binary_operation[call[call[name[A]][name[i]]][<ast.Slice object at 0x7da18bcc8340>] + list[[<ast.Subscript object at 0x7da18bccaf80>]]]]]
call[name[S].append, parameter[call[name[list], parameter[call[name[range], parameter[name[n]]]]]]]
variable[k] assign[=] call[name[diagonalize], parameter[name[S], name[n], name[m]]]
if compare[name[k] less[<] name[m]] begin[:]
for taget[name[i]] in starred[call[name[range], parameter[name[k], name[m]]]] begin[:]
if <ast.UnaryOp object at 0x7da18bcc9720> begin[:]
return[name[GJ_ZERO_SOLUTIONS]]
for taget[name[j]] in starred[call[name[range], parameter[name[k]]]] begin[:]
call[name[x]][call[call[name[S]][name[m]]][name[j]]] assign[=] call[call[name[S]][name[j]]][name[n]]
if compare[name[k] less[<] name[n]] begin[:]
for taget[name[j]] in starred[call[name[range], parameter[name[k], name[n]]]] begin[:]
call[name[x]][call[call[name[S]][name[m]]][name[j]]] assign[=] constant[0]
return[name[GJ_SEVERAL_SOLUTIONS]]
return[name[GJ_SINGLE_SOLUTION]]
|
keyword[def] identifier[gauss_jordan] ( identifier[A] , identifier[x] , identifier[b] ):
literal[string]
identifier[n] = identifier[len] ( identifier[x] )
identifier[m] = identifier[len] ( identifier[b] )
keyword[assert] identifier[len] ( identifier[A] )== identifier[m] keyword[and] identifier[len] ( identifier[A] [ literal[int] ])== identifier[n]
identifier[S] =[]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[m] ):
identifier[S] . identifier[append] ( identifier[A] [ identifier[i] ][:]+[ identifier[b] [ identifier[i] ]])
identifier[S] . identifier[append] ( identifier[list] ( identifier[range] ( identifier[n] )))
identifier[k] = identifier[diagonalize] ( identifier[S] , identifier[n] , identifier[m] )
keyword[if] identifier[k] < identifier[m] :
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[k] , identifier[m] ):
keyword[if] keyword[not] identifier[is_zero] ( identifier[S] [ identifier[i] ][ identifier[n] ]):
keyword[return] identifier[GJ_ZERO_SOLUTIONS]
keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[k] ):
identifier[x] [ identifier[S] [ identifier[m] ][ identifier[j] ]]= identifier[S] [ identifier[j] ][ identifier[n] ]
keyword[if] identifier[k] < identifier[n] :
keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[k] , identifier[n] ):
identifier[x] [ identifier[S] [ identifier[m] ][ identifier[j] ]]= literal[int]
keyword[return] identifier[GJ_SEVERAL_SOLUTIONS]
keyword[return] identifier[GJ_SINGLE_SOLUTION]
|
def gauss_jordan(A, x, b):
"""Linear equation system Ax=b by Gauss-Jordan
:param A: n by m matrix
:param x: table of size n
:param b: table of size m
:modifies: x will contain solution if any
:returns int:
0 if no solution,
1 if solution unique,
2 otherwise
:complexity: :math:`O(n^2m)`
"""
n = len(x)
m = len(b)
assert len(A) == m and len(A[0]) == n
S = [] # put linear system in a single matrix S
for i in range(m):
S.append(A[i][:] + [b[i]]) # depends on [control=['for'], data=['i']]
S.append(list(range(n))) # indices in x
k = diagonalize(S, n, m)
if k < m:
for i in range(k, m):
if not is_zero(S[i][n]):
return GJ_ZERO_SOLUTIONS # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=['k', 'm']]
for j in range(k):
x[S[m][j]] = S[j][n] # depends on [control=['for'], data=['j']]
if k < n:
for j in range(k, n):
x[S[m][j]] = 0 # depends on [control=['for'], data=['j']]
return GJ_SEVERAL_SOLUTIONS # depends on [control=['if'], data=['k', 'n']]
return GJ_SINGLE_SOLUTION
|
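
A small usage sketch for a 2x2 system; it assumes `gauss_jordan` and its `GJ_*` status constants are importable from the module this record belongs to (the function relies on helpers such as `diagonalize` and `is_zero` that are not shown here), so the import path below is an assumption.

# from tryalgo.gauss_jordan import gauss_jordan, GJ_SINGLE_SOLUTION  # assumed import path

A = [[2, 1],
     [1, 3]]       # m=2 equations in n=2 unknowns
b = [3, 5]
x = [0, 0]         # the solution is written into x in place
status = gauss_jordan(A, x, b)
print(status, x)   # expect GJ_SINGLE_SOLUTION and x close to [0.8, 1.4]
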
def _get_ln_a_n_max(self, C, n_sites, idx, rup):
"""
        Returns the rock site amplification defined in equations 10a and 10b
"""
ln_a_n_max = C["lnSC1AM"] * np.ones(n_sites)
for i in [2, 3, 4]:
if np.any(idx[i]):
ln_a_n_max[idx[i]] += C["S{:g}".format(i)]
return ln_a_n_max
|
def function[_get_ln_a_n_max, parameter[self, C, n_sites, idx, rup]]:
constant[
 Returns the rock site amplification defined in equations 10a and 10b
]
variable[ln_a_n_max] assign[=] binary_operation[call[name[C]][constant[lnSC1AM]] * call[name[np].ones, parameter[name[n_sites]]]]
for taget[name[i]] in starred[list[[<ast.Constant object at 0x7da18f09d6f0>, <ast.Constant object at 0x7da18f09d120>, <ast.Constant object at 0x7da18f09e3b0>]]] begin[:]
if call[name[np].any, parameter[call[name[idx]][name[i]]]] begin[:]
<ast.AugAssign object at 0x7da18f09c670>
return[name[ln_a_n_max]]
|
keyword[def] identifier[_get_ln_a_n_max] ( identifier[self] , identifier[C] , identifier[n_sites] , identifier[idx] , identifier[rup] ):
literal[string]
identifier[ln_a_n_max] = identifier[C] [ literal[string] ]* identifier[np] . identifier[ones] ( identifier[n_sites] )
keyword[for] identifier[i] keyword[in] [ literal[int] , literal[int] , literal[int] ]:
keyword[if] identifier[np] . identifier[any] ( identifier[idx] [ identifier[i] ]):
identifier[ln_a_n_max] [ identifier[idx] [ identifier[i] ]]+= identifier[C] [ literal[string] . identifier[format] ( identifier[i] )]
keyword[return] identifier[ln_a_n_max]
|
def _get_ln_a_n_max(self, C, n_sites, idx, rup):
"""
        Returns the rock site amplification defined in equations 10a and 10b
"""
ln_a_n_max = C['lnSC1AM'] * np.ones(n_sites)
for i in [2, 3, 4]:
if np.any(idx[i]):
ln_a_n_max[idx[i]] += C['S{:g}'.format(i)] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
return ln_a_n_max
|
def _apply_to_values(self, entity, function):
"""Apply a function to the property value/values of a given entity.
This retrieves the property value, applies the function, and then
stores the value back. For a repeated property, the function is
applied separately to each of the values in the list. The
resulting value or list of values is both stored back in the
entity and returned from this method.
"""
value = self._retrieve_value(entity, self._default)
if self._repeated:
if value is None:
value = []
self._store_value(entity, value)
else:
value[:] = map(function, value)
else:
if value is not None:
newvalue = function(value)
if newvalue is not None and newvalue is not value:
self._store_value(entity, newvalue)
value = newvalue
return value
|
def function[_apply_to_values, parameter[self, entity, function]]:
constant[Apply a function to the property value/values of a given entity.
This retrieves the property value, applies the function, and then
stores the value back. For a repeated property, the function is
applied separately to each of the values in the list. The
resulting value or list of values is both stored back in the
entity and returned from this method.
]
variable[value] assign[=] call[name[self]._retrieve_value, parameter[name[entity], name[self]._default]]
if name[self]._repeated begin[:]
if compare[name[value] is constant[None]] begin[:]
variable[value] assign[=] list[[]]
call[name[self]._store_value, parameter[name[entity], name[value]]]
return[name[value]]
|
keyword[def] identifier[_apply_to_values] ( identifier[self] , identifier[entity] , identifier[function] ):
literal[string]
identifier[value] = identifier[self] . identifier[_retrieve_value] ( identifier[entity] , identifier[self] . identifier[_default] )
keyword[if] identifier[self] . identifier[_repeated] :
keyword[if] identifier[value] keyword[is] keyword[None] :
identifier[value] =[]
identifier[self] . identifier[_store_value] ( identifier[entity] , identifier[value] )
keyword[else] :
identifier[value] [:]= identifier[map] ( identifier[function] , identifier[value] )
keyword[else] :
keyword[if] identifier[value] keyword[is] keyword[not] keyword[None] :
identifier[newvalue] = identifier[function] ( identifier[value] )
keyword[if] identifier[newvalue] keyword[is] keyword[not] keyword[None] keyword[and] identifier[newvalue] keyword[is] keyword[not] identifier[value] :
identifier[self] . identifier[_store_value] ( identifier[entity] , identifier[newvalue] )
identifier[value] = identifier[newvalue]
keyword[return] identifier[value]
|
def _apply_to_values(self, entity, function):
"""Apply a function to the property value/values of a given entity.
This retrieves the property value, applies the function, and then
stores the value back. For a repeated property, the function is
applied separately to each of the values in the list. The
resulting value or list of values is both stored back in the
entity and returned from this method.
"""
value = self._retrieve_value(entity, self._default)
if self._repeated:
if value is None:
value = []
self._store_value(entity, value) # depends on [control=['if'], data=['value']]
else:
value[:] = map(function, value) # depends on [control=['if'], data=[]]
elif value is not None:
newvalue = function(value)
if newvalue is not None and newvalue is not value:
self._store_value(entity, newvalue)
value = newvalue # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['value']]
return value
|
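
The repeated/scalar handling above is a general pattern; the dict-based stand-in below illustrates it outside of the ndb property machinery (the names and storage here are hypothetical, not part of ndb).

def apply_to_values(record, name, function, repeated):
    # mutate a list slot in place, or replace a scalar only when the function returns a new value
    value = record.get(name)
    if repeated:
        if value is None:
            value = []
            record[name] = value
        else:
            value[:] = map(function, value)
    elif value is not None:
        newvalue = function(value)
        if newvalue is not None and newvalue is not value:
            record[name] = newvalue
            value = newvalue
    return value

rec = {'tags': ['a', 'b']}
apply_to_values(rec, 'tags', str.upper, repeated=True)
print(rec)  # {'tags': ['A', 'B']}
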
async def SetExternalControllerInfo(self, controllers):
'''
controllers : typing.Sequence[~SetExternalControllerInfoParams]
Returns -> typing.Sequence[~ErrorResult]
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='ExternalControllerUpdater',
request='SetExternalControllerInfo',
version=1,
params=_params)
_params['controllers'] = controllers
reply = await self.rpc(msg)
return reply
|
<ast.AsyncFunctionDef object at 0x7da1b0ebc4c0>
|
keyword[async] keyword[def] identifier[SetExternalControllerInfo] ( identifier[self] , identifier[controllers] ):
literal[string]
identifier[_params] = identifier[dict] ()
identifier[msg] = identifier[dict] ( identifier[type] = literal[string] ,
identifier[request] = literal[string] ,
identifier[version] = literal[int] ,
identifier[params] = identifier[_params] )
identifier[_params] [ literal[string] ]= identifier[controllers]
identifier[reply] = keyword[await] identifier[self] . identifier[rpc] ( identifier[msg] )
keyword[return] identifier[reply]
|
async def SetExternalControllerInfo(self, controllers):
"""
controllers : typing.Sequence[~SetExternalControllerInfoParams]
Returns -> typing.Sequence[~ErrorResult]
"""
# map input types to rpc msg
_params = dict()
msg = dict(type='ExternalControllerUpdater', request='SetExternalControllerInfo', version=1, params=_params)
_params['controllers'] = controllers
reply = await self.rpc(msg)
return reply
|
def calcValueAtBirth(cLvlHist,BirthBool,PlvlHist,MrkvHist,DiscFac,CRRA):
'''
Calculate expected value of being born in each Markov state using the realizations
of consumption for a history of many consumers. The histories should already be
trimmed of the "burn in" periods.
Parameters
----------
cLvlHist : np.array
TxN array of consumption level history for many agents across many periods.
        Agents who die are replaced by newborns.
BirthBool : np.array
TxN boolean array indicating when agents are born, replacing one who died.
PlvlHist : np.array
T length vector of aggregate permanent productivity levels.
MrkvHist : np.array
T length vector of integers for the Markov index in each period.
DiscFac : float
Intertemporal discount factor.
CRRA : float
Coefficient of relative risk aversion.
Returns
-------
vAtBirth : np.array
J length vector of average lifetime value at birth by Markov state.
'''
J = np.max(MrkvHist) + 1 # Number of Markov states
T = MrkvHist.size # Length of simulation
I = cLvlHist.shape[1] # Number of agent indices in histories
u = lambda c : CRRAutility(c,gam=CRRA)
# Initialize an array to hold each agent's lifetime utility
BirthsByPeriod = np.sum(BirthBool,axis=1)
BirthsByState = np.zeros(J,dtype=int)
for j in range(J):
these = MrkvHist == j
BirthsByState[j] = np.sum(BirthsByPeriod[these])
N = np.max(BirthsByState) # Array must hold this many agents per row at least
vArray = np.zeros((J,N)) + np.nan
n = np.zeros(J,dtype=int)
# Loop through each agent index
DiscVec = DiscFac**np.arange(T)
for i in range(I):
birth_t = np.where(BirthBool[:,i])[0]
# Loop through each agent who lived and died in this index
for k in range(birth_t.size-1): # Last birth event has no death, so ignore
# Get lifespan of this agent and circumstances at birth
t0 = birth_t[k]
t1 = birth_t[k+1]
span = t1-t0
j = MrkvHist[t0]
# Calculate discounted flow of utility for this agent and store it
cVec = cLvlHist[t0:t1,i]/PlvlHist[t0]
uVec = u(cVec)
v = np.dot(DiscVec[:span],uVec)
vArray[j,n[j]] = v
n[j] += 1
# Calculate expected value at birth by state and return it
vAtBirth = np.nanmean(vArray,axis=1)
return vAtBirth
|
def function[calcValueAtBirth, parameter[cLvlHist, BirthBool, PlvlHist, MrkvHist, DiscFac, CRRA]]:
constant[
Calculate expected value of being born in each Markov state using the realizations
of consumption for a history of many consumers. The histories should already be
trimmed of the "burn in" periods.
Parameters
----------
cLvlHist : np.array
TxN array of consumption level history for many agents across many periods.
 Agents who die are replaced by newborns.
BirthBool : np.array
TxN boolean array indicating when agents are born, replacing one who died.
PlvlHist : np.array
T length vector of aggregate permanent productivity levels.
MrkvHist : np.array
T length vector of integers for the Markov index in each period.
DiscFac : float
Intertemporal discount factor.
CRRA : float
Coefficient of relative risk aversion.
Returns
-------
vAtBirth : np.array
J length vector of average lifetime value at birth by Markov state.
]
variable[J] assign[=] binary_operation[call[name[np].max, parameter[name[MrkvHist]]] + constant[1]]
variable[T] assign[=] name[MrkvHist].size
variable[I] assign[=] call[name[cLvlHist].shape][constant[1]]
variable[u] assign[=] <ast.Lambda object at 0x7da2041d9ff0>
variable[BirthsByPeriod] assign[=] call[name[np].sum, parameter[name[BirthBool]]]
variable[BirthsByState] assign[=] call[name[np].zeros, parameter[name[J]]]
for taget[name[j]] in starred[call[name[range], parameter[name[J]]]] begin[:]
variable[these] assign[=] compare[name[MrkvHist] equal[==] name[j]]
call[name[BirthsByState]][name[j]] assign[=] call[name[np].sum, parameter[call[name[BirthsByPeriod]][name[these]]]]
variable[N] assign[=] call[name[np].max, parameter[name[BirthsByState]]]
variable[vArray] assign[=] binary_operation[call[name[np].zeros, parameter[tuple[[<ast.Name object at 0x7da2041db970>, <ast.Name object at 0x7da2041db2e0>]]]] + name[np].nan]
variable[n] assign[=] call[name[np].zeros, parameter[name[J]]]
variable[DiscVec] assign[=] binary_operation[name[DiscFac] ** call[name[np].arange, parameter[name[T]]]]
for taget[name[i]] in starred[call[name[range], parameter[name[I]]]] begin[:]
variable[birth_t] assign[=] call[call[name[np].where, parameter[call[name[BirthBool]][tuple[[<ast.Slice object at 0x7da2041d8a30>, <ast.Name object at 0x7da2041d8d30>]]]]]][constant[0]]
for taget[name[k]] in starred[call[name[range], parameter[binary_operation[name[birth_t].size - constant[1]]]]] begin[:]
variable[t0] assign[=] call[name[birth_t]][name[k]]
variable[t1] assign[=] call[name[birth_t]][binary_operation[name[k] + constant[1]]]
variable[span] assign[=] binary_operation[name[t1] - name[t0]]
variable[j] assign[=] call[name[MrkvHist]][name[t0]]
variable[cVec] assign[=] binary_operation[call[name[cLvlHist]][tuple[[<ast.Slice object at 0x7da204564d90>, <ast.Name object at 0x7da204566020>]]] / call[name[PlvlHist]][name[t0]]]
variable[uVec] assign[=] call[name[u], parameter[name[cVec]]]
variable[v] assign[=] call[name[np].dot, parameter[call[name[DiscVec]][<ast.Slice object at 0x7da204567490>], name[uVec]]]
call[name[vArray]][tuple[[<ast.Name object at 0x7da204564580>, <ast.Subscript object at 0x7da204566590>]]] assign[=] name[v]
<ast.AugAssign object at 0x7da2045677c0>
variable[vAtBirth] assign[=] call[name[np].nanmean, parameter[name[vArray]]]
return[name[vAtBirth]]
|
keyword[def] identifier[calcValueAtBirth] ( identifier[cLvlHist] , identifier[BirthBool] , identifier[PlvlHist] , identifier[MrkvHist] , identifier[DiscFac] , identifier[CRRA] ):
literal[string]
identifier[J] = identifier[np] . identifier[max] ( identifier[MrkvHist] )+ literal[int]
identifier[T] = identifier[MrkvHist] . identifier[size]
identifier[I] = identifier[cLvlHist] . identifier[shape] [ literal[int] ]
identifier[u] = keyword[lambda] identifier[c] : identifier[CRRAutility] ( identifier[c] , identifier[gam] = identifier[CRRA] )
identifier[BirthsByPeriod] = identifier[np] . identifier[sum] ( identifier[BirthBool] , identifier[axis] = literal[int] )
identifier[BirthsByState] = identifier[np] . identifier[zeros] ( identifier[J] , identifier[dtype] = identifier[int] )
keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[J] ):
identifier[these] = identifier[MrkvHist] == identifier[j]
identifier[BirthsByState] [ identifier[j] ]= identifier[np] . identifier[sum] ( identifier[BirthsByPeriod] [ identifier[these] ])
identifier[N] = identifier[np] . identifier[max] ( identifier[BirthsByState] )
identifier[vArray] = identifier[np] . identifier[zeros] (( identifier[J] , identifier[N] ))+ identifier[np] . identifier[nan]
identifier[n] = identifier[np] . identifier[zeros] ( identifier[J] , identifier[dtype] = identifier[int] )
identifier[DiscVec] = identifier[DiscFac] ** identifier[np] . identifier[arange] ( identifier[T] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[I] ):
identifier[birth_t] = identifier[np] . identifier[where] ( identifier[BirthBool] [:, identifier[i] ])[ literal[int] ]
keyword[for] identifier[k] keyword[in] identifier[range] ( identifier[birth_t] . identifier[size] - literal[int] ):
identifier[t0] = identifier[birth_t] [ identifier[k] ]
identifier[t1] = identifier[birth_t] [ identifier[k] + literal[int] ]
identifier[span] = identifier[t1] - identifier[t0]
identifier[j] = identifier[MrkvHist] [ identifier[t0] ]
identifier[cVec] = identifier[cLvlHist] [ identifier[t0] : identifier[t1] , identifier[i] ]/ identifier[PlvlHist] [ identifier[t0] ]
identifier[uVec] = identifier[u] ( identifier[cVec] )
identifier[v] = identifier[np] . identifier[dot] ( identifier[DiscVec] [: identifier[span] ], identifier[uVec] )
identifier[vArray] [ identifier[j] , identifier[n] [ identifier[j] ]]= identifier[v]
identifier[n] [ identifier[j] ]+= literal[int]
identifier[vAtBirth] = identifier[np] . identifier[nanmean] ( identifier[vArray] , identifier[axis] = literal[int] )
keyword[return] identifier[vAtBirth]
|
def calcValueAtBirth(cLvlHist, BirthBool, PlvlHist, MrkvHist, DiscFac, CRRA):
"""
Calculate expected value of being born in each Markov state using the realizations
of consumption for a history of many consumers. The histories should already be
trimmed of the "burn in" periods.
Parameters
----------
cLvlHist : np.array
TxN array of consumption level history for many agents across many periods.
        Agents who die are replaced by newborns.
BirthBool : np.array
TxN boolean array indicating when agents are born, replacing one who died.
PlvlHist : np.array
T length vector of aggregate permanent productivity levels.
MrkvHist : np.array
T length vector of integers for the Markov index in each period.
DiscFac : float
Intertemporal discount factor.
CRRA : float
Coefficient of relative risk aversion.
Returns
-------
vAtBirth : np.array
J length vector of average lifetime value at birth by Markov state.
"""
J = np.max(MrkvHist) + 1 # Number of Markov states
T = MrkvHist.size # Length of simulation
I = cLvlHist.shape[1] # Number of agent indices in histories
u = lambda c: CRRAutility(c, gam=CRRA)
# Initialize an array to hold each agent's lifetime utility
BirthsByPeriod = np.sum(BirthBool, axis=1)
BirthsByState = np.zeros(J, dtype=int)
for j in range(J):
these = MrkvHist == j
BirthsByState[j] = np.sum(BirthsByPeriod[these]) # depends on [control=['for'], data=['j']]
N = np.max(BirthsByState) # Array must hold this many agents per row at least
vArray = np.zeros((J, N)) + np.nan
n = np.zeros(J, dtype=int)
# Loop through each agent index
DiscVec = DiscFac ** np.arange(T)
for i in range(I):
birth_t = np.where(BirthBool[:, i])[0]
# Loop through each agent who lived and died in this index
for k in range(birth_t.size - 1): # Last birth event has no death, so ignore
# Get lifespan of this agent and circumstances at birth
t0 = birth_t[k]
t1 = birth_t[k + 1]
span = t1 - t0
j = MrkvHist[t0]
# Calculate discounted flow of utility for this agent and store it
cVec = cLvlHist[t0:t1, i] / PlvlHist[t0]
uVec = u(cVec)
v = np.dot(DiscVec[:span], uVec)
vArray[j, n[j]] = v
n[j] += 1 # depends on [control=['for'], data=['k']] # depends on [control=['for'], data=['i']]
# Calculate expected value at birth by state and return it
vAtBirth = np.nanmean(vArray, axis=1)
return vAtBirth
|
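
The core step inside the agent loop above is a discounted-utility dot product, v = sum_t DiscFac**t * u(c_t); the sketch below reproduces that step for a single consumption path with a stand-in CRRA utility (the real `CRRAutility` helper lives elsewhere in the library).

import numpy as np

crra = lambda c, gam: c ** (1.0 - gam) / (1.0 - gam)  # stand-in for CRRAutility

DiscFac, CRRA = 0.96, 2.0
cVec = np.array([1.0, 1.1, 1.2])          # one agent's consumption, already deflated by PlvlHist[t0]
DiscVec = DiscFac ** np.arange(cVec.size)
v = np.dot(DiscVec, crra(cVec, CRRA))     # same dot product as in the loop above
print(round(v, 3))                        # about -2.641
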
def create_customer_gateway(vpn_connection_type, ip_address, bgp_asn,
customer_gateway_name=None, tags=None,
region=None, key=None, keyid=None, profile=None):
'''
Given a valid VPN connection type, a static IP address and a customer
gateway’s Border Gateway Protocol (BGP) Autonomous System Number,
create a customer gateway.
Returns the customer gateway id if the customer gateway was created and
returns False if the customer gateway was not created.
CLI Example:
.. code-block:: bash
salt myminion boto_vpc.create_customer_gateway 'ipsec.1', '12.1.2.3', 65534
'''
return _create_resource('customer_gateway', customer_gateway_name,
type=vpn_connection_type,
ip_address=ip_address, bgp_asn=bgp_asn,
tags=tags, region=region, key=key,
keyid=keyid, profile=profile)
|
def function[create_customer_gateway, parameter[vpn_connection_type, ip_address, bgp_asn, customer_gateway_name, tags, region, key, keyid, profile]]:
constant[
Given a valid VPN connection type, a static IP address and a customer
gateway’s Border Gateway Protocol (BGP) Autonomous System Number,
create a customer gateway.
Returns the customer gateway id if the customer gateway was created and
returns False if the customer gateway was not created.
CLI Example:
.. code-block:: bash
salt myminion boto_vpc.create_customer_gateway 'ipsec.1', '12.1.2.3', 65534
]
return[call[name[_create_resource], parameter[constant[customer_gateway], name[customer_gateway_name]]]]
|
keyword[def] identifier[create_customer_gateway] ( identifier[vpn_connection_type] , identifier[ip_address] , identifier[bgp_asn] ,
identifier[customer_gateway_name] = keyword[None] , identifier[tags] = keyword[None] ,
identifier[region] = keyword[None] , identifier[key] = keyword[None] , identifier[keyid] = keyword[None] , identifier[profile] = keyword[None] ):
literal[string]
keyword[return] identifier[_create_resource] ( literal[string] , identifier[customer_gateway_name] ,
identifier[type] = identifier[vpn_connection_type] ,
identifier[ip_address] = identifier[ip_address] , identifier[bgp_asn] = identifier[bgp_asn] ,
identifier[tags] = identifier[tags] , identifier[region] = identifier[region] , identifier[key] = identifier[key] ,
identifier[keyid] = identifier[keyid] , identifier[profile] = identifier[profile] )
|
def create_customer_gateway(vpn_connection_type, ip_address, bgp_asn, customer_gateway_name=None, tags=None, region=None, key=None, keyid=None, profile=None):
"""
Given a valid VPN connection type, a static IP address and a customer
gateway’s Border Gateway Protocol (BGP) Autonomous System Number,
create a customer gateway.
Returns the customer gateway id if the customer gateway was created and
returns False if the customer gateway was not created.
CLI Example:
.. code-block:: bash
salt myminion boto_vpc.create_customer_gateway 'ipsec.1', '12.1.2.3', 65534
"""
return _create_resource('customer_gateway', customer_gateway_name, type=vpn_connection_type, ip_address=ip_address, bgp_asn=bgp_asn, tags=tags, region=region, key=key, keyid=keyid, profile=profile)
|
def _pre_heat_deploy(self):
"""Setup before the Heat stack create or update has been done."""
clients = self.app.client_manager
compute_client = clients.compute
self.log.debug("Checking hypervisor stats")
if utils.check_hypervisor_stats(compute_client) is None:
raise exceptions.DeploymentError(
"Expected hypervisor stats not met")
return True
|
def function[_pre_heat_deploy, parameter[self]]:
constant[Setup before the Heat stack create or update has been done.]
variable[clients] assign[=] name[self].app.client_manager
variable[compute_client] assign[=] name[clients].compute
call[name[self].log.debug, parameter[constant[Checking hypervisor stats]]]
if compare[call[name[utils].check_hypervisor_stats, parameter[name[compute_client]]] is constant[None]] begin[:]
<ast.Raise object at 0x7da2054a7400>
return[constant[True]]
|
keyword[def] identifier[_pre_heat_deploy] ( identifier[self] ):
literal[string]
identifier[clients] = identifier[self] . identifier[app] . identifier[client_manager]
identifier[compute_client] = identifier[clients] . identifier[compute]
identifier[self] . identifier[log] . identifier[debug] ( literal[string] )
keyword[if] identifier[utils] . identifier[check_hypervisor_stats] ( identifier[compute_client] ) keyword[is] keyword[None] :
keyword[raise] identifier[exceptions] . identifier[DeploymentError] (
literal[string] )
keyword[return] keyword[True]
|
def _pre_heat_deploy(self):
"""Setup before the Heat stack create or update has been done."""
clients = self.app.client_manager
compute_client = clients.compute
self.log.debug('Checking hypervisor stats')
if utils.check_hypervisor_stats(compute_client) is None:
raise exceptions.DeploymentError('Expected hypervisor stats not met') # depends on [control=['if'], data=[]]
return True
|
def relabel_nodes(G, mapping, copy=True):
"""Relabel the nodes of the graph G.
Parameters
----------
G : graph
A NetworkX graph
mapping : dictionary
A dictionary with the old labels as keys and new labels as values.
A partial mapping is allowed.
copy : bool (optional, default=True)
If True return a copy, or if False relabel the nodes in place.
Examples
--------
>>> G=nx.path_graph(3) # nodes 0-1-2
>>> mapping={0:'a',1:'b',2:'c'}
>>> H=nx.relabel_nodes(G,mapping)
>>> print(sorted(H.nodes()))
['a', 'b', 'c']
>>> G=nx.path_graph(26) # nodes 0..25
>>> mapping=dict(zip(G.nodes(),"abcdefghijklmnopqrstuvwxyz"))
>>> H=nx.relabel_nodes(G,mapping) # nodes a..z
>>> mapping=dict(zip(G.nodes(),range(1,27)))
>>> G1=nx.relabel_nodes(G,mapping) # nodes 1..26
Partial in-place mapping:
>>> G=nx.path_graph(3) # nodes 0-1-2
>>> mapping={0:'a',1:'b'} # 0->'a' and 1->'b'
>>> G=nx.relabel_nodes(G,mapping, copy=False)
print(G.nodes())
[2, 'b', 'a']
Mapping as function:
>>> G=nx.path_graph(3)
>>> def mapping(x):
... return x**2
>>> H=nx.relabel_nodes(G,mapping)
>>> print(H.nodes())
[0, 1, 4]
Notes
-----
Only the nodes specified in the mapping will be relabeled.
The keyword setting copy=False modifies the graph in place.
This is not always possible if the mapping is circular.
In that case use copy=True.
See Also
--------
convert_node_labels_to_integers
"""
# you can pass a function f(old_label)->new_label
# but we'll just make a dictionary here regardless
if not hasattr(mapping, "__getitem__"):
m = dict((n, mapping(n)) for n in G)
else:
m = mapping
if copy:
return _relabel_copy(G, m)
else:
return _relabel_inplace(G, m)
|
def function[relabel_nodes, parameter[G, mapping, copy]]:
constant[Relabel the nodes of the graph G.
Parameters
----------
G : graph
A NetworkX graph
mapping : dictionary
A dictionary with the old labels as keys and new labels as values.
A partial mapping is allowed.
copy : bool (optional, default=True)
If True return a copy, or if False relabel the nodes in place.
Examples
--------
>>> G=nx.path_graph(3) # nodes 0-1-2
>>> mapping={0:'a',1:'b',2:'c'}
>>> H=nx.relabel_nodes(G,mapping)
>>> print(sorted(H.nodes()))
['a', 'b', 'c']
>>> G=nx.path_graph(26) # nodes 0..25
>>> mapping=dict(zip(G.nodes(),"abcdefghijklmnopqrstuvwxyz"))
>>> H=nx.relabel_nodes(G,mapping) # nodes a..z
>>> mapping=dict(zip(G.nodes(),range(1,27)))
>>> G1=nx.relabel_nodes(G,mapping) # nodes 1..26
Partial in-place mapping:
>>> G=nx.path_graph(3) # nodes 0-1-2
>>> mapping={0:'a',1:'b'} # 0->'a' and 1->'b'
>>> G=nx.relabel_nodes(G,mapping, copy=False)
print(G.nodes())
[2, 'b', 'a']
Mapping as function:
>>> G=nx.path_graph(3)
>>> def mapping(x):
... return x**2
>>> H=nx.relabel_nodes(G,mapping)
>>> print(H.nodes())
[0, 1, 4]
Notes
-----
Only the nodes specified in the mapping will be relabeled.
The keyword setting copy=False modifies the graph in place.
This is not always possible if the mapping is circular.
In that case use copy=True.
See Also
--------
convert_node_labels_to_integers
]
if <ast.UnaryOp object at 0x7da20c795ed0> begin[:]
variable[m] assign[=] call[name[dict], parameter[<ast.GeneratorExp object at 0x7da20c794220>]]
if name[copy] begin[:]
return[call[name[_relabel_copy], parameter[name[G], name[m]]]]
|
keyword[def] identifier[relabel_nodes] ( identifier[G] , identifier[mapping] , identifier[copy] = keyword[True] ):
literal[string]
keyword[if] keyword[not] identifier[hasattr] ( identifier[mapping] , literal[string] ):
identifier[m] = identifier[dict] (( identifier[n] , identifier[mapping] ( identifier[n] )) keyword[for] identifier[n] keyword[in] identifier[G] )
keyword[else] :
identifier[m] = identifier[mapping]
keyword[if] identifier[copy] :
keyword[return] identifier[_relabel_copy] ( identifier[G] , identifier[m] )
keyword[else] :
keyword[return] identifier[_relabel_inplace] ( identifier[G] , identifier[m] )
|
def relabel_nodes(G, mapping, copy=True):
"""Relabel the nodes of the graph G.
Parameters
----------
G : graph
A NetworkX graph
mapping : dictionary
A dictionary with the old labels as keys and new labels as values.
A partial mapping is allowed.
copy : bool (optional, default=True)
If True return a copy, or if False relabel the nodes in place.
Examples
--------
>>> G=nx.path_graph(3) # nodes 0-1-2
>>> mapping={0:'a',1:'b',2:'c'}
>>> H=nx.relabel_nodes(G,mapping)
>>> print(sorted(H.nodes()))
['a', 'b', 'c']
>>> G=nx.path_graph(26) # nodes 0..25
>>> mapping=dict(zip(G.nodes(),"abcdefghijklmnopqrstuvwxyz"))
>>> H=nx.relabel_nodes(G,mapping) # nodes a..z
>>> mapping=dict(zip(G.nodes(),range(1,27)))
>>> G1=nx.relabel_nodes(G,mapping) # nodes 1..26
Partial in-place mapping:
>>> G=nx.path_graph(3) # nodes 0-1-2
>>> mapping={0:'a',1:'b'} # 0->'a' and 1->'b'
>>> G=nx.relabel_nodes(G,mapping, copy=False)
print(G.nodes())
[2, 'b', 'a']
Mapping as function:
>>> G=nx.path_graph(3)
>>> def mapping(x):
... return x**2
>>> H=nx.relabel_nodes(G,mapping)
>>> print(H.nodes())
[0, 1, 4]
Notes
-----
Only the nodes specified in the mapping will be relabeled.
The keyword setting copy=False modifies the graph in place.
This is not always possible if the mapping is circular.
In that case use copy=True.
See Also
--------
convert_node_labels_to_integers
"""
# you can pass a function f(old_label)->new_label
# but we'll just make a dictionary here regardless
if not hasattr(mapping, '__getitem__'):
m = dict(((n, mapping(n)) for n in G)) # depends on [control=['if'], data=[]]
else:
m = mapping
if copy:
return _relabel_copy(G, m) # depends on [control=['if'], data=[]]
else:
return _relabel_inplace(G, m)
|
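The Notes above warn that an in-place relabel is not always possible when the mapping is circular. A minimal sketch of such a mapping, assuming networkx is importable, is:
# Circular mapping: every target label is also a source label, so the swap
# is only unambiguous when a new graph is built (copy=True).
import networkx as nx

G = nx.path_graph(3)         # nodes 0-1-2
swap = {0: 1, 1: 0}          # circular mapping
H = nx.relabel_nodes(G, swap, copy=True)
print(sorted(H.nodes()))     # [0, 1, 2] -- same labels, edges re-wired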
def preprocess_na(sent, label_type):
"""Preprocess Na sentences
Args:
sent: A sentence
label_type: The type of label provided
"""
if label_type == "phonemes_and_tones":
phonemes = True
tones = True
tgm = True
elif label_type == "phonemes_and_tones_no_tgm":
phonemes = True
tones = True
tgm = False
elif label_type == "phonemes":
phonemes = True
tones = False
tgm = False
elif label_type == "tones":
phonemes = False
tones = True
tgm = True
elif label_type == "tones_notgm":
phonemes = False
tones = True
tgm = False
else:
raise ValueError("Unrecognized label type: %s" % label_type)
def pop_phoneme(sentence):
"""Pop phonemes off a sentence one at a time"""
# TODO desperately needs refactoring
# Treating fillers as single tokens; normalizing to əəə and mmm
if phonemes:
if sentence[:4] in ["əəə…", "mmm…"]:
return sentence[:4], sentence[4:]
if sentence.startswith("ə…"):
return "əəə…", sentence[2:]
if sentence.startswith("m…"):
return "mmm…", sentence[2:]
if sentence.startswith("mm…"):
return "mmm…", sentence[3:]
# Normalizing some stuff
if sentence[:3] == "wæ̃":
if phonemes:
return "w̃æ", sentence[3:]
else:
return None, sentence[3:]
if sentence[:3] == "ṽ̩":
if phonemes:
return "ṽ̩", sentence[3:]
else:
return None, sentence[3:]
if sentence[:3] in TRI_PHNS:
if phonemes:
return sentence[:3], sentence[3:]
else:
return None, sentence[3:]
if sentence[:2] in BI_PHNS:
if phonemes:
return sentence[:2], sentence[2:]
else:
return None, sentence[2:]
if sentence[:2] == "˧̩":
return "˧", sentence[2:]
if sentence[:2] == "˧̍":
return "˧", sentence[2:]
if sentence[0] in UNI_PHNS:
if phonemes:
return sentence[0], sentence[1:]
else:
return None, sentence[1:]
if sentence[:2] in BI_TONES:
if tones:
return sentence[:2], sentence[2:]
else:
return None, sentence[2:]
if sentence[0] in UNI_TONES:
if tones:
return sentence[0], sentence[1:]
else:
return None, sentence[1:]
if sentence[0] in MISC_SYMBOLS:
# We assume these symbols cannot be captured.
return None, sentence[1:]
if sentence[0] in BAD_NA_SYMBOLS:
return None, sentence[1:]
if sentence[0] in PUNC_SYMBOLS:
return None, sentence[1:]
if sentence[0] in ["-", "ʰ", "/"]:
return None, sentence[1:]
if sentence[0] in set(["<", ">"]):
# We keep everything literal, thus including what is in <>
            # brackets; so we just remove these tokens.
return None, sentence[1:]
if sentence[0] == "[":
# It's an opening square bracket, so ignore everything until we
# find a closing one.
if sentence.find("]") == len(sentence)-1:
# If the closing bracket is the last char
return None, ""
else:
return None, sentence[sentence.find("]")+1:]
if sentence[0] in set([" ", "\t", "\n"]):
# Return a space char so that it can be identified in word segmentation
# processing.
return " ", sentence[1:]
if sentence[0] == "|" or sentence[0] == "ǀ" or sentence[0] == "◊":
# TODO Address extrametrical span symbol ◊ differently. For now,
# treating it as a tone group boundary marker for consistency with
# previous work.
if tgm:
return "|", sentence[1:]
else:
return None, sentence[1:]
if sentence[0] in "()":
return None, sentence[1:]
print("***" + sentence)
raise ValueError("Next character not recognized: " + sentence[:1])
def filter_for_phonemes(sentence):
""" Returns a sequence of phonemes and pipes (word delimiters). Tones,
syllable boundaries, whitespace are all removed."""
filtered_sentence = []
while sentence != "":
phoneme, sentence = pop_phoneme(sentence)
if phoneme != " ":
filtered_sentence.append(phoneme)
filtered_sentence = [item for item in filtered_sentence if item != None]
return " ".join(filtered_sentence)
# Filter utterances with certain words
if "BEGAIEMENT" in sent:
return ""
sent = filter_for_phonemes(sent)
return sent
|
def function[preprocess_na, parameter[sent, label_type]]:
constant[Preprocess Na sentences
Args:
sent: A sentence
label_type: The type of label provided
]
if compare[name[label_type] equal[==] constant[phonemes_and_tones]] begin[:]
variable[phonemes] assign[=] constant[True]
variable[tones] assign[=] constant[True]
variable[tgm] assign[=] constant[True]
def function[pop_phoneme, parameter[sentence]]:
constant[Pop phonemes off a sentence one at a time]
if name[phonemes] begin[:]
if compare[call[name[sentence]][<ast.Slice object at 0x7da1b11d7310>] in list[[<ast.Constant object at 0x7da1b11d4a60>, <ast.Constant object at 0x7da1b11d7070>]]] begin[:]
return[tuple[[<ast.Subscript object at 0x7da1b11d5720>, <ast.Subscript object at 0x7da1b11d6ef0>]]]
if call[name[sentence].startswith, parameter[constant[ə…]]] begin[:]
return[tuple[[<ast.Constant object at 0x7da1b11d5390>, <ast.Subscript object at 0x7da1b11d5fc0>]]]
if call[name[sentence].startswith, parameter[constant[m…]]] begin[:]
return[tuple[[<ast.Constant object at 0x7da1b11d7850>, <ast.Subscript object at 0x7da1b11d77f0>]]]
if call[name[sentence].startswith, parameter[constant[mm…]]] begin[:]
return[tuple[[<ast.Constant object at 0x7da1b11d62f0>, <ast.Subscript object at 0x7da1b11d72b0>]]]
if compare[call[name[sentence]][<ast.Slice object at 0x7da1b11d46a0>] equal[==] constant[wæ̃]] begin[:]
if name[phonemes] begin[:]
return[tuple[[<ast.Constant object at 0x7da1b11d5180>, <ast.Subscript object at 0x7da1b11d7820>]]]
if compare[call[name[sentence]][<ast.Slice object at 0x7da1b11d6a40>] equal[==] constant[ṽ̩]] begin[:]
if name[phonemes] begin[:]
return[tuple[[<ast.Constant object at 0x7da1b11d4730>, <ast.Subscript object at 0x7da1b11d5210>]]]
if compare[call[name[sentence]][<ast.Slice object at 0x7da1b11d4e80>] in name[TRI_PHNS]] begin[:]
if name[phonemes] begin[:]
return[tuple[[<ast.Subscript object at 0x7da1b11d5ed0>, <ast.Subscript object at 0x7da1b11d5870>]]]
if compare[call[name[sentence]][<ast.Slice object at 0x7da1b11d6020>] in name[BI_PHNS]] begin[:]
if name[phonemes] begin[:]
return[tuple[[<ast.Subscript object at 0x7da1b11d44f0>, <ast.Subscript object at 0x7da1b11d63b0>]]]
if compare[call[name[sentence]][<ast.Slice object at 0x7da1b11f8ac0>] equal[==] constant[˧̩]] begin[:]
return[tuple[[<ast.Constant object at 0x7da1b11fa710>, <ast.Subscript object at 0x7da1b11f9ff0>]]]
if compare[call[name[sentence]][<ast.Slice object at 0x7da1b11fa890>] equal[==] constant[˧̍]] begin[:]
return[tuple[[<ast.Constant object at 0x7da1b11fa680>, <ast.Subscript object at 0x7da1b11f98a0>]]]
if compare[call[name[sentence]][constant[0]] in name[UNI_PHNS]] begin[:]
if name[phonemes] begin[:]
return[tuple[[<ast.Subscript object at 0x7da1b11fb400>, <ast.Subscript object at 0x7da1b11f9210>]]]
if compare[call[name[sentence]][<ast.Slice object at 0x7da1b11fafe0>] in name[BI_TONES]] begin[:]
if name[tones] begin[:]
return[tuple[[<ast.Subscript object at 0x7da1b11f9f90>, <ast.Subscript object at 0x7da1b11fa110>]]]
if compare[call[name[sentence]][constant[0]] in name[UNI_TONES]] begin[:]
if name[tones] begin[:]
return[tuple[[<ast.Subscript object at 0x7da1b11f9000>, <ast.Subscript object at 0x7da1b11f8580>]]]
if compare[call[name[sentence]][constant[0]] in name[MISC_SYMBOLS]] begin[:]
return[tuple[[<ast.Constant object at 0x7da1b11fb940>, <ast.Subscript object at 0x7da1b11fbd60>]]]
if compare[call[name[sentence]][constant[0]] in name[BAD_NA_SYMBOLS]] begin[:]
return[tuple[[<ast.Constant object at 0x7da1b11fbaf0>, <ast.Subscript object at 0x7da1b11fa8f0>]]]
if compare[call[name[sentence]][constant[0]] in name[PUNC_SYMBOLS]] begin[:]
return[tuple[[<ast.Constant object at 0x7da1b11f8a90>, <ast.Subscript object at 0x7da1b11f8670>]]]
if compare[call[name[sentence]][constant[0]] in list[[<ast.Constant object at 0x7da1b11faef0>, <ast.Constant object at 0x7da1b11fb010>, <ast.Constant object at 0x7da1b11fafb0>]]] begin[:]
return[tuple[[<ast.Constant object at 0x7da1b11f8160>, <ast.Subscript object at 0x7da1b11fb040>]]]
if compare[call[name[sentence]][constant[0]] in call[name[set], parameter[list[[<ast.Constant object at 0x7da1b11f8280>, <ast.Constant object at 0x7da1b11f86d0>]]]]] begin[:]
return[tuple[[<ast.Constant object at 0x7da1b11fae00>, <ast.Subscript object at 0x7da1b11fb3a0>]]]
if compare[call[name[sentence]][constant[0]] equal[==] constant[[]] begin[:]
if compare[call[name[sentence].find, parameter[constant[]]]] equal[==] binary_operation[call[name[len], parameter[name[sentence]]] - constant[1]]] begin[:]
return[tuple[[<ast.Constant object at 0x7da1b11fa4d0>, <ast.Constant object at 0x7da1b11fa740>]]]
if compare[call[name[sentence]][constant[0]] in call[name[set], parameter[list[[<ast.Constant object at 0x7da1b11fab90>, <ast.Constant object at 0x7da1b11f8d00>, <ast.Constant object at 0x7da1b11f8310>]]]]] begin[:]
return[tuple[[<ast.Constant object at 0x7da1b11f83d0>, <ast.Subscript object at 0x7da1b11f8490>]]]
if <ast.BoolOp object at 0x7da1b11fb5e0> begin[:]
if name[tgm] begin[:]
return[tuple[[<ast.Constant object at 0x7da1b11bdc30>, <ast.Subscript object at 0x7da1b11bcd00>]]]
if compare[call[name[sentence]][constant[0]] in constant[()]] begin[:]
return[tuple[[<ast.Constant object at 0x7da1b11be7d0>, <ast.Subscript object at 0x7da1b11bd240>]]]
call[name[print], parameter[binary_operation[constant[***] + name[sentence]]]]
<ast.Raise object at 0x7da1b11bc2e0>
def function[filter_for_phonemes, parameter[sentence]]:
constant[ Returns a sequence of phonemes and pipes (word delimiters). Tones,
syllable boundaries, whitespace are all removed.]
variable[filtered_sentence] assign[=] list[[]]
while compare[name[sentence] not_equal[!=] constant[]] begin[:]
<ast.Tuple object at 0x7da1b11be4d0> assign[=] call[name[pop_phoneme], parameter[name[sentence]]]
if compare[name[phoneme] not_equal[!=] constant[ ]] begin[:]
call[name[filtered_sentence].append, parameter[name[phoneme]]]
variable[filtered_sentence] assign[=] <ast.ListComp object at 0x7da1b11be860>
return[call[constant[ ].join, parameter[name[filtered_sentence]]]]
if compare[constant[BEGAIEMENT] in name[sent]] begin[:]
return[constant[]]
variable[sent] assign[=] call[name[filter_for_phonemes], parameter[name[sent]]]
return[name[sent]]
|
keyword[def] identifier[preprocess_na] ( identifier[sent] , identifier[label_type] ):
literal[string]
keyword[if] identifier[label_type] == literal[string] :
identifier[phonemes] = keyword[True]
identifier[tones] = keyword[True]
identifier[tgm] = keyword[True]
keyword[elif] identifier[label_type] == literal[string] :
identifier[phonemes] = keyword[True]
identifier[tones] = keyword[True]
identifier[tgm] = keyword[False]
keyword[elif] identifier[label_type] == literal[string] :
identifier[phonemes] = keyword[True]
identifier[tones] = keyword[False]
identifier[tgm] = keyword[False]
keyword[elif] identifier[label_type] == literal[string] :
identifier[phonemes] = keyword[False]
identifier[tones] = keyword[True]
identifier[tgm] = keyword[True]
keyword[elif] identifier[label_type] == literal[string] :
identifier[phonemes] = keyword[False]
identifier[tones] = keyword[True]
identifier[tgm] = keyword[False]
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] % identifier[label_type] )
keyword[def] identifier[pop_phoneme] ( identifier[sentence] ):
literal[string]
keyword[if] identifier[phonemes] :
keyword[if] identifier[sentence] [: literal[int] ] keyword[in] [ literal[string] , literal[string] ]:
keyword[return] identifier[sentence] [: literal[int] ], identifier[sentence] [ literal[int] :]
keyword[if] identifier[sentence] . identifier[startswith] ( literal[string] ):
keyword[return] literal[string] , identifier[sentence] [ literal[int] :]
keyword[if] identifier[sentence] . identifier[startswith] ( literal[string] ):
keyword[return] literal[string] , identifier[sentence] [ literal[int] :]
keyword[if] identifier[sentence] . identifier[startswith] ( literal[string] ):
keyword[return] literal[string] , identifier[sentence] [ literal[int] :]
keyword[if] identifier[sentence] [: literal[int] ]== literal[string] :
keyword[if] identifier[phonemes] :
keyword[return] literal[string] , identifier[sentence] [ literal[int] :]
keyword[else] :
keyword[return] keyword[None] , identifier[sentence] [ literal[int] :]
keyword[if] identifier[sentence] [: literal[int] ]== literal[string] :
keyword[if] identifier[phonemes] :
keyword[return] literal[string] , identifier[sentence] [ literal[int] :]
keyword[else] :
keyword[return] keyword[None] , identifier[sentence] [ literal[int] :]
keyword[if] identifier[sentence] [: literal[int] ] keyword[in] identifier[TRI_PHNS] :
keyword[if] identifier[phonemes] :
keyword[return] identifier[sentence] [: literal[int] ], identifier[sentence] [ literal[int] :]
keyword[else] :
keyword[return] keyword[None] , identifier[sentence] [ literal[int] :]
keyword[if] identifier[sentence] [: literal[int] ] keyword[in] identifier[BI_PHNS] :
keyword[if] identifier[phonemes] :
keyword[return] identifier[sentence] [: literal[int] ], identifier[sentence] [ literal[int] :]
keyword[else] :
keyword[return] keyword[None] , identifier[sentence] [ literal[int] :]
keyword[if] identifier[sentence] [: literal[int] ]== literal[string] :
keyword[return] literal[string] , identifier[sentence] [ literal[int] :]
keyword[if] identifier[sentence] [: literal[int] ]== literal[string] :
keyword[return] literal[string] , identifier[sentence] [ literal[int] :]
keyword[if] identifier[sentence] [ literal[int] ] keyword[in] identifier[UNI_PHNS] :
keyword[if] identifier[phonemes] :
keyword[return] identifier[sentence] [ literal[int] ], identifier[sentence] [ literal[int] :]
keyword[else] :
keyword[return] keyword[None] , identifier[sentence] [ literal[int] :]
keyword[if] identifier[sentence] [: literal[int] ] keyword[in] identifier[BI_TONES] :
keyword[if] identifier[tones] :
keyword[return] identifier[sentence] [: literal[int] ], identifier[sentence] [ literal[int] :]
keyword[else] :
keyword[return] keyword[None] , identifier[sentence] [ literal[int] :]
keyword[if] identifier[sentence] [ literal[int] ] keyword[in] identifier[UNI_TONES] :
keyword[if] identifier[tones] :
keyword[return] identifier[sentence] [ literal[int] ], identifier[sentence] [ literal[int] :]
keyword[else] :
keyword[return] keyword[None] , identifier[sentence] [ literal[int] :]
keyword[if] identifier[sentence] [ literal[int] ] keyword[in] identifier[MISC_SYMBOLS] :
keyword[return] keyword[None] , identifier[sentence] [ literal[int] :]
keyword[if] identifier[sentence] [ literal[int] ] keyword[in] identifier[BAD_NA_SYMBOLS] :
keyword[return] keyword[None] , identifier[sentence] [ literal[int] :]
keyword[if] identifier[sentence] [ literal[int] ] keyword[in] identifier[PUNC_SYMBOLS] :
keyword[return] keyword[None] , identifier[sentence] [ literal[int] :]
keyword[if] identifier[sentence] [ literal[int] ] keyword[in] [ literal[string] , literal[string] , literal[string] ]:
keyword[return] keyword[None] , identifier[sentence] [ literal[int] :]
keyword[if] identifier[sentence] [ literal[int] ] keyword[in] identifier[set] ([ literal[string] , literal[string] ]):
keyword[return] keyword[None] , identifier[sentence] [ literal[int] :]
keyword[if] identifier[sentence] [ literal[int] ]== literal[string] :
keyword[if] identifier[sentence] . identifier[find] ( literal[string] )== identifier[len] ( identifier[sentence] )- literal[int] :
keyword[return] keyword[None] , literal[string]
keyword[else] :
keyword[return] keyword[None] , identifier[sentence] [ identifier[sentence] . identifier[find] ( literal[string] )+ literal[int] :]
keyword[if] identifier[sentence] [ literal[int] ] keyword[in] identifier[set] ([ literal[string] , literal[string] , literal[string] ]):
keyword[return] literal[string] , identifier[sentence] [ literal[int] :]
keyword[if] identifier[sentence] [ literal[int] ]== literal[string] keyword[or] identifier[sentence] [ literal[int] ]== literal[string] keyword[or] identifier[sentence] [ literal[int] ]== literal[string] :
keyword[if] identifier[tgm] :
keyword[return] literal[string] , identifier[sentence] [ literal[int] :]
keyword[else] :
keyword[return] keyword[None] , identifier[sentence] [ literal[int] :]
keyword[if] identifier[sentence] [ literal[int] ] keyword[in] literal[string] :
keyword[return] keyword[None] , identifier[sentence] [ literal[int] :]
identifier[print] ( literal[string] + identifier[sentence] )
keyword[raise] identifier[ValueError] ( literal[string] + identifier[sentence] [: literal[int] ])
keyword[def] identifier[filter_for_phonemes] ( identifier[sentence] ):
literal[string]
identifier[filtered_sentence] =[]
keyword[while] identifier[sentence] != literal[string] :
identifier[phoneme] , identifier[sentence] = identifier[pop_phoneme] ( identifier[sentence] )
keyword[if] identifier[phoneme] != literal[string] :
identifier[filtered_sentence] . identifier[append] ( identifier[phoneme] )
identifier[filtered_sentence] =[ identifier[item] keyword[for] identifier[item] keyword[in] identifier[filtered_sentence] keyword[if] identifier[item] != keyword[None] ]
keyword[return] literal[string] . identifier[join] ( identifier[filtered_sentence] )
keyword[if] literal[string] keyword[in] identifier[sent] :
keyword[return] literal[string]
identifier[sent] = identifier[filter_for_phonemes] ( identifier[sent] )
keyword[return] identifier[sent]
|
def preprocess_na(sent, label_type):
"""Preprocess Na sentences
Args:
sent: A sentence
label_type: The type of label provided
"""
if label_type == 'phonemes_and_tones':
phonemes = True
tones = True
tgm = True # depends on [control=['if'], data=[]]
elif label_type == 'phonemes_and_tones_no_tgm':
phonemes = True
tones = True
tgm = False # depends on [control=['if'], data=[]]
elif label_type == 'phonemes':
phonemes = True
tones = False
tgm = False # depends on [control=['if'], data=[]]
elif label_type == 'tones':
phonemes = False
tones = True
tgm = True # depends on [control=['if'], data=[]]
elif label_type == 'tones_notgm':
phonemes = False
tones = True
tgm = False # depends on [control=['if'], data=[]]
else:
raise ValueError('Unrecognized label type: %s' % label_type)
def pop_phoneme(sentence):
"""Pop phonemes off a sentence one at a time"""
# TODO desperately needs refactoring
# Treating fillers as single tokens; normalizing to əəə and mmm
if phonemes:
if sentence[:4] in ['əəə…', 'mmm…']:
return (sentence[:4], sentence[4:]) # depends on [control=['if'], data=[]]
if sentence.startswith('ə…'):
return ('əəə…', sentence[2:]) # depends on [control=['if'], data=[]]
if sentence.startswith('m…'):
return ('mmm…', sentence[2:]) # depends on [control=['if'], data=[]]
if sentence.startswith('mm…'):
return ('mmm…', sentence[3:]) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# Normalizing some stuff
if sentence[:3] == 'wæ̃':
if phonemes:
return ('w̃æ', sentence[3:]) # depends on [control=['if'], data=[]]
else:
return (None, sentence[3:]) # depends on [control=['if'], data=[]]
if sentence[:3] == 'ṽ̩':
if phonemes:
return ('ṽ̩', sentence[3:]) # depends on [control=['if'], data=[]]
else:
return (None, sentence[3:]) # depends on [control=['if'], data=[]]
if sentence[:3] in TRI_PHNS:
if phonemes:
return (sentence[:3], sentence[3:]) # depends on [control=['if'], data=[]]
else:
return (None, sentence[3:]) # depends on [control=['if'], data=[]]
if sentence[:2] in BI_PHNS:
if phonemes:
return (sentence[:2], sentence[2:]) # depends on [control=['if'], data=[]]
else:
return (None, sentence[2:]) # depends on [control=['if'], data=[]]
if sentence[:2] == '˧̩':
return ('˧', sentence[2:]) # depends on [control=['if'], data=[]]
if sentence[:2] == '˧̍':
return ('˧', sentence[2:]) # depends on [control=['if'], data=[]]
if sentence[0] in UNI_PHNS:
if phonemes:
return (sentence[0], sentence[1:]) # depends on [control=['if'], data=[]]
else:
return (None, sentence[1:]) # depends on [control=['if'], data=[]]
if sentence[:2] in BI_TONES:
if tones:
return (sentence[:2], sentence[2:]) # depends on [control=['if'], data=[]]
else:
return (None, sentence[2:]) # depends on [control=['if'], data=[]]
if sentence[0] in UNI_TONES:
if tones:
return (sentence[0], sentence[1:]) # depends on [control=['if'], data=[]]
else:
return (None, sentence[1:]) # depends on [control=['if'], data=[]]
if sentence[0] in MISC_SYMBOLS:
# We assume these symbols cannot be captured.
return (None, sentence[1:]) # depends on [control=['if'], data=[]]
if sentence[0] in BAD_NA_SYMBOLS:
return (None, sentence[1:]) # depends on [control=['if'], data=[]]
if sentence[0] in PUNC_SYMBOLS:
return (None, sentence[1:]) # depends on [control=['if'], data=[]]
if sentence[0] in ['-', 'ʰ', '/']:
return (None, sentence[1:]) # depends on [control=['if'], data=[]]
if sentence[0] in set(['<', '>']):
# We keep everything literal, thus including what is in <>
            # brackets; so we just remove these tokens.
return (None, sentence[1:]) # depends on [control=['if'], data=[]]
if sentence[0] == '[':
# It's an opening square bracket, so ignore everything until we
# find a closing one.
if sentence.find(']') == len(sentence) - 1:
# If the closing bracket is the last char
return (None, '') # depends on [control=['if'], data=[]]
else:
return (None, sentence[sentence.find(']') + 1:]) # depends on [control=['if'], data=[]]
if sentence[0] in set([' ', '\t', '\n']):
# Return a space char so that it can be identified in word segmentation
# processing.
return (' ', sentence[1:]) # depends on [control=['if'], data=[]]
if sentence[0] == '|' or sentence[0] == 'ǀ' or sentence[0] == '◊':
# TODO Address extrametrical span symbol ◊ differently. For now,
# treating it as a tone group boundary marker for consistency with
# previous work.
if tgm:
return ('|', sentence[1:]) # depends on [control=['if'], data=[]]
else:
return (None, sentence[1:]) # depends on [control=['if'], data=[]]
if sentence[0] in '()':
return (None, sentence[1:]) # depends on [control=['if'], data=[]]
print('***' + sentence)
raise ValueError('Next character not recognized: ' + sentence[:1])
def filter_for_phonemes(sentence):
""" Returns a sequence of phonemes and pipes (word delimiters). Tones,
syllable boundaries, whitespace are all removed."""
filtered_sentence = []
while sentence != '':
(phoneme, sentence) = pop_phoneme(sentence)
if phoneme != ' ':
filtered_sentence.append(phoneme) # depends on [control=['if'], data=['phoneme']] # depends on [control=['while'], data=['sentence']]
filtered_sentence = [item for item in filtered_sentence if item != None]
return ' '.join(filtered_sentence)
# Filter utterances with certain words
if 'BEGAIEMENT' in sent:
return '' # depends on [control=['if'], data=[]]
sent = filter_for_phonemes(sent)
return sent
|
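As a quick reference, the if/elif chain at the top of preprocess_na reduces to a small lookup from label_type to the three boolean flags. The sketch below restates only that mapping; it is not a substitute for the phoneme-popping logic, which depends on module constants (UNI_PHNS, BI_TONES, ...) not shown in this record.
# Minimal sketch of the label_type -> (phonemes, tones, tgm) selection.
LABEL_FLAGS = {
    'phonemes_and_tones':        (True,  True,  True),
    'phonemes_and_tones_no_tgm': (True,  True,  False),
    'phonemes':                  (True,  False, False),
    'tones':                     (False, True,  True),
    'tones_notgm':               (False, True,  False),
}

def flags_for(label_type):
    """Return (phonemes, tones, tgm) for a label type, raising on unknown values."""
    try:
        return LABEL_FLAGS[label_type]
    except KeyError:
        raise ValueError('Unrecognized label type: %s' % label_type)

print(flags_for('tones_notgm'))  # (False, True, False)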
def _check_jwt_claims(jwt_claims):
"""Checks whether the JWT claims should be accepted.
Specifically, this method checks the "exp" claim and the "nbf" claim (if
present), and raises UnauthenticatedException if 1) the current time is
before the time identified by the "nbf" claim, or 2) the current time is
equal to or after the time identified by the "exp" claim.
Args:
      jwt_claims: the JWT claims whose expiration is to be checked.
Raises:
UnauthenticatedException: When the "exp" claim is malformed or the JWT has
already expired.
"""
current_time = time.time()
expiration = jwt_claims[u"exp"]
if not isinstance(expiration, INT_TYPES):
raise suppliers.UnauthenticatedException(u'Malformed claim: "exp" must be an integer')
if current_time >= expiration:
raise suppliers.UnauthenticatedException(u"The auth token has already expired")
if u"nbf" not in jwt_claims:
return
not_before_time = jwt_claims[u"nbf"]
if not isinstance(not_before_time, INT_TYPES):
raise suppliers.UnauthenticatedException(u'Malformed claim: "nbf" must be an integer')
if current_time < not_before_time:
raise suppliers.UnauthenticatedException(u'Current time is less than the "nbf" time')
|
def function[_check_jwt_claims, parameter[jwt_claims]]:
constant[Checks whether the JWT claims should be accepted.
Specifically, this method checks the "exp" claim and the "nbf" claim (if
present), and raises UnauthenticatedException if 1) the current time is
before the time identified by the "nbf" claim, or 2) the current time is
equal to or after the time identified by the "exp" claim.
Args:
      jwt_claims: the JWT claims whose expiration is to be checked.
Raises:
UnauthenticatedException: When the "exp" claim is malformed or the JWT has
already expired.
]
variable[current_time] assign[=] call[name[time].time, parameter[]]
variable[expiration] assign[=] call[name[jwt_claims]][constant[exp]]
if <ast.UnaryOp object at 0x7da1b0472f80> begin[:]
<ast.Raise object at 0x7da1b0470c40>
if compare[name[current_time] greater_or_equal[>=] name[expiration]] begin[:]
<ast.Raise object at 0x7da1b0470640>
if compare[constant[nbf] <ast.NotIn object at 0x7da2590d7190> name[jwt_claims]] begin[:]
return[None]
variable[not_before_time] assign[=] call[name[jwt_claims]][constant[nbf]]
if <ast.UnaryOp object at 0x7da18bc730d0> begin[:]
<ast.Raise object at 0x7da18bc71810>
if compare[name[current_time] less[<] name[not_before_time]] begin[:]
<ast.Raise object at 0x7da18bc73d60>
|
keyword[def] identifier[_check_jwt_claims] ( identifier[jwt_claims] ):
literal[string]
identifier[current_time] = identifier[time] . identifier[time] ()
identifier[expiration] = identifier[jwt_claims] [ literal[string] ]
keyword[if] keyword[not] identifier[isinstance] ( identifier[expiration] , identifier[INT_TYPES] ):
keyword[raise] identifier[suppliers] . identifier[UnauthenticatedException] ( literal[string] )
keyword[if] identifier[current_time] >= identifier[expiration] :
keyword[raise] identifier[suppliers] . identifier[UnauthenticatedException] ( literal[string] )
keyword[if] literal[string] keyword[not] keyword[in] identifier[jwt_claims] :
keyword[return]
identifier[not_before_time] = identifier[jwt_claims] [ literal[string] ]
keyword[if] keyword[not] identifier[isinstance] ( identifier[not_before_time] , identifier[INT_TYPES] ):
keyword[raise] identifier[suppliers] . identifier[UnauthenticatedException] ( literal[string] )
keyword[if] identifier[current_time] < identifier[not_before_time] :
keyword[raise] identifier[suppliers] . identifier[UnauthenticatedException] ( literal[string] )
|
def _check_jwt_claims(jwt_claims):
"""Checks whether the JWT claims should be accepted.
Specifically, this method checks the "exp" claim and the "nbf" claim (if
present), and raises UnauthenticatedException if 1) the current time is
before the time identified by the "nbf" claim, or 2) the current time is
equal to or after the time identified by the "exp" claim.
Args:
      jwt_claims: the JWT claims whose expiration is to be checked.
Raises:
UnauthenticatedException: When the "exp" claim is malformed or the JWT has
already expired.
"""
current_time = time.time()
expiration = jwt_claims[u'exp']
if not isinstance(expiration, INT_TYPES):
raise suppliers.UnauthenticatedException(u'Malformed claim: "exp" must be an integer') # depends on [control=['if'], data=[]]
if current_time >= expiration:
raise suppliers.UnauthenticatedException(u'The auth token has already expired') # depends on [control=['if'], data=[]]
if u'nbf' not in jwt_claims:
return # depends on [control=['if'], data=[]]
not_before_time = jwt_claims[u'nbf']
if not isinstance(not_before_time, INT_TYPES):
raise suppliers.UnauthenticatedException(u'Malformed claim: "nbf" must be an integer') # depends on [control=['if'], data=[]]
if current_time < not_before_time:
raise suppliers.UnauthenticatedException(u'Current time is less than the "nbf" time') # depends on [control=['if'], data=[]]
|
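A standalone sketch of the same expiry check follows, with the module-level names (suppliers.UnauthenticatedException, INT_TYPES) stubbed out as local assumptions so it can run on its own:
# Self-contained sketch of the "exp"/"nbf" checks above; the exception class
# and INT_TYPES are assumptions standing in for the real module attributes.
import time

INT_TYPES = (int,)  # assumption: integral types accepted for "exp"/"nbf"

class UnauthenticatedException(Exception):
    pass

def check_jwt_claims(jwt_claims):
    now = time.time()
    exp = jwt_claims['exp']
    if not isinstance(exp, INT_TYPES):
        raise UnauthenticatedException('Malformed claim: "exp" must be an integer')
    if now >= exp:
        raise UnauthenticatedException('The auth token has already expired')
    nbf = jwt_claims.get('nbf')
    if nbf is None:
        return
    if not isinstance(nbf, INT_TYPES):
        raise UnauthenticatedException('Malformed claim: "nbf" must be an integer')
    if now < nbf:
        raise UnauthenticatedException('Current time is less than the "nbf" time')

# A token that expired one hour ago is rejected:
try:
    check_jwt_claims({'exp': int(time.time()) - 3600})
except UnauthenticatedException as exc:
    print(exc)  # The auth token has already expired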
async def _analog_message(self, data):
"""
This is a private message handler method.
It is a message handler for analog messages.
:param data: message data
:returns: None - but saves the data in the pins structure
"""
pin = data[0]
value = (data[PrivateConstants.MSB] << 7) + data[PrivateConstants.LSB]
# if self.analog_pins[pin].current_value != value:
self.analog_pins[pin].current_value = value
# append pin number, pin value, and pin type to return value and return as a list
message = [pin, value, Constants.ANALOG]
if self.analog_pins[pin].cb:
if self.analog_pins[pin].cb_type:
await self.analog_pins[pin].cb(message)
else:
loop = self.loop
loop.call_soon(self.analog_pins[pin].cb, message)
# is there a latch entry for this pin?
key = 'A' + str(pin)
if key in self.latch_map:
await self._check_latch_data(key, message[1])
|
<ast.AsyncFunctionDef object at 0x7da20c76d0f0>
|
keyword[async] keyword[def] identifier[_analog_message] ( identifier[self] , identifier[data] ):
literal[string]
identifier[pin] = identifier[data] [ literal[int] ]
identifier[value] =( identifier[data] [ identifier[PrivateConstants] . identifier[MSB] ]<< literal[int] )+ identifier[data] [ identifier[PrivateConstants] . identifier[LSB] ]
identifier[self] . identifier[analog_pins] [ identifier[pin] ]. identifier[current_value] = identifier[value]
identifier[message] =[ identifier[pin] , identifier[value] , identifier[Constants] . identifier[ANALOG] ]
keyword[if] identifier[self] . identifier[analog_pins] [ identifier[pin] ]. identifier[cb] :
keyword[if] identifier[self] . identifier[analog_pins] [ identifier[pin] ]. identifier[cb_type] :
keyword[await] identifier[self] . identifier[analog_pins] [ identifier[pin] ]. identifier[cb] ( identifier[message] )
keyword[else] :
identifier[loop] = identifier[self] . identifier[loop]
identifier[loop] . identifier[call_soon] ( identifier[self] . identifier[analog_pins] [ identifier[pin] ]. identifier[cb] , identifier[message] )
identifier[key] = literal[string] + identifier[str] ( identifier[pin] )
keyword[if] identifier[key] keyword[in] identifier[self] . identifier[latch_map] :
keyword[await] identifier[self] . identifier[_check_latch_data] ( identifier[key] , identifier[message] [ literal[int] ])
|
async def _analog_message(self, data):
"""
This is a private message handler method.
It is a message handler for analog messages.
:param data: message data
:returns: None - but saves the data in the pins structure
"""
pin = data[0]
value = (data[PrivateConstants.MSB] << 7) + data[PrivateConstants.LSB]
# if self.analog_pins[pin].current_value != value:
self.analog_pins[pin].current_value = value
# append pin number, pin value, and pin type to return value and return as a list
message = [pin, value, Constants.ANALOG]
if self.analog_pins[pin].cb:
if self.analog_pins[pin].cb_type:
await self.analog_pins[pin].cb(message) # depends on [control=['if'], data=[]]
else:
loop = self.loop
loop.call_soon(self.analog_pins[pin].cb, message) # depends on [control=['if'], data=[]]
# is there a latch entry for this pin?
key = 'A' + str(pin)
if key in self.latch_map:
await self._check_latch_data(key, message[1]) # depends on [control=['if'], data=['key']]
|
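The handler rebuilds a Firmata-style analog reading from two 7-bit bytes. A tiny self-contained sketch of just that arithmetic follows; the byte positions used here are assumptions standing in for PrivateConstants.MSB and PrivateConstants.LSB.
# Minimal sketch of the 14-bit value reconstruction done above.
MSB, LSB = 2, 1  # assumed byte positions within the message

def analog_value(data):
    """Combine two 7-bit Firmata bytes into one analog reading."""
    return (data[MSB] << 7) + data[LSB]

# pin 0, LSB 0x7F, MSB 0x03 -> (3 << 7) + 127 == 511
print(analog_value([0, 0x7F, 0x03]))  # 511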
def contains(self, key, counter_id):
"""
Return whether a counter_id is present for a given instance key.
If the key is not in the cache, raises a KeyError.
"""
with self._lock:
return counter_id in self._metadata[key]
|
def function[contains, parameter[self, key, counter_id]]:
constant[
Return whether a counter_id is present for a given instance key.
If the key is not in the cache, raises a KeyError.
]
with name[self]._lock begin[:]
return[compare[name[counter_id] in call[name[self]._metadata][name[key]]]]
|
keyword[def] identifier[contains] ( identifier[self] , identifier[key] , identifier[counter_id] ):
literal[string]
keyword[with] identifier[self] . identifier[_lock] :
keyword[return] identifier[counter_id] keyword[in] identifier[self] . identifier[_metadata] [ identifier[key] ]
|
def contains(self, key, counter_id):
"""
Return whether a counter_id is present for a given instance key.
If the key is not in the cache, raises a KeyError.
"""
with self._lock:
return counter_id in self._metadata[key] # depends on [control=['with'], data=[]]
|
def encrypt_python_item(item, crypto_config):
# type: (dynamodb_types.ITEM, CryptoConfig) -> dynamodb_types.ITEM
"""Encrypt a dictionary for DynamoDB.
>>> from dynamodb_encryption_sdk.encrypted.item import encrypt_python_item
>>> plaintext_item = {
... 'some': 'data',
... 'more': 5
... }
>>> encrypted_item = encrypt_python_item(
... item=plaintext_item,
... crypto_config=my_crypto_config
... )
.. note::
This handles human-friendly dictionaries and is for use with the boto3 DynamoDB service or table resource.
:param dict item: Plaintext dictionary
:param CryptoConfig crypto_config: Cryptographic configuration
:returns: Encrypted and signed dictionary
:rtype: dict
"""
ddb_item = dict_to_ddb(item)
encrypted_ddb_item = encrypt_dynamodb_item(ddb_item, crypto_config)
return ddb_to_dict(encrypted_ddb_item)
|
def function[encrypt_python_item, parameter[item, crypto_config]]:
constant[Encrypt a dictionary for DynamoDB.
>>> from dynamodb_encryption_sdk.encrypted.item import encrypt_python_item
>>> plaintext_item = {
... 'some': 'data',
... 'more': 5
... }
>>> encrypted_item = encrypt_python_item(
... item=plaintext_item,
... crypto_config=my_crypto_config
... )
.. note::
This handles human-friendly dictionaries and is for use with the boto3 DynamoDB service or table resource.
:param dict item: Plaintext dictionary
:param CryptoConfig crypto_config: Cryptographic configuration
:returns: Encrypted and signed dictionary
:rtype: dict
]
variable[ddb_item] assign[=] call[name[dict_to_ddb], parameter[name[item]]]
variable[encrypted_ddb_item] assign[=] call[name[encrypt_dynamodb_item], parameter[name[ddb_item], name[crypto_config]]]
return[call[name[ddb_to_dict], parameter[name[encrypted_ddb_item]]]]
|
keyword[def] identifier[encrypt_python_item] ( identifier[item] , identifier[crypto_config] ):
literal[string]
identifier[ddb_item] = identifier[dict_to_ddb] ( identifier[item] )
identifier[encrypted_ddb_item] = identifier[encrypt_dynamodb_item] ( identifier[ddb_item] , identifier[crypto_config] )
keyword[return] identifier[ddb_to_dict] ( identifier[encrypted_ddb_item] )
|
def encrypt_python_item(item, crypto_config):
# type: (dynamodb_types.ITEM, CryptoConfig) -> dynamodb_types.ITEM
"Encrypt a dictionary for DynamoDB.\n\n >>> from dynamodb_encryption_sdk.encrypted.item import encrypt_python_item\n >>> plaintext_item = {\n ... 'some': 'data',\n ... 'more': 5\n ... }\n >>> encrypted_item = encrypt_python_item(\n ... item=plaintext_item,\n ... crypto_config=my_crypto_config\n ... )\n\n .. note::\n\n This handles human-friendly dictionaries and is for use with the boto3 DynamoDB service or table resource.\n\n :param dict item: Plaintext dictionary\n :param CryptoConfig crypto_config: Cryptographic configuration\n :returns: Encrypted and signed dictionary\n :rtype: dict\n "
ddb_item = dict_to_ddb(item)
encrypted_ddb_item = encrypt_dynamodb_item(ddb_item, crypto_config)
return ddb_to_dict(encrypted_ddb_item)
|