code
stringlengths 75
104k
| code_sememe
stringlengths 47
309k
| token_type
stringlengths 215
214k
| code_dependency
stringlengths 75
155k
|
|---|---|---|---|
def quick_menu(self, banner, list_line_format, choice_list):
"""
Function to display a quick menu for user input
**Parameters:**
- **banner:** Text to display before menu
- **list_line_format:** Print'ing string with format spots for index + tuple values
- **choice_list:** List of tuple values that you want returned if selected (and printed)
**Returns:** Tuple that was selected.
"""
# Setup menu
invalid = True
menu_int = -1
# loop until valid
while invalid:
print(banner)
for item_index, item_value in enumerate(choice_list):
print(list_line_format.format(item_index + 1, *item_value))
menu_choice = compat_input("\nChoose a Number or (Q)uit: ")
if str(menu_choice).lower() in ['q']:
# exit
print("Exiting..")
# best effort logout
self._parent_class.get.logout()
sys.exit(0)
# verify number entered
try:
menu_int = int(menu_choice)
sanity = True
except ValueError:
# not a number
print("ERROR: ", menu_choice)
sanity = False
# validate number chosen
if sanity and 1 <= menu_int <= len(choice_list):
invalid = False
else:
print("Invalid input, needs to be between 1 and {0}.\n".format(len(choice_list)))
# return the choice_list tuple that matches the entry.
return choice_list[int(menu_int) - 1]
|
def function[quick_menu, parameter[self, banner, list_line_format, choice_list]]:
constant[
Function to display a quick menu for user input
**Parameters:**
- **banner:** Text to display before menu
- **list_line_format:** Print'ing string with format spots for index + tuple values
- **choice_list:** List of tuple values that you want returned if selected (and printed)
**Returns:** Tuple that was selected.
]
variable[invalid] assign[=] constant[True]
variable[menu_int] assign[=] <ast.UnaryOp object at 0x7da18dc985b0>
while name[invalid] begin[:]
call[name[print], parameter[name[banner]]]
for taget[tuple[[<ast.Name object at 0x7da18dc9b0a0>, <ast.Name object at 0x7da18dc99180>]]] in starred[call[name[enumerate], parameter[name[choice_list]]]] begin[:]
call[name[print], parameter[call[name[list_line_format].format, parameter[binary_operation[name[item_index] + constant[1]], <ast.Starred object at 0x7da18dc98ac0>]]]]
variable[menu_choice] assign[=] call[name[compat_input], parameter[constant[
Choose a Number or (Q)uit: ]]]
if compare[call[call[name[str], parameter[name[menu_choice]]].lower, parameter[]] in list[[<ast.Constant object at 0x7da18dc9bbb0>]]] begin[:]
call[name[print], parameter[constant[Exiting..]]]
call[name[self]._parent_class.get.logout, parameter[]]
call[name[sys].exit, parameter[constant[0]]]
<ast.Try object at 0x7da18dc9b250>
if <ast.BoolOp object at 0x7da18dc9ae00> begin[:]
variable[invalid] assign[=] constant[False]
return[call[name[choice_list]][binary_operation[call[name[int], parameter[name[menu_int]]] - constant[1]]]]
|
keyword[def] identifier[quick_menu] ( identifier[self] , identifier[banner] , identifier[list_line_format] , identifier[choice_list] ):
literal[string]
identifier[invalid] = keyword[True]
identifier[menu_int] =- literal[int]
keyword[while] identifier[invalid] :
identifier[print] ( identifier[banner] )
keyword[for] identifier[item_index] , identifier[item_value] keyword[in] identifier[enumerate] ( identifier[choice_list] ):
identifier[print] ( identifier[list_line_format] . identifier[format] ( identifier[item_index] + literal[int] ,* identifier[item_value] ))
identifier[menu_choice] = identifier[compat_input] ( literal[string] )
keyword[if] identifier[str] ( identifier[menu_choice] ). identifier[lower] () keyword[in] [ literal[string] ]:
identifier[print] ( literal[string] )
identifier[self] . identifier[_parent_class] . identifier[get] . identifier[logout] ()
identifier[sys] . identifier[exit] ( literal[int] )
keyword[try] :
identifier[menu_int] = identifier[int] ( identifier[menu_choice] )
identifier[sanity] = keyword[True]
keyword[except] identifier[ValueError] :
identifier[print] ( literal[string] , identifier[menu_choice] )
identifier[sanity] = keyword[False]
keyword[if] identifier[sanity] keyword[and] literal[int] <= identifier[menu_int] <= identifier[len] ( identifier[choice_list] ):
identifier[invalid] = keyword[False]
keyword[else] :
identifier[print] ( literal[string] . identifier[format] ( identifier[len] ( identifier[choice_list] )))
keyword[return] identifier[choice_list] [ identifier[int] ( identifier[menu_int] )- literal[int] ]
|
def quick_menu(self, banner, list_line_format, choice_list):
"""
Function to display a quick menu for user input
**Parameters:**
- **banner:** Text to display before menu
- **list_line_format:** Print'ing string with format spots for index + tuple values
- **choice_list:** List of tuple values that you want returned if selected (and printed)
**Returns:** Tuple that was selected.
"""
# Setup menu
invalid = True
menu_int = -1
# loop until valid
while invalid:
print(banner)
for (item_index, item_value) in enumerate(choice_list):
print(list_line_format.format(item_index + 1, *item_value)) # depends on [control=['for'], data=[]]
menu_choice = compat_input('\nChoose a Number or (Q)uit: ')
if str(menu_choice).lower() in ['q']:
# exit
print('Exiting..')
# best effort logout
self._parent_class.get.logout()
sys.exit(0) # depends on [control=['if'], data=[]]
# verify number entered
try:
menu_int = int(menu_choice)
sanity = True # depends on [control=['try'], data=[]]
except ValueError:
# not a number
print('ERROR: ', menu_choice)
sanity = False # depends on [control=['except'], data=[]]
# validate number chosen
if sanity and 1 <= menu_int <= len(choice_list):
invalid = False # depends on [control=['if'], data=[]]
else:
print('Invalid input, needs to be between 1 and {0}.\n'.format(len(choice_list))) # depends on [control=['while'], data=[]]
# return the choice_list tuple that matches the entry.
return choice_list[int(menu_int) - 1]
|
def validate(config):
'''
Validate the beacon configuration
'''
# Configuration for adb beacon should be a dictionary with states array
if not isinstance(config, list):
log.info('Configuration for adb beacon must be a list.')
return False, ('Configuration for adb beacon must be a list.')
_config = {}
list(map(_config.update, config))
if 'states' not in _config:
log.info('Configuration for adb beacon must include a states array.')
return False, ('Configuration for adb beacon must include a states array.')
else:
if not isinstance(_config['states'], list):
log.info('Configuration for adb beacon must include a states array.')
return False, ('Configuration for adb beacon must include a states array.')
else:
states = ['offline', 'bootloader', 'device', 'host',
'recovery', 'no permissions',
'sideload', 'unauthorized', 'unknown', 'missing']
if any(s not in states for s in _config['states']):
log.info('Need a one of the following adb '
'states: %s', ', '.join(states))
return False, ('Need a one of the following adb '
'states: {0}'.format(', '.join(states)))
return True, 'Valid beacon configuration'
|
def function[validate, parameter[config]]:
constant[
Validate the beacon configuration
]
if <ast.UnaryOp object at 0x7da1b21ea710> begin[:]
call[name[log].info, parameter[constant[Configuration for adb beacon must be a list.]]]
return[tuple[[<ast.Constant object at 0x7da1b21e8df0>, <ast.Constant object at 0x7da1b21ea9b0>]]]
variable[_config] assign[=] dictionary[[], []]
call[name[list], parameter[call[name[map], parameter[name[_config].update, name[config]]]]]
if compare[constant[states] <ast.NotIn object at 0x7da2590d7190> name[_config]] begin[:]
call[name[log].info, parameter[constant[Configuration for adb beacon must include a states array.]]]
return[tuple[[<ast.Constant object at 0x7da1b21eb6d0>, <ast.Constant object at 0x7da1b21e9360>]]]
return[tuple[[<ast.Constant object at 0x7da1b21e9300>, <ast.Constant object at 0x7da1b21e9600>]]]
|
keyword[def] identifier[validate] ( identifier[config] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[config] , identifier[list] ):
identifier[log] . identifier[info] ( literal[string] )
keyword[return] keyword[False] ,( literal[string] )
identifier[_config] ={}
identifier[list] ( identifier[map] ( identifier[_config] . identifier[update] , identifier[config] ))
keyword[if] literal[string] keyword[not] keyword[in] identifier[_config] :
identifier[log] . identifier[info] ( literal[string] )
keyword[return] keyword[False] ,( literal[string] )
keyword[else] :
keyword[if] keyword[not] identifier[isinstance] ( identifier[_config] [ literal[string] ], identifier[list] ):
identifier[log] . identifier[info] ( literal[string] )
keyword[return] keyword[False] ,( literal[string] )
keyword[else] :
identifier[states] =[ literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] , literal[string] ]
keyword[if] identifier[any] ( identifier[s] keyword[not] keyword[in] identifier[states] keyword[for] identifier[s] keyword[in] identifier[_config] [ literal[string] ]):
identifier[log] . identifier[info] ( literal[string]
literal[string] , literal[string] . identifier[join] ( identifier[states] ))
keyword[return] keyword[False] ,( literal[string]
literal[string] . identifier[format] ( literal[string] . identifier[join] ( identifier[states] )))
keyword[return] keyword[True] , literal[string]
|
def validate(config):
"""
Validate the beacon configuration
"""
# Configuration for adb beacon should be a dictionary with states array
if not isinstance(config, list):
log.info('Configuration for adb beacon must be a list.')
return (False, 'Configuration for adb beacon must be a list.') # depends on [control=['if'], data=[]]
_config = {}
list(map(_config.update, config))
if 'states' not in _config:
log.info('Configuration for adb beacon must include a states array.')
return (False, 'Configuration for adb beacon must include a states array.') # depends on [control=['if'], data=[]]
elif not isinstance(_config['states'], list):
log.info('Configuration for adb beacon must include a states array.')
return (False, 'Configuration for adb beacon must include a states array.') # depends on [control=['if'], data=[]]
else:
states = ['offline', 'bootloader', 'device', 'host', 'recovery', 'no permissions', 'sideload', 'unauthorized', 'unknown', 'missing']
if any((s not in states for s in _config['states'])):
log.info('Need a one of the following adb states: %s', ', '.join(states))
return (False, 'Need a one of the following adb states: {0}'.format(', '.join(states))) # depends on [control=['if'], data=[]]
return (True, 'Valid beacon configuration')
|
def add_permission(self, topic, label, account_ids, actions):
"""
Adds a statement to a topic's access control policy, granting
access for the specified AWS accounts to the specified actions.
:type topic: string
:param topic: The ARN of the topic.
:type label: string
:param label: A unique identifier for the new policy statement.
:type account_ids: list of strings
:param account_ids: The AWS account ids of the users who will be
give access to the specified actions.
:type actions: list of strings
:param actions: The actions you want to allow for each of the
specified principal(s).
"""
params = {'ContentType' : 'JSON',
'TopicArn' : topic,
'Label' : label}
self.build_list_params(params, account_ids, 'AWSAccountId')
self.build_list_params(params, actions, 'ActionName')
response = self.make_request('AddPermission', params, '/', 'GET')
body = response.read()
if response.status == 200:
return json.loads(body)
else:
boto.log.error('%s %s' % (response.status, response.reason))
boto.log.error('%s' % body)
raise self.ResponseError(response.status, response.reason, body)
|
def function[add_permission, parameter[self, topic, label, account_ids, actions]]:
constant[
Adds a statement to a topic's access control policy, granting
access for the specified AWS accounts to the specified actions.
:type topic: string
:param topic: The ARN of the topic.
:type label: string
:param label: A unique identifier for the new policy statement.
:type account_ids: list of strings
:param account_ids: The AWS account ids of the users who will be
give access to the specified actions.
:type actions: list of strings
:param actions: The actions you want to allow for each of the
specified principal(s).
]
variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da1b253fbe0>, <ast.Constant object at 0x7da1b253e680>, <ast.Constant object at 0x7da1b253d3f0>], [<ast.Constant object at 0x7da1b253c2b0>, <ast.Name object at 0x7da1b253d360>, <ast.Name object at 0x7da1b253ece0>]]
call[name[self].build_list_params, parameter[name[params], name[account_ids], constant[AWSAccountId]]]
call[name[self].build_list_params, parameter[name[params], name[actions], constant[ActionName]]]
variable[response] assign[=] call[name[self].make_request, parameter[constant[AddPermission], name[params], constant[/], constant[GET]]]
variable[body] assign[=] call[name[response].read, parameter[]]
if compare[name[response].status equal[==] constant[200]] begin[:]
return[call[name[json].loads, parameter[name[body]]]]
|
keyword[def] identifier[add_permission] ( identifier[self] , identifier[topic] , identifier[label] , identifier[account_ids] , identifier[actions] ):
literal[string]
identifier[params] ={ literal[string] : literal[string] ,
literal[string] : identifier[topic] ,
literal[string] : identifier[label] }
identifier[self] . identifier[build_list_params] ( identifier[params] , identifier[account_ids] , literal[string] )
identifier[self] . identifier[build_list_params] ( identifier[params] , identifier[actions] , literal[string] )
identifier[response] = identifier[self] . identifier[make_request] ( literal[string] , identifier[params] , literal[string] , literal[string] )
identifier[body] = identifier[response] . identifier[read] ()
keyword[if] identifier[response] . identifier[status] == literal[int] :
keyword[return] identifier[json] . identifier[loads] ( identifier[body] )
keyword[else] :
identifier[boto] . identifier[log] . identifier[error] ( literal[string] %( identifier[response] . identifier[status] , identifier[response] . identifier[reason] ))
identifier[boto] . identifier[log] . identifier[error] ( literal[string] % identifier[body] )
keyword[raise] identifier[self] . identifier[ResponseError] ( identifier[response] . identifier[status] , identifier[response] . identifier[reason] , identifier[body] )
|
def add_permission(self, topic, label, account_ids, actions):
"""
Adds a statement to a topic's access control policy, granting
access for the specified AWS accounts to the specified actions.
:type topic: string
:param topic: The ARN of the topic.
:type label: string
:param label: A unique identifier for the new policy statement.
:type account_ids: list of strings
:param account_ids: The AWS account ids of the users who will be
give access to the specified actions.
:type actions: list of strings
:param actions: The actions you want to allow for each of the
specified principal(s).
"""
params = {'ContentType': 'JSON', 'TopicArn': topic, 'Label': label}
self.build_list_params(params, account_ids, 'AWSAccountId')
self.build_list_params(params, actions, 'ActionName')
response = self.make_request('AddPermission', params, '/', 'GET')
body = response.read()
if response.status == 200:
return json.loads(body) # depends on [control=['if'], data=[]]
else:
boto.log.error('%s %s' % (response.status, response.reason))
boto.log.error('%s' % body)
raise self.ResponseError(response.status, response.reason, body)
|
def consumer(self, service, data, to_update, status):
"""
call the consumer and handle the data
:param service:
:param data:
:param to_update:
:param status:
:return: status
"""
# consumer - the service which uses the data
service_consumer = default_provider.get_service(str(service.consumer.name.name))
kwargs = {'user': service.user}
getattr(service_consumer, '__init__')(service.consumer.token, **kwargs)
instance = getattr(service_consumer, 'save_data')
# 2) for each one
for d in data:
d['userservice_id'] = service.consumer.id
# the consumer will save the data and return if success or not
status = instance(service.id, **d)
to_update = True
return to_update, status
|
def function[consumer, parameter[self, service, data, to_update, status]]:
constant[
call the consumer and handle the data
:param service:
:param data:
:param to_update:
:param status:
:return: status
]
variable[service_consumer] assign[=] call[name[default_provider].get_service, parameter[call[name[str], parameter[name[service].consumer.name.name]]]]
variable[kwargs] assign[=] dictionary[[<ast.Constant object at 0x7da20c6e4ac0>], [<ast.Attribute object at 0x7da20c6e62f0>]]
call[call[name[getattr], parameter[name[service_consumer], constant[__init__]]], parameter[name[service].consumer.token]]
variable[instance] assign[=] call[name[getattr], parameter[name[service_consumer], constant[save_data]]]
for taget[name[d]] in starred[name[data]] begin[:]
call[name[d]][constant[userservice_id]] assign[=] name[service].consumer.id
variable[status] assign[=] call[name[instance], parameter[name[service].id]]
variable[to_update] assign[=] constant[True]
return[tuple[[<ast.Name object at 0x7da204960cd0>, <ast.Name object at 0x7da2049617b0>]]]
|
keyword[def] identifier[consumer] ( identifier[self] , identifier[service] , identifier[data] , identifier[to_update] , identifier[status] ):
literal[string]
identifier[service_consumer] = identifier[default_provider] . identifier[get_service] ( identifier[str] ( identifier[service] . identifier[consumer] . identifier[name] . identifier[name] ))
identifier[kwargs] ={ literal[string] : identifier[service] . identifier[user] }
identifier[getattr] ( identifier[service_consumer] , literal[string] )( identifier[service] . identifier[consumer] . identifier[token] ,** identifier[kwargs] )
identifier[instance] = identifier[getattr] ( identifier[service_consumer] , literal[string] )
keyword[for] identifier[d] keyword[in] identifier[data] :
identifier[d] [ literal[string] ]= identifier[service] . identifier[consumer] . identifier[id]
identifier[status] = identifier[instance] ( identifier[service] . identifier[id] ,** identifier[d] )
identifier[to_update] = keyword[True]
keyword[return] identifier[to_update] , identifier[status]
|
def consumer(self, service, data, to_update, status):
"""
call the consumer and handle the data
:param service:
:param data:
:param to_update:
:param status:
:return: status
"""
# consumer - the service which uses the data
service_consumer = default_provider.get_service(str(service.consumer.name.name))
kwargs = {'user': service.user}
getattr(service_consumer, '__init__')(service.consumer.token, **kwargs)
instance = getattr(service_consumer, 'save_data')
# 2) for each one
for d in data:
d['userservice_id'] = service.consumer.id
# the consumer will save the data and return if success or not
status = instance(service.id, **d)
to_update = True # depends on [control=['for'], data=['d']]
return (to_update, status)
|
def _read_settings(self, retry=True):
"""Read the information from the Hottop.
Read the settings from the serial interface and convert them into a
human-readable format that can be shared back to the end-user. Reading
from the serial interface will occasionally produce strange results or
blank reads, so a retry process has been built into the function as a
recursive check.
:returns: dict
"""
if not self._conn.isOpen():
self._log.debug("Reopening connection")
self._conn.open()
self._conn.flushInput()
self._conn.flushOutput()
buffer = self._conn.read(36)
if len(buffer) != 36:
self._log.debug('Buffer length (%d) did not match 36' % len(buffer))
if self._conn.isOpen():
self._log.debug('Closing connection')
self._conn.close()
self._read_settings(retry=True)
try:
check = self._validate_checksum(buffer)
except:
check = False
if not check and (retry and self._retry_count <= 3):
if self._retry_count == 3:
self._log.error('Retry count reached on buffer check')
self._read_settings(retry=False)
else:
self._retry_count += 1
self._read_settings(retry=True)
try:
settings = dict()
settings['heater'] = hex2int(buffer[10])
settings['fan'] = hex2int(buffer[11])
settings['main_fan'] = hex2int(buffer[12])
et = hex2int(buffer[23] + buffer[24])
settings['environment_temp'] = celsius2fahrenheit(et)
bt = hex2int(buffer[25] + buffer[26])
settings['bean_temp'] = celsius2fahrenheit(bt)
settings['solenoid'] = hex2int(buffer[16])
settings['drum_motor'] = hex2int(buffer[17])
settings['cooling_motor'] = hex2int(buffer[18])
settings['chaff_tray'] = hex2int(buffer[19])
self._retry_count = 0
except Exception:
self._log.error("Pulled a cache configuration!")
settings = self._generate_config()
return settings
|
def function[_read_settings, parameter[self, retry]]:
constant[Read the information from the Hottop.
Read the settings from the serial interface and convert them into a
human-readable format that can be shared back to the end-user. Reading
from the serial interface will occasionally produce strange results or
blank reads, so a retry process has been built into the function as a
recursive check.
:returns: dict
]
if <ast.UnaryOp object at 0x7da18fe92470> begin[:]
call[name[self]._log.debug, parameter[constant[Reopening connection]]]
call[name[self]._conn.open, parameter[]]
call[name[self]._conn.flushInput, parameter[]]
call[name[self]._conn.flushOutput, parameter[]]
variable[buffer] assign[=] call[name[self]._conn.read, parameter[constant[36]]]
if compare[call[name[len], parameter[name[buffer]]] not_equal[!=] constant[36]] begin[:]
call[name[self]._log.debug, parameter[binary_operation[constant[Buffer length (%d) did not match 36] <ast.Mod object at 0x7da2590d6920> call[name[len], parameter[name[buffer]]]]]]
if call[name[self]._conn.isOpen, parameter[]] begin[:]
call[name[self]._log.debug, parameter[constant[Closing connection]]]
call[name[self]._conn.close, parameter[]]
call[name[self]._read_settings, parameter[]]
<ast.Try object at 0x7da18fe93c10>
if <ast.BoolOp object at 0x7da18fe914b0> begin[:]
if compare[name[self]._retry_count equal[==] constant[3]] begin[:]
call[name[self]._log.error, parameter[constant[Retry count reached on buffer check]]]
call[name[self]._read_settings, parameter[]]
<ast.Try object at 0x7da18fe90130>
return[name[settings]]
|
keyword[def] identifier[_read_settings] ( identifier[self] , identifier[retry] = keyword[True] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[_conn] . identifier[isOpen] ():
identifier[self] . identifier[_log] . identifier[debug] ( literal[string] )
identifier[self] . identifier[_conn] . identifier[open] ()
identifier[self] . identifier[_conn] . identifier[flushInput] ()
identifier[self] . identifier[_conn] . identifier[flushOutput] ()
identifier[buffer] = identifier[self] . identifier[_conn] . identifier[read] ( literal[int] )
keyword[if] identifier[len] ( identifier[buffer] )!= literal[int] :
identifier[self] . identifier[_log] . identifier[debug] ( literal[string] % identifier[len] ( identifier[buffer] ))
keyword[if] identifier[self] . identifier[_conn] . identifier[isOpen] ():
identifier[self] . identifier[_log] . identifier[debug] ( literal[string] )
identifier[self] . identifier[_conn] . identifier[close] ()
identifier[self] . identifier[_read_settings] ( identifier[retry] = keyword[True] )
keyword[try] :
identifier[check] = identifier[self] . identifier[_validate_checksum] ( identifier[buffer] )
keyword[except] :
identifier[check] = keyword[False]
keyword[if] keyword[not] identifier[check] keyword[and] ( identifier[retry] keyword[and] identifier[self] . identifier[_retry_count] <= literal[int] ):
keyword[if] identifier[self] . identifier[_retry_count] == literal[int] :
identifier[self] . identifier[_log] . identifier[error] ( literal[string] )
identifier[self] . identifier[_read_settings] ( identifier[retry] = keyword[False] )
keyword[else] :
identifier[self] . identifier[_retry_count] += literal[int]
identifier[self] . identifier[_read_settings] ( identifier[retry] = keyword[True] )
keyword[try] :
identifier[settings] = identifier[dict] ()
identifier[settings] [ literal[string] ]= identifier[hex2int] ( identifier[buffer] [ literal[int] ])
identifier[settings] [ literal[string] ]= identifier[hex2int] ( identifier[buffer] [ literal[int] ])
identifier[settings] [ literal[string] ]= identifier[hex2int] ( identifier[buffer] [ literal[int] ])
identifier[et] = identifier[hex2int] ( identifier[buffer] [ literal[int] ]+ identifier[buffer] [ literal[int] ])
identifier[settings] [ literal[string] ]= identifier[celsius2fahrenheit] ( identifier[et] )
identifier[bt] = identifier[hex2int] ( identifier[buffer] [ literal[int] ]+ identifier[buffer] [ literal[int] ])
identifier[settings] [ literal[string] ]= identifier[celsius2fahrenheit] ( identifier[bt] )
identifier[settings] [ literal[string] ]= identifier[hex2int] ( identifier[buffer] [ literal[int] ])
identifier[settings] [ literal[string] ]= identifier[hex2int] ( identifier[buffer] [ literal[int] ])
identifier[settings] [ literal[string] ]= identifier[hex2int] ( identifier[buffer] [ literal[int] ])
identifier[settings] [ literal[string] ]= identifier[hex2int] ( identifier[buffer] [ literal[int] ])
identifier[self] . identifier[_retry_count] = literal[int]
keyword[except] identifier[Exception] :
identifier[self] . identifier[_log] . identifier[error] ( literal[string] )
identifier[settings] = identifier[self] . identifier[_generate_config] ()
keyword[return] identifier[settings]
|
def _read_settings(self, retry=True):
"""Read the information from the Hottop.
Read the settings from the serial interface and convert them into a
human-readable format that can be shared back to the end-user. Reading
from the serial interface will occasionally produce strange results or
blank reads, so a retry process has been built into the function as a
recursive check.
:returns: dict
"""
if not self._conn.isOpen():
self._log.debug('Reopening connection')
self._conn.open() # depends on [control=['if'], data=[]]
self._conn.flushInput()
self._conn.flushOutput()
buffer = self._conn.read(36)
if len(buffer) != 36:
self._log.debug('Buffer length (%d) did not match 36' % len(buffer))
if self._conn.isOpen():
self._log.debug('Closing connection')
self._conn.close()
self._read_settings(retry=True) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
try:
check = self._validate_checksum(buffer) # depends on [control=['try'], data=[]]
except:
check = False # depends on [control=['except'], data=[]]
if not check and (retry and self._retry_count <= 3):
if self._retry_count == 3:
self._log.error('Retry count reached on buffer check')
self._read_settings(retry=False) # depends on [control=['if'], data=[]]
else:
self._retry_count += 1
self._read_settings(retry=True) # depends on [control=['if'], data=[]]
try:
settings = dict()
settings['heater'] = hex2int(buffer[10])
settings['fan'] = hex2int(buffer[11])
settings['main_fan'] = hex2int(buffer[12])
et = hex2int(buffer[23] + buffer[24])
settings['environment_temp'] = celsius2fahrenheit(et)
bt = hex2int(buffer[25] + buffer[26])
settings['bean_temp'] = celsius2fahrenheit(bt)
settings['solenoid'] = hex2int(buffer[16])
settings['drum_motor'] = hex2int(buffer[17])
settings['cooling_motor'] = hex2int(buffer[18])
settings['chaff_tray'] = hex2int(buffer[19])
self._retry_count = 0 # depends on [control=['try'], data=[]]
except Exception:
self._log.error('Pulled a cache configuration!')
settings = self._generate_config() # depends on [control=['except'], data=[]]
return settings
|
def _prepare_trans_tar(name, sls_opts, mods=None,
pillar=None, extra_filerefs=''):
'''
Prepares a self contained tarball that has the state
to be applied in the container
'''
chunks = _compile_state(sls_opts, mods)
# reuse it from salt.ssh, however this function should
# be somewhere else
refs = salt.client.ssh.state.lowstate_file_refs(chunks, extra_filerefs)
_mk_fileclient()
trans_tar = salt.client.ssh.state.prep_trans_tar(
__context__['cp.fileclient'],
chunks, refs, pillar, name)
return trans_tar
|
def function[_prepare_trans_tar, parameter[name, sls_opts, mods, pillar, extra_filerefs]]:
constant[
Prepares a self contained tarball that has the state
to be applied in the container
]
variable[chunks] assign[=] call[name[_compile_state], parameter[name[sls_opts], name[mods]]]
variable[refs] assign[=] call[name[salt].client.ssh.state.lowstate_file_refs, parameter[name[chunks], name[extra_filerefs]]]
call[name[_mk_fileclient], parameter[]]
variable[trans_tar] assign[=] call[name[salt].client.ssh.state.prep_trans_tar, parameter[call[name[__context__]][constant[cp.fileclient]], name[chunks], name[refs], name[pillar], name[name]]]
return[name[trans_tar]]
|
keyword[def] identifier[_prepare_trans_tar] ( identifier[name] , identifier[sls_opts] , identifier[mods] = keyword[None] ,
identifier[pillar] = keyword[None] , identifier[extra_filerefs] = literal[string] ):
literal[string]
identifier[chunks] = identifier[_compile_state] ( identifier[sls_opts] , identifier[mods] )
identifier[refs] = identifier[salt] . identifier[client] . identifier[ssh] . identifier[state] . identifier[lowstate_file_refs] ( identifier[chunks] , identifier[extra_filerefs] )
identifier[_mk_fileclient] ()
identifier[trans_tar] = identifier[salt] . identifier[client] . identifier[ssh] . identifier[state] . identifier[prep_trans_tar] (
identifier[__context__] [ literal[string] ],
identifier[chunks] , identifier[refs] , identifier[pillar] , identifier[name] )
keyword[return] identifier[trans_tar]
|
def _prepare_trans_tar(name, sls_opts, mods=None, pillar=None, extra_filerefs=''):
"""
Prepares a self contained tarball that has the state
to be applied in the container
"""
chunks = _compile_state(sls_opts, mods)
# reuse it from salt.ssh, however this function should
# be somewhere else
refs = salt.client.ssh.state.lowstate_file_refs(chunks, extra_filerefs)
_mk_fileclient()
trans_tar = salt.client.ssh.state.prep_trans_tar(__context__['cp.fileclient'], chunks, refs, pillar, name)
return trans_tar
|
def parse_filename(filename):
"""Parse media filename for metadata.
:param str filename: the name of media file
:returns: dict of metadata attributes found in filename
or None if no matching expression.
:rtype: dict
"""
_patterns = patterns.get_expressions()
result = {}
for cmatcher in _patterns:
match = cmatcher.match(filename)
if match:
namedgroups = match.groupdict().keys()
result['pattern'] = cmatcher.pattern
result['series_name'] = match.group('seriesname')
result['season_number'] = _get_season_no(match, namedgroups)
result['episode_numbers'] = _get_episodes(match, namedgroups)
break
else:
result = None
return result
|
def function[parse_filename, parameter[filename]]:
constant[Parse media filename for metadata.
:param str filename: the name of media file
:returns: dict of metadata attributes found in filename
or None if no matching expression.
:rtype: dict
]
variable[_patterns] assign[=] call[name[patterns].get_expressions, parameter[]]
variable[result] assign[=] dictionary[[], []]
for taget[name[cmatcher]] in starred[name[_patterns]] begin[:]
variable[match] assign[=] call[name[cmatcher].match, parameter[name[filename]]]
if name[match] begin[:]
variable[namedgroups] assign[=] call[call[name[match].groupdict, parameter[]].keys, parameter[]]
call[name[result]][constant[pattern]] assign[=] name[cmatcher].pattern
call[name[result]][constant[series_name]] assign[=] call[name[match].group, parameter[constant[seriesname]]]
call[name[result]][constant[season_number]] assign[=] call[name[_get_season_no], parameter[name[match], name[namedgroups]]]
call[name[result]][constant[episode_numbers]] assign[=] call[name[_get_episodes], parameter[name[match], name[namedgroups]]]
break
return[name[result]]
|
keyword[def] identifier[parse_filename] ( identifier[filename] ):
literal[string]
identifier[_patterns] = identifier[patterns] . identifier[get_expressions] ()
identifier[result] ={}
keyword[for] identifier[cmatcher] keyword[in] identifier[_patterns] :
identifier[match] = identifier[cmatcher] . identifier[match] ( identifier[filename] )
keyword[if] identifier[match] :
identifier[namedgroups] = identifier[match] . identifier[groupdict] (). identifier[keys] ()
identifier[result] [ literal[string] ]= identifier[cmatcher] . identifier[pattern]
identifier[result] [ literal[string] ]= identifier[match] . identifier[group] ( literal[string] )
identifier[result] [ literal[string] ]= identifier[_get_season_no] ( identifier[match] , identifier[namedgroups] )
identifier[result] [ literal[string] ]= identifier[_get_episodes] ( identifier[match] , identifier[namedgroups] )
keyword[break]
keyword[else] :
identifier[result] = keyword[None]
keyword[return] identifier[result]
|
def parse_filename(filename):
"""Parse media filename for metadata.
:param str filename: the name of media file
:returns: dict of metadata attributes found in filename
or None if no matching expression.
:rtype: dict
"""
_patterns = patterns.get_expressions()
result = {}
for cmatcher in _patterns:
match = cmatcher.match(filename)
if match:
namedgroups = match.groupdict().keys()
result['pattern'] = cmatcher.pattern
result['series_name'] = match.group('seriesname')
result['season_number'] = _get_season_no(match, namedgroups)
result['episode_numbers'] = _get_episodes(match, namedgroups)
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['cmatcher']]
else:
result = None
return result
|
def _y_axis(self):
"""Make the y axis: labels and guides"""
if not self._y_labels or not self.show_y_labels:
return
axis = self.svg.node(
self.nodes['plot'],
class_="axis y%s" % (' always_show' if self.show_y_guides else '')
)
if (0 not in [label[1] for label in self._y_labels]
and self.show_y_guides):
self.svg.node(
axis,
'path',
d='M%f %f h%f' % (
0, 0 if self.inverse_y_axis else self.view.height,
self.view.width
),
class_='line'
)
for label, position in self._y_labels:
if self.horizontal:
major = label in self._y_labels_major
else:
major = position in self._y_labels_major
if not (self.show_minor_y_labels or major):
continue
guides = self.svg.node(
axis,
class_='%sguides' %
('logarithmic ' if self.logarithmic else '')
)
x = -5
y = self.view.y(position)
if not y:
continue
if self.show_y_guides:
self.svg.node(
guides,
'path',
d='M%f %f h%f' % (0, y, self.view.width),
class_='%s%s%sline' % (
'axis ' if label == "0" else '', 'major '
if major else '', 'guide ' if position != 0 else ''
)
)
text = self.svg.node(
guides,
'text',
x=x,
y=y + .35 * self.style.label_font_size,
class_='major' if major else ''
)
text.text = label
if self.y_label_rotation:
text.attrib['transform'] = "rotate(%d %f %f)" % (
self.y_label_rotation, x, y
)
if 90 < self.y_label_rotation < 270:
text.attrib['class'] = ' '.join((
text.attrib['class']
and text.attrib['class'].split(' ') or []
) + ['backwards'])
self.svg.node(
guides,
'title',
).text = self._y_format(position)
if self._y_2nd_labels:
secondary_ax = self.svg.node(self.nodes['plot'], class_="axis y2")
for label, position in self._y_2nd_labels:
major = position in self._y_labels_major
if not (self.show_minor_y_labels or major):
continue
# it is needed, to have the same structure as primary axis
guides = self.svg.node(secondary_ax, class_='guides')
x = self.view.width + 5
y = self.view.y(position)
text = self.svg.node(
guides,
'text',
x=x,
y=y + .35 * self.style.label_font_size,
class_='major' if major else ''
)
text.text = label
if self.y_label_rotation:
text.attrib['transform'] = "rotate(%d %f %f)" % (
self.y_label_rotation, x, y
)
if 90 < self.y_label_rotation < 270:
text.attrib['class'] = ' '.join((
text.attrib['class']
and text.attrib['class'].split(' ') or []
) + ['backwards'])
|
def function[_y_axis, parameter[self]]:
constant[Make the y axis: labels and guides]
if <ast.BoolOp object at 0x7da2045649d0> begin[:]
return[None]
variable[axis] assign[=] call[name[self].svg.node, parameter[call[name[self].nodes][constant[plot]]]]
if <ast.BoolOp object at 0x7da204565750> begin[:]
call[name[self].svg.node, parameter[name[axis], constant[path]]]
for taget[tuple[[<ast.Name object at 0x7da204566ad0>, <ast.Name object at 0x7da204567c40>]]] in starred[name[self]._y_labels] begin[:]
if name[self].horizontal begin[:]
variable[major] assign[=] compare[name[label] in name[self]._y_labels_major]
if <ast.UnaryOp object at 0x7da204565240> begin[:]
continue
variable[guides] assign[=] call[name[self].svg.node, parameter[name[axis]]]
variable[x] assign[=] <ast.UnaryOp object at 0x7da2045664d0>
variable[y] assign[=] call[name[self].view.y, parameter[name[position]]]
if <ast.UnaryOp object at 0x7da204567e50> begin[:]
continue
if name[self].show_y_guides begin[:]
call[name[self].svg.node, parameter[name[guides], constant[path]]]
variable[text] assign[=] call[name[self].svg.node, parameter[name[guides], constant[text]]]
name[text].text assign[=] name[label]
if name[self].y_label_rotation begin[:]
call[name[text].attrib][constant[transform]] assign[=] binary_operation[constant[rotate(%d %f %f)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da204567190>, <ast.Name object at 0x7da204567dc0>, <ast.Name object at 0x7da204566110>]]]
if compare[constant[90] less[<] name[self].y_label_rotation] begin[:]
call[name[text].attrib][constant[class]] assign[=] call[constant[ ].join, parameter[binary_operation[<ast.BoolOp object at 0x7da204566e60> + list[[<ast.Constant object at 0x7da204567520>]]]]]
call[name[self].svg.node, parameter[name[guides], constant[title]]].text assign[=] call[name[self]._y_format, parameter[name[position]]]
if name[self]._y_2nd_labels begin[:]
variable[secondary_ax] assign[=] call[name[self].svg.node, parameter[call[name[self].nodes][constant[plot]]]]
for taget[tuple[[<ast.Name object at 0x7da204565960>, <ast.Name object at 0x7da2045654e0>]]] in starred[name[self]._y_2nd_labels] begin[:]
variable[major] assign[=] compare[name[position] in name[self]._y_labels_major]
if <ast.UnaryOp object at 0x7da2045677f0> begin[:]
continue
variable[guides] assign[=] call[name[self].svg.node, parameter[name[secondary_ax]]]
variable[x] assign[=] binary_operation[name[self].view.width + constant[5]]
variable[y] assign[=] call[name[self].view.y, parameter[name[position]]]
variable[text] assign[=] call[name[self].svg.node, parameter[name[guides], constant[text]]]
name[text].text assign[=] name[label]
if name[self].y_label_rotation begin[:]
call[name[text].attrib][constant[transform]] assign[=] binary_operation[constant[rotate(%d %f %f)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da20c992d10>, <ast.Name object at 0x7da20c991c00>, <ast.Name object at 0x7da20c992ef0>]]]
if compare[constant[90] less[<] name[self].y_label_rotation] begin[:]
call[name[text].attrib][constant[class]] assign[=] call[constant[ ].join, parameter[binary_operation[<ast.BoolOp object at 0x7da20c76ffd0> + list[[<ast.Constant object at 0x7da20c76dba0>]]]]]
|
keyword[def] identifier[_y_axis] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[_y_labels] keyword[or] keyword[not] identifier[self] . identifier[show_y_labels] :
keyword[return]
identifier[axis] = identifier[self] . identifier[svg] . identifier[node] (
identifier[self] . identifier[nodes] [ literal[string] ],
identifier[class_] = literal[string] %( literal[string] keyword[if] identifier[self] . identifier[show_y_guides] keyword[else] literal[string] )
)
keyword[if] ( literal[int] keyword[not] keyword[in] [ identifier[label] [ literal[int] ] keyword[for] identifier[label] keyword[in] identifier[self] . identifier[_y_labels] ]
keyword[and] identifier[self] . identifier[show_y_guides] ):
identifier[self] . identifier[svg] . identifier[node] (
identifier[axis] ,
literal[string] ,
identifier[d] = literal[string] %(
literal[int] , literal[int] keyword[if] identifier[self] . identifier[inverse_y_axis] keyword[else] identifier[self] . identifier[view] . identifier[height] ,
identifier[self] . identifier[view] . identifier[width]
),
identifier[class_] = literal[string]
)
keyword[for] identifier[label] , identifier[position] keyword[in] identifier[self] . identifier[_y_labels] :
keyword[if] identifier[self] . identifier[horizontal] :
identifier[major] = identifier[label] keyword[in] identifier[self] . identifier[_y_labels_major]
keyword[else] :
identifier[major] = identifier[position] keyword[in] identifier[self] . identifier[_y_labels_major]
keyword[if] keyword[not] ( identifier[self] . identifier[show_minor_y_labels] keyword[or] identifier[major] ):
keyword[continue]
identifier[guides] = identifier[self] . identifier[svg] . identifier[node] (
identifier[axis] ,
identifier[class_] = literal[string] %
( literal[string] keyword[if] identifier[self] . identifier[logarithmic] keyword[else] literal[string] )
)
identifier[x] =- literal[int]
identifier[y] = identifier[self] . identifier[view] . identifier[y] ( identifier[position] )
keyword[if] keyword[not] identifier[y] :
keyword[continue]
keyword[if] identifier[self] . identifier[show_y_guides] :
identifier[self] . identifier[svg] . identifier[node] (
identifier[guides] ,
literal[string] ,
identifier[d] = literal[string] %( literal[int] , identifier[y] , identifier[self] . identifier[view] . identifier[width] ),
identifier[class_] = literal[string] %(
literal[string] keyword[if] identifier[label] == literal[string] keyword[else] literal[string] , literal[string]
keyword[if] identifier[major] keyword[else] literal[string] , literal[string] keyword[if] identifier[position] != literal[int] keyword[else] literal[string]
)
)
identifier[text] = identifier[self] . identifier[svg] . identifier[node] (
identifier[guides] ,
literal[string] ,
identifier[x] = identifier[x] ,
identifier[y] = identifier[y] + literal[int] * identifier[self] . identifier[style] . identifier[label_font_size] ,
identifier[class_] = literal[string] keyword[if] identifier[major] keyword[else] literal[string]
)
identifier[text] . identifier[text] = identifier[label]
keyword[if] identifier[self] . identifier[y_label_rotation] :
identifier[text] . identifier[attrib] [ literal[string] ]= literal[string] %(
identifier[self] . identifier[y_label_rotation] , identifier[x] , identifier[y]
)
keyword[if] literal[int] < identifier[self] . identifier[y_label_rotation] < literal[int] :
identifier[text] . identifier[attrib] [ literal[string] ]= literal[string] . identifier[join] ((
identifier[text] . identifier[attrib] [ literal[string] ]
keyword[and] identifier[text] . identifier[attrib] [ literal[string] ]. identifier[split] ( literal[string] ) keyword[or] []
)+[ literal[string] ])
identifier[self] . identifier[svg] . identifier[node] (
identifier[guides] ,
literal[string] ,
). identifier[text] = identifier[self] . identifier[_y_format] ( identifier[position] )
keyword[if] identifier[self] . identifier[_y_2nd_labels] :
identifier[secondary_ax] = identifier[self] . identifier[svg] . identifier[node] ( identifier[self] . identifier[nodes] [ literal[string] ], identifier[class_] = literal[string] )
keyword[for] identifier[label] , identifier[position] keyword[in] identifier[self] . identifier[_y_2nd_labels] :
identifier[major] = identifier[position] keyword[in] identifier[self] . identifier[_y_labels_major]
keyword[if] keyword[not] ( identifier[self] . identifier[show_minor_y_labels] keyword[or] identifier[major] ):
keyword[continue]
identifier[guides] = identifier[self] . identifier[svg] . identifier[node] ( identifier[secondary_ax] , identifier[class_] = literal[string] )
identifier[x] = identifier[self] . identifier[view] . identifier[width] + literal[int]
identifier[y] = identifier[self] . identifier[view] . identifier[y] ( identifier[position] )
identifier[text] = identifier[self] . identifier[svg] . identifier[node] (
identifier[guides] ,
literal[string] ,
identifier[x] = identifier[x] ,
identifier[y] = identifier[y] + literal[int] * identifier[self] . identifier[style] . identifier[label_font_size] ,
identifier[class_] = literal[string] keyword[if] identifier[major] keyword[else] literal[string]
)
identifier[text] . identifier[text] = identifier[label]
keyword[if] identifier[self] . identifier[y_label_rotation] :
identifier[text] . identifier[attrib] [ literal[string] ]= literal[string] %(
identifier[self] . identifier[y_label_rotation] , identifier[x] , identifier[y]
)
keyword[if] literal[int] < identifier[self] . identifier[y_label_rotation] < literal[int] :
identifier[text] . identifier[attrib] [ literal[string] ]= literal[string] . identifier[join] ((
identifier[text] . identifier[attrib] [ literal[string] ]
keyword[and] identifier[text] . identifier[attrib] [ literal[string] ]. identifier[split] ( literal[string] ) keyword[or] []
)+[ literal[string] ])
|
def _y_axis(self):
"""Make the y axis: labels and guides"""
if not self._y_labels or not self.show_y_labels:
return # depends on [control=['if'], data=[]]
axis = self.svg.node(self.nodes['plot'], class_='axis y%s' % (' always_show' if self.show_y_guides else ''))
if 0 not in [label[1] for label in self._y_labels] and self.show_y_guides:
self.svg.node(axis, 'path', d='M%f %f h%f' % (0, 0 if self.inverse_y_axis else self.view.height, self.view.width), class_='line') # depends on [control=['if'], data=[]]
for (label, position) in self._y_labels:
if self.horizontal:
major = label in self._y_labels_major # depends on [control=['if'], data=[]]
else:
major = position in self._y_labels_major
if not (self.show_minor_y_labels or major):
continue # depends on [control=['if'], data=[]]
guides = self.svg.node(axis, class_='%sguides' % ('logarithmic ' if self.logarithmic else ''))
x = -5
y = self.view.y(position)
if not y:
continue # depends on [control=['if'], data=[]]
if self.show_y_guides:
self.svg.node(guides, 'path', d='M%f %f h%f' % (0, y, self.view.width), class_='%s%s%sline' % ('axis ' if label == '0' else '', 'major ' if major else '', 'guide ' if position != 0 else '')) # depends on [control=['if'], data=[]]
text = self.svg.node(guides, 'text', x=x, y=y + 0.35 * self.style.label_font_size, class_='major' if major else '')
text.text = label
if self.y_label_rotation:
text.attrib['transform'] = 'rotate(%d %f %f)' % (self.y_label_rotation, x, y)
if 90 < self.y_label_rotation < 270:
text.attrib['class'] = ' '.join((text.attrib['class'] and text.attrib['class'].split(' ') or []) + ['backwards']) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
self.svg.node(guides, 'title').text = self._y_format(position) # depends on [control=['for'], data=[]]
if self._y_2nd_labels:
secondary_ax = self.svg.node(self.nodes['plot'], class_='axis y2')
for (label, position) in self._y_2nd_labels:
major = position in self._y_labels_major
if not (self.show_minor_y_labels or major):
continue # depends on [control=['if'], data=[]]
# it is needed, to have the same structure as primary axis
guides = self.svg.node(secondary_ax, class_='guides')
x = self.view.width + 5
y = self.view.y(position)
text = self.svg.node(guides, 'text', x=x, y=y + 0.35 * self.style.label_font_size, class_='major' if major else '')
text.text = label
if self.y_label_rotation:
text.attrib['transform'] = 'rotate(%d %f %f)' % (self.y_label_rotation, x, y)
if 90 < self.y_label_rotation < 270:
text.attrib['class'] = ' '.join((text.attrib['class'] and text.attrib['class'].split(' ') or []) + ['backwards']) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
|
def define_page_breakpoint(self, dwProcessId, address, pages = 1,
condition = True,
action = None):
"""
Creates a disabled page breakpoint at the given address.
@see:
L{has_page_breakpoint},
L{get_page_breakpoint},
L{enable_page_breakpoint},
L{enable_one_shot_page_breakpoint},
L{disable_page_breakpoint},
L{erase_page_breakpoint}
@type dwProcessId: int
@param dwProcessId: Process global ID.
@type address: int
@param address: Memory address of the first page to watch.
@type pages: int
@param pages: Number of pages to watch.
@type condition: function
@param condition: (Optional) Condition callback function.
The callback signature is::
def condition_callback(event):
return True # returns True or False
Where B{event} is an L{Event} object,
and the return value is a boolean
(C{True} to dispatch the event, C{False} otherwise).
@type action: function
@param action: (Optional) Action callback function.
If specified, the event is handled by this callback instead of
being dispatched normally.
The callback signature is::
def action_callback(event):
pass # no return value
Where B{event} is an L{Event} object,
and the return value is a boolean
(C{True} to dispatch the event, C{False} otherwise).
@rtype: L{PageBreakpoint}
@return: The page breakpoint object.
"""
process = self.system.get_process(dwProcessId)
bp = PageBreakpoint(address, pages, condition, action)
begin = bp.get_address()
end = begin + bp.get_size()
address = begin
pageSize = MemoryAddresses.pageSize
while address < end:
key = (dwProcessId, address)
if key in self.__pageBP:
msg = "Already exists (PID %d) : %r"
msg = msg % (dwProcessId, self.__pageBP[key])
raise KeyError(msg)
address = address + pageSize
address = begin
while address < end:
key = (dwProcessId, address)
self.__pageBP[key] = bp
address = address + pageSize
return bp
|
def function[define_page_breakpoint, parameter[self, dwProcessId, address, pages, condition, action]]:
constant[
Creates a disabled page breakpoint at the given address.
@see:
L{has_page_breakpoint},
L{get_page_breakpoint},
L{enable_page_breakpoint},
L{enable_one_shot_page_breakpoint},
L{disable_page_breakpoint},
L{erase_page_breakpoint}
@type dwProcessId: int
@param dwProcessId: Process global ID.
@type address: int
@param address: Memory address of the first page to watch.
@type pages: int
@param pages: Number of pages to watch.
@type condition: function
@param condition: (Optional) Condition callback function.
The callback signature is::
def condition_callback(event):
return True # returns True or False
Where B{event} is an L{Event} object,
and the return value is a boolean
(C{True} to dispatch the event, C{False} otherwise).
@type action: function
@param action: (Optional) Action callback function.
If specified, the event is handled by this callback instead of
being dispatched normally.
The callback signature is::
def action_callback(event):
pass # no return value
Where B{event} is an L{Event} object,
and the return value is a boolean
(C{True} to dispatch the event, C{False} otherwise).
@rtype: L{PageBreakpoint}
@return: The page breakpoint object.
]
variable[process] assign[=] call[name[self].system.get_process, parameter[name[dwProcessId]]]
variable[bp] assign[=] call[name[PageBreakpoint], parameter[name[address], name[pages], name[condition], name[action]]]
variable[begin] assign[=] call[name[bp].get_address, parameter[]]
variable[end] assign[=] binary_operation[name[begin] + call[name[bp].get_size, parameter[]]]
variable[address] assign[=] name[begin]
variable[pageSize] assign[=] name[MemoryAddresses].pageSize
while compare[name[address] less[<] name[end]] begin[:]
variable[key] assign[=] tuple[[<ast.Name object at 0x7da20e9563e0>, <ast.Name object at 0x7da20e9579a0>]]
if compare[name[key] in name[self].__pageBP] begin[:]
variable[msg] assign[=] constant[Already exists (PID %d) : %r]
variable[msg] assign[=] binary_operation[name[msg] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da20e956a10>, <ast.Subscript object at 0x7da20e954910>]]]
<ast.Raise object at 0x7da20e956050>
variable[address] assign[=] binary_operation[name[address] + name[pageSize]]
variable[address] assign[=] name[begin]
while compare[name[address] less[<] name[end]] begin[:]
variable[key] assign[=] tuple[[<ast.Name object at 0x7da18dc99870>, <ast.Name object at 0x7da18dc9a7a0>]]
call[name[self].__pageBP][name[key]] assign[=] name[bp]
variable[address] assign[=] binary_operation[name[address] + name[pageSize]]
return[name[bp]]
|
keyword[def] identifier[define_page_breakpoint] ( identifier[self] , identifier[dwProcessId] , identifier[address] , identifier[pages] = literal[int] ,
identifier[condition] = keyword[True] ,
identifier[action] = keyword[None] ):
literal[string]
identifier[process] = identifier[self] . identifier[system] . identifier[get_process] ( identifier[dwProcessId] )
identifier[bp] = identifier[PageBreakpoint] ( identifier[address] , identifier[pages] , identifier[condition] , identifier[action] )
identifier[begin] = identifier[bp] . identifier[get_address] ()
identifier[end] = identifier[begin] + identifier[bp] . identifier[get_size] ()
identifier[address] = identifier[begin]
identifier[pageSize] = identifier[MemoryAddresses] . identifier[pageSize]
keyword[while] identifier[address] < identifier[end] :
identifier[key] =( identifier[dwProcessId] , identifier[address] )
keyword[if] identifier[key] keyword[in] identifier[self] . identifier[__pageBP] :
identifier[msg] = literal[string]
identifier[msg] = identifier[msg] %( identifier[dwProcessId] , identifier[self] . identifier[__pageBP] [ identifier[key] ])
keyword[raise] identifier[KeyError] ( identifier[msg] )
identifier[address] = identifier[address] + identifier[pageSize]
identifier[address] = identifier[begin]
keyword[while] identifier[address] < identifier[end] :
identifier[key] =( identifier[dwProcessId] , identifier[address] )
identifier[self] . identifier[__pageBP] [ identifier[key] ]= identifier[bp]
identifier[address] = identifier[address] + identifier[pageSize]
keyword[return] identifier[bp]
|
def define_page_breakpoint(self, dwProcessId, address, pages=1, condition=True, action=None):
"""
Creates a disabled page breakpoint at the given address.
@see:
L{has_page_breakpoint},
L{get_page_breakpoint},
L{enable_page_breakpoint},
L{enable_one_shot_page_breakpoint},
L{disable_page_breakpoint},
L{erase_page_breakpoint}
@type dwProcessId: int
@param dwProcessId: Process global ID.
@type address: int
@param address: Memory address of the first page to watch.
@type pages: int
@param pages: Number of pages to watch.
@type condition: function
@param condition: (Optional) Condition callback function.
The callback signature is::
def condition_callback(event):
return True # returns True or False
Where B{event} is an L{Event} object,
and the return value is a boolean
(C{True} to dispatch the event, C{False} otherwise).
@type action: function
@param action: (Optional) Action callback function.
If specified, the event is handled by this callback instead of
being dispatched normally.
The callback signature is::
def action_callback(event):
pass # no return value
Where B{event} is an L{Event} object,
and the return value is a boolean
(C{True} to dispatch the event, C{False} otherwise).
@rtype: L{PageBreakpoint}
@return: The page breakpoint object.
"""
process = self.system.get_process(dwProcessId)
bp = PageBreakpoint(address, pages, condition, action)
begin = bp.get_address()
end = begin + bp.get_size()
address = begin
pageSize = MemoryAddresses.pageSize
while address < end:
key = (dwProcessId, address)
if key in self.__pageBP:
msg = 'Already exists (PID %d) : %r'
msg = msg % (dwProcessId, self.__pageBP[key])
raise KeyError(msg) # depends on [control=['if'], data=['key']]
address = address + pageSize # depends on [control=['while'], data=['address']]
address = begin
while address < end:
key = (dwProcessId, address)
self.__pageBP[key] = bp
address = address + pageSize # depends on [control=['while'], data=['address']]
return bp
|
def cleanup(self):
"""
Remove empty keys from this object. Should always be called after the result is final.
:return:
"""
empty_keys = [k for k, v in self.items() if not v]
for k in empty_keys:
del self[k]
|
def function[cleanup, parameter[self]]:
constant[
Remove empty keys from this object. Should always be called after the result is final.
:return:
]
variable[empty_keys] assign[=] <ast.ListComp object at 0x7da20c992710>
for taget[name[k]] in starred[name[empty_keys]] begin[:]
<ast.Delete object at 0x7da1b22e8fa0>
|
keyword[def] identifier[cleanup] ( identifier[self] ):
literal[string]
identifier[empty_keys] =[ identifier[k] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[self] . identifier[items] () keyword[if] keyword[not] identifier[v] ]
keyword[for] identifier[k] keyword[in] identifier[empty_keys] :
keyword[del] identifier[self] [ identifier[k] ]
|
def cleanup(self):
"""
Remove empty keys from this object. Should always be called after the result is final.
:return:
"""
empty_keys = [k for (k, v) in self.items() if not v]
for k in empty_keys:
del self[k] # depends on [control=['for'], data=['k']]
|
def change_analysis_requests_id_formatting(portal, p_type="AnalysisRequest"):
"""Applies the system's Sample ID Formatting to Analysis Request
"""
ar_id_format = dict(
form='{sampleType}-{seq:04d}',
portal_type='AnalysisRequest',
prefix='analysisrequest',
sequence_type='generated',
counter_type='',
split_length=1)
bs = portal.bika_setup
id_formatting = bs.getIDFormatting()
ar_format = filter(lambda id: id["portal_type"] == p_type, id_formatting)
if p_type=="AnalysisRequest":
logger.info("Set ID Format for Analysis Request portal_type ...")
if not ar_format or "sample" in ar_format[0]["form"]:
# Copy the ID formatting set for Sample
change_analysis_requests_id_formatting(portal, p_type="Sample")
return
else:
logger.info("ID Format for Analysis Request already set: {} [SKIP]"
.format(ar_format[0]["form"]))
return
else:
ar_format = ar_format and ar_format[0].copy() or ar_id_format
# Set the Analysis Request ID Format
ar_id_format.update(ar_format)
ar_id_format["portal_type"] ="AnalysisRequest"
ar_id_format["prefix"] = "analysisrequest"
set_id_format(portal, ar_id_format)
# Find out the last ID for Sample and reseed AR to prevent ID already taken
# errors on AR creation
if p_type == "Sample":
number_generator = getUtility(INumberGenerator)
ar_keys = dict()
ar_keys_prev = dict()
for key, value in number_generator.storage.items():
if "sample-" in key:
ar_key = key.replace("sample-", "analysisrequest-")
ar_keys[ar_key] = api.to_int(value, 0)
elif "analysisrequest-" in key:
ar_keys_prev[key] = api.to_int(value, 0)
for key, value in ar_keys.items():
if key in ar_keys_prev:
# Maybe this upgrade step has already been run, so we don't
# want the ar IDs to be reseeded again!
if value <= ar_keys_prev[key]:
logger.info("ID for '{}' already seeded to '{}' [SKIP]"
.format(key, ar_keys_prev[key]))
continue
logger.info("Seeding {} to {}".format(key, value))
number_generator.set_number(key, value)
|
def function[change_analysis_requests_id_formatting, parameter[portal, p_type]]:
constant[Applies the system's Sample ID Formatting to Analysis Request
]
variable[ar_id_format] assign[=] call[name[dict], parameter[]]
variable[bs] assign[=] name[portal].bika_setup
variable[id_formatting] assign[=] call[name[bs].getIDFormatting, parameter[]]
variable[ar_format] assign[=] call[name[filter], parameter[<ast.Lambda object at 0x7da18bcca9e0>, name[id_formatting]]]
if compare[name[p_type] equal[==] constant[AnalysisRequest]] begin[:]
call[name[logger].info, parameter[constant[Set ID Format for Analysis Request portal_type ...]]]
if <ast.BoolOp object at 0x7da2047ea740> begin[:]
call[name[change_analysis_requests_id_formatting], parameter[name[portal]]]
return[None]
call[name[ar_id_format].update, parameter[name[ar_format]]]
call[name[ar_id_format]][constant[portal_type]] assign[=] constant[AnalysisRequest]
call[name[ar_id_format]][constant[prefix]] assign[=] constant[analysisrequest]
call[name[set_id_format], parameter[name[portal], name[ar_id_format]]]
if compare[name[p_type] equal[==] constant[Sample]] begin[:]
variable[number_generator] assign[=] call[name[getUtility], parameter[name[INumberGenerator]]]
variable[ar_keys] assign[=] call[name[dict], parameter[]]
variable[ar_keys_prev] assign[=] call[name[dict], parameter[]]
for taget[tuple[[<ast.Name object at 0x7da18f00efe0>, <ast.Name object at 0x7da18f00d960>]]] in starred[call[name[number_generator].storage.items, parameter[]]] begin[:]
if compare[constant[sample-] in name[key]] begin[:]
variable[ar_key] assign[=] call[name[key].replace, parameter[constant[sample-], constant[analysisrequest-]]]
call[name[ar_keys]][name[ar_key]] assign[=] call[name[api].to_int, parameter[name[value], constant[0]]]
for taget[tuple[[<ast.Name object at 0x7da18f00ec80>, <ast.Name object at 0x7da18f00c670>]]] in starred[call[name[ar_keys].items, parameter[]]] begin[:]
if compare[name[key] in name[ar_keys_prev]] begin[:]
if compare[name[value] less_or_equal[<=] call[name[ar_keys_prev]][name[key]]] begin[:]
call[name[logger].info, parameter[call[constant[ID for '{}' already seeded to '{}' [SKIP]].format, parameter[name[key], call[name[ar_keys_prev]][name[key]]]]]]
continue
call[name[logger].info, parameter[call[constant[Seeding {} to {}].format, parameter[name[key], name[value]]]]]
call[name[number_generator].set_number, parameter[name[key], name[value]]]
|
keyword[def] identifier[change_analysis_requests_id_formatting] ( identifier[portal] , identifier[p_type] = literal[string] ):
literal[string]
identifier[ar_id_format] = identifier[dict] (
identifier[form] = literal[string] ,
identifier[portal_type] = literal[string] ,
identifier[prefix] = literal[string] ,
identifier[sequence_type] = literal[string] ,
identifier[counter_type] = literal[string] ,
identifier[split_length] = literal[int] )
identifier[bs] = identifier[portal] . identifier[bika_setup]
identifier[id_formatting] = identifier[bs] . identifier[getIDFormatting] ()
identifier[ar_format] = identifier[filter] ( keyword[lambda] identifier[id] : identifier[id] [ literal[string] ]== identifier[p_type] , identifier[id_formatting] )
keyword[if] identifier[p_type] == literal[string] :
identifier[logger] . identifier[info] ( literal[string] )
keyword[if] keyword[not] identifier[ar_format] keyword[or] literal[string] keyword[in] identifier[ar_format] [ literal[int] ][ literal[string] ]:
identifier[change_analysis_requests_id_formatting] ( identifier[portal] , identifier[p_type] = literal[string] )
keyword[return]
keyword[else] :
identifier[logger] . identifier[info] ( literal[string]
. identifier[format] ( identifier[ar_format] [ literal[int] ][ literal[string] ]))
keyword[return]
keyword[else] :
identifier[ar_format] = identifier[ar_format] keyword[and] identifier[ar_format] [ literal[int] ]. identifier[copy] () keyword[or] identifier[ar_id_format]
identifier[ar_id_format] . identifier[update] ( identifier[ar_format] )
identifier[ar_id_format] [ literal[string] ]= literal[string]
identifier[ar_id_format] [ literal[string] ]= literal[string]
identifier[set_id_format] ( identifier[portal] , identifier[ar_id_format] )
keyword[if] identifier[p_type] == literal[string] :
identifier[number_generator] = identifier[getUtility] ( identifier[INumberGenerator] )
identifier[ar_keys] = identifier[dict] ()
identifier[ar_keys_prev] = identifier[dict] ()
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[number_generator] . identifier[storage] . identifier[items] ():
keyword[if] literal[string] keyword[in] identifier[key] :
identifier[ar_key] = identifier[key] . identifier[replace] ( literal[string] , literal[string] )
identifier[ar_keys] [ identifier[ar_key] ]= identifier[api] . identifier[to_int] ( identifier[value] , literal[int] )
keyword[elif] literal[string] keyword[in] identifier[key] :
identifier[ar_keys_prev] [ identifier[key] ]= identifier[api] . identifier[to_int] ( identifier[value] , literal[int] )
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[ar_keys] . identifier[items] ():
keyword[if] identifier[key] keyword[in] identifier[ar_keys_prev] :
keyword[if] identifier[value] <= identifier[ar_keys_prev] [ identifier[key] ]:
identifier[logger] . identifier[info] ( literal[string]
. identifier[format] ( identifier[key] , identifier[ar_keys_prev] [ identifier[key] ]))
keyword[continue]
identifier[logger] . identifier[info] ( literal[string] . identifier[format] ( identifier[key] , identifier[value] ))
identifier[number_generator] . identifier[set_number] ( identifier[key] , identifier[value] )
|
def change_analysis_requests_id_formatting(portal, p_type='AnalysisRequest'):
"""Applies the system's Sample ID Formatting to Analysis Request
"""
ar_id_format = dict(form='{sampleType}-{seq:04d}', portal_type='AnalysisRequest', prefix='analysisrequest', sequence_type='generated', counter_type='', split_length=1)
bs = portal.bika_setup
id_formatting = bs.getIDFormatting()
ar_format = filter(lambda id: id['portal_type'] == p_type, id_formatting)
if p_type == 'AnalysisRequest':
logger.info('Set ID Format for Analysis Request portal_type ...')
if not ar_format or 'sample' in ar_format[0]['form']:
# Copy the ID formatting set for Sample
change_analysis_requests_id_formatting(portal, p_type='Sample')
return # depends on [control=['if'], data=[]]
else:
logger.info('ID Format for Analysis Request already set: {} [SKIP]'.format(ar_format[0]['form']))
return # depends on [control=['if'], data=[]]
else:
ar_format = ar_format and ar_format[0].copy() or ar_id_format
# Set the Analysis Request ID Format
ar_id_format.update(ar_format)
ar_id_format['portal_type'] = 'AnalysisRequest'
ar_id_format['prefix'] = 'analysisrequest'
set_id_format(portal, ar_id_format)
# Find out the last ID for Sample and reseed AR to prevent ID already taken
# errors on AR creation
if p_type == 'Sample':
number_generator = getUtility(INumberGenerator)
ar_keys = dict()
ar_keys_prev = dict()
for (key, value) in number_generator.storage.items():
if 'sample-' in key:
ar_key = key.replace('sample-', 'analysisrequest-')
ar_keys[ar_key] = api.to_int(value, 0) # depends on [control=['if'], data=['key']]
elif 'analysisrequest-' in key:
ar_keys_prev[key] = api.to_int(value, 0) # depends on [control=['if'], data=['key']] # depends on [control=['for'], data=[]]
for (key, value) in ar_keys.items():
if key in ar_keys_prev:
# Maybe this upgrade step has already been run, so we don't
# want the ar IDs to be reseeded again!
if value <= ar_keys_prev[key]:
logger.info("ID for '{}' already seeded to '{}' [SKIP]".format(key, ar_keys_prev[key]))
continue # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['key', 'ar_keys_prev']]
logger.info('Seeding {} to {}'.format(key, value))
number_generator.set_number(key, value) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
|
def add_not_null(self, model, *names):
"""Add not null."""
for name in names:
field = model._meta.fields[name]
field.null = False
self.ops.append(self.migrator.add_not_null(model._meta.table_name, field.column_name))
return model
|
def function[add_not_null, parameter[self, model]]:
constant[Add not null.]
for taget[name[name]] in starred[name[names]] begin[:]
variable[field] assign[=] call[name[model]._meta.fields][name[name]]
name[field].null assign[=] constant[False]
call[name[self].ops.append, parameter[call[name[self].migrator.add_not_null, parameter[name[model]._meta.table_name, name[field].column_name]]]]
return[name[model]]
|
keyword[def] identifier[add_not_null] ( identifier[self] , identifier[model] ,* identifier[names] ):
literal[string]
keyword[for] identifier[name] keyword[in] identifier[names] :
identifier[field] = identifier[model] . identifier[_meta] . identifier[fields] [ identifier[name] ]
identifier[field] . identifier[null] = keyword[False]
identifier[self] . identifier[ops] . identifier[append] ( identifier[self] . identifier[migrator] . identifier[add_not_null] ( identifier[model] . identifier[_meta] . identifier[table_name] , identifier[field] . identifier[column_name] ))
keyword[return] identifier[model]
|
def add_not_null(self, model, *names):
"""Add not null."""
for name in names:
field = model._meta.fields[name]
field.null = False
self.ops.append(self.migrator.add_not_null(model._meta.table_name, field.column_name)) # depends on [control=['for'], data=['name']]
return model
|
def view(request, namespace, docid):
"""The initial view, does not provide the document content yet"""
if flat.users.models.hasreadpermission(request.user.username, namespace, request):
if 'autodeclare' in settings.CONFIGURATIONS[request.session['configuration']]:
if flat.users.models.haswritepermission(request.user.username, namespace, request):
for annotationtype, set in settings.CONFIGURATIONS[request.session['configuration']]['autodeclare']:
try:
r = flat.comm.query(request, "USE " + namespace + "/" + docid + " DECLARE " + annotationtype + " OF " + set)
except Exception as e:
return fatalerror(request,e)
return initdoc(request, namespace,docid, 'metadata', 'metadata.html')
else:
return fatalerror(request,"Permission denied")
|
def function[view, parameter[request, namespace, docid]]:
constant[The initial view, does not provide the document content yet]
if call[name[flat].users.models.hasreadpermission, parameter[name[request].user.username, name[namespace], name[request]]] begin[:]
if compare[constant[autodeclare] in call[name[settings].CONFIGURATIONS][call[name[request].session][constant[configuration]]]] begin[:]
if call[name[flat].users.models.haswritepermission, parameter[name[request].user.username, name[namespace], name[request]]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b1800040>, <ast.Name object at 0x7da1b1802800>]]] in starred[call[call[name[settings].CONFIGURATIONS][call[name[request].session][constant[configuration]]]][constant[autodeclare]]] begin[:]
<ast.Try object at 0x7da1b18020e0>
return[call[name[initdoc], parameter[name[request], name[namespace], name[docid], constant[metadata], constant[metadata.html]]]]
|
keyword[def] identifier[view] ( identifier[request] , identifier[namespace] , identifier[docid] ):
literal[string]
keyword[if] identifier[flat] . identifier[users] . identifier[models] . identifier[hasreadpermission] ( identifier[request] . identifier[user] . identifier[username] , identifier[namespace] , identifier[request] ):
keyword[if] literal[string] keyword[in] identifier[settings] . identifier[CONFIGURATIONS] [ identifier[request] . identifier[session] [ literal[string] ]]:
keyword[if] identifier[flat] . identifier[users] . identifier[models] . identifier[haswritepermission] ( identifier[request] . identifier[user] . identifier[username] , identifier[namespace] , identifier[request] ):
keyword[for] identifier[annotationtype] , identifier[set] keyword[in] identifier[settings] . identifier[CONFIGURATIONS] [ identifier[request] . identifier[session] [ literal[string] ]][ literal[string] ]:
keyword[try] :
identifier[r] = identifier[flat] . identifier[comm] . identifier[query] ( identifier[request] , literal[string] + identifier[namespace] + literal[string] + identifier[docid] + literal[string] + identifier[annotationtype] + literal[string] + identifier[set] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
keyword[return] identifier[fatalerror] ( identifier[request] , identifier[e] )
keyword[return] identifier[initdoc] ( identifier[request] , identifier[namespace] , identifier[docid] , literal[string] , literal[string] )
keyword[else] :
keyword[return] identifier[fatalerror] ( identifier[request] , literal[string] )
|
def view(request, namespace, docid):
"""The initial view, does not provide the document content yet"""
if flat.users.models.hasreadpermission(request.user.username, namespace, request):
if 'autodeclare' in settings.CONFIGURATIONS[request.session['configuration']]:
if flat.users.models.haswritepermission(request.user.username, namespace, request):
for (annotationtype, set) in settings.CONFIGURATIONS[request.session['configuration']]['autodeclare']:
try:
r = flat.comm.query(request, 'USE ' + namespace + '/' + docid + ' DECLARE ' + annotationtype + ' OF ' + set) # depends on [control=['try'], data=[]]
except Exception as e:
return fatalerror(request, e) # depends on [control=['except'], data=['e']] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return initdoc(request, namespace, docid, 'metadata', 'metadata.html') # depends on [control=['if'], data=[]]
else:
return fatalerror(request, 'Permission denied')
|
def insert_and_publish_uow(self, job_record, start_id, end_id):
""" method creates and publishes a unit_of_work. it also handles DuplicateKeyError and attempts recovery
:return: tuple (uow, is_duplicate)
:raise UserWarning: if the recovery from DuplicateKeyError was unsuccessful
"""
process_name = job_record.process_name
timeperiod = job_record.timeperiod
start_timeperiod = self.compute_start_timeperiod(job_record.process_name, job_record.timeperiod)
end_timeperiod = self.compute_end_timeperiod(job_record.process_name, job_record.timeperiod)
try:
is_duplicate = False
uow = self._insert_uow(process_name, timeperiod, start_timeperiod, end_timeperiod, start_id, end_id)
except DuplicateKeyError as e:
is_duplicate = True
msg = 'Catching up with latest UOW {0}@{1}, because of: {2}' \
.format(process_name, start_timeperiod, e)
self._log_message(WARNING, process_name, start_timeperiod, msg)
uow = self.uow_dao.recover_from_duplicatekeyerror(e)
if not uow:
msg = 'PERSISTENT TIER ERROR! Unable to locate UOW for {0}@{1}' \
.format(process_name, start_timeperiod)
self._log_message(WARNING, process_name, start_timeperiod, msg)
raise UserWarning(msg)
if uow.is_canceled:
# this UOW was marked for re-processing. recycle it
uow.created_at = datetime.utcnow() # reset created_at to bypass GC cancellation logic
uow.submitted_at = datetime.utcnow() # reset submitted_at to allow 1 hour free of GC resubmitting
del uow.started_at
del uow.finished_at
del uow.number_of_aggregated_documents
del uow.number_of_processed_documents
uow.state = unit_of_work.STATE_REQUESTED
self.uow_dao.update(uow)
# publish the created/recovered/recycled unit_of_work
self._publish_uow(uow)
return uow, is_duplicate
|
def function[insert_and_publish_uow, parameter[self, job_record, start_id, end_id]]:
constant[ method creates and publishes a unit_of_work. it also handles DuplicateKeyError and attempts recovery
:return: tuple (uow, is_duplicate)
:raise UserWarning: if the recovery from DuplicateKeyError was unsuccessful
]
variable[process_name] assign[=] name[job_record].process_name
variable[timeperiod] assign[=] name[job_record].timeperiod
variable[start_timeperiod] assign[=] call[name[self].compute_start_timeperiod, parameter[name[job_record].process_name, name[job_record].timeperiod]]
variable[end_timeperiod] assign[=] call[name[self].compute_end_timeperiod, parameter[name[job_record].process_name, name[job_record].timeperiod]]
<ast.Try object at 0x7da207f99c30>
if <ast.UnaryOp object at 0x7da207f9b0a0> begin[:]
variable[msg] assign[=] call[constant[PERSISTENT TIER ERROR! Unable to locate UOW for {0}@{1}].format, parameter[name[process_name], name[start_timeperiod]]]
call[name[self]._log_message, parameter[name[WARNING], name[process_name], name[start_timeperiod], name[msg]]]
<ast.Raise object at 0x7da207f991e0>
if name[uow].is_canceled begin[:]
name[uow].created_at assign[=] call[name[datetime].utcnow, parameter[]]
name[uow].submitted_at assign[=] call[name[datetime].utcnow, parameter[]]
<ast.Delete object at 0x7da207f99ab0>
<ast.Delete object at 0x7da207f9a260>
<ast.Delete object at 0x7da207f9a980>
<ast.Delete object at 0x7da207f9ac20>
name[uow].state assign[=] name[unit_of_work].STATE_REQUESTED
call[name[self].uow_dao.update, parameter[name[uow]]]
call[name[self]._publish_uow, parameter[name[uow]]]
return[tuple[[<ast.Name object at 0x7da2046218d0>, <ast.Name object at 0x7da2046234f0>]]]
|
keyword[def] identifier[insert_and_publish_uow] ( identifier[self] , identifier[job_record] , identifier[start_id] , identifier[end_id] ):
literal[string]
identifier[process_name] = identifier[job_record] . identifier[process_name]
identifier[timeperiod] = identifier[job_record] . identifier[timeperiod]
identifier[start_timeperiod] = identifier[self] . identifier[compute_start_timeperiod] ( identifier[job_record] . identifier[process_name] , identifier[job_record] . identifier[timeperiod] )
identifier[end_timeperiod] = identifier[self] . identifier[compute_end_timeperiod] ( identifier[job_record] . identifier[process_name] , identifier[job_record] . identifier[timeperiod] )
keyword[try] :
identifier[is_duplicate] = keyword[False]
identifier[uow] = identifier[self] . identifier[_insert_uow] ( identifier[process_name] , identifier[timeperiod] , identifier[start_timeperiod] , identifier[end_timeperiod] , identifier[start_id] , identifier[end_id] )
keyword[except] identifier[DuplicateKeyError] keyword[as] identifier[e] :
identifier[is_duplicate] = keyword[True]
identifier[msg] = literal[string] . identifier[format] ( identifier[process_name] , identifier[start_timeperiod] , identifier[e] )
identifier[self] . identifier[_log_message] ( identifier[WARNING] , identifier[process_name] , identifier[start_timeperiod] , identifier[msg] )
identifier[uow] = identifier[self] . identifier[uow_dao] . identifier[recover_from_duplicatekeyerror] ( identifier[e] )
keyword[if] keyword[not] identifier[uow] :
identifier[msg] = literal[string] . identifier[format] ( identifier[process_name] , identifier[start_timeperiod] )
identifier[self] . identifier[_log_message] ( identifier[WARNING] , identifier[process_name] , identifier[start_timeperiod] , identifier[msg] )
keyword[raise] identifier[UserWarning] ( identifier[msg] )
keyword[if] identifier[uow] . identifier[is_canceled] :
identifier[uow] . identifier[created_at] = identifier[datetime] . identifier[utcnow] ()
identifier[uow] . identifier[submitted_at] = identifier[datetime] . identifier[utcnow] ()
keyword[del] identifier[uow] . identifier[started_at]
keyword[del] identifier[uow] . identifier[finished_at]
keyword[del] identifier[uow] . identifier[number_of_aggregated_documents]
keyword[del] identifier[uow] . identifier[number_of_processed_documents]
identifier[uow] . identifier[state] = identifier[unit_of_work] . identifier[STATE_REQUESTED]
identifier[self] . identifier[uow_dao] . identifier[update] ( identifier[uow] )
identifier[self] . identifier[_publish_uow] ( identifier[uow] )
keyword[return] identifier[uow] , identifier[is_duplicate]
|
def insert_and_publish_uow(self, job_record, start_id, end_id):
""" method creates and publishes a unit_of_work. it also handles DuplicateKeyError and attempts recovery
:return: tuple (uow, is_duplicate)
:raise UserWarning: if the recovery from DuplicateKeyError was unsuccessful
"""
process_name = job_record.process_name
timeperiod = job_record.timeperiod
start_timeperiod = self.compute_start_timeperiod(job_record.process_name, job_record.timeperiod)
end_timeperiod = self.compute_end_timeperiod(job_record.process_name, job_record.timeperiod)
try:
is_duplicate = False
uow = self._insert_uow(process_name, timeperiod, start_timeperiod, end_timeperiod, start_id, end_id) # depends on [control=['try'], data=[]]
except DuplicateKeyError as e:
is_duplicate = True
msg = 'Catching up with latest UOW {0}@{1}, because of: {2}'.format(process_name, start_timeperiod, e)
self._log_message(WARNING, process_name, start_timeperiod, msg)
uow = self.uow_dao.recover_from_duplicatekeyerror(e) # depends on [control=['except'], data=['e']]
if not uow:
msg = 'PERSISTENT TIER ERROR! Unable to locate UOW for {0}@{1}'.format(process_name, start_timeperiod)
self._log_message(WARNING, process_name, start_timeperiod, msg)
raise UserWarning(msg) # depends on [control=['if'], data=[]]
if uow.is_canceled:
# this UOW was marked for re-processing. recycle it
uow.created_at = datetime.utcnow() # reset created_at to bypass GC cancellation logic
uow.submitted_at = datetime.utcnow() # reset submitted_at to allow 1 hour free of GC resubmitting
del uow.started_at
del uow.finished_at
del uow.number_of_aggregated_documents
del uow.number_of_processed_documents
uow.state = unit_of_work.STATE_REQUESTED
self.uow_dao.update(uow) # depends on [control=['if'], data=[]]
# publish the created/recovered/recycled unit_of_work
self._publish_uow(uow)
return (uow, is_duplicate)
|
def assert_element_absent(self, selector, by=By.CSS_SELECTOR,
timeout=settings.SMALL_TIMEOUT):
""" Similar to wait_for_element_absent() - returns nothing.
As above, will raise an exception if the element stays present.
Returns True if successful. Default timeout = SMALL_TIMEOUT. """
if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT:
timeout = self.__get_new_timeout(timeout)
self.wait_for_element_absent(selector, by=by, timeout=timeout)
return True
|
def function[assert_element_absent, parameter[self, selector, by, timeout]]:
constant[ Similar to wait_for_element_absent() - returns nothing.
As above, will raise an exception if the element stays present.
Returns True if successful. Default timeout = SMALL_TIMEOUT. ]
if <ast.BoolOp object at 0x7da1b1bee050> begin[:]
variable[timeout] assign[=] call[name[self].__get_new_timeout, parameter[name[timeout]]]
call[name[self].wait_for_element_absent, parameter[name[selector]]]
return[constant[True]]
|
keyword[def] identifier[assert_element_absent] ( identifier[self] , identifier[selector] , identifier[by] = identifier[By] . identifier[CSS_SELECTOR] ,
identifier[timeout] = identifier[settings] . identifier[SMALL_TIMEOUT] ):
literal[string]
keyword[if] identifier[self] . identifier[timeout_multiplier] keyword[and] identifier[timeout] == identifier[settings] . identifier[SMALL_TIMEOUT] :
identifier[timeout] = identifier[self] . identifier[__get_new_timeout] ( identifier[timeout] )
identifier[self] . identifier[wait_for_element_absent] ( identifier[selector] , identifier[by] = identifier[by] , identifier[timeout] = identifier[timeout] )
keyword[return] keyword[True]
|
def assert_element_absent(self, selector, by=By.CSS_SELECTOR, timeout=settings.SMALL_TIMEOUT):
""" Similar to wait_for_element_absent() - returns nothing.
As above, will raise an exception if the element stays present.
Returns True if successful. Default timeout = SMALL_TIMEOUT. """
if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT:
timeout = self.__get_new_timeout(timeout) # depends on [control=['if'], data=[]]
self.wait_for_element_absent(selector, by=by, timeout=timeout)
return True
|
def clean():
"""
Clean data created by this script
"""
for queue in MyQueue.collection().instances():
queue.delete()
for job in MyJob.collection().instances():
job.delete()
for person in Person.collection().instances():
person.delete()
|
def function[clean, parameter[]]:
constant[
Clean data created by this script
]
for taget[name[queue]] in starred[call[call[name[MyQueue].collection, parameter[]].instances, parameter[]]] begin[:]
call[name[queue].delete, parameter[]]
for taget[name[job]] in starred[call[call[name[MyJob].collection, parameter[]].instances, parameter[]]] begin[:]
call[name[job].delete, parameter[]]
for taget[name[person]] in starred[call[call[name[Person].collection, parameter[]].instances, parameter[]]] begin[:]
call[name[person].delete, parameter[]]
|
keyword[def] identifier[clean] ():
literal[string]
keyword[for] identifier[queue] keyword[in] identifier[MyQueue] . identifier[collection] (). identifier[instances] ():
identifier[queue] . identifier[delete] ()
keyword[for] identifier[job] keyword[in] identifier[MyJob] . identifier[collection] (). identifier[instances] ():
identifier[job] . identifier[delete] ()
keyword[for] identifier[person] keyword[in] identifier[Person] . identifier[collection] (). identifier[instances] ():
identifier[person] . identifier[delete] ()
|
def clean():
"""
Clean data created by this script
"""
for queue in MyQueue.collection().instances():
queue.delete() # depends on [control=['for'], data=['queue']]
for job in MyJob.collection().instances():
job.delete() # depends on [control=['for'], data=['job']]
for person in Person.collection().instances():
person.delete() # depends on [control=['for'], data=['person']]
|
def add(self, entities):
"""
Adds the given entities to the cache, if they weren't saved before.
"""
if not utils.is_list_like(entities):
# Invariant: all "chats" and "users" are always iterables,
# and "user" never is (so we wrap it inside a list).
entities = itertools.chain(
getattr(entities, 'chats', []),
getattr(entities, 'users', []),
(hasattr(entities, 'user') and [entities.user]) or []
)
for entity in entities:
try:
pid = utils.get_peer_id(entity)
if pid not in self.__dict__:
# Note: `get_input_peer` already checks for `access_hash`
self.__dict__[pid] = utils.get_input_peer(entity)
except TypeError:
pass
|
def function[add, parameter[self, entities]]:
constant[
Adds the given entities to the cache, if they weren't saved before.
]
if <ast.UnaryOp object at 0x7da1b21daec0> begin[:]
variable[entities] assign[=] call[name[itertools].chain, parameter[call[name[getattr], parameter[name[entities], constant[chats], list[[]]]], call[name[getattr], parameter[name[entities], constant[users], list[[]]]], <ast.BoolOp object at 0x7da1b21dab00>]]
for taget[name[entity]] in starred[name[entities]] begin[:]
<ast.Try object at 0x7da1b21da890>
|
keyword[def] identifier[add] ( identifier[self] , identifier[entities] ):
literal[string]
keyword[if] keyword[not] identifier[utils] . identifier[is_list_like] ( identifier[entities] ):
identifier[entities] = identifier[itertools] . identifier[chain] (
identifier[getattr] ( identifier[entities] , literal[string] ,[]),
identifier[getattr] ( identifier[entities] , literal[string] ,[]),
( identifier[hasattr] ( identifier[entities] , literal[string] ) keyword[and] [ identifier[entities] . identifier[user] ]) keyword[or] []
)
keyword[for] identifier[entity] keyword[in] identifier[entities] :
keyword[try] :
identifier[pid] = identifier[utils] . identifier[get_peer_id] ( identifier[entity] )
keyword[if] identifier[pid] keyword[not] keyword[in] identifier[self] . identifier[__dict__] :
identifier[self] . identifier[__dict__] [ identifier[pid] ]= identifier[utils] . identifier[get_input_peer] ( identifier[entity] )
keyword[except] identifier[TypeError] :
keyword[pass]
|
def add(self, entities):
"""
Adds the given entities to the cache, if they weren't saved before.
"""
if not utils.is_list_like(entities):
# Invariant: all "chats" and "users" are always iterables,
# and "user" never is (so we wrap it inside a list).
entities = itertools.chain(getattr(entities, 'chats', []), getattr(entities, 'users', []), hasattr(entities, 'user') and [entities.user] or []) # depends on [control=['if'], data=[]]
for entity in entities:
try:
pid = utils.get_peer_id(entity)
if pid not in self.__dict__:
# Note: `get_input_peer` already checks for `access_hash`
self.__dict__[pid] = utils.get_input_peer(entity) # depends on [control=['if'], data=['pid']] # depends on [control=['try'], data=[]]
except TypeError:
pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['entity']]
|
def gp_size(self, _gp_size):
"""Store the new start address attribute of the BFD file being
processed.
"""
if not self._ptr:
raise BfdException("BFD not initialized")
return _bfd.set_gp_size(self._ptr, _gp_size)
|
def function[gp_size, parameter[self, _gp_size]]:
constant[Store the new start address attribute of the BFD file being
processed.
]
if <ast.UnaryOp object at 0x7da207f9ad70> begin[:]
<ast.Raise object at 0x7da207f99210>
return[call[name[_bfd].set_gp_size, parameter[name[self]._ptr, name[_gp_size]]]]
|
keyword[def] identifier[gp_size] ( identifier[self] , identifier[_gp_size] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[_ptr] :
keyword[raise] identifier[BfdException] ( literal[string] )
keyword[return] identifier[_bfd] . identifier[set_gp_size] ( identifier[self] . identifier[_ptr] , identifier[_gp_size] )
|
def gp_size(self, _gp_size):
"""Store the new start address attribute of the BFD file being
processed.
"""
if not self._ptr:
raise BfdException('BFD not initialized') # depends on [control=['if'], data=[]]
return _bfd.set_gp_size(self._ptr, _gp_size)
|
def add(self, vm):
"""
Add a DalvikVMFormat to this Analysis
:param vm: :class:`dvm.DalvikVMFormat` to add to this Analysis
"""
self.vms.append(vm)
for current_class in vm.get_classes():
self.classes[current_class.get_name()] = ClassAnalysis(current_class)
for method in vm.get_methods():
self.methods[method] = MethodAnalysis(vm, method)
|
def function[add, parameter[self, vm]]:
constant[
Add a DalvikVMFormat to this Analysis
:param vm: :class:`dvm.DalvikVMFormat` to add to this Analysis
]
call[name[self].vms.append, parameter[name[vm]]]
for taget[name[current_class]] in starred[call[name[vm].get_classes, parameter[]]] begin[:]
call[name[self].classes][call[name[current_class].get_name, parameter[]]] assign[=] call[name[ClassAnalysis], parameter[name[current_class]]]
for taget[name[method]] in starred[call[name[vm].get_methods, parameter[]]] begin[:]
call[name[self].methods][name[method]] assign[=] call[name[MethodAnalysis], parameter[name[vm], name[method]]]
|
keyword[def] identifier[add] ( identifier[self] , identifier[vm] ):
literal[string]
identifier[self] . identifier[vms] . identifier[append] ( identifier[vm] )
keyword[for] identifier[current_class] keyword[in] identifier[vm] . identifier[get_classes] ():
identifier[self] . identifier[classes] [ identifier[current_class] . identifier[get_name] ()]= identifier[ClassAnalysis] ( identifier[current_class] )
keyword[for] identifier[method] keyword[in] identifier[vm] . identifier[get_methods] ():
identifier[self] . identifier[methods] [ identifier[method] ]= identifier[MethodAnalysis] ( identifier[vm] , identifier[method] )
|
def add(self, vm):
"""
Add a DalvikVMFormat to this Analysis
:param vm: :class:`dvm.DalvikVMFormat` to add to this Analysis
"""
self.vms.append(vm)
for current_class in vm.get_classes():
self.classes[current_class.get_name()] = ClassAnalysis(current_class) # depends on [control=['for'], data=['current_class']]
for method in vm.get_methods():
self.methods[method] = MethodAnalysis(vm, method) # depends on [control=['for'], data=['method']]
|
def pformat(tree):
"""Recursively formats a tree into a nice string representation.
Example Input:
yahoo = tt.Tree(tt.Node("CEO"))
yahoo.root.add(tt.Node("Infra"))
yahoo.root[0].add(tt.Node("Boss"))
yahoo.root[0][0].add(tt.Node("Me"))
yahoo.root.add(tt.Node("Mobile"))
yahoo.root.add(tt.Node("Mail"))
Example Output:
CEO
|__Infra
| |__Boss
| |__Me
|__Mobile
|__Mail
"""
if tree.empty():
return ''
buf = six.StringIO()
for line in _pformat(tree.root, 0):
buf.write(line + "\n")
return buf.getvalue().strip()
|
def function[pformat, parameter[tree]]:
constant[Recursively formats a tree into a nice string representation.
Example Input:
yahoo = tt.Tree(tt.Node("CEO"))
yahoo.root.add(tt.Node("Infra"))
yahoo.root[0].add(tt.Node("Boss"))
yahoo.root[0][0].add(tt.Node("Me"))
yahoo.root.add(tt.Node("Mobile"))
yahoo.root.add(tt.Node("Mail"))
Example Output:
CEO
|__Infra
| |__Boss
| |__Me
|__Mobile
|__Mail
]
if call[name[tree].empty, parameter[]] begin[:]
return[constant[]]
variable[buf] assign[=] call[name[six].StringIO, parameter[]]
for taget[name[line]] in starred[call[name[_pformat], parameter[name[tree].root, constant[0]]]] begin[:]
call[name[buf].write, parameter[binary_operation[name[line] + constant[
]]]]
return[call[call[name[buf].getvalue, parameter[]].strip, parameter[]]]
|
keyword[def] identifier[pformat] ( identifier[tree] ):
literal[string]
keyword[if] identifier[tree] . identifier[empty] ():
keyword[return] literal[string]
identifier[buf] = identifier[six] . identifier[StringIO] ()
keyword[for] identifier[line] keyword[in] identifier[_pformat] ( identifier[tree] . identifier[root] , literal[int] ):
identifier[buf] . identifier[write] ( identifier[line] + literal[string] )
keyword[return] identifier[buf] . identifier[getvalue] (). identifier[strip] ()
|
def pformat(tree):
"""Recursively formats a tree into a nice string representation.
Example Input:
yahoo = tt.Tree(tt.Node("CEO"))
yahoo.root.add(tt.Node("Infra"))
yahoo.root[0].add(tt.Node("Boss"))
yahoo.root[0][0].add(tt.Node("Me"))
yahoo.root.add(tt.Node("Mobile"))
yahoo.root.add(tt.Node("Mail"))
Example Output:
CEO
|__Infra
| |__Boss
| |__Me
|__Mobile
|__Mail
"""
if tree.empty():
return '' # depends on [control=['if'], data=[]]
buf = six.StringIO()
for line in _pformat(tree.root, 0):
buf.write(line + '\n') # depends on [control=['for'], data=['line']]
return buf.getvalue().strip()
|
def cycle_slice(sliceable, start, end):
"""Given a list, return right hand cycle direction slice from start to end.
Usage::
>>> array = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
>>> cycle_slice(array, 4, 7) # from array[4] to array[7]
[4, 5, 6, 7]
>>> cycle_slice(array, 8, 2) # from array[8] to array[2]
[8, 9, 0, 1, 2]
"""
if type(sliceable) != list:
sliceable = list(sliceable)
if end >= start:
return sliceable[start:end+1]
else:
return sliceable[start:] + sliceable[:end+1]
|
def function[cycle_slice, parameter[sliceable, start, end]]:
constant[Given a list, return right hand cycle direction slice from start to end.
Usage::
>>> array = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
>>> cycle_slice(array, 4, 7) # from array[4] to array[7]
[4, 5, 6, 7]
>>> cycle_slice(array, 8, 2) # from array[8] to array[2]
[8, 9, 0, 1, 2]
]
if compare[call[name[type], parameter[name[sliceable]]] not_equal[!=] name[list]] begin[:]
variable[sliceable] assign[=] call[name[list], parameter[name[sliceable]]]
if compare[name[end] greater_or_equal[>=] name[start]] begin[:]
return[call[name[sliceable]][<ast.Slice object at 0x7da18bc73e50>]]
|
keyword[def] identifier[cycle_slice] ( identifier[sliceable] , identifier[start] , identifier[end] ):
literal[string]
keyword[if] identifier[type] ( identifier[sliceable] )!= identifier[list] :
identifier[sliceable] = identifier[list] ( identifier[sliceable] )
keyword[if] identifier[end] >= identifier[start] :
keyword[return] identifier[sliceable] [ identifier[start] : identifier[end] + literal[int] ]
keyword[else] :
keyword[return] identifier[sliceable] [ identifier[start] :]+ identifier[sliceable] [: identifier[end] + literal[int] ]
|
def cycle_slice(sliceable, start, end):
"""Given a list, return right hand cycle direction slice from start to end.
Usage::
>>> array = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
>>> cycle_slice(array, 4, 7) # from array[4] to array[7]
[4, 5, 6, 7]
>>> cycle_slice(array, 8, 2) # from array[8] to array[2]
[8, 9, 0, 1, 2]
"""
if type(sliceable) != list:
sliceable = list(sliceable) # depends on [control=['if'], data=['list']]
if end >= start:
return sliceable[start:end + 1] # depends on [control=['if'], data=['end', 'start']]
else:
return sliceable[start:] + sliceable[:end + 1]
|
def mod_repo(repo, **kwargs):
'''
Modify one or more values for a repo. If the repo does not exist, it will
be created, so long as uri is defined.
The following options are available to modify a repo definition:
repo
alias by which opkg refers to the repo.
uri
the URI to the repo.
compressed
defines (True or False) if the index file is compressed
enabled
enable or disable (True or False) repository
but do not remove if disabled.
refresh
enable or disable (True or False) auto-refresh of the repositories
CLI Examples:
.. code-block:: bash
salt '*' pkg.mod_repo repo uri=http://new/uri
salt '*' pkg.mod_repo repo enabled=False
'''
repos = list_repos()
found = False
uri = ''
if 'uri' in kwargs:
uri = kwargs['uri']
for repository in repos:
source = repos[repository][0]
if source['name'] == repo:
found = True
repostr = ''
if 'enabled' in kwargs and not kwargs['enabled']:
repostr += '# '
if 'compressed' in kwargs:
repostr += 'src/gz ' if kwargs['compressed'] else 'src'
else:
repostr += 'src/gz' if source['compressed'] else 'src'
repo_alias = kwargs['alias'] if 'alias' in kwargs else repo
if ' ' in repo_alias:
repostr += ' "{0}"'.format(repo_alias)
else:
repostr += ' {0}'.format(repo_alias)
repostr += ' {0}'.format(kwargs['uri'] if 'uri' in kwargs else source['uri'])
trusted = kwargs.get('trusted')
repostr = _set_trusted_option_if_needed(repostr, trusted) if trusted is not None else \
_set_trusted_option_if_needed(repostr, source.get('trusted'))
_mod_repo_in_file(repo, repostr, source['file'])
elif uri and source['uri'] == uri:
raise CommandExecutionError(
'Repository \'{0}\' already exists as \'{1}\'.'.format(uri, source['name']))
if not found:
# Need to add a new repo
if 'uri' not in kwargs:
raise CommandExecutionError(
'Repository \'{0}\' not found and no URI passed to create one.'.format(repo))
properties = {'uri': kwargs['uri']}
# If compressed is not defined, assume True
properties['compressed'] = kwargs['compressed'] if 'compressed' in kwargs else True
# If enabled is not defined, assume True
properties['enabled'] = kwargs['enabled'] if 'enabled' in kwargs else True
properties['trusted'] = kwargs.get('trusted')
_add_new_repo(repo, properties)
if 'refresh' in kwargs:
refresh_db()
|
def function[mod_repo, parameter[repo]]:
constant[
Modify one or more values for a repo. If the repo does not exist, it will
be created, so long as uri is defined.
The following options are available to modify a repo definition:
repo
alias by which opkg refers to the repo.
uri
the URI to the repo.
compressed
defines (True or False) if the index file is compressed
enabled
enable or disable (True or False) repository
but do not remove if disabled.
refresh
enable or disable (True or False) auto-refresh of the repositories
CLI Examples:
.. code-block:: bash
salt '*' pkg.mod_repo repo uri=http://new/uri
salt '*' pkg.mod_repo repo enabled=False
]
variable[repos] assign[=] call[name[list_repos], parameter[]]
variable[found] assign[=] constant[False]
variable[uri] assign[=] constant[]
if compare[constant[uri] in name[kwargs]] begin[:]
variable[uri] assign[=] call[name[kwargs]][constant[uri]]
for taget[name[repository]] in starred[name[repos]] begin[:]
variable[source] assign[=] call[call[name[repos]][name[repository]]][constant[0]]
if compare[call[name[source]][constant[name]] equal[==] name[repo]] begin[:]
variable[found] assign[=] constant[True]
variable[repostr] assign[=] constant[]
if <ast.BoolOp object at 0x7da18bc704c0> begin[:]
<ast.AugAssign object at 0x7da18bc73fd0>
if compare[constant[compressed] in name[kwargs]] begin[:]
<ast.AugAssign object at 0x7da18bc73640>
variable[repo_alias] assign[=] <ast.IfExp object at 0x7da18bc711b0>
if compare[constant[ ] in name[repo_alias]] begin[:]
<ast.AugAssign object at 0x7da18bc70310>
<ast.AugAssign object at 0x7da18bc71150>
variable[trusted] assign[=] call[name[kwargs].get, parameter[constant[trusted]]]
variable[repostr] assign[=] <ast.IfExp object at 0x7da18bc739a0>
call[name[_mod_repo_in_file], parameter[name[repo], name[repostr], call[name[source]][constant[file]]]]
if <ast.UnaryOp object at 0x7da18bc73cd0> begin[:]
if compare[constant[uri] <ast.NotIn object at 0x7da2590d7190> name[kwargs]] begin[:]
<ast.Raise object at 0x7da18bc727a0>
variable[properties] assign[=] dictionary[[<ast.Constant object at 0x7da18bc724a0>], [<ast.Subscript object at 0x7da18bc73490>]]
call[name[properties]][constant[compressed]] assign[=] <ast.IfExp object at 0x7da18bc72710>
call[name[properties]][constant[enabled]] assign[=] <ast.IfExp object at 0x7da18bc71930>
call[name[properties]][constant[trusted]] assign[=] call[name[kwargs].get, parameter[constant[trusted]]]
call[name[_add_new_repo], parameter[name[repo], name[properties]]]
if compare[constant[refresh] in name[kwargs]] begin[:]
call[name[refresh_db], parameter[]]
|
keyword[def] identifier[mod_repo] ( identifier[repo] ,** identifier[kwargs] ):
literal[string]
identifier[repos] = identifier[list_repos] ()
identifier[found] = keyword[False]
identifier[uri] = literal[string]
keyword[if] literal[string] keyword[in] identifier[kwargs] :
identifier[uri] = identifier[kwargs] [ literal[string] ]
keyword[for] identifier[repository] keyword[in] identifier[repos] :
identifier[source] = identifier[repos] [ identifier[repository] ][ literal[int] ]
keyword[if] identifier[source] [ literal[string] ]== identifier[repo] :
identifier[found] = keyword[True]
identifier[repostr] = literal[string]
keyword[if] literal[string] keyword[in] identifier[kwargs] keyword[and] keyword[not] identifier[kwargs] [ literal[string] ]:
identifier[repostr] += literal[string]
keyword[if] literal[string] keyword[in] identifier[kwargs] :
identifier[repostr] += literal[string] keyword[if] identifier[kwargs] [ literal[string] ] keyword[else] literal[string]
keyword[else] :
identifier[repostr] += literal[string] keyword[if] identifier[source] [ literal[string] ] keyword[else] literal[string]
identifier[repo_alias] = identifier[kwargs] [ literal[string] ] keyword[if] literal[string] keyword[in] identifier[kwargs] keyword[else] identifier[repo]
keyword[if] literal[string] keyword[in] identifier[repo_alias] :
identifier[repostr] += literal[string] . identifier[format] ( identifier[repo_alias] )
keyword[else] :
identifier[repostr] += literal[string] . identifier[format] ( identifier[repo_alias] )
identifier[repostr] += literal[string] . identifier[format] ( identifier[kwargs] [ literal[string] ] keyword[if] literal[string] keyword[in] identifier[kwargs] keyword[else] identifier[source] [ literal[string] ])
identifier[trusted] = identifier[kwargs] . identifier[get] ( literal[string] )
identifier[repostr] = identifier[_set_trusted_option_if_needed] ( identifier[repostr] , identifier[trusted] ) keyword[if] identifier[trusted] keyword[is] keyword[not] keyword[None] keyword[else] identifier[_set_trusted_option_if_needed] ( identifier[repostr] , identifier[source] . identifier[get] ( literal[string] ))
identifier[_mod_repo_in_file] ( identifier[repo] , identifier[repostr] , identifier[source] [ literal[string] ])
keyword[elif] identifier[uri] keyword[and] identifier[source] [ literal[string] ]== identifier[uri] :
keyword[raise] identifier[CommandExecutionError] (
literal[string] . identifier[format] ( identifier[uri] , identifier[source] [ literal[string] ]))
keyword[if] keyword[not] identifier[found] :
keyword[if] literal[string] keyword[not] keyword[in] identifier[kwargs] :
keyword[raise] identifier[CommandExecutionError] (
literal[string] . identifier[format] ( identifier[repo] ))
identifier[properties] ={ literal[string] : identifier[kwargs] [ literal[string] ]}
identifier[properties] [ literal[string] ]= identifier[kwargs] [ literal[string] ] keyword[if] literal[string] keyword[in] identifier[kwargs] keyword[else] keyword[True]
identifier[properties] [ literal[string] ]= identifier[kwargs] [ literal[string] ] keyword[if] literal[string] keyword[in] identifier[kwargs] keyword[else] keyword[True]
identifier[properties] [ literal[string] ]= identifier[kwargs] . identifier[get] ( literal[string] )
identifier[_add_new_repo] ( identifier[repo] , identifier[properties] )
keyword[if] literal[string] keyword[in] identifier[kwargs] :
identifier[refresh_db] ()
|
def mod_repo(repo, **kwargs):
"""
Modify one or more values for a repo. If the repo does not exist, it will
be created, so long as uri is defined.
The following options are available to modify a repo definition:
repo
alias by which opkg refers to the repo.
uri
the URI to the repo.
compressed
defines (True or False) if the index file is compressed
enabled
enable or disable (True or False) repository
but do not remove if disabled.
refresh
enable or disable (True or False) auto-refresh of the repositories
CLI Examples:
.. code-block:: bash
salt '*' pkg.mod_repo repo uri=http://new/uri
salt '*' pkg.mod_repo repo enabled=False
"""
repos = list_repos()
found = False
uri = ''
if 'uri' in kwargs:
uri = kwargs['uri'] # depends on [control=['if'], data=['kwargs']]
for repository in repos:
source = repos[repository][0]
if source['name'] == repo:
found = True
repostr = ''
if 'enabled' in kwargs and (not kwargs['enabled']):
repostr += '# ' # depends on [control=['if'], data=[]]
if 'compressed' in kwargs:
repostr += 'src/gz ' if kwargs['compressed'] else 'src' # depends on [control=['if'], data=['kwargs']]
else:
repostr += 'src/gz' if source['compressed'] else 'src'
repo_alias = kwargs['alias'] if 'alias' in kwargs else repo
if ' ' in repo_alias:
repostr += ' "{0}"'.format(repo_alias) # depends on [control=['if'], data=['repo_alias']]
else:
repostr += ' {0}'.format(repo_alias)
repostr += ' {0}'.format(kwargs['uri'] if 'uri' in kwargs else source['uri'])
trusted = kwargs.get('trusted')
repostr = _set_trusted_option_if_needed(repostr, trusted) if trusted is not None else _set_trusted_option_if_needed(repostr, source.get('trusted'))
_mod_repo_in_file(repo, repostr, source['file']) # depends on [control=['if'], data=['repo']]
elif uri and source['uri'] == uri:
raise CommandExecutionError("Repository '{0}' already exists as '{1}'.".format(uri, source['name'])) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['repository']]
if not found:
# Need to add a new repo
if 'uri' not in kwargs:
raise CommandExecutionError("Repository '{0}' not found and no URI passed to create one.".format(repo)) # depends on [control=['if'], data=[]]
properties = {'uri': kwargs['uri']}
# If compressed is not defined, assume True
properties['compressed'] = kwargs['compressed'] if 'compressed' in kwargs else True
# If enabled is not defined, assume True
properties['enabled'] = kwargs['enabled'] if 'enabled' in kwargs else True
properties['trusted'] = kwargs.get('trusted')
_add_new_repo(repo, properties) # depends on [control=['if'], data=[]]
if 'refresh' in kwargs:
refresh_db() # depends on [control=['if'], data=[]]
|
def generate_run_info():
"""
获取当前运行状态
"""
uptime = datetime.datetime.now() - datetime.datetime.fromtimestamp(glb.run_info.create_time())
memory_usage = glb.run_info.memory_info().rss
msg = '[当前时间] {now:%H:%M:%S}\n[运行时间] {uptime}\n[内存占用] {memory}\n[发送消息] {messages}'.format(
now=datetime.datetime.now(),
uptime=str(uptime).split('.')[0],
memory='{:.2f} MB'.format(memory_usage / 1024 ** 2),
messages=len(glb.wxbot.bot.messages)
)
return msg
|
def function[generate_run_info, parameter[]]:
constant[
获取当前运行状态
]
variable[uptime] assign[=] binary_operation[call[name[datetime].datetime.now, parameter[]] - call[name[datetime].datetime.fromtimestamp, parameter[call[name[glb].run_info.create_time, parameter[]]]]]
variable[memory_usage] assign[=] call[name[glb].run_info.memory_info, parameter[]].rss
variable[msg] assign[=] call[constant[[当前时间] {now:%H:%M:%S}
[运行时间] {uptime}
[内存占用] {memory}
[发送消息] {messages}].format, parameter[]]
return[name[msg]]
|
keyword[def] identifier[generate_run_info] ():
literal[string]
identifier[uptime] = identifier[datetime] . identifier[datetime] . identifier[now] ()- identifier[datetime] . identifier[datetime] . identifier[fromtimestamp] ( identifier[glb] . identifier[run_info] . identifier[create_time] ())
identifier[memory_usage] = identifier[glb] . identifier[run_info] . identifier[memory_info] (). identifier[rss]
identifier[msg] = literal[string] . identifier[format] (
identifier[now] = identifier[datetime] . identifier[datetime] . identifier[now] (),
identifier[uptime] = identifier[str] ( identifier[uptime] ). identifier[split] ( literal[string] )[ literal[int] ],
identifier[memory] = literal[string] . identifier[format] ( identifier[memory_usage] / literal[int] ** literal[int] ),
identifier[messages] = identifier[len] ( identifier[glb] . identifier[wxbot] . identifier[bot] . identifier[messages] )
)
keyword[return] identifier[msg]
|
def generate_run_info():
"""
获取当前运行状态
"""
uptime = datetime.datetime.now() - datetime.datetime.fromtimestamp(glb.run_info.create_time())
memory_usage = glb.run_info.memory_info().rss
msg = '[当前时间] {now:%H:%M:%S}\n[运行时间] {uptime}\n[内存占用] {memory}\n[发送消息] {messages}'.format(now=datetime.datetime.now(), uptime=str(uptime).split('.')[0], memory='{:.2f} MB'.format(memory_usage / 1024 ** 2), messages=len(glb.wxbot.bot.messages))
return msg
|
def fetch_next(self):
"""A Future used with `gen.coroutine`_ to asynchronously retrieve the
next document in the result set, fetching a batch of documents from the
server if necessary. Resolves to ``False`` if there are no more
documents, otherwise :meth:`next_object` is guaranteed to return a
document.
.. _`gen.coroutine`: http://tornadoweb.org/en/stable/gen.html
.. doctest:: fetch_next
:hide:
>>> _ = MongoClient().test.test_collection.delete_many({})
>>> collection = MotorClient().test.test_collection
.. doctest:: fetch_next
>>> @gen.coroutine
... def f():
... yield collection.insert_many([{'_id': i} for i in range(5)])
... cursor = collection.find().sort([('_id', 1)])
... while (yield cursor.fetch_next):
... doc = cursor.next_object()
... sys.stdout.write(str(doc['_id']) + ', ')
... print('done')
...
>>> IOLoop.current().run_sync(f)
0, 1, 2, 3, 4, done
While it appears that fetch_next retrieves each document from
the server individually, the cursor actually fetches documents
efficiently in `large batches`_.
In Python 3.5 and newer, cursors can be iterated elegantly and very
efficiently in native coroutines with `async for`:
.. doctest:: fetch_next
>>> async def f():
... async for doc in collection.find():
... sys.stdout.write(str(doc['_id']) + ', ')
... print('done')
...
>>> IOLoop.current().run_sync(f)
0, 1, 2, 3, 4, done
.. _`large batches`: https://docs.mongodb.com/manual/tutorial/iterate-a-cursor/#cursor-batches
"""
if not self._buffer_size() and self.alive:
# Return the Future, which resolves to number of docs fetched or 0.
return self._get_more()
elif self._buffer_size():
future = self._framework.get_future(self.get_io_loop())
future.set_result(True)
return future
else:
# Dead
future = self._framework.get_future(self.get_io_loop())
future.set_result(False)
return future
|
def function[fetch_next, parameter[self]]:
constant[A Future used with `gen.coroutine`_ to asynchronously retrieve the
next document in the result set, fetching a batch of documents from the
server if necessary. Resolves to ``False`` if there are no more
documents, otherwise :meth:`next_object` is guaranteed to return a
document.
.. _`gen.coroutine`: http://tornadoweb.org/en/stable/gen.html
.. doctest:: fetch_next
:hide:
>>> _ = MongoClient().test.test_collection.delete_many({})
>>> collection = MotorClient().test.test_collection
.. doctest:: fetch_next
>>> @gen.coroutine
... def f():
... yield collection.insert_many([{'_id': i} for i in range(5)])
... cursor = collection.find().sort([('_id', 1)])
... while (yield cursor.fetch_next):
... doc = cursor.next_object()
... sys.stdout.write(str(doc['_id']) + ', ')
... print('done')
...
>>> IOLoop.current().run_sync(f)
0, 1, 2, 3, 4, done
While it appears that fetch_next retrieves each document from
the server individually, the cursor actually fetches documents
efficiently in `large batches`_.
In Python 3.5 and newer, cursors can be iterated elegantly and very
efficiently in native coroutines with `async for`:
.. doctest:: fetch_next
>>> async def f():
... async for doc in collection.find():
... sys.stdout.write(str(doc['_id']) + ', ')
... print('done')
...
>>> IOLoop.current().run_sync(f)
0, 1, 2, 3, 4, done
.. _`large batches`: https://docs.mongodb.com/manual/tutorial/iterate-a-cursor/#cursor-batches
]
if <ast.BoolOp object at 0x7da18f09ee30> begin[:]
return[call[name[self]._get_more, parameter[]]]
|
keyword[def] identifier[fetch_next] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[_buffer_size] () keyword[and] identifier[self] . identifier[alive] :
keyword[return] identifier[self] . identifier[_get_more] ()
keyword[elif] identifier[self] . identifier[_buffer_size] ():
identifier[future] = identifier[self] . identifier[_framework] . identifier[get_future] ( identifier[self] . identifier[get_io_loop] ())
identifier[future] . identifier[set_result] ( keyword[True] )
keyword[return] identifier[future]
keyword[else] :
identifier[future] = identifier[self] . identifier[_framework] . identifier[get_future] ( identifier[self] . identifier[get_io_loop] ())
identifier[future] . identifier[set_result] ( keyword[False] )
keyword[return] identifier[future]
|
def fetch_next(self):
"""A Future used with `gen.coroutine`_ to asynchronously retrieve the
next document in the result set, fetching a batch of documents from the
server if necessary. Resolves to ``False`` if there are no more
documents, otherwise :meth:`next_object` is guaranteed to return a
document.
.. _`gen.coroutine`: http://tornadoweb.org/en/stable/gen.html
.. doctest:: fetch_next
:hide:
>>> _ = MongoClient().test.test_collection.delete_many({})
>>> collection = MotorClient().test.test_collection
.. doctest:: fetch_next
>>> @gen.coroutine
... def f():
... yield collection.insert_many([{'_id': i} for i in range(5)])
... cursor = collection.find().sort([('_id', 1)])
... while (yield cursor.fetch_next):
... doc = cursor.next_object()
... sys.stdout.write(str(doc['_id']) + ', ')
... print('done')
...
>>> IOLoop.current().run_sync(f)
0, 1, 2, 3, 4, done
While it appears that fetch_next retrieves each document from
the server individually, the cursor actually fetches documents
efficiently in `large batches`_.
In Python 3.5 and newer, cursors can be iterated elegantly and very
efficiently in native coroutines with `async for`:
.. doctest:: fetch_next
>>> async def f():
... async for doc in collection.find():
... sys.stdout.write(str(doc['_id']) + ', ')
... print('done')
...
>>> IOLoop.current().run_sync(f)
0, 1, 2, 3, 4, done
.. _`large batches`: https://docs.mongodb.com/manual/tutorial/iterate-a-cursor/#cursor-batches
"""
if not self._buffer_size() and self.alive:
# Return the Future, which resolves to number of docs fetched or 0.
return self._get_more() # depends on [control=['if'], data=[]]
elif self._buffer_size():
future = self._framework.get_future(self.get_io_loop())
future.set_result(True)
return future # depends on [control=['if'], data=[]]
else:
# Dead
future = self._framework.get_future(self.get_io_loop())
future.set_result(False)
return future
|
def get_bond_lengths(self, indices):
"""Return the distances between given atoms.
Calculates the distance between the atoms with
indices ``i`` and ``b``.
The indices can be given in three ways:
* As simple list ``[i, b]``
* As list of lists: ``[[i1, b1], [i2, b2]...]``
* As :class:`pd.DataFrame` where ``i`` is taken from the index and
``b`` from the respective column ``'b'``.
Args:
indices (list):
Returns:
:class:`numpy.ndarray`: Vector of angles in degrees.
"""
coords = ['x', 'y', 'z']
if isinstance(indices, pd.DataFrame):
i_pos = self.loc[indices.index, coords].values
b_pos = self.loc[indices.loc[:, 'b'], coords].values
else:
indices = np.array(indices)
if len(indices.shape) == 1:
indices = indices[None, :]
i_pos = self.loc[indices[:, 0], coords].values
b_pos = self.loc[indices[:, 1], coords].values
return np.linalg.norm(i_pos - b_pos, axis=1)
|
def function[get_bond_lengths, parameter[self, indices]]:
constant[Return the distances between given atoms.
Calculates the distance between the atoms with
indices ``i`` and ``b``.
The indices can be given in three ways:
* As simple list ``[i, b]``
* As list of lists: ``[[i1, b1], [i2, b2]...]``
* As :class:`pd.DataFrame` where ``i`` is taken from the index and
``b`` from the respective column ``'b'``.
Args:
indices (list):
Returns:
:class:`numpy.ndarray`: Vector of angles in degrees.
]
variable[coords] assign[=] list[[<ast.Constant object at 0x7da18f00f040>, <ast.Constant object at 0x7da18f00d930>, <ast.Constant object at 0x7da18f00e920>]]
if call[name[isinstance], parameter[name[indices], name[pd].DataFrame]] begin[:]
variable[i_pos] assign[=] call[name[self].loc][tuple[[<ast.Attribute object at 0x7da18f00d6f0>, <ast.Name object at 0x7da18f00fa30>]]].values
variable[b_pos] assign[=] call[name[self].loc][tuple[[<ast.Subscript object at 0x7da18f00f3d0>, <ast.Name object at 0x7da18f00ded0>]]].values
return[call[name[np].linalg.norm, parameter[binary_operation[name[i_pos] - name[b_pos]]]]]
|
keyword[def] identifier[get_bond_lengths] ( identifier[self] , identifier[indices] ):
literal[string]
identifier[coords] =[ literal[string] , literal[string] , literal[string] ]
keyword[if] identifier[isinstance] ( identifier[indices] , identifier[pd] . identifier[DataFrame] ):
identifier[i_pos] = identifier[self] . identifier[loc] [ identifier[indices] . identifier[index] , identifier[coords] ]. identifier[values]
identifier[b_pos] = identifier[self] . identifier[loc] [ identifier[indices] . identifier[loc] [:, literal[string] ], identifier[coords] ]. identifier[values]
keyword[else] :
identifier[indices] = identifier[np] . identifier[array] ( identifier[indices] )
keyword[if] identifier[len] ( identifier[indices] . identifier[shape] )== literal[int] :
identifier[indices] = identifier[indices] [ keyword[None] ,:]
identifier[i_pos] = identifier[self] . identifier[loc] [ identifier[indices] [:, literal[int] ], identifier[coords] ]. identifier[values]
identifier[b_pos] = identifier[self] . identifier[loc] [ identifier[indices] [:, literal[int] ], identifier[coords] ]. identifier[values]
keyword[return] identifier[np] . identifier[linalg] . identifier[norm] ( identifier[i_pos] - identifier[b_pos] , identifier[axis] = literal[int] )
|
def get_bond_lengths(self, indices):
"""Return the distances between given atoms.
Calculates the distance between the atoms with
indices ``i`` and ``b``.
The indices can be given in three ways:
* As simple list ``[i, b]``
* As list of lists: ``[[i1, b1], [i2, b2]...]``
* As :class:`pd.DataFrame` where ``i`` is taken from the index and
``b`` from the respective column ``'b'``.
Args:
indices (list):
Returns:
:class:`numpy.ndarray`: Vector of angles in degrees.
"""
coords = ['x', 'y', 'z']
if isinstance(indices, pd.DataFrame):
i_pos = self.loc[indices.index, coords].values
b_pos = self.loc[indices.loc[:, 'b'], coords].values # depends on [control=['if'], data=[]]
else:
indices = np.array(indices)
if len(indices.shape) == 1:
indices = indices[None, :] # depends on [control=['if'], data=[]]
i_pos = self.loc[indices[:, 0], coords].values
b_pos = self.loc[indices[:, 1], coords].values
return np.linalg.norm(i_pos - b_pos, axis=1)
|
def jpegrescan(ext_args):
"""Run the EXTERNAL program jpegrescan."""
args = copy.copy(_JPEGRESCAN_ARGS)
if Settings.jpegrescan_multithread:
args += ['-t']
if Settings.destroy_metadata:
args += ['-s']
args += [ext_args.old_filename, ext_args.new_filename]
extern.run_ext(args)
return _JPEG_FORMAT
|
def function[jpegrescan, parameter[ext_args]]:
constant[Run the EXTERNAL program jpegrescan.]
variable[args] assign[=] call[name[copy].copy, parameter[name[_JPEGRESCAN_ARGS]]]
if name[Settings].jpegrescan_multithread begin[:]
<ast.AugAssign object at 0x7da18fe933a0>
if name[Settings].destroy_metadata begin[:]
<ast.AugAssign object at 0x7da18fe901c0>
<ast.AugAssign object at 0x7da18fe92860>
call[name[extern].run_ext, parameter[name[args]]]
return[name[_JPEG_FORMAT]]
|
keyword[def] identifier[jpegrescan] ( identifier[ext_args] ):
literal[string]
identifier[args] = identifier[copy] . identifier[copy] ( identifier[_JPEGRESCAN_ARGS] )
keyword[if] identifier[Settings] . identifier[jpegrescan_multithread] :
identifier[args] +=[ literal[string] ]
keyword[if] identifier[Settings] . identifier[destroy_metadata] :
identifier[args] +=[ literal[string] ]
identifier[args] +=[ identifier[ext_args] . identifier[old_filename] , identifier[ext_args] . identifier[new_filename] ]
identifier[extern] . identifier[run_ext] ( identifier[args] )
keyword[return] identifier[_JPEG_FORMAT]
|
def jpegrescan(ext_args):
"""Run the EXTERNAL program jpegrescan."""
args = copy.copy(_JPEGRESCAN_ARGS)
if Settings.jpegrescan_multithread:
args += ['-t'] # depends on [control=['if'], data=[]]
if Settings.destroy_metadata:
args += ['-s'] # depends on [control=['if'], data=[]]
args += [ext_args.old_filename, ext_args.new_filename]
extern.run_ext(args)
return _JPEG_FORMAT
|
def w_diffuser_inner(sed_inputs=sed_dict):
"""Return the inner width of each diffuser in the sedimentation tank.
Parameters
----------
sed_inputs : dict
A dictionary of all of the constant inputs needed for sedimentation tank
calculations can be found in sed.yaml
Returns
-------
float
Inner width of each diffuser in the sedimentation tank
Examples
--------
>>> from aide_design.play import*
>>>
"""
return ut.ceil_nearest(w_diffuser_inner_min(sed_inputs).magnitude,
(np.arange(1/16,1/4,1/16)*u.inch).magnitude)
|
def function[w_diffuser_inner, parameter[sed_inputs]]:
constant[Return the inner width of each diffuser in the sedimentation tank.
Parameters
----------
sed_inputs : dict
A dictionary of all of the constant inputs needed for sedimentation tank
calculations can be found in sed.yaml
Returns
-------
float
Inner width of each diffuser in the sedimentation tank
Examples
--------
>>> from aide_design.play import*
>>>
]
return[call[name[ut].ceil_nearest, parameter[call[name[w_diffuser_inner_min], parameter[name[sed_inputs]]].magnitude, binary_operation[call[name[np].arange, parameter[binary_operation[constant[1] / constant[16]], binary_operation[constant[1] / constant[4]], binary_operation[constant[1] / constant[16]]]] * name[u].inch].magnitude]]]
|
keyword[def] identifier[w_diffuser_inner] ( identifier[sed_inputs] = identifier[sed_dict] ):
literal[string]
keyword[return] identifier[ut] . identifier[ceil_nearest] ( identifier[w_diffuser_inner_min] ( identifier[sed_inputs] ). identifier[magnitude] ,
( identifier[np] . identifier[arange] ( literal[int] / literal[int] , literal[int] / literal[int] , literal[int] / literal[int] )* identifier[u] . identifier[inch] ). identifier[magnitude] )
|
def w_diffuser_inner(sed_inputs=sed_dict):
"""Return the inner width of each diffuser in the sedimentation tank.
Parameters
----------
sed_inputs : dict
A dictionary of all of the constant inputs needed for sedimentation tank
calculations can be found in sed.yaml
Returns
-------
float
Inner width of each diffuser in the sedimentation tank
Examples
--------
>>> from aide_design.play import*
>>>
"""
return ut.ceil_nearest(w_diffuser_inner_min(sed_inputs).magnitude, (np.arange(1 / 16, 1 / 4, 1 / 16) * u.inch).magnitude)
|
def coerce_value(cls, v):
"""Coerce a value to the right type for the collection, or return it if
it is already of the right type."""
if isinstance(v, cls.itemtype):
return v
else:
try:
return cls.coerceitem(v)
except Exception as e:
raise exc.CollectionItemCoerceError(
itemtype=cls.itemtype,
colltype=cls,
passed=v,
exc=e,
)
|
def function[coerce_value, parameter[cls, v]]:
constant[Coerce a value to the right type for the collection, or return it if
it is already of the right type.]
if call[name[isinstance], parameter[name[v], name[cls].itemtype]] begin[:]
return[name[v]]
|
keyword[def] identifier[coerce_value] ( identifier[cls] , identifier[v] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[v] , identifier[cls] . identifier[itemtype] ):
keyword[return] identifier[v]
keyword[else] :
keyword[try] :
keyword[return] identifier[cls] . identifier[coerceitem] ( identifier[v] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
keyword[raise] identifier[exc] . identifier[CollectionItemCoerceError] (
identifier[itemtype] = identifier[cls] . identifier[itemtype] ,
identifier[colltype] = identifier[cls] ,
identifier[passed] = identifier[v] ,
identifier[exc] = identifier[e] ,
)
|
def coerce_value(cls, v):
"""Coerce a value to the right type for the collection, or return it if
it is already of the right type."""
if isinstance(v, cls.itemtype):
return v # depends on [control=['if'], data=[]]
else:
try:
return cls.coerceitem(v) # depends on [control=['try'], data=[]]
except Exception as e:
raise exc.CollectionItemCoerceError(itemtype=cls.itemtype, colltype=cls, passed=v, exc=e) # depends on [control=['except'], data=['e']]
|
def ConvCnstrMODMaskDcpl(*args, **kwargs):
"""A wrapper function that dynamically defines a class derived from
one of the implementations of the Convolutional Constrained MOD
with Mask Decoupling problems, and returns an object instantiated
with the provided. parameters. The wrapper is designed to allow the
appropriate object to be created by calling this function using the
same syntax as would be used if it were a class. The specific
implementation is selected by use of an additional keyword
argument 'method'. Valid values are:
- ``'ism'`` :
Use the implementation defined in :class:`.ConvCnstrMODMaskDcpl_IterSM`.
This method works well for a small number of training images, but is
very slow for larger training sets.
- ``'cg'`` :
Use the implementation defined in :class:`.ConvCnstrMODMaskDcpl_CG`.
This method is slower than ``'ism'`` for small training sets, but has
better run time scaling as the training set grows.
- ``'cns'`` :
Use the implementation defined in
:class:`.ConvCnstrMODMaskDcpl_Consensus`. This method is the best choice
for large training sets.
The default value is ``'cns'``.
"""
# Extract method selection argument or set default
if 'method' in kwargs:
method = kwargs['method']
del kwargs['method']
else:
method = 'cns'
# Assign base class depending on method selection argument
if method == 'ism':
base = ConvCnstrMODMaskDcpl_IterSM
elif method == 'cg':
base = ConvCnstrMODMaskDcpl_CG
elif method == 'cns':
base = ConvCnstrMODMaskDcpl_Consensus
else:
raise ValueError('Unknown ConvCnstrMODMaskDcpl solver method %s'
% method)
# Nested class with dynamically determined inheritance
class ConvCnstrMODMaskDcpl(base):
def __init__(self, *args, **kwargs):
super(ConvCnstrMODMaskDcpl, self).__init__(*args, **kwargs)
# Allow pickling of objects of type ConvCnstrMODMaskDcpl
_fix_dynamic_class_lookup(ConvCnstrMODMaskDcpl, method)
# Return object of the nested class type
return ConvCnstrMODMaskDcpl(*args, **kwargs)
|
def function[ConvCnstrMODMaskDcpl, parameter[]]:
constant[A wrapper function that dynamically defines a class derived from
one of the implementations of the Convolutional Constrained MOD
with Mask Decoupling problems, and returns an object instantiated
with the provided. parameters. The wrapper is designed to allow the
appropriate object to be created by calling this function using the
same syntax as would be used if it were a class. The specific
implementation is selected by use of an additional keyword
argument 'method'. Valid values are:
- ``'ism'`` :
Use the implementation defined in :class:`.ConvCnstrMODMaskDcpl_IterSM`.
This method works well for a small number of training images, but is
very slow for larger training sets.
- ``'cg'`` :
Use the implementation defined in :class:`.ConvCnstrMODMaskDcpl_CG`.
This method is slower than ``'ism'`` for small training sets, but has
better run time scaling as the training set grows.
- ``'cns'`` :
Use the implementation defined in
:class:`.ConvCnstrMODMaskDcpl_Consensus`. This method is the best choice
for large training sets.
The default value is ``'cns'``.
]
if compare[constant[method] in name[kwargs]] begin[:]
variable[method] assign[=] call[name[kwargs]][constant[method]]
<ast.Delete object at 0x7da1b0749300>
if compare[name[method] equal[==] constant[ism]] begin[:]
variable[base] assign[=] name[ConvCnstrMODMaskDcpl_IterSM]
class class[ConvCnstrMODMaskDcpl, parameter[]] begin[:]
def function[__init__, parameter[self]]:
call[call[name[super], parameter[name[ConvCnstrMODMaskDcpl], name[self]]].__init__, parameter[<ast.Starred object at 0x7da1b07f8130>]]
call[name[_fix_dynamic_class_lookup], parameter[name[ConvCnstrMODMaskDcpl], name[method]]]
return[call[name[ConvCnstrMODMaskDcpl], parameter[<ast.Starred object at 0x7da1b07f8610>]]]
|
keyword[def] identifier[ConvCnstrMODMaskDcpl] (* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[if] literal[string] keyword[in] identifier[kwargs] :
identifier[method] = identifier[kwargs] [ literal[string] ]
keyword[del] identifier[kwargs] [ literal[string] ]
keyword[else] :
identifier[method] = literal[string]
keyword[if] identifier[method] == literal[string] :
identifier[base] = identifier[ConvCnstrMODMaskDcpl_IterSM]
keyword[elif] identifier[method] == literal[string] :
identifier[base] = identifier[ConvCnstrMODMaskDcpl_CG]
keyword[elif] identifier[method] == literal[string] :
identifier[base] = identifier[ConvCnstrMODMaskDcpl_Consensus]
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string]
% identifier[method] )
keyword[class] identifier[ConvCnstrMODMaskDcpl] ( identifier[base] ):
keyword[def] identifier[__init__] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
identifier[super] ( identifier[ConvCnstrMODMaskDcpl] , identifier[self] ). identifier[__init__] (* identifier[args] ,** identifier[kwargs] )
identifier[_fix_dynamic_class_lookup] ( identifier[ConvCnstrMODMaskDcpl] , identifier[method] )
keyword[return] identifier[ConvCnstrMODMaskDcpl] (* identifier[args] ,** identifier[kwargs] )
|
def ConvCnstrMODMaskDcpl(*args, **kwargs):
"""A wrapper function that dynamically defines a class derived from
one of the implementations of the Convolutional Constrained MOD
with Mask Decoupling problems, and returns an object instantiated
with the provided. parameters. The wrapper is designed to allow the
appropriate object to be created by calling this function using the
same syntax as would be used if it were a class. The specific
implementation is selected by use of an additional keyword
argument 'method'. Valid values are:
- ``'ism'`` :
Use the implementation defined in :class:`.ConvCnstrMODMaskDcpl_IterSM`.
This method works well for a small number of training images, but is
very slow for larger training sets.
- ``'cg'`` :
Use the implementation defined in :class:`.ConvCnstrMODMaskDcpl_CG`.
This method is slower than ``'ism'`` for small training sets, but has
better run time scaling as the training set grows.
- ``'cns'`` :
Use the implementation defined in
:class:`.ConvCnstrMODMaskDcpl_Consensus`. This method is the best choice
for large training sets.
The default value is ``'cns'``.
"""
# Extract method selection argument or set default
if 'method' in kwargs:
method = kwargs['method']
del kwargs['method'] # depends on [control=['if'], data=['kwargs']]
else:
method = 'cns'
# Assign base class depending on method selection argument
if method == 'ism':
base = ConvCnstrMODMaskDcpl_IterSM # depends on [control=['if'], data=[]]
elif method == 'cg':
base = ConvCnstrMODMaskDcpl_CG # depends on [control=['if'], data=[]]
elif method == 'cns':
base = ConvCnstrMODMaskDcpl_Consensus # depends on [control=['if'], data=[]]
else:
raise ValueError('Unknown ConvCnstrMODMaskDcpl solver method %s' % method)
# Nested class with dynamically determined inheritance
class ConvCnstrMODMaskDcpl(base):
def __init__(self, *args, **kwargs):
super(ConvCnstrMODMaskDcpl, self).__init__(*args, **kwargs)
# Allow pickling of objects of type ConvCnstrMODMaskDcpl
_fix_dynamic_class_lookup(ConvCnstrMODMaskDcpl, method)
# Return object of the nested class type
return ConvCnstrMODMaskDcpl(*args, **kwargs)
|
def ntp_server_ip(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ntp = ET.SubElement(config, "ntp", xmlns="urn:brocade.com:mgmt:brocade-ntp")
server = ET.SubElement(ntp, "server")
use_vrf_key = ET.SubElement(server, "use-vrf")
use_vrf_key.text = kwargs.pop('use_vrf')
ip = ET.SubElement(server, "ip")
ip.text = kwargs.pop('ip')
callback = kwargs.pop('callback', self._callback)
return callback(config)
|
def function[ntp_server_ip, parameter[self]]:
constant[Auto Generated Code
]
variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]]
variable[ntp] assign[=] call[name[ET].SubElement, parameter[name[config], constant[ntp]]]
variable[server] assign[=] call[name[ET].SubElement, parameter[name[ntp], constant[server]]]
variable[use_vrf_key] assign[=] call[name[ET].SubElement, parameter[name[server], constant[use-vrf]]]
name[use_vrf_key].text assign[=] call[name[kwargs].pop, parameter[constant[use_vrf]]]
variable[ip] assign[=] call[name[ET].SubElement, parameter[name[server], constant[ip]]]
name[ip].text assign[=] call[name[kwargs].pop, parameter[constant[ip]]]
variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]]
return[call[name[callback], parameter[name[config]]]]
|
keyword[def] identifier[ntp_server_ip] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[config] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[ntp] = identifier[ET] . identifier[SubElement] ( identifier[config] , literal[string] , identifier[xmlns] = literal[string] )
identifier[server] = identifier[ET] . identifier[SubElement] ( identifier[ntp] , literal[string] )
identifier[use_vrf_key] = identifier[ET] . identifier[SubElement] ( identifier[server] , literal[string] )
identifier[use_vrf_key] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[ip] = identifier[ET] . identifier[SubElement] ( identifier[server] , literal[string] )
identifier[ip] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] )
keyword[return] identifier[callback] ( identifier[config] )
|
def ntp_server_ip(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element('config')
ntp = ET.SubElement(config, 'ntp', xmlns='urn:brocade.com:mgmt:brocade-ntp')
server = ET.SubElement(ntp, 'server')
use_vrf_key = ET.SubElement(server, 'use-vrf')
use_vrf_key.text = kwargs.pop('use_vrf')
ip = ET.SubElement(server, 'ip')
ip.text = kwargs.pop('ip')
callback = kwargs.pop('callback', self._callback)
return callback(config)
|
async def _do_ping(self):
""" Ping the agents """
# the list() call here is needed, as we remove entries from _registered_agents!
for agent_addr, friendly_name in list(self._registered_agents.items()):
try:
ping_count = self._ping_count.get(agent_addr, 0)
if ping_count > 5:
self._logger.warning("Agent %s (%s) does not respond: removing from list.", agent_addr, friendly_name)
delete_agent = True
else:
self._ping_count[agent_addr] = ping_count + 1
await ZMQUtils.send_with_addr(self._agent_socket, agent_addr, Ping())
delete_agent = False
except:
# This should not happen, but it's better to check anyway.
self._logger.exception("Failed to send ping to agent %s (%s). Removing it from list.", agent_addr, friendly_name)
delete_agent = True
if delete_agent:
try:
await self._delete_agent(agent_addr)
except:
self._logger.exception("Failed to delete agent %s (%s)!", agent_addr, friendly_name)
self._loop.call_later(1, self._create_safe_task, self._do_ping())
|
<ast.AsyncFunctionDef object at 0x7da207f98bb0>
|
keyword[async] keyword[def] identifier[_do_ping] ( identifier[self] ):
literal[string]
keyword[for] identifier[agent_addr] , identifier[friendly_name] keyword[in] identifier[list] ( identifier[self] . identifier[_registered_agents] . identifier[items] ()):
keyword[try] :
identifier[ping_count] = identifier[self] . identifier[_ping_count] . identifier[get] ( identifier[agent_addr] , literal[int] )
keyword[if] identifier[ping_count] > literal[int] :
identifier[self] . identifier[_logger] . identifier[warning] ( literal[string] , identifier[agent_addr] , identifier[friendly_name] )
identifier[delete_agent] = keyword[True]
keyword[else] :
identifier[self] . identifier[_ping_count] [ identifier[agent_addr] ]= identifier[ping_count] + literal[int]
keyword[await] identifier[ZMQUtils] . identifier[send_with_addr] ( identifier[self] . identifier[_agent_socket] , identifier[agent_addr] , identifier[Ping] ())
identifier[delete_agent] = keyword[False]
keyword[except] :
identifier[self] . identifier[_logger] . identifier[exception] ( literal[string] , identifier[agent_addr] , identifier[friendly_name] )
identifier[delete_agent] = keyword[True]
keyword[if] identifier[delete_agent] :
keyword[try] :
keyword[await] identifier[self] . identifier[_delete_agent] ( identifier[agent_addr] )
keyword[except] :
identifier[self] . identifier[_logger] . identifier[exception] ( literal[string] , identifier[agent_addr] , identifier[friendly_name] )
identifier[self] . identifier[_loop] . identifier[call_later] ( literal[int] , identifier[self] . identifier[_create_safe_task] , identifier[self] . identifier[_do_ping] ())
|
async def _do_ping(self):
""" Ping the agents """
# the list() call here is needed, as we remove entries from _registered_agents!
for (agent_addr, friendly_name) in list(self._registered_agents.items()):
try:
ping_count = self._ping_count.get(agent_addr, 0)
if ping_count > 5:
self._logger.warning('Agent %s (%s) does not respond: removing from list.', agent_addr, friendly_name)
delete_agent = True # depends on [control=['if'], data=[]]
else:
self._ping_count[agent_addr] = ping_count + 1
await ZMQUtils.send_with_addr(self._agent_socket, agent_addr, Ping())
delete_agent = False # depends on [control=['try'], data=[]]
except:
# This should not happen, but it's better to check anyway.
self._logger.exception('Failed to send ping to agent %s (%s). Removing it from list.', agent_addr, friendly_name)
delete_agent = True # depends on [control=['except'], data=[]]
if delete_agent:
try:
await self._delete_agent(agent_addr) # depends on [control=['try'], data=[]]
except:
self._logger.exception('Failed to delete agent %s (%s)!', agent_addr, friendly_name) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
self._loop.call_later(1, self._create_safe_task, self._do_ping())
|
def pad_trajectories(trajectories, boundary=20):
"""Pad trajectories to a bucket length that is a multiple of boundary.
Args:
trajectories: list[(observation, actions, rewards)], where each observation
is shaped (t+1,) + OBS and actions & rewards are shaped (t,), with the
length of the list being B (batch size).
boundary: int, bucket length, the actions and rewards are padded to integer
multiples of boundary.
Returns:
tuple: (padding lengths, reward_mask, padded_observations, padded_actions,
padded_rewards) where padded_observations is shaped (B, T+1) + OBS and
padded_actions, padded_rewards & reward_mask are shaped (B, T).
Where T is max(t) rounded up to an integer multiple of boundary.
padded_length is how much padding we've added and
reward_mask is 1s for actual rewards and 0s for the padding.
"""
# Let's compute max(t) over all trajectories.
t_max = max(r.shape[0] for (_, _, r) in trajectories)
# t_max is rounded to the next multiple of `boundary`
boundary = int(boundary)
bucket_length = boundary * int(np.ceil(float(t_max) / boundary))
# So all obs will be padded to t_max + 1 and actions and rewards to t_max.
padded_observations = []
padded_actions = []
padded_rewards = []
padded_lengths = []
reward_masks = []
for (o, a, r) in trajectories:
# Determine the amount to pad, this holds true for obs, actions and rewards.
num_to_pad = bucket_length + 1 - o.shape[0]
padded_lengths.append(num_to_pad)
if num_to_pad == 0:
padded_observations.append(o)
padded_actions.append(a)
padded_rewards.append(r)
reward_masks.append(onp.ones_like(r, dtype=np.int32))
continue
# First pad observations.
padding_config = [(0, num_to_pad, 0)]
for _ in range(o.ndim - 1):
padding_config.append((0, 0, 0))
padding_config = tuple(padding_config)
padding_value = get_padding_value(o.dtype)
action_padding_value = get_padding_value(a.dtype)
reward_padding_value = get_padding_value(r.dtype)
padded_obs = lax.pad(o, padding_value, padding_config)
padded_observations.append(padded_obs)
# Now pad actions and rewards.
assert a.ndim == 1 and r.ndim == 1
padding_config = ((0, num_to_pad, 0),)
padded_action = lax.pad(a, action_padding_value, padding_config)
padded_actions.append(padded_action)
padded_reward = lax.pad(r, reward_padding_value, padding_config)
padded_rewards.append(padded_reward)
# Also create the mask to use later.
reward_mask = onp.ones_like(r, dtype=np.int32)
reward_masks.append(lax.pad(reward_mask, 0, padding_config))
return padded_lengths, np.stack(reward_masks), np.stack(
padded_observations), np.stack(padded_actions), np.stack(padded_rewards)
|
def function[pad_trajectories, parameter[trajectories, boundary]]:
constant[Pad trajectories to a bucket length that is a multiple of boundary.
Args:
trajectories: list[(observation, actions, rewards)], where each observation
is shaped (t+1,) + OBS and actions & rewards are shaped (t,), with the
length of the list being B (batch size).
boundary: int, bucket length, the actions and rewards are padded to integer
multiples of boundary.
Returns:
tuple: (padding lengths, reward_mask, padded_observations, padded_actions,
padded_rewards) where padded_observations is shaped (B, T+1) + OBS and
padded_actions, padded_rewards & reward_mask are shaped (B, T).
Where T is max(t) rounded up to an integer multiple of boundary.
padded_length is how much padding we've added and
reward_mask is 1s for actual rewards and 0s for the padding.
]
variable[t_max] assign[=] call[name[max], parameter[<ast.GeneratorExp object at 0x7da1b2061bd0>]]
variable[boundary] assign[=] call[name[int], parameter[name[boundary]]]
variable[bucket_length] assign[=] binary_operation[name[boundary] * call[name[int], parameter[call[name[np].ceil, parameter[binary_operation[call[name[float], parameter[name[t_max]]] / name[boundary]]]]]]]
variable[padded_observations] assign[=] list[[]]
variable[padded_actions] assign[=] list[[]]
variable[padded_rewards] assign[=] list[[]]
variable[padded_lengths] assign[=] list[[]]
variable[reward_masks] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b2062590>, <ast.Name object at 0x7da1b2061840>, <ast.Name object at 0x7da1b2062c20>]]] in starred[name[trajectories]] begin[:]
variable[num_to_pad] assign[=] binary_operation[binary_operation[name[bucket_length] + constant[1]] - call[name[o].shape][constant[0]]]
call[name[padded_lengths].append, parameter[name[num_to_pad]]]
if compare[name[num_to_pad] equal[==] constant[0]] begin[:]
call[name[padded_observations].append, parameter[name[o]]]
call[name[padded_actions].append, parameter[name[a]]]
call[name[padded_rewards].append, parameter[name[r]]]
call[name[reward_masks].append, parameter[call[name[onp].ones_like, parameter[name[r]]]]]
continue
variable[padding_config] assign[=] list[[<ast.Tuple object at 0x7da1b2061ea0>]]
for taget[name[_]] in starred[call[name[range], parameter[binary_operation[name[o].ndim - constant[1]]]]] begin[:]
call[name[padding_config].append, parameter[tuple[[<ast.Constant object at 0x7da1b2062170>, <ast.Constant object at 0x7da1b20626e0>, <ast.Constant object at 0x7da1b2063f70>]]]]
variable[padding_config] assign[=] call[name[tuple], parameter[name[padding_config]]]
variable[padding_value] assign[=] call[name[get_padding_value], parameter[name[o].dtype]]
variable[action_padding_value] assign[=] call[name[get_padding_value], parameter[name[a].dtype]]
variable[reward_padding_value] assign[=] call[name[get_padding_value], parameter[name[r].dtype]]
variable[padded_obs] assign[=] call[name[lax].pad, parameter[name[o], name[padding_value], name[padding_config]]]
call[name[padded_observations].append, parameter[name[padded_obs]]]
assert[<ast.BoolOp object at 0x7da1b2063d60>]
variable[padding_config] assign[=] tuple[[<ast.Tuple object at 0x7da1b1e10490>]]
variable[padded_action] assign[=] call[name[lax].pad, parameter[name[a], name[action_padding_value], name[padding_config]]]
call[name[padded_actions].append, parameter[name[padded_action]]]
variable[padded_reward] assign[=] call[name[lax].pad, parameter[name[r], name[reward_padding_value], name[padding_config]]]
call[name[padded_rewards].append, parameter[name[padded_reward]]]
variable[reward_mask] assign[=] call[name[onp].ones_like, parameter[name[r]]]
call[name[reward_masks].append, parameter[call[name[lax].pad, parameter[name[reward_mask], constant[0], name[padding_config]]]]]
return[tuple[[<ast.Name object at 0x7da1b1e11600>, <ast.Call object at 0x7da1b1e12da0>, <ast.Call object at 0x7da1b1e11420>, <ast.Call object at 0x7da1b1e102b0>, <ast.Call object at 0x7da1b1e125f0>]]]
|
keyword[def] identifier[pad_trajectories] ( identifier[trajectories] , identifier[boundary] = literal[int] ):
literal[string]
identifier[t_max] = identifier[max] ( identifier[r] . identifier[shape] [ literal[int] ] keyword[for] ( identifier[_] , identifier[_] , identifier[r] ) keyword[in] identifier[trajectories] )
identifier[boundary] = identifier[int] ( identifier[boundary] )
identifier[bucket_length] = identifier[boundary] * identifier[int] ( identifier[np] . identifier[ceil] ( identifier[float] ( identifier[t_max] )/ identifier[boundary] ))
identifier[padded_observations] =[]
identifier[padded_actions] =[]
identifier[padded_rewards] =[]
identifier[padded_lengths] =[]
identifier[reward_masks] =[]
keyword[for] ( identifier[o] , identifier[a] , identifier[r] ) keyword[in] identifier[trajectories] :
identifier[num_to_pad] = identifier[bucket_length] + literal[int] - identifier[o] . identifier[shape] [ literal[int] ]
identifier[padded_lengths] . identifier[append] ( identifier[num_to_pad] )
keyword[if] identifier[num_to_pad] == literal[int] :
identifier[padded_observations] . identifier[append] ( identifier[o] )
identifier[padded_actions] . identifier[append] ( identifier[a] )
identifier[padded_rewards] . identifier[append] ( identifier[r] )
identifier[reward_masks] . identifier[append] ( identifier[onp] . identifier[ones_like] ( identifier[r] , identifier[dtype] = identifier[np] . identifier[int32] ))
keyword[continue]
identifier[padding_config] =[( literal[int] , identifier[num_to_pad] , literal[int] )]
keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[o] . identifier[ndim] - literal[int] ):
identifier[padding_config] . identifier[append] (( literal[int] , literal[int] , literal[int] ))
identifier[padding_config] = identifier[tuple] ( identifier[padding_config] )
identifier[padding_value] = identifier[get_padding_value] ( identifier[o] . identifier[dtype] )
identifier[action_padding_value] = identifier[get_padding_value] ( identifier[a] . identifier[dtype] )
identifier[reward_padding_value] = identifier[get_padding_value] ( identifier[r] . identifier[dtype] )
identifier[padded_obs] = identifier[lax] . identifier[pad] ( identifier[o] , identifier[padding_value] , identifier[padding_config] )
identifier[padded_observations] . identifier[append] ( identifier[padded_obs] )
keyword[assert] identifier[a] . identifier[ndim] == literal[int] keyword[and] identifier[r] . identifier[ndim] == literal[int]
identifier[padding_config] =(( literal[int] , identifier[num_to_pad] , literal[int] ),)
identifier[padded_action] = identifier[lax] . identifier[pad] ( identifier[a] , identifier[action_padding_value] , identifier[padding_config] )
identifier[padded_actions] . identifier[append] ( identifier[padded_action] )
identifier[padded_reward] = identifier[lax] . identifier[pad] ( identifier[r] , identifier[reward_padding_value] , identifier[padding_config] )
identifier[padded_rewards] . identifier[append] ( identifier[padded_reward] )
identifier[reward_mask] = identifier[onp] . identifier[ones_like] ( identifier[r] , identifier[dtype] = identifier[np] . identifier[int32] )
identifier[reward_masks] . identifier[append] ( identifier[lax] . identifier[pad] ( identifier[reward_mask] , literal[int] , identifier[padding_config] ))
keyword[return] identifier[padded_lengths] , identifier[np] . identifier[stack] ( identifier[reward_masks] ), identifier[np] . identifier[stack] (
identifier[padded_observations] ), identifier[np] . identifier[stack] ( identifier[padded_actions] ), identifier[np] . identifier[stack] ( identifier[padded_rewards] )
|
def pad_trajectories(trajectories, boundary=20):
"""Pad trajectories to a bucket length that is a multiple of boundary.
Args:
trajectories: list[(observation, actions, rewards)], where each observation
is shaped (t+1,) + OBS and actions & rewards are shaped (t,), with the
length of the list being B (batch size).
boundary: int, bucket length, the actions and rewards are padded to integer
multiples of boundary.
Returns:
tuple: (padding lengths, reward_mask, padded_observations, padded_actions,
padded_rewards) where padded_observations is shaped (B, T+1) + OBS and
padded_actions, padded_rewards & reward_mask are shaped (B, T).
Where T is max(t) rounded up to an integer multiple of boundary.
padded_length is how much padding we've added and
reward_mask is 1s for actual rewards and 0s for the padding.
"""
# Let's compute max(t) over all trajectories.
t_max = max((r.shape[0] for (_, _, r) in trajectories))
# t_max is rounded to the next multiple of `boundary`
boundary = int(boundary)
bucket_length = boundary * int(np.ceil(float(t_max) / boundary))
# So all obs will be padded to t_max + 1 and actions and rewards to t_max.
padded_observations = []
padded_actions = []
padded_rewards = []
padded_lengths = []
reward_masks = []
for (o, a, r) in trajectories:
# Determine the amount to pad, this holds true for obs, actions and rewards.
num_to_pad = bucket_length + 1 - o.shape[0]
padded_lengths.append(num_to_pad)
if num_to_pad == 0:
padded_observations.append(o)
padded_actions.append(a)
padded_rewards.append(r)
reward_masks.append(onp.ones_like(r, dtype=np.int32))
continue # depends on [control=['if'], data=[]]
# First pad observations.
padding_config = [(0, num_to_pad, 0)]
for _ in range(o.ndim - 1):
padding_config.append((0, 0, 0)) # depends on [control=['for'], data=[]]
padding_config = tuple(padding_config)
padding_value = get_padding_value(o.dtype)
action_padding_value = get_padding_value(a.dtype)
reward_padding_value = get_padding_value(r.dtype)
padded_obs = lax.pad(o, padding_value, padding_config)
padded_observations.append(padded_obs)
# Now pad actions and rewards.
assert a.ndim == 1 and r.ndim == 1
padding_config = ((0, num_to_pad, 0),)
padded_action = lax.pad(a, action_padding_value, padding_config)
padded_actions.append(padded_action)
padded_reward = lax.pad(r, reward_padding_value, padding_config)
padded_rewards.append(padded_reward)
# Also create the mask to use later.
reward_mask = onp.ones_like(r, dtype=np.int32)
reward_masks.append(lax.pad(reward_mask, 0, padding_config)) # depends on [control=['for'], data=[]]
return (padded_lengths, np.stack(reward_masks), np.stack(padded_observations), np.stack(padded_actions), np.stack(padded_rewards))
|
def retrieve_xml(pdb_id, silent = True):
'''The RCSB website now compresses XML files.'''
xml_gz = retrieve_file_from_RCSB(get_rcsb_files_connection(), "/download/%s.xml.gz" % pdb_id, silent = silent)
cf = StringIO.StringIO()
cf.write(xml_gz)
cf.seek(0)
df = gzip.GzipFile(fileobj = cf, mode='rb')
contents = df.read()
df.close()
return contents
|
def function[retrieve_xml, parameter[pdb_id, silent]]:
constant[The RCSB website now compresses XML files.]
variable[xml_gz] assign[=] call[name[retrieve_file_from_RCSB], parameter[call[name[get_rcsb_files_connection], parameter[]], binary_operation[constant[/download/%s.xml.gz] <ast.Mod object at 0x7da2590d6920> name[pdb_id]]]]
variable[cf] assign[=] call[name[StringIO].StringIO, parameter[]]
call[name[cf].write, parameter[name[xml_gz]]]
call[name[cf].seek, parameter[constant[0]]]
variable[df] assign[=] call[name[gzip].GzipFile, parameter[]]
variable[contents] assign[=] call[name[df].read, parameter[]]
call[name[df].close, parameter[]]
return[name[contents]]
|
keyword[def] identifier[retrieve_xml] ( identifier[pdb_id] , identifier[silent] = keyword[True] ):
literal[string]
identifier[xml_gz] = identifier[retrieve_file_from_RCSB] ( identifier[get_rcsb_files_connection] (), literal[string] % identifier[pdb_id] , identifier[silent] = identifier[silent] )
identifier[cf] = identifier[StringIO] . identifier[StringIO] ()
identifier[cf] . identifier[write] ( identifier[xml_gz] )
identifier[cf] . identifier[seek] ( literal[int] )
identifier[df] = identifier[gzip] . identifier[GzipFile] ( identifier[fileobj] = identifier[cf] , identifier[mode] = literal[string] )
identifier[contents] = identifier[df] . identifier[read] ()
identifier[df] . identifier[close] ()
keyword[return] identifier[contents]
|
def retrieve_xml(pdb_id, silent=True):
"""The RCSB website now compresses XML files."""
xml_gz = retrieve_file_from_RCSB(get_rcsb_files_connection(), '/download/%s.xml.gz' % pdb_id, silent=silent)
cf = StringIO.StringIO()
cf.write(xml_gz)
cf.seek(0)
df = gzip.GzipFile(fileobj=cf, mode='rb')
contents = df.read()
df.close()
return contents
|
def delete(self, container_id=None, sudo=None):
'''delete an instance based on container_id.
Parameters
==========
container_id: the container_id to delete
sudo: whether to issue the command with sudo (or not)
a container started with sudo will belong to the root user
If started by a user, the user needs to control deleting it
if the user doesn't set to True/False, we use client self.sudo
Returns
=======
return_code: the return code from the delete command. 0 indicates a
successful delete, 255 indicates not.
'''
sudo = self._get_sudo(sudo)
container_id = self.get_container_id(container_id)
# singularity oci delete
cmd = self._init_command('delete')
# Add the container_id
cmd.append(container_id)
# Delete the container, return code goes to user (message to screen)
return self._run_and_return(cmd, sudo=sudo)
|
def function[delete, parameter[self, container_id, sudo]]:
constant[delete an instance based on container_id.
Parameters
==========
container_id: the container_id to delete
sudo: whether to issue the command with sudo (or not)
a container started with sudo will belong to the root user
If started by a user, the user needs to control deleting it
if the user doesn't set to True/False, we use client self.sudo
Returns
=======
return_code: the return code from the delete command. 0 indicates a
successful delete, 255 indicates not.
]
variable[sudo] assign[=] call[name[self]._get_sudo, parameter[name[sudo]]]
variable[container_id] assign[=] call[name[self].get_container_id, parameter[name[container_id]]]
variable[cmd] assign[=] call[name[self]._init_command, parameter[constant[delete]]]
call[name[cmd].append, parameter[name[container_id]]]
return[call[name[self]._run_and_return, parameter[name[cmd]]]]
|
keyword[def] identifier[delete] ( identifier[self] , identifier[container_id] = keyword[None] , identifier[sudo] = keyword[None] ):
literal[string]
identifier[sudo] = identifier[self] . identifier[_get_sudo] ( identifier[sudo] )
identifier[container_id] = identifier[self] . identifier[get_container_id] ( identifier[container_id] )
identifier[cmd] = identifier[self] . identifier[_init_command] ( literal[string] )
identifier[cmd] . identifier[append] ( identifier[container_id] )
keyword[return] identifier[self] . identifier[_run_and_return] ( identifier[cmd] , identifier[sudo] = identifier[sudo] )
|
def delete(self, container_id=None, sudo=None):
"""delete an instance based on container_id.
Parameters
==========
container_id: the container_id to delete
sudo: whether to issue the command with sudo (or not)
a container started with sudo will belong to the root user
If started by a user, the user needs to control deleting it
if the user doesn't set to True/False, we use client self.sudo
Returns
=======
return_code: the return code from the delete command. 0 indicates a
successful delete, 255 indicates not.
"""
sudo = self._get_sudo(sudo)
container_id = self.get_container_id(container_id)
# singularity oci delete
cmd = self._init_command('delete')
# Add the container_id
cmd.append(container_id)
# Delete the container, return code goes to user (message to screen)
return self._run_and_return(cmd, sudo=sudo)
|
def write_frame(self, buf):
'''
Write the frame into an existing buffer.
'''
writer = Writer(buf)
writer.write_octet(self.type())
writer.write_short(self.channel_id)
# Track the position where we're going to write the total length
# of the frame arguments.
stream_args_len_pos = len(buf)
writer.write_long(0)
stream_method_pos = len(buf)
writer.write_short(self._class_id)
writer.write_short(self._weight)
writer.write_longlong(self._size)
# Like frame parsing, branch to faster code for default properties
if self.DEFAULT_PROPERTIES:
# Track the position where we're going to write the flags.
flags_pos = len(buf)
writer.write_short(0)
flag_bits = 0
for key, proptype, rfunc, wfunc, mask in self.PROPERTIES:
val = self._properties.get(key, None)
if val is not None:
flag_bits |= mask
wfunc(writer, val)
writer.write_short_at(flag_bits, flags_pos)
else:
shift = 15
flag_bits = 0
flags = []
stack = deque()
for key, proptype, rfunc, wfunc, mask in self.PROPERTIES:
val = self._properties.get(key, None)
if val is not None:
if shift == 0:
flags.append(flag_bits)
flag_bits = 0
shift = 15
flag_bits |= (1 << shift)
stack.append((wfunc, val))
shift -= 1
flags.append(flag_bits)
for flag_bits in flags:
writer.write_short(flag_bits)
for method, val in stack:
method(writer, val)
# Write the total length back at the beginning of the frame
stream_len = len(buf) - stream_method_pos
writer.write_long_at(stream_len, stream_args_len_pos)
writer.write_octet(0xce)
|
def function[write_frame, parameter[self, buf]]:
constant[
Write the frame into an existing buffer.
]
variable[writer] assign[=] call[name[Writer], parameter[name[buf]]]
call[name[writer].write_octet, parameter[call[name[self].type, parameter[]]]]
call[name[writer].write_short, parameter[name[self].channel_id]]
variable[stream_args_len_pos] assign[=] call[name[len], parameter[name[buf]]]
call[name[writer].write_long, parameter[constant[0]]]
variable[stream_method_pos] assign[=] call[name[len], parameter[name[buf]]]
call[name[writer].write_short, parameter[name[self]._class_id]]
call[name[writer].write_short, parameter[name[self]._weight]]
call[name[writer].write_longlong, parameter[name[self]._size]]
if name[self].DEFAULT_PROPERTIES begin[:]
variable[flags_pos] assign[=] call[name[len], parameter[name[buf]]]
call[name[writer].write_short, parameter[constant[0]]]
variable[flag_bits] assign[=] constant[0]
for taget[tuple[[<ast.Name object at 0x7da18ede6560>, <ast.Name object at 0x7da18ede7820>, <ast.Name object at 0x7da18ede7b20>, <ast.Name object at 0x7da18ede6ef0>, <ast.Name object at 0x7da18ede7c70>]]] in starred[name[self].PROPERTIES] begin[:]
variable[val] assign[=] call[name[self]._properties.get, parameter[name[key], constant[None]]]
if compare[name[val] is_not constant[None]] begin[:]
<ast.AugAssign object at 0x7da18ede7490>
call[name[wfunc], parameter[name[writer], name[val]]]
call[name[writer].write_short_at, parameter[name[flag_bits], name[flags_pos]]]
variable[stream_len] assign[=] binary_operation[call[name[len], parameter[name[buf]]] - name[stream_method_pos]]
call[name[writer].write_long_at, parameter[name[stream_len], name[stream_args_len_pos]]]
call[name[writer].write_octet, parameter[constant[206]]]
|
keyword[def] identifier[write_frame] ( identifier[self] , identifier[buf] ):
literal[string]
identifier[writer] = identifier[Writer] ( identifier[buf] )
identifier[writer] . identifier[write_octet] ( identifier[self] . identifier[type] ())
identifier[writer] . identifier[write_short] ( identifier[self] . identifier[channel_id] )
identifier[stream_args_len_pos] = identifier[len] ( identifier[buf] )
identifier[writer] . identifier[write_long] ( literal[int] )
identifier[stream_method_pos] = identifier[len] ( identifier[buf] )
identifier[writer] . identifier[write_short] ( identifier[self] . identifier[_class_id] )
identifier[writer] . identifier[write_short] ( identifier[self] . identifier[_weight] )
identifier[writer] . identifier[write_longlong] ( identifier[self] . identifier[_size] )
keyword[if] identifier[self] . identifier[DEFAULT_PROPERTIES] :
identifier[flags_pos] = identifier[len] ( identifier[buf] )
identifier[writer] . identifier[write_short] ( literal[int] )
identifier[flag_bits] = literal[int]
keyword[for] identifier[key] , identifier[proptype] , identifier[rfunc] , identifier[wfunc] , identifier[mask] keyword[in] identifier[self] . identifier[PROPERTIES] :
identifier[val] = identifier[self] . identifier[_properties] . identifier[get] ( identifier[key] , keyword[None] )
keyword[if] identifier[val] keyword[is] keyword[not] keyword[None] :
identifier[flag_bits] |= identifier[mask]
identifier[wfunc] ( identifier[writer] , identifier[val] )
identifier[writer] . identifier[write_short_at] ( identifier[flag_bits] , identifier[flags_pos] )
keyword[else] :
identifier[shift] = literal[int]
identifier[flag_bits] = literal[int]
identifier[flags] =[]
identifier[stack] = identifier[deque] ()
keyword[for] identifier[key] , identifier[proptype] , identifier[rfunc] , identifier[wfunc] , identifier[mask] keyword[in] identifier[self] . identifier[PROPERTIES] :
identifier[val] = identifier[self] . identifier[_properties] . identifier[get] ( identifier[key] , keyword[None] )
keyword[if] identifier[val] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[shift] == literal[int] :
identifier[flags] . identifier[append] ( identifier[flag_bits] )
identifier[flag_bits] = literal[int]
identifier[shift] = literal[int]
identifier[flag_bits] |=( literal[int] << identifier[shift] )
identifier[stack] . identifier[append] (( identifier[wfunc] , identifier[val] ))
identifier[shift] -= literal[int]
identifier[flags] . identifier[append] ( identifier[flag_bits] )
keyword[for] identifier[flag_bits] keyword[in] identifier[flags] :
identifier[writer] . identifier[write_short] ( identifier[flag_bits] )
keyword[for] identifier[method] , identifier[val] keyword[in] identifier[stack] :
identifier[method] ( identifier[writer] , identifier[val] )
identifier[stream_len] = identifier[len] ( identifier[buf] )- identifier[stream_method_pos]
identifier[writer] . identifier[write_long_at] ( identifier[stream_len] , identifier[stream_args_len_pos] )
identifier[writer] . identifier[write_octet] ( literal[int] )
|
def write_frame(self, buf):
"""
Write the frame into an existing buffer.
"""
writer = Writer(buf)
writer.write_octet(self.type())
writer.write_short(self.channel_id)
# Track the position where we're going to write the total length
# of the frame arguments.
stream_args_len_pos = len(buf)
writer.write_long(0)
stream_method_pos = len(buf)
writer.write_short(self._class_id)
writer.write_short(self._weight)
writer.write_longlong(self._size)
# Like frame parsing, branch to faster code for default properties
if self.DEFAULT_PROPERTIES:
# Track the position where we're going to write the flags.
flags_pos = len(buf)
writer.write_short(0)
flag_bits = 0
for (key, proptype, rfunc, wfunc, mask) in self.PROPERTIES:
val = self._properties.get(key, None)
if val is not None:
flag_bits |= mask
wfunc(writer, val) # depends on [control=['if'], data=['val']] # depends on [control=['for'], data=[]]
writer.write_short_at(flag_bits, flags_pos) # depends on [control=['if'], data=[]]
else:
shift = 15
flag_bits = 0
flags = []
stack = deque()
for (key, proptype, rfunc, wfunc, mask) in self.PROPERTIES:
val = self._properties.get(key, None)
if val is not None:
if shift == 0:
flags.append(flag_bits)
flag_bits = 0
shift = 15 # depends on [control=['if'], data=['shift']]
flag_bits |= 1 << shift
stack.append((wfunc, val)) # depends on [control=['if'], data=['val']]
shift -= 1 # depends on [control=['for'], data=[]]
flags.append(flag_bits)
for flag_bits in flags:
writer.write_short(flag_bits) # depends on [control=['for'], data=['flag_bits']]
for (method, val) in stack:
method(writer, val) # depends on [control=['for'], data=[]]
# Write the total length back at the beginning of the frame
stream_len = len(buf) - stream_method_pos
writer.write_long_at(stream_len, stream_args_len_pos)
writer.write_octet(206)
|
def p_while_sentence(p):
""" statement : while_start co_statements_co label_end_while
| while_start program_co label_end_while
"""
gl.LOOPS.pop()
q = make_block(p[2], p[3])
if is_number(p[1]) and p[1].value:
if q is None:
warning(p[1].lineno, "Condition is always true and leads to an infinite loop.")
else:
warning(p[1].lineno, "Condition is always true and might lead to an infinite loop.")
p[0] = make_sentence('WHILE', p[1], q)
|
def function[p_while_sentence, parameter[p]]:
constant[ statement : while_start co_statements_co label_end_while
| while_start program_co label_end_while
]
call[name[gl].LOOPS.pop, parameter[]]
variable[q] assign[=] call[name[make_block], parameter[call[name[p]][constant[2]], call[name[p]][constant[3]]]]
if <ast.BoolOp object at 0x7da18f58f1c0> begin[:]
if compare[name[q] is constant[None]] begin[:]
call[name[warning], parameter[call[name[p]][constant[1]].lineno, constant[Condition is always true and leads to an infinite loop.]]]
call[name[p]][constant[0]] assign[=] call[name[make_sentence], parameter[constant[WHILE], call[name[p]][constant[1]], name[q]]]
|
keyword[def] identifier[p_while_sentence] ( identifier[p] ):
literal[string]
identifier[gl] . identifier[LOOPS] . identifier[pop] ()
identifier[q] = identifier[make_block] ( identifier[p] [ literal[int] ], identifier[p] [ literal[int] ])
keyword[if] identifier[is_number] ( identifier[p] [ literal[int] ]) keyword[and] identifier[p] [ literal[int] ]. identifier[value] :
keyword[if] identifier[q] keyword[is] keyword[None] :
identifier[warning] ( identifier[p] [ literal[int] ]. identifier[lineno] , literal[string] )
keyword[else] :
identifier[warning] ( identifier[p] [ literal[int] ]. identifier[lineno] , literal[string] )
identifier[p] [ literal[int] ]= identifier[make_sentence] ( literal[string] , identifier[p] [ literal[int] ], identifier[q] )
|
def p_while_sentence(p):
""" statement : while_start co_statements_co label_end_while
| while_start program_co label_end_while
"""
gl.LOOPS.pop()
q = make_block(p[2], p[3])
if is_number(p[1]) and p[1].value:
if q is None:
warning(p[1].lineno, 'Condition is always true and leads to an infinite loop.') # depends on [control=['if'], data=[]]
else:
warning(p[1].lineno, 'Condition is always true and might lead to an infinite loop.') # depends on [control=['if'], data=[]]
p[0] = make_sentence('WHILE', p[1], q)
|
def _analyze(self):
"""
The main analysis routine.
:return: None
"""
self._pre_analysis()
if self._graph_visitor is None:
# There is no base graph that we can rely on. The analysis itself should generate successors for the
# current job.
# An example is the CFG recovery.
self._analysis_core_baremetal()
else:
# We have a base graph to follow. Just handle the current job.
self._analysis_core_graph()
self._post_analysis()
|
def function[_analyze, parameter[self]]:
constant[
The main analysis routine.
:return: None
]
call[name[self]._pre_analysis, parameter[]]
if compare[name[self]._graph_visitor is constant[None]] begin[:]
call[name[self]._analysis_core_baremetal, parameter[]]
call[name[self]._post_analysis, parameter[]]
|
keyword[def] identifier[_analyze] ( identifier[self] ):
literal[string]
identifier[self] . identifier[_pre_analysis] ()
keyword[if] identifier[self] . identifier[_graph_visitor] keyword[is] keyword[None] :
identifier[self] . identifier[_analysis_core_baremetal] ()
keyword[else] :
identifier[self] . identifier[_analysis_core_graph] ()
identifier[self] . identifier[_post_analysis] ()
|
def _analyze(self):
"""
The main analysis routine.
:return: None
"""
self._pre_analysis()
if self._graph_visitor is None:
# There is no base graph that we can rely on. The analysis itself should generate successors for the
# current job.
# An example is the CFG recovery.
self._analysis_core_baremetal() # depends on [control=['if'], data=[]]
else:
# We have a base graph to follow. Just handle the current job.
self._analysis_core_graph()
self._post_analysis()
|
def format_cert_name(env='', account='', region='', certificate=None):
"""Format the SSL certificate name into ARN for ELB.
Args:
env (str): Account environment name
account (str): Account number for ARN
region (str): AWS Region.
certificate (str): Name of SSL certificate
Returns:
str: Fully qualified ARN for SSL certificate
None: Certificate is not desired
"""
cert_name = None
if certificate:
if certificate.startswith('arn'):
LOG.info("Full ARN provided...skipping lookup.")
cert_name = certificate
else:
generated_cert_name = generate_custom_cert_name(env, region, account, certificate)
if generated_cert_name:
LOG.info("Found generated certificate %s from template", generated_cert_name)
cert_name = generated_cert_name
else:
LOG.info("Using default certificate name logic")
cert_name = ('arn:aws:iam::{account}:server-certificate/{name}'.format(
account=account, name=certificate))
LOG.debug('Certificate name: %s', cert_name)
return cert_name
|
def function[format_cert_name, parameter[env, account, region, certificate]]:
constant[Format the SSL certificate name into ARN for ELB.
Args:
env (str): Account environment name
account (str): Account number for ARN
region (str): AWS Region.
certificate (str): Name of SSL certificate
Returns:
str: Fully qualified ARN for SSL certificate
None: Certificate is not desired
]
variable[cert_name] assign[=] constant[None]
if name[certificate] begin[:]
if call[name[certificate].startswith, parameter[constant[arn]]] begin[:]
call[name[LOG].info, parameter[constant[Full ARN provided...skipping lookup.]]]
variable[cert_name] assign[=] name[certificate]
call[name[LOG].debug, parameter[constant[Certificate name: %s], name[cert_name]]]
return[name[cert_name]]
|
keyword[def] identifier[format_cert_name] ( identifier[env] = literal[string] , identifier[account] = literal[string] , identifier[region] = literal[string] , identifier[certificate] = keyword[None] ):
literal[string]
identifier[cert_name] = keyword[None]
keyword[if] identifier[certificate] :
keyword[if] identifier[certificate] . identifier[startswith] ( literal[string] ):
identifier[LOG] . identifier[info] ( literal[string] )
identifier[cert_name] = identifier[certificate]
keyword[else] :
identifier[generated_cert_name] = identifier[generate_custom_cert_name] ( identifier[env] , identifier[region] , identifier[account] , identifier[certificate] )
keyword[if] identifier[generated_cert_name] :
identifier[LOG] . identifier[info] ( literal[string] , identifier[generated_cert_name] )
identifier[cert_name] = identifier[generated_cert_name]
keyword[else] :
identifier[LOG] . identifier[info] ( literal[string] )
identifier[cert_name] =( literal[string] . identifier[format] (
identifier[account] = identifier[account] , identifier[name] = identifier[certificate] ))
identifier[LOG] . identifier[debug] ( literal[string] , identifier[cert_name] )
keyword[return] identifier[cert_name]
|
def format_cert_name(env='', account='', region='', certificate=None):
"""Format the SSL certificate name into ARN for ELB.
Args:
env (str): Account environment name
account (str): Account number for ARN
region (str): AWS Region.
certificate (str): Name of SSL certificate
Returns:
str: Fully qualified ARN for SSL certificate
None: Certificate is not desired
"""
cert_name = None
if certificate:
if certificate.startswith('arn'):
LOG.info('Full ARN provided...skipping lookup.')
cert_name = certificate # depends on [control=['if'], data=[]]
else:
generated_cert_name = generate_custom_cert_name(env, region, account, certificate)
if generated_cert_name:
LOG.info('Found generated certificate %s from template', generated_cert_name)
cert_name = generated_cert_name # depends on [control=['if'], data=[]]
else:
LOG.info('Using default certificate name logic')
cert_name = 'arn:aws:iam::{account}:server-certificate/{name}'.format(account=account, name=certificate) # depends on [control=['if'], data=[]]
LOG.debug('Certificate name: %s', cert_name)
return cert_name
|
def set_up(self):
"""
This class overrides this method
"""
self.menu.pause()
curses.def_prog_mode()
self.menu.clear_screen()
|
def function[set_up, parameter[self]]:
constant[
This class overrides this method
]
call[name[self].menu.pause, parameter[]]
call[name[curses].def_prog_mode, parameter[]]
call[name[self].menu.clear_screen, parameter[]]
|
keyword[def] identifier[set_up] ( identifier[self] ):
literal[string]
identifier[self] . identifier[menu] . identifier[pause] ()
identifier[curses] . identifier[def_prog_mode] ()
identifier[self] . identifier[menu] . identifier[clear_screen] ()
|
def set_up(self):
"""
This class overrides this method
"""
self.menu.pause()
curses.def_prog_mode()
self.menu.clear_screen()
|
def get_tables_for_bind(self, bind=None):
"""Returns a list of all tables relevant for a bind."""
result = []
for table in itervalues(self.Model.metadata.tables):
if table.info.get('bind_key') == bind:
result.append(table)
return result
|
def function[get_tables_for_bind, parameter[self, bind]]:
constant[Returns a list of all tables relevant for a bind.]
variable[result] assign[=] list[[]]
for taget[name[table]] in starred[call[name[itervalues], parameter[name[self].Model.metadata.tables]]] begin[:]
if compare[call[name[table].info.get, parameter[constant[bind_key]]] equal[==] name[bind]] begin[:]
call[name[result].append, parameter[name[table]]]
return[name[result]]
|
keyword[def] identifier[get_tables_for_bind] ( identifier[self] , identifier[bind] = keyword[None] ):
literal[string]
identifier[result] =[]
keyword[for] identifier[table] keyword[in] identifier[itervalues] ( identifier[self] . identifier[Model] . identifier[metadata] . identifier[tables] ):
keyword[if] identifier[table] . identifier[info] . identifier[get] ( literal[string] )== identifier[bind] :
identifier[result] . identifier[append] ( identifier[table] )
keyword[return] identifier[result]
|
def get_tables_for_bind(self, bind=None):
"""Returns a list of all tables relevant for a bind."""
result = []
for table in itervalues(self.Model.metadata.tables):
if table.info.get('bind_key') == bind:
result.append(table) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['table']]
return result
|
def filter_out_spontaneous_genes(genes, custom_spont_id=None):
"""Return the DictList of genes that are not spontaneous in a model.
Args:
genes (DictList): Genes DictList
custom_spont_id (str): Optional custom spontaneous ID if it does not match the regular expression ``[Ss](_|)0001``
Returns:
DictList: genes excluding ones that are spontaneous
"""
new_genes = DictList()
for gene in genes:
if not is_spontaneous(gene, custom_id=custom_spont_id):
new_genes.append(gene)
return new_genes
|
def function[filter_out_spontaneous_genes, parameter[genes, custom_spont_id]]:
constant[Return the DictList of genes that are not spontaneous in a model.
Args:
genes (DictList): Genes DictList
custom_spont_id (str): Optional custom spontaneous ID if it does not match the regular expression ``[Ss](_|)0001``
Returns:
DictList: genes excluding ones that are spontaneous
]
variable[new_genes] assign[=] call[name[DictList], parameter[]]
for taget[name[gene]] in starred[name[genes]] begin[:]
if <ast.UnaryOp object at 0x7da204623af0> begin[:]
call[name[new_genes].append, parameter[name[gene]]]
return[name[new_genes]]
|
keyword[def] identifier[filter_out_spontaneous_genes] ( identifier[genes] , identifier[custom_spont_id] = keyword[None] ):
literal[string]
identifier[new_genes] = identifier[DictList] ()
keyword[for] identifier[gene] keyword[in] identifier[genes] :
keyword[if] keyword[not] identifier[is_spontaneous] ( identifier[gene] , identifier[custom_id] = identifier[custom_spont_id] ):
identifier[new_genes] . identifier[append] ( identifier[gene] )
keyword[return] identifier[new_genes]
|
def filter_out_spontaneous_genes(genes, custom_spont_id=None):
"""Return the DictList of genes that are not spontaneous in a model.
Args:
genes (DictList): Genes DictList
custom_spont_id (str): Optional custom spontaneous ID if it does not match the regular expression ``[Ss](_|)0001``
Returns:
DictList: genes excluding ones that are spontaneous
"""
new_genes = DictList()
for gene in genes:
if not is_spontaneous(gene, custom_id=custom_spont_id):
new_genes.append(gene) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['gene']]
return new_genes
|
def read_file(path):
"""Read the file from the given path.
If ``path`` is an absolute path, reads a file from the local filesystem. For relative paths, read the file
using the storage backend configured using :ref:`CA_FILE_STORAGE <settings-ca-file-storage>`.
"""
if os.path.isabs(path):
with wrap_file_exceptions():
with open(path, 'rb') as stream:
return stream.read()
with wrap_file_exceptions():
stream = ca_storage.open(path)
try:
return stream.read()
finally:
stream.close()
|
def function[read_file, parameter[path]]:
constant[Read the file from the given path.
If ``path`` is an absolute path, reads a file from the local filesystem. For relative paths, read the file
using the storage backend configured using :ref:`CA_FILE_STORAGE <settings-ca-file-storage>`.
]
if call[name[os].path.isabs, parameter[name[path]]] begin[:]
with call[name[wrap_file_exceptions], parameter[]] begin[:]
with call[name[open], parameter[name[path], constant[rb]]] begin[:]
return[call[name[stream].read, parameter[]]]
with call[name[wrap_file_exceptions], parameter[]] begin[:]
variable[stream] assign[=] call[name[ca_storage].open, parameter[name[path]]]
<ast.Try object at 0x7da20c991600>
|
keyword[def] identifier[read_file] ( identifier[path] ):
literal[string]
keyword[if] identifier[os] . identifier[path] . identifier[isabs] ( identifier[path] ):
keyword[with] identifier[wrap_file_exceptions] ():
keyword[with] identifier[open] ( identifier[path] , literal[string] ) keyword[as] identifier[stream] :
keyword[return] identifier[stream] . identifier[read] ()
keyword[with] identifier[wrap_file_exceptions] ():
identifier[stream] = identifier[ca_storage] . identifier[open] ( identifier[path] )
keyword[try] :
keyword[return] identifier[stream] . identifier[read] ()
keyword[finally] :
identifier[stream] . identifier[close] ()
|
def read_file(path):
"""Read the file from the given path.
If ``path`` is an absolute path, reads a file from the local filesystem. For relative paths, read the file
using the storage backend configured using :ref:`CA_FILE_STORAGE <settings-ca-file-storage>`.
"""
if os.path.isabs(path):
with wrap_file_exceptions():
with open(path, 'rb') as stream:
return stream.read() # depends on [control=['with'], data=['stream']] # depends on [control=['with'], data=[]] # depends on [control=['if'], data=[]]
with wrap_file_exceptions():
stream = ca_storage.open(path) # depends on [control=['with'], data=[]]
try:
return stream.read() # depends on [control=['try'], data=[]]
finally:
stream.close()
|
def edit(self, image_id, name=None, note=None, tag=None):
"""Edit image related details.
:param int image_id: The ID of the image
:param string name: Name of the Image.
:param string note: Note of the image.
:param string tag: Tags of the image to be updated to.
"""
obj = {}
if name:
obj['name'] = name
if note:
obj['note'] = note
if obj:
self.vgbdtg.editObject(obj, id=image_id)
if tag:
self.vgbdtg.setTags(str(tag), id=image_id)
return bool(name or note or tag)
|
def function[edit, parameter[self, image_id, name, note, tag]]:
constant[Edit image related details.
:param int image_id: The ID of the image
:param string name: Name of the Image.
:param string note: Note of the image.
:param string tag: Tags of the image to be updated to.
]
variable[obj] assign[=] dictionary[[], []]
if name[name] begin[:]
call[name[obj]][constant[name]] assign[=] name[name]
if name[note] begin[:]
call[name[obj]][constant[note]] assign[=] name[note]
if name[obj] begin[:]
call[name[self].vgbdtg.editObject, parameter[name[obj]]]
if name[tag] begin[:]
call[name[self].vgbdtg.setTags, parameter[call[name[str], parameter[name[tag]]]]]
return[call[name[bool], parameter[<ast.BoolOp object at 0x7da18ede5030>]]]
|
keyword[def] identifier[edit] ( identifier[self] , identifier[image_id] , identifier[name] = keyword[None] , identifier[note] = keyword[None] , identifier[tag] = keyword[None] ):
literal[string]
identifier[obj] ={}
keyword[if] identifier[name] :
identifier[obj] [ literal[string] ]= identifier[name]
keyword[if] identifier[note] :
identifier[obj] [ literal[string] ]= identifier[note]
keyword[if] identifier[obj] :
identifier[self] . identifier[vgbdtg] . identifier[editObject] ( identifier[obj] , identifier[id] = identifier[image_id] )
keyword[if] identifier[tag] :
identifier[self] . identifier[vgbdtg] . identifier[setTags] ( identifier[str] ( identifier[tag] ), identifier[id] = identifier[image_id] )
keyword[return] identifier[bool] ( identifier[name] keyword[or] identifier[note] keyword[or] identifier[tag] )
|
def edit(self, image_id, name=None, note=None, tag=None):
"""Edit image related details.
:param int image_id: The ID of the image
:param string name: Name of the Image.
:param string note: Note of the image.
:param string tag: Tags of the image to be updated to.
"""
obj = {}
if name:
obj['name'] = name # depends on [control=['if'], data=[]]
if note:
obj['note'] = note # depends on [control=['if'], data=[]]
if obj:
self.vgbdtg.editObject(obj, id=image_id) # depends on [control=['if'], data=[]]
if tag:
self.vgbdtg.setTags(str(tag), id=image_id) # depends on [control=['if'], data=[]]
return bool(name or note or tag)
|
def create_box_comments(self, box_key, message, **kwargs):
'''Creates a comments in a box with the provided attributes.
Args:
box_key key for box
message message string
kwargs {} see StreakComment object for more information
return (status code, comment dict)
'''
uri = '/'.join([
self.api_uri,
self.boxes_suffix,
box_key,
self.comments_suffix
])
if not (box_key and message):
return requests.codes.bad_request, None
kwargs.update({'message':message})
new_cmt = StreakComment(**kwargs)
#print(new_pl.attributes)
#print(new_pl.to_dict())
#raw_input()
code, r_data = self._req('put', uri, new_cmt.to_dict())
return code, r_data
|
def function[create_box_comments, parameter[self, box_key, message]]:
constant[Creates a comments in a box with the provided attributes.
Args:
box_key key for box
message message string
kwargs {} see StreakComment object for more information
return (status code, comment dict)
]
variable[uri] assign[=] call[constant[/].join, parameter[list[[<ast.Attribute object at 0x7da1b1594160>, <ast.Attribute object at 0x7da1b1595300>, <ast.Name object at 0x7da1b15976d0>, <ast.Attribute object at 0x7da1b1597250>]]]]
if <ast.UnaryOp object at 0x7da1b1595c90> begin[:]
return[tuple[[<ast.Attribute object at 0x7da1b15968c0>, <ast.Constant object at 0x7da18dc9ae30>]]]
call[name[kwargs].update, parameter[dictionary[[<ast.Constant object at 0x7da18dc9a170>], [<ast.Name object at 0x7da18dc98e20>]]]]
variable[new_cmt] assign[=] call[name[StreakComment], parameter[]]
<ast.Tuple object at 0x7da18dc99480> assign[=] call[name[self]._req, parameter[constant[put], name[uri], call[name[new_cmt].to_dict, parameter[]]]]
return[tuple[[<ast.Name object at 0x7da18dc9be50>, <ast.Name object at 0x7da18dc9bf10>]]]
|
keyword[def] identifier[create_box_comments] ( identifier[self] , identifier[box_key] , identifier[message] ,** identifier[kwargs] ):
literal[string]
identifier[uri] = literal[string] . identifier[join] ([
identifier[self] . identifier[api_uri] ,
identifier[self] . identifier[boxes_suffix] ,
identifier[box_key] ,
identifier[self] . identifier[comments_suffix]
])
keyword[if] keyword[not] ( identifier[box_key] keyword[and] identifier[message] ):
keyword[return] identifier[requests] . identifier[codes] . identifier[bad_request] , keyword[None]
identifier[kwargs] . identifier[update] ({ literal[string] : identifier[message] })
identifier[new_cmt] = identifier[StreakComment] (** identifier[kwargs] )
identifier[code] , identifier[r_data] = identifier[self] . identifier[_req] ( literal[string] , identifier[uri] , identifier[new_cmt] . identifier[to_dict] ())
keyword[return] identifier[code] , identifier[r_data]
|
def create_box_comments(self, box_key, message, **kwargs):
"""Creates a comments in a box with the provided attributes.
Args:
box_key key for box
message message string
kwargs {} see StreakComment object for more information
return (status code, comment dict)
"""
uri = '/'.join([self.api_uri, self.boxes_suffix, box_key, self.comments_suffix])
if not (box_key and message):
return (requests.codes.bad_request, None) # depends on [control=['if'], data=[]]
kwargs.update({'message': message})
new_cmt = StreakComment(**kwargs) #print(new_pl.attributes)
#print(new_pl.to_dict())
#raw_input()
(code, r_data) = self._req('put', uri, new_cmt.to_dict())
return (code, r_data)
|
def listen_forever(
self,
timeout_ms: int = 30000,
exception_handler: Callable[[Exception], None] = None,
bad_sync_timeout: int = 5,
):
"""
Keep listening for events forever.
Args:
timeout_ms: How long to poll the Home Server for before retrying.
exception_handler: Optional exception handler function which can
be used to handle exceptions in the caller thread.
bad_sync_timeout: Base time to wait after an error before retrying.
Will be increased according to exponential backoff.
"""
_bad_sync_timeout = bad_sync_timeout
self.should_listen = True
while self.should_listen:
try:
# may be killed and raise exception from _handle_thread
self._sync(timeout_ms)
_bad_sync_timeout = bad_sync_timeout
except MatrixRequestError as e:
log.warning('A MatrixRequestError occured during sync.')
if e.code >= 500:
log.warning(
'Problem occured serverside. Waiting',
wait_for=_bad_sync_timeout,
)
gevent.sleep(_bad_sync_timeout)
_bad_sync_timeout = min(_bad_sync_timeout * 2, self.bad_sync_timeout_limit)
else:
raise
except MatrixHttpLibError:
log.exception('A MatrixHttpLibError occured during sync.')
if self.should_listen:
gevent.sleep(_bad_sync_timeout)
_bad_sync_timeout = min(_bad_sync_timeout * 2, self.bad_sync_timeout_limit)
except Exception as e:
log.exception('Exception thrown during sync')
if exception_handler is not None:
exception_handler(e)
else:
raise
|
def function[listen_forever, parameter[self, timeout_ms, exception_handler, bad_sync_timeout]]:
constant[
Keep listening for events forever.
Args:
timeout_ms: How long to poll the Home Server for before retrying.
exception_handler: Optional exception handler function which can
be used to handle exceptions in the caller thread.
bad_sync_timeout: Base time to wait after an error before retrying.
Will be increased according to exponential backoff.
]
variable[_bad_sync_timeout] assign[=] name[bad_sync_timeout]
name[self].should_listen assign[=] constant[True]
while name[self].should_listen begin[:]
<ast.Try object at 0x7da1b170bb80>
|
keyword[def] identifier[listen_forever] (
identifier[self] ,
identifier[timeout_ms] : identifier[int] = literal[int] ,
identifier[exception_handler] : identifier[Callable] [[ identifier[Exception] ], keyword[None] ]= keyword[None] ,
identifier[bad_sync_timeout] : identifier[int] = literal[int] ,
):
literal[string]
identifier[_bad_sync_timeout] = identifier[bad_sync_timeout]
identifier[self] . identifier[should_listen] = keyword[True]
keyword[while] identifier[self] . identifier[should_listen] :
keyword[try] :
identifier[self] . identifier[_sync] ( identifier[timeout_ms] )
identifier[_bad_sync_timeout] = identifier[bad_sync_timeout]
keyword[except] identifier[MatrixRequestError] keyword[as] identifier[e] :
identifier[log] . identifier[warning] ( literal[string] )
keyword[if] identifier[e] . identifier[code] >= literal[int] :
identifier[log] . identifier[warning] (
literal[string] ,
identifier[wait_for] = identifier[_bad_sync_timeout] ,
)
identifier[gevent] . identifier[sleep] ( identifier[_bad_sync_timeout] )
identifier[_bad_sync_timeout] = identifier[min] ( identifier[_bad_sync_timeout] * literal[int] , identifier[self] . identifier[bad_sync_timeout_limit] )
keyword[else] :
keyword[raise]
keyword[except] identifier[MatrixHttpLibError] :
identifier[log] . identifier[exception] ( literal[string] )
keyword[if] identifier[self] . identifier[should_listen] :
identifier[gevent] . identifier[sleep] ( identifier[_bad_sync_timeout] )
identifier[_bad_sync_timeout] = identifier[min] ( identifier[_bad_sync_timeout] * literal[int] , identifier[self] . identifier[bad_sync_timeout_limit] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[log] . identifier[exception] ( literal[string] )
keyword[if] identifier[exception_handler] keyword[is] keyword[not] keyword[None] :
identifier[exception_handler] ( identifier[e] )
keyword[else] :
keyword[raise]
|
def listen_forever(self, timeout_ms: int=30000, exception_handler: Callable[[Exception], None]=None, bad_sync_timeout: int=5):
"""
Keep listening for events forever.
Args:
timeout_ms: How long to poll the Home Server for before retrying.
exception_handler: Optional exception handler function which can
be used to handle exceptions in the caller thread.
bad_sync_timeout: Base time to wait after an error before retrying.
Will be increased according to exponential backoff.
"""
_bad_sync_timeout = bad_sync_timeout
self.should_listen = True
while self.should_listen:
try:
# may be killed and raise exception from _handle_thread
self._sync(timeout_ms)
_bad_sync_timeout = bad_sync_timeout # depends on [control=['try'], data=[]]
except MatrixRequestError as e:
log.warning('A MatrixRequestError occured during sync.')
if e.code >= 500:
log.warning('Problem occured serverside. Waiting', wait_for=_bad_sync_timeout)
gevent.sleep(_bad_sync_timeout)
_bad_sync_timeout = min(_bad_sync_timeout * 2, self.bad_sync_timeout_limit) # depends on [control=['if'], data=[]]
else:
raise # depends on [control=['except'], data=['e']]
except MatrixHttpLibError:
log.exception('A MatrixHttpLibError occured during sync.')
if self.should_listen:
gevent.sleep(_bad_sync_timeout)
_bad_sync_timeout = min(_bad_sync_timeout * 2, self.bad_sync_timeout_limit) # depends on [control=['if'], data=[]] # depends on [control=['except'], data=[]]
except Exception as e:
log.exception('Exception thrown during sync')
if exception_handler is not None:
exception_handler(e) # depends on [control=['if'], data=['exception_handler']]
else:
raise # depends on [control=['except'], data=['e']] # depends on [control=['while'], data=[]]
|
def url_regex_generator(*, relative: bool, require_tld: bool) -> Pattern[str]:
"""
Url regex generator taken from Marshmallow library,
for details please follow library source code:
https://github.com/marshmallow-code/marshmallow/blob/298870ef6c089fb4d91efae9ca4168453ffe00d2/marshmallow/validate.py#L37
"""
return re.compile(
r''.join(
(
r'^',
r'(' if relative else r'',
r'(?:[a-z0-9\.\-\+]*)://', # scheme is validated separately
r'(?:[^:@]+?:[^:@]*?@|)', # basic auth
r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+',
r'(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|', # domain...
r'localhost|', # localhost...
(
r'(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.?)|' if not require_tld else r''
), # allow dotless hostnames
r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}|', # ...or ipv4
r'\[[A-F0-9]*:[A-F0-9:]+\])', # ...or ipv6
r'(?::\d+)?', # optional port
r')?' if relative else r'', # host is optional, allow for relative URLs
r'(?:/?|[/?]\S+)$',
)
),
re.IGNORECASE,
)
|
def function[url_regex_generator, parameter[]]:
constant[
Url regex generator taken from Marshmallow library,
for details please follow library source code:
https://github.com/marshmallow-code/marshmallow/blob/298870ef6c089fb4d91efae9ca4168453ffe00d2/marshmallow/validate.py#L37
]
return[call[name[re].compile, parameter[call[constant[].join, parameter[tuple[[<ast.Constant object at 0x7da1b21f3400>, <ast.IfExp object at 0x7da1b21f34c0>, <ast.Constant object at 0x7da1b21f3be0>, <ast.Constant object at 0x7da1b21f3640>, <ast.Constant object at 0x7da1b21f3f40>, <ast.Constant object at 0x7da1b21f3af0>, <ast.Constant object at 0x7da1b21f35e0>, <ast.IfExp object at 0x7da1b21f3a30>, <ast.Constant object at 0x7da1b21f3b80>, <ast.Constant object at 0x7da1b21f3310>, <ast.Constant object at 0x7da1b21f3eb0>, <ast.IfExp object at 0x7da1b21f3700>, <ast.Constant object at 0x7da1b21f3460>]]]], name[re].IGNORECASE]]]
|
keyword[def] identifier[url_regex_generator] (*, identifier[relative] : identifier[bool] , identifier[require_tld] : identifier[bool] )-> identifier[Pattern] [ identifier[str] ]:
literal[string]
keyword[return] identifier[re] . identifier[compile] (
literal[string] . identifier[join] (
(
literal[string] ,
literal[string] keyword[if] identifier[relative] keyword[else] literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
(
literal[string] keyword[if] keyword[not] identifier[require_tld] keyword[else] literal[string]
),
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] keyword[if] identifier[relative] keyword[else] literal[string] ,
literal[string] ,
)
),
identifier[re] . identifier[IGNORECASE] ,
)
|
def url_regex_generator(*, relative: bool, require_tld: bool) -> Pattern[str]:
"""
Url regex generator taken from Marshmallow library,
for details please follow library source code:
https://github.com/marshmallow-code/marshmallow/blob/298870ef6c089fb4d91efae9ca4168453ffe00d2/marshmallow/validate.py#L37
""" # scheme is validated separately
# basic auth
# domain...
# localhost...
# allow dotless hostnames
# ...or ipv4
# ...or ipv6
# optional port
# host is optional, allow for relative URLs
return re.compile(''.join(('^', '(' if relative else '', '(?:[a-z0-9\\.\\-\\+]*)://', '(?:[^:@]+?:[^:@]*?@|)', '(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\\.)+', '(?:[A-Z]{2,6}\\.?|[A-Z0-9-]{2,}\\.?)|', 'localhost|', '(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\\.?)|' if not require_tld else '', '\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}|', '\\[[A-F0-9]*:[A-F0-9:]+\\])', '(?::\\d+)?', ')?' if relative else '', '(?:/?|[/?]\\S+)$')), re.IGNORECASE)
|
def cnst_A(self, X):
r"""Compute :math:`A \mathbf{x}` component of ADMM problem
constraint.
"""
return self.block_cat(self.cnst_A0(X), self.cnst_A1(X))
|
def function[cnst_A, parameter[self, X]]:
constant[Compute :math:`A \mathbf{x}` component of ADMM problem
constraint.
]
return[call[name[self].block_cat, parameter[call[name[self].cnst_A0, parameter[name[X]]], call[name[self].cnst_A1, parameter[name[X]]]]]]
|
keyword[def] identifier[cnst_A] ( identifier[self] , identifier[X] ):
literal[string]
keyword[return] identifier[self] . identifier[block_cat] ( identifier[self] . identifier[cnst_A0] ( identifier[X] ), identifier[self] . identifier[cnst_A1] ( identifier[X] ))
|
def cnst_A(self, X):
"""Compute :math:`A \\mathbf{x}` component of ADMM problem
constraint.
"""
return self.block_cat(self.cnst_A0(X), self.cnst_A1(X))
|
def _parse_proto(prototxt_fname):
"""Parse Caffe prototxt into symbol string
"""
proto = caffe_parser.read_prototxt(prototxt_fname)
# process data layer
input_name, input_dim, layers = _get_input(proto)
# only support single input, so always use `data` as the input data
mapping = {input_name: 'data'}
need_flatten = {input_name: False}
symbol_string = "import mxnet as mx\ndata = mx.symbol.Variable(name='data')\n"
flatten_count = 0
output_name = ""
prev_name = None
# convert reset layers one by one
for i, layer in enumerate(layers):
type_string = ''
param_string = ''
skip_layer = False
bottom_order = []
name = re.sub('[-/]', '_', layer.name)
if layer.type == 'Convolution' or layer.type == 4:
type_string = 'mx.symbol.Convolution'
param_string = _convert_conv_param(layer.convolution_param)
need_flatten[name] = True
if layer.type == 'Deconvolution' or layer.type == 39:
type_string = 'mx.symbol.Deconvolution'
param_string = _convert_conv_param(layer.convolution_param)
need_flatten[name] = True
if layer.type == 'Pooling' or layer.type == 17:
type_string = 'mx.symbol.Pooling'
param_string = _convert_pooling_param(layer.pooling_param)
need_flatten[name] = True
if layer.type == 'ReLU' or layer.type == 18:
type_string = 'mx.symbol.Activation'
param_string = "act_type='relu'"
need_flatten[name] = need_flatten[mapping[layer.bottom[0]]]
if layer.type == 'TanH' or layer.type == 23:
type_string = 'mx.symbol.Activation'
param_string = "act_type='tanh'"
need_flatten[name] = need_flatten[mapping[layer.bottom[0]]]
if layer.type == 'Sigmoid' or layer.type == 19:
type_string = 'mx.symbol.Activation'
param_string = "act_type='sigmoid'"
need_flatten[name] = need_flatten[mapping[layer.bottom[0]]]
if layer.type == 'LRN' or layer.type == 15:
type_string = 'mx.symbol.LRN'
param = layer.lrn_param
param_string = "alpha=%f, beta=%f, knorm=%f, nsize=%d" % (
param.alpha, param.beta, param.k, param.local_size)
need_flatten[name] = True
if layer.type == 'InnerProduct' or layer.type == 14:
type_string = 'mx.symbol.FullyConnected'
param = layer.inner_product_param
param_string = "num_hidden=%d, no_bias=%s" % (
param.num_output, not param.bias_term)
need_flatten[name] = False
if layer.type == 'Dropout' or layer.type == 6:
type_string = 'mx.symbol.Dropout'
param = layer.dropout_param
param_string = "p=%f" % param.dropout_ratio
need_flatten[name] = need_flatten[mapping[layer.bottom[0]]]
if layer.type == 'Softmax' or layer.type == 20:
if layer.softmax_param.axis == 2:
symbol_string += "%s = mx.symbol.transpose(%s, axes=(0,2,1))\n" %\
(mapping[layer.bottom[0]], mapping[layer.bottom[0]])
type_string = 'mx.symbol.SoftmaxActivation'
param_string = "mode='channel'"
need_flatten[name] = False
else:
type_string = 'mx.symbol.SoftmaxOutput'
if layer.type == 'Flatten' or layer.type == 8:
if 'softmax' in layer.bottom[0]:
prev_name = re.sub('[-/]', '_', layers[i-1].name)
skip_layer = True
else:
type_string = 'mx.symbol.Flatten'
need_flatten[name] = False
if layer.type == 'Split' or layer.type == 22:
type_string = 'split' # will process later
if layer.type == 'Concat' or layer.type == 3:
type_string = 'mx.symbol.Concat'
need_flatten[name] = True
if layer.type == 'Crop':
type_string = 'mx.symbol.Crop'
need_flatten[name] = True
param_string = 'center_crop=True'
if layer.type == 'BatchNorm':
type_string = 'mx.symbol.BatchNorm'
param = layer.batch_norm_param
# CuDNN requires eps to be greater than 1e-05
# We compensate for this change in convert_model
epsilon = param.eps
if (epsilon <= 1e-05):
epsilon = 1e-04
# if next layer is scale, don't fix gamma
fix_gamma = layers[i+1].type != 'Scale'
param_string = 'use_global_stats=%s, fix_gamma=%s, eps=%f' % (
param.use_global_stats, fix_gamma, epsilon)
need_flatten[name] = need_flatten[mapping[layer.bottom[0]]]
if layer.type == 'Scale':
assert layers[i-1].type == 'BatchNorm'
need_flatten[name] = need_flatten[mapping[layer.bottom[0]]]
skip_layer = True
prev_name = re.sub('[-/]', '_', layers[i-1].name)
if layer.type == 'PReLU':
type_string = 'mx.symbol.LeakyReLU'
param = layer.prelu_param
param_string = "act_type='prelu', slope=%f" % param.filler.value
need_flatten[name] = need_flatten[mapping[layer.bottom[0]]]
if layer.type == 'Eltwise':
type_string = 'mx.symbol.broadcast_add'
param_string = ""
need_flatten[name] = False
if layer.type == 'Reshape':
type_string = 'mx.symbol.Reshape'
param = layer.reshape_param
param_string = 'shape=(' + ','.join([str(x) for x in list(param.shape.dim)]) + ')'
need_flatten[name] = True
if layer.type == 'AbsVal':
type_string = 'mx.symbol.abs'
need_flatten[name] = need_flatten[mapping[layer.bottom[0]]]
if layer.type == 'Normalize':
bottom = re.sub('[-/]', '_', layer.bottom[0])
conv_layer = _find_layer(layers, bottom)
assert conv_layer is not None
param = layer.norm_param
assert not param.across_spatial and not param.channel_shared
assert param.scale_filler.type == 'constant'
if conv_layer.type == 'Convolution':
scale_name = "%s_scale" % name
symbol_string += "%s=mx.sym.Variable(name='%s', shape=(1, %d, 1, 1), init=mx.init.Constant(%f))\n" % \
(scale_name, scale_name, conv_layer.convolution_param.num_output,
param.scale_filler.value)
symbol_string += "%s=mx.symbol.L2Normalization(name='%s', data=%s, mode='channel')\n" %\
(name, name, mapping[layer.bottom[0]])
symbol_string += "%s=mx.symbol.broadcast_mul(lhs=%s, rhs=%s)\n" %\
(name, scale_name, name)
type_string = 'split'
need_flatten[name] = True
else:
raise ValueError('Unknown/Invalid normalize layer!')
if layer.type == 'Permute':
type_string = 'mx.symbol.transpose'
param_string = "axes=(%s)" % (','.join([str(x) for x in layer.permute_param.order]))
need_flatten[name] = True
from_name = ''
if layer.type == 'PriorBox':
param = layer.prior_box_param
if layer.bottom[0] == 'data':
bottom_order = [1]
else:
bottom_order = [0]
try:
import math
min_size = param.min_size[0] / input_dim[2]
max_size = math.sqrt(param.min_size[0] * param.max_size[0]) / input_dim[2]
sizes = '(%f, %f)' %(min_size, max_size)
except AttributeError:
min_size = param.min_size[0] / input_dim[2]
sizes = '(%f)' %(min_size)
ars = list(param.aspect_ratio)
ratios = [1.]
for ar in ars:
ratios.append(ar)
if param.flip:
ratios.append(1. / ar)
ratios_string = '(' + ','.join(str(x) for x in ratios) + ')'
clip = param.clip
if (param.step_h > 0 or param.step_w > 0):
step_h = param.step_h
step_w = param.step_w
elif param.step > 0:
step_h = param.step
step_w = param.step
else:
step_h = -1
step_w = -1
finput_dimh = float(input_dim[2])
finput_dimw = float(input_dim[3])
step = '(%f, %f)' % (step_h / finput_dimh, step_w / finput_dimw)
assert param.offset == 0.5, "currently only support offset = 0.5"
symbol_string += '%s = mx.contrib.symbol.MultiBoxPrior(%s, sizes=%s, ratios=%s, clip=%s, steps=%s, name="%s")\n' % \
(name, mapping[layer.bottom[0]], sizes, ratios_string, clip, step, name)
symbol_string += '%s = mx.symbol.Flatten(data=%s)\n' % (name, name)
type_string = 'split'
need_flatten[name] = False
if layer.type == 'DetectionOutput':
bottom_order = [1, 0, 2]
param = layer.detection_output_param
assert param.share_location == True
assert param.background_label_id == 0
nms_param = param.nms_param
type_string = 'mx.contrib.symbol.MultiBoxDetection'
param_string = "nms_threshold=%f, nms_topk=%d, clip=False" % \
(nms_param.nms_threshold, nms_param.top_k)
if skip_layer:
assert len(layer.bottom) == 1
symbol_string += "%s = %s\n" % (name, prev_name)
elif type_string == '':
raise ValueError('Unknown layer %s!' % layer.type)
elif type_string != 'split':
bottom = layer.bottom
if param_string != "":
param_string = ", " + param_string
if len(bottom) == 1:
# print(need_flatten)
if need_flatten[mapping[bottom[0]]] and type_string == 'mx.symbol.FullyConnected':
flatten_name = "flatten_%d" % flatten_count
symbol_string += "%s=mx.symbol.Flatten(name='%s', data=%s)\n" % (
flatten_name, flatten_name, mapping[bottom[0]])
flatten_count += 1
need_flatten[flatten_name] = False
bottom[0] = flatten_name
mapping[bottom[0]] = bottom[0]
symbol_string += "%s = %s(name='%s', data=%s %s)\n" % (
name, type_string, name, mapping[bottom[0]], param_string)
else:
if not bottom_order:
bottom_order = range(len(bottom))
symbol_string += "%s = %s(name='%s', *[%s] %s)\n" % \
(name, type_string, name, ','.join([mapping[bottom[x]] for x in bottom_order]), param_string)
if layer.type == 'Concat' and layer.concat_param.axis == 2:
symbol_string += "%s = mx.symbol.Reshape(data=%s, shape=(0, -1, 4), name='%s')\n" %\
(name, name, name)
for j in range(len(layer.top)):
mapping[layer.top[j]] = name
output_name = name
return symbol_string, output_name, input_dim
|
def function[_parse_proto, parameter[prototxt_fname]]:
constant[Parse Caffe prototxt into symbol string
]
variable[proto] assign[=] call[name[caffe_parser].read_prototxt, parameter[name[prototxt_fname]]]
<ast.Tuple object at 0x7da1b1e74640> assign[=] call[name[_get_input], parameter[name[proto]]]
variable[mapping] assign[=] dictionary[[<ast.Name object at 0x7da1b1e74460>], [<ast.Constant object at 0x7da1b1e74430>]]
variable[need_flatten] assign[=] dictionary[[<ast.Name object at 0x7da1b1e74370>], [<ast.Constant object at 0x7da1b1e74340>]]
variable[symbol_string] assign[=] constant[import mxnet as mx
data = mx.symbol.Variable(name='data')
]
variable[flatten_count] assign[=] constant[0]
variable[output_name] assign[=] constant[]
variable[prev_name] assign[=] constant[None]
for taget[tuple[[<ast.Name object at 0x7da1b1e74070>, <ast.Name object at 0x7da1b1e74040>]]] in starred[call[name[enumerate], parameter[name[layers]]]] begin[:]
variable[type_string] assign[=] constant[]
variable[param_string] assign[=] constant[]
variable[skip_layer] assign[=] constant[False]
variable[bottom_order] assign[=] list[[]]
variable[name] assign[=] call[name[re].sub, parameter[constant[[-/]], constant[_], name[layer].name]]
if <ast.BoolOp object at 0x7da20c9922c0> begin[:]
variable[type_string] assign[=] constant[mx.symbol.Convolution]
variable[param_string] assign[=] call[name[_convert_conv_param], parameter[name[layer].convolution_param]]
call[name[need_flatten]][name[name]] assign[=] constant[True]
if <ast.BoolOp object at 0x7da18c4cdff0> begin[:]
variable[type_string] assign[=] constant[mx.symbol.Deconvolution]
variable[param_string] assign[=] call[name[_convert_conv_param], parameter[name[layer].convolution_param]]
call[name[need_flatten]][name[name]] assign[=] constant[True]
if <ast.BoolOp object at 0x7da20c991ae0> begin[:]
variable[type_string] assign[=] constant[mx.symbol.Pooling]
variable[param_string] assign[=] call[name[_convert_pooling_param], parameter[name[layer].pooling_param]]
call[name[need_flatten]][name[name]] assign[=] constant[True]
if <ast.BoolOp object at 0x7da20c9930a0> begin[:]
variable[type_string] assign[=] constant[mx.symbol.Activation]
variable[param_string] assign[=] constant[act_type='relu']
call[name[need_flatten]][name[name]] assign[=] call[name[need_flatten]][call[name[mapping]][call[name[layer].bottom][constant[0]]]]
if <ast.BoolOp object at 0x7da20c991a20> begin[:]
variable[type_string] assign[=] constant[mx.symbol.Activation]
variable[param_string] assign[=] constant[act_type='tanh']
call[name[need_flatten]][name[name]] assign[=] call[name[need_flatten]][call[name[mapping]][call[name[layer].bottom][constant[0]]]]
if <ast.BoolOp object at 0x7da20c9913c0> begin[:]
variable[type_string] assign[=] constant[mx.symbol.Activation]
variable[param_string] assign[=] constant[act_type='sigmoid']
call[name[need_flatten]][name[name]] assign[=] call[name[need_flatten]][call[name[mapping]][call[name[layer].bottom][constant[0]]]]
if <ast.BoolOp object at 0x7da20c991780> begin[:]
variable[type_string] assign[=] constant[mx.symbol.LRN]
variable[param] assign[=] name[layer].lrn_param
variable[param_string] assign[=] binary_operation[constant[alpha=%f, beta=%f, knorm=%f, nsize=%d] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da20c992bf0>, <ast.Attribute object at 0x7da20c9901f0>, <ast.Attribute object at 0x7da20c990e20>, <ast.Attribute object at 0x7da20c990040>]]]
call[name[need_flatten]][name[name]] assign[=] constant[True]
if <ast.BoolOp object at 0x7da20c990940> begin[:]
variable[type_string] assign[=] constant[mx.symbol.FullyConnected]
variable[param] assign[=] name[layer].inner_product_param
variable[param_string] assign[=] binary_operation[constant[num_hidden=%d, no_bias=%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da20c990ee0>, <ast.UnaryOp object at 0x7da20c992740>]]]
call[name[need_flatten]][name[name]] assign[=] constant[False]
if <ast.BoolOp object at 0x7da20c993160> begin[:]
variable[type_string] assign[=] constant[mx.symbol.Dropout]
variable[param] assign[=] name[layer].dropout_param
variable[param_string] assign[=] binary_operation[constant[p=%f] <ast.Mod object at 0x7da2590d6920> name[param].dropout_ratio]
call[name[need_flatten]][name[name]] assign[=] call[name[need_flatten]][call[name[mapping]][call[name[layer].bottom][constant[0]]]]
if <ast.BoolOp object at 0x7da20c992080> begin[:]
if compare[name[layer].softmax_param.axis equal[==] constant[2]] begin[:]
<ast.AugAssign object at 0x7da2054a63b0>
variable[type_string] assign[=] constant[mx.symbol.SoftmaxActivation]
variable[param_string] assign[=] constant[mode='channel']
call[name[need_flatten]][name[name]] assign[=] constant[False]
if <ast.BoolOp object at 0x7da18f810fa0> begin[:]
if compare[constant[softmax] in call[name[layer].bottom][constant[0]]] begin[:]
variable[prev_name] assign[=] call[name[re].sub, parameter[constant[[-/]], constant[_], call[name[layers]][binary_operation[name[i] - constant[1]]].name]]
variable[skip_layer] assign[=] constant[True]
call[name[need_flatten]][name[name]] assign[=] constant[False]
if <ast.BoolOp object at 0x7da1b1e14460> begin[:]
variable[type_string] assign[=] constant[split]
if <ast.BoolOp object at 0x7da1b1e179d0> begin[:]
variable[type_string] assign[=] constant[mx.symbol.Concat]
call[name[need_flatten]][name[name]] assign[=] constant[True]
if compare[name[layer].type equal[==] constant[Crop]] begin[:]
variable[type_string] assign[=] constant[mx.symbol.Crop]
call[name[need_flatten]][name[name]] assign[=] constant[True]
variable[param_string] assign[=] constant[center_crop=True]
if compare[name[layer].type equal[==] constant[BatchNorm]] begin[:]
variable[type_string] assign[=] constant[mx.symbol.BatchNorm]
variable[param] assign[=] name[layer].batch_norm_param
variable[epsilon] assign[=] name[param].eps
if compare[name[epsilon] less_or_equal[<=] constant[1e-05]] begin[:]
variable[epsilon] assign[=] constant[0.0001]
variable[fix_gamma] assign[=] compare[call[name[layers]][binary_operation[name[i] + constant[1]]].type not_equal[!=] constant[Scale]]
variable[param_string] assign[=] binary_operation[constant[use_global_stats=%s, fix_gamma=%s, eps=%f] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da18fe92a40>, <ast.Name object at 0x7da18fe91c00>, <ast.Name object at 0x7da18fe916f0>]]]
call[name[need_flatten]][name[name]] assign[=] call[name[need_flatten]][call[name[mapping]][call[name[layer].bottom][constant[0]]]]
if compare[name[layer].type equal[==] constant[Scale]] begin[:]
assert[compare[call[name[layers]][binary_operation[name[i] - constant[1]]].type equal[==] constant[BatchNorm]]]
call[name[need_flatten]][name[name]] assign[=] call[name[need_flatten]][call[name[mapping]][call[name[layer].bottom][constant[0]]]]
variable[skip_layer] assign[=] constant[True]
variable[prev_name] assign[=] call[name[re].sub, parameter[constant[[-/]], constant[_], call[name[layers]][binary_operation[name[i] - constant[1]]].name]]
if compare[name[layer].type equal[==] constant[PReLU]] begin[:]
variable[type_string] assign[=] constant[mx.symbol.LeakyReLU]
variable[param] assign[=] name[layer].prelu_param
variable[param_string] assign[=] binary_operation[constant[act_type='prelu', slope=%f] <ast.Mod object at 0x7da2590d6920> name[param].filler.value]
call[name[need_flatten]][name[name]] assign[=] call[name[need_flatten]][call[name[mapping]][call[name[layer].bottom][constant[0]]]]
if compare[name[layer].type equal[==] constant[Eltwise]] begin[:]
variable[type_string] assign[=] constant[mx.symbol.broadcast_add]
variable[param_string] assign[=] constant[]
call[name[need_flatten]][name[name]] assign[=] constant[False]
if compare[name[layer].type equal[==] constant[Reshape]] begin[:]
variable[type_string] assign[=] constant[mx.symbol.Reshape]
variable[param] assign[=] name[layer].reshape_param
variable[param_string] assign[=] binary_operation[binary_operation[constant[shape=(] + call[constant[,].join, parameter[<ast.ListComp object at 0x7da18fe92d70>]]] + constant[)]]
call[name[need_flatten]][name[name]] assign[=] constant[True]
if compare[name[layer].type equal[==] constant[AbsVal]] begin[:]
variable[type_string] assign[=] constant[mx.symbol.abs]
call[name[need_flatten]][name[name]] assign[=] call[name[need_flatten]][call[name[mapping]][call[name[layer].bottom][constant[0]]]]
if compare[name[layer].type equal[==] constant[Normalize]] begin[:]
variable[bottom] assign[=] call[name[re].sub, parameter[constant[[-/]], constant[_], call[name[layer].bottom][constant[0]]]]
variable[conv_layer] assign[=] call[name[_find_layer], parameter[name[layers], name[bottom]]]
assert[compare[name[conv_layer] is_not constant[None]]]
variable[param] assign[=] name[layer].norm_param
assert[<ast.BoolOp object at 0x7da20e957b80>]
assert[compare[name[param].scale_filler.type equal[==] constant[constant]]]
if compare[name[conv_layer].type equal[==] constant[Convolution]] begin[:]
variable[scale_name] assign[=] binary_operation[constant[%s_scale] <ast.Mod object at 0x7da2590d6920> name[name]]
<ast.AugAssign object at 0x7da20e954910>
<ast.AugAssign object at 0x7da20e9569b0>
<ast.AugAssign object at 0x7da20e954940>
variable[type_string] assign[=] constant[split]
call[name[need_flatten]][name[name]] assign[=] constant[True]
if compare[name[layer].type equal[==] constant[Permute]] begin[:]
variable[type_string] assign[=] constant[mx.symbol.transpose]
variable[param_string] assign[=] binary_operation[constant[axes=(%s)] <ast.Mod object at 0x7da2590d6920> call[constant[,].join, parameter[<ast.ListComp object at 0x7da1b201e3b0>]]]
call[name[need_flatten]][name[name]] assign[=] constant[True]
variable[from_name] assign[=] constant[]
if compare[name[layer].type equal[==] constant[PriorBox]] begin[:]
variable[param] assign[=] name[layer].prior_box_param
if compare[call[name[layer].bottom][constant[0]] equal[==] constant[data]] begin[:]
variable[bottom_order] assign[=] list[[<ast.Constant object at 0x7da1b201da20>]]
<ast.Try object at 0x7da1b201d120>
variable[ars] assign[=] call[name[list], parameter[name[param].aspect_ratio]]
variable[ratios] assign[=] list[[<ast.Constant object at 0x7da1b1ffe9e0>]]
for taget[name[ar]] in starred[name[ars]] begin[:]
call[name[ratios].append, parameter[name[ar]]]
if name[param].flip begin[:]
call[name[ratios].append, parameter[binary_operation[constant[1.0] / name[ar]]]]
variable[ratios_string] assign[=] binary_operation[binary_operation[constant[(] + call[constant[,].join, parameter[<ast.GeneratorExp object at 0x7da1b1ffc490>]]] + constant[)]]
variable[clip] assign[=] name[param].clip
if <ast.BoolOp object at 0x7da1b1ffdc60> begin[:]
variable[step_h] assign[=] name[param].step_h
variable[step_w] assign[=] name[param].step_w
variable[finput_dimh] assign[=] call[name[float], parameter[call[name[input_dim]][constant[2]]]]
variable[finput_dimw] assign[=] call[name[float], parameter[call[name[input_dim]][constant[3]]]]
variable[step] assign[=] binary_operation[constant[(%f, %f)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.BinOp object at 0x7da1b1ffcf40>, <ast.BinOp object at 0x7da1b1ffded0>]]]
assert[compare[name[param].offset equal[==] constant[0.5]]]
<ast.AugAssign object at 0x7da1b1ffeb00>
<ast.AugAssign object at 0x7da1b1fffb80>
variable[type_string] assign[=] constant[split]
call[name[need_flatten]][name[name]] assign[=] constant[False]
if compare[name[layer].type equal[==] constant[DetectionOutput]] begin[:]
variable[bottom_order] assign[=] list[[<ast.Constant object at 0x7da1b1ffe0e0>, <ast.Constant object at 0x7da1b1fffd90>, <ast.Constant object at 0x7da1b1fff0d0>]]
variable[param] assign[=] name[layer].detection_output_param
assert[compare[name[param].share_location equal[==] constant[True]]]
assert[compare[name[param].background_label_id equal[==] constant[0]]]
variable[nms_param] assign[=] name[param].nms_param
variable[type_string] assign[=] constant[mx.contrib.symbol.MultiBoxDetection]
variable[param_string] assign[=] binary_operation[constant[nms_threshold=%f, nms_topk=%d, clip=False] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b1ffe830>, <ast.Attribute object at 0x7da1b1ffd120>]]]
if name[skip_layer] begin[:]
assert[compare[call[name[len], parameter[name[layer].bottom]] equal[==] constant[1]]]
<ast.AugAssign object at 0x7da1b1ffdba0>
for taget[name[j]] in starred[call[name[range], parameter[call[name[len], parameter[name[layer].top]]]]] begin[:]
call[name[mapping]][call[name[layer].top][name[j]]] assign[=] name[name]
variable[output_name] assign[=] name[name]
return[tuple[[<ast.Name object at 0x7da1b1ef07f0>, <ast.Name object at 0x7da1b1ef2f50>, <ast.Name object at 0x7da1b1ef2f20>]]]
|
keyword[def] identifier[_parse_proto] ( identifier[prototxt_fname] ):
literal[string]
identifier[proto] = identifier[caffe_parser] . identifier[read_prototxt] ( identifier[prototxt_fname] )
identifier[input_name] , identifier[input_dim] , identifier[layers] = identifier[_get_input] ( identifier[proto] )
identifier[mapping] ={ identifier[input_name] : literal[string] }
identifier[need_flatten] ={ identifier[input_name] : keyword[False] }
identifier[symbol_string] = literal[string]
identifier[flatten_count] = literal[int]
identifier[output_name] = literal[string]
identifier[prev_name] = keyword[None]
keyword[for] identifier[i] , identifier[layer] keyword[in] identifier[enumerate] ( identifier[layers] ):
identifier[type_string] = literal[string]
identifier[param_string] = literal[string]
identifier[skip_layer] = keyword[False]
identifier[bottom_order] =[]
identifier[name] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[layer] . identifier[name] )
keyword[if] identifier[layer] . identifier[type] == literal[string] keyword[or] identifier[layer] . identifier[type] == literal[int] :
identifier[type_string] = literal[string]
identifier[param_string] = identifier[_convert_conv_param] ( identifier[layer] . identifier[convolution_param] )
identifier[need_flatten] [ identifier[name] ]= keyword[True]
keyword[if] identifier[layer] . identifier[type] == literal[string] keyword[or] identifier[layer] . identifier[type] == literal[int] :
identifier[type_string] = literal[string]
identifier[param_string] = identifier[_convert_conv_param] ( identifier[layer] . identifier[convolution_param] )
identifier[need_flatten] [ identifier[name] ]= keyword[True]
keyword[if] identifier[layer] . identifier[type] == literal[string] keyword[or] identifier[layer] . identifier[type] == literal[int] :
identifier[type_string] = literal[string]
identifier[param_string] = identifier[_convert_pooling_param] ( identifier[layer] . identifier[pooling_param] )
identifier[need_flatten] [ identifier[name] ]= keyword[True]
keyword[if] identifier[layer] . identifier[type] == literal[string] keyword[or] identifier[layer] . identifier[type] == literal[int] :
identifier[type_string] = literal[string]
identifier[param_string] = literal[string]
identifier[need_flatten] [ identifier[name] ]= identifier[need_flatten] [ identifier[mapping] [ identifier[layer] . identifier[bottom] [ literal[int] ]]]
keyword[if] identifier[layer] . identifier[type] == literal[string] keyword[or] identifier[layer] . identifier[type] == literal[int] :
identifier[type_string] = literal[string]
identifier[param_string] = literal[string]
identifier[need_flatten] [ identifier[name] ]= identifier[need_flatten] [ identifier[mapping] [ identifier[layer] . identifier[bottom] [ literal[int] ]]]
keyword[if] identifier[layer] . identifier[type] == literal[string] keyword[or] identifier[layer] . identifier[type] == literal[int] :
identifier[type_string] = literal[string]
identifier[param_string] = literal[string]
identifier[need_flatten] [ identifier[name] ]= identifier[need_flatten] [ identifier[mapping] [ identifier[layer] . identifier[bottom] [ literal[int] ]]]
keyword[if] identifier[layer] . identifier[type] == literal[string] keyword[or] identifier[layer] . identifier[type] == literal[int] :
identifier[type_string] = literal[string]
identifier[param] = identifier[layer] . identifier[lrn_param]
identifier[param_string] = literal[string] %(
identifier[param] . identifier[alpha] , identifier[param] . identifier[beta] , identifier[param] . identifier[k] , identifier[param] . identifier[local_size] )
identifier[need_flatten] [ identifier[name] ]= keyword[True]
keyword[if] identifier[layer] . identifier[type] == literal[string] keyword[or] identifier[layer] . identifier[type] == literal[int] :
identifier[type_string] = literal[string]
identifier[param] = identifier[layer] . identifier[inner_product_param]
identifier[param_string] = literal[string] %(
identifier[param] . identifier[num_output] , keyword[not] identifier[param] . identifier[bias_term] )
identifier[need_flatten] [ identifier[name] ]= keyword[False]
keyword[if] identifier[layer] . identifier[type] == literal[string] keyword[or] identifier[layer] . identifier[type] == literal[int] :
identifier[type_string] = literal[string]
identifier[param] = identifier[layer] . identifier[dropout_param]
identifier[param_string] = literal[string] % identifier[param] . identifier[dropout_ratio]
identifier[need_flatten] [ identifier[name] ]= identifier[need_flatten] [ identifier[mapping] [ identifier[layer] . identifier[bottom] [ literal[int] ]]]
keyword[if] identifier[layer] . identifier[type] == literal[string] keyword[or] identifier[layer] . identifier[type] == literal[int] :
keyword[if] identifier[layer] . identifier[softmax_param] . identifier[axis] == literal[int] :
identifier[symbol_string] += literal[string] %( identifier[mapping] [ identifier[layer] . identifier[bottom] [ literal[int] ]], identifier[mapping] [ identifier[layer] . identifier[bottom] [ literal[int] ]])
identifier[type_string] = literal[string]
identifier[param_string] = literal[string]
identifier[need_flatten] [ identifier[name] ]= keyword[False]
keyword[else] :
identifier[type_string] = literal[string]
keyword[if] identifier[layer] . identifier[type] == literal[string] keyword[or] identifier[layer] . identifier[type] == literal[int] :
keyword[if] literal[string] keyword[in] identifier[layer] . identifier[bottom] [ literal[int] ]:
identifier[prev_name] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[layers] [ identifier[i] - literal[int] ]. identifier[name] )
identifier[skip_layer] = keyword[True]
keyword[else] :
identifier[type_string] = literal[string]
identifier[need_flatten] [ identifier[name] ]= keyword[False]
keyword[if] identifier[layer] . identifier[type] == literal[string] keyword[or] identifier[layer] . identifier[type] == literal[int] :
identifier[type_string] = literal[string]
keyword[if] identifier[layer] . identifier[type] == literal[string] keyword[or] identifier[layer] . identifier[type] == literal[int] :
identifier[type_string] = literal[string]
identifier[need_flatten] [ identifier[name] ]= keyword[True]
keyword[if] identifier[layer] . identifier[type] == literal[string] :
identifier[type_string] = literal[string]
identifier[need_flatten] [ identifier[name] ]= keyword[True]
identifier[param_string] = literal[string]
keyword[if] identifier[layer] . identifier[type] == literal[string] :
identifier[type_string] = literal[string]
identifier[param] = identifier[layer] . identifier[batch_norm_param]
identifier[epsilon] = identifier[param] . identifier[eps]
keyword[if] ( identifier[epsilon] <= literal[int] ):
identifier[epsilon] = literal[int]
identifier[fix_gamma] = identifier[layers] [ identifier[i] + literal[int] ]. identifier[type] != literal[string]
identifier[param_string] = literal[string] %(
identifier[param] . identifier[use_global_stats] , identifier[fix_gamma] , identifier[epsilon] )
identifier[need_flatten] [ identifier[name] ]= identifier[need_flatten] [ identifier[mapping] [ identifier[layer] . identifier[bottom] [ literal[int] ]]]
keyword[if] identifier[layer] . identifier[type] == literal[string] :
keyword[assert] identifier[layers] [ identifier[i] - literal[int] ]. identifier[type] == literal[string]
identifier[need_flatten] [ identifier[name] ]= identifier[need_flatten] [ identifier[mapping] [ identifier[layer] . identifier[bottom] [ literal[int] ]]]
identifier[skip_layer] = keyword[True]
identifier[prev_name] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[layers] [ identifier[i] - literal[int] ]. identifier[name] )
keyword[if] identifier[layer] . identifier[type] == literal[string] :
identifier[type_string] = literal[string]
identifier[param] = identifier[layer] . identifier[prelu_param]
identifier[param_string] = literal[string] % identifier[param] . identifier[filler] . identifier[value]
identifier[need_flatten] [ identifier[name] ]= identifier[need_flatten] [ identifier[mapping] [ identifier[layer] . identifier[bottom] [ literal[int] ]]]
keyword[if] identifier[layer] . identifier[type] == literal[string] :
identifier[type_string] = literal[string]
identifier[param_string] = literal[string]
identifier[need_flatten] [ identifier[name] ]= keyword[False]
keyword[if] identifier[layer] . identifier[type] == literal[string] :
identifier[type_string] = literal[string]
identifier[param] = identifier[layer] . identifier[reshape_param]
identifier[param_string] = literal[string] + literal[string] . identifier[join] ([ identifier[str] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[list] ( identifier[param] . identifier[shape] . identifier[dim] )])+ literal[string]
identifier[need_flatten] [ identifier[name] ]= keyword[True]
keyword[if] identifier[layer] . identifier[type] == literal[string] :
identifier[type_string] = literal[string]
identifier[need_flatten] [ identifier[name] ]= identifier[need_flatten] [ identifier[mapping] [ identifier[layer] . identifier[bottom] [ literal[int] ]]]
keyword[if] identifier[layer] . identifier[type] == literal[string] :
identifier[bottom] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[layer] . identifier[bottom] [ literal[int] ])
identifier[conv_layer] = identifier[_find_layer] ( identifier[layers] , identifier[bottom] )
keyword[assert] identifier[conv_layer] keyword[is] keyword[not] keyword[None]
identifier[param] = identifier[layer] . identifier[norm_param]
keyword[assert] keyword[not] identifier[param] . identifier[across_spatial] keyword[and] keyword[not] identifier[param] . identifier[channel_shared]
keyword[assert] identifier[param] . identifier[scale_filler] . identifier[type] == literal[string]
keyword[if] identifier[conv_layer] . identifier[type] == literal[string] :
identifier[scale_name] = literal[string] % identifier[name]
identifier[symbol_string] += literal[string] %( identifier[scale_name] , identifier[scale_name] , identifier[conv_layer] . identifier[convolution_param] . identifier[num_output] ,
identifier[param] . identifier[scale_filler] . identifier[value] )
identifier[symbol_string] += literal[string] %( identifier[name] , identifier[name] , identifier[mapping] [ identifier[layer] . identifier[bottom] [ literal[int] ]])
identifier[symbol_string] += literal[string] %( identifier[name] , identifier[scale_name] , identifier[name] )
identifier[type_string] = literal[string]
identifier[need_flatten] [ identifier[name] ]= keyword[True]
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[layer] . identifier[type] == literal[string] :
identifier[type_string] = literal[string]
identifier[param_string] = literal[string] %( literal[string] . identifier[join] ([ identifier[str] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[layer] . identifier[permute_param] . identifier[order] ]))
identifier[need_flatten] [ identifier[name] ]= keyword[True]
identifier[from_name] = literal[string]
keyword[if] identifier[layer] . identifier[type] == literal[string] :
identifier[param] = identifier[layer] . identifier[prior_box_param]
keyword[if] identifier[layer] . identifier[bottom] [ literal[int] ]== literal[string] :
identifier[bottom_order] =[ literal[int] ]
keyword[else] :
identifier[bottom_order] =[ literal[int] ]
keyword[try] :
keyword[import] identifier[math]
identifier[min_size] = identifier[param] . identifier[min_size] [ literal[int] ]/ identifier[input_dim] [ literal[int] ]
identifier[max_size] = identifier[math] . identifier[sqrt] ( identifier[param] . identifier[min_size] [ literal[int] ]* identifier[param] . identifier[max_size] [ literal[int] ])/ identifier[input_dim] [ literal[int] ]
identifier[sizes] = literal[string] %( identifier[min_size] , identifier[max_size] )
keyword[except] identifier[AttributeError] :
identifier[min_size] = identifier[param] . identifier[min_size] [ literal[int] ]/ identifier[input_dim] [ literal[int] ]
identifier[sizes] = literal[string] %( identifier[min_size] )
identifier[ars] = identifier[list] ( identifier[param] . identifier[aspect_ratio] )
identifier[ratios] =[ literal[int] ]
keyword[for] identifier[ar] keyword[in] identifier[ars] :
identifier[ratios] . identifier[append] ( identifier[ar] )
keyword[if] identifier[param] . identifier[flip] :
identifier[ratios] . identifier[append] ( literal[int] / identifier[ar] )
identifier[ratios_string] = literal[string] + literal[string] . identifier[join] ( identifier[str] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[ratios] )+ literal[string]
identifier[clip] = identifier[param] . identifier[clip]
keyword[if] ( identifier[param] . identifier[step_h] > literal[int] keyword[or] identifier[param] . identifier[step_w] > literal[int] ):
identifier[step_h] = identifier[param] . identifier[step_h]
identifier[step_w] = identifier[param] . identifier[step_w]
keyword[elif] identifier[param] . identifier[step] > literal[int] :
identifier[step_h] = identifier[param] . identifier[step]
identifier[step_w] = identifier[param] . identifier[step]
keyword[else] :
identifier[step_h] =- literal[int]
identifier[step_w] =- literal[int]
identifier[finput_dimh] = identifier[float] ( identifier[input_dim] [ literal[int] ])
identifier[finput_dimw] = identifier[float] ( identifier[input_dim] [ literal[int] ])
identifier[step] = literal[string] %( identifier[step_h] / identifier[finput_dimh] , identifier[step_w] / identifier[finput_dimw] )
keyword[assert] identifier[param] . identifier[offset] == literal[int] , literal[string]
identifier[symbol_string] += literal[string] %( identifier[name] , identifier[mapping] [ identifier[layer] . identifier[bottom] [ literal[int] ]], identifier[sizes] , identifier[ratios_string] , identifier[clip] , identifier[step] , identifier[name] )
identifier[symbol_string] += literal[string] %( identifier[name] , identifier[name] )
identifier[type_string] = literal[string]
identifier[need_flatten] [ identifier[name] ]= keyword[False]
keyword[if] identifier[layer] . identifier[type] == literal[string] :
identifier[bottom_order] =[ literal[int] , literal[int] , literal[int] ]
identifier[param] = identifier[layer] . identifier[detection_output_param]
keyword[assert] identifier[param] . identifier[share_location] == keyword[True]
keyword[assert] identifier[param] . identifier[background_label_id] == literal[int]
identifier[nms_param] = identifier[param] . identifier[nms_param]
identifier[type_string] = literal[string]
identifier[param_string] = literal[string] %( identifier[nms_param] . identifier[nms_threshold] , identifier[nms_param] . identifier[top_k] )
keyword[if] identifier[skip_layer] :
keyword[assert] identifier[len] ( identifier[layer] . identifier[bottom] )== literal[int]
identifier[symbol_string] += literal[string] %( identifier[name] , identifier[prev_name] )
keyword[elif] identifier[type_string] == literal[string] :
keyword[raise] identifier[ValueError] ( literal[string] % identifier[layer] . identifier[type] )
keyword[elif] identifier[type_string] != literal[string] :
identifier[bottom] = identifier[layer] . identifier[bottom]
keyword[if] identifier[param_string] != literal[string] :
identifier[param_string] = literal[string] + identifier[param_string]
keyword[if] identifier[len] ( identifier[bottom] )== literal[int] :
keyword[if] identifier[need_flatten] [ identifier[mapping] [ identifier[bottom] [ literal[int] ]]] keyword[and] identifier[type_string] == literal[string] :
identifier[flatten_name] = literal[string] % identifier[flatten_count]
identifier[symbol_string] += literal[string] %(
identifier[flatten_name] , identifier[flatten_name] , identifier[mapping] [ identifier[bottom] [ literal[int] ]])
identifier[flatten_count] += literal[int]
identifier[need_flatten] [ identifier[flatten_name] ]= keyword[False]
identifier[bottom] [ literal[int] ]= identifier[flatten_name]
identifier[mapping] [ identifier[bottom] [ literal[int] ]]= identifier[bottom] [ literal[int] ]
identifier[symbol_string] += literal[string] %(
identifier[name] , identifier[type_string] , identifier[name] , identifier[mapping] [ identifier[bottom] [ literal[int] ]], identifier[param_string] )
keyword[else] :
keyword[if] keyword[not] identifier[bottom_order] :
identifier[bottom_order] = identifier[range] ( identifier[len] ( identifier[bottom] ))
identifier[symbol_string] += literal[string] %( identifier[name] , identifier[type_string] , identifier[name] , literal[string] . identifier[join] ([ identifier[mapping] [ identifier[bottom] [ identifier[x] ]] keyword[for] identifier[x] keyword[in] identifier[bottom_order] ]), identifier[param_string] )
keyword[if] identifier[layer] . identifier[type] == literal[string] keyword[and] identifier[layer] . identifier[concat_param] . identifier[axis] == literal[int] :
identifier[symbol_string] += literal[string] %( identifier[name] , identifier[name] , identifier[name] )
keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[len] ( identifier[layer] . identifier[top] )):
identifier[mapping] [ identifier[layer] . identifier[top] [ identifier[j] ]]= identifier[name]
identifier[output_name] = identifier[name]
keyword[return] identifier[symbol_string] , identifier[output_name] , identifier[input_dim]
|
def _parse_proto(prototxt_fname):
"""Parse Caffe prototxt into symbol string
"""
proto = caffe_parser.read_prototxt(prototxt_fname)
# process data layer
(input_name, input_dim, layers) = _get_input(proto)
# only support single input, so always use `data` as the input data
mapping = {input_name: 'data'}
need_flatten = {input_name: False}
symbol_string = "import mxnet as mx\ndata = mx.symbol.Variable(name='data')\n"
flatten_count = 0
output_name = ''
prev_name = None
# convert reset layers one by one
for (i, layer) in enumerate(layers):
type_string = ''
param_string = ''
skip_layer = False
bottom_order = []
name = re.sub('[-/]', '_', layer.name)
if layer.type == 'Convolution' or layer.type == 4:
type_string = 'mx.symbol.Convolution'
param_string = _convert_conv_param(layer.convolution_param)
need_flatten[name] = True # depends on [control=['if'], data=[]]
if layer.type == 'Deconvolution' or layer.type == 39:
type_string = 'mx.symbol.Deconvolution'
param_string = _convert_conv_param(layer.convolution_param)
need_flatten[name] = True # depends on [control=['if'], data=[]]
if layer.type == 'Pooling' or layer.type == 17:
type_string = 'mx.symbol.Pooling'
param_string = _convert_pooling_param(layer.pooling_param)
need_flatten[name] = True # depends on [control=['if'], data=[]]
if layer.type == 'ReLU' or layer.type == 18:
type_string = 'mx.symbol.Activation'
param_string = "act_type='relu'"
need_flatten[name] = need_flatten[mapping[layer.bottom[0]]] # depends on [control=['if'], data=[]]
if layer.type == 'TanH' or layer.type == 23:
type_string = 'mx.symbol.Activation'
param_string = "act_type='tanh'"
need_flatten[name] = need_flatten[mapping[layer.bottom[0]]] # depends on [control=['if'], data=[]]
if layer.type == 'Sigmoid' or layer.type == 19:
type_string = 'mx.symbol.Activation'
param_string = "act_type='sigmoid'"
need_flatten[name] = need_flatten[mapping[layer.bottom[0]]] # depends on [control=['if'], data=[]]
if layer.type == 'LRN' or layer.type == 15:
type_string = 'mx.symbol.LRN'
param = layer.lrn_param
param_string = 'alpha=%f, beta=%f, knorm=%f, nsize=%d' % (param.alpha, param.beta, param.k, param.local_size)
need_flatten[name] = True # depends on [control=['if'], data=[]]
if layer.type == 'InnerProduct' or layer.type == 14:
type_string = 'mx.symbol.FullyConnected'
param = layer.inner_product_param
param_string = 'num_hidden=%d, no_bias=%s' % (param.num_output, not param.bias_term)
need_flatten[name] = False # depends on [control=['if'], data=[]]
if layer.type == 'Dropout' or layer.type == 6:
type_string = 'mx.symbol.Dropout'
param = layer.dropout_param
param_string = 'p=%f' % param.dropout_ratio
need_flatten[name] = need_flatten[mapping[layer.bottom[0]]] # depends on [control=['if'], data=[]]
if layer.type == 'Softmax' or layer.type == 20:
if layer.softmax_param.axis == 2:
symbol_string += '%s = mx.symbol.transpose(%s, axes=(0,2,1))\n' % (mapping[layer.bottom[0]], mapping[layer.bottom[0]])
type_string = 'mx.symbol.SoftmaxActivation'
param_string = "mode='channel'"
need_flatten[name] = False # depends on [control=['if'], data=[]]
else:
type_string = 'mx.symbol.SoftmaxOutput' # depends on [control=['if'], data=[]]
if layer.type == 'Flatten' or layer.type == 8:
if 'softmax' in layer.bottom[0]:
prev_name = re.sub('[-/]', '_', layers[i - 1].name)
skip_layer = True # depends on [control=['if'], data=[]]
else:
type_string = 'mx.symbol.Flatten'
need_flatten[name] = False # depends on [control=['if'], data=[]]
if layer.type == 'Split' or layer.type == 22:
type_string = 'split' # will process later # depends on [control=['if'], data=[]]
if layer.type == 'Concat' or layer.type == 3:
type_string = 'mx.symbol.Concat'
need_flatten[name] = True # depends on [control=['if'], data=[]]
if layer.type == 'Crop':
type_string = 'mx.symbol.Crop'
need_flatten[name] = True
param_string = 'center_crop=True' # depends on [control=['if'], data=[]]
if layer.type == 'BatchNorm':
type_string = 'mx.symbol.BatchNorm'
param = layer.batch_norm_param
# CuDNN requires eps to be greater than 1e-05
# We compensate for this change in convert_model
epsilon = param.eps
if epsilon <= 1e-05:
epsilon = 0.0001 # depends on [control=['if'], data=['epsilon']]
# if next layer is scale, don't fix gamma
fix_gamma = layers[i + 1].type != 'Scale'
param_string = 'use_global_stats=%s, fix_gamma=%s, eps=%f' % (param.use_global_stats, fix_gamma, epsilon)
need_flatten[name] = need_flatten[mapping[layer.bottom[0]]] # depends on [control=['if'], data=[]]
if layer.type == 'Scale':
assert layers[i - 1].type == 'BatchNorm'
need_flatten[name] = need_flatten[mapping[layer.bottom[0]]]
skip_layer = True
prev_name = re.sub('[-/]', '_', layers[i - 1].name) # depends on [control=['if'], data=[]]
if layer.type == 'PReLU':
type_string = 'mx.symbol.LeakyReLU'
param = layer.prelu_param
param_string = "act_type='prelu', slope=%f" % param.filler.value
need_flatten[name] = need_flatten[mapping[layer.bottom[0]]] # depends on [control=['if'], data=[]]
if layer.type == 'Eltwise':
type_string = 'mx.symbol.broadcast_add'
param_string = ''
need_flatten[name] = False # depends on [control=['if'], data=[]]
if layer.type == 'Reshape':
type_string = 'mx.symbol.Reshape'
param = layer.reshape_param
param_string = 'shape=(' + ','.join([str(x) for x in list(param.shape.dim)]) + ')'
need_flatten[name] = True # depends on [control=['if'], data=[]]
if layer.type == 'AbsVal':
type_string = 'mx.symbol.abs'
need_flatten[name] = need_flatten[mapping[layer.bottom[0]]] # depends on [control=['if'], data=[]]
if layer.type == 'Normalize':
bottom = re.sub('[-/]', '_', layer.bottom[0])
conv_layer = _find_layer(layers, bottom)
assert conv_layer is not None
param = layer.norm_param
assert not param.across_spatial and (not param.channel_shared)
assert param.scale_filler.type == 'constant'
if conv_layer.type == 'Convolution':
scale_name = '%s_scale' % name
symbol_string += "%s=mx.sym.Variable(name='%s', shape=(1, %d, 1, 1), init=mx.init.Constant(%f))\n" % (scale_name, scale_name, conv_layer.convolution_param.num_output, param.scale_filler.value)
symbol_string += "%s=mx.symbol.L2Normalization(name='%s', data=%s, mode='channel')\n" % (name, name, mapping[layer.bottom[0]])
symbol_string += '%s=mx.symbol.broadcast_mul(lhs=%s, rhs=%s)\n' % (name, scale_name, name)
type_string = 'split'
need_flatten[name] = True # depends on [control=['if'], data=[]]
else:
raise ValueError('Unknown/Invalid normalize layer!') # depends on [control=['if'], data=[]]
if layer.type == 'Permute':
type_string = 'mx.symbol.transpose'
param_string = 'axes=(%s)' % ','.join([str(x) for x in layer.permute_param.order])
need_flatten[name] = True
from_name = '' # depends on [control=['if'], data=[]]
if layer.type == 'PriorBox':
param = layer.prior_box_param
if layer.bottom[0] == 'data':
bottom_order = [1] # depends on [control=['if'], data=[]]
else:
bottom_order = [0]
try:
import math
min_size = param.min_size[0] / input_dim[2]
max_size = math.sqrt(param.min_size[0] * param.max_size[0]) / input_dim[2]
sizes = '(%f, %f)' % (min_size, max_size) # depends on [control=['try'], data=[]]
except AttributeError:
min_size = param.min_size[0] / input_dim[2]
sizes = '(%f)' % min_size # depends on [control=['except'], data=[]]
ars = list(param.aspect_ratio)
ratios = [1.0]
for ar in ars:
ratios.append(ar)
if param.flip:
ratios.append(1.0 / ar) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['ar']]
ratios_string = '(' + ','.join((str(x) for x in ratios)) + ')'
clip = param.clip
if param.step_h > 0 or param.step_w > 0:
step_h = param.step_h
step_w = param.step_w # depends on [control=['if'], data=[]]
elif param.step > 0:
step_h = param.step
step_w = param.step # depends on [control=['if'], data=[]]
else:
step_h = -1
step_w = -1
finput_dimh = float(input_dim[2])
finput_dimw = float(input_dim[3])
step = '(%f, %f)' % (step_h / finput_dimh, step_w / finput_dimw)
assert param.offset == 0.5, 'currently only support offset = 0.5'
symbol_string += '%s = mx.contrib.symbol.MultiBoxPrior(%s, sizes=%s, ratios=%s, clip=%s, steps=%s, name="%s")\n' % (name, mapping[layer.bottom[0]], sizes, ratios_string, clip, step, name)
symbol_string += '%s = mx.symbol.Flatten(data=%s)\n' % (name, name)
type_string = 'split'
need_flatten[name] = False # depends on [control=['if'], data=[]]
if layer.type == 'DetectionOutput':
bottom_order = [1, 0, 2]
param = layer.detection_output_param
assert param.share_location == True
assert param.background_label_id == 0
nms_param = param.nms_param
type_string = 'mx.contrib.symbol.MultiBoxDetection'
param_string = 'nms_threshold=%f, nms_topk=%d, clip=False' % (nms_param.nms_threshold, nms_param.top_k) # depends on [control=['if'], data=[]]
if skip_layer:
assert len(layer.bottom) == 1
symbol_string += '%s = %s\n' % (name, prev_name) # depends on [control=['if'], data=[]]
elif type_string == '':
raise ValueError('Unknown layer %s!' % layer.type) # depends on [control=['if'], data=[]]
elif type_string != 'split':
bottom = layer.bottom
if param_string != '':
param_string = ', ' + param_string # depends on [control=['if'], data=['param_string']]
if len(bottom) == 1:
# print(need_flatten)
if need_flatten[mapping[bottom[0]]] and type_string == 'mx.symbol.FullyConnected':
flatten_name = 'flatten_%d' % flatten_count
symbol_string += "%s=mx.symbol.Flatten(name='%s', data=%s)\n" % (flatten_name, flatten_name, mapping[bottom[0]])
flatten_count += 1
need_flatten[flatten_name] = False
bottom[0] = flatten_name
mapping[bottom[0]] = bottom[0] # depends on [control=['if'], data=[]]
symbol_string += "%s = %s(name='%s', data=%s %s)\n" % (name, type_string, name, mapping[bottom[0]], param_string) # depends on [control=['if'], data=[]]
else:
if not bottom_order:
bottom_order = range(len(bottom)) # depends on [control=['if'], data=[]]
symbol_string += "%s = %s(name='%s', *[%s] %s)\n" % (name, type_string, name, ','.join([mapping[bottom[x]] for x in bottom_order]), param_string)
if layer.type == 'Concat' and layer.concat_param.axis == 2:
symbol_string += "%s = mx.symbol.Reshape(data=%s, shape=(0, -1, 4), name='%s')\n" % (name, name, name) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['type_string']]
for j in range(len(layer.top)):
mapping[layer.top[j]] = name # depends on [control=['for'], data=['j']]
output_name = name # depends on [control=['for'], data=[]]
return (symbol_string, output_name, input_dim)
|
def _compute_stress_drop_adjustment(self, SC, mag, scale_fac):
"""
Compute equation (6) p. 2200
"""
return scale_fac * np.minimum(
SC['delta'] + 0.05,
0.05 + SC['delta'] * (
np.maximum(mag - SC['M1'], 0) / (SC['Mh'] - SC['M1'])
)
)
|
def function[_compute_stress_drop_adjustment, parameter[self, SC, mag, scale_fac]]:
constant[
Compute equation (6) p. 2200
]
return[binary_operation[name[scale_fac] * call[name[np].minimum, parameter[binary_operation[call[name[SC]][constant[delta]] + constant[0.05]], binary_operation[constant[0.05] + binary_operation[call[name[SC]][constant[delta]] * binary_operation[call[name[np].maximum, parameter[binary_operation[name[mag] - call[name[SC]][constant[M1]]], constant[0]]] / binary_operation[call[name[SC]][constant[Mh]] - call[name[SC]][constant[M1]]]]]]]]]]
|
keyword[def] identifier[_compute_stress_drop_adjustment] ( identifier[self] , identifier[SC] , identifier[mag] , identifier[scale_fac] ):
literal[string]
keyword[return] identifier[scale_fac] * identifier[np] . identifier[minimum] (
identifier[SC] [ literal[string] ]+ literal[int] ,
literal[int] + identifier[SC] [ literal[string] ]*(
identifier[np] . identifier[maximum] ( identifier[mag] - identifier[SC] [ literal[string] ], literal[int] )/( identifier[SC] [ literal[string] ]- identifier[SC] [ literal[string] ])
)
)
|
def _compute_stress_drop_adjustment(self, SC, mag, scale_fac):
"""
Compute equation (6) p. 2200
"""
return scale_fac * np.minimum(SC['delta'] + 0.05, 0.05 + SC['delta'] * (np.maximum(mag - SC['M1'], 0) / (SC['Mh'] - SC['M1'])))
|
def get_template_object(template_file=''):
"""Retrieve template.
Args:
template_file (str): Name of template file.
Returns:
jinja2.Template: Template ready to render.
Raises:
AssertionError: Configured path for templates does not exist.
:obj:`foremast.exceptions.ForemastTemplateNotFound`: Requested template
is not available.
"""
jinja_template_paths_obj = []
if TEMPLATES_PATH:
external_templates = pathlib.Path(TEMPLATES_PATH).expanduser().resolve()
assert os.path.isdir(external_templates), 'External template path "{0}" not found'.format(external_templates)
jinja_template_paths_obj.append(external_templates)
jinja_template_paths_obj.append(LOCAL_TEMPLATES)
jinja_template_paths = [str(path) for path in jinja_template_paths_obj]
jinjaenv = jinja2.Environment(loader=jinja2.FileSystemLoader(jinja_template_paths))
try:
template = jinjaenv.get_template(template_file)
except jinja2.TemplateNotFound:
message = 'Unable to find template "{template_file}" in paths {paths}'.format(
template_file=template_file, paths=jinjaenv.loader.searchpath)
LOG.error(message)
raise ForemastTemplateNotFound(message)
return template
|
def function[get_template_object, parameter[template_file]]:
constant[Retrieve template.
Args:
template_file (str): Name of template file.
Returns:
jinja2.Template: Template ready to render.
Raises:
AssertionError: Configured path for templates does not exist.
:obj:`foremast.exceptions.ForemastTemplateNotFound`: Requested template
is not available.
]
variable[jinja_template_paths_obj] assign[=] list[[]]
if name[TEMPLATES_PATH] begin[:]
variable[external_templates] assign[=] call[call[call[name[pathlib].Path, parameter[name[TEMPLATES_PATH]]].expanduser, parameter[]].resolve, parameter[]]
assert[call[name[os].path.isdir, parameter[name[external_templates]]]]
call[name[jinja_template_paths_obj].append, parameter[name[external_templates]]]
call[name[jinja_template_paths_obj].append, parameter[name[LOCAL_TEMPLATES]]]
variable[jinja_template_paths] assign[=] <ast.ListComp object at 0x7da20c993250>
variable[jinjaenv] assign[=] call[name[jinja2].Environment, parameter[]]
<ast.Try object at 0x7da20c991ba0>
return[name[template]]
|
keyword[def] identifier[get_template_object] ( identifier[template_file] = literal[string] ):
literal[string]
identifier[jinja_template_paths_obj] =[]
keyword[if] identifier[TEMPLATES_PATH] :
identifier[external_templates] = identifier[pathlib] . identifier[Path] ( identifier[TEMPLATES_PATH] ). identifier[expanduser] (). identifier[resolve] ()
keyword[assert] identifier[os] . identifier[path] . identifier[isdir] ( identifier[external_templates] ), literal[string] . identifier[format] ( identifier[external_templates] )
identifier[jinja_template_paths_obj] . identifier[append] ( identifier[external_templates] )
identifier[jinja_template_paths_obj] . identifier[append] ( identifier[LOCAL_TEMPLATES] )
identifier[jinja_template_paths] =[ identifier[str] ( identifier[path] ) keyword[for] identifier[path] keyword[in] identifier[jinja_template_paths_obj] ]
identifier[jinjaenv] = identifier[jinja2] . identifier[Environment] ( identifier[loader] = identifier[jinja2] . identifier[FileSystemLoader] ( identifier[jinja_template_paths] ))
keyword[try] :
identifier[template] = identifier[jinjaenv] . identifier[get_template] ( identifier[template_file] )
keyword[except] identifier[jinja2] . identifier[TemplateNotFound] :
identifier[message] = literal[string] . identifier[format] (
identifier[template_file] = identifier[template_file] , identifier[paths] = identifier[jinjaenv] . identifier[loader] . identifier[searchpath] )
identifier[LOG] . identifier[error] ( identifier[message] )
keyword[raise] identifier[ForemastTemplateNotFound] ( identifier[message] )
keyword[return] identifier[template]
|
def get_template_object(template_file=''):
"""Retrieve template.
Args:
template_file (str): Name of template file.
Returns:
jinja2.Template: Template ready to render.
Raises:
AssertionError: Configured path for templates does not exist.
:obj:`foremast.exceptions.ForemastTemplateNotFound`: Requested template
is not available.
"""
jinja_template_paths_obj = []
if TEMPLATES_PATH:
external_templates = pathlib.Path(TEMPLATES_PATH).expanduser().resolve()
assert os.path.isdir(external_templates), 'External template path "{0}" not found'.format(external_templates)
jinja_template_paths_obj.append(external_templates) # depends on [control=['if'], data=[]]
jinja_template_paths_obj.append(LOCAL_TEMPLATES)
jinja_template_paths = [str(path) for path in jinja_template_paths_obj]
jinjaenv = jinja2.Environment(loader=jinja2.FileSystemLoader(jinja_template_paths))
try:
template = jinjaenv.get_template(template_file) # depends on [control=['try'], data=[]]
except jinja2.TemplateNotFound:
message = 'Unable to find template "{template_file}" in paths {paths}'.format(template_file=template_file, paths=jinjaenv.loader.searchpath)
LOG.error(message)
raise ForemastTemplateNotFound(message) # depends on [control=['except'], data=[]]
return template
|
def _decode_buffer(f):
"""
String types are normal (byte)strings
starting with an integer followed by ':'
which designates the string’s length.
Since there’s no way to specify the byte type
in bencoded files, we have to guess
"""
strlen = int(_readuntil(f, _TYPE_SEP))
buf = f.read(strlen)
if not len(buf) == strlen:
raise ValueError(
'string expected to be {} bytes long but the file ended after {} bytes'
.format(strlen, len(buf)))
try:
return buf.decode()
except UnicodeDecodeError:
return buf
|
def function[_decode_buffer, parameter[f]]:
constant[
String types are normal (byte)strings
starting with an integer followed by ':'
which designates the string’s length.
Since there’s no way to specify the byte type
in bencoded files, we have to guess
]
variable[strlen] assign[=] call[name[int], parameter[call[name[_readuntil], parameter[name[f], name[_TYPE_SEP]]]]]
variable[buf] assign[=] call[name[f].read, parameter[name[strlen]]]
if <ast.UnaryOp object at 0x7da18dc05c30> begin[:]
<ast.Raise object at 0x7da18dc049d0>
<ast.Try object at 0x7da18f09ce50>
|
keyword[def] identifier[_decode_buffer] ( identifier[f] ):
literal[string]
identifier[strlen] = identifier[int] ( identifier[_readuntil] ( identifier[f] , identifier[_TYPE_SEP] ))
identifier[buf] = identifier[f] . identifier[read] ( identifier[strlen] )
keyword[if] keyword[not] identifier[len] ( identifier[buf] )== identifier[strlen] :
keyword[raise] identifier[ValueError] (
literal[string]
. identifier[format] ( identifier[strlen] , identifier[len] ( identifier[buf] )))
keyword[try] :
keyword[return] identifier[buf] . identifier[decode] ()
keyword[except] identifier[UnicodeDecodeError] :
keyword[return] identifier[buf]
|
def _decode_buffer(f):
"""
String types are normal (byte)strings
starting with an integer followed by ':'
which designates the string’s length.
Since there’s no way to specify the byte type
in bencoded files, we have to guess
"""
strlen = int(_readuntil(f, _TYPE_SEP))
buf = f.read(strlen)
if not len(buf) == strlen:
raise ValueError('string expected to be {} bytes long but the file ended after {} bytes'.format(strlen, len(buf))) # depends on [control=['if'], data=[]]
try:
return buf.decode() # depends on [control=['try'], data=[]]
except UnicodeDecodeError:
return buf # depends on [control=['except'], data=[]]
|
def compress_repr(self) -> Optional[str]:
"""Try to find a compressed parameter value representation and
return it.
|Parameter.compress_repr| raises a |NotImplementedError| when
failing to find a compressed representation.
.. testsetup::
>>> from hydpy import pub
>>> del pub.timegrids
For the following examples, we define a 1-dimensional sequence
handling time-dependent floating point values:
>>> from hydpy.core.parametertools import Parameter
>>> class Test(Parameter):
... NDIM = 1
... TYPE = float
... TIME = True
>>> test = Test(None)
Before and directly after defining the parameter shape, `nan`
is returned:
>>> test.compress_repr()
'?'
>>> test
test(?)
>>> test.shape = 4
>>> test
test(?)
Due to the time-dependence of the values of our test class,
we need to specify a parameter and a simulation time step:
>>> test.parameterstep = '1d'
>>> test.simulationstep = '8h'
Compression succeeds when all required values are identical:
>>> test(3.0, 3.0, 3.0, 3.0)
>>> test.values
array([ 1., 1., 1., 1.])
>>> test.compress_repr()
'3.0'
>>> test
test(3.0)
Method |Parameter.compress_repr| returns |None| in case the
required values are not identical:
>>> test(1.0, 2.0, 3.0, 3.0)
>>> test.compress_repr()
>>> test
test(1.0, 2.0, 3.0, 3.0)
If some values are not required, indicate this by the `mask`
descriptor:
>>> import numpy
>>> test(3.0, 3.0, 3.0, numpy.nan)
>>> test
test(3.0, 3.0, 3.0, nan)
>>> Test.mask = numpy.array([True, True, True, False])
>>> test
test(3.0)
For a shape of zero, the string representing includes an empty list:
>>> test.shape = 0
>>> test.compress_repr()
'[]'
>>> test
test([])
Method |Parameter.compress_repr| works similarly for different
|Parameter| subclasses. The following examples focus on a
2-dimensional parameter handling integer values:
>>> from hydpy.core.parametertools import Parameter
>>> class Test(Parameter):
... NDIM = 2
... TYPE = int
... TIME = None
>>> test = Test(None)
>>> test.compress_repr()
'?'
>>> test
test(?)
>>> test.shape = (2, 3)
>>> test
test(?)
>>> test([[3, 3, 3],
... [3, 3, 3]])
>>> test
test(3)
>>> test([[3, 3, -999999],
... [3, 3, 3]])
>>> test
test([[3, 3, -999999],
[3, 3, 3]])
>>> Test.mask = numpy.array([
... [True, True, False],
... [True, True, True]])
>>> test
test(3)
>>> test.shape = (0, 0)
>>> test
test([[]])
"""
if not hasattr(self, 'value'):
return '?'
if not self:
return f"{self.NDIM * '['}{self.NDIM * ']'}"
unique = numpy.unique(self[self.mask])
if sum(numpy.isnan(unique)) == len(unique.flatten()):
unique = numpy.array([numpy.nan])
else:
unique = self.revert_timefactor(unique)
if len(unique) == 1:
return objecttools.repr_(unique[0])
return None
|
def function[compress_repr, parameter[self]]:
constant[Try to find a compressed parameter value representation and
return it.
|Parameter.compress_repr| raises a |NotImplementedError| when
failing to find a compressed representation.
.. testsetup::
>>> from hydpy import pub
>>> del pub.timegrids
For the following examples, we define a 1-dimensional sequence
handling time-dependent floating point values:
>>> from hydpy.core.parametertools import Parameter
>>> class Test(Parameter):
... NDIM = 1
... TYPE = float
... TIME = True
>>> test = Test(None)
Before and directly after defining the parameter shape, `nan`
is returned:
>>> test.compress_repr()
'?'
>>> test
test(?)
>>> test.shape = 4
>>> test
test(?)
Due to the time-dependence of the values of our test class,
we need to specify a parameter and a simulation time step:
>>> test.parameterstep = '1d'
>>> test.simulationstep = '8h'
Compression succeeds when all required values are identical:
>>> test(3.0, 3.0, 3.0, 3.0)
>>> test.values
array([ 1., 1., 1., 1.])
>>> test.compress_repr()
'3.0'
>>> test
test(3.0)
Method |Parameter.compress_repr| returns |None| in case the
required values are not identical:
>>> test(1.0, 2.0, 3.0, 3.0)
>>> test.compress_repr()
>>> test
test(1.0, 2.0, 3.0, 3.0)
If some values are not required, indicate this by the `mask`
descriptor:
>>> import numpy
>>> test(3.0, 3.0, 3.0, numpy.nan)
>>> test
test(3.0, 3.0, 3.0, nan)
>>> Test.mask = numpy.array([True, True, True, False])
>>> test
test(3.0)
For a shape of zero, the string representing includes an empty list:
>>> test.shape = 0
>>> test.compress_repr()
'[]'
>>> test
test([])
Method |Parameter.compress_repr| works similarly for different
|Parameter| subclasses. The following examples focus on a
2-dimensional parameter handling integer values:
>>> from hydpy.core.parametertools import Parameter
>>> class Test(Parameter):
... NDIM = 2
... TYPE = int
... TIME = None
>>> test = Test(None)
>>> test.compress_repr()
'?'
>>> test
test(?)
>>> test.shape = (2, 3)
>>> test
test(?)
>>> test([[3, 3, 3],
... [3, 3, 3]])
>>> test
test(3)
>>> test([[3, 3, -999999],
... [3, 3, 3]])
>>> test
test([[3, 3, -999999],
[3, 3, 3]])
>>> Test.mask = numpy.array([
... [True, True, False],
... [True, True, True]])
>>> test
test(3)
>>> test.shape = (0, 0)
>>> test
test([[]])
]
if <ast.UnaryOp object at 0x7da2044c38b0> begin[:]
return[constant[?]]
if <ast.UnaryOp object at 0x7da2044c11e0> begin[:]
return[<ast.JoinedStr object at 0x7da2044c0a90>]
variable[unique] assign[=] call[name[numpy].unique, parameter[call[name[self]][name[self].mask]]]
if compare[call[name[sum], parameter[call[name[numpy].isnan, parameter[name[unique]]]]] equal[==] call[name[len], parameter[call[name[unique].flatten, parameter[]]]]] begin[:]
variable[unique] assign[=] call[name[numpy].array, parameter[list[[<ast.Attribute object at 0x7da2044c3a60>]]]]
if compare[call[name[len], parameter[name[unique]]] equal[==] constant[1]] begin[:]
return[call[name[objecttools].repr_, parameter[call[name[unique]][constant[0]]]]]
return[constant[None]]
|
keyword[def] identifier[compress_repr] ( identifier[self] )-> identifier[Optional] [ identifier[str] ]:
literal[string]
keyword[if] keyword[not] identifier[hasattr] ( identifier[self] , literal[string] ):
keyword[return] literal[string]
keyword[if] keyword[not] identifier[self] :
keyword[return] literal[string]
identifier[unique] = identifier[numpy] . identifier[unique] ( identifier[self] [ identifier[self] . identifier[mask] ])
keyword[if] identifier[sum] ( identifier[numpy] . identifier[isnan] ( identifier[unique] ))== identifier[len] ( identifier[unique] . identifier[flatten] ()):
identifier[unique] = identifier[numpy] . identifier[array] ([ identifier[numpy] . identifier[nan] ])
keyword[else] :
identifier[unique] = identifier[self] . identifier[revert_timefactor] ( identifier[unique] )
keyword[if] identifier[len] ( identifier[unique] )== literal[int] :
keyword[return] identifier[objecttools] . identifier[repr_] ( identifier[unique] [ literal[int] ])
keyword[return] keyword[None]
|
def compress_repr(self) -> Optional[str]:
"""Try to find a compressed parameter value representation and
return it.
|Parameter.compress_repr| raises a |NotImplementedError| when
failing to find a compressed representation.
.. testsetup::
>>> from hydpy import pub
>>> del pub.timegrids
For the following examples, we define a 1-dimensional sequence
handling time-dependent floating point values:
>>> from hydpy.core.parametertools import Parameter
>>> class Test(Parameter):
... NDIM = 1
... TYPE = float
... TIME = True
>>> test = Test(None)
Before and directly after defining the parameter shape, `nan`
is returned:
>>> test.compress_repr()
'?'
>>> test
test(?)
>>> test.shape = 4
>>> test
test(?)
Due to the time-dependence of the values of our test class,
we need to specify a parameter and a simulation time step:
>>> test.parameterstep = '1d'
>>> test.simulationstep = '8h'
Compression succeeds when all required values are identical:
>>> test(3.0, 3.0, 3.0, 3.0)
>>> test.values
array([ 1., 1., 1., 1.])
>>> test.compress_repr()
'3.0'
>>> test
test(3.0)
Method |Parameter.compress_repr| returns |None| in case the
required values are not identical:
>>> test(1.0, 2.0, 3.0, 3.0)
>>> test.compress_repr()
>>> test
test(1.0, 2.0, 3.0, 3.0)
If some values are not required, indicate this by the `mask`
descriptor:
>>> import numpy
>>> test(3.0, 3.0, 3.0, numpy.nan)
>>> test
test(3.0, 3.0, 3.0, nan)
>>> Test.mask = numpy.array([True, True, True, False])
>>> test
test(3.0)
For a shape of zero, the string representing includes an empty list:
>>> test.shape = 0
>>> test.compress_repr()
'[]'
>>> test
test([])
Method |Parameter.compress_repr| works similarly for different
|Parameter| subclasses. The following examples focus on a
2-dimensional parameter handling integer values:
>>> from hydpy.core.parametertools import Parameter
>>> class Test(Parameter):
... NDIM = 2
... TYPE = int
... TIME = None
>>> test = Test(None)
>>> test.compress_repr()
'?'
>>> test
test(?)
>>> test.shape = (2, 3)
>>> test
test(?)
>>> test([[3, 3, 3],
... [3, 3, 3]])
>>> test
test(3)
>>> test([[3, 3, -999999],
... [3, 3, 3]])
>>> test
test([[3, 3, -999999],
[3, 3, 3]])
>>> Test.mask = numpy.array([
... [True, True, False],
... [True, True, True]])
>>> test
test(3)
>>> test.shape = (0, 0)
>>> test
test([[]])
"""
if not hasattr(self, 'value'):
return '?' # depends on [control=['if'], data=[]]
if not self:
return f"{self.NDIM * '['}{self.NDIM * ']'}" # depends on [control=['if'], data=[]]
unique = numpy.unique(self[self.mask])
if sum(numpy.isnan(unique)) == len(unique.flatten()):
unique = numpy.array([numpy.nan]) # depends on [control=['if'], data=[]]
else:
unique = self.revert_timefactor(unique)
if len(unique) == 1:
return objecttools.repr_(unique[0]) # depends on [control=['if'], data=[]]
return None
|
def create(self, file_or_path, **kwargs):
"""
Creates an upload for the given file or path.
"""
opened = False
if isinstance(file_or_path, str_type()):
file_or_path = open(file_or_path, 'rb')
opened = True
elif not getattr(file_or_path, 'read', False):
raise Exception("A file or path to a file is required for this operation.")
try:
return self.client._post(
self._url(),
file_or_path,
headers=self._resource_class.create_headers({}),
file_upload=True
)
finally:
if opened:
file_or_path.close()
|
def function[create, parameter[self, file_or_path]]:
constant[
Creates an upload for the given file or path.
]
variable[opened] assign[=] constant[False]
if call[name[isinstance], parameter[name[file_or_path], call[name[str_type], parameter[]]]] begin[:]
variable[file_or_path] assign[=] call[name[open], parameter[name[file_or_path], constant[rb]]]
variable[opened] assign[=] constant[True]
<ast.Try object at 0x7da18f58d030>
|
keyword[def] identifier[create] ( identifier[self] , identifier[file_or_path] ,** identifier[kwargs] ):
literal[string]
identifier[opened] = keyword[False]
keyword[if] identifier[isinstance] ( identifier[file_or_path] , identifier[str_type] ()):
identifier[file_or_path] = identifier[open] ( identifier[file_or_path] , literal[string] )
identifier[opened] = keyword[True]
keyword[elif] keyword[not] identifier[getattr] ( identifier[file_or_path] , literal[string] , keyword[False] ):
keyword[raise] identifier[Exception] ( literal[string] )
keyword[try] :
keyword[return] identifier[self] . identifier[client] . identifier[_post] (
identifier[self] . identifier[_url] (),
identifier[file_or_path] ,
identifier[headers] = identifier[self] . identifier[_resource_class] . identifier[create_headers] ({}),
identifier[file_upload] = keyword[True]
)
keyword[finally] :
keyword[if] identifier[opened] :
identifier[file_or_path] . identifier[close] ()
|
def create(self, file_or_path, **kwargs):
"""
Creates an upload for the given file or path.
"""
opened = False
if isinstance(file_or_path, str_type()):
file_or_path = open(file_or_path, 'rb')
opened = True # depends on [control=['if'], data=[]]
elif not getattr(file_or_path, 'read', False):
raise Exception('A file or path to a file is required for this operation.') # depends on [control=['if'], data=[]]
try:
return self.client._post(self._url(), file_or_path, headers=self._resource_class.create_headers({}), file_upload=True) # depends on [control=['try'], data=[]]
finally:
if opened:
file_or_path.close() # depends on [control=['if'], data=[]]
|
def predict_log_proba(self, X):
"""
Return log-probability estimates for the test vector X.
Parameters
----------
X : array-like, shape = [n_samples, n_features]
Returns
-------
C : array-like, shape = [n_samples, n_classes]
Returns the log-probability of the samples for each class in
the model. The columns correspond to the classes in sorted
order, as they appear in the attribute `classes_`.
"""
jll = self._joint_log_likelihood(X)
# normalize by P(x) = P(f_1, ..., f_n)
log_prob_x = logsumexp(jll, axis=1) # return shape = (2,)
return jll - np.atleast_2d(log_prob_x).T
|
def function[predict_log_proba, parameter[self, X]]:
constant[
Return log-probability estimates for the test vector X.
Parameters
----------
X : array-like, shape = [n_samples, n_features]
Returns
-------
C : array-like, shape = [n_samples, n_classes]
Returns the log-probability of the samples for each class in
the model. The columns correspond to the classes in sorted
order, as they appear in the attribute `classes_`.
]
variable[jll] assign[=] call[name[self]._joint_log_likelihood, parameter[name[X]]]
variable[log_prob_x] assign[=] call[name[logsumexp], parameter[name[jll]]]
return[binary_operation[name[jll] - call[name[np].atleast_2d, parameter[name[log_prob_x]]].T]]
|
keyword[def] identifier[predict_log_proba] ( identifier[self] , identifier[X] ):
literal[string]
identifier[jll] = identifier[self] . identifier[_joint_log_likelihood] ( identifier[X] )
identifier[log_prob_x] = identifier[logsumexp] ( identifier[jll] , identifier[axis] = literal[int] )
keyword[return] identifier[jll] - identifier[np] . identifier[atleast_2d] ( identifier[log_prob_x] ). identifier[T]
|
def predict_log_proba(self, X):
"""
Return log-probability estimates for the test vector X.
Parameters
----------
X : array-like, shape = [n_samples, n_features]
Returns
-------
C : array-like, shape = [n_samples, n_classes]
Returns the log-probability of the samples for each class in
the model. The columns correspond to the classes in sorted
order, as they appear in the attribute `classes_`.
"""
jll = self._joint_log_likelihood(X)
# normalize by P(x) = P(f_1, ..., f_n)
log_prob_x = logsumexp(jll, axis=1) # return shape = (2,)
return jll - np.atleast_2d(log_prob_x).T
|
def cudnnActivationBackward(handle, mode, alpha, srcDesc, srcData, srcDiffDesc, srcDiffData,
destDesc, destData, beta, destDiffDesc, destDiffData):
""""
Gradient of activation function.
This routine computes the gradient of a neuron activation function.
In-place operation is allowed for this routine; i.e., srcData and destData
pointers may be equal and srcDiffData and destDiffData pointers may be equal.
However, this requires the corresponding tensor descriptors to be identical
(particularly, the strides of the input and output must match for in-place operation
to be allowed).
Parameters
----------
handle : cudnnHandle
Handle to a previously created cuDNN context.
mode : cudnnActivationMode
Enumerant to specify the activation mode.
alpha: float
Scaling factor with which every element of the input tensor is multiplied.
srcDesc : cudnnTensorDescriptor
Handle to the previously initialized input tensor descriptor.
srcData : void_p
Data pointer to GPU memory associated with the tensor descriptor
srcDesc.
srcDiffDesc : cudnnTensorDescriptor
Handle to the previously initialized input differential tensor descriptor.
srcDiffData : void_p
Data pointer to GPU memory associated with the tensor descriptor
srcDiffData.
destDesc : cudnnTensorDescriptor
Handle to the previously initialized output tensor descriptor.
destData : void_p
Data pointer to GPU memory associated with the output tensor descriptor
destDesc.
beta: float
Scaling factor which is applied on every element of the output tensor prior
to adding the result of the activation gradient. Note that if beta is zero, the
output is not read and can contain any uninitialized data (including Nan numbers).
destDiffDesc : cudnnTensorDescriptor
Handle to the previously initialized output differential tensor descriptor.
destDiffData : void_p
Data pointer to GPU memory associated with the output tensor descriptor
destDiffDesc.
"""
dataType = cudnnGetTensor4dDescriptor(destDesc)[0]
if dataType == cudnnDataType['CUDNN_DATA_DOUBLE']:
alphaRef = ctypes.byref(ctypes.c_double(alpha))
betaRef = ctypes.byref(ctypes.c_double(beta))
else:
alphaRef = ctypes.byref(ctypes.c_float(alpha))
betaRef = ctypes.byref(ctypes.c_float(beta))
status = _libcudnn.cudnnActivationBackward(handle, mode, alphaRef, srcDesc, srcData,
srcDiffDesc, srcDiffData,
destDesc, destData, betaRef,
destDiffDesc, destDiffData)
cudnnCheckStatus(status)
|
def function[cudnnActivationBackward, parameter[handle, mode, alpha, srcDesc, srcData, srcDiffDesc, srcDiffData, destDesc, destData, beta, destDiffDesc, destDiffData]]:
constant["
Gradient of activation function.
This routine computes the gradient of a neuron activation function.
In-place operation is allowed for this routine; i.e., srcData and destData
pointers may be equal and srcDiffData and destDiffData pointers may be equal.
However, this requires the corresponding tensor descriptors to be identical
(particularly, the strides of the input and output must match for in-place operation
to be allowed).
Parameters
----------
handle : cudnnHandle
Handle to a previously created cuDNN context.
mode : cudnnActivationMode
Enumerant to specify the activation mode.
alpha: float
Scaling factor with which every element of the input tensor is multiplied.
srcDesc : cudnnTensorDescriptor
Handle to the previously initialized input tensor descriptor.
srcData : void_p
Data pointer to GPU memory associated with the tensor descriptor
srcDesc.
srcDiffDesc : cudnnTensorDescriptor
Handle to the previously initialized input differential tensor descriptor.
srcDiffData : void_p
Data pointer to GPU memory associated with the tensor descriptor
srcDiffData.
destDesc : cudnnTensorDescriptor
Handle to the previously initialized output tensor descriptor.
destData : void_p
Data pointer to GPU memory associated with the output tensor descriptor
destDesc.
beta: float
Scaling factor which is applied on every element of the output tensor prior
to adding the result of the activation gradient. Note that if beta is zero, the
output is not read and can contain any uninitialized data (including Nan numbers).
destDiffDesc : cudnnTensorDescriptor
Handle to the previously initialized output differential tensor descriptor.
destDiffData : void_p
Data pointer to GPU memory associated with the output tensor descriptor
destDiffDesc.
]
variable[dataType] assign[=] call[call[name[cudnnGetTensor4dDescriptor], parameter[name[destDesc]]]][constant[0]]
if compare[name[dataType] equal[==] call[name[cudnnDataType]][constant[CUDNN_DATA_DOUBLE]]] begin[:]
variable[alphaRef] assign[=] call[name[ctypes].byref, parameter[call[name[ctypes].c_double, parameter[name[alpha]]]]]
variable[betaRef] assign[=] call[name[ctypes].byref, parameter[call[name[ctypes].c_double, parameter[name[beta]]]]]
variable[status] assign[=] call[name[_libcudnn].cudnnActivationBackward, parameter[name[handle], name[mode], name[alphaRef], name[srcDesc], name[srcData], name[srcDiffDesc], name[srcDiffData], name[destDesc], name[destData], name[betaRef], name[destDiffDesc], name[destDiffData]]]
call[name[cudnnCheckStatus], parameter[name[status]]]
|
keyword[def] identifier[cudnnActivationBackward] ( identifier[handle] , identifier[mode] , identifier[alpha] , identifier[srcDesc] , identifier[srcData] , identifier[srcDiffDesc] , identifier[srcDiffData] ,
identifier[destDesc] , identifier[destData] , identifier[beta] , identifier[destDiffDesc] , identifier[destDiffData] ):
literal[string]
identifier[dataType] = identifier[cudnnGetTensor4dDescriptor] ( identifier[destDesc] )[ literal[int] ]
keyword[if] identifier[dataType] == identifier[cudnnDataType] [ literal[string] ]:
identifier[alphaRef] = identifier[ctypes] . identifier[byref] ( identifier[ctypes] . identifier[c_double] ( identifier[alpha] ))
identifier[betaRef] = identifier[ctypes] . identifier[byref] ( identifier[ctypes] . identifier[c_double] ( identifier[beta] ))
keyword[else] :
identifier[alphaRef] = identifier[ctypes] . identifier[byref] ( identifier[ctypes] . identifier[c_float] ( identifier[alpha] ))
identifier[betaRef] = identifier[ctypes] . identifier[byref] ( identifier[ctypes] . identifier[c_float] ( identifier[beta] ))
identifier[status] = identifier[_libcudnn] . identifier[cudnnActivationBackward] ( identifier[handle] , identifier[mode] , identifier[alphaRef] , identifier[srcDesc] , identifier[srcData] ,
identifier[srcDiffDesc] , identifier[srcDiffData] ,
identifier[destDesc] , identifier[destData] , identifier[betaRef] ,
identifier[destDiffDesc] , identifier[destDiffData] )
identifier[cudnnCheckStatus] ( identifier[status] )
|
def cudnnActivationBackward(handle, mode, alpha, srcDesc, srcData, srcDiffDesc, srcDiffData, destDesc, destData, beta, destDiffDesc, destDiffData):
""""
Gradient of activation function.
This routine computes the gradient of a neuron activation function.
In-place operation is allowed for this routine; i.e., srcData and destData
pointers may be equal and srcDiffData and destDiffData pointers may be equal.
However, this requires the corresponding tensor descriptors to be identical
(particularly, the strides of the input and output must match for in-place operation
to be allowed).
Parameters
----------
handle : cudnnHandle
Handle to a previously created cuDNN context.
mode : cudnnActivationMode
Enumerant to specify the activation mode.
alpha: float
Scaling factor with which every element of the input tensor is multiplied.
srcDesc : cudnnTensorDescriptor
Handle to the previously initialized input tensor descriptor.
srcData : void_p
Data pointer to GPU memory associated with the tensor descriptor
srcDesc.
srcDiffDesc : cudnnTensorDescriptor
Handle to the previously initialized input differential tensor descriptor.
srcDiffData : void_p
Data pointer to GPU memory associated with the tensor descriptor
srcDiffData.
destDesc : cudnnTensorDescriptor
Handle to the previously initialized output tensor descriptor.
destData : void_p
Data pointer to GPU memory associated with the output tensor descriptor
destDesc.
beta: float
Scaling factor which is applied on every element of the output tensor prior
to adding the result of the activation gradient. Note that if beta is zero, the
output is not read and can contain any uninitialized data (including Nan numbers).
destDiffDesc : cudnnTensorDescriptor
Handle to the previously initialized output differential tensor descriptor.
destDiffData : void_p
Data pointer to GPU memory associated with the output tensor descriptor
destDiffDesc.
"""
dataType = cudnnGetTensor4dDescriptor(destDesc)[0]
if dataType == cudnnDataType['CUDNN_DATA_DOUBLE']:
alphaRef = ctypes.byref(ctypes.c_double(alpha))
betaRef = ctypes.byref(ctypes.c_double(beta)) # depends on [control=['if'], data=[]]
else:
alphaRef = ctypes.byref(ctypes.c_float(alpha))
betaRef = ctypes.byref(ctypes.c_float(beta))
status = _libcudnn.cudnnActivationBackward(handle, mode, alphaRef, srcDesc, srcData, srcDiffDesc, srcDiffData, destDesc, destData, betaRef, destDiffDesc, destDiffData)
cudnnCheckStatus(status)
|
def Cplm(self):
r'''Liquid-phase heat capacity of the mixture at its current
temperature and composition, in units of [J/mol/K]. For calculation of
this property at other temperatures or compositions, or specifying
manually the method used to calculate it, and more - see the object
oriented interface :obj:`thermo.heat_capacity.HeatCapacityLiquidMixture`;
each Mixture instance creates one to actually perform the calculations.
Examples
--------
>>> Mixture(['toluene', 'decane'], ws=[.9, .1], T=300).Cplm
168.29127923518843
'''
return self.HeatCapacityLiquidMixture(self.T, self.P, self.zs, self.ws)
|
def function[Cplm, parameter[self]]:
constant[Liquid-phase heat capacity of the mixture at its current
temperature and composition, in units of [J/mol/K]. For calculation of
this property at other temperatures or compositions, or specifying
manually the method used to calculate it, and more - see the object
oriented interface :obj:`thermo.heat_capacity.HeatCapacityLiquidMixture`;
each Mixture instance creates one to actually perform the calculations.
Examples
--------
>>> Mixture(['toluene', 'decane'], ws=[.9, .1], T=300).Cplm
168.29127923518843
]
return[call[name[self].HeatCapacityLiquidMixture, parameter[name[self].T, name[self].P, name[self].zs, name[self].ws]]]
|
keyword[def] identifier[Cplm] ( identifier[self] ):
literal[string]
keyword[return] identifier[self] . identifier[HeatCapacityLiquidMixture] ( identifier[self] . identifier[T] , identifier[self] . identifier[P] , identifier[self] . identifier[zs] , identifier[self] . identifier[ws] )
|
def Cplm(self):
"""Liquid-phase heat capacity of the mixture at its current
temperature and composition, in units of [J/mol/K]. For calculation of
this property at other temperatures or compositions, or specifying
manually the method used to calculate it, and more - see the object
oriented interface :obj:`thermo.heat_capacity.HeatCapacityLiquidMixture`;
each Mixture instance creates one to actually perform the calculations.
Examples
--------
>>> Mixture(['toluene', 'decane'], ws=[.9, .1], T=300).Cplm
168.29127923518843
"""
return self.HeatCapacityLiquidMixture(self.T, self.P, self.zs, self.ws)
|
def is_break_tag(self, el):
"""Check if tag is an element we should break on."""
name = el.name
return name in self.break_tags or name in self.user_break_tags
|
def function[is_break_tag, parameter[self, el]]:
constant[Check if tag is an element we should break on.]
variable[name] assign[=] name[el].name
return[<ast.BoolOp object at 0x7da18f8107f0>]
|
keyword[def] identifier[is_break_tag] ( identifier[self] , identifier[el] ):
literal[string]
identifier[name] = identifier[el] . identifier[name]
keyword[return] identifier[name] keyword[in] identifier[self] . identifier[break_tags] keyword[or] identifier[name] keyword[in] identifier[self] . identifier[user_break_tags]
|
def is_break_tag(self, el):
"""Check if tag is an element we should break on."""
name = el.name
return name in self.break_tags or name in self.user_break_tags
|
def _check_endings(self):
"""Check begin/end of slug, raises Error if malformed."""
if self.slug.startswith("/") and self.slug.endswith("/"):
raise InvalidSlugError(
_("Invalid slug. Did you mean {}, without the leading and trailing slashes?".format(self.slug.strip("/"))))
elif self.slug.startswith("/"):
raise InvalidSlugError(
_("Invalid slug. Did you mean {}, without the leading slash?".format(self.slug.strip("/"))))
elif self.slug.endswith("/"):
raise InvalidSlugError(
_("Invalid slug. Did you mean {}, without the trailing slash?".format(self.slug.strip("/"))))
|
def function[_check_endings, parameter[self]]:
constant[Check begin/end of slug, raises Error if malformed.]
if <ast.BoolOp object at 0x7da18ede6710> begin[:]
<ast.Raise object at 0x7da18ede50f0>
|
keyword[def] identifier[_check_endings] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[slug] . identifier[startswith] ( literal[string] ) keyword[and] identifier[self] . identifier[slug] . identifier[endswith] ( literal[string] ):
keyword[raise] identifier[InvalidSlugError] (
identifier[_] ( literal[string] . identifier[format] ( identifier[self] . identifier[slug] . identifier[strip] ( literal[string] ))))
keyword[elif] identifier[self] . identifier[slug] . identifier[startswith] ( literal[string] ):
keyword[raise] identifier[InvalidSlugError] (
identifier[_] ( literal[string] . identifier[format] ( identifier[self] . identifier[slug] . identifier[strip] ( literal[string] ))))
keyword[elif] identifier[self] . identifier[slug] . identifier[endswith] ( literal[string] ):
keyword[raise] identifier[InvalidSlugError] (
identifier[_] ( literal[string] . identifier[format] ( identifier[self] . identifier[slug] . identifier[strip] ( literal[string] ))))
|
def _check_endings(self):
"""Check begin/end of slug, raises Error if malformed."""
if self.slug.startswith('/') and self.slug.endswith('/'):
raise InvalidSlugError(_('Invalid slug. Did you mean {}, without the leading and trailing slashes?'.format(self.slug.strip('/')))) # depends on [control=['if'], data=[]]
elif self.slug.startswith('/'):
raise InvalidSlugError(_('Invalid slug. Did you mean {}, without the leading slash?'.format(self.slug.strip('/')))) # depends on [control=['if'], data=[]]
elif self.slug.endswith('/'):
raise InvalidSlugError(_('Invalid slug. Did you mean {}, without the trailing slash?'.format(self.slug.strip('/')))) # depends on [control=['if'], data=[]]
|
def mass_3d(self, R, Rs, rho0, r_core):
"""
mass enclosed a 3d sphere or radius r
:param r:
:param Ra:
:param Rs:
:return:
"""
b = r_core * Rs ** -1
x = R * Rs ** -1
M_0 = 4 * np.pi * Rs**3 * rho0
return M_0 * (x * (1+x) ** -1 * (-1+b) ** -1 + (-1+b) ** -2 *
((2*b-1)*np.log(1/(1+x)) + b **2 * np.log(x / b + 1)))
|
def function[mass_3d, parameter[self, R, Rs, rho0, r_core]]:
constant[
mass enclosed a 3d sphere or radius r
:param r:
:param Ra:
:param Rs:
:return:
]
variable[b] assign[=] binary_operation[name[r_core] * binary_operation[name[Rs] ** <ast.UnaryOp object at 0x7da1b26af430>]]
variable[x] assign[=] binary_operation[name[R] * binary_operation[name[Rs] ** <ast.UnaryOp object at 0x7da1b26aee30>]]
variable[M_0] assign[=] binary_operation[binary_operation[binary_operation[constant[4] * name[np].pi] * binary_operation[name[Rs] ** constant[3]]] * name[rho0]]
return[binary_operation[name[M_0] * binary_operation[binary_operation[binary_operation[name[x] * binary_operation[binary_operation[constant[1] + name[x]] ** <ast.UnaryOp object at 0x7da1b26aff10>]] * binary_operation[binary_operation[<ast.UnaryOp object at 0x7da1b26ae4d0> + name[b]] ** <ast.UnaryOp object at 0x7da1b26aca30>]] + binary_operation[binary_operation[binary_operation[<ast.UnaryOp object at 0x7da1b26af760> + name[b]] ** <ast.UnaryOp object at 0x7da1b26af4c0>] * binary_operation[binary_operation[binary_operation[binary_operation[constant[2] * name[b]] - constant[1]] * call[name[np].log, parameter[binary_operation[constant[1] / binary_operation[constant[1] + name[x]]]]]] + binary_operation[binary_operation[name[b] ** constant[2]] * call[name[np].log, parameter[binary_operation[binary_operation[name[x] / name[b]] + constant[1]]]]]]]]]]
|
keyword[def] identifier[mass_3d] ( identifier[self] , identifier[R] , identifier[Rs] , identifier[rho0] , identifier[r_core] ):
literal[string]
identifier[b] = identifier[r_core] * identifier[Rs] **- literal[int]
identifier[x] = identifier[R] * identifier[Rs] **- literal[int]
identifier[M_0] = literal[int] * identifier[np] . identifier[pi] * identifier[Rs] ** literal[int] * identifier[rho0]
keyword[return] identifier[M_0] *( identifier[x] *( literal[int] + identifier[x] )**- literal[int] *(- literal[int] + identifier[b] )**- literal[int] +(- literal[int] + identifier[b] )**- literal[int] *
(( literal[int] * identifier[b] - literal[int] )* identifier[np] . identifier[log] ( literal[int] /( literal[int] + identifier[x] ))+ identifier[b] ** literal[int] * identifier[np] . identifier[log] ( identifier[x] / identifier[b] + literal[int] )))
|
def mass_3d(self, R, Rs, rho0, r_core):
"""
mass enclosed a 3d sphere or radius r
:param r:
:param Ra:
:param Rs:
:return:
"""
b = r_core * Rs ** (-1)
x = R * Rs ** (-1)
M_0 = 4 * np.pi * Rs ** 3 * rho0
return M_0 * (x * (1 + x) ** (-1) * (-1 + b) ** (-1) + (-1 + b) ** (-2) * ((2 * b - 1) * np.log(1 / (1 + x)) + b ** 2 * np.log(x / b + 1)))
|
def dfs_preorder(self, reverse=False):
"""Generator that returns each element of the tree in Preorder order.
Keyword arguments:
reverse -- if true, the search is done from right to left."""
stack = deque()
stack.append(self)
while stack:
node = stack.pop()
yield node
if hasattr(node, "childs"):
if reverse:
stack.extend(node.childs)
else:
stack.extend(node.childs[::-1])
|
def function[dfs_preorder, parameter[self, reverse]]:
constant[Generator that returns each element of the tree in Preorder order.
Keyword arguments:
reverse -- if true, the search is done from right to left.]
variable[stack] assign[=] call[name[deque], parameter[]]
call[name[stack].append, parameter[name[self]]]
while name[stack] begin[:]
variable[node] assign[=] call[name[stack].pop, parameter[]]
<ast.Yield object at 0x7da1b0ffba00>
if call[name[hasattr], parameter[name[node], constant[childs]]] begin[:]
if name[reverse] begin[:]
call[name[stack].extend, parameter[name[node].childs]]
|
keyword[def] identifier[dfs_preorder] ( identifier[self] , identifier[reverse] = keyword[False] ):
literal[string]
identifier[stack] = identifier[deque] ()
identifier[stack] . identifier[append] ( identifier[self] )
keyword[while] identifier[stack] :
identifier[node] = identifier[stack] . identifier[pop] ()
keyword[yield] identifier[node]
keyword[if] identifier[hasattr] ( identifier[node] , literal[string] ):
keyword[if] identifier[reverse] :
identifier[stack] . identifier[extend] ( identifier[node] . identifier[childs] )
keyword[else] :
identifier[stack] . identifier[extend] ( identifier[node] . identifier[childs] [::- literal[int] ])
|
def dfs_preorder(self, reverse=False):
"""Generator that returns each element of the tree in Preorder order.
Keyword arguments:
reverse -- if true, the search is done from right to left."""
stack = deque()
stack.append(self)
while stack:
node = stack.pop()
yield node
if hasattr(node, 'childs'):
if reverse:
stack.extend(node.childs) # depends on [control=['if'], data=[]]
else:
stack.extend(node.childs[::-1]) # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]]
|
def get(self, request, *args, **kwargs):
"""
Do the login and password protection.
"""
response = super(EntryProtectionMixin, self).get(
request, *args, **kwargs)
if self.object.login_required and not request.user.is_authenticated:
return self.login()
if (self.object.password and self.object.password !=
self.request.session.get(self.session_key % self.object.pk)):
return self.password()
return response
|
def function[get, parameter[self, request]]:
constant[
Do the login and password protection.
]
variable[response] assign[=] call[call[name[super], parameter[name[EntryProtectionMixin], name[self]]].get, parameter[name[request], <ast.Starred object at 0x7da18fe93430>]]
if <ast.BoolOp object at 0x7da18fe926b0> begin[:]
return[call[name[self].login, parameter[]]]
if <ast.BoolOp object at 0x7da18fe91180> begin[:]
return[call[name[self].password, parameter[]]]
return[name[response]]
|
keyword[def] identifier[get] ( identifier[self] , identifier[request] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[response] = identifier[super] ( identifier[EntryProtectionMixin] , identifier[self] ). identifier[get] (
identifier[request] ,* identifier[args] ,** identifier[kwargs] )
keyword[if] identifier[self] . identifier[object] . identifier[login_required] keyword[and] keyword[not] identifier[request] . identifier[user] . identifier[is_authenticated] :
keyword[return] identifier[self] . identifier[login] ()
keyword[if] ( identifier[self] . identifier[object] . identifier[password] keyword[and] identifier[self] . identifier[object] . identifier[password] !=
identifier[self] . identifier[request] . identifier[session] . identifier[get] ( identifier[self] . identifier[session_key] % identifier[self] . identifier[object] . identifier[pk] )):
keyword[return] identifier[self] . identifier[password] ()
keyword[return] identifier[response]
|
def get(self, request, *args, **kwargs):
"""
Do the login and password protection.
"""
response = super(EntryProtectionMixin, self).get(request, *args, **kwargs)
if self.object.login_required and (not request.user.is_authenticated):
return self.login() # depends on [control=['if'], data=[]]
if self.object.password and self.object.password != self.request.session.get(self.session_key % self.object.pk):
return self.password() # depends on [control=['if'], data=[]]
return response
|
def memoize(func):
"""
Decorator to cause a function to cache it's results for each combination of
inputs and return the cached result on subsequent calls. Does not support
named arguments or arg values that are not hashable.
>>> @memoize
... def foo(x):
... print('running function with', x)
... return x+3
...
>>> foo(10)
running function with 10
13
>>> foo(10)
13
>>> foo(11)
running function with 11
14
>>> @memoize
... def range_tuple(limit):
... print('running function')
... return tuple(i for i in range(limit))
...
>>> range_tuple(3)
running function
(0, 1, 2)
>>> range_tuple(3)
(0, 1, 2)
>>> @memoize
... def range_iter(limit):
... print('running function')
... return (i for i in range(limit))
...
>>> range_iter(3)
Traceback (most recent call last):
TypeError: Can't memoize a generator or non-hashable object!
"""
func._result_cache = {} # pylint: disable-msg=W0212
@wraps(func)
def _memoized_func(*args, **kwargs):
key = (args, tuple(sorted(kwargs.items())))
if key in func._result_cache: # pylint: disable-msg=W0212
return func._result_cache[key] # pylint: disable-msg=W0212
else:
result = func(*args, **kwargs)
if isinstance(result, GeneratorType) or not isinstance(result, Hashable):
raise TypeError("Can't memoize a generator or non-hashable object!")
func._result_cache[key] = result # pylint: disable-msg=W0212
return result
return _memoized_func
|
def function[memoize, parameter[func]]:
constant[
Decorator to cause a function to cache it's results for each combination of
inputs and return the cached result on subsequent calls. Does not support
named arguments or arg values that are not hashable.
>>> @memoize
... def foo(x):
... print('running function with', x)
... return x+3
...
>>> foo(10)
running function with 10
13
>>> foo(10)
13
>>> foo(11)
running function with 11
14
>>> @memoize
... def range_tuple(limit):
... print('running function')
... return tuple(i for i in range(limit))
...
>>> range_tuple(3)
running function
(0, 1, 2)
>>> range_tuple(3)
(0, 1, 2)
>>> @memoize
... def range_iter(limit):
... print('running function')
... return (i for i in range(limit))
...
>>> range_iter(3)
Traceback (most recent call last):
TypeError: Can't memoize a generator or non-hashable object!
]
name[func]._result_cache assign[=] dictionary[[], []]
def function[_memoized_func, parameter[]]:
variable[key] assign[=] tuple[[<ast.Name object at 0x7da1b19309a0>, <ast.Call object at 0x7da1b1933f10>]]
if compare[name[key] in name[func]._result_cache] begin[:]
return[call[name[func]._result_cache][name[key]]]
return[name[_memoized_func]]
|
keyword[def] identifier[memoize] ( identifier[func] ):
literal[string]
identifier[func] . identifier[_result_cache] ={}
@ identifier[wraps] ( identifier[func] )
keyword[def] identifier[_memoized_func] (* identifier[args] ,** identifier[kwargs] ):
identifier[key] =( identifier[args] , identifier[tuple] ( identifier[sorted] ( identifier[kwargs] . identifier[items] ())))
keyword[if] identifier[key] keyword[in] identifier[func] . identifier[_result_cache] :
keyword[return] identifier[func] . identifier[_result_cache] [ identifier[key] ]
keyword[else] :
identifier[result] = identifier[func] (* identifier[args] ,** identifier[kwargs] )
keyword[if] identifier[isinstance] ( identifier[result] , identifier[GeneratorType] ) keyword[or] keyword[not] identifier[isinstance] ( identifier[result] , identifier[Hashable] ):
keyword[raise] identifier[TypeError] ( literal[string] )
identifier[func] . identifier[_result_cache] [ identifier[key] ]= identifier[result]
keyword[return] identifier[result]
keyword[return] identifier[_memoized_func]
|
def memoize(func):
"""
Decorator to cause a function to cache it's results for each combination of
inputs and return the cached result on subsequent calls. Does not support
named arguments or arg values that are not hashable.
>>> @memoize
... def foo(x):
... print('running function with', x)
... return x+3
...
>>> foo(10)
running function with 10
13
>>> foo(10)
13
>>> foo(11)
running function with 11
14
>>> @memoize
... def range_tuple(limit):
... print('running function')
... return tuple(i for i in range(limit))
...
>>> range_tuple(3)
running function
(0, 1, 2)
>>> range_tuple(3)
(0, 1, 2)
>>> @memoize
... def range_iter(limit):
... print('running function')
... return (i for i in range(limit))
...
>>> range_iter(3)
Traceback (most recent call last):
TypeError: Can't memoize a generator or non-hashable object!
"""
func._result_cache = {} # pylint: disable-msg=W0212
@wraps(func)
def _memoized_func(*args, **kwargs):
key = (args, tuple(sorted(kwargs.items())))
if key in func._result_cache: # pylint: disable-msg=W0212
return func._result_cache[key] # pylint: disable-msg=W0212 # depends on [control=['if'], data=['key']]
else:
result = func(*args, **kwargs)
if isinstance(result, GeneratorType) or not isinstance(result, Hashable):
raise TypeError("Can't memoize a generator or non-hashable object!") # depends on [control=['if'], data=[]]
func._result_cache[key] = result # pylint: disable-msg=W0212
return result
return _memoized_func
|
def create_network_interface(SubnetId=None, Description=None, PrivateIpAddress=None, Groups=None, PrivateIpAddresses=None, SecondaryPrivateIpAddressCount=None, Ipv6Addresses=None, Ipv6AddressCount=None, DryRun=None):
"""
Creates a network interface in the specified subnet.
For more information about network interfaces, see Elastic Network Interfaces in the Amazon Virtual Private Cloud User Guide .
See also: AWS API Documentation
Examples
This example creates a network interface for the specified subnet.
Expected Output:
:example: response = client.create_network_interface(
SubnetId='string',
Description='string',
PrivateIpAddress='string',
Groups=[
'string',
],
PrivateIpAddresses=[
{
'PrivateIpAddress': 'string',
'Primary': True|False
},
],
SecondaryPrivateIpAddressCount=123,
Ipv6Addresses=[
{
'Ipv6Address': 'string'
},
],
Ipv6AddressCount=123,
DryRun=True|False
)
:type SubnetId: string
:param SubnetId: [REQUIRED]
The ID of the subnet to associate with the network interface.
:type Description: string
:param Description: A description for the network interface.
:type PrivateIpAddress: string
:param PrivateIpAddress: The primary private IPv4 address of the network interface. If you don't specify an IPv4 address, Amazon EC2 selects one for you from the subnet's IPv4 CIDR range. If you specify an IP address, you cannot indicate any IP addresses specified in privateIpAddresses as primary (only one IP address can be designated as primary).
:type Groups: list
:param Groups: The IDs of one or more security groups.
(string) --
:type PrivateIpAddresses: list
:param PrivateIpAddresses: One or more private IPv4 addresses.
(dict) --Describes a secondary private IPv4 address for a network interface.
PrivateIpAddress (string) -- [REQUIRED]The private IPv4 addresses.
Primary (boolean) --Indicates whether the private IPv4 address is the primary private IPv4 address. Only one IPv4 address can be designated as primary.
:type SecondaryPrivateIpAddressCount: integer
:param SecondaryPrivateIpAddressCount: The number of secondary private IPv4 addresses to assign to a network interface. When you specify a number of secondary IPv4 addresses, Amazon EC2 selects these IP addresses within the subnet's IPv4 CIDR range. You can't specify this option and specify more than one private IP address using privateIpAddresses .
The number of IP addresses you can assign to a network interface varies by instance type. For more information, see IP Addresses Per ENI Per Instance Type in the Amazon Virtual Private Cloud User Guide .
:type Ipv6Addresses: list
:param Ipv6Addresses: One or more specific IPv6 addresses from the IPv6 CIDR block range of your subnet. You can't use this option if you're specifying a number of IPv6 addresses.
(dict) --Describes an IPv6 address.
Ipv6Address (string) --The IPv6 address.
:type Ipv6AddressCount: integer
:param Ipv6AddressCount: The number of IPv6 addresses to assign to a network interface. Amazon EC2 automatically selects the IPv6 addresses from the subnet range. You can't use this option if specifying specific IPv6 addresses. If your subnet has the AssignIpv6AddressOnCreation attribute set to true , you can specify 0 to override this setting.
:type DryRun: boolean
:param DryRun: Checks whether you have the required permissions for the action, without actually making the request, and provides an error response. If you have the required permissions, the error response is DryRunOperation . Otherwise, it is UnauthorizedOperation .
:rtype: dict
:return: {
'NetworkInterface': {
'NetworkInterfaceId': 'string',
'SubnetId': 'string',
'VpcId': 'string',
'AvailabilityZone': 'string',
'Description': 'string',
'OwnerId': 'string',
'RequesterId': 'string',
'RequesterManaged': True|False,
'Status': 'available'|'attaching'|'in-use'|'detaching',
'MacAddress': 'string',
'PrivateIpAddress': 'string',
'PrivateDnsName': 'string',
'SourceDestCheck': True|False,
'Groups': [
{
'GroupName': 'string',
'GroupId': 'string'
},
],
'Attachment': {
'AttachmentId': 'string',
'InstanceId': 'string',
'InstanceOwnerId': 'string',
'DeviceIndex': 123,
'Status': 'attaching'|'attached'|'detaching'|'detached',
'AttachTime': datetime(2015, 1, 1),
'DeleteOnTermination': True|False
},
'Association': {
'PublicIp': 'string',
'PublicDnsName': 'string',
'IpOwnerId': 'string',
'AllocationId': 'string',
'AssociationId': 'string'
},
'TagSet': [
{
'Key': 'string',
'Value': 'string'
},
],
'PrivateIpAddresses': [
{
'PrivateIpAddress': 'string',
'PrivateDnsName': 'string',
'Primary': True|False,
'Association': {
'PublicIp': 'string',
'PublicDnsName': 'string',
'IpOwnerId': 'string',
'AllocationId': 'string',
'AssociationId': 'string'
}
},
],
'Ipv6Addresses': [
{
'Ipv6Address': 'string'
},
],
'InterfaceType': 'interface'|'natGateway'
}
}
"""
pass
|
def function[create_network_interface, parameter[SubnetId, Description, PrivateIpAddress, Groups, PrivateIpAddresses, SecondaryPrivateIpAddressCount, Ipv6Addresses, Ipv6AddressCount, DryRun]]:
constant[
Creates a network interface in the specified subnet.
For more information about network interfaces, see Elastic Network Interfaces in the Amazon Virtual Private Cloud User Guide .
See also: AWS API Documentation
Examples
This example creates a network interface for the specified subnet.
Expected Output:
:example: response = client.create_network_interface(
SubnetId='string',
Description='string',
PrivateIpAddress='string',
Groups=[
'string',
],
PrivateIpAddresses=[
{
'PrivateIpAddress': 'string',
'Primary': True|False
},
],
SecondaryPrivateIpAddressCount=123,
Ipv6Addresses=[
{
'Ipv6Address': 'string'
},
],
Ipv6AddressCount=123,
DryRun=True|False
)
:type SubnetId: string
:param SubnetId: [REQUIRED]
The ID of the subnet to associate with the network interface.
:type Description: string
:param Description: A description for the network interface.
:type PrivateIpAddress: string
:param PrivateIpAddress: The primary private IPv4 address of the network interface. If you don't specify an IPv4 address, Amazon EC2 selects one for you from the subnet's IPv4 CIDR range. If you specify an IP address, you cannot indicate any IP addresses specified in privateIpAddresses as primary (only one IP address can be designated as primary).
:type Groups: list
:param Groups: The IDs of one or more security groups.
(string) --
:type PrivateIpAddresses: list
:param PrivateIpAddresses: One or more private IPv4 addresses.
(dict) --Describes a secondary private IPv4 address for a network interface.
PrivateIpAddress (string) -- [REQUIRED]The private IPv4 addresses.
Primary (boolean) --Indicates whether the private IPv4 address is the primary private IPv4 address. Only one IPv4 address can be designated as primary.
:type SecondaryPrivateIpAddressCount: integer
:param SecondaryPrivateIpAddressCount: The number of secondary private IPv4 addresses to assign to a network interface. When you specify a number of secondary IPv4 addresses, Amazon EC2 selects these IP addresses within the subnet's IPv4 CIDR range. You can't specify this option and specify more than one private IP address using privateIpAddresses .
The number of IP addresses you can assign to a network interface varies by instance type. For more information, see IP Addresses Per ENI Per Instance Type in the Amazon Virtual Private Cloud User Guide .
:type Ipv6Addresses: list
:param Ipv6Addresses: One or more specific IPv6 addresses from the IPv6 CIDR block range of your subnet. You can't use this option if you're specifying a number of IPv6 addresses.
(dict) --Describes an IPv6 address.
Ipv6Address (string) --The IPv6 address.
:type Ipv6AddressCount: integer
:param Ipv6AddressCount: The number of IPv6 addresses to assign to a network interface. Amazon EC2 automatically selects the IPv6 addresses from the subnet range. You can't use this option if specifying specific IPv6 addresses. If your subnet has the AssignIpv6AddressOnCreation attribute set to true , you can specify 0 to override this setting.
:type DryRun: boolean
:param DryRun: Checks whether you have the required permissions for the action, without actually making the request, and provides an error response. If you have the required permissions, the error response is DryRunOperation . Otherwise, it is UnauthorizedOperation .
:rtype: dict
:return: {
'NetworkInterface': {
'NetworkInterfaceId': 'string',
'SubnetId': 'string',
'VpcId': 'string',
'AvailabilityZone': 'string',
'Description': 'string',
'OwnerId': 'string',
'RequesterId': 'string',
'RequesterManaged': True|False,
'Status': 'available'|'attaching'|'in-use'|'detaching',
'MacAddress': 'string',
'PrivateIpAddress': 'string',
'PrivateDnsName': 'string',
'SourceDestCheck': True|False,
'Groups': [
{
'GroupName': 'string',
'GroupId': 'string'
},
],
'Attachment': {
'AttachmentId': 'string',
'InstanceId': 'string',
'InstanceOwnerId': 'string',
'DeviceIndex': 123,
'Status': 'attaching'|'attached'|'detaching'|'detached',
'AttachTime': datetime(2015, 1, 1),
'DeleteOnTermination': True|False
},
'Association': {
'PublicIp': 'string',
'PublicDnsName': 'string',
'IpOwnerId': 'string',
'AllocationId': 'string',
'AssociationId': 'string'
},
'TagSet': [
{
'Key': 'string',
'Value': 'string'
},
],
'PrivateIpAddresses': [
{
'PrivateIpAddress': 'string',
'PrivateDnsName': 'string',
'Primary': True|False,
'Association': {
'PublicIp': 'string',
'PublicDnsName': 'string',
'IpOwnerId': 'string',
'AllocationId': 'string',
'AssociationId': 'string'
}
},
],
'Ipv6Addresses': [
{
'Ipv6Address': 'string'
},
],
'InterfaceType': 'interface'|'natGateway'
}
}
]
pass
|
keyword[def] identifier[create_network_interface] ( identifier[SubnetId] = keyword[None] , identifier[Description] = keyword[None] , identifier[PrivateIpAddress] = keyword[None] , identifier[Groups] = keyword[None] , identifier[PrivateIpAddresses] = keyword[None] , identifier[SecondaryPrivateIpAddressCount] = keyword[None] , identifier[Ipv6Addresses] = keyword[None] , identifier[Ipv6AddressCount] = keyword[None] , identifier[DryRun] = keyword[None] ):
literal[string]
keyword[pass]
|
def create_network_interface(SubnetId=None, Description=None, PrivateIpAddress=None, Groups=None, PrivateIpAddresses=None, SecondaryPrivateIpAddressCount=None, Ipv6Addresses=None, Ipv6AddressCount=None, DryRun=None):
"""
Creates a network interface in the specified subnet.
For more information about network interfaces, see Elastic Network Interfaces in the Amazon Virtual Private Cloud User Guide .
See also: AWS API Documentation
Examples
This example creates a network interface for the specified subnet.
Expected Output:
:example: response = client.create_network_interface(
SubnetId='string',
Description='string',
PrivateIpAddress='string',
Groups=[
'string',
],
PrivateIpAddresses=[
{
'PrivateIpAddress': 'string',
'Primary': True|False
},
],
SecondaryPrivateIpAddressCount=123,
Ipv6Addresses=[
{
'Ipv6Address': 'string'
},
],
Ipv6AddressCount=123,
DryRun=True|False
)
:type SubnetId: string
:param SubnetId: [REQUIRED]
The ID of the subnet to associate with the network interface.
:type Description: string
:param Description: A description for the network interface.
:type PrivateIpAddress: string
:param PrivateIpAddress: The primary private IPv4 address of the network interface. If you don't specify an IPv4 address, Amazon EC2 selects one for you from the subnet's IPv4 CIDR range. If you specify an IP address, you cannot indicate any IP addresses specified in privateIpAddresses as primary (only one IP address can be designated as primary).
:type Groups: list
:param Groups: The IDs of one or more security groups.
(string) --
:type PrivateIpAddresses: list
:param PrivateIpAddresses: One or more private IPv4 addresses.
(dict) --Describes a secondary private IPv4 address for a network interface.
PrivateIpAddress (string) -- [REQUIRED]The private IPv4 addresses.
Primary (boolean) --Indicates whether the private IPv4 address is the primary private IPv4 address. Only one IPv4 address can be designated as primary.
:type SecondaryPrivateIpAddressCount: integer
:param SecondaryPrivateIpAddressCount: The number of secondary private IPv4 addresses to assign to a network interface. When you specify a number of secondary IPv4 addresses, Amazon EC2 selects these IP addresses within the subnet's IPv4 CIDR range. You can't specify this option and specify more than one private IP address using privateIpAddresses .
The number of IP addresses you can assign to a network interface varies by instance type. For more information, see IP Addresses Per ENI Per Instance Type in the Amazon Virtual Private Cloud User Guide .
:type Ipv6Addresses: list
:param Ipv6Addresses: One or more specific IPv6 addresses from the IPv6 CIDR block range of your subnet. You can't use this option if you're specifying a number of IPv6 addresses.
(dict) --Describes an IPv6 address.
Ipv6Address (string) --The IPv6 address.
:type Ipv6AddressCount: integer
:param Ipv6AddressCount: The number of IPv6 addresses to assign to a network interface. Amazon EC2 automatically selects the IPv6 addresses from the subnet range. You can't use this option if specifying specific IPv6 addresses. If your subnet has the AssignIpv6AddressOnCreation attribute set to true , you can specify 0 to override this setting.
:type DryRun: boolean
:param DryRun: Checks whether you have the required permissions for the action, without actually making the request, and provides an error response. If you have the required permissions, the error response is DryRunOperation . Otherwise, it is UnauthorizedOperation .
:rtype: dict
:return: {
'NetworkInterface': {
'NetworkInterfaceId': 'string',
'SubnetId': 'string',
'VpcId': 'string',
'AvailabilityZone': 'string',
'Description': 'string',
'OwnerId': 'string',
'RequesterId': 'string',
'RequesterManaged': True|False,
'Status': 'available'|'attaching'|'in-use'|'detaching',
'MacAddress': 'string',
'PrivateIpAddress': 'string',
'PrivateDnsName': 'string',
'SourceDestCheck': True|False,
'Groups': [
{
'GroupName': 'string',
'GroupId': 'string'
},
],
'Attachment': {
'AttachmentId': 'string',
'InstanceId': 'string',
'InstanceOwnerId': 'string',
'DeviceIndex': 123,
'Status': 'attaching'|'attached'|'detaching'|'detached',
'AttachTime': datetime(2015, 1, 1),
'DeleteOnTermination': True|False
},
'Association': {
'PublicIp': 'string',
'PublicDnsName': 'string',
'IpOwnerId': 'string',
'AllocationId': 'string',
'AssociationId': 'string'
},
'TagSet': [
{
'Key': 'string',
'Value': 'string'
},
],
'PrivateIpAddresses': [
{
'PrivateIpAddress': 'string',
'PrivateDnsName': 'string',
'Primary': True|False,
'Association': {
'PublicIp': 'string',
'PublicDnsName': 'string',
'IpOwnerId': 'string',
'AllocationId': 'string',
'AssociationId': 'string'
}
},
],
'Ipv6Addresses': [
{
'Ipv6Address': 'string'
},
],
'InterfaceType': 'interface'|'natGateway'
}
}
"""
pass
|
def create(self, name, nopassword=None, secret=None, encryption=None):
"""Creates a new user on the local system.
Creating users requires either a secret (password) or the nopassword
keyword to be specified.
Args:
name (str): The name of the user to craete
nopassword (bool): Configures the user to be able to authenticate
without a password challenage
secret (str): The secret (password) to assign to this user
encryption (str): Specifies how the secret is encoded. Valid
values are "cleartext", "md5", "sha512". The default is
"cleartext"
Returns:
True if the operation was successful otherwise False
Raises:
TypeError: if the required arguments are not satisfied
"""
if secret is not None:
return self.create_with_secret(name, secret, encryption)
elif nopassword is True:
return self.create_with_nopassword(name)
else:
raise TypeError('either "nopassword" or "secret" must be '
'specified to create a user')
|
def function[create, parameter[self, name, nopassword, secret, encryption]]:
constant[Creates a new user on the local system.
Creating users requires either a secret (password) or the nopassword
keyword to be specified.
Args:
name (str): The name of the user to craete
nopassword (bool): Configures the user to be able to authenticate
without a password challenage
secret (str): The secret (password) to assign to this user
encryption (str): Specifies how the secret is encoded. Valid
values are "cleartext", "md5", "sha512". The default is
"cleartext"
Returns:
True if the operation was successful otherwise False
Raises:
TypeError: if the required arguments are not satisfied
]
if compare[name[secret] is_not constant[None]] begin[:]
return[call[name[self].create_with_secret, parameter[name[name], name[secret], name[encryption]]]]
|
keyword[def] identifier[create] ( identifier[self] , identifier[name] , identifier[nopassword] = keyword[None] , identifier[secret] = keyword[None] , identifier[encryption] = keyword[None] ):
literal[string]
keyword[if] identifier[secret] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[self] . identifier[create_with_secret] ( identifier[name] , identifier[secret] , identifier[encryption] )
keyword[elif] identifier[nopassword] keyword[is] keyword[True] :
keyword[return] identifier[self] . identifier[create_with_nopassword] ( identifier[name] )
keyword[else] :
keyword[raise] identifier[TypeError] ( literal[string]
literal[string] )
|
def create(self, name, nopassword=None, secret=None, encryption=None):
"""Creates a new user on the local system.
Creating users requires either a secret (password) or the nopassword
keyword to be specified.
Args:
name (str): The name of the user to craete
nopassword (bool): Configures the user to be able to authenticate
without a password challenage
secret (str): The secret (password) to assign to this user
encryption (str): Specifies how the secret is encoded. Valid
values are "cleartext", "md5", "sha512". The default is
"cleartext"
Returns:
True if the operation was successful otherwise False
Raises:
TypeError: if the required arguments are not satisfied
"""
if secret is not None:
return self.create_with_secret(name, secret, encryption) # depends on [control=['if'], data=['secret']]
elif nopassword is True:
return self.create_with_nopassword(name) # depends on [control=['if'], data=[]]
else:
raise TypeError('either "nopassword" or "secret" must be specified to create a user')
|
def cuda_architecture_flags(device_info):
"""
Emit a list of architecture flags for each CUDA device found
['--gpu-architecture=sm_30', '--gpu-architecture=sm_52']
"""
# Figure out the necessary device architectures
if len(device_info['devices']) == 0:
archs = ['--gpu-architecture=sm_30']
log.info("No CUDA devices found, defaulting to architecture '{}'".format(archs[0]))
else:
archs = set()
for device in device_info['devices']:
arch_str = '--gpu-architecture=sm_{}{}'.format(device['major'], device['minor'])
log.info("Using '{}' for '{}'".format(arch_str, device['name']))
archs.add(arch_str)
return list(archs)
|
def function[cuda_architecture_flags, parameter[device_info]]:
constant[
Emit a list of architecture flags for each CUDA device found
['--gpu-architecture=sm_30', '--gpu-architecture=sm_52']
]
if compare[call[name[len], parameter[call[name[device_info]][constant[devices]]]] equal[==] constant[0]] begin[:]
variable[archs] assign[=] list[[<ast.Constant object at 0x7da1b1041780>]]
call[name[log].info, parameter[call[constant[No CUDA devices found, defaulting to architecture '{}'].format, parameter[call[name[archs]][constant[0]]]]]]
return[call[name[list], parameter[name[archs]]]]
|
keyword[def] identifier[cuda_architecture_flags] ( identifier[device_info] ):
literal[string]
keyword[if] identifier[len] ( identifier[device_info] [ literal[string] ])== literal[int] :
identifier[archs] =[ literal[string] ]
identifier[log] . identifier[info] ( literal[string] . identifier[format] ( identifier[archs] [ literal[int] ]))
keyword[else] :
identifier[archs] = identifier[set] ()
keyword[for] identifier[device] keyword[in] identifier[device_info] [ literal[string] ]:
identifier[arch_str] = literal[string] . identifier[format] ( identifier[device] [ literal[string] ], identifier[device] [ literal[string] ])
identifier[log] . identifier[info] ( literal[string] . identifier[format] ( identifier[arch_str] , identifier[device] [ literal[string] ]))
identifier[archs] . identifier[add] ( identifier[arch_str] )
keyword[return] identifier[list] ( identifier[archs] )
|
def cuda_architecture_flags(device_info):
"""
Emit a list of architecture flags for each CUDA device found
['--gpu-architecture=sm_30', '--gpu-architecture=sm_52']
"""
# Figure out the necessary device architectures
if len(device_info['devices']) == 0:
archs = ['--gpu-architecture=sm_30']
log.info("No CUDA devices found, defaulting to architecture '{}'".format(archs[0])) # depends on [control=['if'], data=[]]
else:
archs = set()
for device in device_info['devices']:
arch_str = '--gpu-architecture=sm_{}{}'.format(device['major'], device['minor'])
log.info("Using '{}' for '{}'".format(arch_str, device['name']))
archs.add(arch_str) # depends on [control=['for'], data=['device']]
return list(archs)
|
def _parse_authors(authors):
"""
Parse informations about authors of the book.
Args:
dom (obj): HTMLElement containing slice of the page with details.
Returns:
list: List of :class:`.Author` objects. Blank if no author \
found.
"""
link = authors.find("a")
link = link[0].params.get("href") if link else None
author_list = _strip_content(authors)
if "(" in author_list:
author_list = author_list.split("(")[0]
if not author_list.strip():
return []
return map(
lambda author: Author(author.strip(), link),
author_list.strip().split(",")
)
|
def function[_parse_authors, parameter[authors]]:
constant[
Parse informations about authors of the book.
Args:
dom (obj): HTMLElement containing slice of the page with details.
Returns:
list: List of :class:`.Author` objects. Blank if no author found.
]
variable[link] assign[=] call[name[authors].find, parameter[constant[a]]]
variable[link] assign[=] <ast.IfExp object at 0x7da204566260>
variable[author_list] assign[=] call[name[_strip_content], parameter[name[authors]]]
if compare[constant[(] in name[author_list]] begin[:]
variable[author_list] assign[=] call[call[name[author_list].split, parameter[constant[(]]]][constant[0]]
if <ast.UnaryOp object at 0x7da18f723040> begin[:]
return[list[[]]]
return[call[name[map], parameter[<ast.Lambda object at 0x7da18f721ea0>, call[call[name[author_list].strip, parameter[]].split, parameter[constant[,]]]]]]
|
keyword[def] identifier[_parse_authors] ( identifier[authors] ):
literal[string]
identifier[link] = identifier[authors] . identifier[find] ( literal[string] )
identifier[link] = identifier[link] [ literal[int] ]. identifier[params] . identifier[get] ( literal[string] ) keyword[if] identifier[link] keyword[else] keyword[None]
identifier[author_list] = identifier[_strip_content] ( identifier[authors] )
keyword[if] literal[string] keyword[in] identifier[author_list] :
identifier[author_list] = identifier[author_list] . identifier[split] ( literal[string] )[ literal[int] ]
keyword[if] keyword[not] identifier[author_list] . identifier[strip] ():
keyword[return] []
keyword[return] identifier[map] (
keyword[lambda] identifier[author] : identifier[Author] ( identifier[author] . identifier[strip] (), identifier[link] ),
identifier[author_list] . identifier[strip] (). identifier[split] ( literal[string] )
)
|
def _parse_authors(authors):
"""
Parse informations about authors of the book.
Args:
dom (obj): HTMLElement containing slice of the page with details.
Returns:
list: List of :class:`.Author` objects. Blank if no author found.
"""
link = authors.find('a')
link = link[0].params.get('href') if link else None
author_list = _strip_content(authors)
if '(' in author_list:
author_list = author_list.split('(')[0] # depends on [control=['if'], data=['author_list']]
if not author_list.strip():
return [] # depends on [control=['if'], data=[]]
return map(lambda author: Author(author.strip(), link), author_list.strip().split(','))
|
def get(self, r):
""" Returns precomputed value of the given expression
"""
if r is None:
return None
if r.lower() == '(sp)' and self.stack:
return self.stack[-1]
if r[:1] == '(':
return self.mem[r[1:-1]]
r = r.lower()
if is_number(r):
return str(valnum(r))
if not is_register(r):
return None
return self.regs[r]
|
def function[get, parameter[self, r]]:
constant[ Returns precomputed value of the given expression
]
if compare[name[r] is constant[None]] begin[:]
return[constant[None]]
if <ast.BoolOp object at 0x7da1b069c880> begin[:]
return[call[name[self].stack][<ast.UnaryOp object at 0x7da1b069c550>]]
if compare[call[name[r]][<ast.Slice object at 0x7da1b069c6a0>] equal[==] constant[(]] begin[:]
return[call[name[self].mem][call[name[r]][<ast.Slice object at 0x7da1b069ff70>]]]
variable[r] assign[=] call[name[r].lower, parameter[]]
if call[name[is_number], parameter[name[r]]] begin[:]
return[call[name[str], parameter[call[name[valnum], parameter[name[r]]]]]]
if <ast.UnaryOp object at 0x7da1b069d210> begin[:]
return[constant[None]]
return[call[name[self].regs][name[r]]]
|
keyword[def] identifier[get] ( identifier[self] , identifier[r] ):
literal[string]
keyword[if] identifier[r] keyword[is] keyword[None] :
keyword[return] keyword[None]
keyword[if] identifier[r] . identifier[lower] ()== literal[string] keyword[and] identifier[self] . identifier[stack] :
keyword[return] identifier[self] . identifier[stack] [- literal[int] ]
keyword[if] identifier[r] [: literal[int] ]== literal[string] :
keyword[return] identifier[self] . identifier[mem] [ identifier[r] [ literal[int] :- literal[int] ]]
identifier[r] = identifier[r] . identifier[lower] ()
keyword[if] identifier[is_number] ( identifier[r] ):
keyword[return] identifier[str] ( identifier[valnum] ( identifier[r] ))
keyword[if] keyword[not] identifier[is_register] ( identifier[r] ):
keyword[return] keyword[None]
keyword[return] identifier[self] . identifier[regs] [ identifier[r] ]
|
def get(self, r):
""" Returns precomputed value of the given expression
"""
if r is None:
return None # depends on [control=['if'], data=[]]
if r.lower() == '(sp)' and self.stack:
return self.stack[-1] # depends on [control=['if'], data=[]]
if r[:1] == '(':
return self.mem[r[1:-1]] # depends on [control=['if'], data=[]]
r = r.lower()
if is_number(r):
return str(valnum(r)) # depends on [control=['if'], data=[]]
if not is_register(r):
return None # depends on [control=['if'], data=[]]
return self.regs[r]
|
def graphql_to_sql(schema, graphql_query, parameters, compiler_metadata,
type_equivalence_hints=None):
"""Compile the GraphQL input using the schema into a SQL query and associated metadata.
Args:
schema: GraphQL schema object describing the schema of the graph to be queried
graphql_query: the GraphQL query to compile to SQL, as a string
parameters: dict, mapping argument name to its value, for every parameter the query expects.
compiler_metadata: SqlMetadata object, provides SQLAlchemy specific backend
information
type_equivalence_hints: optional dict of GraphQL interface or type -> GraphQL union.
Used as a workaround for GraphQL's lack of support for
inheritance across "types" (i.e. non-interfaces), as well as a
workaround for Gremlin's total lack of inheritance-awareness.
The key-value pairs in the dict specify that the "key" type
is equivalent to the "value" type, i.e. that the GraphQL type or
interface in the key is the most-derived common supertype
of every GraphQL type in the "value" GraphQL union.
Recursive expansion of type equivalence hints is not performed,
and only type-level correctness of this argument is enforced.
See README.md for more details on everything this parameter does.
*****
Be very careful with this option, as bad input here will
lead to incorrect output queries being generated.
*****
Returns:
a CompilationResult object, containing:
- query: string, the resulting compiled and parameterized query string
- language: string, specifying the language to which the query was compiled
- output_metadata: dict, output name -> OutputMetadata namedtuple object
- input_metadata: dict, name of input variables -> inferred GraphQL type, based on use
"""
compilation_result = compile_graphql_to_sql(
schema, graphql_query, compiler_metadata, type_equivalence_hints=type_equivalence_hints)
return compilation_result._replace(
query=insert_arguments_into_query(compilation_result, parameters))
|
def function[graphql_to_sql, parameter[schema, graphql_query, parameters, compiler_metadata, type_equivalence_hints]]:
constant[Compile the GraphQL input using the schema into a SQL query and associated metadata.
Args:
schema: GraphQL schema object describing the schema of the graph to be queried
graphql_query: the GraphQL query to compile to SQL, as a string
parameters: dict, mapping argument name to its value, for every parameter the query expects.
compiler_metadata: SqlMetadata object, provides SQLAlchemy specific backend
information
type_equivalence_hints: optional dict of GraphQL interface or type -> GraphQL union.
Used as a workaround for GraphQL's lack of support for
inheritance across "types" (i.e. non-interfaces), as well as a
workaround for Gremlin's total lack of inheritance-awareness.
The key-value pairs in the dict specify that the "key" type
is equivalent to the "value" type, i.e. that the GraphQL type or
interface in the key is the most-derived common supertype
of every GraphQL type in the "value" GraphQL union.
Recursive expansion of type equivalence hints is not performed,
and only type-level correctness of this argument is enforced.
See README.md for more details on everything this parameter does.
*****
Be very careful with this option, as bad input here will
lead to incorrect output queries being generated.
*****
Returns:
a CompilationResult object, containing:
- query: string, the resulting compiled and parameterized query string
- language: string, specifying the language to which the query was compiled
- output_metadata: dict, output name -> OutputMetadata namedtuple object
- input_metadata: dict, name of input variables -> inferred GraphQL type, based on use
]
variable[compilation_result] assign[=] call[name[compile_graphql_to_sql], parameter[name[schema], name[graphql_query], name[compiler_metadata]]]
return[call[name[compilation_result]._replace, parameter[]]]
|
keyword[def] identifier[graphql_to_sql] ( identifier[schema] , identifier[graphql_query] , identifier[parameters] , identifier[compiler_metadata] ,
identifier[type_equivalence_hints] = keyword[None] ):
literal[string]
identifier[compilation_result] = identifier[compile_graphql_to_sql] (
identifier[schema] , identifier[graphql_query] , identifier[compiler_metadata] , identifier[type_equivalence_hints] = identifier[type_equivalence_hints] )
keyword[return] identifier[compilation_result] . identifier[_replace] (
identifier[query] = identifier[insert_arguments_into_query] ( identifier[compilation_result] , identifier[parameters] ))
|
def graphql_to_sql(schema, graphql_query, parameters, compiler_metadata, type_equivalence_hints=None):
"""Compile the GraphQL input using the schema into a SQL query and associated metadata.
Args:
schema: GraphQL schema object describing the schema of the graph to be queried
graphql_query: the GraphQL query to compile to SQL, as a string
parameters: dict, mapping argument name to its value, for every parameter the query expects.
compiler_metadata: SqlMetadata object, provides SQLAlchemy specific backend
information
type_equivalence_hints: optional dict of GraphQL interface or type -> GraphQL union.
Used as a workaround for GraphQL's lack of support for
inheritance across "types" (i.e. non-interfaces), as well as a
workaround for Gremlin's total lack of inheritance-awareness.
The key-value pairs in the dict specify that the "key" type
is equivalent to the "value" type, i.e. that the GraphQL type or
interface in the key is the most-derived common supertype
of every GraphQL type in the "value" GraphQL union.
Recursive expansion of type equivalence hints is not performed,
and only type-level correctness of this argument is enforced.
See README.md for more details on everything this parameter does.
*****
Be very careful with this option, as bad input here will
lead to incorrect output queries being generated.
*****
Returns:
a CompilationResult object, containing:
- query: string, the resulting compiled and parameterized query string
- language: string, specifying the language to which the query was compiled
- output_metadata: dict, output name -> OutputMetadata namedtuple object
- input_metadata: dict, name of input variables -> inferred GraphQL type, based on use
"""
compilation_result = compile_graphql_to_sql(schema, graphql_query, compiler_metadata, type_equivalence_hints=type_equivalence_hints)
return compilation_result._replace(query=insert_arguments_into_query(compilation_result, parameters))
|
def sync_camera_gyro(image_sequence_or_flow, image_timestamps, gyro_data, gyro_timestamps, levels=6, full_output=False):
"""Get time offset that aligns image timestamps with gyro timestamps.
Given an image sequence, and gyroscope data, with their respective timestamps,
calculate the offset that aligns the image data with the gyro data.
The timestamps must only differ by an offset, not a scale factor.
This function finds an approximation of the offset *d* that makes this transformation
t_gyro = t_camera + d
i.e. your new image timestamps should be
image_timestamps_aligned = image_timestamps + d
The offset is calculated using zero-mean cross correlation of the gyroscope data magnitude
and the optical flow magnitude, calculated from the image sequence.
ZNCC is performed using pyramids to make it quick.
The offset is accurate up to about +/- 2 frames, so you should run
*refine_time_offset* if you need better accuracy.
Parameters
---------------
image_sequence_or_flow : sequence of image data, or ndarray
This must be either a list or generator that provides a stream of
images that are used for optical flow calculations.
image_timestamps : ndarray
Timestamps of the images in image_sequence
gyro_data : (3, N) ndarray
Gyroscope measurements (angular velocity)
gyro_timestamps : ndarray
Timestamps of data in gyro_data
levels : int
Number of pyramid levels
full_output : bool
If False, only return the offset, otherwise return extra data
Returns
--------------
time_offset : float
The time offset to add to image_timestamps to align the image data
with the gyroscope data
flow : ndarray
(Only if full_output=True)
The calculated optical flow magnitude
"""
# If input is not flow, then create from iamge sequence
try:
assert image_sequence_or_flow.ndim == 1
flow_org = image_sequence_or_flow
except AssertionError:
flow_org = tracking.optical_flow_magnitude(image_sequence_or_flow)
# Gyro from gyro data
gyro_mag = np.sum(gyro_data**2, axis=0)
flow_timestamps = image_timestamps[:-2]
# Resample to match highest
rate = lambda ts: len(ts) / (ts[-1] - ts[0])
freq_gyro = rate(gyro_timestamps)
freq_image = rate(flow_timestamps)
if freq_gyro > freq_image:
rel_rate = freq_gyro / freq_image
flow_mag = znccpyr.upsample(flow_org, rel_rate)
else:
flow_mag = flow_org
rel_rate = freq_image / freq_gyro
gyro_mag = znccpyr.upsample(gyro_mag, rel_rate)
ishift = znccpyr.find_shift_pyr(flow_mag, gyro_mag, levels)
if freq_gyro > freq_image:
flow_shift = int(-ishift / rel_rate)
else:
flow_shift = int(-ishift)
time_offset = flow_timestamps[flow_shift]
if full_output:
return time_offset, flow_org # Return the orginal flow, not the upsampled version
else:
return time_offset
|
def function[sync_camera_gyro, parameter[image_sequence_or_flow, image_timestamps, gyro_data, gyro_timestamps, levels, full_output]]:
constant[Get time offset that aligns image timestamps with gyro timestamps.
Given an image sequence, and gyroscope data, with their respective timestamps,
calculate the offset that aligns the image data with the gyro data.
The timestamps must only differ by an offset, not a scale factor.
This function finds an approximation of the offset *d* that makes this transformation
t_gyro = t_camera + d
i.e. your new image timestamps should be
image_timestamps_aligned = image_timestamps + d
The offset is calculated using zero-mean cross correlation of the gyroscope data magnitude
and the optical flow magnitude, calculated from the image sequence.
ZNCC is performed using pyramids to make it quick.
The offset is accurate up to about +/- 2 frames, so you should run
*refine_time_offset* if you need better accuracy.
Parameters
---------------
image_sequence_or_flow : sequence of image data, or ndarray
This must be either a list or generator that provides a stream of
images that are used for optical flow calculations.
image_timestamps : ndarray
Timestamps of the images in image_sequence
gyro_data : (3, N) ndarray
Gyroscope measurements (angular velocity)
gyro_timestamps : ndarray
Timestamps of data in gyro_data
levels : int
Number of pyramid levels
full_output : bool
If False, only return the offset, otherwise return extra data
Returns
--------------
time_offset : float
The time offset to add to image_timestamps to align the image data
with the gyroscope data
flow : ndarray
(Only if full_output=True)
The calculated optical flow magnitude
]
<ast.Try object at 0x7da20c6c62f0>
variable[gyro_mag] assign[=] call[name[np].sum, parameter[binary_operation[name[gyro_data] ** constant[2]]]]
variable[flow_timestamps] assign[=] call[name[image_timestamps]][<ast.Slice object at 0x7da20c6c4fa0>]
variable[rate] assign[=] <ast.Lambda object at 0x7da20c6c5ab0>
variable[freq_gyro] assign[=] call[name[rate], parameter[name[gyro_timestamps]]]
variable[freq_image] assign[=] call[name[rate], parameter[name[flow_timestamps]]]
if compare[name[freq_gyro] greater[>] name[freq_image]] begin[:]
variable[rel_rate] assign[=] binary_operation[name[freq_gyro] / name[freq_image]]
variable[flow_mag] assign[=] call[name[znccpyr].upsample, parameter[name[flow_org], name[rel_rate]]]
variable[ishift] assign[=] call[name[znccpyr].find_shift_pyr, parameter[name[flow_mag], name[gyro_mag], name[levels]]]
if compare[name[freq_gyro] greater[>] name[freq_image]] begin[:]
variable[flow_shift] assign[=] call[name[int], parameter[binary_operation[<ast.UnaryOp object at 0x7da2041da410> / name[rel_rate]]]]
variable[time_offset] assign[=] call[name[flow_timestamps]][name[flow_shift]]
if name[full_output] begin[:]
return[tuple[[<ast.Name object at 0x7da20c6a9510>, <ast.Name object at 0x7da20c6ab730>]]]
|
keyword[def] identifier[sync_camera_gyro] ( identifier[image_sequence_or_flow] , identifier[image_timestamps] , identifier[gyro_data] , identifier[gyro_timestamps] , identifier[levels] = literal[int] , identifier[full_output] = keyword[False] ):
literal[string]
keyword[try] :
keyword[assert] identifier[image_sequence_or_flow] . identifier[ndim] == literal[int]
identifier[flow_org] = identifier[image_sequence_or_flow]
keyword[except] identifier[AssertionError] :
identifier[flow_org] = identifier[tracking] . identifier[optical_flow_magnitude] ( identifier[image_sequence_or_flow] )
identifier[gyro_mag] = identifier[np] . identifier[sum] ( identifier[gyro_data] ** literal[int] , identifier[axis] = literal[int] )
identifier[flow_timestamps] = identifier[image_timestamps] [:- literal[int] ]
identifier[rate] = keyword[lambda] identifier[ts] : identifier[len] ( identifier[ts] )/( identifier[ts] [- literal[int] ]- identifier[ts] [ literal[int] ])
identifier[freq_gyro] = identifier[rate] ( identifier[gyro_timestamps] )
identifier[freq_image] = identifier[rate] ( identifier[flow_timestamps] )
keyword[if] identifier[freq_gyro] > identifier[freq_image] :
identifier[rel_rate] = identifier[freq_gyro] / identifier[freq_image]
identifier[flow_mag] = identifier[znccpyr] . identifier[upsample] ( identifier[flow_org] , identifier[rel_rate] )
keyword[else] :
identifier[flow_mag] = identifier[flow_org]
identifier[rel_rate] = identifier[freq_image] / identifier[freq_gyro]
identifier[gyro_mag] = identifier[znccpyr] . identifier[upsample] ( identifier[gyro_mag] , identifier[rel_rate] )
identifier[ishift] = identifier[znccpyr] . identifier[find_shift_pyr] ( identifier[flow_mag] , identifier[gyro_mag] , identifier[levels] )
keyword[if] identifier[freq_gyro] > identifier[freq_image] :
identifier[flow_shift] = identifier[int] (- identifier[ishift] / identifier[rel_rate] )
keyword[else] :
identifier[flow_shift] = identifier[int] (- identifier[ishift] )
identifier[time_offset] = identifier[flow_timestamps] [ identifier[flow_shift] ]
keyword[if] identifier[full_output] :
keyword[return] identifier[time_offset] , identifier[flow_org]
keyword[else] :
keyword[return] identifier[time_offset]
|
def sync_camera_gyro(image_sequence_or_flow, image_timestamps, gyro_data, gyro_timestamps, levels=6, full_output=False):
"""Get time offset that aligns image timestamps with gyro timestamps.
Given an image sequence, and gyroscope data, with their respective timestamps,
calculate the offset that aligns the image data with the gyro data.
The timestamps must only differ by an offset, not a scale factor.
This function finds an approximation of the offset *d* that makes this transformation
t_gyro = t_camera + d
i.e. your new image timestamps should be
image_timestamps_aligned = image_timestamps + d
The offset is calculated using zero-mean cross correlation of the gyroscope data magnitude
and the optical flow magnitude, calculated from the image sequence.
ZNCC is performed using pyramids to make it quick.
The offset is accurate up to about +/- 2 frames, so you should run
*refine_time_offset* if you need better accuracy.
Parameters
---------------
image_sequence_or_flow : sequence of image data, or ndarray
This must be either a list or generator that provides a stream of
images that are used for optical flow calculations.
image_timestamps : ndarray
Timestamps of the images in image_sequence
gyro_data : (3, N) ndarray
Gyroscope measurements (angular velocity)
gyro_timestamps : ndarray
Timestamps of data in gyro_data
levels : int
Number of pyramid levels
full_output : bool
If False, only return the offset, otherwise return extra data
Returns
--------------
time_offset : float
The time offset to add to image_timestamps to align the image data
with the gyroscope data
flow : ndarray
(Only if full_output=True)
The calculated optical flow magnitude
"""
# If input is not flow, then create from iamge sequence
try:
assert image_sequence_or_flow.ndim == 1
flow_org = image_sequence_or_flow # depends on [control=['try'], data=[]]
except AssertionError:
flow_org = tracking.optical_flow_magnitude(image_sequence_or_flow) # depends on [control=['except'], data=[]]
# Gyro from gyro data
gyro_mag = np.sum(gyro_data ** 2, axis=0)
flow_timestamps = image_timestamps[:-2]
# Resample to match highest
rate = lambda ts: len(ts) / (ts[-1] - ts[0])
freq_gyro = rate(gyro_timestamps)
freq_image = rate(flow_timestamps)
if freq_gyro > freq_image:
rel_rate = freq_gyro / freq_image
flow_mag = znccpyr.upsample(flow_org, rel_rate) # depends on [control=['if'], data=['freq_gyro', 'freq_image']]
else:
flow_mag = flow_org
rel_rate = freq_image / freq_gyro
gyro_mag = znccpyr.upsample(gyro_mag, rel_rate)
ishift = znccpyr.find_shift_pyr(flow_mag, gyro_mag, levels)
if freq_gyro > freq_image:
flow_shift = int(-ishift / rel_rate) # depends on [control=['if'], data=[]]
else:
flow_shift = int(-ishift)
time_offset = flow_timestamps[flow_shift]
if full_output:
return (time_offset, flow_org) # Return the orginal flow, not the upsampled version # depends on [control=['if'], data=[]]
else:
return time_offset
|
def parse_int(str_num):
""" Given an integer number, return its value,
or None if it could not be parsed.
Allowed formats: DECIMAL, HEXA (0xnnn, $nnnn or nnnnh)
:param str_num: (string) the number to be parsed
:return: an integer number or None if it could not be parsedd
"""
str_num = (str_num or "").strip().upper()
if not str_num:
return None
base = 10
if str_num.startswith('0X'):
base = 16
str_num = str_num[2:]
if str_num.endswith('H'):
base = 16
str_num = str_num[:-1]
if str_num.startswith('$'):
base = 16
str_num = str_num[1:]
try:
return int(str_num, base)
except ValueError:
return None
|
def function[parse_int, parameter[str_num]]:
constant[ Given an integer number, return its value,
or None if it could not be parsed.
Allowed formats: DECIMAL, HEXA (0xnnn, $nnnn or nnnnh)
:param str_num: (string) the number to be parsed
:return: an integer number or None if it could not be parsedd
]
variable[str_num] assign[=] call[call[<ast.BoolOp object at 0x7da20cabfd00>.strip, parameter[]].upper, parameter[]]
if <ast.UnaryOp object at 0x7da20cabd330> begin[:]
return[constant[None]]
variable[base] assign[=] constant[10]
if call[name[str_num].startswith, parameter[constant[0X]]] begin[:]
variable[base] assign[=] constant[16]
variable[str_num] assign[=] call[name[str_num]][<ast.Slice object at 0x7da20cabd570>]
if call[name[str_num].endswith, parameter[constant[H]]] begin[:]
variable[base] assign[=] constant[16]
variable[str_num] assign[=] call[name[str_num]][<ast.Slice object at 0x7da20cabe080>]
if call[name[str_num].startswith, parameter[constant[$]]] begin[:]
variable[base] assign[=] constant[16]
variable[str_num] assign[=] call[name[str_num]][<ast.Slice object at 0x7da20cabcd60>]
<ast.Try object at 0x7da20cabf4c0>
|
keyword[def] identifier[parse_int] ( identifier[str_num] ):
literal[string]
identifier[str_num] =( identifier[str_num] keyword[or] literal[string] ). identifier[strip] (). identifier[upper] ()
keyword[if] keyword[not] identifier[str_num] :
keyword[return] keyword[None]
identifier[base] = literal[int]
keyword[if] identifier[str_num] . identifier[startswith] ( literal[string] ):
identifier[base] = literal[int]
identifier[str_num] = identifier[str_num] [ literal[int] :]
keyword[if] identifier[str_num] . identifier[endswith] ( literal[string] ):
identifier[base] = literal[int]
identifier[str_num] = identifier[str_num] [:- literal[int] ]
keyword[if] identifier[str_num] . identifier[startswith] ( literal[string] ):
identifier[base] = literal[int]
identifier[str_num] = identifier[str_num] [ literal[int] :]
keyword[try] :
keyword[return] identifier[int] ( identifier[str_num] , identifier[base] )
keyword[except] identifier[ValueError] :
keyword[return] keyword[None]
|
def parse_int(str_num):
""" Given an integer number, return its value,
or None if it could not be parsed.
Allowed formats: DECIMAL, HEXA (0xnnn, $nnnn or nnnnh)
:param str_num: (string) the number to be parsed
:return: an integer number or None if it could not be parsedd
"""
str_num = (str_num or '').strip().upper()
if not str_num:
return None # depends on [control=['if'], data=[]]
base = 10
if str_num.startswith('0X'):
base = 16
str_num = str_num[2:] # depends on [control=['if'], data=[]]
if str_num.endswith('H'):
base = 16
str_num = str_num[:-1] # depends on [control=['if'], data=[]]
if str_num.startswith('$'):
base = 16
str_num = str_num[1:] # depends on [control=['if'], data=[]]
try:
return int(str_num, base) # depends on [control=['try'], data=[]]
except ValueError:
return None # depends on [control=['except'], data=[]]
|
def in_project_directory() -> bool:
"""
Returns whether or not the current working directory is a Cauldron project
directory, which contains a cauldron.json file.
"""
current_directory = os.path.realpath(os.curdir)
project_path = os.path.join(current_directory, 'cauldron.json')
return os.path.exists(project_path) and os.path.isfile(project_path)
|
def function[in_project_directory, parameter[]]:
constant[
Returns whether or not the current working directory is a Cauldron project
directory, which contains a cauldron.json file.
]
variable[current_directory] assign[=] call[name[os].path.realpath, parameter[name[os].curdir]]
variable[project_path] assign[=] call[name[os].path.join, parameter[name[current_directory], constant[cauldron.json]]]
return[<ast.BoolOp object at 0x7da1b1b84e80>]
|
keyword[def] identifier[in_project_directory] ()-> identifier[bool] :
literal[string]
identifier[current_directory] = identifier[os] . identifier[path] . identifier[realpath] ( identifier[os] . identifier[curdir] )
identifier[project_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[current_directory] , literal[string] )
keyword[return] identifier[os] . identifier[path] . identifier[exists] ( identifier[project_path] ) keyword[and] identifier[os] . identifier[path] . identifier[isfile] ( identifier[project_path] )
|
def in_project_directory() -> bool:
"""
Returns whether or not the current working directory is a Cauldron project
directory, which contains a cauldron.json file.
"""
current_directory = os.path.realpath(os.curdir)
project_path = os.path.join(current_directory, 'cauldron.json')
return os.path.exists(project_path) and os.path.isfile(project_path)
|
def events(self):
'''
A generator that will return all ansible job events in the order that they were emitted from Ansible
Example:
{
"event":"runner_on_ok",
"uuid":"00a50d9c-161a-4b74-b978-9f60becaf209",
"stdout":"ok: [localhost] => {\\r\\n \\" msg\\":\\"Test!\\"\\r\\n}",
"counter":6,
"pid":740,
"created":"2018-04-05T18:24:36.096725",
"end_line":10,
"start_line":7,
"event_data":{
"play_pattern":"all",
"play":"all",
"task":"debug",
"task_args":"msg=Test!",
"remote_addr":"localhost",
"res":{
"msg":"Test!",
"changed":false,
"_ansible_verbose_always":true,
"_ansible_no_log":false
},
"pid":740,
"play_uuid":"0242ac11-0002-443b-cdb1-000000000006",
"task_uuid":"0242ac11-0002-443b-cdb1-000000000008",
"event_loop":null,
"playbook_uuid":"634edeee-3228-4c17-a1b4-f010fdd42eb2",
"playbook":"test.yml",
"task_action":"debug",
"host":"localhost",
"task_path":"/tmp/demo/project/test.yml:3"
}
}
'''
event_path = os.path.join(self.config.artifact_dir, 'job_events')
if not os.path.exists(event_path):
raise AnsibleRunnerException("events missing")
dir_events = os.listdir(event_path)
dir_events_actual = []
for each_file in dir_events:
if re.match("^[0-9]+-.+json$", each_file):
dir_events_actual.append(each_file)
dir_events_actual.sort(key=lambda filenm: int(filenm.split("-", 1)[0]))
for event_file in dir_events_actual:
with codecs.open(os.path.join(event_path, event_file), 'r', encoding='utf-8') as event_file_actual:
event = json.load(event_file_actual)
yield event
|
def function[events, parameter[self]]:
constant[
A generator that will return all ansible job events in the order that they were emitted from Ansible
Example:
{
"event":"runner_on_ok",
"uuid":"00a50d9c-161a-4b74-b978-9f60becaf209",
"stdout":"ok: [localhost] => {\r\n \" msg\":\"Test!\"\r\n}",
"counter":6,
"pid":740,
"created":"2018-04-05T18:24:36.096725",
"end_line":10,
"start_line":7,
"event_data":{
"play_pattern":"all",
"play":"all",
"task":"debug",
"task_args":"msg=Test!",
"remote_addr":"localhost",
"res":{
"msg":"Test!",
"changed":false,
"_ansible_verbose_always":true,
"_ansible_no_log":false
},
"pid":740,
"play_uuid":"0242ac11-0002-443b-cdb1-000000000006",
"task_uuid":"0242ac11-0002-443b-cdb1-000000000008",
"event_loop":null,
"playbook_uuid":"634edeee-3228-4c17-a1b4-f010fdd42eb2",
"playbook":"test.yml",
"task_action":"debug",
"host":"localhost",
"task_path":"/tmp/demo/project/test.yml:3"
}
}
]
variable[event_path] assign[=] call[name[os].path.join, parameter[name[self].config.artifact_dir, constant[job_events]]]
if <ast.UnaryOp object at 0x7da20c6c6fb0> begin[:]
<ast.Raise object at 0x7da20c6c47f0>
variable[dir_events] assign[=] call[name[os].listdir, parameter[name[event_path]]]
variable[dir_events_actual] assign[=] list[[]]
for taget[name[each_file]] in starred[name[dir_events]] begin[:]
if call[name[re].match, parameter[constant[^[0-9]+-.+json$], name[each_file]]] begin[:]
call[name[dir_events_actual].append, parameter[name[each_file]]]
call[name[dir_events_actual].sort, parameter[]]
for taget[name[event_file]] in starred[name[dir_events_actual]] begin[:]
with call[name[codecs].open, parameter[call[name[os].path.join, parameter[name[event_path], name[event_file]]], constant[r]]] begin[:]
variable[event] assign[=] call[name[json].load, parameter[name[event_file_actual]]]
<ast.Yield object at 0x7da20c6c5870>
|
keyword[def] identifier[events] ( identifier[self] ):
literal[string]
identifier[event_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[config] . identifier[artifact_dir] , literal[string] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[event_path] ):
keyword[raise] identifier[AnsibleRunnerException] ( literal[string] )
identifier[dir_events] = identifier[os] . identifier[listdir] ( identifier[event_path] )
identifier[dir_events_actual] =[]
keyword[for] identifier[each_file] keyword[in] identifier[dir_events] :
keyword[if] identifier[re] . identifier[match] ( literal[string] , identifier[each_file] ):
identifier[dir_events_actual] . identifier[append] ( identifier[each_file] )
identifier[dir_events_actual] . identifier[sort] ( identifier[key] = keyword[lambda] identifier[filenm] : identifier[int] ( identifier[filenm] . identifier[split] ( literal[string] , literal[int] )[ literal[int] ]))
keyword[for] identifier[event_file] keyword[in] identifier[dir_events_actual] :
keyword[with] identifier[codecs] . identifier[open] ( identifier[os] . identifier[path] . identifier[join] ( identifier[event_path] , identifier[event_file] ), literal[string] , identifier[encoding] = literal[string] ) keyword[as] identifier[event_file_actual] :
identifier[event] = identifier[json] . identifier[load] ( identifier[event_file_actual] )
keyword[yield] identifier[event]
|
def events(self):
"""
A generator that will return all ansible job events in the order that they were emitted from Ansible
Example:
{
"event":"runner_on_ok",
"uuid":"00a50d9c-161a-4b74-b978-9f60becaf209",
"stdout":"ok: [localhost] => {\\r\\n \\" msg\\":\\"Test!\\"\\r\\n}",
"counter":6,
"pid":740,
"created":"2018-04-05T18:24:36.096725",
"end_line":10,
"start_line":7,
"event_data":{
"play_pattern":"all",
"play":"all",
"task":"debug",
"task_args":"msg=Test!",
"remote_addr":"localhost",
"res":{
"msg":"Test!",
"changed":false,
"_ansible_verbose_always":true,
"_ansible_no_log":false
},
"pid":740,
"play_uuid":"0242ac11-0002-443b-cdb1-000000000006",
"task_uuid":"0242ac11-0002-443b-cdb1-000000000008",
"event_loop":null,
"playbook_uuid":"634edeee-3228-4c17-a1b4-f010fdd42eb2",
"playbook":"test.yml",
"task_action":"debug",
"host":"localhost",
"task_path":"/tmp/demo/project/test.yml:3"
}
}
"""
event_path = os.path.join(self.config.artifact_dir, 'job_events')
if not os.path.exists(event_path):
raise AnsibleRunnerException('events missing') # depends on [control=['if'], data=[]]
dir_events = os.listdir(event_path)
dir_events_actual = []
for each_file in dir_events:
if re.match('^[0-9]+-.+json$', each_file):
dir_events_actual.append(each_file) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['each_file']]
dir_events_actual.sort(key=lambda filenm: int(filenm.split('-', 1)[0]))
for event_file in dir_events_actual:
with codecs.open(os.path.join(event_path, event_file), 'r', encoding='utf-8') as event_file_actual:
event = json.load(event_file_actual) # depends on [control=['with'], data=['event_file_actual']]
yield event # depends on [control=['for'], data=['event_file']]
|
def update_network(network,
update_nodes = True,
update_links = True,
update_groups = True,
update_scenarios = True,
**kwargs):
"""
Update an entire network
"""
log.info("Updating Network %s", network.name)
user_id = kwargs.get('user_id')
#check_perm('update_network')
try:
net_i = db.DBSession.query(Network).filter(Network.id == network.id).one()
except NoResultFound:
raise ResourceNotFoundError("Network with id %s not found"%(network.id))
net_i.project_id = network.project_id
net_i.name = network.name
net_i.description = network.description
net_i.projection = network.projection
net_i.layout = network.get_layout()
all_resource_attrs = {}
new_network_attributes = _update_attributes(net_i, network.attributes)
all_resource_attrs.update(new_network_attributes)
hdb.add_resource_types(net_i, network.types)
#Maps temporary node_ids to real node_ids
node_id_map = dict()
if network.nodes is not None and update_nodes is True:
log.info("Updating nodes")
t0 = time.time()
#First add all the nodes
node_id_map = dict([(n.id, n) for n in net_i.nodes])
for node in network.nodes:
#If we get a negative or null node id, we know
#it is a new node.
if node.id is not None and node.id > 0:
n = node_id_map[node.id]
n.name = node.name
n.description = node.description
n.x = node.x
n.y = node.y
n.status = node.status
n.layout = node.get_layout()
else:
log.info("Adding new node %s", node.name)
n = net_i.add_node(node.name,
node.description,
node.get_layout(),
node.x,
node.y)
net_i.nodes.append(n)
node_id_map[n.id] = n
all_resource_attrs.update(_update_attributes(n, node.attributes))
hdb.add_resource_types(n, node.types)
log.info("Updating nodes took %s", time.time() - t0)
link_id_map = dict()
if network.links is not None and update_links is True:
log.info("Updating links")
t0 = time.time()
link_id_map = dict([(l.link_id, l) for l in net_i.links])
for link in network.links:
node_1 = node_id_map[link.node_1_id]
node_2 = node_id_map[link.node_2_id]
if link.id is None or link.id < 0:
log.info("Adding new link %s", link.name)
l = net_i.add_link(link.name,
link.description,
link.get_layout(),
node_1,
node_2)
net_i.links.append(l)
link_id_map[link.id] = l
else:
l = link_id_map[link.id]
l.name = link.name
l.link_descripion = link.description
l.node_a = node_1
l.node_b = node_2
l.layout = link.get_layout()
all_resource_attrs.update(_update_attributes(l, link.attributes))
hdb.add_resource_types(l, link.types)
log.info("Updating links took %s", time.time() - t0)
group_id_map = dict()
#Next all the groups
if network.resourcegroups is not None and update_groups is True:
log.info("Updating groups")
t0 = time.time()
group_id_map = dict([(g.group_id, g) for g in net_i.resourcegroups])
for group in network.resourcegroups:
#If we get a negative or null group id, we know
#it is a new group.
if group.id is not None and group.id > 0:
g_i = group_id_map[group.id]
g_i.name = group.name
g_i.description = group.description
g_i.status = group.status
else:
log.info("Adding new group %s", group.name)
g_i = net_i.add_group(group.name,
group.description,
group.status)
net_i.resourcegroups.append(net_i)
group_id_map[g_i.group_id] = g_i
all_resource_attrs.update(_update_attributes(g_i, group.attributes))
hdb.add_resource_types(g_i, group.types)
group_id_map[group.id] = g_i
log.info("Updating groups took %s", time.time() - t0)
errors = []
if network.scenarios is not None and update_scenarios is True:
for s in network.scenarios:
add_scenario = False
if s.id is not None:
if s.id > 0:
try:
scen_i = db.DBSession.query(Scenario).filter(Scenario.id==s.id).one()
if scen_i.locked == 'Y':
errors.append('Scenario %s was not updated as it is locked'%(s.id))
continue
scenario.update_scenario(s, flush=False, **kwargs)
except NoResultFound:
raise ResourceNotFoundError("Scenario %s not found"%(s.id))
else:
add_scenario = True
else:
add_scenario = True
if add_scenario is True:
log.info("Adding new scenario %s to network", s.name)
scenario.add_scenario(network.id, s, **kwargs)
db.DBSession.flush()
updated_net = get_network(network.id, summary=True, **kwargs)
return updated_net
|
def function[update_network, parameter[network, update_nodes, update_links, update_groups, update_scenarios]]:
constant[
Update an entire network
]
call[name[log].info, parameter[constant[Updating Network %s], name[network].name]]
variable[user_id] assign[=] call[name[kwargs].get, parameter[constant[user_id]]]
<ast.Try object at 0x7da20cabd8a0>
name[net_i].project_id assign[=] name[network].project_id
name[net_i].name assign[=] name[network].name
name[net_i].description assign[=] name[network].description
name[net_i].projection assign[=] name[network].projection
name[net_i].layout assign[=] call[name[network].get_layout, parameter[]]
variable[all_resource_attrs] assign[=] dictionary[[], []]
variable[new_network_attributes] assign[=] call[name[_update_attributes], parameter[name[net_i], name[network].attributes]]
call[name[all_resource_attrs].update, parameter[name[new_network_attributes]]]
call[name[hdb].add_resource_types, parameter[name[net_i], name[network].types]]
variable[node_id_map] assign[=] call[name[dict], parameter[]]
if <ast.BoolOp object at 0x7da20c796ad0> begin[:]
call[name[log].info, parameter[constant[Updating nodes]]]
variable[t0] assign[=] call[name[time].time, parameter[]]
variable[node_id_map] assign[=] call[name[dict], parameter[<ast.ListComp object at 0x7da20cabece0>]]
for taget[name[node]] in starred[name[network].nodes] begin[:]
if <ast.BoolOp object at 0x7da20cabc610> begin[:]
variable[n] assign[=] call[name[node_id_map]][name[node].id]
name[n].name assign[=] name[node].name
name[n].description assign[=] name[node].description
name[n].x assign[=] name[node].x
name[n].y assign[=] name[node].y
name[n].status assign[=] name[node].status
name[n].layout assign[=] call[name[node].get_layout, parameter[]]
call[name[all_resource_attrs].update, parameter[call[name[_update_attributes], parameter[name[n], name[node].attributes]]]]
call[name[hdb].add_resource_types, parameter[name[n], name[node].types]]
call[name[log].info, parameter[constant[Updating nodes took %s], binary_operation[call[name[time].time, parameter[]] - name[t0]]]]
variable[link_id_map] assign[=] call[name[dict], parameter[]]
if <ast.BoolOp object at 0x7da18bccbca0> begin[:]
call[name[log].info, parameter[constant[Updating links]]]
variable[t0] assign[=] call[name[time].time, parameter[]]
variable[link_id_map] assign[=] call[name[dict], parameter[<ast.ListComp object at 0x7da18bcc89a0>]]
for taget[name[link]] in starred[name[network].links] begin[:]
variable[node_1] assign[=] call[name[node_id_map]][name[link].node_1_id]
variable[node_2] assign[=] call[name[node_id_map]][name[link].node_2_id]
if <ast.BoolOp object at 0x7da18bcc8df0> begin[:]
call[name[log].info, parameter[constant[Adding new link %s], name[link].name]]
variable[l] assign[=] call[name[net_i].add_link, parameter[name[link].name, name[link].description, call[name[link].get_layout, parameter[]], name[node_1], name[node_2]]]
call[name[net_i].links.append, parameter[name[l]]]
call[name[link_id_map]][name[link].id] assign[=] name[l]
call[name[all_resource_attrs].update, parameter[call[name[_update_attributes], parameter[name[l], name[link].attributes]]]]
call[name[hdb].add_resource_types, parameter[name[l], name[link].types]]
call[name[log].info, parameter[constant[Updating links took %s], binary_operation[call[name[time].time, parameter[]] - name[t0]]]]
variable[group_id_map] assign[=] call[name[dict], parameter[]]
if <ast.BoolOp object at 0x7da18bccbfa0> begin[:]
call[name[log].info, parameter[constant[Updating groups]]]
variable[t0] assign[=] call[name[time].time, parameter[]]
variable[group_id_map] assign[=] call[name[dict], parameter[<ast.ListComp object at 0x7da18bccbeb0>]]
for taget[name[group]] in starred[name[network].resourcegroups] begin[:]
if <ast.BoolOp object at 0x7da18bcca9b0> begin[:]
variable[g_i] assign[=] call[name[group_id_map]][name[group].id]
name[g_i].name assign[=] name[group].name
name[g_i].description assign[=] name[group].description
name[g_i].status assign[=] name[group].status
call[name[all_resource_attrs].update, parameter[call[name[_update_attributes], parameter[name[g_i], name[group].attributes]]]]
call[name[hdb].add_resource_types, parameter[name[g_i], name[group].types]]
call[name[group_id_map]][name[group].id] assign[=] name[g_i]
call[name[log].info, parameter[constant[Updating groups took %s], binary_operation[call[name[time].time, parameter[]] - name[t0]]]]
variable[errors] assign[=] list[[]]
if <ast.BoolOp object at 0x7da20e955990> begin[:]
for taget[name[s]] in starred[name[network].scenarios] begin[:]
variable[add_scenario] assign[=] constant[False]
if compare[name[s].id is_not constant[None]] begin[:]
if compare[name[s].id greater[>] constant[0]] begin[:]
<ast.Try object at 0x7da20e955e10>
if compare[name[add_scenario] is constant[True]] begin[:]
call[name[log].info, parameter[constant[Adding new scenario %s to network], name[s].name]]
call[name[scenario].add_scenario, parameter[name[network].id, name[s]]]
call[name[db].DBSession.flush, parameter[]]
variable[updated_net] assign[=] call[name[get_network], parameter[name[network].id]]
return[name[updated_net]]
|
keyword[def] identifier[update_network] ( identifier[network] ,
identifier[update_nodes] = keyword[True] ,
identifier[update_links] = keyword[True] ,
identifier[update_groups] = keyword[True] ,
identifier[update_scenarios] = keyword[True] ,
** identifier[kwargs] ):
literal[string]
identifier[log] . identifier[info] ( literal[string] , identifier[network] . identifier[name] )
identifier[user_id] = identifier[kwargs] . identifier[get] ( literal[string] )
keyword[try] :
identifier[net_i] = identifier[db] . identifier[DBSession] . identifier[query] ( identifier[Network] ). identifier[filter] ( identifier[Network] . identifier[id] == identifier[network] . identifier[id] ). identifier[one] ()
keyword[except] identifier[NoResultFound] :
keyword[raise] identifier[ResourceNotFoundError] ( literal[string] %( identifier[network] . identifier[id] ))
identifier[net_i] . identifier[project_id] = identifier[network] . identifier[project_id]
identifier[net_i] . identifier[name] = identifier[network] . identifier[name]
identifier[net_i] . identifier[description] = identifier[network] . identifier[description]
identifier[net_i] . identifier[projection] = identifier[network] . identifier[projection]
identifier[net_i] . identifier[layout] = identifier[network] . identifier[get_layout] ()
identifier[all_resource_attrs] ={}
identifier[new_network_attributes] = identifier[_update_attributes] ( identifier[net_i] , identifier[network] . identifier[attributes] )
identifier[all_resource_attrs] . identifier[update] ( identifier[new_network_attributes] )
identifier[hdb] . identifier[add_resource_types] ( identifier[net_i] , identifier[network] . identifier[types] )
identifier[node_id_map] = identifier[dict] ()
keyword[if] identifier[network] . identifier[nodes] keyword[is] keyword[not] keyword[None] keyword[and] identifier[update_nodes] keyword[is] keyword[True] :
identifier[log] . identifier[info] ( literal[string] )
identifier[t0] = identifier[time] . identifier[time] ()
identifier[node_id_map] = identifier[dict] ([( identifier[n] . identifier[id] , identifier[n] ) keyword[for] identifier[n] keyword[in] identifier[net_i] . identifier[nodes] ])
keyword[for] identifier[node] keyword[in] identifier[network] . identifier[nodes] :
keyword[if] identifier[node] . identifier[id] keyword[is] keyword[not] keyword[None] keyword[and] identifier[node] . identifier[id] > literal[int] :
identifier[n] = identifier[node_id_map] [ identifier[node] . identifier[id] ]
identifier[n] . identifier[name] = identifier[node] . identifier[name]
identifier[n] . identifier[description] = identifier[node] . identifier[description]
identifier[n] . identifier[x] = identifier[node] . identifier[x]
identifier[n] . identifier[y] = identifier[node] . identifier[y]
identifier[n] . identifier[status] = identifier[node] . identifier[status]
identifier[n] . identifier[layout] = identifier[node] . identifier[get_layout] ()
keyword[else] :
identifier[log] . identifier[info] ( literal[string] , identifier[node] . identifier[name] )
identifier[n] = identifier[net_i] . identifier[add_node] ( identifier[node] . identifier[name] ,
identifier[node] . identifier[description] ,
identifier[node] . identifier[get_layout] (),
identifier[node] . identifier[x] ,
identifier[node] . identifier[y] )
identifier[net_i] . identifier[nodes] . identifier[append] ( identifier[n] )
identifier[node_id_map] [ identifier[n] . identifier[id] ]= identifier[n]
identifier[all_resource_attrs] . identifier[update] ( identifier[_update_attributes] ( identifier[n] , identifier[node] . identifier[attributes] ))
identifier[hdb] . identifier[add_resource_types] ( identifier[n] , identifier[node] . identifier[types] )
identifier[log] . identifier[info] ( literal[string] , identifier[time] . identifier[time] ()- identifier[t0] )
identifier[link_id_map] = identifier[dict] ()
keyword[if] identifier[network] . identifier[links] keyword[is] keyword[not] keyword[None] keyword[and] identifier[update_links] keyword[is] keyword[True] :
identifier[log] . identifier[info] ( literal[string] )
identifier[t0] = identifier[time] . identifier[time] ()
identifier[link_id_map] = identifier[dict] ([( identifier[l] . identifier[link_id] , identifier[l] ) keyword[for] identifier[l] keyword[in] identifier[net_i] . identifier[links] ])
keyword[for] identifier[link] keyword[in] identifier[network] . identifier[links] :
identifier[node_1] = identifier[node_id_map] [ identifier[link] . identifier[node_1_id] ]
identifier[node_2] = identifier[node_id_map] [ identifier[link] . identifier[node_2_id] ]
keyword[if] identifier[link] . identifier[id] keyword[is] keyword[None] keyword[or] identifier[link] . identifier[id] < literal[int] :
identifier[log] . identifier[info] ( literal[string] , identifier[link] . identifier[name] )
identifier[l] = identifier[net_i] . identifier[add_link] ( identifier[link] . identifier[name] ,
identifier[link] . identifier[description] ,
identifier[link] . identifier[get_layout] (),
identifier[node_1] ,
identifier[node_2] )
identifier[net_i] . identifier[links] . identifier[append] ( identifier[l] )
identifier[link_id_map] [ identifier[link] . identifier[id] ]= identifier[l]
keyword[else] :
identifier[l] = identifier[link_id_map] [ identifier[link] . identifier[id] ]
identifier[l] . identifier[name] = identifier[link] . identifier[name]
identifier[l] . identifier[link_descripion] = identifier[link] . identifier[description]
identifier[l] . identifier[node_a] = identifier[node_1]
identifier[l] . identifier[node_b] = identifier[node_2]
identifier[l] . identifier[layout] = identifier[link] . identifier[get_layout] ()
identifier[all_resource_attrs] . identifier[update] ( identifier[_update_attributes] ( identifier[l] , identifier[link] . identifier[attributes] ))
identifier[hdb] . identifier[add_resource_types] ( identifier[l] , identifier[link] . identifier[types] )
identifier[log] . identifier[info] ( literal[string] , identifier[time] . identifier[time] ()- identifier[t0] )
identifier[group_id_map] = identifier[dict] ()
keyword[if] identifier[network] . identifier[resourcegroups] keyword[is] keyword[not] keyword[None] keyword[and] identifier[update_groups] keyword[is] keyword[True] :
identifier[log] . identifier[info] ( literal[string] )
identifier[t0] = identifier[time] . identifier[time] ()
identifier[group_id_map] = identifier[dict] ([( identifier[g] . identifier[group_id] , identifier[g] ) keyword[for] identifier[g] keyword[in] identifier[net_i] . identifier[resourcegroups] ])
keyword[for] identifier[group] keyword[in] identifier[network] . identifier[resourcegroups] :
keyword[if] identifier[group] . identifier[id] keyword[is] keyword[not] keyword[None] keyword[and] identifier[group] . identifier[id] > literal[int] :
identifier[g_i] = identifier[group_id_map] [ identifier[group] . identifier[id] ]
identifier[g_i] . identifier[name] = identifier[group] . identifier[name]
identifier[g_i] . identifier[description] = identifier[group] . identifier[description]
identifier[g_i] . identifier[status] = identifier[group] . identifier[status]
keyword[else] :
identifier[log] . identifier[info] ( literal[string] , identifier[group] . identifier[name] )
identifier[g_i] = identifier[net_i] . identifier[add_group] ( identifier[group] . identifier[name] ,
identifier[group] . identifier[description] ,
identifier[group] . identifier[status] )
identifier[net_i] . identifier[resourcegroups] . identifier[append] ( identifier[net_i] )
identifier[group_id_map] [ identifier[g_i] . identifier[group_id] ]= identifier[g_i]
identifier[all_resource_attrs] . identifier[update] ( identifier[_update_attributes] ( identifier[g_i] , identifier[group] . identifier[attributes] ))
identifier[hdb] . identifier[add_resource_types] ( identifier[g_i] , identifier[group] . identifier[types] )
identifier[group_id_map] [ identifier[group] . identifier[id] ]= identifier[g_i]
identifier[log] . identifier[info] ( literal[string] , identifier[time] . identifier[time] ()- identifier[t0] )
identifier[errors] =[]
keyword[if] identifier[network] . identifier[scenarios] keyword[is] keyword[not] keyword[None] keyword[and] identifier[update_scenarios] keyword[is] keyword[True] :
keyword[for] identifier[s] keyword[in] identifier[network] . identifier[scenarios] :
identifier[add_scenario] = keyword[False]
keyword[if] identifier[s] . identifier[id] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[s] . identifier[id] > literal[int] :
keyword[try] :
identifier[scen_i] = identifier[db] . identifier[DBSession] . identifier[query] ( identifier[Scenario] ). identifier[filter] ( identifier[Scenario] . identifier[id] == identifier[s] . identifier[id] ). identifier[one] ()
keyword[if] identifier[scen_i] . identifier[locked] == literal[string] :
identifier[errors] . identifier[append] ( literal[string] %( identifier[s] . identifier[id] ))
keyword[continue]
identifier[scenario] . identifier[update_scenario] ( identifier[s] , identifier[flush] = keyword[False] ,** identifier[kwargs] )
keyword[except] identifier[NoResultFound] :
keyword[raise] identifier[ResourceNotFoundError] ( literal[string] %( identifier[s] . identifier[id] ))
keyword[else] :
identifier[add_scenario] = keyword[True]
keyword[else] :
identifier[add_scenario] = keyword[True]
keyword[if] identifier[add_scenario] keyword[is] keyword[True] :
identifier[log] . identifier[info] ( literal[string] , identifier[s] . identifier[name] )
identifier[scenario] . identifier[add_scenario] ( identifier[network] . identifier[id] , identifier[s] ,** identifier[kwargs] )
identifier[db] . identifier[DBSession] . identifier[flush] ()
identifier[updated_net] = identifier[get_network] ( identifier[network] . identifier[id] , identifier[summary] = keyword[True] ,** identifier[kwargs] )
keyword[return] identifier[updated_net]
|
def update_network(network, update_nodes=True, update_links=True, update_groups=True, update_scenarios=True, **kwargs):
"""
Update an entire network
"""
log.info('Updating Network %s', network.name)
user_id = kwargs.get('user_id')
#check_perm('update_network')
try:
net_i = db.DBSession.query(Network).filter(Network.id == network.id).one() # depends on [control=['try'], data=[]]
except NoResultFound:
raise ResourceNotFoundError('Network with id %s not found' % network.id) # depends on [control=['except'], data=[]]
net_i.project_id = network.project_id
net_i.name = network.name
net_i.description = network.description
net_i.projection = network.projection
net_i.layout = network.get_layout()
all_resource_attrs = {}
new_network_attributes = _update_attributes(net_i, network.attributes)
all_resource_attrs.update(new_network_attributes)
hdb.add_resource_types(net_i, network.types)
#Maps temporary node_ids to real node_ids
node_id_map = dict()
if network.nodes is not None and update_nodes is True:
log.info('Updating nodes')
t0 = time.time()
#First add all the nodes
node_id_map = dict([(n.id, n) for n in net_i.nodes])
for node in network.nodes:
#If we get a negative or null node id, we know
#it is a new node.
if node.id is not None and node.id > 0:
n = node_id_map[node.id]
n.name = node.name
n.description = node.description
n.x = node.x
n.y = node.y
n.status = node.status
n.layout = node.get_layout() # depends on [control=['if'], data=[]]
else:
log.info('Adding new node %s', node.name)
n = net_i.add_node(node.name, node.description, node.get_layout(), node.x, node.y)
net_i.nodes.append(n)
node_id_map[n.id] = n
all_resource_attrs.update(_update_attributes(n, node.attributes))
hdb.add_resource_types(n, node.types) # depends on [control=['for'], data=['node']]
log.info('Updating nodes took %s', time.time() - t0) # depends on [control=['if'], data=[]]
link_id_map = dict()
if network.links is not None and update_links is True:
log.info('Updating links')
t0 = time.time()
link_id_map = dict([(l.link_id, l) for l in net_i.links])
for link in network.links:
node_1 = node_id_map[link.node_1_id]
node_2 = node_id_map[link.node_2_id]
if link.id is None or link.id < 0:
log.info('Adding new link %s', link.name)
l = net_i.add_link(link.name, link.description, link.get_layout(), node_1, node_2)
net_i.links.append(l)
link_id_map[link.id] = l # depends on [control=['if'], data=[]]
else:
l = link_id_map[link.id]
l.name = link.name
l.link_descripion = link.description
l.node_a = node_1
l.node_b = node_2
l.layout = link.get_layout()
all_resource_attrs.update(_update_attributes(l, link.attributes))
hdb.add_resource_types(l, link.types) # depends on [control=['for'], data=['link']]
log.info('Updating links took %s', time.time() - t0) # depends on [control=['if'], data=[]]
group_id_map = dict()
#Next all the groups
if network.resourcegroups is not None and update_groups is True:
log.info('Updating groups')
t0 = time.time()
group_id_map = dict([(g.group_id, g) for g in net_i.resourcegroups])
for group in network.resourcegroups:
#If we get a negative or null group id, we know
#it is a new group.
if group.id is not None and group.id > 0:
g_i = group_id_map[group.id]
g_i.name = group.name
g_i.description = group.description
g_i.status = group.status # depends on [control=['if'], data=[]]
else:
log.info('Adding new group %s', group.name)
g_i = net_i.add_group(group.name, group.description, group.status)
net_i.resourcegroups.append(net_i)
group_id_map[g_i.group_id] = g_i
all_resource_attrs.update(_update_attributes(g_i, group.attributes))
hdb.add_resource_types(g_i, group.types)
group_id_map[group.id] = g_i # depends on [control=['for'], data=['group']]
log.info('Updating groups took %s', time.time() - t0) # depends on [control=['if'], data=[]]
errors = []
if network.scenarios is not None and update_scenarios is True:
for s in network.scenarios:
add_scenario = False
if s.id is not None:
if s.id > 0:
try:
scen_i = db.DBSession.query(Scenario).filter(Scenario.id == s.id).one()
if scen_i.locked == 'Y':
errors.append('Scenario %s was not updated as it is locked' % s.id)
continue # depends on [control=['if'], data=[]]
scenario.update_scenario(s, flush=False, **kwargs) # depends on [control=['try'], data=[]]
except NoResultFound:
raise ResourceNotFoundError('Scenario %s not found' % s.id) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
else:
add_scenario = True # depends on [control=['if'], data=[]]
else:
add_scenario = True
if add_scenario is True:
log.info('Adding new scenario %s to network', s.name)
scenario.add_scenario(network.id, s, **kwargs) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['s']] # depends on [control=['if'], data=[]]
db.DBSession.flush()
updated_net = get_network(network.id, summary=True, **kwargs)
return updated_net
|
def mtf_image_transformer_base_imagenet_mp64():
"""Model parallel ImageNet parameters."""
hparams = mtf_image_transformer_base_imagenet()
hparams.mesh_shape = "model:8;batch:4"
hparams.layout = "batch:batch;d_ff:model;heads:model"
hparams.batch_size = 8
hparams.img_len = 64
hparams.num_decoder_layers = 8
return hparams
|
def function[mtf_image_transformer_base_imagenet_mp64, parameter[]]:
constant[Model parallel ImageNet parameters.]
variable[hparams] assign[=] call[name[mtf_image_transformer_base_imagenet], parameter[]]
name[hparams].mesh_shape assign[=] constant[model:8;batch:4]
name[hparams].layout assign[=] constant[batch:batch;d_ff:model;heads:model]
name[hparams].batch_size assign[=] constant[8]
name[hparams].img_len assign[=] constant[64]
name[hparams].num_decoder_layers assign[=] constant[8]
return[name[hparams]]
|
keyword[def] identifier[mtf_image_transformer_base_imagenet_mp64] ():
literal[string]
identifier[hparams] = identifier[mtf_image_transformer_base_imagenet] ()
identifier[hparams] . identifier[mesh_shape] = literal[string]
identifier[hparams] . identifier[layout] = literal[string]
identifier[hparams] . identifier[batch_size] = literal[int]
identifier[hparams] . identifier[img_len] = literal[int]
identifier[hparams] . identifier[num_decoder_layers] = literal[int]
keyword[return] identifier[hparams]
|
def mtf_image_transformer_base_imagenet_mp64():
"""Model parallel ImageNet parameters."""
hparams = mtf_image_transformer_base_imagenet()
hparams.mesh_shape = 'model:8;batch:4'
hparams.layout = 'batch:batch;d_ff:model;heads:model'
hparams.batch_size = 8
hparams.img_len = 64
hparams.num_decoder_layers = 8
return hparams
|
def ip_to_long (ip):
"""
Convert ip address to a network byte order 32-bit integer.
"""
quad = ip.split('.')
if len(quad) == 1:
quad = quad + [0, 0, 0]
elif len(quad) < 4:
host = quad[-1:]
quad = quad[:-1] + [0,] * (4 - len(quad)) + host
lip = 0
for q in quad:
lip = (lip << 8) | int(q)
return lip
|
def function[ip_to_long, parameter[ip]]:
constant[
Convert ip address to a network byte order 32-bit integer.
]
variable[quad] assign[=] call[name[ip].split, parameter[constant[.]]]
if compare[call[name[len], parameter[name[quad]]] equal[==] constant[1]] begin[:]
variable[quad] assign[=] binary_operation[name[quad] + list[[<ast.Constant object at 0x7da1b2390340>, <ast.Constant object at 0x7da1b2393b50>, <ast.Constant object at 0x7da1b2393ee0>]]]
variable[lip] assign[=] constant[0]
for taget[name[q]] in starred[name[quad]] begin[:]
variable[lip] assign[=] binary_operation[binary_operation[name[lip] <ast.LShift object at 0x7da2590d69e0> constant[8]] <ast.BitOr object at 0x7da2590d6aa0> call[name[int], parameter[name[q]]]]
return[name[lip]]
|
keyword[def] identifier[ip_to_long] ( identifier[ip] ):
literal[string]
identifier[quad] = identifier[ip] . identifier[split] ( literal[string] )
keyword[if] identifier[len] ( identifier[quad] )== literal[int] :
identifier[quad] = identifier[quad] +[ literal[int] , literal[int] , literal[int] ]
keyword[elif] identifier[len] ( identifier[quad] )< literal[int] :
identifier[host] = identifier[quad] [- literal[int] :]
identifier[quad] = identifier[quad] [:- literal[int] ]+[ literal[int] ,]*( literal[int] - identifier[len] ( identifier[quad] ))+ identifier[host]
identifier[lip] = literal[int]
keyword[for] identifier[q] keyword[in] identifier[quad] :
identifier[lip] =( identifier[lip] << literal[int] )| identifier[int] ( identifier[q] )
keyword[return] identifier[lip]
|
def ip_to_long(ip):
"""
Convert ip address to a network byte order 32-bit integer.
"""
quad = ip.split('.')
if len(quad) == 1:
quad = quad + [0, 0, 0] # depends on [control=['if'], data=[]]
elif len(quad) < 4:
host = quad[-1:]
quad = quad[:-1] + [0] * (4 - len(quad)) + host # depends on [control=['if'], data=[]]
lip = 0
for q in quad:
lip = lip << 8 | int(q) # depends on [control=['for'], data=['q']]
return lip
|
def get_current_tag(self) -> typing.Optional[str]:
"""
:return: tag name if current commit is on tag, else None
:rtype: optional str
"""
tags = list(self.repo.tags)
if not tags:
LOGGER.debug('no tag found')
return None
for tag in tags:
LOGGER.debug('tag found: %s; comparing with commit', tag)
if tag.commit == self.latest_commit():
tag_name: str = tag.name
LOGGER.debug('found tag on commit: %s', tag_name)
return tag_name
LOGGER.debug('no tag found on latest commit')
return None
|
def function[get_current_tag, parameter[self]]:
constant[
:return: tag name if current commit is on tag, else None
:rtype: optional str
]
variable[tags] assign[=] call[name[list], parameter[name[self].repo.tags]]
if <ast.UnaryOp object at 0x7da2054a4fa0> begin[:]
call[name[LOGGER].debug, parameter[constant[no tag found]]]
return[constant[None]]
for taget[name[tag]] in starred[name[tags]] begin[:]
call[name[LOGGER].debug, parameter[constant[tag found: %s; comparing with commit], name[tag]]]
if compare[name[tag].commit equal[==] call[name[self].latest_commit, parameter[]]] begin[:]
<ast.AnnAssign object at 0x7da2054a6350>
call[name[LOGGER].debug, parameter[constant[found tag on commit: %s], name[tag_name]]]
return[name[tag_name]]
call[name[LOGGER].debug, parameter[constant[no tag found on latest commit]]]
return[constant[None]]
|
keyword[def] identifier[get_current_tag] ( identifier[self] )-> identifier[typing] . identifier[Optional] [ identifier[str] ]:
literal[string]
identifier[tags] = identifier[list] ( identifier[self] . identifier[repo] . identifier[tags] )
keyword[if] keyword[not] identifier[tags] :
identifier[LOGGER] . identifier[debug] ( literal[string] )
keyword[return] keyword[None]
keyword[for] identifier[tag] keyword[in] identifier[tags] :
identifier[LOGGER] . identifier[debug] ( literal[string] , identifier[tag] )
keyword[if] identifier[tag] . identifier[commit] == identifier[self] . identifier[latest_commit] ():
identifier[tag_name] : identifier[str] = identifier[tag] . identifier[name]
identifier[LOGGER] . identifier[debug] ( literal[string] , identifier[tag_name] )
keyword[return] identifier[tag_name]
identifier[LOGGER] . identifier[debug] ( literal[string] )
keyword[return] keyword[None]
|
def get_current_tag(self) -> typing.Optional[str]:
"""
:return: tag name if current commit is on tag, else None
:rtype: optional str
"""
tags = list(self.repo.tags)
if not tags:
LOGGER.debug('no tag found')
return None # depends on [control=['if'], data=[]]
for tag in tags:
LOGGER.debug('tag found: %s; comparing with commit', tag)
if tag.commit == self.latest_commit():
tag_name: str = tag.name
LOGGER.debug('found tag on commit: %s', tag_name)
return tag_name # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['tag']]
LOGGER.debug('no tag found on latest commit')
return None
|
def dateof(tag_name, tags):
"""Given a list of tags, returns the datetime of the tag with the given name; Otherwise None."""
for tag in tags:
if tag['name'] == tag_name:
commit = read_url(tag['commit']['url'])
return parse_timestamp(commit['commit']['committer']['date'])
return None
|
def function[dateof, parameter[tag_name, tags]]:
constant[Given a list of tags, returns the datetime of the tag with the given name; Otherwise None.]
for taget[name[tag]] in starred[name[tags]] begin[:]
if compare[call[name[tag]][constant[name]] equal[==] name[tag_name]] begin[:]
variable[commit] assign[=] call[name[read_url], parameter[call[call[name[tag]][constant[commit]]][constant[url]]]]
return[call[name[parse_timestamp], parameter[call[call[call[name[commit]][constant[commit]]][constant[committer]]][constant[date]]]]]
return[constant[None]]
|
keyword[def] identifier[dateof] ( identifier[tag_name] , identifier[tags] ):
literal[string]
keyword[for] identifier[tag] keyword[in] identifier[tags] :
keyword[if] identifier[tag] [ literal[string] ]== identifier[tag_name] :
identifier[commit] = identifier[read_url] ( identifier[tag] [ literal[string] ][ literal[string] ])
keyword[return] identifier[parse_timestamp] ( identifier[commit] [ literal[string] ][ literal[string] ][ literal[string] ])
keyword[return] keyword[None]
|
def dateof(tag_name, tags):
"""Given a list of tags, returns the datetime of the tag with the given name; Otherwise None."""
for tag in tags:
if tag['name'] == tag_name:
commit = read_url(tag['commit']['url'])
return parse_timestamp(commit['commit']['committer']['date']) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['tag']]
return None
|
def spklef(filename):
"""
Load an ephemeris file for use by the readers. Return that file's
handle, to be used by other SPK routines to refer to the file.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/spklef_c.html
:param filename: Name of the file to be loaded.
:type filename: str
:return: Loaded file's handle.
:rtype: int
"""
filename = stypes.stringToCharP(filename)
handle = ctypes.c_int()
libspice.spklef_c(filename, ctypes.byref(handle))
return handle.value
|
def function[spklef, parameter[filename]]:
constant[
Load an ephemeris file for use by the readers. Return that file's
handle, to be used by other SPK routines to refer to the file.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/spklef_c.html
:param filename: Name of the file to be loaded.
:type filename: str
:return: Loaded file's handle.
:rtype: int
]
variable[filename] assign[=] call[name[stypes].stringToCharP, parameter[name[filename]]]
variable[handle] assign[=] call[name[ctypes].c_int, parameter[]]
call[name[libspice].spklef_c, parameter[name[filename], call[name[ctypes].byref, parameter[name[handle]]]]]
return[name[handle].value]
|
keyword[def] identifier[spklef] ( identifier[filename] ):
literal[string]
identifier[filename] = identifier[stypes] . identifier[stringToCharP] ( identifier[filename] )
identifier[handle] = identifier[ctypes] . identifier[c_int] ()
identifier[libspice] . identifier[spklef_c] ( identifier[filename] , identifier[ctypes] . identifier[byref] ( identifier[handle] ))
keyword[return] identifier[handle] . identifier[value]
|
def spklef(filename):
"""
Load an ephemeris file for use by the readers. Return that file's
handle, to be used by other SPK routines to refer to the file.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/spklef_c.html
:param filename: Name of the file to be loaded.
:type filename: str
:return: Loaded file's handle.
:rtype: int
"""
filename = stypes.stringToCharP(filename)
handle = ctypes.c_int()
libspice.spklef_c(filename, ctypes.byref(handle))
return handle.value
|
def parse_args(argv):
"""
Parse commandline arguments.
Arguments:
argv -- An argument list without the program name.
"""
output_funcs = {
'json': dump_as_json,
'tsv': dump_as_two_item_tsv,
}
default_output_func_key = 'json'
parser = argparse.ArgumentParser()
parser.add_argument(
'-v', '--version', action='version',
version='%(prog)s {0}'.format(__version__))
parser.add_argument(
'input', metavar='inpath', nargs='*',
help='Input transaction file (default: stdin).',
type=argparse.FileType('r'), default=[sys.stdin])
parser.add_argument(
'-o', '--output', metavar='outpath',
help='Output file (default: stdout).',
type=argparse.FileType('w'), default=sys.stdout)
parser.add_argument(
'-l', '--max-length', metavar='int',
help='Max length of relations (default: infinite).',
type=int, default=None)
parser.add_argument(
'-s', '--min-support', metavar='float',
help='Minimum support ratio (must be > 0, default: 0.1).',
type=float, default=0.1)
parser.add_argument(
'-c', '--min-confidence', metavar='float',
help='Minimum confidence (default: 0.5).',
type=float, default=0.5)
parser.add_argument(
'-t', '--min-lift', metavar='float',
help='Minimum lift (default: 0.0).',
type=float, default=0.0)
parser.add_argument(
'-d', '--delimiter', metavar='str',
help='Delimiter for items of transactions (default: tab).',
type=str, default='\t')
parser.add_argument(
'-f', '--out-format', metavar='str',
help='Output format ({0}; default: {1}).'.format(
', '.join(output_funcs.keys()), default_output_func_key),
type=str, choices=output_funcs.keys(), default=default_output_func_key)
args = parser.parse_args(argv)
args.output_func = output_funcs[args.out_format]
return args
|
def function[parse_args, parameter[argv]]:
constant[
Parse commandline arguments.
Arguments:
argv -- An argument list without the program name.
]
variable[output_funcs] assign[=] dictionary[[<ast.Constant object at 0x7da1b23470a0>, <ast.Constant object at 0x7da1b23460b0>], [<ast.Name object at 0x7da1b2347c10>, <ast.Name object at 0x7da1b2344a30>]]
variable[default_output_func_key] assign[=] constant[json]
variable[parser] assign[=] call[name[argparse].ArgumentParser, parameter[]]
call[name[parser].add_argument, parameter[constant[-v], constant[--version]]]
call[name[parser].add_argument, parameter[constant[input]]]
call[name[parser].add_argument, parameter[constant[-o], constant[--output]]]
call[name[parser].add_argument, parameter[constant[-l], constant[--max-length]]]
call[name[parser].add_argument, parameter[constant[-s], constant[--min-support]]]
call[name[parser].add_argument, parameter[constant[-c], constant[--min-confidence]]]
call[name[parser].add_argument, parameter[constant[-t], constant[--min-lift]]]
call[name[parser].add_argument, parameter[constant[-d], constant[--delimiter]]]
call[name[parser].add_argument, parameter[constant[-f], constant[--out-format]]]
variable[args] assign[=] call[name[parser].parse_args, parameter[name[argv]]]
name[args].output_func assign[=] call[name[output_funcs]][name[args].out_format]
return[name[args]]
|
keyword[def] identifier[parse_args] ( identifier[argv] ):
literal[string]
identifier[output_funcs] ={
literal[string] : identifier[dump_as_json] ,
literal[string] : identifier[dump_as_two_item_tsv] ,
}
identifier[default_output_func_key] = literal[string]
identifier[parser] = identifier[argparse] . identifier[ArgumentParser] ()
identifier[parser] . identifier[add_argument] (
literal[string] , literal[string] , identifier[action] = literal[string] ,
identifier[version] = literal[string] . identifier[format] ( identifier[__version__] ))
identifier[parser] . identifier[add_argument] (
literal[string] , identifier[metavar] = literal[string] , identifier[nargs] = literal[string] ,
identifier[help] = literal[string] ,
identifier[type] = identifier[argparse] . identifier[FileType] ( literal[string] ), identifier[default] =[ identifier[sys] . identifier[stdin] ])
identifier[parser] . identifier[add_argument] (
literal[string] , literal[string] , identifier[metavar] = literal[string] ,
identifier[help] = literal[string] ,
identifier[type] = identifier[argparse] . identifier[FileType] ( literal[string] ), identifier[default] = identifier[sys] . identifier[stdout] )
identifier[parser] . identifier[add_argument] (
literal[string] , literal[string] , identifier[metavar] = literal[string] ,
identifier[help] = literal[string] ,
identifier[type] = identifier[int] , identifier[default] = keyword[None] )
identifier[parser] . identifier[add_argument] (
literal[string] , literal[string] , identifier[metavar] = literal[string] ,
identifier[help] = literal[string] ,
identifier[type] = identifier[float] , identifier[default] = literal[int] )
identifier[parser] . identifier[add_argument] (
literal[string] , literal[string] , identifier[metavar] = literal[string] ,
identifier[help] = literal[string] ,
identifier[type] = identifier[float] , identifier[default] = literal[int] )
identifier[parser] . identifier[add_argument] (
literal[string] , literal[string] , identifier[metavar] = literal[string] ,
identifier[help] = literal[string] ,
identifier[type] = identifier[float] , identifier[default] = literal[int] )
identifier[parser] . identifier[add_argument] (
literal[string] , literal[string] , identifier[metavar] = literal[string] ,
identifier[help] = literal[string] ,
identifier[type] = identifier[str] , identifier[default] = literal[string] )
identifier[parser] . identifier[add_argument] (
literal[string] , literal[string] , identifier[metavar] = literal[string] ,
identifier[help] = literal[string] . identifier[format] (
literal[string] . identifier[join] ( identifier[output_funcs] . identifier[keys] ()), identifier[default_output_func_key] ),
identifier[type] = identifier[str] , identifier[choices] = identifier[output_funcs] . identifier[keys] (), identifier[default] = identifier[default_output_func_key] )
identifier[args] = identifier[parser] . identifier[parse_args] ( identifier[argv] )
identifier[args] . identifier[output_func] = identifier[output_funcs] [ identifier[args] . identifier[out_format] ]
keyword[return] identifier[args]
|
def parse_args(argv):
"""
Parse commandline arguments.
Arguments:
argv -- An argument list without the program name.
"""
output_funcs = {'json': dump_as_json, 'tsv': dump_as_two_item_tsv}
default_output_func_key = 'json'
parser = argparse.ArgumentParser()
parser.add_argument('-v', '--version', action='version', version='%(prog)s {0}'.format(__version__))
parser.add_argument('input', metavar='inpath', nargs='*', help='Input transaction file (default: stdin).', type=argparse.FileType('r'), default=[sys.stdin])
parser.add_argument('-o', '--output', metavar='outpath', help='Output file (default: stdout).', type=argparse.FileType('w'), default=sys.stdout)
parser.add_argument('-l', '--max-length', metavar='int', help='Max length of relations (default: infinite).', type=int, default=None)
parser.add_argument('-s', '--min-support', metavar='float', help='Minimum support ratio (must be > 0, default: 0.1).', type=float, default=0.1)
parser.add_argument('-c', '--min-confidence', metavar='float', help='Minimum confidence (default: 0.5).', type=float, default=0.5)
parser.add_argument('-t', '--min-lift', metavar='float', help='Minimum lift (default: 0.0).', type=float, default=0.0)
parser.add_argument('-d', '--delimiter', metavar='str', help='Delimiter for items of transactions (default: tab).', type=str, default='\t')
parser.add_argument('-f', '--out-format', metavar='str', help='Output format ({0}; default: {1}).'.format(', '.join(output_funcs.keys()), default_output_func_key), type=str, choices=output_funcs.keys(), default=default_output_func_key)
args = parser.parse_args(argv)
args.output_func = output_funcs[args.out_format]
return args
|
def list_records(self, limit=None, offset=None):
"""
Returns a list of all records configured for this domain.
"""
return self.manager.list_records(self, limit=limit, offset=offset)
|
def function[list_records, parameter[self, limit, offset]]:
constant[
Returns a list of all records configured for this domain.
]
return[call[name[self].manager.list_records, parameter[name[self]]]]
|
keyword[def] identifier[list_records] ( identifier[self] , identifier[limit] = keyword[None] , identifier[offset] = keyword[None] ):
literal[string]
keyword[return] identifier[self] . identifier[manager] . identifier[list_records] ( identifier[self] , identifier[limit] = identifier[limit] , identifier[offset] = identifier[offset] )
|
def list_records(self, limit=None, offset=None):
"""
Returns a list of all records configured for this domain.
"""
return self.manager.list_records(self, limit=limit, offset=offset)
|
def check_signature(self, msgbuf, srcSystem, srcComponent):
'''check signature on incoming message'''
if isinstance(msgbuf, array.array):
msgbuf = msgbuf.tostring()
timestamp_buf = msgbuf[-12:-6]
link_id = msgbuf[-13]
(tlow, thigh) = struct.unpack('<IH', timestamp_buf)
timestamp = tlow + (thigh<<32)
# see if the timestamp is acceptable
stream_key = (link_id,srcSystem,srcComponent)
if stream_key in self.signing.stream_timestamps:
if timestamp <= self.signing.stream_timestamps[stream_key]:
# reject old timestamp
# print('old timestamp')
return False
else:
# a new stream has appeared. Accept the timestamp if it is at most
# one minute behind our current timestamp
if timestamp + 6000*1000 < self.signing.timestamp:
# print('bad new stream ', timestamp/(100.0*1000*60*60*24*365), self.signing.timestamp/(100.0*1000*60*60*24*365))
return False
self.signing.stream_timestamps[stream_key] = timestamp
# print('new stream')
h = hashlib.new('sha256')
h.update(self.signing.secret_key)
h.update(msgbuf[:-6])
sig1 = str(h.digest())[:6]
sig2 = str(msgbuf)[-6:]
if sig1 != sig2:
# print('sig mismatch')
return False
# the timestamp we next send with is the max of the received timestamp and
# our current timestamp
self.signing.timestamp = max(self.signing.timestamp, timestamp)
return True
|
def function[check_signature, parameter[self, msgbuf, srcSystem, srcComponent]]:
constant[check signature on incoming message]
if call[name[isinstance], parameter[name[msgbuf], name[array].array]] begin[:]
variable[msgbuf] assign[=] call[name[msgbuf].tostring, parameter[]]
variable[timestamp_buf] assign[=] call[name[msgbuf]][<ast.Slice object at 0x7da204620e80>]
variable[link_id] assign[=] call[name[msgbuf]][<ast.UnaryOp object at 0x7da204622710>]
<ast.Tuple object at 0x7da204623160> assign[=] call[name[struct].unpack, parameter[constant[<IH], name[timestamp_buf]]]
variable[timestamp] assign[=] binary_operation[name[tlow] + binary_operation[name[thigh] <ast.LShift object at 0x7da2590d69e0> constant[32]]]
variable[stream_key] assign[=] tuple[[<ast.Name object at 0x7da204622410>, <ast.Name object at 0x7da204623010>, <ast.Name object at 0x7da204622650>]]
if compare[name[stream_key] in name[self].signing.stream_timestamps] begin[:]
if compare[name[timestamp] less_or_equal[<=] call[name[self].signing.stream_timestamps][name[stream_key]]] begin[:]
return[constant[False]]
variable[h] assign[=] call[name[hashlib].new, parameter[constant[sha256]]]
call[name[h].update, parameter[name[self].signing.secret_key]]
call[name[h].update, parameter[call[name[msgbuf]][<ast.Slice object at 0x7da204620250>]]]
variable[sig1] assign[=] call[call[name[str], parameter[call[name[h].digest, parameter[]]]]][<ast.Slice object at 0x7da204621b10>]
variable[sig2] assign[=] call[call[name[str], parameter[name[msgbuf]]]][<ast.Slice object at 0x7da204620100>]
if compare[name[sig1] not_equal[!=] name[sig2]] begin[:]
return[constant[False]]
name[self].signing.timestamp assign[=] call[name[max], parameter[name[self].signing.timestamp, name[timestamp]]]
return[constant[True]]
|
keyword[def] identifier[check_signature] ( identifier[self] , identifier[msgbuf] , identifier[srcSystem] , identifier[srcComponent] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[msgbuf] , identifier[array] . identifier[array] ):
identifier[msgbuf] = identifier[msgbuf] . identifier[tostring] ()
identifier[timestamp_buf] = identifier[msgbuf] [- literal[int] :- literal[int] ]
identifier[link_id] = identifier[msgbuf] [- literal[int] ]
( identifier[tlow] , identifier[thigh] )= identifier[struct] . identifier[unpack] ( literal[string] , identifier[timestamp_buf] )
identifier[timestamp] = identifier[tlow] +( identifier[thigh] << literal[int] )
identifier[stream_key] =( identifier[link_id] , identifier[srcSystem] , identifier[srcComponent] )
keyword[if] identifier[stream_key] keyword[in] identifier[self] . identifier[signing] . identifier[stream_timestamps] :
keyword[if] identifier[timestamp] <= identifier[self] . identifier[signing] . identifier[stream_timestamps] [ identifier[stream_key] ]:
keyword[return] keyword[False]
keyword[else] :
keyword[if] identifier[timestamp] + literal[int] * literal[int] < identifier[self] . identifier[signing] . identifier[timestamp] :
keyword[return] keyword[False]
identifier[self] . identifier[signing] . identifier[stream_timestamps] [ identifier[stream_key] ]= identifier[timestamp]
identifier[h] = identifier[hashlib] . identifier[new] ( literal[string] )
identifier[h] . identifier[update] ( identifier[self] . identifier[signing] . identifier[secret_key] )
identifier[h] . identifier[update] ( identifier[msgbuf] [:- literal[int] ])
identifier[sig1] = identifier[str] ( identifier[h] . identifier[digest] ())[: literal[int] ]
identifier[sig2] = identifier[str] ( identifier[msgbuf] )[- literal[int] :]
keyword[if] identifier[sig1] != identifier[sig2] :
keyword[return] keyword[False]
identifier[self] . identifier[signing] . identifier[timestamp] = identifier[max] ( identifier[self] . identifier[signing] . identifier[timestamp] , identifier[timestamp] )
keyword[return] keyword[True]
|
def check_signature(self, msgbuf, srcSystem, srcComponent):
"""check signature on incoming message"""
if isinstance(msgbuf, array.array):
msgbuf = msgbuf.tostring() # depends on [control=['if'], data=[]]
timestamp_buf = msgbuf[-12:-6]
link_id = msgbuf[-13]
(tlow, thigh) = struct.unpack('<IH', timestamp_buf)
timestamp = tlow + (thigh << 32)
# see if the timestamp is acceptable
stream_key = (link_id, srcSystem, srcComponent)
if stream_key in self.signing.stream_timestamps:
if timestamp <= self.signing.stream_timestamps[stream_key]:
# reject old timestamp
# print('old timestamp')
return False # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['stream_key']]
else:
# a new stream has appeared. Accept the timestamp if it is at most
# one minute behind our current timestamp
if timestamp + 6000 * 1000 < self.signing.timestamp:
# print('bad new stream ', timestamp/(100.0*1000*60*60*24*365), self.signing.timestamp/(100.0*1000*60*60*24*365))
return False # depends on [control=['if'], data=[]]
self.signing.stream_timestamps[stream_key] = timestamp
# print('new stream')
h = hashlib.new('sha256')
h.update(self.signing.secret_key)
h.update(msgbuf[:-6])
sig1 = str(h.digest())[:6]
sig2 = str(msgbuf)[-6:]
if sig1 != sig2:
# print('sig mismatch')
return False # depends on [control=['if'], data=[]]
# the timestamp we next send with is the max of the received timestamp and
# our current timestamp
self.signing.timestamp = max(self.signing.timestamp, timestamp)
return True
|
def _call_multi_fortran_z_attr(self, names, data_types, num_elems,
entry_nums, attr_nums, var_names,
input_type_code, func, data_offset=None):
"""Calls Fortran function that reads attribute data.
data_offset translates unsigned into signed.
If number read in is negative, offset added.
"""
# isolate input type code variables
idx, = np.where(data_types == input_type_code)
if len(idx) > 0:
# maximimum array dimension
max_num = num_elems[idx].max()
sub_num_elems = num_elems[idx]
sub_names = np.array(names)[idx]
sub_var_names = np.array(var_names)[idx]
# zVariable numbers, 'entry' number
sub_entry_nums = entry_nums[idx]
# attribute number
sub_attr_nums = attr_nums[idx]
status, data = func(self.fname, sub_attr_nums, sub_entry_nums,
len(sub_attr_nums), max_num, len(self.fname))
if (status == 0).all():
if data_offset is not None:
data = data.astype(int)
idx, idy, = np.where(data < 0)
data[idx, idy] += data_offset
self._process_return_multi_z_attr(data, sub_names,
sub_var_names, sub_num_elems)
else:
# raise ValueError('CDF Error code :', status)
idx, = np.where(status != 0)
# raise first error
raise IOError(fortran_cdf.statusreporter(status[idx][0]))
|
def function[_call_multi_fortran_z_attr, parameter[self, names, data_types, num_elems, entry_nums, attr_nums, var_names, input_type_code, func, data_offset]]:
constant[Calls Fortran function that reads attribute data.
data_offset translates unsigned into signed.
If number read in is negative, offset added.
]
<ast.Tuple object at 0x7da1b00d8bb0> assign[=] call[name[np].where, parameter[compare[name[data_types] equal[==] name[input_type_code]]]]
if compare[call[name[len], parameter[name[idx]]] greater[>] constant[0]] begin[:]
variable[max_num] assign[=] call[call[name[num_elems]][name[idx]].max, parameter[]]
variable[sub_num_elems] assign[=] call[name[num_elems]][name[idx]]
variable[sub_names] assign[=] call[call[name[np].array, parameter[name[names]]]][name[idx]]
variable[sub_var_names] assign[=] call[call[name[np].array, parameter[name[var_names]]]][name[idx]]
variable[sub_entry_nums] assign[=] call[name[entry_nums]][name[idx]]
variable[sub_attr_nums] assign[=] call[name[attr_nums]][name[idx]]
<ast.Tuple object at 0x7da1afe507c0> assign[=] call[name[func], parameter[name[self].fname, name[sub_attr_nums], name[sub_entry_nums], call[name[len], parameter[name[sub_attr_nums]]], name[max_num], call[name[len], parameter[name[self].fname]]]]
if call[compare[name[status] equal[==] constant[0]].all, parameter[]] begin[:]
if compare[name[data_offset] is_not constant[None]] begin[:]
variable[data] assign[=] call[name[data].astype, parameter[name[int]]]
<ast.Tuple object at 0x7da1afe53700> assign[=] call[name[np].where, parameter[compare[name[data] less[<] constant[0]]]]
<ast.AugAssign object at 0x7da1afe51ba0>
call[name[self]._process_return_multi_z_attr, parameter[name[data], name[sub_names], name[sub_var_names], name[sub_num_elems]]]
|
keyword[def] identifier[_call_multi_fortran_z_attr] ( identifier[self] , identifier[names] , identifier[data_types] , identifier[num_elems] ,
identifier[entry_nums] , identifier[attr_nums] , identifier[var_names] ,
identifier[input_type_code] , identifier[func] , identifier[data_offset] = keyword[None] ):
literal[string]
identifier[idx] ,= identifier[np] . identifier[where] ( identifier[data_types] == identifier[input_type_code] )
keyword[if] identifier[len] ( identifier[idx] )> literal[int] :
identifier[max_num] = identifier[num_elems] [ identifier[idx] ]. identifier[max] ()
identifier[sub_num_elems] = identifier[num_elems] [ identifier[idx] ]
identifier[sub_names] = identifier[np] . identifier[array] ( identifier[names] )[ identifier[idx] ]
identifier[sub_var_names] = identifier[np] . identifier[array] ( identifier[var_names] )[ identifier[idx] ]
identifier[sub_entry_nums] = identifier[entry_nums] [ identifier[idx] ]
identifier[sub_attr_nums] = identifier[attr_nums] [ identifier[idx] ]
identifier[status] , identifier[data] = identifier[func] ( identifier[self] . identifier[fname] , identifier[sub_attr_nums] , identifier[sub_entry_nums] ,
identifier[len] ( identifier[sub_attr_nums] ), identifier[max_num] , identifier[len] ( identifier[self] . identifier[fname] ))
keyword[if] ( identifier[status] == literal[int] ). identifier[all] ():
keyword[if] identifier[data_offset] keyword[is] keyword[not] keyword[None] :
identifier[data] = identifier[data] . identifier[astype] ( identifier[int] )
identifier[idx] , identifier[idy] ,= identifier[np] . identifier[where] ( identifier[data] < literal[int] )
identifier[data] [ identifier[idx] , identifier[idy] ]+= identifier[data_offset]
identifier[self] . identifier[_process_return_multi_z_attr] ( identifier[data] , identifier[sub_names] ,
identifier[sub_var_names] , identifier[sub_num_elems] )
keyword[else] :
identifier[idx] ,= identifier[np] . identifier[where] ( identifier[status] != literal[int] )
keyword[raise] identifier[IOError] ( identifier[fortran_cdf] . identifier[statusreporter] ( identifier[status] [ identifier[idx] ][ literal[int] ]))
|
def _call_multi_fortran_z_attr(self, names, data_types, num_elems, entry_nums, attr_nums, var_names, input_type_code, func, data_offset=None):
"""Calls Fortran function that reads attribute data.
data_offset translates unsigned into signed.
If number read in is negative, offset added.
"""
# isolate input type code variables
(idx,) = np.where(data_types == input_type_code)
if len(idx) > 0:
# maximimum array dimension
max_num = num_elems[idx].max()
sub_num_elems = num_elems[idx]
sub_names = np.array(names)[idx]
sub_var_names = np.array(var_names)[idx]
# zVariable numbers, 'entry' number
sub_entry_nums = entry_nums[idx]
# attribute number
sub_attr_nums = attr_nums[idx]
(status, data) = func(self.fname, sub_attr_nums, sub_entry_nums, len(sub_attr_nums), max_num, len(self.fname))
if (status == 0).all():
if data_offset is not None:
data = data.astype(int)
(idx, idy) = np.where(data < 0)
data[idx, idy] += data_offset # depends on [control=['if'], data=['data_offset']]
self._process_return_multi_z_attr(data, sub_names, sub_var_names, sub_num_elems) # depends on [control=['if'], data=[]]
else:
# raise ValueError('CDF Error code :', status)
(idx,) = np.where(status != 0)
# raise first error
raise IOError(fortran_cdf.statusreporter(status[idx][0])) # depends on [control=['if'], data=[]]
|
def league_scores(self, total_data, time, show_datetime,
use_12_hour_format):
"""Prints the data in a pretty format"""
for match in total_data['matches']:
self.scores(self.parse_result(match), add_new_line=not show_datetime)
if show_datetime:
click.secho(' %s' % Stdout.utc_to_local(match["utcDate"],
use_12_hour_format,
show_datetime),
fg=self.colors.TIME)
click.echo()
|
def function[league_scores, parameter[self, total_data, time, show_datetime, use_12_hour_format]]:
constant[Prints the data in a pretty format]
for taget[name[match]] in starred[call[name[total_data]][constant[matches]]] begin[:]
call[name[self].scores, parameter[call[name[self].parse_result, parameter[name[match]]]]]
if name[show_datetime] begin[:]
call[name[click].secho, parameter[binary_operation[constant[ %s] <ast.Mod object at 0x7da2590d6920> call[name[Stdout].utc_to_local, parameter[call[name[match]][constant[utcDate]], name[use_12_hour_format], name[show_datetime]]]]]]
call[name[click].echo, parameter[]]
|
keyword[def] identifier[league_scores] ( identifier[self] , identifier[total_data] , identifier[time] , identifier[show_datetime] ,
identifier[use_12_hour_format] ):
literal[string]
keyword[for] identifier[match] keyword[in] identifier[total_data] [ literal[string] ]:
identifier[self] . identifier[scores] ( identifier[self] . identifier[parse_result] ( identifier[match] ), identifier[add_new_line] = keyword[not] identifier[show_datetime] )
keyword[if] identifier[show_datetime] :
identifier[click] . identifier[secho] ( literal[string] % identifier[Stdout] . identifier[utc_to_local] ( identifier[match] [ literal[string] ],
identifier[use_12_hour_format] ,
identifier[show_datetime] ),
identifier[fg] = identifier[self] . identifier[colors] . identifier[TIME] )
identifier[click] . identifier[echo] ()
|
def league_scores(self, total_data, time, show_datetime, use_12_hour_format):
"""Prints the data in a pretty format"""
for match in total_data['matches']:
self.scores(self.parse_result(match), add_new_line=not show_datetime)
if show_datetime:
click.secho(' %s' % Stdout.utc_to_local(match['utcDate'], use_12_hour_format, show_datetime), fg=self.colors.TIME) # depends on [control=['if'], data=[]]
click.echo() # depends on [control=['for'], data=['match']]
|
def folderitem(self, obj, item, index):
"""Augment folder listing item with additional data
"""
url = item.get("url")
title = item.get("Title")
item["replace"]["Title"] = get_link(url, value=title)
item["getDate"] = self.localize_date(obj.getDate())
item["getValidFrom"] = self.localize_date(obj.getValidFrom())
item["getValidTo"] = self.localize_date(obj.getValidTo())
if obj.getInternal() is True:
item["replace"]["getAgency"] = ""
item["state_class"] = "%s %s" % \
(item["state_class"], "internalcertificate")
item["getDocument"] = ""
item["replace"]["getDocument"] = ""
doc = self.get_document(obj)
if doc and doc.get_size() > 0:
filename = doc.filename
download_url = "{}/at_download/Document".format(url)
anchor = get_link(download_url, filename)
item["getDocument"] = filename
item["replace"]["getDocument"] = anchor
# Latest valid certificate
if obj == self.latest_certificate:
item["state_class"] = "state-published"
# Valid certificate
elif obj in self.valid_certificates:
item["state_class"] = "state-valid state-published"
# Invalid certificates
else:
img = get_image("exclamation.png", title=t(_("Out of date")))
item["replace"]["getValidTo"] = "%s %s" % (item["getValidTo"], img)
item["state_class"] = "state-invalid"
return item
|
def function[folderitem, parameter[self, obj, item, index]]:
constant[Augment folder listing item with additional data
]
variable[url] assign[=] call[name[item].get, parameter[constant[url]]]
variable[title] assign[=] call[name[item].get, parameter[constant[Title]]]
call[call[name[item]][constant[replace]]][constant[Title]] assign[=] call[name[get_link], parameter[name[url]]]
call[name[item]][constant[getDate]] assign[=] call[name[self].localize_date, parameter[call[name[obj].getDate, parameter[]]]]
call[name[item]][constant[getValidFrom]] assign[=] call[name[self].localize_date, parameter[call[name[obj].getValidFrom, parameter[]]]]
call[name[item]][constant[getValidTo]] assign[=] call[name[self].localize_date, parameter[call[name[obj].getValidTo, parameter[]]]]
if compare[call[name[obj].getInternal, parameter[]] is constant[True]] begin[:]
call[call[name[item]][constant[replace]]][constant[getAgency]] assign[=] constant[]
call[name[item]][constant[state_class]] assign[=] binary_operation[constant[%s %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da1b231db40>, <ast.Constant object at 0x7da1b231ef20>]]]
call[name[item]][constant[getDocument]] assign[=] constant[]
call[call[name[item]][constant[replace]]][constant[getDocument]] assign[=] constant[]
variable[doc] assign[=] call[name[self].get_document, parameter[name[obj]]]
if <ast.BoolOp object at 0x7da18f00dcc0> begin[:]
variable[filename] assign[=] name[doc].filename
variable[download_url] assign[=] call[constant[{}/at_download/Document].format, parameter[name[url]]]
variable[anchor] assign[=] call[name[get_link], parameter[name[download_url], name[filename]]]
call[name[item]][constant[getDocument]] assign[=] name[filename]
call[call[name[item]][constant[replace]]][constant[getDocument]] assign[=] name[anchor]
if compare[name[obj] equal[==] name[self].latest_certificate] begin[:]
call[name[item]][constant[state_class]] assign[=] constant[state-published]
return[name[item]]
|
keyword[def] identifier[folderitem] ( identifier[self] , identifier[obj] , identifier[item] , identifier[index] ):
literal[string]
identifier[url] = identifier[item] . identifier[get] ( literal[string] )
identifier[title] = identifier[item] . identifier[get] ( literal[string] )
identifier[item] [ literal[string] ][ literal[string] ]= identifier[get_link] ( identifier[url] , identifier[value] = identifier[title] )
identifier[item] [ literal[string] ]= identifier[self] . identifier[localize_date] ( identifier[obj] . identifier[getDate] ())
identifier[item] [ literal[string] ]= identifier[self] . identifier[localize_date] ( identifier[obj] . identifier[getValidFrom] ())
identifier[item] [ literal[string] ]= identifier[self] . identifier[localize_date] ( identifier[obj] . identifier[getValidTo] ())
keyword[if] identifier[obj] . identifier[getInternal] () keyword[is] keyword[True] :
identifier[item] [ literal[string] ][ literal[string] ]= literal[string]
identifier[item] [ literal[string] ]= literal[string] %( identifier[item] [ literal[string] ], literal[string] )
identifier[item] [ literal[string] ]= literal[string]
identifier[item] [ literal[string] ][ literal[string] ]= literal[string]
identifier[doc] = identifier[self] . identifier[get_document] ( identifier[obj] )
keyword[if] identifier[doc] keyword[and] identifier[doc] . identifier[get_size] ()> literal[int] :
identifier[filename] = identifier[doc] . identifier[filename]
identifier[download_url] = literal[string] . identifier[format] ( identifier[url] )
identifier[anchor] = identifier[get_link] ( identifier[download_url] , identifier[filename] )
identifier[item] [ literal[string] ]= identifier[filename]
identifier[item] [ literal[string] ][ literal[string] ]= identifier[anchor]
keyword[if] identifier[obj] == identifier[self] . identifier[latest_certificate] :
identifier[item] [ literal[string] ]= literal[string]
keyword[elif] identifier[obj] keyword[in] identifier[self] . identifier[valid_certificates] :
identifier[item] [ literal[string] ]= literal[string]
keyword[else] :
identifier[img] = identifier[get_image] ( literal[string] , identifier[title] = identifier[t] ( identifier[_] ( literal[string] )))
identifier[item] [ literal[string] ][ literal[string] ]= literal[string] %( identifier[item] [ literal[string] ], identifier[img] )
identifier[item] [ literal[string] ]= literal[string]
keyword[return] identifier[item]
|
def folderitem(self, obj, item, index):
"""Augment folder listing item with additional data
"""
url = item.get('url')
title = item.get('Title')
item['replace']['Title'] = get_link(url, value=title)
item['getDate'] = self.localize_date(obj.getDate())
item['getValidFrom'] = self.localize_date(obj.getValidFrom())
item['getValidTo'] = self.localize_date(obj.getValidTo())
if obj.getInternal() is True:
item['replace']['getAgency'] = ''
item['state_class'] = '%s %s' % (item['state_class'], 'internalcertificate') # depends on [control=['if'], data=[]]
item['getDocument'] = ''
item['replace']['getDocument'] = ''
doc = self.get_document(obj)
if doc and doc.get_size() > 0:
filename = doc.filename
download_url = '{}/at_download/Document'.format(url)
anchor = get_link(download_url, filename)
item['getDocument'] = filename
item['replace']['getDocument'] = anchor # depends on [control=['if'], data=[]]
# Latest valid certificate
if obj == self.latest_certificate:
item['state_class'] = 'state-published' # depends on [control=['if'], data=[]]
# Valid certificate
elif obj in self.valid_certificates:
item['state_class'] = 'state-valid state-published' # depends on [control=['if'], data=[]]
else:
# Invalid certificates
img = get_image('exclamation.png', title=t(_('Out of date')))
item['replace']['getValidTo'] = '%s %s' % (item['getValidTo'], img)
item['state_class'] = 'state-invalid'
return item
|
def ones(shape, dtype=float64, order='C'):
"""
Create a local bolt array of ones.
Parameters
----------
shape : tuple
Dimensions of the desired array
dtype : data-type, optional, default=float64
The desired data-type for the array. (see numpy)
order : {'C', 'F', 'A'}, optional, default='C'
The order of the array. (see numpy)
Returns
-------
BoltArrayLocal
"""
from numpy import ones
return ConstructLocal._wrap(ones, shape, dtype, order)
|
def function[ones, parameter[shape, dtype, order]]:
constant[
Create a local bolt array of ones.
Parameters
----------
shape : tuple
Dimensions of the desired array
dtype : data-type, optional, default=float64
The desired data-type for the array. (see numpy)
order : {'C', 'F', 'A'}, optional, default='C'
The order of the array. (see numpy)
Returns
-------
BoltArrayLocal
]
from relative_module[numpy] import module[ones]
return[call[name[ConstructLocal]._wrap, parameter[name[ones], name[shape], name[dtype], name[order]]]]
|
keyword[def] identifier[ones] ( identifier[shape] , identifier[dtype] = identifier[float64] , identifier[order] = literal[string] ):
literal[string]
keyword[from] identifier[numpy] keyword[import] identifier[ones]
keyword[return] identifier[ConstructLocal] . identifier[_wrap] ( identifier[ones] , identifier[shape] , identifier[dtype] , identifier[order] )
|
def ones(shape, dtype=float64, order='C'):
"""
Create a local bolt array of ones.
Parameters
----------
shape : tuple
Dimensions of the desired array
dtype : data-type, optional, default=float64
The desired data-type for the array. (see numpy)
order : {'C', 'F', 'A'}, optional, default='C'
The order of the array. (see numpy)
Returns
-------
BoltArrayLocal
"""
from numpy import ones
return ConstructLocal._wrap(ones, shape, dtype, order)
|
def metatiles_are_equal(tile_data_1, tile_data_2):
"""
Return True if the two tiles are both zipped metatiles and contain the
same set of files with the same contents. This ignores the timestamp of
the individual files in the zip files, as well as their order or any
other metadata.
"""
try:
buf_1 = StringIO.StringIO(tile_data_1)
buf_2 = StringIO.StringIO(tile_data_2)
with zipfile.ZipFile(buf_1, mode='r') as zip_1:
with zipfile.ZipFile(buf_2, mode='r') as zip_2:
return _metatile_contents_equal(zip_1, zip_2)
except (StandardError, zipfile.BadZipFile, zipfile.LargeZipFile):
# errors, such as files not being proper zip files, or missing
# some attributes or contents that we expect, are treated as not
# equal.
pass
return False
|
def function[metatiles_are_equal, parameter[tile_data_1, tile_data_2]]:
constant[
Return True if the two tiles are both zipped metatiles and contain the
same set of files with the same contents. This ignores the timestamp of
the individual files in the zip files, as well as their order or any
other metadata.
]
<ast.Try object at 0x7da20e9626b0>
return[constant[False]]
|
keyword[def] identifier[metatiles_are_equal] ( identifier[tile_data_1] , identifier[tile_data_2] ):
literal[string]
keyword[try] :
identifier[buf_1] = identifier[StringIO] . identifier[StringIO] ( identifier[tile_data_1] )
identifier[buf_2] = identifier[StringIO] . identifier[StringIO] ( identifier[tile_data_2] )
keyword[with] identifier[zipfile] . identifier[ZipFile] ( identifier[buf_1] , identifier[mode] = literal[string] ) keyword[as] identifier[zip_1] :
keyword[with] identifier[zipfile] . identifier[ZipFile] ( identifier[buf_2] , identifier[mode] = literal[string] ) keyword[as] identifier[zip_2] :
keyword[return] identifier[_metatile_contents_equal] ( identifier[zip_1] , identifier[zip_2] )
keyword[except] ( identifier[StandardError] , identifier[zipfile] . identifier[BadZipFile] , identifier[zipfile] . identifier[LargeZipFile] ):
keyword[pass]
keyword[return] keyword[False]
|
def metatiles_are_equal(tile_data_1, tile_data_2):
"""
Return True if the two tiles are both zipped metatiles and contain the
same set of files with the same contents. This ignores the timestamp of
the individual files in the zip files, as well as their order or any
other metadata.
"""
try:
buf_1 = StringIO.StringIO(tile_data_1)
buf_2 = StringIO.StringIO(tile_data_2)
with zipfile.ZipFile(buf_1, mode='r') as zip_1:
with zipfile.ZipFile(buf_2, mode='r') as zip_2:
return _metatile_contents_equal(zip_1, zip_2) # depends on [control=['with'], data=['zip_2']] # depends on [control=['with'], data=['zip_1']] # depends on [control=['try'], data=[]]
except (StandardError, zipfile.BadZipFile, zipfile.LargeZipFile):
# errors, such as files not being proper zip files, or missing
# some attributes or contents that we expect, are treated as not
# equal.
pass # depends on [control=['except'], data=[]]
return False
|
def sort_arbitrarily_ordered_nexson(blob):
"""Primarily used for testing (getting nice diffs). Calls
sort_meta_elements and then sorts otu, node and edge list by id
"""
# otu, node and edge elements have no necessary orger in v0.0 or v1.0
v = detect_nexson_version(blob)
nex = get_nexml_el(blob)
if _is_by_id_hbf(v):
return blob
sort_meta_elements(blob)
for ob in _get_index_list_of_values(nex, 'otus'):
_inplace_sort_by_id(ob.get('otu', []))
for tb in _get_index_list_of_values(nex, 'trees'):
for tree in _get_index_list_of_values(tb, 'tree'):
_inplace_sort_by_id(tree.get('node', []))
_inplace_sort_by_id(tree.get('edge', []))
return blob
|
def function[sort_arbitrarily_ordered_nexson, parameter[blob]]:
constant[Primarily used for testing (getting nice diffs). Calls
sort_meta_elements and then sorts otu, node and edge list by id
]
variable[v] assign[=] call[name[detect_nexson_version], parameter[name[blob]]]
variable[nex] assign[=] call[name[get_nexml_el], parameter[name[blob]]]
if call[name[_is_by_id_hbf], parameter[name[v]]] begin[:]
return[name[blob]]
call[name[sort_meta_elements], parameter[name[blob]]]
for taget[name[ob]] in starred[call[name[_get_index_list_of_values], parameter[name[nex], constant[otus]]]] begin[:]
call[name[_inplace_sort_by_id], parameter[call[name[ob].get, parameter[constant[otu], list[[]]]]]]
for taget[name[tb]] in starred[call[name[_get_index_list_of_values], parameter[name[nex], constant[trees]]]] begin[:]
for taget[name[tree]] in starred[call[name[_get_index_list_of_values], parameter[name[tb], constant[tree]]]] begin[:]
call[name[_inplace_sort_by_id], parameter[call[name[tree].get, parameter[constant[node], list[[]]]]]]
call[name[_inplace_sort_by_id], parameter[call[name[tree].get, parameter[constant[edge], list[[]]]]]]
return[name[blob]]
|
keyword[def] identifier[sort_arbitrarily_ordered_nexson] ( identifier[blob] ):
literal[string]
identifier[v] = identifier[detect_nexson_version] ( identifier[blob] )
identifier[nex] = identifier[get_nexml_el] ( identifier[blob] )
keyword[if] identifier[_is_by_id_hbf] ( identifier[v] ):
keyword[return] identifier[blob]
identifier[sort_meta_elements] ( identifier[blob] )
keyword[for] identifier[ob] keyword[in] identifier[_get_index_list_of_values] ( identifier[nex] , literal[string] ):
identifier[_inplace_sort_by_id] ( identifier[ob] . identifier[get] ( literal[string] ,[]))
keyword[for] identifier[tb] keyword[in] identifier[_get_index_list_of_values] ( identifier[nex] , literal[string] ):
keyword[for] identifier[tree] keyword[in] identifier[_get_index_list_of_values] ( identifier[tb] , literal[string] ):
identifier[_inplace_sort_by_id] ( identifier[tree] . identifier[get] ( literal[string] ,[]))
identifier[_inplace_sort_by_id] ( identifier[tree] . identifier[get] ( literal[string] ,[]))
keyword[return] identifier[blob]
|
def sort_arbitrarily_ordered_nexson(blob):
"""Primarily used for testing (getting nice diffs). Calls
sort_meta_elements and then sorts otu, node and edge list by id
"""
# otu, node and edge elements have no necessary orger in v0.0 or v1.0
v = detect_nexson_version(blob)
nex = get_nexml_el(blob)
if _is_by_id_hbf(v):
return blob # depends on [control=['if'], data=[]]
sort_meta_elements(blob)
for ob in _get_index_list_of_values(nex, 'otus'):
_inplace_sort_by_id(ob.get('otu', [])) # depends on [control=['for'], data=['ob']]
for tb in _get_index_list_of_values(nex, 'trees'):
for tree in _get_index_list_of_values(tb, 'tree'):
_inplace_sort_by_id(tree.get('node', []))
_inplace_sort_by_id(tree.get('edge', [])) # depends on [control=['for'], data=['tree']] # depends on [control=['for'], data=['tb']]
return blob
|
def raw(self, raw):
"""
Sets the raw of this RuntimeRawExtension.
Raw is the underlying serialization of this object.
:param raw: The raw of this RuntimeRawExtension.
:type: str
"""
if raw is None:
raise ValueError("Invalid value for `raw`, must not be `None`")
if raw is not None and not re.search('^(?:[A-Za-z0-9+\/]{4})*(?:[A-Za-z0-9+\/]{2}==|[A-Za-z0-9+\/]{3}=)?$', raw):
raise ValueError("Invalid value for `raw`, must be a follow pattern or equal to `/^(?:[A-Za-z0-9+\/]{4})*(?:[A-Za-z0-9+\/]{2}==|[A-Za-z0-9+\/]{3}=)?$/`")
self._raw = raw
|
def function[raw, parameter[self, raw]]:
constant[
Sets the raw of this RuntimeRawExtension.
Raw is the underlying serialization of this object.
:param raw: The raw of this RuntimeRawExtension.
:type: str
]
if compare[name[raw] is constant[None]] begin[:]
<ast.Raise object at 0x7da20c6aa4d0>
if <ast.BoolOp object at 0x7da20c6a92a0> begin[:]
<ast.Raise object at 0x7da20c6a8dc0>
name[self]._raw assign[=] name[raw]
|
keyword[def] identifier[raw] ( identifier[self] , identifier[raw] ):
literal[string]
keyword[if] identifier[raw] keyword[is] keyword[None] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[raw] keyword[is] keyword[not] keyword[None] keyword[and] keyword[not] identifier[re] . identifier[search] ( literal[string] , identifier[raw] ):
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[self] . identifier[_raw] = identifier[raw]
|
def raw(self, raw):
"""
Sets the raw of this RuntimeRawExtension.
Raw is the underlying serialization of this object.
:param raw: The raw of this RuntimeRawExtension.
:type: str
"""
if raw is None:
raise ValueError('Invalid value for `raw`, must not be `None`') # depends on [control=['if'], data=[]]
if raw is not None and (not re.search('^(?:[A-Za-z0-9+\\/]{4})*(?:[A-Za-z0-9+\\/]{2}==|[A-Za-z0-9+\\/]{3}=)?$', raw)):
raise ValueError('Invalid value for `raw`, must be a follow pattern or equal to `/^(?:[A-Za-z0-9+\\/]{4})*(?:[A-Za-z0-9+\\/]{2}==|[A-Za-z0-9+\\/]{3}=)?$/`') # depends on [control=['if'], data=[]]
self._raw = raw
|
def _parse2(self, fragments):
"""
Second stage of parsing: convert ``fragments`` into the list of code objects.
This method in fact does more than simple conversion of fragments into objects. It also attempts to group
certain fragments into one, if they in fact seem like a single piece. For example, decorators are grouped
together with the objects they decorate, comments that explain certain objects or statements are attached to
those as well.
"""
out = []
tokens = self._tokens
i = 0
saved_start = None
while i < len(fragments):
ftype, start, end = fragments[i]
assert start == (0 if i == 0 else fragments[i - 1][2]), "Discontinuity in `fragments` at i = %d" % i
if ftype == "whitespace" or ftype == "end":
assert saved_start is None
obj = Whitespace(tokens[start:end])
elif ftype == "docstring":
assert saved_start is None
obj = Docstring(tokens[start:end])
elif ftype == "comment":
assert saved_start is None
next_frag = fragments[i + 1][0] if i + 1 < len(fragments) else "end"
if next_frag in {"docstring", "end", "whitespace", "comment", "banner-comment"}:
# Possibly merge with the previous Comment instance
# if (len(out) >= 2 and isinstance(out[-1], Whitespace) and isinstance(out[-2], Comment) and
# out[-2].type != "banner"):
# obj = Comment(out[-2].tokens + out[-1].tokens + tokens[start:end])
# del out[-2:]
# else:
obj = Comment(tokens[start:end])
elif next_frag in {"decorator", "import", "def", "class", "code"}:
# save this comment for later
saved_start = start
i += 1
continue
else:
raise RuntimeError("Unknown token type %s" % next_frag)
elif ftype == "banner-comment":
assert saved_start is None
obj = Comment(tokens[start:end])
obj.type = "banner"
elif ftype == "decorator":
if saved_start is None:
saved_start = start
i += 1
continue
elif ftype == "import":
real_start = start if saved_start is None else saved_start
saved_start = None
obj = ImportBlock(tokens[real_start:end])
elif ftype in {"class", "def"}:
real_start = start if saved_start is None else saved_start
saved_start = None
obj = Callable(tokens[real_start:end])
obj.type = ftype
elif ftype == "code":
real_start = start if saved_start is None else saved_start
saved_start = None
obj = Expression(tokens[real_start:end])
else:
assert False, "Unknown fragment type %s" % ftype
out.append(obj)
i += 1
return out
|
def function[_parse2, parameter[self, fragments]]:
constant[
Second stage of parsing: convert ``fragments`` into the list of code objects.
This method in fact does more than simple conversion of fragments into objects. It also attempts to group
certain fragments into one, if they in fact seem like a single piece. For example, decorators are grouped
together with the objects they decorate, comments that explain certain objects or statements are attached to
those as well.
]
variable[out] assign[=] list[[]]
variable[tokens] assign[=] name[self]._tokens
variable[i] assign[=] constant[0]
variable[saved_start] assign[=] constant[None]
while compare[name[i] less[<] call[name[len], parameter[name[fragments]]]] begin[:]
<ast.Tuple object at 0x7da20c6e6980> assign[=] call[name[fragments]][name[i]]
assert[compare[name[start] equal[==] <ast.IfExp object at 0x7da20c6e5ba0>]]
if <ast.BoolOp object at 0x7da20c6e5330> begin[:]
assert[compare[name[saved_start] is constant[None]]]
variable[obj] assign[=] call[name[Whitespace], parameter[call[name[tokens]][<ast.Slice object at 0x7da20c6e7790>]]]
call[name[out].append, parameter[name[obj]]]
<ast.AugAssign object at 0x7da2054a67d0>
return[name[out]]
|
keyword[def] identifier[_parse2] ( identifier[self] , identifier[fragments] ):
literal[string]
identifier[out] =[]
identifier[tokens] = identifier[self] . identifier[_tokens]
identifier[i] = literal[int]
identifier[saved_start] = keyword[None]
keyword[while] identifier[i] < identifier[len] ( identifier[fragments] ):
identifier[ftype] , identifier[start] , identifier[end] = identifier[fragments] [ identifier[i] ]
keyword[assert] identifier[start] ==( literal[int] keyword[if] identifier[i] == literal[int] keyword[else] identifier[fragments] [ identifier[i] - literal[int] ][ literal[int] ]), literal[string] % identifier[i]
keyword[if] identifier[ftype] == literal[string] keyword[or] identifier[ftype] == literal[string] :
keyword[assert] identifier[saved_start] keyword[is] keyword[None]
identifier[obj] = identifier[Whitespace] ( identifier[tokens] [ identifier[start] : identifier[end] ])
keyword[elif] identifier[ftype] == literal[string] :
keyword[assert] identifier[saved_start] keyword[is] keyword[None]
identifier[obj] = identifier[Docstring] ( identifier[tokens] [ identifier[start] : identifier[end] ])
keyword[elif] identifier[ftype] == literal[string] :
keyword[assert] identifier[saved_start] keyword[is] keyword[None]
identifier[next_frag] = identifier[fragments] [ identifier[i] + literal[int] ][ literal[int] ] keyword[if] identifier[i] + literal[int] < identifier[len] ( identifier[fragments] ) keyword[else] literal[string]
keyword[if] identifier[next_frag] keyword[in] { literal[string] , literal[string] , literal[string] , literal[string] , literal[string] }:
identifier[obj] = identifier[Comment] ( identifier[tokens] [ identifier[start] : identifier[end] ])
keyword[elif] identifier[next_frag] keyword[in] { literal[string] , literal[string] , literal[string] , literal[string] , literal[string] }:
identifier[saved_start] = identifier[start]
identifier[i] += literal[int]
keyword[continue]
keyword[else] :
keyword[raise] identifier[RuntimeError] ( literal[string] % identifier[next_frag] )
keyword[elif] identifier[ftype] == literal[string] :
keyword[assert] identifier[saved_start] keyword[is] keyword[None]
identifier[obj] = identifier[Comment] ( identifier[tokens] [ identifier[start] : identifier[end] ])
identifier[obj] . identifier[type] = literal[string]
keyword[elif] identifier[ftype] == literal[string] :
keyword[if] identifier[saved_start] keyword[is] keyword[None] :
identifier[saved_start] = identifier[start]
identifier[i] += literal[int]
keyword[continue]
keyword[elif] identifier[ftype] == literal[string] :
identifier[real_start] = identifier[start] keyword[if] identifier[saved_start] keyword[is] keyword[None] keyword[else] identifier[saved_start]
identifier[saved_start] = keyword[None]
identifier[obj] = identifier[ImportBlock] ( identifier[tokens] [ identifier[real_start] : identifier[end] ])
keyword[elif] identifier[ftype] keyword[in] { literal[string] , literal[string] }:
identifier[real_start] = identifier[start] keyword[if] identifier[saved_start] keyword[is] keyword[None] keyword[else] identifier[saved_start]
identifier[saved_start] = keyword[None]
identifier[obj] = identifier[Callable] ( identifier[tokens] [ identifier[real_start] : identifier[end] ])
identifier[obj] . identifier[type] = identifier[ftype]
keyword[elif] identifier[ftype] == literal[string] :
identifier[real_start] = identifier[start] keyword[if] identifier[saved_start] keyword[is] keyword[None] keyword[else] identifier[saved_start]
identifier[saved_start] = keyword[None]
identifier[obj] = identifier[Expression] ( identifier[tokens] [ identifier[real_start] : identifier[end] ])
keyword[else] :
keyword[assert] keyword[False] , literal[string] % identifier[ftype]
identifier[out] . identifier[append] ( identifier[obj] )
identifier[i] += literal[int]
keyword[return] identifier[out]
|
def _parse2(self, fragments):
"""
Second stage of parsing: convert ``fragments`` into the list of code objects.
This method in fact does more than simple conversion of fragments into objects. It also attempts to group
certain fragments into one, if they in fact seem like a single piece. For example, decorators are grouped
together with the objects they decorate, comments that explain certain objects or statements are attached to
those as well.
"""
out = []
tokens = self._tokens
i = 0
saved_start = None
while i < len(fragments):
(ftype, start, end) = fragments[i]
assert start == (0 if i == 0 else fragments[i - 1][2]), 'Discontinuity in `fragments` at i = %d' % i
if ftype == 'whitespace' or ftype == 'end':
assert saved_start is None
obj = Whitespace(tokens[start:end]) # depends on [control=['if'], data=[]]
elif ftype == 'docstring':
assert saved_start is None
obj = Docstring(tokens[start:end]) # depends on [control=['if'], data=[]]
elif ftype == 'comment':
assert saved_start is None
next_frag = fragments[i + 1][0] if i + 1 < len(fragments) else 'end'
if next_frag in {'docstring', 'end', 'whitespace', 'comment', 'banner-comment'}:
# Possibly merge with the previous Comment instance
# if (len(out) >= 2 and isinstance(out[-1], Whitespace) and isinstance(out[-2], Comment) and
# out[-2].type != "banner"):
# obj = Comment(out[-2].tokens + out[-1].tokens + tokens[start:end])
# del out[-2:]
# else:
obj = Comment(tokens[start:end]) # depends on [control=['if'], data=[]]
elif next_frag in {'decorator', 'import', 'def', 'class', 'code'}:
# save this comment for later
saved_start = start
i += 1
continue # depends on [control=['if'], data=[]]
else:
raise RuntimeError('Unknown token type %s' % next_frag) # depends on [control=['if'], data=[]]
elif ftype == 'banner-comment':
assert saved_start is None
obj = Comment(tokens[start:end])
obj.type = 'banner' # depends on [control=['if'], data=[]]
elif ftype == 'decorator':
if saved_start is None:
saved_start = start # depends on [control=['if'], data=['saved_start']]
i += 1
continue # depends on [control=['if'], data=[]]
elif ftype == 'import':
real_start = start if saved_start is None else saved_start
saved_start = None
obj = ImportBlock(tokens[real_start:end]) # depends on [control=['if'], data=[]]
elif ftype in {'class', 'def'}:
real_start = start if saved_start is None else saved_start
saved_start = None
obj = Callable(tokens[real_start:end])
obj.type = ftype # depends on [control=['if'], data=['ftype']]
elif ftype == 'code':
real_start = start if saved_start is None else saved_start
saved_start = None
obj = Expression(tokens[real_start:end]) # depends on [control=['if'], data=[]]
else:
assert False, 'Unknown fragment type %s' % ftype
out.append(obj)
i += 1 # depends on [control=['while'], data=['i']]
return out
|
def _replace(expr, pat, repl, n=-1, case=True, flags=0, regex=True):
"""
Replace occurrence of pattern/regex in the sequence or scalar with some other string.
Equivalent to str.replace()
:param expr:
:param pat: Character sequence or regular expression
:param repl: Replacement
:param n: Number of replacements to make from start
:param case: if True, case sensitive
:param flags: re module flag, e.g. re.IGNORECASE
:return: sequence or scalar
"""
return _string_op(expr, Replace, _pat=pat, _repl=repl,
_n=n, _case=case, _flags=flags, _regex=regex)
|
def function[_replace, parameter[expr, pat, repl, n, case, flags, regex]]:
constant[
Replace occurrence of pattern/regex in the sequence or scalar with some other string.
Equivalent to str.replace()
:param expr:
:param pat: Character sequence or regular expression
:param repl: Replacement
:param n: Number of replacements to make from start
:param case: if True, case sensitive
:param flags: re module flag, e.g. re.IGNORECASE
:return: sequence or scalar
]
return[call[name[_string_op], parameter[name[expr], name[Replace]]]]
|
keyword[def] identifier[_replace] ( identifier[expr] , identifier[pat] , identifier[repl] , identifier[n] =- literal[int] , identifier[case] = keyword[True] , identifier[flags] = literal[int] , identifier[regex] = keyword[True] ):
literal[string]
keyword[return] identifier[_string_op] ( identifier[expr] , identifier[Replace] , identifier[_pat] = identifier[pat] , identifier[_repl] = identifier[repl] ,
identifier[_n] = identifier[n] , identifier[_case] = identifier[case] , identifier[_flags] = identifier[flags] , identifier[_regex] = identifier[regex] )
|
def _replace(expr, pat, repl, n=-1, case=True, flags=0, regex=True):
"""
Replace occurrence of pattern/regex in the sequence or scalar with some other string.
Equivalent to str.replace()
:param expr:
:param pat: Character sequence or regular expression
:param repl: Replacement
:param n: Number of replacements to make from start
:param case: if True, case sensitive
:param flags: re module flag, e.g. re.IGNORECASE
:return: sequence or scalar
"""
return _string_op(expr, Replace, _pat=pat, _repl=repl, _n=n, _case=case, _flags=flags, _regex=regex)
|
def _request(self, typ, id=0, method='GET', params=None, data=None, url=None):
"""
send the request, return response obj
"""
headers = { "Accept": "application/json" }
auth = None
if self.user:
auth = (self.user, self.password)
if not url:
if id:
url = "%s/%s/%s" % (self.url, typ, id)
else:
url = "%s/%s" % (self.url, typ)
return requests.request(method, url, params=params, data=data, auth=auth, headers=headers)
|
def function[_request, parameter[self, typ, id, method, params, data, url]]:
constant[
send the request, return response obj
]
variable[headers] assign[=] dictionary[[<ast.Constant object at 0x7da18bcc9ae0>], [<ast.Constant object at 0x7da18bccb700>]]
variable[auth] assign[=] constant[None]
if name[self].user begin[:]
variable[auth] assign[=] tuple[[<ast.Attribute object at 0x7da18bcc9f60>, <ast.Attribute object at 0x7da18bccab90>]]
if <ast.UnaryOp object at 0x7da18bccac80> begin[:]
if name[id] begin[:]
variable[url] assign[=] binary_operation[constant[%s/%s/%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da18bcc9630>, <ast.Name object at 0x7da18bccb850>, <ast.Name object at 0x7da18bcc9450>]]]
return[call[name[requests].request, parameter[name[method], name[url]]]]
|
keyword[def] identifier[_request] ( identifier[self] , identifier[typ] , identifier[id] = literal[int] , identifier[method] = literal[string] , identifier[params] = keyword[None] , identifier[data] = keyword[None] , identifier[url] = keyword[None] ):
literal[string]
identifier[headers] ={ literal[string] : literal[string] }
identifier[auth] = keyword[None]
keyword[if] identifier[self] . identifier[user] :
identifier[auth] =( identifier[self] . identifier[user] , identifier[self] . identifier[password] )
keyword[if] keyword[not] identifier[url] :
keyword[if] identifier[id] :
identifier[url] = literal[string] %( identifier[self] . identifier[url] , identifier[typ] , identifier[id] )
keyword[else] :
identifier[url] = literal[string] %( identifier[self] . identifier[url] , identifier[typ] )
keyword[return] identifier[requests] . identifier[request] ( identifier[method] , identifier[url] , identifier[params] = identifier[params] , identifier[data] = identifier[data] , identifier[auth] = identifier[auth] , identifier[headers] = identifier[headers] )
|
def _request(self, typ, id=0, method='GET', params=None, data=None, url=None):
"""
send the request, return response obj
"""
headers = {'Accept': 'application/json'}
auth = None
if self.user:
auth = (self.user, self.password) # depends on [control=['if'], data=[]]
if not url:
if id:
url = '%s/%s/%s' % (self.url, typ, id) # depends on [control=['if'], data=[]]
else:
url = '%s/%s' % (self.url, typ) # depends on [control=['if'], data=[]]
return requests.request(method, url, params=params, data=data, auth=auth, headers=headers)
|
def set_surfaces(self, df_surfaces):
'''
Reset the contents of the tree view to show one row per surface, with
a column containing the alpha multiplier for the corresponding surface
(in the range [0, 1]), indexed by surface name.
For example:
| index | alpha |
|--------|--------|
| layer1 | 1.00 |
| layer2 | 0.65 |
| ... | ... |
'''
for column in self.treeview_layers.get_columns():
self.treeview_layers.remove_column(column)
self.df_surfaces = pd.DataFrame(df_surfaces.index.values,
columns=[df_surfaces.index.name or
'index'],
index=df_surfaces.index)
if 'alpha' in df_surfaces:
self.df_surfaces['alpha'] = df_surfaces.alpha.copy()
else:
self.df_surfaces['alpha'] = 1.
self.df_py_dtypes, self.list_store = get_list_store(self.df_surfaces)
add_columns(self.treeview_layers, self.df_py_dtypes, self.list_store)
self._inserted_row_path = None
# Adjustment for alpha multiplier for each surface.
adjustment = gtk.Adjustment(1, 0, 1, .01, .1, 0)
column = [c for c in self.treeview_layers.get_columns()
if c.get_name() == 'alpha'][0]
cell_renderer = column.get_cells()[0]
cell_renderer.set_properties(digits=2, editable=True,
adjustment=adjustment)
cell_renderer.connect('edited', self.on_edited, column,
self.df_py_dtypes, self.list_store,
self.df_surfaces)
set_column_format(column, self.df_py_dtypes.ix['alpha'].i,
'{value:.2f}', cell_renderer=cell_renderer)
# Bind handlers for reordering of surface layers.
for k in ('inserted', 'deleted'):
self.list_store.connect('row-' + k, getattr(self, 'on_row_' + k))
|
def function[set_surfaces, parameter[self, df_surfaces]]:
constant[
Reset the contents of the tree view to show one row per surface, with
a column containing the alpha multiplier for the corresponding surface
(in the range [0, 1]), indexed by surface name.
For example:
| index | alpha |
|--------|--------|
| layer1 | 1.00 |
| layer2 | 0.65 |
| ... | ... |
]
for taget[name[column]] in starred[call[name[self].treeview_layers.get_columns, parameter[]]] begin[:]
call[name[self].treeview_layers.remove_column, parameter[name[column]]]
name[self].df_surfaces assign[=] call[name[pd].DataFrame, parameter[name[df_surfaces].index.values]]
if compare[constant[alpha] in name[df_surfaces]] begin[:]
call[name[self].df_surfaces][constant[alpha]] assign[=] call[name[df_surfaces].alpha.copy, parameter[]]
<ast.Tuple object at 0x7da2047e90f0> assign[=] call[name[get_list_store], parameter[name[self].df_surfaces]]
call[name[add_columns], parameter[name[self].treeview_layers, name[self].df_py_dtypes, name[self].list_store]]
name[self]._inserted_row_path assign[=] constant[None]
variable[adjustment] assign[=] call[name[gtk].Adjustment, parameter[constant[1], constant[0], constant[1], constant[0.01], constant[0.1], constant[0]]]
variable[column] assign[=] call[<ast.ListComp object at 0x7da2047e8ac0>][constant[0]]
variable[cell_renderer] assign[=] call[call[name[column].get_cells, parameter[]]][constant[0]]
call[name[cell_renderer].set_properties, parameter[]]
call[name[cell_renderer].connect, parameter[constant[edited], name[self].on_edited, name[column], name[self].df_py_dtypes, name[self].list_store, name[self].df_surfaces]]
call[name[set_column_format], parameter[name[column], call[name[self].df_py_dtypes.ix][constant[alpha]].i, constant[{value:.2f}]]]
for taget[name[k]] in starred[tuple[[<ast.Constant object at 0x7da20e9541f0>, <ast.Constant object at 0x7da20e957cd0>]]] begin[:]
call[name[self].list_store.connect, parameter[binary_operation[constant[row-] + name[k]], call[name[getattr], parameter[name[self], binary_operation[constant[on_row_] + name[k]]]]]]
|
keyword[def] identifier[set_surfaces] ( identifier[self] , identifier[df_surfaces] ):
literal[string]
keyword[for] identifier[column] keyword[in] identifier[self] . identifier[treeview_layers] . identifier[get_columns] ():
identifier[self] . identifier[treeview_layers] . identifier[remove_column] ( identifier[column] )
identifier[self] . identifier[df_surfaces] = identifier[pd] . identifier[DataFrame] ( identifier[df_surfaces] . identifier[index] . identifier[values] ,
identifier[columns] =[ identifier[df_surfaces] . identifier[index] . identifier[name] keyword[or]
literal[string] ],
identifier[index] = identifier[df_surfaces] . identifier[index] )
keyword[if] literal[string] keyword[in] identifier[df_surfaces] :
identifier[self] . identifier[df_surfaces] [ literal[string] ]= identifier[df_surfaces] . identifier[alpha] . identifier[copy] ()
keyword[else] :
identifier[self] . identifier[df_surfaces] [ literal[string] ]= literal[int]
identifier[self] . identifier[df_py_dtypes] , identifier[self] . identifier[list_store] = identifier[get_list_store] ( identifier[self] . identifier[df_surfaces] )
identifier[add_columns] ( identifier[self] . identifier[treeview_layers] , identifier[self] . identifier[df_py_dtypes] , identifier[self] . identifier[list_store] )
identifier[self] . identifier[_inserted_row_path] = keyword[None]
identifier[adjustment] = identifier[gtk] . identifier[Adjustment] ( literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] )
identifier[column] =[ identifier[c] keyword[for] identifier[c] keyword[in] identifier[self] . identifier[treeview_layers] . identifier[get_columns] ()
keyword[if] identifier[c] . identifier[get_name] ()== literal[string] ][ literal[int] ]
identifier[cell_renderer] = identifier[column] . identifier[get_cells] ()[ literal[int] ]
identifier[cell_renderer] . identifier[set_properties] ( identifier[digits] = literal[int] , identifier[editable] = keyword[True] ,
identifier[adjustment] = identifier[adjustment] )
identifier[cell_renderer] . identifier[connect] ( literal[string] , identifier[self] . identifier[on_edited] , identifier[column] ,
identifier[self] . identifier[df_py_dtypes] , identifier[self] . identifier[list_store] ,
identifier[self] . identifier[df_surfaces] )
identifier[set_column_format] ( identifier[column] , identifier[self] . identifier[df_py_dtypes] . identifier[ix] [ literal[string] ]. identifier[i] ,
literal[string] , identifier[cell_renderer] = identifier[cell_renderer] )
keyword[for] identifier[k] keyword[in] ( literal[string] , literal[string] ):
identifier[self] . identifier[list_store] . identifier[connect] ( literal[string] + identifier[k] , identifier[getattr] ( identifier[self] , literal[string] + identifier[k] ))
|
def set_surfaces(self, df_surfaces):
"""
Reset the contents of the tree view to show one row per surface, with
a column containing the alpha multiplier for the corresponding surface
(in the range [0, 1]), indexed by surface name.
For example:
| index | alpha |
|--------|--------|
| layer1 | 1.00 |
| layer2 | 0.65 |
| ... | ... |
"""
for column in self.treeview_layers.get_columns():
self.treeview_layers.remove_column(column) # depends on [control=['for'], data=['column']]
self.df_surfaces = pd.DataFrame(df_surfaces.index.values, columns=[df_surfaces.index.name or 'index'], index=df_surfaces.index)
if 'alpha' in df_surfaces:
self.df_surfaces['alpha'] = df_surfaces.alpha.copy() # depends on [control=['if'], data=['df_surfaces']]
else:
self.df_surfaces['alpha'] = 1.0
(self.df_py_dtypes, self.list_store) = get_list_store(self.df_surfaces)
add_columns(self.treeview_layers, self.df_py_dtypes, self.list_store)
self._inserted_row_path = None
# Adjustment for alpha multiplier for each surface.
adjustment = gtk.Adjustment(1, 0, 1, 0.01, 0.1, 0)
column = [c for c in self.treeview_layers.get_columns() if c.get_name() == 'alpha'][0]
cell_renderer = column.get_cells()[0]
cell_renderer.set_properties(digits=2, editable=True, adjustment=adjustment)
cell_renderer.connect('edited', self.on_edited, column, self.df_py_dtypes, self.list_store, self.df_surfaces)
set_column_format(column, self.df_py_dtypes.ix['alpha'].i, '{value:.2f}', cell_renderer=cell_renderer)
# Bind handlers for reordering of surface layers.
for k in ('inserted', 'deleted'):
self.list_store.connect('row-' + k, getattr(self, 'on_row_' + k)) # depends on [control=['for'], data=['k']]
|
def profile_option(f):
"""
Configures --profile option for CLI
:param f: Callback Function to be passed to Click
"""
def callback(ctx, param, value):
state = ctx.ensure_object(Context)
state.profile = value
return value
return click.option('--profile',
expose_value=False,
help='Select a specific profile from your credential file to get AWS credentials.',
callback=callback)(f)
|
def function[profile_option, parameter[f]]:
constant[
Configures --profile option for CLI
:param f: Callback Function to be passed to Click
]
def function[callback, parameter[ctx, param, value]]:
variable[state] assign[=] call[name[ctx].ensure_object, parameter[name[Context]]]
name[state].profile assign[=] name[value]
return[name[value]]
return[call[call[name[click].option, parameter[constant[--profile]]], parameter[name[f]]]]
|
keyword[def] identifier[profile_option] ( identifier[f] ):
literal[string]
keyword[def] identifier[callback] ( identifier[ctx] , identifier[param] , identifier[value] ):
identifier[state] = identifier[ctx] . identifier[ensure_object] ( identifier[Context] )
identifier[state] . identifier[profile] = identifier[value]
keyword[return] identifier[value]
keyword[return] identifier[click] . identifier[option] ( literal[string] ,
identifier[expose_value] = keyword[False] ,
identifier[help] = literal[string] ,
identifier[callback] = identifier[callback] )( identifier[f] )
|
def profile_option(f):
"""
Configures --profile option for CLI
:param f: Callback Function to be passed to Click
"""
def callback(ctx, param, value):
state = ctx.ensure_object(Context)
state.profile = value
return value
return click.option('--profile', expose_value=False, help='Select a specific profile from your credential file to get AWS credentials.', callback=callback)(f)
|
def merge_into(self, other):
"""Merge two simple selectors together. This is expected to be the
selector being injected into `other` -- that is, `other` is the
selector for a block using ``@extend``, and `self` is a selector being
extended.
Element tokens must come first, and pseudo-element tokens must come
last, and there can only be one of each. The final selector thus looks
something like::
[element] [misc self tokens] [misc other tokens] [pseudo-element]
This method does not check for duplicate tokens; those are assumed to
have been removed earlier, during the search for a hinge.
"""
# TODO it shouldn't be possible to merge two elements or two pseudo
# elements, /but/ it shouldn't just be a fatal error here -- it
# shouldn't even be considered a candidate for extending!
# TODO this is slightly inconsistent with ruby, which treats a trailing
# set of self tokens like ':before.foo' as a single unit to be stuck at
# the end. but that's completely bogus anyway.
element = []
middle = []
pseudo = []
for token in self.tokens + other.tokens:
if token in CSS2_PSEUDO_ELEMENTS or token.startswith('::'):
pseudo.append(token)
elif token[0] in BODY_TOKEN_SIGILS:
middle.append(token)
else:
element.append(token)
new_tokens = element + middle + pseudo
if self.combinator == ' ' or self.combinator == other.combinator:
combinator = other.combinator
elif other.combinator == ' ':
combinator = self.combinator
else:
raise ValueError(
"Don't know how to merge conflicting combinators: "
"{0!r} and {1!r}"
.format(self, other))
return type(self)(combinator, new_tokens)
|
def function[merge_into, parameter[self, other]]:
constant[Merge two simple selectors together. This is expected to be the
selector being injected into `other` -- that is, `other` is the
selector for a block using ``@extend``, and `self` is a selector being
extended.
Element tokens must come first, and pseudo-element tokens must come
last, and there can only be one of each. The final selector thus looks
something like::
[element] [misc self tokens] [misc other tokens] [pseudo-element]
This method does not check for duplicate tokens; those are assumed to
have been removed earlier, during the search for a hinge.
]
variable[element] assign[=] list[[]]
variable[middle] assign[=] list[[]]
variable[pseudo] assign[=] list[[]]
for taget[name[token]] in starred[binary_operation[name[self].tokens + name[other].tokens]] begin[:]
if <ast.BoolOp object at 0x7da204564f10> begin[:]
call[name[pseudo].append, parameter[name[token]]]
variable[new_tokens] assign[=] binary_operation[binary_operation[name[element] + name[middle]] + name[pseudo]]
if <ast.BoolOp object at 0x7da204565c00> begin[:]
variable[combinator] assign[=] name[other].combinator
return[call[call[name[type], parameter[name[self]]], parameter[name[combinator], name[new_tokens]]]]
|
keyword[def] identifier[merge_into] ( identifier[self] , identifier[other] ):
literal[string]
identifier[element] =[]
identifier[middle] =[]
identifier[pseudo] =[]
keyword[for] identifier[token] keyword[in] identifier[self] . identifier[tokens] + identifier[other] . identifier[tokens] :
keyword[if] identifier[token] keyword[in] identifier[CSS2_PSEUDO_ELEMENTS] keyword[or] identifier[token] . identifier[startswith] ( literal[string] ):
identifier[pseudo] . identifier[append] ( identifier[token] )
keyword[elif] identifier[token] [ literal[int] ] keyword[in] identifier[BODY_TOKEN_SIGILS] :
identifier[middle] . identifier[append] ( identifier[token] )
keyword[else] :
identifier[element] . identifier[append] ( identifier[token] )
identifier[new_tokens] = identifier[element] + identifier[middle] + identifier[pseudo]
keyword[if] identifier[self] . identifier[combinator] == literal[string] keyword[or] identifier[self] . identifier[combinator] == identifier[other] . identifier[combinator] :
identifier[combinator] = identifier[other] . identifier[combinator]
keyword[elif] identifier[other] . identifier[combinator] == literal[string] :
identifier[combinator] = identifier[self] . identifier[combinator]
keyword[else] :
keyword[raise] identifier[ValueError] (
literal[string]
literal[string]
. identifier[format] ( identifier[self] , identifier[other] ))
keyword[return] identifier[type] ( identifier[self] )( identifier[combinator] , identifier[new_tokens] )
|
def merge_into(self, other):
"""Merge two simple selectors together. This is expected to be the
selector being injected into `other` -- that is, `other` is the
selector for a block using ``@extend``, and `self` is a selector being
extended.
Element tokens must come first, and pseudo-element tokens must come
last, and there can only be one of each. The final selector thus looks
something like::
[element] [misc self tokens] [misc other tokens] [pseudo-element]
This method does not check for duplicate tokens; those are assumed to
have been removed earlier, during the search for a hinge.
"""
# TODO it shouldn't be possible to merge two elements or two pseudo
# elements, /but/ it shouldn't just be a fatal error here -- it
# shouldn't even be considered a candidate for extending!
# TODO this is slightly inconsistent with ruby, which treats a trailing
# set of self tokens like ':before.foo' as a single unit to be stuck at
# the end. but that's completely bogus anyway.
element = []
middle = []
pseudo = []
for token in self.tokens + other.tokens:
if token in CSS2_PSEUDO_ELEMENTS or token.startswith('::'):
pseudo.append(token) # depends on [control=['if'], data=[]]
elif token[0] in BODY_TOKEN_SIGILS:
middle.append(token) # depends on [control=['if'], data=[]]
else:
element.append(token) # depends on [control=['for'], data=['token']]
new_tokens = element + middle + pseudo
if self.combinator == ' ' or self.combinator == other.combinator:
combinator = other.combinator # depends on [control=['if'], data=[]]
elif other.combinator == ' ':
combinator = self.combinator # depends on [control=['if'], data=[]]
else:
raise ValueError("Don't know how to merge conflicting combinators: {0!r} and {1!r}".format(self, other))
return type(self)(combinator, new_tokens)
|
def command_drop_tables(self, meta_name=None):
'''
Drops all tables without dropping a database::
./manage.py sqla:drop_tables [meta_name]
'''
answer = six.moves.input(u'All data will lost. Are you sure? [y/N] ')
if answer.strip().lower()!='y':
sys.exit('Interrupted')
def _drop_metadata_tables(metadata):
table = next(six.itervalues(metadata.tables), None)
if table is None:
print('Failed to find engine')
else:
engine = self.session.get_bind(clause=table)
drop_everything(engine)
print('Done')
if isinstance(self.metadata, MetaData):
print('Droping tables... ', end='')
_drop_metadata_tables(self.metadata)
else:
for current_meta_name, metadata in self.metadata.items():
if meta_name not in (current_meta_name, None):
continue
print('Droping tables for {}... '.format(current_meta_name),
end='')
_drop_metadata_tables(metadata)
|
def function[command_drop_tables, parameter[self, meta_name]]:
constant[
Drops all tables without dropping a database::
./manage.py sqla:drop_tables [meta_name]
]
variable[answer] assign[=] call[name[six].moves.input, parameter[constant[All data will lost. Are you sure? [y/N] ]]]
if compare[call[call[name[answer].strip, parameter[]].lower, parameter[]] not_equal[!=] constant[y]] begin[:]
call[name[sys].exit, parameter[constant[Interrupted]]]
def function[_drop_metadata_tables, parameter[metadata]]:
variable[table] assign[=] call[name[next], parameter[call[name[six].itervalues, parameter[name[metadata].tables]], constant[None]]]
if compare[name[table] is constant[None]] begin[:]
call[name[print], parameter[constant[Failed to find engine]]]
if call[name[isinstance], parameter[name[self].metadata, name[MetaData]]] begin[:]
call[name[print], parameter[constant[Droping tables... ]]]
call[name[_drop_metadata_tables], parameter[name[self].metadata]]
|
keyword[def] identifier[command_drop_tables] ( identifier[self] , identifier[meta_name] = keyword[None] ):
literal[string]
identifier[answer] = identifier[six] . identifier[moves] . identifier[input] ( literal[string] )
keyword[if] identifier[answer] . identifier[strip] (). identifier[lower] ()!= literal[string] :
identifier[sys] . identifier[exit] ( literal[string] )
keyword[def] identifier[_drop_metadata_tables] ( identifier[metadata] ):
identifier[table] = identifier[next] ( identifier[six] . identifier[itervalues] ( identifier[metadata] . identifier[tables] ), keyword[None] )
keyword[if] identifier[table] keyword[is] keyword[None] :
identifier[print] ( literal[string] )
keyword[else] :
identifier[engine] = identifier[self] . identifier[session] . identifier[get_bind] ( identifier[clause] = identifier[table] )
identifier[drop_everything] ( identifier[engine] )
identifier[print] ( literal[string] )
keyword[if] identifier[isinstance] ( identifier[self] . identifier[metadata] , identifier[MetaData] ):
identifier[print] ( literal[string] , identifier[end] = literal[string] )
identifier[_drop_metadata_tables] ( identifier[self] . identifier[metadata] )
keyword[else] :
keyword[for] identifier[current_meta_name] , identifier[metadata] keyword[in] identifier[self] . identifier[metadata] . identifier[items] ():
keyword[if] identifier[meta_name] keyword[not] keyword[in] ( identifier[current_meta_name] , keyword[None] ):
keyword[continue]
identifier[print] ( literal[string] . identifier[format] ( identifier[current_meta_name] ),
identifier[end] = literal[string] )
identifier[_drop_metadata_tables] ( identifier[metadata] )
|
def command_drop_tables(self, meta_name=None):
"""
Drops all tables without dropping a database::
./manage.py sqla:drop_tables [meta_name]
"""
answer = six.moves.input(u'All data will lost. Are you sure? [y/N] ')
if answer.strip().lower() != 'y':
sys.exit('Interrupted') # depends on [control=['if'], data=[]]
def _drop_metadata_tables(metadata):
table = next(six.itervalues(metadata.tables), None)
if table is None:
print('Failed to find engine') # depends on [control=['if'], data=[]]
else:
engine = self.session.get_bind(clause=table)
drop_everything(engine)
print('Done')
if isinstance(self.metadata, MetaData):
print('Droping tables... ', end='')
_drop_metadata_tables(self.metadata) # depends on [control=['if'], data=[]]
else:
for (current_meta_name, metadata) in self.metadata.items():
if meta_name not in (current_meta_name, None):
continue # depends on [control=['if'], data=[]]
print('Droping tables for {}... '.format(current_meta_name), end='')
_drop_metadata_tables(metadata) # depends on [control=['for'], data=[]]
|
def reverse_delete_ipv6(self, subid, ipaddr, params=None):
''' /v1/server/reverse_delete_ipv6
POST - account
Remove a reverse DNS entry for an IPv6 address of a virtual machine.
Upon success, DNS changes may take 6-12 hours to become active.
Link: https://www.vultr.com/api/#server_reverse_delete_ipv6
'''
params = update_params(params, {
'SUBID': subid,
'ip': ipaddr
})
return self.request('/v1/server/reverse_delete_ipv6', params, 'POST')
|
def function[reverse_delete_ipv6, parameter[self, subid, ipaddr, params]]:
constant[ /v1/server/reverse_delete_ipv6
POST - account
Remove a reverse DNS entry for an IPv6 address of a virtual machine.
Upon success, DNS changes may take 6-12 hours to become active.
Link: https://www.vultr.com/api/#server_reverse_delete_ipv6
]
variable[params] assign[=] call[name[update_params], parameter[name[params], dictionary[[<ast.Constant object at 0x7da1b13935b0>, <ast.Constant object at 0x7da1b1390610>], [<ast.Name object at 0x7da1b13919c0>, <ast.Name object at 0x7da1b1391e10>]]]]
return[call[name[self].request, parameter[constant[/v1/server/reverse_delete_ipv6], name[params], constant[POST]]]]
|
keyword[def] identifier[reverse_delete_ipv6] ( identifier[self] , identifier[subid] , identifier[ipaddr] , identifier[params] = keyword[None] ):
literal[string]
identifier[params] = identifier[update_params] ( identifier[params] ,{
literal[string] : identifier[subid] ,
literal[string] : identifier[ipaddr]
})
keyword[return] identifier[self] . identifier[request] ( literal[string] , identifier[params] , literal[string] )
|
def reverse_delete_ipv6(self, subid, ipaddr, params=None):
""" /v1/server/reverse_delete_ipv6
POST - account
Remove a reverse DNS entry for an IPv6 address of a virtual machine.
Upon success, DNS changes may take 6-12 hours to become active.
Link: https://www.vultr.com/api/#server_reverse_delete_ipv6
"""
params = update_params(params, {'SUBID': subid, 'ip': ipaddr})
return self.request('/v1/server/reverse_delete_ipv6', params, 'POST')
|
def print_user_sets(wordsets, print_terms):
"""Print all user sets by title. If 'print_terms', also prints all terms of all user sets.
:param wordsets: List of WordSet.
:param print_terms: If True, also prints all terms of all user sets.
"""
if not wordsets:
print('No sets found')
else:
print('Found sets: {}'.format(len(wordsets)))
for wordset in wordsets:
print(' {}'.format(wordset))
if print_terms:
for term in wordset.terms:
print(' {}'.format(term))
|
def function[print_user_sets, parameter[wordsets, print_terms]]:
constant[Print all user sets by title. If 'print_terms', also prints all terms of all user sets.
:param wordsets: List of WordSet.
:param print_terms: If True, also prints all terms of all user sets.
]
if <ast.UnaryOp object at 0x7da18f09f160> begin[:]
call[name[print], parameter[constant[No sets found]]]
|
keyword[def] identifier[print_user_sets] ( identifier[wordsets] , identifier[print_terms] ):
literal[string]
keyword[if] keyword[not] identifier[wordsets] :
identifier[print] ( literal[string] )
keyword[else] :
identifier[print] ( literal[string] . identifier[format] ( identifier[len] ( identifier[wordsets] )))
keyword[for] identifier[wordset] keyword[in] identifier[wordsets] :
identifier[print] ( literal[string] . identifier[format] ( identifier[wordset] ))
keyword[if] identifier[print_terms] :
keyword[for] identifier[term] keyword[in] identifier[wordset] . identifier[terms] :
identifier[print] ( literal[string] . identifier[format] ( identifier[term] ))
|
def print_user_sets(wordsets, print_terms):
"""Print all user sets by title. If 'print_terms', also prints all terms of all user sets.
:param wordsets: List of WordSet.
:param print_terms: If True, also prints all terms of all user sets.
"""
if not wordsets:
print('No sets found') # depends on [control=['if'], data=[]]
else:
print('Found sets: {}'.format(len(wordsets)))
for wordset in wordsets:
print(' {}'.format(wordset))
if print_terms:
for term in wordset.terms:
print(' {}'.format(term)) # depends on [control=['for'], data=['term']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['wordset']]
|
def is_all_field_none(self):
"""
:rtype: bool
"""
if self._uuid is not None:
return False
if self._created is not None:
return False
if self._updated is not None:
return False
if self._attachment is not None:
return False
return True
|
def function[is_all_field_none, parameter[self]]:
constant[
:rtype: bool
]
if compare[name[self]._uuid is_not constant[None]] begin[:]
return[constant[False]]
if compare[name[self]._created is_not constant[None]] begin[:]
return[constant[False]]
if compare[name[self]._updated is_not constant[None]] begin[:]
return[constant[False]]
if compare[name[self]._attachment is_not constant[None]] begin[:]
return[constant[False]]
return[constant[True]]
|
keyword[def] identifier[is_all_field_none] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_uuid] keyword[is] keyword[not] keyword[None] :
keyword[return] keyword[False]
keyword[if] identifier[self] . identifier[_created] keyword[is] keyword[not] keyword[None] :
keyword[return] keyword[False]
keyword[if] identifier[self] . identifier[_updated] keyword[is] keyword[not] keyword[None] :
keyword[return] keyword[False]
keyword[if] identifier[self] . identifier[_attachment] keyword[is] keyword[not] keyword[None] :
keyword[return] keyword[False]
keyword[return] keyword[True]
|
def is_all_field_none(self):
"""
:rtype: bool
"""
if self._uuid is not None:
return False # depends on [control=['if'], data=[]]
if self._created is not None:
return False # depends on [control=['if'], data=[]]
if self._updated is not None:
return False # depends on [control=['if'], data=[]]
if self._attachment is not None:
return False # depends on [control=['if'], data=[]]
return True
|
def _uncached_match(self, text, pos, cache, error):
"""Return length of match, ``None`` if no match."""
m = self.re.match(text, pos)
if m is not None:
span = m.span()
node = RegexNode(self, text, pos, pos + span[1] - span[0])
node.match = m # TODO: A terrible idea for cache size?
return node
|
def function[_uncached_match, parameter[self, text, pos, cache, error]]:
constant[Return length of match, ``None`` if no match.]
variable[m] assign[=] call[name[self].re.match, parameter[name[text], name[pos]]]
if compare[name[m] is_not constant[None]] begin[:]
variable[span] assign[=] call[name[m].span, parameter[]]
variable[node] assign[=] call[name[RegexNode], parameter[name[self], name[text], name[pos], binary_operation[binary_operation[name[pos] + call[name[span]][constant[1]]] - call[name[span]][constant[0]]]]]
name[node].match assign[=] name[m]
return[name[node]]
|
keyword[def] identifier[_uncached_match] ( identifier[self] , identifier[text] , identifier[pos] , identifier[cache] , identifier[error] ):
literal[string]
identifier[m] = identifier[self] . identifier[re] . identifier[match] ( identifier[text] , identifier[pos] )
keyword[if] identifier[m] keyword[is] keyword[not] keyword[None] :
identifier[span] = identifier[m] . identifier[span] ()
identifier[node] = identifier[RegexNode] ( identifier[self] , identifier[text] , identifier[pos] , identifier[pos] + identifier[span] [ literal[int] ]- identifier[span] [ literal[int] ])
identifier[node] . identifier[match] = identifier[m]
keyword[return] identifier[node]
|
def _uncached_match(self, text, pos, cache, error):
"""Return length of match, ``None`` if no match."""
m = self.re.match(text, pos)
if m is not None:
span = m.span()
node = RegexNode(self, text, pos, pos + span[1] - span[0])
node.match = m # TODO: A terrible idea for cache size?
return node # depends on [control=['if'], data=['m']]
|
def info(self, **kwargs):
"""
Get the detailed information about a particular credit record. This is
currently only supported with the new credit model found in TV. These
ids can be found from any TV credit response as well as the tv_credits
and combined_credits methods for people.
The episodes object returns a list of episodes and are generally going
to be guest stars. The season array will return a list of season
numbers. Season credits are credits that were marked with the
"add to every season" option in the editing interface and are
assumed to be "season regulars".
Args:
language: (optional) ISO 639-1 code.
Returns:
A dict respresentation of the JSON returned from the API.
"""
path = self._get_credit_id_path('info')
response = self._GET(path, kwargs)
self._set_attrs_to_values(response)
return response
|
def function[info, parameter[self]]:
constant[
Get the detailed information about a particular credit record. This is
currently only supported with the new credit model found in TV. These
ids can be found from any TV credit response as well as the tv_credits
and combined_credits methods for people.
The episodes object returns a list of episodes and are generally going
to be guest stars. The season array will return a list of season
numbers. Season credits are credits that were marked with the
"add to every season" option in the editing interface and are
assumed to be "season regulars".
Args:
language: (optional) ISO 639-1 code.
Returns:
A dict respresentation of the JSON returned from the API.
]
variable[path] assign[=] call[name[self]._get_credit_id_path, parameter[constant[info]]]
variable[response] assign[=] call[name[self]._GET, parameter[name[path], name[kwargs]]]
call[name[self]._set_attrs_to_values, parameter[name[response]]]
return[name[response]]
|
keyword[def] identifier[info] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[path] = identifier[self] . identifier[_get_credit_id_path] ( literal[string] )
identifier[response] = identifier[self] . identifier[_GET] ( identifier[path] , identifier[kwargs] )
identifier[self] . identifier[_set_attrs_to_values] ( identifier[response] )
keyword[return] identifier[response]
|
def info(self, **kwargs):
"""
Get the detailed information about a particular credit record. This is
currently only supported with the new credit model found in TV. These
ids can be found from any TV credit response as well as the tv_credits
and combined_credits methods for people.
The episodes object returns a list of episodes and are generally going
to be guest stars. The season array will return a list of season
numbers. Season credits are credits that were marked with the
"add to every season" option in the editing interface and are
assumed to be "season regulars".
Args:
language: (optional) ISO 639-1 code.
Returns:
A dict respresentation of the JSON returned from the API.
"""
path = self._get_credit_id_path('info')
response = self._GET(path, kwargs)
self._set_attrs_to_values(response)
return response
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.