code stringlengths 75 104k | code_sememe stringlengths 47 309k | token_type stringlengths 215 214k | code_dependency stringlengths 75 155k |
|---|---|---|---|
def _on_github_request(self, future, response):
    """Invoked as a response to the GitHub API request. Will decode the
    response and set the result for the future to return the callback or
    raise an exception
    """
    try:
        # Decode the HTTP response body as JSON.
        content = escape.json_decode(response.body)
    except ValueError as error:
        # Body was not valid JSON; surface the raw body in the exception.
        # NOTE(review): ``error`` is bound here but never used.
        future.set_exception(Exception('Github error: %s' %
                            response.body))
        return
    if 'error' in content:
        # GitHub returned a well-formed JSON payload describing an API error.
        future.set_exception(Exception('Github error: %s' %
                            str(content['error'])))
        return
    # Success: hand the decoded payload to the waiting future.
    future.set_result(content) | def function[_on_github_request, parameter[self, future, response]]:
constant[Invoked as a response to the GitHub API request. Will decode the
response and set the result for the future to return the callback or
raise an exception
]
<ast.Try object at 0x7da20e9623e0>
if compare[constant[error] in name[content]] begin[:]
call[name[future].set_exception, parameter[call[name[Exception], parameter[binary_operation[constant[Github error: %s] <ast.Mod object at 0x7da2590d6920> call[name[str], parameter[call[name[content]][constant[error]]]]]]]]]
return[None]
call[name[future].set_result, parameter[name[content]]] | keyword[def] identifier[_on_github_request] ( identifier[self] , identifier[future] , identifier[response] ):
literal[string]
keyword[try] :
identifier[content] = identifier[escape] . identifier[json_decode] ( identifier[response] . identifier[body] )
keyword[except] identifier[ValueError] keyword[as] identifier[error] :
identifier[future] . identifier[set_exception] ( identifier[Exception] ( literal[string] %
identifier[response] . identifier[body] ))
keyword[return]
keyword[if] literal[string] keyword[in] identifier[content] :
identifier[future] . identifier[set_exception] ( identifier[Exception] ( literal[string] %
identifier[str] ( identifier[content] [ literal[string] ])))
keyword[return]
identifier[future] . identifier[set_result] ( identifier[content] ) | def _on_github_request(self, future, response):
"""Invoked as a response to the GitHub API request. Will decode the
response and set the result for the future to return the callback or
raise an exception
"""
try:
content = escape.json_decode(response.body) # depends on [control=['try'], data=[]]
except ValueError as error:
future.set_exception(Exception('Github error: %s' % response.body))
return # depends on [control=['except'], data=[]]
if 'error' in content:
future.set_exception(Exception('Github error: %s' % str(content['error'])))
return # depends on [control=['if'], data=['content']]
future.set_result(content) |
def everythingbut(self):
    '''
    Return a finite state machine which will accept any string NOT
    accepted by self, and will not accept any string accepted by self.
    This is more complicated if there are missing transitions, because the
    missing "dead" state must now be reified.
    '''
    alphabet = self.alphabet
    # States are wrapped as dicts: {0: original_state} for live states.
    # An empty dict reifies the implicit "dead" (sink) state, so missing
    # transitions in the original map land there instead of vanishing.
    initial = {0 : self.initial}
    def follow(current, symbol):
        # Take the original transition when it exists; otherwise return the
        # empty dict, i.e. fall into (or stay in) the dead state.
        next = {}
        if 0 in current and current[0] in self.map and symbol in self.map[current[0]]:
            next[0] = self.map[current[0]][symbol]
        return next
    # state is final unless the original was
    def final(state):
        return not (0 in state and state[0] in self.finals)
    return crawl(alphabet, initial, final, follow).reduce() | def function[everythingbut, parameter[self]]:
constant[
Return a finite state machine which will accept any string NOT
accepted by self, and will not accept any string accepted by self.
This is more complicated if there are missing transitions, because the
missing "dead" state must now be reified.
]
variable[alphabet] assign[=] name[self].alphabet
variable[initial] assign[=] dictionary[[<ast.Constant object at 0x7da1b04f5ff0>], [<ast.Attribute object at 0x7da1b04f5ba0>]]
def function[follow, parameter[current, symbol]]:
variable[next] assign[=] dictionary[[], []]
if <ast.BoolOp object at 0x7da1b04f7610> begin[:]
call[name[next]][constant[0]] assign[=] call[call[name[self].map][call[name[current]][constant[0]]]][name[symbol]]
return[name[next]]
def function[final, parameter[state]]:
return[<ast.UnaryOp object at 0x7da1b047acb0>]
return[call[call[name[crawl], parameter[name[alphabet], name[initial], name[final], name[follow]]].reduce, parameter[]]] | keyword[def] identifier[everythingbut] ( identifier[self] ):
literal[string]
identifier[alphabet] = identifier[self] . identifier[alphabet]
identifier[initial] ={ literal[int] : identifier[self] . identifier[initial] }
keyword[def] identifier[follow] ( identifier[current] , identifier[symbol] ):
identifier[next] ={}
keyword[if] literal[int] keyword[in] identifier[current] keyword[and] identifier[current] [ literal[int] ] keyword[in] identifier[self] . identifier[map] keyword[and] identifier[symbol] keyword[in] identifier[self] . identifier[map] [ identifier[current] [ literal[int] ]]:
identifier[next] [ literal[int] ]= identifier[self] . identifier[map] [ identifier[current] [ literal[int] ]][ identifier[symbol] ]
keyword[return] identifier[next]
keyword[def] identifier[final] ( identifier[state] ):
keyword[return] keyword[not] ( literal[int] keyword[in] identifier[state] keyword[and] identifier[state] [ literal[int] ] keyword[in] identifier[self] . identifier[finals] )
keyword[return] identifier[crawl] ( identifier[alphabet] , identifier[initial] , identifier[final] , identifier[follow] ). identifier[reduce] () | def everythingbut(self):
"""
Return a finite state machine which will accept any string NOT
accepted by self, and will not accept any string accepted by self.
This is more complicated if there are missing transitions, because the
missing "dead" state must now be reified.
"""
alphabet = self.alphabet
initial = {0: self.initial}
def follow(current, symbol):
next = {}
if 0 in current and current[0] in self.map and (symbol in self.map[current[0]]):
next[0] = self.map[current[0]][symbol] # depends on [control=['if'], data=[]]
return next # state is final unless the original was
def final(state):
return not (0 in state and state[0] in self.finals)
return crawl(alphabet, initial, final, follow).reduce() |
def install(self, connection, partition, table_name=None, columns=None, materialize=False,
                logger=None):
        """ Creates FDW or materialize view for given partition.
        Args:
            connection: connection to postgresql
            partition (orm.Partition):
            materialize (boolean): if True, create read-only table. If False create virtual table.
        Returns:
            str: name of the created table.
        """
        # Make sure the partition's data is available locally, then register
        # it as a foreign-data-wrapper table named after the partition vid.
        partition.localize()
        self._add_partition(connection, partition)
        fdw_table = partition.vid
        view_table = '{}_v'.format(fdw_table)
        if materialize:
            with connection.cursor() as cursor:
                # Only create the materialized view once; re-creating an
                # existing one would raise.
                # NOTE(review): logger defaults to None but is used
                # unconditionally below -- verify callers always pass one.
                view_exists = self._relation_exists(connection, view_table)
                if view_exists:
                    logger.debug(
                        'Materialized view of the partition already exists.\n partition: {}, view: {}'
                        .format(partition.name, view_table))
                else:
                    query = 'CREATE MATERIALIZED VIEW {} AS SELECT * FROM {};'\
                        .format(view_table, fdw_table)
                    logger.debug(
                        'Creating new materialized view of the partition.'
                        '\n partition: {}, view: {}, query: {}'
                        .format(partition.name, view_table, query))
                    cursor.execute(query)
                    cursor.execute('COMMIT;')
        # Point the public view at either the materialized copy or the FDW table.
        final_table = view_table if materialize else fdw_table
        with connection.cursor() as cursor:
            # NOTE(review): PostgreSQL does not support CREATE VIEW IF NOT
            # EXISTS, and when materialize is False the view name
            # (partition.vid) equals final_table itself -- confirm against the
            # target database before relying on this statement.
            view_q = "CREATE VIEW IF NOT EXISTS {} AS SELECT * FROM {} ".format(partition.vid, final_table)
            cursor.execute(view_q)
            cursor.execute('COMMIT;')
        return partition.vid | def function[install, parameter[self, connection, partition, table_name, columns, materialize, logger]]:
constant[ Creates FDW or materialize view for given partition.
Args:
connection: connection to postgresql
partition (orm.Partition):
materialize (boolean): if True, create read-only table. If False create virtual table.
Returns:
str: name of the created table.
]
call[name[partition].localize, parameter[]]
call[name[self]._add_partition, parameter[name[connection], name[partition]]]
variable[fdw_table] assign[=] name[partition].vid
variable[view_table] assign[=] call[constant[{}_v].format, parameter[name[fdw_table]]]
if name[materialize] begin[:]
with call[name[connection].cursor, parameter[]] begin[:]
variable[view_exists] assign[=] call[name[self]._relation_exists, parameter[name[connection], name[view_table]]]
if name[view_exists] begin[:]
call[name[logger].debug, parameter[call[constant[Materialized view of the partition already exists.
partition: {}, view: {}].format, parameter[name[partition].name, name[view_table]]]]]
variable[final_table] assign[=] <ast.IfExp object at 0x7da1b23456f0>
with call[name[connection].cursor, parameter[]] begin[:]
variable[view_q] assign[=] call[constant[CREATE VIEW IF NOT EXISTS {} AS SELECT * FROM {} ].format, parameter[name[partition].vid, name[final_table]]]
call[name[cursor].execute, parameter[name[view_q]]]
call[name[cursor].execute, parameter[constant[COMMIT;]]]
return[name[partition].vid] | keyword[def] identifier[install] ( identifier[self] , identifier[connection] , identifier[partition] , identifier[table_name] = keyword[None] , identifier[columns] = keyword[None] , identifier[materialize] = keyword[False] ,
identifier[logger] = keyword[None] ):
literal[string]
identifier[partition] . identifier[localize] ()
identifier[self] . identifier[_add_partition] ( identifier[connection] , identifier[partition] )
identifier[fdw_table] = identifier[partition] . identifier[vid]
identifier[view_table] = literal[string] . identifier[format] ( identifier[fdw_table] )
keyword[if] identifier[materialize] :
keyword[with] identifier[connection] . identifier[cursor] () keyword[as] identifier[cursor] :
identifier[view_exists] = identifier[self] . identifier[_relation_exists] ( identifier[connection] , identifier[view_table] )
keyword[if] identifier[view_exists] :
identifier[logger] . identifier[debug] (
literal[string]
. identifier[format] ( identifier[partition] . identifier[name] , identifier[view_table] ))
keyword[else] :
identifier[query] = literal[string] . identifier[format] ( identifier[view_table] , identifier[fdw_table] )
identifier[logger] . identifier[debug] (
literal[string]
literal[string]
. identifier[format] ( identifier[partition] . identifier[name] , identifier[view_table] , identifier[query] ))
identifier[cursor] . identifier[execute] ( identifier[query] )
identifier[cursor] . identifier[execute] ( literal[string] )
identifier[final_table] = identifier[view_table] keyword[if] identifier[materialize] keyword[else] identifier[fdw_table]
keyword[with] identifier[connection] . identifier[cursor] () keyword[as] identifier[cursor] :
identifier[view_q] = literal[string] . identifier[format] ( identifier[partition] . identifier[vid] , identifier[final_table] )
identifier[cursor] . identifier[execute] ( identifier[view_q] )
identifier[cursor] . identifier[execute] ( literal[string] )
keyword[return] identifier[partition] . identifier[vid] | def install(self, connection, partition, table_name=None, columns=None, materialize=False, logger=None):
""" Creates FDW or materialize view for given partition.
Args:
connection: connection to postgresql
partition (orm.Partition):
materialize (boolean): if True, create read-only table. If False create virtual table.
Returns:
str: name of the created table.
"""
partition.localize()
self._add_partition(connection, partition)
fdw_table = partition.vid
view_table = '{}_v'.format(fdw_table)
if materialize:
with connection.cursor() as cursor:
view_exists = self._relation_exists(connection, view_table)
if view_exists:
logger.debug('Materialized view of the partition already exists.\n partition: {}, view: {}'.format(partition.name, view_table)) # depends on [control=['if'], data=[]]
else:
query = 'CREATE MATERIALIZED VIEW {} AS SELECT * FROM {};'.format(view_table, fdw_table)
logger.debug('Creating new materialized view of the partition.\n partition: {}, view: {}, query: {}'.format(partition.name, view_table, query))
cursor.execute(query)
cursor.execute('COMMIT;') # depends on [control=['with'], data=['cursor']] # depends on [control=['if'], data=[]]
final_table = view_table if materialize else fdw_table
with connection.cursor() as cursor:
view_q = 'CREATE VIEW IF NOT EXISTS {} AS SELECT * FROM {} '.format(partition.vid, final_table)
cursor.execute(view_q)
cursor.execute('COMMIT;') # depends on [control=['with'], data=['cursor']]
return partition.vid |
def _update_or_create_user(self, data, credentials=None, content=None):
        """
        Updates or creates :attr:`.user`.
        :returns:
            :class:`.User`
        """
        if not self.user:
            self.user = authomatic.core.User(self, credentials=credentials)
        self.user.content = content
        self.user.data = data
        # Update.
        for key in self.user.__dict__:
            # Exclude data.
            if key not in ('data', 'content'):
                # Extract every data item whose key matches the user
                # property name, but only if it has a value.
                value = data.get(key)
                if value:
                    setattr(self.user, key, value)
        # Handle different structure of data by different providers.
        self.user = self._x_user_parser(self.user, data)
        if self.user.id:
            # Normalise the provider-supplied id to a string.
            self.user.id = str(self.user.id)
        # TODO: Move to User
        # If there is no user.name,
        if not self.user.name:
            if self.user.first_name and self.user.last_name:
                # Create it from first name and last name if available.
                self.user.name = ' '.join((self.user.first_name,
                                           self.user.last_name))
            else:
                # Or use one of these.
                self.user.name = (self.user.username or self.user.nickname or
                                  self.user.first_name or self.user.last_name)
        if not self.user.location:
            # Compose "city, country" when both are known, else whichever
            # single component is available.
            if self.user.city and self.user.country:
                self.user.location = '{0}, {1}'.format(self.user.city,
                                                       self.user.country)
            else:
                self.user.location = self.user.city or self.user.country
        return self.user | def function[_update_or_create_user, parameter[self, data, credentials, content]]:
constant[
Updates or creates :attr:`.user`.
:returns:
:class:`.User`
]
if <ast.UnaryOp object at 0x7da1b0399060> begin[:]
name[self].user assign[=] call[name[authomatic].core.User, parameter[name[self]]]
name[self].user.content assign[=] name[content]
name[self].user.data assign[=] name[data]
for taget[name[key]] in starred[name[self].user.__dict__] begin[:]
if compare[name[key] <ast.NotIn object at 0x7da2590d7190> tuple[[<ast.Constant object at 0x7da1b039b6a0>, <ast.Constant object at 0x7da1b039ac20>]]] begin[:]
variable[value] assign[=] call[name[data].get, parameter[name[key]]]
if name[value] begin[:]
call[name[setattr], parameter[name[self].user, name[key], name[value]]]
name[self].user assign[=] call[name[self]._x_user_parser, parameter[name[self].user, name[data]]]
if name[self].user.id begin[:]
name[self].user.id assign[=] call[name[str], parameter[name[self].user.id]]
if <ast.UnaryOp object at 0x7da1b039b490> begin[:]
if <ast.BoolOp object at 0x7da1b039b1c0> begin[:]
name[self].user.name assign[=] call[constant[ ].join, parameter[tuple[[<ast.Attribute object at 0x7da1b0398430>, <ast.Attribute object at 0x7da1b03994b0>]]]]
if <ast.UnaryOp object at 0x7da1b039a200> begin[:]
if <ast.BoolOp object at 0x7da1b039a530> begin[:]
name[self].user.location assign[=] call[constant[{0}, {1}].format, parameter[name[self].user.city, name[self].user.country]]
return[name[self].user] | keyword[def] identifier[_update_or_create_user] ( identifier[self] , identifier[data] , identifier[credentials] = keyword[None] , identifier[content] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[user] :
identifier[self] . identifier[user] = identifier[authomatic] . identifier[core] . identifier[User] ( identifier[self] , identifier[credentials] = identifier[credentials] )
identifier[self] . identifier[user] . identifier[content] = identifier[content]
identifier[self] . identifier[user] . identifier[data] = identifier[data]
keyword[for] identifier[key] keyword[in] identifier[self] . identifier[user] . identifier[__dict__] :
keyword[if] identifier[key] keyword[not] keyword[in] ( literal[string] , literal[string] ):
identifier[value] = identifier[data] . identifier[get] ( identifier[key] )
keyword[if] identifier[value] :
identifier[setattr] ( identifier[self] . identifier[user] , identifier[key] , identifier[value] )
identifier[self] . identifier[user] = identifier[self] . identifier[_x_user_parser] ( identifier[self] . identifier[user] , identifier[data] )
keyword[if] identifier[self] . identifier[user] . identifier[id] :
identifier[self] . identifier[user] . identifier[id] = identifier[str] ( identifier[self] . identifier[user] . identifier[id] )
keyword[if] keyword[not] identifier[self] . identifier[user] . identifier[name] :
keyword[if] identifier[self] . identifier[user] . identifier[first_name] keyword[and] identifier[self] . identifier[user] . identifier[last_name] :
identifier[self] . identifier[user] . identifier[name] = literal[string] . identifier[join] (( identifier[self] . identifier[user] . identifier[first_name] ,
identifier[self] . identifier[user] . identifier[last_name] ))
keyword[else] :
identifier[self] . identifier[user] . identifier[name] =( identifier[self] . identifier[user] . identifier[username] keyword[or] identifier[self] . identifier[user] . identifier[nickname] keyword[or]
identifier[self] . identifier[user] . identifier[first_name] keyword[or] identifier[self] . identifier[user] . identifier[last_name] )
keyword[if] keyword[not] identifier[self] . identifier[user] . identifier[location] :
keyword[if] identifier[self] . identifier[user] . identifier[city] keyword[and] identifier[self] . identifier[user] . identifier[country] :
identifier[self] . identifier[user] . identifier[location] = literal[string] . identifier[format] ( identifier[self] . identifier[user] . identifier[city] ,
identifier[self] . identifier[user] . identifier[country] )
keyword[else] :
identifier[self] . identifier[user] . identifier[location] = identifier[self] . identifier[user] . identifier[city] keyword[or] identifier[self] . identifier[user] . identifier[country]
keyword[return] identifier[self] . identifier[user] | def _update_or_create_user(self, data, credentials=None, content=None):
"""
Updates or creates :attr:`.user`.
:returns:
:class:`.User`
"""
if not self.user:
self.user = authomatic.core.User(self, credentials=credentials) # depends on [control=['if'], data=[]]
self.user.content = content
self.user.data = data
# Update.
for key in self.user.__dict__:
# Exclude data.
if key not in ('data', 'content'):
# Extract every data item whose key matches the user
# property name, but only if it has a value.
value = data.get(key)
if value:
setattr(self.user, key, value) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['key']] # depends on [control=['for'], data=['key']]
# Handle different structure of data by different providers.
self.user = self._x_user_parser(self.user, data)
if self.user.id:
self.user.id = str(self.user.id) # depends on [control=['if'], data=[]]
# TODO: Move to User
# If there is no user.name,
if not self.user.name:
if self.user.first_name and self.user.last_name:
# Create it from first name and last name if available.
self.user.name = ' '.join((self.user.first_name, self.user.last_name)) # depends on [control=['if'], data=[]]
else:
# Or use one of these.
self.user.name = self.user.username or self.user.nickname or self.user.first_name or self.user.last_name # depends on [control=['if'], data=[]]
if not self.user.location:
if self.user.city and self.user.country:
self.user.location = '{0}, {1}'.format(self.user.city, self.user.country) # depends on [control=['if'], data=[]]
else:
self.user.location = self.user.city or self.user.country # depends on [control=['if'], data=[]]
return self.user |
def get_scheduling_block(sub_array_id, block_id):
    """Return the list of scheduling blocks instances associated with the sub
    array"""
    # All scheduling-block-instance ids registered for this sub-array.
    block_ids = DB.get_sub_array_sbi_ids(sub_array_id)
    if block_id in block_ids:
        # get_block_details yields results; take the single item for this id.
        block = DB.get_block_details([block_id]).__next__()
        return block, HTTPStatus.OK
    # Unknown block id for this sub-array: respond with a 404 payload.
    return dict(error="unknown id"), HTTPStatus.NOT_FOUND | def function[get_scheduling_block, parameter[sub_array_id, block_id]]:
constant[Return the list of scheduling blocks instances associated with the sub
array]
variable[block_ids] assign[=] call[name[DB].get_sub_array_sbi_ids, parameter[name[sub_array_id]]]
if compare[name[block_id] in name[block_ids]] begin[:]
variable[block] assign[=] call[call[name[DB].get_block_details, parameter[list[[<ast.Name object at 0x7da1b03a4be0>]]]].__next__, parameter[]]
return[tuple[[<ast.Name object at 0x7da1b03a4d60>, <ast.Attribute object at 0x7da1b03a46d0>]]]
return[tuple[[<ast.Call object at 0x7da1b03a5330>, <ast.Attribute object at 0x7da1b03a4b80>]]] | keyword[def] identifier[get_scheduling_block] ( identifier[sub_array_id] , identifier[block_id] ):
literal[string]
identifier[block_ids] = identifier[DB] . identifier[get_sub_array_sbi_ids] ( identifier[sub_array_id] )
keyword[if] identifier[block_id] keyword[in] identifier[block_ids] :
identifier[block] = identifier[DB] . identifier[get_block_details] ([ identifier[block_id] ]). identifier[__next__] ()
keyword[return] identifier[block] , identifier[HTTPStatus] . identifier[OK]
keyword[return] identifier[dict] ( identifier[error] = literal[string] ), identifier[HTTPStatus] . identifier[NOT_FOUND] | def get_scheduling_block(sub_array_id, block_id):
"""Return the list of scheduling blocks instances associated with the sub
array"""
block_ids = DB.get_sub_array_sbi_ids(sub_array_id)
if block_id in block_ids:
block = DB.get_block_details([block_id]).__next__()
return (block, HTTPStatus.OK) # depends on [control=['if'], data=['block_id']]
return (dict(error='unknown id'), HTTPStatus.NOT_FOUND) |
def _error_serializer(req, exc): # pylint: disable=unused-argument
    """ Serializer for native falcon HTTPError exceptions.
    We override the default serializer with our own so we
    can ensure the errors are serialized in a JSON API
    compliant format.
    Surprisingly, most falcon error attributes map directly
    to the JSON API spec. The few that don't can be mapped
    accordingly:
    HTTPError JSON API
    ~~~~~~~~~ ~~~~~~~~
    exc.description -> error['detail']
    exc.link['href'] -> error['links']['about']
    Per the falcon docs this function should return a tuple
    of (MIMETYPE, BODY PAYLOAD)
    """
    # Map the falcon error attributes onto a JSON API "error" object.
    error = {
        'detail': exc.description,
        'title': exc.title,
        'status': exc.status,
    }
    try:
        error['links'] = {'about': exc.link['href']}
    except (TypeError, KeyError):
        # exc.link may be unset/None (TypeError on subscription) or lack an
        # 'href' key (KeyError); fall back to an empty about-link.
        error['links'] = {'about': ''}
    return (
        goldman.config.JSONAPI_MIMETYPE,
        json.dumps({'errors': [error]}),
    ) | def function[_error_serializer, parameter[req, exc]]:
constant[ Serializer for native falcon HTTPError exceptions.
We override the default serializer with our own so we
can ensure the errors are serialized in a JSON API
compliant format.
Surprisingly, most falcon error attributes map directly
to the JSON API spec. The few that don't can be mapped
accordingly:
HTTPError JSON API
~~~~~~~~~ ~~~~~~~~
exc.description -> error['detail']
exc.link['href'] -> error['links']['about']
Per the falcon docs this function should return a tuple
of (MIMETYPE, BODY PAYLOAD)
]
variable[error] assign[=] dictionary[[<ast.Constant object at 0x7da20c6c4160>, <ast.Constant object at 0x7da20c6c59f0>, <ast.Constant object at 0x7da20c6c4700>], [<ast.Attribute object at 0x7da20c6c4880>, <ast.Attribute object at 0x7da20c6c6f80>, <ast.Attribute object at 0x7da20c6c45e0>]]
<ast.Try object at 0x7da20c6c5360>
return[tuple[[<ast.Attribute object at 0x7da20c6c5180>, <ast.Call object at 0x7da20c6c6ef0>]]] | keyword[def] identifier[_error_serializer] ( identifier[req] , identifier[exc] ):
literal[string]
identifier[error] ={
literal[string] : identifier[exc] . identifier[description] ,
literal[string] : identifier[exc] . identifier[title] ,
literal[string] : identifier[exc] . identifier[status] ,
}
keyword[try] :
identifier[error] [ literal[string] ]={ literal[string] : identifier[exc] . identifier[link] [ literal[string] ]}
keyword[except] ( identifier[TypeError] , identifier[KeyError] ):
identifier[error] [ literal[string] ]={ literal[string] : literal[string] }
keyword[return] (
identifier[goldman] . identifier[config] . identifier[JSONAPI_MIMETYPE] ,
identifier[json] . identifier[dumps] ({ literal[string] :[ identifier[error] ]}),
) | def _error_serializer(req, exc): # pylint: disable=unused-argument
" Serializer for native falcon HTTPError exceptions.\n\n We override the default serializer with our own so we\n can ensure the errors are serialized in a JSON API\n compliant format.\n\n Surprisingly, most falcon error attributes map directly\n to the JSON API spec. The few that don't can be mapped\n accordingly:\n\n\n HTTPError JSON API\n ~~~~~~~~~ ~~~~~~~~\n\n exc.description -> error['detail']\n exc.link['href'] -> error['links']['about']\n\n\n Per the falcon docs this function should return a tuple\n of (MIMETYPE, BODY PAYLOAD)\n "
error = {'detail': exc.description, 'title': exc.title, 'status': exc.status}
try:
error['links'] = {'about': exc.link['href']} # depends on [control=['try'], data=[]]
except (TypeError, KeyError):
error['links'] = {'about': ''} # depends on [control=['except'], data=[]]
return (goldman.config.JSONAPI_MIMETYPE, json.dumps({'errors': [error]})) |
def rewrite_references_json(json_content, rewrite_json):
    """ general purpose references json rewriting by matching the id value """
    for ref in json_content:
        # Only touch refs whose id appears in the rewrite map; merge the
        # mapped key/value pairs into the ref in place.
        if ref.get("id") and ref.get("id") in rewrite_json:
            for key, value in iteritems(rewrite_json.get(ref.get("id"))):
                ref[key] = value
    return json_content | def function[rewrite_references_json, parameter[json_content, rewrite_json]]:
constant[ general purpose references json rewriting by matching the id value ]
for taget[name[ref]] in starred[name[json_content]] begin[:]
if <ast.BoolOp object at 0x7da1b1080a90> begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b10816f0>, <ast.Name object at 0x7da1b1081000>]]] in starred[call[name[iteritems], parameter[call[name[rewrite_json].get, parameter[call[name[ref].get, parameter[constant[id]]]]]]]] begin[:]
call[name[ref]][name[key]] assign[=] name[value]
return[name[json_content]] | keyword[def] identifier[rewrite_references_json] ( identifier[json_content] , identifier[rewrite_json] ):
literal[string]
keyword[for] identifier[ref] keyword[in] identifier[json_content] :
keyword[if] identifier[ref] . identifier[get] ( literal[string] ) keyword[and] identifier[ref] . identifier[get] ( literal[string] ) keyword[in] identifier[rewrite_json] :
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[iteritems] ( identifier[rewrite_json] . identifier[get] ( identifier[ref] . identifier[get] ( literal[string] ))):
identifier[ref] [ identifier[key] ]= identifier[value]
keyword[return] identifier[json_content] | def rewrite_references_json(json_content, rewrite_json):
""" general purpose references json rewriting by matching the id value """
for ref in json_content:
if ref.get('id') and ref.get('id') in rewrite_json:
for (key, value) in iteritems(rewrite_json.get(ref.get('id'))):
ref[key] = value # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['ref']]
return json_content |
def deregister(self, key):
        """ Deregisters an existing key.
        `key`
            String key to deregister.
        Returns boolean.
        """
        # Delegate the actual removal to the base registry.
        res = super(ExtRegistry, self).deregister(key)
        # Drop any cached type information tied to this key as well.
        if key in self._type_info:
            del self._type_info[key]
        return res | def function[deregister, parameter[self, key]]:
constant[ Deregisters an existing key.
`key`
String key to deregister.
Returns boolean.
]
variable[res] assign[=] call[call[name[super], parameter[name[ExtRegistry], name[self]]].deregister, parameter[name[key]]]
if compare[name[key] in name[self]._type_info] begin[:]
<ast.Delete object at 0x7da1b15f4a30>
return[name[res]] | keyword[def] identifier[deregister] ( identifier[self] , identifier[key] ):
literal[string]
identifier[res] = identifier[super] ( identifier[ExtRegistry] , identifier[self] ). identifier[deregister] ( identifier[key] )
keyword[if] identifier[key] keyword[in] identifier[self] . identifier[_type_info] :
keyword[del] identifier[self] . identifier[_type_info] [ identifier[key] ]
keyword[return] identifier[res] | def deregister(self, key):
""" Deregisters an existing key.
`key`
String key to deregister.
Returns boolean.
"""
res = super(ExtRegistry, self).deregister(key)
if key in self._type_info:
del self._type_info[key] # depends on [control=['if'], data=['key']]
return res |
def _argparse_minmax_type(self, string):
        """Custom type for argparse to enforce value limits"""
        # May raise ValueError for non-numeric input, which argparse also
        # treats as an invalid argument.
        value = float(string)
        # argparse expects ArgumentTypeError for out-of-range values so it
        # can render a proper usage error.
        if value < 0 or value > 8:
            raise argparse.ArgumentTypeError(
                '%s must be between 0.0 and 8.0' % string,
            )
        return value | def function[_argparse_minmax_type, parameter[self, string]]:
constant[Custom type for argparse to enforce value limits]
variable[value] assign[=] call[name[float], parameter[name[string]]]
if <ast.BoolOp object at 0x7da20c6c4610> begin[:]
<ast.Raise object at 0x7da20c6c5480>
return[name[value]] | keyword[def] identifier[_argparse_minmax_type] ( identifier[self] , identifier[string] ):
literal[string]
identifier[value] = identifier[float] ( identifier[string] )
keyword[if] identifier[value] < literal[int] keyword[or] identifier[value] > literal[int] :
keyword[raise] identifier[argparse] . identifier[ArgumentTypeError] (
literal[string] % identifier[string] ,
)
keyword[return] identifier[value] | def _argparse_minmax_type(self, string):
"""Custom type for argparse to enforce value limits"""
value = float(string)
if value < 0 or value > 8:
raise argparse.ArgumentTypeError('%s must be between 0.0 and 8.0' % string) # depends on [control=['if'], data=[]]
return value |
def select_grid_model_ria(lvgd, sector):
    """Select a typified grid for retail/industrial and agricultural

    Parameters
    ----------
    lvgd : ding0.core.structure.regions.LVGridDistrictDing0
        Low-voltage grid district object
    sector : str
        Either 'retail/industrial' or 'agricultural'. Depending on choice
        different parameters to grid topology apply

    Returns
    -------
    :obj:`dict` or None
        Parameters that describe branch lines of a sector, or ``None``
        when the sector has no load (or no areas) to connect.

    Raises
    ------
    ValueError
        If ``sector`` is neither 'retail/industrial' nor 'agricultural'.
    """
    cable_lf = cfg_ding0.get('assumptions',
                             'load_factor_lv_cable_lc_normal')
    cos_phi_load = cfg_ding0.get('assumptions',
                                 'cos_phi_load')
    max_lv_branch_line_load = cfg_ding0.get('assumptions',
                                            'max_lv_branch_line')

    # make a distinction between sectors
    if sector == 'retail/industrial':
        max_branch_length = cfg_ding0.get(
            "assumptions",
            "branch_line_length_retail_industrial")
        peak_load = lvgd.peak_load_retail + \
                    lvgd.peak_load_industrial
        count_sector_areas = lvgd.sector_count_retail + \
                             lvgd.sector_count_industrial
    elif sector == 'agricultural':
        max_branch_length = cfg_ding0.get(
            "assumptions",
            "branch_line_length_agricultural")
        peak_load = lvgd.peak_load_agricultural
        count_sector_areas = lvgd.sector_count_agricultural
    else:
        raise ValueError('Sector {} does not exist!'.format(sector))

    # BUGFIX: without any sector area the per-area load is undefined and
    # the division below raised ZeroDivisionError.
    if count_sector_areas == 0:
        return None

    # determine size of a single load
    single_peak_load = peak_load / count_sector_areas

    # if this single load exceeds threshold of 300 kVA it is splitted
    while single_peak_load > (max_lv_branch_line_load * (cable_lf * cos_phi_load)):
        single_peak_load = single_peak_load / 2
        count_sector_areas = count_sector_areas * 2

    if single_peak_load <= 0:
        # Area count is non-zero but there is no load; maybe related to
        # #153. (Message also fixes a missing space: "countis".)
        logger.warning(
            'LVGD {lvgd} has in sector {sector} no load but area count '
            'is {count}. This is maybe related to #153'.format(
                lvgd=lvgd,
                sector=sector,
                count=count_sector_areas))
        # BUGFIX: the original set grid_model = None here and then fell
        # through to the consumption assignment below, which subscripted
        # ``None`` and raised TypeError. Return early instead.
        return None

    grid_model = {}
    # determine parameters of branches and loads connected to the branch
    # line
    grid_model['max_loads_per_branch'] = math.floor(
        (max_lv_branch_line_load * (cable_lf * cos_phi_load)) / single_peak_load)
    grid_model['single_peak_load'] = single_peak_load
    grid_model['full_branches'] = math.floor(
        count_sector_areas / grid_model['max_loads_per_branch'])
    grid_model['remaining_loads'] = count_sector_areas - (
        grid_model['full_branches'] * grid_model['max_loads_per_branch']
    )
    grid_model['load_distance'] = max_branch_length / (
        grid_model['max_loads_per_branch'] + 1)
    grid_model['load_distance_remaining'] = max_branch_length / (
        grid_model['remaining_loads'] + 1)

    # add consumption to grid_model for assigning it to the load object;
    # consumption is given per sector and per individual load
    total_loads = (grid_model['full_branches'] *
                   grid_model['max_loads_per_branch'] +
                   grid_model['remaining_loads'])
    if sector == 'retail/industrial':
        grid_model['consumption'] = {
            'retail': lvgd.sector_consumption_retail / total_loads,
            'industrial': lvgd.sector_consumption_industrial / total_loads}
    elif sector == 'agricultural':
        grid_model['consumption'] = {
            'agricultural': lvgd.sector_consumption_agricultural / total_loads}

    return grid_model
constant[Select a typified grid for retail/industrial and agricultural
Parameters
----------
lvgd : ding0.core.structure.regions.LVGridDistrictDing0
Low-voltage grid district object
sector : str
Either 'retail/industrial' or 'agricultural'. Depending on choice
different parameters to grid topology apply
Returns
-------
:obj:`dict`
Parameters that describe branch lines of a sector
]
variable[cable_lf] assign[=] call[name[cfg_ding0].get, parameter[constant[assumptions], constant[load_factor_lv_cable_lc_normal]]]
variable[cos_phi_load] assign[=] call[name[cfg_ding0].get, parameter[constant[assumptions], constant[cos_phi_load]]]
variable[max_lv_branch_line_load] assign[=] call[name[cfg_ding0].get, parameter[constant[assumptions], constant[max_lv_branch_line]]]
if compare[name[sector] equal[==] constant[retail/industrial]] begin[:]
variable[max_branch_length] assign[=] call[name[cfg_ding0].get, parameter[constant[assumptions], constant[branch_line_length_retail_industrial]]]
variable[peak_load] assign[=] binary_operation[name[lvgd].peak_load_retail + name[lvgd].peak_load_industrial]
variable[count_sector_areas] assign[=] binary_operation[name[lvgd].sector_count_retail + name[lvgd].sector_count_industrial]
variable[single_peak_load] assign[=] binary_operation[name[peak_load] / name[count_sector_areas]]
while compare[name[single_peak_load] greater[>] binary_operation[name[max_lv_branch_line_load] * binary_operation[name[cable_lf] * name[cos_phi_load]]]] begin[:]
variable[single_peak_load] assign[=] binary_operation[name[single_peak_load] / constant[2]]
variable[count_sector_areas] assign[=] binary_operation[name[count_sector_areas] * constant[2]]
variable[grid_model] assign[=] dictionary[[], []]
if compare[constant[0] less[<] name[single_peak_load]] begin[:]
call[name[grid_model]][constant[max_loads_per_branch]] assign[=] call[name[math].floor, parameter[binary_operation[binary_operation[name[max_lv_branch_line_load] * binary_operation[name[cable_lf] * name[cos_phi_load]]] / name[single_peak_load]]]]
call[name[grid_model]][constant[single_peak_load]] assign[=] name[single_peak_load]
call[name[grid_model]][constant[full_branches]] assign[=] call[name[math].floor, parameter[binary_operation[name[count_sector_areas] / call[name[grid_model]][constant[max_loads_per_branch]]]]]
call[name[grid_model]][constant[remaining_loads]] assign[=] binary_operation[name[count_sector_areas] - binary_operation[call[name[grid_model]][constant[full_branches]] * call[name[grid_model]][constant[max_loads_per_branch]]]]
call[name[grid_model]][constant[load_distance]] assign[=] binary_operation[name[max_branch_length] / binary_operation[call[name[grid_model]][constant[max_loads_per_branch]] + constant[1]]]
call[name[grid_model]][constant[load_distance_remaining]] assign[=] binary_operation[name[max_branch_length] / binary_operation[call[name[grid_model]][constant[remaining_loads]] + constant[1]]]
if compare[name[sector] equal[==] constant[retail/industrial]] begin[:]
call[name[grid_model]][constant[consumption]] assign[=] dictionary[[<ast.Constant object at 0x7da20c6a9f30>, <ast.Constant object at 0x7da20c6a91e0>], [<ast.BinOp object at 0x7da20c6aa4a0>, <ast.BinOp object at 0x7da20c6a9fc0>]]
return[name[grid_model]] | keyword[def] identifier[select_grid_model_ria] ( identifier[lvgd] , identifier[sector] ):
literal[string]
identifier[cable_lf] = identifier[cfg_ding0] . identifier[get] ( literal[string] ,
literal[string] )
identifier[cos_phi_load] = identifier[cfg_ding0] . identifier[get] ( literal[string] ,
literal[string] )
identifier[max_lv_branch_line_load] = identifier[cfg_ding0] . identifier[get] ( literal[string] ,
literal[string] )
keyword[if] identifier[sector] == literal[string] :
identifier[max_branch_length] = identifier[cfg_ding0] . identifier[get] (
literal[string] ,
literal[string] )
identifier[peak_load] = identifier[lvgd] . identifier[peak_load_retail] + identifier[lvgd] . identifier[peak_load_industrial]
identifier[count_sector_areas] = identifier[lvgd] . identifier[sector_count_retail] + identifier[lvgd] . identifier[sector_count_industrial]
keyword[elif] identifier[sector] == literal[string] :
identifier[max_branch_length] = identifier[cfg_ding0] . identifier[get] (
literal[string] ,
literal[string] )
identifier[peak_load] = identifier[lvgd] . identifier[peak_load_agricultural]
identifier[count_sector_areas] = identifier[lvgd] . identifier[sector_count_agricultural]
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[sector] ))
identifier[single_peak_load] = identifier[peak_load] / identifier[count_sector_areas]
keyword[while] identifier[single_peak_load] >( identifier[max_lv_branch_line_load] *( identifier[cable_lf] * identifier[cos_phi_load] )):
identifier[single_peak_load] = identifier[single_peak_load] / literal[int]
identifier[count_sector_areas] = identifier[count_sector_areas] * literal[int]
identifier[grid_model] ={}
keyword[if] literal[int] < identifier[single_peak_load] :
identifier[grid_model] [ literal[string] ]= identifier[math] . identifier[floor] (
( identifier[max_lv_branch_line_load] *( identifier[cable_lf] * identifier[cos_phi_load] ))/ identifier[single_peak_load] )
identifier[grid_model] [ literal[string] ]= identifier[single_peak_load]
identifier[grid_model] [ literal[string] ]= identifier[math] . identifier[floor] (
identifier[count_sector_areas] / identifier[grid_model] [ literal[string] ])
identifier[grid_model] [ literal[string] ]= identifier[count_sector_areas] -(
identifier[grid_model] [ literal[string] ]* identifier[grid_model] [ literal[string] ]
)
identifier[grid_model] [ literal[string] ]= identifier[max_branch_length] /(
identifier[grid_model] [ literal[string] ]+ literal[int] )
identifier[grid_model] [ literal[string] ]= identifier[max_branch_length] /(
identifier[grid_model] [ literal[string] ]+ literal[int] )
keyword[else] :
keyword[if] identifier[count_sector_areas] > literal[int] :
identifier[logger] . identifier[warning] (
literal[string]
literal[string] . identifier[format] (
identifier[lvgd] = identifier[lvgd] ,
identifier[sector] = identifier[sector] ,
identifier[count] = identifier[count_sector_areas] ))
identifier[grid_model] = keyword[None]
keyword[if] identifier[sector] == literal[string] :
identifier[grid_model] [ literal[string] ]={
literal[string] : identifier[lvgd] . identifier[sector_consumption_retail] /(
identifier[grid_model] [ literal[string] ]*
identifier[grid_model] [ literal[string] ]+
identifier[grid_model] [ literal[string] ]),
literal[string] : identifier[lvgd] . identifier[sector_consumption_industrial] /(
identifier[grid_model] [ literal[string] ]*
identifier[grid_model] [ literal[string] ]+
identifier[grid_model] [ literal[string] ])}
keyword[elif] identifier[sector] == literal[string] :
identifier[grid_model] [ literal[string] ]={
literal[string] : identifier[lvgd] . identifier[sector_consumption_agricultural] /(
identifier[grid_model] [ literal[string] ]*
identifier[grid_model] [ literal[string] ]+
identifier[grid_model] [ literal[string] ])}
keyword[return] identifier[grid_model] | def select_grid_model_ria(lvgd, sector):
"""Select a typified grid for retail/industrial and agricultural
Parameters
----------
lvgd : ding0.core.structure.regions.LVGridDistrictDing0
Low-voltage grid district object
sector : str
Either 'retail/industrial' or 'agricultural'. Depending on choice
different parameters to grid topology apply
Returns
-------
:obj:`dict`
Parameters that describe branch lines of a sector
"""
cable_lf = cfg_ding0.get('assumptions', 'load_factor_lv_cable_lc_normal')
cos_phi_load = cfg_ding0.get('assumptions', 'cos_phi_load')
max_lv_branch_line_load = cfg_ding0.get('assumptions', 'max_lv_branch_line')
# make a distinction between sectors
if sector == 'retail/industrial':
max_branch_length = cfg_ding0.get('assumptions', 'branch_line_length_retail_industrial')
peak_load = lvgd.peak_load_retail + lvgd.peak_load_industrial
count_sector_areas = lvgd.sector_count_retail + lvgd.sector_count_industrial # depends on [control=['if'], data=[]]
elif sector == 'agricultural':
max_branch_length = cfg_ding0.get('assumptions', 'branch_line_length_agricultural')
peak_load = lvgd.peak_load_agricultural
count_sector_areas = lvgd.sector_count_agricultural # depends on [control=['if'], data=[]]
else:
raise ValueError('Sector {} does not exist!'.format(sector))
# determine size of a single load
single_peak_load = peak_load / count_sector_areas
# if this single load exceeds threshold of 300 kVA it is splitted
while single_peak_load > max_lv_branch_line_load * (cable_lf * cos_phi_load):
single_peak_load = single_peak_load / 2
count_sector_areas = count_sector_areas * 2 # depends on [control=['while'], data=['single_peak_load']]
grid_model = {}
# determine parameters of branches and loads connected to the branch
# line
if 0 < single_peak_load:
grid_model['max_loads_per_branch'] = math.floor(max_lv_branch_line_load * (cable_lf * cos_phi_load) / single_peak_load)
grid_model['single_peak_load'] = single_peak_load
grid_model['full_branches'] = math.floor(count_sector_areas / grid_model['max_loads_per_branch'])
grid_model['remaining_loads'] = count_sector_areas - grid_model['full_branches'] * grid_model['max_loads_per_branch']
grid_model['load_distance'] = max_branch_length / (grid_model['max_loads_per_branch'] + 1)
grid_model['load_distance_remaining'] = max_branch_length / (grid_model['remaining_loads'] + 1) # depends on [control=['if'], data=['single_peak_load']]
elif count_sector_areas > 0:
logger.warning('LVGD {lvgd} has in sector {sector} no load but area countis {count}. This is maybe related to #153'.format(lvgd=lvgd, sector=sector, count=count_sector_areas))
grid_model = None # depends on [control=['if'], data=['count_sector_areas']]
# add consumption to grid_model for assigning it to the load object
# consumption is given per sector and per individual load
if sector == 'retail/industrial':
grid_model['consumption'] = {'retail': lvgd.sector_consumption_retail / (grid_model['full_branches'] * grid_model['max_loads_per_branch'] + grid_model['remaining_loads']), 'industrial': lvgd.sector_consumption_industrial / (grid_model['full_branches'] * grid_model['max_loads_per_branch'] + grid_model['remaining_loads'])} # depends on [control=['if'], data=[]]
elif sector == 'agricultural':
grid_model['consumption'] = {'agricultural': lvgd.sector_consumption_agricultural / (grid_model['full_branches'] * grid_model['max_loads_per_branch'] + grid_model['remaining_loads'])} # depends on [control=['if'], data=[]]
return grid_model |
def authorize_redirect(self, redirect_uri=None, client_id=None,
                       client_secret=None, extra_params=None):
    """Redirects the user to obtain OAuth authorization for this service.

    Some providers require that you register a Callback URL with your
    application. Call this method to log the user in, then call
    ``get_authenticated_user()`` in the handler registered as your
    Callback URL to complete the authorization process.

    ``client_secret`` is accepted for interface compatibility; it is not
    included in the authorize URL.
    """
    query_args = {
        "redirect_uri": redirect_uri,
        "client_id": client_id,
    }
    if extra_params:
        query_args.update(extra_params)
    self.redirect(url_concat(self._OAUTH_AUTHORIZE_URL, query_args))
constant[Redirects the user to obtain OAuth authorization for this service.
Some providers require that you register a Callback
URL with your application. You should call this method to log the
user in, and then call get_authenticated_user() in the handler
you registered as your Callback URL to complete the authorization
process.
]
variable[args] assign[=] dictionary[[<ast.Constant object at 0x7da20cabf790>, <ast.Constant object at 0x7da20cabe0e0>], [<ast.Name object at 0x7da20cabc2e0>, <ast.Name object at 0x7da20cabd7e0>]]
if name[extra_params] begin[:]
call[name[args].update, parameter[name[extra_params]]]
call[name[self].redirect, parameter[call[name[url_concat], parameter[name[self]._OAUTH_AUTHORIZE_URL, name[args]]]]] | keyword[def] identifier[authorize_redirect] ( identifier[self] , identifier[redirect_uri] = keyword[None] , identifier[client_id] = keyword[None] ,
identifier[client_secret] = keyword[None] , identifier[extra_params] = keyword[None] ):
literal[string]
identifier[args] ={
literal[string] : identifier[redirect_uri] ,
literal[string] : identifier[client_id]
}
keyword[if] identifier[extra_params] : identifier[args] . identifier[update] ( identifier[extra_params] )
identifier[self] . identifier[redirect] (
identifier[url_concat] ( identifier[self] . identifier[_OAUTH_AUTHORIZE_URL] , identifier[args] )) | def authorize_redirect(self, redirect_uri=None, client_id=None, client_secret=None, extra_params=None):
"""Redirects the user to obtain OAuth authorization for this service.
Some providers require that you register a Callback
URL with your application. You should call this method to log the
user in, and then call get_authenticated_user() in the handler
you registered as your Callback URL to complete the authorization
process.
"""
args = {'redirect_uri': redirect_uri, 'client_id': client_id}
if extra_params:
args.update(extra_params) # depends on [control=['if'], data=[]]
self.redirect(url_concat(self._OAUTH_AUTHORIZE_URL, args)) |
def node_setup(domain, master, ticket):
    '''
    Setup the icinga2 node.

    Returns the full ``cmd.run_all`` result dict of::

        icinga2 node setup --ticket TICKET_ID --endpoint master.domain.tld \
            --zone domain.tld --master_host master.domain.tld \
            --trustedcert /etc/icinga2/pki/trusted-master.crt

    CLI Example:

    .. code-block:: bash

        salt '*' icinga2.node_setup domain.tld master.domain.tld TICKET_ID
    '''
    # Build the argv list explicitly; python_shell=False keeps the call
    # free of shell interpolation.
    cmd = [
        "icinga2", "node", "setup",
        "--ticket", ticket,
        "--endpoint", master,
        "--zone", domain,
        "--master_host", master,
        "--trustedcert", "{0}trusted-master.crt".format(get_certs_path()),
    ]
    return __salt__['cmd.run_all'](cmd, python_shell=False)
constant[
Setup the icinga2 node.
Returns::
icinga2 node setup --ticket TICKET_ID --endpoint master.domain.tld --zone domain.tld --master_host master.domain.tld --trustedcert /etc/icinga2/pki/trusted-master.crt
CLI Example:
.. code-block:: bash
salt '*' icinga2.node_setup domain.tld master.domain.tld TICKET_ID
]
variable[result] assign[=] call[call[name[__salt__]][constant[cmd.run_all]], parameter[list[[<ast.Constant object at 0x7da1b215e4d0>, <ast.Constant object at 0x7da1b215d120>, <ast.Constant object at 0x7da1b215d900>, <ast.Constant object at 0x7da1b215d8d0>, <ast.Name object at 0x7da1b215d420>, <ast.Constant object at 0x7da1b215efb0>, <ast.Name object at 0x7da1b215dde0>, <ast.Constant object at 0x7da1b215efe0>, <ast.Name object at 0x7da1b215e500>, <ast.Constant object at 0x7da1b215cd90>, <ast.Name object at 0x7da1b215e410>, <ast.Constant object at 0x7da1b215eec0>, <ast.Call object at 0x7da1b215ecb0>]]]]
return[name[result]] | keyword[def] identifier[node_setup] ( identifier[domain] , identifier[master] , identifier[ticket] ):
literal[string]
identifier[result] = identifier[__salt__] [ literal[string] ]([ literal[string] , literal[string] , literal[string] , literal[string] , identifier[ticket] , literal[string] , identifier[master] , literal[string] , identifier[domain] , literal[string] , identifier[master] , literal[string] , literal[string] . identifier[format] ( identifier[get_certs_path] ())],
identifier[python_shell] = keyword[False] )
keyword[return] identifier[result] | def node_setup(domain, master, ticket):
"""
Setup the icinga2 node.
Returns::
icinga2 node setup --ticket TICKET_ID --endpoint master.domain.tld --zone domain.tld --master_host master.domain.tld --trustedcert /etc/icinga2/pki/trusted-master.crt
CLI Example:
.. code-block:: bash
salt '*' icinga2.node_setup domain.tld master.domain.tld TICKET_ID
"""
result = __salt__['cmd.run_all'](['icinga2', 'node', 'setup', '--ticket', ticket, '--endpoint', master, '--zone', domain, '--master_host', master, '--trustedcert', '{0}trusted-master.crt'.format(get_certs_path())], python_shell=False)
return result |
def remove_objects_not_in(self, objects_to_keep, verbosity):
    """
    Delete all the objects in the database that are not in objects_to_keep.
    - objects_to_keep: A map where the keys are classes, and the values are a
       set of the objects of that class we should keep.
    """
    for model, keep_objects in objects_to_keep.items():
        existing = model.objects.all()
        # pks present in the database but absent from the keep-set
        doomed_pks = {obj.pk for obj in existing} - {obj.pk for obj in keep_objects}
        if doomed_pks:
            for obj in existing:
                if obj.pk in doomed_pks:
                    obj.delete()
                    if verbosity >= 2:
                        print("Deleted object: %s" % six.u(obj))
        if verbosity > 0 and doomed_pks:
            num_deleted = len(doomed_pks)
            if num_deleted > 1:
                label = six.u(model._meta.verbose_name_plural)
            else:
                label = six.u(model._meta.verbose_name)
            print("Deleted %s %s" % (str(num_deleted), label))
constant[
Delete all the objects in the database that are not in objects_to_keep.
- objects_to_keep: A map where the keys are classes, and the values are a
set of the objects of that class we should keep.
]
for taget[name[class_]] in starred[call[name[objects_to_keep].keys, parameter[]]] begin[:]
variable[current] assign[=] call[name[class_].objects.all, parameter[]]
variable[current_ids] assign[=] call[name[set], parameter[<ast.ListComp object at 0x7da1b17d7d30>]]
variable[keep_ids] assign[=] call[name[set], parameter[<ast.ListComp object at 0x7da1b17d77c0>]]
variable[remove_these_ones] assign[=] call[name[current_ids].difference, parameter[name[keep_ids]]]
if name[remove_these_ones] begin[:]
for taget[name[obj]] in starred[name[current]] begin[:]
if compare[name[obj].pk in name[remove_these_ones]] begin[:]
call[name[obj].delete, parameter[]]
if compare[name[verbosity] greater_or_equal[>=] constant[2]] begin[:]
call[name[print], parameter[binary_operation[constant[Deleted object: %s] <ast.Mod object at 0x7da2590d6920> call[name[six].u, parameter[name[obj]]]]]]
if <ast.BoolOp object at 0x7da1b17d7040> begin[:]
variable[num_deleted] assign[=] call[name[len], parameter[name[remove_these_ones]]]
if compare[name[num_deleted] greater[>] constant[1]] begin[:]
variable[type_deleted] assign[=] call[name[six].u, parameter[name[class_]._meta.verbose_name_plural]]
call[name[print], parameter[binary_operation[constant[Deleted %s %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da1b17d85b0>, <ast.Name object at 0x7da1b17dbc10>]]]]] | keyword[def] identifier[remove_objects_not_in] ( identifier[self] , identifier[objects_to_keep] , identifier[verbosity] ):
literal[string]
keyword[for] identifier[class_] keyword[in] identifier[objects_to_keep] . identifier[keys] ():
identifier[current] = identifier[class_] . identifier[objects] . identifier[all] ()
identifier[current_ids] = identifier[set] ([ identifier[x] . identifier[pk] keyword[for] identifier[x] keyword[in] identifier[current] ])
identifier[keep_ids] = identifier[set] ([ identifier[x] . identifier[pk] keyword[for] identifier[x] keyword[in] identifier[objects_to_keep] [ identifier[class_] ]])
identifier[remove_these_ones] = identifier[current_ids] . identifier[difference] ( identifier[keep_ids] )
keyword[if] identifier[remove_these_ones] :
keyword[for] identifier[obj] keyword[in] identifier[current] :
keyword[if] identifier[obj] . identifier[pk] keyword[in] identifier[remove_these_ones] :
identifier[obj] . identifier[delete] ()
keyword[if] identifier[verbosity] >= literal[int] :
identifier[print] ( literal[string] % identifier[six] . identifier[u] ( identifier[obj] ))
keyword[if] identifier[verbosity] > literal[int] keyword[and] identifier[remove_these_ones] :
identifier[num_deleted] = identifier[len] ( identifier[remove_these_ones] )
keyword[if] identifier[num_deleted] > literal[int] :
identifier[type_deleted] = identifier[six] . identifier[u] ( identifier[class_] . identifier[_meta] . identifier[verbose_name_plural] )
keyword[else] :
identifier[type_deleted] = identifier[six] . identifier[u] ( identifier[class_] . identifier[_meta] . identifier[verbose_name] )
identifier[print] ( literal[string] %( identifier[str] ( identifier[num_deleted] ), identifier[type_deleted] )) | def remove_objects_not_in(self, objects_to_keep, verbosity):
"""
Delete all the objects in the database that are not in objects_to_keep.
- objects_to_keep: A map where the keys are classes, and the values are a
set of the objects of that class we should keep.
"""
for class_ in objects_to_keep.keys():
current = class_.objects.all()
current_ids = set([x.pk for x in current])
keep_ids = set([x.pk for x in objects_to_keep[class_]])
remove_these_ones = current_ids.difference(keep_ids)
if remove_these_ones:
for obj in current:
if obj.pk in remove_these_ones:
obj.delete()
if verbosity >= 2:
print('Deleted object: %s' % six.u(obj)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['obj']] # depends on [control=['if'], data=[]]
if verbosity > 0 and remove_these_ones:
num_deleted = len(remove_these_ones)
if num_deleted > 1:
type_deleted = six.u(class_._meta.verbose_name_plural) # depends on [control=['if'], data=[]]
else:
type_deleted = six.u(class_._meta.verbose_name)
print('Deleted %s %s' % (str(num_deleted), type_deleted)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['class_']] |
def create_payment_transaction(cls, payment_transaction, **kwargs):
    """Create PaymentTransaction

    Create a new PaymentTransaction
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True

    >>> thread = api.create_payment_transaction(payment_transaction, async=True)
    >>> result = thread.get()

    :param async bool
    :param PaymentTransaction payment_transaction: Attributes of paymentTransaction to create (required)
    :return: PaymentTransaction
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # Synchronous and asynchronous calls both delegate to the same
    # helper; with ``async`` set the helper returns the request thread,
    # otherwise the decoded response data.
    return cls._create_payment_transaction_with_http_info(
        payment_transaction, **kwargs)
constant[Create PaymentTransaction
Create a new PaymentTransaction
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_payment_transaction(payment_transaction, async=True)
>>> result = thread.get()
:param async bool
:param PaymentTransaction payment_transaction: Attributes of paymentTransaction to create (required)
:return: PaymentTransaction
If the method is called asynchronously,
returns the request thread.
]
call[name[kwargs]][constant[_return_http_data_only]] assign[=] constant[True]
if call[name[kwargs].get, parameter[constant[async]]] begin[:]
return[call[name[cls]._create_payment_transaction_with_http_info, parameter[name[payment_transaction]]]] | keyword[def] identifier[create_payment_transaction] ( identifier[cls] , identifier[payment_transaction] ,** identifier[kwargs] ):
literal[string]
identifier[kwargs] [ literal[string] ]= keyword[True]
keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ):
keyword[return] identifier[cls] . identifier[_create_payment_transaction_with_http_info] ( identifier[payment_transaction] ,** identifier[kwargs] )
keyword[else] :
( identifier[data] )= identifier[cls] . identifier[_create_payment_transaction_with_http_info] ( identifier[payment_transaction] ,** identifier[kwargs] )
keyword[return] identifier[data] | def create_payment_transaction(cls, payment_transaction, **kwargs):
"""Create PaymentTransaction
Create a new PaymentTransaction
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_payment_transaction(payment_transaction, async=True)
>>> result = thread.get()
:param async bool
:param PaymentTransaction payment_transaction: Attributes of paymentTransaction to create (required)
:return: PaymentTransaction
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return cls._create_payment_transaction_with_http_info(payment_transaction, **kwargs) # depends on [control=['if'], data=[]]
else:
data = cls._create_payment_transaction_with_http_info(payment_transaction, **kwargs)
return data |
def extract_subtree(self, node):
    '''Return a copy of the subtree rooted at ``node``

    Args:
        ``node`` (``Node``): The root of the desired subtree

    Returns:
        ``Tree``: A copy of the subtree rooted at ``node``
    '''
    if not isinstance(node, Node):
        raise TypeError("node must be a Node")
    # Temporarily re-root the tree at ``node`` so copying the tree object
    # yields exactly that subtree, then restore the original root.
    saved_root = self.root
    self.root = node
    subtree = copy(self)
    self.root = saved_root
    return subtree
constant[Return a copy of the subtree rooted at ``node``
Args:
``node`` (``Node``): The root of the desired subtree
Returns:
``Tree``: A copy of the subtree rooted at ``node``
]
if <ast.UnaryOp object at 0x7da1b0b9c7f0> begin[:]
<ast.Raise object at 0x7da1b0b9fd00>
variable[r] assign[=] name[self].root
name[self].root assign[=] name[node]
variable[o] assign[=] call[name[copy], parameter[name[self]]]
name[self].root assign[=] name[r]
return[name[o]] | keyword[def] identifier[extract_subtree] ( identifier[self] , identifier[node] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[node] , identifier[Node] ):
keyword[raise] identifier[TypeError] ( literal[string] )
identifier[r] = identifier[self] . identifier[root] ; identifier[self] . identifier[root] = identifier[node] ; identifier[o] = identifier[copy] ( identifier[self] ); identifier[self] . identifier[root] = identifier[r] ; keyword[return] identifier[o] | def extract_subtree(self, node):
"""Return a copy of the subtree rooted at ``node``
Args:
``node`` (``Node``): The root of the desired subtree
Returns:
``Tree``: A copy of the subtree rooted at ``node``
"""
if not isinstance(node, Node):
raise TypeError('node must be a Node') # depends on [control=['if'], data=[]]
r = self.root
self.root = node
o = copy(self)
self.root = r
return o |
def log_listener(log: logging.Logger = None, level=logging.INFO):
    """Build a Progress Monitor listener that logs all updates.

    Args:
        log: Target logger; defaults to the "ProgressMonitor" logger.
        level: Logging level used for each update message.

    Returns:
        A callable taking a monitor object (with ``name``, ``progress``
        in [0, 1] and ``message`` attributes) that logs its state as
        ``[name:  42%] message``.
    """
    if log is None:
        log = logging.getLogger("ProgressMonitor")

    def listen(monitor):
        # Prefix messages with the monitor name only when one is set.
        prefix = "{}: ".format(monitor.name) if monitor.name is not None else ""
        percent = int(monitor.progress * 100)
        # Pass arguments explicitly instead of the original's fragile
        # ``format(**locals())`` idiom (which silently depends on local
        # variable names).
        log.log(level, "[{0}{1:3d}%] {2}".format(prefix, percent, monitor.message))
    return listen
constant[Progress Monitor listener that logs all updates to the given logger]
if compare[name[log] is constant[None]] begin[:]
variable[log] assign[=] call[name[logging].getLogger, parameter[constant[ProgressMonitor]]]
def function[listen, parameter[monitor]]:
variable[name] assign[=] <ast.IfExp object at 0x7da1b021c520>
variable[perc] assign[=] call[name[int], parameter[binary_operation[name[monitor].progress * constant[100]]]]
variable[msg] assign[=] call[constant[[{name}{perc:3d}%] {monitor.message}].format, parameter[]]
call[name[log].log, parameter[name[level], name[msg]]]
return[name[listen]] | keyword[def] identifier[log_listener] ( identifier[log] : identifier[logging] . identifier[Logger] = keyword[None] , identifier[level] = identifier[logging] . identifier[INFO] ):
literal[string]
keyword[if] identifier[log] keyword[is] keyword[None] :
identifier[log] = identifier[logging] . identifier[getLogger] ( literal[string] )
keyword[def] identifier[listen] ( identifier[monitor] ):
identifier[name] = literal[string] . identifier[format] ( identifier[monitor] . identifier[name] ) keyword[if] identifier[monitor] . identifier[name] keyword[is] keyword[not] keyword[None] keyword[else] literal[string]
identifier[perc] = identifier[int] ( identifier[monitor] . identifier[progress] * literal[int] )
identifier[msg] = literal[string] . identifier[format] (** identifier[locals] ())
identifier[log] . identifier[log] ( identifier[level] , identifier[msg] )
keyword[return] identifier[listen] | def log_listener(log: logging.Logger=None, level=logging.INFO):
"""Progress Monitor listener that logs all updates to the given logger"""
if log is None:
log = logging.getLogger('ProgressMonitor') # depends on [control=['if'], data=['log']]
def listen(monitor):
name = '{}: '.format(monitor.name) if monitor.name is not None else ''
perc = int(monitor.progress * 100)
msg = '[{name}{perc:3d}%] {monitor.message}'.format(**locals())
log.log(level, msg)
return listen |
def json(self, json):
    """
    Defines the JSON body to match.

    ``json`` argument can be an JSON string, a JSON serializable
    Python structure, such as a ``dict`` or ``list`` or it can be
    a regular expression used to match the body.

    Arguments:
        json (str|dict|list|regex): body JSON to match.

    Returns:
        self: current Mock instance.
    """
    self._request.json = json
    self.add_matcher(matcher('JSONMatcher', json))
    # Fix: the docstring documents a fluent API ("Returns: self"), but the
    # instance was never returned, breaking call chaining for callers.
    return self
constant[
Defines the JSON body to match.
``json`` argument can be an JSON string, a JSON serializable
Python structure, such as a ``dict`` or ``list`` or it can be
a regular expression used to match the body.
Arguments:
json (str|dict|list|regex): body JSON to match.
Returns:
self: current Mock instance.
]
name[self]._request.json assign[=] name[json]
call[name[self].add_matcher, parameter[call[name[matcher], parameter[constant[JSONMatcher], name[json]]]]] | keyword[def] identifier[json] ( identifier[self] , identifier[json] ):
literal[string]
identifier[self] . identifier[_request] . identifier[json] = identifier[json]
identifier[self] . identifier[add_matcher] ( identifier[matcher] ( literal[string] , identifier[json] )) | def json(self, json):
"""
Defines the JSON body to match.
``json`` argument can be an JSON string, a JSON serializable
Python structure, such as a ``dict`` or ``list`` or it can be
a regular expression used to match the body.
Arguments:
json (str|dict|list|regex): body JSON to match.
Returns:
self: current Mock instance.
"""
self._request.json = json
self.add_matcher(matcher('JSONMatcher', json)) |
def log_player_trades_with_other_player(self, player, to_other, other, to_player):
    """
    Log a resource trade between two players.

    :param player: catan.game.Player
    :param to_other: list of tuples, [(int, game.board.Terrain), (int, game.board.Terrain)]
    :param other: catan.board.Player
    :param to_player: list of tuples, [(int, game.board.Terrain), (int, game.board.Terrain)]
    """
    def _log_resource_list(resources):
        # Emits e.g. "[2 wood, 1 brick]" piece by piece via self._log.
        self._log('[')
        for idx, (count, terrain) in enumerate(resources):
            if idx > 0:
                self._log(', ')
            self._log('{0} {1}'.format(count, terrain.value))
        self._log(']')

    self._log('{0} trades '.format(player.color))
    _log_resource_list(to_other)
    self._log(' to player {0} for '.format(other.color))
    _log_resource_list(to_player)
    self._log('\n')
constant[
:param player: catan.game.Player
:param to_other: list of tuples, [(int, game.board.Terrain), (int, game.board.Terrain)]
:param other: catan.board.Player
:param to_player: list of tuples, [(int, game.board.Terrain), (int, game.board.Terrain)]
]
call[name[self]._log, parameter[call[constant[{0} trades ].format, parameter[name[player].color]]]]
call[name[self]._log, parameter[constant[[]]]
for taget[tuple[[<ast.Name object at 0x7da2044c1960>, <ast.Tuple object at 0x7da2044c3820>]]] in starred[call[name[enumerate], parameter[name[to_other]]]] begin[:]
if compare[name[i] greater[>] constant[0]] begin[:]
call[name[self]._log, parameter[constant[, ]]]
call[name[self]._log, parameter[call[constant[{0} {1}].format, parameter[name[num], name[res].value]]]]
call[name[self]._log, parameter[constant[]]]]
call[name[self]._log, parameter[call[constant[ to player {0} for ].format, parameter[name[other].color]]]]
call[name[self]._log, parameter[constant[[]]]
for taget[tuple[[<ast.Name object at 0x7da2054a7820>, <ast.Tuple object at 0x7da2054a54b0>]]] in starred[call[name[enumerate], parameter[name[to_player]]]] begin[:]
if compare[name[i] greater[>] constant[0]] begin[:]
call[name[self]._log, parameter[constant[, ]]]
call[name[self]._log, parameter[call[constant[{0} {1}].format, parameter[name[num], name[res].value]]]]
call[name[self]._log, parameter[constant[]]]]
call[name[self]._log, parameter[constant[
]]] | keyword[def] identifier[log_player_trades_with_other_player] ( identifier[self] , identifier[player] , identifier[to_other] , identifier[other] , identifier[to_player] ):
literal[string]
identifier[self] . identifier[_log] ( literal[string] . identifier[format] ( identifier[player] . identifier[color] ))
identifier[self] . identifier[_log] ( literal[string] )
keyword[for] identifier[i] ,( identifier[num] , identifier[res] ) keyword[in] identifier[enumerate] ( identifier[to_other] ):
keyword[if] identifier[i] > literal[int] :
identifier[self] . identifier[_log] ( literal[string] )
identifier[self] . identifier[_log] ( literal[string] . identifier[format] ( identifier[num] , identifier[res] . identifier[value] ))
identifier[self] . identifier[_log] ( literal[string] )
identifier[self] . identifier[_log] ( literal[string] . identifier[format] ( identifier[other] . identifier[color] ))
identifier[self] . identifier[_log] ( literal[string] )
keyword[for] identifier[i] ,( identifier[num] , identifier[res] ) keyword[in] identifier[enumerate] ( identifier[to_player] ):
keyword[if] identifier[i] > literal[int] :
identifier[self] . identifier[_log] ( literal[string] )
identifier[self] . identifier[_log] ( literal[string] . identifier[format] ( identifier[num] , identifier[res] . identifier[value] ))
identifier[self] . identifier[_log] ( literal[string] )
identifier[self] . identifier[_log] ( literal[string] ) | def log_player_trades_with_other_player(self, player, to_other, other, to_player):
"""
:param player: catan.game.Player
:param to_other: list of tuples, [(int, game.board.Terrain), (int, game.board.Terrain)]
:param other: catan.board.Player
:param to_player: list of tuples, [(int, game.board.Terrain), (int, game.board.Terrain)]
"""
self._log('{0} trades '.format(player.color))
# to_other items
self._log('[')
for (i, (num, res)) in enumerate(to_other):
if i > 0:
self._log(', ') # depends on [control=['if'], data=[]]
self._log('{0} {1}'.format(num, res.value)) # depends on [control=['for'], data=[]]
self._log(']')
self._log(' to player {0} for '.format(other.color))
# to_player items
self._log('[')
for (i, (num, res)) in enumerate(to_player):
if i > 0:
self._log(', ') # depends on [control=['if'], data=[]]
self._log('{0} {1}'.format(num, res.value)) # depends on [control=['for'], data=[]]
self._log(']')
self._log('\n') |
def create(deg, p=0.75, mode='x', tags=None):
    """Vel factory function: build a RandomRotate augmentation."""
    transform_cls = RandomRotate
    return transform_cls(deg, p, mode, tags)
constant[ Vel factory function ]
return[call[name[RandomRotate], parameter[name[deg], name[p], name[mode], name[tags]]]] | keyword[def] identifier[create] ( identifier[deg] , identifier[p] = literal[int] , identifier[mode] = literal[string] , identifier[tags] = keyword[None] ):
literal[string]
keyword[return] identifier[RandomRotate] ( identifier[deg] , identifier[p] , identifier[mode] , identifier[tags] ) | def create(deg, p=0.75, mode='x', tags=None):
""" Vel factory function """
return RandomRotate(deg, p, mode, tags) |
def flexifunction_set_send(self, target_system, target_component, force_mavlink1=False):
    '''
    Depreciated but used as a compiler flag. Do not remove

    target_system             : System ID (uint8_t)
    target_component          : Component ID (uint8_t)

    '''
    # Encode the message first, then hand it to the generic sender.
    encoded = self.flexifunction_set_encode(target_system, target_component)
    return self.send(encoded, force_mavlink1=force_mavlink1)
constant[
Depreciated but used as a compiler flag. Do not remove
target_system : System ID (uint8_t)
target_component : Component ID (uint8_t)
]
return[call[name[self].send, parameter[call[name[self].flexifunction_set_encode, parameter[name[target_system], name[target_component]]]]]] | keyword[def] identifier[flexifunction_set_send] ( identifier[self] , identifier[target_system] , identifier[target_component] , identifier[force_mavlink1] = keyword[False] ):
literal[string]
keyword[return] identifier[self] . identifier[send] ( identifier[self] . identifier[flexifunction_set_encode] ( identifier[target_system] , identifier[target_component] ), identifier[force_mavlink1] = identifier[force_mavlink1] ) | def flexifunction_set_send(self, target_system, target_component, force_mavlink1=False):
"""
Depreciated but used as a compiler flag. Do not remove
target_system : System ID (uint8_t)
target_component : Component ID (uint8_t)
"""
return self.send(self.flexifunction_set_encode(target_system, target_component), force_mavlink1=force_mavlink1) |
def get_signature(self, value):
    """Return the base64-encoded signature for the given value."""
    payload = want_bytes(value)
    raw_sig = self.algorithm.get_signature(self.derive_key(), payload)
    return base64_encode(raw_sig)
constant[Returns the signature for the given value]
variable[value] assign[=] call[name[want_bytes], parameter[name[value]]]
variable[key] assign[=] call[name[self].derive_key, parameter[]]
variable[sig] assign[=] call[name[self].algorithm.get_signature, parameter[name[key], name[value]]]
return[call[name[base64_encode], parameter[name[sig]]]] | keyword[def] identifier[get_signature] ( identifier[self] , identifier[value] ):
literal[string]
identifier[value] = identifier[want_bytes] ( identifier[value] )
identifier[key] = identifier[self] . identifier[derive_key] ()
identifier[sig] = identifier[self] . identifier[algorithm] . identifier[get_signature] ( identifier[key] , identifier[value] )
keyword[return] identifier[base64_encode] ( identifier[sig] ) | def get_signature(self, value):
"""Returns the signature for the given value"""
value = want_bytes(value)
key = self.derive_key()
sig = self.algorithm.get_signature(key, value)
return base64_encode(sig) |
def xieta_from_radecl(inra, indecl,
                      incenterra, incenterdecl,
                      deg=True):
    '''This returns the image-plane projected xi-eta coords for inra, indecl.

    Parameters
    ----------

    inra,indecl : array-like
        The equatorial coordinates to get the xi, eta coordinates for in decimal
        degrees or radians.

    incenterra,incenterdecl : float
        The center coordinate values to use to calculate the plane-projected
        coordinates around.

    deg : bool
        If this is True, the input angles are assumed to be in degrees and the
        output is in degrees as well.

    Returns
    -------

    tuple of np.arrays
        This is the (`xi`, `eta`) coordinate pairs corresponding to the
        image-plane projected coordinates for each pair of input equatorial
        coordinates in (`inra`, `indecl`).

    '''

    if deg:

        ra = np.radians(inra)
        decl = np.radians(indecl)
        centerra = np.radians(incenterra)
        centerdecl = np.radians(incenterdecl)

    else:

        ra = inra
        decl = indecl
        centerra = incenterra
        centerdecl = incenterdecl

    # sines/cosines of the projection center
    cdecc = np.cos(centerdecl)
    sdecc = np.sin(centerdecl)
    crac = np.cos(centerra)
    srac = np.sin(centerra)

    # unit vectors of the input positions
    uu = np.cos(decl)*np.cos(ra)
    vv = np.cos(decl)*np.sin(ra)
    ww = np.sin(decl)

    # rotate into the frame whose x-axis points at the projection center
    uun = uu*cdecc*crac + vv*cdecc*srac + ww*sdecc
    vvn = -uu*srac + vv*crac
    wwn = -uu*sdecc*crac - vv*sdecc*srac + ww*cdecc
    denom = vvn*vvn + wwn*wwn

    # angular distance from the center; uun can be >= 1.0 only through
    # float round-off, in which case the angle is taken as exactly 0.0.
    # Fix: the original assigned the FULL-size array np.arccos(uun) into the
    # boolean-masked slice, which raises ValueError whenever any element has
    # uun >= 1.0 -- the RHS must be masked with the same condition.
    aunn = np.zeros_like(uun)
    lt_one = uun < 1.0
    aunn[lt_one] = np.arccos(uun[lt_one])

    # Fix: the compute mask must be the complement of the zero-out mask
    # (aunn <= 0) | (denom <= 0), i.e. (aunn > 0) & (denom > 0) -- the
    # original used `|`, overlapping the two masks, and again assigned a
    # full-size RHS into a masked slice. Elements outside `good` keep the
    # 0.0 from np.zeros_like.
    good = (aunn > 0.0) & (denom > 0.0)

    xi, eta = np.zeros_like(aunn), np.zeros_like(aunn)
    sdenom = np.sqrt(denom)
    xi[good] = (aunn*vvn)[good]/sdenom[good]
    eta[good] = (aunn*wwn)[good]/sdenom[good]

    if deg:
        return np.degrees(xi), np.degrees(eta)
    else:
        return xi, eta
constant[This returns the image-plane projected xi-eta coords for inra, indecl.
Parameters
----------
inra,indecl : array-like
The equatorial coordinates to get the xi, eta coordinates for in decimal
degrees or radians.
incenterra,incenterdecl : float
The center coordinate values to use to calculate the plane-projected
coordinates around.
deg : bool
If this is True, the input angles are assumed to be in degrees and the
output is in degrees as well.
Returns
-------
tuple of np.arrays
This is the (`xi`, `eta`) coordinate pairs corresponding to the
image-plane projected coordinates for each pair of input equatorial
coordinates in (`inra`, `indecl`).
]
if name[deg] begin[:]
variable[ra] assign[=] call[name[np].radians, parameter[name[inra]]]
variable[decl] assign[=] call[name[np].radians, parameter[name[indecl]]]
variable[centerra] assign[=] call[name[np].radians, parameter[name[incenterra]]]
variable[centerdecl] assign[=] call[name[np].radians, parameter[name[incenterdecl]]]
variable[cdecc] assign[=] call[name[np].cos, parameter[name[centerdecl]]]
variable[sdecc] assign[=] call[name[np].sin, parameter[name[centerdecl]]]
variable[crac] assign[=] call[name[np].cos, parameter[name[centerra]]]
variable[srac] assign[=] call[name[np].sin, parameter[name[centerra]]]
variable[uu] assign[=] binary_operation[call[name[np].cos, parameter[name[decl]]] * call[name[np].cos, parameter[name[ra]]]]
variable[vv] assign[=] binary_operation[call[name[np].cos, parameter[name[decl]]] * call[name[np].sin, parameter[name[ra]]]]
variable[ww] assign[=] call[name[np].sin, parameter[name[decl]]]
variable[uun] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[name[uu] * name[cdecc]] * name[crac]] + binary_operation[binary_operation[name[vv] * name[cdecc]] * name[srac]]] + binary_operation[name[ww] * name[sdecc]]]
variable[vvn] assign[=] binary_operation[binary_operation[<ast.UnaryOp object at 0x7da2054a79a0> * name[srac]] + binary_operation[name[vv] * name[crac]]]
variable[wwn] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[<ast.UnaryOp object at 0x7da2054a40a0> * name[sdecc]] * name[crac]] - binary_operation[binary_operation[name[vv] * name[sdecc]] * name[srac]]] + binary_operation[name[ww] * name[cdecc]]]
variable[denom] assign[=] binary_operation[binary_operation[name[vvn] * name[vvn]] + binary_operation[name[wwn] * name[wwn]]]
variable[aunn] assign[=] call[name[np].zeros_like, parameter[name[uun]]]
call[name[aunn]][compare[name[uun] greater_or_equal[>=] constant[1.0]]] assign[=] constant[0.0]
call[name[aunn]][compare[name[uun] less[<] constant[1.0]]] assign[=] call[name[np].arccos, parameter[name[uun]]]
<ast.Tuple object at 0x7da2054a5870> assign[=] tuple[[<ast.Call object at 0x7da2054a7820>, <ast.Call object at 0x7da2054a68f0>]]
call[name[xi]][binary_operation[compare[name[aunn] less_or_equal[<=] constant[0.0]] <ast.BitOr object at 0x7da2590d6aa0> compare[name[denom] less_or_equal[<=] constant[0.0]]]] assign[=] constant[0.0]
call[name[eta]][binary_operation[compare[name[aunn] less_or_equal[<=] constant[0.0]] <ast.BitOr object at 0x7da2590d6aa0> compare[name[denom] less_or_equal[<=] constant[0.0]]]] assign[=] constant[0.0]
variable[sdenom] assign[=] call[name[np].sqrt, parameter[name[denom]]]
call[name[xi]][binary_operation[compare[name[aunn] greater[>] constant[0.0]] <ast.BitOr object at 0x7da2590d6aa0> compare[name[denom] greater[>] constant[0.0]]]] assign[=] binary_operation[binary_operation[name[aunn] * name[vvn]] / name[sdenom]]
call[name[eta]][binary_operation[compare[name[aunn] greater[>] constant[0.0]] <ast.BitOr object at 0x7da2590d6aa0> compare[name[denom] greater[>] constant[0.0]]]] assign[=] binary_operation[binary_operation[name[aunn] * name[wwn]] / name[sdenom]]
if name[deg] begin[:]
return[tuple[[<ast.Call object at 0x7da204346ce0>, <ast.Call object at 0x7da2043441c0>]]] | keyword[def] identifier[xieta_from_radecl] ( identifier[inra] , identifier[indecl] ,
identifier[incenterra] , identifier[incenterdecl] ,
identifier[deg] = keyword[True] ):
literal[string]
keyword[if] identifier[deg] :
identifier[ra] = identifier[np] . identifier[radians] ( identifier[inra] )
identifier[decl] = identifier[np] . identifier[radians] ( identifier[indecl] )
identifier[centerra] = identifier[np] . identifier[radians] ( identifier[incenterra] )
identifier[centerdecl] = identifier[np] . identifier[radians] ( identifier[incenterdecl] )
keyword[else] :
identifier[ra] = identifier[inra]
identifier[decl] = identifier[indecl]
identifier[centerra] = identifier[incenterra]
identifier[centerdecl] = identifier[incenterdecl]
identifier[cdecc] = identifier[np] . identifier[cos] ( identifier[centerdecl] )
identifier[sdecc] = identifier[np] . identifier[sin] ( identifier[centerdecl] )
identifier[crac] = identifier[np] . identifier[cos] ( identifier[centerra] )
identifier[srac] = identifier[np] . identifier[sin] ( identifier[centerra] )
identifier[uu] = identifier[np] . identifier[cos] ( identifier[decl] )* identifier[np] . identifier[cos] ( identifier[ra] )
identifier[vv] = identifier[np] . identifier[cos] ( identifier[decl] )* identifier[np] . identifier[sin] ( identifier[ra] )
identifier[ww] = identifier[np] . identifier[sin] ( identifier[decl] )
identifier[uun] = identifier[uu] * identifier[cdecc] * identifier[crac] + identifier[vv] * identifier[cdecc] * identifier[srac] + identifier[ww] * identifier[sdecc]
identifier[vvn] =- identifier[uu] * identifier[srac] + identifier[vv] * identifier[crac]
identifier[wwn] =- identifier[uu] * identifier[sdecc] * identifier[crac] - identifier[vv] * identifier[sdecc] * identifier[srac] + identifier[ww] * identifier[cdecc]
identifier[denom] = identifier[vvn] * identifier[vvn] + identifier[wwn] * identifier[wwn]
identifier[aunn] = identifier[np] . identifier[zeros_like] ( identifier[uun] )
identifier[aunn] [ identifier[uun] >= literal[int] ]= literal[int]
identifier[aunn] [ identifier[uun] < literal[int] ]= identifier[np] . identifier[arccos] ( identifier[uun] )
identifier[xi] , identifier[eta] = identifier[np] . identifier[zeros_like] ( identifier[aunn] ), identifier[np] . identifier[zeros_like] ( identifier[aunn] )
identifier[xi] [( identifier[aunn] <= literal[int] )|( identifier[denom] <= literal[int] )]= literal[int]
identifier[eta] [( identifier[aunn] <= literal[int] )|( identifier[denom] <= literal[int] )]= literal[int]
identifier[sdenom] = identifier[np] . identifier[sqrt] ( identifier[denom] )
identifier[xi] [( identifier[aunn] > literal[int] )|( identifier[denom] > literal[int] )]= identifier[aunn] * identifier[vvn] / identifier[sdenom]
identifier[eta] [( identifier[aunn] > literal[int] )|( identifier[denom] > literal[int] )]= identifier[aunn] * identifier[wwn] / identifier[sdenom]
keyword[if] identifier[deg] :
keyword[return] identifier[np] . identifier[degrees] ( identifier[xi] ), identifier[np] . identifier[degrees] ( identifier[eta] )
keyword[else] :
keyword[return] identifier[xi] , identifier[eta] | def xieta_from_radecl(inra, indecl, incenterra, incenterdecl, deg=True):
"""This returns the image-plane projected xi-eta coords for inra, indecl.
Parameters
----------
inra,indecl : array-like
The equatorial coordinates to get the xi, eta coordinates for in decimal
degrees or radians.
incenterra,incenterdecl : float
The center coordinate values to use to calculate the plane-projected
coordinates around.
deg : bool
If this is True, the input angles are assumed to be in degrees and the
output is in degrees as well.
Returns
-------
tuple of np.arrays
This is the (`xi`, `eta`) coordinate pairs corresponding to the
image-plane projected coordinates for each pair of input equatorial
coordinates in (`inra`, `indecl`).
"""
if deg:
ra = np.radians(inra)
decl = np.radians(indecl)
centerra = np.radians(incenterra)
centerdecl = np.radians(incenterdecl) # depends on [control=['if'], data=[]]
else:
ra = inra
decl = indecl
centerra = incenterra
centerdecl = incenterdecl
cdecc = np.cos(centerdecl)
sdecc = np.sin(centerdecl)
crac = np.cos(centerra)
srac = np.sin(centerra)
uu = np.cos(decl) * np.cos(ra)
vv = np.cos(decl) * np.sin(ra)
ww = np.sin(decl)
uun = uu * cdecc * crac + vv * cdecc * srac + ww * sdecc
vvn = -uu * srac + vv * crac
wwn = -uu * sdecc * crac - vv * sdecc * srac + ww * cdecc
denom = vvn * vvn + wwn * wwn
aunn = np.zeros_like(uun)
aunn[uun >= 1.0] = 0.0
aunn[uun < 1.0] = np.arccos(uun)
(xi, eta) = (np.zeros_like(aunn), np.zeros_like(aunn))
xi[(aunn <= 0.0) | (denom <= 0.0)] = 0.0
eta[(aunn <= 0.0) | (denom <= 0.0)] = 0.0
sdenom = np.sqrt(denom)
xi[(aunn > 0.0) | (denom > 0.0)] = aunn * vvn / sdenom
eta[(aunn > 0.0) | (denom > 0.0)] = aunn * wwn / sdenom
if deg:
return (np.degrees(xi), np.degrees(eta)) # depends on [control=['if'], data=[]]
else:
return (xi, eta) |
def create_subjects_file(filelist, labels, output_file, split=':'):
    """Creates a file where each line is <subject_file>:<subject_class_label>.

    Parameters
    ----------
    filelist: list of str
        List of filepaths

    labels: list of int, str or labels that can be transformed with str()
        List of labels

    output_file: str
        Output file path

    split: str
        Split character for each line

    Raises
    ------
    ValueError
        If `filelist` and `labels` have different lengths.
    """
    if len(filelist) != len(labels):
        # Fix: the original implicit string concatenation dropped the space
        # between the two sentences ("...length.Got 1 and 2.").
        raise ValueError('Expected `filelist` and `labels` to have the same '
                         'length. Got {} and {}.'.format(len(filelist),
                                                         len(labels)))

    # Pair each file with its label instead of indexing by position.
    lines = np.array([subj + split + str(lab)
                      for subj, lab in zip(filelist, labels)])
    np.savetxt(output_file, lines, fmt='%s')
constant[Creates a file where each line is <subject_file>:<subject_class_label>.
Parameters
----------
filelist: list of str
List of filepaths
labels: list of int, str or labels that can be transformed with str()
List of labels
output_file: str
Output file path
split: str
Split character for each line
]
if compare[call[name[len], parameter[name[filelist]]] not_equal[!=] call[name[len], parameter[name[labels]]]] begin[:]
<ast.Raise object at 0x7da1b004da20>
variable[lines] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b004ff10>, <ast.Name object at 0x7da1b004fe50>]]] in starred[call[name[enumerate], parameter[name[filelist]]]] begin[:]
variable[lab] assign[=] call[name[labels]][name[i]]
variable[line] assign[=] binary_operation[binary_operation[name[subj] + name[split]] + call[name[str], parameter[name[lab]]]]
call[name[lines].append, parameter[name[line]]]
variable[lines] assign[=] call[name[np].array, parameter[name[lines]]]
call[name[np].savetxt, parameter[name[output_file], name[lines]]] | keyword[def] identifier[create_subjects_file] ( identifier[filelist] , identifier[labels] , identifier[output_file] , identifier[split] = literal[string] ):
literal[string]
keyword[if] identifier[len] ( identifier[filelist] )!= identifier[len] ( identifier[labels] ):
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] . identifier[format] ( identifier[len] ( identifier[filelist] ), identifier[len] ( identifier[labels] )))
identifier[lines] =[]
keyword[for] identifier[i] , identifier[subj] keyword[in] identifier[enumerate] ( identifier[filelist] ):
identifier[lab] = identifier[labels] [ identifier[i] ]
identifier[line] = identifier[subj] + identifier[split] + identifier[str] ( identifier[lab] )
identifier[lines] . identifier[append] ( identifier[line] )
identifier[lines] = identifier[np] . identifier[array] ( identifier[lines] )
identifier[np] . identifier[savetxt] ( identifier[output_file] , identifier[lines] , identifier[fmt] = literal[string] ) | def create_subjects_file(filelist, labels, output_file, split=':'):
"""Creates a file where each line is <subject_file>:<subject_class_label>.
Parameters
----------
filelist: list of str
List of filepaths
labels: list of int, str or labels that can be transformed with str()
List of labels
output_file: str
Output file path
split: str
Split character for each line
"""
if len(filelist) != len(labels):
raise ValueError('Expected `filelist` and `labels` to have the same length.Got {} and {}.'.format(len(filelist), len(labels))) # depends on [control=['if'], data=[]]
lines = []
for (i, subj) in enumerate(filelist):
lab = labels[i]
line = subj + split + str(lab)
lines.append(line) # depends on [control=['for'], data=[]]
lines = np.array(lines)
np.savetxt(output_file, lines, fmt='%s') |
def shutdown(self):
    """Manually stop the pool. This is only necessary from tests, as the
    pool will stop itself when the reactor stops under normal
    circumstances."""
    trigger = self._stop_evt
    if trigger:
        # Detach the reactor shutdown hook, then stop the pool ourselves.
        self.reactor.removeSystemEventTrigger(trigger)
        self._stop()
    # else: pool is already stopped -- nothing to do
constant[Manually stop the pool. This is only necessary from tests, as the
pool will stop itself when the reactor stops under normal
circumstances.]
if <ast.UnaryOp object at 0x7da18dc042b0> begin[:]
return[None]
call[name[self].reactor.removeSystemEventTrigger, parameter[name[self]._stop_evt]]
call[name[self]._stop, parameter[]] | keyword[def] identifier[shutdown] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[_stop_evt] :
keyword[return]
identifier[self] . identifier[reactor] . identifier[removeSystemEventTrigger] ( identifier[self] . identifier[_stop_evt] )
identifier[self] . identifier[_stop] () | def shutdown(self):
"""Manually stop the pool. This is only necessary from tests, as the
pool will stop itself when the reactor stops under normal
circumstances."""
if not self._stop_evt:
return # pool is already stopped # depends on [control=['if'], data=[]]
self.reactor.removeSystemEventTrigger(self._stop_evt)
self._stop() |
def iter_genotypes(self):
    """Iterates on available markers.

    Returns:
        Genotypes instances.
    """
    quality_field = self.quality_field
    for record in self.get_vcf():
        # All observed alleles: the reference plus every alternative.
        allele_set = set(record.ALT)
        allele_set.add(record.REF)

        if quality_field:
            variant = ImputedVariant(record.ID, record.CHROM, record.POS,
                                     allele_set,
                                     getattr(record, quality_field))
        else:
            variant = Variant(record.ID, record.CHROM, record.POS, allele_set)

        is_multiallelic = len(record.ALT) > 1
        for coded_allele, geno in self._make_genotypes(record.ALT,
                                                       record.genotypes):
            yield Genotypes(variant, geno, record.REF, coded_allele,
                            multiallelic=is_multiallelic)
constant[Iterates on available markers.
Returns:
Genotypes instances.
]
for taget[name[v]] in starred[call[name[self].get_vcf, parameter[]]] begin[:]
variable[alleles] assign[=] binary_operation[<ast.Set object at 0x7da1b22400a0> <ast.BitOr object at 0x7da2590d6aa0> call[name[set], parameter[name[v].ALT]]]
if name[self].quality_field begin[:]
variable[variant] assign[=] call[name[ImputedVariant], parameter[name[v].ID, name[v].CHROM, name[v].POS, name[alleles], call[name[getattr], parameter[name[v], name[self].quality_field]]]]
for taget[tuple[[<ast.Name object at 0x7da1b2242950>, <ast.Name object at 0x7da1b2240820>]]] in starred[call[name[self]._make_genotypes, parameter[name[v].ALT, name[v].genotypes]]] begin[:]
<ast.Yield object at 0x7da1b220b940> | keyword[def] identifier[iter_genotypes] ( identifier[self] ):
literal[string]
keyword[for] identifier[v] keyword[in] identifier[self] . identifier[get_vcf] ():
identifier[alleles] ={ identifier[v] . identifier[REF] }| identifier[set] ( identifier[v] . identifier[ALT] )
keyword[if] identifier[self] . identifier[quality_field] :
identifier[variant] = identifier[ImputedVariant] ( identifier[v] . identifier[ID] , identifier[v] . identifier[CHROM] , identifier[v] . identifier[POS] , identifier[alleles] ,
identifier[getattr] ( identifier[v] , identifier[self] . identifier[quality_field] ))
keyword[else] :
identifier[variant] = identifier[Variant] ( identifier[v] . identifier[ID] , identifier[v] . identifier[CHROM] , identifier[v] . identifier[POS] , identifier[alleles] )
keyword[for] identifier[coded_allele] , identifier[g] keyword[in] identifier[self] . identifier[_make_genotypes] ( identifier[v] . identifier[ALT] , identifier[v] . identifier[genotypes] ):
keyword[yield] identifier[Genotypes] ( identifier[variant] , identifier[g] , identifier[v] . identifier[REF] , identifier[coded_allele] ,
identifier[multiallelic] = identifier[len] ( identifier[v] . identifier[ALT] )> literal[int] ) | def iter_genotypes(self):
"""Iterates on available markers.
Returns:
Genotypes instances.
"""
for v in self.get_vcf():
alleles = {v.REF} | set(v.ALT)
if self.quality_field:
variant = ImputedVariant(v.ID, v.CHROM, v.POS, alleles, getattr(v, self.quality_field)) # depends on [control=['if'], data=[]]
else:
variant = Variant(v.ID, v.CHROM, v.POS, alleles)
for (coded_allele, g) in self._make_genotypes(v.ALT, v.genotypes):
yield Genotypes(variant, g, v.REF, coded_allele, multiallelic=len(v.ALT) > 1) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['v']] |
def check_transport_host(self):
    """
    Check if zeromq socket is available
    on transport host

    Returns True when TCP port 8080 on 'events-server' accepts a
    connection, False otherwise.
    """
    # Fix: the original never closed the socket, leaking a file
    # descriptor on every call.
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        result = sock.connect_ex(('events-server', 8080))
    finally:
        sock.close()
    if result == 0:
        logging.info('port 8080 on zmq is open!')
        return True
    return False
constant[
Check if zeromq socket is available
on transport host
]
variable[sock] assign[=] call[name[socket].socket, parameter[name[socket].AF_INET, name[socket].SOCK_STREAM]]
variable[result] assign[=] call[name[sock].connect_ex, parameter[tuple[[<ast.Constant object at 0x7da1b2248220>, <ast.Constant object at 0x7da1b224ab30>]]]]
if compare[name[result] equal[==] constant[0]] begin[:]
call[name[logging].info, parameter[constant[port 8080 on zmq is open!]]]
return[constant[True]]
return[constant[False]] | keyword[def] identifier[check_transport_host] ( identifier[self] ):
literal[string]
identifier[sock] = identifier[socket] . identifier[socket] ( identifier[socket] . identifier[AF_INET] , identifier[socket] . identifier[SOCK_STREAM] )
identifier[result] = identifier[sock] . identifier[connect_ex] (( literal[string] , literal[int] ))
keyword[if] identifier[result] == literal[int] :
identifier[logging] . identifier[info] ( literal[string] )
keyword[return] keyword[True]
keyword[return] keyword[False] | def check_transport_host(self):
"""
Check if zeromq socket is available
on transport host
"""
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
result = sock.connect_ex(('events-server', 8080))
if result == 0:
logging.info('port 8080 on zmq is open!')
return True # depends on [control=['if'], data=[]]
return False |
def gauge(self, stat, value, tags=None):
    """Set a gauge.

    Args:
        stat: metric name to record under.
        value: current gauge reading.
        tags: optional tags to attach to the sample.
    """
    stats_client = self.client
    stats_client.gauge(metric=stat, value=value, tags=tags)
constant[Set a gauge.]
call[name[self].client.gauge, parameter[]] | keyword[def] identifier[gauge] ( identifier[self] , identifier[stat] , identifier[value] , identifier[tags] = keyword[None] ):
literal[string]
identifier[self] . identifier[client] . identifier[gauge] ( identifier[metric] = identifier[stat] , identifier[value] = identifier[value] , identifier[tags] = identifier[tags] ) | def gauge(self, stat, value, tags=None):
"""Set a gauge."""
self.client.gauge(metric=stat, value=value, tags=tags) |
def changeLane(self, vehID, laneIndex, duration):
    """changeLane(string, int, int) -> None

    Forces a lane change to the lane with the given index; if successful,
    the lane will be chosen for the given amount of time (in ms).
    """
    # Payload size 1+4+1+1+1+4 = compound marker (1) + item count (4)
    # + byte type tag (1) + laneIndex (1) + integer type tag (1)
    # + duration (4).
    self._connection._beginMessage(
        tc.CMD_SET_VEHICLE_VARIABLE, tc.CMD_CHANGELANE, vehID, 1 + 4 + 1 + 1 + 1 + 4)
    # "!BiBBBi": network byte order -- TYPE_COMPOUND (B), item count 2 (i),
    # then (type tag, value) pairs: TYPE_BYTE/laneIndex (B,B) and
    # TYPE_INTEGER/duration (B,i).
    self._connection._string += struct.pack(
        "!BiBBBi", tc.TYPE_COMPOUND, 2, tc.TYPE_BYTE, laneIndex, tc.TYPE_INTEGER, duration)
    self._connection._sendExact()
constant[changeLane(string, int, int) -> None
Forces a lane change to the lane with the given index; if successful,
the lane will be chosen for the given amount of time (in ms).
]
call[name[self]._connection._beginMessage, parameter[name[tc].CMD_SET_VEHICLE_VARIABLE, name[tc].CMD_CHANGELANE, name[vehID], binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[constant[1] + constant[4]] + constant[1]] + constant[1]] + constant[1]] + constant[4]]]]
<ast.AugAssign object at 0x7da1b095da20>
call[name[self]._connection._sendExact, parameter[]] | keyword[def] identifier[changeLane] ( identifier[self] , identifier[vehID] , identifier[laneIndex] , identifier[duration] ):
literal[string]
identifier[self] . identifier[_connection] . identifier[_beginMessage] (
identifier[tc] . identifier[CMD_SET_VEHICLE_VARIABLE] , identifier[tc] . identifier[CMD_CHANGELANE] , identifier[vehID] , literal[int] + literal[int] + literal[int] + literal[int] + literal[int] + literal[int] )
identifier[self] . identifier[_connection] . identifier[_string] += identifier[struct] . identifier[pack] (
literal[string] , identifier[tc] . identifier[TYPE_COMPOUND] , literal[int] , identifier[tc] . identifier[TYPE_BYTE] , identifier[laneIndex] , identifier[tc] . identifier[TYPE_INTEGER] , identifier[duration] )
identifier[self] . identifier[_connection] . identifier[_sendExact] () | def changeLane(self, vehID, laneIndex, duration):
"""changeLane(string, int, int) -> None
Forces a lane change to the lane with the given index; if successful,
the lane will be chosen for the given amount of time (in ms).
"""
self._connection._beginMessage(tc.CMD_SET_VEHICLE_VARIABLE, tc.CMD_CHANGELANE, vehID, 1 + 4 + 1 + 1 + 1 + 4)
self._connection._string += struct.pack('!BiBBBi', tc.TYPE_COMPOUND, 2, tc.TYPE_BYTE, laneIndex, tc.TYPE_INTEGER, duration)
self._connection._sendExact() |
def get_width(self):
    """
    Return the area-weighted average width (in km) over all surface
    elements of this object.
    """
    weights = self._get_areas()
    per_surface = numpy.array([s.get_width() for s in self.surfaces])
    # Weighted mean: sum(area_i * width_i) / sum(area_i).
    return numpy.sum(weights * per_surface) / numpy.sum(weights)
constant[
Compute width of each surface element, and return area-weighted
average value (in km).
]
variable[areas] assign[=] call[name[self]._get_areas, parameter[]]
variable[widths] assign[=] call[name[numpy].array, parameter[<ast.ListComp object at 0x7da1b133f640>]]
return[binary_operation[call[name[numpy].sum, parameter[binary_operation[name[areas] * name[widths]]]] / call[name[numpy].sum, parameter[name[areas]]]]] | keyword[def] identifier[get_width] ( identifier[self] ):
literal[string]
identifier[areas] = identifier[self] . identifier[_get_areas] ()
identifier[widths] = identifier[numpy] . identifier[array] ([ identifier[surf] . identifier[get_width] () keyword[for] identifier[surf] keyword[in] identifier[self] . identifier[surfaces] ])
keyword[return] identifier[numpy] . identifier[sum] ( identifier[areas] * identifier[widths] )/ identifier[numpy] . identifier[sum] ( identifier[areas] ) | def get_width(self):
"""
Compute width of each surface element, and return area-weighted
average value (in km).
"""
areas = self._get_areas()
widths = numpy.array([surf.get_width() for surf in self.surfaces])
return numpy.sum(areas * widths) / numpy.sum(areas) |
def guess_function_name(next_line, regexps=FUNCTION_REGEXPS):
    """
    Try to infer the function name from the first code line following
    the comment.

    The candidate patterns are supplied by ``regexps`` (defaulting to
    FUNCTION_REGEXPS). Patterns are tried in order; the first one that
    matches wins and its first capture group is returned. If none of
    them match, None is returned.
    """
    candidates = (re.search(pattern, next_line) for pattern in regexps)
    return next((m.group(1) for m in candidates if m), None)
constant[
Attempt to determine the function name from the first code line
following the comment. The patterns recognized are described by
`regexps`, which defaults to FUNCTION_REGEXPS. If a match is successful,
returns the function name. Otherwise, returns None.
]
for taget[name[regexp]] in starred[name[regexps]] begin[:]
variable[match] assign[=] call[name[re].search, parameter[name[regexp], name[next_line]]]
if name[match] begin[:]
return[call[name[match].group, parameter[constant[1]]]]
return[constant[None]] | keyword[def] identifier[guess_function_name] ( identifier[next_line] , identifier[regexps] = identifier[FUNCTION_REGEXPS] ):
literal[string]
keyword[for] identifier[regexp] keyword[in] identifier[regexps] :
identifier[match] = identifier[re] . identifier[search] ( identifier[regexp] , identifier[next_line] )
keyword[if] identifier[match] :
keyword[return] identifier[match] . identifier[group] ( literal[int] )
keyword[return] keyword[None] | def guess_function_name(next_line, regexps=FUNCTION_REGEXPS):
"""
Attempt to determine the function name from the first code line
following the comment. The patterns recognized are described by
`regexps`, which defaults to FUNCTION_REGEXPS. If a match is successful,
returns the function name. Otherwise, returns None.
"""
for regexp in regexps:
match = re.search(regexp, next_line)
if match:
return match.group(1) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['regexp']]
return None |
def segment(self, line):
    """
    Tokenize a single sentence and wrap it with the special BOS and
    EOS markers.

    :param line: raw sentence text
    :returns: list of token ids representing the tokenized sentence
    """
    entry = [config.BOS]
    for token in line.strip().split():
        entry.append(self.tok2idx[token])
    entry.append(config.EOS)
    return entry
constant[
Tokenizes single sentence and adds special BOS and EOS tokens.
:param line: sentence
returns: list representing tokenized sentence
]
variable[line] assign[=] call[call[name[line].strip, parameter[]].split, parameter[]]
variable[entry] assign[=] <ast.ListComp object at 0x7da18f09cfd0>
variable[entry] assign[=] binary_operation[binary_operation[list[[<ast.Attribute object at 0x7da18f09e620>]] + name[entry]] + list[[<ast.Attribute object at 0x7da18f09e500>]]]
return[name[entry]] | keyword[def] identifier[segment] ( identifier[self] , identifier[line] ):
literal[string]
identifier[line] = identifier[line] . identifier[strip] (). identifier[split] ()
identifier[entry] =[ identifier[self] . identifier[tok2idx] [ identifier[i] ] keyword[for] identifier[i] keyword[in] identifier[line] ]
identifier[entry] =[ identifier[config] . identifier[BOS] ]+ identifier[entry] +[ identifier[config] . identifier[EOS] ]
keyword[return] identifier[entry] | def segment(self, line):
"""
Tokenizes single sentence and adds special BOS and EOS tokens.
:param line: sentence
returns: list representing tokenized sentence
"""
line = line.strip().split()
entry = [self.tok2idx[i] for i in line]
entry = [config.BOS] + entry + [config.EOS]
return entry |
def acts_as_state_machine(cls):
    """
    Class decorator that wires a class up as a state machine.

    Adds two read-only properties to ``cls``:

    * ``current_state``: the machine's current state, as a 'State'
      object (delegates to ``StateInfo.get_current_state``)
    * ``states``: all valid states of the machine, as 'State' objects
      (delegates to ``StateInfo.get_states`` on the instance's class)

    It also collects every function/method marked as a transition
    failure handler (flagged via the
    ``___pystatemachine_is_transition_failure_handler`` attribute),
    sorts them by their declared calling sequence, and stores the list
    on the class as ``___pystatemachine_transition_failure_handlers``.

    :param cls: class to decorate
    :return: the same class object, augmented as described above
    """
    # Refuse to clobber attributes the class (or a base) already defines.
    assert not hasattr(cls, 'current_state'), '{0} already has a "current_state" attribute!'.format(cls)
    assert not hasattr(cls, 'states'), '{0} already has a "states" attribute!'.format(cls)
    def get_states(obj):
        # Property getter: look states up on the instance's class.
        return StateInfo.get_states(obj.__class__)
    def is_transition_failure_handler(obj):
        # Predicate for inspect.getmembers: a plain function (py3) or
        # bound/unbound method (py2) carrying the handler marker flag.
        return all([
            any([
                inspect.ismethod(obj),  # python2
                inspect.isfunction(obj),  # python3
            ]),
            getattr(obj, '___pystatemachine_is_transition_failure_handler', False),
        ])
    # Handlers run in ascending calling-sequence order; unmarked
    # sequence defaults to 0.
    transition_failure_handlers = sorted(
        [value for name, value in inspect.getmembers(cls, is_transition_failure_handler)],
        key=lambda m: getattr(m, '___pystatemachine_transition_failure_handler_calling_sequence', 0),
    )
    setattr(cls, '___pystatemachine_transition_failure_handlers', transition_failure_handlers)
    cls.current_state = property(fget=StateInfo.get_current_state)
    cls.states = property(fget=get_states)
    return cls
constant[
a decorator which sets two properties on a class:
* the 'current_state' property: a read-only property, returning the state machine's current state, as 'State' object
* the 'states' property: a tuple of all valid state machine states, as 'State' objects
class objects may use current_state and states freely
:param cls:
:return:
]
assert[<ast.UnaryOp object at 0x7da1b0b394b0>]
assert[<ast.UnaryOp object at 0x7da1b0b397e0>]
def function[get_states, parameter[obj]]:
return[call[name[StateInfo].get_states, parameter[name[obj].__class__]]]
def function[is_transition_failure_handler, parameter[obj]]:
return[call[name[all], parameter[list[[<ast.Call object at 0x7da1b0b3af20>, <ast.Call object at 0x7da1b0b39000>]]]]]
variable[transition_failure_handlers] assign[=] call[name[sorted], parameter[<ast.ListComp object at 0x7da1b0b3a440>]]
call[name[setattr], parameter[name[cls], constant[___pystatemachine_transition_failure_handlers], name[transition_failure_handlers]]]
name[cls].current_state assign[=] call[name[property], parameter[]]
name[cls].states assign[=] call[name[property], parameter[]]
return[name[cls]] | keyword[def] identifier[acts_as_state_machine] ( identifier[cls] ):
literal[string]
keyword[assert] keyword[not] identifier[hasattr] ( identifier[cls] , literal[string] ), literal[string] . identifier[format] ( identifier[cls] )
keyword[assert] keyword[not] identifier[hasattr] ( identifier[cls] , literal[string] ), literal[string] . identifier[format] ( identifier[cls] )
keyword[def] identifier[get_states] ( identifier[obj] ):
keyword[return] identifier[StateInfo] . identifier[get_states] ( identifier[obj] . identifier[__class__] )
keyword[def] identifier[is_transition_failure_handler] ( identifier[obj] ):
keyword[return] identifier[all] ([
identifier[any] ([
identifier[inspect] . identifier[ismethod] ( identifier[obj] ),
identifier[inspect] . identifier[isfunction] ( identifier[obj] ),
]),
identifier[getattr] ( identifier[obj] , literal[string] , keyword[False] ),
])
identifier[transition_failure_handlers] = identifier[sorted] (
[ identifier[value] keyword[for] identifier[name] , identifier[value] keyword[in] identifier[inspect] . identifier[getmembers] ( identifier[cls] , identifier[is_transition_failure_handler] )],
identifier[key] = keyword[lambda] identifier[m] : identifier[getattr] ( identifier[m] , literal[string] , literal[int] ),
)
identifier[setattr] ( identifier[cls] , literal[string] , identifier[transition_failure_handlers] )
identifier[cls] . identifier[current_state] = identifier[property] ( identifier[fget] = identifier[StateInfo] . identifier[get_current_state] )
identifier[cls] . identifier[states] = identifier[property] ( identifier[fget] = identifier[get_states] )
keyword[return] identifier[cls] | def acts_as_state_machine(cls):
"""
a decorator which sets two properties on a class:
* the 'current_state' property: a read-only property, returning the state machine's current state, as 'State' object
* the 'states' property: a tuple of all valid state machine states, as 'State' objects
class objects may use current_state and states freely
:param cls:
:return:
"""
assert not hasattr(cls, 'current_state'), '{0} already has a "current_state" attribute!'.format(cls)
assert not hasattr(cls, 'states'), '{0} already has a "states" attribute!'.format(cls)
def get_states(obj):
return StateInfo.get_states(obj.__class__)
def is_transition_failure_handler(obj): # python2
# python3
return all([any([inspect.ismethod(obj), inspect.isfunction(obj)]), getattr(obj, '___pystatemachine_is_transition_failure_handler', False)])
transition_failure_handlers = sorted([value for (name, value) in inspect.getmembers(cls, is_transition_failure_handler)], key=lambda m: getattr(m, '___pystatemachine_transition_failure_handler_calling_sequence', 0))
setattr(cls, '___pystatemachine_transition_failure_handlers', transition_failure_handlers)
cls.current_state = property(fget=StateInfo.get_current_state)
cls.states = property(fget=get_states)
return cls |
def download_url(url, store_directory, save_name=None, messages=True, suffix=''):
    """Download a file from a url and save it to disk.

    A textual progress bar is written to stdout while downloading.

    :param url: URL to download from.
    :param store_directory: sub-directory of ``data_path`` to store the
        file in; created if it does not exist yet.
    :param save_name: name to save the file under; defaults to the last
        path component of ``url``.
    :param messages: currently unused; kept for backward compatibility
        with existing callers.
    :param suffix: string appended to ``url`` when opening the
        connection (``None`` is treated as the empty string).
    :raises ValueError: if the server responds with a 4xx (client) or
        5xx (server) HTTP status.
    """
    # Last path component of the URL (the whole URL if it has no '/').
    filename = url[url.rfind('/') + 1:]
    print(filename)
    dir_name = os.path.join(data_path, store_directory)
    if save_name is None:
        save_name = os.path.join(dir_name, filename)
    else:
        save_name = os.path.join(dir_name, save_name)
    if suffix is None:
        suffix = ''
    print("Downloading ", url, "->", save_name)
    if not os.path.exists(dir_name):
        os.makedirs(dir_name)
    try:
        response = urlopen(url + suffix)
    except URLError as e:
        if not hasattr(e, "code"):
            raise
        # HTTPError doubles as a file-like response object; reuse it so
        # the status code can be inspected and the body (if any) read.
        response = e
        if response.code > 399 and response.code < 500:
            raise ValueError('Tried url ' + url + suffix + ' and received client error ' + str(response.code))
        elif response.code > 499:
            raise ValueError('Tried url ' + url + suffix + ' and received server error ' + str(response.code))
    try:
        with open(save_name, 'wb') as f:
            meta = response.info()
            content_length_str = meta.get("Content-Length")
            if content_length_str:
                file_size = int(content_length_str)
            else:
                file_size = None
            status = ""
            file_size_dl = 0
            block_sz = 8192
            line_length = 30
            while True:
                buff = response.read(block_sz)
                if not buff:
                    break
                file_size_dl += len(buff)
                f.write(buff)
                # Erase the previous progress line before redrawing it.
                sys.stdout.write(" " * len(status) + "\r")
                if file_size:
                    status = r"[{perc: <{ll}}] {dl:7.3f}/{full:.3f}MB".format(
                        dl=file_size_dl / (1048576.),
                        full=file_size / (1048576.), ll=line_length,
                        perc="=" * int(line_length * float(file_size_dl) / file_size))
                else:
                    # Total size unknown: show one dot per ~10MB downloaded.
                    status = r"[{perc: <{ll}}] {dl:7.3f}MB".format(
                        dl=file_size_dl / (1048576.),
                        ll=line_length,
                        perc="." * int(line_length * float(file_size_dl / (10 * 1048576.))))
                sys.stdout.write(status)
                sys.stdout.flush()
            sys.stdout.write(" " * len(status) + "\r")
            print(status)
    finally:
        # The response is not used as a context manager above; close it
        # explicitly so the underlying socket is not leaked.
        response.close()
constant[Download a file from a url and save it to disk.]
variable[i] assign[=] call[name[url].rfind, parameter[constant[/]]]
variable[file] assign[=] call[name[url]][<ast.Slice object at 0x7da1b1c0dd20>]
call[name[print], parameter[name[file]]]
variable[dir_name] assign[=] call[name[os].path.join, parameter[name[data_path], name[store_directory]]]
if compare[name[save_name] is constant[None]] begin[:]
variable[save_name] assign[=] call[name[os].path.join, parameter[name[dir_name], name[file]]]
if compare[name[suffix] is constant[None]] begin[:]
variable[suffix] assign[=] constant[]
call[name[print], parameter[constant[Downloading ], name[url], constant[->], name[save_name]]]
if <ast.UnaryOp object at 0x7da1b1c0cb20> begin[:]
call[name[os].makedirs, parameter[name[dir_name]]]
<ast.Try object at 0x7da1b1c0e740>
with call[name[open], parameter[name[save_name], constant[wb]]] begin[:]
variable[meta] assign[=] call[name[response].info, parameter[]]
variable[content_length_str] assign[=] call[name[meta].get, parameter[constant[Content-Length]]]
if name[content_length_str] begin[:]
variable[file_size] assign[=] call[name[int], parameter[name[content_length_str]]]
variable[status] assign[=] constant[]
variable[file_size_dl] assign[=] constant[0]
variable[block_sz] assign[=] constant[8192]
variable[line_length] assign[=] constant[30]
while constant[True] begin[:]
variable[buff] assign[=] call[name[response].read, parameter[name[block_sz]]]
if <ast.UnaryOp object at 0x7da1b1c0eb60> begin[:]
break
<ast.AugAssign object at 0x7da1b1c0ec80>
call[name[f].write, parameter[name[buff]]]
call[name[sys].stdout.write, parameter[binary_operation[binary_operation[constant[ ] * call[name[len], parameter[name[status]]]] + constant[
]]]]
if name[file_size] begin[:]
variable[status] assign[=] call[constant[[{perc: <{ll}}] {dl:7.3f}/{full:.3f}MB].format, parameter[]]
call[name[sys].stdout.write, parameter[name[status]]]
call[name[sys].stdout.flush, parameter[]]
call[name[sys].stdout.write, parameter[binary_operation[binary_operation[constant[ ] * call[name[len], parameter[name[status]]]] + constant[
]]]]
call[name[print], parameter[name[status]]] | keyword[def] identifier[download_url] ( identifier[url] , identifier[store_directory] , identifier[save_name] = keyword[None] , identifier[messages] = keyword[True] , identifier[suffix] = literal[string] ):
literal[string]
identifier[i] = identifier[url] . identifier[rfind] ( literal[string] )
identifier[file] = identifier[url] [ identifier[i] + literal[int] :]
identifier[print] ( identifier[file] )
identifier[dir_name] = identifier[os] . identifier[path] . identifier[join] ( identifier[data_path] , identifier[store_directory] )
keyword[if] identifier[save_name] keyword[is] keyword[None] : identifier[save_name] = identifier[os] . identifier[path] . identifier[join] ( identifier[dir_name] , identifier[file] )
keyword[else] : identifier[save_name] = identifier[os] . identifier[path] . identifier[join] ( identifier[dir_name] , identifier[save_name] )
keyword[if] identifier[suffix] keyword[is] keyword[None] : identifier[suffix] = literal[string]
identifier[print] ( literal[string] , identifier[url] , literal[string] , identifier[save_name] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[dir_name] ):
identifier[os] . identifier[makedirs] ( identifier[dir_name] )
keyword[try] :
identifier[response] = identifier[urlopen] ( identifier[url] + identifier[suffix] )
keyword[except] identifier[URLError] keyword[as] identifier[e] :
keyword[if] keyword[not] identifier[hasattr] ( identifier[e] , literal[string] ):
keyword[raise]
identifier[response] = identifier[e]
keyword[if] identifier[response] . identifier[code] > literal[int] keyword[and] identifier[response] . identifier[code] < literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] + identifier[url] + identifier[suffix] + literal[string] + identifier[str] ( identifier[response] . identifier[code] ))
keyword[elif] identifier[response] . identifier[code] > literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] + identifier[url] + identifier[suffix] + literal[string] + identifier[str] ( identifier[response] . identifier[code] ))
keyword[with] identifier[open] ( identifier[save_name] , literal[string] ) keyword[as] identifier[f] :
identifier[meta] = identifier[response] . identifier[info] ()
identifier[content_length_str] = identifier[meta] . identifier[get] ( literal[string] )
keyword[if] identifier[content_length_str] :
identifier[file_size] = identifier[int] ( identifier[content_length_str] )
keyword[else] :
identifier[file_size] = keyword[None]
identifier[status] = literal[string]
identifier[file_size_dl] = literal[int]
identifier[block_sz] = literal[int]
identifier[line_length] = literal[int]
keyword[while] keyword[True] :
identifier[buff] = identifier[response] . identifier[read] ( identifier[block_sz] )
keyword[if] keyword[not] identifier[buff] :
keyword[break]
identifier[file_size_dl] += identifier[len] ( identifier[buff] )
identifier[f] . identifier[write] ( identifier[buff] )
identifier[sys] . identifier[stdout] . identifier[write] ( literal[string] *( identifier[len] ( identifier[status] ))+ literal[string] )
keyword[if] identifier[file_size] :
identifier[status] = literal[string] . identifier[format] ( identifier[dl] = identifier[file_size_dl] /( literal[int] ),
identifier[full] = identifier[file_size] /( literal[int] ), identifier[ll] = identifier[line_length] ,
identifier[perc] = literal[string] * identifier[int] ( identifier[line_length] * identifier[float] ( identifier[file_size_dl] )/ identifier[file_size] ))
keyword[else] :
identifier[status] = literal[string] . identifier[format] ( identifier[dl] = identifier[file_size_dl] /( literal[int] ),
identifier[ll] = identifier[line_length] ,
identifier[perc] = literal[string] * identifier[int] ( identifier[line_length] * identifier[float] ( identifier[file_size_dl] /( literal[int] * literal[int] ))))
identifier[sys] . identifier[stdout] . identifier[write] ( identifier[status] )
identifier[sys] . identifier[stdout] . identifier[flush] ()
identifier[sys] . identifier[stdout] . identifier[write] ( literal[string] *( identifier[len] ( identifier[status] ))+ literal[string] )
identifier[print] ( identifier[status] ) | def download_url(url, store_directory, save_name=None, messages=True, suffix=''):
"""Download a file from a url and save it to disk."""
i = url.rfind('/')
file = url[i + 1:]
print(file)
dir_name = os.path.join(data_path, store_directory)
if save_name is None:
save_name = os.path.join(dir_name, file) # depends on [control=['if'], data=['save_name']]
else:
save_name = os.path.join(dir_name, save_name)
if suffix is None:
suffix = '' # depends on [control=['if'], data=['suffix']]
print('Downloading ', url, '->', save_name)
if not os.path.exists(dir_name):
os.makedirs(dir_name) # depends on [control=['if'], data=[]]
try:
response = urlopen(url + suffix) # depends on [control=['try'], data=[]]
except URLError as e:
if not hasattr(e, 'code'):
raise # depends on [control=['if'], data=[]]
response = e
if response.code > 399 and response.code < 500:
raise ValueError('Tried url ' + url + suffix + ' and received client error ' + str(response.code)) # depends on [control=['if'], data=[]]
elif response.code > 499:
raise ValueError('Tried url ' + url + suffix + ' and received server error ' + str(response.code)) # depends on [control=['if'], data=[]] # depends on [control=['except'], data=['e']]
with open(save_name, 'wb') as f:
meta = response.info()
content_length_str = meta.get('Content-Length')
if content_length_str:
file_size = int(content_length_str) # depends on [control=['if'], data=[]]
else:
file_size = None
status = ''
file_size_dl = 0
block_sz = 8192
line_length = 30
while True:
buff = response.read(block_sz)
if not buff:
break # depends on [control=['if'], data=[]]
file_size_dl += len(buff)
f.write(buff)
sys.stdout.write(' ' * len(status) + '\r')
if file_size:
status = '[{perc: <{ll}}] {dl:7.3f}/{full:.3f}MB'.format(dl=file_size_dl / 1048576.0, full=file_size / 1048576.0, ll=line_length, perc='=' * int(line_length * float(file_size_dl) / file_size)) # depends on [control=['if'], data=[]]
else:
status = '[{perc: <{ll}}] {dl:7.3f}MB'.format(dl=file_size_dl / 1048576.0, ll=line_length, perc='.' * int(line_length * float(file_size_dl / (10 * 1048576.0))))
sys.stdout.write(status)
sys.stdout.flush() # depends on [control=['while'], data=[]]
sys.stdout.write(' ' * len(status) + '\r')
print(status) # depends on [control=['with'], data=['f']] |
def delete_subtree(self, nodes):  # noqa: D302
    r"""
    Delete nodes (and their sub-trees) from the tree.

    :param nodes: Node(s) to delete
    :type nodes: :ref:`NodeName` or list of :ref:`NodeName`

    :raises:
     * RuntimeError (Argument \`nodes\` is not valid)

     * RuntimeError (Node *[node_name]* not in tree)

    For example, with the tree built in :py:meth:`ptrie.Trie.add_nodes`,
    calling ``tobj.delete_subtree(['root.branch1.leaf1', 'root.branch2'])``
    removes those two nodes together with everything below them, leaving
    only ``root.branch1.leaf2.subleaf2`` under the root.
    """
    # Validate the argument before mutating anything.
    bad_input = self._validate_node_name(nodes)
    if bad_input:
        raise RuntimeError("Argument `nodes` is not valid")
    self._delete_subtree(nodes)
constant[
Delete nodes (and their sub-trees) from the tree.
:param nodes: Node(s) to delete
:type nodes: :ref:`NodeName` or list of :ref:`NodeName`
:raises:
* RuntimeError (Argument \`nodes\` is not valid)
* RuntimeError (Node *[node_name]* not in tree)
Using the same example tree created in
:py:meth:`ptrie.Trie.add_nodes`::
>>> from __future__ import print_function
>>> import docs.support.ptrie_example
>>> tobj = docs.support.ptrie_example.create_tree()
>>> print(tobj)
root
├branch1 (*)
│├leaf1
││└subleaf1 (*)
│└leaf2 (*)
│ └subleaf2
└branch2
>>> tobj.delete_subtree(['root.branch1.leaf1', 'root.branch2'])
>>> print(tobj)
root
└branch1 (*)
└leaf2 (*)
└subleaf2
]
if call[name[self]._validate_node_name, parameter[name[nodes]]] begin[:]
<ast.Raise object at 0x7da1b10d41f0>
call[name[self]._delete_subtree, parameter[name[nodes]]] | keyword[def] identifier[delete_subtree] ( identifier[self] , identifier[nodes] ):
literal[string]
keyword[if] identifier[self] . identifier[_validate_node_name] ( identifier[nodes] ):
keyword[raise] identifier[RuntimeError] ( literal[string] )
identifier[self] . identifier[_delete_subtree] ( identifier[nodes] ) | def delete_subtree(self, nodes): # noqa: D302
"\n Delete nodes (and their sub-trees) from the tree.\n\n :param nodes: Node(s) to delete\n :type nodes: :ref:`NodeName` or list of :ref:`NodeName`\n\n :raises:\n * RuntimeError (Argument \\`nodes\\` is not valid)\n\n * RuntimeError (Node *[node_name]* not in tree)\n\n Using the same example tree created in\n :py:meth:`ptrie.Trie.add_nodes`::\n\n >>> from __future__ import print_function\n >>> import docs.support.ptrie_example\n >>> tobj = docs.support.ptrie_example.create_tree()\n >>> print(tobj)\n root\n ├branch1 (*)\n │├leaf1\n ││└subleaf1 (*)\n │└leaf2 (*)\n │ └subleaf2\n └branch2\n >>> tobj.delete_subtree(['root.branch1.leaf1', 'root.branch2'])\n >>> print(tobj)\n root\n └branch1 (*)\n └leaf2 (*)\n └subleaf2\n "
if self._validate_node_name(nodes):
raise RuntimeError('Argument `nodes` is not valid') # depends on [control=['if'], data=[]]
self._delete_subtree(nodes) |
def _extract_blocks(x, block_h, block_w):
  """Helper function for local 2d attention.

  Rearranges the input into non-overlapping spatial blocks of size
  block_h x block_w. Height and width must be exact multiples of the
  block dimensions.

  Args:
    x: a [batch, height, width, depth] tensor
    block_h: An integer. block height
    block_w: An integer. block width

  returns:
    a [batch, height/block_h, width/block_w, block_h, block_w, depth]
    tensor
  """
  (_, height, width, depth) = common_layers.shape_list(x)
  assert height % block_h == 0
  assert width % block_w == 0
  # Split each spatial axis into (num_blocks, block_size), then move the
  # two block-index axes ahead of the two within-block axes.
  x = tf.reshape(x, [-1, height//block_h, block_h,
                     width//block_w, block_w, depth])
  return tf.transpose(x, [0, 1, 3, 2, 4, 5])
constant[Helper function for local 2d attention.
Args:
x: a [batch, height, width, depth] tensor
block_h: An integer. block height
block_w: An inteter. block width
returns:
a [batch, num_heads, height/block_h, width/block_w, depth] tensor
]
<ast.Tuple object at 0x7da1b1ff1c90> assign[=] call[name[common_layers].shape_list, parameter[name[x]]]
assert[compare[binary_operation[name[height] <ast.Mod object at 0x7da2590d6920> name[block_h]] equal[==] constant[0]]]
assert[compare[binary_operation[name[width] <ast.Mod object at 0x7da2590d6920> name[block_w]] equal[==] constant[0]]]
variable[x] assign[=] call[name[tf].reshape, parameter[name[x], list[[<ast.UnaryOp object at 0x7da1b1ff0f70>, <ast.BinOp object at 0x7da1b1ff3910>, <ast.Name object at 0x7da1b1ff08e0>, <ast.BinOp object at 0x7da1b1ff14b0>, <ast.Name object at 0x7da1b1ff2290>, <ast.Name object at 0x7da1b1ff3790>]]]]
return[call[name[tf].transpose, parameter[name[x], list[[<ast.Constant object at 0x7da1b1ff1450>, <ast.Constant object at 0x7da1b1ff3ca0>, <ast.Constant object at 0x7da1b1ff33d0>, <ast.Constant object at 0x7da1b1ff3af0>, <ast.Constant object at 0x7da1b1ff12a0>, <ast.Constant object at 0x7da1b1e107f0>]]]]] | keyword[def] identifier[_extract_blocks] ( identifier[x] , identifier[block_h] , identifier[block_w] ):
literal[string]
( identifier[_] , identifier[height] , identifier[width] , identifier[depth] )= identifier[common_layers] . identifier[shape_list] ( identifier[x] )
keyword[assert] identifier[height] % identifier[block_h] == literal[int]
keyword[assert] identifier[width] % identifier[block_w] == literal[int]
identifier[x] = identifier[tf] . identifier[reshape] ( identifier[x] ,[- literal[int] , identifier[height] // identifier[block_h] , identifier[block_h] ,
identifier[width] // identifier[block_w] , identifier[block_w] , identifier[depth] ])
keyword[return] identifier[tf] . identifier[transpose] ( identifier[x] ,[ literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] ]) | def _extract_blocks(x, block_h, block_w):
"""Helper function for local 2d attention.
Args:
x: a [batch, height, width, depth] tensor
block_h: An integer. block height
block_w: An inteter. block width
returns:
a [batch, num_heads, height/block_h, width/block_w, depth] tensor
"""
(_, height, width, depth) = common_layers.shape_list(x)
assert height % block_h == 0
assert width % block_w == 0
x = tf.reshape(x, [-1, height // block_h, block_h, width // block_w, block_w, depth])
return tf.transpose(x, [0, 1, 3, 2, 4, 5]) |
def parse_requirements_list(requirements_list):
    """
    Extract exactly-pinned requirements from a list of requirement lines.

    Only pins of the form ``Thing==1.2.3`` (optionally with extras, e.g.
    ``Thing[extra]==1.2.3``) are kept; comments and any other specifier
    styles (``>=``, ``~=``, ...) are ignored.

    :param requirements_list: iterable of requirement lines (strings)
    :return: list of dicts, each with ``package`` and ``version`` keys
    """
    # Compile once instead of re-matching the pattern text on every line.
    pin_re = re.compile(
        r'\s*(?P<package>[^\s\[\]]+)(?P<extras>\[\S+\])?==(?P<version>\S+)'
    )
    req_list = []
    for requirement in requirements_list:
        # Strip trailing comments (and skip full-line comments).
        spec = requirement.split('#')[0].strip()
        # if matching requirement line (Thing==1.2.3), record it
        req_match = pin_re.match(spec)
        if req_match:
            req_list.append({
                'package': req_match.group('package'),
                'version': req_match.group('version'),
            })
    return req_list
constant[
Take a list and return a list of dicts with {package, versions) based on the requirements specs
:param requirements_list: string
:return: string
]
variable[req_list] assign[=] list[[]]
for taget[name[requirement]] in starred[name[requirements_list]] begin[:]
variable[requirement_no_comments] assign[=] call[call[call[name[requirement].split, parameter[constant[#]]]][constant[0]].strip, parameter[]]
variable[req_match] assign[=] call[name[re].match, parameter[constant[\s*(?P<package>[^\s\[\]]+)(?P<extras>\[\S+\])?==(?P<version>\S+)], name[requirement_no_comments]]]
if name[req_match] begin[:]
call[name[req_list].append, parameter[dictionary[[<ast.Constant object at 0x7da1b20d56c0>, <ast.Constant object at 0x7da1b20d40a0>], [<ast.Call object at 0x7da1b20d4d60>, <ast.Call object at 0x7da1b20d6140>]]]]
return[name[req_list]] | keyword[def] identifier[parse_requirements_list] ( identifier[requirements_list] ):
literal[string]
identifier[req_list] =[]
keyword[for] identifier[requirement] keyword[in] identifier[requirements_list] :
identifier[requirement_no_comments] = identifier[requirement] . identifier[split] ( literal[string] )[ literal[int] ]. identifier[strip] ()
identifier[req_match] = identifier[re] . identifier[match] (
literal[string] ,
identifier[requirement_no_comments]
)
keyword[if] identifier[req_match] :
identifier[req_list] . identifier[append] ({
literal[string] : identifier[req_match] . identifier[group] ( literal[string] ),
literal[string] : identifier[req_match] . identifier[group] ( literal[string] ),
})
keyword[return] identifier[req_list] | def parse_requirements_list(requirements_list):
"""
Take a list and return a list of dicts with {package, versions) based on the requirements specs
:param requirements_list: string
:return: string
"""
req_list = []
for requirement in requirements_list:
requirement_no_comments = requirement.split('#')[0].strip()
# if matching requirement line (Thing==1.2.3), update dict, continue
req_match = re.match('\\s*(?P<package>[^\\s\\[\\]]+)(?P<extras>\\[\\S+\\])?==(?P<version>\\S+)', requirement_no_comments)
if req_match:
req_list.append({'package': req_match.group('package'), 'version': req_match.group('version')}) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['requirement']]
return req_list |
def __build_completer_map(cls):
"""Build a mapping from command names to completer names.
One command name maps to at most one completer method.
Multiple command names can map to the same completer method.
Only used by __init__() to initialize self._cmd_map. MUST NOT be used
elsewhere.
Raises:
PyShellError: A command maps to multiple helper methods.
"""
ret = {}
for name in dir(cls):
obj = getattr(cls, name)
if iscompleter(obj):
for cmd in obj.__complete_targets__:
if cmd in ret.keys():
raise PyShellError("The command '{}' already has"
" complter"
" method '{}', cannot register a"
" second method '{}'.".format( \
cmd, ret[cmd], obj.__name__))
ret[cmd] = obj.__name__
return ret | def function[__build_completer_map, parameter[cls]]:
constant[Build a mapping from command names to completer names.
One command name maps to at most one completer method.
Multiple command names can map to the same completer method.
Only used by __init__() to initialize self._cmd_map. MUST NOT be used
elsewhere.
Raises:
PyShellError: A command maps to multiple helper methods.
]
variable[ret] assign[=] dictionary[[], []]
for taget[name[name]] in starred[call[name[dir], parameter[name[cls]]]] begin[:]
variable[obj] assign[=] call[name[getattr], parameter[name[cls], name[name]]]
if call[name[iscompleter], parameter[name[obj]]] begin[:]
for taget[name[cmd]] in starred[name[obj].__complete_targets__] begin[:]
if compare[name[cmd] in call[name[ret].keys, parameter[]]] begin[:]
<ast.Raise object at 0x7da1b13d77c0>
call[name[ret]][name[cmd]] assign[=] name[obj].__name__
return[name[ret]] | keyword[def] identifier[__build_completer_map] ( identifier[cls] ):
literal[string]
identifier[ret] ={}
keyword[for] identifier[name] keyword[in] identifier[dir] ( identifier[cls] ):
identifier[obj] = identifier[getattr] ( identifier[cls] , identifier[name] )
keyword[if] identifier[iscompleter] ( identifier[obj] ):
keyword[for] identifier[cmd] keyword[in] identifier[obj] . identifier[__complete_targets__] :
keyword[if] identifier[cmd] keyword[in] identifier[ret] . identifier[keys] ():
keyword[raise] identifier[PyShellError] ( literal[string]
literal[string]
literal[string]
literal[string] . identifier[format] ( identifier[cmd] , identifier[ret] [ identifier[cmd] ], identifier[obj] . identifier[__name__] ))
identifier[ret] [ identifier[cmd] ]= identifier[obj] . identifier[__name__]
keyword[return] identifier[ret] | def __build_completer_map(cls):
"""Build a mapping from command names to completer names.
One command name maps to at most one completer method.
Multiple command names can map to the same completer method.
Only used by __init__() to initialize self._cmd_map. MUST NOT be used
elsewhere.
Raises:
PyShellError: A command maps to multiple helper methods.
"""
ret = {}
for name in dir(cls):
obj = getattr(cls, name)
if iscompleter(obj):
for cmd in obj.__complete_targets__:
if cmd in ret.keys():
raise PyShellError("The command '{}' already has complter method '{}', cannot register a second method '{}'.".format(cmd, ret[cmd], obj.__name__)) # depends on [control=['if'], data=['cmd']]
ret[cmd] = obj.__name__ # depends on [control=['for'], data=['cmd']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['name']]
return ret |
def add_public_key(self, did, public_key):
    """
    Append a public key entry to this document's list of public keys.

    :param did: DID that the key is registered under.
    :param public_key: Public key value (hex string); stored as the entry's
        "owner" field with type "EthereumECDSAKey".
    """
    logger.debug(f'Adding public key {public_key} to the did {did}')
    key_attrs = {"owner": public_key, "type": "EthereumECDSAKey"}
    self._public_keys.append(PublicKeyBase(did, **key_attrs))
constant[
Add a public key object to the list of public keys.
:param public_key: Public key, PublicKeyHex
]
call[name[logger].debug, parameter[<ast.JoinedStr object at 0x7da18f09f8b0>]]
call[name[self]._public_keys.append, parameter[call[name[PublicKeyBase], parameter[name[did]]]]] | keyword[def] identifier[add_public_key] ( identifier[self] , identifier[did] , identifier[public_key] ):
literal[string]
identifier[logger] . identifier[debug] ( literal[string] )
identifier[self] . identifier[_public_keys] . identifier[append] (
identifier[PublicKeyBase] ( identifier[did] ,**{ literal[string] : identifier[public_key] , literal[string] : literal[string] })) | def add_public_key(self, did, public_key):
"""
Add a public key object to the list of public keys.
:param public_key: Public key, PublicKeyHex
"""
logger.debug(f'Adding public key {public_key} to the did {did}')
self._public_keys.append(PublicKeyBase(did, **{'owner': public_key, 'type': 'EthereumECDSAKey'})) |
def _build_tree(self, side):
"""Build the KDTree for the observed data
:arg side if equal to DATA_X, build input data tree.
if equal to DATA_Y, build output data tree.
"""
if not self.nn_ready[side]:
self.kdtree[side] = scipy.spatial.cKDTree(self.data[side], compact_nodes=False, balanced_tree=False) # Those options are required with scipy >= 0.16
self.nn_ready[side] = True | def function[_build_tree, parameter[self, side]]:
constant[Build the KDTree for the observed data
:arg side if equal to DATA_X, build input data tree.
if equal to DATA_Y, build output data tree.
]
if <ast.UnaryOp object at 0x7da1b0ed2020> begin[:]
call[name[self].kdtree][name[side]] assign[=] call[name[scipy].spatial.cKDTree, parameter[call[name[self].data][name[side]]]]
call[name[self].nn_ready][name[side]] assign[=] constant[True] | keyword[def] identifier[_build_tree] ( identifier[self] , identifier[side] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[nn_ready] [ identifier[side] ]:
identifier[self] . identifier[kdtree] [ identifier[side] ]= identifier[scipy] . identifier[spatial] . identifier[cKDTree] ( identifier[self] . identifier[data] [ identifier[side] ], identifier[compact_nodes] = keyword[False] , identifier[balanced_tree] = keyword[False] )
identifier[self] . identifier[nn_ready] [ identifier[side] ]= keyword[True] | def _build_tree(self, side):
"""Build the KDTree for the observed data
:arg side if equal to DATA_X, build input data tree.
if equal to DATA_Y, build output data tree.
"""
if not self.nn_ready[side]:
self.kdtree[side] = scipy.spatial.cKDTree(self.data[side], compact_nodes=False, balanced_tree=False) # Those options are required with scipy >= 0.16
self.nn_ready[side] = True # depends on [control=['if'], data=[]] |
def Then3(self, f, arg1, arg2, *args, **kwargs):
    """
    `Then3(f, ...)` is equivalent to `ThenAt(3, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information.
    """
    # Forward the two named arguments ahead of the remaining positionals.
    return self.ThenAt(3, f, arg1, arg2, *args, **kwargs)
constant[
`Then3(f, ...)` is equivalent to `ThenAt(3, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information.
]
variable[args] assign[=] binary_operation[tuple[[<ast.Name object at 0x7da1b10bce80>, <ast.Name object at 0x7da1b10bfca0>]] + name[args]]
return[call[name[self].ThenAt, parameter[constant[3], name[f], <ast.Starred object at 0x7da1b10bf8e0>]]] | keyword[def] identifier[Then3] ( identifier[self] , identifier[f] , identifier[arg1] , identifier[arg2] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[args] =( identifier[arg1] , identifier[arg2] )+ identifier[args]
keyword[return] identifier[self] . identifier[ThenAt] ( literal[int] , identifier[f] ,* identifier[args] ,** identifier[kwargs] ) | def Then3(self, f, arg1, arg2, *args, **kwargs):
"""
`Then3(f, ...)` is equivalent to `ThenAt(3, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information.
"""
args = (arg1, arg2) + args
return self.ThenAt(3, f, *args, **kwargs) |
def configmap_install_id_plugin(scout, app, map_name=None, namespace="default"):
    """
    Scout id_plugin that uses a Kubernetes configmap to store the install ID.

    :param scout: Scout instance that's calling the plugin
    :param app: Name of the application that's using Scout
    :param map_name: Optional ConfigMap name to use; defaults to "scout.config.$app"
    :param namespace: Optional Kubernetes namespace to use; defaults to "default"

    This plugin assumes that the KUBERNETES_SERVICE_{HOST,PORT,PORT_HTTPS}
    environment variables are set correctly, and it assumes the default Kubernetes
    namespace unless the 'namespace' keyword argument is used to select a different
    namespace.

    If KUBERNETES_ACCESS_TOKEN is set in the environment, use that for the apiserver
    access token -- otherwise, the plugin assumes that it's running in a Kubernetes
    pod and tries to read its token from /var/run/secrets.

    Returns a plugin-response dict ({"install_id": ...} plus "new_install": True
    when a fresh ID was persisted), or None when not running in Kubernetes or
    the ID could not be determined.
    """
    plugin_response = None

    if not map_name:
        map_name = "scout.config.{0}".format(app)

    kube_host = os.environ.get('KUBERNETES_SERVICE_HOST', None)

    try:
        kube_port = int(os.environ.get('KUBERNETES_SERVICE_PORT', 443))
    except ValueError:
        scout.logger.debug("Scout: KUBERNETES_SERVICE_PORT isn't numeric, defaulting to 443")
        kube_port = 443

    kube_proto = "https" if (kube_port == 443) else "http"
    kube_token = os.environ.get('KUBERNETES_ACCESS_TOKEN', None)

    if not kube_host:
        # We're not running in Kubernetes. Fall back to the usual filesystem stuff.
        scout.logger.debug("Scout: no KUBERNETES_SERVICE_HOST, not running in Kubernetes")
        return None

    if not kube_token:
        # In-pod service-account token, mounted by kubelet.
        try:
            kube_token = open("/var/run/secrets/kubernetes.io/serviceaccount/token", "r").read()
        except OSError:
            pass

    if not kube_token:
        # We're not running in Kubernetes. Fall back to the usual filesystem stuff.
        scout.logger.debug("Scout: not running in Kubernetes")
        return None

    # OK, we're in a cluster. Load our map.
    # NOTE(review): verify=False disables TLS verification of the apiserver;
    # consider verifying against the in-pod CA bundle
    # (/var/run/secrets/kubernetes.io/serviceaccount/ca.crt) instead.
    base_url = "%s://%s:%s" % (kube_proto, kube_host, kube_port)
    url_path = "api/v1/namespaces/%s/configmaps" % namespace
    auth_headers = { "Authorization": "Bearer " + kube_token }
    install_id = None

    cm_url = "%s/%s" % (base_url, url_path)
    fetch_url = "%s/%s" % (cm_url, map_name)
    scout.logger.debug("Scout: trying %s" % fetch_url)

    try:
        r = requests.get(fetch_url, headers=auth_headers, verify=False)

        if r.status_code == 200:
            # OK, the map is present. What do we see?
            map_data = r.json()

            if "data" not in map_data:
                # This is "impossible".
                scout.logger.error("Scout: no map data in returned map???")
            else:
                map_data = map_data.get("data", {})
                scout.logger.debug("Scout: configmap has map data %s" % json.dumps(map_data))
                install_id = map_data.get("install_id", None)

                if install_id:
                    scout.logger.debug("Scout: got install_id %s from map" % install_id)
                    plugin_response = { "install_id": install_id }
    except OSError as e:
        # requests' connection errors derive from IOError/OSError, so network
        # failures land here rather than propagating.
        scout.logger.debug("Scout: could not read configmap (map %s, namespace %s): %s" %
                           (map_name, namespace, e))

    if not install_id:
        # No extant install_id. Try to create a new one.
        install_id = str(uuid4())

        cm = {
            "apiVersion":"v1",
            "kind":"ConfigMap",
            "metadata":{
                "name": map_name,
                "namespace": namespace,
            },
            "data": {
                "install_id": install_id
            }
        }

        scout.logger.debug("Scout: saving new install_id %s" % install_id)

        try:
            r = requests.post(cm_url, headers=auth_headers, verify=False, json=cm)

            if r.status_code == 201:
                # 201 Created: the new ConfigMap (and install ID) is persisted.
                scout.logger.debug("Scout: saved install_id %s" % install_id)

                plugin_response = {
                    "install_id": install_id,
                    "new_install": True
                }
            else:
                scout.logger.error("Scout: could not save install_id: {0}, {1}".format(r.status_code, r.text))
        except OSError as e:
            # Fix: this used module-level logging.debug(), bypassing Scout's
            # configured logger; every other log line here uses scout.logger.
            scout.logger.debug("Scout: could not write configmap (map %s, namespace %s): %s" %
                               (map_name, namespace, e))

    scout.logger.debug("Scout: plugin_response %s" % json.dumps(plugin_response))
    return plugin_response
constant[
Scout id_plugin that uses a Kubernetes configmap to store the install ID.
:param scout: Scout instance that's calling the plugin
:param app: Name of the application that's using Scout
:param map_name: Optional ConfigMap name to use; defaults to "scout.config.$app"
:param namespace: Optional Kubernetes namespace to use; defaults to "default"
This plugin assumes that the KUBERNETES_SERVICE_{HOST,PORT,PORT_HTTPS}
environment variables are set correctly, and it assumes the default Kubernetes
namespace unless the 'namespace' keyword argument is used to select a different
namespace.
If KUBERNETES_ACCESS_TOKEN is set in the environment, use that for the apiserver
access token -- otherwise, the plugin assumes that it's running in a Kubernetes
pod and tries to read its token from /var/run/secrets.
]
variable[plugin_response] assign[=] constant[None]
if <ast.UnaryOp object at 0x7da1b24eab90> begin[:]
variable[map_name] assign[=] call[constant[scout.config.{0}].format, parameter[name[app]]]
variable[kube_host] assign[=] call[name[os].environ.get, parameter[constant[KUBERNETES_SERVICE_HOST], constant[None]]]
<ast.Try object at 0x7da1b24eaef0>
variable[kube_proto] assign[=] <ast.IfExp object at 0x7da1b24e9ea0>
variable[kube_token] assign[=] call[name[os].environ.get, parameter[constant[KUBERNETES_ACCESS_TOKEN], constant[None]]]
if <ast.UnaryOp object at 0x7da1b24e9150> begin[:]
call[name[scout].logger.debug, parameter[constant[Scout: no KUBERNETES_SERVICE_HOST, not running in Kubernetes]]]
return[constant[None]]
if <ast.UnaryOp object at 0x7da1b24eb280> begin[:]
<ast.Try object at 0x7da1b24e88e0>
if <ast.UnaryOp object at 0x7da1b24eaa10> begin[:]
call[name[scout].logger.debug, parameter[constant[Scout: not running in Kubernetes]]]
return[constant[None]]
variable[base_url] assign[=] binary_operation[constant[%s://%s:%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b24e89d0>, <ast.Name object at 0x7da1b24eb0a0>, <ast.Name object at 0x7da1b24ea8f0>]]]
variable[url_path] assign[=] binary_operation[constant[api/v1/namespaces/%s/configmaps] <ast.Mod object at 0x7da2590d6920> name[namespace]]
variable[auth_headers] assign[=] dictionary[[<ast.Constant object at 0x7da1b24e9480>], [<ast.BinOp object at 0x7da1b24eba90>]]
variable[install_id] assign[=] constant[None]
variable[cm_url] assign[=] binary_operation[constant[%s/%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b24eb940>, <ast.Name object at 0x7da1b24e90f0>]]]
variable[fetch_url] assign[=] binary_operation[constant[%s/%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b24eb7c0>, <ast.Name object at 0x7da1b24ea380>]]]
call[name[scout].logger.debug, parameter[binary_operation[constant[Scout: trying %s] <ast.Mod object at 0x7da2590d6920> name[fetch_url]]]]
<ast.Try object at 0x7da1b24eb9a0>
if <ast.UnaryOp object at 0x7da1b24e3fd0> begin[:]
variable[install_id] assign[=] call[name[str], parameter[call[name[uuid4], parameter[]]]]
variable[cm] assign[=] dictionary[[<ast.Constant object at 0x7da1b24e2e90>, <ast.Constant object at 0x7da1b24e00d0>, <ast.Constant object at 0x7da1b24e0220>, <ast.Constant object at 0x7da1b24e2980>], [<ast.Constant object at 0x7da1b24e3280>, <ast.Constant object at 0x7da1b24e31f0>, <ast.Dict object at 0x7da1b24e3220>, <ast.Dict object at 0x7da1b24e33a0>]]
call[name[scout].logger.debug, parameter[binary_operation[constant[Scout: saving new install_id %s] <ast.Mod object at 0x7da2590d6920> name[install_id]]]]
variable[saved] assign[=] constant[False]
<ast.Try object at 0x7da1b24e2ad0>
call[name[scout].logger.debug, parameter[binary_operation[constant[Scout: plugin_response %s] <ast.Mod object at 0x7da2590d6920> call[name[json].dumps, parameter[name[plugin_response]]]]]]
return[name[plugin_response]] | keyword[def] identifier[configmap_install_id_plugin] ( identifier[scout] , identifier[app] , identifier[map_name] = keyword[None] , identifier[namespace] = literal[string] ):
literal[string]
identifier[plugin_response] = keyword[None]
keyword[if] keyword[not] identifier[map_name] :
identifier[map_name] = literal[string] . identifier[format] ( identifier[app] )
identifier[kube_host] = identifier[os] . identifier[environ] . identifier[get] ( literal[string] , keyword[None] )
keyword[try] :
identifier[kube_port] = identifier[int] ( identifier[os] . identifier[environ] . identifier[get] ( literal[string] , literal[int] ))
keyword[except] identifier[ValueError] :
identifier[scout] . identifier[logger] . identifier[debug] ( literal[string] )
identifier[kube_port] = literal[int]
identifier[kube_proto] = literal[string] keyword[if] ( identifier[kube_port] == literal[int] ) keyword[else] literal[string]
identifier[kube_token] = identifier[os] . identifier[environ] . identifier[get] ( literal[string] , keyword[None] )
keyword[if] keyword[not] identifier[kube_host] :
identifier[scout] . identifier[logger] . identifier[debug] ( literal[string] )
keyword[return] keyword[None]
keyword[if] keyword[not] identifier[kube_token] :
keyword[try] :
identifier[kube_token] = identifier[open] ( literal[string] , literal[string] ). identifier[read] ()
keyword[except] identifier[OSError] :
keyword[pass]
keyword[if] keyword[not] identifier[kube_token] :
identifier[scout] . identifier[logger] . identifier[debug] ( literal[string] )
keyword[return] keyword[None]
identifier[base_url] = literal[string] %( identifier[kube_proto] , identifier[kube_host] , identifier[kube_port] )
identifier[url_path] = literal[string] % identifier[namespace]
identifier[auth_headers] ={ literal[string] : literal[string] + identifier[kube_token] }
identifier[install_id] = keyword[None]
identifier[cm_url] = literal[string] %( identifier[base_url] , identifier[url_path] )
identifier[fetch_url] = literal[string] %( identifier[cm_url] , identifier[map_name] )
identifier[scout] . identifier[logger] . identifier[debug] ( literal[string] % identifier[fetch_url] )
keyword[try] :
identifier[r] = identifier[requests] . identifier[get] ( identifier[fetch_url] , identifier[headers] = identifier[auth_headers] , identifier[verify] = keyword[False] )
keyword[if] identifier[r] . identifier[status_code] == literal[int] :
identifier[map_data] = identifier[r] . identifier[json] ()
keyword[if] literal[string] keyword[not] keyword[in] identifier[map_data] :
identifier[scout] . identifier[logger] . identifier[error] ( literal[string] )
keyword[else] :
identifier[map_data] = identifier[map_data] . identifier[get] ( literal[string] ,{})
identifier[scout] . identifier[logger] . identifier[debug] ( literal[string] % identifier[json] . identifier[dumps] ( identifier[map_data] ))
identifier[install_id] = identifier[map_data] . identifier[get] ( literal[string] , keyword[None] )
keyword[if] identifier[install_id] :
identifier[scout] . identifier[logger] . identifier[debug] ( literal[string] % identifier[install_id] )
identifier[plugin_response] ={ literal[string] : identifier[install_id] }
keyword[except] identifier[OSError] keyword[as] identifier[e] :
identifier[scout] . identifier[logger] . identifier[debug] ( literal[string] %
( identifier[map_name] , identifier[namespace] , identifier[e] ))
keyword[if] keyword[not] identifier[install_id] :
identifier[install_id] = identifier[str] ( identifier[uuid4] ())
identifier[cm] ={
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] :{
literal[string] : identifier[map_name] ,
literal[string] : identifier[namespace] ,
},
literal[string] :{
literal[string] : identifier[install_id]
}
}
identifier[scout] . identifier[logger] . identifier[debug] ( literal[string] % identifier[install_id] )
identifier[saved] = keyword[False]
keyword[try] :
identifier[r] = identifier[requests] . identifier[post] ( identifier[cm_url] , identifier[headers] = identifier[auth_headers] , identifier[verify] = keyword[False] , identifier[json] = identifier[cm] )
keyword[if] identifier[r] . identifier[status_code] == literal[int] :
identifier[saved] = keyword[True]
identifier[scout] . identifier[logger] . identifier[debug] ( literal[string] % identifier[install_id] )
identifier[plugin_response] ={
literal[string] : identifier[install_id] ,
literal[string] : keyword[True]
}
keyword[else] :
identifier[scout] . identifier[logger] . identifier[error] ( literal[string] . identifier[format] ( identifier[r] . identifier[status_code] , identifier[r] . identifier[text] ))
keyword[except] identifier[OSError] keyword[as] identifier[e] :
identifier[logging] . identifier[debug] ( literal[string] %
( identifier[map_name] , identifier[namespace] , identifier[e] ))
identifier[scout] . identifier[logger] . identifier[debug] ( literal[string] % identifier[json] . identifier[dumps] ( identifier[plugin_response] ))
keyword[return] identifier[plugin_response] | def configmap_install_id_plugin(scout, app, map_name=None, namespace='default'):
"""
Scout id_plugin that uses a Kubernetes configmap to store the install ID.
:param scout: Scout instance that's calling the plugin
:param app: Name of the application that's using Scout
:param map_name: Optional ConfigMap name to use; defaults to "scout.config.$app"
:param namespace: Optional Kubernetes namespace to use; defaults to "default"
This plugin assumes that the KUBERNETES_SERVICE_{HOST,PORT,PORT_HTTPS}
environment variables are set correctly, and it assumes the default Kubernetes
namespace unless the 'namespace' keyword argument is used to select a different
namespace.
If KUBERNETES_ACCESS_TOKEN is set in the environment, use that for the apiserver
access token -- otherwise, the plugin assumes that it's running in a Kubernetes
pod and tries to read its token from /var/run/secrets.
"""
plugin_response = None
if not map_name:
map_name = 'scout.config.{0}'.format(app) # depends on [control=['if'], data=[]]
kube_host = os.environ.get('KUBERNETES_SERVICE_HOST', None)
try:
kube_port = int(os.environ.get('KUBERNETES_SERVICE_PORT', 443)) # depends on [control=['try'], data=[]]
except ValueError:
scout.logger.debug("Scout: KUBERNETES_SERVICE_PORT isn't numeric, defaulting to 443")
kube_port = 443 # depends on [control=['except'], data=[]]
kube_proto = 'https' if kube_port == 443 else 'http'
kube_token = os.environ.get('KUBERNETES_ACCESS_TOKEN', None)
if not kube_host:
# We're not running in Kubernetes. Fall back to the usual filesystem stuff.
scout.logger.debug('Scout: no KUBERNETES_SERVICE_HOST, not running in Kubernetes')
return None # depends on [control=['if'], data=[]]
if not kube_token:
try:
kube_token = open('/var/run/secrets/kubernetes.io/serviceaccount/token', 'r').read() # depends on [control=['try'], data=[]]
except OSError:
pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
if not kube_token:
# We're not running in Kubernetes. Fall back to the usual filesystem stuff.
scout.logger.debug('Scout: not running in Kubernetes')
return None # depends on [control=['if'], data=[]]
# OK, we're in a cluster. Load our map.
base_url = '%s://%s:%s' % (kube_proto, kube_host, kube_port)
url_path = 'api/v1/namespaces/%s/configmaps' % namespace
auth_headers = {'Authorization': 'Bearer ' + kube_token}
install_id = None
cm_url = '%s/%s' % (base_url, url_path)
fetch_url = '%s/%s' % (cm_url, map_name)
scout.logger.debug('Scout: trying %s' % fetch_url)
try:
r = requests.get(fetch_url, headers=auth_headers, verify=False)
if r.status_code == 200:
# OK, the map is present. What do we see?
map_data = r.json()
if 'data' not in map_data:
# This is "impossible".
scout.logger.error('Scout: no map data in returned map???') # depends on [control=['if'], data=[]]
else:
map_data = map_data.get('data', {})
scout.logger.debug('Scout: configmap has map data %s' % json.dumps(map_data))
install_id = map_data.get('install_id', None)
if install_id:
scout.logger.debug('Scout: got install_id %s from map' % install_id)
plugin_response = {'install_id': install_id} # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except OSError as e:
scout.logger.debug('Scout: could not read configmap (map %s, namespace %s): %s' % (map_name, namespace, e)) # depends on [control=['except'], data=['e']]
if not install_id:
# No extant install_id. Try to create a new one.
install_id = str(uuid4())
cm = {'apiVersion': 'v1', 'kind': 'ConfigMap', 'metadata': {'name': map_name, 'namespace': namespace}, 'data': {'install_id': install_id}}
scout.logger.debug('Scout: saving new install_id %s' % install_id)
saved = False
try:
r = requests.post(cm_url, headers=auth_headers, verify=False, json=cm)
if r.status_code == 201:
saved = True
scout.logger.debug('Scout: saved install_id %s' % install_id)
plugin_response = {'install_id': install_id, 'new_install': True} # depends on [control=['if'], data=[]]
else:
scout.logger.error('Scout: could not save install_id: {0}, {1}'.format(r.status_code, r.text)) # depends on [control=['try'], data=[]]
except OSError as e:
logging.debug('Scout: could not write configmap (map %s, namespace %s): %s' % (map_name, namespace, e)) # depends on [control=['except'], data=['e']] # depends on [control=['if'], data=[]]
scout.logger.debug('Scout: plugin_response %s' % json.dumps(plugin_response))
return plugin_response |
def _separate_dirs_files(models):
"""
Split an iterable of models into a list of file paths and a list of
directory paths.
"""
dirs = []
files = []
for model in models:
if model['type'] == 'directory':
dirs.append(model['path'])
else:
files.append(model['path'])
return dirs, files | def function[_separate_dirs_files, parameter[models]]:
constant[
Split an iterable of models into a list of file paths and a list of
directory paths.
]
variable[dirs] assign[=] list[[]]
variable[files] assign[=] list[[]]
for taget[name[model]] in starred[name[models]] begin[:]
if compare[call[name[model]][constant[type]] equal[==] constant[directory]] begin[:]
call[name[dirs].append, parameter[call[name[model]][constant[path]]]]
return[tuple[[<ast.Name object at 0x7da18c4cdde0>, <ast.Name object at 0x7da18c4cf430>]]] | keyword[def] identifier[_separate_dirs_files] ( identifier[models] ):
literal[string]
identifier[dirs] =[]
identifier[files] =[]
keyword[for] identifier[model] keyword[in] identifier[models] :
keyword[if] identifier[model] [ literal[string] ]== literal[string] :
identifier[dirs] . identifier[append] ( identifier[model] [ literal[string] ])
keyword[else] :
identifier[files] . identifier[append] ( identifier[model] [ literal[string] ])
keyword[return] identifier[dirs] , identifier[files] | def _separate_dirs_files(models):
"""
Split an iterable of models into a list of file paths and a list of
directory paths.
"""
dirs = []
files = []
for model in models:
if model['type'] == 'directory':
dirs.append(model['path']) # depends on [control=['if'], data=[]]
else:
files.append(model['path']) # depends on [control=['for'], data=['model']]
return (dirs, files) |
def snapshot(self):
    """Return a new library item which is a copy of this one with any dynamic behavior made static.

    Creates a fresh instance of the same class and copies the data payload,
    timestamps/timezone context, metadata, descriptive text, and session
    fields onto it.  Mutable payloads (data_and_metadata, session_data) are
    deep-copied so the snapshot cannot be mutated through the original.
    NOTE(review): the assignments below may go through property setters on
    the target, so their order is preserved deliberately.
    """
    data_item = self.__class__()
    # data format (temporary until moved to buffered data source)
    data_item.large_format = self.large_format
    data_item.set_data_and_metadata(copy.deepcopy(self.data_and_metadata), self.data_modified)
    # metadata: timestamps and timezone context travel with the copy
    data_item.created = self.created
    data_item.timezone = self.timezone
    data_item.timezone_offset = self.timezone_offset
    data_item.metadata = self.metadata
    data_item.title = self.title
    data_item.caption = self.caption
    data_item.description = self.description
    data_item.session_id = self.session_id
    # session_data may be nested; deep-copy to decouple it from the original
    data_item.session_data = copy.deepcopy(self.session_data)
    return data_item
constant[Return a new library item which is a copy of this one with any dynamic behavior made static.]
variable[data_item] assign[=] call[name[self].__class__, parameter[]]
name[data_item].large_format assign[=] name[self].large_format
call[name[data_item].set_data_and_metadata, parameter[call[name[copy].deepcopy, parameter[name[self].data_and_metadata]], name[self].data_modified]]
name[data_item].created assign[=] name[self].created
name[data_item].timezone assign[=] name[self].timezone
name[data_item].timezone_offset assign[=] name[self].timezone_offset
name[data_item].metadata assign[=] name[self].metadata
name[data_item].title assign[=] name[self].title
name[data_item].caption assign[=] name[self].caption
name[data_item].description assign[=] name[self].description
name[data_item].session_id assign[=] name[self].session_id
name[data_item].session_data assign[=] call[name[copy].deepcopy, parameter[name[self].session_data]]
return[name[data_item]] | keyword[def] identifier[snapshot] ( identifier[self] ):
literal[string]
identifier[data_item] = identifier[self] . identifier[__class__] ()
identifier[data_item] . identifier[large_format] = identifier[self] . identifier[large_format]
identifier[data_item] . identifier[set_data_and_metadata] ( identifier[copy] . identifier[deepcopy] ( identifier[self] . identifier[data_and_metadata] ), identifier[self] . identifier[data_modified] )
identifier[data_item] . identifier[created] = identifier[self] . identifier[created]
identifier[data_item] . identifier[timezone] = identifier[self] . identifier[timezone]
identifier[data_item] . identifier[timezone_offset] = identifier[self] . identifier[timezone_offset]
identifier[data_item] . identifier[metadata] = identifier[self] . identifier[metadata]
identifier[data_item] . identifier[title] = identifier[self] . identifier[title]
identifier[data_item] . identifier[caption] = identifier[self] . identifier[caption]
identifier[data_item] . identifier[description] = identifier[self] . identifier[description]
identifier[data_item] . identifier[session_id] = identifier[self] . identifier[session_id]
identifier[data_item] . identifier[session_data] = identifier[copy] . identifier[deepcopy] ( identifier[self] . identifier[session_data] )
keyword[return] identifier[data_item] | def snapshot(self):
"""Return a new library item which is a copy of this one with any dynamic behavior made static."""
data_item = self.__class__()
# data format (temporary until moved to buffered data source)
data_item.large_format = self.large_format
data_item.set_data_and_metadata(copy.deepcopy(self.data_and_metadata), self.data_modified)
# metadata
data_item.created = self.created
data_item.timezone = self.timezone
data_item.timezone_offset = self.timezone_offset
data_item.metadata = self.metadata
data_item.title = self.title
data_item.caption = self.caption
data_item.description = self.description
data_item.session_id = self.session_id
data_item.session_data = copy.deepcopy(self.session_data)
return data_item |
def to_topojson(self):
    """Attach this object's points to its topojson mapping and serialize to a JSON string."""
    geometry_collection = {
        "type": "GeometryCollection",
        "geometries": [pt.to_topojson() for pt in self.points.all()],
    }
    document = self.topojson
    document["objects"]["points"] = geometry_collection
    return json.dumps(document)
constant[Adds points and converts to topojson string.]
variable[topojson] assign[=] name[self].topojson
call[call[name[topojson]][constant[objects]]][constant[points]] assign[=] dictionary[[<ast.Constant object at 0x7da1b18ab940>, <ast.Constant object at 0x7da1b18aa5f0>], [<ast.Constant object at 0x7da1b18a97b0>, <ast.ListComp object at 0x7da1b18aac20>]]
return[call[name[json].dumps, parameter[name[topojson]]]] | keyword[def] identifier[to_topojson] ( identifier[self] ):
literal[string]
identifier[topojson] = identifier[self] . identifier[topojson]
identifier[topojson] [ literal[string] ][ literal[string] ]={
literal[string] : literal[string] ,
literal[string] :[ identifier[point] . identifier[to_topojson] () keyword[for] identifier[point] keyword[in] identifier[self] . identifier[points] . identifier[all] ()],
}
keyword[return] identifier[json] . identifier[dumps] ( identifier[topojson] ) | def to_topojson(self):
"""Adds points and converts to topojson string."""
topojson = self.topojson
topojson['objects']['points'] = {'type': 'GeometryCollection', 'geometries': [point.to_topojson() for point in self.points.all()]}
return json.dumps(topojson) |
def read_moc(self, filename):
    """Read a file into the current running MOC object.

    If the running MOC object has not yet been created, then
    it is created by reading the file, which will import the
    MOC metadata.  Otherwise the metadata are not imported.
    """
    if self.moc is not None:
        self.moc.read(filename)
    else:
        self.moc = MOC(filename=filename)
constant[Read a file into the current running MOC object.
If the running MOC object has not yet been created, then
it is created by reading the file, which will import the
MOC metadata. Otherwise the metadata are not imported.
]
if compare[name[self].moc is constant[None]] begin[:]
name[self].moc assign[=] call[name[MOC], parameter[]] | keyword[def] identifier[read_moc] ( identifier[self] , identifier[filename] ):
literal[string]
keyword[if] identifier[self] . identifier[moc] keyword[is] keyword[None] :
identifier[self] . identifier[moc] = identifier[MOC] ( identifier[filename] = identifier[filename] )
keyword[else] :
identifier[self] . identifier[moc] . identifier[read] ( identifier[filename] ) | def read_moc(self, filename):
"""Read a file into the current running MOC object.
If the running MOC object has not yet been created, then
it is created by reading the file, which will import the
MOC metadata. Otherwise the metadata are not imported.
"""
if self.moc is None:
self.moc = MOC(filename=filename) # depends on [control=['if'], data=[]]
else:
self.moc.read(filename) |
def update_entity(self, table_name, entity, if_match='*', timeout=None):
'''
Updates an existing entity in a table. Throws if the entity does not exist.
The update_entity operation replaces the entire entity and can be used to
remove properties.
:param str table_name:
The name of the table containing the entity to update.
:param entity:
The entity to update. Could be a dict or an entity object.
Must contain a PartitionKey and a RowKey.
:type entity: a dict or :class:`~azure.storage.table.models.Entity`
:param str if_match:
The client may specify the ETag for the entity on the
request in order to compare to the ETag maintained by the service
for the purpose of optimistic concurrency. The update operation
will be performed only if the ETag sent by the client matches the
value maintained by the server, indicating that the entity has
not been modified since it was retrieved by the client. To force
an unconditional update, set If-Match to the wildcard character (*).
:param int timeout:
The server timeout, expressed in seconds.
:return: The etag of the entity.
:rtype: str
'''
_validate_not_none('table_name', table_name)
request = _update_entity(entity, if_match)
request.host = self._get_host()
request.path = _get_entity_path(table_name, entity['PartitionKey'], entity['RowKey'])
request.query += [('timeout', _int_to_str(timeout))]
response = self._perform_request(request)
return _extract_etag(response) | def function[update_entity, parameter[self, table_name, entity, if_match, timeout]]:
constant[
Updates an existing entity in a table. Throws if the entity does not exist.
The update_entity operation replaces the entire entity and can be used to
remove properties.
:param str table_name:
The name of the table containing the entity to update.
:param entity:
The entity to update. Could be a dict or an entity object.
Must contain a PartitionKey and a RowKey.
:type entity: a dict or :class:`~azure.storage.table.models.Entity`
:param str if_match:
The client may specify the ETag for the entity on the
request in order to compare to the ETag maintained by the service
for the purpose of optimistic concurrency. The update operation
will be performed only if the ETag sent by the client matches the
value maintained by the server, indicating that the entity has
not been modified since it was retrieved by the client. To force
an unconditional update, set If-Match to the wildcard character (*).
:param int timeout:
The server timeout, expressed in seconds.
:return: The etag of the entity.
:rtype: str
]
call[name[_validate_not_none], parameter[constant[table_name], name[table_name]]]
variable[request] assign[=] call[name[_update_entity], parameter[name[entity], name[if_match]]]
name[request].host assign[=] call[name[self]._get_host, parameter[]]
name[request].path assign[=] call[name[_get_entity_path], parameter[name[table_name], call[name[entity]][constant[PartitionKey]], call[name[entity]][constant[RowKey]]]]
<ast.AugAssign object at 0x7da18dc9b9a0>
variable[response] assign[=] call[name[self]._perform_request, parameter[name[request]]]
return[call[name[_extract_etag], parameter[name[response]]]] | keyword[def] identifier[update_entity] ( identifier[self] , identifier[table_name] , identifier[entity] , identifier[if_match] = literal[string] , identifier[timeout] = keyword[None] ):
literal[string]
identifier[_validate_not_none] ( literal[string] , identifier[table_name] )
identifier[request] = identifier[_update_entity] ( identifier[entity] , identifier[if_match] )
identifier[request] . identifier[host] = identifier[self] . identifier[_get_host] ()
identifier[request] . identifier[path] = identifier[_get_entity_path] ( identifier[table_name] , identifier[entity] [ literal[string] ], identifier[entity] [ literal[string] ])
identifier[request] . identifier[query] +=[( literal[string] , identifier[_int_to_str] ( identifier[timeout] ))]
identifier[response] = identifier[self] . identifier[_perform_request] ( identifier[request] )
keyword[return] identifier[_extract_etag] ( identifier[response] ) | def update_entity(self, table_name, entity, if_match='*', timeout=None):
"""
Updates an existing entity in a table. Throws if the entity does not exist.
The update_entity operation replaces the entire entity and can be used to
remove properties.
:param str table_name:
The name of the table containing the entity to update.
:param entity:
The entity to update. Could be a dict or an entity object.
Must contain a PartitionKey and a RowKey.
:type entity: a dict or :class:`~azure.storage.table.models.Entity`
:param str if_match:
The client may specify the ETag for the entity on the
request in order to compare to the ETag maintained by the service
for the purpose of optimistic concurrency. The update operation
will be performed only if the ETag sent by the client matches the
value maintained by the server, indicating that the entity has
not been modified since it was retrieved by the client. To force
an unconditional update, set If-Match to the wildcard character (*).
:param int timeout:
The server timeout, expressed in seconds.
:return: The etag of the entity.
:rtype: str
"""
_validate_not_none('table_name', table_name)
request = _update_entity(entity, if_match)
request.host = self._get_host()
request.path = _get_entity_path(table_name, entity['PartitionKey'], entity['RowKey'])
request.query += [('timeout', _int_to_str(timeout))]
response = self._perform_request(request)
return _extract_etag(response) |
def create_extended_model(model, db_penalty=None, ex_penalty=None,
tp_penalty=None, penalties=None):
"""Create an extended model for gap-filling.
Create a :class:`psamm.metabolicmodel.MetabolicModel` with
all reactions added (the reaction database in the model is taken
to be the universal database) and also with artificial exchange
and transport reactions added. Return the extended
:class:`psamm.metabolicmodel.MetabolicModel`
and a weight dictionary for added reactions in that model.
Args:
model: :class:`psamm.datasource.native.NativeModel`.
db_penalty: penalty score for database reactions, default is `None`.
ex_penalty: penalty score for exchange reactions, default is `None`.
tb_penalty: penalty score for transport reactions, default is `None`.
penalties: a dictionary of penalty scores for database reactions.
"""
# Create metabolic model
model_extended = model.create_metabolic_model()
extra_compartment = model.extracellular_compartment
compartment_ids = set(c.id for c in model.compartments)
# Add database reactions to extended model
if len(compartment_ids) > 0:
logger.info(
'Using all database reactions in compartments: {}...'.format(
', '.join('{}'.format(c) for c in compartment_ids)))
db_added = add_all_database_reactions(model_extended, compartment_ids)
else:
logger.warning(
'No compartments specified in the model; database reactions will'
' not be used! Add compartment specification to model to include'
' database reactions for those compartments.')
db_added = set()
# Add exchange reactions to extended model
logger.info(
'Using artificial exchange reactions for compartment: {}...'.format(
extra_compartment))
ex_added = add_all_exchange_reactions(
model_extended, extra_compartment, allow_duplicates=True)
# Add transport reactions to extended model
boundaries = model.compartment_boundaries
if len(boundaries) > 0:
logger.info(
'Using artificial transport reactions for the compartment'
' boundaries: {}...'.format(
'; '.join('{}<->{}'.format(c1, c2) for c1, c2 in boundaries)))
tp_added = add_all_transport_reactions(
model_extended, boundaries, allow_duplicates=True)
else:
logger.warning(
'No compartment boundaries specified in the model;'
' artificial transport reactions will not be used!')
tp_added = set()
# Add penalty weights on reactions
weights = {}
if db_penalty is not None:
weights.update((rxnid, db_penalty) for rxnid in db_added)
if tp_penalty is not None:
weights.update((rxnid, tp_penalty) for rxnid in tp_added)
if ex_penalty is not None:
weights.update((rxnid, ex_penalty) for rxnid in ex_added)
if penalties is not None:
for rxnid, penalty in iteritems(penalties):
weights[rxnid] = penalty
return model_extended, weights | def function[create_extended_model, parameter[model, db_penalty, ex_penalty, tp_penalty, penalties]]:
constant[Create an extended model for gap-filling.
Create a :class:`psamm.metabolicmodel.MetabolicModel` with
all reactions added (the reaction database in the model is taken
to be the universal database) and also with artificial exchange
and transport reactions added. Return the extended
:class:`psamm.metabolicmodel.MetabolicModel`
and a weight dictionary for added reactions in that model.
Args:
model: :class:`psamm.datasource.native.NativeModel`.
db_penalty: penalty score for database reactions, default is `None`.
ex_penalty: penalty score for exchange reactions, default is `None`.
tb_penalty: penalty score for transport reactions, default is `None`.
penalties: a dictionary of penalty scores for database reactions.
]
variable[model_extended] assign[=] call[name[model].create_metabolic_model, parameter[]]
variable[extra_compartment] assign[=] name[model].extracellular_compartment
variable[compartment_ids] assign[=] call[name[set], parameter[<ast.GeneratorExp object at 0x7da18f810e50>]]
if compare[call[name[len], parameter[name[compartment_ids]]] greater[>] constant[0]] begin[:]
call[name[logger].info, parameter[call[constant[Using all database reactions in compartments: {}...].format, parameter[call[constant[, ].join, parameter[<ast.GeneratorExp object at 0x7da18f811fc0>]]]]]]
variable[db_added] assign[=] call[name[add_all_database_reactions], parameter[name[model_extended], name[compartment_ids]]]
call[name[logger].info, parameter[call[constant[Using artificial exchange reactions for compartment: {}...].format, parameter[name[extra_compartment]]]]]
variable[ex_added] assign[=] call[name[add_all_exchange_reactions], parameter[name[model_extended], name[extra_compartment]]]
variable[boundaries] assign[=] name[model].compartment_boundaries
if compare[call[name[len], parameter[name[boundaries]]] greater[>] constant[0]] begin[:]
call[name[logger].info, parameter[call[constant[Using artificial transport reactions for the compartment boundaries: {}...].format, parameter[call[constant[; ].join, parameter[<ast.GeneratorExp object at 0x7da18f811c30>]]]]]]
variable[tp_added] assign[=] call[name[add_all_transport_reactions], parameter[name[model_extended], name[boundaries]]]
variable[weights] assign[=] dictionary[[], []]
if compare[name[db_penalty] is_not constant[None]] begin[:]
call[name[weights].update, parameter[<ast.GeneratorExp object at 0x7da18f811ea0>]]
if compare[name[tp_penalty] is_not constant[None]] begin[:]
call[name[weights].update, parameter[<ast.GeneratorExp object at 0x7da18f8110f0>]]
if compare[name[ex_penalty] is_not constant[None]] begin[:]
call[name[weights].update, parameter[<ast.GeneratorExp object at 0x7da18f813760>]]
if compare[name[penalties] is_not constant[None]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da18f8128c0>, <ast.Name object at 0x7da18f8114b0>]]] in starred[call[name[iteritems], parameter[name[penalties]]]] begin[:]
call[name[weights]][name[rxnid]] assign[=] name[penalty]
return[tuple[[<ast.Name object at 0x7da18f813370>, <ast.Name object at 0x7da18f813a60>]]] | keyword[def] identifier[create_extended_model] ( identifier[model] , identifier[db_penalty] = keyword[None] , identifier[ex_penalty] = keyword[None] ,
identifier[tp_penalty] = keyword[None] , identifier[penalties] = keyword[None] ):
literal[string]
identifier[model_extended] = identifier[model] . identifier[create_metabolic_model] ()
identifier[extra_compartment] = identifier[model] . identifier[extracellular_compartment]
identifier[compartment_ids] = identifier[set] ( identifier[c] . identifier[id] keyword[for] identifier[c] keyword[in] identifier[model] . identifier[compartments] )
keyword[if] identifier[len] ( identifier[compartment_ids] )> literal[int] :
identifier[logger] . identifier[info] (
literal[string] . identifier[format] (
literal[string] . identifier[join] ( literal[string] . identifier[format] ( identifier[c] ) keyword[for] identifier[c] keyword[in] identifier[compartment_ids] )))
identifier[db_added] = identifier[add_all_database_reactions] ( identifier[model_extended] , identifier[compartment_ids] )
keyword[else] :
identifier[logger] . identifier[warning] (
literal[string]
literal[string]
literal[string] )
identifier[db_added] = identifier[set] ()
identifier[logger] . identifier[info] (
literal[string] . identifier[format] (
identifier[extra_compartment] ))
identifier[ex_added] = identifier[add_all_exchange_reactions] (
identifier[model_extended] , identifier[extra_compartment] , identifier[allow_duplicates] = keyword[True] )
identifier[boundaries] = identifier[model] . identifier[compartment_boundaries]
keyword[if] identifier[len] ( identifier[boundaries] )> literal[int] :
identifier[logger] . identifier[info] (
literal[string]
literal[string] . identifier[format] (
literal[string] . identifier[join] ( literal[string] . identifier[format] ( identifier[c1] , identifier[c2] ) keyword[for] identifier[c1] , identifier[c2] keyword[in] identifier[boundaries] )))
identifier[tp_added] = identifier[add_all_transport_reactions] (
identifier[model_extended] , identifier[boundaries] , identifier[allow_duplicates] = keyword[True] )
keyword[else] :
identifier[logger] . identifier[warning] (
literal[string]
literal[string] )
identifier[tp_added] = identifier[set] ()
identifier[weights] ={}
keyword[if] identifier[db_penalty] keyword[is] keyword[not] keyword[None] :
identifier[weights] . identifier[update] (( identifier[rxnid] , identifier[db_penalty] ) keyword[for] identifier[rxnid] keyword[in] identifier[db_added] )
keyword[if] identifier[tp_penalty] keyword[is] keyword[not] keyword[None] :
identifier[weights] . identifier[update] (( identifier[rxnid] , identifier[tp_penalty] ) keyword[for] identifier[rxnid] keyword[in] identifier[tp_added] )
keyword[if] identifier[ex_penalty] keyword[is] keyword[not] keyword[None] :
identifier[weights] . identifier[update] (( identifier[rxnid] , identifier[ex_penalty] ) keyword[for] identifier[rxnid] keyword[in] identifier[ex_added] )
keyword[if] identifier[penalties] keyword[is] keyword[not] keyword[None] :
keyword[for] identifier[rxnid] , identifier[penalty] keyword[in] identifier[iteritems] ( identifier[penalties] ):
identifier[weights] [ identifier[rxnid] ]= identifier[penalty]
keyword[return] identifier[model_extended] , identifier[weights] | def create_extended_model(model, db_penalty=None, ex_penalty=None, tp_penalty=None, penalties=None):
"""Create an extended model for gap-filling.
Create a :class:`psamm.metabolicmodel.MetabolicModel` with
all reactions added (the reaction database in the model is taken
to be the universal database) and also with artificial exchange
and transport reactions added. Return the extended
:class:`psamm.metabolicmodel.MetabolicModel`
and a weight dictionary for added reactions in that model.
Args:
model: :class:`psamm.datasource.native.NativeModel`.
db_penalty: penalty score for database reactions, default is `None`.
ex_penalty: penalty score for exchange reactions, default is `None`.
tb_penalty: penalty score for transport reactions, default is `None`.
penalties: a dictionary of penalty scores for database reactions.
"""
# Create metabolic model
model_extended = model.create_metabolic_model()
extra_compartment = model.extracellular_compartment
compartment_ids = set((c.id for c in model.compartments))
# Add database reactions to extended model
if len(compartment_ids) > 0:
logger.info('Using all database reactions in compartments: {}...'.format(', '.join(('{}'.format(c) for c in compartment_ids))))
db_added = add_all_database_reactions(model_extended, compartment_ids) # depends on [control=['if'], data=[]]
else:
logger.warning('No compartments specified in the model; database reactions will not be used! Add compartment specification to model to include database reactions for those compartments.')
db_added = set()
# Add exchange reactions to extended model
logger.info('Using artificial exchange reactions for compartment: {}...'.format(extra_compartment))
ex_added = add_all_exchange_reactions(model_extended, extra_compartment, allow_duplicates=True)
# Add transport reactions to extended model
boundaries = model.compartment_boundaries
if len(boundaries) > 0:
logger.info('Using artificial transport reactions for the compartment boundaries: {}...'.format('; '.join(('{}<->{}'.format(c1, c2) for (c1, c2) in boundaries))))
tp_added = add_all_transport_reactions(model_extended, boundaries, allow_duplicates=True) # depends on [control=['if'], data=[]]
else:
logger.warning('No compartment boundaries specified in the model; artificial transport reactions will not be used!')
tp_added = set()
# Add penalty weights on reactions
weights = {}
if db_penalty is not None:
weights.update(((rxnid, db_penalty) for rxnid in db_added)) # depends on [control=['if'], data=['db_penalty']]
if tp_penalty is not None:
weights.update(((rxnid, tp_penalty) for rxnid in tp_added)) # depends on [control=['if'], data=['tp_penalty']]
if ex_penalty is not None:
weights.update(((rxnid, ex_penalty) for rxnid in ex_added)) # depends on [control=['if'], data=['ex_penalty']]
if penalties is not None:
for (rxnid, penalty) in iteritems(penalties):
weights[rxnid] = penalty # depends on [control=['for'], data=[]] # depends on [control=['if'], data=['penalties']]
return (model_extended, weights) |
def env_string(name: str, required: bool=False, default: Union[Type[empty], str]=empty) -> str:
"""Pulls an environment variable out of the environment returning it as a
string. If not present in the environment and no default is specified, an
empty string is returned.
:param name: The name of the environment variable be pulled
:type name: str
:param required: Whether the environment variable is required. If ``True``
and the variable is not present, a ``KeyError`` is raised.
:type required: bool
:param default: The value to return if the environment variable is not
present. (Providing a default alongside setting ``required=True`` will raise
a ``ValueError``)
:type default: bool
"""
value = get_env_value(name, default=default, required=required)
if value is empty:
value = ''
return value | def function[env_string, parameter[name, required, default]]:
constant[Pulls an environment variable out of the environment returning it as a
string. If not present in the environment and no default is specified, an
empty string is returned.
:param name: The name of the environment variable be pulled
:type name: str
:param required: Whether the environment variable is required. If ``True``
and the variable is not present, a ``KeyError`` is raised.
:type required: bool
:param default: The value to return if the environment variable is not
present. (Providing a default alongside setting ``required=True`` will raise
a ``ValueError``)
:type default: bool
]
variable[value] assign[=] call[name[get_env_value], parameter[name[name]]]
if compare[name[value] is name[empty]] begin[:]
variable[value] assign[=] constant[]
return[name[value]] | keyword[def] identifier[env_string] ( identifier[name] : identifier[str] , identifier[required] : identifier[bool] = keyword[False] , identifier[default] : identifier[Union] [ identifier[Type] [ identifier[empty] ], identifier[str] ]= identifier[empty] )-> identifier[str] :
literal[string]
identifier[value] = identifier[get_env_value] ( identifier[name] , identifier[default] = identifier[default] , identifier[required] = identifier[required] )
keyword[if] identifier[value] keyword[is] identifier[empty] :
identifier[value] = literal[string]
keyword[return] identifier[value] | def env_string(name: str, required: bool=False, default: Union[Type[empty], str]=empty) -> str:
"""Pulls an environment variable out of the environment returning it as a
string. If not present in the environment and no default is specified, an
empty string is returned.
:param name: The name of the environment variable be pulled
:type name: str
:param required: Whether the environment variable is required. If ``True``
and the variable is not present, a ``KeyError`` is raised.
:type required: bool
:param default: The value to return if the environment variable is not
present. (Providing a default alongside setting ``required=True`` will raise
a ``ValueError``)
:type default: bool
"""
value = get_env_value(name, default=default, required=required)
if value is empty:
value = '' # depends on [control=['if'], data=['value']]
return value |
def __parse(self, raw_string):
""" parse raw string, replace function and variable with {}
Args:
raw_string(str): string with functions or varialbes
e.g. "ABC${func2($a, $b)}DE$c"
Returns:
string: "ABC{}DE{}"
args: ["${func2($a, $b)}", "$c"]
"""
self._args = []
def escape_braces(origin_string):
return origin_string.replace("{", "{{").replace("}", "}}")
try:
match_start_position = raw_string.index("$", 0)
begin_string = raw_string[0:match_start_position]
self._string = escape_braces(begin_string)
except ValueError:
self._string = escape_braces(raw_string)
return
while match_start_position < len(raw_string):
# Notice: notation priority
# $$ > ${func($a, $b)} > $var
# search $$
dollar_match = dolloar_regex_compile.match(raw_string, match_start_position)
if dollar_match:
match_start_position = dollar_match.end()
self._string += "$"
continue
# search function like ${func($a, $b)}
func_match = function_regex_compile.match(raw_string, match_start_position)
if func_match:
function_meta = parse_function_params(func_match.group(1))
function_meta = {
"func_name": func_match.group(1)
}
function_meta.update(parse_function_params(func_match.group(2)))
lazy_func = LazyFunction(
function_meta,
self.functions_mapping,
self.check_variables_set
)
self._args.append(lazy_func)
match_start_position = func_match.end()
self._string += "{}"
continue
# search variable like ${var} or $var
var_match = variable_regex_compile.match(raw_string, match_start_position)
if var_match:
var_name = var_match.group(1) or var_match.group(2)
# check if any variable undefined in check_variables_set
if var_name not in self.check_variables_set:
raise exceptions.VariableNotFound(var_name)
self._args.append(var_name)
match_start_position = var_match.end()
self._string += "{}"
continue
curr_position = match_start_position
try:
# find next $ location
match_start_position = raw_string.index("$", curr_position+1)
remain_string = raw_string[curr_position:match_start_position]
except ValueError:
remain_string = raw_string[curr_position:]
# break while loop
match_start_position = len(raw_string)
self._string += escape_braces(remain_string) | def function[__parse, parameter[self, raw_string]]:
constant[ parse raw string, replace function and variable with {}
Args:
raw_string(str): string with functions or varialbes
e.g. "ABC${func2($a, $b)}DE$c"
Returns:
string: "ABC{}DE{}"
args: ["${func2($a, $b)}", "$c"]
]
name[self]._args assign[=] list[[]]
def function[escape_braces, parameter[origin_string]]:
return[call[call[name[origin_string].replace, parameter[constant[{], constant[{{]]].replace, parameter[constant[}], constant[}}]]]]
<ast.Try object at 0x7da1b1b13490>
while compare[name[match_start_position] less[<] call[name[len], parameter[name[raw_string]]]] begin[:]
variable[dollar_match] assign[=] call[name[dolloar_regex_compile].match, parameter[name[raw_string], name[match_start_position]]]
if name[dollar_match] begin[:]
variable[match_start_position] assign[=] call[name[dollar_match].end, parameter[]]
<ast.AugAssign object at 0x7da1b1b13af0>
continue
variable[func_match] assign[=] call[name[function_regex_compile].match, parameter[name[raw_string], name[match_start_position]]]
if name[func_match] begin[:]
variable[function_meta] assign[=] call[name[parse_function_params], parameter[call[name[func_match].group, parameter[constant[1]]]]]
variable[function_meta] assign[=] dictionary[[<ast.Constant object at 0x7da1b1b11de0>], [<ast.Call object at 0x7da1b1b12d10>]]
call[name[function_meta].update, parameter[call[name[parse_function_params], parameter[call[name[func_match].group, parameter[constant[2]]]]]]]
variable[lazy_func] assign[=] call[name[LazyFunction], parameter[name[function_meta], name[self].functions_mapping, name[self].check_variables_set]]
call[name[self]._args.append, parameter[name[lazy_func]]]
variable[match_start_position] assign[=] call[name[func_match].end, parameter[]]
<ast.AugAssign object at 0x7da1b1b126e0>
continue
variable[var_match] assign[=] call[name[variable_regex_compile].match, parameter[name[raw_string], name[match_start_position]]]
if name[var_match] begin[:]
variable[var_name] assign[=] <ast.BoolOp object at 0x7da18dc9a3e0>
if compare[name[var_name] <ast.NotIn object at 0x7da2590d7190> name[self].check_variables_set] begin[:]
<ast.Raise object at 0x7da18dc9a560>
call[name[self]._args.append, parameter[name[var_name]]]
variable[match_start_position] assign[=] call[name[var_match].end, parameter[]]
<ast.AugAssign object at 0x7da18dc9ac80>
continue
variable[curr_position] assign[=] name[match_start_position]
<ast.Try object at 0x7da18dc9a410>
<ast.AugAssign object at 0x7da18dc99cc0> | keyword[def] identifier[__parse] ( identifier[self] , identifier[raw_string] ):
literal[string]
identifier[self] . identifier[_args] =[]
keyword[def] identifier[escape_braces] ( identifier[origin_string] ):
keyword[return] identifier[origin_string] . identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] )
keyword[try] :
identifier[match_start_position] = identifier[raw_string] . identifier[index] ( literal[string] , literal[int] )
identifier[begin_string] = identifier[raw_string] [ literal[int] : identifier[match_start_position] ]
identifier[self] . identifier[_string] = identifier[escape_braces] ( identifier[begin_string] )
keyword[except] identifier[ValueError] :
identifier[self] . identifier[_string] = identifier[escape_braces] ( identifier[raw_string] )
keyword[return]
keyword[while] identifier[match_start_position] < identifier[len] ( identifier[raw_string] ):
identifier[dollar_match] = identifier[dolloar_regex_compile] . identifier[match] ( identifier[raw_string] , identifier[match_start_position] )
keyword[if] identifier[dollar_match] :
identifier[match_start_position] = identifier[dollar_match] . identifier[end] ()
identifier[self] . identifier[_string] += literal[string]
keyword[continue]
identifier[func_match] = identifier[function_regex_compile] . identifier[match] ( identifier[raw_string] , identifier[match_start_position] )
keyword[if] identifier[func_match] :
identifier[function_meta] = identifier[parse_function_params] ( identifier[func_match] . identifier[group] ( literal[int] ))
identifier[function_meta] ={
literal[string] : identifier[func_match] . identifier[group] ( literal[int] )
}
identifier[function_meta] . identifier[update] ( identifier[parse_function_params] ( identifier[func_match] . identifier[group] ( literal[int] )))
identifier[lazy_func] = identifier[LazyFunction] (
identifier[function_meta] ,
identifier[self] . identifier[functions_mapping] ,
identifier[self] . identifier[check_variables_set]
)
identifier[self] . identifier[_args] . identifier[append] ( identifier[lazy_func] )
identifier[match_start_position] = identifier[func_match] . identifier[end] ()
identifier[self] . identifier[_string] += literal[string]
keyword[continue]
identifier[var_match] = identifier[variable_regex_compile] . identifier[match] ( identifier[raw_string] , identifier[match_start_position] )
keyword[if] identifier[var_match] :
identifier[var_name] = identifier[var_match] . identifier[group] ( literal[int] ) keyword[or] identifier[var_match] . identifier[group] ( literal[int] )
keyword[if] identifier[var_name] keyword[not] keyword[in] identifier[self] . identifier[check_variables_set] :
keyword[raise] identifier[exceptions] . identifier[VariableNotFound] ( identifier[var_name] )
identifier[self] . identifier[_args] . identifier[append] ( identifier[var_name] )
identifier[match_start_position] = identifier[var_match] . identifier[end] ()
identifier[self] . identifier[_string] += literal[string]
keyword[continue]
identifier[curr_position] = identifier[match_start_position]
keyword[try] :
identifier[match_start_position] = identifier[raw_string] . identifier[index] ( literal[string] , identifier[curr_position] + literal[int] )
identifier[remain_string] = identifier[raw_string] [ identifier[curr_position] : identifier[match_start_position] ]
keyword[except] identifier[ValueError] :
identifier[remain_string] = identifier[raw_string] [ identifier[curr_position] :]
identifier[match_start_position] = identifier[len] ( identifier[raw_string] )
identifier[self] . identifier[_string] += identifier[escape_braces] ( identifier[remain_string] ) | def __parse(self, raw_string):
""" parse raw string, replace function and variable with {}
Args:
raw_string(str): string with functions or varialbes
e.g. "ABC${func2($a, $b)}DE$c"
Returns:
string: "ABC{}DE{}"
args: ["${func2($a, $b)}", "$c"]
"""
self._args = []
def escape_braces(origin_string):
return origin_string.replace('{', '{{').replace('}', '}}')
try:
match_start_position = raw_string.index('$', 0)
begin_string = raw_string[0:match_start_position]
self._string = escape_braces(begin_string) # depends on [control=['try'], data=[]]
except ValueError:
self._string = escape_braces(raw_string)
return # depends on [control=['except'], data=[]]
while match_start_position < len(raw_string):
# Notice: notation priority
# $$ > ${func($a, $b)} > $var
# search $$
dollar_match = dolloar_regex_compile.match(raw_string, match_start_position)
if dollar_match:
match_start_position = dollar_match.end()
self._string += '$'
continue # depends on [control=['if'], data=[]]
# search function like ${func($a, $b)}
func_match = function_regex_compile.match(raw_string, match_start_position)
if func_match:
function_meta = parse_function_params(func_match.group(1))
function_meta = {'func_name': func_match.group(1)}
function_meta.update(parse_function_params(func_match.group(2)))
lazy_func = LazyFunction(function_meta, self.functions_mapping, self.check_variables_set)
self._args.append(lazy_func)
match_start_position = func_match.end()
self._string += '{}'
continue # depends on [control=['if'], data=[]]
# search variable like ${var} or $var
var_match = variable_regex_compile.match(raw_string, match_start_position)
if var_match:
var_name = var_match.group(1) or var_match.group(2)
# check if any variable undefined in check_variables_set
if var_name not in self.check_variables_set:
raise exceptions.VariableNotFound(var_name) # depends on [control=['if'], data=['var_name']]
self._args.append(var_name)
match_start_position = var_match.end()
self._string += '{}'
continue # depends on [control=['if'], data=[]]
curr_position = match_start_position
try:
# find next $ location
match_start_position = raw_string.index('$', curr_position + 1)
remain_string = raw_string[curr_position:match_start_position] # depends on [control=['try'], data=[]]
except ValueError:
remain_string = raw_string[curr_position:]
# break while loop
match_start_position = len(raw_string) # depends on [control=['except'], data=[]]
self._string += escape_braces(remain_string) # depends on [control=['while'], data=['match_start_position']] |
def chi_magic2(path_to_file='.', file_name='magic_measurements.txt',
               save=False, save_folder='.', fmt='svg'):
    """
    Generates plots that compare susceptibility to temperature at different
    frequencies.
    Optional Parameters (defaults are used if not specified)
    ----------
    path_to_file : path to directory that contains file (default is current directory, '.')
    file_name : name of file to be opened (default is 'magic_measurements.txt')
    save : boolean argument to save plots (default is False)
    save_folder : relative directory where plots will be saved (default is current directory, '.')
    fmt : image file format used when saving plots (default is 'svg')
    """
    k = 0       # experiment counter for the while loop below
    EXP = ""    # optional single-experiment filter; empty string means plot all
    complete_path = os.path.join(path_to_file, file_name)
    #
    meas_data, file_type = pmag.magic_read(complete_path)
    #
    # get list of unique experiment names, preserving first-seen order
    #
    experiment_names = []
    for rec in meas_data:
        if rec['magic_experiment_name'] not in experiment_names:
            experiment_names.append(rec['magic_experiment_name'])
    #
    # hunt through by experiment name
    if EXP != "":
        try:
            k = experiment_names.index(EXP)
        except ValueError:  # narrowed from a bare except: only .index() can fail here
            print("Bad experiment name")
            sys.exit()
    while k < len(experiment_names):
        e = experiment_names[k]
        if EXP == "":
            print(e, k + 1, 'out of ', len(experiment_names))
        #
        # initialize lists of data: susceptibility, temperature, frequency and
        # field
        X, T, F, B = [], [], [], []
        for rec in meas_data:
            methcodes = rec['magic_method_codes']
            meths = methcodes.strip().split(':')
            if rec['magic_experiment_name'] == e and "LP-X" in meths:  # looking for chi measurement
                if 'measurement_temp' not in list(rec.keys()):
                    rec['measurement_temp'] = '300'  # set defaults
                if 'measurement_freq' not in list(rec.keys()):
                    rec['measurement_freq'] = '0'  # set defaults
                if 'measurement_lab_field_ac' not in list(rec.keys()):
                    rec['measurement_lab_field_ac'] = '0'  # set default
                X.append(float(rec['measurement_x']))
                T.append(float(rec['measurement_temp']))
                F.append(float(rec['measurement_freq']))
                B.append(float(rec['measurement_lab_field_ac']))
        #
        # get unique list of Ts, Fs, and Bs
        #
        # BUGFIX: the original reused `k` as this loop's index, clobbering the
        # outer while-loop experiment counter (the later `k += 1` then resumed
        # from len(X), skipping or repeating experiments). Use a fresh index.
        Ts, Fs, Bs = [], [], []
        for m in range(len(X)):  # hunt through all the measurements
            if T[m] not in Ts:
                Ts.append(T[m])  # append if not in list
            if F[m] not in Fs:
                Fs.append(F[m])
            if B[m] not in Bs:
                Bs.append(B[m])
        Ts.sort()  # sort list of temperatures, frequencies and fields
        Fs.sort()
        Bs.sort()
        if '-x' in sys.argv:
            k = len(experiment_names) + 1  # just plot the one
        else:
            k += 1  # increment experiment number
        #
        # plot chi versus T and F holding B constant
        #
        plotnum = 1  # initialize plot number to 1
        if len(X) > 2:  # if there are any data to plot, continue
            b = Bs[-1]  # keeping field constant and at maximum
            XTF = []  # initialize list of chi versus Temp and freq
            for f in Fs:  # step through frequencies sequentially
                XT = []  # initialize list of chi versus temp
                for kk in range(len(X)):  # hunt through all the data
                    if F[kk] == f and B[kk] == b:  # select data with given freq and field
                        XT.append([X[kk], T[kk]])  # append to list
                XTF.append(XT)  # append list to list of frequencies
            if len(XT) > 1:  # if there are any temperature dependent data
                plt.figure(num=plotnum, figsize=(5, 5))  # initialize plot
                # call the plotting function
                pmagplotlib.plot_xtf(plotnum, XTF, Fs, e, b)
                pmagplotlib.show_fig(plotnum)
                plotnum += 1  # increment plot number
            f = Fs[0]  # set frequency to minimum
            XTB = []  # initialize list of chi versus Temp and field
            for b in Bs:  # step through field values
                XT = []  # initial chi versus temp list for this field
                for kk in range(len(X)):  # hunt through all the data
                    if F[kk] == f and B[kk] == b:  # select data with given freq and field
                        XT.append([X[kk], T[kk]])  # append to list
                XTB.append(XT)
            if len(XT) > 1:  # if there are any temperature dependent data
                plt.figure(num=plotnum, figsize=(5, 5))  # set up plot
                # call the plotting function
                pmagplotlib.plot_xtb(plotnum, XTB, Bs, e, f)
                pmagplotlib.show_fig(plotnum)
                plotnum += 1  # increment plot number
            if save:
                files = {}
                PLTS = {}
                for p in range(1, plotnum):
                    key = str(p)
                    files[key] = e + '_' + key + '.' + fmt
                    PLTS[key] = p
                for key in list(PLTS.keys()):
                    try:
                        plt.figure(num=PLTS[key])
                        plt.savefig(save_folder + '/' +
                                    files[key].replace('/', '-'))
                    except Exception:  # narrowed from bare except; savefig may reject the format
                        print('could not save: ', PLTS[key], files[key])
                        print("output file format not supported ")
constant[
Generates plots that compare susceptibility to temperature at different
frequencies.
Optional Parameters (defaults are used if not specified)
----------
path_to_file : path to directory that contains file (default is current directory, '.')
file_name : name of file to be opened (default is 'magic_measurements.txt')
save : boolean argument to save plots (default is False)
save_folder : relative directory where plots will be saved (default is current directory, '.')
]
<ast.Tuple object at 0x7da20c6ab2b0> assign[=] tuple[[<ast.Constant object at 0x7da20c6a9780>, <ast.Constant object at 0x7da20c6abd60>, <ast.Constant object at 0x7da20c6ab0d0>, <ast.Constant object at 0x7da20c6aa290>]]
variable[complete_path] assign[=] call[name[os].path.join, parameter[name[path_to_file], name[file_name]]]
<ast.Tuple object at 0x7da20c6a8490> assign[=] tuple[[<ast.Constant object at 0x7da20c6a9ae0>, <ast.Constant object at 0x7da20c6ab9d0>]]
variable[EXP] assign[=] constant[]
<ast.Tuple object at 0x7da20c6a86d0> assign[=] call[name[pmag].magic_read, parameter[name[complete_path]]]
variable[experiment_names] assign[=] list[[]]
for taget[name[rec]] in starred[name[meas_data]] begin[:]
if compare[call[name[rec]][constant[magic_experiment_name]] <ast.NotIn object at 0x7da2590d7190> name[experiment_names]] begin[:]
call[name[experiment_names].append, parameter[call[name[rec]][constant[magic_experiment_name]]]]
if compare[name[EXP] not_equal[!=] constant[]] begin[:]
<ast.Try object at 0x7da20c6aa260>
while compare[name[k] less[<] call[name[len], parameter[name[experiment_names]]]] begin[:]
variable[e] assign[=] call[name[experiment_names]][name[k]]
if compare[name[EXP] equal[==] constant[]] begin[:]
call[name[print], parameter[name[e], binary_operation[name[k] + constant[1]], constant[out of ], call[name[len], parameter[name[experiment_names]]]]]
<ast.Tuple object at 0x7da20c6aa020> assign[=] tuple[[<ast.List object at 0x7da20c6a8dc0>, <ast.List object at 0x7da20c6a81c0>, <ast.List object at 0x7da20c6a9f30>, <ast.List object at 0x7da20c6aad40>]]
for taget[name[rec]] in starred[name[meas_data]] begin[:]
variable[methcodes] assign[=] call[name[rec]][constant[magic_method_codes]]
variable[meths] assign[=] call[call[name[methcodes].strip, parameter[]].split, parameter[constant[:]]]
if <ast.BoolOp object at 0x7da20c6ab8b0> begin[:]
if compare[constant[measurement_temp] <ast.NotIn object at 0x7da2590d7190> call[name[list], parameter[call[name[rec].keys, parameter[]]]]] begin[:]
call[name[rec]][constant[measurement_temp]] assign[=] constant[300]
if compare[constant[measurement_freq] <ast.NotIn object at 0x7da2590d7190> call[name[list], parameter[call[name[rec].keys, parameter[]]]]] begin[:]
call[name[rec]][constant[measurement_freq]] assign[=] constant[0]
if compare[constant[measurement_lab_field_ac] <ast.NotIn object at 0x7da2590d7190> call[name[list], parameter[call[name[rec].keys, parameter[]]]]] begin[:]
call[name[rec]][constant[measurement_lab_field_ac]] assign[=] constant[0]
call[name[X].append, parameter[call[name[float], parameter[call[name[rec]][constant[measurement_x]]]]]]
call[name[T].append, parameter[call[name[float], parameter[call[name[rec]][constant[measurement_temp]]]]]]
call[name[F].append, parameter[call[name[float], parameter[call[name[rec]][constant[measurement_freq]]]]]]
call[name[B].append, parameter[call[name[float], parameter[call[name[rec]][constant[measurement_lab_field_ac]]]]]]
<ast.Tuple object at 0x7da1b05d9e40> assign[=] tuple[[<ast.List object at 0x7da1b05dbe20>, <ast.List object at 0x7da1b05db6a0>, <ast.List object at 0x7da1b05dae00>]]
for taget[name[k]] in starred[call[name[range], parameter[call[name[len], parameter[name[X]]]]]] begin[:]
if compare[call[name[T]][name[k]] <ast.NotIn object at 0x7da2590d7190> name[Ts]] begin[:]
call[name[Ts].append, parameter[call[name[T]][name[k]]]]
if compare[call[name[F]][name[k]] <ast.NotIn object at 0x7da2590d7190> name[Fs]] begin[:]
call[name[Fs].append, parameter[call[name[F]][name[k]]]]
if compare[call[name[B]][name[k]] <ast.NotIn object at 0x7da2590d7190> name[Bs]] begin[:]
call[name[Bs].append, parameter[call[name[B]][name[k]]]]
call[name[Ts].sort, parameter[]]
call[name[Fs].sort, parameter[]]
call[name[Bs].sort, parameter[]]
if compare[constant[-x] in name[sys].argv] begin[:]
variable[k] assign[=] binary_operation[call[name[len], parameter[name[experiment_names]]] + constant[1]]
variable[plotnum] assign[=] constant[1]
if compare[call[name[len], parameter[name[X]]] greater[>] constant[2]] begin[:]
variable[b] assign[=] call[name[Bs]][<ast.UnaryOp object at 0x7da1b05dafe0>]
variable[XTF] assign[=] list[[]]
for taget[name[f]] in starred[name[Fs]] begin[:]
variable[XT] assign[=] list[[]]
for taget[name[kk]] in starred[call[name[range], parameter[call[name[len], parameter[name[X]]]]]] begin[:]
if <ast.BoolOp object at 0x7da1b05da080> begin[:]
call[name[XT].append, parameter[list[[<ast.Subscript object at 0x7da1b05da020>, <ast.Subscript object at 0x7da1b05dba30>]]]]
call[name[XTF].append, parameter[name[XT]]]
if compare[call[name[len], parameter[name[XT]]] greater[>] constant[1]] begin[:]
call[name[plt].figure, parameter[]]
call[name[pmagplotlib].plot_xtf, parameter[name[plotnum], name[XTF], name[Fs], name[e], name[b]]]
call[name[pmagplotlib].show_fig, parameter[name[plotnum]]]
<ast.AugAssign object at 0x7da1b05d8550>
variable[f] assign[=] call[name[Fs]][constant[0]]
variable[XTB] assign[=] list[[]]
for taget[name[b]] in starred[name[Bs]] begin[:]
variable[XT] assign[=] list[[]]
for taget[name[kk]] in starred[call[name[range], parameter[call[name[len], parameter[name[X]]]]]] begin[:]
if <ast.BoolOp object at 0x7da1b05d9360> begin[:]
call[name[XT].append, parameter[list[[<ast.Subscript object at 0x7da1b05db550>, <ast.Subscript object at 0x7da1b05da290>]]]]
call[name[XTB].append, parameter[name[XT]]]
if compare[call[name[len], parameter[name[XT]]] greater[>] constant[1]] begin[:]
call[name[plt].figure, parameter[]]
call[name[pmagplotlib].plot_xtb, parameter[name[plotnum], name[XTB], name[Bs], name[e], name[f]]]
call[name[pmagplotlib].show_fig, parameter[name[plotnum]]]
<ast.AugAssign object at 0x7da1b05dba90>
if compare[name[save] equal[==] constant[True]] begin[:]
variable[files] assign[=] dictionary[[], []]
variable[PLTS] assign[=] dictionary[[], []]
for taget[name[p]] in starred[call[name[range], parameter[constant[1], name[plotnum]]]] begin[:]
variable[key] assign[=] call[name[str], parameter[name[p]]]
call[name[files]][name[key]] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[name[e] + constant[_]] + name[key]] + constant[.]] + name[fmt]]
call[name[PLTS]][name[key]] assign[=] name[p]
for taget[name[key]] in starred[call[name[list], parameter[call[name[PLTS].keys, parameter[]]]]] begin[:]
<ast.Try object at 0x7da18eb57af0> | keyword[def] identifier[chi_magic2] ( identifier[path_to_file] = literal[string] , identifier[file_name] = literal[string] ,
identifier[save] = keyword[False] , identifier[save_folder] = literal[string] , identifier[fmt] = literal[string] ):
literal[string]
identifier[cont] , identifier[FTinit] , identifier[BTinit] , identifier[k] = literal[string] , literal[int] , literal[int] , literal[int]
identifier[complete_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[path_to_file] , identifier[file_name] )
identifier[Tind] , identifier[cont] = literal[int] , literal[string]
identifier[EXP] = literal[string]
identifier[meas_data] , identifier[file_type] = identifier[pmag] . identifier[magic_read] ( identifier[complete_path] )
identifier[experiment_names] =[]
keyword[for] identifier[rec] keyword[in] identifier[meas_data] :
keyword[if] identifier[rec] [ literal[string] ] keyword[not] keyword[in] identifier[experiment_names] :
identifier[experiment_names] . identifier[append] ( identifier[rec] [ literal[string] ])
keyword[if] identifier[EXP] != literal[string] :
keyword[try] :
identifier[k] = identifier[experiment_names] . identifier[index] ( identifier[EXP] )
keyword[except] :
identifier[print] ( literal[string] )
identifier[sys] . identifier[exit] ()
keyword[while] identifier[k] < identifier[len] ( identifier[experiment_names] ):
identifier[e] = identifier[experiment_names] [ identifier[k] ]
keyword[if] identifier[EXP] == literal[string] :
identifier[print] ( identifier[e] , identifier[k] + literal[int] , literal[string] , identifier[len] ( identifier[experiment_names] ))
identifier[X] , identifier[T] , identifier[F] , identifier[B] =[],[],[],[]
keyword[for] identifier[rec] keyword[in] identifier[meas_data] :
identifier[methcodes] = identifier[rec] [ literal[string] ]
identifier[meths] = identifier[methcodes] . identifier[strip] (). identifier[split] ( literal[string] )
keyword[if] identifier[rec] [ literal[string] ]== identifier[e] keyword[and] literal[string] keyword[in] identifier[meths] :
keyword[if] literal[string] keyword[not] keyword[in] identifier[list] ( identifier[rec] . identifier[keys] ()):
identifier[rec] [ literal[string] ]= literal[string]
keyword[if] literal[string] keyword[not] keyword[in] identifier[list] ( identifier[rec] . identifier[keys] ()):
identifier[rec] [ literal[string] ]= literal[string]
keyword[if] literal[string] keyword[not] keyword[in] identifier[list] ( identifier[rec] . identifier[keys] ()):
identifier[rec] [ literal[string] ]= literal[string]
identifier[X] . identifier[append] ( identifier[float] ( identifier[rec] [ literal[string] ]))
identifier[T] . identifier[append] ( identifier[float] ( identifier[rec] [ literal[string] ]))
identifier[F] . identifier[append] ( identifier[float] ( identifier[rec] [ literal[string] ]))
identifier[B] . identifier[append] ( identifier[float] ( identifier[rec] [ literal[string] ]))
identifier[Ts] , identifier[Fs] , identifier[Bs] =[],[],[]
keyword[for] identifier[k] keyword[in] identifier[range] ( identifier[len] ( identifier[X] )):
keyword[if] identifier[T] [ identifier[k] ] keyword[not] keyword[in] identifier[Ts] :
identifier[Ts] . identifier[append] ( identifier[T] [ identifier[k] ])
keyword[if] identifier[F] [ identifier[k] ] keyword[not] keyword[in] identifier[Fs] :
identifier[Fs] . identifier[append] ( identifier[F] [ identifier[k] ])
keyword[if] identifier[B] [ identifier[k] ] keyword[not] keyword[in] identifier[Bs] :
identifier[Bs] . identifier[append] ( identifier[B] [ identifier[k] ])
identifier[Ts] . identifier[sort] ()
identifier[Fs] . identifier[sort] ()
identifier[Bs] . identifier[sort] ()
keyword[if] literal[string] keyword[in] identifier[sys] . identifier[argv] :
identifier[k] = identifier[len] ( identifier[experiment_names] )+ literal[int]
keyword[else] :
identifier[k] += literal[int]
identifier[plotnum] = literal[int]
keyword[if] identifier[len] ( identifier[X] )> literal[int] :
identifier[b] = identifier[Bs] [- literal[int] ]
identifier[XTF] =[]
keyword[for] identifier[f] keyword[in] identifier[Fs] :
identifier[XT] =[]
keyword[for] identifier[kk] keyword[in] identifier[range] ( identifier[len] ( identifier[X] )):
keyword[if] identifier[F] [ identifier[kk] ]== identifier[f] keyword[and] identifier[B] [ identifier[kk] ]== identifier[b] :
identifier[XT] . identifier[append] ([ identifier[X] [ identifier[kk] ], identifier[T] [ identifier[kk] ]])
identifier[XTF] . identifier[append] ( identifier[XT] )
keyword[if] identifier[len] ( identifier[XT] )> literal[int] :
identifier[plt] . identifier[figure] ( identifier[num] = identifier[plotnum] , identifier[figsize] =( literal[int] , literal[int] ))
identifier[pmagplotlib] . identifier[plot_xtf] ( identifier[plotnum] , identifier[XTF] , identifier[Fs] , identifier[e] , identifier[b] )
identifier[pmagplotlib] . identifier[show_fig] ( identifier[plotnum] )
identifier[plotnum] += literal[int]
identifier[f] = identifier[Fs] [ literal[int] ]
identifier[XTB] =[]
keyword[for] identifier[b] keyword[in] identifier[Bs] :
identifier[XT] =[]
keyword[for] identifier[kk] keyword[in] identifier[range] ( identifier[len] ( identifier[X] )):
keyword[if] identifier[F] [ identifier[kk] ]== identifier[f] keyword[and] identifier[B] [ identifier[kk] ]== identifier[b] :
identifier[XT] . identifier[append] ([ identifier[X] [ identifier[kk] ], identifier[T] [ identifier[kk] ]])
identifier[XTB] . identifier[append] ( identifier[XT] )
keyword[if] identifier[len] ( identifier[XT] )> literal[int] :
identifier[plt] . identifier[figure] ( identifier[num] = identifier[plotnum] , identifier[figsize] =( literal[int] , literal[int] ))
identifier[pmagplotlib] . identifier[plot_xtb] ( identifier[plotnum] , identifier[XTB] , identifier[Bs] , identifier[e] , identifier[f] )
identifier[pmagplotlib] . identifier[show_fig] ( identifier[plotnum] )
identifier[plotnum] += literal[int]
keyword[if] identifier[save] == keyword[True] :
identifier[files] ={}
identifier[PLTS] ={}
keyword[for] identifier[p] keyword[in] identifier[range] ( literal[int] , identifier[plotnum] ):
identifier[key] = identifier[str] ( identifier[p] )
identifier[files] [ identifier[key] ]= identifier[e] + literal[string] + identifier[key] + literal[string] + identifier[fmt]
identifier[PLTS] [ identifier[key] ]= identifier[p]
keyword[for] identifier[key] keyword[in] identifier[list] ( identifier[PLTS] . identifier[keys] ()):
keyword[try] :
identifier[plt] . identifier[figure] ( identifier[num] = identifier[PLTS] [ identifier[key] ])
identifier[plt] . identifier[savefig] ( identifier[save_folder] + literal[string] +
identifier[files] [ identifier[key] ]. identifier[replace] ( literal[string] , literal[string] ))
keyword[except] :
identifier[print] ( literal[string] , identifier[PLTS] [ identifier[key] ], identifier[files] [ identifier[key] ])
identifier[print] ( literal[string] ) | def chi_magic2(path_to_file='.', file_name='magic_measurements.txt', save=False, save_folder='.', fmt='svg'):
"""
Generates plots that compare susceptibility to temperature at different
frequencies.
Optional Parameters (defaults are used if not specified)
----------
path_to_file : path to directory that contains file (default is current directory, '.')
file_name : name of file to be opened (default is 'magic_measurements.txt')
save : boolean argument to save plots (default is False)
save_folder : relative directory where plots will be saved (default is current directory, '.')
"""
(cont, FTinit, BTinit, k) = ('', 0, 0, 0)
complete_path = os.path.join(path_to_file, file_name)
(Tind, cont) = (0, '')
EXP = ''
#
(meas_data, file_type) = pmag.magic_read(complete_path)
#
# get list of unique experiment names
#
# initialize some variables (a continuation flag, plot initialization
# flags and the experiment counter
experiment_names = []
for rec in meas_data:
if rec['magic_experiment_name'] not in experiment_names:
experiment_names.append(rec['magic_experiment_name']) # depends on [control=['if'], data=['experiment_names']] # depends on [control=['for'], data=['rec']]
#
# hunt through by experiment name
if EXP != '':
try:
k = experiment_names.index(EXP) # depends on [control=['try'], data=[]]
except:
print('Bad experiment name')
sys.exit() # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['EXP']]
while k < len(experiment_names):
e = experiment_names[k]
if EXP == '':
print(e, k + 1, 'out of ', len(experiment_names)) # depends on [control=['if'], data=[]]
#
# initialize lists of data, susceptibility, temperature, frequency and
# field
(X, T, F, B) = ([], [], [], [])
for rec in meas_data:
methcodes = rec['magic_method_codes']
meths = methcodes.strip().split(':')
if rec['magic_experiment_name'] == e and 'LP-X' in meths: # looking for chi measurement
if 'measurement_temp' not in list(rec.keys()):
rec['measurement_temp'] = '300' # set defaults # depends on [control=['if'], data=[]]
if 'measurement_freq' not in list(rec.keys()):
rec['measurement_freq'] = '0' # set defaults # depends on [control=['if'], data=[]]
if 'measurement_lab_field_ac' not in list(rec.keys()):
rec['measurement_lab_field_ac'] = '0' # set default # depends on [control=['if'], data=[]]
X.append(float(rec['measurement_x']))
T.append(float(rec['measurement_temp']))
F.append(float(rec['measurement_freq']))
B.append(float(rec['measurement_lab_field_ac'])) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['rec']]
#
# get unique list of Ts,Fs, and Bs
#
(Ts, Fs, Bs) = ([], [], [])
for k in range(len(X)): # hunt through all the measurements
if T[k] not in Ts:
Ts.append(T[k]) # append if not in list # depends on [control=['if'], data=['Ts']]
if F[k] not in Fs:
Fs.append(F[k]) # depends on [control=['if'], data=['Fs']]
if B[k] not in Bs:
Bs.append(B[k]) # depends on [control=['if'], data=['Bs']] # depends on [control=['for'], data=['k']]
Ts.sort() # sort list of temperatures, frequencies and fields
Fs.sort()
Bs.sort()
if '-x' in sys.argv:
k = len(experiment_names) + 1 # just plot the one # depends on [control=['if'], data=[]]
else:
k += 1 # increment experiment number
#
# plot chi versus T and F holding B constant
#
plotnum = 1 # initialize plot number to 1
if len(X) > 2: # if there are any data to plot, continue
b = Bs[-1] # keeping field constant and at maximum
XTF = [] # initialize list of chi versus Temp and freq
for f in Fs: # step through frequencies sequentially
XT = [] # initialize list of chi versus temp
for kk in range(len(X)): # hunt through all the data
if F[kk] == f and B[kk] == b: # select data with given freq and field
XT.append([X[kk], T[kk]]) # append to list # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['kk']]
XTF.append(XT) # append list to list of frequencies # depends on [control=['for'], data=['f']]
if len(XT) > 1: # if there are any temperature dependent data
plt.figure(num=plotnum, figsize=(5, 5)) # initialize plot
# call the plotting function
pmagplotlib.plot_xtf(plotnum, XTF, Fs, e, b)
pmagplotlib.show_fig(plotnum)
plotnum += 1 # increment plot number # depends on [control=['if'], data=[]]
f = Fs[0] # set frequency to minimum
XTB = [] # initialize list if chi versus Temp and field
for b in Bs: # step through field values
XT = [] # initial chi versus temp list for this field
for kk in range(len(X)): # hunt through all the data
if F[kk] == f and B[kk] == b: # select data with given freq and field
XT.append([X[kk], T[kk]]) # append to list # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['kk']]
XTB.append(XT) # depends on [control=['for'], data=['b']]
if len(XT) > 1: # if there are any temperature dependent data
plt.figure(num=plotnum, figsize=(5, 5)) # set up plot
# call the plotting function
pmagplotlib.plot_xtb(plotnum, XTB, Bs, e, f)
pmagplotlib.show_fig(plotnum)
plotnum += 1 # increment plot number # depends on [control=['if'], data=[]]
if save == True:
files = {}
PLTS = {}
for p in range(1, plotnum):
key = str(p)
files[key] = e + '_' + key + '.' + fmt
PLTS[key] = p # depends on [control=['for'], data=['p']]
for key in list(PLTS.keys()):
try:
plt.figure(num=PLTS[key])
plt.savefig(save_folder + '/' + files[key].replace('/', '-')) # depends on [control=['try'], data=[]]
except:
print('could not save: ', PLTS[key], files[key])
print('output file format not supported ') # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['key']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['while'], data=['k']] |
def _get_extra(self, attrs, exclude):
"""Read the extra properties, taking into account an exclude list"""
result = {}
for key in attrs.getNames():
if key not in exclude:
result[str(key)] = str(attrs[key])
return result | def function[_get_extra, parameter[self, attrs, exclude]]:
constant[Read the extra properties, taking into account an exclude list]
variable[result] assign[=] dictionary[[], []]
for taget[name[key]] in starred[call[name[attrs].getNames, parameter[]]] begin[:]
if compare[name[key] <ast.NotIn object at 0x7da2590d7190> name[exclude]] begin[:]
call[name[result]][call[name[str], parameter[name[key]]]] assign[=] call[name[str], parameter[call[name[attrs]][name[key]]]]
return[name[result]] | keyword[def] identifier[_get_extra] ( identifier[self] , identifier[attrs] , identifier[exclude] ):
literal[string]
identifier[result] ={}
keyword[for] identifier[key] keyword[in] identifier[attrs] . identifier[getNames] ():
keyword[if] identifier[key] keyword[not] keyword[in] identifier[exclude] :
identifier[result] [ identifier[str] ( identifier[key] )]= identifier[str] ( identifier[attrs] [ identifier[key] ])
keyword[return] identifier[result] | def _get_extra(self, attrs, exclude):
"""Read the extra properties, taking into account an exclude list"""
result = {}
for key in attrs.getNames():
if key not in exclude:
result[str(key)] = str(attrs[key]) # depends on [control=['if'], data=['key']] # depends on [control=['for'], data=['key']]
return result |
def is_valid(self):
    """
    Check image integrity.
    Tries to compute the checksum for each raster layer and returns False if this fails.
    See this forum entry:
    `How to check if image is valid? <https://lists.osgeo.org/pipermail/gdal-dev/2013-November/037520.html>`_.
    Returns
    -------
    bool
        is the file valid?
    """
    try:
        # GDAL band indices are 1-based; Checksum() raises RuntimeError on
        # unreadable data (the value itself is irrelevant here)
        for band_index in range(1, self.raster.RasterCount + 1):
            self.raster.GetRasterBand(band_index).Checksum()
    except RuntimeError:
        return False
    return True
return True | def function[is_valid, parameter[self]]:
constant[
Check image integrity.
Tries to compute the checksum for each raster layer and returns False if this fails.
See this forum entry:
`How to check if image is valid? <https://lists.osgeo.org/pipermail/gdal-dev/2013-November/037520.html>`_.
Returns
-------
bool
is the file valid?
]
for taget[name[i]] in starred[call[name[range], parameter[name[self].raster.RasterCount]]] begin[:]
<ast.Try object at 0x7da18c4cc220>
return[constant[True]] | keyword[def] identifier[is_valid] ( identifier[self] ):
literal[string]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[self] . identifier[raster] . identifier[RasterCount] ):
keyword[try] :
identifier[checksum] = identifier[self] . identifier[raster] . identifier[GetRasterBand] ( identifier[i] + literal[int] ). identifier[Checksum] ()
keyword[except] identifier[RuntimeError] :
keyword[return] keyword[False]
keyword[return] keyword[True] | def is_valid(self):
"""
Check image integrity.
Tries to compute the checksum for each raster layer and returns False if this fails.
See this forum entry:
`How to check if image is valid? <https://lists.osgeo.org/pipermail/gdal-dev/2013-November/037520.html>`_.
Returns
-------
bool
is the file valid?
"""
for i in range(self.raster.RasterCount):
try:
checksum = self.raster.GetRasterBand(i + 1).Checksum() # depends on [control=['try'], data=[]]
except RuntimeError:
return False # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['i']]
return True |
def center_cell_text(cell):
    """
    Horizontally center the text within a cell's grid
    Like this::
        +---------+          +---------+
        | foo     |   -->    |   foo   |
        +---------+          +---------+
    Parameters
    ----------
    cell : dashtable.data2rst.Cell
    Returns
    -------
    cell : dashtable.data2rst.Cell
    """
    rows = cell.text.split('\n')
    # width between the two border characters of the top row
    inner_width = len(rows[0]) - 2
    # strip borders and padding from each interior row; keep empty
    # sentinels at both ends so indices line up with `rows`
    stripped = ['']
    stripped.extend(row[2:len(row) - 2].rstrip() for row in rows[1:-1])
    stripped.append('')
    widest = get_longest_line_length('\n'.join(stripped))
    # split the leftover width: floor-half goes on the left, rest on the right
    left_pad = math.floor((inner_width - widest) / 2) * ' '
    padded = [(left_pad + body) + (inner_width - len(left_pad + body)) * ' '
              for body in stripped]
    # reassemble interior rows: original border chars around the centered body
    rows[1:-1] = [row[0] + body + row[-1]
                  for row, body in zip(rows[1:-1], padded[1:-1])]
    cell.text = '\n'.join(rows)
    return cell
constant[
Horizontally center the text within a cell's grid
Like this::
+---------+ +---------+
| foo | --> | foo |
+---------+ +---------+
Parameters
----------
cell : dashtable.data2rst.Cell
Returns
-------
cell : dashtable.data2rst.Cell
]
variable[lines] assign[=] call[name[cell].text.split, parameter[constant[
]]]
variable[cell_width] assign[=] binary_operation[call[name[len], parameter[call[name[lines]][constant[0]]]] - constant[2]]
variable[truncated_lines] assign[=] list[[<ast.Constant object at 0x7da1b2346c50>]]
for taget[name[i]] in starred[call[name[range], parameter[constant[1], binary_operation[call[name[len], parameter[name[lines]]] - constant[1]]]]] begin[:]
variable[truncated] assign[=] call[call[call[name[lines]][name[i]]][<ast.Slice object at 0x7da1b2344520>].rstrip, parameter[]]
call[name[truncated_lines].append, parameter[name[truncated]]]
call[name[truncated_lines].append, parameter[constant[]]]
variable[max_line_length] assign[=] call[name[get_longest_line_length], parameter[call[constant[
].join, parameter[name[truncated_lines]]]]]
variable[remainder] assign[=] binary_operation[name[cell_width] - name[max_line_length]]
variable[left_width] assign[=] call[name[math].floor, parameter[binary_operation[name[remainder] / constant[2]]]]
variable[left_space] assign[=] binary_operation[name[left_width] * constant[ ]]
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[truncated_lines]]]]]] begin[:]
call[name[truncated_lines]][name[i]] assign[=] binary_operation[name[left_space] + call[name[truncated_lines]][name[i]]]
variable[right_width] assign[=] binary_operation[name[cell_width] - call[name[len], parameter[call[name[truncated_lines]][name[i]]]]]
<ast.AugAssign object at 0x7da20c6c51b0>
for taget[name[i]] in starred[call[name[range], parameter[constant[1], binary_operation[call[name[len], parameter[name[lines]]] - constant[1]]]]] begin[:]
call[name[lines]][name[i]] assign[=] call[constant[].join, parameter[list[[<ast.Subscript object at 0x7da20c6c7c10>, <ast.Subscript object at 0x7da20c6c4100>, <ast.Subscript object at 0x7da20c6c5150>]]]]
name[cell].text assign[=] call[constant[
].join, parameter[name[lines]]]
return[name[cell]] | keyword[def] identifier[center_cell_text] ( identifier[cell] ):
literal[string]
identifier[lines] = identifier[cell] . identifier[text] . identifier[split] ( literal[string] )
identifier[cell_width] = identifier[len] ( identifier[lines] [ literal[int] ])- literal[int]
identifier[truncated_lines] =[ literal[string] ]
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[lines] )- literal[int] ):
identifier[truncated] = identifier[lines] [ identifier[i] ][ literal[int] : identifier[len] ( identifier[lines] [ identifier[i] ])- literal[int] ]. identifier[rstrip] ()
identifier[truncated_lines] . identifier[append] ( identifier[truncated] )
identifier[truncated_lines] . identifier[append] ( literal[string] )
identifier[max_line_length] = identifier[get_longest_line_length] ( literal[string] . identifier[join] ( identifier[truncated_lines] ))
identifier[remainder] = identifier[cell_width] - identifier[max_line_length]
identifier[left_width] = identifier[math] . identifier[floor] ( identifier[remainder] / literal[int] )
identifier[left_space] = identifier[left_width] * literal[string]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[truncated_lines] )):
identifier[truncated_lines] [ identifier[i] ]= identifier[left_space] + identifier[truncated_lines] [ identifier[i] ]
identifier[right_width] = identifier[cell_width] - identifier[len] ( identifier[truncated_lines] [ identifier[i] ])
identifier[truncated_lines] [ identifier[i] ]+= identifier[right_width] * literal[string]
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[lines] )- literal[int] ):
identifier[lines] [ identifier[i] ]= literal[string] . identifier[join] ([
identifier[lines] [ identifier[i] ][ literal[int] ], identifier[truncated_lines] [ identifier[i] ], identifier[lines] [ identifier[i] ][- literal[int] ]
])
identifier[cell] . identifier[text] = literal[string] . identifier[join] ( identifier[lines] )
keyword[return] identifier[cell] | def center_cell_text(cell):
"""
Horizontally center the text within a cell's grid
Like this::
+---------+ +---------+
| foo | --> | foo |
+---------+ +---------+
Parameters
----------
cell : dashtable.data2rst.Cell
Returns
-------
cell : dashtable.data2rst.Cell
"""
lines = cell.text.split('\n')
cell_width = len(lines[0]) - 2
truncated_lines = ['']
for i in range(1, len(lines) - 1):
truncated = lines[i][2:len(lines[i]) - 2].rstrip()
truncated_lines.append(truncated) # depends on [control=['for'], data=['i']]
truncated_lines.append('')
max_line_length = get_longest_line_length('\n'.join(truncated_lines))
remainder = cell_width - max_line_length
left_width = math.floor(remainder / 2)
left_space = left_width * ' '
for i in range(len(truncated_lines)):
truncated_lines[i] = left_space + truncated_lines[i]
right_width = cell_width - len(truncated_lines[i])
truncated_lines[i] += right_width * ' ' # depends on [control=['for'], data=['i']]
for i in range(1, len(lines) - 1):
lines[i] = ''.join([lines[i][0], truncated_lines[i], lines[i][-1]]) # depends on [control=['for'], data=['i']]
cell.text = '\n'.join(lines)
return cell |
def assert_lock(fname):
    """
    Acquire the lock file *fname*, terminating the program if it is
    already held by someone else.
    """
    acquired = set_lock(fname)
    if acquired:
        return
    # Lock is held elsewhere: log and exit instead of proceeding.
    logger.error('File {} is already locked. Terminating.'.format(fname))
    sys.exit()
constant[
If file is locked then terminate program else lock file.
]
if <ast.UnaryOp object at 0x7da1b0a36f80> begin[:]
call[name[logger].error, parameter[call[constant[File {} is already locked. Terminating.].format, parameter[name[fname]]]]]
call[name[sys].exit, parameter[]] | keyword[def] identifier[assert_lock] ( identifier[fname] ):
literal[string]
keyword[if] keyword[not] identifier[set_lock] ( identifier[fname] ):
identifier[logger] . identifier[error] ( literal[string] . identifier[format] ( identifier[fname] ))
identifier[sys] . identifier[exit] () | def assert_lock(fname):
"""
If file is locked then terminate program else lock file.
"""
if not set_lock(fname):
logger.error('File {} is already locked. Terminating.'.format(fname))
sys.exit() # depends on [control=['if'], data=[]] |
def write_ln(self, *text, sep=' '):
    """
    Append *text* to the buffer as its own line and return ``self``
    for chaining.

    :param text: fragments joined by :func:`markdown.text`
    :param sep: separator forwarded to :func:`markdown.text`
    :return: this instance (fluent interface)
    """
    # Make sure we start on a fresh line if the buffer has content.
    needs_break = bool(self.text) and not self.text.endswith('\n')
    if needs_break:
        self.text += '\n'
    rendered = markdown.text(*text, sep)
    self.text = self.text + rendered + '\n'
    return self
constant[
Write line
:param text:
:param sep:
:return:
]
if <ast.BoolOp object at 0x7da1b18f9960> begin[:]
<ast.AugAssign object at 0x7da1b18fb340>
<ast.AugAssign object at 0x7da1b18fa290>
return[name[self]] | keyword[def] identifier[write_ln] ( identifier[self] ,* identifier[text] , identifier[sep] = literal[string] ):
literal[string]
keyword[if] identifier[self] . identifier[text] keyword[and] identifier[self] . identifier[text] [- literal[int] ]!= literal[string] :
identifier[self] . identifier[text] += literal[string]
identifier[self] . identifier[text] += identifier[markdown] . identifier[text] (* identifier[text] , identifier[sep] )+ literal[string]
keyword[return] identifier[self] | def write_ln(self, *text, sep=' '):
"""
Write line
:param text:
:param sep:
:return:
"""
if self.text and self.text[-1] != '\n':
self.text += '\n' # depends on [control=['if'], data=[]]
self.text += markdown.text(*text, sep) + '\n'
return self |
def reload(filename=None,
           url=r"https://raw.githubusercontent.com/googlei18n/emoji4unicode/master/data/emoji4unicode.xml",
           loader_class=None):
    u"""reload google's `emoji4unicode` project's xml file. must call this method first to use `e4u` library."""
    global _loader
    # Fall back to the default Loader implementation when none is given.
    cls = loader.Loader if loader_class is None else loader_class
    _loader = cls()
    _loader.load(filename, url)
constant[reload google's `emoji4unicode` project's xml file. must call this method first to use `e4u` library.]
if compare[name[loader_class] is constant[None]] begin[:]
variable[loader_class] assign[=] name[loader].Loader
<ast.Global object at 0x7da18ede7bb0>
variable[_loader] assign[=] call[name[loader_class], parameter[]]
call[name[_loader].load, parameter[name[filename], name[url]]] | keyword[def] identifier[reload] ( identifier[filename] = keyword[None] ,
identifier[url] = literal[string] ,
identifier[loader_class] = keyword[None] ):
literal[string]
keyword[if] identifier[loader_class] keyword[is] keyword[None] :
identifier[loader_class] = identifier[loader] . identifier[Loader]
keyword[global] identifier[_loader]
identifier[_loader] = identifier[loader_class] ()
identifier[_loader] . identifier[load] ( identifier[filename] , identifier[url] ) | def reload(filename=None, url='https://raw.githubusercontent.com/googlei18n/emoji4unicode/master/data/emoji4unicode.xml', loader_class=None):
u"""reload google's `emoji4unicode` project's xml file. must call this method first to use `e4u` library."""
if loader_class is None:
loader_class = loader.Loader # depends on [control=['if'], data=['loader_class']]
global _loader
_loader = loader_class()
_loader.load(filename, url) |
def encrypt_item(table_name, aws_cmk_id):
    """Demonstrate use of EncryptedClient to transparently encrypt an item.

    Writes an item with one attribute excluded from encryption, then reads it
    back with both a plain and an encrypted client to show that encryption and
    decryption happen transparently, and finally deletes the item.

    :param table_name: name of the DynamoDB table to write to
        (assumed to already exist with the matching key schema — TODO confirm)
    :param aws_cmk_id: AWS KMS key id used by the materials provider
    """
    index_key = {"partition_attribute": {"S": "is this"}, "sort_attribute": {"N": "55"}}
    plaintext_item = {
        "example": {"S": "data"},
        "some numbers": {"N": "99"},
        "and some binary": {"B": b"\x00\x01\x02"},
        "leave me": {"S": "alone"},  # We want to ignore this attribute
    }
    # Collect all of the attributes that will be encrypted (used later).
    encrypted_attributes = set(plaintext_item.keys())
    encrypted_attributes.remove("leave me")
    # Collect all of the attributes that will not be encrypted (used later).
    unencrypted_attributes = set(index_key.keys())
    unencrypted_attributes.add("leave me")
    # Add the index pairs to the item.
    plaintext_item.update(index_key)
    # Create a normal client.
    client = boto3.client("dynamodb")
    # Create a crypto materials provider using the specified AWS KMS key.
    aws_kms_cmp = AwsKmsCryptographicMaterialsProvider(key_id=aws_cmk_id)
    # Create attribute actions that tells the encrypted client to encrypt all attributes except one.
    actions = AttributeActions(
        default_action=CryptoAction.ENCRYPT_AND_SIGN, attribute_actions={"leave me": CryptoAction.DO_NOTHING}
    )
    # Use these objects to create an encrypted client.
    encrypted_client = EncryptedClient(client=client, materials_provider=aws_kms_cmp, attribute_actions=actions)
    # Put the item to the table, using the encrypted client to transparently encrypt it.
    encrypted_client.put_item(TableName=table_name, Item=plaintext_item)
    # Get the encrypted item using the standard client.
    encrypted_item = client.get_item(TableName=table_name, Key=index_key)["Item"]
    # Get the item using the encrypted client, transparently decrypting it.
    decrypted_item = encrypted_client.get_item(TableName=table_name, Key=index_key)["Item"]
    # Verify that all of the attributes are different in the encrypted item
    # (i.e. the ciphertext differs from the plaintext) and decrypt correctly.
    for name in encrypted_attributes:
        assert encrypted_item[name] != plaintext_item[name]
        assert decrypted_item[name] == plaintext_item[name]
    # Verify that all of the attributes that should not be encrypted were not.
    for name in unencrypted_attributes:
        assert decrypted_item[name] == encrypted_item[name] == plaintext_item[name]
    # Clean up the item
    encrypted_client.delete_item(TableName=table_name, Key=index_key)
constant[Demonstrate use of EncryptedClient to transparently encrypt an item.]
variable[index_key] assign[=] dictionary[[<ast.Constant object at 0x7da18f810c10>, <ast.Constant object at 0x7da18f811510>], [<ast.Dict object at 0x7da18f811630>, <ast.Dict object at 0x7da18ede5180>]]
variable[plaintext_item] assign[=] dictionary[[<ast.Constant object at 0x7da18ede6290>, <ast.Constant object at 0x7da18ede5600>, <ast.Constant object at 0x7da18ede5240>, <ast.Constant object at 0x7da18ede7310>], [<ast.Dict object at 0x7da18ede4fa0>, <ast.Dict object at 0x7da18ede4790>, <ast.Dict object at 0x7da18ede4b80>, <ast.Dict object at 0x7da18ede6770>]]
variable[encrypted_attributes] assign[=] call[name[set], parameter[call[name[plaintext_item].keys, parameter[]]]]
call[name[encrypted_attributes].remove, parameter[constant[leave me]]]
variable[unencrypted_attributes] assign[=] call[name[set], parameter[call[name[index_key].keys, parameter[]]]]
call[name[unencrypted_attributes].add, parameter[constant[leave me]]]
call[name[plaintext_item].update, parameter[name[index_key]]]
variable[client] assign[=] call[name[boto3].client, parameter[constant[dynamodb]]]
variable[aws_kms_cmp] assign[=] call[name[AwsKmsCryptographicMaterialsProvider], parameter[]]
variable[actions] assign[=] call[name[AttributeActions], parameter[]]
variable[encrypted_client] assign[=] call[name[EncryptedClient], parameter[]]
call[name[encrypted_client].put_item, parameter[]]
variable[encrypted_item] assign[=] call[call[name[client].get_item, parameter[]]][constant[Item]]
variable[decrypted_item] assign[=] call[call[name[encrypted_client].get_item, parameter[]]][constant[Item]]
for taget[name[name]] in starred[name[encrypted_attributes]] begin[:]
assert[compare[call[name[encrypted_item]][name[name]] not_equal[!=] call[name[plaintext_item]][name[name]]]]
assert[compare[call[name[decrypted_item]][name[name]] equal[==] call[name[plaintext_item]][name[name]]]]
for taget[name[name]] in starred[name[unencrypted_attributes]] begin[:]
assert[compare[call[name[decrypted_item]][name[name]] equal[==] call[name[encrypted_item]][name[name]]]]
call[name[encrypted_client].delete_item, parameter[]] | keyword[def] identifier[encrypt_item] ( identifier[table_name] , identifier[aws_cmk_id] ):
literal[string]
identifier[index_key] ={ literal[string] :{ literal[string] : literal[string] }, literal[string] :{ literal[string] : literal[string] }}
identifier[plaintext_item] ={
literal[string] :{ literal[string] : literal[string] },
literal[string] :{ literal[string] : literal[string] },
literal[string] :{ literal[string] : literal[string] },
literal[string] :{ literal[string] : literal[string] },
}
identifier[encrypted_attributes] = identifier[set] ( identifier[plaintext_item] . identifier[keys] ())
identifier[encrypted_attributes] . identifier[remove] ( literal[string] )
identifier[unencrypted_attributes] = identifier[set] ( identifier[index_key] . identifier[keys] ())
identifier[unencrypted_attributes] . identifier[add] ( literal[string] )
identifier[plaintext_item] . identifier[update] ( identifier[index_key] )
identifier[client] = identifier[boto3] . identifier[client] ( literal[string] )
identifier[aws_kms_cmp] = identifier[AwsKmsCryptographicMaterialsProvider] ( identifier[key_id] = identifier[aws_cmk_id] )
identifier[actions] = identifier[AttributeActions] (
identifier[default_action] = identifier[CryptoAction] . identifier[ENCRYPT_AND_SIGN] , identifier[attribute_actions] ={ literal[string] : identifier[CryptoAction] . identifier[DO_NOTHING] }
)
identifier[encrypted_client] = identifier[EncryptedClient] ( identifier[client] = identifier[client] , identifier[materials_provider] = identifier[aws_kms_cmp] , identifier[attribute_actions] = identifier[actions] )
identifier[encrypted_client] . identifier[put_item] ( identifier[TableName] = identifier[table_name] , identifier[Item] = identifier[plaintext_item] )
identifier[encrypted_item] = identifier[client] . identifier[get_item] ( identifier[TableName] = identifier[table_name] , identifier[Key] = identifier[index_key] )[ literal[string] ]
identifier[decrypted_item] = identifier[encrypted_client] . identifier[get_item] ( identifier[TableName] = identifier[table_name] , identifier[Key] = identifier[index_key] )[ literal[string] ]
keyword[for] identifier[name] keyword[in] identifier[encrypted_attributes] :
keyword[assert] identifier[encrypted_item] [ identifier[name] ]!= identifier[plaintext_item] [ identifier[name] ]
keyword[assert] identifier[decrypted_item] [ identifier[name] ]== identifier[plaintext_item] [ identifier[name] ]
keyword[for] identifier[name] keyword[in] identifier[unencrypted_attributes] :
keyword[assert] identifier[decrypted_item] [ identifier[name] ]== identifier[encrypted_item] [ identifier[name] ]== identifier[plaintext_item] [ identifier[name] ]
identifier[encrypted_client] . identifier[delete_item] ( identifier[TableName] = identifier[table_name] , identifier[Key] = identifier[index_key] ) | def encrypt_item(table_name, aws_cmk_id):
"""Demonstrate use of EncryptedClient to transparently encrypt an item."""
index_key = {'partition_attribute': {'S': 'is this'}, 'sort_attribute': {'N': '55'}} # We want to ignore this attribute
plaintext_item = {'example': {'S': 'data'}, 'some numbers': {'N': '99'}, 'and some binary': {'B': b'\x00\x01\x02'}, 'leave me': {'S': 'alone'}}
# Collect all of the attributes that will be encrypted (used later).
encrypted_attributes = set(plaintext_item.keys())
encrypted_attributes.remove('leave me')
# Collect all of the attributes that will not be encrypted (used later).
unencrypted_attributes = set(index_key.keys())
unencrypted_attributes.add('leave me')
# Add the index pairs to the item.
plaintext_item.update(index_key)
# Create a normal client.
client = boto3.client('dynamodb')
# Create a crypto materials provider using the specified AWS KMS key.
aws_kms_cmp = AwsKmsCryptographicMaterialsProvider(key_id=aws_cmk_id)
# Create attribute actions that tells the encrypted client to encrypt all attributes except one.
actions = AttributeActions(default_action=CryptoAction.ENCRYPT_AND_SIGN, attribute_actions={'leave me': CryptoAction.DO_NOTHING})
# Use these objects to create an encrypted client.
encrypted_client = EncryptedClient(client=client, materials_provider=aws_kms_cmp, attribute_actions=actions)
# Put the item to the table, using the encrypted client to transparently encrypt it.
encrypted_client.put_item(TableName=table_name, Item=plaintext_item)
# Get the encrypted item using the standard client.
encrypted_item = client.get_item(TableName=table_name, Key=index_key)['Item']
# Get the item using the encrypted client, transparently decyrpting it.
decrypted_item = encrypted_client.get_item(TableName=table_name, Key=index_key)['Item']
# Verify that all of the attributes are different in the encrypted item
for name in encrypted_attributes:
assert encrypted_item[name] != plaintext_item[name]
assert decrypted_item[name] == plaintext_item[name] # depends on [control=['for'], data=['name']]
# Verify that all of the attributes that should not be encrypted were not.
for name in unencrypted_attributes:
assert decrypted_item[name] == encrypted_item[name] == plaintext_item[name] # depends on [control=['for'], data=['name']]
# Clean up the item
encrypted_client.delete_item(TableName=table_name, Key=index_key) |
def is_same_api(self, other):
    """Check if this implements the same API as another _ApiInfo instance."""
    # Identity of the shared common-info object defines API sameness;
    # anything that is not an _ApiInfo cannot match.
    if isinstance(other, _ApiInfo):
        # pylint: disable=protected-access
        return self.__common_info is other.__common_info
    return False
constant[Check if this implements the same API as another _ApiInfo instance.]
if <ast.UnaryOp object at 0x7da1b0ef1630> begin[:]
return[constant[False]]
return[compare[name[self].__common_info is name[other].__common_info]] | keyword[def] identifier[is_same_api] ( identifier[self] , identifier[other] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[other] , identifier[_ApiInfo] ):
keyword[return] keyword[False]
keyword[return] identifier[self] . identifier[__common_info] keyword[is] identifier[other] . identifier[__common_info] | def is_same_api(self, other):
"""Check if this implements the same API as another _ApiInfo instance."""
if not isinstance(other, _ApiInfo):
return False # depends on [control=['if'], data=[]]
# pylint: disable=protected-access
return self.__common_info is other.__common_info |
def libvlc_media_player_has_vout(p_mi):
    '''How many video outputs does this media player have?
    @param p_mi: the media player.
    @return: the number of video outputs.
    '''
    # Reuse the cached ctypes binding if present, otherwise build it once.
    f = _Cfunctions.get('libvlc_media_player_has_vout', None)
    if f is None:
        f = _Cfunction('libvlc_media_player_has_vout', ((1,),), None,
                       ctypes.c_uint, MediaPlayer)
    return f(p_mi)
constant[How many video outputs does this media player have?
@param p_mi: the media player.
@return: the number of video outputs.
]
variable[f] assign[=] <ast.BoolOp object at 0x7da1b1622200>
return[call[name[f], parameter[name[p_mi]]]] | keyword[def] identifier[libvlc_media_player_has_vout] ( identifier[p_mi] ):
literal[string]
identifier[f] = identifier[_Cfunctions] . identifier[get] ( literal[string] , keyword[None] ) keyword[or] identifier[_Cfunction] ( literal[string] ,(( literal[int] ,),), keyword[None] ,
identifier[ctypes] . identifier[c_uint] , identifier[MediaPlayer] )
keyword[return] identifier[f] ( identifier[p_mi] ) | def libvlc_media_player_has_vout(p_mi):
"""How many video outputs does this media player have?
@param p_mi: the media player.
@return: the number of video outputs.
"""
f = _Cfunctions.get('libvlc_media_player_has_vout', None) or _Cfunction('libvlc_media_player_has_vout', ((1,),), None, ctypes.c_uint, MediaPlayer)
return f(p_mi) |
def recursive_getattr(obj: Any, attr: str, *args) -> Any:
    """ Recursive ``getattr``.
    Drop-in replacement for the standard ``getattr(...)`` that follows
    dotted attribute paths. Credit to:
    https://stackoverflow.com/a/31174427
    Args:
        obj: Object to retrieve the attribute from.
        attr: Name of the attribute, with each successive attribute separated by a ".".
    Returns:
        The requested attribute. (Same as ``getattr``).
    Raises:
        AttributeError: If the attribute was not found and no default was provided. (Same as ``getattr``).
    """
    # Walk the dotted path one component at a time, forwarding any
    # optional default through to each lookup (same fold as
    # functools.reduce over [obj] + components).
    current = obj
    for component in attr.split('.'):
        current = getattr(current, component, *args)
    return current
constant[ Recursive ``getattar``.
This can be used as a drop in for the standard ``getattr(...)``. Credit to:
https://stackoverflow.com/a/31174427
Args:
obj: Object to retrieve the attribute from.
attr: Name of the attribute, with each successive attribute separated by a ".".
Returns:
The requested attribute. (Same as ``getattr``).
Raises:
AttributeError: If the attribute was not found and no default was provided. (Same as ``getattr``).
]
def function[_getattr, parameter[obj, attr]]:
return[call[name[getattr], parameter[name[obj], name[attr], <ast.Starred object at 0x7da1b1fba080>]]]
return[call[name[functools].reduce, parameter[name[_getattr], binary_operation[list[[<ast.Name object at 0x7da1b1fb8ac0>]] + call[name[attr].split, parameter[constant[.]]]]]]] | keyword[def] identifier[recursive_getattr] ( identifier[obj] : identifier[Any] , identifier[attr] : identifier[str] ,* identifier[args] )-> identifier[Any] :
literal[string]
keyword[def] identifier[_getattr] ( identifier[obj] , identifier[attr] ):
keyword[return] identifier[getattr] ( identifier[obj] , identifier[attr] ,* identifier[args] )
keyword[return] identifier[functools] . identifier[reduce] ( identifier[_getattr] ,[ identifier[obj] ]+ identifier[attr] . identifier[split] ( literal[string] )) | def recursive_getattr(obj: Any, attr: str, *args) -> Any:
""" Recursive ``getattar``.
This can be used as a drop in for the standard ``getattr(...)``. Credit to:
https://stackoverflow.com/a/31174427
Args:
obj: Object to retrieve the attribute from.
attr: Name of the attribute, with each successive attribute separated by a ".".
Returns:
The requested attribute. (Same as ``getattr``).
Raises:
AttributeError: If the attribute was not found and no default was provided. (Same as ``getattr``).
"""
def _getattr(obj, attr):
return getattr(obj, attr, *args)
return functools.reduce(_getattr, [obj] + attr.split('.')) |
def encode(binary_data):
    '''
    Encode binary data using Bell-202 AFSK
    Expects a bitarray.bitarray() object of binary data as its argument.
    Returns a generator of sound samples suitable for use with the
    audiogen module.
    '''
    framed = frame(binary_data)
    # Halve the modulated volume and pad both ends with 1.05 s of
    # silence so startup/shutdown glitches don't clip the packet.
    stream = itertools.chain(
        audiogen.silence(1.05),
        multiply(modulate(framed), constant(0.5)),
        audiogen.silence(1.05),
    )
    for sample in stream:
        yield sample
constant[
Encode binary data using Bell-202 AFSK
Expects a bitarray.bitarray() object of binary data as its argument.
Returns a generator of sound samples suitable for use with the
audiogen module.
]
variable[framed_data] assign[=] call[name[frame], parameter[name[binary_data]]]
for taget[name[sample]] in starred[call[name[itertools].chain, parameter[call[name[audiogen].silence, parameter[constant[1.05]]], call[name[multiply], parameter[call[name[modulate], parameter[name[framed_data]]], call[name[constant], parameter[constant[0.5]]]]], call[name[audiogen].silence, parameter[constant[1.05]]]]]] begin[:]
<ast.Yield object at 0x7da18f09c5b0> | keyword[def] identifier[encode] ( identifier[binary_data] ):
literal[string]
identifier[framed_data] = identifier[frame] ( identifier[binary_data] )
keyword[for] identifier[sample] keyword[in] identifier[itertools] . identifier[chain] (
identifier[audiogen] . identifier[silence] ( literal[int] ),
identifier[multiply] ( identifier[modulate] ( identifier[framed_data] ), identifier[constant] ( literal[int] )),
identifier[audiogen] . identifier[silence] ( literal[int] ),
):
keyword[yield] identifier[sample] | def encode(binary_data):
"""
Encode binary data using Bell-202 AFSK
Expects a bitarray.bitarray() object of binary data as its argument.
Returns a generator of sound samples suitable for use with the
audiogen module.
"""
framed_data = frame(binary_data) # set volume to 1/2, preceed packet with 1/20 s silence to allow for startup glitches
for sample in itertools.chain(audiogen.silence(1.05), multiply(modulate(framed_data), constant(0.5)), audiogen.silence(1.05)):
yield sample # depends on [control=['for'], data=['sample']] |
def insert_ansi(p_string):
    """ Returns a string with color information at the right positions. """
    result = p_string.data
    # Insert from the highest position down so earlier offsets stay valid.
    for pos, color in sorted(p_string.colors.items(), reverse=True):
        ansi = lookup_color(color).as_ansi()
        result = ''.join((result[:pos], ansi, result[pos:]))
    return result
constant[ Returns a string with color information at the right positions. ]
variable[result] assign[=] name[p_string].data
for taget[tuple[[<ast.Name object at 0x7da1b2346080>, <ast.Name object at 0x7da1b23470a0>]]] in starred[call[name[sorted], parameter[call[name[p_string].colors.items, parameter[]]]]] begin[:]
variable[color] assign[=] call[name[lookup_color], parameter[name[color]]]
variable[result] assign[=] binary_operation[binary_operation[call[name[result]][<ast.Slice object at 0x7da1b23466b0>] + call[name[color].as_ansi, parameter[]]] + call[name[result]][<ast.Slice object at 0x7da1b23473d0>]]
return[name[result]] | keyword[def] identifier[insert_ansi] ( identifier[p_string] ):
literal[string]
identifier[result] = identifier[p_string] . identifier[data]
keyword[for] identifier[pos] , identifier[color] keyword[in] identifier[sorted] ( identifier[p_string] . identifier[colors] . identifier[items] (), identifier[reverse] = keyword[True] ):
identifier[color] = identifier[lookup_color] ( identifier[color] )
identifier[result] = identifier[result] [: identifier[pos] ]+ identifier[color] . identifier[as_ansi] ()+ identifier[result] [ identifier[pos] :]
keyword[return] identifier[result] | def insert_ansi(p_string):
""" Returns a string with color information at the right positions. """
result = p_string.data
for (pos, color) in sorted(p_string.colors.items(), reverse=True):
color = lookup_color(color)
result = result[:pos] + color.as_ansi() + result[pos:] # depends on [control=['for'], data=[]]
return result |
def add_particle(
    self,
    pos=(0, 0, 0),
    charge=1e-6,
    mass=1e-3,
    radius=0.005,
    color=None,
    vel=(0, 0, 0),
    fixed=False,
    negligible=False,
):
    """ Adds a new particle with specified properties (in SI units).

    :param pos: initial (x, y, z) position
    :param charge: electric charge in coulombs
    :param mass: mass in kilograms
    :param radius: radius in meters
    :param color: color number; defaults to the new particle's index
    :param vel: initial (vx, vy, vz) velocity
    :param fixed: if True the particle does not move
    :param negligible: if True the particle exerts no influence on others
    """
    if color is None:
        # Default to the particle's index.  Test ``is None`` rather than
        # truthiness so an explicit color of 0 is not silently replaced.
        color = len(self.particles)
    p = Particle(pos, charge, mass, radius, color, vel, fixed, negligible)
    self.particles.append(p)
constant[ Adds a new particle with specified properties (in SI units) ]
variable[color] assign[=] <ast.BoolOp object at 0x7da1b06c5210>
variable[p] assign[=] call[name[Particle], parameter[name[pos], name[charge], name[mass], name[radius], name[color], name[vel], name[fixed], name[negligible]]]
call[name[self].particles.append, parameter[name[p]]] | keyword[def] identifier[add_particle] (
identifier[self] ,
identifier[pos] =( literal[int] , literal[int] , literal[int] ),
identifier[charge] = literal[int] ,
identifier[mass] = literal[int] ,
identifier[radius] = literal[int] ,
identifier[color] = keyword[None] ,
identifier[vel] =( literal[int] , literal[int] , literal[int] ),
identifier[fixed] = keyword[False] ,
identifier[negligible] = keyword[False] ,
):
literal[string]
identifier[color] = identifier[color] keyword[or] identifier[len] ( identifier[self] . identifier[particles] )
identifier[p] = identifier[Particle] ( identifier[pos] , identifier[charge] , identifier[mass] , identifier[radius] , identifier[color] , identifier[vel] , identifier[fixed] , identifier[negligible] )
identifier[self] . identifier[particles] . identifier[append] ( identifier[p] ) | def add_particle(self, pos=(0, 0, 0), charge=1e-06, mass=0.001, radius=0.005, color=None, vel=(0, 0, 0), fixed=False, negligible=False):
""" Adds a new particle with specified properties (in SI units) """
color = color or len(self.particles) # assigned or default color number
p = Particle(pos, charge, mass, radius, color, vel, fixed, negligible)
self.particles.append(p) |
def funcGauss1D(x, mu, sig):
    """ Create 1D Gaussian. Source:
    http://mathworld.wolfram.com/GaussianFunction.html

    Returns the unnormalised Gaussian evaluated at ``x`` and rescaled so
    the sampled values sum to one (discrete normalisation, used instead
    of the analytic ``1/(sqrt(2*pi)*sig)`` factor).
    """
    z = (x - mu) / sig
    kernel = np.exp(-np.power(z, 2.) / 2)
    # Normalise over the sampled grid so the weights sum to unity.
    return kernel / np.sum(kernel)
return arrOut | def function[funcGauss1D, parameter[x, mu, sig]]:
constant[ Create 1D Gaussian. Source:
http://mathworld.wolfram.com/GaussianFunction.html
]
variable[arrOut] assign[=] call[name[np].exp, parameter[binary_operation[<ast.UnaryOp object at 0x7da1b0f04340> / constant[2]]]]
variable[arrOut] assign[=] binary_operation[name[arrOut] / call[name[np].sum, parameter[name[arrOut]]]]
return[name[arrOut]] | keyword[def] identifier[funcGauss1D] ( identifier[x] , identifier[mu] , identifier[sig] ):
literal[string]
identifier[arrOut] = identifier[np] . identifier[exp] (- identifier[np] . identifier[power] (( identifier[x] - identifier[mu] )/ identifier[sig] , literal[int] )/ literal[int] )
identifier[arrOut] = identifier[arrOut] / identifier[np] . identifier[sum] ( identifier[arrOut] )
keyword[return] identifier[arrOut] | def funcGauss1D(x, mu, sig):
""" Create 1D Gaussian. Source:
http://mathworld.wolfram.com/GaussianFunction.html
"""
arrOut = np.exp(-np.power((x - mu) / sig, 2.0) / 2)
# normalize
# arrOut = arrOut/(np.sqrt(2.*np.pi)*sig)
# normalize (laternative)
arrOut = arrOut / np.sum(arrOut)
return arrOut |
def _element_at_zoom(name, element, zoom):
    """
    Return the element filtered by zoom level.
    - An input integer or float gets returned as is.
    - An input string is checked whether it starts with "zoom". Then, the
      provided zoom level gets parsed and compared with the actual zoom
      level. If zoom levels match, the element gets returned.

    Dictionaries are filtered recursively; a dictionary containing a
    "format" key is treated as an input/output driver config and returned
    unfiltered.  Single-entry results collapse to their value, except
    under the "input" key, which always keeps its dictionary shape.

    TODOs/gotchas:
    - Elements are unordered, which can lead to unexpected results when
      defining the YAML config.
    - Provided zoom levels for one element in config file are not allowed
      to "overlap", i.e. there is not yet a decision mechanism implemented
      which handles this case.
    """
    # If element is a dictionary, analyze subitems.
    if isinstance(element, dict):
        if "format" in element:
            # we have an input or output driver here
            return element
        out_elements = {}
        for sub_name, sub_element in element.items():
            out_element = _element_at_zoom(sub_name, sub_element, zoom)
            # "input" keeps every sub-key, even ones filtered to None;
            # everywhere else None entries are dropped.
            if name == "input":
                out_elements[sub_name] = out_element
            elif out_element is not None:
                out_elements[sub_name] = out_element
        # If there is only one subelement, collapse unless it is
        # input. In such case, return a dictionary.
        if len(out_elements) == 1 and name != "input":
            return next(iter(out_elements.values()))
        # If subelement is empty, return None
        if len(out_elements) == 0:
            return None
        return out_elements
    # If element is a zoom level statement, filter element.
    elif isinstance(name, str):
        if name.startswith("zoom"):
            # e.g. a key like "zoom<=10": strip the prefix and let
            # _filter_by_zoom decide whether element applies at `zoom`.
            return _filter_by_zoom(
                conf_string=name.strip("zoom").strip(), zoom=zoom,
                element=element)
        # If element is a string but not a zoom level statement, return
        # element.
        else:
            return element
    # Return all other types as they are.
    else:
        return element
constant[
Return the element filtered by zoom level.
- An input integer or float gets returned as is.
- An input string is checked whether it starts with "zoom". Then, the
provided zoom level gets parsed and compared with the actual zoom
level. If zoom levels match, the element gets returned.
TODOs/gotchas:
- Elements are unordered, which can lead to unexpected results when
defining the YAML config.
- Provided zoom levels for one element in config file are not allowed
to "overlap", i.e. there is not yet a decision mechanism implemented
which handles this case.
]
if call[name[isinstance], parameter[name[element], name[dict]]] begin[:]
if compare[constant[format] in name[element]] begin[:]
return[name[element]]
variable[out_elements] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da20c992ad0>, <ast.Name object at 0x7da20c991c60>]]] in starred[call[name[element].items, parameter[]]] begin[:]
variable[out_element] assign[=] call[name[_element_at_zoom], parameter[name[sub_name], name[sub_element], name[zoom]]]
if compare[name[name] equal[==] constant[input]] begin[:]
call[name[out_elements]][name[sub_name]] assign[=] name[out_element]
if <ast.BoolOp object at 0x7da20c993e20> begin[:]
return[call[name[next], parameter[call[name[iter], parameter[call[name[out_elements].values, parameter[]]]]]]]
if compare[call[name[len], parameter[name[out_elements]]] equal[==] constant[0]] begin[:]
return[constant[None]]
return[name[out_elements]] | keyword[def] identifier[_element_at_zoom] ( identifier[name] , identifier[element] , identifier[zoom] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[element] , identifier[dict] ):
keyword[if] literal[string] keyword[in] identifier[element] :
keyword[return] identifier[element]
identifier[out_elements] ={}
keyword[for] identifier[sub_name] , identifier[sub_element] keyword[in] identifier[element] . identifier[items] ():
identifier[out_element] = identifier[_element_at_zoom] ( identifier[sub_name] , identifier[sub_element] , identifier[zoom] )
keyword[if] identifier[name] == literal[string] :
identifier[out_elements] [ identifier[sub_name] ]= identifier[out_element]
keyword[elif] identifier[out_element] keyword[is] keyword[not] keyword[None] :
identifier[out_elements] [ identifier[sub_name] ]= identifier[out_element]
keyword[if] identifier[len] ( identifier[out_elements] )== literal[int] keyword[and] identifier[name] != literal[string] :
keyword[return] identifier[next] ( identifier[iter] ( identifier[out_elements] . identifier[values] ()))
keyword[if] identifier[len] ( identifier[out_elements] )== literal[int] :
keyword[return] keyword[None]
keyword[return] identifier[out_elements]
keyword[elif] identifier[isinstance] ( identifier[name] , identifier[str] ):
keyword[if] identifier[name] . identifier[startswith] ( literal[string] ):
keyword[return] identifier[_filter_by_zoom] (
identifier[conf_string] = identifier[name] . identifier[strip] ( literal[string] ). identifier[strip] (), identifier[zoom] = identifier[zoom] ,
identifier[element] = identifier[element] )
keyword[else] :
keyword[return] identifier[element]
keyword[else] :
keyword[return] identifier[element] | def _element_at_zoom(name, element, zoom):
"""
Return the element filtered by zoom level.
- An input integer or float gets returned as is.
- An input string is checked whether it starts with "zoom". Then, the
provided zoom level gets parsed and compared with the actual zoom
level. If zoom levels match, the element gets returned.
TODOs/gotchas:
- Elements are unordered, which can lead to unexpected results when
defining the YAML config.
- Provided zoom levels for one element in config file are not allowed
to "overlap", i.e. there is not yet a decision mechanism implemented
which handles this case.
"""
# If element is a dictionary, analyze subitems.
if isinstance(element, dict):
if 'format' in element:
# we have an input or output driver here
return element # depends on [control=['if'], data=['element']]
out_elements = {}
for (sub_name, sub_element) in element.items():
out_element = _element_at_zoom(sub_name, sub_element, zoom)
if name == 'input':
out_elements[sub_name] = out_element # depends on [control=['if'], data=[]]
elif out_element is not None:
out_elements[sub_name] = out_element # depends on [control=['if'], data=['out_element']] # depends on [control=['for'], data=[]]
# If there is only one subelement, collapse unless it is
# input. In such case, return a dictionary.
if len(out_elements) == 1 and name != 'input':
return next(iter(out_elements.values())) # depends on [control=['if'], data=[]]
# If subelement is empty, return None
if len(out_elements) == 0:
return None # depends on [control=['if'], data=[]]
return out_elements # depends on [control=['if'], data=[]]
# If element is a zoom level statement, filter element.
elif isinstance(name, str):
if name.startswith('zoom'):
return _filter_by_zoom(conf_string=name.strip('zoom').strip(), zoom=zoom, element=element) # depends on [control=['if'], data=[]]
else:
# If element is a string but not a zoom level statement, return
# element.
return element # depends on [control=['if'], data=[]]
else:
# Return all other types as they are.
return element |
def bm25_weight(X, K1=100, B=0.8):
    """Apply BM25 weighting to every row of a sparse matrix.

    Parameters
    ----------
    X : array_like or sparse matrix
        Interaction matrix; converted to COO format.
    K1 : float
        BM25 term-saturation parameter.
    B : float
        BM25 length-normalisation parameter in [0, 1].

    Returns
    -------
    scipy.sparse.coo_matrix
        The same matrix with its non-zero entries re-weighted in place.
    """
    X = coo_matrix(X)
    n_rows = float(X.shape[0])
    # Inverse document frequency per column (term / user).
    idf = log(n_rows) - log1p(bincount(X.col))
    # Length normalisation per row (document / artist).
    totals = numpy.ravel(X.sum(axis=1))
    length_norm = (1.0 - B) + B * totals / totals.mean()
    # Re-weight each stored entry with the BM25 formula.
    X.data = X.data * (K1 + 1.0) / (K1 * length_norm[X.row] + X.data) * idf[X.col]
    return X
constant[ Weighs each row of a sparse matrix X by BM25 weighting ]
variable[X] assign[=] call[name[coo_matrix], parameter[name[X]]]
variable[N] assign[=] call[name[float], parameter[call[name[X].shape][constant[0]]]]
variable[idf] assign[=] binary_operation[call[name[log], parameter[name[N]]] - call[name[log1p], parameter[call[name[bincount], parameter[name[X].col]]]]]
variable[row_sums] assign[=] call[name[numpy].ravel, parameter[call[name[X].sum, parameter[]]]]
variable[average_length] assign[=] call[name[row_sums].mean, parameter[]]
variable[length_norm] assign[=] binary_operation[binary_operation[constant[1.0] - name[B]] + binary_operation[binary_operation[name[B] * name[row_sums]] / name[average_length]]]
name[X].data assign[=] binary_operation[binary_operation[binary_operation[name[X].data * binary_operation[name[K1] + constant[1.0]]] / binary_operation[binary_operation[name[K1] * call[name[length_norm]][name[X].row]] + name[X].data]] * call[name[idf]][name[X].col]]
return[name[X]] | keyword[def] identifier[bm25_weight] ( identifier[X] , identifier[K1] = literal[int] , identifier[B] = literal[int] ):
literal[string]
identifier[X] = identifier[coo_matrix] ( identifier[X] )
identifier[N] = identifier[float] ( identifier[X] . identifier[shape] [ literal[int] ])
identifier[idf] = identifier[log] ( identifier[N] )- identifier[log1p] ( identifier[bincount] ( identifier[X] . identifier[col] ))
identifier[row_sums] = identifier[numpy] . identifier[ravel] ( identifier[X] . identifier[sum] ( identifier[axis] = literal[int] ))
identifier[average_length] = identifier[row_sums] . identifier[mean] ()
identifier[length_norm] =( literal[int] - identifier[B] )+ identifier[B] * identifier[row_sums] / identifier[average_length]
identifier[X] . identifier[data] = identifier[X] . identifier[data] *( identifier[K1] + literal[int] )/( identifier[K1] * identifier[length_norm] [ identifier[X] . identifier[row] ]+ identifier[X] . identifier[data] )* identifier[idf] [ identifier[X] . identifier[col] ]
keyword[return] identifier[X] | def bm25_weight(X, K1=100, B=0.8):
""" Weighs each row of a sparse matrix X by BM25 weighting """
# calculate idf per term (user)
X = coo_matrix(X)
N = float(X.shape[0])
idf = log(N) - log1p(bincount(X.col))
# calculate length_norm per document (artist)
row_sums = numpy.ravel(X.sum(axis=1))
average_length = row_sums.mean()
length_norm = 1.0 - B + B * row_sums / average_length
# weight matrix rows by bm25
X.data = X.data * (K1 + 1.0) / (K1 * length_norm[X.row] + X.data) * idf[X.col]
return X |
def get_new_cell_attr_state(self, key, attr_key):
    """Return the next value in the toggle cycle of a cell attribute.

    Parameters
    ----------
    key: 3-Tuple
    \tCell whose attribute toggle state is requested
    attr_key: Hashable
    \tAttribute key
    """
    cycle = self.attr_toggle_values[attr_key]
    current = self.grid.code_array.cell_attributes[key][attr_key]
    # Map each toggle value onto its successor; the last wraps to the first.
    successor = dict(zip(cycle, cycle[1:] + cycle[:1]))
    return successor[current]
constant[Returns new attr cell state for toggles
Parameters
----------
key: 3-Tuple
Cell for which attr toggle shall be returned
attr_key: Hashable
Attribute key
]
variable[cell_attributes] assign[=] name[self].grid.code_array.cell_attributes
variable[attr_values] assign[=] call[name[self].attr_toggle_values][name[attr_key]]
variable[attr_map] assign[=] call[name[dict], parameter[call[name[zip], parameter[name[attr_values], binary_operation[call[name[attr_values]][<ast.Slice object at 0x7da1b16216f0>] + call[name[attr_values]][<ast.Slice object at 0x7da1b1621570>]]]]]]
return[call[name[attr_map]][call[call[name[cell_attributes]][name[key]]][name[attr_key]]]] | keyword[def] identifier[get_new_cell_attr_state] ( identifier[self] , identifier[key] , identifier[attr_key] ):
literal[string]
identifier[cell_attributes] = identifier[self] . identifier[grid] . identifier[code_array] . identifier[cell_attributes]
identifier[attr_values] = identifier[self] . identifier[attr_toggle_values] [ identifier[attr_key] ]
identifier[attr_map] = identifier[dict] ( identifier[zip] ( identifier[attr_values] , identifier[attr_values] [ literal[int] :]+ identifier[attr_values] [: literal[int] ]))
keyword[return] identifier[attr_map] [ identifier[cell_attributes] [ identifier[key] ][ identifier[attr_key] ]] | def get_new_cell_attr_state(self, key, attr_key):
"""Returns new attr cell state for toggles
Parameters
----------
key: 3-Tuple
Cell for which attr toggle shall be returned
attr_key: Hashable
Attribute key
"""
cell_attributes = self.grid.code_array.cell_attributes
attr_values = self.attr_toggle_values[attr_key]
# Map attr_value to next attr_value
attr_map = dict(zip(attr_values, attr_values[1:] + attr_values[:1]))
# Return next value from attr_toggle_values value list
return attr_map[cell_attributes[key][attr_key]] |
def _to_reader_frame(self):
    """Switch the browser into the KindleReader iframe and wait for it to load."""
    frame_id = 'KindleReaderIFrame'
    # Wait until the iframe element exists before switching into it.
    self._wait().until(lambda br: br.find_elements_by_id(frame_id))
    self.switch_to.frame(frame_id)  # pylint: disable=no-member
    # Wait for the reader chrome to appear inside the frame.
    self._wait().until(lambda br: br.find_elements_by_id('kindleReader_header'))
constant[Navigate to the KindleReader iframe.]
variable[reader_frame] assign[=] constant[KindleReaderIFrame]
variable[frame_loaded] assign[=] <ast.Lambda object at 0x7da1b06cf5e0>
call[call[name[self]._wait, parameter[]].until, parameter[name[frame_loaded]]]
call[name[self].switch_to.frame, parameter[name[reader_frame]]]
variable[reader_loaded] assign[=] <ast.Lambda object at 0x7da1b06cd7b0>
call[call[name[self]._wait, parameter[]].until, parameter[name[reader_loaded]]] | keyword[def] identifier[_to_reader_frame] ( identifier[self] ):
literal[string]
identifier[reader_frame] = literal[string]
identifier[frame_loaded] = keyword[lambda] identifier[br] : identifier[br] . identifier[find_elements_by_id] ( identifier[reader_frame] )
identifier[self] . identifier[_wait] (). identifier[until] ( identifier[frame_loaded] )
identifier[self] . identifier[switch_to] . identifier[frame] ( identifier[reader_frame] )
identifier[reader_loaded] = keyword[lambda] identifier[br] : identifier[br] . identifier[find_elements_by_id] ( literal[string] )
identifier[self] . identifier[_wait] (). identifier[until] ( identifier[reader_loaded] ) | def _to_reader_frame(self):
"""Navigate to the KindleReader iframe."""
reader_frame = 'KindleReaderIFrame'
frame_loaded = lambda br: br.find_elements_by_id(reader_frame)
self._wait().until(frame_loaded)
self.switch_to.frame(reader_frame) # pylint: disable=no-member
reader_loaded = lambda br: br.find_elements_by_id('kindleReader_header')
self._wait().until(reader_loaded) |
def authenticate(self, api_key):
    """Log the user into Heroku with *api_key*.

    Stores the key, attaches it to the HTTP session as basic auth with an
    empty user name, then verifies the key against the API.
    """
    self._api_key = api_key
    # Heroku expects an empty user name with the API key as the password.
    self._session.auth = ('', api_key)
    return self._verify_api_key()
constant[Logs user into Heroku with given api_key.]
name[self]._api_key assign[=] name[api_key]
name[self]._session.auth assign[=] tuple[[<ast.Constant object at 0x7da18fe923b0>, <ast.Attribute object at 0x7da18fe93fa0>]]
return[call[name[self]._verify_api_key, parameter[]]] | keyword[def] identifier[authenticate] ( identifier[self] , identifier[api_key] ):
literal[string]
identifier[self] . identifier[_api_key] = identifier[api_key]
identifier[self] . identifier[_session] . identifier[auth] =( literal[string] , identifier[self] . identifier[_api_key] )
keyword[return] identifier[self] . identifier[_verify_api_key] () | def authenticate(self, api_key):
"""Logs user into Heroku with given api_key."""
self._api_key = api_key
# Attach auth to session.
self._session.auth = ('', self._api_key)
return self._verify_api_key() |
def get_permissions(self):
    '''
    Return the Acl for this directory, or None for special collections
    (e.g. .session or .algo) that carry no ACL in the server response.

    Raises DataApiError if the server does not answer with HTTP 200.
    '''
    response = self.client.getHelper(self.url, acl='true')
    if response.status_code != 200:
        raise DataApiError('Unable to get permissions:' + str(response.content))
    content = response.json()
    if 'acl' not in content:
        return None
    return Acl.from_acl_response(content['acl'])
constant[
Returns permissions for this directory or None if it's a special collection such as
.session or .algo
]
variable[response] assign[=] call[name[self].client.getHelper, parameter[name[self].url]]
if compare[name[response].status_code not_equal[!=] constant[200]] begin[:]
<ast.Raise object at 0x7da207f00550>
variable[content] assign[=] call[name[response].json, parameter[]]
if compare[constant[acl] in name[content]] begin[:]
return[call[name[Acl].from_acl_response, parameter[call[name[content]][constant[acl]]]]] | keyword[def] identifier[get_permissions] ( identifier[self] ):
literal[string]
identifier[response] = identifier[self] . identifier[client] . identifier[getHelper] ( identifier[self] . identifier[url] , identifier[acl] = literal[string] )
keyword[if] identifier[response] . identifier[status_code] != literal[int] :
keyword[raise] identifier[DataApiError] ( literal[string] + identifier[str] ( identifier[response] . identifier[content] ))
identifier[content] = identifier[response] . identifier[json] ()
keyword[if] literal[string] keyword[in] identifier[content] :
keyword[return] identifier[Acl] . identifier[from_acl_response] ( identifier[content] [ literal[string] ])
keyword[else] :
keyword[return] keyword[None] | def get_permissions(self):
"""
Returns permissions for this directory or None if it's a special collection such as
.session or .algo
"""
response = self.client.getHelper(self.url, acl='true')
if response.status_code != 200:
raise DataApiError('Unable to get permissions:' + str(response.content)) # depends on [control=['if'], data=[]]
content = response.json()
if 'acl' in content:
return Acl.from_acl_response(content['acl']) # depends on [control=['if'], data=['content']]
else:
return None |
def request_matches_route(self, actual_route: str, expected_route: str):
    """
    Determine whether the actually requested route matches an expected
    route pattern.  A pattern segment wrapped in braces (e.g. ``{id}``)
    matches any actual segment; all other segments must match literally.

    :param actual_route: the route that was actually requested
    :param expected_route: the route pattern to compare against
    :rtype: Boolean
    """
    expected_params = self.get_url_params(expected_route)
    actual_params = self.get_url_params(actual_route)
    if len(expected_params) != len(actual_params):
        return False
    # startswith() also copes with empty segments, which the previous
    # "expected_params[i][0]" indexing would have crashed on.
    return all(
        expected.startswith("{") or actual == expected
        for actual, expected in zip(actual_params, expected_params)
    )
constant[
Determines whether a route matches the actual requested route or not
:param actual_route str
:param expected_route
:rtype: Boolean
]
variable[expected_params] assign[=] call[name[self].get_url_params, parameter[name[expected_route]]]
variable[actual_params] assign[=] call[name[self].get_url_params, parameter[name[actual_route]]]
variable[i] assign[=] constant[0]
if compare[call[name[len], parameter[name[expected_params]]] equal[==] call[name[len], parameter[name[actual_params]]]] begin[:]
for taget[name[param]] in starred[name[actual_params]] begin[:]
if compare[call[call[name[expected_params]][name[i]]][constant[0]] not_equal[!=] constant[{]] begin[:]
if compare[name[param] not_equal[!=] call[name[expected_params]][name[i]]] begin[:]
return[constant[False]]
<ast.AugAssign object at 0x7da20c990a60>
return[constant[True]] | keyword[def] identifier[request_matches_route] ( identifier[self] , identifier[actual_route] : identifier[str] , identifier[expected_route] : identifier[str] ):
literal[string]
identifier[expected_params] = identifier[self] . identifier[get_url_params] ( identifier[expected_route] )
identifier[actual_params] = identifier[self] . identifier[get_url_params] ( identifier[actual_route] )
identifier[i] = literal[int]
keyword[if] identifier[len] ( identifier[expected_params] )== identifier[len] ( identifier[actual_params] ):
keyword[for] identifier[param] keyword[in] identifier[actual_params] :
keyword[if] identifier[expected_params] [ identifier[i] ][ literal[int] ]!= literal[string] :
keyword[if] identifier[param] != identifier[expected_params] [ identifier[i] ]:
keyword[return] keyword[False]
identifier[i] += literal[int]
keyword[else] :
keyword[return] keyword[False]
keyword[return] keyword[True] | def request_matches_route(self, actual_route: str, expected_route: str):
"""
Determines whether a route matches the actual requested route or not
:param actual_route str
:param expected_route
:rtype: Boolean
"""
expected_params = self.get_url_params(expected_route)
actual_params = self.get_url_params(actual_route)
i = 0
if len(expected_params) == len(actual_params):
for param in actual_params:
if expected_params[i][0] != '{':
if param != expected_params[i]:
return False # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
i += 1 # depends on [control=['for'], data=['param']] # depends on [control=['if'], data=[]]
else:
return False
return True |
def radec2azel(ra_deg: float, dec_deg: float,
               lat_deg: float, lon_deg: float,
               time: datetime, usevallado: bool = False) -> Tuple[float, float]:
    """
    sky coordinates (ra, dec) to viewing angle (az, el)
    Parameters
    ----------
    ra_deg : float or numpy.ndarray of float
        ecliptic right ascension (degress)
    dec_deg : float or numpy.ndarray of float
        ecliptic declination (degrees)
    lat_deg : float
        observer latitude [-90, 90]
    lon_deg : float
        observer longitude [-180, 180] (degrees)
    time : datetime.datetime
        time of observation
    usevallado : bool, optional
        default use astropy. If true, use Vallado algorithm
    Returns
    -------
    az_deg : float or numpy.ndarray of float
        azimuth [degrees clockwize from North]
    el_deg : float or numpy.ndarray of float
        elevation [degrees above horizon (neglecting aberration)]
    """
    # NOTE(review): Time is presumably None when astropy failed to import —
    # confirm at the module-level import guard.  Either way, fall back to the
    # pure-Python Vallado implementation.
    if usevallado or Time is None:
        return vradec2azel(ra_deg, dec_deg, lat_deg, lon_deg, time)
    # %% input trapping — promote scalars to 1-D arrays for astropy
    lat = np.atleast_1d(lat_deg)
    lon = np.atleast_1d(lon_deg)
    ra = np.atleast_1d(ra_deg)
    dec = np.atleast_1d(dec_deg)
    # Observer position on Earth (height defaults to sea level).
    obs = EarthLocation(lat=lat * u.deg,
                        lon=lon * u.deg)
    # Sky positions at the J2000.0 equinox.
    points = SkyCoord(Angle(ra, unit=u.deg),
                      Angle(dec, unit=u.deg),
                      equinox='J2000.0')
    # Transform to the horizontal (alt/az) frame at the observation time.
    altaz = points.transform_to(AltAz(location=obs, obstime=Time(str2dt(time))))
    return altaz.az.degree, altaz.alt.degree
constant[
sky coordinates (ra, dec) to viewing angle (az, el)
Parameters
----------
ra_deg : float or numpy.ndarray of float
ecliptic right ascension (degress)
dec_deg : float or numpy.ndarray of float
ecliptic declination (degrees)
lat_deg : float
observer latitude [-90, 90]
lon_deg : float
observer longitude [-180, 180] (degrees)
time : datetime.datetime
time of observation
usevallado : bool, optional
default use astropy. If true, use Vallado algorithm
Returns
-------
az_deg : float or numpy.ndarray of float
azimuth [degrees clockwize from North]
el_deg : float or numpy.ndarray of float
elevation [degrees above horizon (neglecting aberration)]
]
if <ast.BoolOp object at 0x7da1b13ce050> begin[:]
return[call[name[vradec2azel], parameter[name[ra_deg], name[dec_deg], name[lat_deg], name[lon_deg], name[time]]]]
variable[lat] assign[=] call[name[np].atleast_1d, parameter[name[lat_deg]]]
variable[lon] assign[=] call[name[np].atleast_1d, parameter[name[lon_deg]]]
variable[ra] assign[=] call[name[np].atleast_1d, parameter[name[ra_deg]]]
variable[dec] assign[=] call[name[np].atleast_1d, parameter[name[dec_deg]]]
variable[obs] assign[=] call[name[EarthLocation], parameter[]]
variable[points] assign[=] call[name[SkyCoord], parameter[call[name[Angle], parameter[name[ra]]], call[name[Angle], parameter[name[dec]]]]]
variable[altaz] assign[=] call[name[points].transform_to, parameter[call[name[AltAz], parameter[]]]]
return[tuple[[<ast.Attribute object at 0x7da1b13cf280>, <ast.Attribute object at 0x7da1b13cec50>]]] | keyword[def] identifier[radec2azel] ( identifier[ra_deg] : identifier[float] , identifier[dec_deg] : identifier[float] ,
identifier[lat_deg] : identifier[float] , identifier[lon_deg] : identifier[float] ,
identifier[time] : identifier[datetime] , identifier[usevallado] : identifier[bool] = keyword[False] )-> identifier[Tuple] [ identifier[float] , identifier[float] ]:
literal[string]
keyword[if] identifier[usevallado] keyword[or] identifier[Time] keyword[is] keyword[None] :
keyword[return] identifier[vradec2azel] ( identifier[ra_deg] , identifier[dec_deg] , identifier[lat_deg] , identifier[lon_deg] , identifier[time] )
identifier[lat] = identifier[np] . identifier[atleast_1d] ( identifier[lat_deg] )
identifier[lon] = identifier[np] . identifier[atleast_1d] ( identifier[lon_deg] )
identifier[ra] = identifier[np] . identifier[atleast_1d] ( identifier[ra_deg] )
identifier[dec] = identifier[np] . identifier[atleast_1d] ( identifier[dec_deg] )
identifier[obs] = identifier[EarthLocation] ( identifier[lat] = identifier[lat] * identifier[u] . identifier[deg] ,
identifier[lon] = identifier[lon] * identifier[u] . identifier[deg] )
identifier[points] = identifier[SkyCoord] ( identifier[Angle] ( identifier[ra] , identifier[unit] = identifier[u] . identifier[deg] ),
identifier[Angle] ( identifier[dec] , identifier[unit] = identifier[u] . identifier[deg] ),
identifier[equinox] = literal[string] )
identifier[altaz] = identifier[points] . identifier[transform_to] ( identifier[AltAz] ( identifier[location] = identifier[obs] , identifier[obstime] = identifier[Time] ( identifier[str2dt] ( identifier[time] ))))
keyword[return] identifier[altaz] . identifier[az] . identifier[degree] , identifier[altaz] . identifier[alt] . identifier[degree] | def radec2azel(ra_deg: float, dec_deg: float, lat_deg: float, lon_deg: float, time: datetime, usevallado: bool=False) -> Tuple[float, float]:
"""
sky coordinates (ra, dec) to viewing angle (az, el)
Parameters
----------
ra_deg : float or numpy.ndarray of float
ecliptic right ascension (degress)
dec_deg : float or numpy.ndarray of float
ecliptic declination (degrees)
lat_deg : float
observer latitude [-90, 90]
lon_deg : float
observer longitude [-180, 180] (degrees)
time : datetime.datetime
time of observation
usevallado : bool, optional
default use astropy. If true, use Vallado algorithm
Returns
-------
az_deg : float or numpy.ndarray of float
azimuth [degrees clockwize from North]
el_deg : float or numpy.ndarray of float
elevation [degrees above horizon (neglecting aberration)]
"""
if usevallado or Time is None:
return vradec2azel(ra_deg, dec_deg, lat_deg, lon_deg, time) # depends on [control=['if'], data=[]]
# %% input trapping
lat = np.atleast_1d(lat_deg)
lon = np.atleast_1d(lon_deg)
ra = np.atleast_1d(ra_deg)
dec = np.atleast_1d(dec_deg)
obs = EarthLocation(lat=lat * u.deg, lon=lon * u.deg)
points = SkyCoord(Angle(ra, unit=u.deg), Angle(dec, unit=u.deg), equinox='J2000.0')
altaz = points.transform_to(AltAz(location=obs, obstime=Time(str2dt(time))))
return (altaz.az.degree, altaz.alt.degree) |
def copy_resource_dir(src, dest):
    """
    Recursively copy package data directory *src* into *dest*.

    Parameters
    ----------
    src:
        Resource path inside the "mocha" package.
    dest:
        Target directory on disk; the basename of *src* is appended to it.
    """
    package_name = "mocha"
    dest = (dest + "/" + os.path.basename(src)).rstrip("/")
    if pkg_resources.resource_isdir(package_name, src):
        if not os.path.isdir(dest):
            os.makedirs(dest)
        # Fix: list resources of the "mocha" package itself, not of whatever
        # module this function lives in (__name__), so the recursion stays
        # consistent with the resource_isdir() check above.
        for res in pkg_resources.resource_listdir(package_name, src):
            copy_resource_dir(src + "/" + res, dest)
    elif not os.path.isfile(dest) and os.path.splitext(src)[1] != ".pyc":
        # Skip compiled byte-code; copy every other file that isn't there yet.
        copy_resource_file(src, dest)
constant[
To copy package data directory to destination
]
variable[package_name] assign[=] constant[mocha]
variable[dest] assign[=] call[binary_operation[binary_operation[name[dest] + constant[/]] + call[name[os].path.basename, parameter[name[src]]]].rstrip, parameter[constant[/]]]
if call[name[pkg_resources].resource_isdir, parameter[name[package_name], name[src]]] begin[:]
if <ast.UnaryOp object at 0x7da20c76cee0> begin[:]
call[name[os].makedirs, parameter[name[dest]]]
for taget[name[res]] in starred[call[name[pkg_resources].resource_listdir, parameter[name[__name__], name[src]]]] begin[:]
call[name[copy_resource_dir], parameter[binary_operation[binary_operation[name[src] + constant[/]] + name[res]], name[dest]]] | keyword[def] identifier[copy_resource_dir] ( identifier[src] , identifier[dest] ):
literal[string]
identifier[package_name] = literal[string]
identifier[dest] =( identifier[dest] + literal[string] + identifier[os] . identifier[path] . identifier[basename] ( identifier[src] )). identifier[rstrip] ( literal[string] )
keyword[if] identifier[pkg_resources] . identifier[resource_isdir] ( identifier[package_name] , identifier[src] ):
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( identifier[dest] ):
identifier[os] . identifier[makedirs] ( identifier[dest] )
keyword[for] identifier[res] keyword[in] identifier[pkg_resources] . identifier[resource_listdir] ( identifier[__name__] , identifier[src] ):
identifier[copy_resource_dir] ( identifier[src] + literal[string] + identifier[res] , identifier[dest] )
keyword[else] :
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isfile] ( identifier[dest] ) keyword[and] identifier[os] . identifier[path] . identifier[splitext] ( identifier[src] )[ literal[int] ] keyword[not] keyword[in] [ literal[string] ]:
identifier[copy_resource_file] ( identifier[src] , identifier[dest] ) | def copy_resource_dir(src, dest):
"""
To copy package data directory to destination
"""
package_name = 'mocha'
dest = (dest + '/' + os.path.basename(src)).rstrip('/')
if pkg_resources.resource_isdir(package_name, src):
if not os.path.isdir(dest):
os.makedirs(dest) # depends on [control=['if'], data=[]]
for res in pkg_resources.resource_listdir(__name__, src):
copy_resource_dir(src + '/' + res, dest) # depends on [control=['for'], data=['res']] # depends on [control=['if'], data=[]]
elif not os.path.isfile(dest) and os.path.splitext(src)[1] not in ['.pyc']:
copy_resource_file(src, dest) # depends on [control=['if'], data=[]] |
def query_organism_host():
    """
    Returns list of host organism by query parameters
    ---
    tags:
      - Query functions
    parameters:
      - name: taxid
        in: query
        type: integer
        required: false
        description: NCBI taxonomy identifier
        default: 9606
      - name: entry_name
        in: query
        type: string
        required: false
        description: UniProt entry name
        default: A4_HUMAN
      - name: limit
        in: query
        type: integer
        required: false
        description: limit of results numbers
        default: 10
    """
    # NOTE(review): the docstring above looks like a flasgger/Swagger YAML
    # spec that is presumably parsed at runtime to build the API docs —
    # keep its structure intact when editing.
    # Whitelist and type-coerce the accepted query parameters.
    args = get_args(
        request_args=request.args,
        allowed_str_args=['entry_name'],
        allowed_int_args=['taxid', 'limit']
    )
    # Delegate to the query layer and serialise the result as JSON.
    return jsonify(query.organism_host(**args))
constant[
Returns list of host organism by query parameters
---
tags:
- Query functions
parameters:
- name: taxid
in: query
type: integer
required: false
description: NCBI taxonomy identifier
default: 9606
- name: entry_name
in: query
type: string
required: false
description: UniProt entry name
default: A4_HUMAN
- name: limit
in: query
type: integer
required: false
description: limit of results numbers
default: 10
]
variable[args] assign[=] call[name[get_args], parameter[]]
return[call[name[jsonify], parameter[call[name[query].organism_host, parameter[]]]]] | keyword[def] identifier[query_organism_host] ():
literal[string]
identifier[args] = identifier[get_args] (
identifier[request_args] = identifier[request] . identifier[args] ,
identifier[allowed_str_args] =[ literal[string] ],
identifier[allowed_int_args] =[ literal[string] , literal[string] ]
)
keyword[return] identifier[jsonify] ( identifier[query] . identifier[organism_host] (** identifier[args] )) | def query_organism_host():
"""
Returns list of host organism by query parameters
---
tags:
- Query functions
parameters:
- name: taxid
in: query
type: integer
required: false
description: NCBI taxonomy identifier
default: 9606
- name: entry_name
in: query
type: string
required: false
description: UniProt entry name
default: A4_HUMAN
- name: limit
in: query
type: integer
required: false
description: limit of results numbers
default: 10
"""
args = get_args(request_args=request.args, allowed_str_args=['entry_name'], allowed_int_args=['taxid', 'limit'])
return jsonify(query.organism_host(**args)) |
def reload_programs(self):
    """Reload every shader program that has already been loaded."""
    print("Reloading programs:")
    for program in self._programs.values():
        # Skip entries that were never loaded (no compiled program attached).
        if not getattr(program, 'program', None):
            continue
        print(" - {}".format(program.meta.label))
        program.program = resources.programs.load(program.meta)
constant[
Reload all shader programs with the reloadable flag set
]
call[name[print], parameter[constant[Reloading programs:]]]
for taget[tuple[[<ast.Name object at 0x7da20c6e6770>, <ast.Name object at 0x7da20c6e4190>]]] in starred[call[name[self]._programs.items, parameter[]]] begin[:]
if call[name[getattr], parameter[name[program], constant[program], constant[None]]] begin[:]
call[name[print], parameter[call[constant[ - {}].format, parameter[name[program].meta.label]]]]
name[program].program assign[=] call[name[resources].programs.load, parameter[name[program].meta]] | keyword[def] identifier[reload_programs] ( identifier[self] ):
literal[string]
identifier[print] ( literal[string] )
keyword[for] identifier[name] , identifier[program] keyword[in] identifier[self] . identifier[_programs] . identifier[items] ():
keyword[if] identifier[getattr] ( identifier[program] , literal[string] , keyword[None] ):
identifier[print] ( literal[string] . identifier[format] ( identifier[program] . identifier[meta] . identifier[label] ))
identifier[program] . identifier[program] = identifier[resources] . identifier[programs] . identifier[load] ( identifier[program] . identifier[meta] ) | def reload_programs(self):
"""
Reload all shader programs with the reloadable flag set
"""
print('Reloading programs:')
for (name, program) in self._programs.items():
if getattr(program, 'program', None):
print(' - {}'.format(program.meta.label))
program.program = resources.programs.load(program.meta) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] |
def next_frame_sv2p_cutoff():
  """SV2P variant with an L2-loss cutoff for environments such as Pong."""
  hparams = next_frame_sv2p()
  # Use a short 4-frame context predicting a single target frame, and cut
  # off small per-pixel losses so the model focuses on moving objects.
  hparams.video_num_input_frames = 4
  hparams.video_num_target_frames = 1
  hparams.video_modality_loss_cutoff = 0.4
  return hparams
constant[SV2P model with additional cutoff in L2 loss for environments like pong.]
variable[hparams] assign[=] call[name[next_frame_sv2p], parameter[]]
name[hparams].video_modality_loss_cutoff assign[=] constant[0.4]
name[hparams].video_num_input_frames assign[=] constant[4]
name[hparams].video_num_target_frames assign[=] constant[1]
return[name[hparams]] | keyword[def] identifier[next_frame_sv2p_cutoff] ():
literal[string]
identifier[hparams] = identifier[next_frame_sv2p] ()
identifier[hparams] . identifier[video_modality_loss_cutoff] = literal[int]
identifier[hparams] . identifier[video_num_input_frames] = literal[int]
identifier[hparams] . identifier[video_num_target_frames] = literal[int]
keyword[return] identifier[hparams] | def next_frame_sv2p_cutoff():
"""SV2P model with additional cutoff in L2 loss for environments like pong."""
hparams = next_frame_sv2p()
hparams.video_modality_loss_cutoff = 0.4
hparams.video_num_input_frames = 4
hparams.video_num_target_frames = 1
return hparams |
def moving_tajima_d(ac, size, start=0, stop=None, step=None, min_sites=3):
    """Compute Tajima's D in moving windows of `size` variants.

    Parameters
    ----------
    ac : array_like, int, shape (n_variants, n_alleles)
        Allele counts array.
    size : int
        Window size in number of variants.
    start : int, optional
        Index at which to start.
    stop : int, optional
        Index at which to stop.
    step : int, optional
        Number of variants between window start positions; defaults to
        `size`, i.e. non-overlapping windows.
    min_sites : int, optional
        Minimum number of segregating sites per window; windows with fewer
        yield np.nan. Defaults to 3.

    Returns
    -------
    d : ndarray, float, shape (n_windows,)
        Tajima's D per window.
    """
    # Delegate the windowing to the generic moving_statistic helper,
    # applying tajima_d to each window of allele counts.
    return moving_statistic(values=ac, statistic=tajima_d, size=size,
                            start=start, stop=stop, step=step,
                            min_sites=min_sites)
constant[Calculate the value of Tajima's D in moving windows of `size` variants.
Parameters
----------
ac : array_like, int, shape (n_variants, n_alleles)
Allele counts array.
size : int
The window size (number of variants).
start : int, optional
The index at which to start.
stop : int, optional
The index at which to stop.
step : int, optional
The number of variants between start positions of windows. If not
given, defaults to the window size, i.e., non-overlapping windows.
min_sites : int, optional
Minimum number of segregating sites for which to calculate a value. If
there are fewer, np.nan is returned. Defaults to 3.
Returns
-------
d : ndarray, float, shape (n_windows,)
Tajima's D.
Examples
--------
>>> import allel
>>> g = allel.GenotypeArray([[[0, 0], [0, 0]],
... [[0, 0], [0, 1]],
... [[0, 0], [1, 1]],
... [[0, 1], [1, 1]],
... [[1, 1], [1, 1]],
... [[0, 0], [1, 2]],
... [[0, 1], [1, 2]],
... [[0, 1], [-1, -1]],
... [[-1, -1], [-1, -1]]])
>>> ac = g.count_alleles()
>>> D = allel.moving_tajima_d(ac, size=4, step=2)
>>> D
array([0.1676558 , 2.01186954, 5.70029703])
]
variable[d] assign[=] call[name[moving_statistic], parameter[]]
return[name[d]] | keyword[def] identifier[moving_tajima_d] ( identifier[ac] , identifier[size] , identifier[start] = literal[int] , identifier[stop] = keyword[None] , identifier[step] = keyword[None] , identifier[min_sites] = literal[int] ):
literal[string]
identifier[d] = identifier[moving_statistic] ( identifier[values] = identifier[ac] , identifier[statistic] = identifier[tajima_d] , identifier[size] = identifier[size] , identifier[start] = identifier[start] , identifier[stop] = identifier[stop] ,
identifier[step] = identifier[step] , identifier[min_sites] = identifier[min_sites] )
keyword[return] identifier[d] | def moving_tajima_d(ac, size, start=0, stop=None, step=None, min_sites=3):
"""Calculate the value of Tajima's D in moving windows of `size` variants.
Parameters
----------
ac : array_like, int, shape (n_variants, n_alleles)
Allele counts array.
size : int
The window size (number of variants).
start : int, optional
The index at which to start.
stop : int, optional
The index at which to stop.
step : int, optional
The number of variants between start positions of windows. If not
given, defaults to the window size, i.e., non-overlapping windows.
min_sites : int, optional
Minimum number of segregating sites for which to calculate a value. If
there are fewer, np.nan is returned. Defaults to 3.
Returns
-------
d : ndarray, float, shape (n_windows,)
Tajima's D.
Examples
--------
>>> import allel
>>> g = allel.GenotypeArray([[[0, 0], [0, 0]],
... [[0, 0], [0, 1]],
... [[0, 0], [1, 1]],
... [[0, 1], [1, 1]],
... [[1, 1], [1, 1]],
... [[0, 0], [1, 2]],
... [[0, 1], [1, 2]],
... [[0, 1], [-1, -1]],
... [[-1, -1], [-1, -1]]])
>>> ac = g.count_alleles()
>>> D = allel.moving_tajima_d(ac, size=4, step=2)
>>> D
array([0.1676558 , 2.01186954, 5.70029703])
"""
d = moving_statistic(values=ac, statistic=tajima_d, size=size, start=start, stop=stop, step=step, min_sites=min_sites)
return d |
def bias(self, arr:Collection, is_item:bool=True):
    "Bias for item or user (based on `is_item`) for all in `arr`. (Set model to `cpu` and no grad.)"
    indices = self.get_idx(arr, is_item)
    # Pick the item-bias or user-bias embedding layer of the wrapped model.
    bias_layer = self.model.i_bias if is_item else self.model.u_bias
    return bias_layer(indices).squeeze()
constant[Bias for item or user (based on `is_item`) for all in `arr`. (Set model to `cpu` and no grad.)]
variable[idx] assign[=] call[name[self].get_idx, parameter[name[arr], name[is_item]]]
variable[m] assign[=] name[self].model
variable[layer] assign[=] <ast.IfExp object at 0x7da1b1df9c30>
return[call[call[name[layer], parameter[name[idx]]].squeeze, parameter[]]] | keyword[def] identifier[bias] ( identifier[self] , identifier[arr] : identifier[Collection] , identifier[is_item] : identifier[bool] = keyword[True] ):
literal[string]
identifier[idx] = identifier[self] . identifier[get_idx] ( identifier[arr] , identifier[is_item] )
identifier[m] = identifier[self] . identifier[model]
identifier[layer] = identifier[m] . identifier[i_bias] keyword[if] identifier[is_item] keyword[else] identifier[m] . identifier[u_bias]
keyword[return] identifier[layer] ( identifier[idx] ). identifier[squeeze] () | def bias(self, arr: Collection, is_item: bool=True):
"""Bias for item or user (based on `is_item`) for all in `arr`. (Set model to `cpu` and no grad.)"""
idx = self.get_idx(arr, is_item)
m = self.model
layer = m.i_bias if is_item else m.u_bias
return layer(idx).squeeze() |
def get_bookmarks(self, time=None, chan=None):
    """
    Raises
    ------
    IndexError
        When there is no selected rater
    """
    # Read the bookmark container from the current rater's XML tree.
    try:
        bookmarks = self.rater.find('bookmarks')
    except AttributeError:
        raise IndexError('You need to have at least one rater')

    matching = []
    for marker in bookmarks:
        mrk_start = float(marker.find('bookmark_start').text)
        mrk_end = float(marker.find('bookmark_end').text)
        mrk_chan = marker.find('bookmark_chan').text
        if mrk_chan is None:  # xml doesn't store empty string
            mrk_chan = ''

        # A missing filter always matches; otherwise require window overlap
        # (for time) or exact channel-string equality (for chan).
        in_window = True if time is None else (time[0] <= mrk_end and
                                               time[1] >= mrk_start)
        on_chan = True if chan is None else mrk_chan == chan

        if in_window and on_chan:
            matching.append({'name': marker.find('bookmark_name').text,
                             'start': mrk_start,
                             'end': mrk_end,
                             'chan': mrk_chan.split(', '),  # always a list
                             })

    return matching
constant[
Raises
------
IndexError
When there is no selected rater
]
<ast.Try object at 0x7da1b0d77370>
variable[mrks] assign[=] list[[]]
for taget[name[m]] in starred[name[bookmarks]] begin[:]
variable[bookmark_start] assign[=] call[name[float], parameter[call[name[m].find, parameter[constant[bookmark_start]]].text]]
variable[bookmark_end] assign[=] call[name[float], parameter[call[name[m].find, parameter[constant[bookmark_end]]].text]]
variable[bookmark_chan] assign[=] call[name[m].find, parameter[constant[bookmark_chan]]].text
if compare[name[bookmark_chan] is constant[None]] begin[:]
variable[bookmark_chan] assign[=] constant[]
if compare[name[time] is constant[None]] begin[:]
variable[time_cond] assign[=] constant[True]
if compare[name[chan] is constant[None]] begin[:]
variable[chan_cond] assign[=] constant[True]
if <ast.BoolOp object at 0x7da207f008e0> begin[:]
variable[one_mrk] assign[=] dictionary[[<ast.Constant object at 0x7da207f00ee0>, <ast.Constant object at 0x7da207f00640>, <ast.Constant object at 0x7da207f02ce0>, <ast.Constant object at 0x7da207f00220>], [<ast.Attribute object at 0x7da207f01720>, <ast.Name object at 0x7da207f02b30>, <ast.Name object at 0x7da207f01540>, <ast.Call object at 0x7da207f01900>]]
call[name[mrks].append, parameter[name[one_mrk]]]
return[name[mrks]] | keyword[def] identifier[get_bookmarks] ( identifier[self] , identifier[time] = keyword[None] , identifier[chan] = keyword[None] ):
literal[string]
keyword[try] :
identifier[bookmarks] = identifier[self] . identifier[rater] . identifier[find] ( literal[string] )
keyword[except] identifier[AttributeError] :
keyword[raise] identifier[IndexError] ( literal[string] )
identifier[mrks] =[]
keyword[for] identifier[m] keyword[in] identifier[bookmarks] :
identifier[bookmark_start] = identifier[float] ( identifier[m] . identifier[find] ( literal[string] ). identifier[text] )
identifier[bookmark_end] = identifier[float] ( identifier[m] . identifier[find] ( literal[string] ). identifier[text] )
identifier[bookmark_chan] = identifier[m] . identifier[find] ( literal[string] ). identifier[text]
keyword[if] identifier[bookmark_chan] keyword[is] keyword[None] :
identifier[bookmark_chan] = literal[string]
keyword[if] identifier[time] keyword[is] keyword[None] :
identifier[time_cond] = keyword[True]
keyword[else] :
identifier[time_cond] =( identifier[time] [ literal[int] ]<= identifier[bookmark_end] keyword[and]
identifier[time] [ literal[int] ]>= identifier[bookmark_start] )
keyword[if] identifier[chan] keyword[is] keyword[None] :
identifier[chan_cond] = keyword[True]
keyword[else] :
identifier[chan_cond] = identifier[bookmark_chan] == identifier[chan]
keyword[if] identifier[time_cond] keyword[and] identifier[chan_cond] :
identifier[one_mrk] ={ literal[string] : identifier[m] . identifier[find] ( literal[string] ). identifier[text] ,
literal[string] : identifier[bookmark_start] ,
literal[string] : identifier[bookmark_end] ,
literal[string] : identifier[bookmark_chan] . identifier[split] ( literal[string] ),
}
identifier[mrks] . identifier[append] ( identifier[one_mrk] )
keyword[return] identifier[mrks] | def get_bookmarks(self, time=None, chan=None):
"""
Raises
------
IndexError
When there is no selected rater
"""
# get bookmarks inside window
try:
bookmarks = self.rater.find('bookmarks') # depends on [control=['try'], data=[]]
except AttributeError:
raise IndexError('You need to have at least one rater') # depends on [control=['except'], data=[]]
mrks = []
for m in bookmarks:
bookmark_start = float(m.find('bookmark_start').text)
bookmark_end = float(m.find('bookmark_end').text)
bookmark_chan = m.find('bookmark_chan').text
if bookmark_chan is None: # xml doesn't store empty string
bookmark_chan = '' # depends on [control=['if'], data=['bookmark_chan']]
if time is None:
time_cond = True # depends on [control=['if'], data=[]]
else:
time_cond = time[0] <= bookmark_end and time[1] >= bookmark_start
if chan is None:
chan_cond = True # depends on [control=['if'], data=[]]
else:
chan_cond = bookmark_chan == chan
if time_cond and chan_cond: # always a list
one_mrk = {'name': m.find('bookmark_name').text, 'start': bookmark_start, 'end': bookmark_end, 'chan': bookmark_chan.split(', ')}
mrks.append(one_mrk) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['m']]
return mrks |
def send_one_ping(self, current_socket):
    """
    Send one ICMP ECHO_REQUEST on `current_socket`.

    Returns the send timestamp (from `default_timer`) on success, or
    None on a socket error (in which case the socket is closed).
    """
    # Header is type (8), code (8), checksum (16), id (16), sequence (16)
    checksum = 0

    # Make a dummy header with a 0 checksum.
    header = struct.pack(
        "!BBHHH", ICMP_ECHO, 0, checksum, self.own_id, self.seq_number
    )

    # Payload: incrementing byte pattern starting at 0x42, masked to
    # keep each value in the 0-255 range.
    startVal = 0x42
    data = bytes(i & 0xff for i in range(startVal, startVal + self.packet_size))

    # Calculate the checksum on the data and the dummy header.
    checksum = calculate_checksum(header + data)  # Checksum is in network order

    # Now that we have the right checksum, we put that in. It's just easier
    # to make up a new header than to stuff it into the dummy.
    header = struct.pack(
        "!BBHHH", ICMP_ECHO, 0, checksum, self.own_id, self.seq_number
    )

    packet = header + data

    send_time = default_timer()

    try:
        current_socket.sendto(packet, (self.destination, 1))  # Port number is irrelevant for ICMP
    except socket.error as e:
        # BUGFIX: e.args may hold a single element (a bare message), so
        # indexing e.args[1] could raise IndexError and mask the real
        # error; format the exception itself instead.
        print("General failure (%s)" % e)
        current_socket.close()
        return

    return send_time
constant[
Send one ICMP ECHO_REQUEST.
]
variable[checksum] assign[=] constant[0]
variable[header] assign[=] call[name[struct].pack, parameter[constant[!BBHHH], name[ICMP_ECHO], constant[0], name[checksum], name[self].own_id, name[self].seq_number]]
variable[padBytes] assign[=] list[[]]
variable[startVal] assign[=] constant[66]
for taget[name[i]] in starred[call[name[range], parameter[name[startVal], binary_operation[name[startVal] + name[self].packet_size]]]] begin[:]
<ast.AugAssign object at 0x7da204622590>
variable[data] assign[=] call[name[bytes], parameter[name[padBytes]]]
variable[checksum] assign[=] call[name[calculate_checksum], parameter[binary_operation[name[header] + name[data]]]]
variable[header] assign[=] call[name[struct].pack, parameter[constant[!BBHHH], name[ICMP_ECHO], constant[0], name[checksum], name[self].own_id, name[self].seq_number]]
variable[packet] assign[=] binary_operation[name[header] + name[data]]
variable[send_time] assign[=] call[name[default_timer], parameter[]]
<ast.Try object at 0x7da204621270>
return[name[send_time]] | keyword[def] identifier[send_one_ping] ( identifier[self] , identifier[current_socket] ):
literal[string]
identifier[checksum] = literal[int]
identifier[header] = identifier[struct] . identifier[pack] (
literal[string] , identifier[ICMP_ECHO] , literal[int] , identifier[checksum] , identifier[self] . identifier[own_id] , identifier[self] . identifier[seq_number]
)
identifier[padBytes] =[]
identifier[startVal] = literal[int]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[startVal] , identifier[startVal] +( identifier[self] . identifier[packet_size] )):
identifier[padBytes] +=[( identifier[i] & literal[int] )]
identifier[data] = identifier[bytes] ( identifier[padBytes] )
identifier[checksum] = identifier[calculate_checksum] ( identifier[header] + identifier[data] )
identifier[header] = identifier[struct] . identifier[pack] (
literal[string] , identifier[ICMP_ECHO] , literal[int] , identifier[checksum] , identifier[self] . identifier[own_id] , identifier[self] . identifier[seq_number]
)
identifier[packet] = identifier[header] + identifier[data]
identifier[send_time] = identifier[default_timer] ()
keyword[try] :
identifier[current_socket] . identifier[sendto] ( identifier[packet] ,( identifier[self] . identifier[destination] , literal[int] ))
keyword[except] identifier[socket] . identifier[error] keyword[as] identifier[e] :
identifier[print] ( literal[string] %( identifier[e] . identifier[args] [ literal[int] ]))
identifier[current_socket] . identifier[close] ()
keyword[return]
keyword[return] identifier[send_time] | def send_one_ping(self, current_socket):
"""
Send one ICMP ECHO_REQUEST.
"""
# Header is type (8), code (8), checksum (16), id (16), sequence (16)
checksum = 0
# Make a dummy header with a 0 checksum.
header = struct.pack('!BBHHH', ICMP_ECHO, 0, checksum, self.own_id, self.seq_number)
padBytes = []
startVal = 66
for i in range(startVal, startVal + self.packet_size):
padBytes += [i & 255] # Keep chars in the 0-255 range # depends on [control=['for'], data=['i']]
data = bytes(padBytes)
# Calculate the checksum on the data and the dummy header.
checksum = calculate_checksum(header + data) # Checksum is in network order
# Now that we have the right checksum, we put that in. It's just easier
# to make up a new header than to stuff it into the dummy.
header = struct.pack('!BBHHH', ICMP_ECHO, 0, checksum, self.own_id, self.seq_number)
packet = header + data
send_time = default_timer()
try:
current_socket.sendto(packet, (self.destination, 1)) # Port number is irrelevant for ICMP # depends on [control=['try'], data=[]]
except socket.error as e:
print('General failure (%s)' % e.args[1])
current_socket.close()
return # depends on [control=['except'], data=['e']]
return send_time |
def notify(self, n: int = 1) -> None:
    """Wake ``n`` waiters."""
    to_wake = []  # Waiters we plan to run right now.
    while self._waiters and n:
        candidate = self._waiters.popleft()
        if candidate.done():  # Might have timed out.
            continue
        n -= 1
        to_wake.append(candidate)
    for fut in to_wake:
        future_set_result_unless_cancelled(fut, True)
constant[Wake ``n`` waiters.]
variable[waiters] assign[=] list[[]]
while <ast.BoolOp object at 0x7da1b1fddc00> begin[:]
variable[waiter] assign[=] call[name[self]._waiters.popleft, parameter[]]
if <ast.UnaryOp object at 0x7da1b1fdf1f0> begin[:]
<ast.AugAssign object at 0x7da1b1fdf1c0>
call[name[waiters].append, parameter[name[waiter]]]
for taget[name[waiter]] in starred[name[waiters]] begin[:]
call[name[future_set_result_unless_cancelled], parameter[name[waiter], constant[True]]] | keyword[def] identifier[notify] ( identifier[self] , identifier[n] : identifier[int] = literal[int] )-> keyword[None] :
literal[string]
identifier[waiters] =[]
keyword[while] identifier[n] keyword[and] identifier[self] . identifier[_waiters] :
identifier[waiter] = identifier[self] . identifier[_waiters] . identifier[popleft] ()
keyword[if] keyword[not] identifier[waiter] . identifier[done] ():
identifier[n] -= literal[int]
identifier[waiters] . identifier[append] ( identifier[waiter] )
keyword[for] identifier[waiter] keyword[in] identifier[waiters] :
identifier[future_set_result_unless_cancelled] ( identifier[waiter] , keyword[True] ) | def notify(self, n: int=1) -> None:
"""Wake ``n`` waiters."""
waiters = [] # Waiters we plan to run right now.
while n and self._waiters:
waiter = self._waiters.popleft()
if not waiter.done(): # Might have timed out.
n -= 1
waiters.append(waiter) # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]]
for waiter in waiters:
future_set_result_unless_cancelled(waiter, True) # depends on [control=['for'], data=['waiter']] |
def send_order(
        self,
        code=None,
        amount=None,
        time=None,
        towards=None,
        price=None,
        money=None,
        order_model=None,
        amount_model=None,
        *args,
        **kwargs
):
    """Create and queue a QA_Order for this account.

    ATTENTION CHANGELOG 1.0.28
    Account.send_order distinguishes two order-sizing modes:
    - AMOUNT_MODEL.BY_PRICE ==> AMOUNT_MODEL.BY_MONEY  # size the order by money
    - AMOUNT_MODEL.BY_AMOUNT                           # size the order by quantity
    When sizing by money, pass the `money` argument.
    When sizing by quantity, pass the `amount` argument.
    python code:
    Account=QA.QA_Account()
    Order_bymoney=Account.send_order(code='000001',
                                    price=11,
                                    money=0.3*Account.cash_available,
                                    time='2018-05-09',
                                    towards=QA.ORDER_DIRECTION.BUY,
                                    order_model=QA.ORDER_MODEL.MARKET,
                                    amount_model=QA.AMOUNT_MODEL.BY_MONEY
                                    )
    Order_byamount=Account.send_order(code='000001',
                                    price=11,
                                    amount=100,
                                    time='2018-05-09',
                                    towards=QA.ORDER_DIRECTION.BUY,
                                    order_model=QA.ORDER_MODEL.MARKET,
                                    amount_model=QA.AMOUNT_MODEL.BY_AMOUNT
                                    )
    :param code: security code
    :param amount: quantity to trade (number of shares/contracts)
    :param time: Timestamp-like order time
    :param towards: int, towards>0 buy, towards<0 sell
    :param price: price of the security being bought/sold
    :param money: total money to commit to the trade
    :param order_model: a QA.ORDER_MODEL value
    :param amount_model: a QA.AMOUNT_MODEL value
    :return: QA_Order | False
    @2018/12/23
    send_order returns QA's standard order object; to integrate other
    interfaces, only QA_Order needs adapting
    @2018/12/27
    detect whether the account is a futures account (i.e. two-way trading allowed)
    @2018/12/30 margin-account changes
    1. money frozen by the margin account
    2. margin-account settlement
    3. margin-account checks
    """
    wrong_reason = None
    assert code is not None and time is not None and towards is not None and order_model is not None and amount_model is not None

    # 🛠todo move to a Utils class: time conversion
    # date string like 2011-10-11, length 10
    date = str(time)[0:10] if len(str(time)) == 19 else str(time)
    # time string like 2011-10-11 09:02:00, length 19; bare dates default
    # to the 09:31:00 minute bar
    time = str(time) if len(str(time)) == 19 else '{} 09:31:00'.format(
        str(time)[0:10]
    )

    # 🛠todo move to a Utils class: amount_to_money (quantity -> money)
    # BY_MONEY :: amount is money (e.g. 10000 yuan), so a price is needed
    #             to derive the actual number of shares
    # BY_AMOUNT :: amount is a quantity (e.g. 10000 shares)
    if self.allow_margin:
        # margin account: derive quantity from money via contract unit and
        # margin (frozen) ratio, rounded down to a multiple of 100
        amount = amount if amount_model is AMOUNT_MODEL.BY_AMOUNT else int(
            money / (
                self.market_preset.get_unit(code) *
                self.market_preset.get_frozen(code) * price *
                (1 + self.commission_coeff)
            ) / 100
        ) * 100
    else:
        amount = amount if amount_model is AMOUNT_MODEL.BY_AMOUNT else int(
            money / (price * (1 + self.commission_coeff)) / 100
        ) * 100

    # 🛠todo move to a Utils class: money_to_amount (money -> quantity)
    if self.allow_margin:
        money = amount * price * self.market_preset.get_unit(code)*self.market_preset.get_frozen(code) * \
            (1+self.commission_coeff) if amount_model is AMOUNT_MODEL.BY_AMOUNT else money
    else:
        money = amount * price * \
            (1+self.commission_coeff) if amount_model is AMOUNT_MODEL.BY_AMOUNT else money

    # flag: whether quantity, price and direction are valid for this account
    flag = False

    assert (int(towards) != 0)
    if int(towards) in [1, 2, 3]:
        # buy side (covers buy, buy-open and buy-close)
        if self.cash_available >= money:
            if self.market_type == MARKET_TYPE.STOCK_CN:  # stocks: buys have a minimum lot of 100 shares
                amount = int(amount / 100) * 100
                self.cash_available -= money
                flag = True

            if self.running_environment == RUNNING_ENVIRONMENT.TZERO:
                if abs(self.buy_available.get(code, 0)) >= amount:
                    flag = True
                    self.cash_available -= money
                    self.buy_available[code] -= amount
                else:
                    flag = False
                    wrong_reason = 'T0交易买入超出限额'

            if self.market_type == MARKET_TYPE.FUTURE_CN:
                # negative positions may exist -- when short selling is allowed
                if towards == 3:  # buy-close: buying back to close a short
                    _hold = self.sell_available.get(code, 0)
                    # with a negative (short) position:
                    # amount is the order size, e.g. account is -3 lots, closing 1 lot
                    #left_amount = amount+_hold if _hold < 0 else amount
                    _money = abs(
                        float(amount * price * (1 + self.commission_coeff))
                    )

                    print(_hold)
                    if self.cash_available >= _money:
                        if _hold < 0:
                            self.cash_available -= _money

                            flag = True
                        else:
                            wrong_reason = '空单仓位不足'
                    else:
                        wrong_reason = '平多剩余资金不够'
                if towards == 2:
                    # buy-open: freeze the required money
                    self.cash_available -= money
                    flag = True
        else:
            wrong_reason = 'QAACCOUNT: 可用资金不足 cash_available {} code {} time {} amount {} towards {}'.format(
                self.cash_available,
                code,
                time,
                amount,
                towards
            )

    elif int(towards) in [-1, -2, -3]:
        # sell side (covers sell, sell-open if allow_sellopen, and sell-close)
        # print(self.sell_available[code])
        _hold = self.sell_available.get(code, 0)  # _hold is the current sellable position
        # if hold >= amount > 0:
        # the position covers the sell quantity
        if _hold >= amount:
            self.sell_available[code] -= amount
            # towards = ORDER_DIRECTION.SELL
            flag = True
        # position smaller than the sell quantity
        else:
            # if sell-open is allowed: the (positive) position is consumed
            # first, then the negative remainder freezes money computed as before
            if self.allow_sellopen and towards == -2:

                if self.cash_available >= money:  # covered shorts only: short value must not exceed cash (no naked shorting)
                    # self.cash_available -= money
                    flag = True
                else:
                    print('sellavailable', _hold)
                    print('amount', amount)
                    print('aqureMoney', money)
                    print('cash', self.cash_available)
                    wrong_reason = "卖空资金不足/不允许裸卖空"
            else:
                wrong_reason = "卖出仓位不足"

    if flag and (amount > 0):
        _order = QA_Order(
            user_cookie=self.user_cookie,
            strategy=self.strategy_name,
            frequence=self.frequence,
            account_cookie=self.account_cookie,
            code=code,
            market_type=self.market_type,
            date=date,
            datetime=time,
            sending_time=time,
            callback=self.receive_deal,
            amount=amount,
            price=price,
            order_model=order_model,
            towards=towards,
            money=money,
            broker=self.broker,
            amount_model=amount_model,
            commission_coeff=self.commission_coeff,
            tax_coeff=self.tax_coeff,
            *args,
            **kwargs
        )  # init
        # the order's status history is kept inside the QA_Order object's queue
        self.datetime = time
        self.orders.insert_order(_order)
        return _order
    else:
        print(
            'ERROR : CODE {} TIME {} AMOUNT {} TOWARDS {}'.format(
                code,
                time,
                amount,
                towards
            )
        )
        print(wrong_reason)
        return False
constant[
ATTENTION CHANGELOG 1.0.28
修改了Account的send_order方法, 区分按数量下单和按金额下单两种方式
- AMOUNT_MODEL.BY_PRICE ==> AMOUNT_MODEL.BY_MONEY # 按金额下单
- AMOUNT_MODEL.BY_AMOUNT # 按数量下单
在按金额下单的时候,应给予 money参数
在按数量下单的时候,应给予 amount参数
python code:
Account=QA.QA_Account()
Order_bymoney=Account.send_order(code='000001',
price=11,
money=0.3*Account.cash_available,
time='2018-05-09',
towards=QA.ORDER_DIRECTION.BUY,
order_model=QA.ORDER_MODEL.MARKET,
amount_model=QA.AMOUNT_MODEL.BY_MONEY
)
Order_byamount=Account.send_order(code='000001',
price=11,
amount=100,
time='2018-05-09',
towards=QA.ORDER_DIRECTION.BUY,
order_model=QA.ORDER_MODEL.MARKET,
amount_model=QA.AMOUNT_MODEL.BY_AMOUNT
)
:param code: 证券代码
:param amount: 买卖 数量多数股
:param time: Timestamp 对象 下单时间
:param towards: int , towards>0 买入 towards<0 卖出
:param price: 买入,卖出 标的证券的价格
:param money: 买卖 价格
:param order_model: 类型 QA.ORDER_MODE
:param amount_model:类型 QA.AMOUNT_MODEL
:return: QA_Order | False
@2018/12/23
send_order 是QA的标准返回, 如需对接其他接口, 只需要对于QA_Order做适配即可
@2018/12/27
在判断账户为期货账户(及 允许双向交易)
@2018/12/30 保证金账户的修改
1. 保证金账户冻结的金额
2. 保证金账户的结算
3. 保证金账户的判断
]
variable[wrong_reason] assign[=] constant[None]
assert[<ast.BoolOp object at 0x7da1b1faba30>]
variable[date] assign[=] <ast.IfExp object at 0x7da1b1fab6a0>
variable[time] assign[=] <ast.IfExp object at 0x7da1b1fab2e0>
if name[self].allow_margin begin[:]
variable[amount] assign[=] <ast.IfExp object at 0x7da1b1faae00>
if name[self].allow_margin begin[:]
variable[money] assign[=] <ast.IfExp object at 0x7da1b1faa2f0>
variable[flag] assign[=] constant[False]
assert[compare[call[name[int], parameter[name[towards]]] not_equal[!=] constant[0]]]
if compare[call[name[int], parameter[name[towards]]] in list[[<ast.Constant object at 0x7da1b1fa87f0>, <ast.Constant object at 0x7da1b1fa8820>, <ast.Constant object at 0x7da1b1fa8850>]]] begin[:]
if compare[name[self].cash_available greater_or_equal[>=] name[money]] begin[:]
if compare[name[self].market_type equal[==] name[MARKET_TYPE].STOCK_CN] begin[:]
variable[amount] assign[=] binary_operation[call[name[int], parameter[binary_operation[name[amount] / constant[100]]]] * constant[100]]
<ast.AugAssign object at 0x7da1b1fa8c40>
variable[flag] assign[=] constant[True]
if compare[name[self].running_environment equal[==] name[RUNNING_ENVIRONMENT].TZERO] begin[:]
if compare[call[name[abs], parameter[call[name[self].buy_available.get, parameter[name[code], constant[0]]]]] greater_or_equal[>=] name[amount]] begin[:]
variable[flag] assign[=] constant[True]
<ast.AugAssign object at 0x7da1b1fa9150>
<ast.AugAssign object at 0x7da1b1fa9ff0>
if compare[name[self].market_type equal[==] name[MARKET_TYPE].FUTURE_CN] begin[:]
if compare[name[towards] equal[==] constant[3]] begin[:]
variable[_hold] assign[=] call[name[self].sell_available.get, parameter[name[code], constant[0]]]
variable[_money] assign[=] call[name[abs], parameter[call[name[float], parameter[binary_operation[binary_operation[name[amount] * name[price]] * binary_operation[constant[1] + name[self].commission_coeff]]]]]]
call[name[print], parameter[name[_hold]]]
if compare[name[self].cash_available greater_or_equal[>=] name[_money]] begin[:]
if compare[name[_hold] less[<] constant[0]] begin[:]
<ast.AugAssign object at 0x7da1b1fa9510>
variable[flag] assign[=] constant[True]
if compare[name[towards] equal[==] constant[2]] begin[:]
<ast.AugAssign object at 0x7da1b20c9ab0>
variable[flag] assign[=] constant[True]
if <ast.BoolOp object at 0x7da1b20cb310> begin[:]
variable[_order] assign[=] call[name[QA_Order], parameter[<ast.Starred object at 0x7da1b20cb250>]]
name[self].datetime assign[=] name[time]
call[name[self].orders.insert_order, parameter[name[_order]]]
return[name[_order]] | keyword[def] identifier[send_order] (
identifier[self] ,
identifier[code] = keyword[None] ,
identifier[amount] = keyword[None] ,
identifier[time] = keyword[None] ,
identifier[towards] = keyword[None] ,
identifier[price] = keyword[None] ,
identifier[money] = keyword[None] ,
identifier[order_model] = keyword[None] ,
identifier[amount_model] = keyword[None] ,
* identifier[args] ,
** identifier[kwargs]
):
literal[string]
identifier[wrong_reason] = keyword[None]
keyword[assert] identifier[code] keyword[is] keyword[not] keyword[None] keyword[and] identifier[time] keyword[is] keyword[not] keyword[None] keyword[and] identifier[towards] keyword[is] keyword[not] keyword[None] keyword[and] identifier[order_model] keyword[is] keyword[not] keyword[None] keyword[and] identifier[amount_model] keyword[is] keyword[not] keyword[None]
identifier[date] = identifier[str] ( identifier[time] )[ literal[int] : literal[int] ] keyword[if] identifier[len] ( identifier[str] ( identifier[time] ))== literal[int] keyword[else] identifier[str] ( identifier[time] )
identifier[time] = identifier[str] ( identifier[time] ) keyword[if] identifier[len] ( identifier[str] ( identifier[time] ))== literal[int] keyword[else] literal[string] . identifier[format] (
identifier[str] ( identifier[time] )[ literal[int] : literal[int] ]
)
keyword[if] identifier[self] . identifier[allow_margin] :
identifier[amount] = identifier[amount] keyword[if] identifier[amount_model] keyword[is] identifier[AMOUNT_MODEL] . identifier[BY_AMOUNT] keyword[else] identifier[int] (
identifier[money] /(
identifier[self] . identifier[market_preset] . identifier[get_unit] ( identifier[code] )*
identifier[self] . identifier[market_preset] . identifier[get_frozen] ( identifier[code] )* identifier[price] *
( literal[int] + identifier[self] . identifier[commission_coeff] )
)/ literal[int]
)* literal[int]
keyword[else] :
identifier[amount] = identifier[amount] keyword[if] identifier[amount_model] keyword[is] identifier[AMOUNT_MODEL] . identifier[BY_AMOUNT] keyword[else] identifier[int] (
identifier[money] /( identifier[price] *( literal[int] + identifier[self] . identifier[commission_coeff] ))/ literal[int]
)* literal[int]
keyword[if] identifier[self] . identifier[allow_margin] :
identifier[money] = identifier[amount] * identifier[price] * identifier[self] . identifier[market_preset] . identifier[get_unit] ( identifier[code] )* identifier[self] . identifier[market_preset] . identifier[get_frozen] ( identifier[code] )*( literal[int] + identifier[self] . identifier[commission_coeff] ) keyword[if] identifier[amount_model] keyword[is] identifier[AMOUNT_MODEL] . identifier[BY_AMOUNT] keyword[else] identifier[money]
keyword[else] :
identifier[money] = identifier[amount] * identifier[price] *( literal[int] + identifier[self] . identifier[commission_coeff] ) keyword[if] identifier[amount_model] keyword[is] identifier[AMOUNT_MODEL] . identifier[BY_AMOUNT] keyword[else] identifier[money]
identifier[flag] = keyword[False]
keyword[assert] ( identifier[int] ( identifier[towards] )!= literal[int] )
keyword[if] identifier[int] ( identifier[towards] ) keyword[in] [ literal[int] , literal[int] , literal[int] ]:
keyword[if] identifier[self] . identifier[cash_available] >= identifier[money] :
keyword[if] identifier[self] . identifier[market_type] == identifier[MARKET_TYPE] . identifier[STOCK_CN] :
identifier[amount] = identifier[int] ( identifier[amount] / literal[int] )* literal[int]
identifier[self] . identifier[cash_available] -= identifier[money]
identifier[flag] = keyword[True]
keyword[if] identifier[self] . identifier[running_environment] == identifier[RUNNING_ENVIRONMENT] . identifier[TZERO] :
keyword[if] identifier[abs] ( identifier[self] . identifier[buy_available] . identifier[get] ( identifier[code] , literal[int] ))>= identifier[amount] :
identifier[flag] = keyword[True]
identifier[self] . identifier[cash_available] -= identifier[money]
identifier[self] . identifier[buy_available] [ identifier[code] ]-= identifier[amount]
keyword[else] :
identifier[flag] = keyword[False]
identifier[wrong_reason] = literal[string]
keyword[if] identifier[self] . identifier[market_type] == identifier[MARKET_TYPE] . identifier[FUTURE_CN] :
keyword[if] identifier[towards] == literal[int] :
identifier[_hold] = identifier[self] . identifier[sell_available] . identifier[get] ( identifier[code] , literal[int] )
identifier[_money] = identifier[abs] (
identifier[float] ( identifier[amount] * identifier[price] *( literal[int] + identifier[self] . identifier[commission_coeff] ))
)
identifier[print] ( identifier[_hold] )
keyword[if] identifier[self] . identifier[cash_available] >= identifier[_money] :
keyword[if] identifier[_hold] < literal[int] :
identifier[self] . identifier[cash_available] -= identifier[_money]
identifier[flag] = keyword[True]
keyword[else] :
identifier[wrong_reason] = literal[string]
keyword[else] :
identifier[wrong_reason] = literal[string]
keyword[if] identifier[towards] == literal[int] :
identifier[self] . identifier[cash_available] -= identifier[money]
identifier[flag] = keyword[True]
keyword[else] :
identifier[wrong_reason] = literal[string] . identifier[format] (
identifier[self] . identifier[cash_available] ,
identifier[code] ,
identifier[time] ,
identifier[amount] ,
identifier[towards]
)
keyword[elif] identifier[int] ( identifier[towards] ) keyword[in] [- literal[int] ,- literal[int] ,- literal[int] ]:
identifier[_hold] = identifier[self] . identifier[sell_available] . identifier[get] ( identifier[code] , literal[int] )
keyword[if] identifier[_hold] >= identifier[amount] :
identifier[self] . identifier[sell_available] [ identifier[code] ]-= identifier[amount]
identifier[flag] = keyword[True]
keyword[else] :
keyword[if] identifier[self] . identifier[allow_sellopen] keyword[and] identifier[towards] ==- literal[int] :
keyword[if] identifier[self] . identifier[cash_available] >= identifier[money] :
identifier[flag] = keyword[True]
keyword[else] :
identifier[print] ( literal[string] , identifier[_hold] )
identifier[print] ( literal[string] , identifier[amount] )
identifier[print] ( literal[string] , identifier[money] )
identifier[print] ( literal[string] , identifier[self] . identifier[cash_available] )
identifier[wrong_reason] = literal[string]
keyword[else] :
identifier[wrong_reason] = literal[string]
keyword[if] identifier[flag] keyword[and] ( identifier[amount] > literal[int] ):
identifier[_order] = identifier[QA_Order] (
identifier[user_cookie] = identifier[self] . identifier[user_cookie] ,
identifier[strategy] = identifier[self] . identifier[strategy_name] ,
identifier[frequence] = identifier[self] . identifier[frequence] ,
identifier[account_cookie] = identifier[self] . identifier[account_cookie] ,
identifier[code] = identifier[code] ,
identifier[market_type] = identifier[self] . identifier[market_type] ,
identifier[date] = identifier[date] ,
identifier[datetime] = identifier[time] ,
identifier[sending_time] = identifier[time] ,
identifier[callback] = identifier[self] . identifier[receive_deal] ,
identifier[amount] = identifier[amount] ,
identifier[price] = identifier[price] ,
identifier[order_model] = identifier[order_model] ,
identifier[towards] = identifier[towards] ,
identifier[money] = identifier[money] ,
identifier[broker] = identifier[self] . identifier[broker] ,
identifier[amount_model] = identifier[amount_model] ,
identifier[commission_coeff] = identifier[self] . identifier[commission_coeff] ,
identifier[tax_coeff] = identifier[self] . identifier[tax_coeff] ,
* identifier[args] ,
** identifier[kwargs]
)
identifier[self] . identifier[datetime] = identifier[time]
identifier[self] . identifier[orders] . identifier[insert_order] ( identifier[_order] )
keyword[return] identifier[_order]
keyword[else] :
identifier[print] (
literal[string] . identifier[format] (
identifier[code] ,
identifier[time] ,
identifier[amount] ,
identifier[towards]
)
)
identifier[print] ( identifier[wrong_reason] )
keyword[return] keyword[False] | def send_order(self, code=None, amount=None, time=None, towards=None, price=None, money=None, order_model=None, amount_model=None, *args, **kwargs):
"""
ATTENTION CHANGELOG 1.0.28
修改了Account的send_order方法, 区分按数量下单和按金额下单两种方式
- AMOUNT_MODEL.BY_PRICE ==> AMOUNT_MODEL.BY_MONEY # 按金额下单
- AMOUNT_MODEL.BY_AMOUNT # 按数量下单
在按金额下单的时候,应给予 money参数
在按数量下单的时候,应给予 amount参数
python code:
Account=QA.QA_Account()
Order_bymoney=Account.send_order(code='000001',
price=11,
money=0.3*Account.cash_available,
time='2018-05-09',
towards=QA.ORDER_DIRECTION.BUY,
order_model=QA.ORDER_MODEL.MARKET,
amount_model=QA.AMOUNT_MODEL.BY_MONEY
)
Order_byamount=Account.send_order(code='000001',
price=11,
amount=100,
time='2018-05-09',
towards=QA.ORDER_DIRECTION.BUY,
order_model=QA.ORDER_MODEL.MARKET,
amount_model=QA.AMOUNT_MODEL.BY_AMOUNT
)
:param code: 证券代码
:param amount: 买卖 数量多数股
:param time: Timestamp 对象 下单时间
:param towards: int , towards>0 买入 towards<0 卖出
:param price: 买入,卖出 标的证券的价格
:param money: 买卖 价格
:param order_model: 类型 QA.ORDER_MODE
:param amount_model:类型 QA.AMOUNT_MODEL
:return: QA_Order | False
@2018/12/23
send_order 是QA的标准返回, 如需对接其他接口, 只需要对于QA_Order做适配即可
@2018/12/27
在判断账户为期货账户(及 允许双向交易)
@2018/12/30 保证金账户的修改
1. 保证金账户冻结的金额
2. 保证金账户的结算
3. 保证金账户的判断
"""
wrong_reason = None
assert code is not None and time is not None and (towards is not None) and (order_model is not None) and (amount_model is not None)
# 🛠todo 移到Utils类中, 时间转换
# date 字符串 2011-10-11 长度10
date = str(time)[0:10] if len(str(time)) == 19 else str(time)
# time 字符串 20011-10-11 09:02:00 长度 19
time = str(time) if len(str(time)) == 19 else '{} 09:31:00'.format(str(time)[0:10])
# 🛠todo 移到Utils类中, amount_to_money 成交量转金额
# BY_MONEY :: amount --钱 如10000元 因此 by_money里面 需要指定价格,来计算实际的股票数
# by_amount :: amount --股数 如10000股
if self.allow_margin:
amount = amount if amount_model is AMOUNT_MODEL.BY_AMOUNT else int(money / (self.market_preset.get_unit(code) * self.market_preset.get_frozen(code) * price * (1 + self.commission_coeff)) / 100) * 100 # depends on [control=['if'], data=[]]
else:
amount = amount if amount_model is AMOUNT_MODEL.BY_AMOUNT else int(money / (price * (1 + self.commission_coeff)) / 100) * 100
# 🛠todo 移到Utils类中, money_to_amount 金额转成交量
if self.allow_margin:
money = amount * price * self.market_preset.get_unit(code) * self.market_preset.get_frozen(code) * (1 + self.commission_coeff) if amount_model is AMOUNT_MODEL.BY_AMOUNT else money # depends on [control=['if'], data=[]]
else:
money = amount * price * (1 + self.commission_coeff) if amount_model is AMOUNT_MODEL.BY_AMOUNT else money
# flag 判断买卖 数量和价格以及买卖方向是否正确
flag = False
assert int(towards) != 0
if int(towards) in [1, 2, 3]:
# 是买入的情况(包括买入.买开.买平)
if self.cash_available >= money:
if self.market_type == MARKET_TYPE.STOCK_CN: # 如果是股票 买入的时候有100股的最小限制
amount = int(amount / 100) * 100
self.cash_available -= money
flag = True # depends on [control=['if'], data=[]]
if self.running_environment == RUNNING_ENVIRONMENT.TZERO:
if abs(self.buy_available.get(code, 0)) >= amount:
flag = True
self.cash_available -= money
self.buy_available[code] -= amount # depends on [control=['if'], data=['amount']]
else:
flag = False
wrong_reason = 'T0交易买入超出限额' # depends on [control=['if'], data=[]]
if self.market_type == MARKET_TYPE.FUTURE_CN:
# 如果有负持仓-- 允许卖空的时候
if towards == 3: # 多平
_hold = self.sell_available.get(code, 0)
# 假设有负持仓:
# amount为下单数量 如 账户原先-3手 现在平1手
#left_amount = amount+_hold if _hold < 0 else amount
_money = abs(float(amount * price * (1 + self.commission_coeff)))
print(_hold)
if self.cash_available >= _money:
if _hold < 0:
self.cash_available -= _money
flag = True # depends on [control=['if'], data=[]]
else:
wrong_reason = '空单仓位不足' # depends on [control=['if'], data=['_money']]
else:
wrong_reason = '平多剩余资金不够' # depends on [control=['if'], data=[]]
if towards == 2:
self.cash_available -= money
flag = True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['money']]
else:
wrong_reason = 'QAACCOUNT: 可用资金不足 cash_available {} code {} time {} amount {} towards {}'.format(self.cash_available, code, time, amount, towards) # depends on [control=['if'], data=[]]
elif int(towards) in [-1, -2, -3]:
# 是卖出的情况(包括卖出,卖出开仓allow_sellopen如果允许. 卖出平仓)
# print(self.sell_available[code])
_hold = self.sell_available.get(code, 0) # _hold 是你的持仓
# 如果你的hold> amount>0
# 持仓数量>卖出数量
if _hold >= amount:
self.sell_available[code] -= amount
# towards = ORDER_DIRECTION.SELL
flag = True # depends on [control=['if'], data=['amount']]
# 如果持仓数量<卖出数量
# 如果是允许卖空开仓 实际计算时 先减去持仓(正持仓) 再计算 负持仓 就按原先的占用金额计算
elif self.allow_sellopen and towards == -2:
if self.cash_available >= money: # 卖空的市值小于现金(有担保的卖空), 不允许裸卖空
# self.cash_available -= money
flag = True # depends on [control=['if'], data=[]]
else:
print('sellavailable', _hold)
print('amount', amount)
print('aqureMoney', money)
print('cash', self.cash_available)
wrong_reason = '卖空资金不足/不允许裸卖空' # depends on [control=['if'], data=[]]
else:
wrong_reason = '卖出仓位不足' # depends on [control=['if'], data=[]]
if flag and amount > 0:
_order = QA_Order(*args, user_cookie=self.user_cookie, strategy=self.strategy_name, frequence=self.frequence, account_cookie=self.account_cookie, code=code, market_type=self.market_type, date=date, datetime=time, sending_time=time, callback=self.receive_deal, amount=amount, price=price, order_model=order_model, towards=towards, money=money, broker=self.broker, amount_model=amount_model, commission_coeff=self.commission_coeff, tax_coeff=self.tax_coeff, **kwargs) # init
# 历史委托order状态存储, 保存到 QA_Order 对象中的队列中
self.datetime = time
self.orders.insert_order(_order)
return _order # depends on [control=['if'], data=[]]
else:
print('ERROR : CODE {} TIME {} AMOUNT {} TOWARDS {}'.format(code, time, amount, towards))
print(wrong_reason)
return False |
def serialize_single_xso(x):
"""
Serialize a single XSO `x` to a string. This is potentially very slow and
should only be used for debugging purposes. It is generally more efficient
to use a :class:`XMPPXMLGenerator` to stream elements.
"""
buf = io.BytesIO()
gen = XMPPXMLGenerator(buf,
short_empty_elements=True,
sorted_attributes=True)
x.unparse_to_sax(gen)
return buf.getvalue().decode("utf8") | def function[serialize_single_xso, parameter[x]]:
constant[
Serialize a single XSO `x` to a string. This is potentially very slow and
should only be used for debugging purposes. It is generally more efficient
to use a :class:`XMPPXMLGenerator` to stream elements.
]
variable[buf] assign[=] call[name[io].BytesIO, parameter[]]
variable[gen] assign[=] call[name[XMPPXMLGenerator], parameter[name[buf]]]
call[name[x].unparse_to_sax, parameter[name[gen]]]
return[call[call[name[buf].getvalue, parameter[]].decode, parameter[constant[utf8]]]] | keyword[def] identifier[serialize_single_xso] ( identifier[x] ):
literal[string]
identifier[buf] = identifier[io] . identifier[BytesIO] ()
identifier[gen] = identifier[XMPPXMLGenerator] ( identifier[buf] ,
identifier[short_empty_elements] = keyword[True] ,
identifier[sorted_attributes] = keyword[True] )
identifier[x] . identifier[unparse_to_sax] ( identifier[gen] )
keyword[return] identifier[buf] . identifier[getvalue] (). identifier[decode] ( literal[string] ) | def serialize_single_xso(x):
"""
Serialize a single XSO `x` to a string. This is potentially very slow and
should only be used for debugging purposes. It is generally more efficient
to use a :class:`XMPPXMLGenerator` to stream elements.
"""
buf = io.BytesIO()
gen = XMPPXMLGenerator(buf, short_empty_elements=True, sorted_attributes=True)
x.unparse_to_sax(gen)
return buf.getvalue().decode('utf8') |
def getSegmentInfo(self, collectActiveData = False):
"""Returns information about the distribution of segments, synapses and
permanence values in the current TP. If requested, also returns information
regarding the number of currently active segments and synapses.
The method returns the following tuple:
(
nSegments, # total number of segments
nSynapses, # total number of synapses
nActiveSegs, # total no. of active segments
nActiveSynapses, # total no. of active synapses
distSegSizes, # a dict where d[n] = number of segments with n synapses
distNSegsPerCell, # a dict where d[n] = number of cells with n segments
distPermValues, # a dict where d[p] = number of synapses with perm = p/10
distAges, # a list of tuples (ageRange, numSegments)
)
nActiveSegs and nActiveSynapses are 0 if collectActiveData is False
"""
nSegments, nSynapses = 0, 0
nActiveSegs, nActiveSynapses = 0, 0
distSegSizes, distNSegsPerCell = {}, {}
distPermValues = {} # Num synapses with given permanence values
numAgeBuckets = 20
distAges = []
ageBucketSize = int((self.lrnIterationIdx+20) / 20)
for i in range(numAgeBuckets):
distAges.append(['%d-%d' % (i*ageBucketSize, (i+1)*ageBucketSize-1), 0])
for c in xrange(self.numberOfCols):
for i in xrange(self.cellsPerColumn):
if len(self.cells[c][i]) > 0:
nSegmentsThisCell = len(self.cells[c][i])
nSegments += nSegmentsThisCell
if distNSegsPerCell.has_key(nSegmentsThisCell):
distNSegsPerCell[nSegmentsThisCell] += 1
else:
distNSegsPerCell[nSegmentsThisCell] = 1
for seg in self.cells[c][i]:
nSynapsesThisSeg = seg.getNumSynapses()
nSynapses += nSynapsesThisSeg
if distSegSizes.has_key(nSynapsesThisSeg):
distSegSizes[nSynapsesThisSeg] += 1
else:
distSegSizes[nSynapsesThisSeg] = 1
# Accumulate permanence value histogram
for syn in seg.syns:
p = int(syn[2]*10)
if distPermValues.has_key(p):
distPermValues[p] += 1
else:
distPermValues[p] = 1
# Accumulate segment age histogram
age = self.lrnIterationIdx - seg.lastActiveIteration
ageBucket = int(age/ageBucketSize)
distAges[ageBucket][1] += 1
# Get active synapse statistics if requested
if collectActiveData:
if self.isSegmentActive(seg, self.infActiveState['t']):
nActiveSegs += 1
for syn in seg.syns:
if self.activeState['t'][syn[0]][syn[1]] == 1:
nActiveSynapses += 1
return (nSegments, nSynapses, nActiveSegs, nActiveSynapses,
distSegSizes, distNSegsPerCell, distPermValues, distAges) | def function[getSegmentInfo, parameter[self, collectActiveData]]:
constant[Returns information about the distribution of segments, synapses and
permanence values in the current TP. If requested, also returns information
regarding the number of currently active segments and synapses.
The method returns the following tuple:
(
nSegments, # total number of segments
nSynapses, # total number of synapses
nActiveSegs, # total no. of active segments
nActiveSynapses, # total no. of active synapses
distSegSizes, # a dict where d[n] = number of segments with n synapses
distNSegsPerCell, # a dict where d[n] = number of cells with n segments
distPermValues, # a dict where d[p] = number of synapses with perm = p/10
distAges, # a list of tuples (ageRange, numSegments)
)
nActiveSegs and nActiveSynapses are 0 if collectActiveData is False
]
<ast.Tuple object at 0x7da1b08d9000> assign[=] tuple[[<ast.Constant object at 0x7da1b08d9090>, <ast.Constant object at 0x7da1b08d93f0>]]
<ast.Tuple object at 0x7da1b08d8d90> assign[=] tuple[[<ast.Constant object at 0x7da1b08d8fa0>, <ast.Constant object at 0x7da1b08d90c0>]]
<ast.Tuple object at 0x7da1b08d8130> assign[=] tuple[[<ast.Dict object at 0x7da1b08d8700>, <ast.Dict object at 0x7da1b08d8730>]]
variable[distPermValues] assign[=] dictionary[[], []]
variable[numAgeBuckets] assign[=] constant[20]
variable[distAges] assign[=] list[[]]
variable[ageBucketSize] assign[=] call[name[int], parameter[binary_operation[binary_operation[name[self].lrnIterationIdx + constant[20]] / constant[20]]]]
for taget[name[i]] in starred[call[name[range], parameter[name[numAgeBuckets]]]] begin[:]
call[name[distAges].append, parameter[list[[<ast.BinOp object at 0x7da1b08be440>, <ast.Constant object at 0x7da1b08be650>]]]]
for taget[name[c]] in starred[call[name[xrange], parameter[name[self].numberOfCols]]] begin[:]
for taget[name[i]] in starred[call[name[xrange], parameter[name[self].cellsPerColumn]]] begin[:]
if compare[call[name[len], parameter[call[call[name[self].cells][name[c]]][name[i]]]] greater[>] constant[0]] begin[:]
variable[nSegmentsThisCell] assign[=] call[name[len], parameter[call[call[name[self].cells][name[c]]][name[i]]]]
<ast.AugAssign object at 0x7da1b083c8e0>
if call[name[distNSegsPerCell].has_key, parameter[name[nSegmentsThisCell]]] begin[:]
<ast.AugAssign object at 0x7da1b083c7c0>
for taget[name[seg]] in starred[call[call[name[self].cells][name[c]]][name[i]]] begin[:]
variable[nSynapsesThisSeg] assign[=] call[name[seg].getNumSynapses, parameter[]]
<ast.AugAssign object at 0x7da1b083da80>
if call[name[distSegSizes].has_key, parameter[name[nSynapsesThisSeg]]] begin[:]
<ast.AugAssign object at 0x7da1b083c1c0>
for taget[name[syn]] in starred[name[seg].syns] begin[:]
variable[p] assign[=] call[name[int], parameter[binary_operation[call[name[syn]][constant[2]] * constant[10]]]]
if call[name[distPermValues].has_key, parameter[name[p]]] begin[:]
<ast.AugAssign object at 0x7da1b08bc310>
variable[age] assign[=] binary_operation[name[self].lrnIterationIdx - name[seg].lastActiveIteration]
variable[ageBucket] assign[=] call[name[int], parameter[binary_operation[name[age] / name[ageBucketSize]]]]
<ast.AugAssign object at 0x7da1b08bc580>
if name[collectActiveData] begin[:]
if call[name[self].isSegmentActive, parameter[name[seg], call[name[self].infActiveState][constant[t]]]] begin[:]
<ast.AugAssign object at 0x7da1b08bd1e0>
for taget[name[syn]] in starred[name[seg].syns] begin[:]
if compare[call[call[call[name[self].activeState][constant[t]]][call[name[syn]][constant[0]]]][call[name[syn]][constant[1]]] equal[==] constant[1]] begin[:]
<ast.AugAssign object at 0x7da2047eada0>
return[tuple[[<ast.Name object at 0x7da2047e9c60>, <ast.Name object at 0x7da2047e9810>, <ast.Name object at 0x7da2047eb520>, <ast.Name object at 0x7da2047e8e80>, <ast.Name object at 0x7da2047eb700>, <ast.Name object at 0x7da2047ea350>, <ast.Name object at 0x7da2047eb970>, <ast.Name object at 0x7da2047e9ea0>]]] | keyword[def] identifier[getSegmentInfo] ( identifier[self] , identifier[collectActiveData] = keyword[False] ):
literal[string]
identifier[nSegments] , identifier[nSynapses] = literal[int] , literal[int]
identifier[nActiveSegs] , identifier[nActiveSynapses] = literal[int] , literal[int]
identifier[distSegSizes] , identifier[distNSegsPerCell] ={},{}
identifier[distPermValues] ={}
identifier[numAgeBuckets] = literal[int]
identifier[distAges] =[]
identifier[ageBucketSize] = identifier[int] (( identifier[self] . identifier[lrnIterationIdx] + literal[int] )/ literal[int] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[numAgeBuckets] ):
identifier[distAges] . identifier[append] ([ literal[string] %( identifier[i] * identifier[ageBucketSize] ,( identifier[i] + literal[int] )* identifier[ageBucketSize] - literal[int] ), literal[int] ])
keyword[for] identifier[c] keyword[in] identifier[xrange] ( identifier[self] . identifier[numberOfCols] ):
keyword[for] identifier[i] keyword[in] identifier[xrange] ( identifier[self] . identifier[cellsPerColumn] ):
keyword[if] identifier[len] ( identifier[self] . identifier[cells] [ identifier[c] ][ identifier[i] ])> literal[int] :
identifier[nSegmentsThisCell] = identifier[len] ( identifier[self] . identifier[cells] [ identifier[c] ][ identifier[i] ])
identifier[nSegments] += identifier[nSegmentsThisCell]
keyword[if] identifier[distNSegsPerCell] . identifier[has_key] ( identifier[nSegmentsThisCell] ):
identifier[distNSegsPerCell] [ identifier[nSegmentsThisCell] ]+= literal[int]
keyword[else] :
identifier[distNSegsPerCell] [ identifier[nSegmentsThisCell] ]= literal[int]
keyword[for] identifier[seg] keyword[in] identifier[self] . identifier[cells] [ identifier[c] ][ identifier[i] ]:
identifier[nSynapsesThisSeg] = identifier[seg] . identifier[getNumSynapses] ()
identifier[nSynapses] += identifier[nSynapsesThisSeg]
keyword[if] identifier[distSegSizes] . identifier[has_key] ( identifier[nSynapsesThisSeg] ):
identifier[distSegSizes] [ identifier[nSynapsesThisSeg] ]+= literal[int]
keyword[else] :
identifier[distSegSizes] [ identifier[nSynapsesThisSeg] ]= literal[int]
keyword[for] identifier[syn] keyword[in] identifier[seg] . identifier[syns] :
identifier[p] = identifier[int] ( identifier[syn] [ literal[int] ]* literal[int] )
keyword[if] identifier[distPermValues] . identifier[has_key] ( identifier[p] ):
identifier[distPermValues] [ identifier[p] ]+= literal[int]
keyword[else] :
identifier[distPermValues] [ identifier[p] ]= literal[int]
identifier[age] = identifier[self] . identifier[lrnIterationIdx] - identifier[seg] . identifier[lastActiveIteration]
identifier[ageBucket] = identifier[int] ( identifier[age] / identifier[ageBucketSize] )
identifier[distAges] [ identifier[ageBucket] ][ literal[int] ]+= literal[int]
keyword[if] identifier[collectActiveData] :
keyword[if] identifier[self] . identifier[isSegmentActive] ( identifier[seg] , identifier[self] . identifier[infActiveState] [ literal[string] ]):
identifier[nActiveSegs] += literal[int]
keyword[for] identifier[syn] keyword[in] identifier[seg] . identifier[syns] :
keyword[if] identifier[self] . identifier[activeState] [ literal[string] ][ identifier[syn] [ literal[int] ]][ identifier[syn] [ literal[int] ]]== literal[int] :
identifier[nActiveSynapses] += literal[int]
keyword[return] ( identifier[nSegments] , identifier[nSynapses] , identifier[nActiveSegs] , identifier[nActiveSynapses] ,
identifier[distSegSizes] , identifier[distNSegsPerCell] , identifier[distPermValues] , identifier[distAges] ) | def getSegmentInfo(self, collectActiveData=False):
"""Returns information about the distribution of segments, synapses and
permanence values in the current TP. If requested, also returns information
regarding the number of currently active segments and synapses.
The method returns the following tuple:
(
nSegments, # total number of segments
nSynapses, # total number of synapses
nActiveSegs, # total no. of active segments
nActiveSynapses, # total no. of active synapses
distSegSizes, # a dict where d[n] = number of segments with n synapses
distNSegsPerCell, # a dict where d[n] = number of cells with n segments
distPermValues, # a dict where d[p] = number of synapses with perm = p/10
distAges, # a list of tuples (ageRange, numSegments)
)
nActiveSegs and nActiveSynapses are 0 if collectActiveData is False
"""
(nSegments, nSynapses) = (0, 0)
(nActiveSegs, nActiveSynapses) = (0, 0)
(distSegSizes, distNSegsPerCell) = ({}, {})
distPermValues = {} # Num synapses with given permanence values
numAgeBuckets = 20
distAges = []
ageBucketSize = int((self.lrnIterationIdx + 20) / 20)
for i in range(numAgeBuckets):
distAges.append(['%d-%d' % (i * ageBucketSize, (i + 1) * ageBucketSize - 1), 0]) # depends on [control=['for'], data=['i']]
for c in xrange(self.numberOfCols):
for i in xrange(self.cellsPerColumn):
if len(self.cells[c][i]) > 0:
nSegmentsThisCell = len(self.cells[c][i])
nSegments += nSegmentsThisCell
if distNSegsPerCell.has_key(nSegmentsThisCell):
distNSegsPerCell[nSegmentsThisCell] += 1 # depends on [control=['if'], data=[]]
else:
distNSegsPerCell[nSegmentsThisCell] = 1
for seg in self.cells[c][i]:
nSynapsesThisSeg = seg.getNumSynapses()
nSynapses += nSynapsesThisSeg
if distSegSizes.has_key(nSynapsesThisSeg):
distSegSizes[nSynapsesThisSeg] += 1 # depends on [control=['if'], data=[]]
else:
distSegSizes[nSynapsesThisSeg] = 1
# Accumulate permanence value histogram
for syn in seg.syns:
p = int(syn[2] * 10)
if distPermValues.has_key(p):
distPermValues[p] += 1 # depends on [control=['if'], data=[]]
else:
distPermValues[p] = 1 # depends on [control=['for'], data=['syn']]
# Accumulate segment age histogram
age = self.lrnIterationIdx - seg.lastActiveIteration
ageBucket = int(age / ageBucketSize)
distAges[ageBucket][1] += 1
# Get active synapse statistics if requested
if collectActiveData:
if self.isSegmentActive(seg, self.infActiveState['t']):
nActiveSegs += 1 # depends on [control=['if'], data=[]]
for syn in seg.syns:
if self.activeState['t'][syn[0]][syn[1]] == 1:
nActiveSynapses += 1 # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['syn']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['seg']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']] # depends on [control=['for'], data=['c']]
return (nSegments, nSynapses, nActiveSegs, nActiveSynapses, distSegSizes, distNSegsPerCell, distPermValues, distAges) |
def bib_keys(self):
"""List of all bib keys in the document (and input documents)."""
bib_keys = []
# Get bib keys in this document
for match in texutils.cite_pattern.finditer(self.text):
keys = match.group(5).split(',')
bib_keys += keys
# Recursion
for path, document in self._children.iteritems():
bib_keys += document.bib_keys
bib_keys = list(set(bib_keys))
return bib_keys | def function[bib_keys, parameter[self]]:
constant[List of all bib keys in the document (and input documents).]
variable[bib_keys] assign[=] list[[]]
for taget[name[match]] in starred[call[name[texutils].cite_pattern.finditer, parameter[name[self].text]]] begin[:]
variable[keys] assign[=] call[call[name[match].group, parameter[constant[5]]].split, parameter[constant[,]]]
<ast.AugAssign object at 0x7da2041dbaf0>
for taget[tuple[[<ast.Name object at 0x7da2041da440>, <ast.Name object at 0x7da2041db610>]]] in starred[call[name[self]._children.iteritems, parameter[]]] begin[:]
<ast.AugAssign object at 0x7da2041da800>
variable[bib_keys] assign[=] call[name[list], parameter[call[name[set], parameter[name[bib_keys]]]]]
return[name[bib_keys]] | keyword[def] identifier[bib_keys] ( identifier[self] ):
literal[string]
identifier[bib_keys] =[]
keyword[for] identifier[match] keyword[in] identifier[texutils] . identifier[cite_pattern] . identifier[finditer] ( identifier[self] . identifier[text] ):
identifier[keys] = identifier[match] . identifier[group] ( literal[int] ). identifier[split] ( literal[string] )
identifier[bib_keys] += identifier[keys]
keyword[for] identifier[path] , identifier[document] keyword[in] identifier[self] . identifier[_children] . identifier[iteritems] ():
identifier[bib_keys] += identifier[document] . identifier[bib_keys]
identifier[bib_keys] = identifier[list] ( identifier[set] ( identifier[bib_keys] ))
keyword[return] identifier[bib_keys] | def bib_keys(self):
"""List of all bib keys in the document (and input documents)."""
bib_keys = []
# Get bib keys in this document
for match in texutils.cite_pattern.finditer(self.text):
keys = match.group(5).split(',')
bib_keys += keys # depends on [control=['for'], data=['match']]
# Recursion
for (path, document) in self._children.iteritems():
bib_keys += document.bib_keys # depends on [control=['for'], data=[]]
bib_keys = list(set(bib_keys))
return bib_keys |
def runner(
engine,
configfile,
output_vars,
interval,
pause,
mpi,
tracker,
port,
bmi_class
):
"""
run a BMI compatible model
"""
# keep track of info
# update mpi information or use rank 0
runner = mmi.runner.Runner(
engine=engine,
configfile=configfile,
output_vars=output_vars,
interval=interval,
pause=pause,
mpi=mpi,
tracker=tracker,
port=port,
bmi_class=bmi_class
)
runner.run() | def function[runner, parameter[engine, configfile, output_vars, interval, pause, mpi, tracker, port, bmi_class]]:
constant[
run a BMI compatible model
]
variable[runner] assign[=] call[name[mmi].runner.Runner, parameter[]]
call[name[runner].run, parameter[]] | keyword[def] identifier[runner] (
identifier[engine] ,
identifier[configfile] ,
identifier[output_vars] ,
identifier[interval] ,
identifier[pause] ,
identifier[mpi] ,
identifier[tracker] ,
identifier[port] ,
identifier[bmi_class]
):
literal[string]
identifier[runner] = identifier[mmi] . identifier[runner] . identifier[Runner] (
identifier[engine] = identifier[engine] ,
identifier[configfile] = identifier[configfile] ,
identifier[output_vars] = identifier[output_vars] ,
identifier[interval] = identifier[interval] ,
identifier[pause] = identifier[pause] ,
identifier[mpi] = identifier[mpi] ,
identifier[tracker] = identifier[tracker] ,
identifier[port] = identifier[port] ,
identifier[bmi_class] = identifier[bmi_class]
)
identifier[runner] . identifier[run] () | def runner(engine, configfile, output_vars, interval, pause, mpi, tracker, port, bmi_class):
"""
run a BMI compatible model
"""
# keep track of info
# update mpi information or use rank 0
runner = mmi.runner.Runner(engine=engine, configfile=configfile, output_vars=output_vars, interval=interval, pause=pause, mpi=mpi, tracker=tracker, port=port, bmi_class=bmi_class)
runner.run() |
def ready(self, node_id, metadata_priority=True):
"""Check whether a node is connected and ok to send more requests.
Arguments:
node_id (int): the id of the node to check
metadata_priority (bool): Mark node as not-ready if a metadata
refresh is required. Default: True
Returns:
bool: True if we are ready to send to the given node
"""
self.maybe_connect(node_id)
return self.is_ready(node_id, metadata_priority=metadata_priority) | def function[ready, parameter[self, node_id, metadata_priority]]:
constant[Check whether a node is connected and ok to send more requests.
Arguments:
node_id (int): the id of the node to check
metadata_priority (bool): Mark node as not-ready if a metadata
refresh is required. Default: True
Returns:
bool: True if we are ready to send to the given node
]
call[name[self].maybe_connect, parameter[name[node_id]]]
return[call[name[self].is_ready, parameter[name[node_id]]]] | keyword[def] identifier[ready] ( identifier[self] , identifier[node_id] , identifier[metadata_priority] = keyword[True] ):
literal[string]
identifier[self] . identifier[maybe_connect] ( identifier[node_id] )
keyword[return] identifier[self] . identifier[is_ready] ( identifier[node_id] , identifier[metadata_priority] = identifier[metadata_priority] ) | def ready(self, node_id, metadata_priority=True):
"""Check whether a node is connected and ok to send more requests.
Arguments:
node_id (int): the id of the node to check
metadata_priority (bool): Mark node as not-ready if a metadata
refresh is required. Default: True
Returns:
bool: True if we are ready to send to the given node
"""
self.maybe_connect(node_id)
return self.is_ready(node_id, metadata_priority=metadata_priority) |
def run():
"""CLI main entry point."""
# Use print() instead of logging when running in CLI mode:
set_pyftpsync_logger(None)
parser = argparse.ArgumentParser(
description="Synchronize folders over FTP.",
epilog="See also https://github.com/mar10/pyftpsync",
parents=[verbose_parser],
)
# Note: we want to allow --version to be combined with --verbose. However
# on Py2, argparse makes sub-commands mandatory, unless `action="version"` is used.
if check_cli_verbose(3) > 3:
version_info = "pyftpsync/{} Python/{} {}".format(
__version__, PYTHON_VERSION, platform.platform()
)
else:
version_info = "{}".format(__version__)
parser.add_argument("-V", "--version", action="version", version=version_info)
subparsers = parser.add_subparsers(help="sub-command help")
# --- Create the parser for the "upload" command ---------------------------
sp = subparsers.add_parser(
"upload",
parents=[verbose_parser, common_parser, matcher_parser, creds_parser],
help="copy new and modified files to remote folder",
)
sp.add_argument(
"local",
metavar="LOCAL",
default=".",
help="path to local folder (default: %(default)s)",
)
sp.add_argument("remote", metavar="REMOTE", help="path to remote folder")
sp.add_argument(
"--force",
action="store_true",
help="overwrite remote files, even if the target is newer "
"(but no conflict was detected)",
)
sp.add_argument(
"--resolve",
default="ask",
choices=["local", "skip", "ask"],
help="conflict resolving strategy (default: '%(default)s')",
)
sp.add_argument(
"--delete",
action="store_true",
help="remove remote files if they don't exist locally",
)
sp.add_argument(
"--delete-unmatched",
action="store_true",
help="remove remote files if they don't exist locally "
"or don't match the current filter (implies '--delete' option)",
)
sp.set_defaults(command="upload")
# --- Create the parser for the "download" command -------------------------
sp = subparsers.add_parser(
"download",
parents=[verbose_parser, common_parser, matcher_parser, creds_parser],
help="copy new and modified files from remote folder to local target",
)
sp.add_argument(
"local",
metavar="LOCAL",
default=".",
help="path to local folder (default: %(default)s)",
)
sp.add_argument("remote", metavar="REMOTE", help="path to remote folder")
sp.add_argument(
"--force",
action="store_true",
help="overwrite local files, even if the target is newer "
"(but no conflict was detected)",
)
sp.add_argument(
"--resolve",
default="ask",
choices=["remote", "skip", "ask"],
help="conflict resolving strategy (default: '%(default)s')",
)
sp.add_argument(
"--delete",
action="store_true",
help="remove local files if they don't exist on remote target",
)
sp.add_argument(
"--delete-unmatched",
action="store_true",
help="remove local files if they don't exist on remote target "
"or don't match the current filter (implies '--delete' option)",
)
sp.set_defaults(command="download")
# --- Create the parser for the "sync" command -----------------------------
sp = subparsers.add_parser(
"sync",
parents=[verbose_parser, common_parser, matcher_parser, creds_parser],
help="synchronize new and modified files between remote folder and local target",
)
sp.add_argument(
"local",
metavar="LOCAL",
default=".",
help="path to local folder (default: %(default)s)",
)
sp.add_argument("remote", metavar="REMOTE", help="path to remote folder")
sp.add_argument(
"--resolve",
default="ask",
choices=["old", "new", "local", "remote", "skip", "ask"],
help="conflict resolving strategy (default: '%(default)s')",
)
sp.set_defaults(command="sync")
# --- Create the parser for the "run" command -----------------------------
add_run_parser(subparsers)
# --- Create the parser for the "scan" command -----------------------------
add_scan_parser(subparsers)
# --- Parse command line ---------------------------------------------------
args = parser.parse_args()
args.verbose -= args.quiet
del args.quiet
# print("verbose", args.verbose)
ftp_debug = 0
if args.verbose >= 6:
ftp_debug = 1
# Modify the `args` from the `pyftpsync.yaml` config:
if getattr(args, "command", None) == "run":
handle_run_command(parser, args)
if callable(getattr(args, "command", None)):
# scan_handler
try:
return args.command(parser, args)
except KeyboardInterrupt:
print("\nAborted by user.", file=sys.stderr)
sys.exit(3)
elif not hasattr(args, "command"):
parser.error(
"missing command (choose from 'upload', 'download', 'run', 'sync', 'scan')"
)
# Post-process and check arguments
if hasattr(args, "delete_unmatched") and args.delete_unmatched:
args.delete = True
args.local_target = make_target(args.local, {"ftp_debug": ftp_debug})
if args.remote == ".":
parser.error("'.' is expected to be the local target (not remote)")
args.remote_target = make_target(args.remote, {"ftp_debug": ftp_debug})
if not isinstance(args.local_target, FsTarget) and isinstance(
args.remote_target, FsTarget
):
parser.error("a file system target is expected to be local")
# Let the command handler do its thing
opts = namespace_to_dict(args)
if args.command == "upload":
s = UploadSynchronizer(args.local_target, args.remote_target, opts)
elif args.command == "download":
s = DownloadSynchronizer(args.local_target, args.remote_target, opts)
elif args.command == "sync":
s = BiDirSynchronizer(args.local_target, args.remote_target, opts)
else:
parser.error("unknown command '{}'".format(args.command))
s.is_script = True
try:
s.run()
except KeyboardInterrupt:
print("\nAborted by user.", file=sys.stderr)
sys.exit(3)
finally:
# Prevent sporadic exceptions in ftplib, when closing in __del__
s.local.close()
s.remote.close()
stats = s.get_stats()
if args.verbose >= 5:
pprint(stats)
elif args.verbose >= 1:
if args.dry_run:
print("(DRY-RUN) ", end="")
print(
"Wrote {}/{} files in {} directories, skipped: {}.".format(
stats["files_written"],
stats["local_files"],
stats["local_dirs"],
stats["conflict_files_skipped"],
),
end="",
)
if stats["interactive_ask"]:
print()
else:
print(" Elap: {}.".format(stats["elap_str"]))
return | def function[run, parameter[]]:
constant[CLI main entry point.]
call[name[set_pyftpsync_logger], parameter[constant[None]]]
variable[parser] assign[=] call[name[argparse].ArgumentParser, parameter[]]
if compare[call[name[check_cli_verbose], parameter[constant[3]]] greater[>] constant[3]] begin[:]
variable[version_info] assign[=] call[constant[pyftpsync/{} Python/{} {}].format, parameter[name[__version__], name[PYTHON_VERSION], call[name[platform].platform, parameter[]]]]
call[name[parser].add_argument, parameter[constant[-V], constant[--version]]]
variable[subparsers] assign[=] call[name[parser].add_subparsers, parameter[]]
variable[sp] assign[=] call[name[subparsers].add_parser, parameter[constant[upload]]]
call[name[sp].add_argument, parameter[constant[local]]]
call[name[sp].add_argument, parameter[constant[remote]]]
call[name[sp].add_argument, parameter[constant[--force]]]
call[name[sp].add_argument, parameter[constant[--resolve]]]
call[name[sp].add_argument, parameter[constant[--delete]]]
call[name[sp].add_argument, parameter[constant[--delete-unmatched]]]
call[name[sp].set_defaults, parameter[]]
variable[sp] assign[=] call[name[subparsers].add_parser, parameter[constant[download]]]
call[name[sp].add_argument, parameter[constant[local]]]
call[name[sp].add_argument, parameter[constant[remote]]]
call[name[sp].add_argument, parameter[constant[--force]]]
call[name[sp].add_argument, parameter[constant[--resolve]]]
call[name[sp].add_argument, parameter[constant[--delete]]]
call[name[sp].add_argument, parameter[constant[--delete-unmatched]]]
call[name[sp].set_defaults, parameter[]]
variable[sp] assign[=] call[name[subparsers].add_parser, parameter[constant[sync]]]
call[name[sp].add_argument, parameter[constant[local]]]
call[name[sp].add_argument, parameter[constant[remote]]]
call[name[sp].add_argument, parameter[constant[--resolve]]]
call[name[sp].set_defaults, parameter[]]
call[name[add_run_parser], parameter[name[subparsers]]]
call[name[add_scan_parser], parameter[name[subparsers]]]
variable[args] assign[=] call[name[parser].parse_args, parameter[]]
<ast.AugAssign object at 0x7da1b050dcc0>
<ast.Delete object at 0x7da1b050f370>
variable[ftp_debug] assign[=] constant[0]
if compare[name[args].verbose greater_or_equal[>=] constant[6]] begin[:]
variable[ftp_debug] assign[=] constant[1]
if compare[call[name[getattr], parameter[name[args], constant[command], constant[None]]] equal[==] constant[run]] begin[:]
call[name[handle_run_command], parameter[name[parser], name[args]]]
if call[name[callable], parameter[call[name[getattr], parameter[name[args], constant[command], constant[None]]]]] begin[:]
<ast.Try object at 0x7da1b050c160>
if <ast.BoolOp object at 0x7da1b0405b10> begin[:]
name[args].delete assign[=] constant[True]
name[args].local_target assign[=] call[name[make_target], parameter[name[args].local, dictionary[[<ast.Constant object at 0x7da1b0406800>], [<ast.Name object at 0x7da1b0404490>]]]]
if compare[name[args].remote equal[==] constant[.]] begin[:]
call[name[parser].error, parameter[constant['.' is expected to be the local target (not remote)]]]
name[args].remote_target assign[=] call[name[make_target], parameter[name[args].remote, dictionary[[<ast.Constant object at 0x7da1b0406d10>], [<ast.Name object at 0x7da1b04063e0>]]]]
if <ast.BoolOp object at 0x7da1b0406ad0> begin[:]
call[name[parser].error, parameter[constant[a file system target is expected to be local]]]
variable[opts] assign[=] call[name[namespace_to_dict], parameter[name[args]]]
if compare[name[args].command equal[==] constant[upload]] begin[:]
variable[s] assign[=] call[name[UploadSynchronizer], parameter[name[args].local_target, name[args].remote_target, name[opts]]]
name[s].is_script assign[=] constant[True]
<ast.Try object at 0x7da1b0407310>
variable[stats] assign[=] call[name[s].get_stats, parameter[]]
if compare[name[args].verbose greater_or_equal[>=] constant[5]] begin[:]
call[name[pprint], parameter[name[stats]]]
return[None] | keyword[def] identifier[run] ():
literal[string]
identifier[set_pyftpsync_logger] ( keyword[None] )
identifier[parser] = identifier[argparse] . identifier[ArgumentParser] (
identifier[description] = literal[string] ,
identifier[epilog] = literal[string] ,
identifier[parents] =[ identifier[verbose_parser] ],
)
keyword[if] identifier[check_cli_verbose] ( literal[int] )> literal[int] :
identifier[version_info] = literal[string] . identifier[format] (
identifier[__version__] , identifier[PYTHON_VERSION] , identifier[platform] . identifier[platform] ()
)
keyword[else] :
identifier[version_info] = literal[string] . identifier[format] ( identifier[__version__] )
identifier[parser] . identifier[add_argument] ( literal[string] , literal[string] , identifier[action] = literal[string] , identifier[version] = identifier[version_info] )
identifier[subparsers] = identifier[parser] . identifier[add_subparsers] ( identifier[help] = literal[string] )
identifier[sp] = identifier[subparsers] . identifier[add_parser] (
literal[string] ,
identifier[parents] =[ identifier[verbose_parser] , identifier[common_parser] , identifier[matcher_parser] , identifier[creds_parser] ],
identifier[help] = literal[string] ,
)
identifier[sp] . identifier[add_argument] (
literal[string] ,
identifier[metavar] = literal[string] ,
identifier[default] = literal[string] ,
identifier[help] = literal[string] ,
)
identifier[sp] . identifier[add_argument] ( literal[string] , identifier[metavar] = literal[string] , identifier[help] = literal[string] )
identifier[sp] . identifier[add_argument] (
literal[string] ,
identifier[action] = literal[string] ,
identifier[help] = literal[string]
literal[string] ,
)
identifier[sp] . identifier[add_argument] (
literal[string] ,
identifier[default] = literal[string] ,
identifier[choices] =[ literal[string] , literal[string] , literal[string] ],
identifier[help] = literal[string] ,
)
identifier[sp] . identifier[add_argument] (
literal[string] ,
identifier[action] = literal[string] ,
identifier[help] = literal[string] ,
)
identifier[sp] . identifier[add_argument] (
literal[string] ,
identifier[action] = literal[string] ,
identifier[help] = literal[string]
literal[string] ,
)
identifier[sp] . identifier[set_defaults] ( identifier[command] = literal[string] )
identifier[sp] = identifier[subparsers] . identifier[add_parser] (
literal[string] ,
identifier[parents] =[ identifier[verbose_parser] , identifier[common_parser] , identifier[matcher_parser] , identifier[creds_parser] ],
identifier[help] = literal[string] ,
)
identifier[sp] . identifier[add_argument] (
literal[string] ,
identifier[metavar] = literal[string] ,
identifier[default] = literal[string] ,
identifier[help] = literal[string] ,
)
identifier[sp] . identifier[add_argument] ( literal[string] , identifier[metavar] = literal[string] , identifier[help] = literal[string] )
identifier[sp] . identifier[add_argument] (
literal[string] ,
identifier[action] = literal[string] ,
identifier[help] = literal[string]
literal[string] ,
)
identifier[sp] . identifier[add_argument] (
literal[string] ,
identifier[default] = literal[string] ,
identifier[choices] =[ literal[string] , literal[string] , literal[string] ],
identifier[help] = literal[string] ,
)
identifier[sp] . identifier[add_argument] (
literal[string] ,
identifier[action] = literal[string] ,
identifier[help] = literal[string] ,
)
identifier[sp] . identifier[add_argument] (
literal[string] ,
identifier[action] = literal[string] ,
identifier[help] = literal[string]
literal[string] ,
)
identifier[sp] . identifier[set_defaults] ( identifier[command] = literal[string] )
identifier[sp] = identifier[subparsers] . identifier[add_parser] (
literal[string] ,
identifier[parents] =[ identifier[verbose_parser] , identifier[common_parser] , identifier[matcher_parser] , identifier[creds_parser] ],
identifier[help] = literal[string] ,
)
identifier[sp] . identifier[add_argument] (
literal[string] ,
identifier[metavar] = literal[string] ,
identifier[default] = literal[string] ,
identifier[help] = literal[string] ,
)
identifier[sp] . identifier[add_argument] ( literal[string] , identifier[metavar] = literal[string] , identifier[help] = literal[string] )
identifier[sp] . identifier[add_argument] (
literal[string] ,
identifier[default] = literal[string] ,
identifier[choices] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ],
identifier[help] = literal[string] ,
)
identifier[sp] . identifier[set_defaults] ( identifier[command] = literal[string] )
identifier[add_run_parser] ( identifier[subparsers] )
identifier[add_scan_parser] ( identifier[subparsers] )
identifier[args] = identifier[parser] . identifier[parse_args] ()
identifier[args] . identifier[verbose] -= identifier[args] . identifier[quiet]
keyword[del] identifier[args] . identifier[quiet]
identifier[ftp_debug] = literal[int]
keyword[if] identifier[args] . identifier[verbose] >= literal[int] :
identifier[ftp_debug] = literal[int]
keyword[if] identifier[getattr] ( identifier[args] , literal[string] , keyword[None] )== literal[string] :
identifier[handle_run_command] ( identifier[parser] , identifier[args] )
keyword[if] identifier[callable] ( identifier[getattr] ( identifier[args] , literal[string] , keyword[None] )):
keyword[try] :
keyword[return] identifier[args] . identifier[command] ( identifier[parser] , identifier[args] )
keyword[except] identifier[KeyboardInterrupt] :
identifier[print] ( literal[string] , identifier[file] = identifier[sys] . identifier[stderr] )
identifier[sys] . identifier[exit] ( literal[int] )
keyword[elif] keyword[not] identifier[hasattr] ( identifier[args] , literal[string] ):
identifier[parser] . identifier[error] (
literal[string]
)
keyword[if] identifier[hasattr] ( identifier[args] , literal[string] ) keyword[and] identifier[args] . identifier[delete_unmatched] :
identifier[args] . identifier[delete] = keyword[True]
identifier[args] . identifier[local_target] = identifier[make_target] ( identifier[args] . identifier[local] ,{ literal[string] : identifier[ftp_debug] })
keyword[if] identifier[args] . identifier[remote] == literal[string] :
identifier[parser] . identifier[error] ( literal[string] )
identifier[args] . identifier[remote_target] = identifier[make_target] ( identifier[args] . identifier[remote] ,{ literal[string] : identifier[ftp_debug] })
keyword[if] keyword[not] identifier[isinstance] ( identifier[args] . identifier[local_target] , identifier[FsTarget] ) keyword[and] identifier[isinstance] (
identifier[args] . identifier[remote_target] , identifier[FsTarget]
):
identifier[parser] . identifier[error] ( literal[string] )
identifier[opts] = identifier[namespace_to_dict] ( identifier[args] )
keyword[if] identifier[args] . identifier[command] == literal[string] :
identifier[s] = identifier[UploadSynchronizer] ( identifier[args] . identifier[local_target] , identifier[args] . identifier[remote_target] , identifier[opts] )
keyword[elif] identifier[args] . identifier[command] == literal[string] :
identifier[s] = identifier[DownloadSynchronizer] ( identifier[args] . identifier[local_target] , identifier[args] . identifier[remote_target] , identifier[opts] )
keyword[elif] identifier[args] . identifier[command] == literal[string] :
identifier[s] = identifier[BiDirSynchronizer] ( identifier[args] . identifier[local_target] , identifier[args] . identifier[remote_target] , identifier[opts] )
keyword[else] :
identifier[parser] . identifier[error] ( literal[string] . identifier[format] ( identifier[args] . identifier[command] ))
identifier[s] . identifier[is_script] = keyword[True]
keyword[try] :
identifier[s] . identifier[run] ()
keyword[except] identifier[KeyboardInterrupt] :
identifier[print] ( literal[string] , identifier[file] = identifier[sys] . identifier[stderr] )
identifier[sys] . identifier[exit] ( literal[int] )
keyword[finally] :
identifier[s] . identifier[local] . identifier[close] ()
identifier[s] . identifier[remote] . identifier[close] ()
identifier[stats] = identifier[s] . identifier[get_stats] ()
keyword[if] identifier[args] . identifier[verbose] >= literal[int] :
identifier[pprint] ( identifier[stats] )
keyword[elif] identifier[args] . identifier[verbose] >= literal[int] :
keyword[if] identifier[args] . identifier[dry_run] :
identifier[print] ( literal[string] , identifier[end] = literal[string] )
identifier[print] (
literal[string] . identifier[format] (
identifier[stats] [ literal[string] ],
identifier[stats] [ literal[string] ],
identifier[stats] [ literal[string] ],
identifier[stats] [ literal[string] ],
),
identifier[end] = literal[string] ,
)
keyword[if] identifier[stats] [ literal[string] ]:
identifier[print] ()
keyword[else] :
identifier[print] ( literal[string] . identifier[format] ( identifier[stats] [ literal[string] ]))
keyword[return] | def run():
"""CLI main entry point.""" # Use print() instead of logging when running in CLI mode:
set_pyftpsync_logger(None)
parser = argparse.ArgumentParser(description='Synchronize folders over FTP.', epilog='See also https://github.com/mar10/pyftpsync', parents=[verbose_parser]) # Note: we want to allow --version to be combined with --verbose. However
# on Py2, argparse makes sub-commands mandatory, unless `action="version"` is used.
if check_cli_verbose(3) > 3:
version_info = 'pyftpsync/{} Python/{} {}'.format(__version__, PYTHON_VERSION, platform.platform()) # depends on [control=['if'], data=[]]
else:
version_info = '{}'.format(__version__)
parser.add_argument('-V', '--version', action='version', version=version_info)
subparsers = parser.add_subparsers(help='sub-command help') # --- Create the parser for the "upload" command ---------------------------
sp = subparsers.add_parser('upload', parents=[verbose_parser, common_parser, matcher_parser, creds_parser], help='copy new and modified files to remote folder')
sp.add_argument('local', metavar='LOCAL', default='.', help='path to local folder (default: %(default)s)')
sp.add_argument('remote', metavar='REMOTE', help='path to remote folder')
sp.add_argument('--force', action='store_true', help='overwrite remote files, even if the target is newer (but no conflict was detected)')
sp.add_argument('--resolve', default='ask', choices=['local', 'skip', 'ask'], help="conflict resolving strategy (default: '%(default)s')")
sp.add_argument('--delete', action='store_true', help="remove remote files if they don't exist locally")
sp.add_argument('--delete-unmatched', action='store_true', help="remove remote files if they don't exist locally or don't match the current filter (implies '--delete' option)")
sp.set_defaults(command='upload') # --- Create the parser for the "download" command -------------------------
sp = subparsers.add_parser('download', parents=[verbose_parser, common_parser, matcher_parser, creds_parser], help='copy new and modified files from remote folder to local target')
sp.add_argument('local', metavar='LOCAL', default='.', help='path to local folder (default: %(default)s)')
sp.add_argument('remote', metavar='REMOTE', help='path to remote folder')
sp.add_argument('--force', action='store_true', help='overwrite local files, even if the target is newer (but no conflict was detected)')
sp.add_argument('--resolve', default='ask', choices=['remote', 'skip', 'ask'], help="conflict resolving strategy (default: '%(default)s')")
sp.add_argument('--delete', action='store_true', help="remove local files if they don't exist on remote target")
sp.add_argument('--delete-unmatched', action='store_true', help="remove local files if they don't exist on remote target or don't match the current filter (implies '--delete' option)")
sp.set_defaults(command='download') # --- Create the parser for the "sync" command -----------------------------
sp = subparsers.add_parser('sync', parents=[verbose_parser, common_parser, matcher_parser, creds_parser], help='synchronize new and modified files between remote folder and local target')
sp.add_argument('local', metavar='LOCAL', default='.', help='path to local folder (default: %(default)s)')
sp.add_argument('remote', metavar='REMOTE', help='path to remote folder')
sp.add_argument('--resolve', default='ask', choices=['old', 'new', 'local', 'remote', 'skip', 'ask'], help="conflict resolving strategy (default: '%(default)s')")
sp.set_defaults(command='sync') # --- Create the parser for the "run" command -----------------------------
add_run_parser(subparsers) # --- Create the parser for the "scan" command -----------------------------
add_scan_parser(subparsers) # --- Parse command line ---------------------------------------------------
args = parser.parse_args()
args.verbose -= args.quiet
del args.quiet # print("verbose", args.verbose)
ftp_debug = 0
if args.verbose >= 6:
ftp_debug = 1 # depends on [control=['if'], data=[]] # Modify the `args` from the `pyftpsync.yaml` config:
if getattr(args, 'command', None) == 'run':
handle_run_command(parser, args) # depends on [control=['if'], data=[]]
if callable(getattr(args, 'command', None)): # scan_handler
try:
return args.command(parser, args) # depends on [control=['try'], data=[]]
except KeyboardInterrupt:
print('\nAborted by user.', file=sys.stderr)
sys.exit(3) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
elif not hasattr(args, 'command'):
parser.error("missing command (choose from 'upload', 'download', 'run', 'sync', 'scan')") # depends on [control=['if'], data=[]] # Post-process and check arguments
if hasattr(args, 'delete_unmatched') and args.delete_unmatched:
args.delete = True # depends on [control=['if'], data=[]]
args.local_target = make_target(args.local, {'ftp_debug': ftp_debug})
if args.remote == '.':
parser.error("'.' is expected to be the local target (not remote)") # depends on [control=['if'], data=[]]
args.remote_target = make_target(args.remote, {'ftp_debug': ftp_debug})
if not isinstance(args.local_target, FsTarget) and isinstance(args.remote_target, FsTarget):
parser.error('a file system target is expected to be local') # depends on [control=['if'], data=[]] # Let the command handler do its thing
opts = namespace_to_dict(args)
if args.command == 'upload':
s = UploadSynchronizer(args.local_target, args.remote_target, opts) # depends on [control=['if'], data=[]]
elif args.command == 'download':
s = DownloadSynchronizer(args.local_target, args.remote_target, opts) # depends on [control=['if'], data=[]]
elif args.command == 'sync':
s = BiDirSynchronizer(args.local_target, args.remote_target, opts) # depends on [control=['if'], data=[]]
else:
parser.error("unknown command '{}'".format(args.command))
s.is_script = True
try:
s.run() # depends on [control=['try'], data=[]]
except KeyboardInterrupt:
print('\nAborted by user.', file=sys.stderr)
sys.exit(3) # depends on [control=['except'], data=[]]
finally: # Prevent sporadic exceptions in ftplib, when closing in __del__
s.local.close()
s.remote.close()
stats = s.get_stats()
if args.verbose >= 5:
pprint(stats) # depends on [control=['if'], data=[]]
elif args.verbose >= 1:
if args.dry_run:
print('(DRY-RUN) ', end='') # depends on [control=['if'], data=[]]
print('Wrote {}/{} files in {} directories, skipped: {}.'.format(stats['files_written'], stats['local_files'], stats['local_dirs'], stats['conflict_files_skipped']), end='')
if stats['interactive_ask']:
print() # depends on [control=['if'], data=[]]
else:
print(' Elap: {}.'.format(stats['elap_str'])) # depends on [control=['if'], data=[]]
return |
def update(self, key, value):
    """Store *value* under *key* in the trie.

    :param key: a string
    :value: a string
    """
    if not is_string(key):
        raise Exception("Key must be string")
    if not is_string(value):
        raise Exception("Value must be string")
    # Snapshot the current root so the old root hash can be swapped
    # out for the freshly computed one after the write completes.
    previous_root = copy.deepcopy(self.root_node)
    nibble_path = bin_to_nibbles(to_string(key))
    self.root_node = self._update_and_delete_storage(
        self.root_node, nibble_path, to_string(value))
    self.replace_root_hash(previous_root, self.root_node)
constant[
:param key: a string
:value: a string
]
if <ast.UnaryOp object at 0x7da1b18c1c60> begin[:]
<ast.Raise object at 0x7da1b18c1b70>
if <ast.UnaryOp object at 0x7da1b18c0e20> begin[:]
<ast.Raise object at 0x7da1b18c1c30>
variable[old_root] assign[=] call[name[copy].deepcopy, parameter[name[self].root_node]]
name[self].root_node assign[=] call[name[self]._update_and_delete_storage, parameter[name[self].root_node, call[name[bin_to_nibbles], parameter[call[name[to_string], parameter[name[key]]]]], call[name[to_string], parameter[name[value]]]]]
call[name[self].replace_root_hash, parameter[name[old_root], name[self].root_node]] | keyword[def] identifier[update] ( identifier[self] , identifier[key] , identifier[value] ):
literal[string]
keyword[if] keyword[not] identifier[is_string] ( identifier[key] ):
keyword[raise] identifier[Exception] ( literal[string] )
keyword[if] keyword[not] identifier[is_string] ( identifier[value] ):
keyword[raise] identifier[Exception] ( literal[string] )
identifier[old_root] = identifier[copy] . identifier[deepcopy] ( identifier[self] . identifier[root_node] )
identifier[self] . identifier[root_node] = identifier[self] . identifier[_update_and_delete_storage] (
identifier[self] . identifier[root_node] ,
identifier[bin_to_nibbles] ( identifier[to_string] ( identifier[key] )),
identifier[to_string] ( identifier[value] ))
identifier[self] . identifier[replace_root_hash] ( identifier[old_root] , identifier[self] . identifier[root_node] ) | def update(self, key, value):
"""
:param key: a string
:value: a string
"""
if not is_string(key):
raise Exception('Key must be string') # depends on [control=['if'], data=[]]
# if len(key) > 32:
# raise Exception("Max key length is 32")
if not is_string(value):
raise Exception('Value must be string') # depends on [control=['if'], data=[]]
# if value == '':
# return self.delete(key)
old_root = copy.deepcopy(self.root_node)
self.root_node = self._update_and_delete_storage(self.root_node, bin_to_nibbles(to_string(key)), to_string(value))
self.replace_root_hash(old_root, self.root_node) |
def remove_metadata_key(self, obj, key):
    """
    Removes the specified key from the object's metadata. If the key does
    not exist in the metadata, nothing is done.
    """
    # Setting a key to the empty string is the backend's way of
    # deleting it; delegate the actual call to set_metadata().
    return self.set_metadata(obj, {key: ""})
constant[
Removes the specified key from the object's metadata. If the key does
not exist in the metadata, nothing is done.
]
variable[meta_dict] assign[=] dictionary[[<ast.Name object at 0x7da1b056da80>], [<ast.Constant object at 0x7da1b056c340>]]
return[call[name[self].set_metadata, parameter[name[obj], name[meta_dict]]]] | keyword[def] identifier[remove_metadata_key] ( identifier[self] , identifier[obj] , identifier[key] ):
literal[string]
identifier[meta_dict] ={ identifier[key] : literal[string] }
keyword[return] identifier[self] . identifier[set_metadata] ( identifier[obj] , identifier[meta_dict] ) | def remove_metadata_key(self, obj, key):
"""
Removes the specified key from the object's metadata. If the key does
not exist in the metadata, nothing is done.
"""
meta_dict = {key: ''}
return self.set_metadata(obj, meta_dict) |
def jsonify(obj):  # pylint: disable=too-many-return-statements
    """Return a JSON-encodable representation of an object, recursively using
    any available ``to_json`` methods, converting NumPy arrays and datatypes to
    native lists and types along the way.
    """
    # Call the `to_json` method if available and add metadata.
    if hasattr(obj, 'to_json'):
        d = obj.to_json()
        _push_metadata(d, obj)
        return jsonify(d)
    # If we have a numpy array, convert it to a (nested) list.
    if isinstance(obj, np.ndarray):
        return obj.tolist()
    # If we have NumPy scalars, convert them to native types. Use the
    # abstract np.integer / np.floating bases so *every* width is covered
    # (the previous int32/int64/float64 checks let e.g. np.float32 and
    # np.int16 fall through unconverted, which is not JSON-serializable).
    if isinstance(obj, np.integer):
        return int(obj)
    if isinstance(obj, np.floating):
        return float(obj)
    # Recurse over dictionaries.
    if isinstance(obj, dict):
        return _jsonify_dict(obj)
    # Recurse over object dictionaries.
    if hasattr(obj, '__dict__'):
        return _jsonify_dict(obj.__dict__)
    # Recurse over lists and tuples (tuples become lists, as in JSON).
    if isinstance(obj, (list, tuple)):
        return [jsonify(item) for item in obj]
    # Otherwise, give up and hope it's serializable.
    return obj
constant[Return a JSON-encodable representation of an object, recursively using
any available ``to_json`` methods, converting NumPy arrays and datatypes to
native lists and types along the way.
]
if call[name[hasattr], parameter[name[obj], constant[to_json]]] begin[:]
variable[d] assign[=] call[name[obj].to_json, parameter[]]
call[name[_push_metadata], parameter[name[d], name[obj]]]
return[call[name[jsonify], parameter[name[d]]]]
if call[name[isinstance], parameter[name[obj], name[np].ndarray]] begin[:]
return[call[name[obj].tolist, parameter[]]]
if call[name[isinstance], parameter[name[obj], tuple[[<ast.Attribute object at 0x7da207f98e20>, <ast.Attribute object at 0x7da207f994e0>]]]] begin[:]
return[call[name[int], parameter[name[obj]]]]
if call[name[isinstance], parameter[name[obj], name[np].float64]] begin[:]
return[call[name[float], parameter[name[obj]]]]
if call[name[isinstance], parameter[name[obj], name[dict]]] begin[:]
return[call[name[_jsonify_dict], parameter[name[obj]]]]
if call[name[hasattr], parameter[name[obj], constant[__dict__]]] begin[:]
return[call[name[_jsonify_dict], parameter[name[obj].__dict__]]]
if call[name[isinstance], parameter[name[obj], tuple[[<ast.Name object at 0x7da207f9a170>, <ast.Name object at 0x7da207f98640>]]]] begin[:]
return[<ast.ListComp object at 0x7da207f9b0a0>]
return[name[obj]] | keyword[def] identifier[jsonify] ( identifier[obj] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[obj] , literal[string] ):
identifier[d] = identifier[obj] . identifier[to_json] ()
identifier[_push_metadata] ( identifier[d] , identifier[obj] )
keyword[return] identifier[jsonify] ( identifier[d] )
keyword[if] identifier[isinstance] ( identifier[obj] , identifier[np] . identifier[ndarray] ):
keyword[return] identifier[obj] . identifier[tolist] ()
keyword[if] identifier[isinstance] ( identifier[obj] ,( identifier[np] . identifier[int32] , identifier[np] . identifier[int64] )):
keyword[return] identifier[int] ( identifier[obj] )
keyword[if] identifier[isinstance] ( identifier[obj] , identifier[np] . identifier[float64] ):
keyword[return] identifier[float] ( identifier[obj] )
keyword[if] identifier[isinstance] ( identifier[obj] , identifier[dict] ):
keyword[return] identifier[_jsonify_dict] ( identifier[obj] )
keyword[if] identifier[hasattr] ( identifier[obj] , literal[string] ):
keyword[return] identifier[_jsonify_dict] ( identifier[obj] . identifier[__dict__] )
keyword[if] identifier[isinstance] ( identifier[obj] ,( identifier[list] , identifier[tuple] )):
keyword[return] [ identifier[jsonify] ( identifier[item] ) keyword[for] identifier[item] keyword[in] identifier[obj] ]
keyword[return] identifier[obj] | def jsonify(obj): # pylint: disable=too-many-return-statements
'Return a JSON-encodable representation of an object, recursively using\n any available ``to_json`` methods, converting NumPy arrays and datatypes to\n native lists and types along the way.\n '
# Call the `to_json` method if available and add metadata.
if hasattr(obj, 'to_json'):
d = obj.to_json()
_push_metadata(d, obj)
return jsonify(d) # depends on [control=['if'], data=[]]
# If we have a numpy array, convert it to a list.
if isinstance(obj, np.ndarray):
return obj.tolist() # depends on [control=['if'], data=[]]
# If we have NumPy datatypes, convert them to native types.
if isinstance(obj, (np.int32, np.int64)):
return int(obj) # depends on [control=['if'], data=[]]
if isinstance(obj, np.float64):
return float(obj) # depends on [control=['if'], data=[]]
# Recurse over dictionaries.
if isinstance(obj, dict):
return _jsonify_dict(obj) # depends on [control=['if'], data=[]]
# Recurse over object dictionaries.
if hasattr(obj, '__dict__'):
return _jsonify_dict(obj.__dict__) # depends on [control=['if'], data=[]]
# Recurse over lists and tuples.
if isinstance(obj, (list, tuple)):
return [jsonify(item) for item in obj] # depends on [control=['if'], data=[]]
# Otherwise, give up and hope it's serializable.
return obj |
def _add_to_publish_stack(self, exchange, routing_key, message, properties):
    """Temporarily add the message to the stack to publish to RabbitMQ
    :param str exchange: The exchange to publish to
    :param str routing_key: The routing key to publish with
    :param str message: The message body
    :param pika.BasicProperties: The message properties
    """
    global message_stack
    # Queue the full publish tuple; it is drained later when the
    # connection/channel is ready to send.
    pending = (exchange, routing_key, message, properties)
    message_stack.append(pending)
constant[Temporarily add the message to the stack to publish to RabbitMQ
:param str exchange: The exchange to publish to
:param str routing_key: The routing key to publish with
:param str message: The message body
:param pika.BasicProperties: The message properties
]
<ast.Global object at 0x7da1b2525180>
call[name[message_stack].append, parameter[tuple[[<ast.Name object at 0x7da1b2527850>, <ast.Name object at 0x7da1b25279a0>, <ast.Name object at 0x7da1b2524fd0>, <ast.Name object at 0x7da1b25252d0>]]]] | keyword[def] identifier[_add_to_publish_stack] ( identifier[self] , identifier[exchange] , identifier[routing_key] , identifier[message] , identifier[properties] ):
literal[string]
keyword[global] identifier[message_stack]
identifier[message_stack] . identifier[append] (( identifier[exchange] , identifier[routing_key] , identifier[message] , identifier[properties] )) | def _add_to_publish_stack(self, exchange, routing_key, message, properties):
"""Temporarily add the message to the stack to publish to RabbitMQ
:param str exchange: The exchange to publish to
:param str routing_key: The routing key to publish with
:param str message: The message body
:param pika.BasicProperties: The message properties
"""
global message_stack
message_stack.append((exchange, routing_key, message, properties)) |
def _set_directories(self):
    '''Initialize variables based on evidence about the directories.'''
    # Prefer an explicitly supplied initial directory; otherwise start
    # the discovery from the directory the tool was run in.
    # (PEP 8: compare against None with `is`, not `==`.)
    if self._dirs['initial'] is None:
        self._dirs['base'] = discover_base_dir(self._dirs['run'])
    else:
        self._dirs['base'] = discover_base_dir(self._dirs['initial'])
    # now, if 'base' is None (no base directory was found) then the only
    # allowed operation is init
    self._update_dirs_on_base()
    # we might have set the directory variables fine, but the tree
    # might not exist yet. _tree_ready is a flag for that.
    self._tree_ready = verify_dir_structure(self._dirs['base'])
    if self._tree_ready:
        self._read_site_config()
constant[Initialize variables based on evidence about the directories.]
if compare[call[name[self]._dirs][constant[initial]] equal[==] constant[None]] begin[:]
call[name[self]._dirs][constant[base]] assign[=] call[name[discover_base_dir], parameter[call[name[self]._dirs][constant[run]]]]
call[name[self]._update_dirs_on_base, parameter[]]
name[self]._tree_ready assign[=] call[name[verify_dir_structure], parameter[call[name[self]._dirs][constant[base]]]]
if name[self]._tree_ready begin[:]
call[name[self]._read_site_config, parameter[]] | keyword[def] identifier[_set_directories] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_dirs] [ literal[string] ]== keyword[None] :
identifier[self] . identifier[_dirs] [ literal[string] ]= identifier[discover_base_dir] ( identifier[self] . identifier[_dirs] [ literal[string] ])
keyword[else] :
identifier[self] . identifier[_dirs] [ literal[string] ]= identifier[discover_base_dir] ( identifier[self] . identifier[_dirs] [ literal[string] ])
identifier[self] . identifier[_update_dirs_on_base] ()
identifier[self] . identifier[_tree_ready] = identifier[verify_dir_structure] ( identifier[self] . identifier[_dirs] [ literal[string] ])
keyword[if] identifier[self] . identifier[_tree_ready] :
identifier[self] . identifier[_read_site_config] () | def _set_directories(self):
"""Initialize variables based on evidence about the directories."""
if self._dirs['initial'] == None:
self._dirs['base'] = discover_base_dir(self._dirs['run']) # depends on [control=['if'], data=[]]
else:
self._dirs['base'] = discover_base_dir(self._dirs['initial'])
# now, if 'base' is None (no base directory was found) then the only
# allowed operation is init
self._update_dirs_on_base()
# we might have set the directory variables fine, but the tree
# might not exist yet. _tree_ready is a flag for that.
self._tree_ready = verify_dir_structure(self._dirs['base'])
if self._tree_ready:
self._read_site_config() # depends on [control=['if'], data=[]] |
def samples(self, gp, Y_metadata):
"""
Returns a set of samples of observations based on a given value of the latent variable.
:param gp: latent variable
"""
N1, N2 = gp.shape
Ysim = np.zeros((N1,N2))
ind = Y_metadata['output_index'].flatten()
for j in np.unique(ind):
flt = ind==j
gp_filtered = gp[flt,:]
n1 = gp_filtered.shape[0]
lik = self.likelihoods_list[j]
_ysim = np.array([np.random.normal(lik.gp_link.transf(gpj), scale=np.sqrt(lik.variance), size=1) for gpj in gp_filtered.flatten()])
Ysim[flt,:] = _ysim.reshape(n1,N2)
return Ysim | def function[samples, parameter[self, gp, Y_metadata]]:
constant[
Returns a set of samples of observations based on a given value of the latent variable.
:param gp: latent variable
]
<ast.Tuple object at 0x7da1b21d7f40> assign[=] name[gp].shape
variable[Ysim] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Name object at 0x7da1b21d4e80>, <ast.Name object at 0x7da1b21d5810>]]]]
variable[ind] assign[=] call[call[name[Y_metadata]][constant[output_index]].flatten, parameter[]]
for taget[name[j]] in starred[call[name[np].unique, parameter[name[ind]]]] begin[:]
variable[flt] assign[=] compare[name[ind] equal[==] name[j]]
variable[gp_filtered] assign[=] call[name[gp]][tuple[[<ast.Name object at 0x7da1b21d54b0>, <ast.Slice object at 0x7da1b21d5de0>]]]
variable[n1] assign[=] call[name[gp_filtered].shape][constant[0]]
variable[lik] assign[=] call[name[self].likelihoods_list][name[j]]
variable[_ysim] assign[=] call[name[np].array, parameter[<ast.ListComp object at 0x7da1b21d5000>]]
call[name[Ysim]][tuple[[<ast.Name object at 0x7da20c6c6e90>, <ast.Slice object at 0x7da20c6c5cf0>]]] assign[=] call[name[_ysim].reshape, parameter[name[n1], name[N2]]]
return[name[Ysim]] | keyword[def] identifier[samples] ( identifier[self] , identifier[gp] , identifier[Y_metadata] ):
literal[string]
identifier[N1] , identifier[N2] = identifier[gp] . identifier[shape]
identifier[Ysim] = identifier[np] . identifier[zeros] (( identifier[N1] , identifier[N2] ))
identifier[ind] = identifier[Y_metadata] [ literal[string] ]. identifier[flatten] ()
keyword[for] identifier[j] keyword[in] identifier[np] . identifier[unique] ( identifier[ind] ):
identifier[flt] = identifier[ind] == identifier[j]
identifier[gp_filtered] = identifier[gp] [ identifier[flt] ,:]
identifier[n1] = identifier[gp_filtered] . identifier[shape] [ literal[int] ]
identifier[lik] = identifier[self] . identifier[likelihoods_list] [ identifier[j] ]
identifier[_ysim] = identifier[np] . identifier[array] ([ identifier[np] . identifier[random] . identifier[normal] ( identifier[lik] . identifier[gp_link] . identifier[transf] ( identifier[gpj] ), identifier[scale] = identifier[np] . identifier[sqrt] ( identifier[lik] . identifier[variance] ), identifier[size] = literal[int] ) keyword[for] identifier[gpj] keyword[in] identifier[gp_filtered] . identifier[flatten] ()])
identifier[Ysim] [ identifier[flt] ,:]= identifier[_ysim] . identifier[reshape] ( identifier[n1] , identifier[N2] )
keyword[return] identifier[Ysim] | def samples(self, gp, Y_metadata):
"""
Returns a set of samples of observations based on a given value of the latent variable.
:param gp: latent variable
"""
(N1, N2) = gp.shape
Ysim = np.zeros((N1, N2))
ind = Y_metadata['output_index'].flatten()
for j in np.unique(ind):
flt = ind == j
gp_filtered = gp[flt, :]
n1 = gp_filtered.shape[0]
lik = self.likelihoods_list[j]
_ysim = np.array([np.random.normal(lik.gp_link.transf(gpj), scale=np.sqrt(lik.variance), size=1) for gpj in gp_filtered.flatten()])
Ysim[flt, :] = _ysim.reshape(n1, N2) # depends on [control=['for'], data=['j']]
return Ysim |
def parse_header(input_array):
"""Parse the header and return it along with the input array minus the header.
:param input_array the array to parse
:return the codec, the length of the decoded array, the parameter and the remainder
of the array"""
codec = struct.unpack(mmtf.utils.constants.NUM_DICT[4], input_array[0:4])[0]
length = struct.unpack(mmtf.utils.constants.NUM_DICT[4], input_array[4:8])[0]
param = struct.unpack(mmtf.utils.constants.NUM_DICT[4], input_array[8:12])[0]
return codec,length,param,input_array[12:] | def function[parse_header, parameter[input_array]]:
constant[Parse the header and return it along with the input array minus the header.
:param input_array the array to parse
:return the codec, the length of the decoded array, the parameter and the remainder
of the array]
variable[codec] assign[=] call[call[name[struct].unpack, parameter[call[name[mmtf].utils.constants.NUM_DICT][constant[4]], call[name[input_array]][<ast.Slice object at 0x7da1b109aef0>]]]][constant[0]]
variable[length] assign[=] call[call[name[struct].unpack, parameter[call[name[mmtf].utils.constants.NUM_DICT][constant[4]], call[name[input_array]][<ast.Slice object at 0x7da1b109a740>]]]][constant[0]]
variable[param] assign[=] call[call[name[struct].unpack, parameter[call[name[mmtf].utils.constants.NUM_DICT][constant[4]], call[name[input_array]][<ast.Slice object at 0x7da1b1099000>]]]][constant[0]]
return[tuple[[<ast.Name object at 0x7da1b109ae00>, <ast.Name object at 0x7da1b1098df0>, <ast.Name object at 0x7da1b10990f0>, <ast.Subscript object at 0x7da1b109a080>]]] | keyword[def] identifier[parse_header] ( identifier[input_array] ):
literal[string]
identifier[codec] = identifier[struct] . identifier[unpack] ( identifier[mmtf] . identifier[utils] . identifier[constants] . identifier[NUM_DICT] [ literal[int] ], identifier[input_array] [ literal[int] : literal[int] ])[ literal[int] ]
identifier[length] = identifier[struct] . identifier[unpack] ( identifier[mmtf] . identifier[utils] . identifier[constants] . identifier[NUM_DICT] [ literal[int] ], identifier[input_array] [ literal[int] : literal[int] ])[ literal[int] ]
identifier[param] = identifier[struct] . identifier[unpack] ( identifier[mmtf] . identifier[utils] . identifier[constants] . identifier[NUM_DICT] [ literal[int] ], identifier[input_array] [ literal[int] : literal[int] ])[ literal[int] ]
keyword[return] identifier[codec] , identifier[length] , identifier[param] , identifier[input_array] [ literal[int] :] | def parse_header(input_array):
"""Parse the header and return it along with the input array minus the header.
:param input_array the array to parse
:return the codec, the length of the decoded array, the parameter and the remainder
of the array"""
codec = struct.unpack(mmtf.utils.constants.NUM_DICT[4], input_array[0:4])[0]
length = struct.unpack(mmtf.utils.constants.NUM_DICT[4], input_array[4:8])[0]
param = struct.unpack(mmtf.utils.constants.NUM_DICT[4], input_array[8:12])[0]
return (codec, length, param, input_array[12:]) |
def removeRedundantVerbChains( foundChains, removeOverlapping = True, removeSingleAraAndEi = False ):
''' Eemaldab yleliigsed verbiahelad: ahelad, mis katavad osaliselt v6i t2ielikult
teisi ahelaid (removeOverlapping == True), yhes6nalised 'ei' ja 'ära' ahelad (kui
removeSingleAraAndEi == True);
Yldiselt on nii, et ylekattuvaid ei tohiks palju olla, kuna fraaside laiendamisel
pyytakse alati kontrollida, et laiendus ei kattuks m6ne olemasoleva fraasiga;
Peamiselt tekivad ylekattuvused siis, kui morf analyysi on sattunud valed
finiitverbi analyysid (v6i analyysid on j22nud mitmesteks) ja seega tuvastatakse
osalausest rohkem finiitverbe, kui oleks vaja.
Heuristik: kahe ylekattuva puhul j2tame alles fraasi, mis algab eespool ning
m2rgime sellel OTHER_VERBS v22rtuseks True, mis m2rgib, et kontekstis on mingi
segadus teiste verbidega.
'''
toDelete = []
for i in range(len(foundChains)):
matchObj1 = foundChains[i]
if removeOverlapping:
for j in range(i+1, len(foundChains)):
matchObj2 = foundChains[j]
if matchObj1 != matchObj2 and matchObj1[CLAUSE_IDX] == matchObj2[CLAUSE_IDX]:
phrase1 = set(matchObj1[PHRASE])
phrase2 = set(matchObj2[PHRASE])
intersect = phrase1.intersection(phrase2)
if len(intersect) > 0:
# Yldiselt on nii, et ylekattuvaid ei tohiks olla, kuna fraaside laiendamisel
# pyytakse alati kontrollida, et laiendus ei kattuks m6ne olemasoleva fraasiga;
# Peamiselt tekivad ylekattuvused siis, kui morf analyysil on finiitverbi
# analyysidesse j22nud sisse mitmesused (v6i on sattunud valed analyysid) ja
# seega tuvastatakse osalausest rohkem finiitverbe, kui oleks vaja.
# Heuristik: j2tame alles fraasi, mis algab eespool ning lisame selle otsa
# kysim2rgi (kuna pole kindel, et asjad on korras)
minWid1 = min(matchObj1[PHRASE])
minWid2 = min(matchObj2[PHRASE])
if minWid1 < minWid2:
matchObj1[OTHER_VERBS] = True
toDelete.append(j)
else:
matchObj2[OTHER_VERBS] = True
toDelete.append(i)
if removeSingleAraAndEi:
if ( len(matchObj1[PATTERN])==1 and re.match('^(ei|ära)$', matchObj1[PATTERN][0]) ):
toDelete.append(i)
if toDelete:
if len(set(toDelete)) != len(toDelete):
toDelete = list(set(toDelete)) # Eemaldame duplikaadid
toDelete = [ foundChains[i] for i in toDelete ]
for verbObj in toDelete:
foundChains.remove(verbObj) | def function[removeRedundantVerbChains, parameter[foundChains, removeOverlapping, removeSingleAraAndEi]]:
constant[ Eemaldab yleliigsed verbiahelad: ahelad, mis katavad osaliselt v6i t2ielikult
teisi ahelaid (removeOverlapping == True), yhes6nalised 'ei' ja 'ära' ahelad (kui
removeSingleAraAndEi == True);
Yldiselt on nii, et ylekattuvaid ei tohiks palju olla, kuna fraaside laiendamisel
pyytakse alati kontrollida, et laiendus ei kattuks m6ne olemasoleva fraasiga;
Peamiselt tekivad ylekattuvused siis, kui morf analyysi on sattunud valed
finiitverbi analyysid (v6i analyysid on j22nud mitmesteks) ja seega tuvastatakse
osalausest rohkem finiitverbe, kui oleks vaja.
Heuristik: kahe ylekattuva puhul j2tame alles fraasi, mis algab eespool ning
m2rgime sellel OTHER_VERBS v22rtuseks True, mis m2rgib, et kontekstis on mingi
segadus teiste verbidega.
]
variable[toDelete] assign[=] list[[]]
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[foundChains]]]]]] begin[:]
variable[matchObj1] assign[=] call[name[foundChains]][name[i]]
if name[removeOverlapping] begin[:]
for taget[name[j]] in starred[call[name[range], parameter[binary_operation[name[i] + constant[1]], call[name[len], parameter[name[foundChains]]]]]] begin[:]
variable[matchObj2] assign[=] call[name[foundChains]][name[j]]
if <ast.BoolOp object at 0x7da20c9906d0> begin[:]
variable[phrase1] assign[=] call[name[set], parameter[call[name[matchObj1]][name[PHRASE]]]]
variable[phrase2] assign[=] call[name[set], parameter[call[name[matchObj2]][name[PHRASE]]]]
variable[intersect] assign[=] call[name[phrase1].intersection, parameter[name[phrase2]]]
if compare[call[name[len], parameter[name[intersect]]] greater[>] constant[0]] begin[:]
variable[minWid1] assign[=] call[name[min], parameter[call[name[matchObj1]][name[PHRASE]]]]
variable[minWid2] assign[=] call[name[min], parameter[call[name[matchObj2]][name[PHRASE]]]]
if compare[name[minWid1] less[<] name[minWid2]] begin[:]
call[name[matchObj1]][name[OTHER_VERBS]] assign[=] constant[True]
call[name[toDelete].append, parameter[name[j]]]
if name[removeSingleAraAndEi] begin[:]
if <ast.BoolOp object at 0x7da20c992a70> begin[:]
call[name[toDelete].append, parameter[name[i]]]
if name[toDelete] begin[:]
if compare[call[name[len], parameter[call[name[set], parameter[name[toDelete]]]]] not_equal[!=] call[name[len], parameter[name[toDelete]]]] begin[:]
variable[toDelete] assign[=] call[name[list], parameter[call[name[set], parameter[name[toDelete]]]]]
variable[toDelete] assign[=] <ast.ListComp object at 0x7da20c991870>
for taget[name[verbObj]] in starred[name[toDelete]] begin[:]
call[name[foundChains].remove, parameter[name[verbObj]]] | keyword[def] identifier[removeRedundantVerbChains] ( identifier[foundChains] , identifier[removeOverlapping] = keyword[True] , identifier[removeSingleAraAndEi] = keyword[False] ):
literal[string]
identifier[toDelete] =[]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[foundChains] )):
identifier[matchObj1] = identifier[foundChains] [ identifier[i] ]
keyword[if] identifier[removeOverlapping] :
keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[i] + literal[int] , identifier[len] ( identifier[foundChains] )):
identifier[matchObj2] = identifier[foundChains] [ identifier[j] ]
keyword[if] identifier[matchObj1] != identifier[matchObj2] keyword[and] identifier[matchObj1] [ identifier[CLAUSE_IDX] ]== identifier[matchObj2] [ identifier[CLAUSE_IDX] ]:
identifier[phrase1] = identifier[set] ( identifier[matchObj1] [ identifier[PHRASE] ])
identifier[phrase2] = identifier[set] ( identifier[matchObj2] [ identifier[PHRASE] ])
identifier[intersect] = identifier[phrase1] . identifier[intersection] ( identifier[phrase2] )
keyword[if] identifier[len] ( identifier[intersect] )> literal[int] :
identifier[minWid1] = identifier[min] ( identifier[matchObj1] [ identifier[PHRASE] ])
identifier[minWid2] = identifier[min] ( identifier[matchObj2] [ identifier[PHRASE] ])
keyword[if] identifier[minWid1] < identifier[minWid2] :
identifier[matchObj1] [ identifier[OTHER_VERBS] ]= keyword[True]
identifier[toDelete] . identifier[append] ( identifier[j] )
keyword[else] :
identifier[matchObj2] [ identifier[OTHER_VERBS] ]= keyword[True]
identifier[toDelete] . identifier[append] ( identifier[i] )
keyword[if] identifier[removeSingleAraAndEi] :
keyword[if] ( identifier[len] ( identifier[matchObj1] [ identifier[PATTERN] ])== literal[int] keyword[and] identifier[re] . identifier[match] ( literal[string] , identifier[matchObj1] [ identifier[PATTERN] ][ literal[int] ])):
identifier[toDelete] . identifier[append] ( identifier[i] )
keyword[if] identifier[toDelete] :
keyword[if] identifier[len] ( identifier[set] ( identifier[toDelete] ))!= identifier[len] ( identifier[toDelete] ):
identifier[toDelete] = identifier[list] ( identifier[set] ( identifier[toDelete] ))
identifier[toDelete] =[ identifier[foundChains] [ identifier[i] ] keyword[for] identifier[i] keyword[in] identifier[toDelete] ]
keyword[for] identifier[verbObj] keyword[in] identifier[toDelete] :
identifier[foundChains] . identifier[remove] ( identifier[verbObj] ) | def removeRedundantVerbChains(foundChains, removeOverlapping=True, removeSingleAraAndEi=False):
""" Eemaldab yleliigsed verbiahelad: ahelad, mis katavad osaliselt v6i t2ielikult
teisi ahelaid (removeOverlapping == True), yhes6nalised 'ei' ja 'ära' ahelad (kui
removeSingleAraAndEi == True);
Yldiselt on nii, et ylekattuvaid ei tohiks palju olla, kuna fraaside laiendamisel
pyytakse alati kontrollida, et laiendus ei kattuks m6ne olemasoleva fraasiga;
Peamiselt tekivad ylekattuvused siis, kui morf analyysi on sattunud valed
finiitverbi analyysid (v6i analyysid on j22nud mitmesteks) ja seega tuvastatakse
osalausest rohkem finiitverbe, kui oleks vaja.
Heuristik: kahe ylekattuva puhul j2tame alles fraasi, mis algab eespool ning
m2rgime sellel OTHER_VERBS v22rtuseks True, mis m2rgib, et kontekstis on mingi
segadus teiste verbidega.
"""
toDelete = []
for i in range(len(foundChains)):
matchObj1 = foundChains[i]
if removeOverlapping:
for j in range(i + 1, len(foundChains)):
matchObj2 = foundChains[j]
if matchObj1 != matchObj2 and matchObj1[CLAUSE_IDX] == matchObj2[CLAUSE_IDX]:
phrase1 = set(matchObj1[PHRASE])
phrase2 = set(matchObj2[PHRASE])
intersect = phrase1.intersection(phrase2)
if len(intersect) > 0: # Yldiselt on nii, et ylekattuvaid ei tohiks olla, kuna fraaside laiendamisel
# pyytakse alati kontrollida, et laiendus ei kattuks m6ne olemasoleva fraasiga;
# Peamiselt tekivad ylekattuvused siis, kui morf analyysil on finiitverbi
# analyysidesse j22nud sisse mitmesused (v6i on sattunud valed analyysid) ja
# seega tuvastatakse osalausest rohkem finiitverbe, kui oleks vaja.
# Heuristik: j2tame alles fraasi, mis algab eespool ning lisame selle otsa
# kysim2rgi (kuna pole kindel, et asjad on korras)
minWid1 = min(matchObj1[PHRASE])
minWid2 = min(matchObj2[PHRASE])
if minWid1 < minWid2:
matchObj1[OTHER_VERBS] = True
toDelete.append(j) # depends on [control=['if'], data=[]]
else:
matchObj2[OTHER_VERBS] = True
toDelete.append(i) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['j']] # depends on [control=['if'], data=[]]
if removeSingleAraAndEi:
if len(matchObj1[PATTERN]) == 1 and re.match('^(ei|ära)$', matchObj1[PATTERN][0]):
toDelete.append(i) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
if toDelete:
if len(set(toDelete)) != len(toDelete):
toDelete = list(set(toDelete)) # Eemaldame duplikaadid # depends on [control=['if'], data=[]]
toDelete = [foundChains[i] for i in toDelete]
for verbObj in toDelete:
foundChains.remove(verbObj) # depends on [control=['for'], data=['verbObj']] # depends on [control=['if'], data=[]] |
def GetHostMemUnmappedMB(self):
'''Undocumented.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetHostMemUnmappedMB(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS: raise VMGuestLibException(ret)
return counter.value | def function[GetHostMemUnmappedMB, parameter[self]]:
constant[Undocumented.]
variable[counter] assign[=] call[name[c_uint], parameter[]]
variable[ret] assign[=] call[name[vmGuestLib].VMGuestLib_GetHostMemUnmappedMB, parameter[name[self].handle.value, call[name[byref], parameter[name[counter]]]]]
if compare[name[ret] not_equal[!=] name[VMGUESTLIB_ERROR_SUCCESS]] begin[:]
<ast.Raise object at 0x7da20c796770>
return[name[counter].value] | keyword[def] identifier[GetHostMemUnmappedMB] ( identifier[self] ):
literal[string]
identifier[counter] = identifier[c_uint] ()
identifier[ret] = identifier[vmGuestLib] . identifier[VMGuestLib_GetHostMemUnmappedMB] ( identifier[self] . identifier[handle] . identifier[value] , identifier[byref] ( identifier[counter] ))
keyword[if] identifier[ret] != identifier[VMGUESTLIB_ERROR_SUCCESS] : keyword[raise] identifier[VMGuestLibException] ( identifier[ret] )
keyword[return] identifier[counter] . identifier[value] | def GetHostMemUnmappedMB(self):
"""Undocumented."""
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetHostMemUnmappedMB(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret) # depends on [control=['if'], data=['ret']]
return counter.value |
def check_vocab(instance, vocab, code):
"""Ensure that the open vocabulary specified by `vocab` is used properly.
This checks properties of objects specified in the appropriate `_USES`
dictionary to determine which properties SHOULD use the given vocabulary,
then checks that the values in those properties are from the vocabulary.
"""
vocab_uses = getattr(enums, vocab + "_USES")
for k in vocab_uses.keys():
if instance['type'] == k:
for prop in vocab_uses[k]:
if prop not in instance:
continue
vocab_ov = getattr(enums, vocab + "_OV")
if type(instance[prop]) is list:
is_in = set(instance[prop]).issubset(set(vocab_ov))
else:
is_in = instance[prop] in vocab_ov
if not is_in:
vocab_name = vocab.replace('_', '-').lower()
yield JSONError("%s contains a value not in the %s-ov "
"vocabulary." % (prop, vocab_name),
instance['id'], code) | def function[check_vocab, parameter[instance, vocab, code]]:
constant[Ensure that the open vocabulary specified by `vocab` is used properly.
This checks properties of objects specified in the appropriate `_USES`
dictionary to determine which properties SHOULD use the given vocabulary,
then checks that the values in those properties are from the vocabulary.
]
variable[vocab_uses] assign[=] call[name[getattr], parameter[name[enums], binary_operation[name[vocab] + constant[_USES]]]]
for taget[name[k]] in starred[call[name[vocab_uses].keys, parameter[]]] begin[:]
if compare[call[name[instance]][constant[type]] equal[==] name[k]] begin[:]
for taget[name[prop]] in starred[call[name[vocab_uses]][name[k]]] begin[:]
if compare[name[prop] <ast.NotIn object at 0x7da2590d7190> name[instance]] begin[:]
continue
variable[vocab_ov] assign[=] call[name[getattr], parameter[name[enums], binary_operation[name[vocab] + constant[_OV]]]]
if compare[call[name[type], parameter[call[name[instance]][name[prop]]]] is name[list]] begin[:]
variable[is_in] assign[=] call[call[name[set], parameter[call[name[instance]][name[prop]]]].issubset, parameter[call[name[set], parameter[name[vocab_ov]]]]]
if <ast.UnaryOp object at 0x7da1b0fc7b80> begin[:]
variable[vocab_name] assign[=] call[call[name[vocab].replace, parameter[constant[_], constant[-]]].lower, parameter[]]
<ast.Yield object at 0x7da1b0fc4340> | keyword[def] identifier[check_vocab] ( identifier[instance] , identifier[vocab] , identifier[code] ):
literal[string]
identifier[vocab_uses] = identifier[getattr] ( identifier[enums] , identifier[vocab] + literal[string] )
keyword[for] identifier[k] keyword[in] identifier[vocab_uses] . identifier[keys] ():
keyword[if] identifier[instance] [ literal[string] ]== identifier[k] :
keyword[for] identifier[prop] keyword[in] identifier[vocab_uses] [ identifier[k] ]:
keyword[if] identifier[prop] keyword[not] keyword[in] identifier[instance] :
keyword[continue]
identifier[vocab_ov] = identifier[getattr] ( identifier[enums] , identifier[vocab] + literal[string] )
keyword[if] identifier[type] ( identifier[instance] [ identifier[prop] ]) keyword[is] identifier[list] :
identifier[is_in] = identifier[set] ( identifier[instance] [ identifier[prop] ]). identifier[issubset] ( identifier[set] ( identifier[vocab_ov] ))
keyword[else] :
identifier[is_in] = identifier[instance] [ identifier[prop] ] keyword[in] identifier[vocab_ov]
keyword[if] keyword[not] identifier[is_in] :
identifier[vocab_name] = identifier[vocab] . identifier[replace] ( literal[string] , literal[string] ). identifier[lower] ()
keyword[yield] identifier[JSONError] ( literal[string]
literal[string] %( identifier[prop] , identifier[vocab_name] ),
identifier[instance] [ literal[string] ], identifier[code] ) | def check_vocab(instance, vocab, code):
"""Ensure that the open vocabulary specified by `vocab` is used properly.
This checks properties of objects specified in the appropriate `_USES`
dictionary to determine which properties SHOULD use the given vocabulary,
then checks that the values in those properties are from the vocabulary.
"""
vocab_uses = getattr(enums, vocab + '_USES')
for k in vocab_uses.keys():
if instance['type'] == k:
for prop in vocab_uses[k]:
if prop not in instance:
continue # depends on [control=['if'], data=[]]
vocab_ov = getattr(enums, vocab + '_OV')
if type(instance[prop]) is list:
is_in = set(instance[prop]).issubset(set(vocab_ov)) # depends on [control=['if'], data=[]]
else:
is_in = instance[prop] in vocab_ov
if not is_in:
vocab_name = vocab.replace('_', '-').lower()
yield JSONError('%s contains a value not in the %s-ov vocabulary.' % (prop, vocab_name), instance['id'], code) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['prop']] # depends on [control=['if'], data=['k']] # depends on [control=['for'], data=['k']] |
def _check_registers(self, report=True):
''' check if this state might be used in unicorn (has no concrete register)'''
for r in self.state.arch.uc_regs.keys():
v = getattr(self.state.regs, r)
processed_v = self._process_value(v, 'reg')
if processed_v is None or processed_v.symbolic:
#l.info('detected symbolic register %s', r)
if report:
self._report_symbolic_blocker(v, 'reg')
return False
if self.state.arch.vex_conditional_helpers:
flags = ccall._get_flags(self.state)[0]
processed_flags = self._process_value(flags, 'reg')
if processed_flags is None or processed_flags.symbolic:
#l.info("detected symbolic rflags/eflags")
if report:
self._report_symbolic_blocker(flags, 'reg')
return False
#l.debug('passed quick check')
return True | def function[_check_registers, parameter[self, report]]:
constant[ check if this state might be used in unicorn (has no concrete register)]
for taget[name[r]] in starred[call[name[self].state.arch.uc_regs.keys, parameter[]]] begin[:]
variable[v] assign[=] call[name[getattr], parameter[name[self].state.regs, name[r]]]
variable[processed_v] assign[=] call[name[self]._process_value, parameter[name[v], constant[reg]]]
if <ast.BoolOp object at 0x7da18f00c700> begin[:]
if name[report] begin[:]
call[name[self]._report_symbolic_blocker, parameter[name[v], constant[reg]]]
return[constant[False]]
if name[self].state.arch.vex_conditional_helpers begin[:]
variable[flags] assign[=] call[call[name[ccall]._get_flags, parameter[name[self].state]]][constant[0]]
variable[processed_flags] assign[=] call[name[self]._process_value, parameter[name[flags], constant[reg]]]
if <ast.BoolOp object at 0x7da18f00da20> begin[:]
if name[report] begin[:]
call[name[self]._report_symbolic_blocker, parameter[name[flags], constant[reg]]]
return[constant[False]]
return[constant[True]] | keyword[def] identifier[_check_registers] ( identifier[self] , identifier[report] = keyword[True] ):
literal[string]
keyword[for] identifier[r] keyword[in] identifier[self] . identifier[state] . identifier[arch] . identifier[uc_regs] . identifier[keys] ():
identifier[v] = identifier[getattr] ( identifier[self] . identifier[state] . identifier[regs] , identifier[r] )
identifier[processed_v] = identifier[self] . identifier[_process_value] ( identifier[v] , literal[string] )
keyword[if] identifier[processed_v] keyword[is] keyword[None] keyword[or] identifier[processed_v] . identifier[symbolic] :
keyword[if] identifier[report] :
identifier[self] . identifier[_report_symbolic_blocker] ( identifier[v] , literal[string] )
keyword[return] keyword[False]
keyword[if] identifier[self] . identifier[state] . identifier[arch] . identifier[vex_conditional_helpers] :
identifier[flags] = identifier[ccall] . identifier[_get_flags] ( identifier[self] . identifier[state] )[ literal[int] ]
identifier[processed_flags] = identifier[self] . identifier[_process_value] ( identifier[flags] , literal[string] )
keyword[if] identifier[processed_flags] keyword[is] keyword[None] keyword[or] identifier[processed_flags] . identifier[symbolic] :
keyword[if] identifier[report] :
identifier[self] . identifier[_report_symbolic_blocker] ( identifier[flags] , literal[string] )
keyword[return] keyword[False]
keyword[return] keyword[True] | def _check_registers(self, report=True):
""" check if this state might be used in unicorn (has no concrete register)"""
for r in self.state.arch.uc_regs.keys():
v = getattr(self.state.regs, r)
processed_v = self._process_value(v, 'reg')
if processed_v is None or processed_v.symbolic:
#l.info('detected symbolic register %s', r)
if report:
self._report_symbolic_blocker(v, 'reg') # depends on [control=['if'], data=[]]
return False # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['r']]
if self.state.arch.vex_conditional_helpers:
flags = ccall._get_flags(self.state)[0]
processed_flags = self._process_value(flags, 'reg')
if processed_flags is None or processed_flags.symbolic:
#l.info("detected symbolic rflags/eflags")
if report:
self._report_symbolic_blocker(flags, 'reg') # depends on [control=['if'], data=[]]
return False # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
#l.debug('passed quick check')
return True |
def update(self, interfaces=None):
"""
Method to update interface.
:param interfaces: List containing interface's desired to be updated on database.
:return: None.
"""
data = {'interfaces': interfaces}
return super(ApiInterfaceRequest, self).put('api/v3/interface/', data) | def function[update, parameter[self, interfaces]]:
constant[
Method to update interface.
:param interfaces: List containing interface's desired to be updated on database.
:return: None.
]
variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da2047e9600>], [<ast.Name object at 0x7da2047eb970>]]
return[call[call[name[super], parameter[name[ApiInterfaceRequest], name[self]]].put, parameter[constant[api/v3/interface/], name[data]]]] | keyword[def] identifier[update] ( identifier[self] , identifier[interfaces] = keyword[None] ):
literal[string]
identifier[data] ={ literal[string] : identifier[interfaces] }
keyword[return] identifier[super] ( identifier[ApiInterfaceRequest] , identifier[self] ). identifier[put] ( literal[string] , identifier[data] ) | def update(self, interfaces=None):
"""
Method to update interface.
:param interfaces: List containing interface's desired to be updated on database.
:return: None.
"""
data = {'interfaces': interfaces}
return super(ApiInterfaceRequest, self).put('api/v3/interface/', data) |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.